require "bearcat"
require "canvas_sync/version"
require "canvas_sync/engine"
require "canvas_sync/misc_helper"
require "canvas_sync/class_callback_executor"
require "canvas_sync/job"
require "canvas_sync/sidekiq_job"
require "canvas_sync/api_syncable"
require "canvas_sync/record"
require "canvas_sync/jobs/report_starter"
require "canvas_sync/jobs/report_checker"
require "canvas_sync/jobs/report_processor_job"
require "canvas_sync/batch_processor"
require "canvas_sync/config"
require "canvas_sync/job_batches/batch"

# Eagerly require every job, processor, importer, generator, and concern
# shipped with the gem so they are defined before any sync chain is built.
Dir[File.dirname(__FILE__) + "/canvas_sync/jobs/*.rb"].each { |file| require file }
Dir[File.dirname(__FILE__) + "/canvas_sync/processors/*.rb"].each { |file| require file }
Dir[File.dirname(__FILE__) + "/canvas_sync/importers/*.rb"].each { |file| require file }
Dir[File.dirname(__FILE__) + "/canvas_sync/generators/*.rb"].each { |file| require file }
Dir[File.dirname(__FILE__) + "/canvas_sync/concerns/**/*.rb"].each { |file| require file }

module CanvasSync
  # Every model name that may be passed to provisioning_sync / the
  # provisioning report chain. Validated by .validate_models!.
  SUPPORTED_MODELS = %w[
    users
    pseudonyms
    courses
    groups
    group_memberships
    accounts
    terms
    enrollments
    sections
    assignments
    submissions
    roles
    admins
    assignment_groups
    context_modules
    context_module_items
    xlist
    user_observers
    grading_periods
    grading_period_groups
    content_migrations
    learning_outcomes
    learning_outcome_results
    course_nicknames
    rubrics
    rubric_associations
    rubric_assessments
  ].freeze

  # Subset of SUPPORTED_MODELS whose sync jobs CAN be scoped to a term.
  SUPPORTED_TERM_SCOPE_MODELS = %w[
    assignments
    submissions
    assignment_groups
    context_modules
    context_module_items
    rubrics
    rubric_associations
    rubric_assessments
  ].freeze

  # Models that ARE term-scoped by default when a term_scope is given.
  # NOTE(review): currently identical to SUPPORTED_TERM_SCOPE_MODELS, but kept
  # as a separate constant so callers can override via term_scoped_models:.
  DEFAULT_TERM_SCOPE_MODELS = %w[
    assignments
    submissions
    assignment_groups
    context_modules
    context_module_items
    rubrics
    rubric_associations
    rubric_assessments
  ].freeze

  # Live event names accepted by .validate_live_events!.
  SUPPORTED_LIVE_EVENTS = %w[
    course
    enrollment
    submission
    assignment
    user
    syllabus
    grade
    module
    module_item
    course_section
  ].freeze

  # Non-provisioning report types the gem knows how to process.
  SUPPORTED_NON_PROV_REPORTS = %w[
    graded_submissions
  ].freeze

  # Register chainable jobs with the batch ChainBuilder (sub-jobs nest under
  # the :sub_jobs argument of the terms sync job).
  JobBatches::ChainBuilder.register_chain_job(CanvasSync::Jobs::SyncTermsJob, :sub_jobs)
JobBatches::ChainBuilder.register_chain_job(CanvasSync::Jobs::TermBatchesJob, :sub_jobs) JobBatches::ChainBuilder.register_chain_job(CanvasSync::Jobs::BeginSyncChainJob, 0) class << self # Runs a standard provisioning sync job with no extra report types. # Terms will be synced first using the API. If you are syncing users/roles/admins # and have also specified a Term scope, Users/Roles/Admins will by synced first, before # every other model (as Users/Roles/Admins are never scoped to Term). # # @param models [Array] A list of models to sync. e.g., ['users', 'courses']. # must be one of SUPPORTED_MODELS # @param term_scope [Symbol, nil] An optional symbol representing a scope that exists on the Term model. # The provisioning report will be run for each of the terms contained in that scope. # @param legacy_support [Boolean | Array, false] This enables legacy_support, where rows are not bulk inserted. # For this to work your models must have a `create_or_udpate_from_csv` class method that takes a row # and inserts it into the database. If an array of model names is provided then only those models will use legacy support. # @param account_id [Integer, nil] This optional parameter can be used if your Term creation and # canvas_sync_client methods require an account ID. def provisioning_sync(models, **kwargs) validate_models!(models) default_provisioning_report_chain(models, **kwargs).process! 
end # Given a Model or Relation, scope it down to items that should be synced def sync_scope(scope) terms = %i[should_canvas_sync active_for_canvas_sync should_sync active_for_sync active] terms.each do |t| return scope.send(t) if scope.respond_to?(t) end model = scope.try(:model) || scope if model.try(:column_names)&.include?(:workflow_state) return scope.where.not(workflow_state: %w[deleted]) end Rails.logger.warn("Could not filter Syncable Scope for model '#{scope.try(:model)&.name || scope.name}'") scope end # Syncs terms, users/roles/admins if necessary, then the rest of the specified models. # # @param models [Array] # @param term_scope [String] # @param legacy_support [Boolean, false] This enables legacy_support, where rows are not bulk inserted. # For this to work your models must have a `create_or_udpate_from_csv` class method that takes a row # and inserts it into the database. # @param account_id [Integer, nil] This optional parameter can be used if your Term creation and # canvas_sync_client methods require an account ID. # @return [Hash] def default_provisioning_report_chain( models, term_scope: nil, term_scoped_models: DEFAULT_TERM_SCOPE_MODELS, options: {}, **kwargs ) # rubocop:disable Metrics/CyclomaticComplexity, Metrics/PerceivedComplexity, Metrics/LineLength return unless models.present? models.map! &:to_s term_scope = term_scope.to_s if term_scope options = options.deep_symbolize_keys! 
model_job_map = { terms: CanvasSync::Jobs::SyncTermsJob, accounts: CanvasSync::Jobs::SyncAccountsJob, roles: CanvasSync::Jobs::SyncRolesJob, admins: CanvasSync::Jobs::SyncAdminsJob, assignments: CanvasSync::Jobs::SyncAssignmentsJob, submissions: CanvasSync::Jobs::SyncSubmissionsJob, assignment_groups: CanvasSync::Jobs::SyncAssignmentGroupsJob, context_modules: CanvasSync::Jobs::SyncContextModulesJob, context_module_items: CanvasSync::Jobs::SyncContextModuleItemsJob, content_migrations: CanvasSync::Jobs::SyncContentMigrationsJob, rubrics: CanvasSync::Jobs::SyncRubricsJob, rubric_associations: CanvasSync::Jobs::SyncRubricAssociationsJob, rubric_assessments: CanvasSync::Jobs::SyncRubricAssessmentsJob, }.with_indifferent_access root_chain = base_canvas_sync_chain(**kwargs, globals: options[:global] || kwargs[:globals]) concurrent_root_chain = JobBatches::ChainBuilder.new(JobBatches::ConcurrentBatchJob) root_chain << concurrent_root_chain current_chain = concurrent_root_chain try_add_model_job = ->(model) { return unless models.include?(model) current_chain << { job: model_job_map[model].to_s, options: options[model.to_sym] || {} } models -= [model] } ############################## # General provisioning jobs (not term-scoped) ############################## # Accounts, users, roles, and admins cannot be scoped to term try_add_model_job.call('accounts') # These Models use the provisioning report, but are not term-scoped, # so we sync them outside of the term scoping to ensure work is not duplicated if term_scope.present? 
models -= (first_provisioning_models = models & ['users', 'pseudonyms', 'user_observers', 'grading_periods', 'grading_period_groups']) current_chain.insert(generate_provisioning_jobs(first_provisioning_models, options)) end try_add_model_job.call('roles') try_add_model_job.call('admins') (SUPPORTED_TERM_SCOPE_MODELS - term_scoped_models).each do |mdl| try_add_model_job.call(mdl) end ############################### # Per-term provisioning jobs ############################### term_parent_chain = current_chain per_term_chain = JobBatches::ChainBuilder.build(model_job_map[:terms], term_scope: term_scope) current_chain = per_term_chain term_scoped_models.each do |mdl| try_add_model_job.call(mdl) end current_chain.insert( generate_provisioning_jobs(models - ['terms'], options) ) # Skip syncing terms if not required if !current_chain.empty? || (models & ['terms']).present? term_parent_chain << per_term_chain end ############################### # Wrap it all up ############################### root_chain end def base_canvas_sync_chain( legacy_support: false, # Import records 1 by 1 instead of with bulk upserts account_id: nil, # legacy/non PandaPal apps updated_after: nil, full_sync_every: nil, batch_genre: nil, globals: {}, &blk ) global_options = { legacy_support: legacy_support, updated_after: updated_after, full_sync_every: full_sync_every, batch_genre: batch_genre, } global_options[:account_id] = account_id if account_id.present? global_options.merge!(globals) if globals JobBatches::ChainBuilder.build(CanvasSync::Jobs::BeginSyncChainJob, [], global_options, &blk) end def group_by_job_options(model_list, options_hash, only_split: nil, default_key: :provisioning) dup_models = [ *model_list ] unique_option_models = {} filtered_models = only_split ? 
(only_split & model_list) : model_list filtered_models.each do |m| mopts = options_hash[m.to_sym] || options_hash[default_key] unique_option_models[mopts] ||= [] unique_option_models[mopts] << m dup_models.delete(m) end if dup_models.present? mopts = options_hash[default_key] unique_option_models[mopts] ||= [] unique_option_models[mopts].concat(dup_models) end unique_option_models end def generate_provisioning_jobs(model_list, options_hash, job_options: {}, only_split: nil, default_key: :provisioning) # Group the model options as best we can. # This is mainly for backwards compatibility, since 'users' was previously it's own job unique_option_models = group_by_job_options( model_list, options_hash, only_split: only_split, default_key: default_key, ) unique_option_models.map do |mopts, models| opts = { models: models } opts.merge!(job_options) opts.merge!(mopts) if mopts.present? { job: CanvasSync::Jobs::SyncProvisioningReportJob.to_s, options: opts, } end end # Calls the canvas_sync_client in your app. If you have specified an account # ID when starting the job it will pass the account ID to your canvas_sync_client method. # # @param options [Hash] def get_canvas_sync_client(options) if options[:account_id] canvas_sync_client(options[:account_id]) else canvas_sync_client end end # Configure options for CanvasSync. See config.rb for valid configuration options. # # Example: # # CanvasSync.configure do |config| # config.classes_to_only_log_errors_on << "Blah" # end def configure yield config config end # Returns the CanvasSync config def config @config ||= CanvasSync::Config.new end def validate_models!(models) invalid = models - SUPPORTED_MODELS return if invalid.empty? raise "Invalid model(s) specified: #{invalid.join(', ')}. Only #{SUPPORTED_MODELS.join(', ')} are supported." end def validate_live_events!(events) invalid = events - SUPPORTED_LIVE_EVENTS return if invalid.empty? raise "Invalid live event(s) specified: #{invalid.join(', ')}. 
Only #{SUPPORTED_LIVE_EVENTS.join(', ')} are supported." end def logger return @logger if defined? @logger @logger = Logger.new(STDOUT) @logger.level = Logger::DEBUG @logger end def redis(*args, &blk) JobBatches::Batch.redis(*args, &blk) end def redis_prefix pfx = "cs" pfx = "#{Apartment::Tenant.current}:#{pfx}" if defined?(Apartment) pfx end end end