module CanvasSync
  module Jobs
    # Fans a set of sub-jobs out across Canvas terms.
    #
    # When +options[:term_scope]+ is given, one ManagedBatchJob batch is started
    # per term in that scope, each carrying a per-term context (including
    # +:canvas_term_id+ and, when delta-syncing, an adjusted +:updated_after+).
    # Without a term scope, the sub-jobs run once in a single ConcurrentBatchJob.
    class TermBatchesJob < CanvasSync::Job
      # @param options [Hash] expects:
      #   - :sub_jobs    [Array] job definitions to enqueue (no-op when blank)
      #   - :context     [Hash, nil] base context merged into each term's context
      #   - :term_scope  [Symbol, String, nil] name of a Term scope to iterate
      #   - :mark_synced [Boolean, nil] pass +false+ to skip recording last-sync
      # @return [void] (return value is not meaningful to callers)
      def perform(options)
        return unless (jobs = options[:sub_jobs]).present?

        context = options[:context] || {}
        if options[:term_scope]
          # NOTE(review): `send` with an options-supplied scope name allows
          # dispatch to any Term method; assumed to come from trusted sync
          # config, not user input — confirm, or switch to public_send.
          # `find_each` (no trailing `.map`) — the per-term batch objects were
          # previously collected into an array that nothing consumed.
          Term.send(options[:term_scope]).find_each do |term|
            term_id = get_term_id(term)
            local_context = context.merge(canvas_term_id: term_id)

            # Override the delta-syncing date if:
            # 1. the Term hasn't been synced before or
            # 2. the Term underwent a period of not syncing
            if batch_context[:updated_after]
              term_last_sync = CanvasSync.redis.get(self.class.last_sync_key(term_id))
              if term_last_sync.blank? || batch_context[:updated_after] > term_last_sync
                # nil when the term was never synced → forces a full sync.
                local_context[:updated_after] = term_last_sync.presence
              end
            end

            JobBatches::ManagedBatchJob.make_batch(jobs, ordered: false, concurrency: true) do |b|
              # TODO If we do a Chain UI, this will need to checkin somehow to indicate that the chain forked
              # Or chain steps just show a summary - eg "Started", "X Jobs Running", "Done" or "X Jobs Running, Y Jobs Done" - and not individual forks
              # For a step to be considered done, all previous sibling-level steps must be done and no batches pending
              b.description = "TermBatchJob(#{term_id}) Root"
              b.context = local_context
              # Record the successful sync time unless explicitly disabled.
              b.on(:success, "#{self.class}.batch_finished") unless options[:mark_synced] == false
            end
          end
        else
          JobBatches::ConcurrentBatchJob.make_batch(jobs, context: context)
        end
      end

      # Batch :success callback — stamps the term's last-sync time in Redis so
      # subsequent delta syncs know where to resume from.
      #
      # @param status [Object] batch status (unused, required by callback API)
      # @param opts   [Object] callback options (unused, required by callback API)
      def self.batch_finished(status, opts)
        ctx = JobBatches::Batch.current_context
        term_id = ctx[:canvas_term_id]
        CanvasSync.redis.set(last_sync_key(term_id), ctx[:batch_start_time])
      end

      # Redis key holding the last successful sync time for +term_id+,
      # namespaced by the current batch genre so different sync kinds
      # track their own timestamps.
      #
      # @param term_id [Integer, String]
      # @return [String]
      def self.last_sync_key(term_id)
        ctx = JobBatches::Batch.current_context
        "#{CanvasSync.redis_prefix}:#{ctx[:batch_genre]}:#{term_id}:last_sync"
      end

      # Canvas term id for a term record, whichever attribute the model exposes
      # (+canvas_id+ when present, else +canvas_term_id+).
      #
      # @param term [Object] a Term-like record
      # @return [Object] the Canvas term identifier
      def get_term_id(term)
        term.try(:canvas_id) || term.canvas_term_id
      end
    end
  end
end