module Eco
  module API
    class Session
      # Set of helpers to do the most common tasks
      class Task < API::Common::Session::BaseSession
        # Modifiers accepted by #load_people (see the private helpers below):
        #   :newest, :last_file, :previous -> use the newest matching local file
        #   :file                          -> use the exact file name given
        #   :api, :api_get                 -> fetch the people via API
        #   :save                          -> save the API result to a local file
        NEWEST_FILE_MODE = [:newest, :last_file, :previous]
        LOAD_FILE_MODE   = [:file]
        SAVE_FILE_MODE   = [:save]
        API_MODE         = [:api, :api_get]

        # Fetches all the people via API and caches them locally as a timestamped JSON file.
        def file_people(filename = enviro.config.people.cache)
          logger.info("Going to get all the people via API")
          people = session.batch.get_people
          file   = file_manager.save_json(people, filename, :timestamp)
          logger.info("#{people.length} people loaded and saved locally to #{file}.")
          Eco::API::Organization::People.new(people)
        end

        # Refreshes the given people with server data, optionally including
        # those queued for creation in the current job groups.
        def people_refresh(people:, include_created: true)
          ini = people.length

          if include_created
            session.job_groups.find_jobs(type: :create).each do |job|
              people = people.merge(job.people)
            end
          end

          created = people.length - ini

          msg  = "Going to refresh #{people.length} people with server data"
          msg += " (including #{created} that were created)" if created > 0
          session.logger.info(msg)

          status  = session.batch.get_people(people, silent: true)
          entries = status.people
          missing = people.length - entries.length
          session.logger.error("Failed to obtain #{missing} people during the refresh") if missing > 0

          Eco::API::Organization::People.new(status.people)
        end

        # Looks up fresh server data for the given entries, as well as for
        # their current and entry-defined supervisors.
        def search(data, options: {}, silent: true)
          strict_search = session.config.people.strict_search? &&
            (!options[:search]&.key?(:strict) || options.dig(:search, :strict))

          # to scope people to be fresh data got via api
          session.logger.info("going to api get #{data.length} entries...")
          status = session.batch.search(data, silent: silent)
          people = Eco::API::Organization::People.new(status.people)

          # get the current supervisors
          supers = people.each_with_object([]) do |person, sup|
            if (sup_id = person.supervisor_id)
              spr = {"id" => sup_id}
              sup.push(spr) unless sup.include?(spr) || people.person(id: sup_id, external_id: sup_id)
            end
          end

          if supers.length > 0
            session.logger.info("going to api get #{supers.length} current supervisors...")
            status = session.batch.search(supers, silent: silent)
            people = people.merge(status.people, strict: strict_search)
          end

          # get the supervisors referred to by the entries themselves
          supers = data.each_with_object([]) do |entry, sup|
            if entry.respond_to?(:supervisor_id) && !entry.supervisor_id.to_s.strip.empty?
              sup_id = entry.supervisor_id
              spr    = {"id" => sup_id}
              sup.push(spr) unless sup.include?(spr) || people.person(id: sup_id, external_id: sup_id)
            end
          end

          if supers.length > 0
            session.logger.info("going to api get #{supers.length} supervisors as per entries...")
            status = session.batch.search(supers, silent: silent)
            people = people.merge(status.people, strict: strict_search)
          end

          session.logger.info("could get #{people.length} people (out of #{data.length} entries)")
          people
        end

        # Loads the people from a local JSON cache file and/or via the API,
        # depending on the +modifier+ flags (see the *_MODE constants above).
        def load_people(filename = enviro.config.people.cache, modifier: [:newest, :api])
          modifier = [modifier].flatten
          people   = []

          case
          when !!filename && (load_file?(modifier) || newest_file?(modifier))
            case
            when newest_file?(modifier)
              # search input file based on pattern (in case the name has a timestamp)
              file = file_manager.dir.newest_file(file: filename)
              logger.info("previous file found: #{file}") if !!file
            else
              file = file_manager.dir.file(filename)
            end

            if !file
              logger.error("could not find the file #{file_manager.dir.file(filename)}")
              exit if !use_api?(modifier)
              people = self.load_people(modifier: modifier - NEWEST_FILE_MODE - LOAD_FILE_MODE)
            else
              people = file_manager.load_json(file)
              if !!people && people.is_a?(Array)
                logger.info("#{people&.length} people loaded from file #{file}")
              end
            end
          when use_api?(modifier)
            # no previous file: use API to get all people
            logger.info("Going to get all the people via API")
            people = session.batch.get_people

            if save_file?(modifier) && people && people.length > 0
              file = file_manager.save_json(people, filename, :timestamp)
              logger.info("#{people.length} people saved to file #{file}.")
            end
          end

          Eco::API::Organization::People.new(people)
        end

        # Uploads to S3 the target files, directories and file patterns
        # defined in the configuration, returning the list of remote paths.
        def s3upload_targets
          [].tap do |paths|
            session.config.s3storage.target_files.each_with_object(paths) do |file, arr|
              arr.push(session.s3upload(file: file))
            end

            session.config.s3storage.target_directories.each_with_object(paths) do |folder, arr|
              arr.concat(session.s3upload(directory: folder))
            end

            session.config.s3storage.target_file_patterns.each_with_object(paths) do |pattern, arr|
              filenames = []

              case pattern
              when Regexp
                Dir.entries(".").sort.each do |file|
                  next unless File.file?(file) # Skip directories
                  filenames.push(file) if file =~ pattern
                end
              when String
                Dir.glob(pattern).sort.each do |file|
                  next unless File.file?(file) # Skip directories
                  filenames.push(file)
                end
              else
                # misconfiguration: unsupported pattern type
              end

              filenames.each do |file|
                arr.push(session.s3upload(file: file))
              end
            end
          end
        end

        private

        # MODIFIERS

        def use_api?(modifier)
          modifiers = [modifier].flatten
          modifiers.any? { |m| API_MODE.include?(m) }
        end

        def load_file?(modifier)
          modifiers = [modifier].flatten
          modifiers.any? { |m| LOAD_FILE_MODE.include?(m) }
        end

        def newest_file?(modifier)
          modifiers = [modifier].flatten
          modifiers.any? { |m| NEWEST_FILE_MODE.include?(m) }
        end

        def save_file?(modifier)
          modifiers = [modifier].flatten
          modifiers.any? { |m| SAVE_FILE_MODE.include?(m) }
        end
      end
    end
  end
end
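
# Illustrative usage sketch (not part of the library): how the helpers above
# could be combined from a script that already holds an instance of this class.
# The `task` variable and the "people_cache.json" file name are assumptions made
# for the example; only the method calls and their signatures come from this file.
#
#   people = task.load_people("people_cache.json", modifier: [:newest, :api, :save])
#   people = task.people_refresh(people: people, include_created: true)
#   paths  = task.s3upload_targets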