app/models/process/naf/log_archiver.rb: naf-2.1.9 vs naf-2.1.10
- old
+ new
@@ -6,18 +6,18 @@
NAF_JOBS_LOG_PATH = "#{::Naf::PREFIX_PATH}/#{::Naf.schema_name}/jobs/"
NAF_RUNNERS_LOG_PATH = "#{::Naf::PREFIX_PATH}/#{::Naf.schema_name}/runners/"
DATE_REGEX = /\d{8}_\d{6}/
LOG_RETENTION = 1
- def work
- # Use AWS credentials to access S3
- s3 = AWS::S3.new(access_key_id: AWS_ID,
+ def work
+ # Use AWS credentials to access S3
+ s3 = AWS::S3.new(access_key_id: AWS_ID,
secret_access_key: AWS_KEY,
ssl_verify_peer: false)
- # Each project will have a specific bucket
- bucket = s3.buckets[NAF_BUCKET]
+ # Each project will have a specific bucket
+ bucket = s3.buckets[NAF_BUCKET]
files = log_files
logger.info 'Starting to save files to s3...'
files.each do |file|
# Write file if not existent
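The hunk above touches only whitespace in the S3 client setup; the surrounding loop uploads each log file to the bucket unless S3 already holds it. For reference, a minimal sketch of that write-if-absent pattern against the aws-sdk v1 interface this code targets (the credentials, bucket name, and glob below are illustrative, not naf's actual values):

    require 'aws-sdk' # aws-sdk v1, which provides the AWS::S3 class used above

    s3 = AWS::S3.new(access_key_id:     ENV['AWS_ACCESS_KEY_ID'],
                     secret_access_key: ENV['AWS_SECRET_ACCESS_KEY'])
    bucket = s3.buckets['example-naf-logs'] # hypothetical bucket name

    Dir['/var/log/example/*.log'].each do |path|
      object = bucket.objects[File.basename(path)]
      # Upload only when the key is not already present in the bucket
      object.write(file: path) unless object.exists?
    end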
@@ -29,17 +29,17 @@
end
end
logger.info 'Starting to archive files...'
archive_old_files(files)
- end
+ end
- private
+ private
- def project_name
- (`git remote -v`).slice(/\/\S+/).sub('.git','')[1..-1]
- end
+ def project_name
+ (`git remote -v`).slice(/\/\S+/).sub('.git','')[1..-1]
+ end
def log_files
files = Dir[NAF_JOBS_LOG_PATH + "*/*"]
files += Dir[NAF_RUNNERS_LOG_PATH + "*/*"]
# Sort log files based on time
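project_name shells out to git remote -v and extracts the repository name by string surgery. Stepping through the one-liner on a hypothetical SSH-style remote makes it easier to follow (the remote URL is illustrative):

    remote = "origin\tgit@github.com:example/my_app.git (fetch)" # sample `git remote -v` line

    remote.slice(/\/\S+/)                          # => "/my_app.git" -- first "/" through the next whitespace
    remote.slice(/\/\S+/).sub('.git', '')          # => "/my_app"
    remote.slice(/\/\S+/).sub('.git', '')[1..-1]   # => "my_app"      -- drop the leading "/"

Note that this assumes the first "/" in the output belongs to the repository path. An HTTPS remote such as https://github.com/example/my_app.git would match at the "//" after the scheme and yield "/github.com/example/my_app" instead.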
@@ -61,15 +61,25 @@
::Naf::ApplicationType.first.created_at.strftime("%Y%m%d_%H%M%S")
end
def archive_old_files(files)
copy_files
- today = Time.zone.now.to_date
files.each do |file|
logger.info "Archived file: #{file}"
- directory = `dirname #{file}`
- `rm -r #{directory}`
end
+ File.delete(*files)
+
+ cleanup(NAF_JOBS_LOG_PATH + '*')
+ cleanup(NAF_RUNNERS_LOG_PATH + '*')
+ end
+
+ def cleanup(path)
+ Dir[path].select { |d| File.directory? d }. # select only directories
+ select { |d| (Dir.entries(d) - %w[ . .. ]).empty? }. # check if directory is empty
+ each do |d|
+ logger.info "Removing directory #{d}"
+ Dir.rmdir d
+ end
end
def copy_files
if File.directory?(Naf::LOGGING_ROOT_DIRECTORY + "/naf")
# Each archive will have a unique path based on the time archived
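The substantive change in this release lives in archive_old_files: the per-file dirname / rm -r shell-outs are gone, replaced by plain Ruby file operations. File.delete(*files) removes the archived logs in one call, and the new cleanup method then prunes only the date directories that are actually empty, a precondition Dir.rmdir enforces anyway by raising Errno::ENOTEMPTY. A self-contained sketch of that pruning idiom (the temp-directory layout is illustrative):

    require 'tmpdir'
    require 'fileutils'
    require 'logger'

    logger = Logger.new($stdout)

    base = Dir.mktmpdir
    FileUtils.mkdir_p(File.join(base, 'empty_run'))            # stale, already-archived date directory
    FileUtils.mkdir_p(File.join(base, 'busy_run'))
    File.write(File.join(base, 'busy_run', 'job.log'), 'data') # still holds an unarchived log

    Dir[File.join(base, '*')].select { |d| File.directory?(d) }
                             .select { |d| (Dir.entries(d) - %w[ . .. ]).empty? }
                             .each do |d|
      logger.info "Removing directory #{d}"
      Dir.rmdir(d) # raises Errno::ENOTEMPTY if the directory gained files meanwhile
    end

    Dir[File.join(base, '*')] # => only .../busy_run is left

Unlike the old rm -r on each file's parent directory, nothing beyond the archived files themselves and already-empty directories is ever removed, and no unquoted paths are interpolated into a shell command.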