require 'json'
require 'yaml'
require 'fileutils'

module CiCd
  module Builder
    # noinspection RubySuperCallWithoutSuperclassInspection
    module Manifest
      module Build
        # module ClassMethods

        # ---------------------------------------------------------------------------------------------------------------
        def self.included(includer)
        end

        # ---------------------------------------------------------------------------------------------------------------
        # noinspection RubyHashKeysTypesInspection
        def prepareBuild()
          ret = super
          if ret == 0
            @vars[:artifacts] = []
            yaml = YAML.load(IO.read(ENV['MANIFEST_FILE']))
            keys = Hash[yaml.keys.map.with_index.to_a].keys.sort
            # @logger.info keys.ai
            ordr = []
            bads = []
            apps = {}
            vars = {}
            # Map component attributes to the manifest key suffixes that carry them
            rmap = {
              sha256:   %w[_sha256],
              base_url: %w[_repo_base_url],
              url:      %w[_url],
              version:  %w[_app_version],
              build:    %w[_app_build],
            }
            keys.each do |prod|
              rmap.keys.each do |var|
                vars[var] = ''
              end
              name  = ''
              match = nil
              rmap.each do |var, lst|
                lst.each do |regexstr|
                  match = prod.match(%r'^(.*?)#{regexstr}$')
                  if match
                    name = match[1]
                    vars[var] = yaml[prod]
                    break
                  end
                end
                break if match
              end
              if match
                ordr << name
                unless apps[name]
                  apps[name] = {
                    name: name,
                  }
                end
                rmap.keys.each do |var|
                  apps[name][var] = vars[var] unless vars[var].empty?
                end
              else
                bads << prod
              end
            end
            @logger.debug "App entries: #{apps.ai}"
            if bads.size > 0
              @logger.fatal "Bad entries: #{bads.map { |p| "#{p}: #{yaml[p]}" }.ai}"
              ret = Errors::BAD_ARTIFACTS
            else
              @vars[:components] = apps
            end
          end
          @vars[:return_code] = ret
        end

        # # ---------------------------------------------------------------------------------------------------------------
        # def makeBuild()
        #   super
        # end

        # ---------------------------------------------------------------------------------------------------------------
        def getVersionBuild(path, artifact, comp)
          version, build = File.split(path)
          if build.match(%r'^\d+$') and version.match(%r'/?\d+\.\d+\.?\d*$')
            # The path already ends in <version>/<build>
            version = File.basename(version)
            build   = build.to_i
          else
            build   = comp[1][:build] || 0
            version = comp[1][:build].nil? ? '' : (comp[1][:build] > 0 ? build.to_s : '')
            build   = comp[1][:build] || 0
            unless version.match(%r'^[.0-9]+$')
              version = comp[1][:version] || ''
            end
            if version.empty?
              # Derive the version from the artifact file name
              version = artifact.dup
              version.gsub!(%r'\.*(tar\.gz|tgz|bzip2|bz2|jar|war|[a-z]+)$', '')
              if artifact =~ %r'^#{comp[0]}'
                version.gsub!(%r'^#{comp[0]}\.*-*', '')
              else
                version.gsub!(%r'^[a-zA-Z\-.]+', '')
              end
              if version.empty?
                version = @vars[:build_ver]
              else
                uri, ver = File.split(path)
                if version =~ %r'^#{ver}' and version =~ %r'^(\d+\.){3}'
                  build   = version.dup
                  version = ver
                  build   = build.gsub(%r'^#{version}(\.|-)*', '')
                end
              end
              if build == 0 or build.to_s.empty?
                build = @vars[:build_num]
              end
            end
          end
          [version, build]
        end

        # ---------------------------------------------------------------------------------------------------------------
        def packageBuild()
          @logger.step __method__.to_s
          if isSameDirectory(Dir.pwd, ENV['WORKSPACE'])
            if @vars.has_key?(:components) and not @vars[:components].empty?
              @vars[:return_code] = 0
              clazz = getRepoClass('S3')
              if clazz.is_a?(Class) and not clazz.nil?
                @repo = clazz.new(self)
                if @vars[:return_code] == 0
                  lines = []
                  @vars[:artifacts] = []
                  # Deal with all artifacts of each component
                  @vars[:components].each { |comp|
                    artifact, path, version, build = parseComponent(comp)
                    require 'uri'
                    begin
                      parts  = URI(path).path.gsub(%r'^#{File::SEPARATOR}', '').split(File::SEPARATOR)
                      name   = parts.shift
                      bucket = getBucket(name)
                      key    = File.join(parts, '')
                      @logger.info "S3://#{name}:#{key} URL: #{path} #{artifact}"
                      objects = []
                      bucket.objects(prefix: key).each do |object|
                        if artifact.empty? or (not artifact.empty? and object.key =~ %r'#{key}#{artifact}')
                          objects << object
                        end
                      end
                      @logger.debug "S3://#{name}:#{key} has #{objects.size} objects"
                      local_dir = File.join(@vars[:local_dirs]['artifacts'], comp[0], '')
                      Dir.mkdir(local_dir, 0700) unless File.directory?(local_dir)
                      artifacts = []
                      changed   = false
                      # 1 or more objects on the key/ path
                      if objects.size > 0
                        lines << "#{comp[0]}:#{artifact} v#{version} b#{build} - #{path}"
                        # Once we start pulling the artifacts, everything that is build 0 gets this build number; in fact, all artifacts get this build number!
                        objects.each do |object|
                          @logger.info "\tchecking #{object.key}"
                          local = File.join(local_dir, File.basename(object.key))
                          etag  = object.etag.gsub(%r/['"]/, '')
                          download = if File.exist?(local)
                                       @logger.debug "\t\tchecking etag on #{local}"
                                       stat  = File.stat(local)
                                       check = calcLocalETag(etag, local, stat.size)
                                       if etag != check or object.size != stat.size or object.last_modified > stat.mtime
                                         @logger.debug "\t\t#{etag} != \"#{check}\" #{object.size} != #{stat.size} #{object.last_modified} > #{stat.mtime}"
                                         true
                                       else
                                         @logger.debug "\t\tmatched #{etag}"
                                         false
                                       end
                                     else
                                       true
                                     end
                          if download
                            @logger.info "\t\tdownload #{object.size} bytes"
                            response = object.get(:response_target => local)
                            File.utime(response.last_modified, response.last_modified, local)
                            @logger.info "\t\tdone"
                            check = calcLocalETag(etag, local)
                            unless check.eql?(etag)
                              @logger.info "\tETag different: #{etag} != #{check}"
                              changed = true
                            end
                          else
                            @logger.info "\t\tunchanged"
                          end
                          artifacts << local
                        end
                        # The local file will be 1 artifact, or an archive of the local artifacts when artifacts.size > 1
                        local = if artifacts.size > 0
                                  if artifacts.size > 1
                                    begin
                                      # require 'zlib'
                                      # require 'archive/tar/minitar'
                                      file = File.join(local_dir, "#{comp[0]}-#{version}.zip")
                                      if changed or not File.exist?(file)
                                        # output = File.open(file, 'wb')
                                        # output = Zlib::GzipWriter.new(output, Zlib::BEST_COMPRESSION, Zlib::RLE)
                                        # Dir.chdir(local_dir) do
                                        #   Archive::Tar::Minitar.pack(artifacts.map{|f| f.gsub(%r'^#{local_dir}','')}, output, false )
                                        # end
                                        zipped_files = artifacts.map { |f| f.gsub(%r'^#{local_dir}', '') }.join(' ')
                                        Dir.chdir(local_dir) do
                                          res = %x(zip -o9X #{file} #{zipped_files})
                                        end
                                        raise "Failed to zip #{file} containing #{zipped_files}" unless $?.exitstatus == 0
                                      end
                                      file
                                    rescue Exception => e
                                      @logger.error "Artifact error: #{file} #{e.class.name} #{e.message}"
                                      File.unlink(file)
                                      raise e
                                    # ensure
                                    #   output.close if output and not output.closed?
                                    end
                                  else
                                    artifacts[0]
                                  end
                                end
                        addArtifact(@vars[:artifacts], local, local_dir,
                                    { module: comp[0], name: comp[0], build: build, version: version, file: local })
                      else
                        @logger.fatal "Artifact not found: s3://#{name}/#{key}#{artifact}"
                        @vars[:return_code] = Errors::ARTIFACT_NOT_FOUND
                      end
                    # rescue Aws::S3::Errors::NotFound => e
                    #   @logger.fatal "Artifact S3 error: #{artifact} #{e.class.name} #{e.message}"
                    #   raise e
                    # rescue Aws::S3::Errors::NoSuchKey => e
                    #   @logger.error "Artifact S3 error: #{artifact} #{e.class.name} #{e.message}"
                    rescue Exception => e
                      @logger.error "Artifact error: #{artifact} #{e.class.name} #{e.message}"
                      raise e
                    end
                  }
                  if @vars[:return_code] == 0
                    cleanupAfterPackaging(lines)
                  end
                else
                  @logger.fatal "S3 repo error: Bucket #{ENV['AWS_S3_BUCKET']}"
                end
              else
                @logger.error 'CiCd::Builder::Repo::S3 is not a valid repo class'
                @vars[:return_code] = Errors::BUILDER_REPO_TYPE
              end
            else
              @logger.error 'No components found during preparation?'
              @vars[:return_code] = Errors::NO_COMPONENTS
            end
          else
            @logger.error "Not in WORKSPACE? '#{Dir.pwd}' does not match WORKSPACE='#{ENV['WORKSPACE']}'"
            @vars[:return_code] = Errors::WORKSPACE_DIR
          end
          @vars[:return_code]
        end

        # ---------------------------------------------------------------------------------------------------------------
        def cleanupAfterPackaging(lines)
          begin
            unless IO.write(@vars[:build_mff], lines.join("\n")) > 0
              @logger.error "Nothing was written to build manifest '#{@vars[:build_mff]}'"
              @vars[:return_code] = Errors::MANIFEST_EMPTY
            end
          rescue => e
            @logger.error "Failed to write manifest '#{@vars[:build_mff]}' (#{e.message})"
            @vars[:return_code] = Errors::MANIFEST_WRITE
          end
          FileUtils.rmtree(@vars[:build_dir])
          @vars[:return_code] = File.directory?(@vars[:build_dir]) ? Errors::BUILD_DIR : 0
          unless @vars[:return_code] == 0
            @logger.warn "Removing manifest '#{@vars[:build_mff]}' due to error"
            FileUtils.rm_f(@vars[:build_mff])
            # @vars[:return_code] = File.exists?(@vars[:build_mff]) ? Errors::MANIFEST_DELETE : 0
          end
        end

        # ---------------------------------------------------------------------------------------------------------------
        def parseComponent(comp)
          if comp[1][:url]
            path, artifact = File.split(comp[1][:url])
            version, build = getVersionBuild(path, artifact, comp)
          elsif comp[1][:base_url]
            artifact = ''
            if comp[1][:build].nil?
              version, build = comp[1][:version].split(%r'-')
              path = File.join(comp[1][:base_url], comp[1][:version])
            else
              version, build = [comp[1][:version], comp[1][:build]]
              path = File.join(comp[1][:base_url], comp[1][:version], comp[1][:build])
            end
          else
            path     = ''
            artifact = ''
            version, build = getVersionBuild(path, artifact, comp)
          end
          return artifact, path, version, build
        end

        # ---------------------------------------------------------------------------------------------------------------
        def getBucket(name = nil)
          @s3 = @repo.getS3()
          bucket = begin
                     ::Aws::S3::Bucket.new(name: name || ENV['AWS_S3_BUCKET'], client: @s3)
                   rescue Aws::S3::Errors::NotFound
                     @vars[:return_code] = Errors::BUCKET
                     nil
                   rescue Exception => e
                     @logger.error "S3 Bucket resource API error: #{e.class.name} #{e.message}"
                     raise e
                   end
          bucket
        end

      end
    end
  end
end
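
# A minimal sketch (an assumption, not part of the original gem) of the manifest YAML that
# prepareBuild above expects, inferred from the suffix map `rmap`: every key is a component
# name followed by one of _url, _repo_base_url, _sha256, _app_version or _app_build, and any
# key matching none of those suffixes aborts the build with BAD_ARTIFACTS. The component name
# "myapp" and all values below are illustrative only; the URL shape is simply chosen so that
# packageBuild can treat the first path segment as the S3 bucket.
#
#   myapp_url:         https://s3.amazonaws.com/my-bucket/releases/myapp/1.2.3/42/myapp-1.2.3.tar.gz
#   myapp_sha256:      0e3a6f...
#   myapp_app_version: 1.2.3
#   myapp_app_build:   42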