require 'json'

module CiCd
  module Builder
    # noinspection RubySuperCallWithoutSuperclassInspection
    module Manifest
      # Build phase of the manifest-driven builder: reads a YAML manifest of
      # components (ENV['MANIFEST_FILE']), resolves each component's artifact
      # location/version/build, downloads artifacts from S3 into the workspace,
      # verifies checksums, and writes a build manifest file.
      #
      # Protocol: methods communicate success/failure through @vars[:return_code]
      # (0 == success, otherwise a value from Errors::*). @logger, @vars, @repo
      # and the CLASS constant are provided by the including class.
      # NOTE(review): YAML is used below but only 'json' is required here —
      # presumably 'yaml' is loaded elsewhere in the gem; verify.
      module Build
        # ---------------------------------------------------------------------------------------------------------------
        # Parse the manifest YAML into @vars[:components].
        #
        # Each manifest key is expected to be "<component><suffix>" where the
        # suffix is one of the patterns in rmap (e.g. "myapp_app_version");
        # matching keys are folded into one hash per component. Keys matching
        # no pattern are collected as "bad" and fail the build with
        # Errors::BAD_ARTIFACTS. Sets @vars[:return_code] to the result.
        # noinspection RubyHashKeysTypesInspection
        def prepareBuild()
          @logger.step CLASS+'::'+__method__.to_s
          ret = super
          if ret == 0
            @vars[:artifacts] = []
            # NOTE(review): YAML.load on the manifest file — if manifests can come
            # from untrusted sources, YAML.safe_load would be preferable; confirm.
            yaml = YAML.load(IO.read(ENV['MANIFEST_FILE']))
            # De-duplicate keys (Hash[] keeps last index per key) then sort.
            keys = Hash[yaml.keys.map.with_index.to_a].keys.sort
            @logger.debug "Manifest keys: #{keys.ai}"
            ordr = [] # NOTE(review): collected but never read in this module — possibly vestigial.
            bads = []
            apps = {}
            vars = {}
            # Map of component attribute -> list of manifest-key suffix patterns.
            rmap = {
                sha256:   %w[_sha256],
                base_url: %w[_repo_base_url],
                url:      %w[_url],
                version:  %w[_app_version],
                build:    %w[_app_build],
            }
            keys.each do |prod|
              # Reset the per-key scratch values before matching.
              rmap.keys.each do |var|
                vars[var] = ''
              end
              name = ''
              match = nil
              rmap.each do |var,lst|
                lst.each do |regexstr|
                  # Capture group 1 is the component name (everything before the suffix).
                  match = prod.match(%r'^(.*?)#{regexstr}$')
                  if match
                    name = match[1]
                    vars[var] = yaml[prod].to_s
                    break
                  end
                end
                @logger.debug "Prod: #{prod}, Var: #{var}, Regex: #{lst.ai}, Vars: #{vars.ai}"
                break if match
              end
              # @logger.debug "Vars: #{vars.ai}"
              if match
                ordr << name
                unless apps[name]
                  apps[name] = {
                      name: name,
                  }
                end
                # Merge only the attribute that matched for this manifest key.
                rmap.keys.each do |var|
                  apps[name][var] = vars[var] unless vars[var].empty?
                end
              else
                bads << prod
              end
            end
            @logger.debug "App entries: #{apps.ai}"
            if bads.size > 0
              @logger.fatal "Bad entries: #{bads.map{|p| "#{p}: #{yaml[p]}"}.ai}"
              ret = Errors::BAD_ARTIFACTS
            else
              @vars[:components] = apps
            end
          end
          @vars[:return_code] = ret
        end

        # Version string like "1.2", "1.2.3", "1.2.3.4" ...
        VER_RGX = %r'^\d+\.\d+(\.?\d)*$'
        # Strict major.minor[.patch] form.
        MMP_RGX = %r'^(\d+\.?){2,3}$'
        # Trailing archive/file extension to strip from artifact names.
        EXT_RGX = %r'\.*(tar\.gz|tgz|tar.bzip2|tar.bz2|bzip2|bz2|jar|war|[a-z]+)$'

        # ---------------------------------------------------------------------------------------------------------------
        # Best-effort extraction of [version, build] from an artifact file name:
        # strips the extension and any leading alpha prefix, then splits a
        # "version-build" tail on '-'. Returns ['', ''] shapes when nothing matches.
        def getVersionBuildFromName(artifact)
          version = artifact.dup
          version.gsub!(EXT_RGX, '')
          version.gsub!(%r'^[a-zA-Z\-._]+', '')
          version.gsub!(%r'\.\.+', '.')
          build = ''
          if version.match(VER_RGX)
            if version.match(%r'\-')
              version,build = version.split(/-/)
            end
          end
          [version,build]
        end

        # ---------------------------------------------------------------------------------------------------------------
        # Heuristically derive [version, build] for a component from its URL
        # path, the artifact file name, and the manifest data (comp is a
        # [name, data-hash] pair). Falls back to @vars[:build_ver] /
        # @vars[:build_num] when nothing usable is found. The branches below are
        # ordered guesses of decreasing confidence — treat with care.
        def getVersionBuild(path,artifact,comp)
          cname,cdata = comp
          version,build = File.split(path)
          if build.match(%r'^\d+$') and version.match(%r'/?\d+\.\d+\.?\d*$')
            # Hole in one!
            # Path ends ".../<version>/<build>" — take both directly.
            version = File.basename(version)
          else
            if build.match(VER_RGX)
              # Last path segment is itself a version; no build component.
              version = build
              build = ''
            else
              version = cdata[:build].nil? ? '' : ( cdata[:build] > 0 ? build.to_s : '' )
            end
            unless version.match(VER_RGX)
              version = cdata[:version] || ''
            end
            # Fall back to parsing the artifact file name itself.
            ver,bld = getVersionBuildFromName(artifact)
            if version.empty?
              version,build = [ver,bld]
              if version.empty?
                # Nothing derivable at all — use the overall build version.
                version = @vars[:build_ver]
              else
                _,ver = File.split(path)
                if version =~ %r'^#{ver}'
                  if version =~ VER_RGX
                    if version =~ %r'^#{build}'
                      # prob the major part of version
                      build = ''
                    end
                  else
                    unless version.eql?(ver)
                      # "version" carries the path's version plus extra — split it apart.
                      build = version.dup
                      version = ver
                      build = build.gsub(%r'^#{version}(\.|-)*','')
                    end
                  end
                else
                  # The whacky cases
                  build = version.dup
                  if ver =~ %r/\d+\.(\d+\.)*/
                    version = ver
                    build = build.gsub(%r'^#{version}(\.|-)*','')
                  else
                    # NOTE(review): gsub! returns nil when no substitution occurs,
                    # which would make .split raise NoMethodError here — confirm
                    # this branch is only reachable when the pattern matches.
                    vra = version.gsub!(%r'^((\d+\.?)+)', '\1').split(/\./)
                    version = vra[0..2].join('.')
                    build = vra[-1]
                    #build = @vars[:build_num]
                  end
                end
              end
            else
              if ver.match(VER_RGX)
                if ver.match(MMP_RGX)
                  if version.length < ver.length
                    version = ver # Guessing it is the better version
                  end
                else
                  # Name-derived value carries version+build; strip version to get build.
                  build = ver.dup
                  # version.gsub!(/\.d+$/, '')
                  build.gsub!(/^#{version}\.?/, '')
                end
              end
            end
            # Build must be a positive integer string, otherwise fall back to the
            # manifest's build, and finally to the job's build number.
            unless build.match(%r'^[1-9]\d*$')
              build = cdata[:build]
              build = @vars[:build_num] if (build.nil? or build.empty? or build.to_i == 0)
            end
          end
          [version,build]
        end

        # ---------------------------------------------------------------------------------------------------------------
        # Main packaging entry point: for every component prepared by
        # prepareBuild, pull its artifacts (processComponent) and then write the
        # build manifest / clean up (cleanupAfterPackaging). Must be run from the
        # WORKSPACE directory. Returns @vars[:return_code].
        def packageBuild()
          @logger.info CLASS+'::'+__method__.to_s
          if isSameDirectory(Dir.pwd, ENV['WORKSPACE'])
            if @vars.has_key?(:components) and not @vars[:components].empty?
              @vars[:return_code] = 0
              getRepoInstance('S3')
              if 0 == @vars[:return_code]
                lines = []
                @vars[:artifacts] = []
                # Deal with all artifacts of each component
                @vars[:components].each { |comp|
                  processComponent(comp, lines)
                }
                if @vars[:return_code] == 0
                  cleanupAfterPackaging(lines)
                end
              end
            else
              @logger.error 'No components found during preparation?'
              @vars[:return_code] = Errors::NO_COMPONENTS
            end
          else
            # NOTE(review): neither 'pwd' nor 'workspace' is defined in this scope —
            # interpolating them will raise NameError unless the including class
            # provides such methods; likely meant Dir.pwd / ENV['WORKSPACE']. Verify.
            @logger.error "Not in WORKSPACE?
'#{pwd}' does not match WORKSPACE='#{workspace}'"
            @vars[:return_code] = Errors::WORKSPACE_DIR
          end
          @vars[:return_code]
        end

        # ---------------------------------------------------------------------------------------------------------------
        # Write the accumulated manifest lines to @vars[:build_mff] and remove the
        # build directory. On any failure removes the (possibly partial) manifest.
        # NOTE(review): the rmtree result assignment overwrites an earlier
        # MANIFEST_EMPTY/MANIFEST_WRITE code in @vars[:return_code] — confirm
        # that is intended.
        def cleanupAfterPackaging(lines)
          begin
            unless IO.write(@vars[:build_mff], lines.join("\n")) > 0
              @logger.error "Nothing was written to build manifest '#{@vars[:build_mff]}'"
              @vars[:return_code] = Errors::MANIFEST_EMPTY
            end
          rescue => e
            @logger.error "Failed to write manifest '#{@vars[:build_mff]}' (#{e.message})"
            @vars[:return_code] = Errors::MANIFEST_WRITE
          end
          FileUtils.rmtree(@vars[:build_dir])
          @vars[:return_code] = File.directory?(@vars[:build_dir]) ? Errors::BUILD_DIR : 0
          unless @vars[:return_code] == 0
            @logger.warn "Remove manifest '#{@vars[:build_mff]}' due to error"
            FileUtils.rm_f(@vars[:build_mff])
            # @vars[:return_code] = File.exists?(@vars[:build_mff]) ? Errors::MANIFEST_DELETE : 0
          end
        end

      protected

        # ---------------------------------------------------------------------------------------------------------------
        # Resolve, download and register all S3 artifacts for one component
        # (comp is a [name, data-hash] pair); appends a summary line to `lines`
        # and records results via addArtifact into @vars[:artifacts]. Errors set
        # @vars[:return_code]; unexpected exceptions are logged and re-raised.
        def processComponent(comp, lines)
          artifact, path, version, build = parseComponent(comp)
          cname,cdata = comp
          # Lazy-loaded here rather than at file top (deliberate).
          require 'uri'
          require 'digest'
          begin
            key, bucket, objects = getObjects(artifact, path,cdata)
            local_dir = File.join(@vars[:local_dirs]['artifacts'], cname, '')
            Dir.mkdir(local_dir, 0700) unless File.directory?(local_dir)
            artifacts = []
            sha256 = {}
            changed = false
            # 1 or more objects on the key/ path
            if objects.size > 0
              lines << "#{cname}:#{artifact} v#{version} b#{build} - #{path}"
              # When we start pulling the artifacts then everything that is build 0 get this build number, in fact all artifacts get this build number!
              objects.each do |object|
                @logger.info "\tchecking #{object.key}"
                base = File.basename(object.key)
                local = File.join(local_dir, base)
                etag = object.etag.gsub(%r/['"]/, '')
                download = shouldDownload?(etag, local, object)
                if download
                  changed = doDownload(etag, local, object, bucket)
                else
                  @logger.info "\t\tunchanged"
                end
                # sha256[base] is: nil (no checksum in manifest), true (match),
                # or the actual hex digest String when it MISmatches.
                sha256[base] = nil
                if cdata[:sha256]
                  hd = Digest::SHA256.file(local).hexdigest
                  sha256[base] = hd == cdata[:sha256] ? true : hd
                end
                artifacts << local
              end
              # The local file will be 1 artifact or an archive of the local artifacts when artifacts.size > 1
              if artifacts.size > 0
                artifacts.each do |local|
                  base = File.basename(local)
                  # A String value means the checksum mismatched (see above).
                  if sha256[base].is_a?(String)
                    msg = "Artifact checksum is invalid or manifest is incorrect. Artifact: s3://#{bucket}/#{key}#{artifact}, SHA256: Manifest=#{cdata[:sha256]}, Actual=#{sha256[base]}"
                    # Mismatches are fatal only when ENFORCE_CHECKSUMS is set to a truthy word.
                    if ENV['ENFORCE_CHECKSUMS'] and ENV['ENFORCE_CHECKSUMS'].downcase =~ %r/^(yes|on|set|1|enable|active|enforced?)$/
                      @logger.fatal msg
                      @vars[:return_code] = Errors::ARTIFACT_CHECKSUM_BAD
                    else
                      @logger.warn msg
                    end
                  end
                end
                if 0 == @vars[:return_code]
                  local = getLocalArtifact(artifacts, changed, comp, local_dir, version, build)
                  base = File.basename(local)
                  addArtifact(@vars[:artifacts], local, local_dir, {
                      component: cname,
                      module: cname,
                      name: cname,
                      build: build,
                      version: version,
                      file: local,
                      # true => manifest checksum verified; String => actual (mismatched)
                      # digest; key absent (zip case) => compute fresh digest.
                      sha256: (sha256.keys.include?(base)) ? (sha256[base].is_a?(TrueClass) ?
                          cdata[:sha256] : sha256[base]) : Digest::SHA256.file(local).hexdigest,
                      file_name: cdata[:file_name],
                      file_ext: cdata[:file_ext],
                  })
                end
              end
            else
              @logger.fatal "Artifact not found: s3://#{bucket}/#{key}#{artifact}"
              @vars[:return_code] = Errors::ARTIFACT_NOT_FOUND
            end
          rescue Exception => e
            # NOTE(review): rescue Exception is overly broad (catches SystemExit,
            # SignalException); it does re-raise, but StandardError would suffice.
            @logger.error "Artifact error: #{artifact} #{e.class.name} #{e.message}"
            raise e
          end
        end

        # ---------------------------------------------------------------------------------------------------------------
        # List the S3 objects for an artifact. Bucket/prefix come from explicit
        # cdata[:s3_bucket]/[:s3_key] when present, otherwise from the URL path
        # (first segment = bucket, rest = prefix). Returns [prefix, bucket, objects].
        def getObjects(artifact, path, cdata)
          parts = URI(path).path.gsub(%r'^#{File::SEPARATOR}', '').split(File::SEPARATOR)
          bucket,prefix = if cdata[:s3_bucket] and cdata[:s3_key]
            [cdata[:s3_bucket],File.join(File.dirname(cdata[:s3_key]),'')]
          else
            name = parts.shift
            [name,File.join(parts, '')]
          end
          @logger.info "S3://#{bucket}:#{prefix} URL: #{path} #{artifact}"
          objects = []
          @s3 = @repo.getS3()
          #bucket_client = getBucket(bucket)
          # NOTE(review): list_objects returns at most one page (~1000 keys);
          # no pagination here — fine for small prefixes, verify otherwise.
          bucket_objects = @s3.list_objects(bucket: bucket, prefix: prefix)
          if bucket_objects
            bucket_objects = bucket_objects[:contents]
            bucket_objects.each do |object|
              # Empty artifact name means "take everything under the prefix".
              # (The "not artifact.empty?" guard is redundant with the 'or'.)
              if artifact.empty? or (not artifact.empty? and object.key =~ %r'#{prefix}#{artifact}')
                objects << object
              end
            end
          end
          @logger.debug "S3://#{bucket}:#{prefix} has #{objects.size} objects"
          return prefix, bucket, objects
        end

        # ---------------------------------------------------------------------------------------------------------------
        # Download one S3 object to `local`, preserve its mtime, and return
        # whether the local content differs from the expected ETag (true = changed).
        # Falls back to a client-level get_object for SDK objects lacking #get.
        def doDownload(etag, local, object, bucket)
          @logger.info "\t\tdownload #{object.size} bytes"
          begin
            response = object.get(:response_target => local)
          rescue NoMethodError
            response = @s3.get_object(bucket: bucket, key: object.key, response_target: local)
          end
          File.utime(response.last_modified, response.last_modified, local)
          @logger.info "\t\tdone"
          check = calcLocalETag(etag, local)
          if check.eql?(etag)
            false
          else
            @logger.info "\tETag different: #{etag} != #{check}"
            true
          end
        end

        # ---------------------------------------------------------------------------------------------------------------
        # Decide whether an S3 object must be (re)downloaded: true when no local
        # copy exists, or when the ETag, size, or modification time disagree.
        # NOTE(review): File.exists? is deprecated in modern Ruby (File.exist?).
        def shouldDownload?(etag, local, object)
          if File.exists?(local)
            @logger.debug "\t\tchecking etag on #{local}"
            stat = File.stat(local)
            check = calcLocalETag(etag, local, stat.size)
            if etag != check or object.size != stat.size or object.last_modified > stat.mtime
              @logger.debug "\t\t#{etag} != \"#{check}\" #{object.size} != #{stat.size} #{object.last_modified} > #{stat.mtime}"
              true
            else
              @logger.debug "\t\tmatched #{etag}"
              false
            end
          else
            true
          end
        end

        # ---------------------------------------------------------------------------------------------------------------
        # Return the single local artifact path, or — when a component produced
        # several files — zip them into "<name>-<version>-<build>.zip" (rebuilt
        # only when something changed) and return the zip path. On zip failure
        # the partial file is removed and the error re-raised.
        # NOTE(review): the raise message contains a typo ("containting"); also
        # file/zipped_files are interpolated unquoted into the shell command —
        # paths with spaces or metacharacters would break it.
        def getLocalArtifact(artifacts, changed, comp, local_dir, version, build)
          cname,cdata = comp
          if artifacts.size > 1
            begin
              file = File.join(local_dir, "#{cname}-#{version}-#{build}.zip")
              if changed or not File.exists?(file)
                zipped_files = artifacts.map { |f| f.gsub(%r'^#{local_dir}', '') }.join(' ')
                Dir.chdir(local_dir) do
                  res = %x(zip -o9X #{file} #{zipped_files} 2>&1)
                  @logger.info res
                end
                raise "Failed to zip #{file} containting #{zipped_files}" unless $?.exitstatus == 0
              end
              file
            rescue Exception => e
              @logger.error "Artifact error: #{file}
#{e.class.name} #{e.message}"
              File.unlink(file)
              raise e
            end
          else
            artifacts[0]
          end
        end

        # ---------------------------------------------------------------------------------------------------------------
        # Derive [artifact, path, version, build] for a component from its
        # manifest data: a full :url, a :base_url (+version[/build]), or neither
        # (version/build inferred by getVersionBuild).
        def parseComponent(comp)
          cname,cdata = comp
          if cdata[:url]
            path, artifact = File.split(cdata[:url])
            version, build = (cdata[:version] and cdata[:build]) ? [cdata[:version], cdata[:build]] : getVersionBuild(path, artifact, comp)
          elsif cdata[:base_url]
            artifact = ''
            if cdata[:build].nil?
              # Version string may embed the build as "version-build".
              # noinspection RubyUnusedLocalVariable
              version, build = cdata[:version].split(%r'-')
              # noinspection RubyUnusedLocalVariable
              path = File.join(cdata[:base_url], cdata[:version])
            else
              version, build = [cdata[:version], cdata[:build]]
              path = File.join(cdata[:base_url], cdata[:version], cdata[:build])
            end
          else
            path = ''
            artifact = ''
            version, build = (cdata[:version] and cdata[:build]) ? [cdata[:version], cdata[:build]] : getVersionBuild(path, artifact, comp)
          end
          return artifact, path, version, build
        end

        # ---------------------------------------------------------------------------------------------------------------
        # Build an Aws::S3::Bucket resource for `name` (default: AWS_S3_BUCKET).
        # Returns nil (and sets Errors::BUCKET) when the bucket is not found;
        # other exceptions are logged and re-raised.
        def getBucket(name = nil)
          @s3 = @repo.getS3()
          begin
            ::Aws::S3::Bucket.new(name: name || ENV['AWS_S3_BUCKET'], client: @s3)
          rescue Aws::S3::Errors::NotFound
            @vars[:return_code] = Errors::BUCKET
            nil
          rescue Exception => e
            @logger.error "S3 Bucket resource API error: #{e.class.name} #{e.message}"
            raise e
          end
        end

      end
    end
  end
end