tasks/nightly_repos.rake in packaging-0.88.77 vs tasks/nightly_repos.rake in packaging-0.99.0
- lines: old (packaging-0.88.77)
+ lines: new (packaging-0.99.0)
@@ -16,41 +16,36 @@
signing_server = Pkg::Config.signing_server
# Sign the repos please
Pkg::Util::File.empty_dir?("repos") and fail "There were no repos found in repos/. Maybe something in the pipeline failed?"
signing_bundle = ENV['SIGNING_BUNDLE']
- remote_repo = Pkg::Util::Net.remote_unpack_git_bundle(signing_server, 'HEAD', nil, signing_bundle)
+ remote_repo = Pkg::Util::Net.remote_bootstrap(signing_server, 'HEAD', nil, signing_bundle)
build_params = Pkg::Util::Net.remote_buildparams(signing_server, Pkg::Config)
Pkg::Util::Net.rsync_to('repos', signing_server, remote_repo)
- rake_command = <<-DOC
-cd #{remote_repo} ;
-#{Pkg::Util::Net.remote_bundle_install_command}
-bundle exec rake pl:jenkins:sign_repos GPG_KEY=#{Pkg::Util::Gpg.key} PARAMS_FILE=#{build_params}
-DOC
- Pkg::Util::Net.remote_execute(signing_server, rake_command)
+ Pkg::Util::Net.remote_ssh_cmd(signing_server, "cd #{remote_repo} ; rake pl:jenkins:sign_repos GPG_KEY=#{Pkg::Util::Gpg.key} PARAMS_FILE=#{build_params}")
Pkg::Util::Net.rsync_from("#{remote_repo}/repos/", signing_server, target)
- Pkg::Util::Net.remote_execute(signing_server, "rm -rf #{remote_repo}")
- Pkg::Util::Net.remote_execute(signing_server, "rm #{build_params}")
+ Pkg::Util::Net.remote_ssh_cmd(signing_server, "rm -rf #{remote_repo}")
+ Pkg::Util::Net.remote_ssh_cmd(signing_server, "rm #{build_params}")
puts "Signed packages staged in '#{target}' directory"
end
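
Aside from swapping remote_unpack_git_bundle/remote_execute for remote_bootstrap/remote_ssh_cmd (and dropping the bundler heredoc), both versions perform the same round trip: rsync the unsigned repos to the signing server, run pl:jenkins:sign_repos there, rsync the signed results back, and clean up. A minimal sketch of that flow; the hostname, key ID, and paths are placeholders, since the real task resolves them from Pkg::Config and the remote bootstrap:

# Sketch only: host, GPG key, and directories below are placeholders.
signing_server = 'signer.example.internal'
remote_repo    = '/tmp/packaging-checkout'
local_target   = 'pkg/signed_repos'

system('rsync', '-a', 'repos', "#{signing_server}:#{remote_repo}/") or raise 'rsync to signer failed'
system('ssh', signing_server,
       "cd #{remote_repo} && rake pl:jenkins:sign_repos GPG_KEY=ABC123DE PARAMS_FILE=#{remote_repo}/build_params.yaml") \
  or raise 'remote signing failed'
system('rsync', '-a', "#{signing_server}:#{remote_repo}/repos/", local_target) or raise 'rsync from signer failed'
system('ssh', signing_server, "rm -rf #{remote_repo}")
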
task :sign_repos => "pl:fetch" do
Pkg::Util::RakeUtils.invoke_task("pl:sign_rpms", "repos")
Pkg::Rpm::Repo.create_local_repos('repos')
Pkg::Rpm::Repo.sign_repos('repos')
Pkg::Deb::Repo.sign_repos('repos', 'Apt repository for signed builds')
- Pkg::Sign::Dmg.sign('repos') unless Dir['repos/apple/**/*.dmg'].empty?
- Pkg::Sign::Ips.sign('repos') unless Dir['repos/solaris/11/**/*.p5p'].empty?
- Pkg::Sign::Msi.sign('repos') unless Dir['repos/windows/**/*.msi'].empty?
+ Pkg::OSX.sign('repos') unless Dir['repos/apple/**/*.dmg'].empty?
+ Pkg::IPS.sign('repos') unless Dir['repos/solaris/11/**/*.p5p'].empty?
+ Pkg::MSI.sign('repos') unless Dir['repos/windows/**/*.msi'].empty?
end
task :ship_signed_repos, [:target_prefix] => "pl:fetch" do |t, args|
target_prefix = args.target_prefix or fail ":target_prefix is a required argument for #{t}"
target_dir = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}/#{target_prefix}_repos"
Pkg::Util::Execution.retry_on_fail(:times => 3) do
# Ship the now signed repos to the distribution server
- Pkg::Util::Net.remote_execute(Pkg::Config.distribution_server, "mkdir -p #{target_dir}")
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.distribution_server, "mkdir -p #{target_dir}")
Pkg::Util::Net.rsync_to("#{target_prefix}_repos/", Pkg::Config.distribution_server, target_dir)
end
end
# This task should be invoked after prepare_signed_repos, so that there are repos to pack up.
@@ -66,10 +61,64 @@
name_of_archive = args.name_of_archive or fail ":name_of_archive is a required argument for #{t}"
versioning = args.versioning or fail ":versioning is a required argument for #{t}"
Pkg::Repo.create_all_repo_archives(name_of_archive, versioning)
end
+ # This is pretty similar to the 'pack_signed_repo' task. The difference here is that instead
+ # of creating a tarball for each repo passed, it adds each repo to a single archive, creating
+ one 'all' tarball with all of the repos. This is useful for customers who have a PE master with
+ # no internet access. They can unpack the puppet-agent-all tarball into the location that
+ # pe_repo expects and use simplified agent install without needing internet access, or having to
+ # manually download each agent that they need to feed to pe_repo.
+ # This task should be invoked after prepare_signed_repos, so that there are repos to pack up.
+ task :pack_all_signed_repos, [:path_to_repo, :name_of_archive, :versioning] => ["pl:fetch"] do |t, args|
+ # path_to_repo should be relative to ./pkg
+ name_of_archive = args.name_of_archive or fail ":name_of_archive is a required argument for #{t}"
+ versioning = args.versioning or fail ":versioning is a required argument for #{t}"
+ tar = Pkg::Util::Tool.check_tool('tar')
+
+ Dir.chdir("pkg") do
+ if versioning == 'ref'
+ local_target = File.join(Pkg::Config.project, Pkg::Config.ref, "repos")
+ elsif versioning == 'version'
+ local_target = File.join(Pkg::Config.project, Pkg::Util::Version.dot_version, "repos")
+ end
+
+ Dir.chdir(local_target) do
+ if !Pkg::Util::File.exist?("#{name_of_archive}.tar.gz")
+ warn "Skipping #{name_of_archive} because it (#{name_of_archive}.tar.gz) has no files"
+ else
+ if File.exist?("#{Pkg::Config.project}-all.tar")
+ tar_cmd = "--update"
+ else
+ tar_cmd = "--create"
+ end
+ Pkg::Util::Execution.ex("#{tar} --owner=0 --group=0 #{tar_cmd} --file #{Pkg::Config.project}-all.tar #{name_of_archive}.tar.gz")
+ end
+ end
+ end
+ end
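
The comment above pack_all_signed_repos describes an append-as-you-go archive. Stripped of the repo layout, the core pattern is: add each per-platform tarball to one uncompressed <project>-all.tar, using --create on the first pass and --update on every later one. A standalone sketch with illustrative filenames:

# Sketch of the incremental-archive pattern; filenames are illustrative.
all_tar      = 'puppet-agent-all.tar'
repo_tarball = 'el-7-x86_64.tar.gz'

# tar can only append to an uncompressed archive, so the per-repo tarballs are
# collected into a plain .tar first; compression happens in a later pass.
mode = File.exist?(all_tar) ? '--update' : '--create'
system('tar', '--owner=0', '--group=0', mode, '--file', all_tar, repo_tarball) \
  or raise "tar #{mode} failed for #{repo_tarball}"
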
+
+ # tar does not support adding or updating files in a compressed archive, so
+ # we have a task to compress the "all" tarball from the 'pack_all_signed_repos'
+ # task
+ task :compress_the_all_tarball, [:versioning] => ["pl:fetch"] do |t, args|
+ versioning = args.versioning or fail ":versioning is a required argument for #{t}"
+ gzip = Pkg::Util::Tool.check_tool('gzip')
+ Dir.chdir("pkg") do
+ if versioning == 'ref'
+ local_target = File.join(Pkg::Config.project, Pkg::Config.ref)
+ elsif versioning == 'version'
+ local_target = File.join(Pkg::Config.project, Pkg::Util::Version.dot_version)
+ end
+ Dir.chdir(local_target) do
+ Pkg::Util::Execution.ex("#{gzip} --fast #{File.join("repos", "#{Pkg::Config.project}-all.tar")}")
+ end
+ end
+ end
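
Because gzip'd archives cannot be appended to, compression is deferred to this separate task: once every repo tarball has been folded into the plain .tar, a single gzip pass produces the final artifact. Continuing the sketch above, with the same illustrative filename (the real task cds into pkg/<project>/<ref|version> first):

# Compress the finished 'all' tarball in one pass.
system('gzip', '--fast', 'repos/puppet-agent-all.tar') or raise 'gzip failed'
# Result: repos/puppet-agent-all.tar.gz
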
+
+
task :prepare_signed_repos, [:target_host, :target_prefix, :versioning] => ["clean", "pl:fetch"] do |t, args|
target_host = args.target_host or fail ":target_host is a required argument to #{t}"
target_prefix = args.target_prefix or fail ":target_prefix is a required argument for #{t}"
versioning = args.versioning or fail ":versioning is a required argument for #{t}"
mkdir("pkg")
@@ -145,11 +194,11 @@
target_basedir = args.target_basedir or fail ":target_basedir is a required argument to #{t}"
include_paths = []
if args.foss_only && Pkg::Config.foss_platforms && !Pkg::Config.foss_platforms.empty?
Pkg::Config.foss_platforms.each do |platform|
- include_paths << Pkg::Paths.repo_path(platform, legacy: true, nonfinal: true)
+ include_paths << Pkg::Paths.repo_path(platform, legacy: true)
if Pkg::Paths.repo_config_path(platform)
include_paths << Pkg::Paths.repo_config_path(platform)
end
end
else
@@ -242,15 +291,15 @@
local_pa = File.join(pa_source, version_string)
local_pe = pe_target
local_pa_latest = "#{pa_source}-latest"
local_pe_latest = "#{pe_target}-latest"
- Pkg::Util::Net.remote_execute(target_host, "mkdir -p '#{pe_target}'")
- Pkg::Util::Net.remote_execute(target_host, "mkdir -p '#{local_pe_latest}'")
- Pkg::Util::Net.remote_execute(target_host, "cp -r #{local_pa_latest}/* #{local_pe_latest}")
- Pkg::Util::Net.remote_execute(target_host, "sed -i 's|/#{File.basename(local_pa_latest)}|/#{pe_version}/#{File.basename(local_pa_latest)}|' #{local_pe_latest}/repo_configs/*/*")
- Pkg::Util::Net.remote_execute(target_host, "ln -sf '#{local_pa}' '#{local_pe}'")
+ Pkg::Util::Net.remote_ssh_cmd(target_host, "mkdir -p '#{pe_target}'")
+ Pkg::Util::Net.remote_ssh_cmd(target_host, "mkdir -p '#{local_pe_latest}'")
+ Pkg::Util::Net.remote_ssh_cmd(target_host, "cp -r #{local_pa_latest}/* #{local_pe_latest}")
+ Pkg::Util::Net.remote_ssh_cmd(target_host, "sed -i 's|/#{File.basename(local_pa_latest)}|/#{pe_version}/#{File.basename(local_pa_latest)}|' #{local_pe_latest}/repo_configs/*/*")
+ Pkg::Util::Net.remote_ssh_cmd(target_host, "ln -sf '#{local_pa}' '#{local_pe}'")
end
task :nightly_repos => ["pl:fetch"] do
Pkg::Util::RakeUtils.invoke_task("pl:jenkins:generate_signed_repos", 'nightly')
end
@@ -277,10 +326,10 @@
latest_filepath = File.join(tempdir, "pkg")
FileUtils.mkdir_p(latest_filepath)
latest_filename = File.join(latest_filepath, "LATEST")
File.open(latest_filename, 'w') { |file| file.write(version) }
- Pkg::Util::Net.s3sync_to(latest_filename, target_bucket, Pkg::Config.project, ["--acl-public", "--follow-symlinks"])
+ Pkg::Util::Net.s3sync_to(latest_filepath, target_bucket, Pkg::Config.project, ["--acl-public", "--follow-symlinks"])
FileUtils.rm_rf latest_filepath
end
end
end