#!/usr/bin/env ruby

require 'thor'
require_relative '../lib/convection/control/cloud'
require 'fileutils'
require 'thread'
require 'yaml'

module Convection
  ##
  # Convection CLI
  ##
  class CLI < Thor
    include Thor::Actions

    DEFAULT_MODULE_PATH = 'root'.freeze
    S3_URI_REGEX = %r(^s3://(?<bucket>[^/]+)/(?<key>.+)).freeze

    def initialize(*args)
      super

      @cwd = Dir.getwd
      @errors = false
    end

    desc 'converge STACK', 'Converge your cloud'
    option :stack_group, :type => :string, :desc => 'The name of a stack group defined in your cloudfile to converge'
    option :stacks, :type => :array, :desc => 'An ordered, space-separated list of stacks to converge'
    option :exclude_stacks, :type => :array, :desc => 'An ordered, space-separated list of stacks NOT to converge'
    option :verbose, :type => :boolean, :aliases => '--v', :desc => 'Show stack progress', default: true
    option :'very-verbose', :type => :boolean, :aliases => '--vv', :desc => 'Show unchanged stacks', default: true
    option :cloudfiles, :type => :array, :default => %w(Cloudfile)
    option :delayed_output, :type => :boolean, :desc => 'Delay output until operation completion.', :default => false
    option :retain, :type => :boolean, :desc => 'Retain stack resources, without deleting.', :default => false
    def converge(stack = nil)
      @outputs = []
      operation('converge', stack)
      print_outputs(@outputs) if @outputs && @outputs.any?
      exit 1 if @errors
    end
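
    # Example (illustrative) invocations of the converge command defined above;
    # the executable and stack names below are placeholders, not part of this file:
    #   convection converge                    # converge every stack in ./Cloudfile
    #   convection converge --stacks vpc api   # converge only the listed stacks, in order
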
    desc 'delete STACK', 'Delete stack(s) from your cloud'
    option :stack_group, :type => :string, :desc => 'The name of a stack group defined in your cloudfile to delete'
    option :stacks, :type => :array, :desc => 'An ordered, space-separated list of stacks to delete'
    option :cloudfile, :type => :string, :default => 'Cloudfile'
    option :verbose, :type => :boolean, :aliases => '--v', :desc => 'Show stack progress', default: true
    option :'very-verbose', :type => :boolean, :aliases => '--vv', :desc => 'Show unchanged stacks', default: true
    def delete(stack = nil)
      init_cloud
      stacks = @cloud.stacks_until(stack, options, &method(:emit_events))

      if stacks.empty?
        say_status(:delete_failed, 'No stacks found matching the provided input (STACK, --stack-group, and/or --stacks).', :red)
        return
      end

      say_status(:delete, "Deleting the following stack(s): #{stacks.map(&:name).join(', ')}", :red)
      confirmation = ask('Are you sure you want to delete the above stack(s)?', limited_to: %w(yes no))

      if confirmation.eql?('yes')
        @cloud.delete(stacks, &method(:emit_events))
      else
        say_status(:delete_aborted, 'Aborted deletion of the above stack(s).', :green)
      end
    end

    desc 'diff STACK', 'Show changes that will be applied by converge'
    option :stack_group, :type => :string, :desc => 'The name of a stack group defined in your cloudfile to diff'
    option :stacks, :type => :array, :desc => 'An ordered, space-separated list of stacks to diff'
    option :exclude_stacks, :type => :array, :desc => 'An ordered, space-separated list of stacks NOT to diff'
    option :verbose, :type => :boolean, :aliases => '--v', :desc => 'Show stack progress'
    option :'very-verbose', :type => :boolean, :aliases => '--vv', :desc => 'Show unchanged stacks'
    option :cloudfiles, :type => :array, :default => %w(Cloudfile)
    option :delayed_output, :type => :boolean, :desc => 'Delay output until operation completion.', :default => false
    option :retain, :type => :boolean, :desc => 'Retain stack resources, without deleting.', :default => false
    def diff(stack = nil)
      @outputs = []
      operation('diff', stack)
      print_outputs(@outputs) if @outputs && @outputs.any?
      exit 1 if @errors
    end

    desc 'print_template STACK', 'Print the rendered template for STACK'
    option :cloudfile, :type => :string, :default => 'Cloudfile'
    def print_template(stack)
      init_cloud
      puts @cloud.stacks[stack].to_json(true)
    end

    desc 'describe-tasks [--stacks STACKS]', 'Describe tasks for a given stack'
    option :cloudfile, :type => :string, :default => 'Cloudfile'
    option :stacks, :type => :array, :desc => 'An ordered, space-separated list of stacks to describe', default: []
    def describe_tasks
      init_cloud
      describe_stack_tasks(options[:stacks])
    end

    desc 'run-tasks [--stack STACK]', 'Run tasks for a given stack'
    option :cloudfile, :type => :string, :default => 'Cloudfile'
    option :stack, :desc => 'The stack to run tasks for', :required => true
    def run_tasks
      init_cloud
      run_stack_tasks(options[:stack])
    end

    desc 'validate STACK', 'Validate the rendered template for STACK'
    option :cloudfile, :type => :string, :default => 'Cloudfile'
    def validate(stack)
      init_cloud
      @cloud.stacks[stack].validate
    end

    desc 'describe-resources', 'Describe resources for a stack'
    option :cloudfile, :type => :string, :default => 'Cloudfile'
    option :stack, :desc => 'The stack to be described', :required => true
    option :type, :desc => 'An optional filter on the types of resources to be described', default: '*'
    option :properties, :type => :array, :desc => 'A space-separated list of properties to include in the output', default: %w(*)
    option :format, :type => :string, :default => 'json', :enum => %w(json yaml)
    def describe_resources
      init_cloud
      describe_stack_resources(options[:stack], options[:format], options[:properties], options[:type])
    end
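
    # Example (illustrative) usage of the describe-resources command defined above;
    # the stack name and type filter are placeholders:
    #   convection describe-resources --stack my-stack --type AWS::IAM::Role --format yaml
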
    desc 'terraform-compare-resources', 'Compare resources in CloudFormation and in terraform state'
    option :stack_name, desc: 'The CloudFormation stack name (Stack#cloud_name)'
    option :stack_region, desc: 'The region where the stack resides', required: true
    option :state, desc: 'The source terraform state file (can be an s3:// uri)', required: true
    option :state_region, desc: 'The region where the state resides (required to load remote state)'
    def terraform_compare_resources
      terraform_resources = tfstate['modules'].flat_map { |m| m['resources'].reject { |k, _| k.start_with?('data.') } }.reduce({}, &:merge)

      cf = Aws::CloudFormation::Client.new(region: options[:stack_region])
      cf_resources = cf.describe_stack_resources(stack_name: options[:stack_name]).stack_resources.map do |r|
        { logical_resource_id: r.logical_resource_id, physical_resource_id: r.physical_resource_id, type: r.resource_type }
      end

      cf_resources.each do |cf_resource|
        response = ask_tf_physical_id(cf_resource)
        cf_resource[:physical_resource_id] = response unless response.empty?

        say "Resource #{cf_resource[:physical_resource_id].inspect} #{cf_resource[:logical_resource_id].inspect}:"
        tf_resource = tf_resource_for(physical_resource_id: cf_resource[:physical_resource_id], resources: terraform_resources)

        if tf_resource[:attribute_key] && tf_resource[:partial]
          say " * Found physical ID included in Terraform resource #{tf_resource[:logical_resource_id].inspect} under attribute #{tf_resource[:attribute_key].inspect} with value #{tf_resource[:value].inspect}. Please validate this resource is included in state and configuration.", :yellow
        elsif tf_resource[:attribute_key] && !tf_resource[:partial]
          say " * Found physical ID exactly in Terraform resource #{tf_resource[:logical_resource_id].inspect} under attribute #{tf_resource[:attribute_key].inspect} with value #{tf_resource[:value].inspect}.", :green
        else
          say " * Unable to find physical ID in the provided Terraform state.", :red
        end
      end
    end

    # rubocop:disable Style/AsciiComments
    desc 'terraform-export STACK', 'Create terraform configuration for a given stack (including `terraform import` commands)'
    option :module_path, desc: 'The module path prefix for terraform', default: DEFAULT_MODULE_PATH
    option :output_directory, desc: 'The directory to create configuration files under', aliases: %w(-d --dir), default: '.'.freeze
    option :cloudfile, desc: 'The cloudfile to load', default: 'Cloudfile'
    option :confirm_root, desc: 'Prompt to confirm module path when --module-path="root"', default: true, type: :boolean
    option :overwrite, desc: 'Overwrite any conflicting tf.json files in --output-directory', default: false, type: :boolean
    option :skip_existing, desc: 'Skip any conflicting tf.json files in --output-directory', default: false, type: :boolean
    def terraform_export(stack_name)
      if options[:output_directory].empty?
        say_status :error, '--output-directory must not be empty.', :red
        exit 1
      end

      if options[:confirm_root] && options[:module_path] == DEFAULT_MODULE_PATH
        say_status :warning, '--module-path was set to "root".', :yellow
        exit 0 unless yes?('Are you sure you want to generate terraform import commands in the root namespace?')
      end

      init_cloud
      @stack = @cloud.stacks.fetch(stack_name)
      template = @stack.template

      # Beware of 🐲. This patches this instance to include terraform-compatible intrinsic functions where possible.
      #
      # This should only be done in a single-threaded environment in the terraform-export command, as it breaks usage of cloudformation pseudo-functions.
      require 'convection/dsl/terraform_intrinsic_functions'
      Convection::DSL::TerraformIntrinsicFunctions.overload(Convection::DSL::IntrinsicFunctions.mixers)
      require 'convection/terraform/ext/stack'

      # Reload all resources and collections to ensure configuration is up to date. Amazon pseudo functions will have changed their return values.
      template.execute
      template.resource_collections.each(&method(:import_resources))
      template.resources.each(&method(:import_resources))
    end

    desc 'terraform-import-lambda-permissions STACK', 'Import state for a lambda permission resource into terraform management'
    option :cloudfile, desc: 'The cloudfile to load', default: 'Cloudfile'
    option :module_path, desc: 'The module path prefix for terraform', default: DEFAULT_MODULE_PATH
    option :state, desc: 'The source terraform state file', required: true
    option :state_out, desc: 'The destination state file (default: --state)'
    def terraform_import_lambda_permissions(stack_name)
      # Lazy load this since we only want this in the terraform subcommand.
      require 'active_support/core_ext/string/inflections'

      state = File.expand_path(options[:state])
      state_out = File.expand_path(options[:state_out]) if options[:state_out]
      unless state_out
        state_out = state
        FileUtils.cp(state, "#{state}.#{Time.now.to_i}.backup")
      end

      tfstate = JSON.parse(File.read(options[:state]))
      module_path = options[:module_path].split('.')
      # If the user typed "module" as the prefix use "root" since that is what is written to state
      # (not the module.NAME target format used for plans/applies).
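      # e.g. (illustrative) --module-path "module.lambda" splits to ["module", "lambda"] and is
      # rewritten to ["root", "lambda"], matching the "path" recorded in the state file.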
      module_path[0] = 'root' if module_path[0] == 'module'

      module_state = tfstate.fetch('modules').find { |mod| mod['path'] == module_path }
      unless module_state
        warn "No module found with path #{module_path} defined in #{options[:state]}."
        exit 1
      end
      tf_resources = module_state.fetch('resources')

      init_cloud
      stack = @cloud.stacks.fetch(stack_name)
      stack.template.all_resources.each do |name, resource|
        # Skip over this convection Resource unless it maps to a function permission.
        next unless resource.type == 'AWS::Lambda::Permission'

        # Get the function and permission resource summaries.
        permission_resource_summary = stack.resources[name]
        permission_resource = stack.template.resources[name]
        lambda_resource_logical_id = lambda_resource_logical_id(permission_resource: permission_resource)
        function_resource_summary = lambda_resource_logical_id && stack.resources[lambda_resource_logical_id]
        unless permission_resource_summary && function_resource_summary
          warn 'ERROR: Either the lambda permission or the function you were trying to import has not been created in cloudformation. These resources are expected to exist in the same template.'
          exit 1
        end

        # Skip creation of state if state for this permission already exists.
        if tf_resources["aws_lambda_permission.#{permission_resource_summary.logical_resource_id.underscore}"]
          warn "WARNING: Skipping resource that was found in state: aws_lambda_permission.#{permission_resource_summary.logical_resource_id.underscore}"
          next
        end

        function_name = resource.function_name.is_a?(Hash) ? function_resource_summary.physical_resource_id : resource.function_name
        source_logical_name = resource.source_arn.is_a?(Hash) ? resource.source_arn.fetch('Fn::GetAtt', []).first : resource.source_arn
        source_resource_summary = stack.resources[source_logical_name] && stack.resources[source_logical_name].physical_resource_id
        source_arn = resource.source_arn.is_a?(Hash) ? source_resource_summary : resource.source_arn
        unless source_arn
          warn "Unable to determine source arn from #{resource.source_arn}"
        end

        tf_resources["aws_lambda_permission.#{permission_resource_summary.logical_resource_id.underscore}"] = {
          type: 'aws_lambda_permission',
          depends_on: [],
          primary: {
            id: permission_resource_summary.physical_resource_id,
            attributes: {
              action: resource.action,
              function_name: function_name,
              id: permission_resource_summary.physical_resource_id,
              principal: resource.principal,
              qualifer: resource.qualifer,
              source_arn: source_arn,
              source_account: resource.source_account,
              statement_id: permission_resource_summary.physical_resource_id
            }.reject { |_key, value| value.nil? },
            meta: {},
            tainted: false
          },
          deposed: [],
          provider: ''
        }
      end

      create_file state_out, JSON.pretty_generate(tfstate)
    end
    # rubocop:enable Style/AsciiComments

    no_commands do
      attr_accessor :last_event

      private

      def ask_tf_physical_id(cf_resource)
        question = case cf_resource[:type]
                   when 'AWS::IAM::Policy'
                     "What is the <role name>:<policy name> for the #{cf_resource[:logical_resource_id].inspect} IAM policy?"
                   when 'AWS::S3::BucketPolicy'
                     "What is the name of the bucket that the #{cf_resource[:logical_resource_id].inspect} policy belongs to?"
                   end
        return '' unless question

        ask(question, :yellow).chomp
      end

      def load_local_terraform_state(statefile)
        JSON.load(File.open(statefile))
      end
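
      # --state may be a local path or an s3:// URI; an s3:// value such as
      # s3://my-bucket/states/terraform.tfstate (illustrative bucket and key) is split by
      # S3_URI_REGEX into its 'bucket' and 'key' captures and fetched with the method below.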
      def load_remote_terraform_state(bucket, key)
        unless options[:state_region] && !options[:state_region].empty?
          say 'ERROR: --state-region must be specified to load remote state.', :red
          exit 1
        end

        s3 = Aws::S3::Client.new(region: options[:state_region])
        object = s3.get_object(bucket: bucket, key: key)
        JSON.load(object.body)
      end

      def tfstate
        return @tfstate if @tfstate

        match_data = options[:state].match(S3_URI_REGEX)
        if match_data
          @tfstate = load_remote_terraform_state(match_data['bucket'], match_data['key'])
        else
          @tfstate = load_local_terraform_state(options[:state])
        end
      end

      def tf_resource_for(physical_resource_id:, resources:)
        logical_id, resource = resources.find do |_, candidate|
          next true if candidate['primary'].key(physical_resource_id)

          candidate['primary'].values.any? { |v| v.is_a?(String) && v.include?(physical_resource_id) }
        end
        return {} unless logical_id && resource

        key = resource['primary'].key(physical_resource_id)
        return { attribute_key: key, logical_resource_id: logical_id, partial: false, value: physical_resource_id } if key

        key, value = resource['primary'].find { |_, v| v.is_a?(String) && v.include?(physical_resource_id) }
        { attribute_key: key, logical_resource_id: logical_id, partial: true, value: value }
      end

      def import_resources(resource_name, resource)
        empty_directory options[:output_directory] if resource.respond_to?(:to_hcl_json) || resource.respond_to?(:additional_hcl_files)

        if resource.respond_to?(:to_hcl_json)
          destination = File.join(options[:output_directory], "#{resource.name.underscore}.tf.json")
          create_file destination, resource.to_hcl_json(module_path: options[:module_path]), force: options[:overwrite], skip: options[:skip_existing]
        elsif resource.respond_to?(:additional_hcl_files)
          say "# Skipping HCL generation with #to_hcl_json. Will generate HCL using #additional_hcl_files instead.", :cyan
        else
          say "# Unable to generate terraform configuration for #{resource.class}. Define #{resource.class}#to_hcl_json to do so.", :yellow
        end

        if resource.respond_to?(:additional_hcl_files)
          resource.additional_hcl_files(module_path: options[:module_path]).each do |file, content|
            destination = File.join(options[:output_directory], file.to_s)
            empty_directory File.dirname(destination) if file.to_s.include?('/')
            create_file destination, content.to_json, force: options[:overwrite], skip: options[:skip_existing]
          end
        end

        if resource.respond_to?(:terraform_import_commands)
          destination = File.join(options[:output_directory], "terraform-import-#{resource_name.underscore}-#{@stack._original_region}-#{@stack._original_cloud}.sh")
          lines = resource.terraform_import_commands(module_path: options[:module_path])
          lines.each do |line|
            comment = line.start_with?('#')
            if options[:omit_comments]
              next if comment || line.strip.empty?
            end
            color = comment ? :bold : :cyan
            say line, color
          end
          create_file destination, lines.join("\n")
        else
          say "# Unable to determine terraform import commands for #{resource.class}. Define #{resource.class}#terraform_import_commands to do so.", :yellow
        end

        puts # Print an additional new line
      end

      def lambda_resource_logical_id(permission_resource:)
        return nil unless permission_resource

        if permission_resource.function_name.is_a?(Hash)
          return permission_resource.function_name['Ref'] if permission_resource.function_name.key?('Ref')
        end

        permission_resource.name.sub(/Permission.*$/, 'Lambda')
      end
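
      # Runs the named task ('converge' or 'diff') against every --cloudfiles entry, one worker
      # thread per cloudfile; with --delayed_output, each cloud's events are buffered and printed
      # together once the operation completes.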
      def operation(task_name, stack)
        work_q = Queue.new
        semaphore = Mutex.new

        unless options[:delayed_output]
          puts 'For easier reading when using multiple cloudfiles, output can be delayed until task completion.'
          puts 'If you would like delayed output please use the "--delayed_output true" option.'
        end

        options[:cloudfiles].each { |cloudfile| work_q.push(cloud: Control::Cloud.new, cloudfile_path: cloudfile) }
        workers = (0...options[:cloudfiles].length).map do
          Thread.new do
            until work_q.empty?
              output = []
              cloud_array = work_q.pop(true)
              cloud_array[:cloud].configure(File.absolute_path(cloud_array[:cloudfile_path], @cwd))
              cloud = cloud_array[:cloud]
              region = cloud.cloudfile.region
              operation_kwargs = {
                stack_group: options[:stack_group],
                stacks: options[:stacks],
                exclude_stacks: options[:exclude_stacks],
                retain: options[:retain]
              }

              cloud.send(task_name, stack, operation_kwargs) do |event, errors|
                if options[:cloudfiles].length > 1 && options[:delayed_output]
                  output << { event: event, errors: errors }
                else
                  emit_events(event, errors, region: region)
                end

                semaphore.synchronize { @errors = errors.any? if errors }
              end

              if options[:cloudfiles].length > 1 && options[:delayed_output]
                semaphore.synchronize { @outputs << { cloud_name: cloud.cloudfile.name, region: region, logging: output } }
              end
            end
          end
        end
        workers.each(&:join)
      end

      def describe_stack_tasks(stacks_to_include)
        @cloud.stacks.map do |stack_name, stack|
          next if stacks_to_include.any? && !stacks_to_include.include?(stack_name)

          tasks = stack.tasks.values.flatten.uniq
          next if tasks.empty?

          puts "Stack #{stack_name} (#{stack.cloud_name}) includes the following tasks:"
          tasks.each_with_index do |task, index|
            puts " #{index}. #{task}"
          end
        end
      end

      def run_stack_tasks(stack_name)
        stack = @cloud.stacks[stack_name]
        unless stack
          say_status(:task_failed, 'No stacks found matching the provided input (--stack).', :red)
          exit 1
        end

        tasks = stack.tasks.values.flatten.uniq
        if tasks.empty?
          say_status(:task_failed, "No tasks defined for the stack #{stack_name}. Define them in your Cloudfile.", :red)
          exit 1
        end

        puts "The following tasks are available to execute for the stack #{stack_name} (#{stack.cloud_name}):"
        tasks.each_with_index do |task, index|
          puts " #{index}. #{task}"
        end

        choices = 0.upto(tasks.length - 1).map(&:to_s)
        choice = ask('Which stack task would you like to execute? (ctrl-c to exit)', limited_to: choices)
        task = tasks[choice.to_i]

        say_status(:task_in_progress, "Task #{task} in progress for stack #{stack_name}.", :yellow)
        task.call(stack)

        if task.success?
          say_status(:task_complete, "Task #{task} successfully completed for stack #{stack_name}.", :green)
        else
          say_status(:task_failed, "Task #{task} failed to complete for stack #{stack_name}.", :red)
          exit 1
        end
      end

      def describe_stack_resources(stack, format, properties_to_include, type)
        stack_template = @cloud.stacks[stack].current_template
        raise "No template defined for [#{stack}] stack" if stack_template.nil?

        stack_resources = stack_template['Resources']
        stack_resources.select! { |_name, attrs| attrs['Type'] == type } unless type == '*'

        described_resources = {}
        stack_resources.each do |name, attrs|
          # Only include the resource type if we asked for all resource types
          attrs.reject! { |attr, _value| attr == 'Type' } unless type == '*'

          # Only include those properties that were explicitly requested
          unless properties_to_include.size == 1 && properties_to_include[0] == '*'
            attrs['Properties'].select! { |prop, _value| properties_to_include.include? prop }
          end

          described_resources[name] = attrs
        end

        case format
        when 'json'
          puts JSON.pretty_generate(described_resources)
        when 'yaml'
          puts described_resources.to_yaml
        end
      end
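
      # Event verbosity: --vv prints every event, --v prints only :compare events,
      # and :error events and diffs are always printed.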
      def emit_events(event, *errors, region: nil)
        if event.is_a? Model::Event
          if options[:'very-verbose'] || event.name == :error
            print_info(event, region: region)
          elsif options[:verbose]
            print_info(event, region: region) if event.name == :compare
          end

          @last_event = event
        elsif event.is_a? Model::Diff
          if !options[:'very-verbose'] && !options[:verbose]
            print_info(last_event, region: region) unless last_event.nil?
            @last_event = nil
          end

          print_info(event, region: region)
        else
          print_info(event, region: region)
        end

        Array(errors).each do |error|
          error = RuntimeError.new(error) if error.is_a?(String)

          say "* #{error.message}"
          Array(error.backtrace).each { |trace| say "  #{trace}" }
        end
      end

      def print_info(say, region: nil)
        print "#{region} " if region
        say_status(*say.to_thor)
      end

      def print_outputs(outputs)
        outputs.each do |output|
          puts '********'
          puts "Cloud name: #{output[:cloud_name]}. Region: #{output[:region]}."
          puts '********'
          output[:logging].each do |hash|
            emit_events(hash[:event], Array(hash[:errors]), region: output[:region])
          end
        end
      end

      def init_cloud
        if options['cloudfile'].nil?
          warn 'ERROR: You must specify the --cloudfile option.'
          exit 1
        end

        @cloud = Control::Cloud.new
        @cloud.configure(File.absolute_path(options['cloudfile'], @cwd))
      end
    end
  end
end

Convection::CLI.start(ARGV)