require 'pathname'
require 'logger'

module Buildbox
  autoload :API,           "buildbox/api"
  autoload :Artifact,      "buildbox/artifact"
  autoload :Build,         "buildbox/build"
  autoload :Command,       "buildbox/command"
  autoload :Canceler,      "buildbox/canceler"
  autoload :CLI,           "buildbox/cli"
  autoload :Configuration, "buildbox/configuration"
  autoload :Monitor,       "buildbox/monitor"
  autoload :Platform,      "buildbox/platform"
  autoload :Runner,        "buildbox/runner"
  autoload :Script,        "buildbox/script"
  autoload :Server,        "buildbox/server"
  autoload :UTF8,          "buildbox/utf8"
  autoload :Agent,         "buildbox/agent"
  autoload :Uploader,      "buildbox/uploader"
  autoload :VERSION,       "buildbox/version"

  def self.config
    @config ||= Configuration.new.tap(&:reload)
  end

  def self.logger
    @logger ||= Logger.new(STDOUT).tap { |logger| logger.level = Logger::INFO }
  end

  def self.logger=(logger)
    @logger = logger
  end

  def self.gem_path
    path = File.expand_path(File.join(__FILE__, "..", ".."))

    Pathname.new(path)
  end

  def self.home_path
    path = Pathname.new(File.join(Dir.home, ".buildbox"))
    path.mkpath unless path.exist?

    path
  end
end
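
# Illustrative usage of the module-level helpers above (the log message is a
# made-up example):
#
#   Buildbox.logger.info("agent booting")   # STDOUT logger, INFO level by default
#   Buildbox.config.api_endpoint            # Configuration reloaded from ~/.buildbox
#   Buildbox.home_path                      # Pathname to ~/.buildbox, created on demand
#   Buildbox.gem_path.join("lib", "certs")  # a path inside the installed gem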

require 'celluloid'

module Buildbox
  class Agent
    include Celluloid
    include Celluloid::Logger

    def initialize(access_token, api = Buildbox::API.new)
      @api = api
      @access_token = access_token
      @uploader_pool = Uploader.pool(size: 10) # upload 10 things at a time
    end

    def process
      return if @current_build

      if @current_build = next_build
        @api.update_build(@access_token, @current_build, :agent_accepted => @access_token)

        monitor = Monitor.new(@current_build, @access_token, @api).async.monitor
        runner = Runner.start(@current_build)

        @current_build.artifact_paths.each do |path|
          upload_artifacts_from(runner.build_directory, path)
        end
      end

      @current_build = nil
    end

    private

    def next_build
      @api.agent(@access_token, :hostname => hostname, :version => Buildbox::VERSION)
      @api.next_build(@access_token)
    rescue Buildbox::API::AgentNotFoundError
      warn "Agent `#{@access_token}` does not exist"
      nil
    end

    def hostname
      `hostname`.chomp
    end

    def upload_artifacts_from(build_directory, artifact_path)
      files = Artifact.files_to_upload(build_directory, artifact_path)

      files.each_pair do |relative_path, absolute_path|
        artifact = @api.create_artifact(@access_token, @current_build,
                                        path: relative_path,
                                        file_size: File.size(absolute_path))

        @uploader_pool.upload(artifact[:uploader], absolute_path) do |state, response|
          @api.update_artifact(@access_token, @current_build, artifact[:id], state: state)
        end
      end
    rescue => e
      error "There was an error uploading artifacts for path: #{artifact_path} (#{e.class.name}: #{e.message})"
      e.backtrace[0..3].each { |line| error(line) }
    end
  end
end
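
# Illustrative usage sketch (the access token is a made-up value). An Agent is
# a Celluloid actor, normally created and supervised by Server, that processes
# at most one build at a time:
#
#   agent = Buildbox::Agent.new("an-agent-access-token")
#   agent.async.process   # register with the API, run the next queued build, upload artifacts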

require 'faraday'
require 'faraday_middleware'
require 'hashie/mash'
require 'delegate'

module Buildbox
  class API
    # Faraday's logger middleware logs response details at debug level, which
    # is mostly useless noise when the agent itself runs in DEBUG mode. The
    # ProxyLogger drops that output and pushes Faraday's info-level lines
    # through at debug, so only the information we care about reaches the
    # logger.
    class ProxyLogger
      def initialize(logger)
        @logger = logger
      end

      def info(*args)
        @logger.debug(*args)
      end

      def debug(*args)
        # no-op
      end
    end

    class AgentNotFoundError < Faraday::Error::ClientError; end
    class ServerError < Faraday::Error::ClientError; end

    def initialize(config = Buildbox.config, logger = Buildbox.logger)
      @config = config
      @logger = logger
    end

    def agent(access_token, options)
      put(access_token, options)
    rescue Faraday::Error::ClientError => e
      if e.response && e.response[:status] == 404
        raise AgentNotFoundError.new(e, e.response)
      else
        raise ServerError.new(e, e.response)
      end
    end

    def next_build(access_token)
      response = get("#{access_token}/builds/queue/next")

      if build = response.build
        Buildbox::Build.new(build)
      else
        nil
      end
    end

    def update_build(access_token, build, options)
      put("#{access_token}/builds/#{build.id}", options)
    end

    def create_artifact(access_token, build, options)
      post("#{access_token}/builds/#{build.id}/artifacts", options)
    end

    def update_artifact(access_token, build, artifact_id, options)
      put("#{access_token}/builds/#{build.id}/artifacts/#{artifact_id}", options)
    end

    private

    def connection
      @connection ||= Faraday.new(:url => @config.api_endpoint,
                                  :ssl => { :ca_file => Buildbox.gem_path.join("lib", "certs", "cacert.pem").to_s }) do |faraday|
        faraday.request :retry
        faraday.request :json

        faraday.response :logger, ProxyLogger.new(@logger)
        faraday.response :mashify

        # The JSON middleware is registered after mashify so it runs first on
        # the response (response middleware runs in reverse registration
        # order); the body has to be parsed before it can be mashified.
        faraday.response :json
        faraday.response :raise_error

        faraday.adapter Faraday.default_adapter

        # Set some sensible defaults on the adapter.
        faraday.options[:timeout] = 60
        faraday.options[:open_timeout] = 60
      end
    end

    def post(path, body = {})
      connection.post(path) do |request|
        request.body = body
        request.headers['Content-Type'] = 'application/json'
      end.body
    end

    def put(path, body = {})
      connection.put(path) do |request|
        request.body = body
      end.body
    end

    def get(path)
      connection.get(path).body
    end
  end
end
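
# Illustrative usage sketch (the access token is a made-up value). Responses
# are parsed as JSON and mashified, so builds come back with dot access:
#
#   api   = Buildbox::API.new
#   build = api.next_build("an-agent-access-token")   # => Buildbox::Build or nil
#   api.update_build("an-agent-access-token", build, :agent_accepted => "an-agent-access-token") if build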

require 'fileutils'
require 'tempfile'

module Buildbox
  class Artifact
    include Celluloid::Logger

    def self.files_to_upload(build_directory, glob)
      new(build_directory, glob).files_to_upload
    end

    def initialize(build_directory, glob)
      @build_directory = build_directory
      @glob = glob
    end

    def files_to_upload
      tmpdir = Dir.mktmpdir
      path_to_absolute = {}

      copy_files_to_upload(tmpdir).each do |file|
        path_to_absolute[relativize_to_dir(file, tmpdir)] = file
      end

      path_to_absolute
    end

    private

    def copy_files_to_upload(dir)
      expanded_directory = File.expand_path(@build_directory)
      absolute_glob = File.expand_path(@glob, expanded_directory)

      target_files = Dir.glob(absolute_glob)

      target_files.each do |file|
        relative_path = relativize_to_dir(file, expanded_directory)
        copy_to = File.join(dir, relative_path)

        FileUtils.mkdir_p(File.dirname(copy_to))
        FileUtils.cp(file, copy_to)
      end

      # Grab all the files we're going to upload.
      Dir.glob(File.join(dir, "**", "*")).reject { |file| File.directory?(file) }
    end

    # /foo/build-directory/something.txt => /something.txt
    # /var/random/something.txt          => /var/random/something.txt
    def relativize_to_dir(path, directory)
      if path.to_s.index(directory.to_s) == 0
        parts = path.to_s.split(directory.to_s)
        parts.shift
        parts.join(directory.to_s)
      else
        path
      end
    end
  end
end
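
# Illustrative usage sketch (the directory, glob and tmpdir below are made-up
# values). Matching files are copied into a tmpdir and returned as a hash of
# relative path => absolute path of the copy:
#
#   Buildbox::Artifact.files_to_upload("/home/ci/.buildbox/acme-app", "log/**/*.log")
#   # => { "/log/test.log" => "/tmp/d20140101-1234-abc123/log/test.log", ... }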

require 'hashie/mash'

module Buildbox
  class Build < Hashie::Mash
    def success?
      exit_status == 0
    end

    def cancelling?
      cancel_started == true
    end

    def started?
      !started_at.nil?
    end

    def finished?
      !finished_at.nil?
    end
  end
end
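
# Illustrative usage sketch: Build is a Hashie::Mash over the attributes the
# API returns, plus a few predicates (the attribute values are made up):
#
#   build = Buildbox::Build.new(:exit_status => 0, :started_at => Time.now.utc)
#   build.success?   # => true
#   build.started?   # => true
#   build.finished?  # => false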

require 'optparse'

module Buildbox
  class CLI
    attr_reader :argv

    def initialize(argv)
      @argv = argv
      @commands = {}
      @options = {}

      @commands['agent:setup'] = OptionParser.new do |opts|
        opts.banner = "Usage: buildbox agent:setup [token]"

        opts.on("--help", "You're looking at it.") do
          puts @commands['agent:setup']
          exit
        end
      end

      @commands['agent:start'] = OptionParser.new do |opts|
        opts.banner = "Usage: buildbox agent:start"

        opts.on("--help", "You're looking at it.") do
          puts @commands['agent:start']
          exit
        end
      end

      @commands['version'] = OptionParser.new do |opts|
        opts.banner = "Usage: buildbox version"
      end
    end

    def parse
      global.order!

      command = @argv.shift

      if command
        if @commands.has_key?(command)
          @commands[command].parse!
        else
          puts "`#{command}` is an unknown command"
          exit 1
        end

        if command == "version"
          puts Buildbox::VERSION
          exit
        end

        if command == "agent:start"
          Buildbox::Server.new.start
        elsif command == "agent:setup"
          if @argv.length == 0
            puts "No token provided"
            exit 1
          end

          access_token = @argv.first
          agent_access_tokens = Buildbox.config.agent_access_tokens
          Buildbox.config.update(:agent_access_tokens => agent_access_tokens << access_token)

          puts "Successfully added agent access token"
          puts "You can now start the agent with: buildbox agent:start"
          puts "If the agent is already running, you'll have to restart it for the changes to take effect."
        end
      else
        puts global.help
      end
    end

    private

    def global
      @global ||= OptionParser.new do |opts|
        opts.version = Buildbox::VERSION
        opts.banner = 'Usage: buildbox COMMAND [command-specific-actions]'

        opts.separator help
      end
    end

    def help
      <<HELP

  agent:setup [access_token]  # set the access token for the agent
  agent:start                 # start the buildbox agent
  version                     # display version

HELP
    end
  end
end
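
# Illustrative usage sketch: the buildbox executable is assumed to hand ARGV
# straight to the CLI, e.g.
#
#   Buildbox::CLI.new(["agent:setup", "an-agent-access-token"]).parse
#   Buildbox::CLI.new(["agent:start"]).parse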

require 'childprocess'

# Inspiration from:
# https://github.com/mitchellh/vagrant/blob/master/lib/vagrant/util/subprocess.rb

begin
  require 'pty' # PTY isn't available on Windows
rescue LoadError
end

module Buildbox
  class Command
    # The chunk size for reading from subprocess IO.
    READ_CHUNK_SIZE = 4096

    # An error which occurs when the process doesn't end within
    # the given timeout.
    class TimeoutExceeded < StandardError; end

    attr_reader :output, :exit_status

    def self.run(*args, &block)
      command = new(*args, &block)
      command.start(&block)
      command
    end

    def initialize(*args)
      @options = args.last.is_a?(Hash) ? args.pop : {}
      @arguments = args.dup
      @logger = Buildbox.logger
    end

    def arguments
      [ *@arguments ].compact.map(&:to_s) # all arguments must be strings
    end

    def process
      @process ||= ChildProcess.build(*arguments)
    end

    def start(&block)
      # Get the timeout, if we have one
      timeout = @options[:timeout]

      # Set the directory for the process
      process.cwd = File.expand_path(@options[:directory] || Dir.pwd)

      # Create the pipes so we can read the output in real time. PTY isn't
      # available on all platforms (Heroku, for example) so we fall back to
      # IO.pipe if it's not present.
      read_pipe, write_pipe = begin
        PTY.open
      rescue
        IO.pipe
      end

      process.io.stdout = write_pipe
      process.io.stderr = write_pipe
      process.duplex = true

      # Set the environment on the process
      if @options[:environment]
        @options[:environment].each_pair do |key, value|
          process.environment[key] = value
        end
      end

      # Start the process
      process.start

      # Make sure the stdin does not buffer
      process.io.stdin.sync = true

      @logger.debug("Process #{arguments} started with PID: #{process.pid}")

      if RUBY_PLATFORM != "java"
        # On Java, we have to close after. See down the method...
        # Otherwise, we close the writer right here, since we're
        # not on the writing side.
        write_pipe.close
      end

      # Record the start time for timeout purposes
      start_time = Time.now.to_i

      # Track the output as it goes
      output = ""

      @logger.debug("Selecting on IO")
      while true
        results = IO.select([read_pipe], nil, nil, timeout || 0.1) || []
        readers = results[0]

        # Check if we have exceeded our timeout
        raise TimeoutExceeded if timeout && (Time.now.to_i - start_time) > timeout
        # Kill the process and wait a bit for it to disappear
        # Process.kill('KILL', process.pid)
        # Process.waitpid2(process.pid)

        # Check the readers to see if they're ready
        if readers && !readers.empty?
          readers.each do |r|
            # Read from the IO object
            data = read_io(r)

            # We don't need to do anything if the data is empty
            next if data.empty?

            output << cleaned_data = UTF8.clean(data)
            yield cleaned_data if block_given?
          end
        end

        # Break out if the process exited. We have to do this before
        # attempting to write to stdin otherwise we'll get a broken pipe
        # error.
        break if process.exited?
      end

      # Wait for the process to end.
      begin
        remaining = (timeout || 32000) - (Time.now.to_i - start_time)
        remaining = 0 if remaining < 0
        @logger.debug("Waiting for process to exit. Remaining to timeout: #{remaining}")

        process.poll_for_exit(remaining)
      rescue ChildProcess::TimeoutError
        raise TimeoutExceeded
      end

      @logger.debug("Exit status: #{process.exit_code}")

      # Read the final output data, since it is possible we missed a small
      # amount of text between the time we last read data and when the
      # process exited.
      extra_data = read_io(read_pipe)

      # If there's some that we missed
      if extra_data != ""
        output << cleaned_data = UTF8.clean(extra_data)
        yield cleaned_data if block_given?
      end

      if RUBY_PLATFORM == "java"
        # On JRuby, we need to close the writers after the process,
        # for some reason. See https://github.com/mitchellh/vagrant/pull/711
        write_pipe.close
      end

      @output = output.chomp
      @exit_status = process.exit_code
    end

    private

    # Reads data from an IO object while it can, returning the data it reads.
    # When it encounters a case when it can't read anymore, it returns the
    # data.
    #
    # @return [String]
    def read_io(io)
      data = ""

      while true
        begin
          if Platform.windows?
            # Windows doesn't support non-blocking reads on
            # file descriptors or pipes so we have to get
            # a bit more creative.

            # Check if data is actually ready on this IO device.
            # We have to do this since `readpartial` will actually block
            # until data is available, which can cause blocking forever
            # in some cases.
            results = IO.select([io], nil, nil, 0.1)
            break if !results || results[0].empty?

            # Read!
            data << io.readpartial(READ_CHUNK_SIZE)
          else
            # Do a simple non-blocking read on the IO object
            data << io.read_nonblock(READ_CHUNK_SIZE)
          end
        rescue Exception => e
          # The catch-all rescue here is to support multiple Ruby versions,
          # since we use some Ruby 1.9 specific exceptions.

          breakable = false

          # EOFError is raised on OS X, Errno::EIO on Ubuntu
          if e.is_a?(EOFError) || e.is_a?(Errno::EIO)
            # An `EOFError` means this IO object is done!
            breakable = true
          elsif defined?(IO::WaitReadable) && e.is_a?(IO::WaitReadable)
            # IO::WaitReadable is only available on Ruby 1.9+

            # An IO::WaitReadable means there may be more IO but this
            # IO object is not ready to be read from yet. No problem,
            # we read as much as we can, so we break.
            breakable = true
          elsif e.is_a?(Errno::EAGAIN) || e.is_a?(Errno::EWOULDBLOCK)
            # Otherwise, we just look for the EAGAIN error which should be
            # all that IO::WaitReadable does in Ruby 1.9.
            breakable = true
          end

          # Break out if we're supposed to. Otherwise re-raise the error
          # because it is a real problem.
          break if breakable
          raise
        end
      end

      data
    end
  end
end
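
# Illustrative usage sketch (the command and timeout are made-up values).
# Command.run executes the arguments via ChildProcess, streams UTF-8 cleaned
# output chunks to the block, and records the final output and exit status:
#
#   command = Buildbox::Command.run("ls", "-la", :timeout => 60) do |chunk|
#     print chunk
#   end
#
#   command.output       # => the full, chomped output
#   command.exit_status  # => e.g. 0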

require 'hashie/mash'
require 'json'

module Buildbox
  class Configuration < Hashie::Mash
    def agent_access_tokens
      env_agents = ENV['BUILDBOX_AGENTS']

      if env_agents.nil?
        self[:agent_access_tokens] || []
      else
        env_agents.to_s.split(",")
      end
    end

    def api_endpoint
      endpoint = ENV['BUILDBOX_API_ENDPOINT'] || self[:api_endpoint] || "https://agent.buildbox.io/v1"

      # Hack to update legacy endpoints
      if endpoint == "https://api.buildbox.io/v1"
        self.api_endpoint = "https://agent.buildbox.io/v1"
        save
        api_endpoint
      else
        endpoint
      end
    end

    def update(attributes)
      attributes.each_pair { |key, value| self[key] = value }
      save
    end

    def save
      File.open(path, 'w+') { |file| file.write(pretty_json) }
    end

    def reload
      if path.exist?
        read_and_load
      else
        save && read_and_load
      end
    end

    private

    def pretty_json
      JSON.pretty_generate(self)
    end

    def read_and_load
      merge! JSON.parse(path.read)
    end

    def path
      Buildbox.home_path.join("configuration.json")
    end
  end
end
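
# Illustrative usage sketch (the tokens are made-up values). Environment
# variables win over ~/.buildbox/configuration.json, which wins over the
# built-in default endpoint:
#
#   ENV["BUILDBOX_AGENTS"] = "token-one,token-two"
#   Buildbox.config.agent_access_tokens  # => ["token-one", "token-two"]
#   Buildbox.config.api_endpoint         # => "https://agent.buildbox.io/v1" unless overridden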

require 'celluloid'

module Buildbox
  class Monitor
    include Celluloid

    def initialize(build, access_token, api)
      @build = build
      @access_token = access_token
      @api = api
    end

    def monitor
      loop do
        if @build.started?
          # As the build can finish in between making the update_build API
          # call and checking whether the build has finished, we make sure we
          # use the same finished_at timestamp throughout the entire method.
          finished_at = @build.finished_at

          updated_build = @api.update_build(@access_token, @build, :started_at => @build.started_at,
                                                                   :finished_at => finished_at,
                                                                   :output => @build.output,
                                                                   :exit_status => @build.exit_status)

          if updated_build.state == 'canceled' && !@build.cancelling?
            Buildbox::Canceler.new(@build).async.cancel
          end

          break if finished_at
        end

        sleep 1
      end
    end
  end
end

module Buildbox
  class Platform
    class << self
      [:cygwin, :darwin, :bsd, :freebsd, :linux, :solaris].each do |type|
        define_method("#{type}?") do
          platform.include?(type.to_s)
        end
      end

      def windows?
        %w[mingw mswin].each do |text|
          return true if platform.include?(text)
        end

        false
      end

      def platform
        RbConfig::CONFIG["host_os"].downcase
      end
    end
  end
end
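
# Illustrative usage sketch: the predicates defined above query
# RbConfig::CONFIG["host_os"], so on a typical Linux host:
#
#   Buildbox::Platform.linux?    # => true
#   Buildbox::Platform.windows?  # => false
#   Buildbox::Platform.platform  # => e.g. "linux-gnu"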

require 'celluloid'
require 'fileutils'
require 'childprocess'

module Buildbox
  class Runner
    include Celluloid
    include Celluloid::Logger

    def self.start(build)
      runner = new(build)
      runner.start
      runner
    end

    def initialize(build)
      @build = build
    end

    def build_directory
      @build_directory ||= Buildbox.home_path.join(@build.namespace)
    end

    def start
      info "Starting build #{@build.namespace}/#{@build.id}..."

      FileUtils.mkdir_p(build_directory)
      File.open(script_path, 'w+') { |file| file.write(@build.script) }
      File.chmod(0777, script_path)

      command = Command.new(script_path, :environment => @build.env, :directory => build_directory)

      @build.output = ""
      @build.process = command.process
      @build.started_at = Time.now.utc

      command.start { |chunk| @build.output << chunk }

      @build.output = command.output
      @build.exit_status = command.exit_status

      File.delete(script_path)

      @build.finished_at = Time.now.utc

      info "#{@build.namespace} ##{@build.id} finished with exit status #{command.exit_status}"
    end

    private

    def script_path
      @script_path ||= Buildbox.home_path.join(script_name)
    end

    def script_name
      name = "#{@build.namespace.gsub(/\//, '-')}-#{@build.id}"
      name << ".bat" if ChildProcess.platform == :windows
      name
    end
  end
end

require 'celluloid'

module Buildbox
  class Server
    INTERVAL = 5

    def initialize(config = Buildbox.config, logger = Buildbox.logger)
      @config = config
      @logger = logger
      @supervisors = []
    end

    def start
      Celluloid.logger = @logger

      agent_access_tokens.each do |access_token|
        @supervisors << Buildbox::Agent.supervise(access_token)

        @logger.info "Agent with access token `#{access_token}` has started."
      end

      loop do
        @supervisors.each do |supervisor|
          supervisor.actors.first.async.process
        end

        wait INTERVAL
      end
    end

    private

    def wait(interval)
      @logger.debug "Sleeping for #{interval} seconds"

      sleep interval
    end

    def agent_access_tokens
      @config.agent_access_tokens
    end
  end
end

module Buildbox
  module UTF8
    # Raised when no usable intermediate encoding can be found.
    class CannotFindEncoding < StandardError; end

    # Replace or delete invalid UTF-8 characters from text, which is assumed
    # to be in UTF-8.
    #
    # The text is expected to come from sources external to the agent, such
    # as commit messages or build output.
    #
    # On Ruby 1.9, invalid UTF-8 characters are replaced with question marks.
    # On Ruby 1.8, if the iconv extension is present, invalid UTF-8 characters
    # are removed.
    # On Ruby 1.8, if the iconv extension is not present, the string is
    # returned unmodified.
    def self.clean(text)
      # http://po-ru.com/diary/fixing-invalid-utf-8-in-ruby-revisited/
      # http://stackoverflow.com/questions/9126782/how-to-change-deprecated-iconv-to-stringencode-for-invalid-utf8-correction
      if text.respond_to?(:encoding)
        # Ruby 1.9
        text = text.force_encoding('utf-8').encode(intermediate_encoding, :invalid => :replace, :replace => '?').encode('utf-8')
      else
        # Ruby 1.8
        # As no encoding checks are done, any string will be accepted.
        # But delete invalid utf-8 characters anyway for consistency with 1.9.
        iconv, iconv_fallback = clean_utf8_iconv
        if iconv
          begin
            text = iconv.iconv(text)
          rescue Iconv::IllegalSequence
            text = iconv_fallback.iconv(text)
          end
        end
      end
      text
    end

    # Apparently utf-16 is not available everywhere, in particular not on travis.
    # Try to find a usable encoding.
    def self.intermediate_encoding
      map = {}
      Encoding.list.each do |encoding|
        map[encoding.name.downcase] = true
      end
      %w(utf-16 utf-16be utf-16le utf-7 utf-32 utf-32le utf-32be).each do |candidate|
        if map[candidate]
          return candidate
        end
      end
      raise CannotFindEncoding, 'Cannot find an intermediate encoding for conversion to UTF-8'
    end

    def self.clean_utf8_iconv
      unless @iconv_loaded
        begin
          require 'iconv'
        rescue LoadError
          @iconv = nil
        else
          @iconv = Iconv.new('utf-8//translit//ignore', 'utf-8')
          # On some systems (Linux appears to be vulnerable, FreeBSD not)
          # iconv chokes on invalid utf-8 with //translit//ignore.
          @iconv_fallback = Iconv.new('utf-8//ignore', 'utf-8')
        end
        @iconv_loaded = true
      end
      [@iconv, @iconv_fallback]
    end
  end
end
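
# Illustrative check of UTF8.clean (the input string is a made-up value): on
# Ruby 1.9+ invalid bytes are replaced with question marks, so a stray \xFF in
# build output comes back printable:
#
#   Buildbox::UTF8.clean("output \xFF from a build")  # => "output ? from a build"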