Sha256: 117a23531b1c3be4edefad5f9379bd76f70ff65249a681c8f3912d860c6045dd
Contents?: true
Size: 1.68 KB
Versions: 5
Compression:
Stored size: 1.68 KB
Contents
#!/usr/bin/env ruby
# frozen_string_literal: true

$LOAD_PATH.unshift File.expand_path('../lib', __dir__)

require 'broken_link_finder'
require 'thor'

# Thor-based command line interface for the broken_link_finder gem.
# Provides two commands: `crawl` (scan a URL or whole site for broken
# links) and `version` (print the installed gem version).
class BrokenLinkFinderCLI < Thor
  desc 'crawl [URL]', 'Find broken links at the URL'
  option :recursive, type: :boolean, aliases: [:r], default: false, desc: 'Crawl the entire site.'
  option :threads, type: :numeric, aliases: [:t], default: BrokenLinkFinder::DEFAULT_MAX_THREADS, desc: 'Max number of threads to use when crawling recursively; 1 thread per web page.'
  option :sort_by_link, type: :boolean, aliases: [:l], default: false, desc: 'Makes report more concise if there are more pages crawled than broken links found. Use with -r on medium/large sites.'
  option :verbose, type: :boolean, aliases: [:v], default: false, desc: 'Display all ignored links.'
  option :concise, type: :boolean, aliases: [:c], default: false, desc: 'Display only a summary of broken links.'
  # Crawl the given URL (or entire site with --recursive) and print a
  # broken-link report to STDOUT. Prefixes 'http://' when no scheme is given.
  def crawl(url)
    url = "http://#{url}" unless url.start_with?('http')
    sort_by = options[:sort_by_link] ? :link : :page
    max_threads = options[:threads]
    broken_verbose = !options[:concise]
    ignored_verbose = options[:verbose]

    finder = BrokenLinkFinder::Finder.new(sort: sort_by, max_threads: max_threads)
    options[:recursive] ? finder.crawl_site(url) : finder.crawl_page(url)
    finder.pretty_print_link_report(
      broken_verbose: broken_verbose,
      ignored_verbose: ignored_verbose
    )
  # Rescue StandardError, not Exception: rescuing Exception would swallow
  # SignalException (Ctrl-C) and SystemExit, preventing clean termination.
  rescue StandardError => e
    puts "An error has occurred: #{e.message}"
  end

  desc 'version', 'Display the currently installed version'
  # Print the installed gem version.
  def version
    puts "broken_link_finder v#{BrokenLinkFinder::VERSION}"
  end
end

BrokenLinkFinderCLI.start(ARGV)
Version data entries
5 entries across 5 versions & 1 rubygems