Sha256: 6fba60e005ee5a43074437f987d4f0fe5f50a1527ad6ab58ea0a439bf75e9356

Contents?: true

Size: 1.67 KB

Versions: 1

Compression:

Stored size: 1.67 KB

Contents

#!/usr/bin/env ruby

# Prefer the gem's local lib/ directory (e.g. when running from a source checkout).
$LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)
require 'broken_link_finder'
require 'thor'

class BrokenLinkFinderCLI < Thor
  desc 'crawl [URL]', 'Find broken links at the URL'
  option :recursive, type: :boolean, aliases: [:r], default: false, desc: 'Crawl the entire site.'
  option :threads, type: :numeric, aliases: [:t], default: BrokenLinkFinder::DEFAULT_MAX_THREADS, desc: 'Max number of threads to use when crawling recursively; 1 thread per web page.'
  option :sort_by_link, type: :boolean, aliases: [:l], default: false, desc: 'Makes report more concise if there are more pages crawled than broken links found. Use with -r on medium/large sites.'
  option :verbose, type: :boolean, aliases: [:v], default: false, desc: 'Display all ignored links.'
  option :concise, type: :boolean, aliases: [:c], default: false, desc: 'Display only a summary of broken links.'
  def crawl(url)
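    # Default to an http:// scheme when the URL does not include one.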
    url = "http://#{url}" unless url.start_with?('http')

    sort_by           = options[:sort_by_link] ? :link : :page
    max_threads       = options[:threads]
    broken_verbose    = !options[:concise]
    ignored_verbose   = options[:verbose]

    # Single-page crawl by default; crawl the entire site when --recursive is set.
    finder = BrokenLinkFinder::Finder.new(sort: sort_by, max_threads: max_threads)
    options[:recursive] ? finder.crawl_site(url) : finder.crawl_page(url)
    finder.pretty_print_link_report(
      broken_verbose: broken_verbose,
      ignored_verbose: ignored_verbose
    )
  rescue StandardError => ex
    # Report failures with a short message instead of a stack trace;
    # rescuing StandardError (rather than Exception) keeps interrupts like Ctrl-C working.
    puts "An error has occurred: #{ex.message}"
  end

  desc 'version', 'Display the currently installed version'
  def version
    puts "broken_link_finder v#{BrokenLinkFinder::VERSION}"
  end
end

BrokenLinkFinderCLI.start(ARGV)
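
For illustration, here is a minimal sketch of using the Finder API directly, i.e. the same calls the CLI wraps. The constructor keywords, methods, and constant are taken from the script above; the URL is a placeholder.

require 'broken_link_finder'

# Build a finder with the same defaults the CLI uses: sort results by page
# and use the gem's default maximum thread count.
finder = BrokenLinkFinder::Finder.new(sort: :page, max_threads: BrokenLinkFinder::DEFAULT_MAX_THREADS)

# Crawl a single page; crawl_site would crawl the whole site instead.
finder.crawl_page('http://example.com') # placeholder URL

# Print the same report the CLI produces by default: full broken-link detail,
# ignored links omitted.
finder.pretty_print_link_report(broken_verbose: true, ignored_verbose: false)

From a shell, the equivalent single-page crawl is "broken_link_finder crawl URL"; adding -r crawls the whole site and -t sets the maximum thread count.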

Version data entries

1 entry across 1 version & 1 rubygem

Version                     Path
broken_link_finder-0.9.0    exe/broken_link_finder