lib/docurium.rb in docurium-0.6.0 vs lib/docurium.rb in docurium-0.7.0
- old
+ new
@@ -2,30 +2,34 @@
require 'tempfile'
require 'version_sorter'
require 'rocco'
require 'docurium/version'
require 'docurium/layout'
+require 'docurium/debug'
require 'libdetect'
require 'docurium/docparser'
require 'pp'
require 'rugged'
require 'redcarpet'
require 'redcarpet/compat'
+require 'parallel'
require 'thread'
# Markdown expects the old redcarpet compat API, so let's tell it what
# to use
Rocco::Markdown = RedcarpetCompat
class Docurium
- attr_accessor :branch, :output_dir, :data
+ attr_accessor :branch, :output_dir, :data, :head_data
- def initialize(config_file, repo = nil)
+ def initialize(config_file, cli_options = {}, repo = nil)
raise "You need to specify a config file" if !config_file
raise "You need to specify a valid config file" if !valid_config(config_file)
@sigs = {}
+ @head_data = nil
@repo = repo || Rugged::Repository.discover(config_file)
+ @cli_options = cli_options
end
def init_data(version = 'HEAD')
data = {:files => [], :functions => {}, :callbacks => {}, :globals => {}, :types => {}, :prefix => ''}
data[:prefix] = option_version(version, 'input', '')
@@ -113,97 +117,82 @@
end
def generate_doc_for(version)
index = Rugged::Index.new
read_subtree(index, version, option_version(version, 'input', ''))
+
data = parse_headers(index, version)
- data
+ examples = format_examples!(data, version)
+ [data, examples]
end
- def generate_docs(options)
+ def process_project(versions)
+ nversions = versions.count
+ Parallel.each_with_index(versions, finish: -> (version, index, result) do
+ data, examples = result
+ # There's still some work we need to do serially
+ tally_sigs!(version, data)
+ force_utf8(data)
+
+ puts "Adding documentation for #{version} [#{index}/#{nversions}]"
+
+ # Store it so we can show it at the end
+ @head_data = data if version == 'HEAD'
+
+ yield index, version, result if block_given?
+
+ end) do |version, index|
+ puts "Generating documentation for #{version} [#{index}/#{nversions}]"
+ generate_doc_for(version)
+ end
+ end
+
+ def generate_docs
output_index = Rugged::Index.new
write_site(output_index)
@tf = File.expand_path(File.join(File.dirname(__FILE__), 'docurium', 'layout.mustache'))
versions = get_versions
versions << 'HEAD'
# If the user specified versions, validate them and overwrite
- if !(vers = options[:for]).empty?
+ if !(vers = (@cli_options[:for] || [])).empty?
vers.each do |v|
next if versions.include?(v)
puts "Unknown version #{v}"
exit(false)
end
versions = vers
end
- nversions = versions.size
- output = Queue.new
- pipes = {}
- versions.each do |version|
- # We don't need to worry about joining since this process is
- # going to die immediately
- read, write = IO.pipe
- pid = Process.fork do
- read.close
-
- data = generate_doc_for(version)
- examples = format_examples!(data, version)
-
- Marshal.dump([version, data, examples], write)
- write.close
- end
-
- pipes[pid] = read
- write.close
+ if (@repo.config['user.name'].nil? || @repo.config['user.email'].nil?)
+ puts "ERROR: 'user.name' or 'user.email' is not configured. Docurium will not be able to commit the documentation"
+ exit(false)
end
- print "Generating documentation [0/#{nversions}]\r"
- head_data = nil
-
- # This may seem odd, but we need to keep reading from the pipe or
- # the buffer will fill and they'll block and never exit. Therefore
- # we can't rely on Process.wait to tell us when the work is
- # done. Instead read from all the pipes concurrently and send the
- # ruby objects through the queue.
- Thread.abort_on_exception = true
- pipes.each do |pid, read|
- Thread.new do
- result = read.read
- output << Marshal.load(result)
- end
- end
-
- for i in 1..nversions
- version, data, examples = output.pop
-
- # There's still some work we need to do serially
- tally_sigs!(version, data)
- force_utf8(data)
+ process_project(versions) do |i, version, result|
+ data, examples = result
sha = @repo.write(data.to_json, :blob)
- print "Generating documentation [#{i}/#{nversions}]\r"
+ print "Generating documentation [#{i}/#{versions.count}]\r"
- # Store it so we can show it at the end
- if version == 'HEAD'
- head_data = data
+ unless dry_run?
+ output_index.add(:path => "#{version}.json", :oid => sha, :mode => 0100644)
+ examples.each do |path, id|
+ output_index.add(:path => path, :oid => id, :mode => 0100644)
+ end
end
+ end
- output_index.add(:path => "#{version}.json", :oid => sha, :mode => 0100644)
- examples.each do |path, id|
- output_index.add(:path => path, :oid => id, :mode => 0100644)
- end
-
- if head_data
- puts ''
- show_warnings(data)
- end
-
+ if head_data
+ puts ''
+ show_warnings(head_data)
end
- # We tally the sigantures in the order they finished, which is
+ return if dry_run?
+
+ # We tally the signatures in the order they finished, which is
# arbitrary due to the concurrency, so we need to sort them once
- # they've finsihed.
+ # they've finished.
sort_sigs!
project = {
:versions => versions.reverse,
:github => @options['github'],
@@ -249,36 +238,129 @@
elsif data.respond_to?(:each)
data.each { |x| force_utf8(x) }
end
end
- def show_warnings(data)
- out '* checking your api'
+ class Warning
+ class UnmatchedParameter < Warning
+ def initialize(function, opts = {})
+ super :unmatched_param, :function, function, opts
+ end
+ def _message; "unmatched param"; end
+ end
+
+ class SignatureChanged < Warning
+ def initialize(function, opts = {})
+ super :signature_changed, :function, function, opts
+ end
+
+ def _message; "signature changed"; end
+ end
+
+ class MissingDocumentation < Warning
+ def initialize(type, identifier, opts = {})
+ super :missing_documentation, type, identifier, opts
+ end
+
+ def _message
+ ["%s %s is missing documentation", :type, :identifier]
+ end
+ end
+
+ WARNINGS = [
+ :unmatched_param,
+ :signature_changed,
+ :missing_documentation,
+ ]
+
+ attr_reader :warning, :type, :identifier, :file, :line, :column
+
+ def initialize(warning, type, identifier, opts = {})
+ raise ArgumentError.new("invalid warning class") unless WARNINGS.include?(warning)
+ @warning = warning
+ @type = type
+ @identifier = identifier
+ if type = opts.delete(:type)
+ @file = type[:file]
+ if input_dir = opts.delete(:input_dir)
+          @file = File.expand_path(File.join(input_dir, @file))
+ end
+ @file ||= "<missing>"
+ @line = type[:line] || 1
+ @column = type[:column] || 1
+ end
+ end
+
+ def message
+ msg = self._message
+ msg.kind_of?(Array) ? msg.shift % msg.map {|a| self.send(a).to_s } : msg
+ end
+ end
+
+ def collect_warnings(data)
+ warnings = []
+ input_dir = File.join(@project_dir, option_version("HEAD", 'input'))
+
    # check for unmatched parameters
- unmatched = []
data[:functions].each do |f, fdata|
- unmatched << f if fdata[:comments] =~ /@param/
+ warnings << Warning::UnmatchedParameter.new(f, type: fdata, input_dir: input_dir) if fdata[:comments] =~ /@param/
end
- if unmatched.size > 0
- out ' - unmatched params in'
- unmatched.sort.each { |p| out ("\t" + p) }
- end
# check for changed signatures
sigchanges = []
- @sigs.each do |fun, data|
- if data[:changes]['HEAD']
- sigchanges << fun
+ @sigs.each do |fun, sig_data|
+ warnings << Warning::SignatureChanged.new(fun) if sig_data[:changes]['HEAD']
+ end
+
+ # check for undocumented things
+ types = [:functions, :callbacks, :globals, :types]
+ types.each do |type_id|
+ under_type = type_id.tap {|t| break t.to_s[0..-2].to_sym }
+ data[type_id].each do |ident, type|
+ under_type = type[:type] if type_id == :types
+
+ warnings << Warning::MissingDocumentation.new(under_type, ident, type: type, input_dir: input_dir) if type[:description].empty?
+
+ case type[:type]
+ when :struct
+ if type[:fields]
+ type[:fields].each do |field|
+ warnings << Warning::MissingDocumentation.new(:field, "#{ident}.#{field[:name]}", type: type, input_dir: input_dir) if field[:comments].empty?
+ end
+ end
+ end
end
end
- if sigchanges.size > 0
- out ' - signature changes in'
- sigchanges.sort.each { |p| out ("\t" + p) }
+ warnings
+ end
+
+ def check_warnings(options)
+ versions = []
+ versions << get_versions.pop
+ versions << 'HEAD'
+
+ process_project(versions)
+
+ collect_warnings(head_data).each do |warning|
+ puts "#{warning.file}:#{warning.line}:#{warning.column}: #{warning.message}"
end
end
+ def show_warnings(data)
+ out '* checking your api'
+
+ collect_warnings(data).group_by {|w| w.warning }.each do |klass, klass_warnings|
+ klass_warnings.group_by {|w| w.type }.each do |type, type_warnings|
+ out " - " + type_warnings[0].message
+ type_warnings.sort_by {|w| w.identifier }.each do |warning|
+ out "\t" + warning.identifier
+ end
+ end
+ end
+ end
+
def get_versions
releases = @repo.tags
.map { |tag| tag.name.gsub(%r(^refs/tags/), '') }
.delete_if { |tagname| tagname.match(%r(-rc\d*$)) }
VersionSorter.sort(releases)
@@ -290,14 +372,15 @@
files = headers.map do |file|
[file, @repo.lookup(index[file][:oid]).content]
end
data = init_data(version)
- parser = DocParser.new
- headers.each do |header|
- records = parser.parse_file(header, files)
- update_globals!(data, records)
+ DocParser.with_files(files, :prefix => version) do |parser|
+ headers.each do |header|
+ records = parser.parse_file(header, debug: interesting?(:file, header))
+ update_globals!(data, records)
+ end
end
data[:groups] = group_functions!(data)
data[:types] = data[:types].sort # make it an assoc array
find_type_usage!(data)
@@ -366,47 +449,61 @@
end
def group_functions!(data)
func = {}
data[:functions].each_pair do |key, value|
+ debug_set interesting?(:function, key)
+ debug "grouping #{key}: #{value}"
if @options['prefix']
k = key.gsub(@options['prefix'], '')
else
k = key
end
group, rest = k.split('_', 2)
+ debug "grouped: k: #{k}, group: #{group}, rest: #{rest}"
if group.empty?
puts "empty group for function #{key}"
next
end
+ debug "grouped: k: #{k}, group: #{group}, rest: #{rest}"
data[:functions][key][:group] = group
func[group] ||= []
func[group] << key
func[group].sort!
end
- misc = []
func.to_a.sort
end
def find_type_usage!(data)
- # go through all the functions and callbacks and see where other types are used and returned
+ # go through all functions, callbacks, and structs
+ # see which other types are used and returned
# store them in the types data
h = {}
h.merge!(data[:functions])
h.merge!(data[:callbacks])
- h.each do |func, fdata|
+
+ structs = data[:types].find_all {|t, tdata| (tdata[:type] == :struct and tdata[:fields] and not tdata[:fields].empty?) }
+ structs = Hash[structs.map {|t, tdata| [t, tdata] }]
+ h.merge!(structs)
+
+ h.each do |use, use_data|
data[:types].each_with_index do |tdata, i|
type, typeData = tdata
- data[:types][i][1][:used] ||= {:returns => [], :needs => []}
- if fdata[:return][:type].index(/#{type}[ ;\)\*]?/)
- data[:types][i][1][:used][:returns] << func
+
+ data[:types][i][1][:used] ||= {:returns => [], :needs => [], :fields => []}
+ if use_data[:return] && use_data[:return][:type].index(/#{type}[ ;\)\*]?/)
+ data[:types][i][1][:used][:returns] << use
data[:types][i][1][:used][:returns].sort!
end
- if fdata[:argline].index(/#{type}[ ;\)\*]?/)
- data[:types][i][1][:used][:needs] << func
+ if use_data[:argline] && use_data[:argline].index(/#{type}[ ;\)\*]?/)
+ data[:types][i][1][:used][:needs] << use
data[:types][i][1][:used][:needs].sort!
end
+ if use_data[:fields] and use_data[:fields].find {|f| f[:type] == type }
+ data[:types][i][1][:used][:fields] << use
+ data[:types][i][1][:used][:fields].sort!
+ end
end
end
end
def update_globals!(data, recs)
@@ -419,13 +516,32 @@
:meta => %W/brief defgroup ingroup comments/.map(&:to_sym),
}
file_map = {}
- md = Redcarpet::Markdown.new Redcarpet::Render::HTML, :no_intra_emphasis => true
+ md = Redcarpet::Markdown.new(Redcarpet::Render::HTML.new({}), :no_intra_emphasis => true)
recs.each do |r|
+ types = %w(function file type).map(&:to_sym)
+ dbg = false
+ types.each do |t|
+ dbg ||= if r[:type] == t and interesting?(t, r[:name])
+ true
+ elsif t == :file and interesting?(:file, r[:file])
+ true
+ elsif [:struct, :enum].include?(r[:type]) and interesting?(:type, r[:name])
+ true
+ else
+ false
+ end
+ end
+
+ debug_set dbg
+
+ debug "processing record: #{r}"
+ debug
+
# initialize filemap for this file
file_map[r[:file]] ||= {
:file => r[:file], :functions => [], :meta => {}, :lines => 0
}
if file_map[r[:file]][:lines] < r[:lineto]
@@ -433,11 +549,11 @@
end
# process this type of record
case r[:type]
when :function, :callback
- t = r[:type] == :function ? :functions : :callbacks
+ t = r[:type] == :function ? :functions : :callbacks
data[t][r[:name]] ||= {}
wanted[:functions].each do |k|
next unless r.has_key? k
if k == :description || k == :comments
contents = md.render r[k]
@@ -501,27 +617,42 @@
end
end
when :struct, :fnptr
data[:types][r[:name]] ||= {}
+ known = data[:types][r[:name]]
r[:value] ||= r[:name]
- wanted[:types].each do |k|
- next unless r.has_key? k
- if k == :comments
- data[:types][r[:name]][k] = md.render r[k]
- else
- data[:types][r[:name]][k] = r[k]
+ # we don't want to override "opaque" structs with typedefs or
+ # "public" documentation
+ unless r[:tdef].nil? and known[:fields] and known[:comments] and known[:description]
+ wanted[:types].each do |k|
+ next unless r.has_key? k
+ if k == :comments
+ data[:types][r[:name]][k] = md.render r[k]
+ else
+ data[:types][r[:name]][k] = r[k]
+ end
end
+ else
+ # We're about to skip that type. Just make sure we preserve the
+ # :fields comment
+ if r[:fields] and known[:fields].empty?
+ data[:types][r[:name]][:fields] = r[:fields]
+ end
end
if r[:type] == :fnptr
data[:types][r[:name]][:type] = "function pointer"
end
else
# Anything else we want to record?
end
+ debug "processed record: #{r}"
+ debug
+
+ debug_restore
end
data[:files] << file_map.values[0]
end
@@ -547,7 +678,15 @@
add_dir_to_index(index, dirname + '/', dirname)
end
def out(text)
puts text
+ end
+
+ def dry_run?
+ @cli_options[:dry_run]
+ end
+
+ def interesting?(type, what)
+ @cli_options['debug'] || (@cli_options["debug-#{type}"] || []).include?(what)
end
end