lib/liquidoc.rb in liquidoc-0.12.0.pre.rc5 vs lib/liquidoc.rb in liquidoc-0.12.0.pre.rc6
- old
+ new
@@ -7,13 +7,15 @@
require 'asciidoctor-pdf'
require 'logger'
require 'csv'
require 'crack/xml'
require 'fileutils'
-require 'jekyll'
require 'open3'
require 'highline'
+require 'liquid/tags/jekyll'
+require 'liquid/filters/jekyll'
+require 'sterile'
# ===
# Table of Contents
# ===
#
@@ -23,11 +25,12 @@
# 4. object classes def
# 5. action-specific procs def
# 5a. parse procs def
# 5b. migrate procs def
# 5c. render procs def
-# 6. text manipulation modules/classes def
+# 5d. execute procs def
+# 6. text manipulation filters
# 7. command/option parser def
# 8. executive proc calls
# ===
# Default settings
@@ -37,10 +40,12 @@
@base_dir = @base_dir_def
@build_dir_def = @base_dir + '_build'
@build_dir = @build_dir_def
@configs_dir = @base_dir + '_configs'
@templates_dir = @base_dir + '_templates/'
+@includes_dirs_def = ['.','_templates','_templates/liquid','_templates/liquid/ops','_templates/ops','theme/_includes','_theme/layouts']
+@includes_dirs = @includes_dirs_def
@data_dir = @base_dir + '_data/'
@data_files = nil
@attributes_file_def = '_data/asciidoctor.yml'
@attributes_file = @attributes_file_def
@pdf_theme_file = 'theme/pdf-theme.yml'
@@ -77,11 +82,12 @@
# Establish source, template, index, etc details for build jobs from a config file
def config_build config_file, config_vars={}, data_files=nil, parse=false
@logger.debug "Using config file #{config_file}."
validate_file_input(config_file, "config")
- if config_vars.length > 0 or data_files or parse or contains_liquid(config_file)
+ config_base = File.read(config_file)
+ if config_vars.length > 0 or data_files or parse or config_base.contains_liquid?
@logger.debug "Config_vars: #{config_vars.length}"
# If config variables are passed on the CLI, we want to parse the config file
# and use the parsed version for the rest of this routine
config_out = "#{@build_dir}/pre/#{File.basename(config_file)}"
data_obj = DataObj.new()
@@ -91,22 +97,24 @@
end
data_obj.add_data!(config_vars, "vars")
liquify(data_obj, config_file, config_out)
config_file = config_out
@logger.debug "Config parsed! Using #{config_out} for build."
- validate_file_input(config_file, "config")
end
+ validate_file_input(config_file, "config")
begin
config = YAML.load_file(config_file)
rescue Exception => ex
unless File.exists?(config_file)
@logger.error "Config file #{config_file} not found."
else
@logger.error "Problem loading config file #{config_file}. #{ex} Exiting."
end
raise "ConfigFileError"
end
+ # TESTS
+ # puts config[0].argify
cfg = BuildConfig.new(config) # convert the config file to a new object called 'cfg'
if @safemode
commands = ""
cfg.steps.each do |step|
if step['action'] == "execute"
@@ -143,18 +151,20 @@
build = Build.new(bld, type, data_obj) # create an instance of the Build class; Build.new accepts a 'bld' hash & action 'type'
if build.template
# Prep & perform a Liquid-parsed build
@explainer.info build.message
build.add_data!(build.variables, "vars") if build.variables
+ includes_dirs = @includes_dirs
+ includes_dirs = build.includes_dirs if build.includes_dirs
+ build.add_data!({:includes_dirs=>includes_dirs})
liquify(build.data, build.template, build.output) # perform the liquify operation
else # Prep & perform a direct conversion
# Delete nested data and vars objects
build.data.remove_scope("data")
build.data.remove_scope("vars")
# Add vars from CLI or config args
build.data.add_data!(build.variables) unless build.variables.empty?
- build.data.add_data!(@passed_vars) unless @passed_vars.empty?
regurgidata(build.data, build.output)
end
end
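For reference, a parse step can now point its builds at non-default include locations; a hedged config sketch (the data, template, and output paths are hypothetical):

    - action: parse
      data: _data/products.yml
      builds:
        - template: _templates/liquid/product.asciidoc
          output: _build/product.adoc
          includes_dirs:
            - _templates/liquid/custom
          variables:
            edition: enterprise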
when "migrate"
inclusive = true
@@ -167,12 +177,14 @@
when "render"
validate_file_input(step.source, "source") if step.source
builds = step.builds
for bld in builds
doc = AsciiDocument.new(step.source)
- attrs = ingest_attributes(step.data) if step.data # Set attributes from YAML files
- doc.add_attrs!(attrs) # Set attributes from the action-level data file
+ if step.data
+ attrs = ingest_attributes(step.data)
+ doc.add_attrs!(attrs) # Set attributes from the action-level data file
+ end
build = Build.new(bld, type) # create an instance of the Build class; Build.new accepts a 'bld' hash & action 'type' string
build.set("backend", derive_backend(doc.type, build.output) ) unless build.backend
@explainer.info build.message
render_doc(doc, build) # perform the render operation
end
@@ -207,35 +219,10 @@
@logger.error "Could not validate input file: #{error}"
raise "InvalidInput"
end
end
-def validate_config_structure config
- unless config.is_a? Array
- message = "The configuration file is not properly structured."
- @logger.error message
- raise "ConfigStructError"
- else
- if (defined?(config['action'])).nil?
- message = "Every listing in the configuration file needs an action type declaration."
- @logger.error message
- raise "ConfigStructError"
- end
- end
-# TODO More validation needed
-end
-
-def contains_liquid filename
- File.open(filename, "r") do |file_proc|
- file_proc.each_line do |row|
- if row.match(/.*\{\%.*\%\}.*|.*\{\{.*\}\}.*/)
- return true
- end
- end
- end
-end
-
def explainer_init out=nil
unless @explainer
if out == "STDOUT"
@explainer = Logger.new(STDOUT)
else
@@ -282,16 +269,11 @@
def initialize config
if (defined?(config['compile'][0])) # The config is formatted for versions < 0.3.0; convert it
config = deprecated_format(config)
end
-
- # validations
- unless config.is_a? Array
- raise "ConfigStructError"
- end
-
+ validate(config)
@cfg = config
end
def steps
@cfg
@@ -304,18 +286,25 @@
n.merge!("action" => "parse") # the action type was not previously declared
end
return config['compile']
end
+ def validate config
+ unless config.is_a? Array
+ raise "ConfigStructError"
+ end
+ # TODO More validation needed
+ end
+
end #class BuildConfig
class BuildConfigStep
def initialize step
@step = step
if (defined?(@step['action'])).nil?
- raise "ConfigStructError"
+ raise "StepStructError"
end
@step['options'] = nil unless defined?(step['options'])
validate()
end
@@ -412,11 +401,11 @@
reqs = ["command"]
end
for req in reqs
if (defined?(@step[req])).nil?
@logger.error "Every #{@step['action']}-type in the configuration file needs a '#{req}' declaration."
- raise "ConfigStructError"
+ raise "ConfigStepError"
end
end
end
end #class Action
@@ -434,10 +423,14 @@
def template
@build['template']
end
+ def includes_dirs
+ @build['includes_dirs']
+ end
+
def output
@build['output']
end
def style
@@ -643,18 +636,18 @@
datatype = @datasrc['type']
if datatype.downcase == "yaml" # This is an expected common error, so let's do the user a solid
datatype = "yml"
end
else # If there's no 'type' defined, extract it from the filename and validate it
- unless @datasrc['ext'].downcase.match(/\.yml|\.json|\.xml|\.csv/)
+ unless @datasrc['ext'].downcase.match(/\.yml|\.json|\.xml|\.csv|\.adoc/)
# @logger.error "Data file extension must be one of: .yml, .json, .xml, or .csv or else declared in config file."
raise "FileExtensionUnknown"
end
datatype = self.ext
datatype = datatype[1..-1] # removes leading dot char
end
- unless datatype.downcase.match(/yml|json|xml|csv|regex/) # 'type' must be one of these permitted vals
+ unless datatype.downcase.match(/yml|json|xml|csv|regex|adoc/) # 'type' must be one of these permitted vals
# @logger.error "Declared data type must be one of: yaml, json, xml, csv, or regex."
raise "DataTypeUnrecognized"
end
datatype
end
@@ -831,10 +824,17 @@
data = parse_regex(datasrc.file, datasrc.pattern)
else
@logger.error "You must supply a regex pattern with your free-form data file."
raise "MissingRegexPattern"
end
+ when "adoc"
+ begin
+ doc = Asciidoctor.load_file(datasrc.file)
+ data = doc.attributes
+ rescue
+ @logger.error "Problem with AsciiDoc source file. Attributes not ingested."
+ end
end
return data
end
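The new "adoc" data type treats an AsciiDoc file's document attributes as the data payload, so attribute entries such as :product_name: LiquiDoc become template variables. A minimal sketch of the ingestion path (the file name and attribute are hypothetical):

    doc = Asciidoctor.load_file("_data/settings.adoc")
    data = doc.attributes   # e.g. includes "product_name" => "LiquiDoc"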
def parse_regex data_file, pattern
@@ -862,27 +862,29 @@
end
return output
end
# Parse given data using given template, generating given output
-def liquify data_obj, template_file, output
+def liquify data_obj, template_file, output="stdout"
validate_file_input(template_file, "template")
+ # inject :includes_dirs as needed
+ data_obj.add_data!({'includes_dirs' => @includes_dirs}) unless data_obj.data['includes_dirs']
begin
template = File.read(template_file) # reads the template file
template = Liquid::Template.parse(template) # compiles template
rendered = template.render(data_obj.data) # renders the output
rescue Exception => ex
message = "Problem rendering Liquid template. #{template_file}\n" \
"#{ex.class} thrown. #{ex.message}"
@logger.error message
raise message
end
- unless output.downcase == "stdout"
+ unless output == "stdout" || @output_type == "stdout"
output_file = output
generate_file(rendered, output_file)
else # if stdout
- puts "========\nOUTPUT: Rendered with template #{template_file}:\n\n#{rendered}\n"
+ puts rendered
end
end
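Taken together, a direct call to the updated liquify signature might look like the following (a sketch using only calls shown elsewhere in this file; paths are hypothetical). Omitting the output argument now prints the rendered text to STDOUT:

    data_obj = DataObj.new()
    data_obj.add_data!({"product" => "LiquiDoc"}, "vars")
    liquify(data_obj, "_templates/liquid/note.asciidoc")                      # prints to stdout
    liquify(data_obj, "_templates/liquid/note.asciidoc", "_build/note.adoc")  # writes a file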
def cli_liquify data_files=nil, template_file=nil, output_file=nil, passed_vars
# converts command-line options into liquify or regurgidata inputs
@@ -906,11 +908,11 @@
regurgidata(data_obj, output)
end
end
def regurgidata data_obj, output
- # converts data files from one format directly to another
+ # converts data object from one format directly to another
raise "UnrecognizedFileExtension" unless File.extname(output).match(/\.yml|\.json|\.xml|\.csv/)
case File.extname(output)
when ".yml"
new_data = data_obj.data.to_yaml
when ".json"
@@ -921,11 +923,10 @@
@logger.warn "CSV output not yet implemented."
end
if new_data
begin
generate_file(new_data, output)
- # File.open(output, 'w') { |file| file.write(new_data) }
@logger.info "Data converted and saved to #{output}."
rescue Exception => ex
@logger.error "#{ex.class}: #{ex.message}"
raise "FileWriteError"
end
@@ -1142,11 +1143,11 @@
def jekyll_serve build
# Locally serve Jekyll as per the primary Jekyll config file
@logger.debug "Attempting Jekyll serve operation."
config_file = build.props['files'][0]
if build.props['arguments']
- opts_args = build.props['arguments'].to_opts_args
+ opts_args = build.props['arguments'].argify
end
command = "bundle exec jekyll serve --config #{config_file} #{opts_args} --no-watch --skip-initial-build"
system command
end
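Since argify defaults to the 'hyphhyph' template, a serve step's arguments hash expands into --key value pairs (values containing spaces or commas get single-quoted). A hedged example; the option keys are hypothetical Jekyll settings:

    arguments:
      port: 4000
      host: 127.0.0.1

    # => "--port 4000 --host 127.0.0.1" appended to the jekyll serve command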
@@ -1184,11 +1185,11 @@
end
if cmd.options['outfile']
contents = stdout
if cmd.options['outfile']
contents = "#{cmd.options['outfile']['prepend']}\n#{stdout}" if cmd.options['outfile']['prepend']
- contents = "#{stdout}/n#{cmd.options['outfile']['append']}" if cmd.options['outfile']['append']
+ contents = "#{stdout}\n#{cmd.options['outfile']['append']}" if cmd.options['outfile']['append']
generate_file(contents, cmd.options['outfile']['path'])
end
if cmd.options['stdout']
puts stdout
end
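The corrected newline handling matters for execute steps that capture command output. A hedged config sketch (the command and paths are hypothetical):

    - action: execute
      command: git describe --tags
      options:
        outfile:
          path: _build/version.adoc
          prepend: "// Generated file -- do not edit"
          append: "// End of generated content"
        stdout: true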
@@ -1198,28 +1199,10 @@
# ===
# Text manipulation Classes, Modules, procs, etc
# ===
-module HashMash
-
- def to_opts_args
- out = ''
- if self.is_a? Hash # TODO Should also be testing for flatness
- self.each do |opt,arg|
- out = out + " --#{opt} #{arg}"
- end
- end
- return out
- end
-
-end
-
-class Hash
- include HashMash
-end
-
module ForceArray
# So we can accept a list string ("item1.yml,item2.yml") or a single item ("item1.yml")
# and convert to array as needed
def force_array
obj = self
@@ -1229,77 +1212,365 @@
obj = obj.split(",") # Will even force a string with no commas to a 1-item array
else
obj = Array.new.push(obj)
end
else
- raise "ForceArrayFail"
+ if obj.class == Hash
+ obj = obj.to_array
+ else
+ raise "ForceArrayFail"
+ end
end
end
return obj.to_ary
end
+ def force_array!
+ self.force_array
+ end
+
end
class String
include ForceArray
-# Adapted from Nikhil Gupta
-# http://nikhgupta.com/code/wrapping-long-lines-in-ruby-for-display-in-source-files/
+ # Adapted from Nikhil Gupta
+ # http://nikhgupta.com/code/wrapping-long-lines-in-ruby-for-display-in-source-files/
def wrap options = {}
- width = options.fetch(:width, 76)
- commentchar = options.fetch(:commentchar, '')
+ width = options.fetch(:width, 76) # length to wrap at
+ pre = options.fetch(:prepend, '') # text to prepend
+ app = options.fetch(:append, '') # text to append
+ chars = pre.size + app.size
self.strip.split("\n").collect do |line|
- line.length > width ? line.gsub(/(.{1,#{width}})(\s+|$)/, "\\1\n#{commentchar}") : line
- end.map(&:strip).join("\n#{commentchar}")
+ line.length + chars > width ? line.gsub(/(.{1,#{(width - chars)}})(\s+|$)/, "#{pre}\\1#{app}\n") : "#{pre}#{line}#{app}\n"
+ end.map(&:rstrip).join("\n")
end
def indent options = {}
- spaces = " " * options.fetch(:spaces, 4)
- self.gsub(/^/, spaces).gsub(/^\s*$/, '')
+ # TODO: does not allow tabs; inserts explicit `\t` string
+ syms = options.fetch(:sym, ' ') * options.fetch(:by, 2)
+ self.gsub!(/^/m, "#{syms}")
+ self.sub!("#{syms}", "") unless options.fetch(:line1, false)
end
- def indent_with_wrap options = {}
- spaces = options.fetch(:spaces, 4)
- width = options.fetch(:width, 80)
- width = width > spaces ? width - spaces : 1
- self.wrap(width: width).indent(spaces: spaces)
+ def contains_liquid?
+ self.each_line do |row|
+ if row.match(/.*\{\%.*\%\}.*|.*\{\{.*\}\}.*/)
+ return true
+ end
+ end
+ return false
end
+ def quote_wrap options = {}
+ # When a string contains a certain pattern, wrap it in certain quotes
+ # Pass '\s' as the pattern to wrap any string that contains one or more spaces or tabs;
+ # pass '.' as the pattern to always wrap.
+
+ pattern = options.fetch(:pattern, '\s').to_s
+ return self unless self.strip.match(/#{pattern}/)
+ quotes = options.fetch(:quotes, "single")
+ case quotes
+ when "single"
+ wrap = "''"
+ when "double"
+ wrap = '""'
+ when "backtick"
+ wrap = "``"
+ when "bracket"
+ wrap = "[]"
+ else
+ wrap = quotes
+ end
+ wrap << wrap[0] unless wrap[1]
+ return wrap[0] + self.strip + wrap[1]
+ end
+
end
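A quick usage sketch for the new String#quote_wrap (interactive-style; results assume the default single-quote style and whitespace pattern):

    "hello world".quote_wrap                      #=> "'hello world'"
    "hello world".quote_wrap(:quotes => "double") #=> "\"hello world\""
    "nospaces".quote_wrap                         #=> "nospaces"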
class Array
include ForceArray
+
+ def to_hash
+ struct = {}
+ self.each do |p|
+ struct.merge!(p) if p.is_a? Hash
+ end
+ return struct
+ end
+
+ # Get all unique values for each item in an array, or each unique value of a designated
+ # parameter in an array of hashes.
+ #
+ # @input : the object array
+ # @property : (optional) parameter in which to select unique values (for hashes)
+ def unique_property_values property=nil
+ return self.uniq unless property
+ new_ary = self.uniq { |i| i[property] }
+ out = new_ary.map { |i| i[property] }.compact
+ out
+ end
+
+ def concatenate_property_instances property=String
+ # flattens the values of instances of a given property throughout an array of Hashes
+ all_arrays = []
+ self.each do |i|
+ all_arrays << i[property]
+ end
+ return all_arrays.flatten
+ end
+
+ def repeated_property_values property=String
+ # Tests for uniqueness among all values in the subarrays (list-formatted values) held by
+ # the given property across all nodes in the parent array.
+ # Returns an array of the items that appear in more than one of the tested arrays.
+ #
+ # Example:
+ # array_of_hashes[0]['cue'] = ['one','two','three']
+ # array_of_hashes[1]['cue'] = ['three','four','five']
+ # array_of_hashes.repeated_property_values('cue')
+ # #=> ['three']
+ # Because 'three' appears in both instances of 'cue'.
+ firsts = []
+ dupes = []
+ self.each do |node|
+ return ['non-array property value present'] unless node[property].is_a? Array
+ node[property].each do |i|
+ dupes << i if firsts.include? i
+ firsts << i
+ end
+ end
+ return dupes
+ end
+
end
+class Hash
+ include ForceArray
+
+ def to_array op=nil
+ # Converts a hash of key-value pairs to a flat array based on the first tier
+ out = []
+ self.each do |k,v|
+ v = "<RemovedObject>" if v.is_a? Enumerable and op == "flatten"
+ out << {k => v}
+ end
+ return out
+ end
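Hash#to_array turns each first-tier pair into its own single-pair hash; with the "flatten" op, nested values are masked. A brief sketch:

    {"a" => 1, "b" => {"c" => 2}}.to_array            #=> [{"a"=>1}, {"b"=>{"c"=>2}}]
    {"a" => 1, "b" => {"c" => 2}}.to_array("flatten") #=> [{"a"=>1}, {"b"=>"<RemovedObject>"}]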
+
+ def argify options = {}
+ # Converts a hash of key-value pairs to command-line option/argument listings
+ # Accepts an options hash with:
+ # template :: a Liquid-formatted parsing template string, or one of these preset names:
+ #
+ # 'hyph' :: -<key> <value>
+ # 'hyphhyph' :: --<key> <value> (default)
+ # 'hyphchar' :: -<k> <value>
+ # 'dump' :: <key> <value>
+ # 'paramequal' :: <key>=<value>
+ # 'valonly' :: <value>
+ # delim :: Delimiter -- any ASCII characters that separate the arguments
+ #
+ # For template-based usage, express the variables:
+ # opt (the keyname) as {{opt}}
+ # arg (the value) as {{arg}}
+ # EXAMPLES (my_hash = {"key1"=>"val1", "key2"=>"val2"})
+ # my_hash.argify #=> --key1 val1 --key2 val2
+ # my_hash.argify(:template => 'dump') #=> key1 val1 key2 val2
+ # my_hash.argify(:template => 'paramequal') #=> key1=val1 key2=val2
+ # my_hash.argify(:template => '-a {{opt}}={{arg}}') #=> -a key1=val1 -a key2=val2
+ # my_hash.argify(:template => 'valonly', :delim => '||') #=> val1||val2
+ # my_hash.argify(:template => "{{opt}} `{{arg}}`") #=> key1 `val1` key2 `val2`
+ raise "InvalidObject" unless self.is_a? Hash
+ template = options.fetch(:template, 'hyphhyph')
+ if template.contains_liquid?
+ tp = template # use the passed Liquid template
+ else
+ case template # use a preset Liquid template by name
+ when "dump"
+ tp = "{{opt}} {{arg | quote_wrap: 'single', '\s|,' }}"
+ when "hyph"
+ tp = "-{{opt}} {{arg | quote_wrap: 'single', '\s|,' }}"
+ when "hyphhyph"
+ tp = "--{{opt}} {{arg | quote_wrap: 'single', '\s|,' }}"
+ when "paramequal"
+ tp = "{{opt}}={{arg | quote_wrap: 'single', '\s|,' }}"
+ when "valonly"
+ tp = "{{arg | quote_wrap: 'single', '\s|,' }}"
+ else
+ return "Liquid: Unrecognized argify template name: #{template}"
+ end
+ end
+ begin
+ tpl = Liquid::Template.parse(tp)
+ first = true
+ out = ''
+ self.each do |k,v|
+ # establish datasource
+ v = "<Object>" if v.is_a? Hash
+ v = v.join(',') if v.is_a? Array
+ input = {"opt" => k.to_s, "arg" => v.to_s }
+ if first
+ dlm = ""
+ first = false
+ else
+ dlm = options.fetch(:delim, ' ')
+ end
+ out += dlm + tpl.render(input)
+ end
+ rescue
+ raise "Argify template processing failed"
+ end
+ return out
+ end
+
+
+end
+
# Extending Liquid filters/text manipulation
-module CustomFilters
+module LiquiDocFilters
include Jekyll::Filters
+ #
+ # sterile-based filters
+ #
- def plainwrap input
- input.wrap
+ def to_slug input, delim='-'
+ o = input.dup
+ opts = {:delimiter=>delim}
+ o.to_slug(opts)
end
- def commentwrap input
- input.wrap commentchar: "# "
+
+ def transliterate input
+ o = input.dup
+ o.transliterate
end
- def unwrap input # Not fully functional; inserts explicit '\n'
+
+ def smart_format input
+ o = input.dup
+ o.smart_format
+ end
+
+ def encode_entities input
+ o = input.dup
+ o.encode_entities
+ end
+
+ def titlecase input
+ o = input.dup
+ o.titlecase
+ end
+
+ def strip_tags input
+ o = input.dup
+ o.strip_tags
+ end
+
+ def sterilize input
+ o = input.dup
+ o.sterilize
+ end
+
+ #
+ # Custom Filters
+ #
+
+ def where_uniq input, property, value
+ o = where(input, property, value) # Jekyll's where filter, mixed in above
+ return "No result" if o.size.zero?
+ return "Multiple results" if o.size > 1
+ o[0]
+ end
+
+ def wrap input, width=80, prepend='', append='', vent=false
+ input.wrap(:width => width, :prepend => prepend, :append => append)
+ end
+
+ def plainwrap input, width=80
+ input.wrap(:width => width)
+ end
+
+ def commentwrap input, width=80, prepend='# '
+ input.wrap(:width => width, :prepend => prepend)
+ end
+
+ def unwrap input, token1='&g59h%j1k;', token2='&ru8sf%df;'
if input
- token = "[g59hj1k]"
- input.gsub(/\n\n/, token).gsub(/\n/, ' ').gsub(token, "\n\n")
+ input.gsub(/(.)\n\n/, "\\1#{token1}").gsub(/([\."'])$\n([A-Z\(\_"'])/,"\\1#{token2}\\2").gsub(/\n/, '').gsub(token2,"\n").gsub(token1, "\n\n")
end
end
- def slugify input
- # Downcase
- # Turn unwanted chars into the seperator
+ def indent_lines input, by=2, sym=' ', line1=false
+ input.indent(:by => by, :sym => "#{sym}", :line1 => line1)
+ end
+
+ def slugify input, delim='-', snip=false
s = input.to_s.downcase
- s.gsub!(/[^a-zA-Z0-9\-_\+\/]+/i, "-")
+ s.gsub!(/[^a-z0-9]/, delim)
+ if snip
+ while s.match("#{delim}#{delim}")
+ s.gsub!("#{delim}#{delim}", "#{delim}")
+ end
+ s.gsub!(/^#{delim}+(.*)$/, "\\1")
+ s.gsub!(/^(.*)#{delim}+$/, "\\1")
+ end
s
end
+ def asciidocify input
+ Asciidoctor.convert(input, doctype: "inline")
+ end
+
+ def quote_wrap input, quotes="''", pattern="\s"
+ input.quote_wrap(:quotes => quotes, :pattern => pattern)
+ end
+
+ def to_cli_args input, template="paramequal", delim=" "
+ input.argify(:template => template, :delim => delim)
+ end
+
+ def hash_to_array input, op=nil
+ o = input.dup
+ o.to_array(op)
+ end
+
+ def holds_liquid input
+ o = false
+ o = true if input.contains_liquid?
+ o
+ end
+
+ def store_list_uniq input, property=nil
+ input.unique_property_values(property)
+ end
+
+ def store_list_concat input, property=String
+ input.concatenate_property_instances(property)
+ end
+
+ def store_list_dupes input, property=String
+ input.repeated_property_values(property)
+ end
+
def regexreplace input, regex, replacement=''
+ # deprecated in favor of replace_regex as of 0.12.0
input.to_s.gsub(Regexp.new(regex), replacement.to_s)
end
+ def replace_regex input, regex, replacement='', multiline=true, global=true
+ pattern = Regexp.new(regex, Regexp::MULTILINE) if multiline
+ pattern = Regexp.new(regex) unless multiline
+ o = input.to_s.gsub(pattern, replacement.to_s) if global
+ o = input.to_s.sub(pattern, replacement.to_s) unless global
+ o
+ end
+
+ def match input, regex, multiline=true, global=true
+ pattern = Regexp.new(regex, Regexp::MULTILINE) if multiline
+ pattern = Regexp.new(regex) unless multiline
+ return true if input.to_s.match(pattern)
+ return false
+ end
+
def to_yaml input
o = input.to_yaml
o = o.gsub(/^\-\-\-$\n/, "")
o
end
@@ -1308,12 +1579,12 @@
o = input.to_json
o
end
end
-# register custom Liquid filters
-Liquid::Template.register_filter(CustomFilters)
+# Register custom Liquid filters
+Liquid::Template.register_filter(LiquiDocFilters)
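With the filters registered, Liquid templates can call them directly. A hedged illustration (the variable names are hypothetical; jekyll_options is assumed to be a hash):

    {{ page_title | slugify: '-', true }}
    {{ long_note | commentwrap: 60 }}
    {{ jekyll_options | to_cli_args: 'hyphhyph' }}
    {{ release_name | smart_format | titlecase }}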
# ===
# Command/options parser
# ===
@@ -1329,41 +1600,47 @@
pair[k] = v
@passed_attrs.merge!pair
end
# Global Options
- opts.on("-b PATH", "--base=PATH", "The base directory, relative to this script. Defaults to `.`, or pwd." ) do |n|
+ opts.on("-b PATH", "--base PATH", "The base directory, relative to this script. Defaults to `.`, or pwd." ) do |n|
@base_dir = n
end
- opts.on("-B PATH", "--build=PATH", "The directory under which LiquiDoc should save automatically preprocessed files. Defaults to #{@base_dir}_build. Can be absolute or relative to the base path (-b/--base=). Do NOT append '/' to the build path." ) do |n|
+ opts.on("-B PATH", "--build PATH", "The directory under which LiquiDoc should save automatically preprocessed files. Defaults to #{@base_dir}_build. Can be absolute or relative to the base path (-b/--base=). Do NOT append '/' to the build path." ) do |n|
@build_dir = n
end
- opts.on("-c", "--config=PATH", "Configuration file, enables preset source, template, and output.") do |n|
+ opts.on("-c", "--config PATH", "Configuration file, enables preset source, template, and output.") do |n|
@config_file = @base_dir + n
end
opts.on("-d PATH[,PATH]", "--data=PATH[,PATH]", "Semi-structured data source (input) path or paths. Ex. path/to/data.yml or data/file1.yml,data/file2.json. Required unless --config is called; optional with config." ) do |n|
data_files = n.split(',')
data_files = data_files.map! {|file| @base_dir + file}
@data_files = DataFiles.new(data_files)
end
- opts.on("-f PATH", "--from=PATH", "Directory to copy assets from." ) do |n|
+ opts.on("-f PATH", "--from PATH", "Directory to copy assets from." ) do |n|
@attributes_file = n
end
- opts.on("-i PATH", "--index=PATH", "An AsciiDoc index file for mapping an Asciidoctor build." ) do |n|
+ opts.on("-i PATH", "--index PATH", "An AsciiDoc index file for mapping an Asciidoctor build." ) do |n|
@index_file = n
end
opts.on("-o PATH", "--output=PATH", "Output file path for generated content. Ex. path/to/file.adoc. Required unless --config is called.") do |n|
@output = @base_dir + n
end
- opts.on("-t PATH", "--template=PATH", "Path to liquid template. Required unless --configuration is called." ) do |n|
+ opts.on("-t PATH", "--template PATH", "Path to liquid template. Required unless --configuration is called." ) do |n|
@template_file = @base_dir + n
+ end
+
+ opts.on("--includes PATH[,PATH]", "Paths to directories where includes (partials) can be found." ) do |n|
+ n = n.force_array
+ # n.map { |p| @base_dir + p }
+ @includes_dirs = @includes_dirs.concat n
end
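A hedged invocation of the new flag (paths are hypothetical); comma-separated paths are split by force_array and appended to the default includes_dirs list:

    bundle exec liquidoc -c _configs/build-docs.yml --includes _templates/liquid/custom,_snippets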
opts.on("--verbose", "Run verbose debug logging.") do |n|
@logger.level = Logger::DEBUG
@verbose = true