lib/isodoc/mpfd/word_convert.rb in metanorma-mpfd-0.1.0 vs lib/isodoc/mpfd/word_convert.rb in metanorma-mpfd-0.1.1
- old
+ new
@@ -5,14 +5,10 @@
module IsoDoc
module Mpfd
# A {Converter} implementation that generates Word output, and a document
# schema encapsulation of the document for validation
class WordConvert < IsoDoc::WordConvert
- def rsd_html_path(file)
- File.join(File.dirname(__FILE__), File.join("html", file))
- end
-
def initialize(options)
@libdir = File.dirname(__FILE__)
super
FileUtils.cp html_doc_path("logo.jpg"), "logo.jpg"
FileUtils.cp html_doc_path('mpfa-logo-no-text@4x.png'), "mpfa-logo-no-text@4x.png"
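
# A minimal sketch of how the retained html_doc_path calls above are assumed
# to resolve once @libdir is set in initialize (the removed rsd_html_path
# helper built the same kind of path by hand):
#
#   @libdir = File.dirname(__FILE__)
#   html_doc_path("logo.jpg")
#   # => File.join(@libdir, "html", "logo.jpg") (assumed base-class behaviour)
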
@@ -55,53 +51,10 @@
make_body2(body, docxml)
make_body3(body, docxml)
end
end
- def make_body2(body, docxml)
- body.div **{ class: "WordSection2" } do |div2|
- info docxml, div2
- div2.p { |p| p << " " } # placeholder
- end
- #body.br **{ clear: "all", style: "page-break-before:auto;mso-break-type:section-break;" }
- section_break(body)
- end
-
- def title(isoxml, _out)
- main = isoxml&.at(ns("//title[@language='en']"))&.text
- set_metadata(:doctitle, main)
- end
-
- def generate_header(filename, dir)
- return unless @header
- template = Liquid::Template.parse(File.read(@header, encoding: "UTF-8"))
- meta = @meta.get
- meta[:filename] = filename
- params = meta.map { |k, v| [k.to_s, v] }.to_h
- File.open("header.html", "w") { |f| f.write(template.render(params)) }
- @files_to_delete << "header.html"
- "header.html"
- end
-
- def header_strip(h)
- h = h.to_s.gsub(%r{<br/>}, " ").sub(/<\/?h[12][^>]*>/, "")
- h1 = to_xhtml_fragment(h.dup)
- h1.traverse do |x|
- x.replace(" ") if x.name == "span" &&
- /mso-tab-count/.match(x["style"])
- x.remove if x.name == "span" && x["class"] == "MsoCommentReference"
- x.remove if x.name == "a" && x["epub:type"] == "footnote"
- x.replace(x.children) if x.name == "a"
- end
- from_xhtml(h1)
- end
-
- def info(isoxml, out)
- @meta.security isoxml, out
- super
- end
-
def annex_name(annex, name, div)
div.h1 **{ class: "Annex" } do |t|
t << "#{get_anchors[annex['id']][:label]} "
t.b do |b|
name&.children&.each { |c2| parse(c2, b) }
@@ -111,23 +64,10 @@
def pre_parse(node, out)
out.pre node.text # content.gsub(/</, "&lt;").gsub(/>/, "&gt;")
end
- def term_defs_boilerplate(div, source, term, preface)
- if source.empty? && term.nil?
- div << @no_terms_boilerplate
- else
- div << term_defs_boilerplate_cont(source, term)
- end
- end
-
- def i18n_init(lang, script)
- super
- @annex_lbl = "Appendix"
- end
-
def error_parse(node, out)
# catch elements not defined in ISO
case node.name
when "pre"
pre_parse(node, out)
@@ -148,23 +88,28 @@
YAML.load_file(File.join(File.dirname(__FILE__), "i18n-en.yaml"))
elsif lang == "zh" && script == "Hans"
YAML.load_file(File.join(File.dirname(__FILE__),
"i18n-zh-Hans.yaml"))
else
- YAML.load_file(File.join(File.dirname(__FILE__), "i18n-zh-Hans.yaml"))
+ YAML.load_file(File.join(File.dirname(__FILE__), "i18n-en.yaml"))
end
@labels = @labels.merge(y)
+ @annex_lbl = y["annex"]
@clause_lbl = y["clause"]
end
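
# A minimal sketch of the revised label lookup, assuming the bundled
# i18n-en.yaml defines "annex" and "clause" keys: an unrecognised
# lang/script pair now falls back to the English file rather than
# i18n-zh-Hans.yaml, and @annex_lbl is read from the YAML instead of being
# hardcoded to "Appendix" in the removed i18n_init override:
#
#   require "yaml"
#   y = YAML.load_file(File.join(File.dirname(__FILE__), "i18n-en.yaml"))
#   @annex_lbl  = y["annex"]   # e.g. "Annex" (assumed value)
#   @clause_lbl = y["clause"]  # e.g. "Clause" (assumed value)
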
def terms_defs_title(f)
return f&.at(ns("./title"))&.content
end
TERM_CLAUSE = "//preface/terms | "\
"//preface/clause[descendant::terms]".freeze
+ SECTIONS_XPATH =
+ "//foreword | //introduction | //preface/terms | //preface/clause | //annex | "\
+ "//sections/clause | //bibliography/references | "\
+ "//bibliography/clause".freeze
def terms_defs(isoxml, out, num)
f = isoxml.at(ns(TERM_CLAUSE)) or return num
out.div **attr_code(id: f["id"]) do |div|
clause_name(nil, terms_defs_title(f), div, nil)
@@ -174,27 +119,11 @@
end
num
end
FRONT_CLAUSE = "//*[parent::preface]".freeze
- #FRONT_CLAUSE = "//clause[parent::preface] | //terms[parent::preface]".freeze
- def preface(isoxml, out)
- isoxml.xpath(ns(FRONT_CLAUSE)).each do |c|
- if c.name == "terms" then terms_defs isoxml, out, 0
- else
- out.div **attr_code(id: c["id"]) do |s|
- clause_name(get_anchors[c['id']][:label],
- c&.at(ns("./title"))&.content, s, nil)
- c.elements.reject { |c1| c1.name == "title" }.each do |c1|
- parse(c1, s)
- end
- end
- end
- end
- end
-
def initial_anchor_names(d)
d.xpath(ns(FRONT_CLAUSE)).each do |c|
preface_names(c)
sequential_asset_names(c)
end
@@ -202,18 +131,10 @@
clause_names(d, 0)
termnote_anchor_names(d)
termexample_anchor_names(d)
end
-
- def middle(isoxml, out)
- middle_title(out)
- clause isoxml, out
- annex isoxml, out
- bibliography isoxml, out
- end
-
def make_body2(body, docxml)
body.div **{ class: "WordSection2" } do |div2|
info docxml, div2
foreword docxml, div2
introduction docxml, div2
@@ -221,11 +142,10 @@
div2.p { |p| p << " " } # placeholder
end
section_break(body)
end
-
def middle(isoxml, out)
middle_title(out)
clause isoxml, out
annex isoxml, out
bibliography isoxml, out
@@ -262,13 +182,11 @@
end
def sect_names(clause, num, i, lvl, prev_lvl)
return i if clause.nil?
curr = i
- if clause["container"]
- retlvl = lvl+1
- else
+ if !clause["container"]
retlvl = lvl
i+=1
curr = i
name = num.nil? ? i.to_s : "#{num}.#{i}"
@anchors[clause["id"]] = { label: name, xref: l10n("#{@clause_lbl} #{name}"), level: lvl+1 }
@@ -289,28 +207,29 @@
end
def annex_naming(c, num, lvl, i)
if c["guidance"] then annex_names1(c, "#{num}E", lvl + 1)
else
- i+= 1
+ i+= 1 unless c["container"]
annex_names1(c, "#{num}.#{i}", lvl + 1)
end
i
end
def annex_names(clause, num)
@anchors[clause["id"]] = { label: annex_name_lbl(clause, num),
xref: "#{@annex_lbl} #{num}", level: 1 }
i = 0
clause.xpath(ns("./clause")).each do |c|
+ container_names(c, 0)
i = annex_naming(c, num, 1, i)
end
hierarchical_asset_names(clause, num)
end
def annex_names1(clause, num, level)
- @anchors[clause["id"]] = { label: num, xref: "#{@annex_lbl} #{num}",
- level: level }
+ clause["container"] or @anchors[clause["id"]] =
+ { label: num, xref: "#{@annex_lbl} #{num}", level: level }
i = 0
clause.xpath(ns("./clause")).each do |c|
i = annex_naming(c, num, level, i)
end
end
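
# A minimal sketch of the numbering effect of the container checks above, for
# a hypothetical annex whose first subclause is marked container="true": the
# counter in annex_naming is not advanced for it and annex_names1 records no
# anchor for it, so only non-container clauses receive "A.n"-style labels
# (container anchors are instead handled by the container_names call added in
# annex_names):
#
#   <annex id="A">
#     <clause id="c1" container="true"/>  <!-- no label of its own; i unchanged -->
#     <clause id="c2"/>                   <!-- labelled "A.1" (assumed) -->
#   </annex>
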