require "isodoc"
require_relative "metadata"
require_relative "xrefs"
require_relative "refs"
require_relative "section"
require "fileutils"

module IsoDoc
  module NIST
    # Rendering overrides shared by the NIST HTML and Word converters:
    # front-matter (keywords), NIST-specific block elements (nistvariable,
    # recommendation/requirement/permission, errata tables), glossary
    # definition lists, and terms-section layout.
    module BaseConvert
      # Render the sorted, semicolon-joined keyword list as its own
      # "Keywords" preface section; emits nothing when there are no keywords.
      def keywords(_docxml, out)
        kw = @meta.get[:keywords]
        kw.empty? and return
        out.div **{ class: "Section3" } do |div|
          clause_name(nil, "Keywords", div, class: "IntroTitle")
          div.p kw.sort.join("; ")
        end
      end

      # Preface clauses other than abstract/foreword (those are rendered
      # separately by the base converter).
      FRONT_CLAUSE = "//*[parent::preface][not(local-name() = 'abstract' or "\
        "local-name() = 'foreword')]".freeze

      # Reviewer notes are suppressed once the document stage is "final*".
      # Returns truthy when clause +c+ should not be rendered.
      def skip_render(c, isoxml)
        return false unless c.name == "reviewernote"
        status = isoxml&.at(ns("//bibdata/status/stage"))&.text
        return true if status.nil?
        # \A (not ^) so only a genuine "final..." prefix of the whole stage
        # string matches, regardless of embedded newlines.
        /\Afinal/.match?(status)
      end

      # Resolve +loc+ relative to this source file's directory.
      def fileloc(loc)
        File.join(File.dirname(__FILE__), loc)
      end

      # Inline the "Recommendation/Requirement/Permission" caption paragraph
      # into the following paragraph as a bold lead-in.
      def requirement_cleanup(docxml)
        docxml.xpath("//div[@class = 'recommend' or @class = 'require' "\
                     "or @class = 'permission']").each do |d|
          title = d.at("./p[@class = 'RecommendationTitle']") or next
          title.name = "b"
          title.delete("class")
          n = title.next_element
          n&.children&.first&.add_previous_sibling(" ")
          n&.children&.first&.add_previous_sibling(title.remove)
        end
        docxml
      end

      def dl_parse(node, out)
        return glossary_parse(node, out) if node["type"] == "glossary"
        super
      end

      # Glossary <dl>: paired dt/dd children become the definition list;
      # any non-dt/dd children (e.g. notes) are rendered after it.
      def glossary_parse(node, out)
        out.dl **attr_code(id: node["id"], class: "glossary") do |v|
          node.elements.select { |n| dt_dd? n }.each_slice(2) do |dt, dd|
            v.dt **attr_code(id: dt["id"]) do |term|
              dt_parse(dt, term)
            end
            v.dd **attr_code(id: dd["id"]) do |listitem|
              dd.children.each { |n| parse(n, listitem) }
            end
          end
        end
        node.elements.reject { |n| dt_dd? n }.each { |n| parse(n, out) }
      end

      # Dispatch NIST-specific elements the base converter does not know.
      def error_parse(node, out)
        case node.name
        when "nistvariable" then nistvariable_parse(node, out)
        when "recommendation" then recommendation_parse(node, out)
        when "requirement" then requirement_parse(node, out)
        when "permission" then permission_parse(node, out)
        when "errata" then errata_parse(node, out)
        else
          super
        end
      end

      def boilerplate(node, out)
        super
        page_break(out)
      end

      def children_parse(node, out)
        node.children.each do |n|
          parse(n, out)
        end
      end

      # <nistvariable> renders as a styled inline span.
      def nistvariable_parse(node, out)
        out.span **{ class: "nistvariable" } do |s|
          node.children.each { |n| parse(n, s) }
        end
      end

      # Errata render as an anchored four-column table.
      def errata_parse(node, out)
        out.a **{ name: "errata_XYZZY" }
        out.table **table_attrs(node) do |t|
          errata_head(t)
          errata_body(t, node)
        end
      end

      def errata_head(t)
        t.thead do |h|
          h.tr do |tr|
            %w(Date Type Change Pages).each do |hdr|
              tr.th hdr
            end
          end
        end
      end

      # One body row per <row>, one cell per date/type/change/pages child.
      def errata_body(t, node)
        t.tbody do |b|
          node.xpath(ns("./row")).each do |row|
            b.tr do |tr|
              %w{date type change pages}.each do |hdr|
                tr.td do |td|
                  # &.each (not .each) so a row missing one of the four
                  # cells yields an empty <td> instead of NoMethodError.
                  row&.at(ns("./#{hdr}"))&.children&.each do |n|
                    parse(n, td)
                  end
                end
              end
            end
          end
        end
      end

      MIDDLE_CLAUSE = "//clause[parent::sections] | "\
        "//terms[parent::sections]".freeze

      def middle(isoxml, out)
        middle_admonitions(isoxml, out)
        clause isoxml, out
        bibliography isoxml, out
        annex isoxml, out
      end

      def info(isoxml, out)
        @meta.series isoxml, out
        @meta.commentperiod isoxml, out
        @meta.note isoxml, out
        super
      end

      # Wrap +txt+ in square brackets unless it already is, in full.
      def wrap_brackets(txt)
        # \A/\z with /m: the whole string (including newlines) must already
        # be bracketed; per-line ^/$ would falsely accept "x\n[y]".
        return txt if /\A\[.*\]\z/m.match?(txt)
        "[#{txt}]"
      end

      # Cross-reference text: anchor label plus localities, unless the
      # eref carries explicit child content, which then wins.
      def get_linkend(node)
        link = anchor_linkend(node, docid_l10n(node["target"] ||
                                               wrap_brackets(node['citeas'])))
        link += eref_localities(node.xpath(ns("./locality | ./localityStack")),
                                link)
        contents = node.children.select do |c|
          !%w{locality localityStack}.include? c.name
        end
        return link if contents.nil? || contents.empty?
        Nokogiri::XML::NodeSet.new(node.document, contents).to_xml
      end

      # Merge the NIST i18n strings over the inherited ones. A user-supplied
      # @i18nyaml wins; otherwise only an English bundle ships with this gem,
      # so every language falls back to i18n-en.yaml.
      def load_yaml(lang, script)
        y = if @i18nyaml then YAML.load_file(@i18nyaml)
            else
              YAML.load_file(File.join(File.dirname(__FILE__),
                                       "i18n-en.yaml"))
            end
        super.merge(y)
      end

      # Terms clause: title first, then every child except title/source.
      def terms_parse(node, out)
        out.div **attr_code(id: node["id"]) do |div|
          node.at(ns("./title")) and
            clause_parse_title(node, div, node.at(ns("./title")), out)
          node.elements.each do |e|
            parse(e, div) unless %w{title source}.include? e.name
          end
        end
      end

      # Term sources are folded into the term heading instead
      # (see term_and_termref_parse), so render nothing here.
      def termref_parse(node, out)
      end

      # Merge consecutive terms_dl tables left behind by per-term rendering
      # into a single table.
      def term_cleanup(docxml)
        docxml.xpath("//table[@class = 'terms_dl']").each do |d|
          prev = d.previous_element
          next unless prev && prev.name == "table" &&
            prev["class"] == "terms_dl"
          d.children.each { |n| prev.add_child(n.remove) }
          d.remove
        end
        docxml
      end

      # Render the preferred term followed, on a new line, by its
      # semicolon-separated sources.
      def term_and_termref_parse(node, dt)
        pref = node.at(ns("./preferred"))
        source = node.xpath(ns("./termsource"))
        pref.children.each { |n| parse(n, dt) }
        return if source.empty?
        dt << "\n"
        source.each_with_index do |s, i|
          i > 0 and dt << "; "
          s.elements.each { |n| parse(n, dt) }
        end
      end

      # Render everything about a term except the parts already shown in
      # the heading (preferred name and sources).
      def term_rest_parse(node, dd)
        set_termdomain("")
        node.children.each do |n|
          parse(n, dd) unless %w(preferred termsource).include?(n.name)
        end
      end

      # "[MODIFICATION] — <explanation>" rendering for modified citations.
      def modification_parse(node, out)
        out << @modified_lbl
        node.at(ns("./p[text()[normalize-space() != '']]")) and out << " — "
        node.at(ns("./p")).children.each { |n| parse(n, out) }
      end
    end
  end
end