require "isodoc"
require_relative "metadata"
require_relative "xrefs"
require_relative "refs"
require "fileutils"

module IsoDoc
  module NIST
    # Rendering overrides shared by the NIST HTML and Word converters.
    # Methods here override or extend the generic IsoDoc converter
    # (reached via +super+) to apply NIST-specific layout and styling.
    module BaseConvert
      # Render the preface abstract, if present, as a div with an
      # "AbstractTitle" heading; the <title> child is skipped.
      def abstract(isoxml, out)
        f = isoxml.at(ns("//preface/abstract")) || return
        # page_break(out)
        out.div **attr_code(id: f["id"]) do |s|
          clause_name(nil, @abstract_lbl, s, class: "AbstractTitle")
          f.elements.each { |e| parse(e, s) unless e.name == "title" }
        end
      end

      # Render the keyword list from document metadata, sorted and
      # joined with semicolons. No-op when there are no keywords.
      def keywords(_docxml, out)
        kw = @meta.get[:keywords]
        kw.empty? and return
        out.div **{ class: "Section3" } do |div|
          clause_name(nil, "Keywords", div, class: "IntroTitle")
          div.p kw.sort.join("; ")
        end
      end

      # Preface children rendered by #preface: everything except the
      # abstract and foreword, which have dedicated renderers.
      FRONT_CLAUSE = "//*[parent::preface][not(local-name() = 'abstract' "\
        "or local-name() = 'foreword')]".freeze

      # All "[preface]" sections get class "IntroTitle" to prevent page
      # breaks, except the Executive Summary, which uses "NormalTitle".
      # Patent-claim clauses are forced onto a new page.
      def preface(isoxml, out)
        isoxml.xpath(ns(FRONT_CLAUSE)).each do |c|
          next if skip_render(c, isoxml)
          title = c&.at(ns("./title"))
          patent = ["Call for Patent Claims",
                    "Patent Disclosure Notice"].include? title&.text
          out.div **attr_code(id: c["id"]) do |s|
            page_break(s) if patent
            clause_name(anchor(c['id'], :label), title, s,
                        class: (c.name == "executivesummary") ?
                          "NormalTitle" : "IntroTitle")
            c.elements.reject { |c1| c1.name == "title" }.each do |c1|
              parse(c1, s)
            end
          end
        end
      end

      # Reviewer notes are only rendered in draft documents: skip them
      # once the document stage is "final". Non-reviewernote clauses are
      # always rendered.
      def skip_render(c, isoxml)
        return false unless c.name == "reviewernote"
        status = isoxml&.at(ns("//bibdata/status/stage"))&.text
        return true if status.nil?
        /^final/.match status
      end

      # Resolve +loc+ relative to this source file's directory.
      def fileloc(loc)
        File.join(File.dirname(__FILE__), loc)
      end

      # Post-process rendered requirement/recommendation/permission
      # divs: turn the "RecommendationTitle" paragraph into a bold run
      # inlined at the start of the following element.
      def requirement_cleanup(docxml)
        docxml.xpath("//div[@class = 'recommend' or @class = 'require' "\
                     "or @class = 'permission']").each do |d|
          title = d.at("./p[@class = 'RecommendationTitle']") or next
          title.name = "b"
          title.delete("class")
          n = title.next_element
          # Prepend "<b>title</b> " to the next element's content.
          n&.children&.first&.add_previous_sibling(" ")
          n&.children&.first&.add_previous_sibling(title.remove)
        end
        docxml
      end

      # Definition lists of type "glossary" get dedicated rendering;
      # everything else falls through to the generic dl renderer.
      def dl_parse(node, out)
        return glossary_parse(node, out) if node["type"] == "glossary"
        super
      end

      # Render a glossary <dl> as alternating dt/dd pairs inside a
      # dl.glossary; non-dt/dd children are rendered after the list.
      def glossary_parse(node, out)
        out.dl **attr_code(id: node["id"], class: "glossary") do |v|
          node.elements.select { |n| dt_dd? n }.each_slice(2) do |dt, dd|
            v.dt **attr_code(id: dt["id"]) do |term|
              dt_parse(dt, term)
            end
            v.dd **attr_code(id: dd["id"]) do |listitem|
              dd.children.each { |n| parse(n, listitem) }
            end
          end
        end
        node.elements.reject { |n| dt_dd? n }.each { |n| parse(n, out) }
      end

      # Dispatch NIST-specific elements unknown to the generic parser.
      def error_parse(node, out)
        case node.name
        when "nistvariable" then nistvariable_parse(node, out)
        when "recommendation" then recommendation_parse(node, out)
        when "requirement" then requirement_parse(node, out)
        when "permission" then permission_parse(node, out)
        when "errata" then errata_parse(node, out)
        else
          super
        end
      end

      # Boilerplate is followed by a page break in NIST output.
      def boilerplate(node, out)
        super
        page_break(out)
      end

      # Render every child of +node+ into +out+.
      def children_parse(node, out)
        node.children.each do |n|
          parse(n, out)
        end
      end

      # Render a NIST variable as a span.nistvariable.
      def nistvariable_parse(node, out)
        out.span **{class: "nistvariable"} do |s|
          node.children.each { |n| parse(n, s) }
        end
      end

      # Render an errata table, preceded by a named anchor so it can be
      # linked to from elsewhere in the document.
      def errata_parse(node, out)
        out.a **{ name: "errata_XYZZY" }
        out.table **make_table_attr(node) do |t|
          errata_head(t)
          errata_body(t, node)
        end
      end

      # Fixed header row for the errata table.
      def errata_head(t)
        t.thead do |h|
          h.tr do |tr|
            %w(Date Type Change Pages).each do |hdr|
              tr.th hdr
            end
          end
        end
      end

      # Render errata rows as the table body: one cell each for date,
      # type, change and pages.
      def errata_body(t, node)
        t.tbody do |b|
          node.xpath(ns("./row")).each do |row|
            b.tr do |tr|
              %w{date type change pages}.each do |hdr|
                tr.td do |td|
                  # &.each: a row missing one of the cell elements
                  # previously raised NoMethodError (nil.each); now it
                  # renders an empty cell instead.
                  row&.at(ns("./#{hdr}"))&.children&.each do |n|
                    parse(n, td)
                  end
                end
              end
            end
          end
        end
      end

      # Body clauses rendered by #middle.
      MIDDLE_CLAUSE = "//clause[parent::sections] | "\
        "//terms[parent::sections]".freeze

      # Document body: clauses, then bibliography, then annexes.
      def middle(isoxml, out)
        clause isoxml, out
        bibliography isoxml, out
        annex isoxml, out
      end

      # Emit NIST-specific metadata before the generic info block.
      def info(isoxml, out)
        @meta.keywords isoxml, out
        @meta.series isoxml, out
        @meta.commentperiod isoxml, out
        @meta.note isoxml, out
        super
      end

      # Wrap +txt+ in square brackets unless it is already bracketed.
      def wrap_brackets(txt)
        return txt if /^\[.*\]$/.match txt
        "[#{txt}]"
      end

      # Build the displayed text of a cross-reference: the anchor label
      # (with localities appended), unless the eref carries its own
      # literal content, in which case that content wins.
      def get_linkend(node)
        link = anchor_linkend(node, docid_l10n(node["target"] ||
                                               wrap_brackets(node['citeas'])))
        link += eref_localities(node.xpath(ns("./locality | ./localityStack")),
                                link)
        contents = node.children.select do |c|
          !%w{locality localityStack}.include? c.name
        end
        return link if contents.nil? || contents.empty?
        # Use the eref's own content, not the locality elements.
        Nokogiri::XML::NodeSet.new(node.document, contents).to_xml
      end

      # Merge the NIST i18n strings over the generic IsoDoc ones.
      # NOTE: only an English localisation exists, so every language
      # currently falls back to i18n-en.yaml.
      def load_yaml(lang, script)
        y = if @i18nyaml then YAML.load_file(@i18nyaml)
            elsif lang == "en"
              YAML.load_file(File.join(File.dirname(__FILE__),
                                       "i18n-en.yaml"))
            else
              YAML.load_file(File.join(File.dirname(__FILE__),
                                       "i18n-en.yaml"))
            end
        super.merge(y)
      end

      # Render a terms clause: title first (when present), then all
      # children except the title and source elements.
      def terms_parse(node, out)
        out.div **attr_code(id: node["id"]) do |div|
          node.at(ns("./title")) and
            clause_parse_title(node, div, node.at(ns("./title")), out)
          node.elements.each do |e|
            parse(e, div) unless %w{title source}.include? e.name
          end
        end
      end

      # Render a bibliography section; the title is suppressed when it
      # merely repeats the enclosing annex's own title.
      def bibliography_parse(node, out)
        title = node&.at(ns("./title"))&.text || ""
        out.div do |div|
          unless suppress_biblio_title(node)
            anchor(node['id'], :label, false) and
              clause_parse_title(node, div, node.at(ns("./title")), out) or
              div.h2 title, **{ class: "Section3" }
          end
          biblio_list(node, div, true)
        end
      end

      # A references section inside an annex suppresses its own title
      # when it is the annex's sole subsection and its title matches the
      # annex title (case-insensitively), or when it has no title.
      def suppress_biblio_title(node)
        return false unless node.parent.name == "annex"
        return false if node.parent.xpath("./references | ./clause | "\
                                          "./terms | ./definitions").size > 1
        title1 = node&.at(ns("./title"))&.text
        return true unless title1
        title2 = node&.parent&.at(ns("./title"))&.text
        title1&.casecmp(title2) == 0
      end

      # Render the foreword with a "ForewordTitle" heading.
      def foreword(isoxml, out)
        f = isoxml.at(ns("//foreword")) || return
        out.div **attr_code(id: f["id"]) do |s|
          title = f.at(ns("./title"))
          s.h1(**{ class: "ForewordTitle" }) do |h1|
            title and title.children.each { |e| parse(e, h1) }
          end
          f.elements.each { |e| parse(e, s) unless e.name == "title" }
        end
      end
    end
  end
end