lib/json/ld/evaluation_context.rb in json-ld-0.3.2 vs lib/json/ld/evaluation_context.rb in json-ld-0.9.0
- old
+ new
@@ -73,10 +73,14 @@
# @!attribute [rw] provided_context
# @return [EvaluationContext] A context provided to us that we can use without re-serializing
attr_accessor :provided_context
+ # @!attribute [rw] remote_contexts
+ # @return [Array<String>] The list of remote contexts already processed
+ attr_accessor :remote_contexts
+
##
# Create new evaluation context
# @yield [ec]
# @yieldparam [EvaluationContext]
# @return [EvaluationContext]
@@ -89,10 +93,11 @@
@iri_to_curie = {}
@iri_to_term = {
RDF.to_uri.to_s => "rdf",
RDF::XSD.to_uri.to_s => "xsd"
}
+ @remote_contexts = []
@options = options
# Load any defined prefixes
(options[:prefixes] || {}).each_pair do |k, v|
@@ -119,10 +124,11 @@
when IO, StringIO
debug("parse") {"io: #{context}"}
# Load context document, if it is a string
begin
ctx = JSON.load(context)
+ raise JSON::LD::InvalidContext::LoadError, "Context missing @context key" if @options[:validate] && ctx['@context'].nil?
parse(ctx["@context"] || {})
rescue JSON::ParserError => e
debug("parse") {"Failed to parse @context from remote document at #{context}: #{e.message}"}
raise JSON::LD::InvalidContext::Syntax, "Failed to parse remote context at #{context}: #{e.message}" if @options[:validate]
self.dup
@@ -135,10 +141,12 @@
debug("parse") {"remote: #{context}, base: #{context_base || base}"}
# Load context document, if it is a string
ec = nil
begin
url = expand_iri(context, :base => context_base || base, :position => :subject)
+ raise JSON::LD::InvalidContext::LoadError if remote_contexts.include?(url)
+ @remote_contexts = @remote_contexts + [url]
ecdup = self.dup
ecdup.context_base = url # Set context_base for recursive remote contexts
RDF::Util::File.open_file(url) {|f| ec = ecdup.parse(f)}
ec.provided_context = context
ec.context_base = url
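
The two lines added here give remote context loading a cycle guard: a URL already recorded in @remote_contexts raises InvalidContext::LoadError instead of recursing forever. A minimal standalone sketch of the same guard pattern, using a hypothetical RemoteContextLoader class rather than the gem's API:

    class RemoteContextLoader
      class ContextCycleError < StandardError; end

      def initialize
        @remote_contexts = []            # URLs already seen on the current branch
      end

      def load(url)
        raise ContextCycleError, "recursive remote context: #{url}" if @remote_contexts.include?(url)
        @remote_contexts += [url]
        # ... dereference url, parse its @context, and recurse into nested remote contexts ...
      end
    end
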
@@ -160,20 +168,21 @@
ec
when Hash
new_ec = self.dup
new_ec.provided_context = context.dup
+ # If context has a @vocab member: if its value is not a valid absolute IRI or null trigger an INVALID_VOCAB_MAPPING error; otherwise set the active context's vocabulary mapping to its value and remove the @vocab member from context.
{
'@language' => :default_language=,
'@vocab' => :vocab=
}.each do |key, setter|
v = context.fetch(key, false)
if v.nil? || v.is_a?(String)
context.delete(key)
debug("parse") {"Set #{key} to #{v.inspect}"}
new_ec.send(setter, v)
- elsif v
+ elsif v && @options[:validate]
raise InvalidContext::Syntax, "#{key.inspect} is invalid"
end
end
num_updates = 1
@@ -184,30 +193,52 @@
context.each do |key, value|
# Expand a string value, unless it matches a keyword
debug("parse") {"Hash[#{key}] = #{value.inspect}"}
if KEYWORDS.include?(key)
- raise InvalidContext::Syntax, "key #{key.inspect} must not be a keyword"
+ raise InvalidContext::Syntax, "key #{key.inspect} must not be a keyword" if @options[:validate]
+ next
elsif term_valid?(key)
# Remove all coercion information for the property
new_ec.set_coerce(key, nil)
new_ec.set_container(key, nil)
@languages.delete(key)
- # Extract IRI mapping. This is complicated, as @id may have been aliased
- value = value.fetch('@id', nil) if value.is_a?(Hash)
- raise InvalidContext::Syntax, "unknown mapping for #{key.inspect} to #{value.class}" unless value.is_a?(String) || value.nil?
+ # Extract IRI mapping. This is complicated, as @id may have been aliased. Also, if @id is explicitly set to nil, it inhibits an automatic mapping, so treat it as false to distinguish it from no mapping at all.
+ value = case value
+ when Hash
+ value.has_key?('@id') && value['@id'].nil? ? false : value.fetch('@id', nil)
+ when nil
+ false
+ else
+ value
+ end
- iri = new_ec.expand_iri(value, :position => :predicate) if value.is_a?(String)
+ # Explicitly say this is not mapped
+ if value == false
+ debug("parse") {"Map #{key} to nil"}
+ new_ec.set_mapping(key, nil)
+ next
+ end
+
+ iri = if value.is_a?(Array)
+ # Expand each item according to the IRI Expansion algorithm. If an item does not expand to a valid absolute IRI, raise an INVALID_PROPERTY_GENERATOR error; otherwise sort the values and store them as the IRI mapping in the definition.
+ value.map do |v|
+ raise InvalidContext::Syntax, "unknown mapping for #{key.inspect} to #{v.inspect}" unless v.is_a?(String)
+ new_ec.expand_iri(v, :position => :predicate)
+ end.sort
+ elsif value
+ raise InvalidContext::Syntax, "unknown mapping for #{key.inspect} to #{value.inspect}" unless value.is_a?(String)
+ new_ec.expand_iri(value, :position => :predicate)
+ end
+
if iri && new_ec.mappings.fetch(key, nil) != iri
# Record term definition
new_ec.set_mapping(key, iri)
num_updates += 1
- elsif value.nil?
- new_ec.set_mapping(key, nil)
end
- else
+ elsif @options[:validate]
raise InvalidContext::Syntax, "key #{key.inspect} is invalid"
end
end
end
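
The reworked term loop distinguishes a term whose @id is explicitly null (stored as a nil mapping, never expanded) from a term with no mapping at all, and accepts an array of IRIs as a draft-era property generator. An illustrative context exercising both paths (hypothetical example.com IRIs, not taken from the gem's tests):

    context = {
      "@context" => {
        "@vocab"   => "http://example.com/vocab#",
        "comment"  => nil,                     # explicit null: term is decoupled, not expanded
        "creators" => {"@id" => ["http://purl.org/dc/terms/creator",
                                 "http://xmlns.com/foaf/0.1/maker"]}   # property generator
      }
    }
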
@@ -221,18 +252,22 @@
raise InvalidContext::Syntax, "mapping for #{key.inspect} missing one of @id, @language, @type or @container" if (%w(@id @language @type @container) & value.keys).empty?
value.each do |key2, value2|
iri = new_ec.expand_iri(value2, :position => :predicate) if value2.is_a?(String)
case key2
when '@type'
- raise InvalidContext::Syntax, "unknown mapping for '@type' to #{value2.class}" unless value2.is_a?(String) || value2.nil?
+ raise InvalidContext::Syntax, "unknown mapping for '@type' to #{value2.inspect}" unless value2.is_a?(String) || value2.nil?
if new_ec.coerce(key) != iri
- raise InvalidContext::Syntax, "unknown mapping for '@type' to #{iri.inspect}" unless RDF::URI(iri).absolute? || iri == '@id'
+ case iri
+ when '@id', /_:/, RDF::Node
+ else
+ raise InvalidContext::Syntax, "unknown mapping for '@type' to #{iri.inspect}" unless (RDF::URI(iri).absolute? rescue false)
+ end
# Record term coercion
new_ec.set_coerce(key, iri)
end
when '@container'
- raise InvalidContext::Syntax, "unknown mapping for '@container' to #{value2.class}" unless %w(@list @set).include?(value2)
+ raise InvalidContext::Syntax, "unknown mapping for '@container' to #{value2.inspect}" unless %w(@list @set @language @annotation).include?(value2)
if new_ec.container(key) != value2
debug("parse") {"container #{key.inspect} as #{value2.inspect}"}
new_ec.set_container(key, value2)
end
when '@language'
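
@container now also accepts @language (language maps) and @annotation, the draft keyword that the final JSON-LD 1.0 spec renamed @index. A context sketch using both, with hypothetical term IRIs:

    context = {
      "@context" => {
        "label" => {"@id" => "http://www.w3.org/2000/01/rdf-schema#label",
                    "@container" => "@language"},     # language map
        "post"  => {"@id" => "http://example.com/vocab#post",
                    "@container" => "@annotation"}    # index (annotation) map
      }
    }
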
@@ -279,14 +314,14 @@
ctx = Hash.ordered
ctx['@language'] = default_language.to_s if default_language
ctx['@vocab'] = vocab.to_s if vocab
# Mappings
- mappings.keys.sort{|a, b| a.to_s <=> b.to_s}.each do |k|
+ mappings.keys.kw_sort{|a, b| a.to_s <=> b.to_s}.each do |k|
next unless term_valid?(k.to_s)
debug {"=> mappings[#{k}] => #{mappings[k]}"}
- ctx[k] = mappings[k].to_s
+ ctx[k] = mappings[k]
end
unless coercions.empty? && containers.empty? && languages.empty?
# Coerce
(coercions.keys + containers.keys + languages.keys).uniq.sort.each do |k|
@@ -308,11 +343,11 @@
ctx[k]["@type"] = dt
debug {"=> datatype[#{k}] => #{dt}"}
end
debug {"=> container(#{k}) => #{container(k)}"}
- if %w(@list @set).include?(container(k))
+ if %w(@list @set @language @annotation).include?(container(k))
ctx[k]["@container"] = container(k)
debug {"=> container[#{k}] => #{container(k).inspect}"}
end
debug {"=> language(#{k}) => #{language(k)}"}
@@ -342,18 +377,18 @@
#
# @param [String, #to_s] term
#
# @return [RDF::URI, String]
def mapping(term)
- @mappings.fetch(term.to_s, nil)
+ @mappings.fetch(term.to_s, false)
end
##
# Set term mapping
#
# @param [#to_s] term
- # @param [RDF::URI, String] value
+ # @param [RDF::URI, String, nil] value
#
# @return [RDF::URI, String]
def set_mapping(term, value)
term = term.to_s
term_sym = term.empty? ? "" : term.to_sym
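
mapping() now answers three ways: the stored IRI for a defined term, nil for a term explicitly decoupled with null, and false when there is no term definition at all. A sketch of the distinction, assuming an EvaluationContext ec built from a context that defines "name", nulls out "comment", and never mentions "other":

    ec.mapping("name")     #=> the term's IRI mapping (an RDF::URI or String)
    ec.mapping("comment")  #=> nil    ("comment" => null in the context)
    ec.mapping("other")    #=> false  (no term definition at all)
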
@@ -386,12 +421,12 @@
# @param [String] property in unexpanded form
#
# @return [RDF::URI, '@id']
def coerce(property)
# Map property, if it's not an RDF::Value
- # @type and @graph always is an IRI
- return '@id' if [RDF.type, '@type', '@graph'].include?(property)
+ # @type is always an IRI
+ return '@id' if [RDF.type, '@type'].include?(property)
@coercions.fetch(property, nil)
end
##
# Set term coercion
@@ -413,10 +448,11 @@
# Retrieve container mapping, add it if `value` is provided
#
# @param [String] property in unexpanded form
# @return [String]
def container(property)
+ return '@set' if property == '@graph'
@containers.fetch(property.to_s, nil)
end
##
# Set container mapping
@@ -470,37 +506,64 @@
# @param [Hash{Symbol => Object}] options
# @option options [:subject, :predicate, :type] position
# Useful when determining how to serialize.
# @option options [RDF::URI] base (self.base)
# Base IRI to use when expanding relative IRIs.
+ # @option options [Array<String>] path ([])
# Array of looked-up IRIs, used to detect cycles
+ # @option options [BlankNodeNamer] namer
+ # Blank Node namer to use for renaming Blank Nodes
#
- # @return [RDF::URI, String] IRI or String, if it's a keyword
+ # @return [RDF::Term, String, Array<RDF::URI>]
+ # IRI or String, if it's a keyword, or array of IRI, if it matches
+ # a property generator
# @raise [RDF::ReaderError] if the iri cannot be expanded
# @see http://json-ld.org/spec/latest/json-ld-api/#iri-expansion
def expand_iri(iri, options = {})
return iri unless iri.is_a?(String)
+
prefix, suffix = iri.split(':', 2)
- return mapping(iri) if mapping(iri) # If it's an exact match
+ unless (m = mapping(iri)) == false
+ # It's an exact match
+ debug("expand_iri") {"match: #{iri.inspect} to #{m.inspect}"} unless options[:quiet]
+ return case m
+ when nil
+ nil
+ when Array
+ # Return array of IRIs, if it's a property generator
+ m.map {|mm| uri(mm.to_s, options[:namer])}
+ else
+ uri(m.to_s, options[:namer])
+ end
+ end
debug("expand_iri") {"prefix: #{prefix.inspect}, suffix: #{suffix.inspect}, vocab: #{vocab.inspect}"} unless options[:quiet]
- base = [:subject].include?(options[:position]) ? options.fetch(:base, self.base) : nil
+ base = [:subject, :type].include?(options[:position]) ? options.fetch(:base, self.base) : nil
prefix = prefix.to_s
case
- when prefix == '_' && suffix then bnode(suffix)
+ when prefix == '_' && suffix then uri(bnode(suffix), options[:namer])
when iri.to_s[0,1] == "@" then iri
when suffix.to_s[0,2] == '//' then uri(iri)
- when mappings.fetch(prefix, false) then uri(mappings[prefix] + suffix.to_s)
+ when (mapping = mapping(prefix)) != false
+ debug("expand_iri") {"mapping: #{mapping(prefix).inspect}"} unless options[:quiet]
+ case mapping
+ when Array
+ # Return array of IRIs, if it's a property generator
+ mapping.map {|m| uri(m.to_s + suffix.to_s, options[:namer])}
+ else
+ uri(mapping.to_s + suffix.to_s, options[:namer])
+ end
when base then base.join(iri)
when vocab then uri("#{vocab}#{iri}")
else
# Otherwise, it must be an absolute IRI
u = uri(iri)
- u if u.absolute? || [:subject].include?(options[:position])
+ u if u.absolute? || [:subject, :type].include?(options[:position])
end
end
##
- # Compact an IRI
+ # Compacts an absolute IRI to the shortest matching term or compact IRI
#
# @param [RDF::URI] iri
# @param [Hash{Symbol => Object}] options ({})
# @option options [:subject, :predicate, :type] position
# Useful when determining how to serialize.
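
The expand_iri changes in the hunk above let a term mapping be a single IRI, an array of IRIs (property generator), or nil (explicitly unmapped), and route blank nodes through the optional namer. A minimal standalone sketch of just the prefix-expansion path, using a hypothetical expand helper rather than the gem's method:

    def expand(iri, mappings)
      prefix, suffix = iri.split(':', 2)
      case (mapped = mappings.fetch(prefix, false))
      when false then iri                                # no mapping; leave as-is
      when nil   then nil                                # explicitly unmapped term
      when Array then mapped.map {|m| "#{m}#{suffix}"}   # property generator
      else            "#{mapped}#{suffix}"
      end
    end

    expand("foaf:name", "foaf" => "http://xmlns.com/foaf/0.1/")
    #=> "http://xmlns.com/foaf/0.1/name"
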
@@ -522,14 +585,13 @@
debug("compact_iri", "initial terms: #{matched_terms.inspect}")
# Create an empty list of terms _terms_ that will be populated with terms that are ranked according to how closely they match value. Initialize highest rank to 0, and set a flag list container to false.
terms = {}
- # If value is a @list add a term rank for each
- # term mapping to iri which has @container @list.
- debug("compact_iri", "#{value.inspect} is a list? #{list?(value).inspect}")
- if list?(value)
+ # If value is a @list, select terms that match every item equivalently.
+ debug("compact_iri", "#{value.inspect} is a list? #{list?(value).inspect}") if value
+ if list?(value) && !annotation?(value)
list_terms = matched_terms.select {|t| container(t) == '@list'}
terms = list_terms.inject({}) do |memo, t|
memo[t] = term_rank(t, value)
memo
@@ -561,17 +623,10 @@
# Find terms having the greatest term match value
least_distance = terms.values.max
terms = terms.keys.select {|t| terms[t] == least_distance}
- # If terms is empty, and the active context has a @vocab which is a prefix of iri where the resulting relative IRI is not a term in the active context. The resulting relative IRI is the unmatched part of iri.
- if vocab && terms.empty? && iri.to_s.index(vocab) == 0 &&
- [:predicate, :type].include?(options[:position])
- terms << iri.to_s.sub(vocab, '')
- debug("vocab") {"vocab: #{vocab}, rel: #{terms.first}"}
- end
-
# If terms is empty, add a compact IRI representation of iri for each
# term in the active context which maps to an IRI which is a prefix for
# iri where the resulting compact IRI is not a term in the active
# context. The resulting compact IRI is the term associated with the
# partially matched IRI in the active context concatenated with a colon
@@ -603,10 +658,19 @@
end
debug("curies") {"selected #{terms.inspect}"}
end
+ # If terms is empty and the active context has a @vocab that is a prefix of iri, and the resulting relative IRI is not a term in the active context, the resulting relative IRI is the unmatched part of iri.
+ # Don't use vocab if the result would collide with a term.
+ if vocab && terms.empty? && iri.to_s.index(vocab) == 0 &&
+ !mapping(iri.to_s.sub(vocab, '')) &&
+ [:predicate, :type].include?(options[:position])
+ terms << iri.to_s.sub(vocab, '')
+ debug("vocab") {"vocab: #{vocab}, rel: #{terms.first}"}
+ end
+
# If we still don't have any terms and we're using standard_prefixes,
# try those, and add to mapping
if terms.empty? && @options[:standard_prefixes]
terms = RDF::Vocabulary.
select {|v| iri.index(v.to_uri.to_s) == 0}.
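
Moving the @vocab fallback after CURIE selection, together with the new !mapping(...) guard, means a vocab-relative form is only used when it would not shadow an existing term. Expected behaviour sketched for a context with "@vocab" => "http://example.com/vocab#" and a term "name" already mapped to a different IRI (ec is an assumed parsed EvaluationContext):

    ec.compact_iri("http://example.com/vocab#age",  :position => :predicate)  #=> "age"
    ec.compact_iri("http://example.com/vocab#name", :position => :predicate)
    # falls back to a CURIE or the full IRI, because the vocab-relative
    # form "name" would collide with the existing term
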
@@ -648,116 +712,63 @@
end
##
# Expand a value from compacted to expanded form making the context
# unnecessary. This method is used as part of more general expansion
- # and operates on RHS values, using a supplied key to determine @type and @container
- # coercion rules.
+ # and operates on RHS values, using a supplied key to determine @type and
+ # @container coercion rules.
#
# @param [String] property
# Associated property used to find coercion rules
# @param [Hash, String] value
# Value (literal or IRI) to be expanded
# @param [Hash{Symbol => Object}] options
# @option options [Boolean] :useNativeTypes (true) use native representations
+ # @option options [BlankNodeNamer] namer
+ # Blank Node namer to use for renaming Blank Nodes
#
# @return [Hash] Object representation of value
# @raise [RDF::ReaderError] if the iri cannot be expanded
# @see http://json-ld.org/spec/latest/json-ld-api/#value-expansion
def expand_value(property, value, options = {})
options = {:useNativeTypes => true}.merge(options)
depth(options) do
debug("expand_value") {"property: #{property.inspect}, value: #{value.inspect}, coerce: #{coerce(property).inspect}"}
- value = RDF::Literal(value) if RDF::Literal(value).has_datatype?
- dt = case value
- when RDF::Literal
- case value.datatype
- when RDF::XSD.boolean, RDF::XSD.integer, RDF::XSD.double then value.datatype
- else value
- end
- when RDF::Term then value.class.name
- else value
- end
- result = case dt
- when RDF::XSD.boolean
- debug("xsd:boolean")
- case coerce(property)
- when RDF::XSD.double.to_s
- {"@value" => value.to_s, "@type" => RDF::XSD.double.to_s}
- else
- if options[:useNativeTypes]
- # Unless there's coercion, to not modify representation
- {"@value" => (value.is_a?(RDF::Literal::Boolean) ? value.object : value)}
- else
- {"@value" => value.to_s, "@type" => RDF::XSD.boolean.to_s}
- end
- end
- when RDF::XSD.integer
- debug("xsd:integer")
- case coerce(property)
- when RDF::XSD.double.to_s
- {"@value" => RDF::Literal::Double.new(value, :canonicalize => true).to_s, "@type" => RDF::XSD.double.to_s}
- when RDF::XSD.integer.to_s, nil
- # Unless there's coercion, to not modify representation
- if options[:useNativeTypes]
- {"@value" => value.is_a?(RDF::Literal::Integer) ? value.object : value}
- else
- {"@value" => value.to_s, "@type" => RDF::XSD.integer.to_s}
- end
- else
- res = Hash.ordered
- res['@value'] = value.to_s
- res['@type'] = coerce(property)
- res
- end
- when RDF::XSD.double
- debug("xsd:double")
- case coerce(property)
- when RDF::XSD.integer.to_s
- {"@value" => value.to_int.to_s, "@type" => RDF::XSD.integer.to_s}
- when RDF::XSD.double.to_s
- {"@value" => RDF::Literal::Double.new(value, :canonicalize => true).to_s, "@type" => RDF::XSD.double.to_s}
- when nil
- if options[:useNativeTypes]
- # Unless there's coercion, to not modify representation
- {"@value" => value.is_a?(RDF::Literal::Double) ? value.object : value}
- else
- {"@value" => RDF::Literal::Double.new(value, :canonicalize => true).to_s, "@type" => RDF::XSD.double.to_s}
- end
- else
- res = Hash.ordered
- res['@value'] = value.to_s
- res['@type'] = coerce(property)
- res
- end
- when "RDF::URI", "RDF::Node"
+ value = if value.is_a?(RDF::Value)
+ value
+ elsif coerce(property) == '@id'
+ expand_iri(value, :position => :subject, :namer => options[:namer])
+ else
+ RDF::Literal(value)
+ end
+ debug("expand_value") {"normalized: #{value.inspect}"}
+
+ result = case value
+ when RDF::URI, RDF::Node
debug("URI | BNode") { value.to_s }
{'@id' => value.to_s}
when RDF::Literal
- debug("Literal")
+ debug("Literal") {"datatype: #{value.datatype.inspect}"}
res = Hash.ordered
- res['@value'] = value.to_s
- res['@type'] = value.datatype.to_s if value.has_datatype?
- res['@language'] = value.language.to_s if value.has_language?
- res
- else
- debug("else")
- case coerce(property)
- when '@id'
- {'@id' => expand_iri(value, :position => :subject).to_s}
- when nil
- debug("expand value") {"lang(prop): #{language(property).inspect}, def: #{default_language.inspect}"}
- language(property) ? {"@value" => value.to_s, "@language" => language(property)} : {"@value" => value.to_s}
+ if options[:useNativeTypes] && [RDF::XSD.boolean, RDF::XSD.integer, RDF::XSD.double].include?(value.datatype)
+ res['@value'] = value.object
+ res['@type'] = uri(coerce(property), options[:namer]) if coerce(property)
else
- res = Hash.ordered
+ value.canonicalize! if value.datatype == RDF::XSD.double
res['@value'] = value.to_s
- res['@type'] = coerce(property).to_s
- res
+ if coerce(property)
+ res['@type'] = uri(coerce(property), options[:namer]).to_s
+ elsif value.has_datatype?
+ res['@type'] = uri(value.datatype, options[:namer]).to_s
+ elsif value.has_language? || language(property)
+ res['@language'] = (value.language || language(property)).to_s
+ end
end
+ res
end
-
+
debug {"=> #{result.inspect}"}
result
end
end
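
The rewritten expand_value normalizes first (an RDF value, an @id-coerced IRI, or a literal) and only then builds the object form. Reading the behaviour off the hunk, for an assumed context where "homepage" is coerced to @id, "age" and "title" are uncoerced, and no default language is set (a sketch, not recorded test output):

    ec.expand_value("age", 14)
    #=> {"@value" => 14}                    # native integer kept (useNativeTypes)

    ec.expand_value("homepage", "http://example.com/")
    #=> {"@id" => "http://example.com/"}    # @id coercion goes through expand_iri

    ec.expand_value("title", "Moby Dick")
    #=> {"@value" => "Moby Dick"}           # plain literal: no datatype or language
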
@@ -778,16 +789,18 @@
raise ProcessingError::Lossy, "attempt to compact a non-object value: #{value.inspect}" unless value.is_a?(Hash)
depth(options) do
debug("compact_value") {"property: #{property.inspect}, value: #{value.inspect}, coerce: #{coerce(property).inspect}"}
+ # Remove @annotation if property has annotation
+ value.delete('@annotation') if container(property) == '@annotation'
+
result = case
- #when %w(boolean integer double).any? {|t| expand_iri(value['@type'], :position => :type) == RDF::XSD[t]}
- # # Compact native type
- # debug {" (native)"}
- # l = RDF::Literal(value['@value'], :datatype => expand_iri(value['@type'], :position => :type))
- # l.canonicalize.object
+ when value.has_key?('@annotation')
+ # Don't compact the value
+ debug {" (@annotation without container @annotation)"}
+ value
when coerce(property) == '@id' && value.has_key?('@id')
# Compact an @id coercion
debug {" (@id & coerce)"}
compact_iri(value['@id'], :position => :subject)
when value['@type'] && expand_iri(value['@type'], :position => :type) == coerce(property)
@@ -797,15 +810,15 @@
when value.has_key?('@id')
# Compact an IRI
value[self.alias('@id')] = compact_iri(value['@id'], :position => :subject)
debug {" (#{self.alias('@id')} => #{value['@id']})"}
value
- when value['@language'] && value['@language'] == language(property)
+ when value['@language'] && (value['@language'] == language(property) || container(property) == '@language')
# Compact language
debug {" (@language) == #{language(property).inspect}"}
value['@value']
- when value['@value'] && !value['@value'].is_a?(String)
+ when !value.fetch('@value', "").is_a?(String)
# Compact simple literal to string
debug {" (@value not string)"}
value['@value']
when value['@value'] && !value['@language'] && !value['@type'] && !coerce(property) && !default_language
# Compact simple literal to string
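
Two compactions the revised compact_value now performs, read off the hunk and assuming a term "label" with "@container" => "@language" and an uncoerced "age":

    ec.compact_value("label", {"@value" => "chat", "@language" => "fr"})
    #=> "chat"   # language-tagged value collapses under a language-map term

    ec.compact_value("age", {"@value" => 14})
    #=> 14       # non-string @value is returned natively
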
@@ -851,31 +864,40 @@
v.join(", ") + "]"
end
def dup
# Also duplicate mappings, coerce and list
+ that = self
ec = super
- ec.mappings = mappings.dup
- ec.coercions = coercions.dup
- ec.containers = containers.dup
- ec.languages = languages.dup
- ec.default_language = default_language
- ec.options = options
- ec.iri_to_term = iri_to_term.dup
- ec.iri_to_curie = iri_to_curie.dup
+ ec.instance_eval do
+ @mappings = that.mappings.dup
+ @coercions = that.coercions.dup
+ @containers = that.containers.dup
+ @languages = that.languages.dup
+ @default_language = that.default_language
+ @options = that.options
+ @iri_to_term = that.iri_to_term.dup
+ @iri_to_curie = that.iri_to_curie.dup
+ end
ec
end
private
- def uri(value, append = nil)
- value = RDF::URI.new(value)
- value = value.join(append) if append
- value.validate! if @options[:validate]
- value.canonicalize! if @options[:canonicalize]
- value = RDF::URI.intern(value) if @options[:intern]
- value
+ def uri(value, namer = nil)
+ case value.to_s
+ when /^_:(.*)$/
+ # Map BlankNodes if a namer is given
+ debug "uri(bnode)#{value}: #{$1}"
+ bnode(namer ? namer.get_sym($1) : $1)
+ else
+ value = RDF::URI.new(value)
+ value.validate! if @options[:validate]
+ value.canonicalize! if @options[:canonicalize]
+ value = RDF::URI.intern(value) if @options[:intern]
+ value
+ end
end
# Keep track of allocated BNodes
#
# Don't actually use the name provided, to prevent name alias issues.
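
The private uri() helper now recognizes "_:"-prefixed values and, when a namer is supplied, relabels the blank node via namer.get_sym. A hypothetical namer sketch with that one-method interface (the gem's actual BlankNodeNamer is not shown in this diff):

    class Namer
      def initialize
        @map, @count = {}, -1
      end

      def get_sym(old)
        @map[old] ||= "b#{@count += 1}"   # same input always yields the same fresh label
      end
    end

    namer = Namer.new
    namer.get_sym("a")  #=> "b0"
    namer.get_sym("z")  #=> "b1"
    namer.get_sym("a")  #=> "b0"
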
@@ -908,35 +930,53 @@
debug("term rank") { "null value: 3"}
3
elsif list?(value)
if value['@list'].empty?
# If the @list property is an empty array, if term has @container set to @list, term rank is 1, otherwise 0.
+ debug("term rank") { "empty list"}
container(term) == '@list' ? 1 : 0
else
- # Otherwise, return the sum of the term ranks for every entry in the list.
- depth {value['@list'].inject(0) {|memo, v| memo + term_rank(term, v)}}
+ debug("term rank") { "non-empty list"}
+ # Otherwise, return the lowest rank across the list entries, so the term must have some match against every value.
+ depth {value['@list'].map {|v| term_rank(term, v)}}.min
end
elsif value?(value)
val_type = value.fetch('@type', nil)
val_lang = value['@language'] || false if value.has_key?('@language')
debug("term rank") {"@val_type: #{val_type.inspect}, val_lang: #{val_lang.inspect}"}
if val_type
+ debug("term rank") { "typed value"}
coerce(term) == val_type ? 3 : (default_term ? 1 : 0)
elsif !value['@value'].is_a?(String)
+ debug("term rank") { "native value"}
default_term ? 2 : 1
elsif val_lang.nil?
debug("val_lang.nil") {"#{language(term).inspect} && #{coerce(term).inspect}"}
- language(term) == false || (default_term && default_language.nil?) ? 3 : 0
+ if language(term) == false || (default_term && default_language.nil?)
+ # Value has no language, and there is no default language and the term has no language
+ 3
+ elsif default_term
+ # The term has no language (or type), but it's different from the default
+ 2
+ else
+ 0
+ end
else
- if val_lang == language(term) || (default_term && default_language == val_lang)
+ debug("val_lang") {"#{language(term).inspect} && #{coerce(term).inspect}"}
+ if val_lang && container(term) == '@language'
3
+ elsif val_lang == language(term) || (default_term && default_language == val_lang)
+ 2
+ elsif default_term && container(term) == '@set'
+ 2 # Choose a set term before a non-set term, if there's a language
elsif default_term
1
else
0
end
end
else # node definition/reference
+ debug("node dev/ref")
coerce(term) == '@id' ? 3 : (default_term ? 1 : 0)
end
debug(" =>") {rank.inspect}
rank
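
A sketch of the revised language ranking for a value such as {"@value" => "chat", "@language" => "fr"}, using hypothetical terms and the private term_rank helper shown above:

    # term_rank("lbl",   value)  #=> 3   "lbl"   has "@container" => "@language"
    # term_rank("titre", value)  #=> 2   "titre" has "@language"  => "fr"
    # term_rank("name",  value)  #=> 0   "name"  has no language and is not the default term
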