\n" +
# "
\n" +
# (supers.size > 0 ?
# "#{span('Each', 'keyword')} #{termref(o.name, nil, o)} #{span('is a kind of', 'keyword')} #{supers.map{|s| termref(s.name, nil, s)}*', '}\n" :
# ''
# ) +
# if pi
# "#{span('Each', 'keyword')} #{termref(o.name, nil, o)} #{span('is identified by', 'keyword')} " +
# pi.role_sequence.all_role_ref_in_order.map do |rr|
# termref(
# rr.role.object_type.name,
# [ rr.leading_adjective,
# rr.role.role_name || rr.role.object_type.name,
# rr.trailing_adjective
# ].compact * '-',
# rr.role.object_type
# )
# end * ", " + "\n"
# else
# ''
# end +
# fact_types_dump(o, relevant_fact_types(o)) + "\n" +
# "
\n" +
# "
\n"
#
# defn_term + defn_detail
# end
#
# def relevant_fact_types(o)
# o.
# all_role.
# map{|r| [r, r.fact_type]}.
# reject { |r, ft| ft.is_a?(ActiveFacts::Metamodel::LinkFactType) }.
# select { |r, ft| ft.entity_type || has_another_nonstatic_role(ft, r) }
# end
#
# def has_another_nonstatic_role(ft, r)
# ft.all_role.detect do |rr|
# rr != r &&
# rr.object_type.is_a?(ActiveFacts::Metamodel::EntityType) &&
# !rr.object_type.is_static
# end
# end
#
# def fact_types_dump(o, ftm)
# ftm.
# map { |r, ft| [ft, " #{fact_type_dump(ft, o)}"] }.
# sort_by{|ft, text| [ ft.is_a?(ActiveFacts::Metamodel::TypeInheritance) ? 0 : 1, text]}.
# map{|ft, text| text} * "\n"
# end
#
# def fact_type_dump(ft, wrt = nil)
# if ft.entity_type
# div(
# div(span('Each ', 'keyword') + termref(ft.entity_type.name, nil, ft.entity_type) + span(' is where ', 'keyword')) +
# div(expand_fact_type(ft, wrt, true, 'some')),
# 'glossary-objectification'
# )
# else
# fact_type_block(ft, wrt)
# end
# end
#
# def fact_type_block(ft, wrt = nil, include_rolenames = true)
# div(expand_fact_type(ft, wrt, include_rolenames, ''), 'glossary-facttype')
# end
#
# def expand_fact_type(ft, wrt = nil, include_rolenames = true, wrt_qualifier = '')
# role = ft.all_role.detect{|r| r.object_type == wrt}
# preferred_reading = ft.reading_preferably_starting_with_role(role)
# alternate_readings = ft.all_reading.reject{|r| r == preferred_reading}
#
# div(
# expand_reading(preferred_reading, include_rolenames, wrt, wrt_qualifier),
# 'glossary-reading'
# )
# end
#
# def role_ref(rr, freq_con, l_adj, name, t_adj, role_name_def, literal)
# term_parts = [l_adj, termref(name, nil, rr.role.object_type), t_adj].compact
# [
# freq_con ? element(freq_con, :class=>:keyword) : nil,
# term_parts.size > 1 ? term([l_adj, termref(name, nil, rr.role.object_type), t_adj].compact*' ') : term_parts[0],
# role_name_def,
# literal
# ]
# end
#
# def expand_reading(reading, include_rolenames = true, wrt = nil, wrt_qualifier = '')
# role_refs = reading.role_sequence.all_role_ref.sort_by{|role_ref| role_ref.ordinal}
# lrr = role_refs[role_refs.size - 1]
# element(
# # element(rr.role.is_unique ? "one" : "some", :class=>:keyword) +
# reading.expand([], include_rolenames) do |rr, freq_con, l_adj, name, t_adj, role_name_def, literal|
# if role_name_def
# role_name_def = role_name_def.gsub(/\(as ([^)]+)\)/) {
# span("(as #{ termref(rr.role.object_type.name, $1, rr.role.object_type) })", 'keyword')
# }
# end
# # qualify the last role of the reading
# quantifier = ''
# if rr == lrr
# uniq = true
# (0 ... role_refs.size - 2).each{|i| uniq = uniq && role_refs[i].role.is_unique }
# quantifier = uniq ? "one" : "at least one"
# end
# role_ref(rr, quantifier, l_adj, name, t_adj, role_name_def, literal)
# end,
# {:class => 'reading'}
# )
# end
# SQL column type emitted for boolean (indicator) components.
def boolean_type
  "boolean"
end
# SQL column type emitted for surrogate-key components.
def surrogate_type
  "bigint"
end
# def component_type component, column_name
# case component
# when MM::Indicator
# boolean_type
# when MM::SurrogateKey
# surrogate_type
# when MM::ValueField, MM::Absorption
# object_type = component.object_type
# while object_type.is_a?(MM::EntityType)
# rr = object_type.preferred_identifier.role_sequence.all_role_ref.single
# raise "Can't produce a column for composite #{component.inspect}" unless rr
# object_type = rr.role.object_type
# end
# raise "A column can only be produced from a ValueType" unless object_type.is_a?(MM::ValueType)
#
# if component.is_a?(MM::Absorption)
# value_constraint ||= component.child_role.role_value_constraint
# end
#
# supertype = object_type
# begin
# object_type = supertype
# length ||= object_type.length
# scale ||= object_type.scale
# unless component.parent.parent and component.parent.foreign_key
# # No need to enforce value constraints that are already enforced by a foreign key
# value_constraint ||= object_type.value_constraint
# end
# end while supertype = object_type.supertype
# type, length = normalise_type(object_type.name, length)
# sql_type = "#{type}#{
# if !length
# ''
# else
# '(' + length.to_s + (scale ? ", #{scale}" : '') + ')'
# end
# # }#{
# # (component.path_mandatory ? '' : ' NOT') + ' NULL'
# # }#{
# # # REVISIT: This is an SQL Server-ism. Replace with a standard SQL SEQUENCE/
# # # Emit IDENTITY for columns auto-assigned on commit (except FKs)
# # if a = object_type.is_auto_assigned and a != 'assert' and
# # !component.all_foreign_key_field.detect{|fkf| fkf.foreign_key.source_composite == component.root}
# # ' IDENTITY'
# # else
# # ''
# # end
# }#{
# value_constraint ? check_clause(column_name, value_constraint) : ''
# }"
# when MM::Injection
# component.object_type.name
# else
# raise "Can't make a column from #{component}"
# end
# end
# def generate_index index, delayed_indices, indent
# nullable_columns =
# index.all_index_field.select do |ixf|
# !ixf.component.path_mandatory
# end
# contains_nullable_columns = nullable_columns.size > 0
#
# primary = index.composite_as_primary_index && !contains_nullable_columns
# column_names =
# index.all_index_field.map do |ixf|
# column_name(ixf.component)
# end
# clustering =
# (index.composite_as_primary_index ? ' CLUSTERED' : ' NONCLUSTERED')
#
# if contains_nullable_columns
# table_name = safe_table_name(index.composite)
# delayed_indices <<
# 'CREATE UNIQUE'+clustering+' INDEX '+
# escape("#{table_name(index.composite)}By#{column_names*''}", index_name_max) +
# " ON #{table_name}("+column_names.map{|n| escape(n, column_name_max)}*', ' +
# ") WHERE #{
# nullable_columns.
# map{|ixf| safe_column_name ixf.component}.
# map{|column_name| column_name + ' IS NOT NULL'} *
# ' AND '
# }"
# nil
# else
# # '-- '+index.inspect
# " " * indent + (primary ? 'PRIMARY KEY' : 'UNIQUE') +
# clustering +
# "(#{column_names.map{|n| escape(n, column_name_max)}*', '})"
# end
# end
# def generate_foreign_key fk, indent
# # '-- '+fk.inspect
# " " * indent + "FOREIGN KEY (" +
# fk.all_foreign_key_field.map{|fkf| safe_column_name fkf.component}*", " +
# ") REFERENCES