lib/loaders/loader_base.rb in datashift-0.40.0 vs lib/loaders/loader_base.rb in datashift-0.40.1
- old
+ new
@@ -28,32 +28,28 @@
# Fwd calls onto the DocumentContext
extend Forwardable
def_delegators :doc_context,
:load_object,
- :loaded_count, :failed_count,
+ :loaded_count, :failed_count, :processed_object_count,
:headers, :reporters, :reporters=
- attr_reader :configuration
-
def initialize
@file_name = ''
@doc_context = DocContext.new(Object)
@binder = Binder.new
-
- @configuration = DataShift::Loaders::Configuration.call
end
def setup_load_class(load_class)
@doc_context = DocContext.new( MapperUtils.ensure_class(load_class) )
end
- def run(file_name, object_class)
+ def run(file_name, load_class)
@file_name = file_name
- setup_load_class(object_class)
+ setup_load_class(load_class)
logger.info("Loading objects of type #{load_object_class}")
# no implementation - derived classes must implement
perform_load
@@ -64,11 +60,11 @@
def reset(object = nil)
doc_context.reset(object)
end
def abort_on_failure?
- !! configuration.abort_on_failure
+ !! DataShift::Configuration.call.abort_on_failure
end
def load_object_class
doc_context.klass
end
@@ -87,15 +83,10 @@
# Returns an instance of DataShift::Binder
#
# Given a list of free text column names from inbound headers,
# map all headers to a domain model containing details on operator, look ups etc.
#
- # See configuration options
- #
- #   [:ignore] : List of column headers to ignore when building operator map
- #   [:include_all] : Include all headers in processing - takes precedence over :force_inclusion
- #
def bind_headers( headers )
logger.info("Binding #{headers.size} inbound headers to #{load_object_class.name}")
@binder ||= DataShift::Binder.new
@@ -108,16 +99,19 @@
raise MappingDefinitionError, 'Failed to map header row to set of database operators'
end
unless binder.missing_bindings.empty?
logger.warn("Following headings couldn't be mapped to #{load_object_class}:")
- binder.missing_bindings.each { |m| logger.warn("Heading [#{m.inbound_name}] - Index (#{m.inbound_index})") }
+ binder.missing_bindings.each { |m| logger.warn("Heading [#{m.source}] - Index (#{m.index})") }
- raise MappingDefinitionError, "Missing mappings for columns : #{binder.missing_bindings.join(',')}" if configuration.strict
+ if DataShift::Configuration.call.strict_inbound_mapping
+ raise MappingDefinitionError, "Missing mappings for columns : #{binder.missing_bindings.join(',')}"
+ end
+
end
- mandatory = DataShift::Mandatory.new(configuration.mandatory)
+ mandatory = DataShift::Mandatory.new(DataShift::Configuration.call.mandatory)
unless mandatory.contains_all?(binder)
mandatory.missing_columns.each do |er|
logger.error "Mandatory column missing - expected column '#{er}'"
end
@@ -139,24 +133,33 @@
# Format :
#
# LoaderClass:
# option: value
#
- def configure_from(yaml_file)
+ def configure_from(yaml_file, klass = nil, locale_key = 'data_flow_schema')
+ setup_load_class(klass) if(klass)
+
logger.info("Reading Datashift loader config from: #{yaml_file.inspect}")
data = YAML.load( ERB.new( IO.read(yaml_file) ).result )
logger.info("Read Datashift config: #{data.inspect}")
@config.merge!(data['LoaderBase']) if data['LoaderBase']
@config.merge!(data[self.class.name]) if data[self.class.name]
- DataShift::Transformation.factory { |f| f.configure_from(load_object_class, yaml_file) }
+ @binder ||= DataShift::Binder.new
- ContextFactory.configure(load_object_class, yaml_file)
+ data_flow_schema = DataShift::DataFlowSchema.new
+
+ # Includes configuring DataShift::Transformation
+ nodes = data_flow_schema.prepare_from_file(yaml_file, locale_key)
+
+ @binder.add_bindings_from_nodes( nodes )
+
+ PopulatorFactory.configure(load_object_class, yaml_file)
logger.info("Loader Options : #{@config.inspect}")
end
end