lib/assimilate/batch.rb in assimilate-0.4.2 vs lib/assimilate/batch.rb in assimilate-0.5.0

- old
+ new

@@ -118,21 +118,21 @@
     dkey = @domainkey.gsub(/^_/,'')
     raise(Assimilate::DuplicateImportError, "duplicate batch for datestamp #{datestamp}") if @catalog.batches.find(dkey => @domain, 'datestamp' => @datestamp).to_a.any?
     raise(Assimilate::DuplicateImportError, "duplicate batch for file #{@filename}") if @catalog.batches.find(dkey => @domain, 'filename' => @filename).to_a.any?
-    @catalog.batches.insert({
+    @catalog.batches.insert_one({
       dkey => @domain,
       'datestamp' => @datestamp,
       'filename' => @filename
     })
   end

   def apply_deletes
     unless @suppress_deletes
       @deleted_keys.each do |key|
-        @catalog.catalog.update(
+        @catalog.catalog.update_one(
           {
             @domainkey => domain,
             idfield => key
           },
           {
@@ -146,17 +146,17 @@
   INSERT_BATCH_SIZE = 1000 # default batch size for bulk loading into mongo

   def apply_inserts
     @adds.each_slice(INSERT_BATCH_SIZE) do |slice|
       # mongo insert can't handle CSV::Row objects, must be converted to regular hashes
-      @catalog.catalog.insert(decorate(slice))
+      @catalog.catalog.insert_many(decorate(slice))
     end
   end

   def apply_updates
     marker = @catalog.config[:update_marker]
     @changes.each do |key, diffs|
-      @catalog.catalog.update(
+      @catalog.catalog.update_one(
         {
           @domainkey => domain,
           idfield => key
         },
         {