Sha256: 7599b1e7c3689194dd4804ade105c671e19bb157cfc426c2ef9b6303c97d2a49
Contents?: true
Size: 1.79 KB
Versions: 20
Compression:
Stored size: 1.79 KB
Contents
module Friendly
  # DataStore wraps a Sequel-style database handle and translates
  # persistable objects (anything responding to #table_name) plus query
  # objects (responding to #conditions, #limit, #offset, #order) into
  # dataset operations.
  #
  # It also supports per-thread batched inserts: between #start_batch and
  # #flush_batch, #insert accumulates rows in Thread.current instead of
  # writing immediately, and #flush_batch writes them with multi_insert.
  class DataStore
    attr_reader :database

    # @param database [Object] a Sequel::Database-like handle; only #from
    #   is called on it here.
    def initialize(database)
      @database = database
    end

    # Insert a row. Goes through the thread-local batch when one is
    # active, otherwise writes immediately and returns the new id.
    def insert(persistable, attributes)
      if batch?
        batch_insert(persistable, attributes)
      else
        immediate_insert(persistable, attributes)
      end
    end

    # Fetch all rows matching the query's conditions, honoring
    # limit/offset and order when present.
    #
    # NOTE(review): `filtered.map` with no block dates from Ruby 1.8 /
    # old Sequel, where it returned the array of rows; on modern Ruby,
    # Enumerable#map without a block returns an Enumerator — confirm
    # against the Sequel version in use before changing it.
    def all(persistable, query)
      filtered = dataset(persistable)
      filtered = filtered.where(query.conditions) unless query.conditions.empty?
      if query.limit || query.offset
        filtered = filtered.limit(query.limit, query.offset)
      end
      filtered = filtered.order(query.order) if query.order
      filtered.map
    end

    # First row matching the query's conditions, or nil.
    def first(persistable, query)
      dataset(persistable).first(query.conditions)
    end

    # Update the row with the given id.
    def update(persistable, id, attributes)
      dataset(persistable).where(:id => id).update(attributes)
    end

    # Delete the row with the given id.
    def delete(persistable, id)
      dataset(persistable).where(:id => id).delete
    end

    # Count rows matching the query's conditions.
    def count(persistable, query)
      dataset(persistable).where(query.conditions).count
    end

    # Begin accumulating inserts for the current thread. The block form
    # of Hash.new gives each table its own fresh array (a shared-default
    # Hash.new([]) would silently merge all tables into one array).
    def start_batch
      Thread.current[:friendly_batch] = Hash.new { |h, k| h[k] = [] }
    end

    # Discard any pending batch for the current thread.
    def reset_batch
      Thread.current[:friendly_batch] = nil
    end

    # Write all batched rows with multi_insert, then clear the batch.
    # A no-op when no batch is active (previously this raised
    # NoMethodError on nil when called without #start_batch).
    def flush_batch
      batch = Thread.current[:friendly_batch]
      return unless batch
      batch.keys.each do |k|
        database.from(k).multi_insert(batch[k], :commit_every => 1000)
      end
      reset_batch
    end

    protected

    # Dataset scoped to the persistable's table.
    def dataset(persistable)
      database.from(persistable.table_name)
    end

    # Direct, unbatched insert.
    def immediate_insert(persistable, attributes)
      dataset(persistable).insert(attributes)
    end

    # Queue a row in the current thread's batch.
    def batch_insert(persistable, attributes)
      Thread.current[:friendly_batch][persistable.table_name] << attributes
    end

    # Truthy when a batch is active on this thread.
    def batch?
      Thread.current[:friendly_batch]
    end
  end
end
Version data entries
20 entries across 20 versions & 7 rubygems