lib/persevere_adapter.rb in dm-persevere-adapter-0.18.0 vs lib/persevere_adapter.rb in dm-persevere-adapter-0.21.0
- old
+ new
@@ -1,102 +1,225 @@
require 'rubygems'
require 'dm-core'
+require 'dm-aggregates'
require 'extlib'
require 'json'
+require 'bigdecimal'
require 'model_json_support'
require 'persevere'
+class BigDecimal
+ alias to_json_old to_json
+
+ def to_json
+ to_s
+ end
+end
+
module DataMapper
- module Migrations
- module PersevereAdapter
- # @api private
- def self.included(base)
- DataMapper.extend(Migrations::SingletonMethods)
+ module Aggregates
+ module PersevereAdapter
+ def aggregate(query)
+ records = []
+ fields = query.fields
+ field_size = fields.size
+
+ connect if @persevere.nil?
+ resources = Array.new
+ json_query = make_json_query(query)
+ path = "/#{query.model.storage_name}/#{json_query}"
- [ :Repository, :Model ].each do |name|
- DataMapper.const_get(name).send(:include, Migrations.const_get(name))
+ response = @persevere.retrieve(path)
+
+ if response.code == "200"
+ # results = JSON.parse(response.body)
+ results = [response.body]
+ results.each do |row_of_results|
+ row = query.fields.zip([row_of_results].flatten).map do |field, value|
+ if field.respond_to?(:operator)
+ send(field.operator, field.target, value)
+ else
+ field.typecast(value)
+ end
+ end
+ records << (field_size > 1 ? row : row[0])
end
end
+ records
+ end # aggregate method
+
+ private
+
+ def count(property, value)
+ value.to_i
+ end
+
+ def min(property, value)
+ values = JSON.parse("[#{value}]").flatten.compact
+ if values.is_a?(Array)
+ values.map! { |v| property.typecast(v) }
+ return values.sort[0].new_offset(Rational(Time.now.getlocal.gmt_offset/3600, 24)) if property.type == DateTime
+ return values.sort[0] + Time.now.gmt_offset if property.type == Time
+ return values.sort[0]
+ end
+ property.typecast(value)
+ end
+
+ def max(property, value)
+ values = JSON.parse("[#{value}]").flatten.compact
+ if values.is_a?(Array)
+ values.map! { |v| property.typecast(v) }
+ return values.sort[-1].new_offset(Rational(Time.now.getlocal.gmt_offset/3600, 24)) if property.type == DateTime
+ return values.sort[-1] + Time.now.gmt_offset if property.type == Time
+ return values.sort[-1]
+ end
+ property.typecast(value)
+ end
+
+ def avg(property, value)
+ values = JSON.parse(value).compact
+ result = values.inject(0.0){|sum,i| sum+=i }/values.length
+ property.type == Integer ? result.to_f : property.typecast(result)
+ end
+
+ def sum(property, value)
+ property.typecast(value)
+ end
+ end # module PersevereAdapter
+ end # module Aggregates
- # Returns whether the storage_name exists.
- #
- # @param [String] storage_name
- # a String defining the name of a storage, for example a table name.
- #
- # @return [Boolean]
- # true if the storage exists
- #
- # @api semipublic
- def storage_exists?(storage_name)
- class_names = JSON.parse(@persevere.retrieve('/Class/[=id]').body)
- return true if class_names.include?("Class/"+storage_name)
- false
+ module Migrations
+ module PersevereAdapter
+ # @api private
+ def self.included(base)
+ DataMapper.extend(Migrations::SingletonMethods)
+
+ [ :Repository, :Model ].each do |name|
+ DataMapper.const_get(name).send(:include, Migrations.const_get(name))
end
+ end
- ##
- # Creates the persevere schema from the model.
- #
- # @param [DataMapper::Model] model
- # The model that corresponds to the storage schema that needs to be created.
- #
- # @api semipublic
- def create_model_storage(model)
- name = self.name
- properties = model.properties_with_subclasses(name)
+ # Returns whether the storage_name exists.
+ #
+ # @param [String] storage_name
+ # a String defining the name of a storage, for example a table name.
+ #
+ # @return [Boolean]
+ # true if the storage exists
+ #
+ # @api semipublic
+ def storage_exists?(storage_name)
+ class_names = JSON.parse(@persevere.retrieve('/Class/[=id]').body)
+ return true if class_names.include?("Class/"+storage_name)
+ false
+ end
- return false if storage_exists?(model.storage_name(name))
- return false if properties.empty?
+ ##
+ # Creates the persevere schema from the model.
+ #
+ # @param [DataMapper::Model] model
+ # The model that corresponds to the storage schema that needs to be created.
+ #
+ # @api semipublic
+ def create_model_storage(model)
+ name = self.name
+ properties = model.properties_with_subclasses(name)
- schema_hash = model.to_json_schema_compatible_hash
+ return false if storage_exists?(model.storage_name(name))
+ return false if properties.empty?
- return true unless put_schema(schema_hash).nil?
- false
- end
+ schema_hash = model.to_json_schema_compatible_hash
- ##
- # Updates the persevere schema from the model.
- #
- # @param [DataMapper::Model] model
- # The model that corresponds to the storage schema that needs to be updated.
- #
- # @api semipublic
- def upgrade_model_storage(model)
- name = self.name
- properties = model.properties_with_subclasses(name)
+ return true unless put_schema(schema_hash) == false
+ false
+ end
- if success = create_model_storage(model)
- return properties
- end
+ ##
+ # Updates the persevere schema from the model.
+ #
+ # @param [DataMapper::Model] model
+ # The model that corresponds to the storage schema that needs to be updated.
+ #
+ # @api semipublic
+ def upgrade_model_storage(model)
+ name = self.name
+ properties = model.properties_with_subclasses(name)
- table_name = model.storage_name(name)
- schema_hash = model.to_json_schema_compatible_hash
- end
+ if success = create_model_storage(model)
+ return properties
+ end
- ##
- # Destroys the persevere schema from the model.
- #
- # @param [DataMapper::Model] model
- # The model that corresponds to the storage schema that needs to be destroyed.
- #
- # @api semipublic
- def destroy_model_storage(model)
- return true unless storage_exists?(model.storage_name(name))
- schema_hash = model.to_json_schema_compatible_hash
- return true unless delete_schema(schema_hash).nil?
- false
- end
+ table_name = model.storage_name(name)
+ schema_hash = model.to_json_schema_compatible_hash
+ end
- end # module PersevereAdapter
- end # module Migrations
-
+ ##
+ # Destroys the persevere schema from the model.
+ #
+ # @param [DataMapper::Model] model
+ # The model that corresponds to the storage schema that needs to be destroyed.
+ #
+ # @api semipublic
+ def destroy_model_storage(model)
+ return true unless storage_exists?(model.storage_name(name))
+ schema_hash = model.to_json_schema_compatible_hash
+ return true unless delete_schema(schema_hash) == false
+ false
+ end
+
+ end # module PersevereAdapter
+ end # module Migrations
+
+ class Reflection
+ module PersevereAdapter
+ @@reserved_classes = ['User','Transaction','Capability','File','Class']
+
+ # def reflect!
+ # fetch_models.map{|m| DataMapper::Factory.build(m) }
+ # end
+
+ def fetch_models
+ JSON.parse(self.get_schema).select{|schema| !@@reserved_classes.include?(schema['id'])}
+ end
+
+ end # module PersevereAdapter
+ end # class Reflection
+
+
module Adapters
class PersevereAdapter < AbstractAdapter
extend Chainable
extend Deprecate
-
+
include Migrations::PersevereAdapter
+
+ # Default types for all data object based adapters.
+ #
+ # @return [Hash] default types for data objects adapters.
+ #
+ # @api private
+ def type_map
+ length = Property::DEFAULT_LENGTH
+ precision = Property::DEFAULT_PRECISION
+ scale = Property::DEFAULT_SCALE_BIGDECIMAL
+
+ @type_map ||= {
+ Types::Serial => { :primitive => 'string' },
+ Types::Boolean => { :primitive => 'boolean' },
+ Integer => { :primitive => 'integer'},
+ String => { :primitive => 'string'},
+ Class => { :primitive => 'string'},
+ BigDecimal => { :primitive => 'number'},
+ Float => { :primitive => 'number'},
+ DateTime => { :primitive => 'string', :format => 'date-time'},
+ Date => { :primitive => 'string', :format => 'date'},
+ Time => { :primitive => 'string', :format => 'time'},
+ TrueClass => { :primitive => 'boolean'},
+ Types::Text => { :primitive => 'string'}
+ }.freeze
+ end
##
# Used by DataMapper to put records into a data-store: "INSERT"
# in SQL-speak. It takes an array of the resources (model
# instances) to be saved. Resources each have a key that can be
@@ -123,11 +246,12 @@
# sort of table.
#
tblname = resource.model.storage_name
path = "/#{tblname}/"
- payload = resource.attributes.reject{ |key,value| value.nil? }
+ payload = make_json_compatible_hash(resource)
+
payload.delete(:id)
response = @persevere.create(path, payload)
# Check the response, this needs to be more robust and raise
@@ -136,12 +260,15 @@
if response.code == "201"# good:
rsrc_hash = JSON.parse(response.body)
# Typecast attributes, DM expects them properly cast
resource.model.properties.each do |prop|
value = rsrc_hash[prop.field.to_s]
- if !value.nil?
- rsrc_hash[prop.field.to_s] = prop.typecast(value)
+ rsrc_hash[prop.field.to_s] = prop.typecast(value) unless value.nil?
+ # Shift date/time objects to the correct timezone because persevere is UTC
+ case prop
+ when DateTime then rsrc_hash[prop.field.to_s] = value.new_offset(Rational(Time.now.getlocal.gmt_offset/3600, 24))
+ when Time then rsrc_hash[prop.field.to_s] = value.getlocal
end
end
serial.set!(resource, rsrc_hash["id"]) unless serial.nil?
@@ -184,11 +311,11 @@
resources.each do |resource|
tblname = resource.model.storage_name
path = "/#{tblname}/#{resource.id}"
- payload = resource.attributes.reject{ |key,value| value.nil? }
+ payload = make_json_compatible_hash(resource)
result = @persevere.update(path, payload)
if result.code == "200"
updated += 1
@@ -233,33 +360,37 @@
# @api semipublic
def read_many(query)
connect if @persevere.nil?
resources = Array.new
- json_query = make_json_query(query)
+ json_query, headers = make_json_query(query)
tblname = query.model.storage_name
path = "/#{tblname}/#{json_query}"
-
- response = @persevere.retrieve(path)
-
- if response.code == "200"
+
+ response = @persevere.retrieve(path, headers)
+ if response.code.match(/20?/)
results = JSON.parse(response.body)
results.each do |rsrc_hash|
# Typecast attributes, DM expects them properly cast
query.model.properties.each do |prop|
value = rsrc_hash[prop.field.to_s]
- if !value.nil?
- rsrc_hash[prop.field.to_s] = prop.typecast(value)
+ rsrc_hash[prop.field.to_s] = prop.typecast(value) unless value.nil?
+ # Shift date/time objects to the correct timezone because persevere is UTC
+ case prop
+ when DateTime then rsrc_hash[prop.field.to_s] = value.new_offset(Rational(Time.now.getlocal.gmt_offset/3600, 24))
+ when Time then rsrc_hash[prop.field.to_s] = value.getlocal
end
end
end
resources = query.model.load(results, query)
end
+ # We could almost eliminate this if regexp was working in persevere.
query.filter_records(resources)
+ # resources
end
alias :read :read_many
##
@@ -312,39 +443,38 @@
else
path = "/Class/#{project}/#{name}"
end
result = @persevere.retrieve(path)
-
+
if result.code == "200"
return result.body
else
return false
end
end
def put_schema(schema_hash, project = nil)
path = "/Class/"
-
+
if ! project.nil?
if schema_hash.has_key?("id")
if ! schema_hash['id'].index(project)
schema_hash['id'] = "#{project}/#{schema_hash['id']}"
end
else
puts "You need an id key/value in the hash"
end
end
-
result = @persevere.create(path, schema_hash)
if result.code == '201'
return JSON.parse(result.body)
else
return false
end
end
-
+
def update_schema(schema_hash, project = nil)
id = schema_hash['id']
payload = schema_hash.reject{|key,value| key.to_sym.eql?(:id) }
@@ -352,20 +482,20 @@
if project.nil?
path = "/Class/#{id}"
else
path = "/Class/#{project}/#{id}"
end
- # debugger
+
result = @persevere.update(path, payload)
-
+
if result.code == '200'
return result.body
else
return false
end
end
-
+
def delete_schema(schema_hash, project = nil)
if ! project.nil?
if schema_hash.has_key?("id")
if ! schema_hash['id'].index(project)
schema_hash['id'] = "#{project}/#{schema_hash['id']}"
@@ -374,11 +504,11 @@
puts "You need an id key/value in the hash"
end
end
path = "/Class/#{schema_hash['id']}"
result = @persevere.delete(path)
-
+
if result.code == "204"
return true
else
return false
end
@@ -458,62 +588,133 @@
#
# @param [Query] query
# The DataMapper query object passed in
#
# @api semipublic
- def make_json(resource)
- json_rsrc = nil
-
- # Gather up all the attributes
- json_rsrc = resource.attributes.to_json
+ def make_json_compatible_hash(resource)
+ json_rsrc = Hash.new
+ resource.attributes(:property).each do |property, value|
+ next if value.nil?
+ json_rsrc[property.field] = case value
+ when DateTime then value.new_offset(0).strftime("%Y-%m-%dT%H:%M:%SZ")
+ when Date then value.to_s
+ when Time then value.getutc.strftime("%H:%M:%S")
+ else value
+ end
+ end
+ json_rsrc
end
##
# Convert a DataMapper Query to a JSON Query.
#
# @param [Query] query
# The DataMapper query object passed in
#
# @api semipublic
-
def make_json_query(query)
+ def process_in(value, candidate_set)
+ result_string = Array.new
+ candidate_set.to_a.each do |candidate|
+ result_string << "#{value}=#{candidate}"
+ end
+ if result_string.length > 0
+ "(#{result_string.join("|")})"
+ else
+ "#{value}=''"
+ end
+ end
+
+ def process_condition(condition)
+ case condition
+ # Persevere 1.0 regular expressions are disabled for security so we pass them back for DataMapper query filtering
+ # without regular expressions, the like operator is inordinately challenging, hence we pass it back
+ # when :like then "RegExp(\"#{condition.value.gsub!('%', '*')}\").test(#{condition.subject.name})"
+ # when :regexp then "RegExp(\"#{condition.value.source}\").test(#{condition.subject.name})"
+ when DataMapper::Query::Conditions::RegexpComparison then []
+ when DataMapper::Query::Conditions::LikeComparison then []
+ when DataMapper::Query::Conditions::AndOperation then "(#{condition.operands.map { |op| process_condition(op) }.join("&")})"
+ when DataMapper::Query::Conditions::OrOperation then "(#{condition.operands.map { |op| process_condition(op) }.join("|")})"
+ when DataMapper::Query::Conditions::NotOperation then
+ inside = process_condition(condition.operand)
+ inside.empty? ? [] : "!(%s)" % inside
+ when DataMapper::Query::Conditions::InclusionComparison then process_in(condition.subject.name, condition.value)
+ when DataMapper::Query::Conditions::EqualToComparison then condition.to_s.gsub(' ', '').gsub('nil', 'undefined')
+ when Array
+ old_statement, bind_values = condition
+ statement = old_statement.dup
+ bind_values.each{ |bind_value| statement.sub!('?', bind_value.to_s) }
+ statement.gsub(' ', '')
+ else condition.to_s.gsub(' ', '')
+ end
+ end
+
+ json_query = ""
query_terms = Array.new
+ order_operations = Array.new
+ field_ops = Array.new
+ headers = Hash.new
- conditions = query.conditions
+ query.conditions.each do |condition|
+ query_terms << process_condition(condition)
+ end
- conditions.each do |condition|
- operator, property, bind_value = condition
- if ! property.nil? && !bind_value.nil?
- v = property.typecast(bind_value)
- if v.is_a?(String)
- value = "'#{bind_value}'"
- else
- value = "#{bind_value}"
+ if query_terms.flatten.length != 0
+ json_query += "[?#{query_terms.join("][?")}]"
+ end
+
+ query.fields.each do |field|
+ if field.respond_to?(:operator)
+ field_ops << case field.operator
+ when :count then
+ if field.target.is_a?(DataMapper::Property)
+ "[?#{field.target.name}!=undefined].length"
+ else # field.target is all.
+ ".length"
+ end
+ when :min
+ if field.target.type == DateTime || field.target.type == Time || field.target.type == Date
+ "[=#{field.target.name}]"
+ else
+ ".min(?#{field.target.name})"
+ end
+ when :max
+ if field.target.type == DateTime || field.target.type == Time || field.target.type == Date
+ "[=#{field.target.name}]"
+ else
+ ".max(?#{field.target.name})"
+ end
+ when :sum
+ ".sum(?#{field.target.name})"
+ when :avg
+ "[=#{field.target.name}]"
+ end
+ end
+ end
+
+ json_query += field_ops.join("")
+
+ if query.order && query.order.any?
+ query.order.map do |direction|
+ order_operations << case direction.operator
+ when :asc then "[\/#{direction.target.field}]"
+ when :desc then "[\\#{direction.target.field}]"
end
-
- query_terms << case operator
- when :eql then "#{property.field()}=#{value}"
- when :lt then "#{property.field()}<#{value}"
- when :gt then "#{property.field()}>#{value}"
- when :lte then "#{property.field()}<=#{value}"
- when :gte then "#{property.field()}=>#{value}"
- when :not then "#{property.field()}!=#{value}"
- when :like then "#{property.field()}~'*#{value}*'"
- else puts "Unknown condition: #{operator}"
- end
end
end
- if query_terms.length != 0
- query = "?#{query_terms.join("&")}"
- else
- query = ""
- end
+ json_query += order_operations.join("")
- query
+ offset = query.offset.to_i
+ limit = query.limit.nil? ? nil : query.limit.to_i + offset - 1
+
+ if offset != 0 || !limit.nil?
+ headers.merge!({"Range", "items=#{offset}-#{limit}"})
+ end
+ # puts "#{query.inspect}"
+ # puts json_query
+ return json_query, headers
end
end # class PersevereAdapter
const_added(:PersevereAdapter)
end # module Adapters
-
-
-end # module DataMapper
+end # module DataMapper
\ No newline at end of file