require 'yaml'
require 'time'
require 'date'

require 'nano/kernel/constant'

module Og

module SqlUtils

  # Escape an SQL string.
  def escape(str)
    return nil unless str
    return str.gsub(/'/, "''")
  end

  # Convert a Ruby time to an SQL timestamp.
  #--
  # TODO: Optimize this.
  #++
  def timestamp(time = Time.now)
    return nil unless time
    return time.strftime("%Y-%m-%d %H:%M:%S")
  end

  # Output YYYY-mm-dd
  #--
  # TODO: Optimize this.
  #++
  def date(date)
    return nil unless date
    return "#{date.year}-#{date.month}-#{date.mday}"
  end

  #--
  # TODO: implement me!
  #++
  def blob(val)
    val
  end

  # Parse an integer.
  def parse_int(int)
    int = int.to_i if int
    int
  end

  # Parse a float.
  def parse_float(fl)
    fl = fl.to_f if fl
    fl
  end

  # Parse an SQL timestamp.
  #--
  # TODO: Optimize this.
  #++
  def parse_timestamp(str)
    return nil unless str
    return Time.parse(str)
  end

  # Input YYYY-mm-dd
  #--
  # TODO: Optimize this.
  #++
  def parse_date(str)
    return nil unless str
    return Date.strptime(str)
  end

  # Parse a boolean:
  #   true, 1, t => true
  #   other      => false
  def parse_boolean(str)
    return true if (str == 'true' || str == 't' || str == '1')
    return false
  end

  #--
  # TODO: implement me!!
  #++
  def parse_blob(val)
    val
  end

  # Escape the various Ruby types.
  def quote(val)
    case val
    when Fixnum, Integer, Float
      val ? val.to_s : 'NULL'
    when String
      val ? "'#{escape(val)}'" : 'NULL'
    when Time
      val ? "'#{timestamp(val)}'" : 'NULL'
    when Date
      val ? "'#{date(val)}'" : 'NULL'
    when TrueClass
      val ? "'t'" : 'NULL'
    else
      # gmosx: keep the '' for nil symbols.
      val ? escape(val.to_yaml) : ''
    end
  end

  # Apply table name conventions to a class name.
  def tableize(klass)
    "#{klass.to_s.gsub(/::/, "_").downcase}"
  end

  def table(klass)
    klass.ann.self[:sql_table] || klass.ann.self[:table] || "#{Og.table_prefix}#{tableize(klass)}"
  end

  def join_object_ordering(obj1, obj2)
    if obj1.class.to_s <= obj2.class.to_s
      return obj1, obj2
    else
      return obj2, obj1, true
    end
  end

  def join_class_ordering(class1, class2)
    if class1.to_s <= class2.to_s
      return class1, class2
    else
      return class2, class1, true
    end
  end

  def build_join_name(class1, class2, postfix = nil)
    # Don't reorder arguments, as this is used in places that
    # have already determined the order they want.
    "#{Og.table_prefix}j_#{tableize(class1)}_#{tableize(class2)}#{postfix}"
  end

  def join_table(class1, class2, postfix = nil)
    first, second = join_class_ordering(class1, class2)
    build_join_name(first, second, postfix)
  end

  def join_table_index(key)
    "#{key}_idx"
  end

  def join_table_key(klass)
    klass = klass.schema_inheritance_root_class if klass.schema_inheritance_child?
    "#{klass.to_s.split('::').last.downcase}_oid"
  end

  def join_table_keys(class1, class2)
    if class1 == class2
      # Fix for the self-join case.
      return join_table_key(class1), "#{join_table_key(class2)}2"
    else
      return join_table_key(class1), join_table_key(class2)
    end
  end

  def ordered_join_table_keys(class1, class2)
    first, second = join_class_ordering(class1, class2)
    return join_table_keys(first, second)
  end

  def join_table_info(owner_class, target_class, postfix = nil)
    # Some fixes for schema inheritance.
    raise "Undefined owner_class in #{target_class}" unless owner_class
    raise "Undefined target_class in #{owner_class}" unless target_class

    owner_class = owner_class.schema_inheritance_root_class if owner_class.schema_inheritance_child?
    target_class = target_class.schema_inheritance_root_class if target_class.schema_inheritance_child?
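
    # Illustrative note (an assumption, not from the original source): with an
    # owner_class of Article, a target_class of Tag and Og.table_prefix set to
    # "og_", the keys computed below are "article_oid" and "tag_oid", and the
    # join table name resolves to "og_j_article_tag".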

    owner_key, target_key = join_table_keys(owner_class, target_class)

    first, second, changed = join_class_ordering(owner_class, target_class)

    if changed
      first_key, second_key = target_key, owner_key
    else
      first_key, second_key = owner_key, target_key
    end

    return {
      :table => join_table(owner_class, target_class, postfix),
      :owner_key => owner_key,
      :target_key => target_key,
      :first_table => table(first),
      :first_key => first_key,
      :first_index => join_table_index(first_key),
      :second_table => table(second),
      :second_key => second_key,
      :second_index => join_table_index(second_key)
    }
  end

  # Subclasses can override this if they need a different
  # syntax.
  def create_join_table_sql(join_table_info, suffix = 'NOT NULL', key_type = 'integer')
    join_table = join_table_info[:table]
    first_index = join_table_info[:first_index]
    first_key = join_table_info[:first_key]
    second_key = join_table_info[:second_key]
    second_index = join_table_info[:second_index]

    sql = []
    sql << %{
      CREATE TABLE #{join_table} (
        #{first_key} integer NOT NULL,
        #{second_key} integer NOT NULL,
        PRIMARY KEY(#{first_key}, #{second_key})
      )
    }
    # gmosx: not that useful?
    # sql << "CREATE INDEX #{first_index} ON #{join_table} (#{first_key})"
    # sql << "CREATE INDEX #{second_index} ON #{join_table} (#{second_key})"

    return sql
  end

end

# An abstract SQL-powered store.
class SqlStore < Store
  extend SqlUtils
  include SqlUtils

  # The connection to the backend SQL RDBMS.
  attr_accessor :conn

  def initialize(options)
    super

    # The default Ruby <-> SQL type mappings, should be valid
    # for most RDBMSs.
    @typemap = {
      Integer => 'integer',
      Fixnum => 'integer',
      Float => 'float',
      String => 'text',
      Time => 'timestamp',
      Date => 'date',
      TrueClass => 'boolean',
      Object => 'text',
      Array => 'text',
      Hash => 'text',
      Og::Blob => 'bytea' # psql
    }
  end

  #--
  # FIXME: not working.
  #++
  def enable_logging
    require 'glue/aspects'
    klass = self.class
    klass.send :include, Glue::Aspects
    klass.pre "Logger.info sql", :on => [:exec, :query]
    Glue::Aspects.wrap(klass, [:exec, :query])
  end

  # Returns a list of tables that exist within the database but are
  # not managed by the supplied manager.
  def unmanaged_tables(manager)
    ret = Array.new
    mt = managed_tables(manager)

    @conn.list_tables.each do |table|
      ret << table unless mt.include?(table)
    end

    ret
  end

  # Returns a list of tables within the database that are there to
  # support a class managed by the supplied manager.
  def managed_tables(manager)
    ret = Array.new

    manager.managed_classes.each do |klass|
      ret << klass::OGTABLE
      ret.concat(klass.relations.reject{|rel| not rel.options[:join_table]}.map{|rel| rel.options[:join_table]})
    end

    ret
  end

  # Enchants a class.
  def enchant(klass, manager)
    # Set up the table where this class is mapped.
    if klass.schema_inheritance_child?
      # farms: allow deeper inheritance (TODO: use annotation :superclass)
      klass.const_set 'OGTABLE', table(klass.schema_inheritance_root_class)
    else
      klass.const_set 'OGTABLE', table(klass)
    end

    klass.module_eval 'def self.table; OGTABLE; end'

    eval_og_allocate(klass)

    super

    unless klass.polymorphic_parent?
      # Precompile class specific lifecycle methods.
      eval_og_create_schema(klass)
      eval_og_insert(klass)
      eval_og_update(klass)
      eval_og_delete(klass)

      # Create the table if needed.
      klass.allocate.og_create_schema(self)
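
      # Rough sketch of the overall effect (illustrative, assuming a simple
      # class Article and an "og_" table prefix): Article::OGTABLE becomes
      # "og_article", and instances gain precompiled og_insert, og_update,
      # og_delete, og_read and og_create_schema methods, plus an og_allocate
      # class method.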

      # Finish up with eval_og_read, since we can't do that
      # until after the table is created.
      # Possible FIXME: This means you can't do any find-type
      # operations in og_create_schema. Luckily, you're most
      # likely to want to do .create, which is covered by
      # og_insert.
      eval_og_read(klass)
    end
  end

  # :section: Lifecycle methods.

  # Loads an object from the store using the primary key.
  def load(pk, klass)
    pk_field = klass.primary_key.field || klass.primary_key.symbol
    sql = "SELECT * FROM #{klass::OGTABLE} WHERE #{pk_field}=#{pk}"
    sql << " AND ogtype='#{klass}'" if klass.schema_inheritance_child?
    res = query sql
    read_one(res, klass)
  end
  alias_method :exist?, :load

  # Reloads an object from the store.
  def reload(obj, pk)
    raise 'Cannot reload unmanaged object' unless obj.saved?
    sql = "SELECT * FROM #{obj.class.table} WHERE #{obj.class.pk_symbol}=#{pk}"
    sql << " AND ogtype='#{obj.class}'" if obj.class.schema_inheritance_child?
    res = query sql
    obj.og_read(res.next, 0)
  ensure
    res.close if res
  end

  # If a properties collection is provided, only updates the
  # selected properties. Pass the required properties as symbols
  # or strings.
  #--
  # gmosx, THINK: condition is not really useful here :(
  #++
  def update(obj, options = nil)
    if options and properties = options[:only]
      if properties.is_a?(Array)
        set = []
        for p in properties
          set << "#{p}=#{quote(obj.send(p))}"
        end
        set = set.join(',')
      else
        set = "#{properties}=#{quote(obj.send(properties))}"
      end

      sql = "UPDATE #{obj.class.table} SET #{set} WHERE #{obj.class.pk_symbol}=#{obj.pk}"
      sql << " AND #{options[:condition]}" if options[:condition]
      sql_update(sql)
    else
      obj.og_update(self, options)
    end
  end

  # Update selected properties of an object or class of
  # objects.
  def update_properties(target, *properties)
    update(target, :only => properties)
  end
  alias_method :pupdate, :update_properties
  alias_method :update_property, :update_properties

  # More generalized method, also allows for batch updates.
  def update_by_sql(target, set, options = nil)
    set = set.gsub(/@/, '')

    if target.is_a?(Class)
      sql = "UPDATE #{target.table} SET #{set} "
      sql << " WHERE #{options[:condition]}" if options and options[:condition]
      sql_update(sql)
    else
      sql = "UPDATE #{target.class.table} SET #{set} WHERE #{target.class.pk_symbol}=#{target.pk}"
      sql << " AND #{options[:condition]}" if options and options[:condition]
      sql_update(sql)
    end
  end

  # Find a collection of objects.
  #
  # === Examples
  #
  #   User.find(:condition => 'age > 15', :order => 'score ASC', :offset => 10, :limit => 10)
  #   Comment.find(:include => :entry)
  def find(options)
    klass = options[:class]
    sql = resolve_options(klass, options)
    read_all(query(sql), klass, options)
  end

  # Find one object.
  def find_one(options)
    klass = options[:class]
    # gmosx, THINK: should not set this by default.
    # options[:limit] ||= 1
    sql = resolve_options(klass, options)
    read_one(query(sql), klass, options)
  end

  # Perform a custom sql query and deserialize the
  # results.
  def select(sql, klass)
    sql = "SELECT * FROM #{klass.table} " + sql unless sql =~ /SELECT/i
    read_all(query(sql), klass)
  end
  alias_method :find_by_sql, :select

  # Specialized one result version of select.
  def select_one(sql, klass)
    sql = "SELECT * FROM #{klass.table} " + sql unless sql =~ /SELECT/i
    read_one(query(sql), klass)
  end
  alias_method :find_by_sql_one, :select_one

  # Perform an aggregation over query results.
  def aggregate(options)
    if options.is_a?(String)
      sql = options
    else
      aggregate = options[:aggregate] || 'COUNT(*)'
      sql = "SELECT #{aggregate} FROM #{options[:class].table}"

      if condition = options[:condition]
        sql << " WHERE #{condition}"
        sql << " AND " if options[:class].schema_inheritance_child?
      else
        sql << " WHERE " if options[:class].schema_inheritance_child?
      end

      sql << "ogtype='#{options[:class]}'" if options[:class].schema_inheritance_child?
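
      # At this point (illustrative, assuming a plain class User mapped to an
      # "og_user" table and :condition => 'age > 18') sql holds:
      #
      #   SELECT COUNT(*) FROM og_user WHERE age > 18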
    end

    query(sql).first_value.to_i
  end
  alias_method :count, :aggregate

  # Relate two objects through an intermediate join table.
  # Typically used in joins_many and many_to_many relations.
  def join(obj1, obj2, table, options = nil)
    first, second = join_object_ordering(obj1, obj2)
    first_key, second_key = ordered_join_table_keys(obj1.class, obj2.class)

    if options
      exec "INSERT INTO #{table} (#{first_key},#{second_key}, #{options.keys.join(',')}) VALUES (#{first.pk},#{second.pk}, #{options.values.map { |v| quote(v) }.join(',')})"
    else
      exec "INSERT INTO #{table} (#{first_key},#{second_key}) VALUES (#{first.pk}, #{second.pk})"
    end
  end

  # Unrelate two objects by removing their relation from the
  # join table.
  def unjoin(obj1, obj2, table)
    first, second = join_object_ordering(obj1, obj2)
    first_key, second_key = ordered_join_table_keys(obj1.class, obj2.class)
    exec "DELETE FROM #{table} WHERE #{first_key}=#{first.pk} AND #{second_key}=#{second.pk}"
  end

  def delete_all(klass)
    exec "DELETE FROM #{klass.table}"
  end

  # :section: Transaction methods.

  # Start a new transaction.
  def start
    exec('START TRANSACTION') if @transaction_nesting < 1
    @transaction_nesting += 1
  end

  # Commit a transaction.
  def commit
    @transaction_nesting -= 1
    exec('COMMIT') if @transaction_nesting < 1
  end

  # Roll back a transaction.
  def rollback
    @transaction_nesting -= 1
    exec('ROLLBACK') if @transaction_nesting < 1
  end

  # :section: Low level methods.

  # Encapsulates a low level update method.
  def sql_update(sql)
    exec(sql)
    # return affected rows.
  end

  private

  # Create the sql table where objects of this class are
  # persisted.
  def create_table(klass)
    raise 'Not implemented'
  end

  # Drop the sql table where objects of this class are
  # persisted.
  def drop_table(klass)
    # Remove leftover data from some join tables.
    klass.relations.each do |rel|
      if rel.class.to_s == "Og::JoinsMany" and rel.join_table
        target_class = rel.target_class
        exec "DELETE FROM #{rel.join_table}"
      end
    end

    exec "DROP TABLE #{klass.table}"
  end
  alias_method :destroy, :drop_table
  alias_method :drop_schema, :drop_table

  # Evolve (recreate) the sql table where objects of this class
  # are persisted.
  def evolve_table(klass)
    drop_table(klass)
    create_table(klass)
  end
  alias_method :update_table, :evolve_table

  # Return the field for the given property.
  def field_for_property(property)
    if f = property.field
      return f.to_s
    else
      return property.to_s
    end
  end

  # Create the fields that correspond to the klass properties.
  # The generated fields array is used in create_table.
  # If the property has an :sql annotation this overrides the
  # default mapping. If the property has an :extra_sql annotation
  # the extra sql is appended after the default mapping.
  def fields_for_class(klass)
    fields = []
    properties = klass.properties.dup

    if klass.ancestors.include? SchemaInheritanceBase
      # This class is a superclass in a single table inheritance
      # chain, so inject a special ogtype field that holds the
      # class name.
      fields << "ogtype VARCHAR(30)"

      for desc in klass.descendents
        properties.update(desc.properties)
      end
    end

    for p in properties.values
      klass.index(p.symbol) if p.index

      field = field_for_property(p)

      if p.sql
        field << " #{p.sql}"
      else
        field << " #{type_for_class(p.klass)}"
        field << " UNIQUE" if p.unique
        field << " DEFAULT #{p.default.inspect} NOT NULL" if p.default
        field << " #{p.extra_sql}" if p.extra_sql
      end

      fields << field
    end

    return fields
  end

  def type_for_class(klass)
    @typemap[klass]
  end

  # Return an sql string evaluator for the property.
  # No need to optimize this, used only to precalculate code.
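  # For example (illustrative), a String property :name yields the fragment
  #   #{@name ? "'#{SqlStore.escape(@name)}'" : 'NULL'}
  # (with the concrete store class in place of SqlStore), which is later
  # interpolated into the precompiled og_insert/og_update code.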
  # YAML is used to store general Ruby objects to be more
  # portable.
  #--
  # FIXME: add extra handling for float.
  #++
  def write_prop(p)
    if p.klass.ancestors.include?(Integer)
      return "#\{@#{p} || 'NULL'\}"
    elsif p.klass.ancestors.include?(Float)
      return "#\{@#{p} || 'NULL'\}"
    elsif p.klass.ancestors.include?(String)
      return %|#\{@#{p} ? "'#\{#{self.class}.escape(@#{p})\}'" : 'NULL'\}|
    elsif p.klass.ancestors.include?(Time)
      return %|#\{@#{p} ? "'#\{#{self.class}.timestamp(@#{p})\}'" : 'NULL'\}|
    elsif p.klass.ancestors.include?(Date)
      return %|#\{@#{p} ? "'#\{#{self.class}.date(@#{p})\}'" : 'NULL'\}|
    elsif p.klass.ancestors.include?(TrueClass)
      return "#\{@#{p} ? \"'t'\" : 'NULL' \}"
    elsif p.klass.ancestors.include?(Og::Blob)
      return %|#\{@#{p} ? "'#\{#{self.class}.escape(#{self.class}.blob(@#{p}))\}'" : 'NULL'\}|
    else
      # gmosx: keep the '' for nil symbols.
      return %|#\{@#{p} ? "'#\{#{self.class}.escape(@#{p}.to_yaml)\}'" : "''"\}|
    end
  end

  # Return an evaluator for reading the property.
  # No need to optimize this, used only to precalculate code.
  def read_prop(p, col)
    if p.klass.ancestors.include?(Integer)
      return "#{self.class}.parse_int(res[#{col} + offset])"
    elsif p.klass.ancestors.include?(Float)
      return "#{self.class}.parse_float(res[#{col} + offset])"
    elsif p.klass.ancestors.include?(String)
      return "res[#{col} + offset]"
    elsif p.klass.ancestors.include?(Time)
      return "#{self.class}.parse_timestamp(res[#{col} + offset])"
    elsif p.klass.ancestors.include?(Date)
      return "#{self.class}.parse_date(res[#{col} + offset])"
    elsif p.klass.ancestors.include?(TrueClass)
      return "#{self.class}.parse_boolean(res[#{col} + offset])"
    elsif p.klass.ancestors.include?(Og::Blob)
      return "#{self.class}.parse_blob(res[#{col} + offset])"
    else
      return "YAML::load(res[#{col} + offset])"
    end
  end

  # :section: Lifecycle method compilers.

  # Compile the og_insert method for the class.
  def eval_og_insert(klass)
    pk = klass.pk_symbol
    props = klass.properties.values.dup
    values = props.collect { |p| write_prop(p) }.join(',')

    if klass.ann.self[:superclass] or klass.ann.self[:subclasses]
      props << Property.new(:symbol => :ogtype, :klass => String)
      values << ", '#{klass}'"
    end

    sql = "INSERT INTO #{klass.table} (#{props.collect {|p| field_for_property(p)}.join(',')}) VALUES (#{values})"

    klass.module_eval %{
      def og_insert(store)
        #{Glue::Aspects.gen_advice_code(:og_insert, klass.advices, :pre) if klass.respond_to?(:advices)}
        store.exec "#{sql}"
        #{Glue::Aspects.gen_advice_code(:og_insert, klass.advices, :post) if klass.respond_to?(:advices)}
      end
    }
  end

  # Compile the og_update method for the class.
  def eval_og_update(klass)
    pk = klass.pk_symbol
    pk_field = klass.primary_key.field || klass.primary_key.symbol
    props = klass.properties.values.reject { |p| pk == p.symbol }
    updates = props.collect { |p| "#{field_for_property(p)}=#{write_prop(p)}" }

    sql = "UPDATE #{klass::OGTABLE} SET #{updates.join(', ')} WHERE #{pk_field}=#\{@#{pk}\}"

    klass.module_eval %{
      def og_update(store, options = nil)
        #{Glue::Aspects.gen_advice_code(:og_update, klass.advices, :pre) if klass.respond_to?(:advices)}
        sql = "#{sql}"
        sql << " AND \#{options[:condition]}" if options and options[:condition]
        changed = store.sql_update(sql)
        #{Glue::Aspects.gen_advice_code(:og_update, klass.advices, :post) if klass.respond_to?(:advices)}
        return changed
      end
    }
  end

  # Compile the og_read method for the class. This method is
  # used to read (deserialize) the given class from the store.
  # In order to allow for changing field/attribute orders a
  # field mapping hash is used.
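  #
  # Illustrative sketch of the compiled method (assuming properties :oid and
  # :name mapped to columns 0 and 1, and writing the concrete store class as
  # SqlStore):
  #
  #   def og_read(res, row = 0, offset = 0)
  #     @oid = SqlStore.parse_int(res[0 + offset]); @name = res[1 + offset]
  #   end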
  def eval_og_read(klass)
    code = []
    props = klass.properties.values
    field_map = create_field_map(klass)

    for p in props
      f = field_for_property(p).to_sym
      if col = field_map[f]
        code << "@#{p} = #{read_prop(p, col)}"
      end
    end

    code = code.join('; ')

    klass.module_eval %{
      def og_read(res, row = 0, offset = 0)
        #{Glue::Aspects.gen_advice_code(:og_read, klass.advices, :pre) if klass.respond_to?(:advices)}
        #{code}
        #{Glue::Aspects.gen_advice_code(:og_read, klass.advices, :post) if klass.respond_to?(:advices)}
      end
    }
  end

  #--
  # FIXME: is pk needed as parameter?
  #++
  def eval_og_delete(klass)
    klass.module_eval %{
      def og_delete(store, pk, cascade = true)
        #{Glue::Aspects.gen_advice_code(:og_delete, klass.advices, :pre) if klass.respond_to?(:advices)}
        pk ||= @#{klass.pk_symbol}
        transaction do |tx|
          tx.exec "DELETE FROM #{klass.table} WHERE #{klass.pk_symbol}=\#{pk}"
          if cascade and #{klass}.ann.self[:descendants]
            #{klass}.ann.self.descendants.each do |dclass, foreign_key|
              tx.exec "DELETE FROM \#{dclass::OGTABLE} WHERE \#{foreign_key}=\#{pk}"
            end
          end
        end
        #{Glue::Aspects.gen_advice_code(:og_delete, klass.advices, :post) if klass.respond_to?(:advices)}
      end
    }
  end

  # Creates the schema for this class. Can be intercepted with
  # aspects to add special behaviours.
  def eval_og_create_schema(klass)
    klass.module_eval %{
      def og_create_schema(store)
        if Og.create_schema
          #{Glue::Aspects.gen_advice_code(:og_create_schema, klass.advices, :pre) if klass.respond_to?(:advices)}
          # unless self.class.superclass.ancestors.include? SchemaInheritanceBase
            store.send(:create_table, #{klass})
          # end
          #{Glue::Aspects.gen_advice_code(:og_create_schema, klass.advices, :post) if klass.respond_to?(:advices)}
        end
      end
    }
  end

  # Precompile a class specific allocate method. If this is an
  # STI parent class, it reads the class from the resultset.
  def eval_og_allocate(klass)
    if klass.schema_inheritance?
      klass.module_eval %{
        def self.og_allocate(res, row = 0)
          Object.constant(res[0]).allocate
        end
      }
    else
      klass.module_eval %{
        def self.og_allocate(res, row = 0)
          self.allocate
        end
      }
    end
  end

  # :section: Misc methods.

  def handle_sql_exception(ex, sql = nil)
    Logger.error "DB error #{ex}, [#{sql}]"
    Logger.error ex.backtrace.join("\n")
    raise StoreException.new(ex, sql) if Og.raise_store_exceptions
    # FIXME: should return :error or something.
    return nil
  end

  # Resolve the finder options. Also takes scope into account.
  #--
  # FIXME: cleanup/refactor.
  #++
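  #
  # Illustrative example (assuming a plain class User mapped to an "og_user"
  # table, no scope and no schema inheritance):
  #
  #   resolve_options(User, :condition => 'age > 18', :order => 'name ASC', :limit => 10)
  #
  # returns roughly:
  #
  #   SELECT * FROM og_user WHERE age > 18 ORDER BY name ASC LIMIT 10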
  def resolve_options(klass, options)
    # Factor in scope.
    if scope = klass.get_scope
      scope = scope.dup
      scond = scope.delete(:condition)
      scope.update(options)
      options = scope
    end

    if sql = options[:sql]
      sql = "SELECT * FROM #{klass.table} " + sql unless sql =~ /SELECT/i
      return sql
    end

    tables = [klass::OGTABLE]

    if included = options[:include]
      join_conditions = []

      for name in [included].flatten
        if rel = klass.relation(name)
          target_table = rel[:target_class]::OGTABLE
          tables << target_table

          if rel.is_a?(JoinsMany)
            tables << rel[:join_table]
            owner_key, target_key = klass.ogmanager.store.join_table_keys(klass, rel[:target_class])
            join_conditions << "#{rel.join_table}.#{owner_key}=#{klass::OGTABLE}.#{rel.owner_class.primary_key} AND \
              #{rel.join_table}.#{target_key}=#{rel.target_class::OGTABLE}.#{rel.target_class.primary_key}"
          else
            join_conditions << "#{klass::OGTABLE}.#{rel.foreign_key}=#{target_table}.#{rel.target_class.primary_key}"
          end
        else
          raise 'Unknown relation name'
        end
      end

      fields = tables.collect { |t| "#{t}.*" }.join(',')
      update_condition options, join_conditions.join(' AND ')
    elsif fields = options[:select]
      # query the provided fields.
    else
      fields = '*'
    end

    if join_table = options[:join_table]
      tables << join_table
      update_condition options, options[:join_condition]
    end

    # Factor in scope in the conditions.
    update_condition(options, scond) if scond

    # rp: type is not set in all instances such as Class.first,
    # so this fix goes here for now.
    if ogtype = options[:type] || (klass.schema_inheritance_child? ? "#{klass}" : nil)
      update_condition options, "ogtype='#{ogtype}'"
    end

    sql = "SELECT #{fields} FROM #{tables.join(',')}"

    if condition = options[:condition] || options[:where]
      sql << " WHERE #{condition}"
    end

    if order = options[:order]
      sql << " ORDER BY #{order}"
    end

    resolve_limit_options(options, sql)

    if extra = options[:extra]
      sql << " #{extra}"
    end

    return sql
  end

  # Subclasses can override this if they need some other order.
  # This is needed because different backends require different
  # order of the keywords.
  def resolve_limit_options(options, sql)
    if limit = options[:limit]
      sql << " LIMIT #{limit}"

      if offset = options[:offset]
        sql << " OFFSET #{offset}"
      end
    end
  end

  # :section: Deserialization methods.

  # Read a field (column) from a result set row.
  def read_field
  end

  # Dynamically deserializes a result set row into an object.
  # Used for specialized queries or join queries. Please
  # note that this deserialization method is slower than the
  # precompiled og_read method.
  def read_row(obj, res, res_row, row)
    res.fields.each_with_index do |field, idx|
      obj.instance_variable_set "@#{field}", res_row[idx]
    end
  end

  # Deserialize the join relations.
  def read_join_relations(obj, res_row, row, join_relations)
    offset = obj.class.properties.size

    for rel in join_relations
      rel_obj = rel[:target_class].og_allocate(res_row, row)
      rel_obj.og_read(res_row, row, offset)
      offset += rel_obj.class.properties.size
      obj.instance_variable_set("@#{rel[:name]}", rel_obj)
    end
  end

  # Deserialize one object from the ResultSet.
  def read_one(res, klass, options = nil)
    return nil if res.blank?

    if options and join_relations = options[:include]
      join_relations = [join_relations].flatten.collect do |n|
        klass.relation(n)
      end
    end

    res_row = res.next

    # Causes STI classes to come back as the correct child class
    # if accessed from the superclass.
    klass = Og::Entity::entity_from_string(res_row.result.flatten[res_row.fieldnum('ogtype')]) if klass.schema_inheritance?
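
    # Note: og_allocate instantiates the object without running initialize;
    # for schema inheritance roots the generated og_allocate reads the
    # concrete class name from res[0] (see eval_og_allocate).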

    obj = klass.og_allocate(res_row, 0)

    if options and options[:select]
      read_row(obj, res, res_row, 0)
    else
      obj.og_read(res_row)
      read_join_relations(obj, res_row, 0, join_relations) if join_relations
    end

    return obj
  ensure
    res.close
  end

  # Deserialize all objects from the ResultSet.
  def read_all(res, klass, options = nil)
    return [] if res.blank?

    if options and join_relations = options[:include]
      join_relations = [join_relations].flatten.collect do |n|
        klass.relation(n)
      end
    end

    objects = []

    if options and options[:select]
      res.each_row do |res_row, row|
        obj = klass.og_allocate(res_row, row)
        read_row(obj, res, res_row, row)
        objects << obj
      end
    else
      res.each_row do |res_row, row|
        obj = klass.og_allocate(res_row, row)
        obj.og_read(res_row, row)
        read_join_relations(obj, res_row, row, join_relations) if join_relations
        objects << obj
      end
    end

    return objects
  ensure
    res.close
  end

  # Helper method that updates the condition string.
  def update_condition(options, cond, joiner = 'AND')
    if options[:condition]
      options[:condition] += " #{joiner} #{cond}"
    else
      options[:condition] = cond
    end
  end

end

end

# * George Moschovitis
# * Michael Neumann
# * Ghislain Mary
# * Ysabel
# * Guillaume Pierronnet
# * Rob Pitt