# * George Moschovitis
# (c) 2004-2005 Navel, all rights reserved.
# $Id: adapter.rb 281 2005-03-10 12:24:14Z gmosx $

require 'yaml'
require 'singleton'
require 'time' # for Time.parse
require 'date' # for Date.strptime

require 'og/connection'

module Og

# An adapter communicates with the backend datastore.
# The adapters for all supported datastores extend this
# class. Typically, an RDBMS is used to implement a
# datastore.

class Adapter
  include Singleton

  # A mapping between Ruby and backend Datastore types.
  attr_accessor :typemap

  # A map for casting Ruby types to SQL-safe textual
  # representations.
  attr_accessor :typecast

  # Lookup the adapter instance from the adapter name.
  def self.for_name(name)
    require "og/adapters/#{name}"
    eval %{ return #{name.capitalize}Adapter.instance }
  end

  def initialize
    # The default mappings, should be valid for most RDBMS.
    @typemap = {
      Integer => 'integer',
      Fixnum => 'integer',
      Float => 'float',
      String => 'text',
      Time => 'timestamp',
      Date => 'date',
      TrueClass => 'boolean',
      Object => 'text',
      Array => 'text',
      Hash => 'text'
    }

    # The :s: is a marker that will be replaced with the
    # actual value to be casted. The default parameter of
    # the Hash handles all other types (Object, Array, etc).
    @typecast = Hash.new("'#\{#{self.class}.escape(:s:.to_yaml)\}'").update(
      Integer => "\#\{:s:\}",
      Float => "\#\{:s:\}",
      String => "'#\{#{self.class}.escape(:s:)\}'",
      Time => "'#\{#{self.class}.timestamp(:s:)\}'",
      Date => "'#\{#{self.class}.date(:s:)\}'",
      TrueClass => "#\{:s: ? \"'t'\" : 'NULL' \}"
    )
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  # :section: Utilities
  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Escape an SQL string.
  def self.escape(str)
    return nil unless str
    return str.gsub(/'/, "''")
  end

  # Convert a Ruby time to an SQL timestamp.
  # TODO: Optimize this.
  def self.timestamp(time = Time.now)
    return nil unless time
    return time.strftime("%Y-%m-%d %H:%M:%S")
  end

  # Output YYYY-mm-dd.
  # TODO: Optimize this.
  def self.date(date)
    return nil unless date
    return "#{date.year}-#{date.month}-#{date.mday}"
  end

  # Parse an SQL datetime.
  # TODO: Optimize this.
  def self.parse_timestamp(str)
    return nil unless str
    return Time.parse(str)
  end

  # Input YYYY-mm-dd.
  # TODO: Optimize this.
  def self.parse_date(str)
    return nil unless str
    return Date.strptime(str)
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  # :section: Database methods
  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Create the database.
  def create_db(database, user = nil, password = nil)
    Logger.info "Creating database '#{database}'."
  end

  # Drop the database.
  def drop_db(database, user = nil, password = nil)
    Logger.info "Dropping database '#{database}'."
  end

  # Create a new connection to the backend.
  def new_connection(db)
    return Connection.new(db)
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  # :section: O->R mapping methods and utilities.
  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Encode the name of the klass as an SQL-safe string.
  # The module separators are replaced with _ and NOT stripped
  # out, so that we can convert back to the original notation
  # if needed. The leading module, if present, is removed.
  def self.encode(klass)
    "#{klass.name.gsub(/^.*::/, "")}".gsub(/::/, "_").downcase
  end
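
  # As an illustration only (the class and inputs are hypothetical;
  # results follow the rules implemented above):
  #
  #   Adapter.encode(Blog::Article)         # => "article"
  #   Adapter.escape("O'Brien")             # => "O''Brien"
  #   Adapter.date(Date.new(2005, 3, 10))   # => "2005-3-10"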

  # The name of the SQL table where objects of this class
  # are stored. A prefix is needed to avoid collisions with
  # reserved words (for example User maps to user, which is
  # reserved in PostgreSQL). The prefix should start with an
  # alphanumeric character to be compatible with all RDBMS
  # (most notably Oracle).
  #
  # You may want to override this method to map an existing
  # database schema using Og.
  def self.table(klass)
    "og_#{Og.table_prefix}#{encode(klass)}"
  end

  # The name of the join table for the two given classes.
  # A prefix is needed to avoid collisions with reserved
  # words (for example User maps to user, which is reserved
  # in PostgreSQL). The prefix should start with an
  # alphanumeric character to be compatible with all RDBMS
  # (most notably Oracle).
  #
  # You may want to override this method to map an existing
  # database schema using Og.
  def self.join_table(klass1, klass2, field)
    "og_#{Og.table_prefix}j_#{encode(klass1)}_#{encode(klass2)}_#{field}"
  end

  # Return an SQL string evaluator for the property.
  # No need to optimize this, used only to precalculate code.
  # YAML is used to store general Ruby objects to be more
  # portable.
  #--
  # FIXME: add extra handling for float.
  #++
  def write_prop(p)
    if p.klass.ancestors.include?(Integer)
      return "#\{@#{p.symbol} || 'NULL'\}"
    elsif p.klass.ancestors.include?(Float)
      return "#\{@#{p.symbol} || 'NULL'\}"
    elsif p.klass.ancestors.include?(String)
      return %|#\{@#{p.symbol} ? "'#\{#{self.class}.escape(@#{p.symbol})\}'" : 'NULL'\}|
    elsif p.klass.ancestors.include?(Time)
      return %|#\{@#{p.symbol} ? "'#\{#{self.class}.timestamp(@#{p.symbol})\}'" : 'NULL'\}|
    elsif p.klass.ancestors.include?(Date)
      return %|#\{@#{p.symbol} ? "'#\{#{self.class}.date(@#{p.symbol})\}'" : 'NULL'\}|
    elsif p.klass.ancestors.include?(TrueClass)
      return "#\{@#{p.symbol} ? \"'t'\" : 'NULL' \}"
    else
      # gmosx: keep the '' for nil symbols.
      return %|#\{@#{p.symbol} ? "'#\{#{self.class}.escape(@#{p.symbol}.to_yaml)\}'" : "''"\}|
    end
  end

  # Return an evaluator for reading the property.
  # No need to optimize this, used only to precalculate code.
  def read_prop(p, idx)
    if p.klass.ancestors.include?(Integer)
      return "res[#{idx}].to_i"
    elsif p.klass.ancestors.include?(Float)
      return "res[#{idx}].to_f"
    elsif p.klass.ancestors.include?(String)
      return "res[#{idx}]"
    elsif p.klass.ancestors.include?(Time)
      return "#{self.class}.parse_timestamp(res[#{idx}])"
    elsif p.klass.ancestors.include?(Date)
      return "#{self.class}.parse_date(res[#{idx}])"
    elsif p.klass.ancestors.include?(TrueClass)
      return "('0' != res[#{idx}])"
    else
      return "YAML::load(res[#{idx}])"
    end
  end

  # Create the fields that correspond to the klass properties.
  # The generated fields array is used in create_table.
  # If the property has an :sql metadata this overrides the
  # default mapping. If the property has an :extra_sql metadata
  # the extra sql is appended after the default mapping.
  def create_fields(klass)
    fields = []

    klass.__props.each do |p|
      klass.sql_index(p.symbol) if p.meta[:sql_index]

      field = "#{p.symbol}"

      if p.meta and p.meta[:sql]
        field << " #{p.meta[:sql]}"
      else
        field << " #{@typemap[p.klass]}"

        if p.meta
          # Set the default value (gmosx: not that useful in the
          # current implementation).
          if default = p.meta[:default]
            field << " DEFAULT #{default.inspect} NOT NULL"
          end

          # Set unique.
          field << " UNIQUE" if p.meta[:unique]

          # Attach extra sql.
          if extra_sql = p.meta[:extra_sql]
            field << " #{extra_sql}"
          end
        end
      end

      fields << field
    end

    return fields
  end
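
  # As an illustration only (hypothetical properties, default typemap):
  # for a managed class declaring a String property :title and a Fixnum
  # property :hits with :unique => true, create_fields would return
  # entries such as:
  #
  #   ["title text", "hits integer UNIQUE"]
  #
  # The oid property contributes "oid integer PRIMARY KEY" through its
  # :sql metadata (see eval_og_oid below).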

  # Create the managed object table. The properties of the
  # object are mapped to the table columns. Additional SQL
  # relations and constraints are created (indices, sequences,
  # etc).
  def create_table(klass)
    raise 'Not implemented!'
  end

  # Returns the props that will be included in the insert query.
  # For some backends the oid should be stripped.
  def props_for_insert(klass)
    klass.__props
  end

  # Returns the code that actually inserts the object into the
  # database. Returns the code as a String.
  def insert_code(klass, sql, pre_cb, post_cb)
    raise 'Not implemented!'
  end

  # Generate the mapping of the database fields to the
  # object properties.
  def calc_field_index(klass, og)
    # Implement if needed.
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  # :section: Precompile lifecycle methods.
  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Precompile some code that gets executed all the time.
  # Deletion code is not precompiled, because it is not used
  # as frequently.
  def eval_lifecycle_methods(klass, db)
    eval_og_insert(klass, db)
    eval_og_update(klass, db)
    eval_og_read(klass, db)
  end

  # Generate the property for oid.
  def eval_og_oid(klass)
    klass.class_eval %{
      prop_accessor :oid, Fixnum, :sql => "integer PRIMARY KEY"
    }
  end

  # Precompile the insert code for the given class.
  # The generated code sets the oid when inserting!
  def eval_og_insert(klass, db)
    # Attach object callbacks.

    if klass.instance_methods.include?('og_pre_insert')
      pre_cb = 'og_pre_insert(conn);'
    else
      pre_cb = ''
    end

    if klass.instance_methods.include?('og_post_insert')
      post_cb = 'og_post_insert(conn);'
    else
      post_cb = ''
    end

    if klass.instance_methods.include?('og_pre_insert_update')
      pre_cb << 'og_pre_insert_update(conn);'
    end

    if klass.instance_methods.include?('og_post_insert_update')
      post_cb << 'og_post_insert_update(conn);'
    end

    # Attach observers.

    if observers = klass.__meta[:og_observers]
      observers.each_with_index do |o, idx|
        if o.is_a?(Class)
          obs = "#{o}.instance"
          o = o.instance
        else
          obs = "self.class.__meta[:og_observers][#{idx}]"
        end

        if o.respond_to?(:og_pre_insert)
          pre_cb << "#{obs}.og_pre_insert(conn, self);"
        end

        if o.respond_to?(:og_post_insert)
          post_cb << "#{obs}.og_post_insert(conn, self);"
        end

        if o.respond_to?(:og_pre_insert_update)
          pre_cb << "#{obs}.og_pre_insert_update(conn, self);"
        end

        if o.respond_to?(:og_post_insert_update)
          post_cb << "#{obs}.og_post_insert_update(conn, self);"
        end
      end
    end

    klass.class_eval %{
      def og_insert(conn)
        #{insert_code(klass, db, pre_cb, post_cb)}
      end
    }
  end
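
  # Roughly speaking (illustrative only; the actual SQL is produced by
  # the concrete adapter's insert_code, and the callbacks appear only
  # if the class or its observers define them), the precompiled method
  # for a hypothetical Article class could look like:
  #
  #   def og_insert(conn)
  #     og_pre_insert(conn)
  #     conn.exec "INSERT INTO og_article (title, body) VALUES ('...', '...')"
  #     og_post_insert(conn)
  #   end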

  # Precompile the update code for the given class.
  # Ignore the oid when updating!
  def eval_og_update(klass, db)
    props = klass.__props.reject { |p| :oid == p.symbol }

    updates = props.collect { |p| "#{p.name}=#{write_prop(p)}" }

    sql = "UPDATE #{klass::DBTABLE} SET #{updates.join(', ')} WHERE oid=#\{@oid\}"

    # Attach object callbacks.

    if klass.instance_methods.include?('og_pre_update')
      pre_cb = 'og_pre_update(conn);'
    else
      pre_cb = ''
    end

    if klass.instance_methods.include?('og_post_update')
      post_cb = 'og_post_update(conn);'
    else
      post_cb = ''
    end

    if klass.instance_methods.include?('og_pre_insert_update')
      pre_cb << 'og_pre_insert_update(conn);'
    end

    if klass.instance_methods.include?('og_post_insert_update')
      post_cb << 'og_post_insert_update(conn);'
    end

    # Attach observers.

    if observers = klass.__meta[:og_observers]
      observers.each_with_index do |o, idx|
        if o.is_a?(Class)
          obs = "#{o}.instance"
          o = o.instance
        else
          obs = "self.class.__meta[:og_observers][#{idx}]"
        end

        if o.respond_to?(:og_pre_update)
          pre_cb << "#{obs}.og_pre_update(conn, self);"
        end

        if o.respond_to?(:og_post_update)
          post_cb << "#{obs}.og_post_update(conn, self);"
        end

        if o.respond_to?(:og_pre_insert_update)
          pre_cb << "#{obs}.og_pre_insert_update(conn, self);"
        end

        if o.respond_to?(:og_post_insert_update)
          post_cb << "#{obs}.og_post_insert_update(conn, self);"
        end
      end
    end

    klass.class_eval %{
      def og_update(conn)
        #{pre_cb}
        conn.exec "#{sql}"
        #{post_cb}
      end
    }
  end

  # Precompile the code to read (deserialize) objects of the
  # given class from the backend. In order to allow for changing
  # field/attribute orders we have to use a field mapping hash.
  def eval_og_read(klass, db)
    calc_field_index(klass, db)

    props = klass.__props
    code = []

    props.each do |p|
      if idx = db.managed_classes[klass].field_index[p.name]
        # More fault tolerant if a new field is added and it
        # doesn't exist in the database.
        code << "@#{p.name} = #{read_prop(p, idx)}"
      end
    end

    # Attach object callbacks.

    if klass.instance_methods.include?('og_pre_read')
      pre_cb = 'og_pre_read(conn);'
    else
      pre_cb = ''
    end

    if klass.instance_methods.include?('og_post_read')
      post_cb = 'og_post_read(conn);'
    else
      post_cb = ''
    end

    # Attach observers.

    if observers = klass.__meta[:og_observers]
      observers.each_with_index do |o, idx|
        if o.is_a?(Class)
          obs = "#{o}.instance"
          o = o.instance
        else
          obs = "self.class.__meta[:og_observers][#{idx}]"
        end

        if o.respond_to?(:og_pre_read)
          pre_cb << "#{obs}.og_pre_read(conn, self);"
        end

        if o.respond_to?(:og_post_read)
          post_cb << "#{obs}.og_post_read(conn, self);"
        end
      end
    end

    klass.class_eval %{
      def og_read(res, tuple = nil)
        #{pre_cb}
        #{code.join('; ')}
        #{post_cb}
      end
    }
  end

end

end
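
# Example usage (illustrative; the adapter name must correspond to a
# file under og/adapters, e.g. 'mysql' resolving to MysqlAdapter, and
# db stands for the active Og database object):
#
#   adapter = Og::Adapter.for_name('mysql')
#   conn = adapter.new_connection(db)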