# * George Moschovitis
# (c) 2004-2005 Navel, all rights reserved.
# $Id: base.rb 17 2005-04-14 16:03:40Z gmosx $

require 'yaml'
require 'singleton'

require 'glue/property'
require 'glue/array'
require 'glue/time'
require 'glue/attribute'
require 'og'
require 'og/errors'

module Og

# An adapter communicates with the backend datastore.
# The adapters for all supported datastores extend this
# class. Typically, an RDBMS is used to implement a
# datastore.
#
# This is the base adapter implementation. Adapters for
# well-known RDBMS systems and other stores inherit
# from this class.
class Adapter
  include Singleton

  # A mapping between Ruby and backend datastore types.
  attr_accessor :typemap

  # A map for casting Ruby types to SQL-safe textual
  # representations.
  attr_accessor :typecast

  # Look up the adapter instance from the adapter name.
  def self.for_name(name)
    # gmosx: RDoc complains about this, so let's use an
    # eval, AAAAAAAARGH!
    # require "og/adapters/#{name}"
    eval %{
      require 'og/adapters/#{name}'
      return #{name.capitalize}Adapter.instance
    }
  end

  def initialize
    # The default mappings; these should be valid for most
    # RDBMS systems.
    @typemap = {
      Integer => 'integer',
      Fixnum => 'integer',
      Float => 'float',
      String => 'text',
      Time => 'timestamp',
      Date => 'date',
      TrueClass => 'boolean',
      Object => 'text',
      Array => 'text',
      Hash => 'text'
    }

    # The :s: marker will be replaced with the actual value
    # to be cast. The default value of the Hash handles all
    # other types (Object, Array, etc.).
    @typecast = Hash.new("'#\{#{self.class}.escape(:s:.to_yaml)\}'").update(
      Integer => "\#\{:s:\}",
      Float => "\#\{:s:\}",
      String => "'#\{#{self.class}.escape(:s:)\}'",
      Time => "'#\{#{self.class}.timestamp(:s:)\}'",
      Date => "'#\{#{self.class}.date(:s:)\}'",
      TrueClass => "#\{:s: ? \"'t'\" : 'NULL' \}"
    )
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  # :section: Utilities
  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Escape an SQL string.
  def self.escape(str)
    return nil unless str
    return str.gsub(/'/, "''")
  end

  # Convert a Ruby Time to an SQL timestamp.
  # TODO: Optimize this.
  def self.timestamp(time = Time.now)
    return nil unless time
    return time.strftime("%Y-%m-%d %H:%M:%S")
  end

  # Output a date in YYYY-mm-dd format.
  # TODO: Optimize this.
  def self.date(date)
    return nil unless date
    return "#{date.year}-#{date.month}-#{date.mday}"
  end

  # Parse an SQL timestamp.
  # TODO: Optimize this.
  def self.parse_timestamp(str)
    return nil unless str
    return Time.parse(str)
  end

  # Parse a date in YYYY-mm-dd format.
  # TODO: Optimize this.
  def self.parse_date(str)
    return nil unless str
    return Date.strptime(str)
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  # :section: Database methods
  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Create the database.
  def create_db(database, user = nil, password = nil)
    Logger.info "Creating database '#{database}'."
    Og.create_schema = true
  end

  # Drop the database.
  def drop_db(database, user = nil, password = nil)
    Logger.info "Dropping database '#{database}'."
  end

  # Create a new connection to the backend.
  def new_connection(db)
    return Connection.new(db)
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  # :section: O->R mapping methods and utilities.
  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Encode the name of the klass as an SQL-safe string.
  # The module separators are replaced with _ and NOT stripped
  # out, so that we can convert back to the original notation
  # if needed. The leading module, if available, is removed.
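  # For example (the class name is hypothetical):
  #
  #   Adapter.encode(Article)  # => "article"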
  def self.encode(klass)
    "#{klass.name.gsub(/^.*::/, "")}".gsub(/::/, "_").downcase
  end

  # The name of the SQL table where objects of this class
  # are stored. A prefix is needed to avoid collisions with
  # reserved words (for example, User maps to user, which
  # is reserved in PostgreSQL). The prefix should start
  # with an alphanumeric character to be compatible with
  # all RDBMS systems (most notably Oracle).
  #
  # You may want to override this method to map an existing
  # database schema using Og.
  def self.table(klass)
    "og_#{Og.table_prefix}#{encode(klass)}"
  end

  # The name of the join table for the two given classes.
  # A prefix is needed to avoid collisions with reserved
  # words (for example, User maps to user, which is
  # reserved in PostgreSQL). The prefix should start
  # with an alphanumeric character to be compatible with
  # all RDBMS systems (most notably Oracle).
  #
  # You may want to override this method to map an existing
  # database schema using Og.
  def self.join_table(klass1, klass2, field)
    "og_#{Og.table_prefix}j_#{encode(klass1)}_#{encode(klass2)}_#{field}"
  end

  # Return an SQL string evaluator for the property.
  # No need to optimize this; it is only used to precalculate code.
  # YAML is used to store general Ruby objects to be more
  # portable.
  #--
  # FIXME: add extra handling for float.
  #++
  def write_prop(p)
    if p.klass.ancestors.include?(Integer)
      return "#\{@#{p.symbol} || 'NULL'\}"
    elsif p.klass.ancestors.include?(Float)
      return "#\{@#{p.symbol} || 'NULL'\}"
    elsif p.klass.ancestors.include?(String)
      return %|#\{@#{p.symbol} ? "'#\{#{self.class}.escape(@#{p.symbol})\}'" : 'NULL'\}|
    elsif p.klass.ancestors.include?(Time)
      return %|#\{@#{p.symbol} ? "'#\{#{self.class}.timestamp(@#{p.symbol})\}'" : 'NULL'\}|
    elsif p.klass.ancestors.include?(Date)
      return %|#\{@#{p.symbol} ? "'#\{#{self.class}.date(@#{p.symbol})\}'" : 'NULL'\}|
    elsif p.klass.ancestors.include?(TrueClass)
      return "#\{@#{p.symbol} ? \"'t'\" : 'NULL' \}"
    else
      # gmosx: keep the '' for nil symbols.
      return %|#\{@#{p.symbol} ? "'#\{#{self.class}.escape(@#{p.symbol}.to_yaml)\}'" : "''"\}|
    end
  end

  # Return an evaluator for reading the property.
  # No need to optimize this; it is only used to precalculate code.
  def read_prop(p, idx)
    if p.klass.ancestors.include?(Integer)
      return "res[#{idx}].to_i"
    elsif p.klass.ancestors.include?(Float)
      return "res[#{idx}].to_f"
    elsif p.klass.ancestors.include?(String)
      return "res[#{idx}]"
    elsif p.klass.ancestors.include?(Time)
      return "#{self.class}.parse_timestamp(res[#{idx}])"
    elsif p.klass.ancestors.include?(Date)
      return "#{self.class}.parse_date(res[#{idx}])"
    elsif p.klass.ancestors.include?(TrueClass)
      return "('0' != res[#{idx}])"
    else
      return "YAML::load(res[#{idx}])"
    end
  end

  # Create the fields that correspond to the klass properties.
  # The generated fields array is used in create_table.
  # If the property has :sql metadata, it overrides the
  # default mapping. If the property has :extra_sql metadata,
  # the extra SQL is appended after the default mapping.
  def create_fields(klass)
    fields = []

    klass.__props.each do |p|
      klass.sql_index(p.symbol) if p.meta[:sql_index]

      field = "#{p.symbol}"

      if p.meta and p.meta[:sql]
        field << " #{p.meta[:sql]}"
      else
        field << " #{@typemap[p.klass]}"

        if p.meta
          # Set the default value (gmosx: not that useful in the
          # current implementation).
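          # As a hypothetical example, a property declared roughly as
          #   prop_accessor :age, Fixnum, :default => 18, :unique => true
          # would produce the field "age integer DEFAULT 18 NOT NULL UNIQUE".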
          if default = p.meta[:default]
            field << " DEFAULT #{default.inspect} NOT NULL"
          end

          # Set unique.
          field << " UNIQUE" if p.meta[:unique]

          # Attach extra SQL.
          if extra_sql = p.meta[:extra_sql]
            field << " #{extra_sql}"
          end
        end
      end

      fields << field
    end

    return fields
  end

  # Create the managed object table. The properties of the
  # object are mapped to the table columns. Additional SQL relations
  # and constraints are created (indices, sequences, etc.).
  def create_table(klass)
    raise 'Not implemented!'
  end

  # Returns the props that will be included in the insert query.
  # For some backends the oid should be stripped.
  def props_for_insert(klass)
    klass.__props
  end

  # Returns the code that actually inserts the object into the
  # database. Returns the code as a String.
  def insert_code(klass, sql, pre_cb, post_cb)
    raise 'Not implemented!'
  end

  # Generate the mapping of the database fields to the
  # object properties.
  def calc_field_index(klass, og)
    # Implement if needed.
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  # :section: Precompile lifecycle methods.
  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Precompile some code that gets executed all the time.
  # Deletion code is not precompiled, because it is not used
  # as frequently.
  def eval_lifecycle_methods(klass, db)
    eval_og_insert(klass, db)
    eval_og_update(klass, db)
    eval_og_read(klass, db)
  end

  # Generate the property for oid.
  def eval_og_oid(klass)
    klass.class_eval %{
      prop_accessor :oid, Fixnum, :sql => "integer PRIMARY KEY"
    }
  end

  # Precompile the insert code for the given class.
  # The generated code sets the oid when inserting!
  def eval_og_insert(klass, db)
    klass.class_eval %{
      def og_insert(conn)
        #{Aspects.gen_advice_code(:og_insert, klass.advices, :pre) if klass.respond_to?(:advices)}
        #{insert_code(klass, db)}
        #{Aspects.gen_advice_code(:og_insert, klass.advices, :post) if klass.respond_to?(:advices)}
      end
    }
  end

  # Precompile the update code for the given class.
  # Ignore the oid when updating!
  def eval_og_update(klass, db)
    props = klass.__props.reject { |p| :oid == p.symbol }

    updates = props.collect { |p| "#{p.name}=#{write_prop(p)}" }

    sql = "UPDATE #{klass::DBTABLE} SET #{updates.join(', ')} WHERE oid=#\{@oid\}"

    klass.class_eval %{
      def og_update(conn)
        #{Aspects.gen_advice_code(:og_update, klass.advices, :pre) if klass.respond_to?(:advices)}
        conn.exec "#{sql}"
        #{Aspects.gen_advice_code(:og_update, klass.advices, :post) if klass.respond_to?(:advices)}
      end
    }
  end

  # Precompile the code to read (deserialize) objects of the
  # given class from the backend. In order to allow for changing
  # field/attribute orders, we have to use a field mapping hash.
  def eval_og_read(klass, db)
    calc_field_index(klass, db)

    props = klass.__props
    code = []

    props.each do |p|
      if idx = db.managed_classes[klass].field_index[p.name]
        # More fault tolerant if a new field is added and it
        # doesn't exist in the database.
        code << "@#{p.name} = #{read_prop(p, idx)}"
      end
    end

    klass.class_eval %{
      def og_read(res, tuple = 0)
        #{Aspects.gen_advice_code(:og_read, klass.advices, :pre) if klass.respond_to?(:advices)}
        #{code.join('; ')}
        #{Aspects.gen_advice_code(:og_read, klass.advices, :post) if klass.respond_to?(:advices)}
      end
    }
  end
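  # As an illustration (all names are hypothetical, assuming no extra table
  # prefix), for a managed class Article with properties :oid (Fixnum) and
  # :title (String), the precompiled lifecycle methods generated above would
  # look roughly like:
  #
  #   def og_update(conn)
  #     conn.exec "UPDATE og_article SET title='...' WHERE oid=#{@oid}"
  #   end
  #
  #   def og_read(res, tuple = 0)
  #     @oid = res[0].to_i; @title = res[1]
  #   end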
end

# A connection to the database. This class defines the skeleton
# functionality; a store-specific implementation file (adapter)
# implements all the methods.
#--
# - support caching (memoize).
# - use prepared statements.
#++
class Connection
  # The Og database object.
  attr_reader :db

  # The actual connection to the backend store.
  attr_accessor :store

  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  # :section: Backend connection methods.
  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Initialize a connection to the database.
  def initialize(db)
    @db = db
    Logger.debug "Created DB connection." if $DBG
  end

  # Close the connection to the database.
  def close
    Logger.debug "Closed DB connection." if $DBG
  end

  # Create the managed object table. The properties of the
  # object are mapped to the table columns. Additional SQL relations
  # and constraints are created (indices, sequences, etc.).
  def create_table(klass)
    raise 'Not implemented!'
  end

  # Drop the managed object table.
  def drop_table(klass)
    exec "DROP TABLE #{klass::DBTABLE}"
  end

  # Prepare an SQL statement.
  def prepare(sql)
    raise 'Not implemented!'
  end

  # Execute an SQL query and return the result.
  def query(sql)
    raise 'Not implemented!'
  end

  # Execute an SQL query; no result returned.
  def exec(sql)
    raise 'Not implemented!'
  end
  alias_method :execute, :exec

  # Start a new transaction.
  def start
    exec 'START TRANSACTION'
  end

  # Commit a transaction.
  def commit
    exec 'COMMIT'
  end

  # Rollback a transaction.
  def rollback
    exec 'ROLLBACK'
  end

  # Transaction helper. In the transaction block, use the
  # yielded connection to access the backend.
  def transaction(&block)
    begin
      start
      yield(self)
      commit
    rescue => ex
      Logger.error "DB Error: ERROR IN TRANSACTION"
      Logger.error "#{ex}"
      Logger.error "#{ex.backtrace}"
      rollback
    end
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  # :section: Deserialization methods.
  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Is the given resultset valid?
  def valid_res?(res)
    return !(res.nil?)
  end

  # Read (deserialize) one row of the resultset.
  def read_one(res, klass)
    raise 'Not implemented!'
  end

  # Read (deserialize) all rows of the resultset.
  def read_all(res, klass)
    raise 'Not implemented!'
  end

  # Read the first column of the resultset as an Integer.
  def read_int(res, idx = 0)
    raise 'Not implemented!'
  end

  # Get a row from the resultset.
  def get_row(res)
    return res
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  # :section: Managed object methods.
  # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Save an object to the database. Insert if this is a new object,
  # or update if it is already stored in the database.
  def save(obj)
    if obj.oid
      # Object already inserted, update!
      obj.og_update(self)
    else
      # Not in the database, insert!
      obj.og_insert(self)
    end
  end
  alias_method :<<, :save
  alias_method :put, :save

  # Force insertion of a managed object.
  def insert(obj)
    obj.og_insert(self)
  end

  # Force update of a managed object.
  def update(obj)
    obj.og_update(self)
  end

  # Update only specific fields of the managed object.
  #
  # Input:
  #   update_sql = the SQL code to update the properties.
  #
  # WARNING: the object in memory is not updated.
  #--
  # TODO: should update the object in memory.
  #++
  def update_properties(update_sql, obj_or_oid, klass = nil)
    oid = obj_or_oid.to_i
    klass = obj_or_oid.class unless klass
    exec "UPDATE #{klass::DBTABLE} SET #{update_sql} WHERE oid=#{oid}"
  end
  alias_method :pupdate, :update_properties
  alias_method :update_property, :update_properties

  # Load an object from the database.
  #
  # Input:
  #   oid = the object oid, OR the object name.
  def load(oid, klass)
    if oid.to_i > 0 # a valid Fixnum?
      load_by_oid(oid, klass)
    else
      load_by_name(oid, klass)
    end
  end
  alias_method :get, :load

  # Load an object by oid.
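  # Example (Article is a hypothetical managed class, conn a Connection):
  #
  #   article = conn.load_by_oid(1, Article)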
  def load_by_oid(oid, klass)
    res = query "SELECT * FROM #{klass::DBTABLE} WHERE oid=#{oid}"
    read_one(res, klass)
  end
  alias_method :get_by_oid, :load_by_oid

  # Load an object by name.
  def load_by_name(name, klass)
    res = query "SELECT * FROM #{klass::DBTABLE} WHERE name='#{name}'"
    read_one(res, klass)
  end
  alias_method :get_by_name, :load_by_name

  # Load all objects of the given klass.
  # Used to be called 'collect' in an earlier version.
  def load_all(klass, extrasql = nil)
    res = query "SELECT * FROM #{klass::DBTABLE} #{extrasql}"
    read_all(res, klass)
  end
  alias_method :get_all, :load_all

  # Perform a standard SQL query against the database and
  # deserialize the results.
  def select(sql, klass)
    unless sql =~ /SELECT/i
      sql = "SELECT * FROM #{klass::DBTABLE} WHERE #{sql}"
    end

    res = query(sql)
    read_all(res, klass)
  end

  # Like #select, but optimized for a single result.
  def select_one(sql, klass)
    unless sql =~ /SELECT/i
      sql = "SELECT * FROM #{klass::DBTABLE} WHERE #{sql}"
    end

    res = query(sql)
    read_one(res, klass)
  end

  # Perform a count query.
  def count(sql, klass = nil)
    unless sql =~ /SELECT/i
      sql = "SELECT COUNT(*) FROM #{klass::DBTABLE} WHERE #{sql}"
    end

    res = query(sql)
    return read_int(res)
  end

  # Delete an object from the database. Always performs a deep delete.
  #
  # No need to optimize here with pregenerated code. Deletes are
  # not used as much as reads or writes.
  #
  # Input:
  #   obj_or_oid = Object or oid to delete.
  #   klass = Class of the object (can be nil if an object is passed).
  #
  #--
  # TODO: pre-evaluate for symmetry with the other methods.
  #++
  def delete(obj_or_oid, klass = nil, cascade = true)
    oid = obj_or_oid.to_i
    klass = obj_or_oid.class unless klass

    # This is a class callback!
    if klass.respond_to?(:og_pre_delete)
      klass.og_pre_delete(self, obj_or_oid)
    end

    # TODO: implement this as a stored procedure? naaah.
    # TODO: also handle many_to_many relations.
    transaction do |tx|
      tx.exec "DELETE FROM #{klass::DBTABLE} WHERE oid=#{oid}"

      if cascade and klass.__meta.include?(:descendants)
        klass.__meta[:descendants].each do |dclass, linkback|
          tx.exec "DELETE FROM #{dclass::DBTABLE} WHERE #{linkback}=#{oid}"
        end
      end
    end
  end
  alias_method :delete!, :delete

  protected

  # Handle an adapter exception.
  def handle_db_exception(ex, sql = nil)
    Logger.error "DB error #{ex}, [#{sql}]"
    Logger.error ex.backtrace.join("\n")
    raise SqlException.new(ex, sql) if Og.raise_db_exceptions
    # FIXME: should return :error or something.
    return nil
  end

end # class Connection

end # module Og
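# A rough usage sketch for the Connection API defined above. How the
# connection is obtained and how classes become managed depend on the
# concrete adapter and the Og configuration; the class Article below is
# hypothetical.
#
#   class Article
#     prop_accessor :title, String
#   end
#
#   article = Article.new
#   article.title = 'Hello'
#
#   conn.save(article)                          # og_insert or og_update
#   article = conn.load(article.oid, Article)   # fetch by oid
#   conn.delete(article.oid, Article)           # deep (cascading) delete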