begin
  require 'postgres'
rescue LoadError => ex
  Logger.error 'Ruby-PostgreSQL bindings are not installed!'
  Logger.error ex
end

#--
# Customize to make more compatible with Og.
#++

class PGconn # :nodoc: all
  # Lists all the tables within the database.

  def list_tables
    begin
      r = self.exec "SELECT c.relname FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE c.relkind='r' AND n.nspname NOT IN ('pg_catalog', 'pg_toast') AND pg_catalog.pg_table_is_visible(c.oid)"
    rescue Exception
      # Racing.
      return []
    end

    ret = r.result.flatten
    r.clear
    ret
  end

  # Returns true if a table exists within the database, false
  # otherwise.

  def table_exists?(table)
    #rp: this should be abstracted to the sql abstractor
    begin
      r = self.exec "SELECT c.relname FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE c.relkind='r' AND n.nspname NOT IN ('pg_catalog', 'pg_toast') AND pg_catalog.pg_table_is_visible(c.oid) AND c.relname='#{self.class.escape(table.to_s)}'"
    rescue Exception
      # Racing.
      return false
    end

    ret = r.result.size != 0
    r.clear
    ret
  end

  # Returns the PostgreSQL OID of a table within the database, or
  # nil if it doesn't exist. Mostly for internal usage.

  def table_oid(table)
    begin
      r = self.exec "SELECT c.oid FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE c.relkind='r' AND n.nspname NOT IN ('pg_catalog', 'pg_toast') AND pg_catalog.pg_table_is_visible(c.oid) AND c.relname='#{self.class.escape(table.to_s)}'"
    rescue Exception
      # Racing.
      return nil
    end

    ret = r.result.flatten.first
    r.clear
    ret
  end

  # Returns an array of arrays containing the list of fields within a
  # table. Each element contains two elements: the first is the field
  # name and the second is the field type. Returns nil if the table
  # does not exist.

  def table_field_list(table)
    return nil unless pg_oid = table_oid(table)

    r = self.exec "SELECT a.attname, pg_catalog.format_type(a.atttypid, a.atttypmod) FROM pg_catalog.pg_attribute a WHERE a.attrelid = '#{pg_oid}' AND a.attnum > 0 AND NOT a.attisdropped ORDER BY a.attnum"
    ret = r.result
    r.clear
    ret
  end

  # Returns a hash containing the foreign key constraints within a
  # table. The keys are constraint names and the values are the
  # constraint definitions.

  def table_foreign_keys(table)
    return nil unless pg_oid = table_oid(table)

    r = self.exec "SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) AS condef FROM pg_catalog.pg_constraint r WHERE r.conrelid = '#{pg_oid}' AND r.contype = 'f'"
    res = r.result
    ret = Hash.new
    res.each do |pair|
      ret[pair.first] = pair.last
    end
    r.clear
    ret
  end

  # Returns a hash keyed by table (as a string) with each value also
  # being a hash keyed by the constraint name (as a string) and the
  # value being a string that contains the constraint definition.
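  #
  # A sketch of the returned structure (table, constraint, and
  # definition values are illustrative, not taken from a real
  # database):
  #
  #   {
  #     "ogarticle" => {
  #       "ogc_ogarticle_user_oid" =>
  #         "FOREIGN KEY (user_oid) REFERENCES oguser(oid) ON UPDATE SET NULL ON DELETE SET NULL"
  #     }
  #   }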
  def all_foreign_keys
    loop_counter = 0
    loop_max = 5

    begin
      r = self.exec "SELECT c.relname, r.conname, pg_catalog.pg_get_constraintdef(r.oid, true) AS condef FROM pg_catalog.pg_class c LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace JOIN pg_catalog.pg_constraint r ON r.conrelid = c.oid WHERE c.relkind='r' AND r.contype='f' AND n.nspname NOT IN ('pg_catalog', 'pg_toast') AND pg_catalog.pg_table_is_visible(c.oid);"
    rescue RuntimeError => ex
      raise unless ex.message =~ /cache lookup failed for relation (\d+)/
      # Racing.
      # damaged_relation = $1
      # Logger.error "Got damage to #{damaged_relation}"
      loop_counter += 1

      if loop_counter > loop_max
        Logger.error "PostgreSQL had more than #{loop_max} cache errors. Your database is almost certainly corrupt, as pg_class does not match the PostgreSQL cache. Either use pg_dump to save the data, re-create the database, let Og rebuild the schema and use pg_restore to restore the data, or repair it by hand."
        exit
      end

      Logger.error "There is a problem with PostgreSQL's internal cache, retrying... (#{loop_counter} of #{loop_max})"
      # Retrying keeps setups that drop tables in parallel (so fast
      # that PostgreSQL's internal lookups fail) working.
      sleep 2
      retry
    end

    res = r.result
    ret = Hash.new
    res.each do |triple|
      ret[triple[0]] ||= Hash.new
      ret[triple[0]][triple[1]] = triple[2]
    end
    r.clear
    ret
  end
end

require 'og/store/sql'

#--
# Customize the standard postgres resultset to make it
# more compatible with Og.
#++

class PGresult # :nodoc: all
  def blank?
    0 == num_tuples
  end

  def next
    self
  end

  def each_row
    for row in (0...num_tuples)
      yield(self, row)
    end
  end

  def first_value
    val = getvalue(0, 0)
    clear
    return val
  end

  alias_method :close, :clear
end

module Og

module PsqlUtils
  include SqlUtils

  def escape(str)
    return nil unless str
    return PGconn.escape(str.to_s)
  end

  # TODO, mneumann:
  #
  # BLOBs are actually a lot faster (and use less storage) for large
  # data, I think, as they need not be encoded and decoded. I'd like
  # to have both. BYTEA is easier to handle than BLOBs, but if BLOBs
  # are implemented so that they are transparent to the user (as in
  # Ruby/DBI), I'd prefer that way.

  def blob(val)
    val.gsub(/[\000-\037\047\134\177-\377]/) do |b|
      "\\#{ b[0].to_s(8).rjust(3, '0') }"
    end
  end

  def parse_blob(val)
    return '' unless val

    val.gsub(/\\(\\|'|[0-3][0-7][0-7])/) do |s|
      if s.size == 2 then s[1, 1] else s[1, 3].oct.chr end
    end
  end
end

# A Store that persists objects into a PostgreSQL database.
# To read documentation about the methods, consult the documentation
# for SqlStore and Store.
#
# This is the reference Og store.
#
# === Design
#
# The getvalue interface is used instead of each for extra
# performance.

class PsqlStore < SqlStore
  extend PsqlUtils
  include PsqlUtils

  def self.create(options)
    # gmosx: system is used to avoid shell expansion.
    system 'createdb', options[:name], '-U', options[:user]
    super
  end

  def self.destroy(options)
    system 'dropdb', options[:name], '-U', options[:user]
    super
  end
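  # For reference, a sketch of the connection options used by the
  # class methods above and the PGconn.connect calls below (the keys
  # are taken from this file; the values are illustrative):
  #
  #   {
  #     :name => 'myapp', :user => 'postgres', :password => 'secret',
  #     :address => 'localhost', :port => 5432
  #   }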
  # Purges all tables from the database.

  def self.destroy_tables(options)
    conn = PGconn.connect(
      options[:address],
      options[:port],
      nil,
      nil,
      options[:name],
      options[:user].to_s,
      options[:password].to_s
    )
    conn.list_tables.each do |table|
      begin
        conn.exec "DROP TABLE #{table} CASCADE"
        Logger.debug "Dropped database table #{table}" if $DBG
      rescue RuntimeError => ex
        catch :ok do
          # Racing.
          throw :ok if ex.message =~ /tuple concurrently updated/
          throw :ok if ex.message =~ /does not exist/
          throw :ok if ex.message =~ /cache lookup failed/
          raise
        end
      end
    end
    conn.close
  end

  def initialize(options)
    super

    @typemap.update(Og::Blob => 'bytea')

    @conn = PGconn.connect(
      options[:address],
      options[:port],
      nil,
      nil,
      options[:name],
      options[:user].to_s,
      options[:password].to_s
    )

    schema_order = options[:schema_order]
    encoding = options[:encoding]
    min_messages = options[:min_messages]

    @conn.exec("SET search_path TO #{schema_order}") if schema_order
    @conn.exec("SET client_encoding TO '#{encoding}'") if encoding
    @conn.exec("SET client_min_messages TO '#{min_messages}'") if min_messages
  rescue => ex
    # gmosx: any idea how to better test this?
    if ex.to_s =~ /database .* does not exist/i
      Logger.info "Database '#{options[:name]}' not found!"
      self.class.create(options)
      retry
    end
    raise
  end

  def close
    @conn.close
    super
  end

  def enchant(klass, manager)
    if klass.schema_inheritance_child?
      klass.const_set 'OGSEQ', "#{table(klass.schema_inheritance_root_class)}_oid_seq"
    else
      klass.const_set 'OGSEQ', "#{table(klass)}_oid_seq"
    end

    if klass.ann.self.primary_key.symbol == :oid
      unless klass.properties.include? :oid
        klass.property :oid, Fixnum, :sql => 'serial PRIMARY KEY'
      end
    end

    super
  end

  def query(sql)
    Logger.debug sql if $DBG
    return @conn.exec(sql)
  rescue => ex
    handle_sql_exception(ex, sql)
  end

  def exec(sql)
    Logger.debug sql if $DBG
    @conn.exec(sql).clear
  rescue => ex
    handle_sql_exception(ex, sql)
  end

  def sql_update(sql)
    Logger.debug sql if $DBG
    res = @conn.exec(sql)
    changed = res.cmdtuples
    res.clear
    changed
  end

  # Start a new transaction.

  def start
    # neumann: works with earlier PSQL databases too.
    exec('BEGIN TRANSACTION') if @transaction_nesting < 1
    @transaction_nesting += 1
  end

  # Returns the Og::Manager that owns this store.

  def manager
    ObjectSpace.each_object(Og::Manager) do |manager|
      return manager if manager.store.__id__ == self.__id__
    end
    raise RuntimeError, "#{self.class} could not find its manager"
  end

  # Returns an array containing the constraints needed for this
  # relation. The array contains hashes with the format:
  #
  #   :table            => The name of the table to which the
  #                        constraint should be applied.
  #   :referenced_table => The name of the table which the foreign
  #                        key refers to.
  #   :fk               => The name of the field to turn into a
  #                        foreign key.
  #   :pk               => The primary key of the referenced table.
  #   :update           => The action that should be taken if the
  #                        primary key of a referenced row is changed.
  #   :delete           => The action that should be taken if a
  #                        referenced row is deleted.
  #   :name             => The name of the constraint to apply.
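  #
  # For example, a HasMany relation might produce something like this
  # (table and field names are illustrative, and the constraint name
  # assumes Og.table_prefix is 'og'):
  #
  #   [ { :fk => 'user_oid', :table => 'ogarticle',
  #       :referenced_table => 'oguser', :pk => 'oid',
  #       :update => 'SET NULL', :delete => 'SET NULL',
  #       :name => 'ogc_ogarticle_user_oid' } ]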
  def constraint_info(rel)
    if rel.join_table
      info = join_table_info(rel)
      constraints = [
        {
          :fk => info[:first_key],
          :referenced_table => info[:first_table],
          :table => rel.join_table,
          :pk => (rel.owner_class.primary_key.field || rel.owner_class.primary_key.symbol),
          :update => 'CASCADE',
          :delete => 'CASCADE'
        },
        {
          :fk => info[:second_key],
          :referenced_table => info[:second_table],
          :table => rel.join_table,
          :pk => (rel.target_class.primary_key.field || rel.target_class.primary_key.symbol),
          :update => 'CASCADE',
          :delete => 'CASCADE'
        }
      ]
    elsif rel.class == Og::HasMany
      constraints = [
        {
          :fk => rel.foreign_key,
          :table => rel.target_class::OGTABLE,
          :referenced_table => rel.owner_class::OGTABLE,
          :pk => (rel.owner_class.primary_key.field || rel.owner_class.primary_key.symbol),
          :update => 'SET NULL',
          :delete => 'SET NULL'
        }
      ]
    else
      constraints = [
        {
          :fk => rel.foreign_key,
          :table => rel.owner_class::OGTABLE,
          :referenced_table => rel.target_class::OGTABLE,
          :pk => (rel.target_class.primary_key.field || rel.target_class.primary_key.symbol),
          :update => 'SET NULL',
          :delete => 'SET NULL'
        }
      ]
    end

    constraints.each do |constraint|
      constraint[:name] = constraint_name(constraint)
    end

    # This filters out not-yet-enchanted entities; is there a better way?
    constraints.reject{|info| [info[:table], info[:referenced_table]].include?(:OGTABLE) }
  end

  # Returns a hash keyed by table (as a string) with each value also
  # being a hash keyed by the constraint name (as a string) and the
  # value being a string that contains the constraint definition.
  #
  # This format matches the actual constraints returned by the
  # all_foreign_keys method added to the PGconn class.

  def all_needed_constraints
    relations = manager.managed_classes.map{|klass| klass.relations}.flatten.uniq
    need_constraints = Hash.new
    relations.each do |relation|
      infos = constraint_info(relation)
      infos.each do |info|
        # Skip constraints we already know we need.
        next if need_constraints[info[:table]] and need_constraints[info[:table]].has_key?(info[:name])
        need_constraints[info[:table]] ||= Hash.new
        need_constraints[info[:table]][info[:name]] = constraint_definition(info)
      end
    end
    need_constraints
  end

  # Returns an SQL fragment containing the correct definition for a
  # foreign key constraint.

  def constraint_definition(info)
    "FOREIGN KEY (#{info[:fk]}) REFERENCES #{info[:referenced_table]}(#{info[:pk]}) ON UPDATE #{info[:update]} ON DELETE #{info[:delete]}"
  end

  # Works the same as all_needed_constraints but only acts on one
  # class, and returns the same hash as part of yet another hash with
  # two keys, :tables and :constraints. This is done to prevent having
  # to resolve the relations again later just to map tables.

  def needed_constraints(klass)
    need_constraints = Hash.new
    tables = Array.new
    (klass.relations + klass.resolve_remote_relations).each do |rel|
      constraint_info(rel).each do |info|
        tables.concat [info[:table], info[:referenced_table]]
        need_constraints[info[:table]] ||= Hash.new
        need_constraints[info[:table]][info[:name]] = constraint_definition(info)
      end
    end
    { :tables => tables.uniq, :constraints => need_constraints }
  end

  # Returns the appropriate constraint prefix for a foreign key
  # constraint.

  def constraint_prefix
    "#{Og.table_prefix}c"
  end

  # Returns the appropriate name for a constraint element generated
  # by the constraint_info method.
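  #
  # e.g. (assuming Og.table_prefix is 'og', so the prefix is 'ogc';
  # the table and field names are illustrative):
  #
  #   constraint_name(:table => 'ogarticle', :fk => 'user_oid')
  #   #=> "ogc_ogarticle_user_oid"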
  def constraint_name(hash)
    "#{constraint_prefix}_#{hash[:table]}_#{hash[:fk]}"
  end

  def needed_constraints_sql(klass = nil)
    if klass
      constraints = needed_constraints(klass)
      all_needed = constraints[:constraints]
      all_existing = Hash.new
      constraints[:tables].each do |table|
        all_existing[table] = @conn.table_foreign_keys(table)
      end
    else
      all_existing = @conn.all_foreign_keys
      all_needed = all_needed_constraints
    end

    drop_constraints = Array.new
    create_constraints = Array.new

    all_needed.each_pair do |table, constraints|
      constraints.each_pair do |name, definition|
        # If neither of these branches matches, the constraint already
        # exists and has the correct definition.
        if all_existing[table].nil? or all_existing[table][name].nil?
          # Does not exist in the database.
          create_constraints << "ALTER TABLE #{table} ADD CONSTRAINT #{name} #{definition}"
        elsif all_existing[table][name] != definition
          # Exists in the database and matches the object structure,
          # but has the wrong definition (unlikely to happen very
          # often).
          Logger.debug "PostgreSQL database contains a constraint on table '#{table}' named '#{name}' which is incorrectly defined and will be redefined (OLD: '#{all_existing[table][name]}', NEW: '#{definition}')" if $DBG
          drop_constraints << "ALTER TABLE #{table} DROP CONSTRAINT #{name}"
          create_constraints << "ALTER TABLE #{table} ADD CONSTRAINT #{name} #{definition}"
        end
      end
    end

    # You can't do this when managing classes separately without
    # spidering every other class managed by this store's manager, as
    # other classes can want relations within the same tables too.
    # Spidering support may be added at some point, but it isn't very
    # important now that the setup hooking makes these complicated and
    # convoluted routines rare.
    unless klass
      all_existing.each_pair do |table, constraints|
        constraints.each_key do |name|
          if all_needed[table].nil? or all_needed[table][name].nil?
            # Exists in the database but doesn't match the object
            # model at all.
            raise Exception if table.to_s.downcase == "table"
            Logger.debug "PostgreSQL database contains a constraint on table '#{table}' named '#{name}' which does not match the object model and will be deleted" if $DBG
            drop_constraints << "ALTER TABLE #{table} DROP CONSTRAINT #{name}"
          end
        end
      end
    end

    { :drop => drop_constraints, :create => create_constraints }
  end
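  # The hash returned by needed_constraints_sql has this shape (the
  # ALTER statements are illustrative):
  #
  #   { :drop   => ["ALTER TABLE ogarticle DROP CONSTRAINT ogc_ogarticle_user_oid"],
  #     :create => ["ALTER TABLE ogarticle ADD CONSTRAINT ogc_ogarticle_user_oid FOREIGN KEY (user_oid) REFERENCES oguser(oid) ON UPDATE SET NULL ON DELETE SET NULL"] }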
  # Takes a hash with constraints to drop and create and performs
  # the work.

  def create_constraints(param = nil)
    subsection_only = !!param
    sql_hash = param ? param : needed_constraints_sql
    Logger.debug "PostgreSQL processing foreign key constraints" if $DBG and not subsection_only

    started = Time.now
    deleted = 0
    created = 0
    nulled_relations = 0
    deleted_relations = 0

    sql_hash[:drop].each do |sql|
      begin
        @conn.exec(sql)
      rescue RuntimeError => ex
        raise unless ex.message =~ /does not exist/
      end
      deleted += 1
    end

    sql_hash[:create].each do |sql|
      con_retry = true
      begin
        @conn.exec(sql)
        created += 1
      rescue PGError, RuntimeError => ex
        # Racing.
        next if ex.message =~ /already exists/

        unless ex.message =~ /.*violates foreign key constraint.*/
          Logger.error "PostgreSQL connection returned an error for query #{sql}"
          raise
        end

        if @options[:evolve_schema] == true and @options[:evolve_schema_cautious] == false
          table, name, fk, referenced_table, pk = sql.match(/^ALTER TABLE (\S+) ADD CONSTRAINT (\S+) FOREIGN KEY \((\S+)\) REFERENCES ([^ (]+)[ (]+([^)]+)/).captures
          raise if [table, fk, pk, referenced_table].include?(nil)

          cleaner_sql = "UPDATE #{table} SET #{fk} = NULL WHERE #{fk} NOT IN (SELECT #{pk} FROM #{referenced_table})"
          begin
            @conn.exec(cleaner_sql)
            if cleaner_sql[0..5] == "UPDATE"
              nulled_relations += 1
            else
              deleted_relations += 1
            end
          rescue PGError, RuntimeError => ex
            if ex.message =~ /.*violates not-null constraint.*/
              cleaner_sql = "DELETE FROM #{table} WHERE #{fk} NOT IN (SELECT #{pk} FROM #{referenced_table})"
              retry
            end
            Logger.error "PostgreSQL connection returned an error for query '#{cleaner_sql}' which was attempting to tidy up ready for the query '#{sql}'"
            raise
          end

          Logger.error "There were relationships in table #{table} that did not exist, so they have been set to NULL (or deleted where that was not possible, i.e. for a join table)."

          if con_retry
            con_retry = false
            retry
          end
        else
          Logger.error "There are relationships in table #{table} that do not exist. Your database is corrupt. Please fix them, or enable :evolve_schema with cautious mode off and they will be fixed automatically."
        end
      end
    end

    finished = Time.now
    taken = Kernel.sprintf("%.2f", finished - started)

    broken_relations = nulled_relations + deleted_relations
    text = "PostgreSQL finished setting constraints. "

    if [0, 0, 0] == [deleted, created, broken_relations]
      return if subsection_only # Make less chatty for short calls.
      text << "No action was taken, "
    else
      text << "#{created} constraints were added, " if created != 0
      text << "#{deleted} constraints were deleted, " if deleted != 0
      if broken_relations != 0
        text.gsub!(/,([^,]+)$/, ' and \1')
        text << "#{broken_relations} relations were broken causing "
        if nulled_relations != 0
          text << "#{nulled_relations} relations to have non-existent foreign keys set to null"
          text << (deleted_relations == 0 ? ", " : " and ")
        end
        text << "#{deleted_relations} relations to have rows with non-existent foreign keys deleted, " if deleted_relations != 0
      end
    end

    text = text[0..-3].gsub(/,([^,]+)$/, ' and \1')
    text << " in #{taken} seconds."
    Logger.debug text if $DBG
  end

  # Called by Og.manager (in turn called by Og.setup) when Og.setup
  # has finished, allowing better processing of foreign key
  # constraints and possibly other enhancements.

  def post_setup
    create_constraints
  end

  # Deserialize one object from the ResultSet.

  def read_one(res, klass, options = nil)
    return nil if res.blank?

    if options and join_relations = options[:include]
      join_relations = [join_relations].flatten.collect do |n|
        klass.relation(n)
      end
    end

    res_row = res.next

    # Causes STI classes to come back as the correct child class
    # if accessed from the superclass.
    klass = Og::Entity::entity_from_string(res_row.result.flatten[res_row.fieldnum('ogtype')]) if klass.schema_inheritance?
    obj = klass.og_allocate(res_row, 0)

    if options and options[:select]
      read_row(obj, res, res_row, 0)
    else
      obj.og_read(res_row)
      read_join_relations(obj, res_row, 0, join_relations) if join_relations
    end

    return obj
  ensure
    res.close
  end
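  # A typical call path for the deserialization above (a sketch;
  # +store+ is a PsqlStore and +User+ an Og-managed class, both
  # illustrative):
  #
  #   res = store.query "SELECT * FROM #{User::OGTABLE} WHERE oid = 1"
  #   user = store.read_one(res, User)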
  private

  def create_table(klass)
    fields = fields_for_class(klass)

    unless @conn.table_exists? klass::OGTABLE
      sql = "CREATE TABLE #{klass::OGTABLE} (#{fields.join(', ')}"

      # Create table constraints.
      if constraints = klass.ann.self[:sql_constraint]
        sql << ", #{constraints.join(', ')}"
      end

      sql << ") WITHOUT OIDS;"

      # Create indices.
      if indices = klass.ann.self[:index]
        for data in indices
          idx, options = *data
          idx = idx.to_s
          pre_sql, post_sql = options[:pre], options[:post]
          idxname = idx.gsub(/ /, "").gsub(/,/, "_").gsub(/\(.*\)/, "")
          sql << " CREATE #{pre_sql} INDEX #{klass::OGTABLE}_#{idxname}_idx #{post_sql} ON #{klass::OGTABLE} (#{idx});"
        end
      end

      begin
        res = @conn.exec(sql)
        res.clear
        Logger.info "Created table '#{klass::OGTABLE}'."
      rescue RuntimeError => ex
        catch :ok do
          # Racing.
          throw :ok if ex.message =~ /duplicate key violates unique constraint "pg_class_relname_nsp_index"/
          throw :ok if ex.message =~ /already exists/
          raise
        end
      end
    else
      Logger.debug "Table #{klass::OGTABLE} already exists" if $DBG

      #rp: basic field interrogation
      # TODO: Add type checking.
      actual_fields = @conn.table_field_list(klass::OGTABLE).map {|pair| pair.first}

      # Always add new fields. Old fields are not destroyed by
      # default, because they might contain data you want back.
      fields.each do |needed_field|
        field_name = needed_field.split(' ').first
        next if actual_fields.include?(field_name)

        if @options[:evolve_schema] == true
          Logger.debug "Adding field '#{needed_field}' to '#{klass::OGTABLE}'" if $DBG
          sql = "ALTER TABLE #{klass::OGTABLE} ADD COLUMN #{needed_field}"
          begin
            @conn.exec(sql)
          rescue RuntimeError => ex
            raise unless ex.message =~ /already exists/
          end
        else
          Logger.info "WARNING: Table '#{klass::OGTABLE}' is missing field '#{needed_field}' and :evolve_schema is not set to true!"
        end
      end

      # Drop obsolete fields.
      needed_fields = fields.map {|f| f =~ /^([^ ]+)/; $1}
      actual_fields.each do |obsolete_field|
        next if needed_fields.include?(obsolete_field)

        if @options[:evolve_schema] == true and @options[:evolve_schema_cautious] == false
          sql = "ALTER TABLE #{klass::OGTABLE} DROP COLUMN #{obsolete_field}"
          begin
            @conn.exec(sql)
            Logger.debug "Removed obsolete field '#{obsolete_field}' from '#{klass::OGTABLE}'" if $DBG
          rescue RuntimeError => ex
            raise unless ex.message =~ /does not exist/
          end
        else
          Logger.info "WARNING: You have an obsolete field '#{obsolete_field}' on table '#{klass::OGTABLE}' and :evolve_schema is not set or is in cautious mode!"
        end
      end
    end
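    # The evolution behaviour above is driven by the store options; a
    # sketch of a setup that enables destructive evolution (option
    # values are illustrative):
    #
    #   Og.setup(
    #     :store => :psql, :name => 'myapp', :user => 'postgres',
    #     :evolve_schema => true, :evolve_schema_cautious => false
    #   )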
    # Create join tables if needed. Join tables are used in
    # 'many_to_many' relations.
    #
    # NOTE: the annotation-based lookup below reportedly misses the
    # self-join case. A relation-based alternative was once:
    #
    #   join_tables = klass.relations.reject{|rel| !rel.join_table}.map{|rel| join_table_info(rel)}

    if join_tables = klass.ann.self[:join_tables]
      for info in join_tables
        unless @conn.table_exists? info[:table]
          create_join_table_sql(info).each do |sql|
            begin
              res = @conn.exec(sql)
              res.clear
            rescue RuntimeError => ex
              # Racing.
              raise unless ex.message =~ /duplicate key violates unique constraint "pg_class_relname_nsp_index"/
            end
          end
          Logger.debug "Created join table '#{info[:table]}'." if $DBG
        else
          Logger.debug "Join table '#{info[:table]}' already exists." if $DBG
        end
      end
    end

    # If we are being called by Og.setup, we can use a much cleaner
    # method for constructing foreign key constraints.
    return if @options[:called_by_og_setup]

    # Strip out old constraints. This shouldn't always be necessary,
    # but must stay for now while glycerin is still bleeding-edge, to
    # fix changes and a nasty error that made it into the glycerin
    # developers' darcs repo (but NOT into any released version of
    # Nitro).
    unless @options[:leave_constraints] == true or @stripped_constraints
      Logger.debug "Stripping PostgreSQL foreign key constraints" if $DBG
      @conn.all_foreign_keys.each do |table, constraints|
        constraints.each_key do |constraint|
          prefix = constraint_prefix
          next unless constraint[0, prefix.size] == prefix
          begin
            @conn.exec "ALTER TABLE #{table} DROP CONSTRAINT #{constraint}"
          rescue Exception
          end
        end
      end
    end

    # Create SQL constraints.
    create_constraints(needed_constraints_sql(klass))
  end

  def drop_table(klass)
    # Foreign key constraints remove the need for manual cleanup of
    # related rows.
    exec "DROP TABLE #{klass.table} CASCADE"
  end

  def create_field_map(klass)
    begin
      res = @conn.exec "SELECT * FROM #{klass::OGTABLE} LIMIT 1"
    rescue RuntimeError => ex
      # Racing.
      raise unless ex.message =~ /does not exist/ or ex.message =~ /deleted while still in use/
      create_table(klass)
      retry
    end

    map = {}

    # Check if the field should be ignored.
    ignore = klass.ann[:self][:ignore_field] || klass.ann[:self][:ignore_fields] || klass.ann[:self][:ignore_columns]

    for field in res.fields
      field_name = field.to_sym
      unless (ignore and ignore.include?(field_name))
        map[field_name] = res.fieldnum(field)
      end
    end

    return map
  ensure
    res.clear if res
  end

  def read_prop(p, col)
    if p.klass.ancestors.include?(Integer)
      return "#{self.class}.parse_int(res.getvalue(row, #{col} + offset))"
    elsif p.klass.ancestors.include?(Float)
      return "#{self.class}.parse_float(res.getvalue(row, #{col} + offset))"
    elsif p.klass.ancestors.include?(String)
      return "res.getvalue(row, #{col} + offset)"
    elsif p.klass.ancestors.include?(Time)
      return "#{self.class}.parse_timestamp(res.getvalue(row, #{col} + offset))"
    elsif p.klass.ancestors.include?(Date)
      return "#{self.class}.parse_date(res.getvalue(row, #{col} + offset))"
    elsif p.klass.ancestors.include?(TrueClass)
      return %|('t' == res.getvalue(row, #{col} + offset))|
    elsif p.klass.ancestors.include?(Og::Blob)
      return "#{self.class}.parse_blob(res.getvalue(row, #{col} + offset))"
    else
      return "YAML.load(res.getvalue(row, #{col} + offset))"
    end
  end

  #--
  # TODO: create stored procedure.
  #++

  def eval_og_insert(klass)
    props = klass.properties.values.dup
    values = props.collect { |p| write_prop(p) }.join(',')

    if klass.schema_inheritance?
      props << Property.new(:symbol => :ogtype, :klass => String)
      values << ", '#{klass}'"
    end

    sql = "INSERT INTO #{klass::OGTABLE} (#{props.collect {|p| field_for_property(p)}.join(',')}) VALUES (#{values})"

    klass.class_eval %{
      def og_insert(store)
        #{::Aspects.gen_advice_code(:og_insert, klass.advices, :pre) if klass.respond_to?(:advices)}
        res = store.conn.exec "SELECT nextval('#{klass::OGSEQ}')"
        @#{klass.pk_symbol} = res.getvalue(0, 0).to_i
        res.clear
        store.conn.exec("#{sql}").clear
        #{::Aspects.gen_advice_code(:og_insert, klass.advices, :post) if klass.respond_to?(:advices)}
      end
    }
  end
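  # For a class User with properties oid and name, the eval above
  # defines roughly the following (a sketch; table, sequence, and
  # property names are illustrative):
  #
  #   def og_insert(store)
  #     res = store.conn.exec "SELECT nextval('oguser_oid_seq')"
  #     @oid = res.getvalue(0, 0).to_i
  #     res.clear
  #     store.conn.exec("INSERT INTO oguser (oid,name) VALUES (...)").clear
  #   end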
  def eval_og_allocate(klass)
    if klass.schema_inheritance?
      klass.module_eval %{
        def self.og_allocate(res, row = 0)
          Object.constant(res.getvalue(row, 0)).allocate
        end
      }
    else
      klass.module_eval %{
        def self.og_allocate(res, row = 0)
          self.allocate
        end
      }
    end
  end

  def read_row(obj, res, res_row, row)
    res.fields.each_with_index do |field, idx|
      obj.instance_variable_set "@#{field}", res.getvalue(row, idx)
    end
  end
end

end

# * George Moschovitis
# * Rob Pitt
# * Michael Neumann
# * Ysabel
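# A minimal end-to-end sketch of using this store (class, property,
# and option values are illustrative, not part of this file):
#
#   require 'og'
#
#   class Article
#     property :title, String
#   end
#
#   Og.setup(:store => :psql, :name => 'test', :user => 'postgres')
#
#   article = Article.new
#   article.title = 'Hello'
#   article.save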