# code:
# * George Moschovitis
#
# (c) 2004 Navel, all rights reserved.
# $Id: psql.rb 159 2004-11-18 10:18:30Z gmosx $

require "time"
require "date"
require "yaml"
require "postgres"

require "og/backend"

module Og

# = Utils
#
# A collection of useful utilities.
#
module Utils

  # Escape an SQL string.
  #
  def self.escape(str)
    return nil unless str
    return PGconn.escape(str)
  end

  # Convert a ruby time to an sql timestamp.
  # TODO: Optimize this
  #
  def self.timestamp(time = Time.now)
    return nil unless time
    return time.strftime("%Y-%m-%d %H:%M:%S")
  end

  # Output YYYY-mm-dd
  # TODO: Optimize this
  #
  def self.date(date)
    return nil unless date
    return "#{date.year}-#{date.month}-#{date.mday}"
  end

  # Parse an sql datetime.
  # TODO: Optimize this
  #
  def self.parse_timestamp(str)
    return Time.parse(str)
  end

  # Input YYYY-mm-dd
  # TODO: Optimize this
  #
  def self.parse_date(str)
    return nil unless str
    return Date.strptime(str)
  end

  # Return an sql string evaluator for the property.
  # No need to optimize this, used only to precalculate code.
  # YAML is used to store general Ruby objects to be more
  # portable.
  #
  # FIXME: add extra handling for float.
  #
  def self.write_prop(p)
    if p.klass.ancestors.include?(Integer)
      return "#\{@#{p.symbol} || 'NULL'\}"
    elsif p.klass.ancestors.include?(Float)
      return "#\{@#{p.symbol} || 'NULL'\}"
    elsif p.klass.ancestors.include?(String)
      return "'#\{Og::Utils.escape(@#{p.symbol})\}'"
    elsif p.klass.ancestors.include?(Time)
      return %|#\{@#{p.symbol} ? "'#\{Og::Utils.timestamp(@#{p.symbol})\}'" : 'NULL'\}|
    elsif p.klass.ancestors.include?(Date)
      return %|#\{@#{p.symbol} ? "'#\{Og::Utils.date(@#{p.symbol})\}'" : 'NULL'\}|
    elsif p.klass.ancestors.include?(TrueClass)
      return "#\{@#{p.symbol} || 'NULL'\}"
    else
      return %|#\{@#{p.symbol} ? "'#\{Og::Utils.escape(@#{p.symbol}.to_yaml)\}'" : "''"\}|
    end
  end

  # Return an evaluator for reading the property.
  # No need to optimize this, used only to precalculate code.
  #
  def self.read_prop(p, idx)
    if p.klass.ancestors.include?(Integer)
      return "res.getvalue(tuple, #{idx}).to_i()"
    elsif p.klass.ancestors.include?(Float)
      return "res.getvalue(tuple, #{idx}).to_f()"
    elsif p.klass.ancestors.include?(String)
      return "res.getvalue(tuple, #{idx})"
    elsif p.klass.ancestors.include?(Time)
      return "Og::Utils.parse_timestamp(res.getvalue(tuple, #{idx}))"
    elsif p.klass.ancestors.include?(Date)
      return "Og::Utils.parse_date(res.getvalue(tuple, #{idx}))"
    elsif p.klass.ancestors.include?(TrueClass)
      return "('true' == res.getvalue(tuple, #{idx}))"
    else
      return "YAML::load(res.getvalue(tuple, #{idx}))"
    end
  end

  # Returns the code that actually inserts the object into the
  # database. Returns the code as a String.
  #
  def self.insert_code(klass, sql, pre_cb, post_cb)
    %{
      #{pre_cb}
      res = conn.db.query("SELECT nextval('#{klass::DBSEQ}')")
      @oid = res.getvalue(0, 0).to_i
      conn.exec "#{sql}"
      #{post_cb}
    }
  end

  # Generate the mapping of the database fields to the
  # object properties.
  #
  def self.calc_field_index(klass, og)
    res = og.query "SELECT * FROM #{klass::DBTABLE} LIMIT 1"
    meta = og.managed_classes[klass]

    for field in res.fields
      meta.field_index[field] = res.fieldnum(field)
    end
  end

  # Generate the property for oid.
  #
  def self.eval_og_oid(klass)
    klass.class_eval %{
      prop_accessor :oid, Fixnum, :sql => "integer PRIMARY KEY"
    }
  end

end
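# Illustrative sketch, not part of the Og API: Utils.write_prop and
# Utils.read_prop only call #symbol and #klass on the property object
# they receive, so a plain struct (hypothetical, for illustration only)
# is enough to preview the evaluator strings they precalculate:
#
#   Property = Struct.new(:symbol, :klass)
#
#   Og::Utils.write_prop(Property.new(:name, String))
#   # => the code fragment: '#{Og::Utils.escape(@name)}'
#
#   Og::Utils.read_prop(Property.new(:count, Fixnum), 2)
#   # => the code fragment: res.getvalue(tuple, 2).to_i()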
# = PsqlBackend
#
# Implements a PostgreSQL powered backend.
#
class PsqlBackend < Og::Backend

  # A mapping between Ruby and SQL types.
  #
  TYPEMAP = {
    Integer => "integer",
    Fixnum => "integer",
    Float => "float",
    String => "text",
    Time => "timestamp",
    Date => "date",
    TrueClass => "boolean",
    Object => "text",
    Array => "text",
    Hash => "text"
  }

  # Initialize the connection to the RDBMS.
  #
  def initialize(config)
    begin
      @conn = PGconn.connect(nil, nil, nil, nil, config[:database],
          config[:user], config[:password])
    rescue => ex
      # gmosx: any idea how to better test this?
      if ex.to_s =~ /database .* does not exist/i
        $log.info "Database '#{config[:database]}' not found!"
        PsqlBackend.create_db(config[:database], config[:user])
        retry
      end
      raise
    end
  end

  # Create the database.
  #
  def self.create_db(database, user = nil, password = nil)
    $log.info "Creating database '#{database}'."
    `createdb #{database} -U #{user}`
  end

  # Drop the database.
  #
  def self.drop_db(database, user = nil, password = nil)
    $log.info "Dropping database '#{database}'."
    `dropdb #{database} -U #{user}`
  end

  # Execute an SQL query and return the result.
  #
  def query(sql)
    $log.debug sql if $DBG
    return @conn.exec(sql)
  end

  # Execute an SQL query, no result returned.
  #
  def exec(sql)
    $log.debug sql if $DBG
    res = @conn.exec(sql)
    res.clear()
  end

  # Execute an SQL query and return the result. Wrapped in a rescue
  # block.
  #
  def safe_query(sql)
    $log.debug sql if $DBG
    begin
      return @conn.exec(sql)
    rescue => ex
      $log.error "DB error #{ex}, [#{sql}]"
      $log.error ex.backtrace
      return nil
    end
  end

  # Execute an SQL query, no result returned. Wrapped in a rescue
  # block.
  #
  def safe_exec(sql)
    $log.debug sql if $DBG
    begin
      res = @conn.exec(sql)
      res.clear()
    rescue => ex
      $log.error "DB error #{ex}, [#{sql}]"
      $log.error ex.backtrace
    end
  end

  # Check if it is a valid resultset.
  #
  def valid?(res)
    return !(res.nil? or 0 == res.num_tuples)
  end

  # Create the managed object table. The properties of the
  # object are mapped to the table columns. Additional sql relations
  # and constraints are created (indices, sequences, etc).
  #
  def create_table(klass)
    fields = []

    klass.__props.each do |p|
      klass.sql_index(p.symbol) if p.meta[:sql_index]

      field = "#{p.symbol}"

      if p.meta and p.meta[:sql]
        field << " #{p.meta[:sql]}"
      else
        field << " #{TYPEMAP[p.klass]}"
      end

      fields << field
    end

    sql = "CREATE TABLE #{klass::DBTABLE} (#{fields.join(', ')}"

    # Create table constraints.
    if klass.__meta and constrains = klass.__meta[:sql_constrain]
      sql << ", #{constrains.join(', ')}"
    end

    sql << ") WITHOUT OIDS;"

    # Create indices.
    if klass.__meta
      for data in klass.__meta[:sql_index]
        idx, options = *data
        idx = idx.to_s
        pre_sql, post_sql = options[:pre], options[:post]
        idxname = idx.gsub(/ /, "").gsub(/,/, "_").gsub(/\(.*\)/, "")
        sql << " CREATE #{pre_sql} INDEX #{klass::DBTABLE}_#{idxname}_idx #{post_sql} ON #{klass::DBTABLE} (#{idx});"
      end
    end

    begin
      exec(sql)
      $log.info "Created table '#{klass::DBTABLE}'."
    rescue => ex
      # gmosx: any idea how to better test this?
      if ex.to_s =~ /relation .* already exists/i
        $log.debug "Table already exists" if $DBG
      else
        raise
      end
    end

    # Create the sequence for this table. Even if the table
    # uses the oids_seq, attempt to create it. This makes
    # the system more fault tolerant.
    begin
      exec "CREATE SEQUENCE #{klass::DBSEQ}"
      $log.info "Created sequence '#{klass::DBSEQ}'."
    rescue => ex
      # gmosx: any idea how to better test this?
      if ex.to_s =~ /relation .* already exists/i
        $log.debug "Sequence already exists" if $DBG
      else
        raise
      end
    end
  end

  # Drop the managed object table.
  #
  def drop_table(klass)
    super
    exec "DROP SEQUENCE #{klass::DBSEQ}"
  end
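  # Illustrative sketch with hypothetical names: for a managed class whose
  # DBTABLE is "og_article" and DBSEQ is "og_article_oid_seq" (both constants
  # come from Og, not from this file), with an oid property mapped via
  # Utils.eval_og_oid and a String property :title, create_table above ends
  # up executing roughly:
  #
  #   CREATE TABLE og_article (oid integer PRIMARY KEY, title text) WITHOUT OIDS;
  #   CREATE SEQUENCE og_article_oid_seq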
  # Deserialize one row of the resultset.
  #
  def deserialize_one(res, klass)
    return nil unless valid?(res)

    # gmosx: Managed objects should have no params constructor.
    entity = klass.new()
    entity.og_deserialize(res, 0)
    # get_join_fields(res, 0, entity, join_fields) if join_fields

    res.clear()
    return entity
  end

  # Deserialize all rows of the resultset.
  #
  def deserialize_all(res, klass)
    return nil unless valid?(res)

    entities = []

    for tuple in (0...res.num_tuples)
      entity = klass.new()
      entity.og_deserialize(res, tuple)
      # get_join_fields(res, tuple, entity, join_fields) if join_fields
      entities << entity
    end

    res.clear()
    return entities
  end

  # Return a single integer value from the resultset.
  #
  def get_int(res, idx = 0)
    return res.getvalue(0, idx).to_i
  end

end

end # module
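# Minimal usage sketch with hypothetical values; Article stands in for any
# Og managed class (one that defines DBTABLE and responds to og_deserialize).
# The config keys match the ones read by PsqlBackend#initialize above.
#
#   backend = Og::PsqlBackend.new(
#     :database => "myapp",
#     :user     => "postgres",
#     :password => "secret"
#   )
#
#   res = backend.safe_query("SELECT * FROM #{Article::DBTABLE}")
#   articles = backend.deserialize_all(res, Article)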