lib/picky/sources/db.rb in picky-1.5.0 vs lib/picky/sources/db.rb in picky-1.5.1
- old
+ new
@@ -26,10 +26,12 @@
# The database connection options that were either passed in or loaded from the given file.
#
attr_reader :connection_options
+ @@traversal_id = :__picky_id
+
def initialize select_statement, options = { file: 'app/db.yml' }
@select_statement = select_statement
@database = create_database_adapter
@options = options
end
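
The added class variable @@traversal_id names the auto-increment column that 1.5.1 appends to every snapshot table. Centralizing the name lets each statement below interpolate it, and, unlike 1.5.0, the id column from the user's select now passes through the snapshot untouched. A minimal sketch of what the interpolation yields (the snapshot table name is hypothetical):

    traversal_id = :__picky_id
    origin = 'picky_books_index' # hypothetical snapshot table name
    puts "ALTER TABLE #{origin} ADD COLUMN #{traversal_id} SERIAL PRIMARY KEY"
    # => ALTER TABLE picky_books_index ADD COLUMN __picky_id SERIAL PRIMARY KEY
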
@@ -84,15 +86,13 @@
on_database.execute "DROP TABLE IF EXISTS #{origin}"
on_database.execute "CREATE TABLE #{origin} AS #{select_statement}"
# TODO Use rename_column ASAP.
#
if on_database.adapter_name == "PostgreSQL"
- on_database.execute "ALTER TABLE #{origin} RENAME COLUMN id TO indexed_id"
- on_database.execute "ALTER TABLE #{origin} ADD COLUMN id SERIAL PRIMARY KEY"
+ on_database.execute "ALTER TABLE #{origin} ADD COLUMN #{@@traversal_id} SERIAL PRIMARY KEY"
else
- on_database.execute "ALTER TABLE #{origin} CHANGE COLUMN id indexed_id INTEGER"
- on_database.execute "ALTER TABLE #{origin} ADD COLUMN id INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT"
+ on_database.execute "ALTER TABLE #{origin} ADD COLUMN #{@@traversal_id} INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT"
end
# Execute any special queries this type needs executed.
#
on_database.execute type.after_indexing if type.after_indexing
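
Note on this hunk: 1.5.0 renamed the selected id column to indexed_id and then re-added a fresh serial id, which clobbered the original column name; 1.5.1 drops the rename and only appends the @@traversal_id column, so the select's columns land in the snapshot unmodified. A sketch of the statement sequence the non-PostgreSQL branch now emits, assuming hypothetical table and select names:

    origin = 'picky_books_index'                     # hypothetical snapshot table
    select_statement = 'SELECT id, title FROM books' # hypothetical source select
    traversal_id = :__picky_id

    statements = [
      "DROP TABLE IF EXISTS #{origin}",
      "CREATE TABLE #{origin} AS #{select_statement}",
      "ALTER TABLE #{origin} ADD COLUMN #{traversal_id} INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT"
    ]
    statements.each { |sql| puts sql }
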
@@ -101,11 +101,11 @@
# Counts all the entries that are used for the index.
#
def count type # :nodoc:
connect_backend
- database.connection.select_value("SELECT COUNT(id) FROM #{snapshot_table_name(type)}").to_i
+ database.connection.select_value("SELECT COUNT(#{@@traversal_id}) FROM #{snapshot_table_name(type)}").to_i
end
#
#
def snapshot_table_name type # :nodoc:
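
count now counts over the traversal column instead of id. Because __picky_id is NOT NULL and a primary key, COUNT over it still returns the full row count, and the query survives selects whose id column contains NULLs or duplicates. The .to_i guards against adapters that hand select_value back as a String; a tiny illustration:

    # select_value may return a String, an Integer, or nil depending on adapter;
    # .to_i normalizes all three.
    ['42', 42, nil].each { |value| p value.to_i }
    # => 42, 42, 0
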
@@ -128,23 +128,23 @@
select_statement = harvest_statement_with_offset(type, category, offset)
# TODO Rewrite ASAP.
#
if database.connection.adapter_name == "PostgreSQL"
- id_key = 'indexed_id'
+ id_key = 'id'
text_key = category.from.to_s
database.connection.execute(select_statement).each do |hash|
- indexed_id, text = hash.values_at id_key, text_key
+ id, text = hash.values_at id_key, text_key
next unless text
text.force_encoding 'utf-8' # TODO Still needed? Or move to backend?
- yield indexed_id, text
+ yield id, text
end
else
- database.connection.execute(select_statement).each do |indexed_id, text|
+ database.connection.execute(select_statement).each do |id, text|
next unless text
text.force_encoding 'utf-8' # TODO Still needed? Or move to backend?
- yield indexed_id, text
+ yield id, text
end
end
end
# Builds a harvest statement for getting data to index.
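
This hunk is a rename of locals (indexed_id back to id) to match the harvest statement below, which once again selects the original id column. The adapter split is unchanged: the pg adapter yields string-keyed hashes from execute, so values_at extracts the fields, while other adapters yield plain arrays. A runnable illustration of both row shapes, with fabricated data:

    rows_pg    = [{ 'id' => '1', 'title' => 'Ulysses' }] # pg: string-keyed hashes
    rows_other = [[2, 'Dubliners']]                      # e.g. mysql2: arrays

    rows_pg.each do |hash|
      id, text = hash.values_at 'id', 'title'
      puts "#{id}: #{text}"
    end
    rows_other.each do |id, text|
      puts "#{id}: #{text}"
    end
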
@@ -154,16 +154,16 @@
def harvest_statement_with_offset type, category, offset # :nodoc:
statement = harvest_statement type, category
statement += statement.include?('WHERE') ? ' AND' : ' WHERE'
- "#{statement} st.id > #{offset} LIMIT #{chunksize}"
+ "#{statement} st.#{@@traversal_id} > #{offset} LIMIT #{chunksize}"
end
# The harvest statement used to pull data from the snapshot table.
#
def harvest_statement type, category # :nodoc:
- "SELECT indexed_id, #{category.from} FROM #{snapshot_table_name(type)} st"
+ "SELECT id, #{category.from} FROM #{snapshot_table_name(type)} st"
end
# The number of records that are loaded per chunk.
#
def chunksize # :nodoc:
\ No newline at end of file
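
harvest_statement_with_offset now paginates on the traversal column: WHERE st.__picky_id > offset LIMIT chunksize. Since __picky_id is freshly generated per snapshot, it is a dense auto-increment starting at 1, so a caller can advance the offset by chunksize each round. A self-contained sketch of that traversal loop, with fabricated rows and a stand-in fetch (the real chunksize default lies past the truncated end of this diff):

    # Fabricated snapshot rows as [__picky_id, text] pairs.
    SNAPSHOT = (1..7).map { |i| [i, "text #{i}"] }
    CHUNK = 3

    def fetch_chunk offset # stand-in for running harvest_statement_with_offset
      SNAPSHOT.select { |pid, _| pid > offset }.first CHUNK
    end

    offset = 0
    until (rows = fetch_chunk(offset)).empty?
      rows.each { |_pid, text| puts text }
      offset += CHUNK # dense, gap-free ids make fixed-step advancement safe
    end
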