lib/sup/index.rb in sup-0.6 vs lib/sup/index.rb in sup-0.7

- old
+ new

@@ -1,9 +1,11 @@
 ## the index structure for redwood. interacts with ferret.
 
 require 'fileutils'
 require 'ferret'
+require 'fastthread'
+
 begin
   require 'chronic'
   $have_chronic = true
 rescue LoadError => e
   Redwood::log "optional 'chronic' library not found (run 'gem install chronic' to install)"
@@ -21,16 +23,22 @@
     def method_missing m; @h[m.to_s] end
   end
 
   include Singleton
 
+  ## these two accessors should ONLY be used by single-threaded programs.
+  ## otherwise you will have a naughty ferret on your hands.
   attr_reader :index
   alias ferret index
+
   def initialize dir=BASE_DIR
+    @index_mutex = Monitor.new
+
     @dir = dir
     @sources = {}
     @sources_dirty = false
+    @source_mutex = Monitor.new
 
     wsa = Ferret::Analysis::WhiteSpaceAnalyzer.new false
     sa = Ferret::Analysis::StandardAnalyzer.new [], true
     @analyzer = Ferret::Analysis::PerFieldAnalyzer.new wsa
     @analyzer[:body] = sa
@@ -64,18 +72,23 @@
   def stop_lock_update_thread
     @lock_update_thread.kill if @lock_update_thread
     @lock_update_thread = nil
   end
 
+  def possibly_pluralize number_of, kind
+    "#{number_of} #{kind}" +
+      if number_of == 1 then "" else "s" end
+  end
+
   def fancy_lock_error_message_for e
-    secs = Time.now - e.mtime
-    mins = secs.to_i / 60
+    secs = (Time.now - e.mtime).to_i
+    mins = secs / 60
     time =
       if mins == 0
-        "#{secs.to_i} seconds"
+        possibly_pluralize secs , "second"
       else
-        "#{mins} minutes"
+        possibly_pluralize mins, "minute"
       end
 
     <<EOS
 Error: the sup index is locked by another process! User '#{e.user}' on host
 '#{e.host}' is running #{e.pname} with pid #{e.pid}. The process was alive
@@ -115,48 +128,54 @@
     save_sources
     save_index
   end
 
   def add_source source
-    raise "duplicate source!" if @sources.include? source
-    @sources_dirty = true
-    max = @sources.max_of { |id, s| s.is_a?(DraftLoader) || s.is_a?(SentLoader) ? 0 : id }
-    source.id ||= (max || 0) + 1
-    ##source.id += 1 while @sources.member? source.id
-    @sources[source.id] = source
+    @source_mutex.synchronize do
+      raise "duplicate source!" if @sources.include? source
+      @sources_dirty = true
+      max = @sources.max_of { |id, s| s.is_a?(DraftLoader) || s.is_a?(SentLoader) ? 0 : id }
+      source.id ||= (max || 0) + 1
+      ##source.id += 1 while @sources.member? source.id
+      @sources[source.id] = source
+    end
   end
 
   def sources
     ## favour the inbox by listing non-archived sources first
-    @sources.values.sort_by { |s| s.id }.partition { |s| !s.archived? }.flatten
+    @source_mutex.synchronize { @sources.values }.sort_by { |s| s.id }.partition { |s| !s.archived? }.flatten
   end
 
   def source_for uri; sources.find { |s| s.is_source_for? uri }; end
   def usual_sources; sources.find_all { |s| s.usual? }; end
 
   def load_index dir=File.join(@dir, "ferret")
     if File.exists? dir
       Redwood::log "loading index..."
-      @index = Ferret::Index::Index.new(:path => dir, :analyzer => @analyzer)
-      Redwood::log "loaded index of #{@index.size} messages"
+      @index_mutex.synchronize do
+        @index = Ferret::Index::Index.new(:path => dir, :analyzer => @analyzer)
+        Redwood::log "loaded index of #{@index.size} messages"
+      end
     else
       Redwood::log "creating index..."
-      field_infos = Ferret::Index::FieldInfos.new :store => :yes
-      field_infos.add_field :message_id, :index => :untokenized
-      field_infos.add_field :source_id
-      field_infos.add_field :source_info
-      field_infos.add_field :date, :index => :untokenized
-      field_infos.add_field :body
-      field_infos.add_field :label
-      field_infos.add_field :attachments
-      field_infos.add_field :subject
-      field_infos.add_field :from
-      field_infos.add_field :to
-      field_infos.add_field :refs
-      field_infos.add_field :snippet, :index => :no, :term_vector => :no
-      field_infos.create_index dir
-      @index = Ferret::Index::Index.new(:path => dir, :analyzer => @analyzer)
+      @index_mutex.synchronize do
+        field_infos = Ferret::Index::FieldInfos.new :store => :yes
+        field_infos.add_field :message_id, :index => :untokenized
+        field_infos.add_field :source_id
+        field_infos.add_field :source_info
+        field_infos.add_field :date, :index => :untokenized
+        field_infos.add_field :body
+        field_infos.add_field :label
+        field_infos.add_field :attachments
+        field_infos.add_field :subject
+        field_infos.add_field :from
+        field_infos.add_field :to
+        field_infos.add_field :refs
+        field_infos.add_field :snippet, :index => :no, :term_vector => :no
+        field_infos.create_index dir
+        @index = Ferret::Index::Index.new(:path => dir, :analyzer => @analyzer)
+      end
     end
   end
 
   ## Syncs the message to the index: deleting if it's already there,
   ## and adding either way. Index state will be determined by m.labels.
@@ -164,11 +183,13 @@
   ## docid and entry can be specified if they're already known.
   def sync_message m, docid=nil, entry=nil, opts={}
     docid, entry = load_entry_for_id m.id unless docid && entry
 
     raise "no source info for message #{m.id}" unless m.source && m.source_info
-    raise "trying to delete non-corresponding entry #{docid} with index message-id #{@index[docid][:message_id].inspect} and parameter message id #{m.id.inspect}" if docid && @index[docid][:message_id] != m.id
+    @index_mutex.synchronize do
+      raise "trying to delete non-corresponding entry #{docid} with index message-id #{@index[docid][:message_id].inspect} and parameter message id #{m.id.inspect}" if docid && @index[docid][:message_id] != m.id
+    end
 
     source_id =
       if m.source.is_a? Integer
         m.source
       else
@@ -229,13 +250,15 @@
       :to => (entry[:to] || (m.to + m.cc + m.bcc).map { |x| x.indexable_content }.join(" ")),
       :subject => (entry[:subject] || wrap_subj(Message.normalize_subj(m.subj))),
       :refs => (entry[:refs] || (m.refs + m.replytos).uniq.join(" ")),
     }
 
-    @index.delete docid if docid
-    @index.add_document d
-
+    @index_mutex.synchronize do
+      @index.delete docid if docid
+      @index.add_document d
+    end
+
     docid, entry = load_entry_for_id m.id
     ## this hasn't been triggered in a long time. TODO: decide whether it's still a problem.
     raise "just added message #{m.id.inspect} but couldn't find it in a search" unless docid
     true
   end
@@ -243,36 +266,41 @@
   def save_index fn=File.join(@dir, "ferret")
     # don't have to do anything, apparently
   end
 
   def contains_id? id
-    @index.search(Ferret::Search::TermQuery.new(:message_id, id)).total_hits > 0
+    @index_mutex.synchronize { @index.search(Ferret::Search::TermQuery.new(:message_id, id)).total_hits > 0 }
   end
 
-  def contains? m; contains_id? m.id; end
-  def size; @index.size; end
+  def contains? m; contains_id? m.id end
+  def size; @index_mutex.synchronize { @index.size } end
+  def empty?; size == 0 end
 
   ## you should probably not call this on a block that doesn't break
   ## rather quickly because the results can be very large.
   EACH_BY_DATE_NUM = 100
   def each_id_by_date opts={}
-    return if @index.size == 0 # otherwise ferret barfs ###TODO: remove this once my ferret patch is accepted
+    return if empty? # otherwise ferret barfs ###TODO: remove this once my ferret patch is accepted
     query = build_query opts
     offset = 0
     while true
-      results = @index.search(query, :sort => "date DESC", :limit => EACH_BY_DATE_NUM, :offset => offset)
+      limit = (opts[:limit])? [EACH_BY_DATE_NUM, opts[:limit] - offset].min : EACH_BY_DATE_NUM
+      results = @index_mutex.synchronize { @index.search query, :sort => "date DESC", :limit => limit, :offset => offset }
       Redwood::log "got #{results.total_hits} results for query (offset #{offset}) #{query.inspect}"
-      results.hits.each { |hit| yield @index[hit.doc][:message_id], lambda { build_message hit.doc } }
-      break if offset >= results.total_hits - EACH_BY_DATE_NUM
-      offset += EACH_BY_DATE_NUM
+      results.hits.each do |hit|
+        yield @index_mutex.synchronize { @index[hit.doc][:message_id] }, lambda { build_message hit.doc }
+      end
+      break if opts[:limit] and offset >= opts[:limit] - limit
+      break if offset >= results.total_hits - limit
+      offset += limit
     end
   end
 
   def num_results_for opts={}
-    return 0 if @index.size == 0 # otherwise ferret barfs ###TODO: remove this once my ferret patch is accepted
+    return 0 if empty? # otherwise ferret barfs ###TODO: remove this once my ferret patch is accepted
     q = build_query opts
-    index.search(q, :limit => 1).total_hits
+    @index_mutex.synchronize { @index.search(q, :limit => 1).total_hits }
   end
 
   ## yield all messages in the thread containing 'm' by repeatedly
   ## querying the index. yields pairs of message ids and
   ## message-building lambdas, so that building an unwanted message
@@ -302,14 +330,14 @@
       q.add_query sq, :must
       q.add_query Ferret::Search::RangeQuery.new(:date, :>= => date_min.to_indexable_s, :<= => date_max.to_indexable_s), :must
 
       q = build_query :qobj => q
 
-      p1 = @index.search(q).hits.map { |hit| @index[hit.doc][:message_id] }
+      p1 = @index_mutex.synchronize { @index.search(q).hits.map { |hit| @index[hit.doc][:message_id] } }
       Redwood::log "found #{p1.size} results for subject query #{q}"
 
-      p2 = @index.search(q.to_s, :limit => :all).hits.map { |hit| @index[hit.doc][:message_id] }
+      p2 = @index_mutex.synchronize { @index.search(q.to_s, :limit => :all).hits.map { |hit| @index[hit.doc][:message_id] } }
      Redwood::log "found #{p2.size} results in string form"
 
       pending = (pending + p1 + p2).uniq
     end
 
@@ -328,23 +356,25 @@
       q = build_query :qobj => q
 
       num_queries += 1
       killed = false
-      @index.search_each(q, :limit => :all) do |docid, score|
-        break if opts[:limit] && messages.size >= opts[:limit]
-        if @index[docid][:label].split(/\s+/).include?("killed") && opts[:skip_killed]
-          killed = true
-          break
+      @index_mutex.synchronize do
+        @index.search_each(q, :limit => :all) do |docid, score|
+          break if opts[:limit] && messages.size >= opts[:limit]
+          if @index[docid][:label].split(/\s+/).include?("killed") && opts[:skip_killed]
+            killed = true
+            break
+          end
+          mid = @index[docid][:message_id]
+          unless messages.member?(mid)
+            #Redwood::log "got #{mid} as a child of #{id}"
+            messages[mid] ||= lambda { build_message docid }
+            refs = @index[docid][:refs].split(" ")
+            pending += refs.select { |id| !searched[id] }
+          end
        end
-        mid = @index[docid][:message_id]
-        unless messages.member?(mid)
-          #Redwood::log "got #{mid} as a child of #{id}"
-          messages[mid] ||= lambda { build_message docid }
-          refs = @index[docid][:refs].split(" ")
-          pending += refs.select { |id| !searched[id] }
-        end
       end
     end
 
     if killed
       Redwood::log "thread for #{m.id} is killed, ignoring"
@@ -356,40 +386,48 @@
     end
   end
 
   ## builds a message object from a ferret result
   def build_message docid
-    doc = @index[docid]
-    source = @sources[doc[:source_id].to_i]
-    #puts "building message #{doc[:message_id]} (#{source}##{doc[:source_info]})"
-    raise "invalid source #{doc[:source_id]}" unless source
+    @index_mutex.synchronize do
+      doc = @index[docid]
 
-    fake_header = {
-      "date" => Time.at(doc[:date].to_i),
-      "subject" => unwrap_subj(doc[:subject]),
-      "from" => doc[:from],
-      "to" => doc[:to].split(/\s+/).join(", "), # reformat
-      "message-id" => doc[:message_id],
-      "references" => doc[:refs].split(/\s+/).map { |x| "<#{x}>" }.join(" "),
-    }
+      source = @source_mutex.synchronize { @sources[doc[:source_id].to_i] }
+      raise "invalid source #{doc[:source_id]}" unless source
 
-    Message.new :source => source, :source_info => doc[:source_info].to_i,
-                :labels => doc[:label].split(" ").map { |s| s.intern },
-                :snippet => doc[:snippet], :header => fake_header
+      #puts "building message #{doc[:message_id]} (#{source}##{doc[:source_info]})"
+
+      fake_header = {
+        "date" => Time.at(doc[:date].to_i),
+        "subject" => unwrap_subj(doc[:subject]),
+        "from" => doc[:from],
+        "to" => doc[:to].split(/\s+/).join(", "), # reformat
+        "message-id" => doc[:message_id],
+        "references" => doc[:refs].split(/\s+/).map { |x| "<#{x}>" }.join(" "),
+      }
+
+      Message.new :source => source, :source_info => doc[:source_info].to_i,
+                  :labels => doc[:label].split(" ").map { |s| s.intern },
+                  :snippet => doc[:snippet], :header => fake_header
+    end
   end
 
   def fresh_thread_id; @next_thread_id += 1; end
   def wrap_subj subj; "__START_SUBJECT__ #{subj} __END_SUBJECT__"; end
   def unwrap_subj subj; subj =~ /__START_SUBJECT__ (.*?) __END_SUBJECT__/ && $1; end
 
-  def drop_entry docno; @index.delete docno; end
+  def drop_entry docno; @index_mutex.synchronize { @index.delete docno } end
 
   def load_entry_for_id mid
-    results = @index.search(Ferret::Search::TermQuery.new(:message_id, mid))
-    return if results.total_hits == 0
-    docid = results.hits[0].doc
-    [docid, @index[docid]]
+    @index_mutex.synchronize do
+      results = @index.search Ferret::Search::TermQuery.new(:message_id, mid)
+      return if results.total_hits == 0
+      docid = results.hits[0].doc
+      entry = @index[docid]
+      entry_dup = entry.fields.inject({}) { |h, f| h[f] = entry[f]; h }
+      [docid, entry_dup]
    end
   end
 
   def load_contacts emails, h={}
     q = Ferret::Search::BooleanQuery.new true
     emails.each do |e|
@@ -401,37 +439,41 @@
     q.add_query Ferret::Search::TermQuery.new(:label, "spam"), :must_not
 
     Redwood::log "contact search: #{q}"
     contacts = {}
     num = h[:num] || 20
-    @index.search_each(q, :sort => "date DESC", :limit => :all) do |docid, score|
-      break if contacts.size >= num
-      #Redwood::log "got message #{docid} to: #{@index[docid][:to].inspect} and from: #{@index[docid][:from].inspect}"
-      f = @index[docid][:from]
-      t = @index[docid][:to]
+    @index_mutex.synchronize do
+      @index.search_each q, :sort => "date DESC", :limit => :all do |docid, score|
+        break if contacts.size >= num
+        #Redwood::log "got message #{docid} to: #{@index[docid][:to].inspect} and from: #{@index[docid][:from].inspect}"
+        f = @index[docid][:from]
+        t = @index[docid][:to]
 
-      if AccountManager.is_account_email? f
-        t.split(" ").each { |e| contacts[PersonManager.person_for(e)] = true }
-      else
-        contacts[PersonManager.person_for(f)] = true
+        if AccountManager.is_account_email? f
+          t.split(" ").each { |e| contacts[PersonManager.person_for(e)] = true }
+        else
+          contacts[PersonManager.person_for(f)] = true
+        end
       end
     end
 
     contacts.keys.compact
   end
 
   def load_sources fn=Redwood::SOURCE_FN
     source_array = (Redwood::load_yaml_obj(fn) || []).map { |o| Recoverable.new o }
-    @sources = Hash[*(source_array).map { |s| [s.id, s] }.flatten]
-    @sources_dirty = false
+    @source_mutex.synchronize do
+      @sources = Hash[*(source_array).map { |s| [s.id, s] }.flatten]
+      @sources_dirty = false
+    end
   end
 
   def has_any_from_source_with_label? source, label
     q = Ferret::Search::BooleanQuery.new
     q.add_query Ferret::Search::TermQuery.new("source_id", source.id.to_s), :must
     q.add_query Ferret::Search::TermQuery.new("label", label.to_s), :must
-    index.search(q, :limit => 1).total_hits > 0
+    @index_mutex.synchronize { @index.search(q, :limit => 1).total_hits > 0 }
   end
 
   protected
 
   ## do any specialized parsing
@@ -519,10 +561,22 @@
           chronic_failure = true
         end
       end
       subs = nil if chronic_failure
     end
+
+    ## limit:42 restrict the search to 42 results
+    subs = subs.gsub(/\blimit:(\S+)\b/) do
+      lim = $1
+      if lim =~ /^\d+$/
+        extraopts[:limit] = lim.to_i
+        ''
+      else
+        BufferManager.flash "Can't understand limit #{lim.inspect}!"
+        subs = nil
+      end
+    end
 
     if subs
       [@qparser.parse(subs), extraopts]
     else
       nil
@@ -548,19 +602,21 @@
     query.add_query Ferret::Search::TermQuery.new("label", "killed"), :must_not if opts[:skip_killed]
     query
   end
 
   def save_sources fn=Redwood::SOURCE_FN
-    if @sources_dirty || @sources.any? { |id, s| s.dirty? }
-      bakfn = fn + ".bak"
-      if File.exists? fn
+    @source_mutex.synchronize do
+      if @sources_dirty || @sources.any? { |id, s| s.dirty? }
+        bakfn = fn + ".bak"
+        if File.exists? fn
+          File.chmod 0600, fn
+          FileUtils.mv fn, bakfn, :force => true unless File.exists?(bakfn) && File.size(fn) == 0
+        end
+        Redwood::save_yaml_obj sources.sort_by { |s| s.id.to_i }, fn, true
         File.chmod 0600, fn
-        FileUtils.mv fn, bakfn, :force => true unless File.exists?(bakfn) && File.size(fn) == 0
       end
-      Redwood::save_yaml_obj sources.sort_by { |s| s.id.to_i }, fn, true
-      File.chmod 0600, fn
+      @sources_dirty = false
     end
-    @sources_dirty = false
   end
 end

end
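
The change repeated through every hunk above is the same locking idiom: ferret's index is not safe to share between threads (the new accessor comment says as much), so 0.7 guards @index with one Monitor and the @sources hash with another, and routes every read and write through synchronize. Monitor, unlike Mutex, is reentrant for the thread that already holds it, which matters here because synchronized methods call each other -- save_sources holds @source_mutex and then calls sources, which synchronizes on the same monitor. The 'fastthread' require presumably pulls in the faster C implementation of the Ruby 1.8 thread primitives. The snippet below is a minimal sketch of that idiom with made-up names; it is illustrative only and not part of sup:

require 'monitor'

## Illustrative only: a hypothetical stand-in for the ferret index, guarded
## the same way sup-0.7 guards @index and @sources.
class GuardedStore
  def initialize
    @mutex = Monitor.new      # reentrant, unlike Mutex
    @docs = {}                # the shared, thread-unsafe structure
  end

  def add id, doc
    @mutex.synchronize { @docs[id] = doc }
  end

  def contains? id
    @mutex.synchronize { @docs.include? id }
  end

  def size
    @mutex.synchronize { @docs.size }
  end

  ## nested synchronize on the same Monitor is fine for the thread that
  ## already holds it; a plain Mutex would raise a ThreadError here.
  def add_unless_present id, doc
    @mutex.synchronize { add id, doc unless contains? id }
  end
end

store = GuardedStore.new
threads = (1..4).map do |i|
  Thread.new { 100.times { |j| store.add_unless_present "#{i}-#{j}", "x" } }
end
threads.each { |t| t.join }
puts store.size    #=> 400

The other user-visible addition is the limit: query operator: parse_user_query_string stores the number in extraopts[:limit], and each_id_by_date then asks ferret for at most [EACH_BY_DATE_NUM, opts[:limit] - offset].min hits per search, so a query with limit:250 is fetched as pages of 100, 100 and 50 before the loop breaks.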