lib/sup/source.rb in sup-0.8.1 vs lib/sup/source.rb in sup-0.9
- old
+ new
@@ -32,16 +32,16 @@
## simulate it.
##
## To write a new source, subclass this class, and implement:
##
## - start_offset
- ## - end_offset (exclusive!)
+ ## - end_offset (exclusive!) (or, #done?)
## - load_header offset
## - load_message offset
## - raw_header offset
## - raw_message offset
- ## - check
+ ## - check (optional)
## - next (or each, if you prefer): should return a message and an
## array of labels.
##
## ... where "offset" really means unique id. (You can tell I
## started with mbox.)
@@ -76,10 +76,11 @@
@archived = archived
@id = id
@dirty = false
end
+ ## overwrite me if you have a disk incarnation (currently used only for sup-sync-back)
def file_path; nil end
def to_s; @uri.to_s; end
def seek_to! o; self.cur_offset = o; end
def reset!; seek_to! start_offset; end
@@ -90,37 +91,42 @@
 ## check should throw a FatalSourceError or an OutOfSyncSourceError
## if it can detect a problem. it is called when the sup starts up
## to proactively notify the user of any source problems.
def check; end
+ ## yields successive offsets and labels, starting at #cur_offset.
+ ##
+ ## when implementing a source, you can overwrite either #each or #next. the
+ ## default #each just calls next over and over.
def each
self.cur_offset ||= start_offset
until done?
- n, labels = self.next
- raise "no message" unless n
- yield n, labels
+ offset, labels = self.next
+ yield offset, labels
end
end
- ## read a raw email header from a filehandle (or anything that responds to
- ## #gets), and turn it into a hash of key-value pairs.
+ ## utility method to read a raw email header from an IO stream and turn it
+ ## into a hash of key-value pairs. minor special semantics for certain headers.
##
- ## WARNING! THIS IS A SPEED-CRITICAL SECTION. Everything you do here will have
- ## a significant effect on Sup's processing speed of email from ALL sources.
+ ## THIS IS A SPEED-CRITICAL SECTION. Everything you do here will have a
+ ## significant effect on Sup's processing speed of email from ALL sources.
## Little things like string interpolation, regexp interpolation, += vs <<,
## all have DRAMATIC effects. BE CAREFUL WHAT YOU DO!
def self.parse_raw_email_header f
header = {}
last = nil
while(line = f.gets)
case line
## these three can occur multiple times, and we want the first one
when /^(Delivered-To|X-Original-To|Envelope-To):\s*(.*?)\s*$/i; header[last = $1.downcase] ||= $2
- ## mark this guy specially. not sure why i care.
+ ## regular header: overwrite (not that we should see more than one)
+ ## TODO: figure out whether just using the first occurrence changes
+ ## anything (which would simplify the logic slightly)
when /^([^:\s]+):\s*(.*?)\s*$/i; header[last = $1.downcase] = $2
- when /^\r*$/; break
+ when /^\r*$/; break # blank line signifies end of header
else
if last
header[last] << " " unless header[last].empty?
header[last] << line.strip
end
@@ -131,28 +137,99 @@
v = header[k] or next
next unless Rfc2047.is_encoded? v
header[k] = begin
Rfc2047.decode_to $encoding, v
rescue Errno::EINVAL, Iconv::InvalidEncoding, Iconv::IllegalSequence => e
- #Redwood::log "warning: error decoding RFC 2047 header (#{e.class.name}): #{e.message}"
+ #debug "warning: error decoding RFC 2047 header (#{e.class.name}): #{e.message}"
v
end
end
header
end
protected
## convenience function
def parse_raw_email_header f; self.class.parse_raw_email_header f end
-
+
def Source.expand_filesystem_uri uri
uri.gsub "~", File.expand_path("~")
end
def cur_offset= o
@cur_offset = o
@dirty = true
+ end
+end
+
+## if you have a @labels instance variable, include this
+## to serialize them nicely as an array, rather than as a
+## nasty set.
+module SerializeLabelsNicely
+ def before_marshal # can return an object
+ c = clone
+ c.instance_eval { @labels = @labels.to_a.map { |l| l.to_s } }
+ c
+ end
+
+ def after_unmarshal!
+ @labels = Set.new(@labels.map { |s| s.to_sym })
+ end
+end
+
+class SourceManager
+ include Singleton
+
+ def initialize
+ @sources = {}
+ @sources_dirty = false
+ @source_mutex = Monitor.new
+ end
+
+ def [](id)
+ @source_mutex.synchronize { @sources[id] }
+ end
+
+ def add_source source
+ @source_mutex.synchronize do
+ raise "duplicate source!" if @sources.include? source
+ @sources_dirty = true
+ max = @sources.max_of { |id, s| s.is_a?(DraftLoader) || s.is_a?(SentLoader) ? 0 : id }
+ source.id ||= (max || 0) + 1
+ ##source.id += 1 while @sources.member? source.id
+ @sources[source.id] = source
+ end
+ end
+
+ def sources
+ ## favour the inbox by listing non-archived sources first
+ @source_mutex.synchronize { @sources.values }.sort_by { |s| s.id }.partition { |s| !s.archived? }.flatten
+ end
+
+ def source_for uri; sources.find { |s| s.is_source_for? uri }; end
+ def usual_sources; sources.find_all { |s| s.usual? }; end
+
+ def load_sources fn=Redwood::SOURCE_FN
+ source_array = (Redwood::load_yaml_obj(fn) || []).map { |o| Recoverable.new o }
+ @source_mutex.synchronize do
+ @sources = Hash[*(source_array).map { |s| [s.id, s] }.flatten]
+ @sources_dirty = false
+ end
+ end
+
+ def save_sources fn=Redwood::SOURCE_FN
+ @source_mutex.synchronize do
+ if @sources_dirty || @sources.any? { |id, s| s.dirty? }
+ bakfn = fn + ".bak"
+ if File.exists? fn
+ File.chmod 0600, fn
+ FileUtils.mv fn, bakfn, :force => true unless File.exists?(bakfn) && File.size(fn) == 0
+ end
+ Redwood::save_yaml_obj sources, fn, true
+ File.chmod 0600, fn
+ end
+ @sources_dirty = false
+ end
end
end
end