lib/resque_cleaner.rb in resque-cleaner-0.0.1 vs lib/resque_cleaner.rb in resque-cleaner-0.0.2
- old
+ new
@@ -33,46 +33,41 @@
@failure
end
# Stats by date.
def stats_by_date(&block)
- jobs = select(&block)
- summary = {}
+ jobs, stats = select(&block), {}
jobs.each do |job|
date = job["failed_at"][0,10]
- summary[date] ||= 0
- summary[date] += 1
+ stats[date] ||= 0
+ stats[date] += 1
end
- if print?
- log too_many_message if @limiter.on?
- summary.keys.sort.each do |k|
- log "%s: %4d" % [k,summary[k]]
- end
- log "%10s: %4d" % ["total", @limiter.count]
- end
- summary
+ print_stats(stats) if print?
+ stats
end
# Stats by class.
def stats_by_class(&block)
- jobs = select(&block)
- summary = {}
+ jobs, stats = select(&block), {}
jobs.each do |job|
klass = job["payload"]["class"]
- summary[klass] ||= 0
- summary[klass] += 1
+ stats[klass] ||= 0
+ stats[klass] += 1
end
- if print?
- log too_many_message if @limiter.on?
- summary.keys.sort.each do |k|
- log "%15s: %4d" % [k,summary[k]]
- end
- log "%15s: %4d" % ["total", @limiter.count]
+ print_stats(stats) if print?
+ stats
+ end
+
+ # Prints the given stats hash sorted by key, followed by a total line.
+ def print_stats(stats)
+ log too_many_message if @limiter.on?
+ stats.keys.sort.each do |k|
+ log "%15s: %4d" % [k,stats[k]]
end
- summary
+ log "%15s: %4d" % ["total", @limiter.count]
end
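For orientation, a hedged usage sketch of the refactored stats methods follows; the `cleaner` instance and the filter blocks are illustrative assumptions, not part of this diff.

  # Illustrative sketch: `cleaner` is assumed to be a Resque::Plugins::ResqueCleaner
  # instance with some failed jobs on an "email" queue.
  by_date  = cleaner.stats_by_date  {|job| job["queue"] == "email"}
  # => e.g. {"2010/10/08" => 3, "2010/10/09" => 12}   (keys are failed_at[0,10])
  by_class = cleaner.stats_by_class {|job| job["queue"] == "email"}
  # => e.g. {"EmailJob" => 15}
  # When print? is true, both methods now delegate output to print_stats,
  # which logs one "%15s: %4d" line per key plus a "total" line.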
# Returns every job for which the block evaluates to true.
def select(&block)
jobs = @limiter.jobs
@@ -130,54 +125,44 @@
c = @limiter.maximum
redis.ltrim(:failed, -c, -1)
c
end
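The ltrim call in the context above keeps only the last c entries of the :failed list. A plain-Ruby sketch of the same negative-index range, using made-up data:

  # Array stand-in for the Redis list; LTRIM(:failed, -c, -1) keeps the last c items.
  failed = (1..5).to_a   # pretend these are 5 failed jobs, oldest first
  c = 3
  failed[-c..-1]         # => [3, 4, 5] -- the newest c entries survive, older ones are dropped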
- # Returns Proc which you can add a useful condition easily.
- # e.g.
- # cleaner.clear &cleaner.proc.retried
- # #=> Clears all jobs retried.
- # cleaner.select &cleaner.proc.after(10.days.ago).klass(EmailJob)
- # #=> Selects all EmailJob failed within 10 days.
- # cleaner.select &cleaner.proc{|j| j["exception"]=="RunTimeError"}.klass(EmailJob)
- # #=> Selects all EmailJob failed with RunTimeError.
- def proc(&block)
- FilterProc.new(&block)
- end
-
- # Provides typical proc you can filter jobs.
- class FilterProc < Proc
- def retried
- FilterProc.new {|job| self.call(job) && job['retried_at'].blank?}
+ # Extends the job (a Hash instance) with some helper methods.
+ module FailedJobEx
+ # Returns true if the job has already been retried. Otherwise returns
+ # false.
+ def retried?
+ self['retried_at'].blank?
end
- alias :requeued :retried
+ alias :requeued? :retried?
- def before(time)
+ # Returns true if the job was processed (failed) before the given time.
+ # Otherwise returns false.
+ # You can pass a Time object or a String.
+ def before?(time)
time = Time.parse(time) if time.is_a?(String)
- FilterProc.new {|job| self.call(job) && Time.parse(job['failed_at']) <= time}
+ Time.parse(self['failed_at']) < time
end
- def after(time)
+ # Returns true if the job was processed (failed) after the given time.
+ # Otherwise returns false.
+ # You can pass a Time object or a String.
+ def after?(time)
time = Time.parse(time) if time.is_a?(String)
- FilterProc.new {|job| self.call(job) && Time.parse(job['failed_at']) >= time}
+ Time.parse(self['failed_at']) >= time
end
- def klass(klass_or_name)
- FilterProc.new {|job| self.call(job) && job["payload"]["class"] == klass_or_name.to_s}
+ # Returns true if the class of the job matches. Otherwise returns false.
+ def klass?(klass_or_name)
+ self["payload"]["class"] == klass_or_name.to_s
end
- def queue(queue)
- FilterProc.new {|job| self.call(job) && job["queue"] == queue.to_s}
+ # Returns true if the queue of the job matches. Otherwise returns false.
+ def queue?(queue)
+ self["queue"] == queue.to_s
end
-
- def self.new(&block)
- if block
- super
- else
- super {|job| true}
- end
- end
end
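Where 0.0.1 chained FilterProc conditions, 0.0.2 extends each failed-job hash with FailedJobEx, so ordinary blocks can combine the predicates directly. A hedged sketch of the new style (the `cleaner` instance, the EmailJob class, and the dates are illustrative assumptions):

  # Illustrative sketch: plain blocks replace the old cleaner.proc / FilterProc chaining.
  cleaner.clear  {|job| job.retried?}
  cleaner.select {|job| job.klass?(EmailJob) && job.after?("2010-10-01")}
  cleaner.select {|job| job["exception"] == "RuntimeError" && job.queue?(:email)}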
# Through the Limiter class, you access only the last x (default 1000)
# jobs.
class Limiter
@@ -212,14 +197,15 @@
else
all( - count, count)
end
end
- # wraps Resque's all and returns always array.
+ # Wraps Resque's all and always returns an array.
def all(index=0,count=1)
jobs = @cleaner.failure.all( index, count)
jobs = [] unless jobs
jobs = [jobs] unless jobs.is_a?(Array)
+ jobs.each{|j| j.extend FailedJobEx}
jobs
end
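The added `jobs.each{|j| j.extend FailedJobEx}` line is what makes those predicates available on every job hash the Limiter returns: Object#extend mixes the module into that one object. A standalone illustration with a made-up job hash (the require and the fully qualified constant path are assumptions):

  require 'time'
  require 'resque_cleaner'   # assumed gem entry point
  # Sample data is made up; only Object#extend and the FailedJobEx helpers are exercised.
  job = {"failed_at" => "2010/10/09 12:00:00", "queue" => "email",
         "payload"   => {"class" => "EmailJob"}}
  job.extend(Resque::Plugins::ResqueCleaner::FailedJobEx)
  job.klass?("EmailJob")     # => true
  job.before?("2010-10-10")  # => true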
# Returns the start index of jobs in the :failed list.
def start_index