lib/semantic_logger/appender/mongodb.rb in semantic_logger-2.10.0 vs lib/semantic_logger/appender/mongodb.rb in semantic_logger-2.11.0
- old
+ new
@@ -28,11 +28,11 @@
# message: 'Invalid value',
# stack_trace: []
# }
#
class MongoDB < SemanticLogger::Appender::Base
- attr_reader :db, :collection_name
+ attr_reader :db, :collection_name, :collection
attr_accessor :host_name, :write_concern, :application

# Create a MongoDB Appender instance
#
# SemanticLogger::Appender::MongoDB.new(:db => Mongo::Connection.new['database'])
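
This hunk promotes `collection` from a memoized helper (removed further down) to a plain attr_reader that `#reopen` assigns once. A minimal sketch of reading it, assuming the `mongo` 1.x driver shown above and a hypothetical database name:

    require 'mongo'
    require 'semantic_logger'

    db       = Mongo::Connection.new['my_logs']   # database name is illustrative
    appender = SemanticLogger::Appender::MongoDB.new(:db => db)

    # @collection is assigned in #reopen during initialize, not lazily on first use
    appender.collection.name   # => 'semantic_logger'
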
@@ -50,11 +50,11 @@
# Default: first part of the host name returned by Socket.gethostname
#
# :write_concern [Integer]
# Write concern to use
# see: http://docs.mongodb.org/manual/reference/write-concern/
- # Default: 0
+ # Default: 1
#
# :application [String]
# Name of the application to include in the document written to mongo
# Default: nil (None)
#
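
Raising the default write concern from 0 to 1 trades raw throughput for acknowledged writes: every insert now waits for the server's confirmation. A sketch of opting back into the old fire-and-forget behaviour, with an illustrative database name:

    appender = SemanticLogger::Appender::MongoDB.new(
      :db            => Mongo::Connection.new['my_logs'],
      :write_concern => 0   # unacknowledged writes, the 2.10.0 default
    )
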
@@ -81,25 +81,33 @@
# The Proc must return true or false
def initialize(params={}, &block)
@db = params[:db] || raise('Missing mandatory parameter :db')
@collection_name = params[:collection_name] || 'semantic_logger'
@host_name = params[:host_name] || Socket.gethostname.split('.').first
- @write_concern = params[:write_concern] || 0
+ @write_concern = params[:write_concern] || 1
@application = params[:application]
filter = params[:filter]

# Create a capped collection that by default holds up to 1GB of documents; :collection_max optionally caps the document count
@collection_size = params[:collection_size] || 1024**3
@collection_max = params[:collection_max]

+ reopen
+
# Create the collection and necessary indexes
create_indexes

# Set the log level and formatter
super(params[:level], filter, &block)
end

+ # After forking an active process, call #reopen to re-open
+ # the handles to resources
+ def reopen
+ @collection = db[@collection_name]
+ end
+
# Create the required capped collection
# Features of capped collection:
# * No indexes by default (not even on _id)
# * Documents cannot be deleted,
# * Document updates cannot make them any larger
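
The new `#reopen` exists for preforking servers: a MongoDB connection opened in the master process is not safe to share with forked children, so each worker should re-establish its handles. A sketch of wiring this into a Unicorn config, assuming the appender instance is reachable there and that `SemanticLogger.add_appender` accepts an appender instance as it does in this release line:

    mongo_appender = SemanticLogger::Appender::MongoDB.new(
      :db => Mongo::Connection.new['my_logs']   # illustrative database name
    )
    SemanticLogger.add_appender(mongo_appender)

    after_fork do |server, worker|
      mongo_appender.reopen   # re-acquire the collection handle in the child
    end
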
@@ -109,11 +117,11 @@
# Creates an index based on tags to support faster lookups
def create_indexes
options = {:capped => true, :size => @collection_size}
options[:max] = @collection_max if @collection_max
db.create_collection(collection_name, options)
- collection.ensure_index('tags')
+ db[@collection_name].ensure_index('tags')
end

# Purge all data from the capped collection by dropping the collection
# and recreating it.
# Also useful when the size of the capped collection needs to be changed
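
`create_indexes` now reaches the collection through `db[@collection_name]` instead of the removed `collection` helper; the tags index itself is unchanged, so tag lookups behave as before. A sketch of querying recent entries for one tag with the 1.x driver; the tag value is invented:

    cursor = appender.collection.find('tags' => 'payments').sort('$natural', -1).limit(10)
    cursor.each { |doc| puts doc['message'] }   # capped collections preserve insertion order
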
@@ -121,15 +129,10 @@
collection.drop
@collection = nil
create_indexes
end

- # Return the collection being used to write the log document to
- def collection
- @collection ||= db[collection_name]
- end
-
# Flush all pending logs to disk.
# Waits for all sent documents to be written to disk
def flush
db.get_last_error
end
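
One consequence of deleting the lazy getter: `purge_all` still sets `@collection = nil`, and nothing in `create_indexes` reassigns it, so the reader returns nil until `#reopen` runs again. A sketch of a safe purge under that reading of the code:

    appender.purge_all   # drop and recreate the capped collection
    appender.reopen      # reassign @collection; the lazy fallback is gone
    appender.flush       # db.get_last_error waits for outstanding writes
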
@@ -180,10 +183,10 @@
def log(log)
# Ensure minimum log level is met, and check filter
return false if (level_index > (log.level_index || 0)) || !include_message?(log)

# Insert log entry into Mongo
- collection.insert(formatter.call(log), :w=>@write_concern)
+ collection.insert(formatter.call(log), w: @write_concern)
true
end
end
end
\ No newline at end of file
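
Putting the pieces together, `#log` applies the appender's level and filter before writing with the now-acknowledged write concern. A sketch of a filtered appender; the level, filter, and regex are illustrative:

    appender = SemanticLogger::Appender::MongoDB.new(
      :db     => Mongo::Connection.new['my_logs'],
      :level  => :warn,   # level_index check drops :info and below
      :filter => Proc.new { |log| log.message !~ /heartbeat/ }   # include_message? check
    )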