lib/new_relic/agent/agent.rb in newrelic_rpm-3.2.0.1 vs lib/new_relic/agent/agent.rb in newrelic_rpm-3.3.0.beta1
- line removed (present only in newrelic_rpm 3.2.0.1)
+ line added (new in newrelic_rpm 3.3.0.beta1)
@@ -35,11 +35,10 @@
@metric_ids = {}
@stats_engine = NewRelic::Agent::StatsEngine.new
@transaction_sampler = NewRelic::Agent::TransactionSampler.new
@sql_sampler = NewRelic::Agent::SqlSampler.new
@stats_engine.transaction_sampler = @transaction_sampler
- @stats_engine.sql_sampler = @sql_sampler
@error_collector = NewRelic::Agent::ErrorCollector.new
@connect_attempts = 0
@request_timeout = NewRelic::Control.instance.fetch('timeout', 2 * 60)
@@ -773,17 +772,19 @@
# method.
# Enables or disables transaction tracing based on the server's
# response, and pushes the related settings down to the samplers.
#
# server_enabled - truthy when the collector permits transaction
#                  samples (determined by subscription license).
# sample_rate    - forwarded to enable_random_samples! when random
#                  sampling is turned on.
def configure_transaction_tracer!(server_enabled, sample_rate)
  # Ask the server for permission to send transaction samples,
  # determined by subscription license.
  @transaction_sampler.config['enabled'] = server_enabled
  @sql_sampler.configure!
  # Samples are sent only when both the local config and the server allow it.
  @should_send_samples = @config_should_send_samples && server_enabled

  if @should_send_samples
    # NOTE(review): believed never true in practice — confirm before relying on it.
    enable_random_samples!(sample_rate) if @should_send_random_samples

    @transaction_sampler.slow_capture_threshold = @slowest_transaction_threshold

    log.debug "Transaction tracing threshold is #{@slowest_transaction_threshold} seconds."
  else
    log.debug "Transaction traces will not be sent to the New Relic service."
  end
end
@@ -860,10 +861,15 @@
@report_period = config_data['data_report_period']
@url_rules = config_data['url_rules']
@beacon_configuration = BeaconConfiguration.new(config_data)
@server_side_config_enabled = config_data['listen_to_server_config']
+ if @server_side_config_enabled
+ log.info "Using config from server"
+ log.debug "Server provided config: #{config_data.inspect}"
+ end
+
control.merge_server_side_config(config_data) if @server_side_config_enabled
config_transaction_tracer
log_connection!(config_data)
configure_transaction_tracer!(config_data['collect_traces'], config_data['sample_rate'])
configure_error_collector!(config_data['collect_errors'])
@@ -1061,10 +1067,14 @@
def harvest_and_send_slowest_sample
harvest_transaction_traces
unless @traces.empty?
now = Time.now
log.debug "Sending (#{@traces.length}) transaction traces"
+
+ # REMOVE THIS BEFORE SHIPPING
+ log.info "Sending tts with GUIDS #{@traces.collect{|t| t.guid}.join(",")}"
+
begin
options = { :keep_backtraces => true }
options[:record_sql] = @record_sql unless @record_sql == :off
if @transaction_sampler.explain_enabled
options[:explain_sql] = @transaction_sampler.explain_threshold
@@ -1132,19 +1142,19 @@
# big payloads get all the compression possible, to stay under
# the 2,000,000 byte post threshold
# Marshals +object+ and prepares it for posting to the collector.
#
# Payloads under 64KB are sent uncompressed ('identity'); anything
# larger is deflated at Zlib::DEFAULT_COMPRESSION so the post stays
# under the 2,000,000 byte threshold. (The old "best speed vs. best
# compression" split is gone — a single default level is used now.)
#
# Returns [payload_string, encoding], where encoding is 'identity'
# or 'deflate'.
def compress_data(object)
  dump = Marshal.dump(object)
  dump_size = dump.size

  # Small payloads aren't worth the CPU cost of compressing.
  return [dump, 'identity'] if dump_size < (64 * 1024)

  compressed_dump = Zlib::Deflate.deflate(dump, Zlib::DEFAULT_COMPRESSION)

  # Size-check the *compressed* payload so mongrel won't choke on big uploads.
  check_post_size(compressed_dump)

  [compressed_dump, 'deflate']
end
# Raises a PostTooBigException if the post_string is longer
# than the limit configured in the control object
def check_post_size(post_string)