spec/outputs/kafka_spec.rb in logstash-output-kafka-0.1.9 vs spec/outputs/kafka_spec.rb in logstash-output-kafka-0.1.10
- old
+ new
@@ -27,28 +27,28 @@
  end
  context 'when outputting messages' do
    it 'should send logstash event to kafka broker' do
      expect_any_instance_of(Kafka::Producer).to receive(:send_msg)
-       .with(simple_kafka_config['topic_id'], nil, event.to_hash.to_json)
+       .with(simple_kafka_config['topic_id'], nil, event.to_json)
      kafka = LogStash::Outputs::Kafka.new(simple_kafka_config)
      kafka.register
      kafka.receive(event)
    end
    it 'should support Event#sprintf placeholders in topic_id' do
      topic_field = 'topic_name'
      expect_any_instance_of(Kafka::Producer).to receive(:send_msg)
-       .with(event[topic_field], nil, event.to_hash.to_json)
+       .with(event[topic_field], nil, event.to_json)
      kafka = LogStash::Outputs::Kafka.new({'topic_id' => "%{#{topic_field}}"})
      kafka.register
      kafka.receive(event)
    end
    it 'should support Event#sprintf placeholders in partition_key_format' do
      partition_field = 'host'
      expect_any_instance_of(Kafka::Producer).to receive(:send_msg)
-       .with(simple_kafka_config['topic_id'], event[partition_field], event.to_hash.to_json)
+       .with(simple_kafka_config['topic_id'], event[partition_field], event.to_json)
      kafka = LogStash::Outputs::Kafka.new({'topic_id' => simple_kafka_config['topic_id'],
                                            'partition_key_format' => "%{#{partition_field}}"})
      kafka.register
      kafka.receive(event)
    end
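
For context (not shown in the diff itself): the changed expectations suggest the plugin now hands Kafka::Producer#send_msg the string returned by Event#to_json rather than Event#to_hash.to_json, and the stubbed call in the spec has to match whichever string the plugin actually sends. A minimal sketch of the difference between the two serializations, assuming the logstash-core Event API of that era (field values here are illustrative, not taken from the diff):

    require 'logstash/event'
    require 'json'

    event = LogStash::Event.new('message' => 'hello', 'host' => '172.0.0.1')

    # Old expectation: coerce to a plain Hash first, then JSON-encode it.
    # The @timestamp value is a LogStash::Timestamp object here, so its JSON
    # form depends on how that object encodes itself.
    old_payload = event.to_hash.to_json

    # New expectation: let the Event build its own JSON representation
    # (the timestamp is typically rendered as an ISO8601 string).
    new_payload = event.to_json

    # The two strings are not guaranteed to be identical, so the spec's
    # receive(:send_msg).with(...) expectation must use the same form as
    # the code path exercised by kafka.receive(event).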