spec/codecs/netflow_spec.rb in logstash-codec-netflow-4.3.1 vs spec/codecs/netflow_spec.rb in logstash-codec-netflow-4.3.2
- old
+ new
@@ -15,10 +15,15 @@
[].tap do |events|
data.each { |packet| subject.decode(packet){|event| events << event}}
end
end
+ let(:is_LS_8) do
+ logstash_version = Gem::Version.create(LOGSTASH_CORE_VERSION)
+ Gem::Requirement.create('>= 8.0').satisfied_by?(logstash_version)
+ end
+
### NETFLOW v5
context "Netflow 5 valid 01" do
let(:data) do
# this netflow raw data was produced with softflowd and captured with netcat
@@ -26,15 +31,17 @@
# nc -k -4 -u -l 127.0.0.1 8765 > netflow5.dat
data = []
data << IO.read(File.join(File.dirname(__FILE__), "netflow5.dat"), :mode => "rb")
end
+ let(:micros) { is_LS_8 ? "328" : "" }
+
let(:json_events) do
events = []
events << <<-END
{
- "@timestamp": "2015-05-02T18:38:08.280Z",
+ "@timestamp": "2015-05-02T18:38:08.280#{micros}Z",
"netflow": {
"version": 5,
"flow_seq_num": 0,
"engine_type": 0,
"engine_id": 0,
@@ -46,12 +53,12 @@
"ipv4_next_hop": "0.0.0.0",
"input_snmp": 0,
"output_snmp": 0,
"in_pkts": 5,
"in_bytes": 230,
- "first_switched": "2015-06-21T11:40:52.194Z",
- "last_switched": "2015-05-02T18:38:08.476Z",
+ "first_switched": "2015-06-21T11:40:52.194#{micros}Z",
+ "last_switched": "2015-05-02T18:38:08.476#{micros}Z",
"l4_src_port": 54435,
"l4_dst_port": 22,
"tcp_flags": 16,
"protocol": 6,
"src_tos": 0,
@@ -64,11 +71,11 @@
}
END
events << <<-END
{
- "@timestamp": "2015-05-02T18:38:08.280Z",
+ "@timestamp": "2015-05-02T18:38:08.280#{micros}Z",
"netflow": {
"version": 5,
"flow_seq_num": 0,
"engine_type": 0,
"engine_id": 0,
@@ -80,12 +87,12 @@
"ipv4_next_hop": "0.0.0.0",
"input_snmp": 0,
"output_snmp": 0,
"in_pkts": 4,
"in_bytes": 304,
- "first_switched": "2015-06-21T11:40:52.194Z",
- "last_switched": "2015-05-02T18:38:08.476Z",
+ "first_switched": "2015-06-21T11:40:52.194#{micros}Z",
+ "last_switched": "2015-05-02T18:38:08.476#{micros}Z",
"l4_src_port": 22,
"l4_dst_port": 54435,
"tcp_flags": 24,
"protocol": 6,
"src_tos": 0,
@@ -833,15 +840,17 @@
let(:data) do
packets = []
packets << IO.read(File.join(File.dirname(__FILE__), "netflow5_test_microtik.dat"), :mode => "rb")
end
+ let(:micros) { is_LS_8 ? "932" : "" }
+
let(:json_events) do
events = []
events << <<-END
{
- "@timestamp": "2016-07-21T13:51:57.514Z",
+ "@timestamp": "2016-07-21T13:51:57.514#{micros}Z",
"netflow": {
"version": 5,
"flow_seq_num": 8140050,
"engine_type": 0,
"engine_id": 0,
@@ -853,12 +862,12 @@
"ipv4_next_hop": "192.168.0.1",
"input_snmp": 13,
"output_snmp": 46,
"in_pkts": 13,
"in_bytes": 11442,
- "first_switched": "2016-07-21T13:51:42.254Z",
- "last_switched": "2016-07-21T13:51:42.254Z",
+ "first_switched": "2016-07-21T13:51:42.254#{micros}Z",
+ "last_switched": "2016-07-21T13:51:42.254#{micros}Z",
"l4_src_port": 80,
"l4_dst_port": 51826,
"tcp_flags": 82,
"protocol": 6,
"src_tos": 40,
@@ -1328,11 +1337,11 @@
"netflow": {
"icmpTypeCodeIPv4": 0,
"ixiaDstLongitude": 100.33540344238281,
"ixiaHttpUserAgent": "",
"ixiaDeviceName": "unknown",
- "flowStartMilliseconds": "2018-10-25T12:24:19.881Z",
+ "flowStartMilliseconds": "2018-10-25T12:24:19.882Z",
"destinationIPv4Address": "202.170.60.247",
"ixiaDeviceId": 0,
"ixiaL7AppName": "unknown",
"ixiaBrowserId": 0,
"ixiaDstLatitude": 5.411200046539307,
@@ -2027,20 +2036,23 @@
data = []
data << IO.read(File.join(File.dirname(__FILE__), "ipfix_test_netscaler_tpl.dat"), :mode => "rb")
data << IO.read(File.join(File.dirname(__FILE__), "ipfix_test_netscaler_data.dat"), :mode => "rb")
end
+ # in LS 8 the precision is up to nanos; in LS 7 it is up to millis
+ let(:nanos) { is_LS_8 ? "128468" : "" }
+
let(:json_events) do
events = []
events << <<-END
{
"@timestamp": "2016-11-11T12:09:19.000Z",
"netflow": {
"netscalerHttpReqUserAgent": "Mozilla/5.0 (Commodore 64; kobo.com) Gecko/20100101 Firefox/75.0",
"destinationTransportPort": 443,
"netscalerHttpReqCookie": "beer=123456789abcdefghijklmnopqrstuvw; AnotherCookie=1234567890abcdefghijklmnopqr; Shameless.Plug=Thankyou.Rakuten.Kobo.Inc.For.Allowing.me.time.to.work.on.this.and.contribute.back.to.the.community; Padding=aaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbccccccccccccccddddddddddddddddddddddeeeeeeeeeeeeeeeeeeeeeffffffffffffffffffffffgggggggggggggggggggggggghhhhhhhhhhhhhhhhhiiiiiiiiiiiiiiiiiiiiiijjjjjjjjjjjjjjjjjjjjjjjjkkkkkkkkkkkkkkkkkklllllllllllllllmmmmmmmmmm; more=less; GJquote=There.is.no.spoon; GarrySays=Nice!!; LastPadding=aaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbcccccccccccccccccccdddddddddddeeeeeeee",
- "flowEndMicroseconds": "2016-11-11T12:09:19.000Z",
+ "flowEndMicroseconds": "2016-11-11T12:09:19.000#{nanos}Z",
"netscalerHttpReqUrl": "/aa/bb/ccccc/ddddddddddddddddddddddddd",
"sourceIPv4Address": "192.168.0.1",
"netscalerHttpReqMethod": "GET",
"netscalerHttpReqHost": "www.kobo.com",
"egressInterface": 2147483651,
@@ -2055,11 +2067,11 @@
"destinationIPv4Address": "10.0.0.1",
"observationPointId": 167954698,
"netscalerHttpReqVia": "1.1 akamai.net(ghost) (AkamaiGHost)",
"netscalerConnectionId": 14460661,
"tcpControlBits": 24,
- "flowStartMicroseconds": "2016-11-11T12:09:19.000Z",
+ "flowStartMicroseconds": "2016-11-11T12:09:19.000#{nanos}Z",
"ingressInterface": 8,
"version": 10,
"packetDeltaCount": 2,
"netscalerUnknown330": 0,
"netscalerConnectionChainID": "00e0ed1c9ca80300efb42558596b0800",
@@ -2083,11 +2095,10 @@
it "should decode raw data" do
expect(decode.size).to eq(3)
expect(decode[0].get("[netflow][version]")).to eq(10)
expect(decode[0].get("[netflow][sourceIPv4Address]")).to eq('192.168.0.1')
expect(decode[0].get("[netflow][destinationIPv4Address]")).to eq('10.0.0.1')
- expect(decode[0].get("[netflow][flowEndMicroseconds]")).to eq('2016-11-11T12:09:19.000Z')
expect(decode[0].get("[netflow][netscalerConnectionId]")).to eq(14460661)
expect(decode[1].get("[netflow][version]")).to eq(10)
expect(decode[1].get("[netflow][flowId]")).to eq(14460662)
expect(decode[1].get("[netflow][observationPointId]")).to eq(167954698)
expect(decode[1].get("[netflow][netscalerFlowFlags]")).to eq(1157636096)
@@ -2095,10 +2106,20 @@
expect(decode[2].get("[netflow][version]")).to eq(10)
expect(decode[2].get("[netflow][netscalerAppUnitNameAppId]")).to eq(239927296)
expect(decode[2].get("[netflow][netscalerHttpReqXForwardedFor]")).to eq('11.222.33.255')
end
+ if Gem::Requirement.create('>= 8.0').satisfied_by?(Gem::Version.create(LOGSTASH_CORE_VERSION))
+ it "should decode raw data decoding flowEndMicroseconds with nano precision" do
+ expect(decode[0].get("[netflow][flowEndMicroseconds]")).to eq('2016-11-11T12:09:19.000127768Z')
+ end
+ else
+ it "should decode raw data decoding flowEndMicroseconds with millis precision" do
+ expect(decode[0].get("[netflow][flowEndMicroseconds]")).to eq('2016-11-11T12:09:19.000Z')
+ end
+ end
+
it "should decode variable length fields" do
expect(decode[2].get("[netflow][netscalerHttpReqUrl]")).to eq('/aa/bb/ccccc/ddddddddddddddddddddddddd')
expect(decode[2].get("[netflow][netscalerHttpReqHost]")).to eq('www.kobo.com')
expect(decode[2].get("[netflow][netscalerHttpReqUserAgent]")).to eq('Mozilla/5.0 (Commodore 64; kobo.com) Gecko/20100101 Firefox/75.0')
expect(decode[2].get("[netflow][netscalerHttpReqVia]")).to eq('1.1 akamai.net(ghost) (AkamaiGHost)')
@@ -2960,11 +2981,11 @@
"ipClassOfService": 2,
"reverseInitialTCPFlags": 18,
"tcpSequenceNumber": 340533701,
"silkAppLabel": 0,
"sourceTransportPort": 63499,
- "flowEndMilliseconds": "2016-12-25T12:58:34.346Z",
+ "flowEndMilliseconds": "2016-12-25T12:58:34.347Z",
"flowAttributes": 0,
"destinationIPv4Address": "172.16.32.215",
"octetTotalCount": 172,
"vlanId": 0,
"reverseIpClassOfService": 0,
@@ -3063,10 +3084,15 @@
end
end
# New subject with config, ordered testing since we need caching before data processing
describe LogStash::Codecs::Netflow, 'configured with template caching', :order => :defined do
+ let(:is_LS_8) do
+ logstash_version = Gem::Version.create(LOGSTASH_CORE_VERSION)
+ Gem::Requirement.create('>= 8.0').satisfied_by?(logstash_version)
+ end
+
context "IPFIX Netscaler with variable length fields" do
subject do
LogStash::Codecs::Netflow.new(cache_config).tap do |codec|
expect{codec.register}.not_to raise_error
end
@@ -3169,14 +3195,17 @@
it "should cache templates" do
expect(cache.size).to eq(0)
expect(JSON.parse(File.read("#{tmp_dir}/ipfix_templates.cache"))).to eq(JSON.parse(cached_templates))
end
+ # in LS 8 the precision is up to nanos; in LS 7 it is up to millis
+ let(:nanos) { is_LS_8 ? "127768" : "" }
+
it "should decode raw data based on cached templates" do
expect(decode.size).to eq(3)
expect(decode[0].get("[netflow][version]")).to eq(10)
- expect(decode[0].get("[netflow][flowEndMicroseconds]")).to eq('2016-11-11T12:09:19.000Z')
+ expect(decode[0].get("[netflow][flowEndMicroseconds]")).to eq("2016-11-11T12:09:19.000#{nanos}Z")
expect(decode[0].get("[netflow][netscalerConnectionId]")).to eq(14460661)
expect(decode[1].get("[netflow][version]")).to eq(10)
expect(decode[1].get("[netflow][observationPointId]")).to eq(167954698)
expect(decode[1].get("[netflow][netscalerFlowFlags]")).to eq(1157636096)
expect(decode[2].get("[netflow][version]")).to eq(10)
@@ -3213,16 +3242,25 @@
end
it "should decode raw data" do
expect(decode.size).to eq(3)
expect(decode[0].get("[netflow][version]")).to eq(10)
- expect(decode[0].get("[netflow][flowEndMicroseconds]")).to eq('2016-11-11T12:09:19.000Z')
expect(decode[0].get("[netflow][netscalerConnectionId]")).to eq(14460661)
expect(decode[1].get("[netflow][version]")).to eq(10)
expect(decode[1].get("[netflow][observationPointId]")).to eq(167954698)
expect(decode[1].get("[netflow][netscalerFlowFlags]")).to eq(1157636096)
expect(decode[2].get("[netflow][version]")).to eq(10)
expect(decode[2].get("[netflow][netscalerAppUnitNameAppId]")).to eq(239927296)
+ end
+
+ if Gem::Requirement.create('>= 8.0').satisfied_by?(Gem::Version.create(LOGSTASH_CORE_VERSION))
+ it "should decode raw data decoding flowEndMicroseconds with nano precision" do
+ expect(decode[0].get("[netflow][flowEndMicroseconds]")).to eq('2016-11-11T12:09:19.000127768Z')
+ end
+ else
+ it "should decode raw data decoding flowEndMicroseconds with millis precision" do
+ expect(decode[0].get("[netflow][flowEndMicroseconds]")).to eq('2016-11-11T12:09:19.000Z')
+ end
end
it "should include flowset_id" do
expect(decode[0].get("[netflow][flowset_id]")).to eq(258)
expect(decode[1].get("[netflow][flowset_id]")).to eq(257)