spec/acceptance/rest/stats_spec.rb in ably-0.7.5 vs spec/acceptance/rest/stats_spec.rb in ably-0.7.6
- old
+ new
@@ -7,21 +7,21 @@
LAST_YEAR = Time.now.year - 1
LAST_INTERVAL = Time.new(LAST_YEAR, 2, 3, 15, 5, 0) # 3rd Feb 20(x) 15:05:00
STATS_FIXTURES = [
{
- intervalId: Ably::Models::Stat.to_interval_id(LAST_INTERVAL - 120, :minute),
+ intervalId: Ably::Models::Stats.to_interval_id(LAST_INTERVAL - 120, :minute),
inbound: { realtime: { messages: { count: 50, data: 5000 } } },
outbound: { realtime: { messages: { count: 20, data: 2000 } } }
},
{
- intervalId: Ably::Models::Stat.to_interval_id(LAST_INTERVAL - 60, :minute),
+ intervalId: Ably::Models::Stats.to_interval_id(LAST_INTERVAL - 60, :minute),
inbound: { realtime: { messages: { count: 60, data: 6000 } } },
outbound: { realtime: { messages: { count: 10, data: 1000 } } }
},
{
- intervalId: Ably::Models::Stat.to_interval_id(LAST_INTERVAL, :minute),
+ intervalId: Ably::Models::Stats.to_interval_id(LAST_INTERVAL, :minute),
inbound: { realtime: { messages: { count: 70, data: 7000 } } },
outbound: { realtime: { messages: { count: 40, data: 4000 } } },
persisted: { presence: { count: 20, data: 2000 } },
connections: { tls: { peak: 20, opened: 10 } },
channels: { peak: 50, opened: 30 },
@@ -34,68 +34,73 @@
reload_test_app # ensure no previous stats interfere
TestApp.instance.create_test_stats(STATS_FIXTURES)
end
vary_by_protocol do
- let(:client) { Ably::Rest::Client.new(api_key: api_key, environment: environment, protocol: protocol) }
+ let(:client) { Ably::Rest::Client.new(key: api_key, environment: environment, protocol: protocol) }
describe 'fetching application stats' do
context 'by minute' do
let(:first_inbound_realtime_count) { STATS_FIXTURES.first[:inbound][:realtime][:messages][:count] }
let(:last_inbound_realtime_count) { STATS_FIXTURES.last[:inbound][:realtime][:messages][:count] }
context 'with :from set to last interval and :limit set to 1' do
let(:subject) { client.stats(start: as_since_epoch(LAST_INTERVAL), by: :minute, limit: 1) }
- let(:stat) { subject.first}
+ let(:stat) { subject.items.first }
it 'retrieves only one stat' do
- expect(subject.count).to eql(1)
+ expect(subject.items.count).to eql(1)
end
+ it 'returns zero value for any missing metrics' do
+ expect(stat.channels.refused).to eql(0)
+ expect(stat.outbound.webhook.all.count).to eql(0)
+ end
+
it 'returns all aggregated message data' do
- expect(stat.all[:messages][:count]).to eql(70 + 40) # inbound + outbound
- expect(stat.all[:messages][:data]).to eql(7000 + 4000) # inbound + outbound
+ expect(stat.all.messages.count).to eql(70 + 40) # inbound + outbound
+ expect(stat.all.messages.data).to eql(7000 + 4000) # inbound + outbound
end
it 'returns inbound realtime all data' do
- expect(stat.inbound[:realtime][:all][:count]).to eql(70)
- expect(stat.inbound[:realtime][:all][:data]).to eql(7000)
+ expect(stat.inbound.realtime.all.count).to eql(70)
+ expect(stat.inbound.realtime.all.data).to eql(7000)
end
it 'returns inbound realtime message data' do
- expect(stat.inbound[:realtime][:messages][:count]).to eql(70)
- expect(stat.inbound[:realtime][:messages][:data]).to eql(7000)
+ expect(stat.inbound.realtime.messages.count).to eql(70)
+ expect(stat.inbound.realtime.messages.data).to eql(7000)
end
it 'returns outbound realtime all data' do
- expect(stat.outbound[:realtime][:all][:count]).to eql(40)
- expect(stat.outbound[:realtime][:all][:data]).to eql(4000)
+ expect(stat.outbound.realtime.all.count).to eql(40)
+ expect(stat.outbound.realtime.all.data).to eql(4000)
end
it 'returns persisted presence all data' do
- expect(stat.persisted[:all][:count]).to eql(20)
- expect(stat.persisted[:all][:data]).to eql(2000)
+ expect(stat.persisted.all.count).to eql(20)
+ expect(stat.persisted.all.data).to eql(2000)
end
it 'returns connections all data' do
- expect(stat.connections[:tls][:peak]).to eql(20)
- expect(stat.connections[:tls][:opened]).to eql(10)
+ expect(stat.connections.tls.peak).to eql(20)
+ expect(stat.connections.tls.opened).to eql(10)
end
it 'returns channels all data' do
- expect(stat.channels[:peak]).to eql(50)
- expect(stat.channels[:opened]).to eql(30)
+ expect(stat.channels.peak).to eql(50)
+ expect(stat.channels.opened).to eql(30)
end
it 'returns api_requests data' do
- expect(stat.api_requests[:succeeded]).to eql(50)
- expect(stat.api_requests[:failed]).to eql(10)
+ expect(stat.api_requests.succeeded).to eql(50)
+ expect(stat.api_requests.failed).to eql(10)
end
it 'returns token_requests data' do
- expect(stat.token_requests[:succeeded]).to eql(60)
- expect(stat.token_requests[:failed]).to eql(20)
+ expect(stat.token_requests.succeeded).to eql(60)
+ expect(stat.token_requests.failed).to eql(20)
end
it 'returns stat objects with #interval_granularity equal to :minute' do
expect(stat.interval_granularity).to eq(:minute)
end
@@ -110,46 +115,46 @@
end
context 'with :start set to first interval, :limit set to 1 and direction :forwards' do
let(:first_interval) { LAST_INTERVAL - 120 }
let(:subject) { client.stats(start: as_since_epoch(first_interval), by: :minute, direction: :forwards, limit: 1) }
- let(:stat) { subject.first}
+ let(:stat) { subject.items.first }
it 'returns the first interval stats as stats are provided forwards from :start' do
- expect(stat.inbound[:realtime][:all][:count]).to eql(first_inbound_realtime_count)
+ expect(stat.inbound.realtime.all.count).to eql(first_inbound_realtime_count)
end
it 'returns 3 pages of stats' do
- expect(subject).to be_first_page
- expect(subject).to_not be_last_page
- page3 = subject.next_page.next_page
- expect(page3).to be_last_page
- expect(page3.first.inbound[:realtime][:all][:count]).to eql(last_inbound_realtime_count)
+ expect(subject).to be_first
+ expect(subject).to_not be_last
+ page3 = subject.next.next
+ expect(page3).to be_last
+ expect(page3.items.first.inbound.realtime.all.count).to eql(last_inbound_realtime_count)
end
end
context 'with :end set to last interval, :limit set to 1 and direction :backwards' do
let(:subject) { client.stats(:end => as_since_epoch(LAST_INTERVAL), by: :minute, direction: :backwards, limit: 1) }
- let(:stat) { subject.first}
+ let(:stat) { subject.items.first }
it 'returns the 3rd interval stats first as stats are provided backwards from :end' do
- expect(stat.inbound[:realtime][:all][:count]).to eql(last_inbound_realtime_count)
+ expect(stat.inbound.realtime.all.count).to eql(last_inbound_realtime_count)
end
it 'returns 3 pages of stats' do
- expect(subject).to be_first_page
- expect(subject).to_not be_last_page
- page3 = subject.next_page.next_page
- expect(page3.first.inbound[:realtime][:all][:count]).to eql(first_inbound_realtime_count)
+ expect(subject).to be_first
+ expect(subject).to_not be_last
+ page3 = subject.next.next
+ expect(page3.items.first.inbound.realtime.all.count).to eql(first_inbound_realtime_count)
end
end
end
[:hour, :day, :month].each do |interval|
context "by #{interval}" do
let(:subject) { client.stats(start: as_since_epoch(LAST_INTERVAL), by: interval, direction: 'forwards', limit: 1) }
- let(:stat) { subject.first }
+ let(:stat) { subject.items.first }
let(:aggregate_messages_count) do
STATS_FIXTURES.inject(0) do |sum, fixture|
sum + fixture[:inbound][:realtime][:messages][:count] + fixture[:outbound][:realtime][:messages][:count]
end
end
@@ -158,13 +163,13 @@
sum + fixture[:inbound][:realtime][:messages][:data] + fixture[:outbound][:realtime][:messages][:data]
end
end
it 'should aggregate the stats for that period' do
- expect(subject.count).to eql(1)
+ expect(subject.items.count).to eql(1)
- expect(stat.all[:messages][:count]).to eql(aggregate_messages_count)
- expect(stat.all[:messages][:data]).to eql(aggregate_messages_data)
+ expect(stat.all.messages.count).to eql(aggregate_messages_count)
+ expect(stat.all.messages.data).to eql(aggregate_messages_data)
end
end
end
end
end
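
Taken together, the changes above track the client-facing API differences between 0.7.5 and 0.7.6 that these specs exercise: `Ably::Models::Stat` becomes `Ably::Models::Stats`, the REST client takes `key:` instead of `api_key:`, stats queries return a paginated object whose entries are exposed via `#items`, the pagination helpers drop the `_page` suffix (`first?`/`last?`/`#next`), and individual metrics are read with method access rather than hash keys. A minimal usage sketch under those assumptions follows; the API key string is a placeholder, and the paginated result is assumed to respond to `#items`, `#last?` and `#next` as in the specs above.

```ruby
require 'ably'

# Placeholder credentials; substitute a real Ably API key.
client = Ably::Rest::Client.new(key: 'appId.keyId:keySecret')

# Stats queries return a paginated result; entries live under #items (0.7.6 style).
page = client.stats(by: :minute, direction: :backwards, limit: 1)
stat = page.items.first

# Metrics are read with method access rather than hash keys.
puts stat.all.messages.count            # aggregated inbound + outbound message count
puts stat.inbound.realtime.messages.data
puts stat.interval_granularity          # e.g. :minute

# Paging follows the #last? / #next pattern used in the specs above.
page = page.next until page.last?
```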