
Commit

add specs
mensfeld committed Jul 21, 2023
1 parent f3c184a commit 931472e
Showing 2 changed files with 166 additions and 3 deletions.
3 changes: 2 additions & 1 deletion lib/karafka/web/installer.rb
@@ -17,7 +17,8 @@ class Installer
rss: 0,
listeners_count: 0,
utilization: 0,
lag_stored: 0
lag_stored: 0,
errors: 0
}.freeze

# Default empty historicals for first record in Kafka
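For orientation, the stats defaults touched by this hunk read as follows after the change. This is a sketch only: the constant name DEFAULT_STATS is an assumption, and the keys preceding rss sit outside the visible hunk inside the Installer class.

    # assumed constant name; keys preceding rss are outside the visible hunk
    DEFAULT_STATS = {
      rss: 0,
      listeners_count: 0,
      utilization: 0,
      lag_stored: 0,
      # newly added so a fresh state starts with a zeroed error counter
      errors: 0
    }.freeze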
166 changes: 164 additions & 2 deletions spec/lib/karafka/web/ui/models/historicals_spec.rb
@@ -45,7 +45,169 @@
it { expect(historicals.seconds).to eq([]) }
end

context 'when we had previous historicals in the same recent window' do
pending
context 'when we had no previous historicals but we do have current stats' do
let(:state) do
default_state.merge(
stats: default_state[:stats].merge(messages: 10, batches: 2)
)
end

it { expect(historicals.days).to eq([]) }
it { expect(historicals.hours).to eq([]) }
it { expect(historicals.minutes).to eq([]) }
it { expect(historicals.seconds).to eq([]) }
end

context 'when we had historicals in the same recent window as current' do
let(:state) do
progress = default_state[:stats].merge(messages: 10, batches: 1)

default_state.merge(
stats: default_state[:stats].merge(messages: 23, batches: 2),
historicals: {
days: [[dispatched_at - 2, progress]],
hours: [[dispatched_at - 2, progress]],
minutes: [[dispatched_at - 2, progress]],
seconds: [[dispatched_at - 2, progress]]
}
)
end

it { expect(historicals.days.size).to eq(1) }
it { expect(historicals.hours.size).to eq(1) }
it { expect(historicals.minutes.size).to eq(1) }
it { expect(historicals.seconds.size).to eq(1) }
it { expect(historicals.seconds.first.first).to eq(dispatched_at) }
it { expect(historicals.seconds.first.last[:batch_size]).to eq(13) }
it { expect(historicals.seconds.first.last[:batches]).to eq(1) }
end

context 'when we have short drifters in a historical window' do
let(:state) do
progress = default_state[:stats].merge(messages: 10, batches: 1, processes: 2, rss: 100)

default_state.merge(
stats: default_state[:stats].merge(messages: 23, batches: 2),
historicals: {
days: [
[dispatched_at - 6_000, progress],
[dispatched_at - 100, progress],
[dispatched_at - 99, progress]
],
hours: [
[dispatched_at - 6_000, progress],
[dispatched_at - 100, progress],
[dispatched_at - 99, progress]
],
minutes: [
[dispatched_at - 6_000, progress],
[dispatched_at - 100, progress],
[dispatched_at - 99, progress]
],
seconds: [
[dispatched_at - 6_000, progress],
[dispatched_at - 100, progress],
[dispatched_at - 99, progress]
]
}
)
end

it { expect(historicals.days.size).to eq(3) }
it { expect(historicals.hours.size).to eq(3) }
it { expect(historicals.minutes.size).to eq(3) }
it { expect(historicals.seconds.size).to eq(3) }
it { expect(historicals.seconds.map(&:first)).not_to include(dispatched_at - 99) }
it { expect(historicals.seconds.last.first).to eq(dispatched_at) }
it { expect(historicals.seconds.first.last[:batch_size]).to eq(0) }
it { expect(historicals.seconds.first.last[:batches]).to eq(0) }
it { expect(historicals.seconds.first.last[:process_rss]).to eq(50) }
end

context 'when we have long drifters in a historical window' do
let(:state) do
progress = default_state[:stats].merge(messages: 10, batches: 1, processes: 2, rss: 100)

default_state.merge(
stats: default_state[:stats].merge(messages: 23, batches: 2),
historicals: {
days: [
[dispatched_at - 6_000, progress],
[dispatched_at - 100, progress],
[dispatched_at - 91, progress]
],
hours: [
[dispatched_at - 6_000, progress],
[dispatched_at - 100, progress],
[dispatched_at - 91, progress]
],
minutes: [
[dispatched_at - 6_000, progress],
[dispatched_at - 100, progress],
[dispatched_at - 91, progress]
],
seconds: [
[dispatched_at - 6_000, progress],
[dispatched_at - 100, progress],
[dispatched_at - 91, progress]
]
}
)
end

it { expect(historicals.days.size).to eq(3) }
it { expect(historicals.hours.size).to eq(3) }
it { expect(historicals.minutes.size).to eq(3) }
it { expect(historicals.seconds.size).to eq(5) }
it { expect(historicals.seconds.map(&:first)).not_to include(dispatched_at - 6_000) }
it { expect(historicals.seconds.map(&:first)).to include(dispatched_at - 91) }
it { expect(historicals.seconds.map(&:first)).to include(dispatched_at - 96) }
it { expect(historicals.seconds.last.first).to eq(dispatched_at) }
it { expect(historicals.seconds.first.last[:batch_size]).to eq(0) }
it { expect(historicals.seconds.first.last[:batches]).to eq(0) }
it { expect(historicals.seconds.first.last[:process_rss]).to eq(50) }
end

context 'when we do not have long drifters in a historical window' do
let(:state) do
progress = default_state[:stats].merge(messages: 10, batches: 1, processes: 2, rss: 100)

default_state.merge(
stats: default_state[:stats].merge(messages: 23, batches: 2),
historicals: {
days: [
[dispatched_at - 6_000, progress],
[dispatched_at - 100, progress],
[dispatched_at - 95, progress]
],
hours: [
[dispatched_at - 6_000, progress],
[dispatched_at - 100, progress],
[dispatched_at - 95, progress]
],
minutes: [
[dispatched_at - 6_000, progress],
[dispatched_at - 100, progress],
[dispatched_at - 95, progress]
],
seconds: [
[dispatched_at - 6_000, progress],
[dispatched_at - 100, progress],
[dispatched_at - 95, progress]
]
}
)
end

it { expect(historicals.days.size).to eq(3) }
it { expect(historicals.hours.size).to eq(3) }
it { expect(historicals.minutes.size).to eq(3) }
it { expect(historicals.seconds.size).to eq(4) }
it { expect(historicals.seconds.map(&:first)).not_to include(dispatched_at - 6_000) }
it { expect(historicals.seconds.map(&:first)).to include(dispatched_at - 95) }
it { expect(historicals.seconds.last.first).to eq(dispatched_at) }
it { expect(historicals.seconds.first.last[:batch_size]).to eq(0) }
it { expect(historicals.seconds.first.last[:batches]).to eq(0) }
it { expect(historicals.seconds.first.last[:process_rss]).to eq(50) }
end
end
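The numeric expectations in these contexts follow directly from the fixture values. A rough Ruby sketch of that arithmetic (an illustration only, not Karafka Web's implementation), assuming batch_size is the delta of messages between consecutive samples and process_rss is rss divided across processes:

    # fixture values from the "same recent window" context
    current  = { messages: 23, batches: 2 }
    previous = { messages: 10, batches: 1 }

    batch_size = current[:messages] - previous[:messages] # => 13, as asserted above
    batches    = current[:batches]  - previous[:batches]  # => 1

    # fixture values from the drifter contexts (processes: 2, rss: 100)
    process_rss = 100 / 2 # => 50, matching the process_rss expectation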
