Add tests for daily generation job
commit 0c85ed761a
parent 848bc367c3
4 changed files with 200 additions and 15 deletions

@@ -30,10 +30,10 @@ cache_preheating_job:
  class: "Cache::PreheatingJob"
  queue: default

# tracks_cleanup_job:
# cron: "0 2 * * 0" # every Sunday at 02:00
# class: "Tracks::CleanupJob"
# queue: tracks
tracks_daily_generation_job:
  cron: "0 2 * * *" # every day at 02:00
  class: "Tracks::DailyGenerationJob"
  queue: tracks

place_name_fetching_job:
  cron: "30 0 * * *" # every day at 00:30

spec/jobs/tracks/daily_generation_job_spec.rb (new file, 138 additions)
@@ -0,0 +1,138 @@
# frozen_string_literal: true

require 'rails_helper'

RSpec.describe Tracks::DailyGenerationJob, type: :job do
  let(:job) { described_class.new }

  before do
    # Clear any existing jobs
    ActiveJob::Base.queue_adapter.enqueued_jobs.clear

    # Mock the incremental processing callback to avoid interference
    allow_any_instance_of(Point).to receive(:trigger_incremental_track_generation)
  end

  describe 'queue configuration' do
    it 'uses the tracks queue' do
      expect(described_class.queue_name).to eq('tracks')
    end
  end

  describe '#perform' do
    let(:user1) { create(:user) }
    let(:user2) { create(:user) }
    let(:user3) { create(:user) }

    context 'with users having recent activity' do
      before do
        # User1 - has points created yesterday (should be processed)
        create(:point, user: user1, created_at: 1.day.ago, timestamp: 1.day.ago.to_i)

        # User2 - has points created 1.5 days ago (should be processed)
        create(:point, user: user2, created_at: 1.5.days.ago, timestamp: 1.5.days.ago.to_i)

        # User3 - has points created 3 days ago (should NOT be processed)
        create(:point, user: user3, created_at: 3.days.ago, timestamp: 3.days.ago.to_i)
      end

      it 'enqueues parallel generation jobs for users with recent activity' do
        expect {
          job.perform
        }.to have_enqueued_job(Tracks::ParallelGeneratorJob).twice
      end

      it 'enqueues jobs with correct mode and chunk size' do
        job.perform

        enqueued_jobs = ActiveJob::Base.queue_adapter.enqueued_jobs
        parallel_jobs = enqueued_jobs.select { |job| job['job_class'] == 'Tracks::ParallelGeneratorJob' }

        expect(parallel_jobs.size).to eq(2)

        parallel_jobs.each do |enqueued_job|
          args = enqueued_job['arguments']
          user_id = args[0]
          options = args[1]

          expect([user1.id, user2.id]).to include(user_id)
          expect(options['mode']['value']).to eq('daily') # ActiveJob serializes symbols
          expect(options['chunk_size']['value']).to eq(6.hours.to_i) # ActiveJob serializes durations
          expect(options['start_at']).to be_present
          expect(options['end_at']).to be_present
        end
      end

      it 'does not enqueue jobs for users without recent activity' do
        job.perform

        enqueued_jobs = ActiveJob::Base.queue_adapter.enqueued_jobs
        parallel_jobs = enqueued_jobs.select { |job| job['job_class'] == 'Tracks::ParallelGeneratorJob' }
        user_ids = parallel_jobs.map { |job| job['arguments'][0] }

        expect(user_ids).to contain_exactly(user1.id, user2.id)
        expect(user_ids).not_to include(user3.id)
      end

      it 'logs the process' do
        allow(Rails.logger).to receive(:info)

        expect(Rails.logger).to receive(:info).with("Starting daily track generation for users with recent activity")
        expect(Rails.logger).to receive(:info).with("Completed daily track generation")

        job.perform
      end
    end

    context 'with no users having recent activity' do
      before do
        # All users have old points (older than 2 days)
        create(:point, user: user1, created_at: 3.days.ago, timestamp: 3.days.ago.to_i)
      end

      it 'does not enqueue any parallel generation jobs' do
        expect {
          job.perform
        }.not_to have_enqueued_job(Tracks::ParallelGeneratorJob)
      end

      it 'still logs start and completion' do
        allow(Rails.logger).to receive(:info)

        expect(Rails.logger).to receive(:info).with("Starting daily track generation for users with recent activity")
        expect(Rails.logger).to receive(:info).with("Completed daily track generation")

        job.perform
      end
    end

    context 'when user processing fails' do
      before do
        create(:point, user: user1, created_at: 1.day.ago, timestamp: 1.day.ago.to_i)

        # Mock Tracks::ParallelGeneratorJob to raise an error
        allow(Tracks::ParallelGeneratorJob).to receive(:perform_later).and_raise(StandardError.new("Job failed"))
        allow(Rails.logger).to receive(:info)
      end

      it 'logs the error and continues processing' do
        expect(Rails.logger).to receive(:error).with("Failed to enqueue daily track generation for user #{user1.id}: Job failed")
        expect(ExceptionReporter).to receive(:call).with(instance_of(StandardError), "Daily track generation failed for user #{user1.id}")

        expect {
          job.perform
        }.not_to raise_error
      end
    end

    context 'with users having no points' do
      it 'does not process users without any points' do
        # user1, user2, user3 exist but have no points

        expect {
          job.perform
        }.not_to have_enqueued_job(Tracks::ParallelGeneratorJob)
      end
    end
  end
end
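
The job class itself is not included in this commit, so the spec above is the only description of its behaviour here. A minimal sketch that would satisfy those expectations might look like the following; the two-day activity window, the query, and the helper method name are assumptions inferred from the fixtures and log messages, not code from the repository:

# Hypothetical sketch of app/jobs/tracks/daily_generation_job.rb.
# Everything below is inferred from the spec; the real implementation may differ.
module Tracks
  class DailyGenerationJob < ApplicationJob
    queue_as :tracks

    def perform
      Rails.logger.info("Starting daily track generation for users with recent activity")

      # Assumption: "recent activity" means points created within the last 2 days,
      # which matches the 1-day / 1.5-day / 3-day fixtures in the spec.
      user_ids = Point.where(created_at: 2.days.ago..Time.current).distinct.pluck(:user_id)

      user_ids.each { |user_id| enqueue_generation(user_id) }

      Rails.logger.info("Completed daily track generation")
    end

    private

    def enqueue_generation(user_id)
      Tracks::ParallelGeneratorJob.perform_later(
        user_id,
        start_at: 1.day.ago.beginning_of_day, # assumption: the spec only checks presence
        end_at: Time.current,
        mode: :daily,
        chunk_size: 6.hours
      )
    rescue StandardError => e
      Rails.logger.error("Failed to enqueue daily track generation for user #{user_id}: #{e.message}")
      ExceptionReporter.call(e, "Daily track generation failed for user #{user_id}")
    end
  end
end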

@@ -121,14 +121,57 @@ RSpec.describe Point, type: :model do
    end
  end

  xdescribe '#trigger_incremental_track_generation' do
  describe '#trigger_incremental_track_generation' do
    let(:user) { create(:user) }
    let(:point) do
      create(:point, track: track, import_id: nil, timestamp: 1.hour.ago.to_i, reverse_geocoded_at: 1.hour.ago)
      create(:point, user: user, import_id: nil, timestamp: 1.hour.ago.to_i, reverse_geocoded_at: 1.hour.ago)
    end
    let(:track) { create(:track) }

    it 'enqueues Tracks::IncrementalCheckJob' do
      expect { point.send(:trigger_incremental_track_generation) }.to have_enqueued_job(Tracks::IncrementalCheckJob).with(point.user_id, point.id)
    before do
      # Stub user settings that might be called during incremental processing
      allow_any_instance_of(User).to receive_message_chain(:safe_settings, :minutes_between_routes).and_return(30)
      allow_any_instance_of(User).to receive_message_chain(:safe_settings, :meters_between_routes).and_return(500)
      allow_any_instance_of(User).to receive_message_chain(:safe_settings, :live_map_enabled).and_return(false)
    end

    it 'calls Tracks::IncrementalProcessor with user and point' do
      processor_double = double('processor')
      expect(Tracks::IncrementalProcessor).to receive(:new).with(user, point).and_return(processor_double)
      expect(processor_double).to receive(:call)

      point.send(:trigger_incremental_track_generation)
    end

    it 'does not raise error when processor fails' do
      allow(Tracks::IncrementalProcessor).to receive(:new).and_raise(StandardError.new("Processor failed"))

      expect {
        point.send(:trigger_incremental_track_generation)
      }.to raise_error(StandardError, "Processor failed")
    end
  end

  describe 'after_create_commit callback' do
    let(:user) { create(:user) }

    before do
      # Stub user settings that might be called during incremental processing
      allow_any_instance_of(User).to receive_message_chain(:safe_settings, :minutes_between_routes).and_return(30)
      allow_any_instance_of(User).to receive_message_chain(:safe_settings, :meters_between_routes).and_return(500)
      allow_any_instance_of(User).to receive_message_chain(:safe_settings, :live_map_enabled).and_return(false)
    end

    it 'triggers incremental track generation for non-imported points' do
      expect_any_instance_of(Point).to receive(:trigger_incremental_track_generation)

      create(:point, user: user, import_id: nil, timestamp: 1.hour.ago.to_i)
    end

    it 'does not trigger incremental track generation for imported points' do
      import = create(:import, user: user)
      expect_any_instance_of(Point).not_to receive(:trigger_incremental_track_generation)

      create(:point, user: user, import: import, timestamp: 1.hour.ago.to_i)
    end
  end
end
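
For context, the callback these examples exercise lives in the Point model, which this commit does not change. A hypothetical reading of that side, consistent with the expectations above (the guard condition and associations are assumptions; only the callback name and the processor call are asserted by the spec):

# Hypothetical excerpt from app/models/point.rb, inferred from the spec above.
class Point < ApplicationRecord
  belongs_to :user
  belongs_to :import, optional: true

  # Imported points are skipped so bulk imports don't trigger per-point work.
  after_create_commit :trigger_incremental_track_generation, unless: :import_id?

  private

  def trigger_incremental_track_generation
    # The spec expects a processor failure to propagate, so no rescue here.
    Tracks::IncrementalProcessor.new(user, self).call
  end
end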

@@ -3,6 +3,10 @@
require 'rails_helper'

RSpec.describe Tracks::IncrementalProcessor do
  before do
    # Mock the incremental processing callback to avoid double calls
    allow_any_instance_of(Point).to receive(:trigger_incremental_track_generation)
  end
  let(:user) { create(:user) }
  let(:safe_settings) { user.safe_settings }

@@ -18,7 +22,7 @@ RSpec.describe Tracks::IncrementalProcessor do
    let(:processor) { described_class.new(user, imported_point) }

    it 'does not process imported points' do
      expect(Tracks::CreateJob).not_to receive(:perform_later)
      expect(Tracks::ParallelGeneratorJob).not_to receive(:perform_later)

      processor.call
    end

@@ -29,7 +33,7 @@ RSpec.describe Tracks::IncrementalProcessor do
    let(:processor) { described_class.new(user, new_point) }

    it 'processes first point' do
      expect(Tracks::CreateJob).to receive(:perform_later)
      expect(Tracks::ParallelGeneratorJob).to receive(:perform_later)
        .with(user.id, start_at: nil, end_at: nil, mode: :incremental)
      processor.call
    end

@@ -46,7 +50,7 @@ RSpec.describe Tracks::IncrementalProcessor do
      end

      it 'processes when time threshold exceeded' do
        expect(Tracks::CreateJob).to receive(:perform_later)
        expect(Tracks::ParallelGeneratorJob).to receive(:perform_later)
          .with(user.id, start_at: nil, end_at: Time.zone.at(previous_point.timestamp), mode: :incremental)
        processor.call
      end

@@ -64,7 +68,7 @@ RSpec.describe Tracks::IncrementalProcessor do
      end

      it 'uses existing track end time as start_at' do
        expect(Tracks::CreateJob).to receive(:perform_later)
        expect(Tracks::ParallelGeneratorJob).to receive(:perform_later)
          .with(user.id, start_at: existing_track.end_at, end_at: Time.zone.at(previous_point.timestamp), mode: :incremental)
        processor.call
      end

@@ -87,7 +91,7 @@ RSpec.describe Tracks::IncrementalProcessor do
      end

      it 'processes when distance threshold exceeded' do
        expect(Tracks::CreateJob).to receive(:perform_later)
        expect(Tracks::ParallelGeneratorJob).to receive(:perform_later)
          .with(user.id, start_at: nil, end_at: Time.zone.at(previous_point.timestamp), mode: :incremental)
        processor.call
      end

@@ -106,7 +110,7 @@ RSpec.describe Tracks::IncrementalProcessor do
      end

      it 'does not process when thresholds not exceeded' do
        expect(Tracks::CreateJob).not_to receive(:perform_later)
        expect(Tracks::ParallelGeneratorJob).not_to receive(:perform_later)
        processor.call
      end
    end
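
Every hunk in this file swaps the old Tracks::CreateJob expectation for Tracks::ParallelGeneratorJob. The following is a rough sketch of the processor behaviour the updated examples pin down; the threshold checks and record lookups are assumptions, since only the enqueue call shape and the skip conditions are asserted here:

# Hypothetical sketch of Tracks::IncrementalProcessor, inferred from the spec
# hunks above; the real implementation may differ.
module Tracks
  class IncrementalProcessor
    def initialize(user, point)
      @user = user
      @point = point
    end

    def call
      return if @point.import_id.present? # imported points are ignored

      previous_point = find_previous_point

      if previous_point.nil?
        # First point ever: regenerate without any time bounds.
        enqueue(start_at: nil, end_at: nil)
      elsif threshold_exceeded?(previous_point)
        last_track = @user.tracks.order(:end_at).last
        enqueue(start_at: last_track&.end_at,
                end_at: Time.zone.at(previous_point.timestamp))
      end
    end

    private

    def find_previous_point
      @user.points.where('timestamp < ?', @point.timestamp).order(:timestamp).last
    end

    def threshold_exceeded?(previous_point)
      # Assumption: thresholds come from user.safe_settings, as stubbed in the
      # specs (minutes_between_routes / meters_between_routes). Only the time
      # check is sketched; the distance check is omitted for brevity.
      minutes = @user.safe_settings.minutes_between_routes.to_i
      (@point.timestamp - previous_point.timestamp) > minutes * 60
    end

    def enqueue(start_at:, end_at:)
      Tracks::ParallelGeneratorJob.perform_later(
        @user.id, start_at: start_at, end_at: end_at, mode: :incremental
      )
    end
  end
end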