Mirror of https://github.com/Freika/dawarich.git (synced 2026-01-11 09:41:40 -05:00)

Commit 848bc367c3: Make tracks generation system live
Parent: 84d71cc011

4 changed files with 51 additions and 5 deletions
app/jobs/tracks/daily_generation_job.rb (new file, 46 lines)

@@ -0,0 +1,46 @@
# frozen_string_literal: true

# Daily job to handle bulk track processing for users with recent activity
# This serves as a backup to incremental processing and handles any missed tracks
class Tracks::DailyGenerationJob < ApplicationJob
  queue_as :tracks

  def perform
    Rails.logger.info "Starting daily track generation for users with recent activity"

    users_with_recent_activity.find_each do |user|
      process_user_tracks(user)
    end

    Rails.logger.info "Completed daily track generation"
  end

  private

  def users_with_recent_activity
    # Find users who have created points in the last 2 days
    # This gives a buffer to handle cross-day tracks
    User.joins(:points)
        .where(points: { created_at: 2.days.ago..Time.current })
        .distinct
  end

  def process_user_tracks(user)
    # Process tracks for the last 2 days, plus extra buffer for cross-day tracks
    start_at = 3.days.ago.beginning_of_day
    end_at = Time.current

    Rails.logger.info "Enqueuing daily track generation for user #{user.id}"

    Tracks::ParallelGeneratorJob.perform_later(
      user.id,
      start_at: start_at,
      end_at: end_at,
      mode: :daily,
      chunk_size: 6.hours # Smaller chunks for recent data processing
    )
  rescue StandardError => e
    Rails.logger.error "Failed to enqueue daily track generation for user #{user.id}: #{e.message}"
    ExceptionReporter.call(e, "Daily track generation failed for user #{user.id}")
  end
end
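The diff does not show how this daily job gets scheduled; only the job class itself is added. As a minimal sketch, assuming sidekiq-cron (or a similar cron-style scheduler) is available in the app, which this commit does not confirm, the job could be registered roughly like this:

# Hypothetical initializer (e.g. config/initializers/sidekiq_cron.rb) — the actual
# scheduler setup is not part of this commit; names and cron time are placeholders.
Sidekiq::Cron::Job.create(
  name:  'Daily track generation',
  cron:  '0 2 * * *', # every day at 02:00 server time
  class: 'Tracks::DailyGenerationJob'
)

Any scheduler that can enqueue an ActiveJob on a daily cadence would serve the same purpose here.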
Point model:

@@ -33,8 +33,8 @@ class Point < ApplicationRecord
   after_create :async_reverse_geocode, if: -> { DawarichSettings.store_geodata? && !reverse_geocoded? }
   after_create :set_country
   after_create_commit :broadcast_coordinates
-  # after_create_commit :trigger_incremental_track_generation, if: -> { import_id.nil? }
-  # after_commit :recalculate_track, on: :update, if: -> { track.present? }
+  after_create_commit :trigger_incremental_track_generation, if: -> { import_id.nil? }
+  after_commit :recalculate_track, on: :update, if: -> { track.present? }

   def self.without_raw_data
     select(column_names - ['raw_data'])
@@ -103,6 +103,6 @@ class Point < ApplicationRecord
   end

   def trigger_incremental_track_generation
-    Tracks::IncrementalCheckJob.perform_later(user.id, id)
+    Tracks::IncrementalProcessor.new(user, self).call
   end
 end
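With this change the callback no longer enqueues Tracks::IncrementalCheckJob; it runs Tracks::IncrementalProcessor inline in the after_create_commit hook for points that were not imported. A minimal spec sketch for the re-enabled callback, assuming RSpec with factory_bot and a point factory (neither is shown in this diff, and the file name is hypothetical):

# spec/models/point_spec.rb — illustrative only; factory attributes are assumptions.
require 'rails_helper'

RSpec.describe Point, type: :model do
  describe '#trigger_incremental_track_generation' do
    it 'runs the incremental processor after creating a non-imported point' do
      processor = instance_double(Tracks::IncrementalProcessor, call: true)
      allow(Tracks::IncrementalProcessor).to receive(:new).and_return(processor)

      create(:point, import_id: nil)

      expect(processor).to have_received(:call)
    end
  end
end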
Tracks::IncrementalProcessor:

@@ -36,7 +36,7 @@ class Tracks::IncrementalProcessor
     start_at = find_start_time
     end_at = find_end_time

-    Tracks::CreateJob.perform_later(user.id, start_at:, end_at:, mode: :incremental)
+    Tracks::ParallelGeneratorJob.perform_later(user.id, start_at:, end_at:, mode: :incremental)
   end

   private
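Both the incremental path above and the new daily job now call Tracks::ParallelGeneratorJob with a user id plus keyword options. The job itself is not part of this diff; the following outline is inferred purely from the call sites in this commit (defaults and body are placeholders, and the real implementation will differ):

# Illustrative shape only — inferred from the three call sites in this commit.
class Tracks::ParallelGeneratorJob < ApplicationJob
  queue_as :tracks

  # start_at/end_at may be nil (full historical run), mode tags the caller
  # (:incremental, :daily, ...), and chunk_size controls how the range is split.
  def perform(user_id, start_at: nil, end_at: nil, mode: :bulk, chunk_size: 1.day)
    Rails.logger.info(
      "Parallel track generation for user #{user_id} " \
      "(#{start_at.inspect}..#{end_at.inspect}, mode: #{mode}, chunk: #{chunk_size.inspect})"
    )
    # Real implementation: split the range into chunk_size windows and fan out per-chunk work.
  end
end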
CreateTracksFromPoints migration:

@@ -15,7 +15,7 @@ class CreateTracksFromPoints < ActiveRecord::Migration[8.0]

       # Use explicit parameters for bulk historical processing:
       # - No time limits (start_at: nil, end_at: nil) = process ALL historical data
-      Tracks::CreateJob.perform_later(
+      Tracks::ParallelGeneratorJob.perform_later(
         user.id,
         start_at: nil,
         end_at: nil,
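Because the migration enqueues the generator with no time limits, the same call can also be issued by hand (for example from a Rails console) to rebuild tracks from all historical points for a single user. Only user.id, start_at: nil and end_at: nil are visible in the excerpt above; any further keyword arguments the migration passes are cut off here, so the sketch below sticks to what is shown:

# Rails console sketch — rebuild all tracks for one user from historical data.
user = User.find_by!(email: 'someone@example.com') # hypothetical lookup
Tracks::ParallelGeneratorJob.perform_later(user.id, start_at: nil, end_at: nil)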