# frozen_string_literal: true

# dawarich/app/services/google_maps/records_parser.rb
# Parses raw records from a Google Maps Timeline export and persists
# them as Point rows, accumulating attribute hashes and writing them to
# the database in bulk batches.
class GoogleMaps::RecordsParser
  # Number of point rows accumulated before each bulk upsert.
  BATCH_SIZE = 1000

  attr_reader :import

  # @param import [Import] the import run these points belong to; supplies
  #   import_id and user_id for every persisted row
  def initialize(import)
    @import = import
    @batch  = []
  end

  # Converts each raw record into a Point attribute hash and flushes the
  # accumulated batch every BATCH_SIZE rows; a final flush writes any
  # remainder so the batch is always empty when this method returns.
  #
  # @param records [Hash, Array<Hash>, nil] one record, a list, or nil —
  #   Array() coerces all three shapes uniformly
  def call(records)
    Array(records).each do |record|
      @batch.push(parse_json(record))
      next if @batch.size < BATCH_SIZE

      bulk_insert_points
      @batch = []
    end

    bulk_insert_points unless @batch.empty?
  end

  private

  # Maps one raw timeline record onto a Point attribute hash.
  # latitudeE7/longitudeE7 are degrees scaled by 1e7, hence the division.
  # Newer exports carry 'timestamp'; older ones 'timestampMs' — the
  # shared Timestamps helper normalises either form.
  def parse_json(json)
    latitude  = json['latitudeE7'].to_f / 10**7
    longitude = json['longitudeE7'].to_f / 10**7

    {
      latitude: latitude,
      longitude: longitude,
      timestamp: Timestamps.parse_timestamp(json['timestamp'] || json['timestampMs']),
      altitude: json['altitude'],
      velocity: json['velocity'],
      raw_data: json,
      topic: 'Google Maps Timeline Export',
      tracker_id: 'google-maps-timeline-export',
      import_id: import.id,
      user_id: import.user_id,
      created_at: Time.current,
      updated_at: Time.current
    }
  end

  # Bulk-writes the accumulated batch; duplicate rows are resolved via
  # the (latitude, longitude, timestamp, user_id) unique index.
  def bulk_insert_points
    Point.upsert_all(
      @batch,
      unique_by: %i[latitude longitude timestamp user_id],
      returning: false
    )
  end
end