diff --git a/app/models/point.rb b/app/models/point.rb
index a438bdb5..641a9b4b 100644
--- a/app/models/point.rb
+++ b/app/models/point.rb
@@ -30,7 +30,7 @@ class Point < ApplicationRecord
   after_create :async_reverse_geocode
   after_create_commit :broadcast_coordinates
 
-  after_commit -> { Import::UpdatePointsCountJob.perform_later(import_id) }, on: :destroy, if: -> { import_id.present? }
+  # after_commit -> { Import::UpdatePointsCountJob.perform_later(import_id) }, on: :destroy, if: -> { import_id.present? }
 
   def self.without_raw_data
     select(column_names - ['raw_data'])
diff --git a/app/services/geojson/import_parser.rb b/app/services/geojson/import_parser.rb
index 95edaf10..4e016d7c 100644
--- a/app/services/geojson/import_parser.rb
+++ b/app/services/geojson/import_parser.rb
@@ -12,18 +12,17 @@ class Geojson::ImportParser
   end
 
   def call
-    import.file.download do |file|
-      json = Oj.load(file)
+    file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
+    json = Oj.load(file_content)
 
-      data = Geojson::Params.new(json).call
+    data = Geojson::Params.new(json).call
 
-      data.each.with_index(1) do |point, index|
-        next if point_exists?(point, user_id)
+    data.each.with_index(1) do |point, index|
+      next if point_exists?(point, user_id)
 
-        Point.create!(point.merge(user_id:, import_id: import.id))
+      Point.create!(point.merge(user_id:, import_id: import.id))
 
-        broadcast_import_progress(import, index)
-      end
+      broadcast_import_progress(import, index)
     end
   end
 end
diff --git a/app/services/google_maps/phone_takeout_parser.rb b/app/services/google_maps/phone_takeout_parser.rb
index dae1441d..132ac14a 100644
--- a/app/services/google_maps/phone_takeout_parser.rb
+++ b/app/services/google_maps/phone_takeout_parser.rb
@@ -51,7 +51,7 @@ class GoogleMaps::PhoneTakeoutParser
     raw_signals = []
     raw_array = []
 
-    file_content = SecureFileDownloader.new(import.file).download_with_verification
+    file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
 
     json = Oj.load(file_content)
 
diff --git a/app/services/google_maps/records_storage_importer.rb b/app/services/google_maps/records_storage_importer.rb
index 48a7c00c..35e49eea 100644
--- a/app/services/google_maps/records_storage_importer.rb
+++ b/app/services/google_maps/records_storage_importer.rb
@@ -23,7 +23,7 @@ class GoogleMaps::RecordsStorageImporter
   attr_reader :import, :user
 
   def process_file_in_batches
-    file_content = SecureFileDownloader.new(import.file).download_with_verification
+    file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
     locations = parse_file(file_content)
     process_locations_in_batches(locations) if locations.present?
   end
diff --git a/app/services/google_maps/semantic_history_parser.rb b/app/services/google_maps/semantic_history_parser.rb
index b8d38c5d..c9752151 100644
--- a/app/services/google_maps/semantic_history_parser.rb
+++ b/app/services/google_maps/semantic_history_parser.rb
@@ -61,17 +61,12 @@ class GoogleMaps::SemanticHistoryParser
   end
 
   def points_data
-    data = nil
+    file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
+    json = Oj.load(file_content)
 
-    import.file.download do |f|
-      json = Oj.load(f)
-
-      data = json['timelineObjects'].flat_map do |timeline_object|
-        parse_timeline_object(timeline_object)
-      end.compact
-    end
-
-    data
+    json['timelineObjects'].flat_map do |timeline_object|
+      parse_timeline_object(timeline_object)
+    end.compact
   end
 
   def parse_timeline_object(timeline_object)
diff --git a/app/services/gpx/track_importer.rb b/app/services/gpx/track_importer.rb
index 24dd2798..0bb0d516 100644
--- a/app/services/gpx/track_importer.rb
+++ b/app/services/gpx/track_importer.rb
@@ -13,17 +13,16 @@ class Gpx::TrackImporter
   end
 
   def call
-    import.file.download do |file|
-      json = Hash.from_xml(file)
+    file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
+    json = Hash.from_xml(file_content)
 
-      tracks = json['gpx']['trk']
-      tracks_arr = tracks.is_a?(Array) ? tracks : [tracks]
+    tracks = json['gpx']['trk']
+    tracks_arr = tracks.is_a?(Array) ? tracks : [tracks]
 
-      points = tracks_arr.map { parse_track(_1) }.flatten.compact
-      points_data = points.map { prepare_point(_1) }.compact
+    points = tracks_arr.map { parse_track(_1) }.flatten.compact
+    points_data = points.map { prepare_point(_1) }.compact
 
-      bulk_insert_points(points_data)
-    end
+    bulk_insert_points(points_data)
   end
 
   private
diff --git a/app/services/secure_file_downloader.rb b/app/services/imports/secure_file_downloader.rb
similarity index 98%
rename from app/services/secure_file_downloader.rb
rename to app/services/imports/secure_file_downloader.rb
index c3e23446..f4bd2091 100644
--- a/app/services/secure_file_downloader.rb
+++ b/app/services/imports/secure_file_downloader.rb
@@ -1,6 +1,6 @@
 # frozen_string_literal: true
 
-class SecureFileDownloader
+class Imports::SecureFileDownloader
   DOWNLOAD_TIMEOUT = 300 # 5 minutes timeout
   MAX_RETRIES = 3
 
diff --git a/app/services/own_tracks/importer.rb b/app/services/own_tracks/importer.rb
index 75cd88ab..bc63f5f6 100644
--- a/app/services/own_tracks/importer.rb
+++ b/app/services/own_tracks/importer.rb
@@ -11,20 +11,19 @@ class OwnTracks::Importer
   end
 
   def call
-    import.file.download do |file|
-      parsed_data = OwnTracks::RecParser.new(file).call
+    file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
+    parsed_data = OwnTracks::RecParser.new(file_content).call
 
-      points_data = parsed_data.map do |point|
-        OwnTracks::Params.new(point).call.merge(
-          import_id: import.id,
-          user_id: user_id,
-          created_at: Time.current,
-          updated_at: Time.current
-        )
-      end
-
-      bulk_insert_points(points_data)
+    points_data = parsed_data.map do |point|
+      OwnTracks::Params.new(point).call.merge(
+        import_id: import.id,
+        user_id: user_id,
+        created_at: Time.current,
+        updated_at: Time.current
+      )
     end
+
+    bulk_insert_points(points_data)
   end
 
   private
diff --git a/app/services/photos/import_parser.rb b/app/services/photos/import_parser.rb
index fec7bba8..d0022cfd 100644
--- a/app/services/photos/import_parser.rb
+++ b/app/services/photos/import_parser.rb
@@ -11,11 +11,10 @@ class Photos::ImportParser
   end
 
   def call
-    import.file.download do |file|
-      json = Oj.load(file)
+    file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
+    json = Oj.load(file_content)
 
-      json.each.with_index(1) { |point, index| create_point(point, index) }
-    end
+    json.each.with_index(1) { |point, index| create_point(point, index) }
   end
 
   def create_point(point, index)
diff --git a/spec/models/point_spec.rb b/spec/models/point_spec.rb
index 6a096f18..7f5f03e9 100644
--- a/spec/models/point_spec.rb
+++ b/spec/models/point_spec.rb
@@ -79,14 +79,4 @@ RSpec.describe Point, type: :model do
       end
     end
   end
-
-  describe 'callbacks' do
-    describe '#update_import_points_count' do
-      let(:point) { create(:point, import_id: 1) }
-
-      it 'updates the import points count' do
-        expect { point.destroy }.to have_enqueued_job(Import::UpdatePointsCountJob).with(1)
-      end
-    end
-  end
 end
diff --git a/spec/services/google_maps/records_storage_importer_spec.rb b/spec/services/google_maps/records_storage_importer_spec.rb
index 86ce44ac..dd7df250 100644
--- a/spec/services/google_maps/records_storage_importer_spec.rb
+++ b/spec/services/google_maps/records_storage_importer_spec.rb
@@ -148,10 +148,10 @@ RSpec.describe GoogleMaps::RecordsStorageImporter do
         # Create a mock that will return a successful result
         # The internal retries are implemented inside SecureFileDownloader,
         # not in the RecordsStorageImporter
-        downloader = instance_double(SecureFileDownloader)
+        downloader = instance_double(Imports::SecureFileDownloader)
 
         # Create the downloader mock before it gets used
-        expect(SecureFileDownloader).to receive(:new).with(import.file).and_return(downloader)
+        expect(Imports::SecureFileDownloader).to receive(:new).with(import.file).and_return(downloader)
 
         # The SecureFileDownloader handles all the retries internally
         # From the perspective of the importer, it just gets the file content
@@ -164,10 +164,10 @@ RSpec.describe GoogleMaps::RecordsStorageImporter do
       it 'fails after max retries' do
         # The retry mechanism is in SecureFileDownloader, not RecordsStorageImporter
         # So we need to simulate that the method throws the error after internal retries
-        downloader = instance_double(SecureFileDownloader)
+        downloader = instance_double(Imports::SecureFileDownloader)
 
         # Create the downloader mock before it gets used - expect only one call from the importer
-        expect(SecureFileDownloader).to receive(:new).with(import.file).and_return(downloader)
+        expect(Imports::SecureFileDownloader).to receive(:new).with(import.file).and_return(downloader)
 
         # This should be called once, and the internal retries should have been attempted
         # After the max retries, it will still raise the Timeout::Error that bubbles up
diff --git a/spec/services/secure_file_downloader_spec.rb b/spec/services/imports/secure_file_downloader_spec.rb
similarity index 98%
rename from spec/services/secure_file_downloader_spec.rb
rename to spec/services/imports/secure_file_downloader_spec.rb
index ac532ff5..32ba569b 100644
--- a/spec/services/secure_file_downloader_spec.rb
+++ b/spec/services/imports/secure_file_downloader_spec.rb
@@ -2,7 +2,7 @@
 
 require 'rails_helper'
 
-RSpec.describe SecureFileDownloader do
+RSpec.describe Imports::SecureFileDownloader do
   let(:file_content) { 'test content' }
   let(:file_size) { file_content.bytesize }
   let(:checksum) { Base64.strict_encode64(Digest::MD5.digest(file_content)) }
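
Usage sketch (not part of the diff): a minimal example of the call-site pattern the parsers above now share, assuming `import.file` is an Active Storage attachment as implied by the replaced `import.file.download` blocks.

    # Sketch only: download the attached file with size/checksum verification,
    # then hand the raw content to the relevant parser.
    file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
    json = Oj.load(file_content) # JSON-based importers; the GPX importer uses Hash.from_xml instead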