mirror of https://github.com/Freika/dawarich.git
Use secure file downloader for imports
parent 45a310319f
commit e433ed4d1c
12 changed files with 42 additions and 61 deletions
@@ -30,7 +30,7 @@ class Point < ApplicationRecord
   after_create :async_reverse_geocode
   after_create_commit :broadcast_coordinates

-  after_commit -> { Import::UpdatePointsCountJob.perform_later(import_id) }, on: :destroy, if: -> { import_id.present? }
+  # after_commit -> { Import::UpdatePointsCountJob.perform_later(import_id) }, on: :destroy, if: -> { import_id.present? }

   def self.without_raw_data
     select(column_names - ['raw_data'])
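The destroy-time counter update is disabled here, and its spec is dropped further down in this commit. For orientation, a minimal sketch of the job the callback used to enqueue; only the class name and the import_id argument appear in the diff, so the perform body below is an assumption, not code from this repository:

class Import::UpdatePointsCountJob < ApplicationJob
  queue_as :default

  def perform(import_id)
    import = Import.find_by(id: import_id)
    return if import.nil?

    # Recount from scratch rather than decrementing, so concurrent destroys
    # cannot drift the counter. The points association and points_count
    # column are assumed names.
    import.update(points_count: import.points.count)
  end
end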
@@ -12,18 +12,17 @@ class Geojson::ImportParser
   end

   def call
-    import.file.download do |file|
-      json = Oj.load(file)
-
-      data = Geojson::Params.new(json).call
-
-      data.each.with_index(1) do |point, index|
-        next if point_exists?(point, user_id)
-
-        Point.create!(point.merge(user_id:, import_id: import.id))
-
-        broadcast_import_progress(import, index)
-      end
-    end
+    file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
+    json = Oj.load(file_content)
+
+    data = Geojson::Params.new(json).call
+
+    data.each.with_index(1) do |point, index|
+      next if point_exists?(point, user_id)
+
+      Point.create!(point.merge(user_id:, import_id: import.id))
+
+      broadcast_import_progress(import, index)
+    end
   end
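This is the pattern repeated across every parser touched by the commit. One behavioral point worth noting: when ActiveStorage's download is given a block, it streams the blob and yields it in chunks, so a parse inside the old block form could end up seeing only part of a large file. The new downloader is assumed to return the complete, verified payload as a single String. A side-by-side sketch:

# Old shape: the block is invoked once per chunk, not once per file.
buffer = +''
import.file.download do |chunk|
  buffer << chunk
end

# New shape: one verified String, parsed exactly once.
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
json = Oj.load(file_content)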
@@ -51,7 +51,7 @@ class GoogleMaps::PhoneTakeoutParser
     raw_signals = []
     raw_array = []

-    file_content = SecureFileDownloader.new(import.file).download_with_verification
+    file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification

     json = Oj.load(file_content)
@@ -23,7 +23,7 @@ class GoogleMaps::RecordsStorageImporter
   attr_reader :import, :user

   def process_file_in_batches
-    file_content = SecureFileDownloader.new(import.file).download_with_verification
+    file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
     locations = parse_file(file_content)
     process_locations_in_batches(locations) if locations.present?
   end
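process_locations_in_batches is not shown in this hunk. A hypothetical shape, just to make the method above self-explanatory; BATCH_SIZE and insert_batch are invented names, not the project's:

BATCH_SIZE = 1_000

def process_locations_in_batches(locations)
  # Hand off one slice at a time so memory stays bounded regardless of
  # how large the Records export is.
  locations.each_slice(BATCH_SIZE) do |batch|
    insert_batch(batch)
  end
end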
@@ -61,17 +61,12 @@ class GoogleMaps::SemanticHistoryParser
   end

   def points_data
-    data = nil
-
-    import.file.download do |f|
-      json = Oj.load(f)
-
-      data = json['timelineObjects'].flat_map do |timeline_object|
-        parse_timeline_object(timeline_object)
-      end.compact
-    end
-
-    data
+    file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
+    json = Oj.load(file_content)
+
+    json['timelineObjects'].flat_map do |timeline_object|
+      parse_timeline_object(timeline_object)
+    end.compact
   end

   def parse_timeline_object(timeline_object)
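Besides swapping in the downloader, this rewrite drops the data = nil outer-variable dance: the flat_map result is now the method's return value directly. For readers unfamiliar with the input, Semantic Location History files from Google Takeout look roughly like this (field subset; exact fields vary by export version, and coordinates are degrees multiplied by 1e7):

takeout_sample = {
  'timelineObjects' => [
    { 'placeVisit'      => { 'location'      => { 'latitudeE7' => 520_000_000, 'longitudeE7' => 43_000_000 } } },
    { 'activitySegment' => { 'startLocation' => { 'latitudeE7' => 520_000_000, 'longitudeE7' => 43_000_000 } } }
  ]
}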
@@ -13,17 +13,16 @@ class Gpx::TrackImporter
   end

   def call
-    import.file.download do |file|
-      json = Hash.from_xml(file)
-
-      tracks = json['gpx']['trk']
-      tracks_arr = tracks.is_a?(Array) ? tracks : [tracks]
-
-      points = tracks_arr.map { parse_track(_1) }.flatten.compact
-      points_data = points.map { prepare_point(_1) }.compact
-
-      bulk_insert_points(points_data)
-    end
+    file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
+    json = Hash.from_xml(file_content)
+
+    tracks = json['gpx']['trk']
+    tracks_arr = tracks.is_a?(Array) ? tracks : [tracks]
+
+    points = tracks_arr.map { parse_track(_1) }.flatten.compact
+    points_data = points.map { prepare_point(_1) }.compact
+
+    bulk_insert_points(points_data)
   end

   private
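A small aside on the normalization this hunk keeps: Hash.from_xml yields a Hash for a single <trk> element and an Array for several, hence the ternary. ActiveSupport's Array.wrap expresses the same intent, with one difference worth knowing if 'trk' can be absent:

require 'active_support/core_ext/array/wrap' # already loaded inside Rails

tracks_arr = Array.wrap(json['gpx']['trk'])
# Difference: when json['gpx']['trk'] is nil, Array.wrap returns [],
# while the ternary above returns [nil].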
@@ -1,6 +1,6 @@
 # frozen_string_literal: true

-class SecureFileDownloader
+class Imports::SecureFileDownloader
   DOWNLOAD_TIMEOUT = 300 # 5 minutes timeout
   MAX_RETRIES = 3
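The downloader itself only changes namespace in this commit; its body is outside the diff. A minimal sketch of what download_with_verification plausibly does, inferred from the two constants above and from the spec at the bottom of this page (which builds a Base64-encoded MD5 checksum, the format ActiveStorage records on blobs). This is an assumption for orientation, not the project's actual implementation:

require 'timeout'
require 'digest'
require 'base64'

module Imports
  class SecureFileDownloader
    DOWNLOAD_TIMEOUT = 300 # 5 minutes timeout
    MAX_RETRIES = 3

    def initialize(file)
      @file = file
    end

    def download_with_verification
      attempts = 0
      begin
        content = Timeout.timeout(DOWNLOAD_TIMEOUT) { @file.download }
        verify!(content)
        content
      rescue Timeout::Error
        attempts += 1
        retry if attempts < MAX_RETRIES
        raise
      end
    end

    private

    def verify!(content)
      # Both checks assume ActiveStorage metadata is reachable on the
      # attached file (byte_size and checksum are delegated to the blob).
      raise 'Size mismatch' unless content.bytesize == @file.byte_size

      actual = Base64.strict_encode64(Digest::MD5.digest(content))
      raise 'Checksum mismatch' unless actual == @file.checksum
    end
  end
end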
@@ -11,20 +11,19 @@ class OwnTracks::Importer
   end

   def call
-    import.file.download do |file|
-      parsed_data = OwnTracks::RecParser.new(file).call
-
-      points_data = parsed_data.map do |point|
-        OwnTracks::Params.new(point).call.merge(
-          import_id: import.id,
-          user_id: user_id,
-          created_at: Time.current,
-          updated_at: Time.current
-        )
-      end
-
-      bulk_insert_points(points_data)
-    end
+    file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
+    parsed_data = OwnTracks::RecParser.new(file_content).call
+
+    points_data = parsed_data.map do |point|
+      OwnTracks::Params.new(point).call.merge(
+        import_id: import.id,
+        user_id: user_id,
+        created_at: Time.current,
+        updated_at: Time.current
+      )
+    end
+
+    bulk_insert_points(points_data)
   end

   private
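For context, the .rec files that OwnTracks::RecParser consumes carry one record per line: an ISO 8601 timestamp, a record-type marker, and a JSON payload, tab-separated. A representative line with invented values (the general shape is from the OwnTracks recorder format; field details may vary):

rec_line = %(2024-01-10T12:00:00Z\t*\t{"_type":"location","lat":52.37,"lon":4.89,"tst":1704888000})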
@@ -11,11 +11,10 @@ class Photos::ImportParser
   end

   def call
-    import.file.download do |file|
-      json = Oj.load(file)
-
-      json.each.with_index(1) { |point, index| create_point(point, index) }
-    end
+    file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
+    json = Oj.load(file_content)
+
+    json.each.with_index(1) { |point, index| create_point(point, index) }
   end

   def create_point(point, index)
@@ -79,14 +79,4 @@ RSpec.describe Point, type: :model do
       end
     end
   end
-
-  describe 'callbacks' do
-    describe '#update_import_points_count' do
-      let(:point) { create(:point, import_id: 1) }
-
-      it 'updates the import points count' do
-        expect { point.destroy }.to have_enqueued_job(Import::UpdatePointsCountJob).with(1)
-      end
-    end
-  end
 end
@@ -148,10 +148,10 @@ RSpec.describe GoogleMaps::RecordsStorageImporter do
       # Create a mock that will return a successful result
      # The internal retries are implemented inside SecureFileDownloader,
       # not in the RecordsStorageImporter
-      downloader = instance_double(SecureFileDownloader)
+      downloader = instance_double(Imports::SecureFileDownloader)

       # Create the downloader mock before it gets used
-      expect(SecureFileDownloader).to receive(:new).with(import.file).and_return(downloader)
+      expect(Imports::SecureFileDownloader).to receive(:new).with(import.file).and_return(downloader)

       # The SecureFileDownloader handles all the retries internally
       # From the perspective of the importer, it just gets the file content
@@ -164,10 +164,10 @@ RSpec.describe GoogleMaps::RecordsStorageImporter do
     it 'fails after max retries' do
       # The retry mechanism is in SecureFileDownloader, not RecordsStorageImporter
       # So we need to simulate that the method throws the error after internal retries
-      downloader = instance_double(SecureFileDownloader)
+      downloader = instance_double(Imports::SecureFileDownloader)

       # Create the downloader mock before it gets used - expect only one call from the importer
-      expect(SecureFileDownloader).to receive(:new).with(import.file).and_return(downloader)
+      expect(Imports::SecureFileDownloader).to receive(:new).with(import.file).and_return(downloader)

       # This should be called once, and the internal retries should have been attempted
       # After the max retries, it will still raise the Timeout::Error that bubbles up
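Both hunks above only set up the double; the stubbed return values sit outside the shown context. The usual completion of such a setup, using standard rspec-mocks calls (hypothetical lines, not part of this diff):

# Success path (first hunk): the double hands back the file content.
allow(downloader).to receive(:download_with_verification).and_return(file_content)

# Failure path (second hunk): the error the downloader raises once its
# internal retries are exhausted bubbles up to the importer.
allow(downloader).to receive(:download_with_verification).and_raise(Timeout::Error)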
@@ -2,7 +2,7 @@

 require 'rails_helper'

-RSpec.describe SecureFileDownloader do
+RSpec.describe Imports::SecureFileDownloader do
   let(:file_content) { 'test content' }
   let(:file_size) { file_content.bytesize }
   let(:checksum) { Base64.strict_encode64(Digest::MD5.digest(file_content)) }
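A plausible example built on those lets, assuming the downloader reads size and checksum off the attached file as sketched earlier (hypothetical spec body; a plain double sidesteps verification of ActiveStorage's delegated methods):

let(:file) do
  double('attached file',
         download: file_content,
         byte_size: file_size,
         checksum: checksum)
end

it 'returns the verified content' do
  expect(described_class.new(file).download_with_verification).to eq(file_content)
end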