mirror of https://github.com/Freika/dawarich.git (synced 2026-01-10 01:01:39 -05:00)
Move json loading to a module
This commit is contained in:
parent: ed3bb4fb67
commit: 96c9f1030c
9 changed files with 45 additions and 80 deletions
@@ -104,12 +104,7 @@ class ImportsController < ApplicationController
        import = current_user.imports.build(name: blob.filename.to_s)
        import.file.attach(blob)
 
-        # Auto-detect source if not already set
-        if import.source.blank?
-          detected_source = detect_import_source(import.file)
-          import.source = detected_source if detected_source
-        end
+        import.source = detect_import_source(import.file) if import.source.blank?
 
        import.save!
@@ -117,16 +112,9 @@ class ImportsController < ApplicationController
   end
 
   def detect_import_source(file_attachment)
     # Download file to temporary location for source detection
     temp_file_path = Imports::SecureFileDownloader.new(file_attachment).download_to_temp_file
-
-    # Detect source using optimized file-based detection
-    detector = Imports::SourceDetector.new_from_file(temp_file_path)
-    detected_source = detector.detect_source
-
-    Rails.logger.info "Auto-detected import source: #{detected_source || 'unknown'} for file: #{file_attachment.filename}"
-
-    detected_source
+    Imports::SourceDetector.new_from_file(temp_file_path).detect_source
   rescue StandardError => e
     Rails.logger.warn "Failed to auto-detect import source for #{file_attachment.filename}: #{e.message}"
     nil
@@ -2,6 +2,7 @@
 
 class Geojson::Importer
   include Imports::Broadcaster
+  include Imports::FileLoader
   include PointValidation
 
   attr_reader :import, :user_id, :file_path
@@ -25,15 +26,4 @@ class Geojson::Importer
       broadcast_import_progress(import, index)
     end
   end
-
-  private
-
-  def load_json_data
-    if file_path && File.exist?(file_path)
-      Oj.load_file(file_path, mode: :compat)
-    else
-      file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
-      Oj.load(file_content, mode: :compat)
-    end
-  end
 end
@@ -2,6 +2,7 @@
 
 class GoogleMaps::PhoneTakeoutImporter
   include Imports::Broadcaster
+  include Imports::FileLoader
 
   attr_reader :import, :user_id, :file_path
 
@@ -47,12 +48,7 @@ class GoogleMaps::PhoneTakeoutImporter
     raw_signals = []
     raw_array = []
 
-    json = if file_path && File.exist?(file_path)
-             Oj.load_file(file_path, mode: :compat)
-           else
-             file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
-             Oj.load(file_content, mode: :compat)
-           end
+    json = load_json_data
 
     if json.is_a?(Array)
       raw_array = parse_raw_array(json)
@@ -4,6 +4,8 @@
 # via the UI, vs the CLI, which uses the `GoogleMaps::RecordsImporter` class.
 
 class GoogleMaps::RecordsStorageImporter
+  include Imports::FileLoader
+
   BATCH_SIZE = 1000
 
   def initialize(import, user_id, file_path = nil)
@@ -24,31 +26,13 @@ class GoogleMaps::RecordsStorageImporter
   attr_reader :import, :user, :file_path
 
   def process_file_in_batches
-    locations = if file_path && File.exist?(file_path)
-                  # Use streaming for large files
-                  parse_file_from_path(file_path)
-                else
-                  # Fallback to traditional method
-                  file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
-                  parse_file(file_content)
-                end
+    parsed_file = load_json_data
+    return unless parsed_file.is_a?(Hash) && parsed_file['locations']
+
+    locations = parsed_file['locations']
     process_locations_in_batches(locations) if locations.present?
   end
 
-  def parse_file_from_path(file_path)
-    parsed_file = Oj.load_file(file_path, mode: :compat)
-    return nil unless parsed_file.is_a?(Hash) && parsed_file['locations']
-
-    parsed_file['locations']
-  end
-
-  def parse_file(file_content)
-    parsed_file = Oj.load(file_content, mode: :compat)
-    return nil unless parsed_file.is_a?(Hash) && parsed_file['locations']
-
-    parsed_file['locations']
-  end
-
   def process_locations_in_batches(locations)
     batch = []
     index = 0
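The reworked process_file_in_batches above expects the parsed payload to be a Hash with a top-level 'locations' array and returns early otherwise. A small sketch of that guard follows; the sample payload is illustrative, and the fields inside each location entry are not taken from this commit.

# Illustrative only: the importer reads the top-level 'locations' key;
# the entry contents here are placeholders.
payload = { 'locations' => [{ 'placeholder' => true }] }

payload.is_a?(Hash) && payload['locations'] # => truthy, batching proceeds
[].is_a?(Hash)                              # => false, the importer returns early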
@@ -2,6 +2,7 @@
 
 class GoogleMaps::SemanticHistoryImporter
   include Imports::Broadcaster
+  include Imports::FileLoader
 
   BATCH_SIZE = 1000
   attr_reader :import, :user_id, :file_path
@@ -69,16 +70,6 @@ class GoogleMaps::SemanticHistoryImporter
     end.compact
   end
 
-  def load_json_data
-    if file_path && File.exist?(file_path)
-      # Use streaming JSON loading for better memory efficiency
-      Oj.load_file(file_path, mode: :compat)
-    else
-      # Fallback to traditional method
-      file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
-      Oj.load(file_content, mode: :compat)
-    end
-  end
-
   def parse_timeline_object(timeline_object)
     if timeline_object['activitySegment'].present?
@@ -4,6 +4,7 @@ require 'rexml/document'
 
 class Gpx::TrackImporter
   include Imports::Broadcaster
+  include Imports::FileLoader
 
   attr_reader :import, :user_id, :file_path
 
@@ -14,11 +15,7 @@ class Gpx::TrackImporter
   end
 
   def call
-    file_content = if file_path && File.exist?(file_path)
-                     File.read(file_path)
-                   else
-                     Imports::SecureFileDownloader.new(import.file).download_with_verification
-                   end
+    file_content = load_file_content
     json = Hash.from_xml(file_content)
 
     tracks = json['gpx']['trk']
app/services/imports/file_loader.rb (new file, 26 lines)

@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module Imports
+  module FileLoader
+    extend ActiveSupport::Concern
+
+    private
+
+    def load_json_data
+      if file_path && File.exist?(file_path)
+        Oj.load_file(file_path, mode: :compat)
+      else
+        file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
+        Oj.load(file_content, mode: :compat)
+      end
+    end
+
+    def load_file_content
+      if file_path && File.exist?(file_path)
+        File.read(file_path)
+      else
+        Imports::SecureFileDownloader.new(import.file).download_with_verification
+      end
+    end
+  end
+end
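For reference, a minimal sketch of how an importer consumes the new concern; the ExampleImporter class below is illustrative and not part of this commit. The only contract Imports::FileLoader relies on is that the including class exposes an import with an attached file and a file_path reader, exactly as the importers touched above do.

# Illustrative only (not from the repository): a bare importer wired up the
# same way as the classes changed in this commit, assuming Rails autoloading.
class ExampleImporter
  include Imports::FileLoader

  attr_reader :import, :user_id, :file_path

  def initialize(import, user_id, file_path = nil)
    @import = import
    @user_id = user_id
    @file_path = file_path
  end

  def call
    # load_json_data uses Oj.load_file when file_path points at an existing
    # file, and otherwise downloads the attached blob and parses it in memory.
    load_json_data
  end
end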
@@ -2,6 +2,7 @@
 
 class OwnTracks::Importer
   include Imports::Broadcaster
+  include Imports::FileLoader
 
   attr_reader :import, :user_id, :file_path
 
@@ -12,11 +13,7 @@ class OwnTracks::Importer
   end
 
   def call
-    file_content = if file_path && File.exist?(file_path)
-                     File.read(file_path)
-                   else
-                     Imports::SecureFileDownloader.new(import.file).download_with_verification
-                   end
+    file_content = load_file_content
     parsed_data = OwnTracks::RecParser.new(file_content).call
 
     points_data = parsed_data.map do |point|
@@ -2,6 +2,7 @@
 
 class Photos::Importer
   include Imports::Broadcaster
+  include Imports::FileLoader
   include PointValidation
   attr_reader :import, :user_id, :file_path
 
@@ -12,12 +13,7 @@ class Photos::Importer
   end
 
   def call
-    json = if file_path && File.exist?(file_path)
-             Oj.load_file(file_path, mode: :compat)
-           else
-             file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
-             Oj.load(file_content, mode: :compat)
-           end
+    json = load_json_data
 
     json.each.with_index(1) { |point, index| create_point(point, index) }
   end