Use Active Storage to import GPX files

This commit is contained in:
Eugene Burmakin 2025-03-23 18:37:10 +01:00
parent b1a0aa2179
commit a93b49ee80
20 changed files with 74 additions and 88 deletions

View file

@@ -27,7 +27,6 @@ gem 'rgeo'
gem 'rgeo-activerecord' gem 'rgeo-activerecord'
gem 'rswag-api' gem 'rswag-api'
gem 'rswag-ui' gem 'rswag-ui'
gem 'shrine', '~> 3.6'
gem 'sidekiq' gem 'sidekiq'
gem 'sidekiq-cron' gem 'sidekiq-cron'
gem 'sidekiq-limit_fetch' gem 'sidekiq-limit_fetch'

View file

@@ -29,20 +29,12 @@ class ImportsController < ApplicationController
source: params[:import][:source] source: params[:import][:source]
) )
file = File.read(file) import.file.attach(file)
raw_data =
case params[:import][:source]
when 'gpx' then Hash.from_xml(file)
when 'owntracks' then OwnTracks::RecParser.new(file).call
else JSON.parse(file)
end
import.update(raw_data:)
import.id import.id
end end
import_ids.each { ImportJob.perform_later(current_user.id, _1) } import_ids.each { Import::ProcessJob.perform_later(_1) }
redirect_to imports_url, notice: "#{files.size} files are queued to be imported in background", status: :see_other redirect_to imports_url, notice: "#{files.size} files are queued to be imported in background", status: :see_other
rescue StandardError => e rescue StandardError => e

View file

@@ -0,0 +1,11 @@
# frozen_string_literal: true
# Background job that processes a previously created Import record.
# Introduced in this commit as the replacement for the deleted ImportJob:
# the Import now carries its own Active Storage attachment (`has_one_attached
# :file` on Import), so only the import id is needed — no user id.
class Import::ProcessJob < ApplicationJob
# Runs on the dedicated :imports queue so import work can be scaled/throttled
# independently of other jobs.
queue_as :imports
# @param import_id [Integer] id of the Import to process
# Raises ActiveRecord::RecordNotFound if the import was deleted in the
# meantime (job then fails/retries per ActiveJob configuration).
def perform(import_id)
import = Import.find(import_id)
import.process!
end
end

View file

@@ -1,12 +0,0 @@
# frozen_string_literal: true
# Legacy import job, deleted by this commit in favour of Import::ProcessJob.
# Took both a user id and an import id because the import's raw data lived on
# the record itself rather than on an Active Storage attachment.
class ImportJob < ApplicationJob
# Same dedicated queue the replacement job uses.
queue_as :imports
# @param user_id [Integer] owner of the import
# @param import_id [Integer] id of the Import to process
# Looks the import up through user.imports so a mismatched user/import pair
# raises ActiveRecord::RecordNotFound instead of processing another user's data.
def perform(user_id, import_id)
user = User.find(user_id)
import = user.imports.find(import_id)
import.process!
end
end

View file

@@ -6,7 +6,9 @@ class Import < ApplicationRecord
delegate :count, to: :points, prefix: true delegate :count, to: :points, prefix: true
include ImportUploader::Attachment(:raw) has_one_attached :file
after_commit -> { Import::ProcessJob.perform_later(id) }, on: :create
enum :source, { enum :source, {
google_semantic_history: 0, owntracks: 1, google_records: 2, google_semantic_history: 0, owntracks: 1, google_records: 2,

View file

@@ -15,6 +15,8 @@ class User < ApplicationRecord
has_many :places, through: :visits has_many :places, through: :visits
has_many :trips, dependent: :destroy has_many :trips, dependent: :destroy
has_many_attached :import_files
after_create :create_api_key after_create :create_api_key
after_create :import_sample_points after_create :import_sample_points
after_commit :activate, on: :create, if: -> { DawarichSettings.self_hosted? } after_commit :activate, on: :create, if: -> { DawarichSettings.self_hosted? }
@@ -123,16 +125,15 @@ class User < ApplicationRecord
Rails.env.production? || Rails.env.production? ||
(Rails.env.test? && ENV['IMPORT_SAMPLE_POINTS']) (Rails.env.test? && ENV['IMPORT_SAMPLE_POINTS'])
raw_data = Hash.from_xml(
File.read(Rails.root.join('lib/assets/sample_points.gpx'))
)
import = imports.create( import = imports.create(
name: 'DELETE_ME_this_is_a_demo_import_DELETE_ME', name: 'DELETE_ME_this_is_a_demo_import_DELETE_ME',
source: 'gpx', source: 'gpx'
raw_data:
) )
ImportJob.perform_later(id, import.id) import.file.attach(
Rack::Test::UploadedFile.new(
Rails.root.join('lib/assets/sample_points.gpx'), 'application/xml'
)
)
end end
end end

View file

@@ -3,23 +3,26 @@
class Gpx::TrackImporter class Gpx::TrackImporter
include Imports::Broadcaster include Imports::Broadcaster
attr_reader :import, :json, :user_id attr_reader :import, :user_id
def initialize(import, user_id) def initialize(import, user_id)
@import = import @import = import
@json = import.raw_data
@user_id = user_id @user_id = user_id
end end
def call def call
import.file.open do |file|
json = Hash.from_xml(file)
tracks = json['gpx']['trk'] tracks = json['gpx']['trk']
tracks_arr = tracks.is_a?(Array) ? tracks : [tracks] tracks_arr = tracks.is_a?(Array) ? tracks : [tracks]
points = tracks_arr.map { parse_track(_1) }.flatten.compact points = tracks_arr.map { parse_track(_1) }.flatten.compact
points_data = points.map.with_index(1) { |point, index| prepare_point(point, index) }.compact points_data = points.map { prepare_point(_1) }.compact
bulk_insert_points(points_data) bulk_insert_points(points_data)
end end
end
private private
@@ -32,7 +35,7 @@ class Gpx::TrackImporter
segments_array.compact.map { |segment| segment['trkpt'] } segments_array.compact.map { |segment| segment['trkpt'] }
end end
def prepare_point(point, index) def prepare_point(point)
return if point['lat'].blank? || point['lon'].blank? || point['time'].blank? return if point['lat'].blank? || point['lon'].blank? || point['time'].blank?
{ {

View file

@@ -23,7 +23,7 @@ class Immich::ImportGeodata
import.raw_data = immich_data_json import.raw_data = immich_data_json
import.save! import.save!
ImportJob.perform_later(user.id, import.id) Import::ProcessJob.perform_later(import.id)
end end
private private

View file

@@ -51,7 +51,7 @@ class Imports::Watcher
import.save! import.save!
ImportJob.perform_later(user.id, import.id) Import::ProcessJob.perform_later(import.id)
end end
def find_or_initialize_import(user, file_name) def find_or_initialize_import(user, file_name)

View file

@@ -24,7 +24,7 @@ class Photoprism::ImportGeodata
return create_import_failed_notification(import.name) unless import.new_record? return create_import_failed_notification(import.name) unless import.new_record?
import.update!(raw_data: json_data) import.update!(raw_data: json_data)
ImportJob.perform_later(user.id, import.id) Import::ProcessJob.perform_later(import.id)
end end
def find_or_create_import(json_data) def find_or_create_import(json_data)

View file

@@ -1,5 +0,0 @@
# frozen_string_literal: true
# Shrine-based uploader, deleted by this commit: file attachment moved to
# Active Storage (`has_one_attached :file` on Import), making Shrine and this
# uploader class unnecessary. The class body was only ever a stub.
class ImportUploader < Shrine
# plugins and uploading logic
end

View file

@@ -1,13 +0,0 @@
# frozen_string_literal: true
# Shrine initializer, deleted by this commit alongside the Shrine gem and
# ImportUploader: uploads now go through Active Storage instead. Configured
# local filesystem storage under public/uploads with a two-phase
# cache (form upload) -> store (promoted on save) layout.
require 'shrine'
require 'shrine/storage/file_system'
Shrine.storages = {
cache: Shrine::Storage::FileSystem.new('public', prefix: 'uploads/cache'), # temporary
store: Shrine::Storage::FileSystem.new('public', prefix: 'uploads') # permanent
}
Shrine.plugin :activerecord # loads Active Record integration
Shrine.plugin :cached_attachment_data # enables retaining cached file across form redisplays
Shrine.plugin :restore_cached_data # extracts metadata for assigned cached files

View file

@@ -1,3 +1,5 @@
# frozen_string_literal: true
# This migration comes from active_storage (originally 20170806125915) # This migration comes from active_storage (originally 20170806125915)
class CreateActiveStorageTables < ActiveRecord::Migration[7.0] class CreateActiveStorageTables < ActiveRecord::Migration[7.0]
def change def change
@@ -33,7 +35,8 @@ class CreateActiveStorageTables < ActiveRecord::Migration[7.0]
t.datetime :created_at, null: false t.datetime :created_at, null: false
end end
t.index [ :record_type, :record_id, :name, :blob_id ], name: :index_active_storage_attachments_uniqueness, unique: true t.index %i[record_type record_id name blob_id], name: :index_active_storage_attachments_uniqueness,
unique: true
t.foreign_key :active_storage_blobs, column: :blob_id t.foreign_key :active_storage_blobs, column: :blob_id
end end
@@ -41,12 +44,13 @@ class CreateActiveStorageTables < ActiveRecord::Migration[7.0]
t.belongs_to :blob, null: false, index: false, type: foreign_key_type t.belongs_to :blob, null: false, index: false, type: foreign_key_type
t.string :variation_digest, null: false t.string :variation_digest, null: false
t.index [ :blob_id, :variation_digest ], name: :index_active_storage_variant_records_uniqueness, unique: true t.index %i[blob_id variation_digest], name: :index_active_storage_variant_records_uniqueness, unique: true
t.foreign_key :active_storage_blobs, column: :blob_id t.foreign_key :active_storage_blobs, column: :blob_id
end end
end end
private private
def primary_and_foreign_key_types def primary_and_foreign_key_types
config = Rails.configuration.generators config = Rails.configuration.generators
setting = config.options[config.orm][:primary_key_type] setting = config.options[config.orm][:primary_key_type]

View file

@@ -2,9 +2,9 @@
require 'rails_helper' require 'rails_helper'
RSpec.describe ImportJob, type: :job do RSpec.describe Import::ProcessJob, type: :job do
describe '#perform' do describe '#perform' do
subject(:perform) { described_class.new.perform(user.id, import.id) } subject(:perform) { described_class.new.perform(import.id) }
let(:user) { create(:user) } let(:user) { create(:user) }
let!(:import) { create(:import, user:, name: 'owntracks_export.json') } let!(:import) { create(:import, user:, name: 'owntracks_export.json') }

View file

@@ -72,7 +72,7 @@ RSpec.describe User, type: :model do
expect(user.imports.first.name).to eq('DELETE_ME_this_is_a_demo_import_DELETE_ME') expect(user.imports.first.name).to eq('DELETE_ME_this_is_a_demo_import_DELETE_ME')
expect(user.imports.first.source).to eq('gpx') expect(user.imports.first.source).to eq('gpx')
expect(ImportJob).to have_been_enqueued.with(user.id, user.imports.first.id) expect(Import::ProcessJob).to have_been_enqueued.with(user.imports.first.id)
end end
end end
end end

View file

@@ -46,7 +46,7 @@ RSpec.describe 'Imports', type: :request do
it 'queues import job' do it 'queues import job' do
expect do expect do
post imports_path, params: { import: { source: 'owntracks', files: [file] } } post imports_path, params: { import: { source: 'owntracks', files: [file] } }
end.to have_enqueued_job(ImportJob).on_queue('imports').at_least(1).times end.to have_enqueued_job(Import::ProcessJob).on_queue('imports').at_least(1).times
end end
it 'creates a new import' do it 'creates a new import' do
@@ -64,7 +64,7 @@ RSpec.describe 'Imports', type: :request do
it 'queues import job' do it 'queues import job' do
expect do expect do
post imports_path, params: { import: { source: 'gpx', files: [file] } } post imports_path, params: { import: { source: 'gpx', files: [file] } }
end.to have_enqueued_job(ImportJob).on_queue('imports').at_least(1).times end.to have_enqueued_job(Import::ProcessJob).on_queue('imports').at_least(1).times
end end
it 'creates a new import' do it 'creates a new import' do

View file

@@ -8,8 +8,12 @@ RSpec.describe Gpx::TrackImporter do
let(:user) { create(:user) } let(:user) { create(:user) }
let(:file_path) { Rails.root.join('spec/fixtures/files/gpx/gpx_track_single_segment.gpx') } let(:file_path) { Rails.root.join('spec/fixtures/files/gpx/gpx_track_single_segment.gpx') }
let(:raw_data) { Hash.from_xml(File.read(file_path)) } let(:file) { Rack::Test::UploadedFile.new(file_path, 'application/xml') }
let(:import) { create(:import, user:, name: 'gpx_track.gpx', raw_data:) } let(:import) { create(:import, user:, name: 'gpx_track.gpx', source: 'gpx') }
before do
import.file.attach(file)
end
context 'when file has a single segment' do context 'when file has a single segment' do
it 'creates points' do it 'creates points' do

View file

@@ -88,8 +88,8 @@ RSpec.describe Immich::ImportGeodata do
expect { service }.to change { Import.count }.by(1) expect { service }.to change { Import.count }.by(1)
end end
it 'enqueues ImportJob' do it 'enqueues Import::ProcessJob' do
expect(ImportJob).to receive(:perform_later) expect(Import::ProcessJob).to receive(:perform_later)
service service
end end
@@ -101,8 +101,8 @@ RSpec.describe Immich::ImportGeodata do
expect { service }.not_to(change { Import.count }) expect { service }.not_to(change { Import.count })
end end
it 'does not enqueue ImportJob' do it 'does not enqueue Import::ProcessJob' do
expect(ImportJob).to_not receive(:perform_later) expect(Import::ProcessJob).to_not receive(:perform_later)
service service
end end

View file

@@ -23,7 +23,7 @@ RSpec.describe Imports::Watcher do
end end
it 'enqueues importing jobs for the user' do it 'enqueues importing jobs for the user' do
expect { service }.to have_enqueued_job(ImportJob).exactly(6).times expect { service }.to have_enqueued_job(Import::ProcessJob).exactly(6).times
end end
context 'when the import already exists' do context 'when the import already exists' do
@@ -41,8 +41,8 @@ RSpec.describe Imports::Watcher do
end end
context 'when user does not exist' do context 'when user does not exist' do
it 'does not call ImportJob' do it 'does not call Import::ProcessJob' do
expect(ImportJob).not_to receive(:perform_later) expect(Import::ProcessJob).not_to receive(:perform_later)
service service
end end

View file

@@ -154,8 +154,8 @@ RSpec.describe Photoprism::ImportGeodata do
expect { service }.to change { Import.count }.by(1) expect { service }.to change { Import.count }.by(1)
end end
it 'enqueues ImportJob' do it 'enqueues Import::ProcessJob' do
expect(ImportJob).to receive(:perform_later) expect(Import::ProcessJob).to receive(:perform_later)
service service
end end
@@ -167,8 +167,8 @@ RSpec.describe Photoprism::ImportGeodata do
expect { service }.not_to(change { Import.count }) expect { service }.not_to(change { Import.count })
end end
it 'does not enqueue ImportJob' do it 'does not enqueue Import::ProcessJob' do
expect(ImportJob).to_not receive(:perform_later) expect(Import::ProcessJob).to_not receive(:perform_later)
service service
end end