Merge pull request #980 from Freika/feature/bucket-import-export

Feature/bucket import export
Evgenii Burmakin 2025-04-02 21:03:49 +02:00 committed by GitHub
commit 6b473edb86
68 changed files with 556 additions and 406 deletions

View file

@@ -4,13 +4,29 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](http://keepachangelog.com/)
 and this project adheres to [Semantic Versioning](http://semver.org/).
 
-# 0.25.4 - 2025-03-24
+# 0.25.4 - 2025-04-02
+
+In this release we're changing the way import files are stored. Previously, they were kept in the `raw_data` column of the `imports` table; now they are attached to the import record. All new imports use the new storage. To migrate existing imports, run the `rake imports:migrate_to_new_storage` task in the container shell.
+
+This task is optional and will not affect your points or other data.
+
+Big imports might take a while to migrate, so be patient.
+
+If your hardware doesn't have enough memory to migrate the imports, you can delete your imports and re-import them.
+
+## Changed
+
+- Import files are now attached to the import record instead of being stored in the `raw_data` database column.
+- Import files can now be stored in S3-compatible storage.
+- Export files are now attached to the export record instead of being stored in the file system.
+- Export files can now be stored in S3-compatible storage.
 
 ## Fixed
 
 - Moving points on the map now works correctly. #957
 - `rake points:migrate_to_lonlat` task now also reindexes the points table.
 
 # 0.25.3 - 2025-03-22
 
 ## Fixed
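
For reference, the migration task (added in `lib/tasks/imports.rake` further down in this diff) is a thin loop over `Import#migrate_to_new_storage`. A minimal sketch of what it does, assuming you run it from a shell inside the app container:

Import.find_each do |import|
  # Attach the legacy raw_data payload as an Active Storage file;
  # imports that already have a file attached are skipped.
  import.migrate_to_new_storage
rescue StandardError => e
  puts "Error migrating import #{import.id}: #{e.message}"
end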

View file

@@ -5,6 +5,10 @@ git_source(:github) { |repo| "https://github.com/#{repo}.git" }
 ruby File.read('.ruby-version').strip
 
+# https://meta.discourse.org/t/cant-rebuild-due-to-aws-sdk-gem-bump-and-new-aws-data-integrity-protections/354217/40
+gem 'aws-sdk-s3', '~> 1.177.0', require: false
+gem 'aws-sdk-core', '~> 3.215.1', require: false
+gem 'aws-sdk-kms', '~> 1.96.0', require: false
 gem 'bootsnap', require: false
 gem 'chartkick'
 gem 'data_migrate'
@@ -27,7 +31,6 @@ gem 'rgeo'
 gem 'rgeo-activerecord'
 gem 'rswag-api'
 gem 'rswag-ui'
-gem 'shrine', '~> 3.6'
 gem 'sidekiq'
 gem 'sidekiq-cron'
 gem 'sidekiq-limit_fetch'

View file

@@ -79,6 +79,22 @@ GEM
       public_suffix (>= 2.0.2, < 7.0)
     ast (2.4.2)
     attr_extras (7.1.0)
+    aws-eventstream (1.3.2)
+    aws-partitions (1.1072.0)
+    aws-sdk-core (3.215.1)
+      aws-eventstream (~> 1, >= 1.3.0)
+      aws-partitions (~> 1, >= 1.992.0)
+      aws-sigv4 (~> 1.9)
+      jmespath (~> 1, >= 1.6.1)
+    aws-sdk-kms (1.96.0)
+      aws-sdk-core (~> 3, >= 3.210.0)
+      aws-sigv4 (~> 1.5)
+    aws-sdk-s3 (1.177.0)
+      aws-sdk-core (~> 3, >= 3.210.0)
+      aws-sdk-kms (~> 1)
+      aws-sigv4 (~> 1.5)
+    aws-sigv4 (1.11.0)
+      aws-eventstream (~> 1, >= 1.0.2)
     base64 (0.2.0)
     bcrypt (3.1.20)
     benchmark (0.4.0)
@@ -91,7 +107,6 @@ GEM
     coderay (1.1.3)
     concurrent-ruby (1.3.5)
     connection_pool (2.5.0)
-    content_disposition (1.0.0)
     crack (1.0.0)
       bigdecimal
       rexml
@@ -121,8 +136,6 @@ GEM
     dotenv-rails (3.1.7)
       dotenv (= 3.1.7)
       railties (>= 6.1)
-    down (5.4.2)
-      addressable (~> 2.8)
     drb (2.2.1)
     erubi (1.13.1)
     et-orbi (1.2.11)
@@ -164,6 +177,7 @@ GEM
       pp (>= 0.6.0)
       rdoc (>= 4.0.0)
       reline (>= 0.4.2)
+    jmespath (1.6.2)
     json (2.10.1)
     json-schema (5.0.1)
       addressable (~> 2.8)
@@ -371,9 +385,6 @@ GEM
     securerandom (0.4.1)
     shoulda-matchers (6.4.0)
       activesupport (>= 5.2.0)
-    shrine (3.6.0)
-      content_disposition (~> 1.0)
-      down (~> 5.1)
     sidekiq (7.3.9)
       base64
       connection_pool (>= 2.3.0)
@@ -453,6 +464,9 @@ PLATFORMS
 
 DEPENDENCIES
   activerecord-postgis-adapter
+  aws-sdk-core (~> 3.215.1)
+  aws-sdk-kms (~> 1.96.0)
+  aws-sdk-s3 (~> 1.177.0)
   bootsnap
   chartkick
   data_migrate
@@ -488,7 +502,6 @@ DEPENDENCIES
   rswag-ui
   rubocop-rails
   shoulda-matchers
-  shrine (~> 3.6)
   sidekiq
   sidekiq-cron
   sidekiq-limit_fetch

View file

@@ -11,9 +11,13 @@ class ExportsController < ApplicationController
   def create
     export_name =
       "export_from_#{params[:start_at].to_date}_to_#{params[:end_at].to_date}.#{params[:file_format]}"
-    export = current_user.exports.create(name: export_name, status: :created)
-
-    ExportJob.perform_later(export.id, params[:start_at], params[:end_at], file_format: params[:file_format])
+    export = current_user.exports.create(
+      name: export_name,
+      status: :created,
+      format: params[:file_format],
+      start_at: params[:start_at],
+      end_at: params[:end_at]
+    )
 
     redirect_to exports_url, notice: 'Export was successfully initiated. Please wait until it\'s finished.'
   rescue StandardError => e
@@ -23,11 +27,7 @@ class ExportsController < ApplicationController
   end
 
   def destroy
-    ActiveRecord::Base.transaction do
-      @export.destroy
-
-      File.delete(Rails.root.join('public', 'exports', @export.name))
-    end
+    @export.destroy
 
     redirect_to exports_url, notice: 'Export was successfully destroyed.', status: :see_other
   end
@@ -37,8 +37,4 @@ class ExportsController < ApplicationController
   def set_export
     @export = current_user.exports.find(params[:id])
   end
-
-  def export_params
-    params.require(:export).permit(:name, :url, :status)
-  end
 end

View file

@@ -23,27 +23,17 @@ class ImportsController < ApplicationController
   def create
     files = import_params[:files].reject(&:blank?)
 
-    import_ids = files.map do |file|
-      import = current_user.imports.create(
+    files.each do |file|
+      import = current_user.imports.build(
         name: file.original_filename,
         source: params[:import][:source]
       )
 
-      file = File.read(file)
-      raw_data =
-        case params[:import][:source]
-        when 'gpx' then Hash.from_xml(file)
-        when 'owntracks' then OwnTracks::RecParser.new(file).call
-        else JSON.parse(file)
-        end
-
-      import.update(raw_data:)
-
-      import.id
+      import.file.attach(io: file, filename: file.original_filename, content_type: file.content_type)
+      import.save!
     end
 
-    import_ids.each { ImportJob.perform_later(current_user.id, _1) }
-
     redirect_to imports_url, notice: "#{files.size} files are queued to be imported in background", status: :see_other
   rescue StandardError => e
     Import.where(user: current_user, name: files.map(&:original_filename)).destroy_all

View file

@@ -3,9 +3,9 @@
 class ExportJob < ApplicationJob
   queue_as :exports
 
-  def perform(export_id, start_at, end_at, file_format: :json)
+  def perform(export_id)
     export = Export.find(export_id)
 
-    Exports::Create.new(export:, start_at:, end_at:, file_format:).call
+    Exports::Create.new(export:).call
   end
 end

View file

@@ -0,0 +1,11 @@
# frozen_string_literal: true

class Import::ProcessJob < ApplicationJob
  queue_as :imports

  def perform(import_id)
    import = Import.find(import_id)

    import.process!
  end
end

View file

@ -1,12 +0,0 @@
# frozen_string_literal: true
class ImportJob < ApplicationJob
queue_as :imports
def perform(user_id, import_id)
user = User.find(user_id)
import = user.imports.find(import_id)
import.process!
end
end

View file

@@ -4,16 +4,16 @@ class Export < ApplicationRecord
   belongs_to :user
 
   enum :status, { created: 0, processing: 1, completed: 2, failed: 3 }
+  enum :format, { json: 0, gpx: 1 }
 
   validates :name, presence: true
 
-  before_destroy :delete_export_file
+  has_one_attached :file
 
-  private
+  after_commit -> { ExportJob.perform_later(id) }, on: :create
+  after_commit -> { file.purge }, on: :destroy
 
-  def delete_export_file
-    file_path = Rails.root.join('public', 'exports', "#{name}.json")
-
-    File.delete(file_path) if File.exist?(file_path)
+  def process!
+    Exports::Create.new(export: self).call
   end
 end
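
With these `after_commit` callbacks, callers no longer enqueue `ExportJob` themselves. A hypothetical console session illustrating the new lifecycle (the attribute values are made up):

export = user.exports.create!(
  name: 'export_from_2021-01-01_to_2021-01-02.json',
  status: :created,
  format: :json,
  start_at: '2021-01-01',
  end_at: '2021-01-02'
)
# The :create commit callback enqueues ExportJob.perform_later(export.id),
# which runs Exports::Create and attaches the generated file to export.file.

export.destroy # The :destroy commit callback purges the attached file.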

View file

@@ -6,7 +6,10 @@ class Import < ApplicationRecord
   delegate :count, to: :points, prefix: true
 
-  include ImportUploader::Attachment(:raw)
+  has_one_attached :file
+
+  after_commit -> { Import::ProcessJob.perform_later(id) }, on: :create
+  after_commit -> { file.purge }, on: :destroy
 
   enum :source, {
     google_semantic_history: 0, owntracks: 1, google_records: 2,
@@ -27,4 +30,12 @@ class Import < ApplicationRecord
       [time.year, time.month]
     end.uniq
   end
+
+  def migrate_to_new_storage
+    return if file.attached?
+
+    raw_file = File.new(raw_data)
+
+    file.attach(io: raw_file, filename: name, content_type: 'application/json')
+  end
 end
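
Imports follow the same pattern: building a record, attaching a file, and saving is enough, because the commit callback enqueues the processing job. A hypothetical sketch (the path and content type are illustrative):

import = user.imports.build(name: 'points.geojson', source: 'geojson')
import.file.attach(
  io: File.open('/tmp/points.geojson'), # hypothetical local file
  filename: 'points.geojson',
  content_type: 'application/json'
)
import.save! # enqueues Import::ProcessJob.perform_later(import.id) after commit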

View file

@@ -118,21 +118,22 @@ class User < ApplicationRecord
     settings.try(:[], 'maps')&.try(:[], 'url')&.strip!
   end
 
+  # rubocop:disable Metrics/MethodLength
   def import_sample_points
     return unless Rails.env.development? ||
                   Rails.env.production? ||
                   (Rails.env.test? && ENV['IMPORT_SAMPLE_POINTS'])
 
-    raw_data = Hash.from_xml(
-      File.read(Rails.root.join('lib/assets/sample_points.gpx'))
-    )
-
     import = imports.create(
       name: 'DELETE_ME_this_is_a_demo_import_DELETE_ME',
-      source: 'gpx',
-      raw_data:
+      source: 'gpx'
     )
 
-    ImportJob.perform_later(id, import.id)
+    import.file.attach(
+      Rack::Test::UploadedFile.new(
+        Rails.root.join('lib/assets/sample_points.gpx'), 'application/xml'
+      )
+    )
   end
+  # rubocop:enable Metrics/MethodLength
 end

View file

@@ -1,28 +1,30 @@
 # frozen_string_literal: true
 
 class Exports::Create
-  def initialize(export:, start_at:, end_at:, file_format: :json)
+  def initialize(export:)
     @export = export
     @user = export.user
-    @start_at = start_at.to_datetime
-    @end_at = end_at.to_datetime
-    @file_format = file_format
+    @start_at = export.start_at
+    @end_at = export.end_at
+    @file_format = export.format
   end
 
   def call
-    export.update!(status: :processing)
+    ActiveRecord::Base.transaction do
+      export.update!(status: :processing)
 
-    points = time_framed_points
-    data = points_data(points)
+      points = time_framed_points
+      data = points_data(points)
 
-    create_export_file(data)
+      attach_export_file(data)
 
-    export.update!(status: :completed, url: "exports/#{export.name}")
+      export.update!(status: :completed)
 
-    create_export_finished_notification
+      notify_export_finished
+    end
   rescue StandardError => e
-    create_failed_export_notification(e)
+    notify_export_failed(e)
     export.update!(status: :failed)
   end
 
@@ -38,7 +40,7 @@ class Exports::Create
       .order(timestamp: :asc)
   end
 
-  def create_export_finished_notification
+  def notify_export_finished
     Notifications::Create.new(
       user:,
       kind: :info,
@@ -47,7 +49,7 @@ class Exports::Create
     ).call
   end
 
-  def create_failed_export_notification(error)
+  def notify_export_failed(error)
     Notifications::Create.new(
       user:,
       kind: :error,
@@ -72,18 +74,18 @@ class Exports::Create
     Points::GpxSerializer.new(points, export.name).call
   end
 
-  def create_export_file(data)
-    dir_path = Rails.root.join('public/exports')
-    FileUtils.mkdir_p(dir_path) unless Dir.exist?(dir_path)
-
-    file_path = dir_path.join(export.name)
-
-    Rails.logger.info("Creating export file at: #{file_path}")
-
-    File.open(file_path, 'w') { |file| file.write(data) }
+  def attach_export_file(data)
+    export.file.attach(io: StringIO.new(data.to_s), filename: export.name, content_type:)
   rescue StandardError => e
     Rails.logger.error("Failed to create export file: #{e.message}")
 
     raise
   end
+
+  def content_type
+    case file_format.to_sym
+    when :json then 'application/json'
+    when :gpx then 'application/gpx+xml'
+    else raise ArgumentError, "Unsupported file format: #{file_format}"
+    end
+  end
 end

View file

@@ -2,34 +2,28 @@
 class Geojson::ImportParser
   include Imports::Broadcaster
+  include PointValidation
 
-  attr_reader :import, :json, :user_id
+  attr_reader :import, :user_id
 
   def initialize(import, user_id)
     @import = import
-    @json = import.raw_data
     @user_id = user_id
   end
 
   def call
-    data = Geojson::Params.new(json).call
+    import.file.download do |file|
+      json = Oj.load(file)
 
-    data.each.with_index(1) do |point, index|
-      next if point_exists?(point, user_id)
+      data = Geojson::Params.new(json).call
 
-      Point.create!(point.merge(user_id:, import_id: import.id))
+      data.each.with_index(1) do |point, index|
+        next if point_exists?(point, user_id)
 
-      broadcast_import_progress(import, index)
+        Point.create!(point.merge(user_id:, import_id: import.id))
+
+        broadcast_import_progress(import, index)
+      end
     end
   end
-
-  private
-
-  def point_exists?(params, user_id)
-    Point.exists?(
-      lonlat: params[:lonlat],
-      timestamp: params[:timestamp],
-      user_id:
-    )
-  end
 end

View file

@@ -48,13 +48,15 @@ class GoogleMaps::PhoneTakeoutParser
     raw_signals = []
     raw_array = []
 
-    if import.raw_data.is_a?(Array)
-      raw_array = parse_raw_array(import.raw_data)
-    else
-      if import.raw_data['semanticSegments']
-        semantic_segments = parse_semantic_segments(import.raw_data['semanticSegments'])
-      end
+    import.file.download do |file|
+      json = Oj.load(file)
 
-      raw_signals = parse_raw_signals(import.raw_data['rawSignals']) if import.raw_data['rawSignals']
+      if json.is_a?(Array)
+        raw_array = parse_raw_array(json)
+      else
+        semantic_segments = parse_semantic_segments(json['semanticSegments']) if json['semanticSegments']
+        raw_signals = parse_raw_signals(json['rawSignals']) if json['rawSignals']
+      end
     end
 
     semantic_segments + raw_signals + raw_array

View file

@@ -13,8 +13,6 @@ class GoogleMaps::SemanticHistoryParser
   end
 
   def call
-    points_data = parse_json
-
     points_data.each_slice(BATCH_SIZE) do |batch|
       @current_index += batch.size
       process_batch(batch)
@@ -62,10 +60,18 @@ class GoogleMaps::SemanticHistoryParser
     )
   end
 
-  def parse_json
-    import.raw_data['timelineObjects'].flat_map do |timeline_object|
-      parse_timeline_object(timeline_object)
-    end.compact
+  def points_data
+    data = nil
+
+    import.file.download do |f|
+      json = Oj.load(f)
+
+      data = json['timelineObjects'].flat_map do |timeline_object|
+        parse_timeline_object(timeline_object)
+      end.compact
+    end
+
+    data
   end
 
   def parse_timeline_object(timeline_object)

View file

@@ -3,22 +3,25 @@
 class Gpx::TrackImporter
   include Imports::Broadcaster
 
-  attr_reader :import, :json, :user_id
+  attr_reader :import, :user_id
 
   def initialize(import, user_id)
     @import = import
-    @json = import.raw_data
     @user_id = user_id
   end
 
   def call
-    tracks = json['gpx']['trk']
-    tracks_arr = tracks.is_a?(Array) ? tracks : [tracks]
+    import.file.download do |file|
+      json = Hash.from_xml(file)
 
-    points = tracks_arr.map { parse_track(_1) }.flatten.compact
-    points_data = points.map.with_index(1) { |point, index| prepare_point(point, index) }.compact
+      tracks = json['gpx']['trk']
+      tracks_arr = tracks.is_a?(Array) ? tracks : [tracks]
 
-    bulk_insert_points(points_data)
+      points = tracks_arr.map { parse_track(_1) }.flatten.compact
+      points_data = points.map { prepare_point(_1) }.compact
+
+      bulk_insert_points(points_data)
+    end
   end
 
   private
@@ -32,7 +35,7 @@ class Gpx::TrackImporter
     segments_array.compact.map { |segment| segment['trkpt'] }
   end
 
-  def prepare_point(point, index)
+  def prepare_point(point)
     return if point['lat'].blank? || point['lon'].blank? || point['time'].blank?
 
     {

View file

@@ -20,10 +20,13 @@ class Immich::ImportGeodata
     create_import_failed_notification(import.name) and return unless import.new_record?
 
-    import.raw_data = immich_data_json
-    import.save!
+    import.file.attach(
+      io: StringIO.new(immich_data_json.to_json),
+      filename: file_name,
+      content_type: 'application/json'
+    )
 
-    ImportJob.perform_later(user.id, import.id)
+    import.save!
   end
 
   private

View file

@@ -14,7 +14,7 @@ class Imports::Create
     create_import_finished_notification(import, user)
 
     schedule_stats_creating(user.id)
-    # schedule_visit_suggesting(user.id, import) # Disabled until places & visits are reworked
+    schedule_visit_suggesting(user.id, import)
   rescue StandardError => e
     create_import_failed_notification(import, user, e)
   end
@@ -44,7 +44,7 @@ class Imports::Create
     start_at = Time.zone.at(points.first.timestamp)
     end_at = Time.zone.at(points.last.timestamp)
 
-    VisitSuggestingJob.perform_later(user_ids: [user_id], start_at:, end_at:)
+    VisitSuggestingJob.perform_later(user_id:, start_at:, end_at:)
   end
 
   def create_import_finished_notification(import, user)

View file

@@ -16,7 +16,7 @@ class Imports::Watcher
         file_names = file_names(user_directory_path)
 
         file_names.each do |file_name|
-          process_file(user, user_directory_path, file_name)
+          create_import(user, user_directory_path, file_name)
         end
       end
     end
@@ -26,49 +26,29 @@ class Imports::Watcher
   def user_directories
     Dir.entries(WATCHED_DIR_PATH).select do |entry|
       path = File.join(WATCHED_DIR_PATH, entry)
 
       File.directory?(path) && !['.', '..'].include?(entry)
     end
   end
 
-  def find_user(file_name)
-    email = file_name.split('_').first
-
-    User.find_by(email:)
-  end
-
   def file_names(directory_path)
     Dir.entries(directory_path).select { |file| SUPPORTED_FORMATS.include?(File.extname(file)) }
   end
 
-  def process_file(user, directory_path, file_name)
+  def create_import(user, directory_path, file_name)
     file_path = File.join(directory_path, file_name)
 
     import = Import.find_or_initialize_by(user:, name: file_name)
 
     return if import.persisted?
 
     import.source = source(file_name)
-    import.raw_data = raw_data(file_path, import.source)
+    import.file.attach(
+      io: File.open(file_path),
+      filename: file_name,
+      content_type: mime_type(import.source)
+    )
 
     import.save!
-
-    ImportJob.perform_later(user.id, import.id)
-  end
-
-  def find_or_initialize_import(user, file_name)
-    import_name = file_name.split('_')[1..].join('_')
-
-    Import.find_or_initialize_by(user:, name: import_name)
-  end
-
-  def set_import_attributes(import, file_path, file_name)
-    source = source(file_name)
-
-    import.source = source
-    import.raw_data = raw_data(file_path, source)
-
-    import.save!
-
-    import.id
   end
 
   def source(file_name)
@@ -89,16 +69,13 @@ class Imports::Watcher
     end
   end
 
-  def raw_data(file_path, source)
-    file = File.read(file_path)
-
+  def mime_type(source)
     case source.to_sym
-    when :gpx
-      Hash.from_xml(file)
+    when :gpx then 'application/xml'
     when :json, :geojson, :google_phone_takeout, :google_records, :google_semantic_history
-      JSON.parse(file)
+      'application/json'
     when :owntracks
-      OwnTracks::RecParser.new(file).call
+      'application/octet-stream'
     else
       raise UnsupportedSourceError, "Unsupported source: #{source}"
     end

View file

@@ -3,25 +3,28 @@
 class OwnTracks::Importer
   include Imports::Broadcaster
 
-  attr_reader :import, :data, :user_id
+  attr_reader :import, :user_id
 
   def initialize(import, user_id)
     @import = import
-    @data = import.raw_data
     @user_id = user_id
   end
 
   def call
-    points_data = data.map.with_index(1) do |point, index|
-      OwnTracks::Params.new(point).call.merge(
-        import_id: import.id,
-        user_id: user_id,
-        created_at: Time.current,
-        updated_at: Time.current
-      )
-    end
+    import.file.download do |file|
+      parsed_data = OwnTracks::RecParser.new(file).call
 
-    bulk_insert_points(points_data)
+      points_data = parsed_data.map do |point|
+        OwnTracks::Params.new(point).call.merge(
+          import_id: import.id,
+          user_id: user_id,
+          created_at: Time.current,
+          updated_at: Time.current
+        )
+      end
+
+      bulk_insert_points(points_data)
+    end
   end
 
   private

View file

@@ -10,11 +10,8 @@ class OwnTracks::RecParser
   def call
     file.split("\n").map do |line|
       parts = line.split("\t")
-      if parts.size > 2 && parts[1].strip == '*'
-        JSON.parse(parts[2])
-      else
-        nil
-      end
+
+      Oj.load(parts[2]) if parts.size > 2 && parts[1].strip == '*'
     end.compact
   end
 end
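
To make the parser's contract concrete, here is a hypothetical OwnTracks `.rec` line (tab-separated: timestamp, a `*` marker, then a JSON payload) and the result; the coordinates are made up:

line = %(2024-03-01T10:00:00Z\t*\t{"_type":"location","lat":52.52,"lon":13.405,"tst":1709287200})

OwnTracks::RecParser.new(line).call
# => [{"_type" => "location", "lat" => 52.52, "lon" => 13.405, "tst" => 1709287200}]
# Lines without the `*` marker (or with fewer than three columns) are dropped by the compact.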

View file

@@ -23,8 +23,13 @@ class Photoprism::ImportGeodata
     import = find_or_create_import(json_data)
     return create_import_failed_notification(import.name) unless import.new_record?
 
-    import.update!(raw_data: json_data)
-    ImportJob.perform_later(user.id, import.id)
+    import.file.attach(
+      io: StringIO.new(json_data.to_json),
+      filename: file_name(json_data),
+      content_type: 'application/json'
+    )
+
+    import.save!
   end
 
   def find_or_create_import(json_data)

View file

@@ -2,7 +2,7 @@
 class Photos::ImportParser
   include Imports::Broadcaster
+  include PointValidation
 
   attr_reader :import, :json, :user_id
 
   def initialize(import, user_id)
@@ -29,12 +29,4 @@ class Photos::ImportParser
 
     broadcast_import_progress(import, index)
   end
-
-  def point_exists?(point, timestamp)
-    Point.exists?(
-      lonlat: "POINT(#{point['longitude']} #{point['latitude']})",
-      timestamp:,
-      user_id:
-    )
-  end
 end
end end

View file

@@ -1,5 +0,0 @@
# frozen_string_literal: true

class ImportUploader < Shrine
  # plugins and uploading logic
end

View file

@@ -41,7 +41,11 @@
         <td><%= export.status %></td>
         <td>
           <% if export.completed? %>
-            <%= link_to 'Download', export.url, class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: export.name %>
+            <% if export.url.present? %>
+              <%= link_to 'Download', export.url, class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: export.name %>
+            <% else %>
+              <%= link_to 'Download', export.file.url, class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: export.name %>
+            <% end %>
           <% end %>
           <%= link_to 'Delete', export, data: { confirm: "Are you sure?", turbo_confirm: "Are you sure?", turbo_method: :delete }, method: :delete, class: "px-4 py-2 bg-red-500 text-white rounded-md" %>
         </td>

View file

@@ -0,0 +1 @@
41976cfff86107bc1bb52cec7d8107b0

View file

@@ -98,4 +98,6 @@ Rails.application.configure do
   config.logger = Logger.new($stdout)
   config.lograge.enabled = true
   config.lograge.formatter = Lograge::Formatters::Json.new
+
+  config.active_storage.service = ENV['SELF_HOSTED'] == 'true' ? :local : :s3
 end

View file

@@ -0,0 +1,11 @@
# frozen_string_literal: true

require 'aws-sdk-core'

Aws.config.update(
  {
    region: ENV['AWS_REGION'],
    endpoint: ENV['AWS_ENDPOINT'],
    credentials: Aws::Credentials.new(ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY'])
  }
)

View file

@@ -1,13 +0,0 @@
# frozen_string_literal: true

require 'shrine'
require 'shrine/storage/file_system'

Shrine.storages = {
  cache: Shrine::Storage::FileSystem.new('public', prefix: 'uploads/cache'), # temporary
  store: Shrine::Storage::FileSystem.new('public', prefix: 'uploads') # permanent
}

Shrine.plugin :activerecord # loads Active Record integration
Shrine.plugin :cached_attachment_data # enables retaining cached file across form redisplays
Shrine.plugin :restore_cached_data # extracts metadata for assigned cached files

View file

@@ -6,13 +6,15 @@ local:
   service: Disk
   root: <%= Rails.root.join("storage") %>
 
-# Use bin/rails credentials:edit to set the AWS secrets (as aws:access_key_id|secret_access_key)
-# amazon:
-#   service: S3
-#   access_key_id: <%= Rails.application.credentials.dig(:aws, :access_key_id) %>
-#   secret_access_key: <%= Rails.application.credentials.dig(:aws, :secret_access_key) %>
-#   region: us-east-1
-#   bucket: your_own_bucket-<%= Rails.env %>
+# Only load S3 config if not in test environment
+<% unless Rails.env.test? %>
+s3:
+  service: S3
+  access_key_id: <%= ENV.fetch("AWS_ACCESS_KEY_ID") %>
+  secret_access_key: <%= ENV.fetch("AWS_SECRET_ACCESS_KEY") %>
+  region: <%= ENV.fetch("AWS_REGION") %>
+  bucket: <%= ENV.fetch("AWS_BUCKET") %>
+<% end %>
 
 # Remember not to checkin your GCS keyfile to a repository
 # google:
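
With the `s3` service above and the AWS initializer added earlier, a quick sanity check from a Rails console; a sketch assuming the `AWS_*` environment variables are set and `SELF_HOSTED` is not `'true'`:

Rails.application.config.active_storage.service
# => :s3 (or :local when SELF_HOSTED=true)

# Round-trip a tiny blob through the configured service:
blob = ActiveStorage::Blob.create_and_upload!(
  io: StringIO.new('ping'),
  filename: 'ping.txt',
  content_type: 'text/plain'
)
blob.download # => "ping"
blob.purge    # clean up the test object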

View file

@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 # This migration comes from active_storage (originally 20170806125915)
 class CreateActiveStorageTables < ActiveRecord::Migration[7.0]
   def change
@@ -19,7 +21,7 @@ class CreateActiveStorageTables < ActiveRecord::Migration[7.0]
       t.datetime :created_at, null: false
 
-      t.index [ :key ], unique: true
+      t.index [:key], unique: true
     end
 
     create_table :active_storage_attachments, id: primary_key_type do |t|
@@ -33,7 +35,8 @@ class CreateActiveStorageTables < ActiveRecord::Migration[7.0]
       t.datetime :created_at, null: false
 
-      t.index [ :record_type, :record_id, :name, :blob_id ], name: :index_active_storage_attachments_uniqueness, unique: true
+      t.index %i[record_type record_id name blob_id], name: :index_active_storage_attachments_uniqueness,
+              unique: true
       t.foreign_key :active_storage_blobs, column: :blob_id
     end
 
@@ -41,17 +44,18 @@ class CreateActiveStorageTables < ActiveRecord::Migration[7.0]
       t.belongs_to :blob, null: false, index: false, type: foreign_key_type
       t.string :variation_digest, null: false
 
-      t.index [ :blob_id, :variation_digest ], name: :index_active_storage_variant_records_uniqueness, unique: true
+      t.index %i[blob_id variation_digest], name: :index_active_storage_variant_records_uniqueness, unique: true
       t.foreign_key :active_storage_blobs, column: :blob_id
     end
   end
 
   private
+
   def primary_and_foreign_key_types
     config = Rails.configuration.generators
     setting = config.options[config.orm][:primary_key_type]
     primary_key_type = setting || :primary_key
     foreign_key_type = setting || :bigint
     [primary_key_type, foreign_key_type]
   end
 end

View file

@@ -0,0 +1,9 @@
# frozen_string_literal: true

class AddFormatStartAtEndAtToExports < ActiveRecord::Migration[8.0]
  def change
    add_column :exports, :format, :integer, default: 0
    add_column :exports, :start_at, :datetime
    add_column :exports, :end_at, :datetime
  end
end

db/schema.rb (generated)
View file

@@ -10,7 +10,7 @@
 #
 # It's strongly recommended that you check this file into your version control system.
 
-ActiveRecord::Schema[8.0].define(version: 2025_03_03_194043) do
+ActiveRecord::Schema[8.0].define(version: 2025_03_24_180755) do
   # These are extensions that must be enabled in order to support this database
   enable_extension "pg_catalog.plpgsql"
   enable_extension "postgis"
@@ -74,6 +74,9 @@ ActiveRecord::Schema[8.0].define(version: 2025_03_03_194043) do
     t.bigint "user_id", null: false
     t.datetime "created_at", null: false
     t.datetime "updated_at", null: false
+    t.integer "format", default: 0
+    t.datetime "start_at"
+    t.datetime "end_at"
     t.index ["status"], name: "index_exports_on_status"
     t.index ["user_id"], name: "index_exports_on_user_id"
   end

View file

@@ -1,8 +1,7 @@
 # frozen_string_literal: true
 
+# Usage: rake import:big_file['/path/to/file.json','user@email.com']
 namespace :import do
-  # Usage: rake import:big_file['/path/to/file.json','user@email.com']
   desc 'Accepts a file path and user email and imports the data into the database'
   task :big_file, %i[file_path user_email] => :environment do |_, args|

lib/tasks/imports.rake (new file)
View file

@@ -0,0 +1,13 @@
# frozen_string_literal: true

namespace :imports do
  desc 'Migrate existing imports from `raw_data` to the new file storage'
  task migrate_to_new_storage: :environment do
    Import.find_each do |import|
      import.migrate_to_new_storage
    rescue StandardError => e
      puts "Error migrating import #{import.id}: #{e.message}"
    end
  end
end

View file

@@ -3,8 +3,8 @@
 FactoryBot.define do
   factory :export do
     name { 'export' }
-    url { 'exports/export.json' }
-    status { 1 }
+    status { :created }
+    format { :json }
     user
   end
 end

View file

@@ -3,8 +3,7 @@
 FactoryBot.define do
   factory :import do
     user
-    name { 'MARCH_2024.json' }
+    name { 'owntracks_export.json' }
     source { Import.sources[:owntracks] }
-    raw_data { OwnTracks::RecParser.new(File.read('spec/fixtures/files/owntracks/2024-03.rec')).call }
   end
 end

View file

@@ -0,0 +1,10 @@
{
"timelineObjects": [
{
"activitySegment": {
"startLocation": { "latitudeE7": 123422222, "longitudeE7": 123422222 },
"duration": { "startTimestamp": "2025-03-24 20:07:24 +0100" }
}
}
]
}

View file

@@ -0,0 +1,10 @@
{
"timelineObjects": [
{
"activitySegment": {
"startLocation": { "latitudeE7": 123466666, "longitudeE7": 123466666 },
"duration": { "startTimestampMs": "1742844302585" }
}
}
]
}

View file

@@ -0,0 +1,10 @@
{
"timelineObjects": [
{
"activitySegment": {
"startLocation": { "latitudeE7": 123455555, "longitudeE7": 123455555 },
"duration": { "startTimestamp": "1742844232" }
}
}
]
}

View file

@@ -0,0 +1,10 @@
{
"timelineObjects": [
{
"activitySegment": {
"startLocation": { "latitudeE7": 123444444, "longitudeE7": 123444444 },
"duration": { "startTimestamp": "1742844302585" }
}
}
]
}

View file

@@ -0,0 +1,10 @@
{
"timelineObjects": [
{
"activitySegment": {
"startLocation": { "latitudeE7": 123433333, "longitudeE7": 123433333 },
"duration": { "startTimestamp": "2025-03-24T20:20:23+01:00" }
}
}
]
}

View file

@@ -0,0 +1,14 @@
{
"timelineObjects": [
{
"activitySegment": {
"waypointPath": {
"waypoints": [
{ "latE7": 123411111, "lngE7": 123411111 }
]
},
"duration": { "startTimestamp": "2025-03-24 20:07:24 +0100" }
}
}
]
}

View file

@@ -0,0 +1,9 @@
{
"timelineObjects": [
{
"activitySegment": {
"duration": { "startTimestamp": "2025-03-24 20:07:24 +0100" }
}
}
]
}

View file

@@ -0,0 +1,10 @@
{
"timelineObjects": [
{
"placeVisit": {
"location": { "latitudeE7": 123477777, "longitudeE7": 123477777 },
"duration": { "startTimestamp": "1742844232" }
}
}
]
}

View file

@@ -0,0 +1,10 @@
{
"timelineObjects": [
{
"placeVisit": {
"location": { "latitudeE7": 123488888, "longitudeE7": 123488888 },
"duration": { "startTimestamp": "2025-03-24T20:25:02+01:00" }
}
}
]
}

View file

@@ -0,0 +1,10 @@
{
"timelineObjects": [
{
"placeVisit": {
"location": { "latitudeE7": 123511111, "longitudeE7": 123511111 },
"duration": { "startTimestamp": "1742844302585" }
}
}
]
}

View file

@@ -0,0 +1,10 @@
{
"timelineObjects": [
{
"placeVisit": {
"location": { "latitudeE7": 123499999, "longitudeE7": 123499999 },
"duration": { "startTimestamp": "1742844302" }
}
}
]
}

View file

@@ -0,0 +1,10 @@
{
"timelineObjects": [
{
"placeVisit": {
"location": { "latitudeE7": 123522222, "longitudeE7": 123522222 },
"duration": { "startTimestampMs": "1742844302585" }
}
}
]
}

View file

@@ -0,0 +1,10 @@
{
"timelineObjects": [
{
"placeVisit": {
"location": {},
"duration": { "startTimestamp": "2025-03-24 20:25:02 +0100" }
}
}
]
}

View file

@@ -0,0 +1,10 @@
{
"timelineObjects": [
{
"placeVisit": {
"otherCandidateLocations": [{ "latitudeE7": 123533333, "longitudeE7": 123533333 }],
"duration": { "startTimestamp": "2025-03-24 20:25:02 +0100" }
}
}
]
}

View file

@@ -8,8 +8,8 @@ RSpec.describe ExportJob, type: :job do
   let(:end_at) { Time.zone.now }
 
   it 'calls the Exports::Create service class' do
-    expect(Exports::Create).to receive(:new).with(export:, start_at:, end_at:, file_format: :json).and_call_original
-    described_class.perform_now(export.id, start_at, end_at)
+    expect(Exports::Create).to receive(:new).with(export:).and_call_original
+    described_class.perform_now(export.id)
   end
 end

View file

@@ -2,12 +2,17 @@
 require 'rails_helper'
 
-RSpec.describe ImportJob, type: :job do
+RSpec.describe Import::ProcessJob, type: :job do
   describe '#perform' do
-    subject(:perform) { described_class.new.perform(user.id, import.id) }
+    subject(:perform) { described_class.new.perform(import.id) }
 
     let(:user) { create(:user) }
-    let!(:import) { create(:import, user:, name: 'owntracks_export.json') }
+    let!(:import) { create(:import, user:, name: '2024-03.rec') }
+    let(:file_path) { Rails.root.join('spec/fixtures/files/owntracks/2024-03.rec') }
+
+    before do
+      import.file.attach(io: File.open(file_path), filename: '2024-03.rec', content_type: 'application/octet-stream')
+    end
 
     it 'creates points' do
       expect { perform }.to change { Point.count }.by(9)

View file

@@ -9,5 +9,6 @@ RSpec.describe Export, type: :model do
   describe 'enums' do
     it { is_expected.to define_enum_for(:status).with_values(created: 0, processing: 1, completed: 2, failed: 3) }
+    it { is_expected.to define_enum_for(:format).with_values(json: 0, gpx: 1) }
   end
 end

View file

@@ -36,4 +36,23 @@ RSpec.describe Import, type: :model do
       expect(import.years_and_months_tracked).to eq([[2024, 11]])
     end
   end
+
+  describe '#migrate_to_new_storage' do
+    let(:raw_data) { Rails.root.join('spec/fixtures/files/geojson/export.json') }
+    let(:import) { create(:import, source: 'geojson', raw_data:) }
+
+    it 'attaches the file to the import' do
+      import.migrate_to_new_storage
+
+      expect(import.file.attached?).to be_truthy
+    end
+
+    context 'when file is attached' do
+      it 'is an importable file' do
+        import.migrate_to_new_storage
+
+        expect { import.process! }.to change(Point, :count).by(10)
+      end
+    end
+  end
 end

View file

@@ -72,7 +72,7 @@ RSpec.describe User, type: :model do
       expect(user.imports.first.name).to eq('DELETE_ME_this_is_a_demo_import_DELETE_ME')
       expect(user.imports.first.source).to eq('gpx')
 
-      expect(ImportJob).to have_been_enqueued.with(user.id, user.imports.first.id)
+      expect(Import::ProcessJob).to have_been_enqueued.with(user.imports.first.id)
     end
   end
 end

View file

@@ -76,25 +76,9 @@ RSpec.describe '/exports', type: :request do
   end
 
   describe 'DELETE /destroy' do
-    let!(:export) { create(:export, user:, url: 'exports/export.json', name: 'export.json') }
-    let(:export_file) { Rails.root.join('public', 'exports', export.name) }
+    let!(:export) { create(:export, user:, name: 'export.json') }
 
-    before do
-      sign_in user
-      FileUtils.mkdir_p(File.dirname(export_file))
-      File.write(export_file, '{"some": "data"}')
-    end
-
-    after { FileUtils.rm_f(export_file) }
-
-    it 'removes the export file from disk' do
-      expect(File.exist?(export_file)).to be true
-
-      delete export_url(export)
-
-      expect(File.exist?(export_file)).to be false
-    end
+    before { sign_in user }
 
     it 'destroys the requested export' do
       expect { delete export_url(export) }.to change(Export, :count).by(-1)

View file

@@ -46,7 +46,7 @@ RSpec.describe 'Imports', type: :request do
       it 'queues import job' do
         expect do
           post imports_path, params: { import: { source: 'owntracks', files: [file] } }
-        end.to have_enqueued_job(ImportJob).on_queue('imports').at_least(1).times
+        end.to have_enqueued_job(Import::ProcessJob).on_queue('imports').at_least(1).times
       end
 
       it 'creates a new import' do
@@ -64,7 +64,7 @@ RSpec.describe 'Imports', type: :request do
       it 'queues import job' do
         expect do
           post imports_path, params: { import: { source: 'gpx', files: [file] } }
-        end.to have_enqueued_job(ImportJob).on_queue('imports').at_least(1).times
+        end.to have_enqueued_job(Import::ProcessJob).on_queue('imports').at_least(1).times
      end
 
       it 'creates a new import' do

View file

@@ -4,15 +4,17 @@ require 'rails_helper'
 
 RSpec.describe Exports::Create do
   describe '#call' do
-    subject(:create_export) { described_class.new(export:, start_at:, end_at:, file_format:).call }
+    subject(:create_export) { described_class.new(export:).call }
 
     let(:file_format) { :json }
     let(:user) { create(:user) }
     let(:start_at) { DateTime.new(2021, 1, 1).to_s }
     let(:end_at) { DateTime.new(2021, 1, 2).to_s }
     let(:export_name) { "#{start_at.to_date}_#{end_at.to_date}.#{file_format}" }
-    let(:export) { create(:export, user:, name: export_name, status: :created) }
+    let(:export) do
+      create(:export, user:, name: export_name, status: :created, format: file_format, start_at:, end_at:)
+    end
     let(:export_content) { Points::GeojsonSerializer.new(points).call }
     let(:reverse_geocoded_at) { Time.zone.local(2021, 1, 1) }
     let!(:points) do
       10.times.map do |i|
@@ -35,10 +37,10 @@ RSpec.describe Exports::Create do
       expect(File.read(file_path).strip).to eq(export_content)
     end
 
-    it 'sets the export url' do
+    it 'sets the export file' do
       create_export
 
-      expect(export.reload.url).to eq("exports/#{export.name}")
+      expect(export.reload.file.attached?).to be_truthy
     end
 
     it 'updates the export status to completed' do
@@ -53,7 +55,7 @@ RSpec.describe Exports::Create do
     context 'when an error occurs' do
       before do
-        allow(File).to receive(:open).and_raise(StandardError)
+        allow_any_instance_of(Points::GeojsonSerializer).to receive(:call).and_raise(StandardError)
       end
 
       it 'updates the export status to failed' do

View file

@@ -12,8 +12,12 @@ RSpec.describe Geojson::ImportParser do
     context 'when file content is an object' do
       let(:file_path) { Rails.root.join('spec/fixtures/files/geojson/export.json') }
-      let(:raw_data) { JSON.parse(File.read(file_path)) }
-      let(:import) { create(:import, user:, name: 'geojson.json', raw_data:) }
+      let(:file) { Rack::Test::UploadedFile.new(file_path, 'application/json') }
+      let(:import) { create(:import, user:, name: 'geojson.json', file:) }
+
+      before do
+        import.file.attach(io: File.open(file_path), filename: 'geojson.json', content_type: 'application/json')
+      end
 
       it 'creates new points' do
         expect { service }.to change { Point.count }.by(10)

View file

@@ -8,11 +8,15 @@ RSpec.describe GoogleMaps::PhoneTakeoutParser do
 
   let(:user) { create(:user) }
 
+  before do
+    import.file.attach(io: File.open(file_path), filename: 'phone_takeout.json', content_type: 'application/json')
+  end
+
   context 'when file content is an object' do
     # This file contains 3 duplicates
     let(:file_path) { Rails.root.join('spec/fixtures/files/google/phone-takeout.json') }
-    let(:raw_data) { JSON.parse(File.read(file_path)) }
-    let(:import) { create(:import, user:, name: 'phone_takeout.json', raw_data:) }
+    let(:file) { Rack::Test::UploadedFile.new(file_path, 'application/json') }
+    let(:import) { create(:import, user:, name: 'phone_takeout.json', file:) }
 
     context 'when file exists' do
       it 'creates points' do
@@ -24,8 +28,8 @@ RSpec.describe GoogleMaps::PhoneTakeoutParser do
   context 'when file content is an array' do
     # This file contains 4 duplicates
     let(:file_path) { Rails.root.join('spec/fixtures/files/google/location-history.json') }
-    let(:raw_data) { JSON.parse(File.read(file_path)) }
-    let(:import) { create(:import, user:, name: 'phone_takeout.json', raw_data:) }
+    let(:file) { Rack::Test::UploadedFile.new(file_path, 'application/json') }
+    let(:import) { create(:import, user:, name: 'phone_takeout.json', file:) }
 
     context 'when file exists' do
       it 'creates points' do

View file

@@ -7,36 +7,28 @@ RSpec.describe GoogleMaps::SemanticHistoryParser do
   subject(:parser) { described_class.new(import, user.id).call }
 
   let(:user) { create(:user) }
-  let(:time) { Time.zone.now }
+  let!(:import) { create(:import, user:) }
+  let(:file_path) { Rails.root.join("spec/fixtures/files/google/location-history/#{file_name}.json") }
+
+  before do
+    import.file.attach(
+      io: File.open(file_path),
+      filename: 'semantic_history.json',
+      content_type: 'application/json'
+    )
+  end
 
   context 'when activitySegment is present' do
     context 'when startLocation is blank' do
-      let(:import) { create(:import, raw_data: { 'timelineObjects' => [activity_segment] }) }
-      let(:activity_segment) do
-        {
-          'activitySegment' => {
-            'waypointPath' => {
-              'waypoints' => [
-                { 'latE7' => 123_456_789, 'lngE7' => 123_456_789 }
-              ]
-            },
-            'duration' => { 'startTimestamp' => time.to_s }
-          }
-        }
-      end
+      let(:file_name) { 'with_activitySegment_without_startLocation' }
 
       it 'creates a point' do
         expect { parser }.to change(Point, :count).by(1)
+        expect(Point.last.lonlat.to_s).to eq('POINT (12.3411111 12.3411111)')
       end
 
       context 'when waypointPath is blank' do
-        let(:activity_segment) do
-          {
-            'activitySegment' => {
-              'duration' => { 'startTimestamp' => time.to_s }
-            }
-          }
-        end
+        let(:file_name) { 'with_activitySegment_without_startLocation_without_waypointPath' }
 
         it 'does not create a point' do
           expect { parser }.not_to change(Point, :count)
@@ -45,78 +37,47 @@ RSpec.describe GoogleMaps::SemanticHistoryParser do
     end
 
     context 'when startLocation is present' do
-      let(:import) { create(:import, raw_data: { 'timelineObjects' => [activity_segment] }) }
-      let(:activity_segment) do
-        {
-          'activitySegment' => {
-            'startLocation' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
-            'duration' => { 'startTimestamp' => time.to_s }
-          }
-        }
-      end
+      let(:file_name) { 'with_activitySegment_with_startLocation' }
 
       it 'creates a point' do
         expect { parser }.to change(Point, :count).by(1)
+        expect(Point.last.lonlat.to_s).to eq('POINT (12.3422222 12.3422222)')
      end
 
       context 'with different timestamp formats' do
         context 'when timestamp is in ISO format' do
-          let(:activity_segment) do
-            {
-              'activitySegment' => {
-                'startLocation' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
-                'duration' => { 'startTimestamp' => time.iso8601 }
-              }
-            }
-          end
+          let(:file_name) { 'with_activitySegment_with_startLocation_with_iso_timestamp' }
 
           it 'creates a point' do
             expect { parser }.to change(Point, :count).by(1)
+            expect(Point.last.lonlat.to_s).to eq('POINT (12.3433333 12.3433333)')
           end
         end
 
         context 'when timestamp is in seconds format' do
-          let(:activity_segment) do
-            {
-              'activitySegment' => {
-                'startLocation' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
-                'duration' => { 'startTimestamp' => time.to_i.to_s }
-              }
-            }
-          end
+          let(:file_name) { 'with_activitySegment_with_startLocation_timestamp_in_seconds_format' }
 
           it 'creates a point' do
             expect { parser }.to change(Point, :count).by(1)
+            expect(Point.last.lonlat.to_s).to eq('POINT (12.3444444 12.3444444)')
           end
         end
 
         context 'when timestamp is in milliseconds format' do
-          let(:activity_segment) do
-            {
-              'activitySegment' => {
-                'startLocation' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
-                'duration' => { 'startTimestamp' => (time.to_f * 1000).to_i.to_s }
-              }
-            }
-          end
+          let(:file_name) { 'with_activitySegment_with_startLocation_timestamp_in_milliseconds_format' }
 
           it 'creates a point' do
             expect { parser }.to change(Point, :count).by(1)
+            expect(Point.last.lonlat.to_s).to eq('POINT (12.3455555 12.3455555)')
           end
         end
 
         context 'when timestampMs is used' do
-          let(:activity_segment) do
-            {
-              'activitySegment' => {
-                'startLocation' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
-                'duration' => { 'timestampMs' => (time.to_f * 1000).to_i.to_s }
-              }
-            }
-          end
+          let(:file_name) { 'with_activitySegment_with_startLocation_timestampMs' }
 
           it 'creates a point' do
             expect { parser }.to change(Point, :count).by(1)
+            expect(Point.last.lonlat.to_s).to eq('POINT (12.3466666 12.3466666)')
           end
         end
       end
@@ -125,110 +86,65 @@ RSpec.describe GoogleMaps::SemanticHistoryParser do
 
   context 'when placeVisit is present' do
     context 'when location with coordinates is present' do
-      let(:import) { create(:import, raw_data: { 'timelineObjects' => [place_visit] }) }
-      let(:place_visit) do
-        {
-          'placeVisit' => {
-            'location' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
-            'duration' => { 'startTimestamp' => time.to_s }
-          }
-        }
-      end
+      let(:file_name) { 'with_placeVisit_with_location_with_coordinates' }
 
       it 'creates a point' do
         expect { parser }.to change(Point, :count).by(1)
+        expect(Point.last.lonlat.to_s).to eq('POINT (12.3477777 12.3477777)')
       end
 
       context 'with different timestamp formats' do
         context 'when timestamp is in ISO format' do
-          let(:place_visit) do
-            {
-              'placeVisit' => {
-                'location' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
-                'duration' => { 'startTimestamp' => time.iso8601 }
-              }
-            }
-          end
+          let(:file_name) { 'with_placeVisit_with_location_with_coordinates_with_iso_timestamp' }
 
           it 'creates a point' do
             expect { parser }.to change(Point, :count).by(1)
+            expect(Point.last.lonlat.to_s).to eq('POINT (12.3488888 12.3488888)')
           end
         end
 
         context 'when timestamp is in seconds format' do
-          let(:place_visit) do
-            {
-              'placeVisit' => {
-                'location' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
-                'duration' => { 'startTimestamp' => time.to_i.to_s }
-              }
-            }
-          end
+          let(:file_name) { 'with_placeVisit_with_location_with_coordinates_with_seconds_timestamp' }
 
           it 'creates a point' do
             expect { parser }.to change(Point, :count).by(1)
+            expect(Point.last.lonlat.to_s).to eq('POINT (12.3499999 12.3499999)')
           end
         end
 
         context 'when timestamp is in milliseconds format' do
-          let(:place_visit) do
-            {
-              'placeVisit' => {
-                'location' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
-                'duration' => { 'startTimestamp' => (time.to_f * 1000).to_i.to_s }
-              }
-            }
-          end
+          let(:file_name) { 'with_placeVisit_with_location_with_coordinates_with_milliseconds_timestamp' }
 
           it 'creates a point' do
             expect { parser }.to change(Point, :count).by(1)
+            expect(Point.last.lonlat.to_s).to eq('POINT (12.3511111 12.3511111)')
           end
         end
 
         context 'when timestampMs is used' do
-          let(:place_visit) do
-            {
-              'placeVisit' => {
-                'location' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
-                'duration' => { 'timestampMs' => (time.to_f * 1000).to_i.to_s }
-              }
-            }
-          end
+          let(:file_name) { 'with_placeVisit_with_location_with_coordinates_with_timestampMs' }
 
           it 'creates a point' do
             expect { parser }.to change(Point, :count).by(1)
+            expect(Point.last.lonlat.to_s).to eq('POINT (12.3522222 12.3522222)')
          end
        end
      end
    end
 
     context 'when location with coordinates is blank' do
-      let(:import) { create(:import, raw_data: { 'timelineObjects' => [place_visit] }) }
-      let(:place_visit) do
-        {
-          'placeVisit' => {
-            'location' => {},
-            'duration' => { 'startTimestamp' => time.to_s }
-          }
-        }
-      end
+      let(:file_name) { 'with_placeVisit_without_location_with_coordinates' }
 
       it 'does not create a point' do
         expect { parser }.not_to change(Point, :count)
       end
 
       context 'when otherCandidateLocations is present' do
-        let(:place_visit) do
-          {
-            'placeVisit' => {
-              'otherCandidateLocations' => [{ 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 }],
-              'duration' => { 'startTimestamp' => time.to_s }
-            }
-          }
-        end
+        let(:file_name) { 'with_placeVisit_without_location_with_coordinates_with_otherCandidateLocations' }
 
         it 'creates a point' do
           expect { parser }.to change(Point, :count).by(1)
+          expect(Point.last.lonlat.to_s).to eq('POINT (12.3533333 12.3533333)')
         end
       end
     end

View file

@@ -8,8 +8,12 @@ RSpec.describe Gpx::TrackImporter do
   let(:user) { create(:user) }
   let(:file_path) { Rails.root.join('spec/fixtures/files/gpx/gpx_track_single_segment.gpx') }
-  let(:raw_data) { Hash.from_xml(File.read(file_path)) }
-  let(:import) { create(:import, user:, name: 'gpx_track.gpx', raw_data:) }
+  let(:file) { Rack::Test::UploadedFile.new(file_path, 'application/xml') }
+  let(:import) { create(:import, user:, name: 'gpx_track.gpx', source: 'gpx') }
+
+  before do
+    import.file.attach(file)
+  end
 
   context 'when file has a single segment' do
     it 'creates points' do

View file

@@ -88,8 +88,8 @@ RSpec.describe Immich::ImportGeodata do
       expect { service }.to change { Import.count }.by(1)
     end
 
-    it 'enqueues ImportJob' do
-      expect(ImportJob).to receive(:perform_later)
+    it 'enqueues Import::ProcessJob' do
+      expect(Import::ProcessJob).to receive(:perform_later)
 
       service
     end
@@ -101,8 +101,8 @@ RSpec.describe Immich::ImportGeodata do
       expect { service }.not_to(change { Import.count })
     end
 
-    it 'does not enqueue ImportJob' do
-      expect(ImportJob).to_not receive(:perform_later)
+    it 'does not enqueue Import::ProcessJob' do
+      expect(Import::ProcessJob).to_not receive(:perform_later)
 
       service
     end

View file

@@ -9,6 +9,13 @@ RSpec.describe Imports::Create do
   describe '#call' do
     context 'when source is google_semantic_history' do
       let(:import) { create(:import, source: 'google_semantic_history') }
+      let(:file_path) { Rails.root.join('spec/fixtures/files/google/semantic_history.json') }
+      let(:file) { Rack::Test::UploadedFile.new(file_path, 'application/json') }
+
+      before do
+        import.file.attach(io: File.open(file_path), filename: 'semantic_history.json',
+                           content_type: 'application/json')
+      end
 
       it 'calls the GoogleMaps::SemanticHistoryParser' do
         expect(GoogleMaps::SemanticHistoryParser).to \
@@ -29,6 +36,12 @@ RSpec.describe Imports::Create do
     context 'when source is owntracks' do
       let(:import) { create(:import, source: 'owntracks') }
+      let(:file_path) { Rails.root.join('spec/fixtures/files/owntracks/2024-03.rec') }
+      let(:file) { Rack::Test::UploadedFile.new(file_path, 'application/octet-stream') }
+
+      before do
+        import.file.attach(io: File.open(file_path), filename: '2024-03.rec', content_type: 'application/octet-stream')
+      end
 
       it 'calls the OwnTracks::Importer' do
         expect(OwnTracks::Importer).to \
@@ -50,7 +63,7 @@ RSpec.describe Imports::Create do
       end
     end
 
-    xit 'schedules visit suggesting' do
+    it 'schedules visit suggesting' do
       Sidekiq::Testing.inline! do
         expect { service.call }.to have_enqueued_job(VisitSuggestingJob)
       end
@@ -72,6 +85,13 @@ RSpec.describe Imports::Create do
     context 'when source is gpx' do
       let(:import) { create(:import, source: 'gpx') }
+      let(:file_path) { Rails.root.join('spec/fixtures/files/gpx/gpx_track_single_segment.gpx') }
+      let(:file) { Rack::Test::UploadedFile.new(file_path, 'application/octet-stream') }
+
+      before do
+        import.file.attach(io: File.open(file_path), filename: 'gpx_track_single_segment.gpx',
+                           content_type: 'application/octet-stream')
+      end
 
       it 'calls the Gpx::TrackImporter' do
         expect(Gpx::TrackImporter).to \

View file

@@ -23,7 +23,7 @@ RSpec.describe Imports::Watcher do
     end
 
     it 'enqueues importing jobs for the user' do
-      expect { service }.to have_enqueued_job(ImportJob).exactly(6).times
+      expect { service }.to have_enqueued_job(Import::ProcessJob).exactly(6).times
     end
 
     context 'when the import already exists' do
@@ -41,8 +41,8 @@ RSpec.describe Imports::Watcher do
     end
 
     context 'when user does not exist' do
-      it 'does not call ImportJob' do
-        expect(ImportJob).not_to receive(:perform_later)
+      it 'does not call Import::ProcessJob' do
+        expect(Import::ProcessJob).not_to receive(:perform_later)
 
         service
       end

View file

@@ -7,7 +7,13 @@ RSpec.describe OwnTracks::Importer do
   subject(:parser) { described_class.new(import, user.id).call }
 
   let(:user) { create(:user) }
-  let(:import) { create(:import, user:, name: 'owntracks_export.json') }
+  let(:import) { create(:import, user:, name: '2024-03.rec') }
+  let(:file_path) { Rails.root.join('spec/fixtures/files/owntracks/2024-03.rec') }
+  let(:file) { Rack::Test::UploadedFile.new(file_path, 'text/plain') }
+
+  before do
+    import.file.attach(io: File.open(file_path), filename: '2024-03.rec', content_type: 'text/plain')
+  end
 
   context 'when file exists' do
     it 'creates points' do

View file

@@ -154,8 +154,8 @@ RSpec.describe Photoprism::ImportGeodata do
       expect { service }.to change { Import.count }.by(1)
     end
 
-    it 'enqueues ImportJob' do
-      expect(ImportJob).to receive(:perform_later)
+    it 'enqueues Import::ProcessJob' do
+      expect(Import::ProcessJob).to receive(:perform_later)
 
       service
     end
@@ -167,8 +167,8 @@ RSpec.describe Photoprism::ImportGeodata do
      expect { service }.not_to(change { Import.count })
    end
 
-    it 'does not enqueue ImportJob' do
-      expect(ImportJob).to_not receive(:perform_later)
+    it 'does not enqueue Import::ProcessJob' do
+      expect(Import::ProcessJob).to_not receive(:perform_later)
 
       service
     end