From 8ad0b20d3d29e1c9e0475e71c08e0104ee8b399a Mon Sep 17 00:00:00 2001 From: Eugene Burmakin Date: Sat, 28 Jun 2025 12:22:56 +0200 Subject: [PATCH] Add import data feature --- .../design_iterations/trip_page_1.html | 283 ++++++++++++++++ .../design_iterations/trip_page_2.html | 238 +++++++++++++ .../design_iterations/trip_page_3.html | 316 ++++++++++++++++++ .../design_iterations/trip_page_3_1.html | 189 +++++++++++ CHANGELOG.md | 5 + app/controllers/settings/users_controller.rb | 36 +- app/jobs/users/export_data_job.rb | 2 + app/jobs/users/import_data_job.rb | 64 ++++ app/models/import.rb | 17 +- app/models/point.rb | 7 +- app/services/exception_reporter.rb | 4 +- app/services/imports/create.rb | 6 + app/services/notifications.rb | 18 + app/services/notifications/create.rb | 16 - app/services/users/export_data.rb | 68 +++- app/services/users/export_data/points.rb | 45 ++- app/services/users/import_data.rb | 202 +++++++++++ app/services/users/import_data/areas.rb | 53 +++ app/services/users/import_data/exports.rb | 92 +++++ app/services/users/import_data/imports.rb | 102 ++++++ .../users/import_data/notifications.rb | 49 +++ app/services/users/import_data/places.rb | 76 +++++ app/services/users/import_data/points.rb | 191 +++++++++++ app/services/users/import_data/settings.rb | 27 ++ app/services/users/import_data/stats.rb | 48 +++ app/services/users/import_data/trips.rb | 49 +++ app/services/users/import_data/visits.rb | 90 +++++ app/views/devise/registrations/edit.html.erb | 27 +- app/views/imports/index.html.erb | 1 + config/environments/development.rb | 3 +- config/environments/production.rb | 2 +- .../20250627184017_add_status_to_imports.rb | 10 + db/schema.rb | 17 +- spec/jobs/users/import_data_job_spec.rb | 183 ++++++++++ spec/models/import_spec.rb | 3 +- .../services/users/export_data/points_spec.rb | 53 +++ spec/services/users/export_data_spec.rb | 108 +++++- spec/services/users/import_data/areas_spec.rb | 161 +++++++++ .../users/import_data/imports_spec.rb | 270 +++++++++++++++ .../users/import_data/notifications_spec.rb | 181 ++++++++++ .../services/users/import_data/places_spec.rb | 216 ++++++++++++ .../services/users/import_data/points_spec.rb | 139 ++++++++ .../users/import_data/settings_spec.rb | 82 +++++ spec/services/users/import_data/stats_spec.rb | 188 +++++++++++ spec/services/users/import_data/trips_spec.rb | 186 +++++++++++ spec/services/users/import_data_spec.rb | 297 ++++++++++++++++ 46 files changed, 4356 insertions(+), 64 deletions(-) create mode 100644 .superdesign/design_iterations/trip_page_1.html create mode 100644 .superdesign/design_iterations/trip_page_2.html create mode 100644 .superdesign/design_iterations/trip_page_3.html create mode 100644 .superdesign/design_iterations/trip_page_3_1.html create mode 100644 app/jobs/users/import_data_job.rb create mode 100644 app/services/notifications.rb delete mode 100644 app/services/notifications/create.rb create mode 100644 app/services/users/import_data.rb create mode 100644 app/services/users/import_data/areas.rb create mode 100644 app/services/users/import_data/exports.rb create mode 100644 app/services/users/import_data/imports.rb create mode 100644 app/services/users/import_data/notifications.rb create mode 100644 app/services/users/import_data/places.rb create mode 100644 app/services/users/import_data/points.rb create mode 100644 app/services/users/import_data/settings.rb create mode 100644 app/services/users/import_data/stats.rb create mode 100644 app/services/users/import_data/trips.rb create mode 100644 
app/services/users/import_data/visits.rb create mode 100644 db/migrate/20250627184017_add_status_to_imports.rb create mode 100644 spec/jobs/users/import_data_job_spec.rb create mode 100644 spec/services/users/import_data/areas_spec.rb create mode 100644 spec/services/users/import_data/imports_spec.rb create mode 100644 spec/services/users/import_data/notifications_spec.rb create mode 100644 spec/services/users/import_data/places_spec.rb create mode 100644 spec/services/users/import_data/points_spec.rb create mode 100644 spec/services/users/import_data/settings_spec.rb create mode 100644 spec/services/users/import_data/stats_spec.rb create mode 100644 spec/services/users/import_data/trips_spec.rb create mode 100644 spec/services/users/import_data_spec.rb diff --git a/.superdesign/design_iterations/trip_page_1.html b/.superdesign/design_iterations/trip_page_1.html new file mode 100644 index 00000000..fb29fe20 --- /dev/null +++ b/.superdesign/design_iterations/trip_page_1.html @@ -0,0 +1,283 @@ + + + + + + European Grand Tour - Trip Details + + + +
+ [trip_page_1.html mockup — "European Grand Tour" trip page: hero header with description, interactive route map (Start: Amsterdam, End: Rome), trip statistics (3,247 km, 21 days, 7 countries), countries-visited list, 147-photo grid, and a Day 1–21 timeline; original HTML markup not recoverable, only text content survived extraction]
+
+
\ No newline at end of file
diff --git a/.superdesign/design_iterations/trip_page_2.html b/.superdesign/design_iterations/trip_page_2.html
new file mode 100644
index 00000000..bd2133b6
--- /dev/null
+++ b/.superdesign/design_iterations/trip_page_2.html
@@ -0,0 +1,238 @@
+ [trip_page_2.html mockup — "Asian Adventure" trip page: header, route map, trip statistics (2,847 km, 18 days, 5 countries), countries list (Thailand, Vietnam, Cambodia, Laos, Myanmar), highlights (temples, cooking classes, markets, boat rides), 247-photo grid, and a Day 1–18 timeline; markup not recoverable]
+
+
\ No newline at end of file
diff --git a/.superdesign/design_iterations/trip_page_3.html b/.superdesign/design_iterations/trip_page_3.html
new file mode 100644
index 00000000..8e635fcf
--- /dev/null
+++ b/.superdesign/design_iterations/trip_page_3.html
@@ -0,0 +1,316 @@
+ [trip_page_3.html mockup — "Coast to Coast Adventure" (New York City to San Francisco, October 2024): route overview map, trip statistics (2,908 mi, 14 days, 12 states), states-crossed list, highlights (Golden Gate Bridge, Chicago Skyline, Rocky Mountains, Monument Valley, Route 66), key stops, weather summary, and trip notes; markup not recoverable]
+
+
\ No newline at end of file
diff --git a/.superdesign/design_iterations/trip_page_3_1.html b/.superdesign/design_iterations/trip_page_3_1.html
new file mode 100644
index 00000000..b50ad622
--- /dev/null
+++ b/.superdesign/design_iterations/trip_page_3_1.html
@@ -0,0 +1,189 @@
+ [trip_page_3_1.html mockup — compact variant of the "Coast to Coast Adventure" page: condensed header (2,908 mi / 14 days), route map, trip stats, state-by-state route (NY → CA), highlights, key stops, weather, and notes; markup not recoverable]
+ + \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index c379fccd..a24e2f08 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/). - [x] All your stats - [ ] In the User Settings, you can now import your user data from a zip file. It will import all the data from the zip file, listed above. It will also start stats recalculation. + - [ ] User can select to override settings or not. - Export file size is now displayed in the exports and imports lists. @@ -27,6 +28,10 @@ and this project adheres to [Semantic Versioning](http://semver.org/). - Oj is now being used for JSON serialization. +## Fixed + +- Email links now use the SMTP domain if set. #1469 + # 0.28.1 - 2025-06-11 ## Fixed diff --git a/app/controllers/settings/users_controller.rb b/app/controllers/settings/users_controller.rb index 421204dc..a3a5899d 100644 --- a/app/controllers/settings/users_controller.rb +++ b/app/controllers/settings/users_controller.rb @@ -2,7 +2,8 @@ class Settings::UsersController < ApplicationController before_action :authenticate_self_hosted! - before_action :authenticate_admin! + before_action :authenticate_admin!, except: [:export, :import] + before_action :authenticate_user!, only: [:export, :import] def index @users = User.order(created_at: :desc) @@ -53,7 +54,40 @@ class Settings::UsersController < ApplicationController end def import + unless params[:archive].present? + redirect_to edit_user_registration_path, alert: 'Please select a ZIP archive to import.' + return + end + archive_file = params[:archive] + + # Validate file type + unless archive_file.content_type == 'application/zip' || + archive_file.content_type == 'application/x-zip-compressed' || + File.extname(archive_file.original_filename).downcase == '.zip' + redirect_to edit_user_registration_path, alert: 'Please upload a valid ZIP file.' + return + end + + # Create Import record for user data archive + import = current_user.imports.build( + name: archive_file.original_filename, + source: :user_data_archive + ) + + import.file.attach(archive_file) + + if import.save + redirect_to edit_user_registration_path, + notice: 'Your data import has been started. You will receive a notification when it completes.' + else + redirect_to edit_user_registration_path, + alert: 'Failed to start import. Please try again.' + end + rescue StandardError => e + ExceptionReporter.call(e, 'User data import failed to start') + redirect_to edit_user_registration_path, + alert: 'An error occurred while starting the import. Please try again.' 
end private diff --git a/app/jobs/users/export_data_job.rb b/app/jobs/users/export_data_job.rb index 2c823f4c..1eb38846 100644 --- a/app/jobs/users/export_data_job.rb +++ b/app/jobs/users/export_data_job.rb @@ -3,6 +3,8 @@ class Users::ExportDataJob < ApplicationJob queue_as :exports + sidekiq_options retry: false + def perform(user_id) user = User.find(user_id) diff --git a/app/jobs/users/import_data_job.rb b/app/jobs/users/import_data_job.rb new file mode 100644 index 00000000..cfd6a7a3 --- /dev/null +++ b/app/jobs/users/import_data_job.rb @@ -0,0 +1,64 @@ +# frozen_string_literal: true + +class Users::ImportDataJob < ApplicationJob + queue_as :imports + + sidekiq_options retry: false + + def perform(import_id) + import = Import.find(import_id) + user = import.user + + # Download the archive file to a temporary location + archive_path = download_import_archive(import) + + # Validate that the archive file exists + unless File.exist?(archive_path) + raise StandardError, "Archive file not found: #{archive_path}" + end + + # Perform the import + import_stats = Users::ImportData.new(user, archive_path).import + + Rails.logger.info "Import completed successfully for user #{user.email}: #{import_stats}" + rescue StandardError => e + user_id = user&.id || import&.user_id || "unknown" + ExceptionReporter.call(e, "Import job failed for user #{user_id}") + + # Create failure notification if user is available + if user + ::Notifications::Create.new( + user: user, + title: 'Data import failed', + content: "Your data import failed with error: #{e.message}. Please check the archive format and try again.", + kind: :error + ).call + end + + raise e + ensure + # Clean up the uploaded archive file if it exists + if archive_path && File.exist?(archive_path) + File.delete(archive_path) + Rails.logger.info "Cleaned up archive file: #{archive_path}" + end + end + + private + + def download_import_archive(import) + require 'tmpdir' + + timestamp = Time.current.to_i + filename = "user_import_#{import.user_id}_#{import.id}_#{timestamp}.zip" + temp_path = File.join(Dir.tmpdir, filename) + + File.open(temp_path, 'wb') do |file_handle| + import.file.download do |chunk| + file_handle.write(chunk) + end + end + + temp_path + end +end diff --git a/app/models/import.rb b/app/models/import.rb index b2932802..c9000b75 100644 --- a/app/models/import.rb +++ b/app/models/import.rb @@ -11,13 +11,24 @@ class Import < ApplicationRecord validates :name, presence: true, uniqueness: { scope: :user_id } + enum :status, { created: 0, processing: 1, completed: 2, failed: 3 } + enum :source, { google_semantic_history: 0, owntracks: 1, google_records: 2, - google_phone_takeout: 3, gpx: 4, immich_api: 5, geojson: 6, photoprism_api: 7 + google_phone_takeout: 3, gpx: 4, immich_api: 5, geojson: 6, photoprism_api: 7, + user_data_archive: 8 } def process! - Imports::Create.new(user, self).call + if user_data_archive? + process_user_data_archive! + else + Imports::Create.new(user, self).call + end + end + + def process_user_data_archive! 
+ Users::ImportDataJob.perform_later(id) end def reverse_geocoded_points_count @@ -39,7 +50,7 @@ class Import < ApplicationRecord file.attach(io: raw_file, filename: name, content_type: 'application/json') end - private + private def remove_attached_file file.purge_later diff --git a/app/models/point.rb b/app/models/point.rb index 6620dc14..e4d7b0eb 100644 --- a/app/models/point.rb +++ b/app/models/point.rb @@ -77,7 +77,7 @@ class Point < ApplicationRecord timestamp.to_s, velocity.to_s, id.to_s, - country.to_s + country_name.to_s ] ) end @@ -87,4 +87,9 @@ class Point < ApplicationRecord self.country_id = found_in_country&.id save! if changed? end + + def country_name + # Safely get country name from association or attribute + self.country&.name || read_attribute(:country) || '' + end end diff --git a/app/services/exception_reporter.rb b/app/services/exception_reporter.rb index c3b4f44a..a21a4aaa 100644 --- a/app/services/exception_reporter.rb +++ b/app/services/exception_reporter.rb @@ -1,10 +1,10 @@ # frozen_string_literal: true class ExceptionReporter - def self.call(exception) + def self.call(exception, human_message = nil) return unless DawarichSettings.self_hosted? - Rails.logger.error "Exception: #{exception.message}" + Rails.logger.error "#{human_message}: #{exception.message}" Sentry.capture_exception(exception) end diff --git a/app/services/imports/create.rb b/app/services/imports/create.rb index b7d6bc0d..d96ba38a 100644 --- a/app/services/imports/create.rb +++ b/app/services/imports/create.rb @@ -9,13 +9,19 @@ class Imports::Create end def call + import.update!(status: :processing) + importer(import.source).new(import, user.id).call schedule_stats_creating(user.id) schedule_visit_suggesting(user.id, import) update_import_points_count(import) rescue StandardError => e + import.update!(status: :failed) + create_import_failed_notification(import, user, e) + ensure + import.update!(status: :completed) if import.completed? 
end private diff --git a/app/services/notifications.rb b/app/services/notifications.rb new file mode 100644 index 00000000..6a85bd2e --- /dev/null +++ b/app/services/notifications.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +module Notifications + class Create + attr_reader :user, :kind, :title, :content + + def initialize(user:, kind:, title:, content:) + @user = user + @kind = kind + @title = title + @content = content + end + + def call + Notification.create!(user:, kind:, title:, content:) + end + end +end diff --git a/app/services/notifications/create.rb b/app/services/notifications/create.rb deleted file mode 100644 index 47506d67..00000000 --- a/app/services/notifications/create.rb +++ /dev/null @@ -1,16 +0,0 @@ -# frozen_string_literal: true - -class Notifications::Create - attr_reader :user, :kind, :title, :content - - def initialize(user:, kind:, title:, content:) - @user = user - @kind = kind - @title = title - @content = content - end - - def call - Notification.create!(user:, kind:, title:, content:) - end -end diff --git a/app/services/users/export_data.rb b/app/services/users/export_data.rb index 527ad2ae..a7e3c61a 100644 --- a/app/services/users/export_data.rb +++ b/app/services/users/export_data.rb @@ -6,6 +6,17 @@ require 'zip' # # Output JSON Structure Example: # { +# "counts": { +# "areas": 5, +# "imports": 12, +# "exports": 3, +# "trips": 8, +# "stats": 24, +# "notifications": 10, +# "points": 15000, +# "visits": 45, +# "places": 20 +# }, # "settings": { # "distance_unit": "km", # "timezone": "UTC", @@ -227,7 +238,11 @@ class Users::ExportData # Stream JSON writing instead of building in memory File.open(json_file_path, 'w') do |file| - file.write('{"settings":') + # Start JSON and add counts summary + file.write('{"counts":') + file.write(calculate_entity_counts.to_json) + + file.write(',"settings":') file.write(user.safe_settings.settings.to_json) file.write(',"areas":') @@ -281,7 +296,7 @@ class Users::ExportData # Mark export as failed if an error occurs export_record.update!(status: :failed) if export_record - ExceptionReporter.call(e) + ExceptionReporter.call(e, 'Export failed') raise e ensure @@ -302,30 +317,44 @@ class Users::ExportData @files_directory end + def calculate_entity_counts + Rails.logger.info "Calculating entity counts for export" + + counts = { + areas: user.areas.count, + imports: user.imports.count, + exports: user.exports.count, + trips: user.trips.count, + stats: user.stats.count, + notifications: user.notifications.count, + points: user.tracked_points.count, + visits: user.visits.count, + places: user.places.count + } + + Rails.logger.info "Entity counts: #{counts}" + counts + end + def create_zip_archive(export_directory, zip_file_path) + # Set global compression level for better file size reduction + original_compression = Zip.default_compression + Zip.default_compression = Zlib::BEST_COMPRESSION + # Create zip archive with optimized compression Zip::File.open(zip_file_path, Zip::File::CREATE) do |zipfile| - # Set higher compression for better file size reduction - zipfile.default_compression = Zip::Entry::DEFLATED - zipfile.default_compression_level = 9 # Maximum compression - Dir.glob(export_directory.join('**', '*')).each do |file| next if File.directory?(file) || file == zip_file_path.to_s relative_path = file.sub(export_directory.to_s + '/', '') - # Add file with specific compression settings - zipfile.add(relative_path, file) do |entry| - # JSON files compress very well, so use maximum compression - if 
file.end_with?('.json') - entry.compression_level = 9 - else - # For other files (images, etc.), use balanced compression - entry.compression_level = 6 - end - end + # Add file to the zip archive + zipfile.add(relative_path, file) end end + ensure + # Restore original compression level + Zip.default_compression = original_compression if original_compression end def cleanup_temporary_files(export_directory) @@ -334,14 +363,17 @@ class Users::ExportData Rails.logger.info "Cleaning up temporary export directory: #{export_directory}" FileUtils.rm_rf(export_directory) rescue StandardError => e - ExceptionReporter.call(e) + ExceptionReporter.call(e, 'Failed to cleanup temporary files') end def create_success_notification + counts = calculate_entity_counts + summary = "#{counts[:points]} points, #{counts[:visits]} visits, #{counts[:places]} places, #{counts[:trips]} trips" + ::Notifications::Create.new( user: user, title: 'Export completed', - content: 'Your data export has been processed successfully. You can download it from the exports page.', + content: "Your data export has been processed successfully (#{summary}). You can download it from the exports page.", kind: :info ).call end diff --git a/app/services/users/export_data/points.rb b/app/services/users/export_data/points.rb index 4e17f857..e7beceab 100644 --- a/app/services/users/export_data/points.rb +++ b/app/services/users/export_data/points.rb @@ -9,14 +9,15 @@ class Users::ExportData::Points # Single optimized query with all joins to avoid N+1 queries points_sql = <<-SQL SELECT - p.battery_status, p.battery, p.timestamp, p.altitude, p.velocity, p.accuracy, + p.id, p.battery_status, p.battery, p.timestamp, p.altitude, p.velocity, p.accuracy, p.ping, p.tracker_id, p.topic, p.trigger, p.bssid, p.ssid, p.connection, p.vertical_accuracy, p.mode, p.inrids, p.in_regions, p.raw_data, p.city, p.country, p.geodata, p.reverse_geocoded_at, p.course, p.course_accuracy, p.external_track_id, p.created_at, p.updated_at, - p.lonlat, - ST_X(p.lonlat::geometry) as longitude, - ST_Y(p.lonlat::geometry) as latitude, + p.lonlat, p.longitude, p.latitude, + -- Extract coordinates from lonlat if individual fields are missing + COALESCE(p.longitude, ST_X(p.lonlat::geometry)) as computed_longitude, + COALESCE(p.latitude, ST_Y(p.lonlat::geometry)) as computed_latitude, -- Import reference i.name as import_name, i.source as import_source, @@ -42,7 +43,16 @@ class Users::ExportData::Points Rails.logger.info "Processing #{result.count} points for export..." # Process results efficiently - result.map do |row| + result.filter_map do |row| + # Skip points without any coordinate data + has_lonlat = row['lonlat'].present? + has_coordinates = row['computed_longitude'].present? && row['computed_latitude'].present? 
+ + unless has_lonlat || has_coordinates + Rails.logger.debug "Skipping point without coordinates: id=#{row['id'] || 'unknown'}" + next + end + point_hash = { 'battery_status' => row['battery_status'], 'battery' => row['battery'], @@ -70,11 +80,12 @@ class Users::ExportData::Points 'course_accuracy' => row['course_accuracy'], 'external_track_id' => row['external_track_id'], 'created_at' => row['created_at'], - 'updated_at' => row['updated_at'], - 'longitude' => row['longitude'], - 'latitude' => row['latitude'] + 'updated_at' => row['updated_at'] } + # Ensure all coordinate fields are populated + populate_coordinate_fields(point_hash, row) + # Add relationship references only if they exist if row['import_name'] point_hash['import_reference'] = { @@ -107,4 +118,22 @@ class Users::ExportData::Points private attr_reader :user + + def populate_coordinate_fields(point_hash, row) + longitude = row['computed_longitude'] + latitude = row['computed_latitude'] + lonlat = row['lonlat'] + + # If lonlat is present, use it and the computed coordinates + if lonlat.present? + point_hash['lonlat'] = lonlat + point_hash['longitude'] = longitude + point_hash['latitude'] = latitude + elsif longitude.present? && latitude.present? + # If lonlat is missing but we have coordinates, reconstruct lonlat + point_hash['longitude'] = longitude + point_hash['latitude'] = latitude + point_hash['lonlat'] = "POINT(#{longitude} #{latitude})" + end + end end diff --git a/app/services/users/import_data.rb b/app/services/users/import_data.rb new file mode 100644 index 00000000..f456c577 --- /dev/null +++ b/app/services/users/import_data.rb @@ -0,0 +1,202 @@ +# frozen_string_literal: true + +require 'zip' + +# Users::ImportData - Imports complete user data from exported archive +# +# This service processes a ZIP archive created by Users::ExportData and recreates +# the user's data with preserved relationships. The import follows a specific order +# to handle foreign key dependencies: +# +# 1. Settings (applied directly to user) +# 2. Areas (standalone user data) +# 3. Places (referenced by visits) +# 4. Imports (including file attachments) +# 5. Exports (including file attachments) +# 6. Trips (standalone user data) +# 7. Stats (standalone user data) +# 8. Notifications (standalone user data) +# 9. Visits (references places) +# 10. Points (references imports, countries, visits) +# +# Files are restored to their original locations and properly attached to records. + +class Users::ImportData + def initialize(user, archive_path) + @user = user + @archive_path = archive_path + @import_stats = { + settings_updated: false, + areas_created: 0, + places_created: 0, + imports_created: 0, + exports_created: 0, + trips_created: 0, + stats_created: 0, + notifications_created: 0, + visits_created: 0, + points_created: 0, + files_restored: 0 + } + end + + def import + # Create a temporary directory for extraction + @import_directory = Rails.root.join('tmp', "import_#{user.email.gsub(/[^0-9A-Za-z._-]/, '_')}_#{Time.current.to_i}") + FileUtils.mkdir_p(@import_directory) + + ActiveRecord::Base.transaction do + extract_archive + data = load_json_data + + import_in_correct_order(data) + + create_success_notification + + @import_stats + end + rescue StandardError => e + ExceptionReporter.call(e, 'Data import failed') + create_failure_notification(e) + raise e + ensure + cleanup_temporary_files(@import_directory) if @import_directory&.exist? 
+ end + + private + + attr_reader :user, :archive_path, :import_stats + + def extract_archive + Rails.logger.info "Extracting archive: #{archive_path}" + + Zip::File.open(archive_path) do |zip_file| + zip_file.each do |entry| + extraction_path = @import_directory.join(entry.name) + + # Ensure directory exists + FileUtils.mkdir_p(File.dirname(extraction_path)) + + # Extract file + entry.extract(extraction_path) + end + end + end + + def load_json_data + json_path = @import_directory.join('data.json') + + unless File.exist?(json_path) + raise StandardError, "Data file not found in archive: data.json" + end + + JSON.parse(File.read(json_path)) + rescue JSON::ParserError => e + raise StandardError, "Invalid JSON format in data file: #{e.message}" + end + + def import_in_correct_order(data) + Rails.logger.info "Starting data import for user: #{user.email}" + + # Log expected counts if available + if data['counts'] + Rails.logger.info "Expected entity counts from export: #{data['counts']}" + end + + # Import in dependency order + import_settings(data['settings']) if data['settings'] + import_areas(data['areas']) if data['areas'] + import_places(data['places']) if data['places'] + import_imports(data['imports']) if data['imports'] + import_exports(data['exports']) if data['exports'] + import_trips(data['trips']) if data['trips'] + import_stats(data['stats']) if data['stats'] + import_notifications(data['notifications']) if data['notifications'] + import_visits(data['visits']) if data['visits'] + import_points(data['points']) if data['points'] + + Rails.logger.info "Data import completed. Stats: #{@import_stats}" + end + + def import_settings(settings_data) + Users::ImportData::Settings.new(user, settings_data).call + @import_stats[:settings_updated] = true + end + + def import_areas(areas_data) + areas_created = Users::ImportData::Areas.new(user, areas_data).call + @import_stats[:areas_created] = areas_created + end + + def import_places(places_data) + places_created = Users::ImportData::Places.new(user, places_data).call + @import_stats[:places_created] = places_created + end + + def import_imports(imports_data) + imports_created, files_restored = Users::ImportData::Imports.new(user, imports_data, @import_directory.join('files')).call + @import_stats[:imports_created] = imports_created + @import_stats[:files_restored] += files_restored + end + + def import_exports(exports_data) + exports_created, files_restored = Users::ImportData::Exports.new(user, exports_data, @import_directory.join('files')).call + @import_stats[:exports_created] = exports_created + @import_stats[:files_restored] += files_restored + end + + def import_trips(trips_data) + trips_created = Users::ImportData::Trips.new(user, trips_data).call + @import_stats[:trips_created] = trips_created + end + + def import_stats(stats_data) + stats_created = Users::ImportData::Stats.new(user, stats_data).call + @import_stats[:stats_created] = stats_created + end + + def import_notifications(notifications_data) + notifications_created = Users::ImportData::Notifications.new(user, notifications_data).call + @import_stats[:notifications_created] = notifications_created + end + + def import_visits(visits_data) + visits_created = Users::ImportData::Visits.new(user, visits_data).call + @import_stats[:visits_created] = visits_created + end + + def import_points(points_data) + points_created = Users::ImportData::Points.new(user, points_data).call + @import_stats[:points_created] = points_created + end + + def cleanup_temporary_files(import_directory) 
+ return unless File.directory?(import_directory) + + Rails.logger.info "Cleaning up temporary import directory: #{import_directory}" + FileUtils.rm_rf(import_directory) + rescue StandardError => e + ExceptionReporter.call(e, 'Failed to cleanup temporary files') + end + + def create_success_notification + summary = "#{@import_stats[:points_created]} points, #{@import_stats[:visits_created]} visits, " \ + "#{@import_stats[:places_created]} places, #{@import_stats[:trips_created]} trips" + + ::Notifications::Create.new( + user: user, + title: 'Data import completed', + content: "Your data has been imported successfully (#{summary}).", + kind: :info + ).call + end + + def create_failure_notification(error) + ::Notifications::Create.new( + user: user, + title: 'Data import failed', + content: "Your data import failed with error: #{error.message}. Please check the archive format and try again.", + kind: :error + ).call + end +end diff --git a/app/services/users/import_data/areas.rb b/app/services/users/import_data/areas.rb new file mode 100644 index 00000000..4fa6f000 --- /dev/null +++ b/app/services/users/import_data/areas.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +class Users::ImportData::Areas + def initialize(user, areas_data) + @user = user + @areas_data = areas_data + end + + def call + return 0 unless areas_data.is_a?(Array) + + Rails.logger.info "Importing #{areas_data.size} areas for user: #{user.email}" + + areas_created = 0 + + areas_data.each do |area_data| + next unless area_data.is_a?(Hash) + + # Skip if area already exists (match by name and coordinates) + existing_area = user.areas.find_by( + name: area_data['name'], + latitude: area_data['latitude'], + longitude: area_data['longitude'] + ) + + if existing_area + Rails.logger.debug "Area already exists: #{area_data['name']}" + next + end + + # Create new area + area_attributes = area_data.merge(user: user) + # Ensure radius is present (required by model validation) + area_attributes['radius'] ||= 100 # Default radius if not provided + + area = user.areas.create!(area_attributes) + areas_created += 1 + + Rails.logger.debug "Created area: #{area.name}" + rescue ActiveRecord::RecordInvalid => e + ExceptionReporter.call(e, "Failed to create area") + + next + end + + Rails.logger.info "Areas import completed. 
Created: #{areas_created}" + areas_created + end + + private + + attr_reader :user, :areas_data +end diff --git a/app/services/users/import_data/exports.rb b/app/services/users/import_data/exports.rb new file mode 100644 index 00000000..fc34fb93 --- /dev/null +++ b/app/services/users/import_data/exports.rb @@ -0,0 +1,92 @@ +# frozen_string_literal: true + +class Users::ImportData::Exports + def initialize(user, exports_data, files_directory) + @user = user + @exports_data = exports_data + @files_directory = files_directory + end + + def call + return [0, 0] unless exports_data.is_a?(Array) + + Rails.logger.info "Importing #{exports_data.size} exports for user: #{user.email}" + + exports_created = 0 + files_restored = 0 + + exports_data.each do |export_data| + next unless export_data.is_a?(Hash) + + # Check if export already exists (match by name and created_at) + existing_export = user.exports.find_by( + name: export_data['name'], + created_at: export_data['created_at'] + ) + + if existing_export + Rails.logger.debug "Export already exists: #{export_data['name']}" + next + end + + # Create new export + export_record = create_export_record(export_data) + exports_created += 1 + + # Restore file if present + if export_data['file_name'] && restore_export_file(export_record, export_data) + files_restored += 1 + end + + Rails.logger.debug "Created export: #{export_record.name}" + end + + Rails.logger.info "Exports import completed. Created: #{exports_created}, Files: #{files_restored}" + [exports_created, files_restored] + end + + private + + attr_reader :user, :exports_data, :files_directory + + def create_export_record(export_data) + export_attributes = prepare_export_attributes(export_data) + user.exports.create!(export_attributes) + end + + def prepare_export_attributes(export_data) + export_data.except( + 'file_name', + 'original_filename', + 'file_size', + 'content_type', + 'file_error' + ).merge(user: user) + end + + def restore_export_file(export_record, export_data) + file_path = files_directory.join(export_data['file_name']) + + unless File.exist?(file_path) + Rails.logger.warn "Export file not found: #{export_data['file_name']}" + return false + end + + begin + # Attach the file to the export record + export_record.file.attach( + io: File.open(file_path), + filename: export_data['original_filename'] || export_data['file_name'], + content_type: export_data['content_type'] || 'application/octet-stream' + ) + + Rails.logger.debug "Restored file for export: #{export_record.name}" + + true + rescue StandardError => e + ExceptionReporter.call(e, "Export file restoration failed") + + false + end + end +end diff --git a/app/services/users/import_data/imports.rb b/app/services/users/import_data/imports.rb new file mode 100644 index 00000000..167e55bb --- /dev/null +++ b/app/services/users/import_data/imports.rb @@ -0,0 +1,102 @@ +# frozen_string_literal: true + +class Users::ImportData::Imports + def initialize(user, imports_data, files_directory) + @user = user + @imports_data = imports_data + @files_directory = files_directory + end + + def call + return [0, 0] unless imports_data.is_a?(Array) + + Rails.logger.info "Importing #{imports_data.size} imports for user: #{user.email}" + + imports_created = 0 + files_restored = 0 + + imports_data.each do |import_data| + next unless import_data.is_a?(Hash) + + # Check if import already exists (match by name, source, and created_at) + existing_import = user.imports.find_by( + name: import_data['name'], + source: import_data['source'], + created_at: 
import_data['created_at'] + ) + + if existing_import + Rails.logger.debug "Import already exists: #{import_data['name']}" + next + end + + # Create new import + import_record = create_import_record(import_data) + next unless import_record # Skip if creation failed + + imports_created += 1 + + # Restore file if present + if import_data['file_name'] && restore_import_file(import_record, import_data) + files_restored += 1 + end + end + + Rails.logger.info "Imports import completed. Created: #{imports_created}, Files restored: #{files_restored}" + [imports_created, files_restored] + end + + private + + attr_reader :user, :imports_data, :files_directory + + def create_import_record(import_data) + import_attributes = prepare_import_attributes(import_data) + + begin + import_record = user.imports.create!(import_attributes) + Rails.logger.debug "Created import: #{import_record.name}" + import_record + rescue ActiveRecord::RecordInvalid => e + Rails.logger.error "Failed to create import: #{e.message}" + nil + end + end + + def prepare_import_attributes(import_data) + import_data.except( + 'file_name', + 'original_filename', + 'file_size', + 'content_type', + 'file_error', + 'updated_at' + ).merge(user: user) + end + + def restore_import_file(import_record, import_data) + file_path = files_directory.join(import_data['file_name']) + + unless File.exist?(file_path) + Rails.logger.warn "Import file not found: #{import_data['file_name']}" + return false + end + + begin + # Attach the file to the import record + import_record.file.attach( + io: File.open(file_path), + filename: import_data['original_filename'] || import_data['file_name'], + content_type: import_data['content_type'] || 'application/octet-stream' + ) + + Rails.logger.debug "Restored file for import: #{import_record.name}" + + true + rescue StandardError => e + ExceptionReporter.call(e, "Import file restoration failed") + + false + end + end +end diff --git a/app/services/users/import_data/notifications.rb b/app/services/users/import_data/notifications.rb new file mode 100644 index 00000000..842435b8 --- /dev/null +++ b/app/services/users/import_data/notifications.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +class Users::ImportData::Notifications + def initialize(user, notifications_data) + @user = user + @notifications_data = notifications_data + end + + def call + return 0 unless notifications_data.is_a?(Array) + + Rails.logger.info "Importing #{notifications_data.size} notifications for user: #{user.email}" + + notifications_created = 0 + + notifications_data.each do |notification_data| + next unless notification_data.is_a?(Hash) + + # Check if notification already exists (match by title, content, and created_at) + existing_notification = user.notifications.find_by( + title: notification_data['title'], + content: notification_data['content'], + created_at: notification_data['created_at'] + ) + + if existing_notification + Rails.logger.debug "Notification already exists: #{notification_data['title']}" + next + end + + # Create new notification + notification_attributes = notification_data.except('created_at', 'updated_at') + notification = user.notifications.create!(notification_attributes) + notifications_created += 1 + + Rails.logger.debug "Created notification: #{notification.title}" + rescue ActiveRecord::RecordInvalid => e + Rails.logger.error "Failed to create notification: #{e.message}" + next + end + + Rails.logger.info "Notifications import completed. 
Created: #{notifications_created}" + notifications_created + end + + private + + attr_reader :user, :notifications_data +end diff --git a/app/services/users/import_data/places.rb b/app/services/users/import_data/places.rb new file mode 100644 index 00000000..b8226cb1 --- /dev/null +++ b/app/services/users/import_data/places.rb @@ -0,0 +1,76 @@ +# frozen_string_literal: true + +class Users::ImportData::Places + def initialize(user, places_data) + @user = user + @places_data = places_data + end + + def call + return 0 unless places_data.is_a?(Array) + + Rails.logger.info "Importing #{places_data.size} places for user: #{user.email}" + + places_created = 0 + + places_data.each do |place_data| + next unless place_data.is_a?(Hash) + + # Find or create place by name and coordinates + place = find_or_create_place(place_data) + places_created += 1 if place&.respond_to?(:previously_new_record?) && place.previously_new_record? + end + + Rails.logger.info "Places import completed. Created: #{places_created}" + places_created + end + + private + + attr_reader :user, :places_data + + def find_or_create_place(place_data) + name = place_data['name'] + latitude = place_data['latitude']&.to_f + longitude = place_data['longitude']&.to_f + + # Skip if essential data is missing + unless name.present? && latitude.present? && longitude.present? + Rails.logger.debug "Skipping place with missing required data: #{place_data.inspect}" + return nil + end + + # Try to find existing place by name first, then by coordinates + existing_place = Place.find_by(name: name) + + # If no place with same name, check by coordinates + unless existing_place + existing_place = Place.where(latitude: latitude, longitude: longitude).first + end + + if existing_place + Rails.logger.debug "Place already exists: #{name}" + existing_place.define_singleton_method(:previously_new_record?) { false } + return existing_place + end + + # Create new place with lonlat point + place_attributes = place_data.except('created_at', 'updated_at', 'latitude', 'longitude') + place_attributes['lonlat'] = "POINT(#{longitude} #{latitude})" + place_attributes['latitude'] = latitude + place_attributes['longitude'] = longitude + # Remove any user reference since Place doesn't belong to user directly + place_attributes.delete('user') + + begin + place = Place.create!(place_attributes) + place.define_singleton_method(:previously_new_record?) 
{ true } + Rails.logger.debug "Created place: #{place.name}" + + place + rescue ActiveRecord::RecordInvalid => e + Rails.logger.error "Failed to create place: #{e.message}" + nil + end + end +end diff --git a/app/services/users/import_data/points.rb b/app/services/users/import_data/points.rb new file mode 100644 index 00000000..a33427d1 --- /dev/null +++ b/app/services/users/import_data/points.rb @@ -0,0 +1,191 @@ +# frozen_string_literal: true + +class Users::ImportData::Points + def initialize(user, points_data) + @user = user + @points_data = points_data + end + + def call + return 0 unless points_data.is_a?(Array) + + Rails.logger.info "Importing #{points_data.size} points for user: #{user.email}" + + points_created = 0 + skipped_invalid = 0 + + points_data.each do |point_data| + next unless point_data.is_a?(Hash) + + # Skip points with invalid or missing required data + unless valid_point_data?(point_data) + skipped_invalid += 1 + next + end + + # Check if point already exists (match by coordinates, timestamp, and user) + if point_exists?(point_data) + next + end + + # Create new point + point_record = create_point_record(point_data) + points_created += 1 if point_record + + if points_created % 1000 == 0 + Rails.logger.debug "Imported #{points_created} points..." + end + end + + if skipped_invalid > 0 + Rails.logger.warn "Skipped #{skipped_invalid} points with invalid or missing required data" + end + + Rails.logger.info "Points import completed. Created: #{points_created}" + points_created + end + + private + + attr_reader :user, :points_data + + def point_exists?(point_data) + return false unless point_data['lonlat'].present? && point_data['timestamp'].present? + + Point.exists?( + lonlat: point_data['lonlat'], + timestamp: point_data['timestamp'], + user_id: user.id + ) + rescue StandardError => e + Rails.logger.debug "Error checking if point exists: #{e.message}" + false + end + + def create_point_record(point_data) + point_attributes = prepare_point_attributes(point_data) + + begin + # Create point and skip the automatic country assignment callback since we're handling it manually + point = Point.create!(point_attributes) + + # If we have a country assigned via country_info, update the point to set it + if point_attributes[:country].present? 
+ point.update_column(:country_id, point_attributes[:country].id) + point.reload + end + + point + rescue ActiveRecord::RecordInvalid => e + Rails.logger.error "Failed to create point: #{e.message}" + Rails.logger.error "Point data: #{point_data.inspect}" + Rails.logger.error "Prepared attributes: #{point_attributes.inspect}" + nil + rescue StandardError => e + Rails.logger.error "Unexpected error creating point: #{e.message}" + Rails.logger.error "Point data: #{point_data.inspect}" + Rails.logger.error "Prepared attributes: #{point_attributes.inspect}" + Rails.logger.error "Backtrace: #{e.backtrace.first(5).join('\n')}" + nil + end + end + + def prepare_point_attributes(point_data) + # Start with base attributes, excluding fields that need special handling + attributes = point_data.except( + 'created_at', + 'updated_at', + 'import_reference', + 'country_info', + 'visit_reference', + 'country' # Exclude the string country field - handled via country_info relationship + ).merge(user: user) + + # Handle lonlat reconstruction if missing (for backward compatibility) + ensure_lonlat_field(attributes, point_data) + + # Find and assign related records + assign_import_reference(attributes, point_data['import_reference']) + assign_country_reference(attributes, point_data['country_info']) + assign_visit_reference(attributes, point_data['visit_reference']) + + attributes + end + + def assign_import_reference(attributes, import_reference) + return unless import_reference.is_a?(Hash) + + import = user.imports.find_by( + name: import_reference['name'], + source: import_reference['source'], + created_at: import_reference['created_at'] + ) + + attributes[:import] = import if import + end + + def assign_country_reference(attributes, country_info) + return unless country_info.is_a?(Hash) + + # Try to find country by all attributes first + country = Country.find_by( + name: country_info['name'], + iso_a2: country_info['iso_a2'], + iso_a3: country_info['iso_a3'] + ) + + # If not found by all attributes, try to find by name only + if country.nil? && country_info['name'].present? + country = Country.find_by(name: country_info['name']) + end + + # If still not found, create a new country record with minimal data + if country.nil? && country_info['name'].present? + country = Country.find_or_create_by(name: country_info['name']) do |new_country| + new_country.iso_a2 = country_info['iso_a2'] || country_info['name'][0..1].upcase + new_country.iso_a3 = country_info['iso_a3'] || country_info['name'][0..2].upcase + new_country.geom = "MULTIPOLYGON (((0 0, 1 0, 1 1, 0 1, 0 0)))" # Default geometry + end + end + + attributes[:country] = country if country + end + + def assign_visit_reference(attributes, visit_reference) + return unless visit_reference.is_a?(Hash) + + visit = user.visits.find_by( + name: visit_reference['name'], + started_at: visit_reference['started_at'], + ended_at: visit_reference['ended_at'] + ) + + attributes[:visit] = visit if visit + end + + def valid_point_data?(point_data) + # Check for required fields + return false unless point_data.is_a?(Hash) + return false unless point_data['timestamp'].present? + + # Check if we have either lonlat or longitude/latitude + has_lonlat = point_data['lonlat'].present? && point_data['lonlat'].is_a?(String) && point_data['lonlat'].start_with?('POINT(') + has_coordinates = point_data['longitude'].present? && point_data['latitude'].present? 
+ + return false unless has_lonlat || has_coordinates + + true + rescue StandardError => e + Rails.logger.debug "Point validation failed: #{e.message} for data: #{point_data.inspect}" + false + end + + def ensure_lonlat_field(attributes, point_data) + # If lonlat is missing but we have longitude/latitude, reconstruct it + if attributes['lonlat'].blank? && point_data['longitude'].present? && point_data['latitude'].present? + longitude = point_data['longitude'].to_f + latitude = point_data['latitude'].to_f + attributes['lonlat'] = "POINT(#{longitude} #{latitude})" + end + end +end diff --git a/app/services/users/import_data/settings.rb b/app/services/users/import_data/settings.rb new file mode 100644 index 00000000..943b63a1 --- /dev/null +++ b/app/services/users/import_data/settings.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +class Users::ImportData::Settings + def initialize(user, settings_data) + @user = user + @settings_data = settings_data + end + + def call + return false unless settings_data.is_a?(Hash) + + Rails.logger.info "Importing settings for user: #{user.email}" + + # Merge imported settings with existing settings + current_settings = user.settings || {} + updated_settings = current_settings.merge(settings_data) + + user.update!(settings: updated_settings) + + Rails.logger.info "Settings import completed" + true + end + + private + + attr_reader :user, :settings_data +end diff --git a/app/services/users/import_data/stats.rb b/app/services/users/import_data/stats.rb new file mode 100644 index 00000000..3ad22bb6 --- /dev/null +++ b/app/services/users/import_data/stats.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +class Users::ImportData::Stats + def initialize(user, stats_data) + @user = user + @stats_data = stats_data + end + + def call + return 0 unless stats_data.is_a?(Array) + + Rails.logger.info "Importing #{stats_data.size} stats for user: #{user.email}" + + stats_created = 0 + + stats_data.each do |stat_data| + next unless stat_data.is_a?(Hash) + + # Check if stat already exists (match by year and month) + existing_stat = user.stats.find_by( + year: stat_data['year'], + month: stat_data['month'] + ) + + if existing_stat + Rails.logger.debug "Stat already exists: #{stat_data['year']}-#{stat_data['month']}" + next + end + + # Create new stat + stat_attributes = stat_data.except('created_at', 'updated_at') + stat = user.stats.create!(stat_attributes) + stats_created += 1 + + Rails.logger.debug "Created stat: #{stat.year}-#{stat.month}" + rescue ActiveRecord::RecordInvalid => e + Rails.logger.error "Failed to create stat: #{e.message}" + next + end + + Rails.logger.info "Stats import completed. 
Created: #{stats_created}" + stats_created + end + + private + + attr_reader :user, :stats_data +end diff --git a/app/services/users/import_data/trips.rb b/app/services/users/import_data/trips.rb new file mode 100644 index 00000000..7f8d3f72 --- /dev/null +++ b/app/services/users/import_data/trips.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +class Users::ImportData::Trips + def initialize(user, trips_data) + @user = user + @trips_data = trips_data + end + + def call + return 0 unless trips_data.is_a?(Array) + + Rails.logger.info "Importing #{trips_data.size} trips for user: #{user.email}" + + trips_created = 0 + + trips_data.each do |trip_data| + next unless trip_data.is_a?(Hash) + + # Check if trip already exists (match by name and timestamps) + existing_trip = user.trips.find_by( + name: trip_data['name'], + started_at: trip_data['started_at'], + ended_at: trip_data['ended_at'] + ) + + if existing_trip + Rails.logger.debug "Trip already exists: #{trip_data['name']}" + next + end + + # Create new trip + trip_attributes = trip_data.except('created_at', 'updated_at') + trip = user.trips.create!(trip_attributes) + trips_created += 1 + + Rails.logger.debug "Created trip: #{trip.name}" + rescue ActiveRecord::RecordInvalid => e + Rails.logger.error "Failed to create trip: #{e.message}" + next + end + + Rails.logger.info "Trips import completed. Created: #{trips_created}" + trips_created + end + + private + + attr_reader :user, :trips_data +end diff --git a/app/services/users/import_data/visits.rb b/app/services/users/import_data/visits.rb new file mode 100644 index 00000000..fbdac9a1 --- /dev/null +++ b/app/services/users/import_data/visits.rb @@ -0,0 +1,90 @@ +# frozen_string_literal: true + +class Users::ImportData::Visits + def initialize(user, visits_data) + @user = user + @visits_data = visits_data + end + + def call + return 0 unless visits_data.is_a?(Array) + + Rails.logger.info "Importing #{visits_data.size} visits for user: #{user.email}" + + visits_created = 0 + + visits_data.each do |visit_data| + next unless visit_data.is_a?(Hash) + + # Check if visit already exists (match by name, timestamps, and place reference) + existing_visit = find_existing_visit(visit_data) + + if existing_visit + Rails.logger.debug "Visit already exists: #{visit_data['name']}" + next + end + + # Create new visit + begin + visit_record = create_visit_record(visit_data) + visits_created += 1 + Rails.logger.debug "Created visit: #{visit_record.name}" + rescue ActiveRecord::RecordInvalid => e + Rails.logger.error "Failed to create visit: #{e.message}" + next + end + end + + Rails.logger.info "Visits import completed. 
Created: #{visits_created}" + visits_created + end + + private + + attr_reader :user, :visits_data + + def find_existing_visit(visit_data) + user.visits.find_by( + name: visit_data['name'], + started_at: visit_data['started_at'], + ended_at: visit_data['ended_at'] + ) + end + + def create_visit_record(visit_data) + visit_attributes = prepare_visit_attributes(visit_data) + user.visits.create!(visit_attributes) + end + + def prepare_visit_attributes(visit_data) + attributes = visit_data.except('place_reference') + + # Find and assign place if referenced + if visit_data['place_reference'] + place = find_referenced_place(visit_data['place_reference']) + attributes[:place] = place if place + end + + attributes + end + + def find_referenced_place(place_reference) + return nil unless place_reference.is_a?(Hash) + + name = place_reference['name'] + latitude = place_reference['latitude'].to_f + longitude = place_reference['longitude'].to_f + + # Find place by name and coordinates (global search since places are not user-specific) + place = Place.find_by(name: name) || + Place.where("latitude = ? AND longitude = ?", latitude, longitude).first + + if place + Rails.logger.debug "Found referenced place: #{name}" + else + Rails.logger.warn "Referenced place not found: #{name} (#{latitude}, #{longitude})" + end + + place + end +end diff --git a/app/views/devise/registrations/edit.html.erb b/app/views/devise/registrations/edit.html.erb index cf753bc5..25e742a3 100644 --- a/app/views/devise/registrations/edit.html.erb +++ b/app/views/devise/registrations/edit.html.erb @@ -64,8 +64,33 @@

<%= link_to "Export my data", export_settings_users_path, class: 'btn btn-primary' %>
- <%= link_to "Import my data", import_settings_users_path, class: 'btn btn-primary' %>
+ [new import-form markup stripped during extraction: the link is replaced with a ZIP-archive upload form whose file is read as params[:archive] by Settings::UsersController#import]
+ + + + + + diff --git a/app/views/imports/index.html.erb b/app/views/imports/index.html.erb index 5e3bfc18..c503497f 100644 --- a/app/views/imports/index.html.erb +++ b/app/views/imports/index.html.erb @@ -45,6 +45,7 @@ <% if DawarichSettings.store_geodata? %> Reverse geocoded points <% end %> + Status Created at diff --git a/config/environments/development.rb b/config/environments/development.rb index 9d8c02c9..e16d6608 100644 --- a/config/environments/development.rb +++ b/config/environments/development.rb @@ -88,7 +88,8 @@ Rails.application.configure do hosts = ENV.fetch('APPLICATION_HOSTS', 'localhost').split(',') - config.action_mailer.default_url_options = { host: hosts.first, port: 3000 } + config.action_mailer.default_url_options = { host: ENV['SMTP_DOMAIN'] || hosts.first } + config.hosts.concat(hosts) if hosts.present? config.force_ssl = ENV.fetch('APPLICATION_PROTOCOL', 'http').downcase == 'https' diff --git a/config/environments/production.rb b/config/environments/production.rb index 4d6d0330..22b3a3d2 100644 --- a/config/environments/production.rb +++ b/config/environments/production.rb @@ -103,7 +103,7 @@ Rails.application.configure do # config.host_authorization = { exclude: ->(request) { request.path == "/up" } } hosts = ENV.fetch('APPLICATION_HOSTS', 'localhost').split(',') - config.action_mailer.default_url_options = { host: hosts.first, port: 3000 } + config.action_mailer.default_url_options = { host: ENV['SMTP_DOMAIN'] } config.hosts.concat(hosts) if hosts.present? config.action_mailer.delivery_method = :smtp diff --git a/db/migrate/20250627184017_add_status_to_imports.rb b/db/migrate/20250627184017_add_status_to_imports.rb new file mode 100644 index 00000000..1cafdff7 --- /dev/null +++ b/db/migrate/20250627184017_add_status_to_imports.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +class AddStatusToImports < ActiveRecord::Migration[8.0] + disable_ddl_transaction! + + def change + add_column :imports, :status, :integer, default: 0, null: false + add_index :imports, :status, algorithm: :concurrently + end +end diff --git a/db/schema.rb b/db/schema.rb index 189d5395..2c81e6bb 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -10,7 +10,7 @@ # # It's strongly recommended that you check this file into your version control system. 
-ActiveRecord::Schema[8.0].define(version: 2025_06_25_185030) do +ActiveRecord::Schema[8.0].define(version: 2025_06_27_184017) do # These are extensions that must be enabled in order to support this database enable_extension "pg_catalog.plpgsql" enable_extension "postgis" @@ -107,7 +107,9 @@ ActiveRecord::Schema[8.0].define(version: 2025_06_25_185030) do t.integer "processed", default: 0 t.jsonb "raw_data" t.integer "points_count", default: 0 + t.integer "status", default: 0, null: false t.index ["source"], name: "index_imports_on_source" + t.index ["status"], name: "index_imports_on_status" t.index ["user_id"], name: "index_imports_on_user_id" end @@ -230,6 +232,18 @@ ActiveRecord::Schema[8.0].define(version: 2025_06_25_185030) do t.index ["user_id"], name: "index_trips_on_user_id" end + create_table "user_data_imports", force: :cascade do |t| + t.bigint "user_id", null: false + t.string "status", default: "pending", null: false + t.string "archive_file_name" + t.text "error_message" + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.index ["status"], name: "index_user_data_imports_on_status" + t.index ["user_id", "created_at"], name: "index_user_data_imports_on_user_id_and_created_at" + t.index ["user_id"], name: "index_user_data_imports_on_user_id" + end + create_table "users", force: :cascade do |t| t.string "email", default: "", null: false t.string "encrypted_password", default: "", null: false @@ -282,6 +296,7 @@ ActiveRecord::Schema[8.0].define(version: 2025_06_25_185030) do add_foreign_key "points", "visits" add_foreign_key "stats", "users" add_foreign_key "trips", "users" + add_foreign_key "user_data_imports", "users" add_foreign_key "visits", "areas" add_foreign_key "visits", "places" add_foreign_key "visits", "users" diff --git a/spec/jobs/users/import_data_job_spec.rb b/spec/jobs/users/import_data_job_spec.rb new file mode 100644 index 00000000..5776ff6d --- /dev/null +++ b/spec/jobs/users/import_data_job_spec.rb @@ -0,0 +1,183 @@ +# frozen_string_literal: true + +require 'rails_helper' + +RSpec.describe Users::ImportDataJob, type: :job do + let(:user) { create(:user) } + let(:import) { create(:import, user: user, source: :user_data_archive, name: 'test_export.zip') } + let(:archive_path) { Rails.root.join('tmp', 'test_export.zip') } + let(:job) { described_class.new } + + before do + # Create a mock ZIP file + FileUtils.touch(archive_path) + + # Mock the import file attachment + allow(import).to receive(:file).and_return( + double('ActiveStorage::Attached::One', + download: proc { |&block| + File.read(archive_path).each_char { |c| block.call(c) } + } + ) + ) + end + + after do + FileUtils.rm_f(archive_path) if File.exist?(archive_path) + end + + describe '#perform' do + context 'when import is successful' do + before do + # Mock the import service + import_service = instance_double(Users::ImportData) + allow(Users::ImportData).to receive(:new).and_return(import_service) + allow(import_service).to receive(:import).and_return({ + settings_updated: true, + areas_created: 2, + places_created: 3, + imports_created: 1, + exports_created: 1, + trips_created: 2, + stats_created: 1, + notifications_created: 2, + visits_created: 4, + points_created: 1000, + files_restored: 7 + }) + + # Mock file operations + allow(File).to receive(:exist?).and_return(true) + allow(File).to receive(:delete) + allow(Rails.logger).to receive(:info) + end + + it 'calls the import service with correct parameters' do + expect(Users::ImportData).to receive(:new).with(user, 
anything) + + job.perform(import.id) + end + + it 'calls import on the service' do + import_service = instance_double(Users::ImportData) + allow(Users::ImportData).to receive(:new).and_return(import_service) + expect(import_service).to receive(:import) + + job.perform(import.id) + end + + it 'completes successfully without updating import status' do + expect(import).not_to receive(:update!) + + job.perform(import.id) + end + + it 'does not create error notifications when successful' do + expect(::Notifications::Create).not_to receive(:new) + + job.perform(import.id) + end + end + + context 'when import fails' do + let(:error_message) { 'Import failed due to invalid archive' } + let(:error) { StandardError.new(error_message) } + + before do + # Mock the import service to raise an error + import_service = instance_double(Users::ImportData) + allow(Users::ImportData).to receive(:new).and_return(import_service) + allow(import_service).to receive(:import).and_raise(error) + + # Mock notification creation + notification_service = instance_double(::Notifications::Create, call: true) + allow(::Notifications::Create).to receive(:new).and_return(notification_service) + + # Mock file operations + allow(File).to receive(:exist?).and_return(true) + allow(File).to receive(:delete) + allow(Rails.logger).to receive(:info) + + # Mock ExceptionReporter + allow(ExceptionReporter).to receive(:call) + end + + it 'reports the error to ExceptionReporter' do + expect(ExceptionReporter).to receive(:call).with(error, "Import job failed for user #{user.id}") + + expect { job.perform(import.id) }.to raise_error(StandardError, error_message) + end + + it 'does not update import status on failure' do + expect(import).not_to receive(:update!) + + expect { job.perform(import.id) }.to raise_error(StandardError, error_message) + end + + it 'creates a failure notification for the user' do + expect(::Notifications::Create).to receive(:new).with( + user: user, + title: 'Data import failed', + content: "Your data import failed with error: #{error_message}. 
Please check the archive format and try again.", + kind: :error + ) + + expect { job.perform(import.id) }.to raise_error(StandardError, error_message) + end + + it 're-raises the error' do + expect { job.perform(import.id) }.to raise_error(StandardError, error_message) + end + end + + context 'when import does not exist' do + let(:non_existent_import_id) { 999999 } + + it 'raises ActiveRecord::RecordNotFound' do + expect { job.perform(non_existent_import_id) }.to raise_error(ActiveRecord::RecordNotFound) + end + + it 'does not create a notification when import is not found' do + expect(::Notifications::Create).not_to receive(:new) + + expect { job.perform(non_existent_import_id) }.to raise_error(ActiveRecord::RecordNotFound) + end + end + + context 'when archive file download fails' do + let(:error_message) { 'File download error' } + let(:error) { StandardError.new(error_message) } + + before do + # Mock file download to fail + allow(import).to receive(:file).and_return( + double('ActiveStorage::Attached::One', download: proc { raise error }) + ) + + # Mock notification creation + notification_service = instance_double(::Notifications::Create, call: true) + allow(::Notifications::Create).to receive(:new).and_return(notification_service) + end + + it 'creates notification with the correct user object' do + notification_service = instance_double(::Notifications::Create, call: true) + expect(::Notifications::Create).to receive(:new).with( + user: user, + title: 'Data import failed', + content: a_string_matching(/Your data import failed with error:.*Please check the archive format and try again\./), + kind: :error + ).and_return(notification_service) + + expect(notification_service).to receive(:call) + + expect { job.perform(import.id) }.to raise_error(StandardError) + end + end + end + + describe 'job configuration' do + it 'is queued in the imports queue' do + expect(described_class.queue_name).to eq('imports') + end + end +end diff --git a/spec/models/import_spec.rb b/spec/models/import_spec.rb index ccb61bf5..88f06f02 100644 --- a/spec/models/import_spec.rb +++ b/spec/models/import_spec.rb @@ -25,7 +25,8 @@ RSpec.describe Import, type: :model do gpx: 4, immich_api: 5, geojson: 6, - photoprism_api: 7 + photoprism_api: 7, + user_data_archive: 8 ) end end diff --git a/spec/services/users/export_data/points_spec.rb b/spec/services/users/export_data/points_spec.rb index defc2413..b2fa0a52 100644 --- a/spec/services/users/export_data/points_spec.rb +++ b/spec/services/users/export_data/points_spec.rb @@ -50,6 +50,8 @@ RSpec.describe Users::ExportData::Points, type: :service do course: 45.5, course_accuracy: 2.5, external_track_id: 'ext-123', + longitude: -74.006, + latitude: 40.7128, lonlat: 'POINT(-74.006 40.7128)' ) end @@ -57,6 +59,8 @@ RSpec.describe Users::ExportData::Points, type: :service do create(:point, user: user, timestamp: 1640995260, + longitude: -73.9857, + latitude: 40.7484, lonlat: 'POINT(-73.9857 40.7484)' ) end @@ -211,5 +215,54 @@ RSpec.describe Users::ExportData::Points, type: :service do expect(subject.size).to eq(3) end end + + context 'when points have missing coordinate data' do + let!(:point_with_lonlat_only) do + # Point with lonlat but missing individual coordinates + point = create(:point, user: user, lonlat: 'POINT(10.0 50.0)', external_track_id: 'lonlat-only') + # Clear individual coordinate fields to simulate legacy data + point.update_columns(longitude: nil, latitude: nil) + point + end + + let!(:point_with_coordinates_only) do + # Point with coordinates but 
missing lonlat + point = create(:point, user: user, longitude: 15.0, latitude: 55.0, external_track_id: 'coords-only') + # Clear lonlat field to simulate missing geometry + point.update_columns(lonlat: nil) + point + end + + let!(:point_without_coordinates) do + # Point with no coordinate data at all + point = create(:point, user: user, external_track_id: 'no-coords') + point.update_columns(longitude: nil, latitude: nil, lonlat: nil) + point + end + + it 'includes all coordinate fields for points with lonlat only' do + point_data = subject.find { |p| p['external_track_id'] == 'lonlat-only' } + + expect(point_data).to be_present + expect(point_data['lonlat']).to be_present + expect(point_data['longitude']).to eq(10.0) + expect(point_data['latitude']).to eq(50.0) + end + + it 'includes all coordinate fields for points with coordinates only' do + point_data = subject.find { |p| p['external_track_id'] == 'coords-only' } + + expect(point_data).to be_present + expect(point_data['lonlat']).to eq('POINT(15.0 55.0)') + expect(point_data['longitude']).to eq(15.0) + expect(point_data['latitude']).to eq(55.0) + end + + it 'skips points without any coordinate data' do + point_data = subject.find { |p| p['external_track_id'] == 'no-coords' } + + expect(point_data).to be_nil + end + end end end diff --git a/spec/services/users/export_data_spec.rb b/spec/services/users/export_data_spec.rb index d4e36f9a..4fd45749 100644 --- a/spec/services/users/export_data_spec.rb +++ b/spec/services/users/export_data_spec.rb @@ -39,8 +39,18 @@ RSpec.describe Users::ExportData, type: :service do # Mock user settings allow(user).to receive(:safe_settings).and_return(double(settings: { theme: 'dark' })) + # Mock user associations for counting (needed before error occurs) + allow(user).to receive(:areas).and_return(double(count: 5)) + allow(user).to receive(:imports).and_return(double(count: 12)) + allow(user).to receive(:trips).and_return(double(count: 8)) + allow(user).to receive(:stats).and_return(double(count: 24)) + allow(user).to receive(:notifications).and_return(double(count: 10)) + allow(user).to receive(:tracked_points).and_return(double(count: 15000)) + allow(user).to receive(:visits).and_return(double(count: 45)) + allow(user).to receive(:places).and_return(double(count: 20)) + # Mock Export creation and file attachment - exports_double = double('Exports') + exports_double = double('Exports', count: 3) allow(user).to receive(:exports).and_return(exports_double) allow(exports_double).to receive(:create!).and_return(export_record) allow(export_record).to receive(:update!) @@ -137,6 +147,22 @@ RSpec.describe Users::ExportData, type: :service do result = service.export expect(result).to eq(export_record) end + + it 'calculates entity counts correctly' do + counts = service.send(:calculate_entity_counts) + + expect(counts).to eq({ + areas: 5, + imports: 12, + exports: 3, + trips: 8, + stats: 24, + notifications: 10, + points: 15000, + visits: 45, + places: 20 + }) + end end context 'when an error occurs during export' do @@ -145,7 +171,7 @@ RSpec.describe Users::ExportData, type: :service do before do # Mock Export creation first - exports_double = double('Exports') + exports_double = double('Exports', count: 3) allow(user).to receive(:exports).and_return(exports_double) allow(exports_double).to receive(:create!).and_return(export_record) allow(export_record).to receive(:update!) 
@@ -153,10 +179,21 @@ RSpec.describe Users::ExportData, type: :service do # Mock user settings and other dependencies that are needed before the error allow(user).to receive(:safe_settings).and_return(double(settings: { theme: 'dark' })) + # Mock user associations for counting + allow(user).to receive(:areas).and_return(double(count: 5)) + allow(user).to receive(:imports).and_return(double(count: 12)) + # exports already mocked above + allow(user).to receive(:trips).and_return(double(count: 8)) + allow(user).to receive(:stats).and_return(double(count: 24)) + allow(user).to receive(:notifications).and_return(double(count: 10)) + allow(user).to receive(:tracked_points).and_return(double(count: 15000)) + allow(user).to receive(:visits).and_return(double(count: 45)) + allow(user).to receive(:places).and_return(double(count: 20)) + # Then set up the error condition - make it happen during the JSON writing step allow(File).to receive(:open).with(export_directory.join('data.json'), 'w').and_raise(StandardError, error_message) - allow(Rails.logger).to receive(:error) + allow(ExceptionReporter).to receive(:call) # Mock cleanup method and pathname existence allow(service).to receive(:cleanup_temporary_files) @@ -169,8 +206,8 @@ RSpec.describe Users::ExportData, type: :service do expect { service.export }.to raise_error(StandardError, error_message) end - it 'logs the error' do - expect(Rails.logger).to receive(:error).with("Export failed: #{error_message}") + it 'reports the error via ExceptionReporter' do + expect(ExceptionReporter).to receive(:call).with(an_instance_of(StandardError), 'Export failed') expect { service.export }.to raise_error(StandardError, error_message) end @@ -188,7 +225,7 @@ RSpec.describe Users::ExportData, type: :service do context 'when export record creation fails' do before do - exports_double = double('Exports') + exports_double = double('Exports', count: 3) allow(user).to receive(:exports).and_return(exports_double) allow(exports_double).to receive(:create!).and_raise(ActiveRecord::RecordInvalid) end @@ -203,7 +240,7 @@ RSpec.describe Users::ExportData, type: :service do before do # Mock Export creation - exports_double = double('Exports') + exports_double = double('Exports', count: 3) allow(user).to receive(:exports).and_return(exports_double) allow(exports_double).to receive(:create!).and_return(export_record) allow(export_record).to receive(:update!) 
@@ -221,6 +258,18 @@ RSpec.describe Users::ExportData, type: :service do allow(Users::ExportData::Places).to receive(:new).and_return(double(call: [])) allow(user).to receive(:safe_settings).and_return(double(settings: {})) + + # Mock user associations for counting + allow(user).to receive(:areas).and_return(double(count: 5)) + allow(user).to receive(:imports).and_return(double(count: 12)) + # exports already mocked above + allow(user).to receive(:trips).and_return(double(count: 8)) + allow(user).to receive(:stats).and_return(double(count: 24)) + allow(user).to receive(:notifications).and_return(double(count: 10)) + allow(user).to receive(:tracked_points).and_return(double(count: 15000)) + allow(user).to receive(:visits).and_return(double(count: 45)) + allow(user).to receive(:places).and_return(double(count: 20)) + allow(File).to receive(:open).and_call_original allow(File).to receive(:open).with(export_directory.join('data.json'), 'w').and_yield(StringIO.new) @@ -292,11 +341,11 @@ RSpec.describe Users::ExportData, type: :service do before do allow(File).to receive(:directory?).and_return(true) allow(FileUtils).to receive(:rm_rf).and_raise(StandardError, 'Permission denied') - allow(Rails.logger).to receive(:error) + allow(ExceptionReporter).to receive(:call) end - it 'logs the error but does not re-raise' do - expect(Rails.logger).to receive(:error).with('Failed to cleanup temporary files: Permission denied') + it 'reports the error via ExceptionReporter but does not re-raise' do + expect(ExceptionReporter).to receive(:call).with(an_instance_of(StandardError), 'Failed to cleanup temporary files') expect { service.send(:cleanup_temporary_files, export_directory) }.not_to raise_error end @@ -314,5 +363,44 @@ RSpec.describe Users::ExportData, type: :service do end end end + + describe '#calculate_entity_counts' do + before do + # Mock user associations for counting + allow(user).to receive(:areas).and_return(double(count: 5)) + allow(user).to receive(:imports).and_return(double(count: 12)) + allow(user).to receive(:exports).and_return(double(count: 3)) + allow(user).to receive(:trips).and_return(double(count: 8)) + allow(user).to receive(:stats).and_return(double(count: 24)) + allow(user).to receive(:notifications).and_return(double(count: 10)) + allow(user).to receive(:tracked_points).and_return(double(count: 15000)) + allow(user).to receive(:visits).and_return(double(count: 45)) + allow(user).to receive(:places).and_return(double(count: 20)) + allow(Rails.logger).to receive(:info) + end + + it 'returns correct counts for all entity types' do + counts = service.send(:calculate_entity_counts) + + expect(counts).to eq({ + areas: 5, + imports: 12, + exports: 3, + trips: 8, + stats: 24, + notifications: 10, + points: 15000, + visits: 45, + places: 20 + }) + end + + it 'logs the calculation process' do + expect(Rails.logger).to receive(:info).with("Calculating entity counts for export") + expect(Rails.logger).to receive(:info).with(/Entity counts:/) + + service.send(:calculate_entity_counts) + end + end end end diff --git a/spec/services/users/import_data/areas_spec.rb b/spec/services/users/import_data/areas_spec.rb new file mode 100644 index 00000000..cc71bfde --- /dev/null +++ b/spec/services/users/import_data/areas_spec.rb @@ -0,0 +1,161 @@ +# frozen_string_literal: true + +require 'rails_helper' + +RSpec.describe Users::ImportData::Areas, type: :service do + let(:user) { create(:user) } + let(:areas_data) do + [ + { + 'name' => 'Home', + 'latitude' => '40.7128', + 'longitude' => '-74.0060', 
+ 'radius' => 100, + 'created_at' => '2024-01-01T00:00:00Z', + 'updated_at' => '2024-01-01T00:00:00Z' + }, + { + 'name' => 'Work', + 'latitude' => '40.7589', + 'longitude' => '-73.9851', + 'radius' => 50, + 'created_at' => '2024-01-02T00:00:00Z', + 'updated_at' => '2024-01-02T00:00:00Z' + } + ] + end + let(:service) { described_class.new(user, areas_data) } + + describe '#call' do + context 'with valid areas data' do + it 'creates new areas for the user' do + expect { service.call }.to change { user.areas.count }.by(2) + end + + it 'creates areas with correct attributes' do + service.call + + home_area = user.areas.find_by(name: 'Home') + expect(home_area).to have_attributes( + name: 'Home', + latitude: 40.7128, + longitude: -74.0060, + radius: 100 + ) + + work_area = user.areas.find_by(name: 'Work') + expect(work_area).to have_attributes( + name: 'Work', + latitude: 40.7589, + longitude: -73.9851, + radius: 50 + ) + end + + it 'returns the number of areas created' do + result = service.call + expect(result).to eq(2) + end + + it 'logs the import process' do + expect(Rails.logger).to receive(:info).with("Importing 2 areas for user: #{user.email}") + expect(Rails.logger).to receive(:info).with("Areas import completed. Created: 2") + + service.call + end + end + + context 'with duplicate areas' do + before do + # Create an existing area with same name and coordinates + user.areas.create!( + name: 'Home', + latitude: 40.7128, + longitude: -74.0060, + radius: 100 + ) + end + + it 'skips duplicate areas' do + expect { service.call }.to change { user.areas.count }.by(1) + end + + it 'logs when skipping duplicates' do + allow(Rails.logger).to receive(:debug) # Allow any debug logs + expect(Rails.logger).to receive(:debug).with("Area already exists: Home") + + service.call + end + + it 'returns only the count of newly created areas' do + result = service.call + expect(result).to eq(1) + end + end + + context 'with invalid area data' do + let(:areas_data) do + [ + { 'name' => 'Valid Area', 'latitude' => '40.7128', 'longitude' => '-74.0060', 'radius' => 100 }, + 'invalid_data', + { 'name' => 'Another Valid Area', 'latitude' => '40.7589', 'longitude' => '-73.9851', 'radius' => 50 } + ] + end + + it 'skips invalid entries and imports valid ones' do + expect { service.call }.to change { user.areas.count }.by(2) + end + + it 'returns the count of valid areas created' do + result = service.call + expect(result).to eq(2) + end + end + + context 'with nil areas data' do + let(:areas_data) { nil } + + it 'does not create any areas' do + expect { service.call }.not_to change { user.areas.count } + end + + it 'returns 0' do + result = service.call + expect(result).to eq(0) + end + end + + context 'with non-array areas data' do + let(:areas_data) { 'invalid_data' } + + it 'does not create any areas' do + expect { service.call }.not_to change { user.areas.count } + end + + it 'returns 0' do + result = service.call + expect(result).to eq(0) + end + end + + context 'with empty areas data' do + let(:areas_data) { [] } + + it 'does not create any areas' do + expect { service.call }.not_to change { user.areas.count } + end + + it 'logs the import process with 0 count' do + expect(Rails.logger).to receive(:info).with("Importing 0 areas for user: #{user.email}") + expect(Rails.logger).to receive(:info).with("Areas import completed. 
Created: 0") + + service.call + end + + it 'returns 0' do + result = service.call + expect(result).to eq(0) + end + end + end +end diff --git a/spec/services/users/import_data/imports_spec.rb b/spec/services/users/import_data/imports_spec.rb new file mode 100644 index 00000000..9934d2d8 --- /dev/null +++ b/spec/services/users/import_data/imports_spec.rb @@ -0,0 +1,270 @@ +# frozen_string_literal: true + +require 'rails_helper' + +RSpec.describe Users::ImportData::Imports, type: :service do + let(:user) { create(:user) } + let(:files_directory) { Rails.root.join('tmp', 'test_files') } + let(:imports_data) do + [ + { + 'name' => '2023_MARCH.json', + 'source' => 'google_semantic_history', + 'created_at' => '2024-01-01T00:00:00Z', + 'updated_at' => '2024-01-01T00:00:00Z', + 'processed' => true, + 'file_name' => 'import_1_2023_MARCH.json', + 'original_filename' => '2023_MARCH.json', + 'file_size' => 2048576, + 'content_type' => 'application/json' + }, + { + 'name' => '2023_APRIL.json', + 'source' => 'owntracks', + 'created_at' => '2024-01-02T00:00:00Z', + 'updated_at' => '2024-01-02T00:00:00Z', + 'processed' => false, + 'file_name' => 'import_2_2023_APRIL.json', + 'original_filename' => '2023_APRIL.json', + 'file_size' => 1048576, + 'content_type' => 'application/json' + } + ] + end + let(:service) { described_class.new(user, imports_data, files_directory) } + + before do + FileUtils.mkdir_p(files_directory) + # Create mock files + File.write(files_directory.join('import_1_2023_MARCH.json'), '{"test": "data"}') + File.write(files_directory.join('import_2_2023_APRIL.json'), '{"more": "data"}') + + # Mock the Import job to prevent it from being enqueued + allow(Import::ProcessJob).to receive(:perform_later) + end + + after do + FileUtils.rm_rf(files_directory) if files_directory.exist? + end + + describe '#call' do + context 'with valid imports data' do + it 'creates new imports for the user' do + expect { service.call }.to change { user.imports.count }.by(2) + end + + it 'creates imports with correct attributes' do + service.call + + march_import = user.imports.find_by(name: '2023_MARCH.json') + expect(march_import).to have_attributes( + name: '2023_MARCH.json', + source: 'google_semantic_history', + processed: 1 + ) + + april_import = user.imports.find_by(name: '2023_APRIL.json') + expect(april_import).to have_attributes( + name: '2023_APRIL.json', + source: 'owntracks', + processed: 0 + ) + end + + it 'attaches files to the imports' do + service.call + + march_import = user.imports.find_by(name: '2023_MARCH.json') + expect(march_import.file).to be_attached + expect(march_import.file.filename.to_s).to eq('2023_MARCH.json') + expect(march_import.file.content_type).to eq('application/json') + + april_import = user.imports.find_by(name: '2023_APRIL.json') + expect(april_import.file).to be_attached + expect(april_import.file.filename.to_s).to eq('2023_APRIL.json') + expect(april_import.file.content_type).to eq('application/json') + end + + it 'returns the number of imports and files created' do + imports_created, files_restored = service.call + expect(imports_created).to eq(2) + expect(files_restored).to eq(2) + end + + it 'logs the import process' do + allow(Rails.logger).to receive(:info) # Allow all info logs (including ActiveStorage) + expect(Rails.logger).to receive(:info).with("Importing 2 imports for user: #{user.email}") + expect(Rails.logger).to receive(:info).with("Imports import completed. 
Created: 2, Files restored: 2") + + service.call + end + end + + context 'with duplicate imports' do + before do + # Create an existing import with same name, source, and created_at + user.imports.create!( + name: '2023_MARCH.json', + source: 'google_semantic_history', + created_at: Time.parse('2024-01-01T00:00:00Z') + ) + end + + it 'skips duplicate imports' do + expect { service.call }.to change { user.imports.count }.by(1) + end + + it 'logs when skipping duplicates' do + allow(Rails.logger).to receive(:debug) # Allow any debug logs + expect(Rails.logger).to receive(:debug).with("Import already exists: 2023_MARCH.json") + + service.call + end + + it 'returns only the count of newly created imports' do + imports_created, files_restored = service.call + expect(imports_created).to eq(1) + expect(files_restored).to eq(1) + end + end + + context 'with missing files' do + before do + FileUtils.rm_f(files_directory.join('import_1_2023_MARCH.json')) + end + + it 'creates imports but logs file errors' do + expect(Rails.logger).to receive(:warn).with(/Import file not found/) + + imports_created, files_restored = service.call + expect(imports_created).to eq(2) + expect(files_restored).to eq(1) # Only one file was successfully restored + end + + it 'creates imports without file attachments for missing files' do + service.call + + march_import = user.imports.find_by(name: '2023_MARCH.json') + expect(march_import.file).not_to be_attached + end + end + + context 'with imports that have no files (null file_name)' do + let(:imports_data) do + [ + { + 'name' => 'No File Import', + 'source' => 'gpx', + 'created_at' => '2024-01-01T00:00:00Z', + 'processed' => true, + 'file_name' => nil, + 'original_filename' => nil + } + ] + end + + it 'creates imports without attempting file restoration' do + expect { service.call }.to change { user.imports.count }.by(1) + end + + it 'returns correct counts' do + imports_created, files_restored = service.call + expect(imports_created).to eq(1) + expect(files_restored).to eq(0) + end + end + + context 'with invalid import data' do + let(:imports_data) do + [ + { 'name' => 'Valid Import', 'source' => 'owntracks' }, + 'invalid_data', + { 'name' => 'Another Valid Import', 'source' => 'gpx' } + ] + end + + it 'skips invalid entries and imports valid ones' do + expect { service.call }.to change { user.imports.count }.by(2) + end + + it 'returns the count of valid imports created' do + imports_created, files_restored = service.call + expect(imports_created).to eq(2) + expect(files_restored).to eq(0) # No files for these imports + end + end + + context 'with validation errors' do + let(:imports_data) do + [ + { 'name' => 'Valid Import', 'source' => 'owntracks' }, + { 'source' => 'owntracks' }, # missing name + { 'name' => 'Missing Source Import' } # missing source + ] + end + + it 'only creates valid imports' do + expect { service.call }.to change { user.imports.count }.by(2) + + # Verify only the valid imports were created (name is required, source defaults to first enum) + created_imports = user.imports.pluck(:name, :source) + expect(created_imports).to contain_exactly( + ['Valid Import', 'owntracks'], + ['Missing Source Import', 'google_semantic_history'] + ) + end + + it 'logs validation errors' do + expect(Rails.logger).to receive(:error).at_least(:once) + + service.call + end + end + + context 'with nil imports data' do + let(:imports_data) { nil } + + it 'does not create any imports' do + expect { service.call }.not_to change { user.imports.count } + end + + it 'returns 
[0, 0]' do + result = service.call + expect(result).to eq([0, 0]) + end + end + + context 'with non-array imports data' do + let(:imports_data) { 'invalid_data' } + + it 'does not create any imports' do + expect { service.call }.not_to change { user.imports.count } + end + + it 'returns [0, 0]' do + result = service.call + expect(result).to eq([0, 0]) + end + end + + context 'with empty imports data' do + let(:imports_data) { [] } + + it 'does not create any imports' do + expect { service.call }.not_to change { user.imports.count } + end + + it 'logs the import process with 0 count' do + expect(Rails.logger).to receive(:info).with("Importing 0 imports for user: #{user.email}") + expect(Rails.logger).to receive(:info).with("Imports import completed. Created: 0, Files restored: 0") + + service.call + end + + it 'returns [0, 0]' do + result = service.call + expect(result).to eq([0, 0]) + end + end + end +end diff --git a/spec/services/users/import_data/notifications_spec.rb b/spec/services/users/import_data/notifications_spec.rb new file mode 100644 index 00000000..4e71d540 --- /dev/null +++ b/spec/services/users/import_data/notifications_spec.rb @@ -0,0 +1,181 @@ +# frozen_string_literal: true + +require 'rails_helper' + +RSpec.describe Users::ImportData::Notifications, type: :service do + let(:user) { create(:user) } + let(:notifications_data) do + [ + { + 'kind' => 'info', + 'title' => 'Import completed', + 'content' => 'Your data import has been processed successfully', + 'read_at' => '2024-01-01T12:30:00Z', + 'created_at' => '2024-01-01T12:00:00Z', + 'updated_at' => '2024-01-01T12:30:00Z' + }, + { + 'kind' => 'error', + 'title' => 'Import failed', + 'content' => 'There was an error processing your data', + 'read_at' => nil, + 'created_at' => '2024-01-02T10:00:00Z', + 'updated_at' => '2024-01-02T10:00:00Z' + } + ] + end + let(:service) { described_class.new(user, notifications_data) } + + describe '#call' do + context 'with valid notifications data' do + it 'creates new notifications for the user' do + expect { service.call }.to change { user.notifications.count }.by(2) + end + + it 'creates notifications with correct attributes' do + service.call + + import_notification = user.notifications.find_by(title: 'Import completed') + expect(import_notification).to have_attributes( + kind: 'info', + title: 'Import completed', + content: 'Your data import has been processed successfully', + read_at: Time.parse('2024-01-01T12:30:00Z') + ) + + error_notification = user.notifications.find_by(title: 'Import failed') + expect(error_notification).to have_attributes( + kind: 'error', + title: 'Import failed', + content: 'There was an error processing your data', + read_at: nil + ) + end + + it 'returns the number of notifications created' do + result = service.call + expect(result).to eq(2) + end + + it 'logs the import process' do + expect(Rails.logger).to receive(:info).with("Importing 2 notifications for user: #{user.email}") + expect(Rails.logger).to receive(:info).with("Notifications import completed. 
Created: 2") + + service.call + end + end + + context 'with duplicate notifications' do + before do + # Create an existing notification with same title, content, and created_at + user.notifications.create!( + kind: 'info', + title: 'Import completed', + content: 'Your data import has been processed successfully', + created_at: Time.parse('2024-01-01T12:00:00Z') + ) + end + + it 'skips duplicate notifications' do + expect { service.call }.to change { user.notifications.count }.by(1) + end + + it 'logs when skipping duplicates' do + allow(Rails.logger).to receive(:debug) # Allow any debug logs + expect(Rails.logger).to receive(:debug).with("Notification already exists: Import completed") + + service.call + end + + it 'returns only the count of newly created notifications' do + result = service.call + expect(result).to eq(1) + end + end + + context 'with invalid notification data' do + let(:notifications_data) do + [ + { 'kind' => 'info', 'title' => 'Valid Notification', 'content' => 'Valid content' }, + 'invalid_data', + { 'kind' => 'error', 'title' => 'Another Valid Notification', 'content' => 'Another valid content' } + ] + end + + it 'skips invalid entries and imports valid ones' do + expect { service.call }.to change { user.notifications.count }.by(2) + end + + it 'returns the count of valid notifications created' do + result = service.call + expect(result).to eq(2) + end + end + + context 'with validation errors' do + let(:notifications_data) do + [ + { 'kind' => 'info', 'title' => 'Valid Notification', 'content' => 'Valid content' }, + { 'kind' => 'info', 'content' => 'Missing title' }, # missing title + { 'kind' => 'error', 'title' => 'Missing content' } # missing content + ] + end + + it 'only creates valid notifications' do + expect { service.call }.to change { user.notifications.count }.by(1) + end + + it 'logs validation errors' do + expect(Rails.logger).to receive(:error).at_least(:once) + + service.call + end + end + + context 'with nil notifications data' do + let(:notifications_data) { nil } + + it 'does not create any notifications' do + expect { service.call }.not_to change { user.notifications.count } + end + + it 'returns 0' do + result = service.call + expect(result).to eq(0) + end + end + + context 'with non-array notifications data' do + let(:notifications_data) { 'invalid_data' } + + it 'does not create any notifications' do + expect { service.call }.not_to change { user.notifications.count } + end + + it 'returns 0' do + result = service.call + expect(result).to eq(0) + end + end + + context 'with empty notifications data' do + let(:notifications_data) { [] } + + it 'does not create any notifications' do + expect { service.call }.not_to change { user.notifications.count } + end + + it 'logs the import process with 0 count' do + expect(Rails.logger).to receive(:info).with("Importing 0 notifications for user: #{user.email}") + expect(Rails.logger).to receive(:info).with("Notifications import completed. 
Created: 0") + + service.call + end + + it 'returns 0' do + result = service.call + expect(result).to eq(0) + end + end + end +end diff --git a/spec/services/users/import_data/places_spec.rb b/spec/services/users/import_data/places_spec.rb new file mode 100644 index 00000000..f00f09a8 --- /dev/null +++ b/spec/services/users/import_data/places_spec.rb @@ -0,0 +1,216 @@ +# frozen_string_literal: true + +require 'rails_helper' + +RSpec.describe Users::ImportData::Places, type: :service do + let(:user) { create(:user) } + let(:places_data) do + [ + { + 'name' => 'Home', + 'latitude' => '40.7128', + 'longitude' => '-74.0060', + 'source' => 'manual', + 'geodata' => { 'address' => '123 Main St' }, + 'created_at' => '2024-01-01T00:00:00Z', + 'updated_at' => '2024-01-01T00:00:00Z' + }, + { + 'name' => 'Office', + 'latitude' => '40.7589', + 'longitude' => '-73.9851', + 'source' => 'photon', + 'geodata' => { 'properties' => { 'name' => 'Office Building' } }, + 'created_at' => '2024-01-02T00:00:00Z', + 'updated_at' => '2024-01-02T00:00:00Z' + } + ] + end + let(:service) { described_class.new(user, places_data) } + + describe '#call' do + context 'with valid places data' do + it 'creates new places' do + expect { service.call }.to change { Place.count }.by(2) + end + + it 'creates places with correct attributes' do + service.call + + home_place = Place.find_by(name: 'Home') + expect(home_place).to have_attributes( + name: 'Home', + source: 'manual' + ) + expect(home_place.lat).to be_within(0.0001).of(40.7128) + expect(home_place.lon).to be_within(0.0001).of(-74.0060) + expect(home_place.geodata).to eq('address' => '123 Main St') + + office_place = Place.find_by(name: 'Office') + expect(office_place).to have_attributes( + name: 'Office', + source: 'photon' + ) + expect(office_place.lat).to be_within(0.0001).of(40.7589) + expect(office_place.lon).to be_within(0.0001).of(-73.9851) + expect(office_place.geodata).to eq('properties' => { 'name' => 'Office Building' }) + end + + it 'returns the number of places created' do + result = service.call + expect(result).to eq(2) + end + + it 'logs the import process' do + expect(Rails.logger).to receive(:info).with("Importing 2 places for user: #{user.email}") + expect(Rails.logger).to receive(:info).with("Places import completed. 
Created: 2") + + service.call + end + end + + context 'with duplicate places (same name)' do + before do + # Create an existing place with same name + create(:place, name: 'Home', + latitude: 40.7128, longitude: -74.0060, + lonlat: 'POINT(-74.0060 40.7128)') + end + + it 'skips duplicate places' do + expect { service.call }.to change { Place.count }.by(1) + end + + it 'logs when skipping duplicates' do + allow(Rails.logger).to receive(:debug) # Allow any debug logs + expect(Rails.logger).to receive(:debug).with("Place already exists: Home") + + service.call + end + + it 'returns only the count of newly created places' do + result = service.call + expect(result).to eq(1) + end + end + + context 'with duplicate places (same coordinates)' do + before do + # Create an existing place with same coordinates but different name + create(:place, name: 'Different Name', + latitude: 40.7128, longitude: -74.0060, + lonlat: 'POINT(-74.0060 40.7128)') + end + + it 'skips duplicate places by coordinates' do + expect { service.call }.to change { Place.count }.by(1) + end + + it 'logs when skipping duplicates' do + allow(Rails.logger).to receive(:debug) # Allow any debug logs + expect(Rails.logger).to receive(:debug).with("Place already exists: Home") + + service.call + end + end + + context 'with places having same name but different coordinates' do + before do + create(:place, name: 'Different Place', + latitude: 41.0000, longitude: -75.0000, + lonlat: 'POINT(-75.0000 41.0000)') + end + + it 'creates both places since coordinates and names differ' do + expect { service.call }.to change { Place.count }.by(2) + end + end + + context 'with invalid place data' do + let(:places_data) do + [ + { 'name' => 'Valid Place', 'latitude' => '40.7128', 'longitude' => '-74.0060' }, + 'invalid_data', + { 'name' => 'Another Valid Place', 'latitude' => '40.7589', 'longitude' => '-73.9851' } + ] + end + + it 'skips invalid entries and imports valid ones' do + expect { service.call }.to change { Place.count }.by(2) + end + + it 'returns the count of valid places created' do + result = service.call + expect(result).to eq(2) + end + end + + context 'with missing required fields' do + let(:places_data) do + [ + { 'name' => 'Valid Place', 'latitude' => '40.7128', 'longitude' => '-74.0060' }, + { 'latitude' => '40.7589', 'longitude' => '-73.9851' }, # missing name + { 'name' => 'Invalid Place', 'longitude' => '-73.9851' }, # missing latitude + { 'name' => 'Another Invalid Place', 'latitude' => '40.7589' } # missing longitude + ] + end + + it 'only creates places with all required fields' do + expect { service.call }.to change { Place.count }.by(1) + end + + it 'logs skipped records with missing data' do + allow(Rails.logger).to receive(:debug) # Allow all debug logs + expect(Rails.logger).to receive(:debug).with(/Skipping place with missing required data/).at_least(:once) + + service.call + end + end + + context 'with nil places data' do + let(:places_data) { nil } + + it 'does not create any places' do + expect { service.call }.not_to change { Place.count } + end + + it 'returns 0' do + result = service.call + expect(result).to eq(0) + end + end + + context 'with non-array places data' do + let(:places_data) { 'invalid_data' } + + it 'does not create any places' do + expect { service.call }.not_to change { Place.count } + end + + it 'returns 0' do + result = service.call + expect(result).to eq(0) + end + end + + context 'with empty places data' do + let(:places_data) { [] } + + it 'does not create any places' do + expect { 
service.call }.not_to change { Place.count } + end + + it 'logs the import process with 0 count' do + expect(Rails.logger).to receive(:info).with("Importing 0 places for user: #{user.email}") + expect(Rails.logger).to receive(:info).with("Places import completed. Created: 0") + + service.call + end + + it 'returns 0' do + result = service.call + expect(result).to eq(0) + end + end + end +end diff --git a/spec/services/users/import_data/points_spec.rb b/spec/services/users/import_data/points_spec.rb new file mode 100644 index 00000000..b96c2d78 --- /dev/null +++ b/spec/services/users/import_data/points_spec.rb @@ -0,0 +1,139 @@ +# frozen_string_literal: true + +require 'rails_helper' + +RSpec.describe Users::ImportData::Points, type: :service do + let(:user) { create(:user) } + let(:service) { described_class.new(user, points_data) } + + describe '#call' do + context 'when importing points with country information' do + let(:country) { create(:country, name: 'Germany', iso_a2: 'DE', iso_a3: 'DEU') } + let(:points_data) do + [ + { + 'timestamp' => 1640995200, + 'lonlat' => 'POINT(13.4050 52.5200)', + 'city' => 'Berlin', + 'country' => 'Germany', # String field from export + 'country_info' => { + 'name' => 'Germany', + 'iso_a2' => 'DE', + 'iso_a3' => 'DEU' + } + } + ] + end + + before do + country # Create the country + end + + it 'creates points without type errors' do + expect { service.call }.not_to raise_error + end + + it 'assigns the correct country association' do + service.call + point = user.tracked_points.last + expect(point.country).to eq(country) + end + + it 'excludes the string country field from attributes' do + service.call + point = user.tracked_points.last + # The country association should be set, not the string attribute + expect(point.read_attribute(:country)).to be_nil + expect(point.country).to eq(country) + end + end + + context 'when country does not exist in database' do + let(:points_data) do + [ + { + 'timestamp' => 1640995200, + 'lonlat' => 'POINT(13.4050 52.5200)', + 'city' => 'Berlin', + 'country' => 'NewCountry', + 'country_info' => { + 'name' => 'NewCountry', + 'iso_a2' => 'NC', + 'iso_a3' => 'NCO' + } + } + ] + end + + it 'creates the country and assigns it' do + expect { service.call }.to change(Country, :count).by(1) + + point = user.tracked_points.last + expect(point.country.name).to eq('NewCountry') + expect(point.country.iso_a2).to eq('NC') + expect(point.country.iso_a3).to eq('NCO') + end + end + + context 'when points_data is empty' do + let(:points_data) { [] } + + it 'returns 0 without errors' do + expect(service.call).to eq(0) + end + end + + context 'when points_data is not an array' do + let(:points_data) { 'invalid' } + + it 'returns 0 without errors' do + expect(service.call).to eq(0) + end + end + + context 'when points have invalid or missing data' do + let(:points_data) do + [ + { + 'timestamp' => 1640995200, + 'lonlat' => 'POINT(13.4050 52.5200)', + 'city' => 'Berlin' + }, + { + # Missing lonlat but has longitude/latitude (should be reconstructed) + 'timestamp' => 1640995220, + 'longitude' => 11.5820, + 'latitude' => 48.1351, + 'city' => 'Munich' + }, + { + # Missing lonlat and coordinates + 'timestamp' => 1640995260, + 'city' => 'Hamburg' + }, + { + # Missing timestamp + 'lonlat' => 'POINT(11.5820 48.1351)', + 'city' => 'Stuttgart' + }, + { + # Invalid lonlat format + 'timestamp' => 1640995320, + 'lonlat' => 'invalid format', + 'city' => 'Frankfurt' + } + ] + end + + it 'imports valid points and reconstructs lonlat when needed' do + 
expect(service.call).to eq(2) # Two valid points (original + reconstructed) + expect(user.tracked_points.count).to eq(2) + + # Check that lonlat was reconstructed properly + munich_point = user.tracked_points.find_by(city: 'Munich') + expect(munich_point).to be_present + expect(munich_point.lonlat.to_s).to match(/POINT\s*\(11\.582\s+48\.1351\)/) + end + end + end +end diff --git a/spec/services/users/import_data/settings_spec.rb b/spec/services/users/import_data/settings_spec.rb new file mode 100644 index 00000000..83740d22 --- /dev/null +++ b/spec/services/users/import_data/settings_spec.rb @@ -0,0 +1,82 @@ +# frozen_string_literal: true + +require 'rails_helper' + +RSpec.describe Users::ImportData::Settings, type: :service do + let(:user) { create(:user, settings: { existing_setting: 'value', theme: 'light' }) } + let(:settings_data) { { 'theme' => 'dark', 'distance_unit' => 'km', 'new_setting' => 'test' } } + let(:service) { described_class.new(user, settings_data) } + + describe '#call' do + context 'with valid settings data' do + it 'merges imported settings with existing settings' do + expect { service.call }.to change { user.reload.settings }.to( + 'existing_setting' => 'value', + 'theme' => 'dark', + 'distance_unit' => 'km', + 'new_setting' => 'test' + ) + end + + it 'gives precedence to imported settings over existing ones' do + service.call + + expect(user.reload.settings['theme']).to eq('dark') + end + + it 'logs the import process' do + expect(Rails.logger).to receive(:info).with("Importing settings for user: #{user.email}") + expect(Rails.logger).to receive(:info).with("Settings import completed") + + service.call + end + end + + context 'with nil settings data' do + let(:settings_data) { nil } + + it 'does not change user settings' do + expect { service.call }.not_to change { user.reload.settings } + end + + it 'does not log import process' do + expect(Rails.logger).not_to receive(:info) + + service.call + end + end + + context 'with non-hash settings data' do + let(:settings_data) { 'invalid_data' } + + it 'does not change user settings' do + expect { service.call }.not_to change { user.reload.settings } + end + + it 'does not log import process' do + expect(Rails.logger).not_to receive(:info) + + service.call + end + end + + context 'with empty settings data' do + let(:settings_data) { {} } + + it 'preserves existing settings without adding new ones' do + original_settings = user.settings.dup + + service.call + + expect(user.reload.settings).to eq(original_settings) + end + + it 'logs the import process' do + expect(Rails.logger).to receive(:info).with("Importing settings for user: #{user.email}") + expect(Rails.logger).to receive(:info).with("Settings import completed") + + service.call + end + end + end +end diff --git a/spec/services/users/import_data/stats_spec.rb b/spec/services/users/import_data/stats_spec.rb new file mode 100644 index 00000000..d3645309 --- /dev/null +++ b/spec/services/users/import_data/stats_spec.rb @@ -0,0 +1,188 @@ +# frozen_string_literal: true + +require 'rails_helper' + +RSpec.describe Users::ImportData::Stats, type: :service do + let(:user) { create(:user) } + let(:stats_data) do + [ + { + 'year' => 2024, + 'month' => 1, + 'distance' => 456.78, + 'daily_distance' => [[1, 15.2], [2, 23.5], [3, 18.1]], + 'toponyms' => [ + { 'country' => 'United States', 'cities' => [{ 'city' => 'New York' }] } + ], + 'created_at' => '2024-02-01T00:00:00Z', + 'updated_at' => '2024-02-01T00:00:00Z' + }, + { + 'year' => 2024, + 'month' => 2, + 'distance' => 
321.45, + 'daily_distance' => [[1, 12.3], [2, 19.8], [3, 25.4]], + 'toponyms' => [ + { 'country' => 'Canada', 'cities' => [{ 'city' => 'Toronto' }] } + ], + 'created_at' => '2024-03-01T00:00:00Z', + 'updated_at' => '2024-03-01T00:00:00Z' + } + ] + end + let(:service) { described_class.new(user, stats_data) } + + describe '#call' do + context 'with valid stats data' do + it 'creates new stats for the user' do + expect { service.call }.to change { user.stats.count }.by(2) + end + + it 'creates stats with correct attributes' do + service.call + + jan_stats = user.stats.find_by(year: 2024, month: 1) + expect(jan_stats).to have_attributes( + year: 2024, + month: 1, + distance: 456 + ) + expect(jan_stats.daily_distance).to eq([[1, 15.2], [2, 23.5], [3, 18.1]]) + expect(jan_stats.toponyms).to eq([{ 'country' => 'United States', 'cities' => [{ 'city' => 'New York' }] }]) + + feb_stats = user.stats.find_by(year: 2024, month: 2) + expect(feb_stats).to have_attributes( + year: 2024, + month: 2, + distance: 321 + ) + expect(feb_stats.daily_distance).to eq([[1, 12.3], [2, 19.8], [3, 25.4]]) + expect(feb_stats.toponyms).to eq([{ 'country' => 'Canada', 'cities' => [{ 'city' => 'Toronto' }] }]) + end + + it 'returns the number of stats created' do + result = service.call + expect(result).to eq(2) + end + + it 'logs the import process' do + expect(Rails.logger).to receive(:info).with("Importing 2 stats for user: #{user.email}") + expect(Rails.logger).to receive(:info).with("Stats import completed. Created: 2") + + service.call + end + end + + context 'with duplicate stats (same year and month)' do + before do + # Create an existing stat with same year and month + user.stats.create!( + year: 2024, + month: 1, + distance: 100.0 + ) + end + + it 'skips duplicate stats' do + expect { service.call }.to change { user.stats.count }.by(1) + end + + it 'logs when skipping duplicates' do + allow(Rails.logger).to receive(:debug) # Allow any debug logs + expect(Rails.logger).to receive(:debug).with("Stat already exists: 2024-1") + + service.call + end + + it 'returns only the count of newly created stats' do + result = service.call + expect(result).to eq(1) + end + end + + context 'with invalid stat data' do + let(:stats_data) do + [ + { 'year' => 2024, 'month' => 1, 'distance' => 456.78 }, + 'invalid_data', + { 'year' => 2024, 'month' => 2, 'distance' => 321.45 } + ] + end + + it 'skips invalid entries and imports valid ones' do + expect { service.call }.to change { user.stats.count }.by(2) + end + + it 'returns the count of valid stats created' do + result = service.call + expect(result).to eq(2) + end + end + + context 'with validation errors' do + let(:stats_data) do + [ + { 'year' => 2024, 'month' => 1, 'distance' => 456.78 }, + { 'month' => 1, 'distance' => 321.45 }, # missing year + { 'year' => 2024, 'distance' => 123.45 } # missing month + ] + end + + it 'only creates valid stats' do + expect { service.call }.to change { user.stats.count }.by(1) + end + + it 'logs validation errors' do + expect(Rails.logger).to receive(:error).at_least(:once) + + service.call + end + end + + context 'with nil stats data' do + let(:stats_data) { nil } + + it 'does not create any stats' do + expect { service.call }.not_to change { user.stats.count } + end + + it 'returns 0' do + result = service.call + expect(result).to eq(0) + end + end + + context 'with non-array stats data' do + let(:stats_data) { 'invalid_data' } + + it 'does not create any stats' do + expect { service.call }.not_to change { user.stats.count } + end + + it 
'returns 0' do + result = service.call + expect(result).to eq(0) + end + end + + context 'with empty stats data' do + let(:stats_data) { [] } + + it 'does not create any stats' do + expect { service.call }.not_to change { user.stats.count } + end + + it 'logs the import process with 0 count' do + expect(Rails.logger).to receive(:info).with("Importing 0 stats for user: #{user.email}") + expect(Rails.logger).to receive(:info).with("Stats import completed. Created: 0") + + service.call + end + + it 'returns 0' do + result = service.call + expect(result).to eq(0) + end + end + end +end diff --git a/spec/services/users/import_data/trips_spec.rb b/spec/services/users/import_data/trips_spec.rb new file mode 100644 index 00000000..3f96b481 --- /dev/null +++ b/spec/services/users/import_data/trips_spec.rb @@ -0,0 +1,186 @@ +# frozen_string_literal: true + +require 'rails_helper' + +RSpec.describe Users::ImportData::Trips, type: :service do + let(:user) { create(:user) } + let(:trips_data) do + [ + { + 'name' => 'Business Trip to NYC', + 'started_at' => '2024-01-15T08:00:00Z', + 'ended_at' => '2024-01-18T20:00:00Z', + 'distance' => 1245.67, + 'created_at' => '2024-01-19T00:00:00Z', + 'updated_at' => '2024-01-19T00:00:00Z' + }, + { + 'name' => 'Weekend Getaway', + 'started_at' => '2024-02-10T09:00:00Z', + 'ended_at' => '2024-02-12T18:00:00Z', + 'distance' => 456.78, + 'created_at' => '2024-02-13T00:00:00Z', + 'updated_at' => '2024-02-13T00:00:00Z' + } + ] + end + let(:service) { described_class.new(user, trips_data) } + + before do + # Mock the job enqueuing to avoid it interfering with tests + allow(Trips::CalculateAllJob).to receive(:perform_later) + end + + describe '#call' do + context 'with valid trips data' do + it 'creates new trips for the user' do + expect { service.call }.to change { user.trips.count }.by(2) + end + + it 'creates trips with correct attributes' do + service.call + + business_trip = user.trips.find_by(name: 'Business Trip to NYC') + expect(business_trip).to have_attributes( + name: 'Business Trip to NYC', + started_at: Time.parse('2024-01-15T08:00:00Z'), + ended_at: Time.parse('2024-01-18T20:00:00Z'), + distance: 1245 + ) + + weekend_trip = user.trips.find_by(name: 'Weekend Getaway') + expect(weekend_trip).to have_attributes( + name: 'Weekend Getaway', + started_at: Time.parse('2024-02-10T09:00:00Z'), + ended_at: Time.parse('2024-02-12T18:00:00Z'), + distance: 456 + ) + end + + it 'returns the number of trips created' do + result = service.call + expect(result).to eq(2) + end + + it 'logs the import process' do + expect(Rails.logger).to receive(:info).with("Importing 2 trips for user: #{user.email}") + expect(Rails.logger).to receive(:info).with("Trips import completed. 
Created: 2") + + service.call + end + end + + context 'with duplicate trips' do + before do + # Create an existing trip with same name and times + user.trips.create!( + name: 'Business Trip to NYC', + started_at: Time.parse('2024-01-15T08:00:00Z'), + ended_at: Time.parse('2024-01-18T20:00:00Z'), + distance: 1000.0 + ) + end + + it 'skips duplicate trips' do + expect { service.call }.to change { user.trips.count }.by(1) + end + + it 'logs when skipping duplicates' do + allow(Rails.logger).to receive(:debug) # Allow any debug logs + expect(Rails.logger).to receive(:debug).with("Trip already exists: Business Trip to NYC") + + service.call + end + + it 'returns only the count of newly created trips' do + result = service.call + expect(result).to eq(1) + end + end + + context 'with invalid trip data' do + let(:trips_data) do + [ + { 'name' => 'Valid Trip', 'started_at' => '2024-01-15T08:00:00Z', 'ended_at' => '2024-01-18T20:00:00Z' }, + 'invalid_data', + { 'name' => 'Another Valid Trip', 'started_at' => '2024-02-10T09:00:00Z', 'ended_at' => '2024-02-12T18:00:00Z' } + ] + end + + it 'skips invalid entries and imports valid ones' do + expect { service.call }.to change { user.trips.count }.by(2) + end + + it 'returns the count of valid trips created' do + result = service.call + expect(result).to eq(2) + end + end + + context 'with validation errors' do + let(:trips_data) do + [ + { 'name' => 'Valid Trip', 'started_at' => '2024-01-15T08:00:00Z', 'ended_at' => '2024-01-18T20:00:00Z' }, + { 'started_at' => '2024-01-15T08:00:00Z', 'ended_at' => '2024-01-18T20:00:00Z' }, # missing name + { 'name' => 'Invalid Trip' } # missing required timestamps + ] + end + + it 'only creates valid trips' do + expect { service.call }.to change { user.trips.count }.by(1) + end + + it 'logs validation errors' do + expect(Rails.logger).to receive(:error).at_least(:once) + + service.call + end + end + + context 'with nil trips data' do + let(:trips_data) { nil } + + it 'does not create any trips' do + expect { service.call }.not_to change { user.trips.count } + end + + it 'returns 0' do + result = service.call + expect(result).to eq(0) + end + end + + context 'with non-array trips data' do + let(:trips_data) { 'invalid_data' } + + it 'does not create any trips' do + expect { service.call }.not_to change { user.trips.count } + end + + it 'returns 0' do + result = service.call + expect(result).to eq(0) + end + end + + context 'with empty trips data' do + let(:trips_data) { [] } + + it 'does not create any trips' do + expect { service.call }.not_to change { user.trips.count } + end + + it 'logs the import process with 0 count' do + expect(Rails.logger).to receive(:info).with("Importing 0 trips for user: #{user.email}") + expect(Rails.logger).to receive(:info).with("Trips import completed. 
Created: 0") + + service.call + end + + it 'returns 0' do + result = service.call + expect(result).to eq(0) + end + end + end +end diff --git a/spec/services/users/import_data_spec.rb b/spec/services/users/import_data_spec.rb new file mode 100644 index 00000000..5d57b97f --- /dev/null +++ b/spec/services/users/import_data_spec.rb @@ -0,0 +1,297 @@ +# frozen_string_literal: true + +require 'rails_helper' + +RSpec.describe Users::ImportData, type: :service do + let(:user) { create(:user) } + let(:archive_path) { Rails.root.join('tmp', 'test_export.zip') } + let(:service) { described_class.new(user, archive_path) } + let(:import_directory) { Rails.root.join('tmp', "import_#{user.email.gsub(/[^0-9A-Za-z._-]/, '_')}_1234567890") } + + before do + allow(Time).to receive(:current).and_return(Time.at(1234567890)) + allow(FileUtils).to receive(:mkdir_p) + allow(FileUtils).to receive(:rm_rf) + allow(File).to receive(:directory?).and_return(true) + end + + describe '#import' do + let(:sample_data) do + { + 'counts' => { + 'areas' => 2, + 'places' => 3, + 'imports' => 1, + 'exports' => 1, + 'trips' => 2, + 'stats' => 1, + 'notifications' => 2, + 'visits' => 4, + 'points' => 1000 + }, + 'settings' => { 'theme' => 'dark' }, + 'areas' => [{ 'name' => 'Home', 'latitude' => '40.7128', 'longitude' => '-74.0060' }], + 'places' => [{ 'name' => 'Office', 'latitude' => '40.7589', 'longitude' => '-73.9851' }], + 'imports' => [{ 'name' => 'test.json', 'source' => 'owntracks' }], + 'exports' => [{ 'name' => 'export.json', 'status' => 'completed' }], + 'trips' => [{ 'name' => 'Trip to NYC', 'distance' => 100.5 }], + 'stats' => [{ 'year' => 2024, 'month' => 1, 'distance' => 456.78 }], + 'notifications' => [{ 'title' => 'Test', 'content' => 'Test notification' }], + 'visits' => [{ 'name' => 'Work Visit', 'duration' => 3600 }], + 'points' => [{ 'latitude' => 40.7128, 'longitude' => -74.0060, 'timestamp' => 1234567890 }] + } + end + + before do + # Mock ZIP file extraction + zipfile_mock = double('ZipFile') + allow(zipfile_mock).to receive(:each) + allow(Zip::File).to receive(:open).with(archive_path).and_yield(zipfile_mock) + + # Mock JSON loading and File operations + allow(File).to receive(:exist?).and_return(false) + allow(File).to receive(:exist?).with(import_directory.join('data.json')).and_return(true) + allow(File).to receive(:read).with(import_directory.join('data.json')).and_return(sample_data.to_json) + + # Mock all import services + allow(Users::ImportData::Settings).to receive(:new).and_return(double(call: true)) + allow(Users::ImportData::Areas).to receive(:new).and_return(double(call: 2)) + allow(Users::ImportData::Places).to receive(:new).and_return(double(call: 3)) + allow(Users::ImportData::Imports).to receive(:new).and_return(double(call: [1, 5])) + allow(Users::ImportData::Exports).to receive(:new).and_return(double(call: [1, 2])) + allow(Users::ImportData::Trips).to receive(:new).and_return(double(call: 2)) + allow(Users::ImportData::Stats).to receive(:new).and_return(double(call: 1)) + allow(Users::ImportData::Notifications).to receive(:new).and_return(double(call: 2)) + allow(Users::ImportData::Visits).to receive(:new).and_return(double(call: 4)) + allow(Users::ImportData::Points).to receive(:new).and_return(double(call: 1000)) + + # Mock notifications + allow(::Notifications::Create).to receive(:new).and_return(double(call: true)) + + # Mock cleanup + allow(service).to receive(:cleanup_temporary_files) + allow_any_instance_of(Pathname).to receive(:exist?).and_return(true) + end + + context 'when 
import is successful' do + it 'creates import directory' do + expect(FileUtils).to receive(:mkdir_p).with(import_directory) + + service.import + end + + it 'extracts the archive' do + expect(Zip::File).to receive(:open).with(archive_path) + + service.import + end + + it 'loads JSON data from extracted files' do + expect(File).to receive(:exist?).with(import_directory.join('data.json')) + expect(File).to receive(:read).with(import_directory.join('data.json')) + + service.import + end + + it 'calls all import services in correct order' do + expect(Users::ImportData::Settings).to receive(:new).with(user, sample_data['settings']).ordered + expect(Users::ImportData::Areas).to receive(:new).with(user, sample_data['areas']).ordered + expect(Users::ImportData::Places).to receive(:new).with(user, sample_data['places']).ordered + expect(Users::ImportData::Imports).to receive(:new).with(user, sample_data['imports'], import_directory.join('files')).ordered + expect(Users::ImportData::Exports).to receive(:new).with(user, sample_data['exports'], import_directory.join('files')).ordered + expect(Users::ImportData::Trips).to receive(:new).with(user, sample_data['trips']).ordered + expect(Users::ImportData::Stats).to receive(:new).with(user, sample_data['stats']).ordered + expect(Users::ImportData::Notifications).to receive(:new).with(user, sample_data['notifications']).ordered + expect(Users::ImportData::Visits).to receive(:new).with(user, sample_data['visits']).ordered + expect(Users::ImportData::Points).to receive(:new).with(user, sample_data['points']).ordered + + service.import + end + + it 'creates success notification with import stats' do + expect(::Notifications::Create).to receive(:new).with( + user: user, + title: 'Data import completed', + content: match(/1000 points.*4 visits.*3 places.*2 trips/), + kind: :info + ) + + service.import + end + + it 'cleans up temporary files' do + expect(service).to receive(:cleanup_temporary_files).with(import_directory) + + service.import + end + + it 'returns import statistics' do + result = service.import + + expect(result).to include( + settings_updated: true, + areas_created: 2, + places_created: 3, + imports_created: 1, + exports_created: 1, + trips_created: 2, + stats_created: 1, + notifications_created: 2, + visits_created: 4, + points_created: 1000, + files_restored: 7 + ) + end + + it 'logs expected counts if available' do + allow(Rails.logger).to receive(:info) # Allow other log messages + expect(Rails.logger).to receive(:info).with(/Expected entity counts from export:/) + + service.import + end + end + + context 'when JSON file is missing' do + before do + allow(File).to receive(:exist?).and_return(false) + allow(File).to receive(:exist?).with(import_directory.join('data.json')).and_return(false) + allow(ExceptionReporter).to receive(:call) + end + + it 'raises an error' do + expect { service.import }.to raise_error(StandardError, 'Data file not found in archive: data.json') + end + end + + context 'when JSON is invalid' do + before do + allow(File).to receive(:exist?).and_return(false) + allow(File).to receive(:exist?).with(import_directory.join('data.json')).and_return(true) + allow(File).to receive(:read).with(import_directory.join('data.json')).and_return('invalid json') + allow(ExceptionReporter).to receive(:call) + end + + it 'raises a JSON parse error' do + expect { service.import }.to raise_error(StandardError, /Invalid JSON format in data file/) + end + end + + context 'when an error occurs during import' do + let(:error_message) { 
'Something went wrong' } + + before do + allow(File).to receive(:exist?).and_return(false) + allow(File).to receive(:exist?).with(import_directory.join('data.json')).and_return(true) + allow(File).to receive(:read).with(import_directory.join('data.json')).and_return(sample_data.to_json) + allow(Users::ImportData::Settings).to receive(:new).and_raise(StandardError, error_message) + allow(ExceptionReporter).to receive(:call) + allow(::Notifications::Create).to receive(:new).and_return(double(call: true)) + end + + it 'creates failure notification' do + expect(::Notifications::Create).to receive(:new).with( + user: user, + title: 'Data import failed', + content: "Your data import failed with error: #{error_message}. Please check the archive format and try again.", + kind: :error + ) + + expect { service.import }.to raise_error(StandardError, error_message) + end + + it 'reports error via ExceptionReporter' do + expect(ExceptionReporter).to receive(:call).with( + an_instance_of(StandardError), + 'Data import failed' + ) + + expect { service.import }.to raise_error(StandardError, error_message) + end + + it 'still cleans up temporary files' do + expect(service).to receive(:cleanup_temporary_files) + + expect { service.import }.to raise_error(StandardError, error_message) + end + + it 're-raises the error' do + expect { service.import }.to raise_error(StandardError, error_message) + end + end + + context 'when data sections are missing' do + let(:minimal_data) { { 'settings' => { 'theme' => 'dark' } } } + + before do + # Reset JSON file mocking + allow(File).to receive(:exist?).and_return(false) + allow(File).to receive(:exist?).with(import_directory.join('data.json')).and_return(true) + allow(File).to receive(:read).with(import_directory.join('data.json')).and_return(minimal_data.to_json) + + # Only expect Settings to be called + allow(Users::ImportData::Settings).to receive(:new).and_return(double(call: true)) + allow(::Notifications::Create).to receive(:new).and_return(double(call: true)) + end + + it 'only imports available sections' do + expect(Users::ImportData::Settings).to receive(:new).with(user, minimal_data['settings']) + expect(Users::ImportData::Areas).not_to receive(:new) + expect(Users::ImportData::Places).not_to receive(:new) + + service.import + end + end + end + + describe 'private methods' do + describe '#cleanup_temporary_files' do + context 'when directory exists' do + before do + allow(File).to receive(:directory?).and_return(true) + allow(Rails.logger).to receive(:info) + end + + it 'removes the directory' do + expect(FileUtils).to receive(:rm_rf).with(import_directory) + + service.send(:cleanup_temporary_files, import_directory) + end + + it 'logs the cleanup' do + expect(Rails.logger).to receive(:info).with("Cleaning up temporary import directory: #{import_directory}") + + service.send(:cleanup_temporary_files, import_directory) + end + end + + context 'when cleanup fails' do + before do + allow(File).to receive(:directory?).and_return(true) + allow(FileUtils).to receive(:rm_rf).and_raise(StandardError, 'Permission denied') + allow(ExceptionReporter).to receive(:call) + end + + it 'reports error via ExceptionReporter but does not re-raise' do + expect(ExceptionReporter).to receive(:call).with( + an_instance_of(StandardError), + 'Failed to cleanup temporary files' + ) + + expect { service.send(:cleanup_temporary_files, import_directory) }.not_to raise_error + end + end + + context 'when directory does not exist' do + before do + allow(File).to 
receive(:directory?).and_return(false) + end + + it 'does not attempt cleanup' do + expect(FileUtils).not_to receive(:rm_rf) + + service.send(:cleanup_temporary_files, import_directory) + end + end + end + end +end
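
Reviewer note (illustrative only, not part of the patch): the trips specs above pin down a simple contract — ignore nil or non-array input, skip entries that are not hashes, skip duplicates keyed on name/started_at/ended_at, log the counts, and return the number of trips created. Below is a minimal Ruby sketch of that contract, assuming a standard user.trips ActiveRecord association; the class name is hypothetical so it is not confused with the real app/services/users/import_data/trips.rb shipped in this change, which may differ in detail.

# Hypothetical sketch of the behaviour exercised by trips_spec.rb; not the shipped implementation.
class TripsImportSketch
  def initialize(user, trips_data)
    @user = user
    @trips_data = trips_data
  end

  def call
    # Nil or non-array payloads create nothing and report zero.
    return 0 unless @trips_data.is_a?(Array)

    Rails.logger.info "Importing #{@trips_data.size} trips for user: #{@user.email}"

    created = 0
    @trips_data.each do |attrs|
      next unless attrs.is_a?(Hash) # skip malformed entries such as bare strings

      # Duplicate detection keyed on the trip name and its time window.
      if @user.trips.exists?(name: attrs['name'], started_at: attrs['started_at'], ended_at: attrs['ended_at'])
        Rails.logger.debug "Trip already exists: #{attrs['name']}"
        next
      end

      trip = @user.trips.build(attrs.slice('name', 'started_at', 'ended_at', 'distance'))
      if trip.save
        created += 1
      else
        # Validation failures are logged and skipped rather than aborting the whole import.
        Rails.logger.error "Failed to import trip: #{trip.errors.full_messages.join(', ')}"
      end
    end

    Rails.logger.info "Trips import completed. Created: #{created}"
    created
  end
end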