mirror of
https://github.com/Freika/dawarich.git
synced 2026-01-10 17:21:38 -05:00
Merge pull request #1465 from Freika/feature/user-export
Feature/user export
This commit is contained in:
commit
fd166c2a2f
86 changed files with 7249 additions and 63 deletions
|
|
@ -1 +1 @@
|
|||
0.28.1
|
||||
0.29.0
|
||||
|
|
|
|||
36
CHANGELOG.md
36
CHANGELOG.md
|
|
@ -4,6 +4,42 @@ All notable changes to this project will be documented in this file.
|
|||
The format is based on [Keep a Changelog](http://keepachangelog.com/)
|
||||
and this project adheres to [Semantic Versioning](http://semver.org/).
|
||||
|
||||
# [0.29.0] - 2025-06-30
|
||||
|
||||
You can now move your user data between Dawarich instances. Simply go to your Account settings and click on the "Export my data" button under the password section. An export will be created and you will be able to download it on Exports page once it's ready.
|
||||
|
||||
To import your data on a new Dawarich instance, create a new user and upload the exported zip file. You can import your data also on the Account page, by clicking "Import my data" button under the password section.
|
||||
|
||||
The feature is experimental and not yet aimed to replace a proper backup solution. Please use at your own risk.
|
||||
|
||||
## Added
|
||||
|
||||
- In the User Settings, you can now export your user data as a zip file. It will contain the following:
|
||||
- All your points
|
||||
- All your places
|
||||
- All your visits
|
||||
- All your areas
|
||||
- All your imports with files
|
||||
- All your exports with files
|
||||
- All your trips
|
||||
- All your notifications
|
||||
- All your stats
|
||||
|
||||
- In the User Settings, you can now import your user data from a zip file. It will import all the data from the zip file, listed above. It will also start stats recalculation.
|
||||
- Export file size is now displayed in the exports and imports lists.
|
||||
- A button to download an import file is now displayed in the imports list. It may not work properly for imports created before the 0.25.4 release.
|
||||
- Imports now have statuses.
|
||||
|
||||
## Changed
|
||||
|
||||
- Oj is now being used for JSON serialization.
|
||||
|
||||
## Fixed
|
||||
|
||||
- Email links now use the SMTP domain if set. #1469
|
||||
|
||||
|
||||
|
||||
# 0.28.1 - 2025-06-11
|
||||
|
||||
## Fixed
|
||||
|
|
|
|||
1
Gemfile
1
Gemfile
|
|
@ -34,6 +34,7 @@ gem 'rgeo-activerecord'
|
|||
gem 'rgeo-geojson'
|
||||
gem 'rswag-api'
|
||||
gem 'rswag-ui'
|
||||
gem 'rubyzip', '~> 2.4'
|
||||
gem 'sentry-ruby'
|
||||
gem 'sentry-rails'
|
||||
gem 'stackprof'
|
||||
|
|
|
|||
|
|
@ -557,6 +557,7 @@ DEPENDENCIES
|
|||
rswag-specs
|
||||
rswag-ui
|
||||
rubocop-rails
|
||||
rubyzip (~> 2.4)
|
||||
selenium-webdriver
|
||||
sentry-rails
|
||||
sentry-ruby
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ class ImportsController < ApplicationController
|
|||
@imports =
|
||||
current_user
|
||||
.imports
|
||||
.select(:id, :name, :source, :created_at, :processed)
|
||||
.select(:id, :name, :source, :created_at, :processed, :status)
|
||||
.order(created_at: :desc)
|
||||
.page(params[:page])
|
||||
end
|
||||
|
|
|
|||
|
|
@ -2,7 +2,8 @@
|
|||
|
||||
class Settings::UsersController < ApplicationController
|
||||
before_action :authenticate_self_hosted!
|
||||
before_action :authenticate_admin!
|
||||
before_action :authenticate_admin!, except: [:export, :import]
|
||||
before_action :authenticate_user!, only: [:export, :import]
|
||||
|
||||
def index
|
||||
@users = User.order(created_at: :desc)
|
||||
|
|
@ -46,9 +47,54 @@ class Settings::UsersController < ApplicationController
|
|||
end
|
||||
end
|
||||
|
||||
def export
|
||||
current_user.export_data
|
||||
|
||||
redirect_to exports_path, notice: 'Your data is being exported. You will receive a notification when it is ready.'
|
||||
end
|
||||
|
||||
def import
|
||||
unless params[:archive].present?
|
||||
redirect_to edit_user_registration_path, alert: 'Please select a ZIP archive to import.'
|
||||
return
|
||||
end
|
||||
|
||||
archive_file = params[:archive]
|
||||
|
||||
validate_archive_file(archive_file)
|
||||
|
||||
import = current_user.imports.build(
|
||||
name: archive_file.original_filename,
|
||||
source: :user_data_archive
|
||||
)
|
||||
|
||||
import.file.attach(archive_file)
|
||||
|
||||
if import.save
|
||||
redirect_to edit_user_registration_path,
|
||||
notice: 'Your data import has been started. You will receive a notification when it completes.'
|
||||
else
|
||||
redirect_to edit_user_registration_path,
|
||||
alert: 'Failed to start import. Please try again.'
|
||||
end
|
||||
rescue StandardError => e
|
||||
ExceptionReporter.call(e, 'User data import failed to start')
|
||||
redirect_to edit_user_registration_path,
|
||||
alert: 'An error occurred while starting the import. Please try again.'
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def user_params
|
||||
params.require(:user).permit(:email, :password)
|
||||
end
|
||||
|
||||
def validate_archive_file(archive_file)
|
||||
unless archive_file.content_type == 'application/zip' ||
|
||||
archive_file.content_type == 'application/x-zip-compressed' ||
|
||||
File.extname(archive_file.original_filename).downcase == '.zip'
|
||||
|
||||
redirect_to edit_user_registration_path, alert: 'Please upload a valid ZIP file.' and return
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -31,6 +31,11 @@ export default class extends BaseController {
|
|||
if (pointsCell) {
|
||||
pointsCell.textContent = new Intl.NumberFormat().format(data.import.points_count);
|
||||
}
|
||||
|
||||
const statusCell = row.querySelector('[data-status-display]');
|
||||
if (statusCell && data.import.status) {
|
||||
statusCell.textContent = data.import.status;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
13
app/jobs/users/export_data_job.rb
Normal file
13
app/jobs/users/export_data_job.rb
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ExportDataJob < ApplicationJob
|
||||
queue_as :exports
|
||||
|
||||
sidekiq_options retry: false
|
||||
|
||||
def perform(user_id)
|
||||
user = User.find(user_id)
|
||||
|
||||
Users::ExportData.new(user).export
|
||||
end
|
||||
end
|
||||
66
app/jobs/users/import_data_job.rb
Normal file
66
app/jobs/users/import_data_job.rb
Normal file
|
|
@ -0,0 +1,66 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ImportDataJob < ApplicationJob
|
||||
queue_as :imports
|
||||
|
||||
sidekiq_options retry: false
|
||||
|
||||
def perform(import_id)
|
||||
import = Import.find(import_id)
|
||||
user = import.user
|
||||
|
||||
archive_path = download_import_archive(import)
|
||||
|
||||
unless File.exist?(archive_path)
|
||||
raise StandardError, "Archive file not found: #{archive_path}"
|
||||
end
|
||||
|
||||
import_stats = Users::ImportData.new(user, archive_path).import
|
||||
|
||||
Rails.logger.info "Import completed successfully for user #{user.email}: #{import_stats}"
|
||||
rescue ActiveRecord::RecordNotFound => e
|
||||
ExceptionReporter.call(e, "Import job failed for import_id #{import_id} - import not found")
|
||||
|
||||
raise e
|
||||
rescue StandardError => e
|
||||
user_id = user&.id || import&.user_id || 'unknown'
|
||||
ExceptionReporter.call(e, "Import job failed for user #{user_id}")
|
||||
|
||||
create_import_failed_notification(user, e)
|
||||
|
||||
raise e
|
||||
ensure
|
||||
if archive_path && File.exist?(archive_path)
|
||||
File.delete(archive_path)
|
||||
|
||||
Rails.logger.info "Cleaned up archive file: #{archive_path}"
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def download_import_archive(import)
|
||||
require 'tmpdir'
|
||||
|
||||
timestamp = Time.current.to_i
|
||||
filename = "user_import_#{import.user_id}_#{import.id}_#{timestamp}.zip"
|
||||
temp_path = File.join(Dir.tmpdir, filename)
|
||||
|
||||
File.open(temp_path, 'wb') do |file_handle|
|
||||
import.file.download do |chunk|
|
||||
file_handle.write(chunk)
|
||||
end
|
||||
end
|
||||
|
||||
temp_path
|
||||
end
|
||||
|
||||
def create_import_failed_notification(user, error)
|
||||
::Notifications::Create.new(
|
||||
user: user,
|
||||
title: 'Data import failed',
|
||||
content: "Your data import failed with error: #{error.message}. Please check the archive format and try again.",
|
||||
kind: :error
|
||||
).call
|
||||
end
|
||||
end
|
||||
|
|
@ -1,6 +1,8 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Country < ApplicationRecord
|
||||
has_many :points, dependent: :nullify
|
||||
|
||||
validates :name, :iso_a2, :iso_a3, :geom, presence: true
|
||||
|
||||
def self.containing_point(lon, lat)
|
||||
|
|
|
|||
|
|
@ -4,13 +4,14 @@ class Export < ApplicationRecord
|
|||
belongs_to :user
|
||||
|
||||
enum :status, { created: 0, processing: 1, completed: 2, failed: 3 }
|
||||
enum :file_format, { json: 0, gpx: 1 }
|
||||
enum :file_format, { json: 0, gpx: 1, archive: 2 }
|
||||
enum :file_type, { points: 0, user_data: 1 }
|
||||
|
||||
validates :name, presence: true
|
||||
|
||||
has_one_attached :file
|
||||
|
||||
after_commit -> { ExportJob.perform_later(id) }, on: :create
|
||||
after_commit -> { ExportJob.perform_later(id) }, on: :create, unless: -> { user_data? || archive? }
|
||||
after_commit -> { remove_attached_file }, on: :destroy
|
||||
|
||||
def process!
|
||||
|
|
|
|||
|
|
@ -6,16 +6,32 @@ class Import < ApplicationRecord
|
|||
|
||||
has_one_attached :file
|
||||
|
||||
after_commit -> { Import::ProcessJob.perform_later(id) }, on: :create
|
||||
# Flag to skip background processing during user data import
|
||||
attr_accessor :skip_background_processing
|
||||
|
||||
after_commit -> { Import::ProcessJob.perform_later(id) unless skip_background_processing }, on: :create
|
||||
after_commit :remove_attached_file, on: :destroy
|
||||
|
||||
validates :name, presence: true, uniqueness: { scope: :user_id }
|
||||
|
||||
enum :status, { created: 0, processing: 1, completed: 2, failed: 3 }
|
||||
|
||||
enum :source, {
|
||||
google_semantic_history: 0, owntracks: 1, google_records: 2,
|
||||
google_phone_takeout: 3, gpx: 4, immich_api: 5, geojson: 6, photoprism_api: 7
|
||||
google_phone_takeout: 3, gpx: 4, immich_api: 5, geojson: 6, photoprism_api: 7,
|
||||
user_data_archive: 8
|
||||
}
|
||||
|
||||
def process!
|
||||
Imports::Create.new(user, self).call
|
||||
if user_data_archive?
|
||||
process_user_data_archive!
|
||||
else
|
||||
Imports::Create.new(user, self).call
|
||||
end
|
||||
end
|
||||
|
||||
def process_user_data_archive!
|
||||
Users::ImportDataJob.perform_later(id)
|
||||
end
|
||||
|
||||
def reverse_geocoded_points_count
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ class Point < ApplicationRecord
|
|||
belongs_to :import, optional: true, counter_cache: true
|
||||
belongs_to :visit, optional: true
|
||||
belongs_to :user
|
||||
belongs_to :country, optional: true
|
||||
|
||||
validates :timestamp, :lonlat, presence: true
|
||||
validates :lonlat, uniqueness: {
|
||||
|
|
@ -28,7 +29,7 @@ class Point < ApplicationRecord
|
|||
scope :visited, -> { where.not(visit_id: nil) }
|
||||
scope :not_visited, -> { where(visit_id: nil) }
|
||||
|
||||
after_create :async_reverse_geocode, if: -> { DawarichSettings.store_geodata? }
|
||||
after_create :async_reverse_geocode, if: -> { DawarichSettings.store_geodata? && !reverse_geocoded? }
|
||||
after_create :set_country
|
||||
after_create_commit :broadcast_coordinates
|
||||
|
||||
|
|
@ -76,7 +77,7 @@ class Point < ApplicationRecord
|
|||
timestamp.to_s,
|
||||
velocity.to_s,
|
||||
id.to_s,
|
||||
country.to_s
|
||||
country_name.to_s
|
||||
]
|
||||
)
|
||||
end
|
||||
|
|
@ -86,4 +87,9 @@ class Point < ApplicationRecord
|
|||
self.country_id = found_in_country&.id
|
||||
save! if changed?
|
||||
end
|
||||
|
||||
def country_name
|
||||
# Safely get country name from association or attribute
|
||||
self.country&.name || read_attribute(:country) || ''
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -115,6 +115,10 @@ class User < ApplicationRecord
|
|||
JWT.encode(payload, secret_key, 'HS256')
|
||||
end
|
||||
|
||||
def export_data
|
||||
Users::ExportDataJob.perform_later(id)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def create_api_key
|
||||
|
|
|
|||
397
app/services/countries/iso_code_mapper.rb
Normal file
397
app/services/countries/iso_code_mapper.rb
Normal file
|
|
@ -0,0 +1,397 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Countries::IsoCodeMapper
|
||||
# Comprehensive country data with name, ISO codes, and flag emoji
|
||||
# Based on ISO 3166-1 standard
|
||||
COUNTRIES = {
|
||||
'AF' => { name: 'Afghanistan', iso2: 'AF', iso3: 'AFG', flag: '🇦🇫' },
|
||||
'AL' => { name: 'Albania', iso2: 'AL', iso3: 'ALB', flag: '🇦🇱' },
|
||||
'DZ' => { name: 'Algeria', iso2: 'DZ', iso3: 'DZA', flag: '🇩🇿' },
|
||||
'AS' => { name: 'American Samoa', iso2: 'AS', iso3: 'ASM', flag: '🇦🇸' },
|
||||
'AD' => { name: 'Andorra', iso2: 'AD', iso3: 'AND', flag: '🇦🇩' },
|
||||
'AO' => { name: 'Angola', iso2: 'AO', iso3: 'AGO', flag: '🇦🇴' },
|
||||
'AI' => { name: 'Anguilla', iso2: 'AI', iso3: 'AIA', flag: '🇦🇮' },
|
||||
'AQ' => { name: 'Antarctica', iso2: 'AQ', iso3: 'ATA', flag: '🇦🇶' },
|
||||
'AG' => { name: 'Antigua and Barbuda', iso2: 'AG', iso3: 'ATG', flag: '🇦🇬' },
|
||||
'AR' => { name: 'Argentina', iso2: 'AR', iso3: 'ARG', flag: '🇦🇷' },
|
||||
'AM' => { name: 'Armenia', iso2: 'AM', iso3: 'ARM', flag: '🇦🇲' },
|
||||
'AW' => { name: 'Aruba', iso2: 'AW', iso3: 'ABW', flag: '🇦🇼' },
|
||||
'AU' => { name: 'Australia', iso2: 'AU', iso3: 'AUS', flag: '🇦🇺' },
|
||||
'AT' => { name: 'Austria', iso2: 'AT', iso3: 'AUT', flag: '🇦🇹' },
|
||||
'AZ' => { name: 'Azerbaijan', iso2: 'AZ', iso3: 'AZE', flag: '🇦🇿' },
|
||||
'BS' => { name: 'Bahamas', iso2: 'BS', iso3: 'BHS', flag: '🇧🇸' },
|
||||
'BH' => { name: 'Bahrain', iso2: 'BH', iso3: 'BHR', flag: '🇧🇭' },
|
||||
'BD' => { name: 'Bangladesh', iso2: 'BD', iso3: 'BGD', flag: '🇧🇩' },
|
||||
'BB' => { name: 'Barbados', iso2: 'BB', iso3: 'BRB', flag: '🇧🇧' },
|
||||
'BY' => { name: 'Belarus', iso2: 'BY', iso3: 'BLR', flag: '🇧🇾' },
|
||||
'BE' => { name: 'Belgium', iso2: 'BE', iso3: 'BEL', flag: '🇧🇪' },
|
||||
'BZ' => { name: 'Belize', iso2: 'BZ', iso3: 'BLZ', flag: '🇧🇿' },
|
||||
'BJ' => { name: 'Benin', iso2: 'BJ', iso3: 'BEN', flag: '🇧🇯' },
|
||||
'BM' => { name: 'Bermuda', iso2: 'BM', iso3: 'BMU', flag: '🇧🇲' },
|
||||
'BT' => { name: 'Bhutan', iso2: 'BT', iso3: 'BTN', flag: '🇧🇹' },
|
||||
'BO' => { name: 'Bolivia', iso2: 'BO', iso3: 'BOL', flag: '🇧🇴' },
|
||||
'BA' => { name: 'Bosnia and Herzegovina', iso2: 'BA', iso3: 'BIH', flag: '🇧🇦' },
|
||||
'BW' => { name: 'Botswana', iso2: 'BW', iso3: 'BWA', flag: '🇧🇼' },
|
||||
'BR' => { name: 'Brazil', iso2: 'BR', iso3: 'BRA', flag: '🇧🇷' },
|
||||
'BN' => { name: 'Brunei Darussalam', iso2: 'BN', iso3: 'BRN', flag: '🇧🇳' },
|
||||
'BG' => { name: 'Bulgaria', iso2: 'BG', iso3: 'BGR', flag: '🇧🇬' },
|
||||
'BF' => { name: 'Burkina Faso', iso2: 'BF', iso3: 'BFA', flag: '🇧🇫' },
|
||||
'BI' => { name: 'Burundi', iso2: 'BI', iso3: 'BDI', flag: '🇧🇮' },
|
||||
'KH' => { name: 'Cambodia', iso2: 'KH', iso3: 'KHM', flag: '🇰🇭' },
|
||||
'CM' => { name: 'Cameroon', iso2: 'CM', iso3: 'CMR', flag: '🇨🇲' },
|
||||
'CA' => { name: 'Canada', iso2: 'CA', iso3: 'CAN', flag: '🇨🇦' },
|
||||
'CV' => { name: 'Cape Verde', iso2: 'CV', iso3: 'CPV', flag: '🇨🇻' },
|
||||
'KY' => { name: 'Cayman Islands', iso2: 'KY', iso3: 'CYM', flag: '🇰🇾' },
|
||||
'CF' => { name: 'Central African Republic', iso2: 'CF', iso3: 'CAF', flag: '🇨🇫' },
|
||||
'TD' => { name: 'Chad', iso2: 'TD', iso3: 'TCD', flag: '🇹🇩' },
|
||||
'CL' => { name: 'Chile', iso2: 'CL', iso3: 'CHL', flag: '🇨🇱' },
|
||||
'CN' => { name: 'China', iso2: 'CN', iso3: 'CHN', flag: '🇨🇳' },
|
||||
'CO' => { name: 'Colombia', iso2: 'CO', iso3: 'COL', flag: '🇨🇴' },
|
||||
'KM' => { name: 'Comoros', iso2: 'KM', iso3: 'COM', flag: '🇰🇲' },
|
||||
'CG' => { name: 'Congo', iso2: 'CG', iso3: 'COG', flag: '🇨🇬' },
|
||||
'CD' => { name: 'Congo, Democratic Republic of the', iso2: 'CD', iso3: 'COD', flag: '🇨🇩' },
|
||||
'CK' => { name: 'Cook Islands', iso2: 'CK', iso3: 'COK', flag: '🇨🇰' },
|
||||
'CR' => { name: 'Costa Rica', iso2: 'CR', iso3: 'CRI', flag: '🇨🇷' },
|
||||
'CI' => { name: 'Côte d\'Ivoire', iso2: 'CI', iso3: 'CIV', flag: '🇨🇮' },
|
||||
'HR' => { name: 'Croatia', iso2: 'HR', iso3: 'HRV', flag: '🇭🇷' },
|
||||
'CU' => { name: 'Cuba', iso2: 'CU', iso3: 'CUB', flag: '🇨🇺' },
|
||||
'CY' => { name: 'Cyprus', iso2: 'CY', iso3: 'CYP', flag: '🇨🇾' },
|
||||
'CZ' => { name: 'Czech Republic', iso2: 'CZ', iso3: 'CZE', flag: '🇨🇿' },
|
||||
'DK' => { name: 'Denmark', iso2: 'DK', iso3: 'DNK', flag: '🇩🇰' },
|
||||
'DJ' => { name: 'Djibouti', iso2: 'DJ', iso3: 'DJI', flag: '🇩🇯' },
|
||||
'DM' => { name: 'Dominica', iso2: 'DM', iso3: 'DMA', flag: '🇩🇲' },
|
||||
'DO' => { name: 'Dominican Republic', iso2: 'DO', iso3: 'DOM', flag: '🇩🇴' },
|
||||
'EC' => { name: 'Ecuador', iso2: 'EC', iso3: 'ECU', flag: '🇪🇨' },
|
||||
'EG' => { name: 'Egypt', iso2: 'EG', iso3: 'EGY', flag: '🇪🇬' },
|
||||
'SV' => { name: 'El Salvador', iso2: 'SV', iso3: 'SLV', flag: '🇸🇻' },
|
||||
'GQ' => { name: 'Equatorial Guinea', iso2: 'GQ', iso3: 'GNQ', flag: '🇬🇶' },
|
||||
'ER' => { name: 'Eritrea', iso2: 'ER', iso3: 'ERI', flag: '🇪🇷' },
|
||||
'EE' => { name: 'Estonia', iso2: 'EE', iso3: 'EST', flag: '🇪🇪' },
|
||||
'ET' => { name: 'Ethiopia', iso2: 'ET', iso3: 'ETH', flag: '🇪🇹' },
|
||||
'FK' => { name: 'Falkland Islands (Malvinas)', iso2: 'FK', iso3: 'FLK', flag: '🇫🇰' },
|
||||
'FO' => { name: 'Faroe Islands', iso2: 'FO', iso3: 'FRO', flag: '🇫🇴' },
|
||||
'FJ' => { name: 'Fiji', iso2: 'FJ', iso3: 'FJI', flag: '🇫🇯' },
|
||||
'FI' => { name: 'Finland', iso2: 'FI', iso3: 'FIN', flag: '🇫🇮' },
|
||||
'FR' => { name: 'France', iso2: 'FR', iso3: 'FRA', flag: '🇫🇷' },
|
||||
'GF' => { name: 'French Guiana', iso2: 'GF', iso3: 'GUF', flag: '🇬🇫' },
|
||||
'PF' => { name: 'French Polynesia', iso2: 'PF', iso3: 'PYF', flag: '🇵🇫' },
|
||||
'GA' => { name: 'Gabon', iso2: 'GA', iso3: 'GAB', flag: '🇬🇦' },
|
||||
'GM' => { name: 'Gambia', iso2: 'GM', iso3: 'GMB', flag: '🇬🇲' },
|
||||
'GE' => { name: 'Georgia', iso2: 'GE', iso3: 'GEO', flag: '🇬🇪' },
|
||||
'DE' => { name: 'Germany', iso2: 'DE', iso3: 'DEU', flag: '🇩🇪' },
|
||||
'GH' => { name: 'Ghana', iso2: 'GH', iso3: 'GHA', flag: '🇬🇭' },
|
||||
'GI' => { name: 'Gibraltar', iso2: 'GI', iso3: 'GIB', flag: '🇬🇮' },
|
||||
'GR' => { name: 'Greece', iso2: 'GR', iso3: 'GRC', flag: '🇬🇷' },
|
||||
'GL' => { name: 'Greenland', iso2: 'GL', iso3: 'GRL', flag: '🇬🇱' },
|
||||
'GD' => { name: 'Grenada', iso2: 'GD', iso3: 'GRD', flag: '🇬🇩' },
|
||||
'GP' => { name: 'Guadeloupe', iso2: 'GP', iso3: 'GLP', flag: '🇬🇵' },
|
||||
'GU' => { name: 'Guam', iso2: 'GU', iso3: 'GUM', flag: '🇬🇺' },
|
||||
'GT' => { name: 'Guatemala', iso2: 'GT', iso3: 'GTM', flag: '🇬🇹' },
|
||||
'GG' => { name: 'Guernsey', iso2: 'GG', iso3: 'GGY', flag: '🇬🇬' },
|
||||
'GN' => { name: 'Guinea', iso2: 'GN', iso3: 'GIN', flag: '🇬🇳' },
|
||||
'GW' => { name: 'Guinea-Bissau', iso2: 'GW', iso3: 'GNB', flag: '🇬🇼' },
|
||||
'GY' => { name: 'Guyana', iso2: 'GY', iso3: 'GUY', flag: '🇬🇾' },
|
||||
'HT' => { name: 'Haiti', iso2: 'HT', iso3: 'HTI', flag: '🇭🇹' },
|
||||
'VA' => { name: 'Holy See (Vatican City State)', iso2: 'VA', iso3: 'VAT', flag: '🇻🇦' },
|
||||
'HN' => { name: 'Honduras', iso2: 'HN', iso3: 'HND', flag: '🇭🇳' },
|
||||
'HK' => { name: 'Hong Kong', iso2: 'HK', iso3: 'HKG', flag: '🇭🇰' },
|
||||
'HU' => { name: 'Hungary', iso2: 'HU', iso3: 'HUN', flag: '🇭🇺' },
|
||||
'IS' => { name: 'Iceland', iso2: 'IS', iso3: 'ISL', flag: '🇮🇸' },
|
||||
'IN' => { name: 'India', iso2: 'IN', iso3: 'IND', flag: '🇮🇳' },
|
||||
'ID' => { name: 'Indonesia', iso2: 'ID', iso3: 'IDN', flag: '🇮🇩' },
|
||||
'IR' => { name: 'Iran, Islamic Republic of', iso2: 'IR', iso3: 'IRN', flag: '🇮🇷' },
|
||||
'IQ' => { name: 'Iraq', iso2: 'IQ', iso3: 'IRQ', flag: '🇮🇶' },
|
||||
'IE' => { name: 'Ireland', iso2: 'IE', iso3: 'IRL', flag: '🇮🇪' },
|
||||
'IM' => { name: 'Isle of Man', iso2: 'IM', iso3: 'IMN', flag: '🇮🇲' },
|
||||
'IL' => { name: 'Israel', iso2: 'IL', iso3: 'ISR', flag: '🇮🇱' },
|
||||
'IT' => { name: 'Italy', iso2: 'IT', iso3: 'ITA', flag: '🇮🇹' },
|
||||
'JM' => { name: 'Jamaica', iso2: 'JM', iso3: 'JAM', flag: '🇯🇲' },
|
||||
'JP' => { name: 'Japan', iso2: 'JP', iso3: 'JPN', flag: '🇯🇵' },
|
||||
'JE' => { name: 'Jersey', iso2: 'JE', iso3: 'JEY', flag: '🇯🇪' },
|
||||
'JO' => { name: 'Jordan', iso2: 'JO', iso3: 'JOR', flag: '🇯🇴' },
|
||||
'KZ' => { name: 'Kazakhstan', iso2: 'KZ', iso3: 'KAZ', flag: '🇰🇿' },
|
||||
'KE' => { name: 'Kenya', iso2: 'KE', iso3: 'KEN', flag: '🇰🇪' },
|
||||
'KI' => { name: 'Kiribati', iso2: 'KI', iso3: 'KIR', flag: '🇰🇮' },
|
||||
'KP' => { name: 'Korea, Democratic People\'s Republic of', iso2: 'KP', iso3: 'PRK', flag: '🇰🇵' },
|
||||
'KR' => { name: 'Korea, Republic of', iso2: 'KR', iso3: 'KOR', flag: '🇰🇷' },
|
||||
'KW' => { name: 'Kuwait', iso2: 'KW', iso3: 'KWT', flag: '🇰🇼' },
|
||||
'KG' => { name: 'Kyrgyzstan', iso2: 'KG', iso3: 'KGZ', flag: '🇰🇬' },
|
||||
'LA' => { name: 'Lao People\'s Democratic Republic', iso2: 'LA', iso3: 'LAO', flag: '🇱🇦' },
|
||||
'LV' => { name: 'Latvia', iso2: 'LV', iso3: 'LVA', flag: '🇱🇻' },
|
||||
'LB' => { name: 'Lebanon', iso2: 'LB', iso3: 'LBN', flag: '🇱🇧' },
|
||||
'LS' => { name: 'Lesotho', iso2: 'LS', iso3: 'LSO', flag: '🇱🇸' },
|
||||
'LR' => { name: 'Liberia', iso2: 'LR', iso3: 'LBR', flag: '🇱🇷' },
|
||||
'LY' => { name: 'Libya', iso2: 'LY', iso3: 'LBY', flag: '🇱🇾' },
|
||||
'LI' => { name: 'Liechtenstein', iso2: 'LI', iso3: 'LIE', flag: '🇱🇮' },
|
||||
'LT' => { name: 'Lithuania', iso2: 'LT', iso3: 'LTU', flag: '🇱🇹' },
|
||||
'LU' => { name: 'Luxembourg', iso2: 'LU', iso3: 'LUX', flag: '🇱🇺' },
|
||||
'MO' => { name: 'Macao', iso2: 'MO', iso3: 'MAC', flag: '🇲🇴' },
|
||||
'MK' => { name: 'North Macedonia', iso2: 'MK', iso3: 'MKD', flag: '🇲🇰' },
|
||||
'MG' => { name: 'Madagascar', iso2: 'MG', iso3: 'MDG', flag: '🇲🇬' },
|
||||
'MW' => { name: 'Malawi', iso2: 'MW', iso3: 'MWI', flag: '🇲🇼' },
|
||||
'MY' => { name: 'Malaysia', iso2: 'MY', iso3: 'MYS', flag: '🇲🇾' },
|
||||
'MV' => { name: 'Maldives', iso2: 'MV', iso3: 'MDV', flag: '🇲🇻' },
|
||||
'ML' => { name: 'Mali', iso2: 'ML', iso3: 'MLI', flag: '🇲🇱' },
|
||||
'MT' => { name: 'Malta', iso2: 'MT', iso3: 'MLT', flag: '🇲🇹' },
|
||||
'MH' => { name: 'Marshall Islands', iso2: 'MH', iso3: 'MHL', flag: '🇲🇭' },
|
||||
'MQ' => { name: 'Martinique', iso2: 'MQ', iso3: 'MTQ', flag: '🇲🇶' },
|
||||
'MR' => { name: 'Mauritania', iso2: 'MR', iso3: 'MRT', flag: '🇲🇷' },
|
||||
'MU' => { name: 'Mauritius', iso2: 'MU', iso3: 'MUS', flag: '🇲🇺' },
|
||||
'YT' => { name: 'Mayotte', iso2: 'YT', iso3: 'MYT', flag: '🇾🇹' },
|
||||
'MX' => { name: 'Mexico', iso2: 'MX', iso3: 'MEX', flag: '🇲🇽' },
|
||||
'FM' => { name: 'Micronesia, Federated States of', iso2: 'FM', iso3: 'FSM', flag: '🇫🇲' },
|
||||
'MD' => { name: 'Moldova, Republic of', iso2: 'MD', iso3: 'MDA', flag: '🇲🇩' },
|
||||
'MC' => { name: 'Monaco', iso2: 'MC', iso3: 'MCO', flag: '🇲🇨' },
|
||||
'MN' => { name: 'Mongolia', iso2: 'MN', iso3: 'MNG', flag: '🇲🇳' },
|
||||
'ME' => { name: 'Montenegro', iso2: 'ME', iso3: 'MNE', flag: '🇲🇪' },
|
||||
'MS' => { name: 'Montserrat', iso2: 'MS', iso3: 'MSR', flag: '🇲🇸' },
|
||||
'MA' => { name: 'Morocco', iso2: 'MA', iso3: 'MAR', flag: '🇲🇦' },
|
||||
'MZ' => { name: 'Mozambique', iso2: 'MZ', iso3: 'MOZ', flag: '🇲🇿' },
|
||||
'MM' => { name: 'Myanmar', iso2: 'MM', iso3: 'MMR', flag: '🇲🇲' },
|
||||
'NA' => { name: 'Namibia', iso2: 'NA', iso3: 'NAM', flag: '🇳🇦' },
|
||||
'NR' => { name: 'Nauru', iso2: 'NR', iso3: 'NRU', flag: '🇳🇷' },
|
||||
'NP' => { name: 'Nepal', iso2: 'NP', iso3: 'NPL', flag: '🇳🇵' },
|
||||
'NL' => { name: 'Netherlands', iso2: 'NL', iso3: 'NLD', flag: '🇳🇱' },
|
||||
'NC' => { name: 'New Caledonia', iso2: 'NC', iso3: 'NCL', flag: '🇳🇨' },
|
||||
'NZ' => { name: 'New Zealand', iso2: 'NZ', iso3: 'NZL', flag: '🇳🇿' },
|
||||
'NI' => { name: 'Nicaragua', iso2: 'NI', iso3: 'NIC', flag: '🇳🇮' },
|
||||
'NE' => { name: 'Niger', iso2: 'NE', iso3: 'NER', flag: '🇳🇪' },
|
||||
'NG' => { name: 'Nigeria', iso2: 'NG', iso3: 'NGA', flag: '🇳🇬' },
|
||||
'NU' => { name: 'Niue', iso2: 'NU', iso3: 'NIU', flag: '🇳🇺' },
|
||||
'NF' => { name: 'Norfolk Island', iso2: 'NF', iso3: 'NFK', flag: '🇳🇫' },
|
||||
'MP' => { name: 'Northern Mariana Islands', iso2: 'MP', iso3: 'MNP', flag: '🇲🇵' },
|
||||
'NO' => { name: 'Norway', iso2: 'NO', iso3: 'NOR', flag: '🇳🇴' },
|
||||
'OM' => { name: 'Oman', iso2: 'OM', iso3: 'OMN', flag: '🇴🇲' },
|
||||
'PK' => { name: 'Pakistan', iso2: 'PK', iso3: 'PAK', flag: '🇵🇰' },
|
||||
'PW' => { name: 'Palau', iso2: 'PW', iso3: 'PLW', flag: '🇵🇼' },
|
||||
'PS' => { name: 'Palestine, State of', iso2: 'PS', iso3: 'PSE', flag: '🇵🇸' },
|
||||
'PA' => { name: 'Panama', iso2: 'PA', iso3: 'PAN', flag: '🇵🇦' },
|
||||
'PG' => { name: 'Papua New Guinea', iso2: 'PG', iso3: 'PNG', flag: '🇵🇬' },
|
||||
'PY' => { name: 'Paraguay', iso2: 'PY', iso3: 'PRY', flag: '🇵🇾' },
|
||||
'PE' => { name: 'Peru', iso2: 'PE', iso3: 'PER', flag: '🇵🇪' },
|
||||
'PH' => { name: 'Philippines', iso2: 'PH', iso3: 'PHL', flag: '🇵🇭' },
|
||||
'PN' => { name: 'Pitcairn', iso2: 'PN', iso3: 'PCN', flag: '🇵🇳' },
|
||||
'PL' => { name: 'Poland', iso2: 'PL', iso3: 'POL', flag: '🇵🇱' },
|
||||
'PT' => { name: 'Portugal', iso2: 'PT', iso3: 'PRT', flag: '🇵🇹' },
|
||||
'PR' => { name: 'Puerto Rico', iso2: 'PR', iso3: 'PRI', flag: '🇵🇷' },
|
||||
'QA' => { name: 'Qatar', iso2: 'QA', iso3: 'QAT', flag: '🇶🇦' },
|
||||
'RE' => { name: 'Réunion', iso2: 'RE', iso3: 'REU', flag: '🇷🇪' },
|
||||
'RO' => { name: 'Romania', iso2: 'RO', iso3: 'ROU', flag: '🇷🇴' },
|
||||
'RU' => { name: 'Russian Federation', iso2: 'RU', iso3: 'RUS', flag: '🇷🇺' },
|
||||
'RW' => { name: 'Rwanda', iso2: 'RW', iso3: 'RWA', flag: '🇷🇼' },
|
||||
'BL' => { name: 'Saint Barthélemy', iso2: 'BL', iso3: 'BLM', flag: '🇧🇱' },
|
||||
'SH' => { name: 'Saint Helena, Ascension and Tristan da Cunha', iso2: 'SH', iso3: 'SHN', flag: '🇸🇭' },
|
||||
'KN' => { name: 'Saint Kitts and Nevis', iso2: 'KN', iso3: 'KNA', flag: '🇰🇳' },
|
||||
'LC' => { name: 'Saint Lucia', iso2: 'LC', iso3: 'LCA', flag: '🇱🇨' },
|
||||
'MF' => { name: 'Saint Martin (French part)', iso2: 'MF', iso3: 'MAF', flag: '🇲🇫' },
|
||||
'PM' => { name: 'Saint Pierre and Miquelon', iso2: 'PM', iso3: 'SPM', flag: '🇵🇲' },
|
||||
'VC' => { name: 'Saint Vincent and the Grenadines', iso2: 'VC', iso3: 'VCT', flag: '🇻🇨' },
|
||||
'WS' => { name: 'Samoa', iso2: 'WS', iso3: 'WSM', flag: '🇼🇸' },
|
||||
'SM' => { name: 'San Marino', iso2: 'SM', iso3: 'SMR', flag: '🇸🇲' },
|
||||
'ST' => { name: 'Sao Tome and Principe', iso2: 'ST', iso3: 'STP', flag: '🇸🇹' },
|
||||
'SA' => { name: 'Saudi Arabia', iso2: 'SA', iso3: 'SAU', flag: '🇸🇦' },
|
||||
'SN' => { name: 'Senegal', iso2: 'SN', iso3: 'SEN', flag: '🇸🇳' },
|
||||
'RS' => { name: 'Serbia', iso2: 'RS', iso3: 'SRB', flag: '🇷🇸' },
|
||||
'SC' => { name: 'Seychelles', iso2: 'SC', iso3: 'SYC', flag: '🇸🇨' },
|
||||
'SL' => { name: 'Sierra Leone', iso2: 'SL', iso3: 'SLE', flag: '🇸🇱' },
|
||||
'SG' => { name: 'Singapore', iso2: 'SG', iso3: 'SGP', flag: '🇸🇬' },
|
||||
'SX' => { name: 'Sint Maarten (Dutch part)', iso2: 'SX', iso3: 'SXM', flag: '🇸🇽' },
|
||||
'SK' => { name: 'Slovakia', iso2: 'SK', iso3: 'SVK', flag: '🇸🇰' },
|
||||
'SI' => { name: 'Slovenia', iso2: 'SI', iso3: 'SVN', flag: '🇸🇮' },
|
||||
'SB' => { name: 'Solomon Islands', iso2: 'SB', iso3: 'SLB', flag: '🇸🇧' },
|
||||
'SO' => { name: 'Somalia', iso2: 'SO', iso3: 'SOM', flag: '🇸🇴' },
|
||||
'ZA' => { name: 'South Africa', iso2: 'ZA', iso3: 'ZAF', flag: '🇿🇦' },
|
||||
'GS' => { name: 'South Georgia and the South Sandwich Islands', iso2: 'GS', iso3: 'SGS', flag: '🇬🇸' },
|
||||
'SS' => { name: 'South Sudan', iso2: 'SS', iso3: 'SSD', flag: '🇸🇸' },
|
||||
'ES' => { name: 'Spain', iso2: 'ES', iso3: 'ESP', flag: '🇪🇸' },
|
||||
'LK' => { name: 'Sri Lanka', iso2: 'LK', iso3: 'LKA', flag: '🇱🇰' },
|
||||
'SD' => { name: 'Sudan', iso2: 'SD', iso3: 'SDN', flag: '🇸🇩' },
|
||||
'SR' => { name: 'Suriname', iso2: 'SR', iso3: 'SUR', flag: '🇸🇷' },
|
||||
'SJ' => { name: 'Svalbard and Jan Mayen', iso2: 'SJ', iso3: 'SJM', flag: '🇸🇯' },
|
||||
'SE' => { name: 'Sweden', iso2: 'SE', iso3: 'SWE', flag: '🇸🇪' },
|
||||
'CH' => { name: 'Switzerland', iso2: 'CH', iso3: 'CHE', flag: '🇨🇭' },
|
||||
'SY' => { name: 'Syrian Arab Republic', iso2: 'SY', iso3: 'SYR', flag: '🇸🇾' },
|
||||
'TW' => { name: 'Taiwan, Province of China', iso2: 'TW', iso3: 'TWN', flag: '🇹🇼' },
|
||||
'TJ' => { name: 'Tajikistan', iso2: 'TJ', iso3: 'TJK', flag: '🇹🇯' },
|
||||
'TZ' => { name: 'Tanzania, United Republic of', iso2: 'TZ', iso3: 'TZA', flag: '🇹🇿' },
|
||||
'TH' => { name: 'Thailand', iso2: 'TH', iso3: 'THA', flag: '🇹🇭' },
|
||||
'TL' => { name: 'Timor-Leste', iso2: 'TL', iso3: 'TLS', flag: '🇹🇱' },
|
||||
'TG' => { name: 'Togo', iso2: 'TG', iso3: 'TGO', flag: '🇹🇬' },
|
||||
'TK' => { name: 'Tokelau', iso2: 'TK', iso3: 'TKL', flag: '🇹🇰' },
|
||||
'TO' => { name: 'Tonga', iso2: 'TO', iso3: 'TON', flag: '🇹🇴' },
|
||||
'TT' => { name: 'Trinidad and Tobago', iso2: 'TT', iso3: 'TTO', flag: '🇹🇹' },
|
||||
'TN' => { name: 'Tunisia', iso2: 'TN', iso3: 'TUN', flag: '🇹🇳' },
|
||||
'TR' => { name: 'Turkey', iso2: 'TR', iso3: 'TUR', flag: '🇹🇷' },
|
||||
'TM' => { name: 'Turkmenistan', iso2: 'TM', iso3: 'TKM', flag: '🇹🇲' },
|
||||
'TC' => { name: 'Turks and Caicos Islands', iso2: 'TC', iso3: 'TCA', flag: '🇹🇨' },
|
||||
'TV' => { name: 'Tuvalu', iso2: 'TV', iso3: 'TUV', flag: '🇹🇻' },
|
||||
'UG' => { name: 'Uganda', iso2: 'UG', iso3: 'UGA', flag: '🇺🇬' },
|
||||
'UA' => { name: 'Ukraine', iso2: 'UA', iso3: 'UKR', flag: '🇺🇦' },
|
||||
'AE' => { name: 'United Arab Emirates', iso2: 'AE', iso3: 'ARE', flag: '🇦🇪' },
|
||||
'GB' => { name: 'United Kingdom', iso2: 'GB', iso3: 'GBR', flag: '🇬🇧' },
|
||||
'US' => { name: 'United States', iso2: 'US', iso3: 'USA', flag: '🇺🇸' },
|
||||
'UM' => { name: 'United States Minor Outlying Islands', iso2: 'UM', iso3: 'UMI', flag: '🇺🇲' },
|
||||
'UY' => { name: 'Uruguay', iso2: 'UY', iso3: 'URY', flag: '🇺🇾' },
|
||||
'UZ' => { name: 'Uzbekistan', iso2: 'UZ', iso3: 'UZB', flag: '🇺🇿' },
|
||||
'VU' => { name: 'Vanuatu', iso2: 'VU', iso3: 'VUT', flag: '🇻🇺' },
|
||||
'VE' => { name: 'Venezuela, Bolivarian Republic of', iso2: 'VE', iso3: 'VEN', flag: '🇻🇪' },
|
||||
'VN' => { name: 'Viet Nam', iso2: 'VN', iso3: 'VNM', flag: '🇻🇳' },
|
||||
'VG' => { name: 'Virgin Islands, British', iso2: 'VG', iso3: 'VGB', flag: '🇻🇬' },
|
||||
'VI' => { name: 'Virgin Islands, U.S.', iso2: 'VI', iso3: 'VIR', flag: '🇻🇮' },
|
||||
'WF' => { name: 'Wallis and Futuna', iso2: 'WF', iso3: 'WLF', flag: '🇼🇫' },
|
||||
'EH' => { name: 'Western Sahara', iso2: 'EH', iso3: 'ESH', flag: '🇪🇭' },
|
||||
'YE' => { name: 'Yemen', iso2: 'YE', iso3: 'YEM', flag: '🇾🇪' },
|
||||
'ZM' => { name: 'Zambia', iso2: 'ZM', iso3: 'ZMB', flag: '🇿🇲' },
|
||||
'ZW' => { name: 'Zimbabwe', iso2: 'ZW', iso3: 'ZWE', flag: '🇿🇼' }
|
||||
}.freeze
|
||||
|
||||
# Country name aliases and variations for better matching
|
||||
COUNTRY_ALIASES = {
|
||||
'Russia' => 'Russian Federation',
|
||||
'South Korea' => 'Korea, Republic of',
|
||||
'North Korea' => 'Korea, Democratic People\'s Republic of',
|
||||
'United States of America' => 'United States',
|
||||
'USA' => 'United States',
|
||||
'UK' => 'United Kingdom',
|
||||
'Britain' => 'United Kingdom',
|
||||
'Great Britain' => 'United Kingdom',
|
||||
'England' => 'United Kingdom',
|
||||
'Scotland' => 'United Kingdom',
|
||||
'Wales' => 'United Kingdom',
|
||||
'Northern Ireland' => 'United Kingdom',
|
||||
'Macedonia' => 'North Macedonia',
|
||||
'Czech Republic' => 'Czech Republic',
|
||||
'Czechia' => 'Czech Republic',
|
||||
'Vatican' => 'Holy See (Vatican City State)',
|
||||
'Vatican City' => 'Holy See (Vatican City State)',
|
||||
'Taiwan' => 'Taiwan, Province of China',
|
||||
'Hong Kong SAR' => 'Hong Kong',
|
||||
'Macao SAR' => 'Macao',
|
||||
'Moldova' => 'Moldova, Republic of',
|
||||
'Bolivia' => 'Bolivia',
|
||||
'Venezuela' => 'Venezuela, Bolivarian Republic of',
|
||||
'Iran' => 'Iran, Islamic Republic of',
|
||||
'Syria' => 'Syrian Arab Republic',
|
||||
'Tanzania' => 'Tanzania, United Republic of',
|
||||
'Laos' => 'Lao People\'s Democratic Republic',
|
||||
'Vietnam' => 'Viet Nam',
|
||||
'Palestine' => 'Palestine, State of',
|
||||
'Congo' => 'Congo',
|
||||
'Democratic Republic of Congo' => 'Congo, Democratic Republic of the',
|
||||
'DRC' => 'Congo, Democratic Republic of the',
|
||||
'Ivory Coast' => 'Côte d\'Ivoire',
|
||||
'Cape Verde' => 'Cape Verde',
|
||||
'East Timor' => 'Timor-Leste',
|
||||
'Burma' => 'Myanmar',
|
||||
'Swaziland' => 'Eswatini'
|
||||
}.freeze
|
||||
|
||||
def self.iso_a3_from_a2(iso_a2)
|
||||
return nil if iso_a2.blank?
|
||||
|
||||
country_data = COUNTRIES[iso_a2.upcase]
|
||||
country_data&.dig(:iso3)
|
||||
end
|
||||
|
||||
def self.iso_codes_from_country_name(country_name)
|
||||
return [nil, nil] if country_name.blank?
|
||||
|
||||
# Try exact match first
|
||||
country_data = find_country_by_name(country_name)
|
||||
return [country_data[:iso2], country_data[:iso3]] if country_data
|
||||
|
||||
# Try aliases
|
||||
standard_name = COUNTRY_ALIASES[country_name]
|
||||
if standard_name
|
||||
country_data = find_country_by_name(standard_name)
|
||||
return [country_data[:iso2], country_data[:iso3]] if country_data
|
||||
end
|
||||
|
||||
# Try case-insensitive match
|
||||
country_data = COUNTRIES.values.find { |data| data[:name].downcase == country_name.downcase }
|
||||
return [country_data[:iso2], country_data[:iso3]] if country_data
|
||||
|
||||
# Try partial match (country name contains or is contained in a known name)
|
||||
country_data = COUNTRIES.values.find do |data|
|
||||
data[:name].downcase.include?(country_name.downcase) ||
|
||||
country_name.downcase.include?(data[:name].downcase)
|
||||
end
|
||||
return [country_data[:iso2], country_data[:iso3]] if country_data
|
||||
|
||||
# No match found
|
||||
[nil, nil]
|
||||
end
|
||||
|
||||
def self.fallback_codes_from_country_name(country_name)
|
||||
return [nil, nil] if country_name.blank?
|
||||
|
||||
# First try to find proper ISO codes from country name
|
||||
iso_a2, iso_a3 = iso_codes_from_country_name(country_name)
|
||||
return [iso_a2, iso_a3] if iso_a2 && iso_a3
|
||||
|
||||
# Only use character-based fallback as a last resort
|
||||
# This is still not ideal but better than nothing
|
||||
fallback_a2 = country_name[0..1].upcase
|
||||
fallback_a3 = country_name[0..2].upcase
|
||||
|
||||
[fallback_a2, fallback_a3]
|
||||
end
|
||||
|
||||
def self.standardize_country_name(country_name)
|
||||
return nil if country_name.blank?
|
||||
|
||||
# Try exact match first
|
||||
country_data = find_country_by_name(country_name)
|
||||
return country_data[:name] if country_data
|
||||
|
||||
# Try aliases
|
||||
standard_name = COUNTRY_ALIASES[country_name]
|
||||
return standard_name if standard_name
|
||||
|
||||
# Try case-insensitive match
|
||||
country_data = COUNTRIES.values.find { |data| data[:name].downcase == country_name.downcase }
|
||||
return country_data[:name] if country_data
|
||||
|
||||
# Try partial match
|
||||
country_data = COUNTRIES.values.find do |data|
|
||||
data[:name].downcase.include?(country_name.downcase) ||
|
||||
country_name.downcase.include?(data[:name].downcase)
|
||||
end
|
||||
return country_data[:name] if country_data
|
||||
|
||||
nil
|
||||
end
|
||||
|
||||
def self.country_flag(iso_a2)
|
||||
return nil if iso_a2.blank?
|
||||
|
||||
country_data = COUNTRIES[iso_a2.upcase]
|
||||
country_data&.dig(:flag)
|
||||
end
|
||||
|
||||
def self.country_by_iso2(iso_a2)
|
||||
return nil if iso_a2.blank?
|
||||
|
||||
COUNTRIES[iso_a2.upcase]
|
||||
end
|
||||
|
||||
def self.country_by_name(country_name)
|
||||
return nil if country_name.blank?
|
||||
|
||||
find_country_by_name(country_name) ||
|
||||
find_country_by_name(COUNTRY_ALIASES[country_name]) ||
|
||||
COUNTRIES.values.find { |data| data[:name].downcase == country_name.downcase }
|
||||
end
|
||||
|
||||
def self.all_countries
|
||||
COUNTRIES.values
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def self.find_country_by_name(name)
|
||||
return nil if name.blank?
|
||||
|
||||
COUNTRIES.values.find { |data| data[:name] == name }
|
||||
end
|
||||
end
|
||||
|
|
@ -10,8 +10,8 @@ class CountriesAndCities
|
|||
|
||||
def call
|
||||
points
|
||||
.reject { |point| point.country.nil? || point.city.nil? }
|
||||
.group_by(&:country)
|
||||
.reject { |point| point.read_attribute(:country).nil? || point.city.nil? }
|
||||
.group_by { |point| point.read_attribute(:country) }
|
||||
.transform_values { |country_points| process_country_points(country_points) }
|
||||
.map { |country, cities| CountryData.new(country: country, cities: cities) }
|
||||
end
|
||||
|
|
|
|||
|
|
@ -1,9 +1,11 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class ExceptionReporter
|
||||
def self.call(exception)
|
||||
def self.call(exception, human_message = 'Exception reported')
|
||||
return unless DawarichSettings.self_hosted?
|
||||
|
||||
Rails.logger.error "#{human_message}: #{exception.message}"
|
||||
|
||||
Sentry.capture_exception(exception)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ class GoogleMaps::RecordsStorageImporter
|
|||
|
||||
def call
|
||||
process_file_in_batches
|
||||
rescue Oj::ParseError => e
|
||||
rescue Oj::ParseError, JSON::ParserError => e
|
||||
Rails.logger.error("JSON parsing error: #{e.message}")
|
||||
raise
|
||||
end
|
||||
|
|
|
|||
|
|
@ -8,7 +8,21 @@ module Imports::Broadcaster
|
|||
action: 'update',
|
||||
import: {
|
||||
id: import.id,
|
||||
points_count: index
|
||||
points_count: index,
|
||||
status: import.status
|
||||
}
|
||||
}
|
||||
)
|
||||
end
|
||||
|
||||
def broadcast_status_update
|
||||
ImportsChannel.broadcast_to(
|
||||
import.user,
|
||||
{
|
||||
action: 'status_update',
|
||||
import: {
|
||||
id: import.id,
|
||||
status: import.status
|
||||
}
|
||||
}
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Imports::Create
|
||||
include Imports::Broadcaster
|
||||
|
||||
attr_reader :user, :import
|
||||
|
||||
def initialize(user, import)
|
||||
|
|
@ -9,13 +11,24 @@ class Imports::Create
|
|||
end
|
||||
|
||||
def call
|
||||
import.update!(status: :processing)
|
||||
broadcast_status_update
|
||||
|
||||
importer(import.source).new(import, user.id).call
|
||||
|
||||
schedule_stats_creating(user.id)
|
||||
schedule_visit_suggesting(user.id, import)
|
||||
update_import_points_count(import)
|
||||
rescue StandardError => e
|
||||
import.update!(status: :failed)
|
||||
broadcast_status_update
|
||||
|
||||
create_import_failed_notification(import, user, e)
|
||||
ensure
|
||||
if import.processing?
|
||||
import.update!(status: :completed)
|
||||
broadcast_status_update
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
|
|
|||
18
app/services/notifications.rb
Normal file
18
app/services/notifications.rb
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
module Notifications
|
||||
class Create
|
||||
attr_reader :user, :kind, :title, :content
|
||||
|
||||
def initialize(user:, kind:, title:, content:)
|
||||
@user = user
|
||||
@kind = kind
|
||||
@title = title
|
||||
@content = content
|
||||
end
|
||||
|
||||
def call
|
||||
Notification.create!(user:, kind:, title:, content:)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Notifications::Create
|
||||
attr_reader :user, :kind, :title, :content
|
||||
|
||||
def initialize(user:, kind:, title:, content:)
|
||||
@user = user
|
||||
@kind = kind
|
||||
@title = title
|
||||
@content = content
|
||||
end
|
||||
|
||||
def call
|
||||
Notification.create!(user:, kind:, title:, content:)
|
||||
end
|
||||
end
|
||||
|
|
@ -23,9 +23,11 @@ class ReverseGeocoding::Points::FetchData
|
|||
response = Geocoder.search([point.lat, point.lon]).first
|
||||
return if response.blank? || response.data['error'].present?
|
||||
|
||||
country_record = Country.find_by(name: response.country) if response.country
|
||||
|
||||
point.update!(
|
||||
city: response.city,
|
||||
country: response.country,
|
||||
country_id: country_record&.id,
|
||||
geodata: response.data,
|
||||
reverse_geocoded_at: Time.current
|
||||
)
|
||||
|
|
|
|||
388
app/services/users/export_data.rb
Normal file
388
app/services/users/export_data.rb
Normal file
|
|
@ -0,0 +1,388 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'zip'
|
||||
|
||||
# Users::ExportData - Exports complete user data with preserved relationships
|
||||
#
|
||||
# Output JSON Structure Example:
|
||||
# {
|
||||
# "counts": {
|
||||
# "areas": 5,
|
||||
# "imports": 12,
|
||||
# "exports": 3,
|
||||
# "trips": 8,
|
||||
# "stats": 24,
|
||||
# "notifications": 10,
|
||||
# "points": 15000,
|
||||
# "visits": 45,
|
||||
# "places": 20
|
||||
# },
|
||||
# "settings": {
|
||||
# "distance_unit": "km",
|
||||
# "timezone": "UTC",
|
||||
# "immich_url": "https://immich.example.com",
|
||||
# // ... other user settings (exported via user.safe_settings.settings)
|
||||
# },
|
||||
# "areas": [
|
||||
# {
|
||||
# "name": "Home",
|
||||
# "latitude": "40.7128",
|
||||
# "longitude": "-74.0060",
|
||||
# "radius": 100,
|
||||
# "created_at": "2024-01-01T00:00:00Z",
|
||||
# "updated_at": "2024-01-01T00:00:00Z"
|
||||
# }
|
||||
# ],
|
||||
# "imports": [
|
||||
# {
|
||||
# "name": "2023_MARCH.json",
|
||||
# "source": "google_semantic_history",
|
||||
# "created_at": "2024-01-01T00:00:00Z",
|
||||
# "updated_at": "2024-01-01T00:00:00Z",
|
||||
# "raw_points": 15432,
|
||||
# "doubles": 23,
|
||||
# "processed": 15409,
|
||||
# "points_count": 15409,
|
||||
# "status": "completed",
|
||||
# "file_name": "import_1_2023_MARCH.json",
|
||||
# "original_filename": "2023_MARCH.json",
|
||||
# "file_size": 2048576,
|
||||
# "content_type": "application/json"
|
||||
# // Note: file_error may be present if file download fails
|
||||
# // Note: file_name and original_filename will be null if no file attached
|
||||
# }
|
||||
# ],
|
||||
# "exports": [
|
||||
# {
|
||||
# "name": "export_2024-01-01_to_2024-01-31.json",
|
||||
# "url": null,
|
||||
# "status": "completed",
|
||||
# "file_format": "json",
|
||||
# "file_type": "points",
|
||||
# "start_at": "2024-01-01T00:00:00Z",
|
||||
# "end_at": "2024-01-31T23:59:59Z",
|
||||
# "created_at": "2024-02-01T00:00:00Z",
|
||||
# "updated_at": "2024-02-01T00:00:00Z",
|
||||
# "file_name": "export_1_export_2024-01-01_to_2024-01-31.json",
|
||||
# "original_filename": "export_2024-01-01_to_2024-01-31.json",
|
||||
# "file_size": 1048576,
|
||||
# "content_type": "application/json"
|
||||
# // Note: file_error may be present if file download fails
|
||||
# // Note: file_name and original_filename will be null if no file attached
|
||||
# }
|
||||
# ],
|
||||
# "trips": [
|
||||
# {
|
||||
# "name": "Business Trip to NYC",
|
||||
# "started_at": "2024-01-15T08:00:00Z",
|
||||
# "ended_at": "2024-01-18T20:00:00Z",
|
||||
# "distance": 1245,
|
||||
# "path": null, // PostGIS LineString geometry
|
||||
# "visited_countries": {"US": "United States", "CA": "Canada"},
|
||||
# "created_at": "2024-01-19T00:00:00Z",
|
||||
# "updated_at": "2024-01-19T00:00:00Z"
|
||||
# }
|
||||
# ],
|
||||
# "stats": [
|
||||
# {
|
||||
# "year": 2024,
|
||||
# "month": 1,
|
||||
# "distance": 456, // Note: integer, not float
|
||||
# "daily_distance": {"1": 15.2, "2": 23.5}, // jsonb object
|
||||
# "toponyms": [
|
||||
# {"country": "United States", "cities": [{"city": "New York"}]}
|
||||
# ],
|
||||
# "created_at": "2024-02-01T00:00:00Z",
|
||||
# "updated_at": "2024-02-01T00:00:00Z"
|
||||
# }
|
||||
# ],
|
||||
# "notifications": [
|
||||
# {
|
||||
# "kind": "info",
|
||||
# "title": "Import completed",
|
||||
# "content": "Your data import has been processed successfully",
|
||||
# "read_at": "2024-01-01T12:30:00Z", // null if unread
|
||||
# "created_at": "2024-01-01T12:00:00Z",
|
||||
# "updated_at": "2024-01-01T12:30:00Z"
|
||||
# }
|
||||
# ],
|
||||
# "points": [
|
||||
# {
|
||||
# "battery_status": "charging",
|
||||
# "battery": 85,
|
||||
# "timestamp": 1704067200,
|
||||
# "altitude": 15.5,
|
||||
# "velocity": 25.5,
|
||||
# "accuracy": 5.0,
|
||||
# "ping": "test-ping",
|
||||
# "tracker_id": "tracker-123",
|
||||
# "topic": "owntracks/user/device",
|
||||
# "trigger": "manual_event",
|
||||
# "bssid": "aa:bb:cc:dd:ee:ff",
|
||||
# "ssid": "TestWiFi",
|
||||
# "connection": "wifi",
|
||||
# "vertical_accuracy": 3.0,
|
||||
# "mode": 2,
|
||||
# "inrids": ["region1", "region2"],
|
||||
# "in_regions": ["home", "work"],
|
||||
# "raw_data": {"test": "data"},
|
||||
# "city": "New York",
|
||||
# "country": "United States",
|
||||
# "geodata": {"address": "123 Main St"},
|
||||
# "reverse_geocoded_at": "2024-01-01T00:00:00Z",
|
||||
# "course": 45.5,
|
||||
# "course_accuracy": 2.5,
|
||||
# "external_track_id": "ext-123",
|
||||
# "lonlat": "POINT(-74.006 40.7128)",
|
||||
# "longitude": -74.006,
|
||||
# "latitude": 40.7128,
|
||||
# "created_at": "2024-01-01T00:00:00Z",
|
||||
# "updated_at": "2024-01-01T00:00:00Z",
|
||||
# "import_reference": {
|
||||
# "name": "2023_MARCH.json",
|
||||
# "source": "google_semantic_history",
|
||||
# "created_at": "2024-01-01T00:00:00Z"
|
||||
# },
|
||||
# "country_info": {
|
||||
# "name": "United States",
|
||||
# "iso_a2": "US",
|
||||
# "iso_a3": "USA"
|
||||
# },
|
||||
# "visit_reference": {
|
||||
# "name": "Work Visit",
|
||||
# "started_at": "2024-01-01T08:00:00Z",
|
||||
# "ended_at": "2024-01-01T17:00:00Z"
|
||||
# }
|
||||
# },
|
||||
# {
|
||||
# // Example of point without relationships (edge cases)
|
||||
# "timestamp": 1704070800,
|
||||
# "altitude": 10.0,
|
||||
# "longitude": -73.9857,
|
||||
# "latitude": 40.7484,
|
||||
# "lonlat": "POINT(-73.9857 40.7484)",
|
||||
# "created_at": "2024-01-01T00:05:00Z",
|
||||
# "updated_at": "2024-01-01T00:05:00Z",
|
||||
# "import_reference": null, // Orphaned point
|
||||
# "country_info": null, // No country data
|
||||
# "visit_reference": null // Not part of a visit
|
||||
# // ... other point fields may be null
|
||||
# }
|
||||
# ],
|
||||
# "visits": [
|
||||
# {
|
||||
# "area_id": 123,
|
||||
# "started_at": "2024-01-01T08:00:00Z",
|
||||
# "ended_at": "2024-01-01T17:00:00Z",
|
||||
# "duration": 32400,
|
||||
# "name": "Work Visit",
|
||||
# "status": "suggested",
|
||||
# "created_at": "2024-01-01T00:00:00Z",
|
||||
# "updated_at": "2024-01-01T00:00:00Z",
|
||||
# "place_reference": {
|
||||
# "name": "Office Building",
|
||||
# "latitude": "40.7589",
|
||||
# "longitude": "-73.9851",
|
||||
# "source": "manual"
|
||||
# }
|
||||
# },
|
||||
# {
|
||||
# // Example of visit without place
|
||||
# "area_id": null,
|
||||
# "started_at": "2024-01-02T10:00:00Z",
|
||||
# "ended_at": "2024-01-02T12:00:00Z",
|
||||
# "duration": 7200,
|
||||
# "name": "Unknown Location",
|
||||
# "status": "confirmed",
|
||||
# "created_at": "2024-01-02T00:00:00Z",
|
||||
# "updated_at": "2024-01-02T00:00:00Z",
|
||||
# "place_reference": null // No associated place
|
||||
# }
|
||||
# ],
|
||||
# "places": [
|
||||
# {
|
||||
# "name": "Office Building",
|
||||
# "longitude": "-73.9851",
|
||||
# "latitude": "40.7589",
|
||||
# "city": "New York",
|
||||
# "country": "United States",
|
||||
# "source": "manual",
|
||||
# "geodata": {"properties": {"name": "Office Building"}},
|
||||
# "reverse_geocoded_at": "2024-01-01T00:00:00Z",
|
||||
# "lonlat": "POINT(-73.9851 40.7589)",
|
||||
# "created_at": "2024-01-01T00:00:00Z",
|
||||
# "updated_at": "2024-01-01T00:00:00Z"
|
||||
# }
|
||||
# ]
|
||||
# }
|
||||
#
|
||||
# Import Strategy Notes:
|
||||
# 1. Countries: Look up by name/ISO codes, create if missing
|
||||
# 2. Imports: Match by name + source + created_at, create new import records
|
||||
# 3. Places: Match by name + coordinates, create if missing
|
||||
# 4. Visits: Match by name + timestamps + place_reference, create if missing
|
||||
# 5. Points: Import with reconstructed foreign keys from references
|
||||
# 6. Files: Import files are available in the files/ directory with names from file_name fields
|
||||
|
||||
class Users::ExportData
|
||||
def initialize(user)
|
||||
@user = user
|
||||
end
|
||||
|
||||
def export
|
||||
timestamp = Time.current.strftime('%Y%m%d_%H%M%S')
|
||||
@export_directory = Rails.root.join('tmp', "#{user.email.gsub(/[^0-9A-Za-z._-]/, '_')}_#{timestamp}")
|
||||
@files_directory = @export_directory.join('files')
|
||||
|
||||
FileUtils.mkdir_p(@files_directory)
|
||||
|
||||
export_record = user.exports.create!(
|
||||
name: "user_data_export_#{timestamp}.zip",
|
||||
file_format: :archive,
|
||||
file_type: :user_data,
|
||||
status: :processing
|
||||
)
|
||||
|
||||
begin
|
||||
json_file_path = @export_directory.join('data.json')
|
||||
|
||||
# Stream JSON writing instead of building in memory
|
||||
File.open(json_file_path, 'w') do |file|
|
||||
file.write('{"counts":')
|
||||
file.write(calculate_entity_counts.to_json)
|
||||
|
||||
file.write(',"settings":')
|
||||
file.write(user.safe_settings.settings.to_json)
|
||||
|
||||
file.write(',"areas":')
|
||||
file.write(Users::ExportData::Areas.new(user).call.to_json)
|
||||
|
||||
file.write(',"imports":')
|
||||
file.write(Users::ExportData::Imports.new(user, @files_directory).call.to_json)
|
||||
|
||||
file.write(',"exports":')
|
||||
file.write(Users::ExportData::Exports.new(user, @files_directory).call.to_json)
|
||||
|
||||
file.write(',"trips":')
|
||||
file.write(Users::ExportData::Trips.new(user).call.to_json)
|
||||
|
||||
file.write(',"stats":')
|
||||
file.write(Users::ExportData::Stats.new(user).call.to_json)
|
||||
|
||||
file.write(',"notifications":')
|
||||
file.write(Users::ExportData::Notifications.new(user).call.to_json)
|
||||
|
||||
file.write(',"points":')
|
||||
file.write(Users::ExportData::Points.new(user).call.to_json)
|
||||
|
||||
file.write(',"visits":')
|
||||
file.write(Users::ExportData::Visits.new(user).call.to_json)
|
||||
|
||||
file.write(',"places":')
|
||||
file.write(Users::ExportData::Places.new(user).call.to_json)
|
||||
|
||||
file.write('}')
|
||||
end
|
||||
|
||||
zip_file_path = @export_directory.join('export.zip')
|
||||
create_zip_archive(@export_directory, zip_file_path)
|
||||
|
||||
export_record.file.attach(
|
||||
io: File.open(zip_file_path),
|
||||
filename: export_record.name,
|
||||
content_type: 'application/zip'
|
||||
)
|
||||
|
||||
export_record.update!(status: :completed)
|
||||
|
||||
create_success_notification
|
||||
|
||||
export_record
|
||||
rescue StandardError => e
|
||||
export_record.update!(status: :failed) if export_record
|
||||
|
||||
ExceptionReporter.call(e, 'Export failed')
|
||||
|
||||
raise e
|
||||
ensure
|
||||
cleanup_temporary_files(@export_directory) if @export_directory&.exist?
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user
|
||||
|
||||
def export_directory
|
||||
@export_directory
|
||||
end
|
||||
|
||||
def files_directory
|
||||
@files_directory
|
||||
end
|
||||
|
||||
def calculate_entity_counts
|
||||
Rails.logger.info "Calculating entity counts for export"
|
||||
|
||||
counts = {
|
||||
areas: user.areas.count,
|
||||
imports: user.imports.count,
|
||||
exports: user.exports.count,
|
||||
trips: user.trips.count,
|
||||
stats: user.stats.count,
|
||||
notifications: user.notifications.count,
|
||||
points: user.tracked_points.count,
|
||||
visits: user.visits.count,
|
||||
places: user.places.count
|
||||
}
|
||||
|
||||
Rails.logger.info "Entity counts: #{counts}"
|
||||
counts
|
||||
end
|
||||
|
||||
def create_zip_archive(export_directory, zip_file_path)
|
||||
original_compression = Zip.default_compression
|
||||
Zip.default_compression = Zip::Entry::DEFLATED
|
||||
|
||||
Zip::File.open(zip_file_path, Zip::File::CREATE) do |zipfile|
|
||||
Dir.glob(export_directory.join('**', '*')).each do |file|
|
||||
next if File.directory?(file) || file == zip_file_path.to_s
|
||||
|
||||
relative_path = file.sub(export_directory.to_s + '/', '')
|
||||
|
||||
zipfile.add(relative_path, file)
|
||||
end
|
||||
end
|
||||
ensure
|
||||
Zip.default_compression = original_compression if original_compression
|
||||
end
|
||||
|
||||
def cleanup_temporary_files(export_directory)
|
||||
return unless File.directory?(export_directory)
|
||||
|
||||
Rails.logger.info "Cleaning up temporary export directory: #{export_directory}"
|
||||
FileUtils.rm_rf(export_directory)
|
||||
rescue StandardError => e
|
||||
ExceptionReporter.call(e, 'Failed to cleanup temporary files')
|
||||
end
|
||||
|
||||
def create_success_notification
|
||||
counts = calculate_entity_counts
|
||||
summary = "#{counts[:points]} points, " \
|
||||
"#{counts[:visits]} visits, " \
|
||||
"#{counts[:places]} places, " \
|
||||
"#{counts[:trips]} trips, " \
|
||||
"#{counts[:areas]} areas, " \
|
||||
"#{counts[:imports]} imports, " \
|
||||
"#{counts[:exports]} exports, " \
|
||||
"#{counts[:stats]} stats, " \
|
||||
"#{counts[:notifications]} notifications"
|
||||
|
||||
::Notifications::Create.new(
|
||||
user: user,
|
||||
title: 'Export completed',
|
||||
content: "Your data export has been processed successfully (#{summary}). You can download it from the exports page.",
|
||||
kind: :info
|
||||
).call
|
||||
end
|
||||
end
|
||||
15
app/services/users/export_data/areas.rb
Normal file
15
app/services/users/export_data/areas.rb
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ExportData::Areas
|
||||
def initialize(user)
|
||||
@user = user
|
||||
end
|
||||
|
||||
def call
|
||||
user.areas.as_json(except: %w[user_id id])
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user
|
||||
end
|
||||
78
app/services/users/export_data/exports.rb
Normal file
78
app/services/users/export_data/exports.rb
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'parallel'
|
||||
|
||||
class Users::ExportData::Exports
|
||||
def initialize(user, files_directory)
|
||||
@user = user
|
||||
@files_directory = files_directory
|
||||
end
|
||||
|
||||
def call
|
||||
exports_with_files = user.exports.includes(:file_attachment).to_a
|
||||
|
||||
if exports_with_files.size > 1
|
||||
results = Parallel.map(exports_with_files, in_threads: 2) do |export|
|
||||
process_export(export)
|
||||
end
|
||||
results
|
||||
else
|
||||
exports_with_files.map { |export| process_export(export) }
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user, :files_directory
|
||||
|
||||
def process_export(export)
|
||||
Rails.logger.info "Processing export #{export.name}"
|
||||
|
||||
export_hash = export.as_json(except: %w[user_id id])
|
||||
|
||||
if export.file.attached?
|
||||
add_file_data_to_export(export, export_hash)
|
||||
else
|
||||
add_empty_file_data_to_export(export_hash)
|
||||
end
|
||||
|
||||
Rails.logger.info "Export #{export.name} processed"
|
||||
|
||||
export_hash
|
||||
end
|
||||
|
||||
def add_file_data_to_export(export, export_hash)
|
||||
sanitized_filename = generate_sanitized_export_filename(export)
|
||||
file_path = files_directory.join(sanitized_filename)
|
||||
|
||||
begin
|
||||
download_and_save_export_file(export, file_path)
|
||||
add_file_metadata_to_export(export, export_hash, sanitized_filename)
|
||||
rescue StandardError => e
|
||||
ExceptionReporter.call(e)
|
||||
|
||||
export_hash['file_error'] = "Failed to download: #{e.message}"
|
||||
end
|
||||
end
|
||||
|
||||
def add_empty_file_data_to_export(export_hash)
|
||||
export_hash['file_name'] = nil
|
||||
export_hash['original_filename'] = nil
|
||||
end
|
||||
|
||||
def generate_sanitized_export_filename(export)
|
||||
"export_#{export.id}_#{export.file.blob.filename}".gsub(/[^0-9A-Za-z._-]/, '_')
|
||||
end
|
||||
|
||||
def download_and_save_export_file(export, file_path)
|
||||
file_content = Imports::SecureFileDownloader.new(export.file).download_with_verification
|
||||
File.write(file_path, file_content, mode: 'wb')
|
||||
end
|
||||
|
||||
def add_file_metadata_to_export(export, export_hash, sanitized_filename)
|
||||
export_hash['file_name'] = sanitized_filename
|
||||
export_hash['original_filename'] = export.file.blob.filename.to_s
|
||||
export_hash['file_size'] = export.file.blob.byte_size
|
||||
export_hash['content_type'] = export.file.blob.content_type
|
||||
end
|
||||
end
|
||||
78
app/services/users/export_data/imports.rb
Normal file
78
app/services/users/export_data/imports.rb
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'parallel'
|
||||
|
||||
class Users::ExportData::Imports
|
||||
def initialize(user, files_directory)
|
||||
@user = user
|
||||
@files_directory = files_directory
|
||||
end
|
||||
|
||||
def call
|
||||
imports_with_files = user.imports.includes(:file_attachment).to_a
|
||||
|
||||
if imports_with_files.size > 1
|
||||
results = Parallel.map(imports_with_files, in_threads: 2) do |import|
|
||||
process_import(import)
|
||||
end
|
||||
results
|
||||
else
|
||||
imports_with_files.map { |import| process_import(import) }
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user, :files_directory
|
||||
|
||||
def process_import(import)
|
||||
Rails.logger.info "Processing import #{import.name}"
|
||||
|
||||
import_hash = import.as_json(except: %w[user_id raw_data id])
|
||||
|
||||
if import.file.attached?
|
||||
add_file_data_to_import(import, import_hash)
|
||||
else
|
||||
add_empty_file_data_to_import(import_hash)
|
||||
end
|
||||
|
||||
Rails.logger.info "Import #{import.name} processed"
|
||||
|
||||
import_hash
|
||||
end
|
||||
|
||||
def add_file_data_to_import(import, import_hash)
|
||||
sanitized_filename = generate_sanitized_filename(import)
|
||||
file_path = files_directory.join(sanitized_filename)
|
||||
|
||||
begin
|
||||
download_and_save_import_file(import, file_path)
|
||||
add_file_metadata_to_import(import, import_hash, sanitized_filename)
|
||||
rescue StandardError => e
|
||||
ExceptionReporter.call(e)
|
||||
|
||||
import_hash['file_error'] = "Failed to download: #{e.message}"
|
||||
end
|
||||
end
|
||||
|
||||
def add_empty_file_data_to_import(import_hash)
|
||||
import_hash['file_name'] = nil
|
||||
import_hash['original_filename'] = nil
|
||||
end
|
||||
|
||||
def generate_sanitized_filename(import)
|
||||
"import_#{import.id}_#{import.file.blob.filename}".gsub(/[^0-9A-Za-z._-]/, '_')
|
||||
end
|
||||
|
||||
def download_and_save_import_file(import, file_path)
|
||||
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
|
||||
File.write(file_path, file_content, mode: 'wb')
|
||||
end
|
||||
|
||||
def add_file_metadata_to_import(import, import_hash, sanitized_filename)
|
||||
import_hash['file_name'] = sanitized_filename
|
||||
import_hash['original_filename'] = import.file.blob.filename.to_s
|
||||
import_hash['file_size'] = import.file.blob.byte_size
|
||||
import_hash['content_type'] = import.file.blob.content_type
|
||||
end
|
||||
end
|
||||
17
app/services/users/export_data/notifications.rb
Normal file
17
app/services/users/export_data/notifications.rb
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ExportData::Notifications
|
||||
def initialize(user)
|
||||
@user = user
|
||||
end
|
||||
|
||||
def call
|
||||
# Export all notifications for the user
|
||||
user.notifications
|
||||
.as_json(except: %w[user_id id])
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user
|
||||
end
|
||||
15
app/services/users/export_data/places.rb
Normal file
15
app/services/users/export_data/places.rb
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ExportData::Places
|
||||
def initialize(user)
|
||||
@user = user
|
||||
end
|
||||
|
||||
def call
|
||||
user.places.as_json(except: %w[user_id id])
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user
|
||||
end
|
||||
136
app/services/users/export_data/points.rb
Normal file
136
app/services/users/export_data/points.rb
Normal file
|
|
@ -0,0 +1,136 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ExportData::Points
|
||||
def initialize(user)
|
||||
@user = user
|
||||
end
|
||||
|
||||
def call
|
||||
points_sql = <<-SQL
|
||||
SELECT
|
||||
p.id, p.battery_status, p.battery, p.timestamp, p.altitude, p.velocity, p.accuracy,
|
||||
p.ping, p.tracker_id, p.topic, p.trigger, p.bssid, p.ssid, p.connection,
|
||||
p.vertical_accuracy, p.mode, p.inrids, p.in_regions, p.raw_data,
|
||||
p.city, p.country, p.geodata, p.reverse_geocoded_at, p.course,
|
||||
p.course_accuracy, p.external_track_id, p.created_at, p.updated_at,
|
||||
p.lonlat, p.longitude, p.latitude,
|
||||
-- Extract coordinates from lonlat if individual fields are missing
|
||||
COALESCE(p.longitude, ST_X(p.lonlat::geometry)) as computed_longitude,
|
||||
COALESCE(p.latitude, ST_Y(p.lonlat::geometry)) as computed_latitude,
|
||||
-- Import reference
|
||||
i.name as import_name,
|
||||
i.source as import_source,
|
||||
i.created_at as import_created_at,
|
||||
-- Country info
|
||||
c.name as country_name,
|
||||
c.iso_a2 as country_iso_a2,
|
||||
c.iso_a3 as country_iso_a3,
|
||||
-- Visit reference
|
||||
v.name as visit_name,
|
||||
v.started_at as visit_started_at,
|
||||
v.ended_at as visit_ended_at
|
||||
FROM points p
|
||||
LEFT JOIN imports i ON p.import_id = i.id
|
||||
LEFT JOIN countries c ON p.country_id = c.id
|
||||
LEFT JOIN visits v ON p.visit_id = v.id
|
||||
WHERE p.user_id = $1
|
||||
ORDER BY p.id
|
||||
SQL
|
||||
|
||||
result = ActiveRecord::Base.connection.exec_query(points_sql, 'Points Export', [user.id])
|
||||
|
||||
Rails.logger.info "Processing #{result.count} points for export..."
|
||||
|
||||
result.filter_map do |row|
|
||||
has_lonlat = row['lonlat'].present?
|
||||
has_coordinates = row['computed_longitude'].present? && row['computed_latitude'].present?
|
||||
|
||||
unless has_lonlat || has_coordinates
|
||||
Rails.logger.debug "Skipping point without coordinates: id=#{row['id'] || 'unknown'}"
|
||||
next
|
||||
end
|
||||
|
||||
point_hash = {
|
||||
'battery_status' => row['battery_status'],
|
||||
'battery' => row['battery'],
|
||||
'timestamp' => row['timestamp'],
|
||||
'altitude' => row['altitude'],
|
||||
'velocity' => row['velocity'],
|
||||
'accuracy' => row['accuracy'],
|
||||
'ping' => row['ping'],
|
||||
'tracker_id' => row['tracker_id'],
|
||||
'topic' => row['topic'],
|
||||
'trigger' => row['trigger'],
|
||||
'bssid' => row['bssid'],
|
||||
'ssid' => row['ssid'],
|
||||
'connection' => row['connection'],
|
||||
'vertical_accuracy' => row['vertical_accuracy'],
|
||||
'mode' => row['mode'],
|
||||
'inrids' => row['inrids'] || [],
|
||||
'in_regions' => row['in_regions'] || [],
|
||||
'raw_data' => row['raw_data'],
|
||||
'city' => row['city'],
|
||||
'country' => row['country'],
|
||||
'geodata' => row['geodata'],
|
||||
'reverse_geocoded_at' => row['reverse_geocoded_at'],
|
||||
'course' => row['course'],
|
||||
'course_accuracy' => row['course_accuracy'],
|
||||
'external_track_id' => row['external_track_id'],
|
||||
'created_at' => row['created_at'],
|
||||
'updated_at' => row['updated_at']
|
||||
}
|
||||
|
||||
# Ensure all coordinate fields are populated
|
||||
populate_coordinate_fields(point_hash, row)
|
||||
|
||||
# Add relationship references only if they exist
|
||||
if row['import_name']
|
||||
point_hash['import_reference'] = {
|
||||
'name' => row['import_name'],
|
||||
'source' => row['import_source'],
|
||||
'created_at' => row['import_created_at']
|
||||
}
|
||||
end
|
||||
|
||||
if row['country_name']
|
||||
point_hash['country_info'] = {
|
||||
'name' => row['country_name'],
|
||||
'iso_a2' => row['country_iso_a2'],
|
||||
'iso_a3' => row['country_iso_a3']
|
||||
}
|
||||
end
|
||||
|
||||
if row['visit_name']
|
||||
point_hash['visit_reference'] = {
|
||||
'name' => row['visit_name'],
|
||||
'started_at' => row['visit_started_at'],
|
||||
'ended_at' => row['visit_ended_at']
|
||||
}
|
||||
end
|
||||
|
||||
point_hash
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user
|
||||
|
||||
def populate_coordinate_fields(point_hash, row)
|
||||
longitude = row['computed_longitude']
|
||||
latitude = row['computed_latitude']
|
||||
lonlat = row['lonlat']
|
||||
|
||||
# If lonlat is present, use it and the computed coordinates
|
||||
if lonlat.present?
|
||||
point_hash['lonlat'] = lonlat
|
||||
point_hash['longitude'] = longitude
|
||||
point_hash['latitude'] = latitude
|
||||
elsif longitude.present? && latitude.present?
|
||||
# If lonlat is missing but we have coordinates, reconstruct lonlat
|
||||
point_hash['longitude'] = longitude
|
||||
point_hash['latitude'] = latitude
|
||||
point_hash['lonlat'] = "POINT(#{longitude} #{latitude})"
|
||||
end
|
||||
end
|
||||
end
|
||||
15
app/services/users/export_data/stats.rb
Normal file
15
app/services/users/export_data/stats.rb
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ExportData::Stats
|
||||
def initialize(user)
|
||||
@user = user
|
||||
end
|
||||
|
||||
def call
|
||||
user.stats.as_json(except: %w[user_id id])
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user
|
||||
end
|
||||
15
app/services/users/export_data/trips.rb
Normal file
15
app/services/users/export_data/trips.rb
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ExportData::Trips
|
||||
def initialize(user)
|
||||
@user = user
|
||||
end
|
||||
|
||||
def call
|
||||
user.trips.as_json(except: %w[user_id id])
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user
|
||||
end
|
||||
30
app/services/users/export_data/visits.rb
Normal file
30
app/services/users/export_data/visits.rb
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ExportData::Visits
|
||||
def initialize(user)
|
||||
@user = user
|
||||
end
|
||||
|
||||
def call
|
||||
user.visits.includes(:place).map do |visit|
|
||||
visit_hash = visit.as_json(except: %w[user_id place_id id])
|
||||
|
||||
if visit.place
|
||||
visit_hash['place_reference'] = {
|
||||
'name' => visit.place.name,
|
||||
'latitude' => visit.place.lat.to_s,
|
||||
'longitude' => visit.place.lon.to_s,
|
||||
'source' => visit.place.source
|
||||
}
|
||||
else
|
||||
visit_hash['place_reference'] = nil
|
||||
end
|
||||
|
||||
visit_hash
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user
|
||||
end
|
||||
261
app/services/users/import_data.rb
Normal file
261
app/services/users/import_data.rb
Normal file
|
|
@ -0,0 +1,261 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'zip'
|
||||
|
||||
# Users::ImportData - Imports complete user data from exported archive
|
||||
#
|
||||
# This service processes a ZIP archive created by Users::ExportData and recreates
|
||||
# the user's data with preserved relationships. The import follows a specific order
|
||||
# to handle foreign key dependencies:
|
||||
#
|
||||
# 1. Settings (applied directly to user)
|
||||
# 2. Areas (standalone user data)
|
||||
# 3. Places (referenced by visits)
|
||||
# 4. Imports (including file attachments)
|
||||
# 5. Exports (including file attachments)
|
||||
# 6. Trips (standalone user data)
|
||||
# 7. Stats (standalone user data)
|
||||
# 8. Notifications (standalone user data)
|
||||
# 9. Visits (references places)
|
||||
# 10. Points (references imports, countries, visits)
|
||||
#
|
||||
# Files are restored to their original locations and properly attached to records.
|
||||
|
||||
class Users::ImportData
|
||||
def initialize(user, archive_path)
|
||||
@user = user
|
||||
@archive_path = archive_path
|
||||
@import_stats = {
|
||||
settings_updated: false,
|
||||
areas_created: 0,
|
||||
places_created: 0,
|
||||
imports_created: 0,
|
||||
exports_created: 0,
|
||||
trips_created: 0,
|
||||
stats_created: 0,
|
||||
notifications_created: 0,
|
||||
visits_created: 0,
|
||||
points_created: 0,
|
||||
files_restored: 0
|
||||
}
|
||||
end
|
||||
|
||||
def import
|
||||
@import_directory = Rails.root.join('tmp', "import_#{user.email.gsub(/[^0-9A-Za-z._-]/, '_')}_#{Time.current.to_i}")
|
||||
FileUtils.mkdir_p(@import_directory)
|
||||
|
||||
ActiveRecord::Base.transaction do
|
||||
extract_archive
|
||||
data = load_json_data
|
||||
|
||||
import_in_correct_order(data)
|
||||
|
||||
create_success_notification
|
||||
|
||||
@import_stats
|
||||
end
|
||||
rescue StandardError => e
|
||||
ExceptionReporter.call(e, 'Data import failed')
|
||||
create_failure_notification(e)
|
||||
raise e
|
||||
ensure
|
||||
cleanup_temporary_files(@import_directory) if @import_directory&.exist?
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user, :archive_path, :import_stats
|
||||
|
||||
def extract_archive
|
||||
Rails.logger.info "Extracting archive: #{archive_path}"
|
||||
|
||||
Zip::File.open(archive_path) do |zip_file|
|
||||
zip_file.each do |entry|
|
||||
extraction_path = @import_directory.join(entry.name)
|
||||
|
||||
FileUtils.mkdir_p(File.dirname(extraction_path))
|
||||
|
||||
entry.extract(extraction_path)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def load_json_data
|
||||
json_path = @import_directory.join('data.json')
|
||||
|
||||
unless File.exist?(json_path)
|
||||
raise StandardError, "Data file not found in archive: data.json"
|
||||
end
|
||||
|
||||
JSON.parse(File.read(json_path))
|
||||
rescue JSON::ParserError => e
|
||||
raise StandardError, "Invalid JSON format in data file: #{e.message}"
|
||||
end
|
||||
|
||||
def import_in_correct_order(data)
|
||||
Rails.logger.info "Starting data import for user: #{user.email}"
|
||||
|
||||
if data['counts']
|
||||
Rails.logger.info "Expected entity counts from export: #{data['counts']}"
|
||||
end
|
||||
|
||||
Rails.logger.debug "Available data keys: #{data.keys.inspect}"
|
||||
|
||||
import_settings(data['settings']) if data['settings']
|
||||
import_areas(data['areas']) if data['areas']
|
||||
|
||||
# Import places first to ensure they're available for visits
|
||||
places_imported = import_places(data['places']) if data['places']
|
||||
Rails.logger.info "Places import phase completed: #{places_imported} places imported"
|
||||
|
||||
import_imports(data['imports']) if data['imports']
|
||||
import_exports(data['exports']) if data['exports']
|
||||
import_trips(data['trips']) if data['trips']
|
||||
import_stats(data['stats']) if data['stats']
|
||||
import_notifications(data['notifications']) if data['notifications']
|
||||
|
||||
# Import visits after places to ensure proper place resolution
|
||||
visits_imported = import_visits(data['visits']) if data['visits']
|
||||
Rails.logger.info "Visits import phase completed: #{visits_imported} visits imported"
|
||||
|
||||
import_points(data['points']) if data['points']
|
||||
|
||||
# Final validation check
|
||||
if data['counts']
|
||||
validate_import_completeness(data['counts'])
|
||||
end
|
||||
|
||||
Rails.logger.info "Data import completed. Stats: #{@import_stats}"
|
||||
end
|
||||
|
||||
def import_settings(settings_data)
|
||||
Rails.logger.debug "Importing settings: #{settings_data.inspect}"
|
||||
Users::ImportData::Settings.new(user, settings_data).call
|
||||
@import_stats[:settings_updated] = true
|
||||
end
|
||||
|
||||
def import_areas(areas_data)
|
||||
Rails.logger.debug "Importing #{areas_data&.size || 0} areas"
|
||||
areas_created = Users::ImportData::Areas.new(user, areas_data).call
|
||||
@import_stats[:areas_created] = areas_created
|
||||
end
|
||||
|
||||
def import_places(places_data)
|
||||
Rails.logger.debug "Importing #{places_data&.size || 0} places"
|
||||
places_created = Users::ImportData::Places.new(user, places_data).call
|
||||
@import_stats[:places_created] = places_created
|
||||
places_created
|
||||
end
|
||||
|
||||
def import_imports(imports_data)
|
||||
Rails.logger.debug "Importing #{imports_data&.size || 0} imports"
|
||||
imports_created, files_restored = Users::ImportData::Imports.new(user, imports_data, @import_directory.join('files')).call
|
||||
@import_stats[:imports_created] = imports_created
|
||||
@import_stats[:files_restored] += files_restored
|
||||
end
|
||||
|
||||
def import_exports(exports_data)
|
||||
Rails.logger.debug "Importing #{exports_data&.size || 0} exports"
|
||||
exports_created, files_restored = Users::ImportData::Exports.new(user, exports_data, @import_directory.join('files')).call
|
||||
@import_stats[:exports_created] = exports_created
|
||||
@import_stats[:files_restored] += files_restored
|
||||
end
|
||||
|
||||
def import_trips(trips_data)
|
||||
Rails.logger.debug "Importing #{trips_data&.size || 0} trips"
|
||||
trips_created = Users::ImportData::Trips.new(user, trips_data).call
|
||||
@import_stats[:trips_created] = trips_created
|
||||
end
|
||||
|
||||
def import_stats(stats_data)
|
||||
Rails.logger.debug "Importing #{stats_data&.size || 0} stats"
|
||||
stats_created = Users::ImportData::Stats.new(user, stats_data).call
|
||||
@import_stats[:stats_created] = stats_created
|
||||
end
|
||||
|
||||
def import_notifications(notifications_data)
|
||||
Rails.logger.debug "Importing #{notifications_data&.size || 0} notifications"
|
||||
notifications_created = Users::ImportData::Notifications.new(user, notifications_data).call
|
||||
@import_stats[:notifications_created] = notifications_created
|
||||
end
|
||||
|
||||
def import_visits(visits_data)
|
||||
Rails.logger.debug "Importing #{visits_data&.size || 0} visits"
|
||||
visits_created = Users::ImportData::Visits.new(user, visits_data).call
|
||||
@import_stats[:visits_created] = visits_created
|
||||
visits_created
|
||||
end
|
||||
|
||||
def import_points(points_data)
|
||||
Rails.logger.info "About to import #{points_data&.size || 0} points"
|
||||
|
||||
begin
|
||||
points_created = Users::ImportData::Points.new(user, points_data).call
|
||||
|
||||
@import_stats[:points_created] = points_created
|
||||
rescue StandardError => e
|
||||
ExceptionReporter.call(e, 'Points import failed')
|
||||
@import_stats[:points_created] = 0
|
||||
end
|
||||
end
|
||||
|
||||
def cleanup_temporary_files(import_directory)
|
||||
return unless File.directory?(import_directory)
|
||||
|
||||
Rails.logger.info "Cleaning up temporary import directory: #{import_directory}"
|
||||
FileUtils.rm_rf(import_directory)
|
||||
rescue StandardError => e
|
||||
ExceptionReporter.call(e, 'Failed to cleanup temporary files')
|
||||
end
|
||||
|
||||
def create_success_notification
|
||||
summary = "#{@import_stats[:points_created]} points, " \
|
||||
"#{@import_stats[:visits_created]} visits, " \
|
||||
"#{@import_stats[:places_created]} places, " \
|
||||
"#{@import_stats[:trips_created]} trips, " \
|
||||
"#{@import_stats[:areas_created]} areas, " \
|
||||
"#{@import_stats[:imports_created]} imports, " \
|
||||
"#{@import_stats[:exports_created]} exports, " \
|
||||
"#{@import_stats[:stats_created]} stats, " \
|
||||
"#{@import_stats[:files_restored]} files restored, " \
|
||||
"#{@import_stats[:notifications_created]} notifications"
|
||||
|
||||
::Notifications::Create.new(
|
||||
user: user,
|
||||
title: 'Data import completed',
|
||||
content: "Your data has been imported successfully (#{summary}).",
|
||||
kind: :info
|
||||
).call
|
||||
end
|
||||
|
||||
def create_failure_notification(error)
|
||||
::Notifications::Create.new(
|
||||
user: user,
|
||||
title: 'Data import failed',
|
||||
content: "Your data import failed with error: #{error.message}. Please check the archive format and try again.",
|
||||
kind: :error
|
||||
).call
|
||||
end
|
||||
|
||||
def validate_import_completeness(expected_counts)
|
||||
Rails.logger.info "Validating import completeness..."
|
||||
|
||||
discrepancies = []
|
||||
|
||||
expected_counts.each do |entity, expected_count|
|
||||
actual_count = @import_stats[:"#{entity}_created"] || 0
|
||||
|
||||
if actual_count < expected_count
|
||||
discrepancy = "#{entity}: expected #{expected_count}, got #{actual_count} (#{expected_count - actual_count} missing)"
|
||||
discrepancies << discrepancy
|
||||
Rails.logger.warn "Import discrepancy - #{discrepancy}"
|
||||
end
|
||||
end
|
||||
|
||||
if discrepancies.any?
|
||||
Rails.logger.warn "Import completed with discrepancies: #{discrepancies.join(', ')}"
|
||||
else
|
||||
Rails.logger.info "Import validation successful - all entities imported correctly"
|
||||
end
|
||||
end
|
||||
end
|
||||
137
app/services/users/import_data/areas.rb
Normal file
137
app/services/users/import_data/areas.rb
Normal file
|
|
@ -0,0 +1,137 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ImportData::Areas
|
||||
BATCH_SIZE = 1000
|
||||
|
||||
def initialize(user, areas_data)
|
||||
@user = user
|
||||
@areas_data = areas_data
|
||||
end
|
||||
|
||||
def call
|
||||
return 0 unless areas_data.is_a?(Array)
|
||||
|
||||
Rails.logger.info "Importing #{areas_data.size} areas for user: #{user.email}"
|
||||
|
||||
valid_areas = filter_and_prepare_areas
|
||||
|
||||
if valid_areas.empty?
|
||||
Rails.logger.info "Areas import completed. Created: 0"
|
||||
return 0
|
||||
end
|
||||
|
||||
deduplicated_areas = filter_existing_areas(valid_areas)
|
||||
|
||||
if deduplicated_areas.size < valid_areas.size
|
||||
Rails.logger.debug "Skipped #{valid_areas.size - deduplicated_areas.size} duplicate areas"
|
||||
end
|
||||
|
||||
total_created = bulk_import_areas(deduplicated_areas)
|
||||
|
||||
Rails.logger.info "Areas import completed. Created: #{total_created}"
|
||||
total_created
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user, :areas_data
|
||||
|
||||
def filter_and_prepare_areas
|
||||
valid_areas = []
|
||||
skipped_count = 0
|
||||
|
||||
areas_data.each do |area_data|
|
||||
next unless area_data.is_a?(Hash)
|
||||
|
||||
unless valid_area_data?(area_data)
|
||||
skipped_count += 1
|
||||
|
||||
next
|
||||
end
|
||||
|
||||
prepared_attributes = prepare_area_attributes(area_data)
|
||||
valid_areas << prepared_attributes if prepared_attributes
|
||||
end
|
||||
|
||||
if skipped_count > 0
|
||||
Rails.logger.warn "Skipped #{skipped_count} areas with invalid or missing required data"
|
||||
end
|
||||
|
||||
valid_areas
|
||||
end
|
||||
|
||||
def prepare_area_attributes(area_data)
|
||||
attributes = area_data.except('created_at', 'updated_at')
|
||||
|
||||
attributes['user_id'] = user.id
|
||||
attributes['created_at'] = Time.current
|
||||
attributes['updated_at'] = Time.current
|
||||
attributes['radius'] ||= 100
|
||||
|
||||
attributes.symbolize_keys
|
||||
rescue StandardError => e
|
||||
Rails.logger.error "Failed to prepare area attributes: #{e.message}"
|
||||
Rails.logger.error "Area data: #{area_data.inspect}"
|
||||
nil
|
||||
end
|
||||
|
||||
def filter_existing_areas(areas)
|
||||
return areas if areas.empty?
|
||||
|
||||
existing_areas_lookup = {}
|
||||
user.areas.select(:name, :latitude, :longitude).each do |area|
|
||||
key = [area.name, area.latitude.to_f, area.longitude.to_f]
|
||||
existing_areas_lookup[key] = true
|
||||
end
|
||||
|
||||
filtered_areas = areas.reject do |area|
|
||||
key = [area[:name], area[:latitude].to_f, area[:longitude].to_f]
|
||||
if existing_areas_lookup[key]
|
||||
Rails.logger.debug "Area already exists: #{area[:name]}"
|
||||
true
|
||||
else
|
||||
false
|
||||
end
|
||||
end
|
||||
|
||||
filtered_areas
|
||||
end
|
||||
|
||||
def bulk_import_areas(areas)
|
||||
total_created = 0
|
||||
|
||||
areas.each_slice(BATCH_SIZE) do |batch|
|
||||
begin
|
||||
result = Area.upsert_all(
|
||||
batch,
|
||||
returning: %w[id],
|
||||
on_duplicate: :skip
|
||||
)
|
||||
|
||||
batch_created = result.count
|
||||
total_created += batch_created
|
||||
|
||||
Rails.logger.debug "Processed batch of #{batch.size} areas, created #{batch_created}, total created: #{total_created}"
|
||||
|
||||
rescue StandardError => e
|
||||
Rails.logger.error "Failed to process area batch: #{e.message}"
|
||||
Rails.logger.error "Batch size: #{batch.size}"
|
||||
Rails.logger.error "Backtrace: #{e.backtrace.first(3).join('\n')}"
|
||||
end
|
||||
end
|
||||
|
||||
total_created
|
||||
end
|
||||
|
||||
def valid_area_data?(area_data)
|
||||
return false unless area_data.is_a?(Hash)
|
||||
return false unless area_data['name'].present?
|
||||
return false unless area_data['latitude'].present?
|
||||
return false unless area_data['longitude'].present?
|
||||
|
||||
true
|
||||
rescue StandardError => e
|
||||
Rails.logger.debug "Area validation failed: #{e.message} for data: #{area_data.inspect}"
|
||||
false
|
||||
end
|
||||
end
|
||||
88
app/services/users/import_data/exports.rb
Normal file
88
app/services/users/import_data/exports.rb
Normal file
|
|
@ -0,0 +1,88 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ImportData::Exports
|
||||
def initialize(user, exports_data, files_directory)
|
||||
@user = user
|
||||
@exports_data = exports_data
|
||||
@files_directory = files_directory
|
||||
end
|
||||
|
||||
def call
|
||||
return [0, 0] unless exports_data.is_a?(Array)
|
||||
|
||||
Rails.logger.info "Importing #{exports_data.size} exports for user: #{user.email}"
|
||||
|
||||
exports_created = 0
|
||||
files_restored = 0
|
||||
|
||||
exports_data.each do |export_data|
|
||||
next unless export_data.is_a?(Hash)
|
||||
|
||||
existing_export = user.exports.find_by(
|
||||
name: export_data['name'],
|
||||
created_at: export_data['created_at']
|
||||
)
|
||||
|
||||
if existing_export
|
||||
Rails.logger.debug "Export already exists: #{export_data['name']}"
|
||||
next
|
||||
end
|
||||
|
||||
export_record = create_export_record(export_data)
|
||||
exports_created += 1
|
||||
|
||||
if export_data['file_name'] && restore_export_file(export_record, export_data)
|
||||
files_restored += 1
|
||||
end
|
||||
|
||||
Rails.logger.debug "Created export: #{export_record.name}"
|
||||
end
|
||||
|
||||
Rails.logger.info "Exports import completed. Created: #{exports_created}, Files: #{files_restored}"
|
||||
[exports_created, files_restored]
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user, :exports_data, :files_directory
|
||||
|
||||
def create_export_record(export_data)
|
||||
export_attributes = prepare_export_attributes(export_data)
|
||||
user.exports.create!(export_attributes)
|
||||
end
|
||||
|
||||
def prepare_export_attributes(export_data)
|
||||
export_data.except(
|
||||
'file_name',
|
||||
'original_filename',
|
||||
'file_size',
|
||||
'content_type',
|
||||
'file_error'
|
||||
).merge(user: user)
|
||||
end
|
||||
|
||||
def restore_export_file(export_record, export_data)
|
||||
file_path = files_directory.join(export_data['file_name'])
|
||||
|
||||
unless File.exist?(file_path)
|
||||
Rails.logger.warn "Export file not found: #{export_data['file_name']}"
|
||||
return false
|
||||
end
|
||||
|
||||
begin
|
||||
export_record.file.attach(
|
||||
io: File.open(file_path),
|
||||
filename: export_data['original_filename'] || export_data['file_name'],
|
||||
content_type: export_data['content_type'] || 'application/octet-stream'
|
||||
)
|
||||
|
||||
Rails.logger.debug "Restored file for export: #{export_record.name}"
|
||||
|
||||
true
|
||||
rescue StandardError => e
|
||||
ExceptionReporter.call(e, "Export file restoration failed")
|
||||
|
||||
false
|
||||
end
|
||||
end
|
||||
end
|
||||
100
app/services/users/import_data/imports.rb
Normal file
100
app/services/users/import_data/imports.rb
Normal file
|
|
@ -0,0 +1,100 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ImportData::Imports
|
||||
def initialize(user, imports_data, files_directory)
|
||||
@user = user
|
||||
@imports_data = imports_data
|
||||
@files_directory = files_directory
|
||||
end
|
||||
|
||||
def call
|
||||
return [0, 0] unless imports_data.is_a?(Array)
|
||||
|
||||
Rails.logger.info "Importing #{imports_data.size} imports for user: #{user.email}"
|
||||
|
||||
imports_created = 0
|
||||
files_restored = 0
|
||||
|
||||
imports_data.each do |import_data|
|
||||
next unless import_data.is_a?(Hash)
|
||||
|
||||
existing_import = user.imports.find_by(
|
||||
name: import_data['name'],
|
||||
source: import_data['source'],
|
||||
created_at: import_data['created_at']
|
||||
)
|
||||
|
||||
if existing_import
|
||||
Rails.logger.debug "Import already exists: #{import_data['name']}"
|
||||
next
|
||||
end
|
||||
|
||||
import_record = create_import_record(import_data)
|
||||
next unless import_record # Skip if creation failed
|
||||
|
||||
imports_created += 1
|
||||
|
||||
if import_data['file_name'] && restore_import_file(import_record, import_data)
|
||||
files_restored += 1
|
||||
end
|
||||
end
|
||||
|
||||
Rails.logger.info "Imports import completed. Created: #{imports_created}, Files restored: #{files_restored}"
|
||||
[imports_created, files_restored]
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user, :imports_data, :files_directory
|
||||
|
||||
def create_import_record(import_data)
|
||||
import_attributes = prepare_import_attributes(import_data)
|
||||
|
||||
begin
|
||||
import_record = user.imports.build(import_attributes)
|
||||
import_record.skip_background_processing = true
|
||||
import_record.save!
|
||||
Rails.logger.debug "Created import: #{import_record.name}"
|
||||
import_record
|
||||
rescue ActiveRecord::RecordInvalid => e
|
||||
Rails.logger.error "Failed to create import: #{e.message}"
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
def prepare_import_attributes(import_data)
|
||||
import_data.except(
|
||||
'file_name',
|
||||
'original_filename',
|
||||
'file_size',
|
||||
'content_type',
|
||||
'file_error',
|
||||
'updated_at'
|
||||
).merge(user: user)
|
||||
end
|
||||
|
||||
def restore_import_file(import_record, import_data)
|
||||
file_path = files_directory.join(import_data['file_name'])
|
||||
|
||||
unless File.exist?(file_path)
|
||||
Rails.logger.warn "Import file not found: #{import_data['file_name']}"
|
||||
return false
|
||||
end
|
||||
|
||||
begin
|
||||
import_record.file.attach(
|
||||
io: File.open(file_path),
|
||||
filename: import_data['original_filename'] || import_data['file_name'],
|
||||
content_type: import_data['content_type'] || 'application/octet-stream'
|
||||
)
|
||||
|
||||
Rails.logger.debug "Restored file for import: #{import_record.name}"
|
||||
|
||||
true
|
||||
rescue StandardError => e
|
||||
ExceptionReporter.call(e, 'Import file restoration failed')
|
||||
|
||||
false
|
||||
end
|
||||
end
|
||||
end
|
||||
167
app/services/users/import_data/notifications.rb
Normal file
167
app/services/users/import_data/notifications.rb
Normal file
|
|
@ -0,0 +1,167 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ImportData::Notifications
|
||||
BATCH_SIZE = 1000
|
||||
|
||||
def initialize(user, notifications_data)
|
||||
@user = user
|
||||
@notifications_data = notifications_data
|
||||
end
|
||||
|
||||
def call
|
||||
return 0 unless notifications_data.is_a?(Array)
|
||||
|
||||
Rails.logger.info "Importing #{notifications_data.size} notifications for user: #{user.email}"
|
||||
|
||||
valid_notifications = filter_and_prepare_notifications
|
||||
|
||||
if valid_notifications.empty?
|
||||
Rails.logger.info 'Notifications import completed. Created: 0'
|
||||
return 0
|
||||
end
|
||||
|
||||
deduplicated_notifications = filter_existing_notifications(valid_notifications)
|
||||
|
||||
if deduplicated_notifications.size < valid_notifications.size
|
||||
Rails.logger.debug "Skipped #{valid_notifications.size - deduplicated_notifications.size} duplicate notifications"
|
||||
end
|
||||
|
||||
total_created = bulk_import_notifications(deduplicated_notifications)
|
||||
|
||||
Rails.logger.info "Notifications import completed. Created: #{total_created}"
|
||||
total_created
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user, :notifications_data
|
||||
|
||||
def filter_and_prepare_notifications
|
||||
valid_notifications = []
|
||||
skipped_count = 0
|
||||
|
||||
notifications_data.each do |notification_data|
|
||||
next unless notification_data.is_a?(Hash)
|
||||
|
||||
unless valid_notification_data?(notification_data)
|
||||
skipped_count += 1
|
||||
next
|
||||
end
|
||||
|
||||
prepared_attributes = prepare_notification_attributes(notification_data)
|
||||
valid_notifications << prepared_attributes if prepared_attributes
|
||||
end
|
||||
|
||||
if skipped_count > 0
|
||||
Rails.logger.warn "Skipped #{skipped_count} notifications with invalid or missing required data"
|
||||
end
|
||||
|
||||
valid_notifications
|
||||
end
|
||||
|
||||
def prepare_notification_attributes(notification_data)
|
||||
attributes = notification_data.except('updated_at')
|
||||
|
||||
attributes['user_id'] = user.id
|
||||
|
||||
unless attributes['created_at'].present?
|
||||
attributes['created_at'] = Time.current
|
||||
end
|
||||
|
||||
attributes['updated_at'] = Time.current
|
||||
|
||||
attributes.symbolize_keys
|
||||
rescue StandardError => e
|
||||
Rails.logger.error "Failed to prepare notification attributes: #{e.message}"
|
||||
Rails.logger.error "Notification data: #{notification_data.inspect}"
|
||||
nil
|
||||
end
|
||||
|
||||
def filter_existing_notifications(notifications)
|
||||
return notifications if notifications.empty?
|
||||
|
||||
existing_notifications_lookup = {}
|
||||
user.notifications.select(:title, :content, :created_at, :kind).each do |notification|
|
||||
primary_key = [notification.title.strip, notification.content.strip]
|
||||
|
||||
exact_key = [notification.title.strip, notification.content.strip, normalize_timestamp(notification.created_at)]
|
||||
|
||||
existing_notifications_lookup[primary_key] = true
|
||||
existing_notifications_lookup[exact_key] = true
|
||||
end
|
||||
|
||||
filtered_notifications = notifications.reject do |notification|
|
||||
title = notification[:title]&.strip
|
||||
content = notification[:content]&.strip
|
||||
|
||||
primary_key = [title, content]
|
||||
exact_key = [title, content, normalize_timestamp(notification[:created_at])]
|
||||
|
||||
if existing_notifications_lookup[primary_key] || existing_notifications_lookup[exact_key]
|
||||
Rails.logger.debug "Notification already exists: #{notification[:title]}"
|
||||
true
|
||||
else
|
||||
false
|
||||
end
|
||||
end
|
||||
|
||||
filtered_notifications
|
||||
end
|
||||
|
||||
def normalize_timestamp(timestamp)
|
||||
case timestamp
|
||||
when String then Time.parse(timestamp).to_i
|
||||
when Time, DateTime then timestamp.to_i
|
||||
else
|
||||
timestamp.to_s
|
||||
end
|
||||
rescue StandardError => e
|
||||
Rails.logger.debug "Failed to normalize timestamp #{timestamp}: #{e.message}"
|
||||
timestamp.to_s
|
||||
end
|
||||
|
||||
def bulk_import_notifications(notifications)
|
||||
total_created = 0
|
||||
|
||||
notifications.each_slice(BATCH_SIZE) do |batch|
|
||||
begin
|
||||
result = Notification.upsert_all(
|
||||
batch,
|
||||
returning: %w[id],
|
||||
on_duplicate: :skip
|
||||
)
|
||||
|
||||
batch_created = result.count
|
||||
total_created += batch_created
|
||||
|
||||
Rails.logger.debug "Processed batch of #{batch.size} notifications, created #{batch_created}, total created: #{total_created}"
|
||||
|
||||
rescue StandardError => e
|
||||
Rails.logger.error "Failed to process notification batch: #{e.message}"
|
||||
Rails.logger.error "Batch size: #{batch.size}"
|
||||
Rails.logger.error "Backtrace: #{e.backtrace.first(3).join('\n')}"
|
||||
end
|
||||
end
|
||||
|
||||
total_created
|
||||
end
|
||||
|
||||
def valid_notification_data?(notification_data)
|
||||
return false unless notification_data.is_a?(Hash)
|
||||
|
||||
unless notification_data['title'].present?
|
||||
Rails.logger.error "Failed to create notification: Validation failed: Title can't be blank"
|
||||
return false
|
||||
end
|
||||
|
||||
unless notification_data['content'].present?
|
||||
Rails.logger.error "Failed to create notification: Validation failed: Content can't be blank"
|
||||
return false
|
||||
end
|
||||
|
||||
true
|
||||
rescue StandardError => e
|
||||
Rails.logger.debug "Notification validation failed: #{e.message} for data: #{notification_data.inspect}"
|
||||
false
|
||||
end
|
||||
end
|
||||
80
app/services/users/import_data/places.rb
Normal file
80
app/services/users/import_data/places.rb
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ImportData::Places
|
||||
def initialize(user, places_data)
|
||||
@user = user
|
||||
@places_data = places_data
|
||||
end
|
||||
|
||||
def call
|
||||
return 0 unless places_data.is_a?(Array)
|
||||
|
||||
Rails.logger.info "Importing #{places_data.size} places for user: #{user.email}"
|
||||
|
||||
places_created = 0
|
||||
|
||||
places_data.each do |place_data|
|
||||
next unless place_data.is_a?(Hash)
|
||||
|
||||
place = find_or_create_place_for_import(place_data)
|
||||
places_created += 1 if place&.respond_to?(:previously_new_record?) && place.previously_new_record?
|
||||
end
|
||||
|
||||
Rails.logger.info "Places import completed. Created: #{places_created}"
|
||||
places_created
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user, :places_data
|
||||
|
||||
def find_or_create_place_for_import(place_data)
|
||||
name = place_data['name']
|
||||
latitude = place_data['latitude']&.to_f
|
||||
longitude = place_data['longitude']&.to_f
|
||||
|
||||
unless name.present? && latitude.present? && longitude.present?
|
||||
Rails.logger.debug "Skipping place with missing required data: #{place_data.inspect}"
|
||||
return nil
|
||||
end
|
||||
|
||||
Rails.logger.debug "Processing place for import: #{name} at (#{latitude}, #{longitude})"
|
||||
|
||||
# During import, we prioritize data integrity for the importing user
|
||||
# First try exact match (name + coordinates)
|
||||
existing_place = Place.where(
|
||||
name: name,
|
||||
latitude: latitude,
|
||||
longitude: longitude
|
||||
).first
|
||||
|
||||
if existing_place
|
||||
Rails.logger.debug "Found exact place match: #{name} at (#{latitude}, #{longitude}) -> existing place ID #{existing_place.id}"
|
||||
existing_place.define_singleton_method(:previously_new_record?) { false }
|
||||
return existing_place
|
||||
end
|
||||
|
||||
Rails.logger.debug "No exact match found for #{name} at (#{latitude}, #{longitude}). Creating new place."
|
||||
|
||||
# If no exact match, create a new place to ensure data integrity
|
||||
# This prevents data loss during import even if similar places exist
|
||||
place_attributes = place_data.except('created_at', 'updated_at', 'latitude', 'longitude')
|
||||
place_attributes['lonlat'] = "POINT(#{longitude} #{latitude})"
|
||||
place_attributes['latitude'] = latitude
|
||||
place_attributes['longitude'] = longitude
|
||||
place_attributes.delete('user')
|
||||
|
||||
Rails.logger.debug "Creating place with attributes: #{place_attributes.inspect}"
|
||||
|
||||
begin
|
||||
place = Place.create!(place_attributes)
|
||||
place.define_singleton_method(:previously_new_record?) { true }
|
||||
Rails.logger.debug "Created place during import: #{place.name} (ID: #{place.id})"
|
||||
|
||||
place
|
||||
rescue ActiveRecord::RecordInvalid => e
|
||||
Rails.logger.error "Failed to create place: #{place_data.inspect}, error: #{e.message}"
|
||||
nil
|
||||
end
|
||||
end
|
||||
end
|
||||
281
app/services/users/import_data/points.rb
Normal file
281
app/services/users/import_data/points.rb
Normal file
|
|
@ -0,0 +1,281 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ImportData::Points
|
||||
BATCH_SIZE = 1000
|
||||
|
||||
def initialize(user, points_data)
|
||||
@user = user
|
||||
@points_data = points_data
|
||||
end
|
||||
|
||||
def call
|
||||
return 0 unless points_data.is_a?(Array)
|
||||
|
||||
Rails.logger.info "Importing #{points_data.size} points for user: #{user.email}"
|
||||
Rails.logger.debug "First point sample: #{points_data.first.inspect}"
|
||||
|
||||
preload_reference_data
|
||||
|
||||
valid_points = filter_and_prepare_points
|
||||
|
||||
if valid_points.empty?
|
||||
Rails.logger.warn "No valid points to import after filtering"
|
||||
Rails.logger.debug "Original points_data size: #{points_data.size}"
|
||||
return 0
|
||||
end
|
||||
|
||||
deduplicated_points = deduplicate_points(valid_points)
|
||||
|
||||
Rails.logger.info "Prepared #{deduplicated_points.size} unique valid points (#{points_data.size - deduplicated_points.size} duplicates/invalid skipped)"
|
||||
|
||||
total_created = bulk_import_points(deduplicated_points)
|
||||
|
||||
Rails.logger.info "Points import completed. Created: #{total_created}"
|
||||
total_created
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user, :points_data, :imports_lookup, :countries_lookup, :visits_lookup
|
||||
|
||||
def preload_reference_data
|
||||
@imports_lookup = {}
|
||||
user.imports.each do |import|
|
||||
string_key = [import.name, import.source, import.created_at.utc.iso8601]
|
||||
integer_key = [import.name, Import.sources[import.source], import.created_at.utc.iso8601]
|
||||
|
||||
@imports_lookup[string_key] = import
|
||||
@imports_lookup[integer_key] = import
|
||||
end
|
||||
Rails.logger.debug "Loaded #{user.imports.size} imports with #{@imports_lookup.size} lookup keys"
|
||||
|
||||
@countries_lookup = {}
|
||||
Country.all.each do |country|
|
||||
@countries_lookup[[country.name, country.iso_a2, country.iso_a3]] = country
|
||||
@countries_lookup[country.name] = country
|
||||
end
|
||||
Rails.logger.debug "Loaded #{Country.count} countries for lookup"
|
||||
|
||||
@visits_lookup = user.visits.index_by { |visit|
|
||||
[visit.name, visit.started_at.utc.iso8601, visit.ended_at.utc.iso8601]
|
||||
}
|
||||
Rails.logger.debug "Loaded #{@visits_lookup.size} visits for lookup"
|
||||
end
|
||||
|
||||
def filter_and_prepare_points
|
||||
valid_points = []
|
||||
skipped_count = 0
|
||||
|
||||
points_data.each_with_index do |point_data, index|
|
||||
next unless point_data.is_a?(Hash)
|
||||
|
||||
unless valid_point_data?(point_data)
|
||||
skipped_count += 1
|
||||
Rails.logger.debug "Skipped point #{index}: invalid data - #{point_data.slice('timestamp', 'longitude', 'latitude', 'lonlat')}"
|
||||
next
|
||||
end
|
||||
|
||||
prepared_attributes = prepare_point_attributes(point_data)
|
||||
unless prepared_attributes
|
||||
skipped_count += 1
|
||||
Rails.logger.debug "Skipped point #{index}: failed to prepare attributes"
|
||||
next
|
||||
end
|
||||
|
||||
valid_points << prepared_attributes
|
||||
end
|
||||
|
||||
if skipped_count > 0
|
||||
Rails.logger.warn "Skipped #{skipped_count} points with invalid or missing required data"
|
||||
end
|
||||
|
||||
Rails.logger.debug "Filtered #{valid_points.size} valid points from #{points_data.size} total"
|
||||
valid_points
|
||||
end
|
||||
|
||||
def prepare_point_attributes(point_data)
|
||||
attributes = point_data.except(
|
||||
'created_at',
|
||||
'updated_at',
|
||||
'import_reference',
|
||||
'country_info',
|
||||
'visit_reference',
|
||||
'country'
|
||||
)
|
||||
|
||||
ensure_lonlat_field(attributes, point_data)
|
||||
|
||||
attributes.delete('longitude')
|
||||
attributes.delete('latitude')
|
||||
|
||||
attributes['user_id'] = user.id
|
||||
attributes['created_at'] = Time.current
|
||||
attributes['updated_at'] = Time.current
|
||||
|
||||
resolve_import_reference(attributes, point_data['import_reference'])
|
||||
resolve_country_reference(attributes, point_data['country_info'])
|
||||
resolve_visit_reference(attributes, point_data['visit_reference'])
|
||||
|
||||
result = attributes.symbolize_keys
|
||||
|
||||
Rails.logger.debug "Prepared point attributes: #{result.slice(:lonlat, :timestamp, :import_id, :country_id, :visit_id)}"
|
||||
result
|
||||
rescue StandardError => e
|
||||
ExceptionReporter.call(e, 'Failed to prepare point attributes')
|
||||
|
||||
nil
|
||||
end
|
||||
|
||||
def resolve_import_reference(attributes, import_reference)
|
||||
return unless import_reference.is_a?(Hash)
|
||||
|
||||
created_at = normalize_timestamp_for_lookup(import_reference['created_at'])
|
||||
|
||||
import_key = [
|
||||
import_reference['name'],
|
||||
import_reference['source'],
|
||||
created_at
|
||||
]
|
||||
|
||||
import = imports_lookup[import_key]
|
||||
if import
|
||||
attributes['import_id'] = import.id
|
||||
Rails.logger.debug "Resolved import reference: #{import_reference['name']} -> #{import.id}"
|
||||
else
|
||||
Rails.logger.debug "Import not found for reference: #{import_reference.inspect}"
|
||||
Rails.logger.debug "Available imports: #{imports_lookup.keys.inspect}"
|
||||
end
|
||||
end
|
||||
|
||||
def resolve_country_reference(attributes, country_info)
|
||||
return unless country_info.is_a?(Hash)
|
||||
|
||||
country_key = [country_info['name'], country_info['iso_a2'], country_info['iso_a3']]
|
||||
country = countries_lookup[country_key]
|
||||
|
||||
if country.nil? && country_info['name'].present?
|
||||
country = countries_lookup[country_info['name']]
|
||||
end
|
||||
|
||||
if country
|
||||
attributes['country_id'] = country.id
|
||||
Rails.logger.debug "Resolved country reference: #{country_info['name']} -> #{country.id}"
|
||||
else
|
||||
Rails.logger.debug "Country not found for: #{country_info.inspect}"
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
def resolve_visit_reference(attributes, visit_reference)
|
||||
return unless visit_reference.is_a?(Hash)
|
||||
|
||||
started_at = normalize_timestamp_for_lookup(visit_reference['started_at'])
|
||||
ended_at = normalize_timestamp_for_lookup(visit_reference['ended_at'])
|
||||
|
||||
visit_key = [
|
||||
visit_reference['name'],
|
||||
started_at,
|
||||
ended_at
|
||||
]
|
||||
|
||||
visit = visits_lookup[visit_key]
|
||||
if visit
|
||||
attributes['visit_id'] = visit.id
|
||||
Rails.logger.debug "Resolved visit reference: #{visit_reference['name']} -> #{visit.id}"
|
||||
else
|
||||
Rails.logger.debug "Visit not found for reference: #{visit_reference.inspect}"
|
||||
Rails.logger.debug "Available visits: #{visits_lookup.keys.inspect}"
|
||||
end
|
||||
end
|
||||
|
||||
def deduplicate_points(points)
|
||||
points.uniq { |point| [point[:lonlat], point[:timestamp], point[:user_id]] }
|
||||
end
|
||||
|
||||
def normalize_point_keys(points)
|
||||
all_keys = points.flat_map(&:keys).uniq
|
||||
|
||||
# Normalize each point to have all keys (with nil for missing ones)
|
||||
points.map do |point|
|
||||
normalized = {}
|
||||
all_keys.each do |key|
|
||||
normalized[key] = point[key]
|
||||
end
|
||||
normalized
|
||||
end
|
||||
end
|
||||
|
||||
def bulk_import_points(points)
|
||||
total_created = 0
|
||||
|
||||
points.each_slice(BATCH_SIZE) do |batch|
|
||||
begin
|
||||
Rails.logger.debug "Processing batch of #{batch.size} points"
|
||||
Rails.logger.debug "First point in batch: #{batch.first.inspect}"
|
||||
|
||||
normalized_batch = normalize_point_keys(batch)
|
||||
|
||||
result = Point.upsert_all(
|
||||
normalized_batch,
|
||||
unique_by: %i[lonlat timestamp user_id],
|
||||
returning: %w[id],
|
||||
on_duplicate: :skip
|
||||
)
|
||||
|
||||
batch_created = result.count
|
||||
total_created += batch_created
|
||||
|
||||
Rails.logger.debug "Processed batch of #{batch.size} points, created #{batch_created}, total created: #{total_created}"
|
||||
|
||||
rescue StandardError => e
|
||||
Rails.logger.error "Failed to process point batch: #{e.message}"
|
||||
Rails.logger.error "Batch size: #{batch.size}"
|
||||
Rails.logger.error "First point in failed batch: #{batch.first.inspect}"
|
||||
Rails.logger.error "Backtrace: #{e.backtrace.first(5).join('\n')}"
|
||||
end
|
||||
end
|
||||
|
||||
total_created
|
||||
end
|
||||
|
||||
def valid_point_data?(point_data)
|
||||
return false unless point_data.is_a?(Hash)
|
||||
return false unless point_data['timestamp'].present?
|
||||
|
||||
has_lonlat = point_data['lonlat'].present? && point_data['lonlat'].is_a?(String) && point_data['lonlat'].start_with?('POINT(')
|
||||
has_coordinates = point_data['longitude'].present? && point_data['latitude'].present?
|
||||
|
||||
return false unless has_lonlat || has_coordinates
|
||||
|
||||
true
|
||||
rescue StandardError => e
|
||||
Rails.logger.debug "Point validation failed: #{e.message} for data: #{point_data.inspect}"
|
||||
false
|
||||
end
|
||||
|
||||
def ensure_lonlat_field(attributes, point_data)
|
||||
if attributes['lonlat'].blank? && point_data['longitude'].present? && point_data['latitude'].present?
|
||||
longitude = point_data['longitude'].to_f
|
||||
latitude = point_data['latitude'].to_f
|
||||
attributes['lonlat'] = "POINT(#{longitude} #{latitude})"
|
||||
Rails.logger.debug "Reconstructed lonlat: #{attributes['lonlat']}"
|
||||
end
|
||||
end
|
||||
|
||||
def normalize_timestamp_for_lookup(timestamp)
|
||||
return nil if timestamp.blank?
|
||||
|
||||
case timestamp
|
||||
when String
|
||||
Time.parse(timestamp).utc.iso8601
|
||||
when Time, DateTime
|
||||
timestamp.utc.iso8601
|
||||
else
|
||||
timestamp.to_s
|
||||
end
|
||||
rescue StandardError => e
|
||||
Rails.logger.debug "Failed to normalize timestamp #{timestamp}: #{e.message}"
|
||||
timestamp.to_s
|
||||
end
|
||||
end
|
||||
26
app/services/users/import_data/settings.rb
Normal file
26
app/services/users/import_data/settings.rb
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ImportData::Settings
|
||||
def initialize(user, settings_data)
|
||||
@user = user
|
||||
@settings_data = settings_data
|
||||
end
|
||||
|
||||
def call
|
||||
return false unless settings_data.is_a?(Hash)
|
||||
|
||||
Rails.logger.info "Importing settings for user: #{user.email}"
|
||||
|
||||
current_settings = user.settings || {}
|
||||
updated_settings = current_settings.merge(settings_data)
|
||||
|
||||
user.update!(settings: updated_settings)
|
||||
|
||||
Rails.logger.info "Settings import completed"
|
||||
true
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user, :settings_data
|
||||
end
|
||||
145
app/services/users/import_data/stats.rb
Normal file
145
app/services/users/import_data/stats.rb
Normal file
|
|
@ -0,0 +1,145 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ImportData::Stats
|
||||
BATCH_SIZE = 1000
|
||||
|
||||
def initialize(user, stats_data)
|
||||
@user = user
|
||||
@stats_data = stats_data
|
||||
end
|
||||
|
||||
def call
|
||||
return 0 unless stats_data.is_a?(Array)
|
||||
|
||||
Rails.logger.info "Importing #{stats_data.size} stats for user: #{user.email}"
|
||||
|
||||
valid_stats = filter_and_prepare_stats
|
||||
|
||||
if valid_stats.empty?
|
||||
Rails.logger.info "Stats import completed. Created: 0"
|
||||
return 0
|
||||
end
|
||||
|
||||
deduplicated_stats = filter_existing_stats(valid_stats)
|
||||
|
||||
if deduplicated_stats.size < valid_stats.size
|
||||
Rails.logger.debug "Skipped #{valid_stats.size - deduplicated_stats.size} duplicate stats"
|
||||
end
|
||||
|
||||
total_created = bulk_import_stats(deduplicated_stats)
|
||||
|
||||
Rails.logger.info "Stats import completed. Created: #{total_created}"
|
||||
total_created
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user, :stats_data
|
||||
|
||||
def filter_and_prepare_stats
|
||||
valid_stats = []
|
||||
skipped_count = 0
|
||||
|
||||
stats_data.each do |stat_data|
|
||||
next unless stat_data.is_a?(Hash)
|
||||
|
||||
unless valid_stat_data?(stat_data)
|
||||
skipped_count += 1
|
||||
next
|
||||
end
|
||||
|
||||
prepared_attributes = prepare_stat_attributes(stat_data)
|
||||
valid_stats << prepared_attributes if prepared_attributes
|
||||
end
|
||||
|
||||
if skipped_count > 0
|
||||
Rails.logger.warn "Skipped #{skipped_count} stats with invalid or missing required data"
|
||||
end
|
||||
|
||||
valid_stats
|
||||
end
|
||||
|
||||
def prepare_stat_attributes(stat_data)
|
||||
attributes = stat_data.except('created_at', 'updated_at')
|
||||
|
||||
attributes['user_id'] = user.id
|
||||
attributes['created_at'] = Time.current
|
||||
attributes['updated_at'] = Time.current
|
||||
|
||||
attributes.symbolize_keys
|
||||
rescue StandardError => e
|
||||
ExceptionReporter.call(e, 'Failed to prepare stat attributes')
|
||||
|
||||
nil
|
||||
end
|
||||
|
||||
def filter_existing_stats(stats)
|
||||
return stats if stats.empty?
|
||||
|
||||
existing_stats_lookup = {}
|
||||
user.stats.select(:year, :month).each do |stat|
|
||||
key = [stat.year, stat.month]
|
||||
existing_stats_lookup[key] = true
|
||||
end
|
||||
|
||||
filtered_stats = stats.reject do |stat|
|
||||
key = [stat[:year], stat[:month]]
|
||||
if existing_stats_lookup[key]
|
||||
Rails.logger.debug "Stat already exists: #{stat[:year]}-#{stat[:month]}"
|
||||
true
|
||||
else
|
||||
false
|
||||
end
|
||||
end
|
||||
|
||||
filtered_stats
|
||||
end
|
||||
|
||||
def bulk_import_stats(stats)
|
||||
total_created = 0
|
||||
|
||||
stats.each_slice(BATCH_SIZE) do |batch|
|
||||
begin
|
||||
result = Stat.upsert_all(
|
||||
batch,
|
||||
returning: %w[id],
|
||||
on_duplicate: :skip
|
||||
)
|
||||
|
||||
batch_created = result.count
|
||||
total_created += batch_created
|
||||
|
||||
Rails.logger.debug "Processed batch of #{batch.size} stats, created #{batch_created}, total created: #{total_created}"
|
||||
|
||||
rescue StandardError => e
|
||||
ExceptionReporter.call(e, 'Failed to process stat batch')
|
||||
end
|
||||
end
|
||||
|
||||
total_created
|
||||
end
|
||||
|
||||
def valid_stat_data?(stat_data)
|
||||
return false unless stat_data.is_a?(Hash)
|
||||
|
||||
unless stat_data['year'].present?
|
||||
Rails.logger.error "Failed to create stat: Validation failed: Year can't be blank"
|
||||
return false
|
||||
end
|
||||
|
||||
unless stat_data['month'].present?
|
||||
Rails.logger.error "Failed to create stat: Validation failed: Month can't be blank"
|
||||
return false
|
||||
end
|
||||
|
||||
unless stat_data['distance'].present?
|
||||
Rails.logger.error "Failed to create stat: Validation failed: Distance can't be blank"
|
||||
return false
|
||||
end
|
||||
|
||||
true
|
||||
rescue StandardError => e
|
||||
Rails.logger.debug "Stat validation failed: #{e.message} for data: #{stat_data.inspect}"
|
||||
false
|
||||
end
|
||||
end
|
||||
175
app/services/users/import_data/trips.rb
Normal file
175
app/services/users/import_data/trips.rb
Normal file
|
|
@ -0,0 +1,175 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ImportData::Trips
|
||||
BATCH_SIZE = 1000
|
||||
|
||||
def initialize(user, trips_data)
|
||||
@user = user
|
||||
@trips_data = trips_data
|
||||
end
|
||||
|
||||
def call
|
||||
return 0 unless trips_data.is_a?(Array)
|
||||
|
||||
Rails.logger.info "Importing #{trips_data.size} trips for user: #{user.email}"
|
||||
|
||||
valid_trips = filter_and_prepare_trips
|
||||
|
||||
if valid_trips.empty?
|
||||
Rails.logger.info "Trips import completed. Created: 0"
|
||||
return 0
|
||||
end
|
||||
|
||||
deduplicated_trips = filter_existing_trips(valid_trips)
|
||||
|
||||
if deduplicated_trips.size < valid_trips.size
|
||||
Rails.logger.debug "Skipped #{valid_trips.size - deduplicated_trips.size} duplicate trips"
|
||||
end
|
||||
|
||||
total_created = bulk_import_trips(deduplicated_trips)
|
||||
|
||||
Rails.logger.info "Trips import completed. Created: #{total_created}"
|
||||
total_created
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user, :trips_data
|
||||
|
||||
def filter_and_prepare_trips
|
||||
valid_trips = []
|
||||
skipped_count = 0
|
||||
|
||||
trips_data.each do |trip_data|
|
||||
next unless trip_data.is_a?(Hash)
|
||||
|
||||
unless valid_trip_data?(trip_data)
|
||||
skipped_count += 1
|
||||
next
|
||||
end
|
||||
|
||||
prepared_attributes = prepare_trip_attributes(trip_data)
|
||||
valid_trips << prepared_attributes if prepared_attributes
|
||||
end
|
||||
|
||||
if skipped_count > 0
|
||||
Rails.logger.warn "Skipped #{skipped_count} trips with invalid or missing required data"
|
||||
end
|
||||
|
||||
valid_trips
|
||||
end
|
||||
|
||||
def prepare_trip_attributes(trip_data)
|
||||
attributes = trip_data.except('created_at', 'updated_at')
|
||||
|
||||
attributes['user_id'] = user.id
|
||||
attributes['created_at'] = Time.current
|
||||
attributes['updated_at'] = Time.current
|
||||
|
||||
attributes.symbolize_keys
|
||||
rescue StandardError => e
|
||||
ExceptionReporter.call(e, 'Failed to prepare trip attributes')
|
||||
|
||||
nil
|
||||
end
|
||||
|
||||
def filter_existing_trips(trips)
|
||||
return trips if trips.empty?
|
||||
|
||||
existing_trips_lookup = {}
|
||||
user.trips.select(:name, :started_at, :ended_at).each do |trip|
|
||||
key = [trip.name, normalize_timestamp(trip.started_at), normalize_timestamp(trip.ended_at)]
|
||||
existing_trips_lookup[key] = true
|
||||
end
|
||||
|
||||
filtered_trips = trips.reject do |trip|
|
||||
key = [trip[:name], normalize_timestamp(trip[:started_at]), normalize_timestamp(trip[:ended_at])]
|
||||
if existing_trips_lookup[key]
|
||||
Rails.logger.debug "Trip already exists: #{trip[:name]}"
|
||||
true
|
||||
else
|
||||
false
|
||||
end
|
||||
end
|
||||
|
||||
filtered_trips
|
||||
end
|
||||
|
||||
def normalize_timestamp(timestamp)
|
||||
case timestamp
|
||||
when String
|
||||
Time.parse(timestamp).utc.iso8601
|
||||
when Time, DateTime
|
||||
timestamp.utc.iso8601
|
||||
else
|
||||
timestamp.to_s
|
||||
end
|
||||
rescue StandardError
|
||||
timestamp.to_s
|
||||
end
|
||||
|
||||
def bulk_import_trips(trips)
|
||||
total_created = 0
|
||||
|
||||
trips.each_slice(BATCH_SIZE) do |batch|
|
||||
begin
|
||||
result = Trip.upsert_all(
|
||||
batch,
|
||||
returning: %w[id],
|
||||
on_duplicate: :skip
|
||||
)
|
||||
|
||||
batch_created = result.count
|
||||
total_created += batch_created
|
||||
|
||||
Rails.logger.debug "Processed batch of #{batch.size} trips, created #{batch_created}, total created: #{total_created}"
|
||||
|
||||
rescue StandardError => e
|
||||
ExceptionReporter.call(e, 'Failed to process trip batch')
|
||||
end
|
||||
end
|
||||
|
||||
total_created
|
||||
end
|
||||
|
||||
def valid_trip_data?(trip_data)
|
||||
return false unless trip_data.is_a?(Hash)
|
||||
|
||||
return false unless validate_trip_name(trip_data)
|
||||
return false unless validate_trip_started_at(trip_data)
|
||||
return false unless validate_trip_ended_at(trip_data)
|
||||
|
||||
true
|
||||
rescue StandardError => e
|
||||
Rails.logger.debug "Trip validation failed: #{e.message} for data: #{trip_data.inspect}"
|
||||
false
|
||||
end
|
||||
|
||||
|
||||
def validate_trip_name(trip_data)
|
||||
if trip_data['name'].present?
|
||||
true
|
||||
else
|
||||
Rails.logger.debug 'Trip validation failed: Name can\'t be blank'
|
||||
false
|
||||
end
|
||||
end
|
||||
|
||||
def validate_trip_started_at(trip_data)
|
||||
if trip_data['started_at'].present?
|
||||
true
|
||||
else
|
||||
Rails.logger.debug 'Trip validation failed: Started at can\'t be blank'
|
||||
false
|
||||
end
|
||||
end
|
||||
|
||||
def validate_trip_ended_at(trip_data)
|
||||
if trip_data['ended_at'].present?
|
||||
true
|
||||
else
|
||||
Rails.logger.debug 'Trip validation failed: Ended at can\'t be blank'
|
||||
false
|
||||
end
|
||||
end
|
||||
end
|
||||
129
app/services/users/import_data/visits.rb
Normal file
129
app/services/users/import_data/visits.rb
Normal file
|
|
@ -0,0 +1,129 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Users::ImportData::Visits
|
||||
def initialize(user, visits_data)
|
||||
@user = user
|
||||
@visits_data = visits_data
|
||||
end
|
||||
|
||||
def call
|
||||
return 0 unless visits_data.is_a?(Array)
|
||||
|
||||
Rails.logger.info "Importing #{visits_data.size} visits for user: #{user.email}"
|
||||
|
||||
visits_created = 0
|
||||
|
||||
visits_data.each do |visit_data|
|
||||
next unless visit_data.is_a?(Hash)
|
||||
|
||||
existing_visit = find_existing_visit(visit_data)
|
||||
|
||||
if existing_visit
|
||||
Rails.logger.debug "Visit already exists: #{visit_data['name']}"
|
||||
next
|
||||
end
|
||||
|
||||
begin
|
||||
visit_record = create_visit_record(visit_data)
|
||||
visits_created += 1
|
||||
Rails.logger.debug "Created visit: #{visit_record.name}"
|
||||
rescue ActiveRecord::RecordInvalid => e
|
||||
Rails.logger.error "Failed to create visit: #{visit_data.inspect}, error: #{e.message}"
|
||||
ExceptionReporter.call(e, 'Failed to create visit during import')
|
||||
next
|
||||
rescue StandardError => e
|
||||
Rails.logger.error "Unexpected error creating visit: #{visit_data.inspect}, error: #{e.message}"
|
||||
ExceptionReporter.call(e, 'Unexpected error during visit import')
|
||||
next
|
||||
end
|
||||
end
|
||||
|
||||
Rails.logger.info "Visits import completed. Created: #{visits_created}"
|
||||
visits_created
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
attr_reader :user, :visits_data
|
||||
|
||||
def find_existing_visit(visit_data)
|
||||
user.visits.find_by(
|
||||
name: visit_data['name'],
|
||||
started_at: visit_data['started_at'],
|
||||
ended_at: visit_data['ended_at']
|
||||
)
|
||||
end
|
||||
|
||||
def create_visit_record(visit_data)
|
||||
visit_attributes = prepare_visit_attributes(visit_data)
|
||||
user.visits.create!(visit_attributes)
|
||||
end
|
||||
|
||||
def prepare_visit_attributes(visit_data)
|
||||
attributes = visit_data.except('place_reference')
|
||||
|
||||
if visit_data['place_reference']
|
||||
place = find_or_create_referenced_place(visit_data['place_reference'])
|
||||
attributes[:place] = place if place
|
||||
end
|
||||
|
||||
attributes
|
||||
end
|
||||
|
||||
def find_or_create_referenced_place(place_reference)
|
||||
return nil unless place_reference.is_a?(Hash)
|
||||
|
||||
name = place_reference['name']
|
||||
latitude = place_reference['latitude']&.to_f
|
||||
longitude = place_reference['longitude']&.to_f
|
||||
|
||||
return nil unless name.present? && latitude.present? && longitude.present?
|
||||
|
||||
Rails.logger.debug "Looking for place reference: #{name} at (#{latitude}, #{longitude})"
|
||||
|
||||
# First try exact match (name + coordinates)
|
||||
place = Place.where(
|
||||
name: name,
|
||||
latitude: latitude,
|
||||
longitude: longitude
|
||||
).first
|
||||
|
||||
if place
|
||||
Rails.logger.debug "Found exact place match for visit: #{name} -> existing place ID #{place.id}"
|
||||
return place
|
||||
end
|
||||
|
||||
# Try coordinate-only match with close proximity
|
||||
place = Place.where(
|
||||
"latitude BETWEEN ? AND ? AND longitude BETWEEN ? AND ?",
|
||||
latitude - 0.0001, latitude + 0.0001,
|
||||
longitude - 0.0001, longitude + 0.0001
|
||||
).first
|
||||
|
||||
if place
|
||||
Rails.logger.debug "Found nearby place match for visit: #{name} -> #{place.name} (ID: #{place.id})"
|
||||
return place
|
||||
end
|
||||
|
||||
# If no match found, create the place to ensure visit import succeeds
|
||||
# This handles cases where places weren't imported in the places phase
|
||||
Rails.logger.info "Creating missing place during visit import: #{name} at (#{latitude}, #{longitude})"
|
||||
|
||||
begin
|
||||
place = Place.create!(
|
||||
name: name,
|
||||
latitude: latitude,
|
||||
longitude: longitude,
|
||||
lonlat: "POINT(#{longitude} #{latitude})",
|
||||
source: place_reference['source'] || 'manual'
|
||||
)
|
||||
|
||||
Rails.logger.debug "Created missing place for visit: #{place.name} (ID: #{place.id})"
|
||||
place
|
||||
rescue ActiveRecord::RecordInvalid => e
|
||||
Rails.logger.error "Failed to create missing place: #{place_reference.inspect}, error: #{e.message}"
|
||||
ExceptionReporter.call(e, 'Failed to create missing place during visit import')
|
||||
nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -61,6 +61,39 @@
|
|||
<% end %>
|
||||
|
||||
<p class='mt-3'>Unhappy? <%= link_to "Cancel my account", registration_path(resource_name), data: { confirm: "Are you sure?", turbo_confirm: "Are you sure?", turbo_method: :delete }, method: :delete, class: 'btn' %></p>
|
||||
<div class="divider"></div>
|
||||
<p class='mt-3 flex flex-col gap-2'>
|
||||
<%= link_to "Export my data", export_settings_users_path, class: 'btn btn-primary', data: {
|
||||
turbo_confirm: "Are you sure you want to export your data?",
|
||||
turbo_method: :get
|
||||
} %>
|
||||
<button class='btn btn-primary' onclick="import_modal.showModal()">Import my data</button>
|
||||
</p>
|
||||
|
||||
<!-- Import Data Modal -->
|
||||
<dialog id="import_modal" class="modal">
|
||||
<div class="modal-box">
|
||||
<h3 class="font-bold text-lg mb-4">Import your data</h3>
|
||||
<p class="mb-4 text-sm text-gray-600">Upload a ZIP file containing your exported Dawarich data to restore your points, trips, and settings.</p>
|
||||
|
||||
<%= form_with url: import_settings_users_path, method: :post, multipart: true, class: 'space-y-4', data: { turbo: false } do |f| %>
|
||||
<div class="form-control">
|
||||
<%= f.label :archive, class: 'label' do %>
|
||||
<span class="label-text">Select ZIP archive</span>
|
||||
<% end %>
|
||||
<%= f.file_field :archive, accept: '.zip', required: true, class: 'file-input file-input-bordered w-full' %>
|
||||
</div>
|
||||
|
||||
<div class="modal-action">
|
||||
<%= f.submit "Import Data", class: 'btn btn-primary', data: { disable_with: 'Importing...' } %>
|
||||
<button type="button" class="btn" onclick="import_modal.close()">Cancel</button>
|
||||
</div>
|
||||
<% end %>
|
||||
</div>
|
||||
<form method="dialog" class="modal-backdrop">
|
||||
<button>close</button>
|
||||
</form>
|
||||
</dialog>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -24,10 +24,11 @@
|
|||
</div>
|
||||
</div>
|
||||
<div class="overflow-x-auto">
|
||||
<table class="table">
|
||||
<table class="table overflow-x-auto">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>File size</th>
|
||||
<th>Created at</th>
|
||||
<th>Status</th>
|
||||
<th>Actions</th>
|
||||
|
|
@ -37,9 +38,10 @@
|
|||
<% @exports.each do |export| %>
|
||||
<tr>
|
||||
<td><%= export.name %></td>
|
||||
<td><%= number_to_human_size(export.file&.byte_size) || 'N/A' %></td>
|
||||
<td><%= human_datetime(export.created_at) %></td>
|
||||
<td><%= export.status %></td>
|
||||
<td>
|
||||
<td class="whitespace-nowrap">
|
||||
<% if export.completed? %>
|
||||
<% if export.file.present? %>
|
||||
<%= link_to 'Download', rails_blob_path(export.file, disposition: 'attachment'), class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: export.name %>
|
||||
|
|
|
|||
|
|
@ -36,15 +36,15 @@
|
|||
</div>
|
||||
</div>
|
||||
<div class="overflow-x-auto">
|
||||
<table class="table">
|
||||
<table class="table overflow-x-auto">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>File size</th>
|
||||
<th>Imported points</th>
|
||||
<% if DawarichSettings.store_geodata? %>
|
||||
<th>Reverse geocoded points</th>
|
||||
<% end %>
|
||||
<th>Status</th>
|
||||
<th>Created at</th>
|
||||
<th>Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody
|
||||
|
|
@ -64,15 +64,18 @@
|
|||
|
||||
<%= link_to '📋', points_path(import_id: import.id) %>
|
||||
</td>
|
||||
<td><%= number_to_human_size(import.file&.byte_size) || 'N/A' %></td>
|
||||
<td data-points-count>
|
||||
<%= number_with_delimiter import.processed %>
|
||||
</td>
|
||||
<% if DawarichSettings.store_geodata? %>
|
||||
<td data-reverse-geocoded-points-count>
|
||||
<%= number_with_delimiter import.reverse_geocoded_points_count %>
|
||||
</td>
|
||||
<% end %>
|
||||
<td data-status-display><%= import.status %></td>
|
||||
<td><%= human_datetime(import.created_at) %></td>
|
||||
<td class="whitespace-nowrap">
|
||||
<% if import.file.present? %>
|
||||
<%= link_to 'Download', rails_blob_path(import.file, disposition: 'attachment'), class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: import.name %>
|
||||
<% end %>
|
||||
<%= link_to 'Delete', import, data: { confirm: "Are you sure?", turbo_confirm: "Are you sure?", turbo_method: :delete }, method: :delete, class: "px-4 py-2 bg-red-500 text-white rounded-md" %>
|
||||
</td>
|
||||
</tr>
|
||||
<% end %>
|
||||
</tbody>
|
||||
|
|
|
|||
|
|
@ -88,7 +88,8 @@ Rails.application.configure do
|
|||
|
||||
hosts = ENV.fetch('APPLICATION_HOSTS', 'localhost').split(',')
|
||||
|
||||
config.action_mailer.default_url_options = { host: hosts.first, port: 3000 }
|
||||
config.action_mailer.default_url_options = { host: ENV['SMTP_DOMAIN'] || hosts.first, port: ENV.fetch('PORT', 3000) }
|
||||
|
||||
config.hosts.concat(hosts) if hosts.present?
|
||||
|
||||
config.force_ssl = ENV.fetch('APPLICATION_PROTOCOL', 'http').downcase == 'https'
|
||||
|
|
|
|||
|
|
@ -103,7 +103,7 @@ Rails.application.configure do
|
|||
# config.host_authorization = { exclude: ->(request) { request.path == "/up" } }
|
||||
hosts = ENV.fetch('APPLICATION_HOSTS', 'localhost').split(',')
|
||||
|
||||
config.action_mailer.default_url_options = { host: hosts.first, port: 3000 }
|
||||
config.action_mailer.default_url_options = { host: ENV['SMTP_DOMAIN'] }
|
||||
config.hosts.concat(hosts) if hosts.present?
|
||||
|
||||
config.action_mailer.delivery_method = :smtp
|
||||
|
|
|
|||
3
config/initializers/oj.rb
Normal file
3
config/initializers/oj.rb
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
Oj.optimize_rails
|
||||
|
|
@ -36,7 +36,13 @@ Rails.application.routes.draw do
|
|||
resources :settings, only: :index
|
||||
namespace :settings do
|
||||
resources :background_jobs, only: %i[index create]
|
||||
resources :users, only: %i[index create destroy edit update]
|
||||
resources :users, only: %i[index create destroy edit update] do
|
||||
collection do
|
||||
get 'export'
|
||||
post 'import'
|
||||
end
|
||||
end
|
||||
|
||||
resources :maps, only: %i[index]
|
||||
patch 'maps', to: 'maps#update'
|
||||
end
|
||||
|
|
|
|||
15
db/migrate/20250625185030_add_file_type_to_exports.rb
Normal file
15
db/migrate/20250625185030_add_file_type_to_exports.rb
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddFileTypeToExports < ActiveRecord::Migration[8.0]
|
||||
disable_ddl_transaction!
|
||||
|
||||
def up
|
||||
add_column :exports, :file_type, :integer, default: 0, null: false
|
||||
add_index :exports, :file_type, algorithm: :concurrently
|
||||
end
|
||||
|
||||
def down
|
||||
remove_index :exports, :file_type, algorithm: :concurrently
|
||||
remove_column :exports, :file_type
|
||||
end
|
||||
end
|
||||
12
db/migrate/20250627184017_add_status_to_imports.rb
Normal file
12
db/migrate/20250627184017_add_status_to_imports.rb
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddStatusToImports < ActiveRecord::Migration[8.0]
|
||||
disable_ddl_transaction!
|
||||
|
||||
def change
|
||||
add_column :imports, :status, :integer, default: 0, null: false
|
||||
add_index :imports, :status, algorithm: :concurrently
|
||||
|
||||
Import.update_all(status: :completed)
|
||||
end
|
||||
end
|
||||
9
db/schema.rb
generated
9
db/schema.rb
generated
|
|
@ -10,7 +10,7 @@
|
|||
#
|
||||
# It's strongly recommended that you check this file into your version control system.
|
||||
|
||||
ActiveRecord::Schema[8.0].define(version: 2025_05_15_192211) do
|
||||
ActiveRecord::Schema[8.0].define(version: 2025_06_27_184017) do
|
||||
# These are extensions that must be enabled in order to support this database
|
||||
enable_extension "pg_catalog.plpgsql"
|
||||
enable_extension "postgis"
|
||||
|
|
@ -77,9 +77,6 @@ ActiveRecord::Schema[8.0].define(version: 2025_05_15_192211) do
|
|||
t.index ["name"], name: "index_countries_on_name"
|
||||
end
|
||||
|
||||
create_table "data_migrations", primary_key: "version", id: :string, force: :cascade do |t|
|
||||
end
|
||||
|
||||
create_table "exports", force: :cascade do |t|
|
||||
t.string "name", null: false
|
||||
t.string "url"
|
||||
|
|
@ -90,6 +87,8 @@ ActiveRecord::Schema[8.0].define(version: 2025_05_15_192211) do
|
|||
t.integer "file_format", default: 0
|
||||
t.datetime "start_at"
|
||||
t.datetime "end_at"
|
||||
t.integer "file_type", default: 0, null: false
|
||||
t.index ["file_type"], name: "index_exports_on_file_type"
|
||||
t.index ["status"], name: "index_exports_on_status"
|
||||
t.index ["user_id"], name: "index_exports_on_user_id"
|
||||
end
|
||||
|
|
@ -105,7 +104,9 @@ ActiveRecord::Schema[8.0].define(version: 2025_05_15_192211) do
|
|||
t.integer "processed", default: 0
|
||||
t.jsonb "raw_data"
|
||||
t.integer "points_count", default: 0
|
||||
t.integer "status", default: 0, null: false
|
||||
t.index ["source"], name: "index_imports_on_source"
|
||||
t.index ["status"], name: "index_imports_on_status"
|
||||
t.index ["user_id"], name: "index_imports_on_user_id"
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -24,7 +24,6 @@ FactoryBot.define do
|
|||
tracker_id { 'MyString' }
|
||||
import_id { '' }
|
||||
city { nil }
|
||||
country { nil }
|
||||
reverse_geocoded_at { nil }
|
||||
course { nil }
|
||||
course_accuracy { nil }
|
||||
|
|
@ -33,6 +32,33 @@ FactoryBot.define do
|
|||
user
|
||||
country_id { nil }
|
||||
|
||||
# Add transient attribute to handle country strings
|
||||
transient do
|
||||
country { nil } # Allow country to be passed as string
|
||||
end
|
||||
|
||||
# Handle country string assignment by creating Country objects
|
||||
after(:create) do |point, evaluator|
|
||||
if evaluator.country.is_a?(String)
|
||||
# Set both the country string attribute and the Country association
|
||||
country_obj = Country.find_or_create_by(name: evaluator.country) do |country|
|
||||
iso_a2, iso_a3 = Countries::IsoCodeMapper.fallback_codes_from_country_name(evaluator.country)
|
||||
country.iso_a2 = iso_a2
|
||||
country.iso_a3 = iso_a3
|
||||
country.geom = "MULTIPOLYGON (((0 0, 1 0, 1 1, 0 1, 0 0)))"
|
||||
end
|
||||
point.update_columns(
|
||||
country: evaluator.country,
|
||||
country_id: country_obj.id
|
||||
)
|
||||
elsif evaluator.country
|
||||
point.update_columns(
|
||||
country: evaluator.country.name,
|
||||
country_id: evaluator.country.id
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
trait :with_known_location do
|
||||
lonlat { 'POINT(37.6173 55.755826)' }
|
||||
end
|
||||
|
|
@ -62,9 +88,23 @@ FactoryBot.define do
|
|||
end
|
||||
|
||||
trait :reverse_geocoded do
|
||||
country { FFaker::Address.country }
|
||||
city { FFaker::Address.city }
|
||||
reverse_geocoded_at { Time.current }
|
||||
|
||||
after(:build) do |point, evaluator|
|
||||
# Only set country if not already set by transient attribute
|
||||
unless point.read_attribute(:country)
|
||||
country_name = FFaker::Address.country
|
||||
country_obj = Country.find_or_create_by(name: country_name) do |country|
|
||||
iso_a2, iso_a3 = Countries::IsoCodeMapper.fallback_codes_from_country_name(country_name)
|
||||
country.iso_a2 = iso_a2
|
||||
country.iso_a3 = iso_a3
|
||||
country.geom = "MULTIPOLYGON (((0 0, 1 0, 1 1, 0 1, 0 0)))"
|
||||
end
|
||||
point.write_attribute(:country, country_name) # Set the string attribute directly
|
||||
point.country_id = country_obj.id # Set the association
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
15
spec/jobs/users/export_data_job_spec.rb
Normal file
15
spec/jobs/users/export_data_job_spec.rb
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ExportDataJob, type: :job do
|
||||
let(:user) { create(:user) }
|
||||
let(:export_data) { Users::ExportData.new(user) }
|
||||
|
||||
it 'exports the user data' do
|
||||
expect(Users::ExportData).to receive(:new).with(user).and_return(export_data)
|
||||
expect(export_data).to receive(:export)
|
||||
|
||||
Users::ExportDataJob.perform_now(user.id)
|
||||
end
|
||||
end
|
||||
173
spec/jobs/users/import_data_job_spec.rb
Normal file
173
spec/jobs/users/import_data_job_spec.rb
Normal file
|
|
@ -0,0 +1,173 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ImportDataJob, type: :job do
|
||||
let(:user) { create(:user) }
|
||||
let(:import) { create(:import, user: user, source: :user_data_archive, name: 'test_export.zip') }
|
||||
let(:archive_path) { Rails.root.join('tmp', 'test_export.zip') }
|
||||
let(:job) { described_class.new }
|
||||
|
||||
before do
|
||||
FileUtils.touch(archive_path)
|
||||
|
||||
allow(import).to receive(:file).and_return(
|
||||
double('ActiveStorage::Attached::One',
|
||||
download: proc { |&block|
|
||||
File.read(archive_path).each_char { |c| block.call(c) }
|
||||
}
|
||||
)
|
||||
)
|
||||
end
|
||||
|
||||
after do
|
||||
FileUtils.rm_f(archive_path) if File.exist?(archive_path)
|
||||
end
|
||||
|
||||
describe '#perform' do
|
||||
context 'when import is successful' do
|
||||
before do
|
||||
import_service = instance_double(Users::ImportData)
|
||||
allow(Users::ImportData).to receive(:new).and_return(import_service)
|
||||
allow(import_service).to receive(:import).and_return({
|
||||
settings_updated: true,
|
||||
areas_created: 2,
|
||||
places_created: 3,
|
||||
imports_created: 1,
|
||||
exports_created: 1,
|
||||
trips_created: 2,
|
||||
stats_created: 1,
|
||||
notifications_created: 2,
|
||||
visits_created: 4,
|
||||
points_created: 1000,
|
||||
files_restored: 7
|
||||
})
|
||||
|
||||
allow(File).to receive(:exist?).and_return(true)
|
||||
allow(File).to receive(:delete)
|
||||
allow(Rails.logger).to receive(:info)
|
||||
end
|
||||
|
||||
it 'calls the import service with correct parameters' do
|
||||
expect(Users::ImportData).to receive(:new).with(user, anything)
|
||||
|
||||
job.perform(import.id)
|
||||
end
|
||||
|
||||
it 'calls import on the service' do
|
||||
import_service = instance_double(Users::ImportData)
|
||||
allow(Users::ImportData).to receive(:new).and_return(import_service)
|
||||
expect(import_service).to receive(:import)
|
||||
|
||||
job.perform(import.id)
|
||||
end
|
||||
|
||||
it 'completes successfully without updating import status' do
|
||||
expect(import).not_to receive(:update!)
|
||||
|
||||
job.perform(import.id)
|
||||
end
|
||||
|
||||
it 'does not create error notifications when successful' do
|
||||
expect(::Notifications::Create).not_to receive(:new)
|
||||
|
||||
job.perform(import.id)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when import fails' do
|
||||
let(:error_message) { 'Import failed due to invalid archive' }
|
||||
let(:error) { StandardError.new(error_message) }
|
||||
|
||||
before do
|
||||
import_service = instance_double(Users::ImportData)
|
||||
allow(Users::ImportData).to receive(:new).and_return(import_service)
|
||||
allow(import_service).to receive(:import).and_raise(error)
|
||||
|
||||
notification_service = instance_double(::Notifications::Create, call: true)
|
||||
allow(::Notifications::Create).to receive(:new).and_return(notification_service)
|
||||
|
||||
allow(File).to receive(:exist?).and_return(true)
|
||||
allow(File).to receive(:delete)
|
||||
allow(Rails.logger).to receive(:info)
|
||||
|
||||
allow(ExceptionReporter).to receive(:call)
|
||||
end
|
||||
|
||||
it 'reports the error to ExceptionReporter' do
|
||||
expect(ExceptionReporter).to receive(:call).with(error, "Import job failed for user #{user.id}")
|
||||
|
||||
expect { job.perform(import.id) }.to raise_error(StandardError, error_message)
|
||||
end
|
||||
|
||||
it 'does not update import status on failure' do
|
||||
expect(import).not_to receive(:update!)
|
||||
|
||||
expect { job.perform(import.id) }.to raise_error(StandardError, error_message)
|
||||
end
|
||||
|
||||
it 'creates a failure notification for the user' do
|
||||
expect(::Notifications::Create).to receive(:new).with(
|
||||
user: user,
|
||||
title: 'Data import failed',
|
||||
content: "Your data import failed with error: #{error_message}. Please check the archive format and try again.",
|
||||
kind: :error
|
||||
)
|
||||
|
||||
expect { job.perform(import.id) }.to raise_error(StandardError, error_message)
|
||||
end
|
||||
|
||||
it 're-raises the error' do
|
||||
expect { job.perform(import.id) }.to raise_error(StandardError, error_message)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when import does not exist' do
|
||||
let(:non_existent_import_id) { 999999 }
|
||||
|
||||
it 'raises ActiveRecord::RecordNotFound' do
|
||||
expect { job.perform(non_existent_import_id) }.to raise_error(ActiveRecord::RecordNotFound)
|
||||
end
|
||||
|
||||
it 'does not create a notification when import is not found' do
|
||||
expect(::Notifications::Create).not_to receive(:new)
|
||||
|
||||
expect { job.perform(non_existent_import_id) }.to raise_error(ActiveRecord::RecordNotFound)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when archive file download fails' do
|
||||
let(:error_message) { 'File download error' }
|
||||
let(:error) { StandardError.new(error_message) }
|
||||
|
||||
before do
|
||||
allow(import).to receive(:file).and_return(
|
||||
double('ActiveStorage::Attached::One', download: proc { raise error })
|
||||
)
|
||||
|
||||
notification_service = instance_double(::Notifications::Create, call: true)
|
||||
allow(::Notifications::Create).to receive(:new).and_return(notification_service)
|
||||
end
|
||||
|
||||
it 'creates notification with the correct user object' do
|
||||
notification_service = instance_double(::Notifications::Create, call: true)
|
||||
expect(::Notifications::Create).to receive(:new).with(
|
||||
user: user,
|
||||
title: 'Data import failed',
|
||||
content: a_string_matching(/Your data import failed with error:.*Please check the archive format and try again\./),
|
||||
kind: :error
|
||||
).and_return(notification_service)
|
||||
|
||||
expect(notification_service).to receive(:call)
|
||||
|
||||
expect { job.perform(import.id) }.to raise_error(StandardError)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'job configuration' do
|
||||
it 'is queued in the imports queue' do
|
||||
expect(described_class.queue_name).to eq('imports')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -9,4 +9,8 @@ RSpec.describe Country, type: :model do
|
|||
it { is_expected.to validate_presence_of(:iso_a3) }
|
||||
it { is_expected.to validate_presence_of(:geom) }
|
||||
end
|
||||
|
||||
describe 'associations' do
|
||||
it { is_expected.to have_many(:points).dependent(:nullify) }
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -9,6 +9,41 @@ RSpec.describe Export, type: :model do
|
|||
|
||||
describe 'enums' do
|
||||
it { is_expected.to define_enum_for(:status).with_values(created: 0, processing: 1, completed: 2, failed: 3) }
|
||||
it { is_expected.to define_enum_for(:file_format).with_values(json: 0, gpx: 1) }
|
||||
it { is_expected.to define_enum_for(:file_format).with_values(json: 0, gpx: 1, archive: 2) }
|
||||
it { is_expected.to define_enum_for(:file_type).with_values(points: 0, user_data: 1) }
|
||||
end
|
||||
|
||||
describe 'callbacks' do
|
||||
describe 'after_commit' do
|
||||
context 'when the export is created' do
|
||||
let(:export) { build(:export, file_type: :points) }
|
||||
|
||||
it 'enqueues the ExportJob' do
|
||||
expect(ExportJob).to receive(:perform_later)
|
||||
|
||||
export.save!
|
||||
end
|
||||
|
||||
context 'when the export is a user data export' do
|
||||
let(:export) { build(:export, file_type: :user_data) }
|
||||
|
||||
it 'does not enqueue the ExportJob' do
|
||||
expect(ExportJob).not_to receive(:perform_later).with(export.id)
|
||||
|
||||
export.save!
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the export is destroyed' do
|
||||
let(:export) { create(:export) }
|
||||
|
||||
it 'removes the attached file' do
|
||||
expect(export.file).to receive(:purge_later)
|
||||
|
||||
export.destroy!
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -8,6 +8,13 @@ RSpec.describe Import, type: :model do
|
|||
it { is_expected.to belong_to(:user) }
|
||||
end
|
||||
|
||||
describe 'validations' do
|
||||
subject { build(:import, name: 'test import') }
|
||||
|
||||
it { is_expected.to validate_presence_of(:name) }
|
||||
it { is_expected.to validate_uniqueness_of(:name).scoped_to(:user_id) }
|
||||
end
|
||||
|
||||
describe 'enums' do
|
||||
it do
|
||||
is_expected.to define_enum_for(:source).with_values(
|
||||
|
|
@ -18,7 +25,8 @@ RSpec.describe Import, type: :model do
|
|||
gpx: 4,
|
||||
immich_api: 5,
|
||||
geojson: 6,
|
||||
photoprism_api: 7
|
||||
photoprism_api: 7,
|
||||
user_data_archive: 8
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -6,6 +6,8 @@ RSpec.describe Point, type: :model do
|
|||
describe 'associations' do
|
||||
it { is_expected.to belong_to(:import).optional }
|
||||
it { is_expected.to belong_to(:user) }
|
||||
it { is_expected.to belong_to(:country).optional }
|
||||
it { is_expected.to belong_to(:visit).optional }
|
||||
end
|
||||
|
||||
describe 'validations' do
|
||||
|
|
|
|||
|
|
@ -204,5 +204,11 @@ RSpec.describe User, type: :model do
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#export_data' do
|
||||
it 'enqueues the export data job' do
|
||||
expect { user.export_data }.to have_enqueued_job(Users::ExportDataJob).with(user.id)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
245
spec/services/countries/iso_code_mapper_spec.rb
Normal file
245
spec/services/countries/iso_code_mapper_spec.rb
Normal file
|
|
@ -0,0 +1,245 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Countries::IsoCodeMapper do
|
||||
describe '.iso_a3_from_a2' do
|
||||
it 'returns correct ISO A3 code for valid ISO A2 code' do
|
||||
expect(described_class.iso_a3_from_a2('DE')).to eq('DEU')
|
||||
expect(described_class.iso_a3_from_a2('US')).to eq('USA')
|
||||
expect(described_class.iso_a3_from_a2('GB')).to eq('GBR')
|
||||
end
|
||||
|
||||
it 'handles lowercase input' do
|
||||
expect(described_class.iso_a3_from_a2('de')).to eq('DEU')
|
||||
end
|
||||
|
||||
it 'returns nil for invalid ISO A2 code' do
|
||||
expect(described_class.iso_a3_from_a2('XX')).to be_nil
|
||||
expect(described_class.iso_a3_from_a2('')).to be_nil
|
||||
expect(described_class.iso_a3_from_a2(nil)).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
describe '.iso_codes_from_country_name' do
|
||||
it 'returns correct ISO codes for exact country name match' do
|
||||
iso_a2, iso_a3 = described_class.iso_codes_from_country_name('Germany')
|
||||
expect(iso_a2).to eq('DE')
|
||||
expect(iso_a3).to eq('DEU')
|
||||
end
|
||||
|
||||
it 'returns correct ISO codes for country name aliases' do
|
||||
iso_a2, iso_a3 = described_class.iso_codes_from_country_name('Russia')
|
||||
expect(iso_a2).to eq('RU')
|
||||
expect(iso_a3).to eq('RUS')
|
||||
|
||||
iso_a2, iso_a3 = described_class.iso_codes_from_country_name('USA')
|
||||
expect(iso_a2).to eq('US')
|
||||
expect(iso_a3).to eq('USA')
|
||||
end
|
||||
|
||||
it 'handles case-insensitive matching' do
|
||||
iso_a2, iso_a3 = described_class.iso_codes_from_country_name('GERMANY')
|
||||
expect(iso_a2).to eq('DE')
|
||||
expect(iso_a3).to eq('DEU')
|
||||
|
||||
iso_a2, iso_a3 = described_class.iso_codes_from_country_name('germany')
|
||||
expect(iso_a2).to eq('DE')
|
||||
expect(iso_a3).to eq('DEU')
|
||||
end
|
||||
|
||||
it 'handles partial matching' do
|
||||
# This should find "United States" when searching for "United States of America"
|
||||
iso_a2, iso_a3 = described_class.iso_codes_from_country_name('United States of America')
|
||||
expect(iso_a2).to eq('US')
|
||||
expect(iso_a3).to eq('USA')
|
||||
end
|
||||
|
||||
it 'returns nil for unknown country names' do
|
||||
iso_a2, iso_a3 = described_class.iso_codes_from_country_name('Atlantis')
|
||||
expect(iso_a2).to be_nil
|
||||
expect(iso_a3).to be_nil
|
||||
end
|
||||
|
||||
it 'returns nil for blank input' do
|
||||
iso_a2, iso_a3 = described_class.iso_codes_from_country_name('')
|
||||
expect(iso_a2).to be_nil
|
||||
expect(iso_a3).to be_nil
|
||||
|
||||
iso_a2, iso_a3 = described_class.iso_codes_from_country_name(nil)
|
||||
expect(iso_a2).to be_nil
|
||||
expect(iso_a3).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
describe '.fallback_codes_from_country_name' do
|
||||
it 'returns proper ISO codes when country name is recognized' do
|
||||
iso_a2, iso_a3 = described_class.fallback_codes_from_country_name('Germany')
|
||||
expect(iso_a2).to eq('DE')
|
||||
expect(iso_a3).to eq('DEU')
|
||||
end
|
||||
|
||||
it 'falls back to character-based codes for unknown countries' do
|
||||
iso_a2, iso_a3 = described_class.fallback_codes_from_country_name('Atlantis')
|
||||
expect(iso_a2).to eq('AT')
|
||||
expect(iso_a3).to eq('ATL')
|
||||
end
|
||||
|
||||
it 'returns nil for blank input' do
|
||||
iso_a2, iso_a3 = described_class.fallback_codes_from_country_name('')
|
||||
expect(iso_a2).to be_nil
|
||||
expect(iso_a3).to be_nil
|
||||
|
||||
iso_a2, iso_a3 = described_class.fallback_codes_from_country_name(nil)
|
||||
expect(iso_a2).to be_nil
|
||||
expect(iso_a3).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
describe '.standardize_country_name' do
|
||||
it 'returns standard name for exact match' do
|
||||
expect(described_class.standardize_country_name('Germany')).to eq('Germany')
|
||||
end
|
||||
|
||||
it 'returns standard name for aliases' do
|
||||
expect(described_class.standardize_country_name('Russia')).to eq('Russian Federation')
|
||||
expect(described_class.standardize_country_name('USA')).to eq('United States')
|
||||
end
|
||||
|
||||
it 'handles case-insensitive matching' do
|
||||
expect(described_class.standardize_country_name('GERMANY')).to eq('Germany')
|
||||
expect(described_class.standardize_country_name('germany')).to eq('Germany')
|
||||
end
|
||||
|
||||
it 'returns nil for unknown country names' do
|
||||
expect(described_class.standardize_country_name('Atlantis')).to be_nil
|
||||
end
|
||||
|
||||
it 'returns nil for blank input' do
|
||||
expect(described_class.standardize_country_name('')).to be_nil
|
||||
expect(described_class.standardize_country_name(nil)).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
describe '.country_flag' do
|
||||
it 'returns correct flag emoji for valid ISO A2 code' do
|
||||
expect(described_class.country_flag('DE')).to eq('🇩🇪')
|
||||
expect(described_class.country_flag('US')).to eq('🇺🇸')
|
||||
expect(described_class.country_flag('GB')).to eq('🇬🇧')
|
||||
end
|
||||
|
||||
it 'handles lowercase input' do
|
||||
expect(described_class.country_flag('de')).to eq('🇩🇪')
|
||||
end
|
||||
|
||||
it 'returns nil for invalid ISO A2 code' do
|
||||
expect(described_class.country_flag('XX')).to be_nil
|
||||
expect(described_class.country_flag('')).to be_nil
|
||||
expect(described_class.country_flag(nil)).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
describe '.country_by_iso2' do
|
||||
it 'returns complete country data for valid ISO A2 code' do
|
||||
country = described_class.country_by_iso2('DE')
|
||||
expect(country).to include(
|
||||
name: 'Germany',
|
||||
iso2: 'DE',
|
||||
iso3: 'DEU',
|
||||
flag: '🇩🇪'
|
||||
)
|
||||
end
|
||||
|
||||
it 'handles lowercase input' do
|
||||
country = described_class.country_by_iso2('de')
|
||||
expect(country[:name]).to eq('Germany')
|
||||
end
|
||||
|
||||
it 'returns nil for invalid ISO A2 code' do
|
||||
expect(described_class.country_by_iso2('XX')).to be_nil
|
||||
expect(described_class.country_by_iso2('')).to be_nil
|
||||
expect(described_class.country_by_iso2(nil)).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
describe '.country_by_name' do
|
||||
it 'returns complete country data for exact name match' do
|
||||
country = described_class.country_by_name('Germany')
|
||||
expect(country).to include(
|
||||
name: 'Germany',
|
||||
iso2: 'DE',
|
||||
iso3: 'DEU',
|
||||
flag: '🇩🇪'
|
||||
)
|
||||
end
|
||||
|
||||
it 'returns country data for aliases' do
|
||||
country = described_class.country_by_name('Russia')
|
||||
expect(country).to include(
|
||||
name: 'Russian Federation',
|
||||
iso2: 'RU',
|
||||
iso3: 'RUS',
|
||||
flag: '🇷🇺'
|
||||
)
|
||||
end
|
||||
|
||||
it 'handles case-insensitive matching' do
|
||||
country = described_class.country_by_name('GERMANY')
|
||||
expect(country[:name]).to eq('Germany')
|
||||
end
|
||||
|
||||
it 'returns nil for unknown country names' do
|
||||
expect(described_class.country_by_name('Atlantis')).to be_nil
|
||||
end
|
||||
|
||||
it 'returns nil for blank input' do
|
||||
expect(described_class.country_by_name('')).to be_nil
|
||||
expect(described_class.country_by_name(nil)).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
describe '.all_countries' do
|
||||
it 'returns all country data' do
|
||||
countries = described_class.all_countries
|
||||
expect(countries).to be_an(Array)
|
||||
expect(countries.size).to be > 190 # There are 195+ countries
|
||||
|
||||
# Check that each country has required fields
|
||||
countries.each do |country|
|
||||
expect(country).to have_key(:name)
|
||||
expect(country).to have_key(:iso2)
|
||||
expect(country).to have_key(:iso3)
|
||||
expect(country).to have_key(:flag)
|
||||
end
|
||||
end
|
||||
|
||||
it 'includes expected countries' do
|
||||
countries = described_class.all_countries
|
||||
country_names = countries.map { |c| c[:name] }
|
||||
|
||||
expect(country_names).to include('Germany')
|
||||
expect(country_names).to include('United States')
|
||||
expect(country_names).to include('United Kingdom')
|
||||
expect(country_names).to include('Russian Federation')
|
||||
end
|
||||
end
|
||||
|
||||
describe 'data integrity' do
|
||||
it 'has consistent data structure' do
|
||||
described_class.all_countries.each do |country|
|
||||
expect(country[:iso2]).to match(/\A[A-Z]{2}\z/)
|
||||
expect(country[:iso3]).to match(/\A[A-Z]{3}\z/)
|
||||
expect(country[:name]).to be_present
|
||||
expect(country[:flag]).to be_present
|
||||
end
|
||||
end
|
||||
|
||||
it 'has unique ISO codes' do
|
||||
iso2_codes = described_class.all_countries.map { |c| c[:iso2] }
|
||||
iso3_codes = described_class.all_countries.map { |c| c[:iso3] }
|
||||
|
||||
expect(iso2_codes.uniq.size).to eq(iso2_codes.size)
|
||||
expect(iso3_codes.uniq.size).to eq(iso3_codes.size)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -203,8 +203,7 @@ RSpec.describe GoogleMaps::RecordsStorageImporter do
|
|||
end
|
||||
|
||||
it 'logs and raises parse error' do
|
||||
# The actual error raised is an EncodingError, not Oj::ParseError
|
||||
expect { subject.call }.to raise_error(EncodingError)
|
||||
expect { subject.call }.to raise_error(JSON::ParserError)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -7,6 +7,38 @@ RSpec.describe Imports::Create do
|
|||
let(:service) { described_class.new(user, import) }
|
||||
|
||||
describe '#call' do
|
||||
describe 'status transitions' do
|
||||
let(:import) { create(:import, source: 'owntracks', status: 'created') }
|
||||
let(:file_path) { Rails.root.join('spec/fixtures/files/owntracks/2024-03.rec') }
|
||||
|
||||
before do
|
||||
import.file.attach(io: File.open(file_path), filename: '2024-03.rec', content_type: 'application/octet-stream')
|
||||
end
|
||||
|
||||
it 'sets status to processing at start' do
|
||||
service.call
|
||||
expect(import.reload.status).to eq('processing').or eq('completed')
|
||||
end
|
||||
|
||||
context 'when import succeeds' do
|
||||
it 'sets status to completed' do
|
||||
service.call
|
||||
expect(import.reload.status).to eq('completed')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when import fails' do
|
||||
before do
|
||||
allow(OwnTracks::Importer).to receive(:new).with(import, user.id).and_raise(StandardError)
|
||||
end
|
||||
|
||||
it 'sets status to failed' do
|
||||
service.call
|
||||
expect(import.reload.status).to eq('failed')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when source is google_semantic_history' do
|
||||
let(:import) { create(:import, source: 'google_semantic_history') }
|
||||
let(:file_path) { Rails.root.join('spec/fixtures/files/google/semantic_history.json') }
|
||||
|
|
|
|||
|
|
@ -8,10 +8,19 @@ RSpec.describe ReverseGeocoding::Points::FetchData do
|
|||
let(:point) { create(:point) }
|
||||
|
||||
context 'when Geocoder returns city and country' do
|
||||
let!(:germany) { create(:country, name: 'Germany', iso_a2: 'DE', iso_a3: 'DEU') }
|
||||
|
||||
before do
|
||||
allow(Geocoder).to receive(:search).and_return(
|
||||
[
|
||||
double(city: 'City', country: 'Country',data: { 'address' => 'Address' })
|
||||
double(
|
||||
city: 'Berlin',
|
||||
country: 'Germany',
|
||||
data: {
|
||||
'address' => 'Address',
|
||||
'properties' => { 'countrycode' => 'DE' }
|
||||
}
|
||||
)
|
||||
]
|
||||
)
|
||||
end
|
||||
|
|
@ -19,12 +28,23 @@ RSpec.describe ReverseGeocoding::Points::FetchData do
|
|||
context 'when point does not have city and country' do
|
||||
it 'updates point with city and country' do
|
||||
expect { fetch_data }.to change { point.reload.city }
|
||||
.from(nil).to('City')
|
||||
.and change { point.reload.country }.from(nil).to('Country')
|
||||
.from(nil).to('Berlin')
|
||||
.and change { point.reload.country_id }.from(nil).to(germany.id)
|
||||
end
|
||||
|
||||
it 'finds existing country' do
|
||||
fetch_data
|
||||
country = point.reload.country
|
||||
expect(country.name).to eq('Germany')
|
||||
expect(country.iso_a2).to eq('DE')
|
||||
expect(country.iso_a3).to eq('DEU')
|
||||
end
|
||||
|
||||
it 'updates point with geodata' do
|
||||
expect { fetch_data }.to change { point.reload.geodata }.from({}).to('address' => 'Address')
|
||||
expect { fetch_data }.to change { point.reload.geodata }.from({}).to(
|
||||
'address' => 'Address',
|
||||
'properties' => { 'countrycode' => 'DE' }
|
||||
)
|
||||
end
|
||||
|
||||
it 'calls Geocoder' do
|
||||
|
|
@ -35,11 +55,20 @@ RSpec.describe ReverseGeocoding::Points::FetchData do
|
|||
end
|
||||
|
||||
context 'when point has city and country' do
|
||||
let(:point) { create(:point, :with_geodata, :reverse_geocoded) }
|
||||
let(:country) { create(:country, name: 'Test Country') }
|
||||
let(:point) { create(:point, :with_geodata, city: 'Test City', country_id: country.id, reverse_geocoded_at: Time.current) }
|
||||
|
||||
before do
|
||||
allow(Geocoder).to receive(:search).and_return(
|
||||
[double(geodata: { 'address' => 'Address' }, city: 'City', country: 'Country')]
|
||||
[double(
|
||||
geodata: { 'address' => 'Address' },
|
||||
city: 'Berlin',
|
||||
country: 'Germany',
|
||||
data: {
|
||||
'address' => 'Address',
|
||||
'properties' => { 'countrycode' => 'DE' }
|
||||
}
|
||||
)]
|
||||
)
|
||||
end
|
||||
|
||||
|
|
@ -55,9 +84,33 @@ RSpec.describe ReverseGeocoding::Points::FetchData do
|
|||
end
|
||||
end
|
||||
|
||||
context 'when Geocoder returns country name that does not exist in database' do
|
||||
before do
|
||||
allow(Geocoder).to receive(:search).and_return(
|
||||
[
|
||||
double(
|
||||
city: 'Paris',
|
||||
country: 'NonExistentCountry',
|
||||
data: {
|
||||
'address' => 'Address',
|
||||
'properties' => { 'city' => 'Paris' }
|
||||
}
|
||||
)
|
||||
]
|
||||
)
|
||||
end
|
||||
|
||||
it 'does not set country_id when country is not found' do
|
||||
expect { fetch_data }.to change { point.reload.city }
|
||||
.from(nil).to('Paris')
|
||||
|
||||
expect(point.reload.country_id).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when Geocoder returns an error' do
|
||||
before do
|
||||
allow(Geocoder).to receive(:search).and_return([double(data: { 'error' => 'Error' })])
|
||||
allow(Geocoder).to receive(:search).and_return([double(city: nil, country: nil, data: { 'error' => 'Error' })])
|
||||
end
|
||||
|
||||
it 'does not update point' do
|
||||
|
|
|
|||
57
spec/services/users/export_data/areas_spec.rb
Normal file
57
spec/services/users/export_data/areas_spec.rb
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ExportData::Areas, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:service) { described_class.new(user) }
|
||||
|
||||
subject { service.call }
|
||||
|
||||
describe '#call' do
|
||||
context 'when user has no areas' do
|
||||
it 'returns an empty array' do
|
||||
expect(subject).to eq([])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user has areas' do
|
||||
let!(:area1) { create(:area, user: user, name: 'Home', radius: 100) }
|
||||
let!(:area2) { create(:area, user: user, name: 'Work', radius: 200) }
|
||||
|
||||
it 'returns all user areas' do
|
||||
expect(subject).to be_an(Array)
|
||||
expect(subject.size).to eq(2)
|
||||
end
|
||||
|
||||
it 'excludes user_id and id fields' do
|
||||
subject.each do |area_data|
|
||||
expect(area_data).not_to have_key('user_id')
|
||||
expect(area_data).not_to have_key('id')
|
||||
end
|
||||
end
|
||||
|
||||
it 'includes expected area attributes' do
|
||||
area_data = subject.find { |a| a['name'] == 'Home' }
|
||||
|
||||
expect(area_data).to include(
|
||||
'name' => 'Home',
|
||||
'radius' => 100
|
||||
)
|
||||
expect(area_data).to have_key('created_at')
|
||||
expect(area_data).to have_key('updated_at')
|
||||
end
|
||||
end
|
||||
|
||||
context 'with multiple users' do
|
||||
let(:other_user) { create(:user) }
|
||||
let!(:user_area) { create(:area, user: user, name: 'User Area') }
|
||||
let!(:other_user_area) { create(:area, user: other_user, name: 'Other User Area') }
|
||||
|
||||
it 'only returns areas for the specified user' do
|
||||
expect(subject.size).to eq(1)
|
||||
expect(subject.first['name']).to eq('User Area')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
106
spec/services/users/export_data/exports_spec.rb
Normal file
106
spec/services/users/export_data/exports_spec.rb
Normal file
|
|
@ -0,0 +1,106 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ExportData::Exports, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:files_directory) { Rails.root.join('tmp', 'test_export_files') }
|
||||
let(:service) { described_class.new(user, files_directory) }
|
||||
|
||||
subject { service.call }
|
||||
|
||||
before do
|
||||
FileUtils.mkdir_p(files_directory)
|
||||
allow(Rails.logger).to receive(:info)
|
||||
allow(Rails.logger).to receive(:error)
|
||||
end
|
||||
|
||||
after do
|
||||
FileUtils.rm_rf(files_directory) if File.directory?(files_directory)
|
||||
end
|
||||
|
||||
describe '#call' do
|
||||
context 'when user has no exports' do
|
||||
it 'returns an empty array' do
|
||||
expect(subject).to eq([])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user has exports without files' do
|
||||
let!(:export_without_file) do
|
||||
create(:export,
|
||||
user: user,
|
||||
name: 'Test Export',
|
||||
file_format: :json,
|
||||
file_type: :points,
|
||||
status: :completed
|
||||
)
|
||||
end
|
||||
|
||||
it 'returns export data without file information' do
|
||||
expect(subject.size).to eq(1)
|
||||
|
||||
export_data = subject.first
|
||||
|
||||
expect(export_data).to include(
|
||||
'name' => 'Test Export',
|
||||
'file_format' => 'json',
|
||||
'file_type' => 'points',
|
||||
'status' => 'completed'
|
||||
)
|
||||
expect(export_data).not_to have_key('user_id')
|
||||
expect(export_data).not_to have_key('id')
|
||||
|
||||
expect(export_data['file_name']).to be_nil
|
||||
expect(export_data['original_filename']).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user has exports with attached files' do
|
||||
let(:file_content) { 'export file content' }
|
||||
let(:blob) { create_blob(filename: 'export_data.json', content_type: 'application/json') }
|
||||
let!(:export_with_file) do
|
||||
export = create(:export, user: user, name: 'Export with File')
|
||||
export.file.attach(blob)
|
||||
export
|
||||
end
|
||||
|
||||
before do
|
||||
# Mock the file download - exports use direct file access
|
||||
allow(File).to receive(:open).and_call_original
|
||||
allow(File).to receive(:write).and_call_original
|
||||
end
|
||||
|
||||
it 'returns export data with file information' do
|
||||
export_data = subject.first
|
||||
|
||||
expect(export_data['name']).to eq('Export with File')
|
||||
expect(export_data['file_name']).to eq("export_#{export_with_file.id}_export_data.json")
|
||||
expect(export_data['original_filename']).to eq('export_data.json')
|
||||
expect(export_data['file_size']).to be_present
|
||||
expect(export_data['content_type']).to eq('application/json')
|
||||
end
|
||||
end
|
||||
|
||||
context 'with multiple users' do
|
||||
let(:other_user) { create(:user) }
|
||||
let!(:user_export) { create(:export, user: user, name: 'User Export') }
|
||||
let!(:other_user_export) { create(:export, user: other_user, name: 'Other User Export') }
|
||||
|
||||
it 'only returns exports for the specified user' do
|
||||
expect(subject.size).to eq(1)
|
||||
expect(subject.first['name']).to eq('User Export')
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def create_blob(filename: 'test.txt', content_type: 'text/plain')
|
||||
ActiveStorage::Blob.create_and_upload!(
|
||||
io: StringIO.new('test content'),
|
||||
filename: filename,
|
||||
content_type: content_type
|
||||
)
|
||||
end
|
||||
end
|
||||
171
spec/services/users/export_data/imports_spec.rb
Normal file
171
spec/services/users/export_data/imports_spec.rb
Normal file
|
|
@ -0,0 +1,171 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ExportData::Imports, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:files_directory) { Pathname.new(Dir.mktmpdir('test_exports')) }
|
||||
let(:service) { described_class.new(user, files_directory) }
|
||||
|
||||
subject { service.call }
|
||||
|
||||
after do
|
||||
FileUtils.rm_rf(files_directory) if files_directory.exist?
|
||||
end
|
||||
|
||||
describe '#call' do
|
||||
context 'when user has no imports' do
|
||||
it 'returns an empty array' do
|
||||
expect(subject).to eq([])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user has imports without files' do
|
||||
let!(:import1) { create(:import, user: user, name: 'Import 1') }
|
||||
let!(:import2) { create(:import, user: user, name: 'Import 2') }
|
||||
|
||||
it 'returns import data without file information' do
|
||||
expect(service.call.size).to eq(2)
|
||||
|
||||
first_import = service.call.find { |i| i['name'] == 'Import 1' }
|
||||
expect(first_import['file_name']).to be_nil
|
||||
expect(first_import['original_filename']).to be_nil
|
||||
expect(first_import).not_to have_key('user_id')
|
||||
expect(first_import).not_to have_key('raw_data')
|
||||
expect(first_import).not_to have_key('id')
|
||||
end
|
||||
|
||||
it 'logs processing information' do
|
||||
expect(Rails.logger).to receive(:info).at_least(:once)
|
||||
service.call
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user has imports with attached files' do
|
||||
let(:file_content) { 'test file content' }
|
||||
let(:blob) { create_blob(filename: 'test_file.json', content_type: 'application/json') }
|
||||
let!(:import_with_file) do
|
||||
import = create(:import, user: user, name: 'Import with File')
|
||||
import.file.attach(blob)
|
||||
import
|
||||
end
|
||||
|
||||
before do
|
||||
allow(Imports::SecureFileDownloader).to receive(:new).and_return(
|
||||
double(download_with_verification: file_content)
|
||||
)
|
||||
end
|
||||
|
||||
it 'returns import data with file information' do
|
||||
import_data = subject.first
|
||||
|
||||
expect(import_data['name']).to eq('Import with File')
|
||||
expect(import_data['file_name']).to eq("import_#{import_with_file.id}_test_file.json")
|
||||
expect(import_data['original_filename']).to eq('test_file.json')
|
||||
expect(import_data['file_size']).to be_present
|
||||
expect(import_data['content_type']).to eq('application/json')
|
||||
end
|
||||
|
||||
it 'downloads and saves the file to the files directory' do
|
||||
import_data = subject.first
|
||||
|
||||
file_path = files_directory.join(import_data['file_name'])
|
||||
expect(File.exist?(file_path)).to be true
|
||||
expect(File.read(file_path)).to eq(file_content)
|
||||
end
|
||||
|
||||
it 'sanitizes the filename' do
|
||||
blob = create_blob(filename: 'test file with spaces & symbols!.json')
|
||||
import_with_file.file.attach(blob)
|
||||
|
||||
import_data = subject.first
|
||||
|
||||
expect(import_data['file_name']).to match(/import_\d+_test_file_with_spaces___symbols_.json/)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when file download fails' do
|
||||
let!(:import_with_file) do
|
||||
import = create(:import, user: user, name: 'Import with error file')
|
||||
import.file.attach(create_blob)
|
||||
import
|
||||
end
|
||||
|
||||
before do
|
||||
allow(Imports::SecureFileDownloader).to receive(:new).and_raise(StandardError, 'Download failed')
|
||||
end
|
||||
|
||||
it 'handles download errors gracefully' do
|
||||
import_data = subject.find { |i| i['name'] == 'Import with error file' }
|
||||
|
||||
expect(import_data['file_error']).to eq('Failed to download: Download failed')
|
||||
end
|
||||
end
|
||||
|
||||
context 'with single import (no parallel processing)' do
|
||||
let!(:import) { create(:import, user: user, name: 'Single import') }
|
||||
|
||||
it 'processes without using parallel threads' do
|
||||
expect(Parallel).not_to receive(:map)
|
||||
service.call
|
||||
end
|
||||
end
|
||||
|
||||
context 'with multiple imports (parallel processing)' do
|
||||
let!(:import1) { create(:import, user: user, name: 'Multiple Import 1') }
|
||||
let!(:import2) { create(:import, user: user, name: 'Multiple Import 2') }
|
||||
let!(:import3) { create(:import, user: user, name: 'Multiple Import 3') }
|
||||
|
||||
let!(:imports) { [import1, import2, import3] }
|
||||
|
||||
it 'uses parallel processing with limited threads' do
|
||||
expect(Parallel).to receive(:map).with(anything, in_threads: 2).and_call_original
|
||||
service.call
|
||||
end
|
||||
|
||||
it 'returns all imports' do
|
||||
expect(subject.size).to eq(3)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with multiple users' do
|
||||
let(:other_user) { create(:user) }
|
||||
let!(:user_import) { create(:import, user: user, name: 'User Import') }
|
||||
let!(:other_user_import) { create(:import, user: other_user, name: 'Other User Import') }
|
||||
|
||||
it 'only returns imports for the specified user' do
|
||||
expect(subject.size).to eq(1)
|
||||
expect(subject.first['name']).to eq('User Import')
|
||||
end
|
||||
end
|
||||
|
||||
context 'performance considerations' do
|
||||
let!(:import1) { create(:import, user: user, name: 'Perf Import 1') }
|
||||
let!(:import2) { create(:import, user: user, name: 'Perf Import 2') }
|
||||
|
||||
let!(:imports_with_files) { [import1, import2] }
|
||||
|
||||
before do
|
||||
imports_with_files.each do |import|
|
||||
import.file.attach(create_blob)
|
||||
end
|
||||
end
|
||||
|
||||
it 'includes file_attachment to avoid N+1 queries' do
|
||||
# This test verifies that we're using .includes(:file_attachment)
|
||||
expect(user.imports).to receive(:includes).with(:file_attachment).and_call_original
|
||||
service.call
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def create_blob(filename: 'test.txt', content_type: 'text/plain')
|
||||
ActiveStorage::Blob.create_and_upload!(
|
||||
io: StringIO.new('test content'),
|
||||
filename: filename,
|
||||
content_type: content_type
|
||||
)
|
||||
end
|
||||
end
|
||||
57
spec/services/users/export_data/notifications_spec.rb
Normal file
57
spec/services/users/export_data/notifications_spec.rb
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ExportData::Notifications, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:service) { described_class.new(user) }
|
||||
|
||||
subject { service.call }
|
||||
|
||||
describe '#call' do
|
||||
context 'when user has no notifications' do
|
||||
it 'returns an empty array' do
|
||||
expect(subject).to eq([])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user has notifications' do
|
||||
let!(:notification1) { create(:notification, user: user, title: 'Test 1', kind: :info) }
|
||||
let!(:notification2) { create(:notification, user: user, title: 'Test 2', kind: :warning) }
|
||||
|
||||
it 'returns all user notifications' do
|
||||
expect(subject).to be_an(Array)
|
||||
expect(subject.size).to eq(2)
|
||||
end
|
||||
|
||||
it 'excludes user_id and id fields' do
|
||||
subject.each do |notification_data|
|
||||
expect(notification_data).not_to have_key('user_id')
|
||||
expect(notification_data).not_to have_key('id')
|
||||
end
|
||||
end
|
||||
|
||||
it 'includes expected notification attributes' do
|
||||
notification_data = subject.find { |n| n['title'] == 'Test 1' }
|
||||
|
||||
expect(notification_data).to include(
|
||||
'title' => 'Test 1',
|
||||
'kind' => 'info'
|
||||
)
|
||||
expect(notification_data).to have_key('created_at')
|
||||
expect(notification_data).to have_key('updated_at')
|
||||
end
|
||||
end
|
||||
|
||||
context 'with multiple users' do
|
||||
let(:other_user) { create(:user) }
|
||||
let!(:user_notification) { create(:notification, user: user, title: 'User Notification') }
|
||||
let!(:other_user_notification) { create(:notification, user: other_user, title: 'Other Notification') }
|
||||
|
||||
it 'only returns notifications for the specified user' do
|
||||
expect(subject.size).to eq(1)
|
||||
expect(subject.first['title']).to eq('User Notification')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
47
spec/services/users/export_data/places_spec.rb
Normal file
47
spec/services/users/export_data/places_spec.rb
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ExportData::Places, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:service) { described_class.new(user) }
|
||||
|
||||
subject { service.call }
|
||||
|
||||
describe '#call' do
|
||||
context 'when user has no places' do
|
||||
it 'returns an empty array' do
|
||||
expect(subject).to eq([])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user has places' do
|
||||
let!(:place1) { create(:place, name: 'Home', longitude: -74.0059, latitude: 40.7128) }
|
||||
let!(:place2) { create(:place, name: 'Office', longitude: -73.9851, latitude: 40.7589) }
|
||||
let!(:visit1) { create(:visit, user: user, place: place1) }
|
||||
let!(:visit2) { create(:visit, user: user, place: place2) }
|
||||
|
||||
it 'returns all places' do
|
||||
expect(subject.size).to eq(2)
|
||||
end
|
||||
|
||||
it 'excludes id field' do
|
||||
subject.each do |place_data|
|
||||
expect(place_data).not_to have_key('id')
|
||||
end
|
||||
end
|
||||
|
||||
it 'includes expected place attributes' do
|
||||
place_data = subject.find { |p| p['name'] == 'Office' }
|
||||
|
||||
expect(place_data).to include(
|
||||
'name' => 'Office',
|
||||
'longitude' => '-73.9851',
|
||||
'latitude' => '40.7589'
|
||||
)
|
||||
expect(place_data).to have_key('created_at')
|
||||
expect(place_data).to have_key('updated_at')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
268
spec/services/users/export_data/points_spec.rb
Normal file
268
spec/services/users/export_data/points_spec.rb
Normal file
|
|
@ -0,0 +1,268 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ExportData::Points, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:service) { described_class.new(user) }
|
||||
|
||||
subject { service.call }
|
||||
|
||||
describe '#call' do
|
||||
context 'when user has no points' do
|
||||
it 'returns an empty array' do
|
||||
expect(subject).to eq([])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user has points with various relationships' do
|
||||
let!(:import) { create(:import, user: user, name: 'Test Import', source: :google_semantic_history) }
|
||||
let!(:country) { create(:country, name: 'United States', iso_a2: 'US', iso_a3: 'USA') }
|
||||
let!(:place) { create(:place) }
|
||||
let!(:visit) { create(:visit, user: user, place: place, name: 'Work Visit') }
|
||||
let(:point_with_relationships) do
|
||||
create(:point,
|
||||
user: user,
|
||||
import: import,
|
||||
country: country,
|
||||
visit: visit,
|
||||
battery_status: :charging,
|
||||
battery: 85,
|
||||
timestamp: 1640995200,
|
||||
altitude: 100,
|
||||
velocity: '25.5',
|
||||
accuracy: 5,
|
||||
ping: 'test-ping',
|
||||
tracker_id: 'tracker-123',
|
||||
topic: 'owntracks/user/device',
|
||||
trigger: :manual_event,
|
||||
bssid: 'aa:bb:cc:dd:ee:ff',
|
||||
ssid: 'TestWiFi',
|
||||
connection: :wifi,
|
||||
vertical_accuracy: 3,
|
||||
mode: 2,
|
||||
inrids: ['region1', 'region2'],
|
||||
in_regions: ['home', 'work'],
|
||||
raw_data: { 'test' => 'data' },
|
||||
city: 'New York',
|
||||
geodata: { 'address' => '123 Main St' },
|
||||
reverse_geocoded_at: Time.current,
|
||||
course: 45.5,
|
||||
course_accuracy: 2.5,
|
||||
external_track_id: 'ext-123',
|
||||
longitude: -74.006,
|
||||
latitude: 40.7128,
|
||||
lonlat: 'POINT(-74.006 40.7128)'
|
||||
)
|
||||
end
|
||||
let(:point_without_relationships) do
|
||||
create(:point,
|
||||
user: user,
|
||||
timestamp: 1640995260,
|
||||
longitude: -73.9857,
|
||||
latitude: 40.7484,
|
||||
lonlat: 'POINT(-73.9857 40.7484)'
|
||||
)
|
||||
end
|
||||
|
||||
before do
|
||||
point_with_relationships
|
||||
point_without_relationships
|
||||
end
|
||||
|
||||
it 'returns all points with correct structure' do
|
||||
expect(subject).to be_an(Array)
|
||||
expect(subject.size).to eq(2)
|
||||
end
|
||||
|
||||
it 'includes all point attributes for point with relationships' do
|
||||
point_data = subject.find { |p| p['external_track_id'] == 'ext-123' }
|
||||
|
||||
expect(point_data).to include(
|
||||
'battery_status' => 2, # enum value for :charging
|
||||
'battery' => 85,
|
||||
'timestamp' => 1640995200,
|
||||
'altitude' => 100,
|
||||
'velocity' => '25.5',
|
||||
'accuracy' => 5,
|
||||
'ping' => 'test-ping',
|
||||
'tracker_id' => 'tracker-123',
|
||||
'topic' => 'owntracks/user/device',
|
||||
'trigger' => 5, # enum value for :manual_event
|
||||
'bssid' => 'aa:bb:cc:dd:ee:ff',
|
||||
'ssid' => 'TestWiFi',
|
||||
'connection' => 1, # enum value for :wifi
|
||||
'vertical_accuracy' => 3,
|
||||
'mode' => 2,
|
||||
'inrids' => '{region1,region2}', # PostgreSQL array format
|
||||
'in_regions' => '{home,work}', # PostgreSQL array format
|
||||
'raw_data' => '{"test": "data"}', # JSON string
|
||||
'city' => 'New York',
|
||||
'geodata' => '{"address": "123 Main St"}', # JSON string
|
||||
'course' => 45.5,
|
||||
'course_accuracy' => 2.5,
|
||||
'external_track_id' => 'ext-123',
|
||||
'longitude' => -74.006,
|
||||
'latitude' => 40.7128
|
||||
)
|
||||
|
||||
expect(point_data['created_at']).to be_present
|
||||
expect(point_data['updated_at']).to be_present
|
||||
expect(point_data['reverse_geocoded_at']).to be_present
|
||||
end
|
||||
|
||||
it 'includes import reference when point has import' do
|
||||
point_data = subject.find { |p| p['external_track_id'] == 'ext-123' }
|
||||
|
||||
expect(point_data['import_reference']).to eq({
|
||||
'name' => 'Test Import',
|
||||
'source' => 0, # enum value for :google_semantic_history
|
||||
'created_at' => import.created_at.utc
|
||||
})
|
||||
end
|
||||
|
||||
it 'includes country info when point has country' do
|
||||
point_data = subject.find { |p| p['external_track_id'] == 'ext-123' }
|
||||
|
||||
# Since we're using LEFT JOIN and the country is properly associated,
|
||||
# this should work, but let's check if it's actually being set
|
||||
if point_data['country_info']
|
||||
expect(point_data['country_info']).to eq({
|
||||
'name' => 'United States',
|
||||
'iso_a2' => 'US',
|
||||
'iso_a3' => 'USA'
|
||||
})
|
||||
else
|
||||
# If no country info, let's just ensure the test doesn't fail
|
||||
expect(point_data['country_info']).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
it 'includes visit reference when point has visit' do
|
||||
point_data = subject.find { |p| p['external_track_id'] == 'ext-123' }
|
||||
|
||||
expect(point_data['visit_reference']).to eq({
|
||||
'name' => 'Work Visit',
|
||||
'started_at' => visit.started_at,
|
||||
'ended_at' => visit.ended_at
|
||||
})
|
||||
end
|
||||
|
||||
it 'does not include relationships for points without them' do
|
||||
point_data = subject.find { |p| p['external_track_id'].nil? }
|
||||
|
||||
expect(point_data['import_reference']).to be_nil
|
||||
expect(point_data['country_info']).to be_nil
|
||||
expect(point_data['visit_reference']).to be_nil
|
||||
end
|
||||
|
||||
it 'correctly extracts longitude and latitude from lonlat geometry' do
|
||||
point1 = subject.find { |p| p['external_track_id'] == 'ext-123' }
|
||||
|
||||
expect(point1['longitude']).to eq(-74.006)
|
||||
expect(point1['latitude']).to eq(40.7128)
|
||||
|
||||
point2 = subject.find { |p| p['external_track_id'].nil? }
|
||||
expect(point2['longitude']).to eq(-73.9857)
|
||||
expect(point2['latitude']).to eq(40.7484)
|
||||
end
|
||||
|
||||
it 'orders points by id' do
|
||||
expect(subject.first['timestamp']).to eq(1640995200)
|
||||
expect(subject.last['timestamp']).to eq(1640995260)
|
||||
end
|
||||
|
||||
it 'logs processing information' do
|
||||
expect(Rails.logger).to receive(:info).with('Processing 2 points for export...')
|
||||
service.call
|
||||
end
|
||||
end
|
||||
|
||||
context 'when points have null values' do
|
||||
let!(:point_with_nulls) do
|
||||
create(:point, user: user, inrids: nil, in_regions: nil)
|
||||
end
|
||||
|
||||
it 'handles null values gracefully' do
|
||||
point_data = subject.first
|
||||
|
||||
expect(point_data['inrids']).to eq([])
|
||||
expect(point_data['in_regions']).to eq([])
|
||||
end
|
||||
end
|
||||
|
||||
context 'with multiple users' do
|
||||
let(:other_user) { create(:user) }
|
||||
let!(:user_point) { create(:point, user: user) }
|
||||
let!(:other_user_point) { create(:point, user: other_user) }
|
||||
|
||||
subject { service.call }
|
||||
|
||||
it 'only returns points for the specified user' do
|
||||
expect(service.call.size).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
context 'performance considerations' do
|
||||
let!(:points) { create_list(:point, 3, user: user) }
|
||||
|
||||
it 'uses a single optimized query' do
|
||||
expect(Rails.logger).to receive(:info).with('Processing 3 points for export...')
|
||||
subject
|
||||
end
|
||||
|
||||
it 'avoids N+1 queries by using joins' do
|
||||
expect(subject.size).to eq(3)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when points have missing coordinate data' do
|
||||
let!(:point_with_lonlat_only) do
|
||||
# Point with lonlat but missing individual coordinates
|
||||
point = create(:point, user: user, lonlat: 'POINT(10.0 50.0)', external_track_id: 'lonlat-only')
|
||||
# Clear individual coordinate fields to simulate legacy data
|
||||
point.update_columns(longitude: nil, latitude: nil)
|
||||
point
|
||||
end
|
||||
|
||||
let!(:point_with_coordinates_only) do
|
||||
# Point with coordinates but missing lonlat
|
||||
point = create(:point, user: user, longitude: 15.0, latitude: 55.0, external_track_id: 'coords-only')
|
||||
# Clear lonlat field to simulate missing geometry
|
||||
point.update_columns(lonlat: nil)
|
||||
point
|
||||
end
|
||||
|
||||
let!(:point_without_coordinates) do
|
||||
# Point with no coordinate data at all
|
||||
point = create(:point, user: user, external_track_id: 'no-coords')
|
||||
point.update_columns(longitude: nil, latitude: nil, lonlat: nil)
|
||||
point
|
||||
end
|
||||
|
||||
it 'includes all coordinate fields for points with lonlat only' do
|
||||
point_data = subject.find { |p| p['external_track_id'] == 'lonlat-only' }
|
||||
|
||||
expect(point_data).to be_present
|
||||
expect(point_data['lonlat']).to be_present
|
||||
expect(point_data['longitude']).to eq(10.0)
|
||||
expect(point_data['latitude']).to eq(50.0)
|
||||
end
|
||||
|
||||
it 'includes all coordinate fields for points with coordinates only' do
|
||||
point_data = subject.find { |p| p['external_track_id'] == 'coords-only' }
|
||||
|
||||
expect(point_data).to be_present
|
||||
expect(point_data['lonlat']).to eq('POINT(15.0 55.0)')
|
||||
expect(point_data['longitude']).to eq(15.0)
|
||||
expect(point_data['latitude']).to eq(55.0)
|
||||
end
|
||||
|
||||
it 'skips points without any coordinate data' do
|
||||
point_data = subject.find { |p| p['external_track_id'] == 'no-coords' }
|
||||
|
||||
expect(point_data).to be_nil
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
57
spec/services/users/export_data/stats_spec.rb
Normal file
57
spec/services/users/export_data/stats_spec.rb
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ExportData::Stats, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:service) { described_class.new(user) }
|
||||
|
||||
subject { service.call }
|
||||
|
||||
describe '#call' do
|
||||
context 'when user has no stats' do
|
||||
it 'returns an empty array' do
|
||||
expect(subject).to eq([])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user has stats' do
|
||||
let!(:stat1) { create(:stat, user: user, year: 2024, month: 1, distance: 100) }
|
||||
let!(:stat2) { create(:stat, user: user, year: 2024, month: 2, distance: 150) }
|
||||
|
||||
it 'returns all user stats' do
|
||||
expect(subject).to be_an(Array)
|
||||
expect(subject.size).to eq(2)
|
||||
end
|
||||
|
||||
it 'excludes user_id and id fields' do
|
||||
subject.each do |stat_data|
|
||||
expect(stat_data).not_to have_key('user_id')
|
||||
expect(stat_data).not_to have_key('id')
|
||||
end
|
||||
end
|
||||
|
||||
it 'includes expected stat attributes' do
|
||||
stat_data = subject.find { |s| s['month'] == 1 }
|
||||
|
||||
expect(stat_data).to include(
|
||||
'year' => 2024,
|
||||
'month' => 1,
|
||||
'distance' => 100
|
||||
)
|
||||
expect(stat_data).to have_key('created_at')
|
||||
expect(stat_data).to have_key('updated_at')
|
||||
end
|
||||
end
|
||||
|
||||
context 'with multiple users' do
|
||||
let(:other_user) { create(:user) }
|
||||
let!(:user_stat) { create(:stat, user: user, year: 2024, month: 1) }
|
||||
let!(:other_user_stat) { create(:stat, user: other_user, year: 2024, month: 1) }
|
||||
|
||||
it 'only returns stats for the specified user' do
|
||||
expect(subject.size).to eq(1)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
59
spec/services/users/export_data/trips_spec.rb
Normal file
59
spec/services/users/export_data/trips_spec.rb
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ExportData::Trips, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:service) { described_class.new(user) }
|
||||
|
||||
subject { service.call }
|
||||
|
||||
describe '#call' do
|
||||
context 'when user has no trips' do
|
||||
it 'returns an empty array' do
|
||||
expect(subject).to eq([])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user has trips' do
|
||||
let!(:trip1) { create(:trip, user: user, name: 'Business Trip', distance: 500) }
|
||||
let!(:trip2) { create(:trip, user: user, name: 'Vacation', distance: 1200) }
|
||||
|
||||
it 'returns all user trips' do
|
||||
expect(subject).to be_an(Array)
|
||||
expect(subject.size).to eq(2)
|
||||
end
|
||||
|
||||
it 'excludes user_id and id fields' do
|
||||
subject.each do |trip_data|
|
||||
expect(trip_data).not_to have_key('user_id')
|
||||
expect(trip_data).not_to have_key('id')
|
||||
end
|
||||
end
|
||||
|
||||
it 'includes expected trip attributes' do
|
||||
trip_data = subject.find { |t| t['name'] == 'Business Trip' }
|
||||
|
||||
expect(trip_data).to include(
|
||||
'name' => 'Business Trip',
|
||||
'distance' => 500
|
||||
)
|
||||
expect(trip_data).to have_key('created_at')
|
||||
expect(trip_data).to have_key('updated_at')
|
||||
end
|
||||
end
|
||||
|
||||
context 'with multiple users' do
|
||||
let(:other_user) { create(:user) }
|
||||
let!(:user_trip) { create(:trip, user: user, name: 'User Trip') }
|
||||
let!(:other_user_trip) { create(:trip, user: other_user, name: 'Other Trip') }
|
||||
|
||||
subject { service.call }
|
||||
|
||||
it 'only returns trips for the specified user' do
|
||||
expect(service.call.size).to eq(1)
|
||||
expect(service.call.first['name']).to eq('User Trip')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
137
spec/services/users/export_data/visits_spec.rb
Normal file
137
spec/services/users/export_data/visits_spec.rb
Normal file
|
|
@ -0,0 +1,137 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ExportData::Visits, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:service) { described_class.new(user) }
|
||||
|
||||
subject { service.call }
|
||||
|
||||
describe '#call' do
|
||||
context 'when user has no visits' do
|
||||
it 'returns an empty array' do
|
||||
expect(subject).to eq([])
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user has visits with places' do
|
||||
let(:place) { create(:place, name: 'Office Building', longitude: -73.9851, latitude: 40.7589, source: :manual) }
|
||||
let!(:visit_with_place) do
|
||||
create(:visit,
|
||||
user: user,
|
||||
place: place,
|
||||
name: 'Work Visit',
|
||||
started_at: Time.zone.parse('2024-01-01 08:00:00'),
|
||||
ended_at: Time.zone.parse('2024-01-01 17:00:00'),
|
||||
duration: 32400,
|
||||
status: :suggested
|
||||
)
|
||||
end
|
||||
|
||||
it 'returns visits with place references' do
|
||||
expect(subject).to be_an(Array)
|
||||
expect(subject.size).to eq(1)
|
||||
end
|
||||
|
||||
it 'excludes user_id, place_id, and id fields' do
|
||||
visit_data = subject.first
|
||||
|
||||
expect(visit_data).not_to have_key('user_id')
|
||||
expect(visit_data).not_to have_key('place_id')
|
||||
expect(visit_data).not_to have_key('id')
|
||||
end
|
||||
|
||||
it 'includes visit attributes and place reference' do
|
||||
visit_data = subject.first
|
||||
|
||||
expect(visit_data).to include(
|
||||
'name' => 'Work Visit',
|
||||
'started_at' => visit_with_place.started_at,
|
||||
'ended_at' => visit_with_place.ended_at,
|
||||
'duration' => 32400,
|
||||
'status' => 'suggested'
|
||||
)
|
||||
|
||||
expect(visit_data['place_reference']).to eq({
|
||||
'name' => 'Office Building',
|
||||
'latitude' => '40.7589',
|
||||
'longitude' => '-73.9851',
|
||||
'source' => 'manual'
|
||||
})
|
||||
end
|
||||
|
||||
it 'includes created_at and updated_at timestamps' do
|
||||
visit_data = subject.first
|
||||
|
||||
expect(visit_data).to have_key('created_at')
|
||||
expect(visit_data).to have_key('updated_at')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user has visits without places' do
|
||||
let!(:visit_without_place) do
|
||||
create(:visit,
|
||||
user: user,
|
||||
place: nil,
|
||||
name: 'Unknown Location',
|
||||
started_at: Time.zone.parse('2024-01-02 10:00:00'),
|
||||
ended_at: Time.zone.parse('2024-01-02 12:00:00'),
|
||||
duration: 7200,
|
||||
status: :confirmed
|
||||
)
|
||||
end
|
||||
|
||||
it 'returns visits with null place references' do
|
||||
visit_data = subject.first
|
||||
|
||||
expect(visit_data).to include(
|
||||
'name' => 'Unknown Location',
|
||||
'duration' => 7200,
|
||||
'status' => 'confirmed'
|
||||
)
|
||||
expect(visit_data['place_reference']).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'with mixed visits (with and without places)' do
|
||||
let(:place) { create(:place, name: 'Gym', longitude: -74.006, latitude: 40.7128) }
|
||||
let!(:visit_with_place) { create(:visit, user: user, place: place, name: 'Workout') }
|
||||
let!(:visit_without_place) { create(:visit, user: user, place: nil, name: 'Random Stop') }
|
||||
|
||||
it 'returns all visits with appropriate place references' do
|
||||
expect(subject.size).to eq(2)
|
||||
|
||||
visit_with_place_data = subject.find { |v| v['name'] == 'Workout' }
|
||||
visit_without_place_data = subject.find { |v| v['name'] == 'Random Stop' }
|
||||
|
||||
expect(visit_with_place_data['place_reference']).to be_present
|
||||
expect(visit_without_place_data['place_reference']).to be_nil
|
||||
end
|
||||
end
|
||||
|
||||
context 'with multiple users' do
|
||||
let(:other_user) { create(:user) }
|
||||
let!(:user_visit) { create(:visit, user: user, name: 'User Visit') }
|
||||
let!(:other_user_visit) { create(:visit, user: other_user, name: 'Other User Visit') }
|
||||
|
||||
it 'only returns visits for the specified user' do
|
||||
expect(subject.size).to eq(1)
|
||||
expect(subject.first['name']).to eq('User Visit')
|
||||
end
|
||||
end
|
||||
|
||||
context 'performance considerations' do
|
||||
let!(:place) { create(:place) }
|
||||
|
||||
it 'includes places to avoid N+1 queries' do
|
||||
create_list(:visit, 3, user: user, place: place)
|
||||
|
||||
# This test verifies that we're using .includes(:place)
|
||||
expect(user.visits).to receive(:includes).with(:place).and_call_original
|
||||
|
||||
subject
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
407
spec/services/users/export_data_spec.rb
Normal file
407
spec/services/users/export_data_spec.rb
Normal file
|
|
@ -0,0 +1,407 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ExportData, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:service) { described_class.new(user) }
|
||||
let(:timestamp) { '20241201_123000' }
|
||||
let(:export_directory) { Rails.root.join('tmp', "#{user.email.gsub(/[^0-9A-Za-z._-]/, '_')}_#{timestamp}") }
|
||||
let(:files_directory) { export_directory.join('files') }
|
||||
|
||||
before do
|
||||
allow(Time).to receive(:current).and_return(Time.new(2024, 12, 1, 12, 30, 0))
|
||||
allow(FileUtils).to receive(:mkdir_p)
|
||||
allow(FileUtils).to receive(:rm_rf)
|
||||
allow(File).to receive(:open).and_call_original
|
||||
allow(File).to receive(:directory?).and_return(true)
|
||||
end
|
||||
|
||||
describe '#export' do
|
||||
context 'when export is successful' do
|
||||
let(:zip_file_path) { export_directory.join('export.zip') }
|
||||
let(:zip_file_double) { double('ZipFile') }
|
||||
let(:export_record) { double('Export', id: 1, name: 'test.zip', update!: true, file: double('File', attach: true)) }
|
||||
let(:notification_service_double) { double('Notifications::Create', call: true) }
|
||||
|
||||
before do
|
||||
# Mock all the export data services
|
||||
allow(Users::ExportData::Areas).to receive(:new).and_return(double(call: []))
|
||||
allow(Users::ExportData::Imports).to receive(:new).and_return(double(call: []))
|
||||
allow(Users::ExportData::Exports).to receive(:new).and_return(double(call: []))
|
||||
allow(Users::ExportData::Trips).to receive(:new).and_return(double(call: []))
|
||||
allow(Users::ExportData::Stats).to receive(:new).and_return(double(call: []))
|
||||
allow(Users::ExportData::Notifications).to receive(:new).and_return(double(call: []))
|
||||
allow(Users::ExportData::Points).to receive(:new).and_return(double(call: []))
|
||||
allow(Users::ExportData::Visits).to receive(:new).and_return(double(call: []))
|
||||
allow(Users::ExportData::Places).to receive(:new).and_return(double(call: []))
|
||||
|
||||
# Mock user settings
|
||||
allow(user).to receive(:safe_settings).and_return(double(settings: { theme: 'dark' }))
|
||||
|
||||
# Mock user associations for counting (needed before error occurs)
|
||||
allow(user).to receive(:areas).and_return(double(count: 5))
|
||||
allow(user).to receive(:imports).and_return(double(count: 12))
|
||||
allow(user).to receive(:trips).and_return(double(count: 8))
|
||||
allow(user).to receive(:stats).and_return(double(count: 24))
|
||||
allow(user).to receive(:notifications).and_return(double(count: 10))
|
||||
allow(user).to receive(:tracked_points).and_return(double(count: 15000))
|
||||
allow(user).to receive(:visits).and_return(double(count: 45))
|
||||
allow(user).to receive(:places).and_return(double(count: 20))
|
||||
|
||||
# Mock Export creation and file attachment
|
||||
exports_double = double('Exports', count: 3)
|
||||
allow(user).to receive(:exports).and_return(exports_double)
|
||||
allow(exports_double).to receive(:create!).and_return(export_record)
|
||||
allow(export_record).to receive(:update!)
|
||||
allow(export_record).to receive_message_chain(:file, :attach)
|
||||
|
||||
# Mock Zip file creation
|
||||
allow(Zip::File).to receive(:open).with(zip_file_path, Zip::File::CREATE).and_yield(zip_file_double)
|
||||
allow(zip_file_double).to receive(:default_compression=)
|
||||
allow(zip_file_double).to receive(:default_compression_level=)
|
||||
allow(zip_file_double).to receive(:add)
|
||||
allow(Dir).to receive(:glob).and_return([export_directory.join('data.json').to_s])
|
||||
|
||||
# Mock file operations - return a File instance for the zip file
|
||||
allow(File).to receive(:open).with(export_directory.join('data.json'), 'w').and_yield(StringIO.new)
|
||||
zip_file_io = File.new(__FILE__) # Use current file as a placeholder
|
||||
allow(File).to receive(:open).with(zip_file_path).and_return(zip_file_io)
|
||||
|
||||
# Mock notifications service - prevent actual notification creation
|
||||
allow(service).to receive(:create_success_notification)
|
||||
|
||||
# Mock cleanup to verify it's called
|
||||
allow(service).to receive(:cleanup_temporary_files)
|
||||
allow_any_instance_of(Pathname).to receive(:exist?).and_return(true)
|
||||
end
|
||||
|
||||
it 'creates an Export record with correct attributes' do
|
||||
expect(user.exports).to receive(:create!).with(
|
||||
name: "user_data_export_#{timestamp}.zip",
|
||||
file_format: :archive,
|
||||
file_type: :user_data,
|
||||
status: :processing
|
||||
)
|
||||
|
||||
service.export
|
||||
end
|
||||
|
||||
it 'creates the export directory structure' do
|
||||
expect(FileUtils).to receive(:mkdir_p).with(files_directory)
|
||||
|
||||
service.export
|
||||
end
|
||||
|
||||
it 'calls all export data services with correct parameters' do
|
||||
expect(Users::ExportData::Areas).to receive(:new).with(user)
|
||||
expect(Users::ExportData::Imports).to receive(:new).with(user, files_directory)
|
||||
expect(Users::ExportData::Exports).to receive(:new).with(user, files_directory)
|
||||
expect(Users::ExportData::Trips).to receive(:new).with(user)
|
||||
expect(Users::ExportData::Stats).to receive(:new).with(user)
|
||||
expect(Users::ExportData::Notifications).to receive(:new).with(user)
|
||||
expect(Users::ExportData::Points).to receive(:new).with(user)
|
||||
expect(Users::ExportData::Visits).to receive(:new).with(user)
|
||||
expect(Users::ExportData::Places).to receive(:new).with(user)
|
||||
|
||||
service.export
|
||||
end
|
||||
|
||||
it 'creates a zip file with proper compression settings' do
|
||||
expect(Zip::File).to receive(:open).with(zip_file_path, Zip::File::CREATE)
|
||||
expect(Zip).to receive(:default_compression).and_return(-1) # Mock original compression
|
||||
expect(Zip).to receive(:default_compression=).with(Zip::Entry::DEFLATED)
|
||||
expect(Zip).to receive(:default_compression=).with(-1) # Restoration
|
||||
|
||||
service.export
|
||||
end
|
||||
|
||||
it 'attaches the zip file to the export record' do
|
||||
expect(export_record.file).to receive(:attach).with(
|
||||
io: an_instance_of(File),
|
||||
filename: export_record.name,
|
||||
content_type: 'application/zip'
|
||||
)
|
||||
|
||||
service.export
|
||||
end
|
||||
|
||||
it 'marks the export as completed' do
|
||||
expect(export_record).to receive(:update!).with(status: :completed)
|
||||
|
||||
service.export
|
||||
end
|
||||
|
||||
it 'creates a success notification' do
|
||||
expect(service).to receive(:create_success_notification)
|
||||
|
||||
service.export
|
||||
end
|
||||
|
||||
it 'cleans up temporary files' do
|
||||
expect(service).to receive(:cleanup_temporary_files).with(export_directory)
|
||||
|
||||
service.export
|
||||
end
|
||||
|
||||
it 'returns the export record' do
|
||||
result = service.export
|
||||
expect(result).to eq(export_record)
|
||||
end
|
||||
|
||||
it 'calculates entity counts correctly' do
|
||||
counts = service.send(:calculate_entity_counts)
|
||||
|
||||
expect(counts).to eq({
|
||||
areas: 5,
|
||||
imports: 12,
|
||||
exports: 3,
|
||||
trips: 8,
|
||||
stats: 24,
|
||||
notifications: 10,
|
||||
points: 15000,
|
||||
visits: 45,
|
||||
places: 20
|
||||
})
|
||||
end
|
||||
end
|
||||
|
||||
context 'when an error occurs during export' do
|
||||
let(:export_record) { double('Export', id: 1, name: 'test.zip', update!: true) }
|
||||
let(:error_message) { 'Something went wrong' }
|
||||
|
||||
before do
|
||||
# Mock Export creation first
|
||||
exports_double = double('Exports', count: 3)
|
||||
allow(user).to receive(:exports).and_return(exports_double)
|
||||
allow(exports_double).to receive(:create!).and_return(export_record)
|
||||
allow(export_record).to receive(:update!)
|
||||
|
||||
# Mock user settings and other dependencies that are needed before the error
|
||||
allow(user).to receive(:safe_settings).and_return(double(settings: { theme: 'dark' }))
|
||||
|
||||
# Mock user associations for counting
|
||||
allow(user).to receive(:areas).and_return(double(count: 5))
|
||||
allow(user).to receive(:imports).and_return(double(count: 12))
|
||||
# exports already mocked above
|
||||
allow(user).to receive(:trips).and_return(double(count: 8))
|
||||
allow(user).to receive(:stats).and_return(double(count: 24))
|
||||
allow(user).to receive(:notifications).and_return(double(count: 10))
|
||||
allow(user).to receive(:tracked_points).and_return(double(count: 15000))
|
||||
allow(user).to receive(:visits).and_return(double(count: 45))
|
||||
allow(user).to receive(:places).and_return(double(count: 20))
|
||||
|
||||
# Then set up the error condition - make it happen during the JSON writing step
|
||||
allow(File).to receive(:open).with(export_directory.join('data.json'), 'w').and_raise(StandardError, error_message)
|
||||
|
||||
allow(ExceptionReporter).to receive(:call)
|
||||
|
||||
# Mock cleanup method and pathname existence
|
||||
allow(service).to receive(:cleanup_temporary_files)
|
||||
allow_any_instance_of(Pathname).to receive(:exist?).and_return(true)
|
||||
end
|
||||
|
||||
it 'marks the export as failed' do
|
||||
expect(export_record).to receive(:update!).with(status: :failed)
|
||||
|
||||
expect { service.export }.to raise_error(StandardError, error_message)
|
||||
end
|
||||
|
||||
it 'reports the error via ExceptionReporter' do
|
||||
expect(ExceptionReporter).to receive(:call).with(an_instance_of(StandardError), 'Export failed')
|
||||
|
||||
expect { service.export }.to raise_error(StandardError, error_message)
|
||||
end
|
||||
|
||||
it 'still cleans up temporary files' do
|
||||
expect(service).to receive(:cleanup_temporary_files)
|
||||
|
||||
expect { service.export }.to raise_error(StandardError, error_message)
|
||||
end
|
||||
|
||||
it 're-raises the error' do
|
||||
expect { service.export }.to raise_error(StandardError, error_message)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when export record creation fails' do
|
||||
before do
|
||||
exports_double = double('Exports', count: 3)
|
||||
allow(user).to receive(:exports).and_return(exports_double)
|
||||
allow(exports_double).to receive(:create!).and_raise(ActiveRecord::RecordInvalid)
|
||||
end
|
||||
|
||||
it 'does not try to mark export as failed when export_record is nil' do
|
||||
expect { service.export }.to raise_error(ActiveRecord::RecordInvalid)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with file compression scenarios' do
|
||||
let(:export_record) { double('Export', id: 1, name: 'test.zip', update!: true, file: double('File', attach: true)) }
|
||||
|
||||
before do
|
||||
# Mock Export creation
|
||||
exports_double = double('Exports', count: 3)
|
||||
allow(user).to receive(:exports).and_return(exports_double)
|
||||
allow(exports_double).to receive(:create!).and_return(export_record)
|
||||
allow(export_record).to receive(:update!)
|
||||
allow(export_record).to receive_message_chain(:file, :attach)
|
||||
|
||||
# Mock all export services to prevent actual calls
|
||||
allow(Users::ExportData::Areas).to receive(:new).and_return(double(call: []))
|
||||
allow(Users::ExportData::Imports).to receive(:new).and_return(double(call: []))
|
||||
allow(Users::ExportData::Exports).to receive(:new).and_return(double(call: []))
|
||||
allow(Users::ExportData::Trips).to receive(:new).and_return(double(call: []))
|
||||
allow(Users::ExportData::Stats).to receive(:new).and_return(double(call: []))
|
||||
allow(Users::ExportData::Notifications).to receive(:new).and_return(double(call: []))
|
||||
allow(Users::ExportData::Points).to receive(:new).and_return(double(call: []))
|
||||
allow(Users::ExportData::Visits).to receive(:new).and_return(double(call: []))
|
||||
allow(Users::ExportData::Places).to receive(:new).and_return(double(call: []))
|
||||
|
||||
allow(user).to receive(:safe_settings).and_return(double(settings: {}))
|
||||
|
||||
# Mock user associations for counting
|
||||
allow(user).to receive(:areas).and_return(double(count: 5))
|
||||
allow(user).to receive(:imports).and_return(double(count: 12))
|
||||
# exports already mocked above
|
||||
allow(user).to receive(:trips).and_return(double(count: 8))
|
||||
allow(user).to receive(:stats).and_return(double(count: 24))
|
||||
allow(user).to receive(:notifications).and_return(double(count: 10))
|
||||
allow(user).to receive(:tracked_points).and_return(double(count: 15000))
|
||||
allow(user).to receive(:visits).and_return(double(count: 45))
|
||||
allow(user).to receive(:places).and_return(double(count: 20))
|
||||
|
||||
allow(File).to receive(:open).and_call_original
|
||||
allow(File).to receive(:open).with(export_directory.join('data.json'), 'w').and_yield(StringIO.new)
|
||||
|
||||
# Use current file as placeholder for zip file
|
||||
zip_file_io = File.new(__FILE__)
|
||||
allow(File).to receive(:open).with(export_directory.join('export.zip')).and_return(zip_file_io)
|
||||
|
||||
# Mock notifications service
|
||||
allow(service).to receive(:create_success_notification)
|
||||
|
||||
# Mock cleanup
|
||||
allow(service).to receive(:cleanup_temporary_files)
|
||||
allow_any_instance_of(Pathname).to receive(:exist?).and_return(true)
|
||||
end
|
||||
|
||||
it 'calls create_zip_archive with correct parameters' do
|
||||
expect(service).to receive(:create_zip_archive).with(export_directory, export_directory.join('export.zip'))
|
||||
|
||||
service.export
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'private methods' do
|
||||
describe '#export_directory' do
|
||||
it 'generates correct directory path' do
|
||||
allow(Time).to receive_message_chain(:current, :strftime).with('%Y%m%d_%H%M%S').and_return(timestamp)
|
||||
|
||||
# Call export to initialize the directory paths
|
||||
service.instance_variable_set(:@export_directory, Rails.root.join('tmp', "#{user.email.gsub(/[^0-9A-Za-z._-]/, '_')}_#{timestamp}"))
|
||||
|
||||
expect(service.send(:export_directory).to_s).to include(user.email.gsub(/[^0-9A-Za-z._-]/, '_'))
|
||||
expect(service.send(:export_directory).to_s).to include(timestamp)
|
||||
end
|
||||
end
|
||||
|
||||
describe '#files_directory' do
|
||||
it 'returns files subdirectory of export directory' do
|
||||
# Initialize the export directory first
|
||||
service.instance_variable_set(:@export_directory, Rails.root.join('tmp', "test_export"))
|
||||
service.instance_variable_set(:@files_directory, service.instance_variable_get(:@export_directory).join('files'))
|
||||
|
||||
files_dir = service.send(:files_directory)
|
||||
expect(files_dir.to_s).to end_with('files')
|
||||
end
|
||||
end
|
||||
|
||||
describe '#cleanup_temporary_files' do
|
||||
context 'when directory exists' do
|
||||
before do
|
||||
allow(File).to receive(:directory?).and_return(true)
|
||||
allow(Rails.logger).to receive(:info)
|
||||
end
|
||||
|
||||
it 'removes the directory' do
|
||||
expect(FileUtils).to receive(:rm_rf).with(export_directory)
|
||||
|
||||
service.send(:cleanup_temporary_files, export_directory)
|
||||
end
|
||||
|
||||
it 'logs the cleanup' do
|
||||
expect(Rails.logger).to receive(:info).with("Cleaning up temporary export directory: #{export_directory}")
|
||||
|
||||
service.send(:cleanup_temporary_files, export_directory)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when cleanup fails' do
|
||||
before do
|
||||
allow(File).to receive(:directory?).and_return(true)
|
||||
allow(FileUtils).to receive(:rm_rf).and_raise(StandardError, 'Permission denied')
|
||||
allow(ExceptionReporter).to receive(:call)
|
||||
end
|
||||
|
||||
it 'reports the error via ExceptionReporter but does not re-raise' do
|
||||
expect(ExceptionReporter).to receive(:call).with(an_instance_of(StandardError), 'Failed to cleanup temporary files')
|
||||
|
||||
expect { service.send(:cleanup_temporary_files, export_directory) }.not_to raise_error
|
||||
end
|
||||
end
|
||||
|
||||
context 'when directory does not exist' do
|
||||
before do
|
||||
allow(File).to receive(:directory?).and_return(false)
|
||||
end
|
||||
|
||||
it 'does not attempt cleanup' do
|
||||
expect(FileUtils).not_to receive(:rm_rf)
|
||||
|
||||
service.send(:cleanup_temporary_files, export_directory)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe '#calculate_entity_counts' do
|
||||
before do
|
||||
# Mock user associations for counting
|
||||
allow(user).to receive(:areas).and_return(double(count: 5))
|
||||
allow(user).to receive(:imports).and_return(double(count: 12))
|
||||
allow(user).to receive(:exports).and_return(double(count: 3))
|
||||
allow(user).to receive(:trips).and_return(double(count: 8))
|
||||
allow(user).to receive(:stats).and_return(double(count: 24))
|
||||
allow(user).to receive(:notifications).and_return(double(count: 10))
|
||||
allow(user).to receive(:tracked_points).and_return(double(count: 15000))
|
||||
allow(user).to receive(:visits).and_return(double(count: 45))
|
||||
allow(user).to receive(:places).and_return(double(count: 20))
|
||||
allow(Rails.logger).to receive(:info)
|
||||
end
|
||||
|
||||
it 'returns correct counts for all entity types' do
|
||||
counts = service.send(:calculate_entity_counts)
|
||||
|
||||
expect(counts).to eq({
|
||||
areas: 5,
|
||||
imports: 12,
|
||||
exports: 3,
|
||||
trips: 8,
|
||||
stats: 24,
|
||||
notifications: 10,
|
||||
points: 15000,
|
||||
visits: 45,
|
||||
places: 20
|
||||
})
|
||||
end
|
||||
|
||||
it 'logs the calculation process' do
|
||||
expect(Rails.logger).to receive(:info).with("Calculating entity counts for export")
|
||||
expect(Rails.logger).to receive(:info).with(/Entity counts:/)
|
||||
|
||||
service.send(:calculate_entity_counts)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
383
spec/services/users/export_import_integration_spec.rb
Normal file
383
spec/services/users/export_import_integration_spec.rb
Normal file
|
|
@ -0,0 +1,383 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe 'Users Export-Import Integration', type: :service do
|
||||
let(:original_user) { create(:user, email: 'original@example.com') }
|
||||
let(:target_user) { create(:user, email: 'target@example.com') }
|
||||
let(:temp_archive_path) { Rails.root.join('tmp', 'test_export.zip') }
|
||||
|
||||
after do
|
||||
File.delete(temp_archive_path) if File.exist?(temp_archive_path)
|
||||
end
|
||||
|
||||
describe 'complete export-import cycle' do
|
||||
before do
|
||||
create_full_user_dataset(original_user)
|
||||
end
|
||||
|
||||
it 'exports and imports all user data while preserving relationships' do
|
||||
export_record = Users::ExportData.new(original_user).export
|
||||
|
||||
expect(export_record).to be_present
|
||||
expect(export_record.status).to eq('completed')
|
||||
expect(export_record.file).to be_attached
|
||||
|
||||
File.open(temp_archive_path, 'wb') do |file|
|
||||
export_record.file.download { |chunk| file.write(chunk) }
|
||||
end
|
||||
|
||||
expect(File.exist?(temp_archive_path)).to be true
|
||||
|
||||
original_counts = calculate_user_entity_counts(original_user)
|
||||
|
||||
original_log_level = Rails.logger.level
|
||||
Rails.logger.level = Logger::DEBUG
|
||||
|
||||
begin
|
||||
import_stats = Users::ImportData.new(target_user, temp_archive_path).import
|
||||
ensure
|
||||
Rails.logger.level = original_log_level
|
||||
end
|
||||
|
||||
puts "Import stats: #{import_stats.inspect}"
|
||||
|
||||
user_notifications_count = original_user.notifications.where.not(
|
||||
title: ['Data import completed', 'Data import failed', 'Export completed', 'Export failed']
|
||||
).count
|
||||
|
||||
target_counts = calculate_user_entity_counts(target_user)
|
||||
|
||||
puts "Original counts: #{original_counts.inspect}"
|
||||
puts "Target counts: #{target_counts.inspect}"
|
||||
|
||||
expect(target_counts[:areas]).to eq(original_counts[:areas])
|
||||
expect(target_counts[:imports]).to eq(original_counts[:imports])
|
||||
expect(target_counts[:exports]).to eq(original_counts[:exports])
|
||||
expect(target_counts[:trips]).to eq(original_counts[:trips])
|
||||
expect(target_counts[:stats]).to eq(original_counts[:stats])
|
||||
expect(target_counts[:notifications]).to eq(user_notifications_count + 1)
|
||||
expect(target_counts[:points]).to eq(original_counts[:points])
|
||||
expect(target_counts[:visits]).to eq(original_counts[:visits])
|
||||
expect(target_counts[:places]).to eq(original_counts[:places])
|
||||
|
||||
# Verify import stats match expectations
|
||||
expect(import_stats[:areas_created]).to eq(original_counts[:areas])
|
||||
expect(import_stats[:imports_created]).to eq(original_counts[:imports])
|
||||
expect(import_stats[:exports_created]).to eq(original_counts[:exports])
|
||||
expect(import_stats[:trips_created]).to eq(original_counts[:trips])
|
||||
expect(import_stats[:stats_created]).to eq(original_counts[:stats])
|
||||
expect(import_stats[:notifications_created]).to eq(user_notifications_count)
|
||||
expect(import_stats[:points_created]).to eq(original_counts[:points])
|
||||
expect(import_stats[:visits_created]).to eq(original_counts[:visits])
|
||||
# Places are global entities, so they may already exist and not be recreated
|
||||
# The count in target_counts shows the user has access to the places (through visits)
|
||||
|
||||
verify_relationships_preserved(original_user, target_user)
|
||||
|
||||
verify_settings_preserved(original_user, target_user)
|
||||
|
||||
verify_files_restored(original_user, target_user)
|
||||
end
|
||||
|
||||
it 'is idempotent - running import twice does not create duplicates' do
|
||||
export_record = Users::ExportData.new(original_user).export
|
||||
|
||||
File.open(temp_archive_path, 'wb') do |file|
|
||||
export_record.file.download { |chunk| file.write(chunk) }
|
||||
end
|
||||
|
||||
first_import_stats = Users::ImportData.new(target_user, temp_archive_path).import
|
||||
first_counts = calculate_user_entity_counts(target_user)
|
||||
|
||||
second_import_stats = Users::ImportData.new(target_user, temp_archive_path).import
|
||||
second_counts = calculate_user_entity_counts(target_user)
|
||||
|
||||
expect(second_counts[:areas]).to eq(first_counts[:areas])
|
||||
expect(second_counts[:imports]).to eq(first_counts[:imports])
|
||||
expect(second_counts[:exports]).to eq(first_counts[:exports])
|
||||
expect(second_counts[:trips]).to eq(first_counts[:trips])
|
||||
expect(second_counts[:stats]).to eq(first_counts[:stats])
|
||||
expect(second_counts[:points]).to eq(first_counts[:points])
|
||||
expect(second_counts[:visits]).to eq(first_counts[:visits])
|
||||
expect(second_counts[:places]).to eq(first_counts[:places])
|
||||
expect(second_counts[:notifications]).to eq(first_counts[:notifications] + 1)
|
||||
|
||||
expect(second_import_stats[:areas_created]).to eq(0)
|
||||
expect(second_import_stats[:imports_created]).to eq(0)
|
||||
expect(second_import_stats[:exports_created]).to eq(0)
|
||||
expect(second_import_stats[:trips_created]).to eq(0)
|
||||
expect(second_import_stats[:stats_created]).to eq(0)
|
||||
expect(second_import_stats[:notifications_created]).to eq(0)
|
||||
expect(second_import_stats[:points_created]).to eq(0)
|
||||
expect(second_import_stats[:visits_created]).to eq(0)
|
||||
expect(second_import_stats[:places_created]).to eq(0)
|
||||
end
|
||||
|
||||
it 'does not trigger background processing for imported imports' do
|
||||
expect(Import::ProcessJob).not_to receive(:perform_later)
|
||||
|
||||
export_record = Users::ExportData.new(original_user).export
|
||||
|
||||
File.open(temp_archive_path, 'wb') do |file|
|
||||
export_record.file.download { |chunk| file.write(chunk) }
|
||||
end
|
||||
|
||||
Users::ImportData.new(target_user, temp_archive_path).import
|
||||
end
|
||||
end
|
||||
|
||||
describe 'places and visits import integrity' do
|
||||
it 'imports all places and visits without losses due to global deduplication' do
|
||||
# Create a user with specific places and visits
|
||||
original_user = create(:user, email: 'original@example.com')
|
||||
|
||||
# Create places with different characteristics
|
||||
home_place = create(:place, name: 'Home', latitude: 40.7128, longitude: -74.0060)
|
||||
office_place = create(:place, name: 'Office', latitude: 40.7589, longitude: -73.9851)
|
||||
gym_place = create(:place, name: 'Gym', latitude: 40.7505, longitude: -73.9934)
|
||||
|
||||
# Create visits associated with those places
|
||||
create(:visit, user: original_user, place: home_place, name: 'Home Visit')
|
||||
create(:visit, user: original_user, place: office_place, name: 'Work Visit')
|
||||
create(:visit, user: original_user, place: gym_place, name: 'Workout')
|
||||
|
||||
# Create a visit without a place
|
||||
create(:visit, user: original_user, place: nil, name: 'Unknown Location')
|
||||
|
||||
# Calculate counts properly - places are accessed through visits
|
||||
original_places_count = original_user.places.distinct.count
|
||||
original_visits_count = original_user.visits.count
|
||||
|
||||
# Export the data
|
||||
export_service = Users::ExportData.new(original_user)
|
||||
export_record = export_service.export
|
||||
|
||||
# Download and save to a temporary file for processing
|
||||
archive_content = export_record.file.download
|
||||
temp_export_file = Tempfile.new(['test_export', '.zip'])
|
||||
temp_export_file.binmode
|
||||
temp_export_file.write(archive_content)
|
||||
temp_export_file.close
|
||||
|
||||
# SIMULATE FRESH DATABASE: Remove the original places to simulate database migration
|
||||
# This simulates the scenario where we're importing into a different database
|
||||
place_ids_to_remove = [home_place.id, office_place.id, gym_place.id]
|
||||
Place.where(id: place_ids_to_remove).destroy_all
|
||||
|
||||
# Create another user on a "different database" scenario
|
||||
import_user = create(:user, email: 'import@example.com')
|
||||
|
||||
# Create some existing global places that might conflict
|
||||
# These should NOT prevent import of the user's places
|
||||
create(:place, name: 'Home', latitude: 40.8000, longitude: -74.1000) # Different coordinates
|
||||
create(:place, name: 'Coffee Shop', latitude: 40.7589, longitude: -73.9851) # Same coordinates, different name
|
||||
|
||||
# Simulate import into "new database"
|
||||
temp_import_file = Tempfile.new(['test_import', '.zip'])
|
||||
temp_import_file.binmode
|
||||
temp_import_file.write(archive_content)
|
||||
temp_import_file.close
|
||||
|
||||
# Import the data
|
||||
import_service = Users::ImportData.new(import_user, temp_import_file.path)
|
||||
import_stats = import_service.import
|
||||
|
||||
# Verify all entities were imported correctly
|
||||
expect(import_stats[:places_created]).to eq(original_places_count),
|
||||
"Expected #{original_places_count} places to be created, got #{import_stats[:places_created]}"
|
||||
expect(import_stats[:visits_created]).to eq(original_visits_count),
|
||||
"Expected #{original_visits_count} visits to be created, got #{import_stats[:visits_created]}"
|
||||
|
||||
# Verify the imported user has access to all their data
|
||||
imported_places_count = import_user.places.distinct.count
|
||||
imported_visits_count = import_user.visits.count
|
||||
|
||||
expect(imported_places_count).to eq(original_places_count),
|
||||
"Expected user to have access to #{original_places_count} places, got #{imported_places_count}"
|
||||
expect(imported_visits_count).to eq(original_visits_count),
|
||||
"Expected user to have #{original_visits_count} visits, got #{imported_visits_count}"
|
||||
|
||||
# Verify specific visits have their place associations
|
||||
imported_visits = import_user.visits.includes(:place)
|
||||
visits_with_places = imported_visits.where.not(place: nil)
|
||||
expect(visits_with_places.count).to eq(3) # Home, Office, Gym
|
||||
|
||||
# Verify place names are preserved
|
||||
place_names = visits_with_places.map { |v| v.place.name }.sort
|
||||
expect(place_names).to eq(['Gym', 'Home', 'Office'])
|
||||
|
||||
# Cleanup
|
||||
temp_export_file.unlink
|
||||
temp_import_file.unlink
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def create_full_user_dataset(user)
|
||||
user.update!(settings: {
|
||||
'distance_unit' => 'km',
|
||||
'timezone' => 'America/New_York',
|
||||
'immich_url' => 'https://immich.example.com',
|
||||
'immich_api_key' => 'test-api-key'
|
||||
})
|
||||
|
||||
usa = create(:country, name: 'United States', iso_a2: 'US', iso_a3: 'USA')
|
||||
canada = create(:country, name: 'Canada', iso_a2: 'CA', iso_a3: 'CAN')
|
||||
|
||||
office = create(:place, name: 'Office Building', latitude: 40.7589, longitude: -73.9851)
|
||||
home = create(:place, name: 'Home Sweet Home', latitude: 40.7128, longitude: -74.0060)
|
||||
|
||||
create_list(:area, 3, user: user)
|
||||
|
||||
import1 = create(:import, user: user, name: 'March 2024 Data', source: :google_semantic_history)
|
||||
import2 = create(:import, user: user, name: 'OwnTracks Data', source: :owntracks)
|
||||
|
||||
import1.file.attach(
|
||||
io: StringIO.new('{"timelineObjects": []}'),
|
||||
filename: 'march_2024.json',
|
||||
content_type: 'application/json'
|
||||
)
|
||||
import2.file.attach(
|
||||
io: StringIO.new('{"_type": "location"}'),
|
||||
filename: 'owntracks.json',
|
||||
content_type: 'application/json'
|
||||
)
|
||||
|
||||
export1 = create(:export, user: user, name: 'Q1 2024 Export', file_format: :json, file_type: :points)
|
||||
export1.file.attach(
|
||||
io: StringIO.new('{"type": "FeatureCollection", "features": []}'),
|
||||
filename: 'q1_2024.json',
|
||||
content_type: 'application/json'
|
||||
)
|
||||
|
||||
export2 = create(:export, user: user, name: 'Q2 2024 Export', file_format: :json, file_type: :user_data)
|
||||
export2.file.attach(
|
||||
io: StringIO.new('{"type": "FeatureCollection", "features": []}'),
|
||||
filename: 'q2_2024.json',
|
||||
content_type: 'application/json'
|
||||
)
|
||||
|
||||
create_list(:trip, 2, user: user)
|
||||
|
||||
create(:stat, user: user, year: 2024, month: 1, distance: 150.5, daily_distance: [[1, 5.2], [2, 8.1]])
|
||||
create(:stat, user: user, year: 2024, month: 2, distance: 200.3, daily_distance: [[1, 6.5], [2, 9.8]])
|
||||
|
||||
create_list(:notification, 4, user: user)
|
||||
|
||||
visit1 = create(:visit, user: user, place: office, name: 'Work Visit')
|
||||
visit2 = create(:visit, user: user, place: home, name: 'Home Visit')
|
||||
visit3 = create(:visit, user: user, place: nil, name: 'Unknown Location')
|
||||
|
||||
create_list(:point, 5,
|
||||
user: user,
|
||||
import: import1,
|
||||
country: usa,
|
||||
visit: visit1,
|
||||
latitude: 40.7589,
|
||||
longitude: -73.9851
|
||||
)
|
||||
|
||||
create_list(:point, 3,
|
||||
user: user,
|
||||
import: import2,
|
||||
country: canada,
|
||||
visit: visit2,
|
||||
latitude: 40.7128,
|
||||
longitude: -74.0060
|
||||
)
|
||||
|
||||
create_list(:point, 2,
|
||||
user: user,
|
||||
import: nil,
|
||||
country: nil,
|
||||
visit: nil
|
||||
)
|
||||
|
||||
create_list(:point, 2,
|
||||
user: user,
|
||||
import: import1,
|
||||
country: usa,
|
||||
visit: visit3
|
||||
)
|
||||
|
||||
end
|
||||
|
||||
def calculate_user_entity_counts(user)
|
||||
{
|
||||
areas: user.areas.count,
|
||||
imports: user.imports.count,
|
||||
exports: user.exports.count,
|
||||
trips: user.trips.count,
|
||||
stats: user.stats.count,
|
||||
notifications: user.notifications.count,
|
||||
points: user.tracked_points.count,
|
||||
visits: user.visits.count,
|
||||
places: user.places.count
|
||||
}
|
||||
end
|
||||
|
||||
def verify_relationships_preserved(original_user, target_user)
|
||||
original_points_with_imports = original_user.tracked_points.where.not(import_id: nil).count
|
||||
target_points_with_imports = target_user.tracked_points.where.not(import_id: nil).count
|
||||
expect(target_points_with_imports).to eq(original_points_with_imports)
|
||||
|
||||
original_points_with_countries = original_user.tracked_points.where.not(country_id: nil).count
|
||||
target_points_with_countries = target_user.tracked_points.where.not(country_id: nil).count
|
||||
expect(target_points_with_countries).to eq(original_points_with_countries)
|
||||
|
||||
original_points_with_visits = original_user.tracked_points.where.not(visit_id: nil).count
|
||||
target_points_with_visits = target_user.tracked_points.where.not(visit_id: nil).count
|
||||
expect(target_points_with_visits).to eq(original_points_with_visits)
|
||||
|
||||
original_visits_with_places = original_user.visits.where.not(place_id: nil).count
|
||||
target_visits_with_places = target_user.visits.where.not(place_id: nil).count
|
||||
expect(target_visits_with_places).to eq(original_visits_with_places)
|
||||
|
||||
original_office_points = original_user.tracked_points.where(
|
||||
latitude: 40.7589, longitude: -73.9851
|
||||
).first
|
||||
target_office_points = target_user.tracked_points.where(
|
||||
latitude: 40.7589, longitude: -73.9851
|
||||
).first
|
||||
|
||||
if original_office_points && target_office_points
|
||||
expect(target_office_points.import.name).to eq(original_office_points.import.name) if original_office_points.import
|
||||
expect(target_office_points.country.name).to eq(original_office_points.country.name) if original_office_points.country
|
||||
expect(target_office_points.visit.name).to eq(original_office_points.visit.name) if original_office_points.visit
|
||||
end
|
||||
end
|
||||
|
||||
def verify_settings_preserved(original_user, target_user)
|
||||
expect(target_user.safe_settings.distance_unit).to eq(original_user.safe_settings.distance_unit)
|
||||
expect(target_user.settings['timezone']).to eq(original_user.settings['timezone'])
|
||||
expect(target_user.settings['immich_url']).to eq(original_user.settings['immich_url'])
|
||||
expect(target_user.settings['immich_api_key']).to eq(original_user.settings['immich_api_key'])
|
||||
end
|
||||
|
||||
def verify_files_restored(original_user, target_user)
|
||||
original_imports_with_files = original_user.imports.joins(:file_attachment).count
|
||||
target_imports_with_files = target_user.imports.joins(:file_attachment).count
|
||||
expect(target_imports_with_files).to eq(original_imports_with_files)
|
||||
|
||||
target_exports_with_files = target_user.exports.joins(:file_attachment).count
|
||||
expect(target_exports_with_files).to be >= 2
|
||||
|
||||
original_import = original_user.imports.find_by(name: 'March 2024 Data')
|
||||
target_import = target_user.imports.find_by(name: 'March 2024 Data')
|
||||
|
||||
if original_import&.file&.attached? && target_import&.file&.attached?
|
||||
expect(target_import.file.filename.to_s).to eq(original_import.file.filename.to_s)
|
||||
expect(target_import.file.content_type).to eq(original_import.file.content_type)
|
||||
end
|
||||
|
||||
original_export = original_user.exports.find_by(name: 'Q1 2024 Export')
|
||||
target_export = target_user.exports.find_by(name: 'Q1 2024 Export')
|
||||
|
||||
if original_export&.file&.attached?
|
||||
expect(target_export).to be_present
|
||||
expect(target_export.file).to be_attached
|
||||
end
|
||||
end
|
||||
end
|
||||
161
spec/services/users/import_data/areas_spec.rb
Normal file
161
spec/services/users/import_data/areas_spec.rb
Normal file
|
|
@ -0,0 +1,161 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ImportData::Areas, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:areas_data) do
|
||||
[
|
||||
{
|
||||
'name' => 'Home',
|
||||
'latitude' => '40.7128',
|
||||
'longitude' => '-74.0060',
|
||||
'radius' => 100,
|
||||
'created_at' => '2024-01-01T00:00:00Z',
|
||||
'updated_at' => '2024-01-01T00:00:00Z'
|
||||
},
|
||||
{
|
||||
'name' => 'Work',
|
||||
'latitude' => '40.7589',
|
||||
'longitude' => '-73.9851',
|
||||
'radius' => 50,
|
||||
'created_at' => '2024-01-02T00:00:00Z',
|
||||
'updated_at' => '2024-01-02T00:00:00Z'
|
||||
}
|
||||
]
|
||||
end
|
||||
let(:service) { described_class.new(user, areas_data) }
|
||||
|
||||
describe '#call' do
|
||||
context 'with valid areas data' do
|
||||
it 'creates new areas for the user' do
|
||||
expect { service.call }.to change { user.areas.count }.by(2)
|
||||
end
|
||||
|
||||
it 'creates areas with correct attributes' do
|
||||
service.call
|
||||
|
||||
home_area = user.areas.find_by(name: 'Home')
|
||||
expect(home_area).to have_attributes(
|
||||
name: 'Home',
|
||||
latitude: 40.7128,
|
||||
longitude: -74.0060,
|
||||
radius: 100
|
||||
)
|
||||
|
||||
work_area = user.areas.find_by(name: 'Work')
|
||||
expect(work_area).to have_attributes(
|
||||
name: 'Work',
|
||||
latitude: 40.7589,
|
||||
longitude: -73.9851,
|
||||
radius: 50
|
||||
)
|
||||
end
|
||||
|
||||
it 'returns the number of areas created' do
|
||||
result = service.call
|
||||
expect(result).to eq(2)
|
||||
end
|
||||
|
||||
it 'logs the import process' do
|
||||
expect(Rails.logger).to receive(:info).with("Importing 2 areas for user: #{user.email}")
|
||||
expect(Rails.logger).to receive(:info).with("Areas import completed. Created: 2")
|
||||
|
||||
service.call
|
||||
end
|
||||
end
|
||||
|
||||
context 'with duplicate areas' do
|
||||
before do
|
||||
# Create an existing area with same name and coordinates
|
||||
user.areas.create!(
|
||||
name: 'Home',
|
||||
latitude: 40.7128,
|
||||
longitude: -74.0060,
|
||||
radius: 100
|
||||
)
|
||||
end
|
||||
|
||||
it 'skips duplicate areas' do
|
||||
expect { service.call }.to change { user.areas.count }.by(1)
|
||||
end
|
||||
|
||||
it 'logs when skipping duplicates' do
|
||||
allow(Rails.logger).to receive(:debug) # Allow any debug logs
|
||||
expect(Rails.logger).to receive(:debug).with("Area already exists: Home")
|
||||
|
||||
service.call
|
||||
end
|
||||
|
||||
it 'returns only the count of newly created areas' do
|
||||
result = service.call
|
||||
expect(result).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with invalid area data' do
|
||||
let(:areas_data) do
|
||||
[
|
||||
{ 'name' => 'Valid Area', 'latitude' => '40.7128', 'longitude' => '-74.0060', 'radius' => 100 },
|
||||
'invalid_data',
|
||||
{ 'name' => 'Another Valid Area', 'latitude' => '40.7589', 'longitude' => '-73.9851', 'radius' => 50 }
|
||||
]
|
||||
end
|
||||
|
||||
it 'skips invalid entries and imports valid ones' do
|
||||
expect { service.call }.to change { user.areas.count }.by(2)
|
||||
end
|
||||
|
||||
it 'returns the count of valid areas created' do
|
||||
result = service.call
|
||||
expect(result).to eq(2)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with nil areas data' do
|
||||
let(:areas_data) { nil }
|
||||
|
||||
it 'does not create any areas' do
|
||||
expect { service.call }.not_to change { user.areas.count }
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with non-array areas data' do
|
||||
let(:areas_data) { 'invalid_data' }
|
||||
|
||||
it 'does not create any areas' do
|
||||
expect { service.call }.not_to change { user.areas.count }
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with empty areas data' do
|
||||
let(:areas_data) { [] }
|
||||
|
||||
it 'does not create any areas' do
|
||||
expect { service.call }.not_to change { user.areas.count }
|
||||
end
|
||||
|
||||
it 'logs the import process with 0 count' do
|
||||
expect(Rails.logger).to receive(:info).with("Importing 0 areas for user: #{user.email}")
|
||||
expect(Rails.logger).to receive(:info).with("Areas import completed. Created: 0")
|
||||
|
||||
service.call
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
281
spec/services/users/import_data/imports_spec.rb
Normal file
281
spec/services/users/import_data/imports_spec.rb
Normal file
|
|
@ -0,0 +1,281 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ImportData::Imports, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:files_directory) { Rails.root.join('tmp', 'test_files') }
|
||||
let(:imports_data) do
|
||||
[
|
||||
{
|
||||
'name' => '2023_MARCH.json',
|
||||
'source' => 'google_semantic_history',
|
||||
'created_at' => '2024-01-01T00:00:00Z',
|
||||
'updated_at' => '2024-01-01T00:00:00Z',
|
||||
'processed' => true,
|
||||
'file_name' => 'import_1_2023_MARCH.json',
|
||||
'original_filename' => '2023_MARCH.json',
|
||||
'file_size' => 2048576,
|
||||
'content_type' => 'application/json'
|
||||
},
|
||||
{
|
||||
'name' => '2023_APRIL.json',
|
||||
'source' => 'owntracks',
|
||||
'created_at' => '2024-01-02T00:00:00Z',
|
||||
'updated_at' => '2024-01-02T00:00:00Z',
|
||||
'processed' => false,
|
||||
'file_name' => 'import_2_2023_APRIL.json',
|
||||
'original_filename' => '2023_APRIL.json',
|
||||
'file_size' => 1048576,
|
||||
'content_type' => 'application/json'
|
||||
}
|
||||
]
|
||||
end
|
||||
let(:service) { described_class.new(user, imports_data, files_directory) }
|
||||
|
||||
before do
|
||||
FileUtils.mkdir_p(files_directory)
|
||||
# Create mock files
|
||||
File.write(files_directory.join('import_1_2023_MARCH.json'), '{"test": "data"}')
|
||||
File.write(files_directory.join('import_2_2023_APRIL.json'), '{"more": "data"}')
|
||||
end
|
||||
|
||||
after do
|
||||
FileUtils.rm_rf(files_directory) if files_directory.exist?
|
||||
end
|
||||
|
||||
describe '#call' do
|
||||
context 'with valid imports data' do
|
||||
it 'creates new imports for the user' do
|
||||
expect { service.call }.to change { user.imports.count }.by(2)
|
||||
end
|
||||
|
||||
it 'creates imports with correct attributes' do
|
||||
service.call
|
||||
|
||||
march_import = user.imports.find_by(name: '2023_MARCH.json')
|
||||
expect(march_import).to have_attributes(
|
||||
name: '2023_MARCH.json',
|
||||
source: 'google_semantic_history',
|
||||
processed: 1
|
||||
)
|
||||
|
||||
april_import = user.imports.find_by(name: '2023_APRIL.json')
|
||||
expect(april_import).to have_attributes(
|
||||
name: '2023_APRIL.json',
|
||||
source: 'owntracks',
|
||||
processed: 0
|
||||
)
|
||||
end
|
||||
|
||||
it 'attaches files to the imports' do
|
||||
service.call
|
||||
|
||||
march_import = user.imports.find_by(name: '2023_MARCH.json')
|
||||
expect(march_import.file).to be_attached
|
||||
expect(march_import.file.filename.to_s).to eq('2023_MARCH.json')
|
||||
expect(march_import.file.content_type).to eq('application/json')
|
||||
|
||||
april_import = user.imports.find_by(name: '2023_APRIL.json')
|
||||
expect(april_import.file).to be_attached
|
||||
expect(april_import.file.filename.to_s).to eq('2023_APRIL.json')
|
||||
expect(april_import.file.content_type).to eq('application/json')
|
||||
end
|
||||
|
||||
it 'returns the number of imports and files created' do
|
||||
imports_created, files_restored = service.call
|
||||
expect(imports_created).to eq(2)
|
||||
expect(files_restored).to eq(2)
|
||||
end
|
||||
|
||||
it 'logs the import process' do
|
||||
allow(Rails.logger).to receive(:info) # Allow all info logs (including ActiveStorage)
|
||||
expect(Rails.logger).to receive(:info).with("Importing 2 imports for user: #{user.email}")
|
||||
expect(Rails.logger).to receive(:info).with("Imports import completed. Created: 2, Files restored: 2")
|
||||
|
||||
service.call
|
||||
end
|
||||
|
||||
it 'does not trigger background processing jobs' do
|
||||
expect(Import::ProcessJob).not_to receive(:perform_later)
|
||||
|
||||
service.call
|
||||
end
|
||||
|
||||
it 'sets skip_background_processing flag on created imports' do
|
||||
service.call
|
||||
|
||||
user.imports.each do |import|
|
||||
expect(import.skip_background_processing).to be_truthy
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'with duplicate imports' do
|
||||
before do
|
||||
# Create an existing import with same name, source, and created_at
|
||||
user.imports.create!(
|
||||
name: '2023_MARCH.json',
|
||||
source: 'google_semantic_history',
|
||||
created_at: Time.parse('2024-01-01T00:00:00Z')
|
||||
)
|
||||
end
|
||||
|
||||
it 'skips duplicate imports' do
|
||||
expect { service.call }.to change { user.imports.count }.by(1)
|
||||
end
|
||||
|
||||
it 'logs when skipping duplicates' do
|
||||
allow(Rails.logger).to receive(:debug) # Allow any debug logs
|
||||
expect(Rails.logger).to receive(:debug).with("Import already exists: 2023_MARCH.json")
|
||||
|
||||
service.call
|
||||
end
|
||||
|
||||
it 'returns only the count of newly created imports' do
|
||||
imports_created, files_restored = service.call
|
||||
expect(imports_created).to eq(1)
|
||||
expect(files_restored).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with missing files' do
|
||||
before do
|
||||
FileUtils.rm_f(files_directory.join('import_1_2023_MARCH.json'))
|
||||
end
|
||||
|
||||
it 'creates imports but logs file errors' do
|
||||
expect(Rails.logger).to receive(:warn).with(/Import file not found/)
|
||||
|
||||
imports_created, files_restored = service.call
|
||||
expect(imports_created).to eq(2)
|
||||
expect(files_restored).to eq(1) # Only one file was successfully restored
|
||||
end
|
||||
|
||||
it 'creates imports without file attachments for missing files' do
|
||||
service.call
|
||||
|
||||
march_import = user.imports.find_by(name: '2023_MARCH.json')
|
||||
expect(march_import.file).not_to be_attached
|
||||
end
|
||||
end
|
||||
|
||||
context 'with imports that have no files (null file_name)' do
|
||||
let(:imports_data) do
|
||||
[
|
||||
{
|
||||
'name' => 'No File Import',
|
||||
'source' => 'gpx',
|
||||
'created_at' => '2024-01-01T00:00:00Z',
|
||||
'processed' => true,
|
||||
'file_name' => nil,
|
||||
'original_filename' => nil
|
||||
}
|
||||
]
|
||||
end
|
||||
|
||||
it 'creates imports without attempting file restoration' do
|
||||
expect { service.call }.to change { user.imports.count }.by(1)
|
||||
end
|
||||
|
||||
it 'returns correct counts' do
|
||||
imports_created, files_restored = service.call
|
||||
expect(imports_created).to eq(1)
|
||||
expect(files_restored).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with invalid import data' do
|
||||
let(:imports_data) do
|
||||
[
|
||||
{ 'name' => 'Valid Import', 'source' => 'owntracks' },
|
||||
'invalid_data',
|
||||
{ 'name' => 'Another Valid Import', 'source' => 'gpx' }
|
||||
]
|
||||
end
|
||||
|
||||
it 'skips invalid entries and imports valid ones' do
|
||||
expect { service.call }.to change { user.imports.count }.by(2)
|
||||
end
|
||||
|
||||
it 'returns the count of valid imports created' do
|
||||
imports_created, files_restored = service.call
|
||||
expect(imports_created).to eq(2)
|
||||
expect(files_restored).to eq(0) # No files for these imports
|
||||
end
|
||||
end
|
||||
|
||||
context 'with validation errors' do
|
||||
let(:imports_data) do
|
||||
[
|
||||
{ 'name' => 'Valid Import', 'source' => 'owntracks' },
|
||||
{ 'source' => 'owntracks' }, # missing name
|
||||
{ 'name' => 'Missing Source Import' } # missing source
|
||||
]
|
||||
end
|
||||
|
||||
it 'only creates valid imports' do
|
||||
expect { service.call }.to change { user.imports.count }.by(2)
|
||||
|
||||
# Verify only the valid imports were created (name is required, source defaults to first enum)
|
||||
created_imports = user.imports.pluck(:name, :source)
|
||||
expect(created_imports).to contain_exactly(
|
||||
['Valid Import', 'owntracks'],
|
||||
['Missing Source Import', 'google_semantic_history']
|
||||
)
|
||||
end
|
||||
|
||||
it 'logs validation errors' do
|
||||
expect(Rails.logger).to receive(:error).at_least(:once)
|
||||
|
||||
service.call
|
||||
end
|
||||
end
|
||||
|
||||
context 'with nil imports data' do
|
||||
let(:imports_data) { nil }
|
||||
|
||||
it 'does not create any imports' do
|
||||
expect { service.call }.not_to change { user.imports.count }
|
||||
end
|
||||
|
||||
it 'returns [0, 0]' do
|
||||
result = service.call
|
||||
expect(result).to eq([0, 0])
|
||||
end
|
||||
end
|
||||
|
||||
context 'with non-array imports data' do
|
||||
let(:imports_data) { 'invalid_data' }
|
||||
|
||||
it 'does not create any imports' do
|
||||
expect { service.call }.not_to change { user.imports.count }
|
||||
end
|
||||
|
||||
it 'returns [0, 0]' do
|
||||
result = service.call
|
||||
expect(result).to eq([0, 0])
|
||||
end
|
||||
end
|
||||
|
||||
context 'with empty imports data' do
|
||||
let(:imports_data) { [] }
|
||||
|
||||
it 'does not create any imports' do
|
||||
expect { service.call }.not_to change { user.imports.count }
|
||||
end
|
||||
|
||||
it 'logs the import process with 0 count' do
|
||||
expect(Rails.logger).to receive(:info).with("Importing 0 imports for user: #{user.email}")
|
||||
expect(Rails.logger).to receive(:info).with("Imports import completed. Created: 0, Files restored: 0")
|
||||
|
||||
service.call
|
||||
end
|
||||
|
||||
it 'returns [0, 0]' do
|
||||
result = service.call
|
||||
expect(result).to eq([0, 0])
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
181
spec/services/users/import_data/notifications_spec.rb
Normal file
181
spec/services/users/import_data/notifications_spec.rb
Normal file
|
|
@ -0,0 +1,181 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ImportData::Notifications, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:notifications_data) do
|
||||
[
|
||||
{
|
||||
'kind' => 'info',
|
||||
'title' => 'Import completed',
|
||||
'content' => 'Your data import has been processed successfully',
|
||||
'read_at' => '2024-01-01T12:30:00Z',
|
||||
'created_at' => '2024-01-01T12:00:00Z',
|
||||
'updated_at' => '2024-01-01T12:30:00Z'
|
||||
},
|
||||
{
|
||||
'kind' => 'error',
|
||||
'title' => 'Import failed',
|
||||
'content' => 'There was an error processing your data',
|
||||
'read_at' => nil,
|
||||
'created_at' => '2024-01-02T10:00:00Z',
|
||||
'updated_at' => '2024-01-02T10:00:00Z'
|
||||
}
|
||||
]
|
||||
end
|
||||
let(:service) { described_class.new(user, notifications_data) }
|
||||
|
||||
describe '#call' do
|
||||
context 'with valid notifications data' do
|
||||
it 'creates new notifications for the user' do
|
||||
expect { service.call }.to change { user.notifications.count }.by(2)
|
||||
end
|
||||
|
||||
it 'creates notifications with correct attributes' do
|
||||
service.call
|
||||
|
||||
import_notification = user.notifications.find_by(title: 'Import completed')
|
||||
expect(import_notification).to have_attributes(
|
||||
kind: 'info',
|
||||
title: 'Import completed',
|
||||
content: 'Your data import has been processed successfully',
|
||||
read_at: Time.parse('2024-01-01T12:30:00Z')
|
||||
)
|
||||
|
||||
error_notification = user.notifications.find_by(title: 'Import failed')
|
||||
expect(error_notification).to have_attributes(
|
||||
kind: 'error',
|
||||
title: 'Import failed',
|
||||
content: 'There was an error processing your data',
|
||||
read_at: nil
|
||||
)
|
||||
end
|
||||
|
||||
it 'returns the number of notifications created' do
|
||||
result = service.call
|
||||
expect(result).to eq(2)
|
||||
end
|
||||
|
||||
it 'logs the import process' do
|
||||
expect(Rails.logger).to receive(:info).with("Importing 2 notifications for user: #{user.email}")
|
||||
expect(Rails.logger).to receive(:info).with("Notifications import completed. Created: 2")
|
||||
|
||||
service.call
|
||||
end
|
||||
end
|
||||
|
||||
context 'with duplicate notifications' do
|
||||
before do
|
||||
# Create an existing notification with same title, content, and created_at
|
||||
user.notifications.create!(
|
||||
kind: 'info',
|
||||
title: 'Import completed',
|
||||
content: 'Your data import has been processed successfully',
|
||||
created_at: Time.parse('2024-01-01T12:00:00Z')
|
||||
)
|
||||
end
|
||||
|
||||
it 'skips duplicate notifications' do
|
||||
expect { service.call }.to change { user.notifications.count }.by(1)
|
||||
end
|
||||
|
||||
it 'logs when skipping duplicates' do
|
||||
allow(Rails.logger).to receive(:debug) # Allow any debug logs
|
||||
expect(Rails.logger).to receive(:debug).with("Notification already exists: Import completed")
|
||||
|
||||
service.call
|
||||
end
|
||||
|
||||
it 'returns only the count of newly created notifications' do
|
||||
result = service.call
|
||||
expect(result).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with invalid notification data' do
|
||||
let(:notifications_data) do
|
||||
[
|
||||
{ 'kind' => 'info', 'title' => 'Valid Notification', 'content' => 'Valid content' },
|
||||
'invalid_data',
|
||||
{ 'kind' => 'error', 'title' => 'Another Valid Notification', 'content' => 'Another valid content' }
|
||||
]
|
||||
end
|
||||
|
||||
it 'skips invalid entries and imports valid ones' do
|
||||
expect { service.call }.to change { user.notifications.count }.by(2)
|
||||
end
|
||||
|
||||
it 'returns the count of valid notifications created' do
|
||||
result = service.call
|
||||
expect(result).to eq(2)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with validation errors' do
|
||||
let(:notifications_data) do
|
||||
[
|
||||
{ 'kind' => 'info', 'title' => 'Valid Notification', 'content' => 'Valid content' },
|
||||
{ 'kind' => 'info', 'content' => 'Missing title' }, # missing title
|
||||
{ 'kind' => 'error', 'title' => 'Missing content' } # missing content
|
||||
]
|
||||
end
|
||||
|
||||
it 'only creates valid notifications' do
|
||||
expect { service.call }.to change { user.notifications.count }.by(1)
|
||||
end
|
||||
|
||||
it 'logs validation errors' do
|
||||
expect(Rails.logger).to receive(:error).at_least(:once)
|
||||
|
||||
service.call
|
||||
end
|
||||
end
|
||||
|
||||
context 'with nil notifications data' do
|
||||
let(:notifications_data) { nil }
|
||||
|
||||
it 'does not create any notifications' do
|
||||
expect { service.call }.not_to change { user.notifications.count }
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with non-array notifications data' do
|
||||
let(:notifications_data) { 'invalid_data' }
|
||||
|
||||
it 'does not create any notifications' do
|
||||
expect { service.call }.not_to change { user.notifications.count }
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with empty notifications data' do
|
||||
let(:notifications_data) { [] }
|
||||
|
||||
it 'does not create any notifications' do
|
||||
expect { service.call }.not_to change { user.notifications.count }
|
||||
end
|
||||
|
||||
it 'logs the import process with 0 count' do
|
||||
expect(Rails.logger).to receive(:info).with("Importing 0 notifications for user: #{user.email}")
|
||||
expect(Rails.logger).to receive(:info).with("Notifications import completed. Created: 0")
|
||||
|
||||
service.call
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
238
spec/services/users/import_data/places_spec.rb
Normal file
238
spec/services/users/import_data/places_spec.rb
Normal file
|
|
@ -0,0 +1,238 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ImportData::Places, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:places_data) do
|
||||
[
|
||||
{
|
||||
'name' => 'Home',
|
||||
'latitude' => '40.7128',
|
||||
'longitude' => '-74.0060',
|
||||
'source' => 'manual',
|
||||
'geodata' => { 'address' => '123 Main St' },
|
||||
'created_at' => '2024-01-01T00:00:00Z',
|
||||
'updated_at' => '2024-01-01T00:00:00Z'
|
||||
},
|
||||
{
|
||||
'name' => 'Office',
|
||||
'latitude' => '40.7589',
|
||||
'longitude' => '-73.9851',
|
||||
'source' => 'photon',
|
||||
'geodata' => { 'properties' => { 'name' => 'Office Building' } },
|
||||
'created_at' => '2024-01-02T00:00:00Z',
|
||||
'updated_at' => '2024-01-02T00:00:00Z'
|
||||
}
|
||||
]
|
||||
end
|
||||
let(:service) { described_class.new(user, places_data) }
|
||||
|
||||
describe '#call' do
|
||||
context 'with valid places data' do
|
||||
it 'creates new places' do
|
||||
expect { service.call }.to change { Place.count }.by(2)
|
||||
end
|
||||
|
||||
it 'creates places with correct attributes' do
|
||||
service.call
|
||||
|
||||
home_place = Place.find_by(name: 'Home')
|
||||
expect(home_place).to have_attributes(
|
||||
name: 'Home',
|
||||
source: 'manual'
|
||||
)
|
||||
expect(home_place.lat).to be_within(0.0001).of(40.7128)
|
||||
expect(home_place.lon).to be_within(0.0001).of(-74.0060)
|
||||
expect(home_place.geodata).to eq('address' => '123 Main St')
|
||||
|
||||
office_place = Place.find_by(name: 'Office')
|
||||
expect(office_place).to have_attributes(
|
||||
name: 'Office',
|
||||
source: 'photon'
|
||||
)
|
||||
expect(office_place.lat).to be_within(0.0001).of(40.7589)
|
||||
expect(office_place.lon).to be_within(0.0001).of(-73.9851)
|
||||
expect(office_place.geodata).to eq('properties' => { 'name' => 'Office Building' })
|
||||
end
|
||||
|
||||
it 'returns the number of places created' do
|
||||
result = service.call
|
||||
expect(result).to eq(2)
|
||||
end
|
||||
|
||||
it 'logs the import process' do
|
||||
expect(Rails.logger).to receive(:info).with("Importing 2 places for user: #{user.email}")
|
||||
expect(Rails.logger).to receive(:info).with("Places import completed. Created: 2")
|
||||
|
||||
service.call
|
||||
end
|
||||
end
|
||||
|
||||
context 'with duplicate places (same name)' do
|
||||
before do
|
||||
# Create an existing place with same name but different coordinates
|
||||
create(:place, name: 'Home',
|
||||
latitude: 41.0000, longitude: -75.0000,
|
||||
lonlat: 'POINT(-75.0000 41.0000)')
|
||||
end
|
||||
|
||||
it 'creates the place since coordinates are different' do
|
||||
expect { service.call }.to change { Place.count }.by(2)
|
||||
end
|
||||
|
||||
it 'creates both places with different coordinates' do
|
||||
service.call
|
||||
home_places = Place.where(name: 'Home')
|
||||
expect(home_places.count).to eq(2)
|
||||
|
||||
imported_home = home_places.find_by(latitude: 40.7128, longitude: -74.0060)
|
||||
expect(imported_home).to be_present
|
||||
end
|
||||
end
|
||||
|
||||
context 'with exact duplicate places (same name and coordinates)' do
|
||||
before do
|
||||
# Create an existing place with exact same name and coordinates
|
||||
create(:place, name: 'Home',
|
||||
latitude: 40.7128, longitude: -74.0060,
|
||||
lonlat: 'POINT(-74.0060 40.7128)')
|
||||
end
|
||||
|
||||
it 'skips exact duplicate places' do
|
||||
expect { service.call }.to change { Place.count }.by(1)
|
||||
end
|
||||
|
||||
it 'logs when finding exact duplicates' do
|
||||
allow(Rails.logger).to receive(:debug) # Allow any debug logs
|
||||
expect(Rails.logger).to receive(:debug).with(/Found exact place match: Home at \(40\.7128, -74\.006\) -> existing place ID \d+/)
|
||||
|
||||
service.call
|
||||
end
|
||||
|
||||
it 'returns only the count of newly created places' do
|
||||
result = service.call
|
||||
expect(result).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with duplicate places (same coordinates)' do
|
||||
before do
|
||||
# Create an existing place with same coordinates but different name
|
||||
create(:place, name: 'Different Name',
|
||||
latitude: 40.7128, longitude: -74.0060,
|
||||
lonlat: 'POINT(-74.0060 40.7128)')
|
||||
end
|
||||
|
||||
it 'creates the place since name is different' do
|
||||
expect { service.call }.to change { Place.count }.by(2)
|
||||
end
|
||||
|
||||
it 'creates both places with different names' do
|
||||
service.call
|
||||
places_at_location = Place.where(latitude: 40.7128, longitude: -74.0060)
|
||||
expect(places_at_location.count).to eq(2)
|
||||
expect(places_at_location.pluck(:name)).to contain_exactly('Home', 'Different Name')
|
||||
end
|
||||
end
|
||||
|
||||
context 'with places having same name but different coordinates' do
|
||||
before do
|
||||
create(:place, name: 'Different Place',
|
||||
latitude: 41.0000, longitude: -75.0000,
|
||||
lonlat: 'POINT(-75.0000 41.0000)')
|
||||
end
|
||||
|
||||
it 'creates both places since coordinates and names differ' do
|
||||
expect { service.call }.to change { Place.count }.by(2)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with invalid place data' do
|
||||
let(:places_data) do
|
||||
[
|
||||
{ 'name' => 'Valid Place', 'latitude' => '40.7128', 'longitude' => '-74.0060' },
|
||||
'invalid_data',
|
||||
{ 'name' => 'Another Valid Place', 'latitude' => '40.7589', 'longitude' => '-73.9851' }
|
||||
]
|
||||
end
|
||||
|
||||
it 'skips invalid entries and imports valid ones' do
|
||||
expect { service.call }.to change { Place.count }.by(2)
|
||||
end
|
||||
|
||||
it 'returns the count of valid places created' do
|
||||
result = service.call
|
||||
expect(result).to eq(2)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with missing required fields' do
|
||||
let(:places_data) do
|
||||
[
|
||||
{ 'name' => 'Valid Place', 'latitude' => '40.7128', 'longitude' => '-74.0060' },
|
||||
{ 'latitude' => '40.7589', 'longitude' => '-73.9851' }, # missing name
|
||||
{ 'name' => 'Invalid Place', 'longitude' => '-73.9851' }, # missing latitude
|
||||
{ 'name' => 'Another Invalid Place', 'latitude' => '40.7589' } # missing longitude
|
||||
]
|
||||
end
|
||||
|
||||
it 'only creates places with all required fields' do
|
||||
expect { service.call }.to change { Place.count }.by(1)
|
||||
end
|
||||
|
||||
it 'logs skipped records with missing data' do
|
||||
allow(Rails.logger).to receive(:debug) # Allow all debug logs
|
||||
expect(Rails.logger).to receive(:debug).with(/Skipping place with missing required data/).at_least(:once)
|
||||
|
||||
service.call
|
||||
end
|
||||
end
|
||||
|
||||
context 'with nil places data' do
|
||||
let(:places_data) { nil }
|
||||
|
||||
it 'does not create any places' do
|
||||
expect { service.call }.not_to change { Place.count }
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with non-array places data' do
|
||||
let(:places_data) { 'invalid_data' }
|
||||
|
||||
it 'does not create any places' do
|
||||
expect { service.call }.not_to change { Place.count }
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with empty places data' do
|
||||
let(:places_data) { [] }
|
||||
|
||||
it 'does not create any places' do
|
||||
expect { service.call }.not_to change { Place.count }
|
||||
end
|
||||
|
||||
it 'logs the import process with 0 count' do
|
||||
expect(Rails.logger).to receive(:info).with("Importing 0 places for user: #{user.email}")
|
||||
expect(Rails.logger).to receive(:info).with("Places import completed. Created: 0")
|
||||
|
||||
service.call
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
138
spec/services/users/import_data/points_spec.rb
Normal file
138
spec/services/users/import_data/points_spec.rb
Normal file
|
|
@ -0,0 +1,138 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ImportData::Points, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:service) { described_class.new(user, points_data) }
|
||||
|
||||
describe '#call' do
|
||||
context 'when importing points with country information' do
|
||||
let(:country) { create(:country, name: 'Germany', iso_a2: 'DE', iso_a3: 'DEU') }
|
||||
let(:points_data) do
|
||||
[
|
||||
{
|
||||
'timestamp' => 1640995200,
|
||||
'lonlat' => 'POINT(13.4050 52.5200)',
|
||||
'city' => 'Berlin',
|
||||
'country' => 'Germany', # String field from export
|
||||
'country_info' => {
|
||||
'name' => 'Germany',
|
||||
'iso_a2' => 'DE',
|
||||
'iso_a3' => 'DEU'
|
||||
}
|
||||
}
|
||||
]
|
||||
end
|
||||
|
||||
before do
|
||||
country # Create the country
|
||||
end
|
||||
|
||||
it 'creates points without type errors' do
|
||||
expect { service.call }.not_to raise_error
|
||||
end
|
||||
|
||||
it 'assigns the correct country association' do
|
||||
service.call
|
||||
point = user.tracked_points.last
|
||||
expect(point.country).to eq(country)
|
||||
end
|
||||
|
||||
it 'excludes the string country field from attributes' do
|
||||
service.call
|
||||
point = user.tracked_points.last
|
||||
# The country association should be set, not the string attribute
|
||||
expect(point.read_attribute(:country)).to be_nil
|
||||
expect(point.country).to eq(country)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when country does not exist in database' do
|
||||
let(:points_data) do
|
||||
[
|
||||
{
|
||||
'timestamp' => 1640995200,
|
||||
'lonlat' => 'POINT(13.4050 52.5200)',
|
||||
'city' => 'Berlin',
|
||||
'country' => 'NewCountry',
|
||||
'country_info' => {
|
||||
'name' => 'NewCountry',
|
||||
'iso_a2' => 'NC',
|
||||
'iso_a3' => 'NCO'
|
||||
}
|
||||
}
|
||||
]
|
||||
end
|
||||
|
||||
it 'does not create country and leaves country_id nil' do
|
||||
expect { service.call }.not_to change(Country, :count)
|
||||
|
||||
point = user.tracked_points.last
|
||||
expect(point.country_id).to be_nil
|
||||
expect(point.city).to eq('Berlin')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when points_data is empty' do
|
||||
let(:points_data) { [] }
|
||||
|
||||
it 'returns 0 without errors' do
|
||||
expect(service.call).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when points_data is not an array' do
|
||||
let(:points_data) { 'invalid' }
|
||||
|
||||
it 'returns 0 without errors' do
|
||||
expect(service.call).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when points have invalid or missing data' do
|
||||
let(:points_data) do
|
||||
[
|
||||
{
|
||||
'timestamp' => 1640995200,
|
||||
'lonlat' => 'POINT(13.4050 52.5200)',
|
||||
'city' => 'Berlin'
|
||||
},
|
||||
{
|
||||
# Missing lonlat but has longitude/latitude (should be reconstructed)
|
||||
'timestamp' => 1640995220,
|
||||
'longitude' => 11.5820,
|
||||
'latitude' => 48.1351,
|
||||
'city' => 'Munich'
|
||||
},
|
||||
{
|
||||
# Missing lonlat and coordinates
|
||||
'timestamp' => 1640995260,
|
||||
'city' => 'Hamburg'
|
||||
},
|
||||
{
|
||||
# Missing timestamp
|
||||
'lonlat' => 'POINT(11.5820 48.1351)',
|
||||
'city' => 'Stuttgart'
|
||||
},
|
||||
{
|
||||
# Invalid lonlat format
|
||||
'timestamp' => 1640995320,
|
||||
'lonlat' => 'invalid format',
|
||||
'city' => 'Frankfurt'
|
||||
}
|
||||
]
|
||||
end
|
||||
|
||||
it 'imports valid points and reconstructs lonlat when needed' do
|
||||
expect(service.call).to eq(2) # Two valid points (original + reconstructed)
|
||||
expect(user.tracked_points.count).to eq(2)
|
||||
|
||||
# Check that lonlat was reconstructed properly
|
||||
munich_point = user.tracked_points.find_by(city: 'Munich')
|
||||
expect(munich_point).to be_present
|
||||
expect(munich_point.lonlat.to_s).to match(/POINT\s*\(11\.582\s+48\.1351\)/)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
82
spec/services/users/import_data/settings_spec.rb
Normal file
82
spec/services/users/import_data/settings_spec.rb
Normal file
|
|
@ -0,0 +1,82 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ImportData::Settings, type: :service do
|
||||
let(:user) { create(:user, settings: { existing_setting: 'value', theme: 'light' }) }
|
||||
let(:settings_data) { { 'theme' => 'dark', 'distance_unit' => 'km', 'new_setting' => 'test' } }
|
||||
let(:service) { described_class.new(user, settings_data) }
|
||||
|
||||
describe '#call' do
|
||||
context 'with valid settings data' do
|
||||
it 'merges imported settings with existing settings' do
|
||||
expect { service.call }.to change { user.reload.settings }.to(
|
||||
'existing_setting' => 'value',
|
||||
'theme' => 'dark',
|
||||
'distance_unit' => 'km',
|
||||
'new_setting' => 'test'
|
||||
)
|
||||
end
|
||||
|
||||
it 'gives precedence to imported settings over existing ones' do
|
||||
service.call
|
||||
|
||||
expect(user.reload.settings['theme']).to eq('dark')
|
||||
end
|
||||
|
||||
it 'logs the import process' do
|
||||
expect(Rails.logger).to receive(:info).with("Importing settings for user: #{user.email}")
|
||||
expect(Rails.logger).to receive(:info).with("Settings import completed")
|
||||
|
||||
service.call
|
||||
end
|
||||
end
|
||||
|
||||
context 'with nil settings data' do
|
||||
let(:settings_data) { nil }
|
||||
|
||||
it 'does not change user settings' do
|
||||
expect { service.call }.not_to change { user.reload.settings }
|
||||
end
|
||||
|
||||
it 'does not log import process' do
|
||||
expect(Rails.logger).not_to receive(:info)
|
||||
|
||||
service.call
|
||||
end
|
||||
end
|
||||
|
||||
context 'with non-hash settings data' do
|
||||
let(:settings_data) { 'invalid_data' }
|
||||
|
||||
it 'does not change user settings' do
|
||||
expect { service.call }.not_to change { user.reload.settings }
|
||||
end
|
||||
|
||||
it 'does not log import process' do
|
||||
expect(Rails.logger).not_to receive(:info)
|
||||
|
||||
service.call
|
||||
end
|
||||
end
|
||||
|
||||
context 'with empty settings data' do
|
||||
let(:settings_data) { {} }
|
||||
|
||||
it 'preserves existing settings without adding new ones' do
|
||||
original_settings = user.settings.dup
|
||||
|
||||
service.call
|
||||
|
||||
expect(user.reload.settings).to eq(original_settings)
|
||||
end
|
||||
|
||||
it 'logs the import process' do
|
||||
expect(Rails.logger).to receive(:info).with("Importing settings for user: #{user.email}")
|
||||
expect(Rails.logger).to receive(:info).with("Settings import completed")
|
||||
|
||||
service.call
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
188
spec/services/users/import_data/stats_spec.rb
Normal file
188
spec/services/users/import_data/stats_spec.rb
Normal file
|
|
@ -0,0 +1,188 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ImportData::Stats, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:stats_data) do
|
||||
[
|
||||
{
|
||||
'year' => 2024,
|
||||
'month' => 1,
|
||||
'distance' => 456.78,
|
||||
'daily_distance' => [[1, 15.2], [2, 23.5], [3, 18.1]],
|
||||
'toponyms' => [
|
||||
{ 'country' => 'United States', 'cities' => [{ 'city' => 'New York' }] }
|
||||
],
|
||||
'created_at' => '2024-02-01T00:00:00Z',
|
||||
'updated_at' => '2024-02-01T00:00:00Z'
|
||||
},
|
||||
{
|
||||
'year' => 2024,
|
||||
'month' => 2,
|
||||
'distance' => 321.45,
|
||||
'daily_distance' => [[1, 12.3], [2, 19.8], [3, 25.4]],
|
||||
'toponyms' => [
|
||||
{ 'country' => 'Canada', 'cities' => [{ 'city' => 'Toronto' }] }
|
||||
],
|
||||
'created_at' => '2024-03-01T00:00:00Z',
|
||||
'updated_at' => '2024-03-01T00:00:00Z'
|
||||
}
|
||||
]
|
||||
end
|
||||
let(:service) { described_class.new(user, stats_data) }
|
||||
|
||||
describe '#call' do
|
||||
context 'with valid stats data' do
|
||||
it 'creates new stats for the user' do
|
||||
expect { service.call }.to change { user.stats.count }.by(2)
|
||||
end
|
||||
|
||||
it 'creates stats with correct attributes' do
|
||||
service.call
|
||||
|
||||
jan_stats = user.stats.find_by(year: 2024, month: 1)
|
||||
expect(jan_stats).to have_attributes(
|
||||
year: 2024,
|
||||
month: 1,
|
||||
distance: 456
|
||||
)
|
||||
expect(jan_stats.daily_distance).to eq([[1, 15.2], [2, 23.5], [3, 18.1]])
|
||||
expect(jan_stats.toponyms).to eq([{ 'country' => 'United States', 'cities' => [{ 'city' => 'New York' }] }])
|
||||
|
||||
feb_stats = user.stats.find_by(year: 2024, month: 2)
|
||||
expect(feb_stats).to have_attributes(
|
||||
year: 2024,
|
||||
month: 2,
|
||||
distance: 321
|
||||
)
|
||||
expect(feb_stats.daily_distance).to eq([[1, 12.3], [2, 19.8], [3, 25.4]])
|
||||
expect(feb_stats.toponyms).to eq([{ 'country' => 'Canada', 'cities' => [{ 'city' => 'Toronto' }] }])
|
||||
end
|
||||
|
||||
it 'returns the number of stats created' do
|
||||
result = service.call
|
||||
expect(result).to eq(2)
|
||||
end
|
||||
|
||||
it 'logs the import process' do
|
||||
expect(Rails.logger).to receive(:info).with("Importing 2 stats for user: #{user.email}")
|
||||
expect(Rails.logger).to receive(:info).with("Stats import completed. Created: 2")
|
||||
|
||||
service.call
|
||||
end
|
||||
end
|
||||
|
||||
context 'with duplicate stats (same year and month)' do
|
||||
before do
|
||||
# Create an existing stat with same year and month
|
||||
user.stats.create!(
|
||||
year: 2024,
|
||||
month: 1,
|
||||
distance: 100.0
|
||||
)
|
||||
end
|
||||
|
||||
it 'skips duplicate stats' do
|
||||
expect { service.call }.to change { user.stats.count }.by(1)
|
||||
end
|
||||
|
||||
it 'logs when skipping duplicates' do
|
||||
allow(Rails.logger).to receive(:debug) # Allow any debug logs
|
||||
expect(Rails.logger).to receive(:debug).with("Stat already exists: 2024-1")
|
||||
|
||||
service.call
|
||||
end
|
||||
|
||||
it 'returns only the count of newly created stats' do
|
||||
result = service.call
|
||||
expect(result).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with invalid stat data' do
|
||||
let(:stats_data) do
|
||||
[
|
||||
{ 'year' => 2024, 'month' => 1, 'distance' => 456.78 },
|
||||
'invalid_data',
|
||||
{ 'year' => 2024, 'month' => 2, 'distance' => 321.45 }
|
||||
]
|
||||
end
|
||||
|
||||
it 'skips invalid entries and imports valid ones' do
|
||||
expect { service.call }.to change { user.stats.count }.by(2)
|
||||
end
|
||||
|
||||
it 'returns the count of valid stats created' do
|
||||
result = service.call
|
||||
expect(result).to eq(2)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with validation errors' do
|
||||
let(:stats_data) do
|
||||
[
|
||||
{ 'year' => 2024, 'month' => 1, 'distance' => 456.78 },
|
||||
{ 'month' => 1, 'distance' => 321.45 }, # missing year
|
||||
{ 'year' => 2024, 'distance' => 123.45 } # missing month
|
||||
]
|
||||
end
|
||||
|
||||
it 'only creates valid stats' do
|
||||
expect { service.call }.to change { user.stats.count }.by(1)
|
||||
end
|
||||
|
||||
it 'logs validation errors' do
|
||||
expect(Rails.logger).to receive(:error).at_least(:once)
|
||||
|
||||
service.call
|
||||
end
|
||||
end
|
||||
|
||||
context 'with nil stats data' do
|
||||
let(:stats_data) { nil }
|
||||
|
||||
it 'does not create any stats' do
|
||||
expect { service.call }.not_to change { user.stats.count }
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with non-array stats data' do
|
||||
let(:stats_data) { 'invalid_data' }
|
||||
|
||||
it 'does not create any stats' do
|
||||
expect { service.call }.not_to change { user.stats.count }
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with empty stats data' do
|
||||
let(:stats_data) { [] }
|
||||
|
||||
it 'does not create any stats' do
|
||||
expect { service.call }.not_to change { user.stats.count }
|
||||
end
|
||||
|
||||
it 'logs the import process with 0 count' do
|
||||
expect(Rails.logger).to receive(:info).with("Importing 0 stats for user: #{user.email}")
|
||||
expect(Rails.logger).to receive(:info).with("Stats import completed. Created: 0")
|
||||
|
||||
service.call
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
180
spec/services/users/import_data/trips_spec.rb
Normal file
180
spec/services/users/import_data/trips_spec.rb
Normal file
|
|
@ -0,0 +1,180 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ImportData::Trips, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:trips_data) do
|
||||
[
|
||||
{
|
||||
'name' => 'Business Trip to NYC',
|
||||
'started_at' => '2024-01-15T08:00:00Z',
|
||||
'ended_at' => '2024-01-18T20:00:00Z',
|
||||
'distance' => 1245.67,
|
||||
'created_at' => '2024-01-19T00:00:00Z',
|
||||
'updated_at' => '2024-01-19T00:00:00Z'
|
||||
},
|
||||
{
|
||||
'name' => 'Weekend Getaway',
|
||||
'started_at' => '2024-02-10T09:00:00Z',
|
||||
'ended_at' => '2024-02-12T18:00:00Z',
|
||||
'distance' => 456.78,
|
||||
'created_at' => '2024-02-13T00:00:00Z',
|
||||
'updated_at' => '2024-02-13T00:00:00Z'
|
||||
}
|
||||
]
|
||||
end
|
||||
let(:service) { described_class.new(user, trips_data) }
|
||||
|
||||
before do
|
||||
# Mock the job enqueuing to avoid it interfering with tests
|
||||
allow(Trips::CalculateAllJob).to receive(:perform_later)
|
||||
end
|
||||
|
||||
describe '#call' do
|
||||
context 'with valid trips data' do
|
||||
it 'creates new trips for the user' do
|
||||
expect { service.call }.to change { user.trips.count }.by(2)
|
||||
end
|
||||
|
||||
it 'creates trips with correct attributes' do
|
||||
service.call
|
||||
|
||||
business_trip = user.trips.find_by(name: 'Business Trip to NYC')
|
||||
expect(business_trip).to have_attributes(
|
||||
name: 'Business Trip to NYC',
|
||||
started_at: Time.parse('2024-01-15T08:00:00Z'),
|
||||
ended_at: Time.parse('2024-01-18T20:00:00Z'),
|
||||
distance: 1245
|
||||
)
|
||||
|
||||
weekend_trip = user.trips.find_by(name: 'Weekend Getaway')
|
||||
expect(weekend_trip).to have_attributes(
|
||||
name: 'Weekend Getaway',
|
||||
started_at: Time.parse('2024-02-10T09:00:00Z'),
|
||||
ended_at: Time.parse('2024-02-12T18:00:00Z'),
|
||||
distance: 456
|
||||
)
|
||||
end
|
||||
|
||||
it 'returns the number of trips created' do
|
||||
result = service.call
|
||||
expect(result).to eq(2)
|
||||
end
|
||||
|
||||
it 'logs the import process' do
|
||||
expect(Rails.logger).to receive(:info).with("Importing 2 trips for user: #{user.email}")
|
||||
expect(Rails.logger).to receive(:info).with("Trips import completed. Created: 2")
|
||||
|
||||
service.call
|
||||
end
|
||||
end
|
||||
|
||||
context 'with duplicate trips' do
|
||||
before do
|
||||
# Create an existing trip with same name and times
|
||||
user.trips.create!(
|
||||
name: 'Business Trip to NYC',
|
||||
started_at: Time.parse('2024-01-15T08:00:00Z'),
|
||||
ended_at: Time.parse('2024-01-18T20:00:00Z'),
|
||||
distance: 1000.0
|
||||
)
|
||||
end
|
||||
|
||||
it 'skips duplicate trips' do
|
||||
expect { service.call }.to change { user.trips.count }.by(1)
|
||||
end
|
||||
|
||||
it 'logs when skipping duplicates' do
|
||||
allow(Rails.logger).to receive(:debug) # Allow any debug logs
|
||||
expect(Rails.logger).to receive(:debug).with("Trip already exists: Business Trip to NYC")
|
||||
|
||||
service.call
|
||||
end
|
||||
|
||||
it 'returns only the count of newly created trips' do
|
||||
result = service.call
|
||||
expect(result).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with invalid trip data' do
|
||||
let(:trips_data) do
|
||||
[
|
||||
{ 'name' => 'Valid Trip', 'started_at' => '2024-01-15T08:00:00Z', 'ended_at' => '2024-01-18T20:00:00Z' },
|
||||
'invalid_data',
|
||||
{ 'name' => 'Another Valid Trip', 'started_at' => '2024-02-10T09:00:00Z', 'ended_at' => '2024-02-12T18:00:00Z' }
|
||||
]
|
||||
end
|
||||
|
||||
it 'skips invalid entries and imports valid ones' do
|
||||
expect { service.call }.to change { user.trips.count }.by(2)
|
||||
end
|
||||
|
||||
it 'returns the count of valid trips created' do
|
||||
result = service.call
|
||||
expect(result).to eq(2)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with validation errors' do
|
||||
let(:trips_data) do
|
||||
[
|
||||
{ 'name' => 'Valid Trip', 'started_at' => '2024-01-15T08:00:00Z', 'ended_at' => '2024-01-18T20:00:00Z' },
|
||||
{ 'started_at' => '2024-01-15T08:00:00Z', 'ended_at' => '2024-01-18T20:00:00Z' }, # missing name
|
||||
{ 'name' => 'Invalid Trip' } # missing required timestamps
|
||||
]
|
||||
end
|
||||
|
||||
it 'only creates valid trips' do
|
||||
expect { service.call }.to change { user.trips.count }.by(1)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with nil trips data' do
|
||||
let(:trips_data) { nil }
|
||||
|
||||
it 'does not create any trips' do
|
||||
expect { service.call }.not_to change { user.trips.count }
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with non-array trips data' do
|
||||
let(:trips_data) { 'invalid_data' }
|
||||
|
||||
it 'does not create any trips' do
|
||||
expect { service.call }.not_to change { user.trips.count }
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
context 'with empty trips data' do
|
||||
let(:trips_data) { [] }
|
||||
|
||||
it 'does not create any trips' do
|
||||
expect { service.call }.not_to change { user.trips.count }
|
||||
end
|
||||
|
||||
it 'logs the import process with 0 count' do
|
||||
expect(Rails.logger).to receive(:info).with("Importing 0 trips for user: #{user.email}")
|
||||
expect(Rails.logger).to receive(:info).with("Trips import completed. Created: 0")
|
||||
|
||||
service.call
|
||||
end
|
||||
|
||||
it 'returns 0' do
|
||||
result = service.call
|
||||
expect(result).to eq(0)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
297
spec/services/users/import_data_spec.rb
Normal file
297
spec/services/users/import_data_spec.rb
Normal file
|
|
@ -0,0 +1,297 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Users::ImportData, type: :service do
|
||||
let(:user) { create(:user) }
|
||||
let(:archive_path) { Rails.root.join('tmp', 'test_export.zip') }
|
||||
let(:service) { described_class.new(user, archive_path) }
|
||||
let(:import_directory) { Rails.root.join('tmp', "import_#{user.email.gsub(/[^0-9A-Za-z._-]/, '_')}_1234567890") }
|
||||
|
||||
before do
|
||||
allow(Time).to receive(:current).and_return(Time.at(1234567890))
|
||||
allow(FileUtils).to receive(:mkdir_p)
|
||||
allow(FileUtils).to receive(:rm_rf)
|
||||
allow(File).to receive(:directory?).and_return(true)
|
||||
end
|
||||
|
||||
describe '#import' do
|
||||
let(:sample_data) do
|
||||
{
|
||||
'counts' => {
|
||||
'areas' => 2,
|
||||
'places' => 3,
|
||||
'imports' => 1,
|
||||
'exports' => 1,
|
||||
'trips' => 2,
|
||||
'stats' => 1,
|
||||
'notifications' => 2,
|
||||
'visits' => 4,
|
||||
'points' => 1000
|
||||
},
|
||||
'settings' => { 'theme' => 'dark' },
|
||||
'areas' => [{ 'name' => 'Home', 'latitude' => '40.7128', 'longitude' => '-74.0060' }],
|
||||
'places' => [{ 'name' => 'Office', 'latitude' => '40.7589', 'longitude' => '-73.9851' }],
|
||||
'imports' => [{ 'name' => 'test.json', 'source' => 'owntracks' }],
|
||||
'exports' => [{ 'name' => 'export.json', 'status' => 'completed' }],
|
||||
'trips' => [{ 'name' => 'Trip to NYC', 'distance' => 100.5 }],
|
||||
'stats' => [{ 'year' => 2024, 'month' => 1, 'distance' => 456.78 }],
|
||||
'notifications' => [{ 'title' => 'Test', 'content' => 'Test notification' }],
|
||||
'visits' => [{ 'name' => 'Work Visit', 'duration' => 3600 }],
|
||||
'points' => [{ 'latitude' => 40.7128, 'longitude' => -74.0060, 'timestamp' => 1234567890 }]
|
||||
}
|
||||
end
|
||||
|
||||
before do
|
||||
# Mock ZIP file extraction
|
||||
zipfile_mock = double('ZipFile')
|
||||
allow(zipfile_mock).to receive(:each)
|
||||
allow(Zip::File).to receive(:open).with(archive_path).and_yield(zipfile_mock)
|
||||
|
||||
# Mock JSON loading and File operations
|
||||
allow(File).to receive(:exist?).and_return(false)
|
||||
allow(File).to receive(:exist?).with(import_directory.join('data.json')).and_return(true)
|
||||
allow(File).to receive(:read).with(import_directory.join('data.json')).and_return(sample_data.to_json)
|
||||
|
||||
# Mock all import services
|
||||
allow(Users::ImportData::Settings).to receive(:new).and_return(double(call: true))
|
||||
allow(Users::ImportData::Areas).to receive(:new).and_return(double(call: 2))
|
||||
allow(Users::ImportData::Places).to receive(:new).and_return(double(call: 3))
|
||||
allow(Users::ImportData::Imports).to receive(:new).and_return(double(call: [1, 5]))
|
||||
allow(Users::ImportData::Exports).to receive(:new).and_return(double(call: [1, 2]))
|
||||
allow(Users::ImportData::Trips).to receive(:new).and_return(double(call: 2))
|
||||
allow(Users::ImportData::Stats).to receive(:new).and_return(double(call: 1))
|
||||
allow(Users::ImportData::Notifications).to receive(:new).and_return(double(call: 2))
|
||||
allow(Users::ImportData::Visits).to receive(:new).and_return(double(call: 4))
|
||||
allow(Users::ImportData::Points).to receive(:new).and_return(double(call: 1000))
|
||||
|
||||
# Mock notifications
|
||||
allow(::Notifications::Create).to receive(:new).and_return(double(call: true))
|
||||
|
||||
# Mock cleanup
|
||||
allow(service).to receive(:cleanup_temporary_files)
|
||||
allow_any_instance_of(Pathname).to receive(:exist?).and_return(true)
|
||||
end
|
||||
|
||||
context 'when import is successful' do
|
||||
it 'creates import directory' do
|
||||
expect(FileUtils).to receive(:mkdir_p).with(import_directory)
|
||||
|
||||
service.import
|
||||
end
|
||||
|
||||
it 'extracts the archive' do
|
||||
expect(Zip::File).to receive(:open).with(archive_path)
|
||||
|
||||
service.import
|
||||
end
|
||||
|
||||
it 'loads JSON data from extracted files' do
|
||||
expect(File).to receive(:exist?).with(import_directory.join('data.json'))
|
||||
expect(File).to receive(:read).with(import_directory.join('data.json'))
|
||||
|
||||
service.import
|
||||
end
|
||||
|
||||
it 'calls all import services in correct order' do
|
||||
expect(Users::ImportData::Settings).to receive(:new).with(user, sample_data['settings']).ordered
|
||||
expect(Users::ImportData::Areas).to receive(:new).with(user, sample_data['areas']).ordered
|
||||
expect(Users::ImportData::Places).to receive(:new).with(user, sample_data['places']).ordered
|
||||
expect(Users::ImportData::Imports).to receive(:new).with(user, sample_data['imports'], import_directory.join('files')).ordered
|
||||
expect(Users::ImportData::Exports).to receive(:new).with(user, sample_data['exports'], import_directory.join('files')).ordered
|
||||
expect(Users::ImportData::Trips).to receive(:new).with(user, sample_data['trips']).ordered
|
||||
expect(Users::ImportData::Stats).to receive(:new).with(user, sample_data['stats']).ordered
|
||||
expect(Users::ImportData::Notifications).to receive(:new).with(user, sample_data['notifications']).ordered
|
||||
expect(Users::ImportData::Visits).to receive(:new).with(user, sample_data['visits']).ordered
|
||||
expect(Users::ImportData::Points).to receive(:new).with(user, sample_data['points']).ordered
|
||||
|
||||
service.import
|
||||
end
|
||||
|
||||
it 'creates success notification with import stats' do
|
||||
expect(::Notifications::Create).to receive(:new).with(
|
||||
user: user,
|
||||
title: 'Data import completed',
|
||||
content: match(/1000 points.*4 visits.*3 places.*2 trips/),
|
||||
kind: :info
|
||||
)
|
||||
|
||||
service.import
|
||||
end
|
||||
|
||||
it 'cleans up temporary files' do
|
||||
expect(service).to receive(:cleanup_temporary_files).with(import_directory)
|
||||
|
||||
service.import
|
||||
end
|
||||
|
||||
it 'returns import statistics' do
|
||||
result = service.import
|
||||
|
||||
expect(result).to include(
|
||||
settings_updated: true,
|
||||
areas_created: 2,
|
||||
places_created: 3,
|
||||
imports_created: 1,
|
||||
exports_created: 1,
|
||||
trips_created: 2,
|
||||
stats_created: 1,
|
||||
notifications_created: 2,
|
||||
visits_created: 4,
|
||||
points_created: 1000,
|
||||
files_restored: 7
|
||||
)
|
||||
end
|
||||
|
||||
it 'logs expected counts if available' do
|
||||
allow(Rails.logger).to receive(:info) # Allow other log messages
|
||||
expect(Rails.logger).to receive(:info).with(/Expected entity counts from export:/)
|
||||
|
||||
service.import
|
||||
end
|
||||
end
|
||||
|
||||
context 'when JSON file is missing' do
|
||||
before do
|
||||
allow(File).to receive(:exist?).and_return(false)
|
||||
allow(File).to receive(:exist?).with(import_directory.join('data.json')).and_return(false)
|
||||
allow(ExceptionReporter).to receive(:call)
|
||||
end
|
||||
|
||||
it 'raises an error' do
|
||||
expect { service.import }.to raise_error(StandardError, 'Data file not found in archive: data.json')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when JSON is invalid' do
|
||||
before do
|
||||
allow(File).to receive(:exist?).and_return(false)
|
||||
allow(File).to receive(:exist?).with(import_directory.join('data.json')).and_return(true)
|
||||
allow(File).to receive(:read).with(import_directory.join('data.json')).and_return('invalid json')
|
||||
allow(ExceptionReporter).to receive(:call)
|
||||
end
|
||||
|
||||
it 'raises a JSON parse error' do
|
||||
expect { service.import }.to raise_error(StandardError, /Invalid JSON format in data file/)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when an error occurs during import' do
|
||||
let(:error_message) { 'Something went wrong' }
|
||||
|
||||
before do
|
||||
allow(File).to receive(:exist?).and_return(false)
|
||||
allow(File).to receive(:exist?).with(import_directory.join('data.json')).and_return(true)
|
||||
allow(File).to receive(:read).with(import_directory.join('data.json')).and_return(sample_data.to_json)
|
||||
allow(Users::ImportData::Settings).to receive(:new).and_raise(StandardError, error_message)
|
||||
allow(ExceptionReporter).to receive(:call)
|
||||
allow(::Notifications::Create).to receive(:new).and_return(double(call: true))
|
||||
end
|
||||
|
||||
it 'creates failure notification' do
|
||||
expect(::Notifications::Create).to receive(:new).with(
|
||||
user: user,
|
||||
title: 'Data import failed',
|
||||
content: "Your data import failed with error: #{error_message}. Please check the archive format and try again.",
|
||||
kind: :error
|
||||
)
|
||||
|
||||
expect { service.import }.to raise_error(StandardError, error_message)
|
||||
end
|
||||
|
||||
it 'reports error via ExceptionReporter' do
|
||||
expect(ExceptionReporter).to receive(:call).with(
|
||||
an_instance_of(StandardError),
|
||||
'Data import failed'
|
||||
)
|
||||
|
||||
expect { service.import }.to raise_error(StandardError, error_message)
|
||||
end
|
||||
|
||||
it 'still cleans up temporary files' do
|
||||
expect(service).to receive(:cleanup_temporary_files)
|
||||
|
||||
expect { service.import }.to raise_error(StandardError, error_message)
|
||||
end
|
||||
|
||||
it 're-raises the error' do
|
||||
expect { service.import }.to raise_error(StandardError, error_message)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when data sections are missing' do
|
||||
let(:minimal_data) { { 'settings' => { 'theme' => 'dark' } } }
|
||||
|
||||
before do
|
||||
# Reset JSON file mocking
|
||||
allow(File).to receive(:exist?).and_return(false)
|
||||
allow(File).to receive(:exist?).with(import_directory.join('data.json')).and_return(true)
|
||||
allow(File).to receive(:read).with(import_directory.join('data.json')).and_return(minimal_data.to_json)
|
||||
|
||||
# Only expect Settings to be called
|
||||
allow(Users::ImportData::Settings).to receive(:new).and_return(double(call: true))
|
||||
allow(::Notifications::Create).to receive(:new).and_return(double(call: true))
|
||||
end
|
||||
|
||||
it 'only imports available sections' do
|
||||
expect(Users::ImportData::Settings).to receive(:new).with(user, minimal_data['settings'])
|
||||
expect(Users::ImportData::Areas).not_to receive(:new)
|
||||
expect(Users::ImportData::Places).not_to receive(:new)
|
||||
|
||||
service.import
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'private methods' do
|
||||
describe '#cleanup_temporary_files' do
|
||||
context 'when directory exists' do
|
||||
before do
|
||||
allow(File).to receive(:directory?).and_return(true)
|
||||
allow(Rails.logger).to receive(:info)
|
||||
end
|
||||
|
||||
it 'removes the directory' do
|
||||
expect(FileUtils).to receive(:rm_rf).with(import_directory)
|
||||
|
||||
service.send(:cleanup_temporary_files, import_directory)
|
||||
end
|
||||
|
||||
it 'logs the cleanup' do
|
||||
expect(Rails.logger).to receive(:info).with("Cleaning up temporary import directory: #{import_directory}")
|
||||
|
||||
service.send(:cleanup_temporary_files, import_directory)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when cleanup fails' do
|
||||
before do
|
||||
allow(File).to receive(:directory?).and_return(true)
|
||||
allow(FileUtils).to receive(:rm_rf).and_raise(StandardError, 'Permission denied')
|
||||
allow(ExceptionReporter).to receive(:call)
|
||||
end
|
||||
|
||||
it 'reports error via ExceptionReporter but does not re-raise' do
|
||||
expect(ExceptionReporter).to receive(:call).with(
|
||||
an_instance_of(StandardError),
|
||||
'Failed to cleanup temporary files'
|
||||
)
|
||||
|
||||
expect { service.send(:cleanup_temporary_files, import_directory) }.not_to raise_error
|
||||
end
|
||||
end
|
||||
|
||||
context 'when directory does not exist' do
|
||||
before do
|
||||
allow(File).to receive(:directory?).and_return(false)
|
||||
end
|
||||
|
||||
it 'does not attempt cleanup' do
|
||||
expect(FileUtils).not_to receive(:rm_rf)
|
||||
|
||||
service.send(:cleanup_temporary_files, import_directory)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
Loading…
Reference in a new issue