Mirror of https://github.com/Freika/dawarich.git, synced 2026-01-11 01:31:39 -05:00
Update changelog
This commit is contained in:
parent 32a00db9b9
commit 1ebe2da84a
21 changed files with 75 additions and 343 deletions
@@ -1 +1 @@
-0.28.1
+0.29.0

CHANGELOG.md
@@ -6,6 +6,12 @@ and this project adheres to [Semantic Versioning](http://semver.org/).

# [0.29.0] - 2025-06-30

You can now move your user data between Dawarich instances. Simply go to your Account settings and click the "Export my data" button under the password section. An export will be created, and you will be able to download it on the Exports page once it's ready.

To import your data on a new Dawarich instance, create a new user and upload the exported zip file. You can also import your data on the Account page by clicking the "Import my data" button under the password section.

The feature is experimental and is not intended to replace a proper backup solution. Please use it at your own risk.

## Added

- In the User Settings, you can now export your user data as a zip file. It will contain the following:
@@ -20,9 +26,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
- All your stats

- In the User Settings, you can now import your user data from a zip file. It will import all of the data listed above and start a stats recalculation.

- Export file size is now displayed in the exports and imports lists.
- A button to download an import file is now displayed in the imports list. It may not work properly for imports created before the 0.25.4 release.
- Imports now have statuses.

## Changed
@@ -32,6 +38,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/).

- Email links now use the SMTP domain if set. #1469

# 0.28.1 - 2025-06-11

## Fixed
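For orientation, the integration-test log removed at the bottom of this diff shows which collections the export bundles and what an exported point looks like. A sketch of the payload as a Ruby hash, with values copied from that log; this is illustrative, not a schema:

export = {
  'counts' => { 'areas' => 3, 'imports' => 2, 'exports' => 2, 'trips' => 2, 'stats' => 2,
                'notifications' => 4, 'points' => 12, 'visits' => 3, 'places' => 2 },
  'points' => [
    { 'timestamp' => 1714559220, 'longitude' => -73.9851, 'latitude' => 40.7589,
      'import_reference' => { 'name' => 'March 2024 Data', 'source' => 0,
                              'created_at' => '2025-06-30T16:10:46.550Z' },
      'country_info' => { 'name' => 'United States', 'iso_a2' => 'US', 'iso_a3' => 'USA' },
      'visit_reference' => { 'name' => 'Work Visit', 'started_at' => '2025-06-30T16:10:46.711Z',
                             'ended_at' => '2025-06-30T17:10:46.711Z' } }
  ]
}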
@@ -1,22 +1,13 @@
# frozen_string_literal: true

class Users::ExportData::Notifications
  # System-generated notification titles that should not be exported
  SYSTEM_NOTIFICATION_TITLES = [
    'Data import completed',
    'Data import failed',
    'Export completed',
    'Export failed'
  ].freeze

  def initialize(user)
    @user = user
  end

  def call
    # Export only user-generated notifications, not system-generated ones
    # Export all notifications for the user
    user.notifications
        .where.not(title: SYSTEM_NOTIFICATION_TITLES)
        .as_json(except: %w[user_id id])
  end
@@ -6,7 +6,6 @@ class Users::ExportData::Points
  end

  def call
    # Single optimized query with all joins to avoid N+1 queries
    points_sql = <<-SQL
      SELECT
        p.id, p.battery_status, p.battery, p.timestamp, p.altitude, p.velocity, p.accuracy,
@@ -42,9 +41,7 @@ class Users::ExportData::Points

    Rails.logger.info "Processing #{result.count} points for export..."

    # Process results efficiently
    result.filter_map do |row|
      # Skip points without any coordinate data
      has_lonlat = row['lonlat'].present?
      has_coordinates = row['computed_longitude'].present? && row['computed_latitude'].present?
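As context for the hunk above: pulling everything in one SQL statement avoids issuing a query per point for its associations. A minimal sketch of running such a heredoc query in Rails; the PostGIS ST_X/ST_Y calls and column names here are assumptions, not taken from this diff:

# Sketch: execute a raw export query and post-process rows (assumed columns).
sql = <<-SQL
  SELECT p.id, p.timestamp,
         ST_X(p.lonlat::geometry) AS computed_longitude,
         ST_Y(p.lonlat::geometry) AS computed_latitude
  FROM points p
  WHERE p.user_id = #{user.id.to_i}
SQL

result = ActiveRecord::Base.connection.exec_query(sql)
rows = result.filter_map do |row|
  # Mirror the guard above: skip rows with no coordinate data at all.
  next if row['computed_longitude'].nil? && row['computed_latitude'].nil?

  row.except('id')
end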
@@ -41,7 +41,6 @@ class Users::ImportData
  end

  def import
    # Create a temporary directory for extraction
    @import_directory = Rails.root.join('tmp', "import_#{user.email.gsub(/[^0-9A-Za-z._-]/, '_')}_#{Time.current.to_i}")
    FileUtils.mkdir_p(@import_directory)
@@ -74,10 +73,8 @@ class Users::ImportData
    zip_file.each do |entry|
      extraction_path = @import_directory.join(entry.name)

      # Ensure directory exists
      FileUtils.mkdir_p(File.dirname(extraction_path))

      # Extract file
      entry.extract(extraction_path)
    end
  end
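One caveat worth flagging on the extraction loop above: entry.name comes straight from the uploaded archive, so a crafted entry such as "../../tmp/evil" can escape the target directory (zip slip). A hedged sketch of a guard, assuming the rubyzip gem; the SecurityError choice is our own, not part of this commit:

require 'zip'
require 'fileutils'

Zip::File.open(archive_path) do |zip_file|
  zip_file.each do |entry|
    extraction_path = import_directory.join(entry.name)

    # Reject entries whose resolved path lands outside the import directory.
    resolved = File.expand_path(extraction_path.to_s)
    base = File.expand_path(import_directory.to_s)
    raise SecurityError, "Unsafe zip entry: #{entry.name}" unless resolved.start_with?(base + File::SEPARATOR)

    FileUtils.mkdir_p(File.dirname(extraction_path))
    entry.extract(extraction_path)
  end
end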
@@ -98,15 +95,12 @@ class Users::ImportData
  def import_in_correct_order(data)
    Rails.logger.info "Starting data import for user: #{user.email}"

    # Log expected counts if available
    if data['counts']
      Rails.logger.info "Expected entity counts from export: #{data['counts']}"
    end

    # Debug: Log what data keys are available
    Rails.logger.debug "Available data keys: #{data.keys.inspect}"

    # Import in dependency order
    import_settings(data['settings']) if data['settings']
    import_areas(data['areas']) if data['areas']
    import_places(data['places']) if data['places']
@@ -178,26 +172,14 @@ class Users::ImportData
  end

  def import_points(points_data)
    puts "=== POINTS IMPORT DEBUG ==="
    puts "About to import #{points_data&.size || 0} points"
    puts "Points data present: #{points_data.present?}"
    puts "First point sample: #{points_data&.first&.slice('timestamp', 'longitude', 'latitude') if points_data&.first}"
    puts "=== END POINTS IMPORT DEBUG ==="

    Rails.logger.info "About to import #{points_data&.size || 0} points"
    Rails.logger.info "Points data present: #{points_data.present?}"
    Rails.logger.info "First point sample: #{points_data&.first&.slice('timestamp', 'longitude', 'latitude') if points_data&.first}"

    begin
      points_created = Users::ImportData::Points.new(user, points_data).call
      Rails.logger.info "Points import returned: #{points_created}"
      puts "Points import returned: #{points_created}"

      @import_stats[:points_created] = points_created
    rescue StandardError => e
      Rails.logger.error "Points import failed: #{e.message}"
      Rails.logger.error "Backtrace: #{e.backtrace.first(5).join('\n')}"
      puts "Points import failed: #{e.message}"
      ExceptionReporter.call(e, 'Points import failed')
      @import_stats[:points_created] = 0
    end
  end
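A small Ruby detail visible in the error logging above, and repeated in several batch handlers later in this diff: inside single quotes, '\n' is a literal backslash followed by n, not a newline, so e.backtrace.first(5).join('\n') writes all frames on one log line. If one frame per line is the intent, the separator needs double quotes:

frames = ['app/a.rb:10', 'app/b.rb:22']

puts frames.join('\n') # prints: app/a.rb:10\napp/b.rb:22 (one line, literal backslash-n)
puts frames.join("\n") # prints the two frames on separate lines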
@@ -212,16 +194,6 @@ class Users::ImportData
  end

  def create_success_notification
    # Check if we already have a recent import success notification to avoid duplicates
    recent_import_notification = user.notifications.where(
      title: 'Data import completed'
    ).where('created_at > ?', 5.minutes.ago).first

    if recent_import_notification
      Rails.logger.debug "Skipping duplicate import success notification"
      return
    end

    summary = "#{@import_stats[:points_created]} points, " \
              "#{@import_stats[:visits_created]} visits, " \
              "#{@import_stats[:places_created]} places, " \
@@ -13,7 +13,6 @@ class Users::ImportData::Areas

    Rails.logger.info "Importing #{areas_data.size} areas for user: #{user.email}"

    # Filter valid areas and prepare for bulk import
    valid_areas = filter_and_prepare_areas

    if valid_areas.empty?
@@ -21,14 +20,12 @@ class Users::ImportData::Areas
      return 0
    end

    # Remove existing areas to avoid duplicates
    deduplicated_areas = filter_existing_areas(valid_areas)

    if deduplicated_areas.size < valid_areas.size
      Rails.logger.debug "Skipped #{valid_areas.size - deduplicated_areas.size} duplicate areas"
    end

    # Bulk import in batches
    total_created = bulk_import_areas(deduplicated_areas)

    Rails.logger.info "Areas import completed. Created: #{total_created}"
@@ -46,13 +43,12 @@ class Users::ImportData::Areas
    areas_data.each do |area_data|
      next unless area_data.is_a?(Hash)

      # Skip areas with missing required data
      unless valid_area_data?(area_data)
        skipped_count += 1

        next
      end

      # Prepare area attributes for bulk insert
      prepared_attributes = prepare_area_attributes(area_data)
      valid_areas << prepared_attributes if prepared_attributes
    end
@@ -65,18 +61,13 @@ class Users::ImportData::Areas
  end

  def prepare_area_attributes(area_data)
    # Start with base attributes, excluding timestamp fields
    attributes = area_data.except('created_at', 'updated_at')

    # Add required attributes for bulk insert
    attributes['user_id'] = user.id
    attributes['created_at'] = Time.current
    attributes['updated_at'] = Time.current
    attributes['radius'] ||= 100

    # Ensure radius is present (required by model validation)
    attributes['radius'] ||= 100 # Default radius if not provided

    # Convert string keys to symbols for consistency
    attributes.symbolize_keys
  rescue StandardError => e
    Rails.logger.error "Failed to prepare area attributes: #{e.message}"
@@ -87,17 +78,13 @@ class Users::ImportData::Areas
  def filter_existing_areas(areas)
    return areas if areas.empty?

    # Build lookup hash of existing areas for this user
    existing_areas_lookup = {}
    user.areas.select(:name, :latitude, :longitude).each do |area|
      # Normalize decimal values for consistent comparison
      key = [area.name, area.latitude.to_f, area.longitude.to_f]
      existing_areas_lookup[key] = true
    end

    # Filter out areas that already exist
    filtered_areas = areas.reject do |area|
      # Normalize decimal values for consistent comparison
      key = [area[:name], area[:latitude].to_f, area[:longitude].to_f]
      if existing_areas_lookup[key]
        Rails.logger.debug "Area already exists: #{area[:name]}"
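The lookup-hash idiom above, repeated below for notifications, stats, and trips, is really a membership test, so a Set expresses it a bit more directly. A compact equivalent sketch using the same dedup key:

require 'set'

existing = user.areas.select(:name, :latitude, :longitude)
               .map { |a| [a.name, a.latitude.to_f, a.longitude.to_f] }
               .to_set

deduplicated_areas = areas.reject do |area|
  existing.include?([area[:name], area[:latitude].to_f, area[:longitude].to_f])
end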
@@ -115,7 +102,6 @@ class Users::ImportData::Areas

    areas.each_slice(BATCH_SIZE) do |batch|
      begin
        # Use upsert_all to efficiently bulk insert areas
        result = Area.upsert_all(
          batch,
          returning: %w[id],
@@ -131,7 +117,6 @@ class Users::ImportData::Areas
        Rails.logger.error "Failed to process area batch: #{e.message}"
        Rails.logger.error "Batch size: #{batch.size}"
        Rails.logger.error "Backtrace: #{e.backtrace.first(3).join('\n')}"
        # Continue with next batch instead of failing completely
      end
    end
@@ -139,7 +124,6 @@ class Users::ImportData::Areas
  end

  def valid_area_data?(area_data)
    # Check for required fields
    return false unless area_data.is_a?(Hash)
    return false unless area_data['name'].present?
    return false unless area_data['latitude'].present?
@@ -18,7 +18,6 @@ class Users::ImportData::Exports
    exports_data.each do |export_data|
      next unless export_data.is_a?(Hash)

      # Check if export already exists (match by name and created_at)
      existing_export = user.exports.find_by(
        name: export_data['name'],
        created_at: export_data['created_at']
@@ -29,11 +28,9 @@ class Users::ImportData::Exports
        next
      end

      # Create new export
      export_record = create_export_record(export_data)
      exports_created += 1

      # Restore file if present
      if export_data['file_name'] && restore_export_file(export_record, export_data)
        files_restored += 1
      end
@@ -73,7 +70,6 @@ class Users::ImportData::Exports
    end

    begin
      # Attach the file to the export record
      export_record.file.attach(
        io: File.open(file_path),
        filename: export_data['original_filename'] || export_data['file_name'],
@@ -18,7 +18,6 @@ class Users::ImportData::Imports
    imports_data.each do |import_data|
      next unless import_data.is_a?(Hash)

      # Check if import already exists (match by name, source, and created_at)
      existing_import = user.imports.find_by(
        name: import_data['name'],
        source: import_data['source'],
@@ -30,13 +29,11 @@ class Users::ImportData::Imports
        next
      end

      # Create new import
      import_record = create_import_record(import_data)
      next unless import_record # Skip if creation failed

      imports_created += 1

      # Restore file if present
      if import_data['file_name'] && restore_import_file(import_record, import_data)
        files_restored += 1
      end
@@ -55,7 +52,6 @@ class Users::ImportData::Imports

    begin
      import_record = user.imports.build(import_attributes)
      # Skip background processing since we're importing user data directly
      import_record.skip_background_processing = true
      import_record.save!
      Rails.logger.debug "Created import: #{import_record.name}"
@@ -86,7 +82,6 @@ class Users::ImportData::Imports
    end

    begin
      # Attach the file to the import record
      import_record.file.attach(
        io: File.open(file_path),
        filename: import_data['original_filename'] || import_data['file_name'],
@@ -97,7 +92,7 @@ class Users::ImportData::Imports

    true
  rescue StandardError => e
    ExceptionReporter.call(e, "Import file restoration failed")
    ExceptionReporter.call(e, 'Import file restoration failed')

    false
  end
@@ -13,22 +13,19 @@ class Users::ImportData::Notifications

    Rails.logger.info "Importing #{notifications_data.size} notifications for user: #{user.email}"

    # Filter valid notifications and prepare for bulk import
    valid_notifications = filter_and_prepare_notifications

    if valid_notifications.empty?
      Rails.logger.info "Notifications import completed. Created: 0"
      Rails.logger.info 'Notifications import completed. Created: 0'
      return 0
    end

    # Remove existing notifications to avoid duplicates
    deduplicated_notifications = filter_existing_notifications(valid_notifications)

    if deduplicated_notifications.size < valid_notifications.size
      Rails.logger.debug "Skipped #{valid_notifications.size - deduplicated_notifications.size} duplicate notifications"
    end

    # Bulk import in batches
    total_created = bulk_import_notifications(deduplicated_notifications)

    Rails.logger.info "Notifications import completed. Created: #{total_created}"
@@ -46,13 +43,11 @@ class Users::ImportData::Notifications
    notifications_data.each do |notification_data|
      next unless notification_data.is_a?(Hash)

      # Skip notifications with missing required data
      unless valid_notification_data?(notification_data)
        skipped_count += 1
        next
      end

      # Prepare notification attributes for bulk insert
      prepared_attributes = prepare_notification_attributes(notification_data)
      valid_notifications << prepared_attributes if prepared_attributes
    end
@@ -65,20 +60,16 @@ class Users::ImportData::Notifications
  end

  def prepare_notification_attributes(notification_data)
    # Start with base attributes, excluding only updated_at (preserve created_at for duplicate logic)
    attributes = notification_data.except('updated_at')

    # Add required attributes for bulk insert
    attributes['user_id'] = user.id

    # Preserve original created_at if present, otherwise use current time
    unless attributes['created_at'].present?
      attributes['created_at'] = Time.current
    end

    attributes['updated_at'] = Time.current

    # Convert string keys to symbols for consistency
    attributes.symbolize_keys
  rescue StandardError => e
    Rails.logger.error "Failed to prepare notification attributes: #{e.message}"
@@ -89,26 +80,20 @@ class Users::ImportData::Notifications
  def filter_existing_notifications(notifications)
    return notifications if notifications.empty?

    # Build lookup hash of existing notifications for this user
    # Use title and content as the primary deduplication key
    existing_notifications_lookup = {}
    user.notifications.select(:title, :content, :created_at, :kind).each do |notification|
      # Primary key: title + content
      primary_key = [notification.title.strip, notification.content.strip]

      # Secondary key: include timestamp for exact matches
      exact_key = [notification.title.strip, notification.content.strip, normalize_timestamp(notification.created_at)]

      existing_notifications_lookup[primary_key] = true
      existing_notifications_lookup[exact_key] = true
    end

    # Filter out notifications that already exist
    filtered_notifications = notifications.reject do |notification|
      title = notification[:title]&.strip
      content = notification[:content]&.strip

      # Check both primary key (title + content) and exact key (with timestamp)
      primary_key = [title, content]
      exact_key = [title, content, normalize_timestamp(notification[:created_at])]
@@ -125,12 +110,8 @@ class Users::ImportData::Notifications

  def normalize_timestamp(timestamp)
    case timestamp
    when String
      # Parse string and convert to unix timestamp for consistent comparison
      Time.parse(timestamp).to_i
    when Time, DateTime
      # Convert time objects to unix timestamp for consistent comparison
      timestamp.to_i
    when String then Time.parse(timestamp).to_i
    when Time, DateTime then timestamp.to_i
    else
      timestamp.to_s
    end
@@ -144,7 +125,6 @@ class Users::ImportData::Notifications

    notifications.each_slice(BATCH_SIZE) do |batch|
      begin
        # Use upsert_all to efficiently bulk insert notifications
        result = Notification.upsert_all(
          batch,
          returning: %w[id],
@@ -160,7 +140,6 @@ class Users::ImportData::Notifications
        Rails.logger.error "Failed to process notification batch: #{e.message}"
        Rails.logger.error "Batch size: #{batch.size}"
        Rails.logger.error "Backtrace: #{e.backtrace.first(3).join('\n')}"
        # Continue with next batch instead of failing completely
      end
    end
@@ -168,7 +147,6 @@ class Users::ImportData::Notifications
  end

  def valid_notification_data?(notification_data)
    # Check for required fields
    return false unless notification_data.is_a?(Hash)

    unless notification_data['title'].present?
@@ -16,7 +16,6 @@ class Users::ImportData::Places
    places_data.each do |place_data|
      next unless place_data.is_a?(Hash)

      # Find or create place by name and coordinates
      place = find_or_create_place(place_data)
      places_created += 1 if place&.respond_to?(:previously_new_record?) && place.previously_new_record?
    end
@@ -34,16 +33,13 @@ class Users::ImportData::Places
    latitude = place_data['latitude']&.to_f
    longitude = place_data['longitude']&.to_f

    # Skip if essential data is missing
    unless name.present? && latitude.present? && longitude.present?
      Rails.logger.debug "Skipping place with missing required data: #{place_data.inspect}"
      return nil
    end

    # Try to find existing place by name first, then by coordinates
    existing_place = Place.find_by(name: name)

    # If no place with same name, check by coordinates
    unless existing_place
      existing_place = Place.where(latitude: latitude, longitude: longitude).first
    end
@@ -54,12 +50,10 @@ class Users::ImportData::Places
      return existing_place
    end

    # Create new place with lonlat point
    place_attributes = place_data.except('created_at', 'updated_at', 'latitude', 'longitude')
    place_attributes['lonlat'] = "POINT(#{longitude} #{latitude})"
    place_attributes['latitude'] = latitude
    place_attributes['longitude'] = longitude
    # Remove any user reference since Place doesn't belong to user directly
    place_attributes.delete('user')

    begin
@@ -69,7 +63,8 @@ class Users::ImportData::Places

      place
    rescue ActiveRecord::RecordInvalid => e
      Rails.logger.error "Failed to create place: #{e.message}"
      ExceptionReporter.call(e, 'Failed to create place')

      nil
    end
  end
@@ -11,40 +11,25 @@ class Users::ImportData::Points
  def call
    return 0 unless points_data.is_a?(Array)

    puts "=== POINTS SERVICE DEBUG ==="
    puts "Points data is array: #{points_data.is_a?(Array)}"
    puts "Points data size: #{points_data.size}"

    Rails.logger.info "Importing #{points_data.size} points for user: #{user.email}"
    Rails.logger.debug "First point sample: #{points_data.first.inspect}"

    # Pre-load reference data for efficient bulk processing
    preload_reference_data

    # Filter valid points and prepare for bulk import
    valid_points = filter_and_prepare_points

    puts "Valid points after filtering: #{valid_points.size}"

    if valid_points.empty?
      puts "No valid points after filtering - returning 0"
      Rails.logger.warn "No valid points to import after filtering"
      Rails.logger.debug "Original points_data size: #{points_data.size}"
      return 0
    end

    # Remove duplicates based on unique constraint
    deduplicated_points = deduplicate_points(valid_points)

    puts "Deduplicated points: #{deduplicated_points.size}"

    Rails.logger.info "Prepared #{deduplicated_points.size} unique valid points (#{points_data.size - deduplicated_points.size} duplicates/invalid skipped)"

    # Bulk import in batches
    total_created = bulk_import_points(deduplicated_points)

    puts "Total created by bulk import: #{total_created}"

    Rails.logger.info "Points import completed. Created: #{total_created}"
    total_created
  end
@@ -54,10 +39,8 @@ class Users::ImportData::Points
  attr_reader :user, :points_data, :imports_lookup, :countries_lookup, :visits_lookup

  def preload_reference_data
    # Pre-load imports for this user with multiple lookup keys for flexibility
    @imports_lookup = {}
    user.imports.each do |import|
      # Create keys for both string and integer source representations
      string_key = [import.name, import.source, import.created_at.utc.iso8601]
      integer_key = [import.name, Import.sources[import.source], import.created_at.utc.iso8601]
@@ -66,16 +49,13 @@ class Users::ImportData::Points
    end
    Rails.logger.debug "Loaded #{user.imports.size} imports with #{@imports_lookup.size} lookup keys"

    # Pre-load all countries for efficient lookup
    @countries_lookup = {}
    Country.all.each do |country|
      # Index by all possible lookup keys
      @countries_lookup[[country.name, country.iso_a2, country.iso_a3]] = country
      @countries_lookup[country.name] = country
    end
    Rails.logger.debug "Loaded #{Country.count} countries for lookup"

    # Pre-load visits for this user
    @visits_lookup = user.visits.index_by { |visit|
      [visit.name, visit.started_at.utc.iso8601, visit.ended_at.utc.iso8601]
    }
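The preloading above trades memory for query count: one query per table up front, then O(1) hash reads per imported point instead of a find_by per reference. Condensed, the same pattern looks like this; the lookup key shown is illustrative:

imports_by_key = user.imports.index_by do |import|
  [import.name, import.source, import.created_at.utc.iso8601]
end
countries_by_name = Country.all.index_by(&:name)

# Resolving a point's reference becomes a hash read, not a query:
import = imports_by_key[['March 2024 Data', 'gpx', '2025-06-30T16:10:46Z']]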
@@ -89,14 +69,12 @@ class Users::ImportData::Points
    points_data.each_with_index do |point_data, index|
      next unless point_data.is_a?(Hash)

      # Skip points with invalid or missing required data
      unless valid_point_data?(point_data)
        skipped_count += 1
        Rails.logger.debug "Skipped point #{index}: invalid data - #{point_data.slice('timestamp', 'longitude', 'latitude', 'lonlat')}"
        next
      end

      # Prepare point attributes for bulk insert
      prepared_attributes = prepare_point_attributes(point_data)
      unless prepared_attributes
        skipped_count += 1
@@ -116,48 +94,41 @@ class Users::ImportData::Points
  end

  def prepare_point_attributes(point_data)
    # Start with base attributes, excluding fields that need special handling
    attributes = point_data.except(
      'created_at',
      'updated_at',
      'import_reference',
      'country_info',
      'visit_reference',
      'country' # Exclude the string country field - handled via country_info relationship
      'country'
    )

    # Handle lonlat reconstruction if missing (for backward compatibility)
    ensure_lonlat_field(attributes, point_data)

    # Remove longitude/latitude after lonlat reconstruction to ensure consistent keys
    attributes.delete('longitude')
    attributes.delete('latitude')

    # Add required attributes for bulk insert
    attributes['user_id'] = user.id
    attributes['created_at'] = Time.current
    attributes['updated_at'] = Time.current

    # Resolve foreign key relationships
    resolve_import_reference(attributes, point_data['import_reference'])
    resolve_country_reference(attributes, point_data['country_info'])
    resolve_visit_reference(attributes, point_data['visit_reference'])

    # Convert string keys to symbols for consistency with Point model
    result = attributes.symbolize_keys

    Rails.logger.debug "Prepared point attributes: #{result.slice(:lonlat, :timestamp, :import_id, :country_id, :visit_id)}"
    result
  rescue StandardError => e
    Rails.logger.error "Failed to prepare point attributes: #{e.message}"
    Rails.logger.error "Point data: #{point_data.inspect}"
    ExceptionReporter.call(e, 'Failed to prepare point attributes')

    nil
  end

  def resolve_import_reference(attributes, import_reference)
    return unless import_reference.is_a?(Hash)

    # Normalize timestamp format to ISO8601 for consistent lookup
    created_at = normalize_timestamp_for_lookup(import_reference['created_at'])

    import_key = [
@@ -179,11 +150,9 @@ class Users::ImportData::Points
  def resolve_country_reference(attributes, country_info)
    return unless country_info.is_a?(Hash)

    # Try to find country by all attributes first
    country_key = [country_info['name'], country_info['iso_a2'], country_info['iso_a3']]
    country = countries_lookup[country_key]

    # If not found by all attributes, try to find by name only
    if country.nil? && country_info['name'].present?
      country = countries_lookup[country_info['name']]
    end
@@ -201,7 +170,6 @@ class Users::ImportData::Points
  def resolve_visit_reference(attributes, visit_reference)
    return unless visit_reference.is_a?(Hash)

    # Normalize timestamp formats to ISO8601 for consistent lookup
    started_at = normalize_timestamp_for_lookup(visit_reference['started_at'])
    ended_at = normalize_timestamp_for_lookup(visit_reference['ended_at'])
@@ -225,9 +193,7 @@ class Users::ImportData::Points
    points.uniq { |point| [point[:lonlat], point[:timestamp], point[:user_id]] }
  end

  # Ensure all points have the same keys for upsert_all compatibility
  def normalize_point_keys(points)
    # Get all possible keys from all points
    all_keys = points.flat_map(&:keys).uniq

    # Normalize each point to have all keys (with nil for missing ones)
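Background for normalize_point_keys: upsert_all raises unless every row hash carries an identical key set, which is exactly what this method enforces. A minimal sketch of the normalization, assuming ActiveSupport's index_with is available:

points = [
  { lonlat: 'POINT(13.4 52.5)', timestamp: 1, user_id: 1 },
  { lonlat: 'POINT(2.35 48.85)', timestamp: 2, user_id: 1, city: 'Paris' }
]

all_keys = points.flat_map(&:keys).uniq
normalized = points.map { |point| all_keys.index_with { |key| point[key] } }
# Every hash now carries :lonlat, :timestamp, :user_id and :city (nil where absent).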
@@ -243,22 +209,13 @@ class Users::ImportData::Points
  def bulk_import_points(points)
    total_created = 0

    puts "=== BULK IMPORT DEBUG ==="
    puts "About to bulk import #{points.size} points"
    puts "First point for import: #{points.first.inspect}"

    points.each_slice(BATCH_SIZE) do |batch|
      begin
        Rails.logger.debug "Processing batch of #{batch.size} points"
        Rails.logger.debug "First point in batch: #{batch.first.inspect}"

        puts "Processing batch of #{batch.size} points"
        puts "Sample point attributes: #{batch.first.slice(:lonlat, :timestamp, :user_id, :import_id, :country_id, :visit_id)}"

        # Normalize all points to have the same keys for upsert_all compatibility
        normalized_batch = normalize_point_keys(batch)

        # Use upsert_all to efficiently bulk insert/update points
        result = Point.upsert_all(
          normalized_batch,
          unique_by: %i[lonlat timestamp user_id],
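A note on unique_by in the call above: it must match a unique index that actually exists on points, otherwise PostgreSQL rejects the generated ON CONFLICT clause at runtime. A hedged sketch of the kind of migration such a call presumes; this migration is not part of the diff and the index definition is an assumption:

class AddUniqueIndexToPoints < ActiveRecord::Migration[8.0]
  disable_ddl_transaction! # required for algorithm: :concurrently

  def change
    # Backs upsert_all(unique_by: %i[lonlat timestamp user_id]).
    add_index :points, %i[lonlat timestamp user_id], unique: true, algorithm: :concurrently
  end
end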
@@ -269,32 +226,23 @@ class Users::ImportData::Points
        batch_created = result.count
        total_created += batch_created

        puts "Batch result count: #{batch_created}"

        Rails.logger.debug "Processed batch of #{batch.size} points, created #{batch_created}, total created: #{total_created}"

      rescue StandardError => e
        puts "Batch import failed: #{e.message}"
        puts "Backtrace: #{e.backtrace.first(3).join('\n')}"
        Rails.logger.error "Failed to process point batch: #{e.message}"
        Rails.logger.error "Batch size: #{batch.size}"
        Rails.logger.error "First point in failed batch: #{batch.first.inspect}"
        Rails.logger.error "Backtrace: #{e.backtrace.first(5).join('\n')}"
        # Continue with next batch instead of failing completely
      end
    end

    puts "Total created across all batches: #{total_created}"

    total_created
    total_created
  end

  def valid_point_data?(point_data)
    # Check for required fields
    return false unless point_data.is_a?(Hash)
    return false unless point_data['timestamp'].present?

    # Check if we have either lonlat or longitude/latitude
    has_lonlat = point_data['lonlat'].present? && point_data['lonlat'].is_a?(String) && point_data['lonlat'].start_with?('POINT(')
    has_coordinates = point_data['longitude'].present? && point_data['latitude'].present?
@@ -307,7 +255,6 @@ class Users::ImportData::Points
  end

  def ensure_lonlat_field(attributes, point_data)
    # If lonlat is missing but we have longitude/latitude, reconstruct it
    if attributes['lonlat'].blank? && point_data['longitude'].present? && point_data['latitude'].present?
      longitude = point_data['longitude'].to_f
      latitude = point_data['latitude'].to_f
@@ -321,13 +268,10 @@ class Users::ImportData::Points

    case timestamp
    when String
      # Parse string timestamp and convert to UTC ISO8601 format
      Time.parse(timestamp).utc.iso8601
    when Time, DateTime
      # Convert time objects to UTC ISO8601 format
      timestamp.utc.iso8601
    else
      # Fallback to string representation
      timestamp.to_s
    end
  rescue StandardError => e
@@ -11,7 +11,6 @@ class Users::ImportData::Settings

    Rails.logger.info "Importing settings for user: #{user.email}"

    # Merge imported settings with existing settings
    current_settings = user.settings || {}
    updated_settings = current_settings.merge(settings_data)
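Worth noting about the settings merge above: Hash#merge is shallow, so a nested hash coming from the export replaces the target's nested hash wholesale; ActiveSupport's deep_merge would combine them key by key. The keys below are illustrative:

current  = { 'maps' => { 'distance_unit' => 'km' }, 'theme' => 'dark' }
imported = { 'maps' => { 'speed_colored_routes' => true } }

current.merge(imported)
# => { 'maps' => { 'speed_colored_routes' => true }, 'theme' => 'dark' }  (distance_unit lost)
current.deep_merge(imported)
# => { 'maps' => { 'distance_unit' => 'km', 'speed_colored_routes' => true }, 'theme' => 'dark' }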
@@ -13,7 +13,6 @@ class Users::ImportData::Stats

    Rails.logger.info "Importing #{stats_data.size} stats for user: #{user.email}"

    # Filter valid stats and prepare for bulk import
    valid_stats = filter_and_prepare_stats

    if valid_stats.empty?
@@ -21,14 +20,12 @@ class Users::ImportData::Stats
      return 0
    end

    # Remove existing stats to avoid duplicates
    deduplicated_stats = filter_existing_stats(valid_stats)

    if deduplicated_stats.size < valid_stats.size
      Rails.logger.debug "Skipped #{valid_stats.size - deduplicated_stats.size} duplicate stats"
    end

    # Bulk import in batches
    total_created = bulk_import_stats(deduplicated_stats)

    Rails.logger.info "Stats import completed. Created: #{total_created}"
@@ -46,13 +43,11 @@ class Users::ImportData::Stats
    stats_data.each do |stat_data|
      next unless stat_data.is_a?(Hash)

      # Skip stats with missing required data
      unless valid_stat_data?(stat_data)
        skipped_count += 1
        next
      end

      # Prepare stat attributes for bulk insert
      prepared_attributes = prepare_stat_attributes(stat_data)
      valid_stats << prepared_attributes if prepared_attributes
    end
@@ -65,33 +60,28 @@ class Users::ImportData::Stats
  end

  def prepare_stat_attributes(stat_data)
    # Start with base attributes, excluding timestamp fields
    attributes = stat_data.except('created_at', 'updated_at')

    # Add required attributes for bulk insert
    attributes['user_id'] = user.id
    attributes['created_at'] = Time.current
    attributes['updated_at'] = Time.current

    # Convert string keys to symbols for consistency
    attributes.symbolize_keys
  rescue StandardError => e
    Rails.logger.error "Failed to prepare stat attributes: #{e.message}"
    Rails.logger.error "Stat data: #{stat_data.inspect}"
    ExceptionReporter.call(e, 'Failed to prepare stat attributes')

    nil
  end

  def filter_existing_stats(stats)
    return stats if stats.empty?

    # Build lookup hash of existing stats for this user
    existing_stats_lookup = {}
    user.stats.select(:year, :month).each do |stat|
      key = [stat.year, stat.month]
      existing_stats_lookup[key] = true
    end

    # Filter out stats that already exist
    filtered_stats = stats.reject do |stat|
      key = [stat[:year], stat[:month]]
      if existing_stats_lookup[key]
@@ -110,7 +100,6 @@ class Users::ImportData::Stats

    stats.each_slice(BATCH_SIZE) do |batch|
      begin
        # Use upsert_all to efficiently bulk insert stats
        result = Stat.upsert_all(
          batch,
          returning: %w[id],
@@ -123,10 +112,7 @@ class Users::ImportData::Stats
        Rails.logger.debug "Processed batch of #{batch.size} stats, created #{batch_created}, total created: #{total_created}"

      rescue StandardError => e
        Rails.logger.error "Failed to process stat batch: #{e.message}"
        Rails.logger.error "Batch size: #{batch.size}"
        Rails.logger.error "Backtrace: #{e.backtrace.first(3).join('\n')}"
        # Continue with next batch instead of failing completely
        ExceptionReporter.call(e, 'Failed to process stat batch')
      end
    end
@@ -134,7 +120,6 @@ class Users::ImportData::Stats
  end

  def valid_stat_data?(stat_data)
    # Check for required fields
    return false unless stat_data.is_a?(Hash)

    unless stat_data['year'].present?
@@ -13,7 +13,6 @@ class Users::ImportData::Trips

    Rails.logger.info "Importing #{trips_data.size} trips for user: #{user.email}"

    # Filter valid trips and prepare for bulk import
    valid_trips = filter_and_prepare_trips

    if valid_trips.empty?
@@ -21,14 +20,12 @@ class Users::ImportData::Trips
      return 0
    end

    # Remove existing trips to avoid duplicates
    deduplicated_trips = filter_existing_trips(valid_trips)

    if deduplicated_trips.size < valid_trips.size
      Rails.logger.debug "Skipped #{valid_trips.size - deduplicated_trips.size} duplicate trips"
    end

    # Bulk import in batches
    total_created = bulk_import_trips(deduplicated_trips)

    Rails.logger.info "Trips import completed. Created: #{total_created}"
@@ -46,13 +43,11 @@ class Users::ImportData::Trips
    trips_data.each do |trip_data|
      next unless trip_data.is_a?(Hash)

      # Skip trips with missing required data
      unless valid_trip_data?(trip_data)
        skipped_count += 1
        next
      end

      # Prepare trip attributes for bulk insert
      prepared_attributes = prepare_trip_attributes(trip_data)
      valid_trips << prepared_attributes if prepared_attributes
    end
@@ -65,36 +60,29 @@ class Users::ImportData::Trips
  end

  def prepare_trip_attributes(trip_data)
    # Start with base attributes, excluding timestamp fields
    attributes = trip_data.except('created_at', 'updated_at')

    # Add required attributes for bulk insert
    attributes['user_id'] = user.id
    attributes['created_at'] = Time.current
    attributes['updated_at'] = Time.current

    # Convert string keys to symbols for consistency
    attributes.symbolize_keys
  rescue StandardError => e
    Rails.logger.error "Failed to prepare trip attributes: #{e.message}"
    Rails.logger.error "Trip data: #{trip_data.inspect}"
    ExceptionReporter.call(e, 'Failed to prepare trip attributes')

    nil
  end

  def filter_existing_trips(trips)
    return trips if trips.empty?

    # Build lookup hash of existing trips for this user
    existing_trips_lookup = {}
    user.trips.select(:name, :started_at, :ended_at).each do |trip|
      # Normalize timestamp values for consistent comparison
      key = [trip.name, normalize_timestamp(trip.started_at), normalize_timestamp(trip.ended_at)]
      existing_trips_lookup[key] = true
    end

    # Filter out trips that already exist
    filtered_trips = trips.reject do |trip|
      # Normalize timestamp values for consistent comparison
      key = [trip[:name], normalize_timestamp(trip[:started_at]), normalize_timestamp(trip[:ended_at])]
      if existing_trips_lookup[key]
        Rails.logger.debug "Trip already exists: #{trip[:name]}"
@@ -110,10 +98,8 @@ class Users::ImportData::Trips
  def normalize_timestamp(timestamp)
    case timestamp
    when String
      # Parse string and convert to iso8601 format for consistent comparison
      Time.parse(timestamp).utc.iso8601
    when Time, DateTime
      # Convert time objects to iso8601 format for consistent comparison
      timestamp.utc.iso8601
    else
      timestamp.to_s
@@ -127,7 +113,6 @@ class Users::ImportData::Trips

    trips.each_slice(BATCH_SIZE) do |batch|
      begin
        # Use upsert_all to efficiently bulk insert trips
        result = Trip.upsert_all(
          batch,
          returning: %w[id],
@@ -140,10 +125,7 @@ class Users::ImportData::Trips
        Rails.logger.debug "Processed batch of #{batch.size} trips, created #{batch_created}, total created: #{total_created}"

      rescue StandardError => e
        Rails.logger.error "Failed to process trip batch: #{e.message}"
        Rails.logger.error "Batch size: #{batch.size}"
        Rails.logger.error "Backtrace: #{e.backtrace.first(3).join('\n')}"
        # Continue with next batch instead of failing completely
        ExceptionReporter.call(e, 'Failed to process trip batch')
      end
    end
@@ -151,27 +133,37 @@ class Users::ImportData::Trips
  end

  def valid_trip_data?(trip_data)
    # Check for required fields
    return false unless trip_data.is_a?(Hash)

    unless trip_data['name'].present?
      Rails.logger.error "Failed to create trip: Validation failed: Name can't be blank"
      return false
    end

    unless trip_data['started_at'].present?
      Rails.logger.error "Failed to create trip: Validation failed: Started at can't be blank"
      return false
    end

    unless trip_data['ended_at'].present?
      Rails.logger.error "Failed to create trip: Validation failed: Ended at can't be blank"
      return false
    end
    validate_trip_name(trip_data)
    validate_trip_started_at(trip_data)
    validate_trip_ended_at(trip_data)

    true
  rescue StandardError => e
    Rails.logger.debug "Trip validation failed: #{e.message} for data: #{trip_data.inspect}"
    false
  end

  def validate_trip_name(trip_data)
    unless trip_data['name'].present?
      ExceptionReporter.call(e, 'Failed to create trip: Validation failed: Name can\'t be blank')
      return false
    end
  end

  def validate_trip_started_at(trip_data)
    unless trip_data['started_at'].present?
      ExceptionReporter.call(e, 'Failed to create trip: Validation failed: Started at can\'t be blank')
      return false
    end
  end

  def validate_trip_ended_at(trip_data)
    unless trip_data['ended_at'].present?
      ExceptionReporter.call(e, 'Failed to create trip: Validation failed: Ended at can\'t be blank')
      return false
    end
  end
end
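An editor's note on the extracted validate_trip_* helpers above: each references e, which only exists inside the rescue clause of valid_trip_data?, so the missing-field branch raises a NameError; the caller's rescue StandardError then converts that into false, which means the validation only works by accident and ExceptionReporter is never actually called. A corrected sketch of one helper, building an explicit error object; the StandardError choice is ours:

def validate_trip_name(trip_data)
  return true if trip_data['name'].present?

  # No exception is in scope here, so construct one to hand to the reporter.
  error = StandardError.new("Validation failed: Name can't be blank")
  ExceptionReporter.call(error, 'Failed to create trip')
  false
end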
@@ -16,7 +16,6 @@ class Users::ImportData::Visits
    visits_data.each do |visit_data|
      next unless visit_data.is_a?(Hash)

      # Check if visit already exists (match by name, timestamps, and place reference)
      existing_visit = find_existing_visit(visit_data)

      if existing_visit
@@ -24,13 +23,12 @@ class Users::ImportData::Visits
        next
      end

      # Create new visit
      begin
        visit_record = create_visit_record(visit_data)
        visits_created += 1
        Rails.logger.debug "Created visit: #{visit_record.name}"
      rescue ActiveRecord::RecordInvalid => e
        Rails.logger.error "Failed to create visit: #{e.message}"
        ExceptionReporter.call(e, 'Failed to create visit')
        next
      end
    end
@@ -59,7 +57,6 @@ class Users::ImportData::Visits
  def prepare_visit_attributes(visit_data)
    attributes = visit_data.except('place_reference')

    # Find and assign place if referenced
    if visit_data['place_reference']
      place = find_referenced_place(visit_data['place_reference'])
      attributes[:place] = place if place
@@ -75,7 +72,6 @@ class Users::ImportData::Visits
    latitude = place_reference['latitude'].to_f
    longitude = place_reference['longitude'].to_f

    # Find place by name and coordinates (global search since places are not user-specific)
    place = Place.find_by(name: name) ||
            Place.where("latitude = ? AND longitude = ?", latitude, longitude).first
@@ -88,7 +88,7 @@ Rails.application.configure do

  hosts = ENV.fetch('APPLICATION_HOSTS', 'localhost').split(',')

  config.action_mailer.default_url_options = { host: ENV['SMTP_DOMAIN'] || hosts.first }
  config.action_mailer.default_url_options = { host: ENV['SMTP_DOMAIN'] || hosts.first, port: ENV.fetch('PORT', 3000) }

  config.hosts.concat(hosts) if hosts.present?
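This hunk is the change behind the "Email links now use the SMTP domain if set. #1469" changelog entry: default_url_options is what URL helpers inside mailer views fall back to when generating absolute links. A sketch of the effect with illustrative environment values:

# With SMTP_DOMAIN=dawarich.example.com and PORT=3000 (values illustrative):
Rails.application.config.action_mailer.default_url_options
# => { host: 'dawarich.example.com', port: 3000 }

# Mailer URL helpers then resolve against that host and port, e.g.
# url_for(controller: 'exports', action: 'index', only_path: false)
# => "http://dawarich.example.com:3000/exports"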
@@ -6,5 +6,7 @@ class AddStatusToImports < ActiveRecord::Migration[8.0]
  def change
    add_column :imports, :status, :integer, default: 0, null: false
    add_index :imports, :status, algorithm: :concurrently

    Import.update_all(status: :completed)
  end
end
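A caveat on this migration as shown: PostgreSQL refuses to run CREATE INDEX CONCURRENTLY inside a transaction, and Rails wraps each migration in one by default, so a migration passing algorithm: :concurrently normally needs disable_ddl_transaction!. A hedged sketch of the usual shape; whether the actual file already declares this is not visible in the hunk:

class AddStatusToImports < ActiveRecord::Migration[8.0]
  # Without this, PostgreSQL raises:
  # "CREATE INDEX CONCURRENTLY cannot run inside a transaction block".
  disable_ddl_transaction!

  def change
    add_column :imports, :status, :integer, default: 0, null: false
    add_index :imports, :status, algorithm: :concurrently

    Import.update_all(status: :completed)
  end
end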
@@ -9,10 +9,8 @@ RSpec.describe Users::ImportDataJob, type: :job do
  let(:job) { described_class.new }

  before do
    # Create a mock ZIP file
    FileUtils.touch(archive_path)

    # Mock the import file attachment
    allow(import).to receive(:file).and_return(
      double('ActiveStorage::Attached::One',
        download: proc { |&block|
@@ -29,7 +27,6 @@ RSpec.describe Users::ImportDataJob, type: :job do
  describe '#perform' do
    context 'when import is successful' do
      before do
        # Mock the import service
        import_service = instance_double(Users::ImportData)
        allow(Users::ImportData).to receive(:new).and_return(import_service)
        allow(import_service).to receive(:import).and_return({
@@ -46,7 +43,6 @@ RSpec.describe Users::ImportDataJob, type: :job do
          files_restored: 7
        })

        # Mock file operations
        allow(File).to receive(:exist?).and_return(true)
        allow(File).to receive(:delete)
        allow(Rails.logger).to receive(:info)
@@ -84,21 +80,17 @@ RSpec.describe Users::ImportDataJob, type: :job do
      let(:error) { StandardError.new(error_message) }

      before do
        # Mock the import service to raise an error
        import_service = instance_double(Users::ImportData)
        allow(Users::ImportData).to receive(:new).and_return(import_service)
        allow(import_service).to receive(:import).and_raise(error)

        # Mock notification creation
        notification_service = instance_double(::Notifications::Create, call: true)
        allow(::Notifications::Create).to receive(:new).and_return(notification_service)

        # Mock file operations
        allow(File).to receive(:exist?).and_return(true)
        allow(File).to receive(:delete)
        allow(Rails.logger).to receive(:info)

        # Mock ExceptionReporter
        allow(ExceptionReporter).to receive(:call)
      end
@@ -149,12 +141,10 @@ RSpec.describe Users::ImportDataJob, type: :job do
      let(:error) { StandardError.new(error_message) }

      before do
        # Mock file download to fail
        allow(import).to receive(:file).and_return(
          double('ActiveStorage::Attached::One', download: proc { raise error })
        )

        # Mock notification creation
        notification_service = instance_double(::Notifications::Create, call: true)
        allow(::Notifications::Create).to receive(:new).and_return(notification_service)
      end
@@ -8,6 +8,8 @@ RSpec.describe ReverseGeocoding::Points::FetchData do
  let(:point) { create(:point) }

  context 'when Geocoder returns city and country' do
    let!(:germany) { create(:country, name: 'Germany', iso_a2: 'DE', iso_a3: 'DEU') }

    before do
      allow(Geocoder).to receive(:search).and_return(
        [
@@ -27,10 +29,10 @@ RSpec.describe ReverseGeocoding::Points::FetchData do
    it 'updates point with city and country' do
      expect { fetch_data }.to change { point.reload.city }
        .from(nil).to('Berlin')
        .and change { point.reload.country_id }.from(nil).to(be_present)
        .and change { point.reload.country_id }.from(nil).to(germany.id)
    end

    it 'creates country with correct ISO codes' do
    it 'finds existing country' do
      fetch_data
      country = point.reload.country
      expect(country.name).to eq('Germany')
@@ -82,28 +84,27 @@ RSpec.describe ReverseGeocoding::Points::FetchData do
    end
  end

  context 'when Geocoder returns country name without ISO code' do
  context 'when Geocoder returns country name that does not exist in database' do
    before do
      allow(Geocoder).to receive(:search).and_return(
        [
          double(
            city: 'Paris',
            country: 'France',
            country: 'NonExistentCountry',
            data: {
              'address' => 'Address',
              'properties' => { 'city' => 'Paris' } # No countrycode property
              'properties' => { 'city' => 'Paris' }
            }
          )
        ]
      )
    end

    it 'creates country with correct ISO codes from country name mapping' do
      fetch_data
      country = point.reload.country
      expect(country.name).to eq('France')
      expect(country.iso_a2).to eq('FR')
      expect(country.iso_a3).to eq('FRA')
    it 'does not set country_id when country is not found' do
      expect { fetch_data }.to change { point.reload.city }
        .from(nil).to('Paris')

      expect(point.reload.country_id).to be_nil
    end
  end
@@ -65,13 +65,12 @@ RSpec.describe Users::ImportData::Points, type: :service do
      ]
    end

    it 'creates the country and assigns it' do
      expect { service.call }.to change(Country, :count).by(1)
    it 'does not create country and leaves country_id nil' do
      expect { service.call }.not_to change(Country, :count)

      point = user.tracked_points.last
      expect(point.country.name).to eq('NewCountry')
      expect(point.country.iso_a2).to eq('NC')
      expect(point.country.iso_a3).to eq('NCO')
      expect(point.country_id).to be_nil
      expect(point.city).to eq('Berlin')
    end
  end
@@ -1,92 +0,0 @@
Run options: include {locations: {"./spec/services/users/export_import_integration_spec.rb" => [21]}}
Created dataset with 12 points

=== DEBUGGING EXPORT DATA ===
Export counts: {"areas" => 3, "imports" => 2, "exports" => 2, "trips" => 2, "stats" => 2, "notifications" => 4, "points" => 12, "visits" => 3, "places" => 2}
Points in export: 12
Places in export: 2
First point sample: {"timestamp" => 1714559220, "longitude" => -73.9851, "latitude" => 40.7589, "import_reference" => {"name" => "March 2024 Data", "source" => 0, "created_at" => "2025-06-30T16:10:46.550Z"}, "country_info" => {"name" => "United States", "iso_a2" => "US", "iso_a3" => "USA"}, "visit_reference" => {"name" => "Work Visit", "started_at" => "2025-06-30T16:10:46.711Z", "ended_at" => "2025-06-30T17:10:46.711Z"}}
First place sample: {"name" => "Office Building", "latitude" => "40.7589", "longitude" => "-73.9851", "source" => "manual"}
Imports in export: 2
Countries referenced: ["United States", "Canada"]
=== END DEBUG ===
Import stats: {settings_updated: true, areas_created: 3, places_created: 0, imports_created: 2, exports_created: 2, trips_created: 2, stats_created: 2, notifications_created: 4, visits_created: 3, points_created: 0, files_restored: 3}
Original counts: {areas: 3, imports: 2, exports: 2, trips: 2, stats: 2, notifications: 5, points: 12, visits: 3, places: 2}
Target counts: {areas: 3, imports: 2, exports: 2, trips: 2, stats: 2, notifications: 5, points: 0, visits: 3, places: 2}
F
/Users/frey/.rvm/rubies/ruby-3.4.1/bin/ruby -I/Users/frey/.rvm/gems/ruby-3.4.1/gems/rspec-core-3.13.3/lib:/Users/frey/.rvm/gems/ruby-3.4.1/gems/rspec-support-3.13.3/lib /Users/frey/.rvm/gems/ruby-3.4.1/gems/rspec-core-3.13.3/exe/rspec --pattern spec/swagger/\*\*/\*_spec.rb --format Rswag::Specs::SwaggerFormatter --dry-run --order defined
Generating Swagger docs ...
Swagger doc generated at /Users/frey/projects/dawarich/dawarich/swagger/v1/swagger.yaml

Top 10 slowest examples (0.00002 seconds, 0.6% of total time):
  Areas API /api/v1/areas post area created returns a 201 response
    0.00001 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Stats API /api/v1/stats get stats found returns a 200 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Areas API /api/v1/areas post invalid request returns a 422 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Areas API /api/v1/areas/{id} delete area deleted returns a 200 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Health API /api/v1/health get Healthy returns a 200 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Points API /api/v1/points get points found returns a 200 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Users API /api/v1/users/me get user found returns a 200 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Settings API /api/v1/settings get settings found returns a 200 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Settings API /api/v1/settings patch settings updated returns a 200 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Points API /api/v1/points/{id} delete point deleted returns a 200 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143

Top 10 slowest example groups:
  Health API
    0.00039 seconds average (0.00039 seconds / 1 example) ./spec/swagger/api/v1/health_controller_spec.rb:5
  Points API
    0.00016 seconds average (0.00065 seconds / 4 examples) ./spec/swagger/api/v1/points_controller_spec.rb:5
  Areas API
    0.00013 seconds average (0.00052 seconds / 4 examples) ./spec/swagger/api/v1/areas_controller_spec.rb:5
  Stats API
    0.00013 seconds average (0.00013 seconds / 1 example) ./spec/swagger/api/v1/stats_controller_spec.rb:5
  Users API
    0.00012 seconds average (0.00012 seconds / 1 example) ./spec/swagger/api/v1/users_controller_spec.rb:5
  Settings API
    0.00011 seconds average (0.00021 seconds / 2 examples) ./spec/swagger/api/v1/settings_controller_spec.rb:5
  Overland Batches API
    0.0001 seconds average (0.0002 seconds / 2 examples) ./spec/swagger/api/v1/overland/batches_controller_spec.rb:5
  Api::V1::Countries::VisitedCities
    0.00009 seconds average (0.00017 seconds / 2 examples) ./spec/swagger/api/v1/countries/visited_cities_spec.rb:5
  OwnTracks Points API
    0.00009 seconds average (0.00017 seconds / 2 examples) ./spec/swagger/api/v1/owntracks/points_controller_spec.rb:5
  Api::V1::PhotosController
    0.00008 seconds average (0.00025 seconds / 3 examples) ./spec/swagger/api/v1/photos_controller_spec.rb:5

Finished in 0.00388 seconds (files took 1.87 seconds to load)
24 examples, 0 failures

Coverage report generated for RSpec to /Users/frey/projects/dawarich/dawarich/coverage.
Line Coverage: 61.36% (670 / 1092)

Failures:

  1) Users Export-Import Integration complete export-import cycle exports and imports all user data while preserving relationships
     Failure/Error: expect(target_counts[:points]).to eq(original_counts[:points])
       Expected 0 to eq 12.
     # ./spec/services/users/export_import_integration_spec.rb:71:in 'block (3 levels) in <top (required)>'
     # /Users/frey/.rvm/gems/ruby-3.4.1/gems/webmock-3.25.1/lib/webmock/rspec.rb:39:in 'block (2 levels) in <top (required)>'

Top 1 slowest examples (0.67919 seconds, 16.2% of total time):
  Users Export-Import Integration complete export-import cycle exports and imports all user data while preserving relationships
    0.67919 seconds ./spec/services/users/export_import_integration_spec.rb:21

Finished in 4.18 seconds (files took 2.21 seconds to load)
1 example, 1 failure

Failed examples:

rspec ./spec/services/users/export_import_integration_spec.rb:21 # Users Export-Import Integration complete export-import cycle exports and imports all user data while preserving relationships

Coverage report generated for RSpec to /Users/frey/projects/dawarich/dawarich/coverage.
Line Coverage: 65.56% (1593 / 2430)
Stopped processing SimpleCov as a previous error not related to SimpleCov has been detected