Update changelog

Eugene Burmakin 2025-06-30 22:51:25 +02:00
parent 32a00db9b9
commit 1ebe2da84a
21 changed files with 75 additions and 343 deletions

View file

@@ -1 +1 @@
-0.28.1
+0.29.0

View file

@@ -6,6 +6,12 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
 # [0.29.0] - 2025-06-30
+
+You can now move your user data between Dawarich instances. Go to your Account settings and click the "Export my data" button under the password section. An export will be created, and you will be able to download it on the Exports page once it's ready.
+
+To import your data on a new Dawarich instance, create a new user and upload the exported zip file. You can also import your data on the Account page by clicking the "Import my data" button under the password section.
+
+This feature is experimental and not yet intended to replace a proper backup solution. Use it at your own risk.
 ## Added
 - In the User Settings, you can now export your user data as a zip file. It will contain the following:
@@ -20,9 +26,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
 - All your stats
 - In the User Settings, you can now import your user data from a zip file. It will import all the data from the zip file, listed above. It will also start stats recalculation.
 - Export file size is now displayed in the exports and imports lists.
 - A button to download an import file is now displayed in the imports list. It may not work properly for imports created before the 0.25.4 release.
+- Imports now have statuses.
 ## Changed
@@ -32,6 +38,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
 - Email links now use the SMTP domain if set. #1469
 # 0.28.1 - 2025-06-11
 ## Fixed
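The import side of this commit reads a single JSON payload and dispatches on its top-level keys. As rough orientation, here is a minimal sketch of that payload's shape, inferred from the keys referenced in `Users::ImportData#import_in_correct_order` below; the entity fields shown are illustrative assumptions, not the authoritative export schema:

```ruby
# Hypothetical shape of the exported archive's JSON payload (illustrative only).
payload = {
  'counts'   => { 'areas' => 1, 'points' => 2 },  # optional expected totals, logged on import
  'settings' => { 'distance_unit' => 'km' },      # merged into the user's existing settings
  'areas'    => [{ 'name' => 'Home', 'latitude' => 52.52, 'longitude' => 13.405, 'radius' => 100 }],
  'places'   => [{ 'name' => 'Office', 'latitude' => 52.5, 'longitude' => 13.45 }],
  'points'   => [{ 'lonlat' => 'POINT(13.405 52.52)', 'timestamp' => 1_719_772_800 }]
}

payload.keys # => ["counts", "settings", "areas", "places", "points"]
```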

View file

@@ -1,22 +1,13 @@
 # frozen_string_literal: true

 class Users::ExportData::Notifications
-  # System-generated notification titles that should not be exported
-  SYSTEM_NOTIFICATION_TITLES = [
-    'Data import completed',
-    'Data import failed',
-    'Export completed',
-    'Export failed'
-  ].freeze
-
   def initialize(user)
     @user = user
   end

   def call
-    # Export only user-generated notifications, not system-generated ones
+    # Export all notifications for the user
     user.notifications
-        .where.not(title: SYSTEM_NOTIFICATION_TITLES)
        .as_json(except: %w[user_id id])
   end

View file

@@ -6,7 +6,6 @@ class Users::ExportData::Points
   end

   def call
-    # Single optimized query with all joins to avoid N+1 queries
    points_sql = <<-SQL
      SELECT
        p.id, p.battery_status, p.battery, p.timestamp, p.altitude, p.velocity, p.accuracy,
@@ -42,9 +41,7 @@ class Users::ExportData::Points
    Rails.logger.info "Processing #{result.count} points for export..."

-    # Process results efficiently
    result.filter_map do |row|
-      # Skip points without any coordinate data
      has_lonlat = row['lonlat'].present?
      has_coordinates = row['computed_longitude'].present? && row['computed_latitude'].present?

View file

@@ -41,7 +41,6 @@ class Users::ImportData
   end

   def import
-    # Create a temporary directory for extraction
    @import_directory = Rails.root.join('tmp', "import_#{user.email.gsub(/[^0-9A-Za-z._-]/, '_')}_#{Time.current.to_i}")
    FileUtils.mkdir_p(@import_directory)
@@ -74,10 +73,8 @@ class Users::ImportData
      zip_file.each do |entry|
        extraction_path = @import_directory.join(entry.name)

-        # Ensure directory exists
        FileUtils.mkdir_p(File.dirname(extraction_path))
-        # Extract file
        entry.extract(extraction_path)
      end
    end
@@ -98,15 +95,12 @@ class Users::ImportData
  def import_in_correct_order(data)
    Rails.logger.info "Starting data import for user: #{user.email}"

-    # Log expected counts if available
    if data['counts']
      Rails.logger.info "Expected entity counts from export: #{data['counts']}"
    end

-    # Debug: Log what data keys are available
    Rails.logger.debug "Available data keys: #{data.keys.inspect}"

-    # Import in dependency order
    import_settings(data['settings']) if data['settings']
    import_areas(data['areas']) if data['areas']
    import_places(data['places']) if data['places']
@@ -178,26 +172,14 @@ class Users::ImportData
  end

  def import_points(points_data)
-    puts "=== POINTS IMPORT DEBUG ==="
-    puts "About to import #{points_data&.size || 0} points"
-    puts "Points data present: #{points_data.present?}"
-    puts "First point sample: #{points_data&.first&.slice('timestamp', 'longitude', 'latitude') if points_data&.first}"
-    puts "=== END POINTS IMPORT DEBUG ==="
-
    Rails.logger.info "About to import #{points_data&.size || 0} points"
-    Rails.logger.info "Points data present: #{points_data.present?}"
-    Rails.logger.info "First point sample: #{points_data&.first&.slice('timestamp', 'longitude', 'latitude') if points_data&.first}"

    begin
      points_created = Users::ImportData::Points.new(user, points_data).call
-      Rails.logger.info "Points import returned: #{points_created}"
-      puts "Points import returned: #{points_created}"
      @import_stats[:points_created] = points_created
    rescue StandardError => e
-      Rails.logger.error "Points import failed: #{e.message}"
-      Rails.logger.error "Backtrace: #{e.backtrace.first(5).join('\n')}"
-      puts "Points import failed: #{e.message}"
+      ExceptionReporter.call(e, 'Points import failed')
      @import_stats[:points_created] = 0
    end
  end
@@ -212,16 +194,6 @@ class Users::ImportData
  end

  def create_success_notification
-    # Check if we already have a recent import success notification to avoid duplicates
-    recent_import_notification = user.notifications.where(
-      title: 'Data import completed'
-    ).where('created_at > ?', 5.minutes.ago).first
-
-    if recent_import_notification
-      Rails.logger.debug "Skipping duplicate import success notification"
-      return
-    end
-
    summary = "#{@import_stats[:points_created]} points, " \
              "#{@import_stats[:visits_created]} visits, " \
              "#{@import_stats[:places_created]} places, " \

View file

@@ -13,7 +13,6 @@ class Users::ImportData::Areas
     Rails.logger.info "Importing #{areas_data.size} areas for user: #{user.email}"

-    # Filter valid areas and prepare for bulk import
     valid_areas = filter_and_prepare_areas

     if valid_areas.empty?
@@ -21,14 +20,12 @@ class Users::ImportData::Areas
       return 0
     end

-    # Remove existing areas to avoid duplicates
     deduplicated_areas = filter_existing_areas(valid_areas)

     if deduplicated_areas.size < valid_areas.size
       Rails.logger.debug "Skipped #{valid_areas.size - deduplicated_areas.size} duplicate areas"
     end

-    # Bulk import in batches
     total_created = bulk_import_areas(deduplicated_areas)

     Rails.logger.info "Areas import completed. Created: #{total_created}"
@@ -46,13 +43,12 @@ class Users::ImportData::Areas
     areas_data.each do |area_data|
       next unless area_data.is_a?(Hash)

-      # Skip areas with missing required data
       unless valid_area_data?(area_data)
         skipped_count += 1
         next
       end

-      # Prepare area attributes for bulk insert
       prepared_attributes = prepare_area_attributes(area_data)
       valid_areas << prepared_attributes if prepared_attributes
     end
@@ -65,18 +61,13 @@ class Users::ImportData::Areas
   end

   def prepare_area_attributes(area_data)
-    # Start with base attributes, excluding timestamp fields
     attributes = area_data.except('created_at', 'updated_at')

-    # Add required attributes for bulk insert
     attributes['user_id'] = user.id
     attributes['created_at'] = Time.current
     attributes['updated_at'] = Time.current
+    attributes['radius'] ||= 100

-    # Ensure radius is present (required by model validation)
-    attributes['radius'] ||= 100 # Default radius if not provided
-
-    # Convert string keys to symbols for consistency
     attributes.symbolize_keys
   rescue StandardError => e
     Rails.logger.error "Failed to prepare area attributes: #{e.message}"
@@ -87,17 +78,13 @@ class Users::ImportData::Areas
   def filter_existing_areas(areas)
     return areas if areas.empty?

-    # Build lookup hash of existing areas for this user
     existing_areas_lookup = {}
     user.areas.select(:name, :latitude, :longitude).each do |area|
-      # Normalize decimal values for consistent comparison
       key = [area.name, area.latitude.to_f, area.longitude.to_f]
       existing_areas_lookup[key] = true
     end

-    # Filter out areas that already exist
     filtered_areas = areas.reject do |area|
-      # Normalize decimal values for consistent comparison
       key = [area[:name], area[:latitude].to_f, area[:longitude].to_f]
       if existing_areas_lookup[key]
         Rails.logger.debug "Area already exists: #{area[:name]}"
@@ -115,7 +102,6 @@ class Users::ImportData::Areas
     areas.each_slice(BATCH_SIZE) do |batch|
       begin
-        # Use upsert_all to efficiently bulk insert areas
         result = Area.upsert_all(
           batch,
           returning: %w[id],
@@ -131,7 +117,6 @@ class Users::ImportData::Areas
         Rails.logger.error "Failed to process area batch: #{e.message}"
         Rails.logger.error "Batch size: #{batch.size}"
         Rails.logger.error "Backtrace: #{e.backtrace.first(3).join('\n')}"
-        # Continue with next batch instead of failing completely
       end
     end
@@ -139,7 +124,6 @@ class Users::ImportData::Areas
   end

   def valid_area_data?(area_data)
-    # Check for required fields
     return false unless area_data.is_a?(Hash)
     return false unless area_data['name'].present?
     return false unless area_data['latitude'].present?
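The Areas importer above shows the bulk pattern every importer in this commit follows: validate and prepare rows, filter out rows that already exist via an in-memory lookup, then write with `upsert_all` in fixed-size batches so one bad batch does not abort the rest. A condensed, self-contained sketch of the pattern (the model, key function, and batch size are illustrative):

```ruby
BATCH_SIZE = 1_000 # assumed value; the real constant is outside the visible hunks

# Generic bulk-insert helper: skip rows whose key already exists, then upsert in batches.
def bulk_insert(model, rows, existing_keys, key_fn)
  fresh = rows.reject { |row| existing_keys.include?(key_fn.call(row)) }

  fresh.each_slice(BATCH_SIZE).sum do |batch|
    # upsert_all bypasses model validations and callbacks and writes one SQL statement
    model.upsert_all(batch, returning: %w[id]).count
  rescue StandardError => e
    Rails.logger.error "Batch failed, continuing with next batch: #{e.message}"
    0
  end
end
```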

View file

@@ -18,7 +18,6 @@ class Users::ImportData::Exports
     exports_data.each do |export_data|
       next unless export_data.is_a?(Hash)

-      # Check if export already exists (match by name and created_at)
       existing_export = user.exports.find_by(
         name: export_data['name'],
         created_at: export_data['created_at']
@@ -29,11 +28,9 @@ class Users::ImportData::Exports
         next
       end

-      # Create new export
       export_record = create_export_record(export_data)
       exports_created += 1

-      # Restore file if present
       if export_data['file_name'] && restore_export_file(export_record, export_data)
         files_restored += 1
       end
@@ -73,7 +70,6 @@ class Users::ImportData::Exports
     end

     begin
-      # Attach the file to the export record
       export_record.file.attach(
         io: File.open(file_path),
         filename: export_data['original_filename'] || export_data['file_name'],

View file

@@ -18,7 +18,6 @@ class Users::ImportData::Imports
     imports_data.each do |import_data|
       next unless import_data.is_a?(Hash)

-      # Check if import already exists (match by name, source, and created_at)
       existing_import = user.imports.find_by(
         name: import_data['name'],
         source: import_data['source'],
@@ -30,13 +29,11 @@ class Users::ImportData::Imports
         next
       end

-      # Create new import
       import_record = create_import_record(import_data)
       next unless import_record # Skip if creation failed

       imports_created += 1

-      # Restore file if present
       if import_data['file_name'] && restore_import_file(import_record, import_data)
         files_restored += 1
       end
@@ -55,7 +52,6 @@ class Users::ImportData::Imports
     begin
       import_record = user.imports.build(import_attributes)

-      # Skip background processing since we're importing user data directly
       import_record.skip_background_processing = true
       import_record.save!
       Rails.logger.debug "Created import: #{import_record.name}"
@@ -86,7 +82,6 @@ class Users::ImportData::Imports
     end

     begin
-      # Attach the file to the import record
       import_record.file.attach(
         io: File.open(file_path),
         filename: import_data['original_filename'] || import_data['file_name'],
@@ -97,7 +92,7 @@ class Users::ImportData::Imports
       true
     rescue StandardError => e
-      ExceptionReporter.call(e, "Import file restoration failed")
+      ExceptionReporter.call(e, 'Import file restoration failed')
       false
     end

View file

@@ -13,22 +13,19 @@ class Users::ImportData::Notifications
     Rails.logger.info "Importing #{notifications_data.size} notifications for user: #{user.email}"

-    # Filter valid notifications and prepare for bulk import
     valid_notifications = filter_and_prepare_notifications

     if valid_notifications.empty?
-      Rails.logger.info "Notifications import completed. Created: 0"
+      Rails.logger.info 'Notifications import completed. Created: 0'
       return 0
     end

-    # Remove existing notifications to avoid duplicates
     deduplicated_notifications = filter_existing_notifications(valid_notifications)

     if deduplicated_notifications.size < valid_notifications.size
       Rails.logger.debug "Skipped #{valid_notifications.size - deduplicated_notifications.size} duplicate notifications"
     end

-    # Bulk import in batches
     total_created = bulk_import_notifications(deduplicated_notifications)

     Rails.logger.info "Notifications import completed. Created: #{total_created}"
@@ -46,13 +43,11 @@ class Users::ImportData::Notifications
     notifications_data.each do |notification_data|
       next unless notification_data.is_a?(Hash)

-      # Skip notifications with missing required data
       unless valid_notification_data?(notification_data)
         skipped_count += 1
         next
       end

-      # Prepare notification attributes for bulk insert
       prepared_attributes = prepare_notification_attributes(notification_data)
       valid_notifications << prepared_attributes if prepared_attributes
     end
@@ -65,20 +60,16 @@ class Users::ImportData::Notifications
   end

   def prepare_notification_attributes(notification_data)
-    # Start with base attributes, excluding only updated_at (preserve created_at for duplicate logic)
     attributes = notification_data.except('updated_at')

-    # Add required attributes for bulk insert
     attributes['user_id'] = user.id

-    # Preserve original created_at if present, otherwise use current time
     unless attributes['created_at'].present?
       attributes['created_at'] = Time.current
     end

     attributes['updated_at'] = Time.current

-    # Convert string keys to symbols for consistency
     attributes.symbolize_keys
   rescue StandardError => e
     Rails.logger.error "Failed to prepare notification attributes: #{e.message}"
@@ -89,26 +80,20 @@ class Users::ImportData::Notifications
   def filter_existing_notifications(notifications)
     return notifications if notifications.empty?

-    # Build lookup hash of existing notifications for this user
-    # Use title and content as the primary deduplication key
     existing_notifications_lookup = {}
     user.notifications.select(:title, :content, :created_at, :kind).each do |notification|
-      # Primary key: title + content
       primary_key = [notification.title.strip, notification.content.strip]
-      # Secondary key: include timestamp for exact matches
       exact_key = [notification.title.strip, notification.content.strip, normalize_timestamp(notification.created_at)]

       existing_notifications_lookup[primary_key] = true
       existing_notifications_lookup[exact_key] = true
     end

-    # Filter out notifications that already exist
     filtered_notifications = notifications.reject do |notification|
       title = notification[:title]&.strip
       content = notification[:content]&.strip

-      # Check both primary key (title + content) and exact key (with timestamp)
       primary_key = [title, content]
       exact_key = [title, content, normalize_timestamp(notification[:created_at])]
@@ -125,12 +110,8 @@ class Users::ImportData::Notifications
   def normalize_timestamp(timestamp)
     case timestamp
-    when String
-      # Parse string and convert to unix timestamp for consistent comparison
-      Time.parse(timestamp).to_i
-    when Time, DateTime
-      # Convert time objects to unix timestamp for consistent comparison
-      timestamp.to_i
+    when String then Time.parse(timestamp).to_i
+    when Time, DateTime then timestamp.to_i
     else
       timestamp.to_s
     end
@@ -144,7 +125,6 @@ class Users::ImportData::Notifications
     notifications.each_slice(BATCH_SIZE) do |batch|
       begin
-        # Use upsert_all to efficiently bulk insert notifications
         result = Notification.upsert_all(
           batch,
           returning: %w[id],
@@ -160,7 +140,6 @@ class Users::ImportData::Notifications
         Rails.logger.error "Failed to process notification batch: #{e.message}"
         Rails.logger.error "Batch size: #{batch.size}"
         Rails.logger.error "Backtrace: #{e.backtrace.first(3).join('\n')}"
-        # Continue with next batch instead of failing completely
       end
     end
@@ -168,7 +147,6 @@ class Users::ImportData::Notifications
   end

   def valid_notification_data?(notification_data)
-    # Check for required fields
     return false unless notification_data.is_a?(Hash)

     unless notification_data['title'].present?

View file

@@ -16,7 +16,6 @@ class Users::ImportData::Places
     places_data.each do |place_data|
       next unless place_data.is_a?(Hash)

-      # Find or create place by name and coordinates
       place = find_or_create_place(place_data)
       places_created += 1 if place&.respond_to?(:previously_new_record?) && place.previously_new_record?
     end
@@ -34,16 +33,13 @@ class Users::ImportData::Places
     latitude = place_data['latitude']&.to_f
     longitude = place_data['longitude']&.to_f

-    # Skip if essential data is missing
     unless name.present? && latitude.present? && longitude.present?
       Rails.logger.debug "Skipping place with missing required data: #{place_data.inspect}"
       return nil
     end

-    # Try to find existing place by name first, then by coordinates
     existing_place = Place.find_by(name: name)

-    # If no place with same name, check by coordinates
     unless existing_place
       existing_place = Place.where(latitude: latitude, longitude: longitude).first
     end
@@ -54,12 +50,10 @@ class Users::ImportData::Places
       return existing_place
     end

-    # Create new place with lonlat point
     place_attributes = place_data.except('created_at', 'updated_at', 'latitude', 'longitude')
     place_attributes['lonlat'] = "POINT(#{longitude} #{latitude})"
     place_attributes['latitude'] = latitude
     place_attributes['longitude'] = longitude
-    # Remove any user reference since Place doesn't belong to user directly
     place_attributes.delete('user')

     begin
@@ -69,7 +63,8 @@ class Users::ImportData::Places
       place
     rescue ActiveRecord::RecordInvalid => e
-      Rails.logger.error "Failed to create place: #{e.message}"
+      ExceptionReporter.call(e, 'Failed to create place')
+
       nil
     end
   end

View file

@@ -11,40 +11,25 @@ class Users::ImportData::Points
   def call
     return 0 unless points_data.is_a?(Array)

-    puts "=== POINTS SERVICE DEBUG ==="
-    puts "Points data is array: #{points_data.is_a?(Array)}"
-    puts "Points data size: #{points_data.size}"
-
     Rails.logger.info "Importing #{points_data.size} points for user: #{user.email}"
     Rails.logger.debug "First point sample: #{points_data.first.inspect}"

-    # Pre-load reference data for efficient bulk processing
     preload_reference_data

-    # Filter valid points and prepare for bulk import
     valid_points = filter_and_prepare_points

-    puts "Valid points after filtering: #{valid_points.size}"
-
     if valid_points.empty?
-      puts "No valid points after filtering - returning 0"
       Rails.logger.warn "No valid points to import after filtering"
       Rails.logger.debug "Original points_data size: #{points_data.size}"
       return 0
     end

-    # Remove duplicates based on unique constraint
     deduplicated_points = deduplicate_points(valid_points)

-    puts "Deduplicated points: #{deduplicated_points.size}"
     Rails.logger.info "Prepared #{deduplicated_points.size} unique valid points (#{points_data.size - deduplicated_points.size} duplicates/invalid skipped)"

-    # Bulk import in batches
     total_created = bulk_import_points(deduplicated_points)

-    puts "Total created by bulk import: #{total_created}"
     Rails.logger.info "Points import completed. Created: #{total_created}"

     total_created
   end
@@ -54,10 +39,8 @@ class Users::ImportData::Points
   attr_reader :user, :points_data, :imports_lookup, :countries_lookup, :visits_lookup

   def preload_reference_data
-    # Pre-load imports for this user with multiple lookup keys for flexibility
     @imports_lookup = {}
     user.imports.each do |import|
-      # Create keys for both string and integer source representations
       string_key = [import.name, import.source, import.created_at.utc.iso8601]
       integer_key = [import.name, Import.sources[import.source], import.created_at.utc.iso8601]
@@ -66,16 +49,13 @@ class Users::ImportData::Points
     end

     Rails.logger.debug "Loaded #{user.imports.size} imports with #{@imports_lookup.size} lookup keys"

-    # Pre-load all countries for efficient lookup
     @countries_lookup = {}
     Country.all.each do |country|
-      # Index by all possible lookup keys
       @countries_lookup[[country.name, country.iso_a2, country.iso_a3]] = country
       @countries_lookup[country.name] = country
     end

     Rails.logger.debug "Loaded #{Country.count} countries for lookup"

-    # Pre-load visits for this user
     @visits_lookup = user.visits.index_by { |visit|
       [visit.name, visit.started_at.utc.iso8601, visit.ended_at.utc.iso8601]
     }
@@ -89,14 +69,12 @@ class Users::ImportData::Points
     points_data.each_with_index do |point_data, index|
       next unless point_data.is_a?(Hash)

-      # Skip points with invalid or missing required data
       unless valid_point_data?(point_data)
         skipped_count += 1
         Rails.logger.debug "Skipped point #{index}: invalid data - #{point_data.slice('timestamp', 'longitude', 'latitude', 'lonlat')}"
         next
       end

-      # Prepare point attributes for bulk insert
       prepared_attributes = prepare_point_attributes(point_data)
       unless prepared_attributes
         skipped_count += 1
@@ -116,48 +94,41 @@ class Users::ImportData::Points
   end

   def prepare_point_attributes(point_data)
-    # Start with base attributes, excluding fields that need special handling
     attributes = point_data.except(
       'created_at',
       'updated_at',
       'import_reference',
       'country_info',
       'visit_reference',
-      'country' # Exclude the string country field - handled via country_info relationship
+      'country'
     )

-    # Handle lonlat reconstruction if missing (for backward compatibility)
     ensure_lonlat_field(attributes, point_data)

-    # Remove longitude/latitude after lonlat reconstruction to ensure consistent keys
     attributes.delete('longitude')
     attributes.delete('latitude')

-    # Add required attributes for bulk insert
     attributes['user_id'] = user.id
     attributes['created_at'] = Time.current
     attributes['updated_at'] = Time.current

-    # Resolve foreign key relationships
     resolve_import_reference(attributes, point_data['import_reference'])
     resolve_country_reference(attributes, point_data['country_info'])
     resolve_visit_reference(attributes, point_data['visit_reference'])

-    # Convert string keys to symbols for consistency with Point model
     result = attributes.symbolize_keys
     Rails.logger.debug "Prepared point attributes: #{result.slice(:lonlat, :timestamp, :import_id, :country_id, :visit_id)}"
     result
   rescue StandardError => e
-    Rails.logger.error "Failed to prepare point attributes: #{e.message}"
-    Rails.logger.error "Point data: #{point_data.inspect}"
+    ExceptionReporter.call(e, 'Failed to prepare point attributes')
     nil
   end

   def resolve_import_reference(attributes, import_reference)
     return unless import_reference.is_a?(Hash)

-    # Normalize timestamp format to ISO8601 for consistent lookup
     created_at = normalize_timestamp_for_lookup(import_reference['created_at'])

     import_key = [
@@ -179,11 +150,9 @@ class Users::ImportData::Points
   def resolve_country_reference(attributes, country_info)
     return unless country_info.is_a?(Hash)

-    # Try to find country by all attributes first
     country_key = [country_info['name'], country_info['iso_a2'], country_info['iso_a3']]
     country = countries_lookup[country_key]

-    # If not found by all attributes, try to find by name only
     if country.nil? && country_info['name'].present?
       country = countries_lookup[country_info['name']]
     end
@@ -201,7 +170,6 @@ class Users::ImportData::Points
   def resolve_visit_reference(attributes, visit_reference)
     return unless visit_reference.is_a?(Hash)

-    # Normalize timestamp formats to ISO8601 for consistent lookup
     started_at = normalize_timestamp_for_lookup(visit_reference['started_at'])
     ended_at = normalize_timestamp_for_lookup(visit_reference['ended_at'])
@@ -225,9 +193,7 @@ class Users::ImportData::Points
     points.uniq { |point| [point[:lonlat], point[:timestamp], point[:user_id]] }
   end

-  # Ensure all points have the same keys for upsert_all compatibility
   def normalize_point_keys(points)
-    # Get all possible keys from all points
     all_keys = points.flat_map(&:keys).uniq

     # Normalize each point to have all keys (with nil for missing ones)
@@ -243,22 +209,13 @@ class Users::ImportData::Points
   def bulk_import_points(points)
     total_created = 0

-    puts "=== BULK IMPORT DEBUG ==="
-    puts "About to bulk import #{points.size} points"
-    puts "First point for import: #{points.first.inspect}"
-
     points.each_slice(BATCH_SIZE) do |batch|
       begin
         Rails.logger.debug "Processing batch of #{batch.size} points"
         Rails.logger.debug "First point in batch: #{batch.first.inspect}"

-        puts "Processing batch of #{batch.size} points"
-        puts "Sample point attributes: #{batch.first.slice(:lonlat, :timestamp, :user_id, :import_id, :country_id, :visit_id)}"
-
-        # Normalize all points to have the same keys for upsert_all compatibility
         normalized_batch = normalize_point_keys(batch)

-        # Use upsert_all to efficiently bulk insert/update points
         result = Point.upsert_all(
           normalized_batch,
           unique_by: %i[lonlat timestamp user_id],
@@ -269,32 +226,23 @@ class Users::ImportData::Points
         batch_created = result.count
         total_created += batch_created

-        puts "Batch result count: #{batch_created}"
         Rails.logger.debug "Processed batch of #{batch.size} points, created #{batch_created}, total created: #{total_created}"
       rescue StandardError => e
-        puts "Batch import failed: #{e.message}"
-        puts "Backtrace: #{e.backtrace.first(3).join('\n')}"
-
         Rails.logger.error "Failed to process point batch: #{e.message}"
         Rails.logger.error "Batch size: #{batch.size}"
         Rails.logger.error "First point in failed batch: #{batch.first.inspect}"
         Rails.logger.error "Backtrace: #{e.backtrace.first(5).join('\n')}"
-        # Continue with next batch instead of failing completely
       end
     end

-    puts "Total created across all batches: #{total_created}"
-
     total_created
   end

   def valid_point_data?(point_data)
-    # Check for required fields
     return false unless point_data.is_a?(Hash)
     return false unless point_data['timestamp'].present?

-    # Check if we have either lonlat or longitude/latitude
     has_lonlat = point_data['lonlat'].present? && point_data['lonlat'].is_a?(String) && point_data['lonlat'].start_with?('POINT(')
     has_coordinates = point_data['longitude'].present? && point_data['latitude'].present?
@@ -307,7 +255,6 @@ class Users::ImportData::Points
   end

   def ensure_lonlat_field(attributes, point_data)
-    # If lonlat is missing but we have longitude/latitude, reconstruct it
     if attributes['lonlat'].blank? && point_data['longitude'].present? && point_data['latitude'].present?
       longitude = point_data['longitude'].to_f
       latitude = point_data['latitude'].to_f
@@ -321,13 +268,10 @@ class Users::ImportData::Points
     case timestamp
     when String
-      # Parse string timestamp and convert to UTC ISO8601 format
       Time.parse(timestamp).utc.iso8601
     when Time, DateTime
-      # Convert time objects to UTC ISO8601 format
       timestamp.utc.iso8601
     else
-      # Fallback to string representation
       timestamp.to_s
     end
   rescue StandardError => e
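Points store coordinates as a WKT string in `lonlat`, and `ensure_lonlat_field` rebuilds that string from separate longitude/latitude values when older exports lack it. A tiny sketch of the reconstruction; note that WKT puts longitude first:

```ruby
# Rebuild a WKT point from separate coordinate fields (longitude comes first in WKT).
def to_lonlat(longitude, latitude)
  "POINT(#{longitude.to_f} #{latitude.to_f})"
end

to_lonlat('13.405', '52.52') # => "POINT(13.405 52.52)"
```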

View file

@@ -11,7 +11,6 @@ class Users::ImportData::Settings
     Rails.logger.info "Importing settings for user: #{user.email}"

-    # Merge imported settings with existing settings
     current_settings = user.settings || {}
     updated_settings = current_settings.merge(settings_data)

View file

@@ -13,7 +13,6 @@ class Users::ImportData::Stats
     Rails.logger.info "Importing #{stats_data.size} stats for user: #{user.email}"

-    # Filter valid stats and prepare for bulk import
     valid_stats = filter_and_prepare_stats

     if valid_stats.empty?
@@ -21,14 +20,12 @@ class Users::ImportData::Stats
       return 0
     end

-    # Remove existing stats to avoid duplicates
     deduplicated_stats = filter_existing_stats(valid_stats)

     if deduplicated_stats.size < valid_stats.size
       Rails.logger.debug "Skipped #{valid_stats.size - deduplicated_stats.size} duplicate stats"
     end

-    # Bulk import in batches
     total_created = bulk_import_stats(deduplicated_stats)

     Rails.logger.info "Stats import completed. Created: #{total_created}"
@@ -46,13 +43,11 @@ class Users::ImportData::Stats
     stats_data.each do |stat_data|
       next unless stat_data.is_a?(Hash)

-      # Skip stats with missing required data
       unless valid_stat_data?(stat_data)
         skipped_count += 1
         next
       end

-      # Prepare stat attributes for bulk insert
       prepared_attributes = prepare_stat_attributes(stat_data)
       valid_stats << prepared_attributes if prepared_attributes
     end
@@ -65,33 +60,28 @@ class Users::ImportData::Stats
   end

   def prepare_stat_attributes(stat_data)
-    # Start with base attributes, excluding timestamp fields
     attributes = stat_data.except('created_at', 'updated_at')

-    # Add required attributes for bulk insert
     attributes['user_id'] = user.id
     attributes['created_at'] = Time.current
     attributes['updated_at'] = Time.current

-    # Convert string keys to symbols for consistency
     attributes.symbolize_keys
   rescue StandardError => e
-    Rails.logger.error "Failed to prepare stat attributes: #{e.message}"
-    Rails.logger.error "Stat data: #{stat_data.inspect}"
+    ExceptionReporter.call(e, 'Failed to prepare stat attributes')
     nil
   end

   def filter_existing_stats(stats)
     return stats if stats.empty?

-    # Build lookup hash of existing stats for this user
     existing_stats_lookup = {}
     user.stats.select(:year, :month).each do |stat|
       key = [stat.year, stat.month]
       existing_stats_lookup[key] = true
     end

-    # Filter out stats that already exist
     filtered_stats = stats.reject do |stat|
       key = [stat[:year], stat[:month]]
       if existing_stats_lookup[key]
@@ -110,7 +100,6 @@ class Users::ImportData::Stats
     stats.each_slice(BATCH_SIZE) do |batch|
       begin
-        # Use upsert_all to efficiently bulk insert stats
         result = Stat.upsert_all(
           batch,
           returning: %w[id],
@@ -123,10 +112,7 @@ class Users::ImportData::Stats
         Rails.logger.debug "Processed batch of #{batch.size} stats, created #{batch_created}, total created: #{total_created}"
       rescue StandardError => e
-        Rails.logger.error "Failed to process stat batch: #{e.message}"
-        Rails.logger.error "Batch size: #{batch.size}"
-        Rails.logger.error "Backtrace: #{e.backtrace.first(3).join('\n')}"
-        # Continue with next batch instead of failing completely
+        ExceptionReporter.call(e, 'Failed to process stat batch')
       end
     end
@@ -134,7 +120,6 @@ class Users::ImportData::Stats
   end

   def valid_stat_data?(stat_data)
-    # Check for required fields
     return false unless stat_data.is_a?(Hash)

     unless stat_data['year'].present?

View file

@@ -13,7 +13,6 @@ class Users::ImportData::Trips
     Rails.logger.info "Importing #{trips_data.size} trips for user: #{user.email}"

-    # Filter valid trips and prepare for bulk import
     valid_trips = filter_and_prepare_trips

     if valid_trips.empty?
@@ -21,14 +20,12 @@ class Users::ImportData::Trips
       return 0
     end

-    # Remove existing trips to avoid duplicates
     deduplicated_trips = filter_existing_trips(valid_trips)

     if deduplicated_trips.size < valid_trips.size
       Rails.logger.debug "Skipped #{valid_trips.size - deduplicated_trips.size} duplicate trips"
     end

-    # Bulk import in batches
     total_created = bulk_import_trips(deduplicated_trips)

     Rails.logger.info "Trips import completed. Created: #{total_created}"
@@ -46,13 +43,11 @@ class Users::ImportData::Trips
     trips_data.each do |trip_data|
       next unless trip_data.is_a?(Hash)

-      # Skip trips with missing required data
       unless valid_trip_data?(trip_data)
         skipped_count += 1
         next
       end

-      # Prepare trip attributes for bulk insert
       prepared_attributes = prepare_trip_attributes(trip_data)
       valid_trips << prepared_attributes if prepared_attributes
     end
@@ -65,36 +60,29 @@ class Users::ImportData::Trips
   end

   def prepare_trip_attributes(trip_data)
-    # Start with base attributes, excluding timestamp fields
     attributes = trip_data.except('created_at', 'updated_at')

-    # Add required attributes for bulk insert
     attributes['user_id'] = user.id
     attributes['created_at'] = Time.current
     attributes['updated_at'] = Time.current

-    # Convert string keys to symbols for consistency
     attributes.symbolize_keys
   rescue StandardError => e
-    Rails.logger.error "Failed to prepare trip attributes: #{e.message}"
-    Rails.logger.error "Trip data: #{trip_data.inspect}"
+    ExceptionReporter.call(e, 'Failed to prepare trip attributes')
     nil
   end

   def filter_existing_trips(trips)
     return trips if trips.empty?

-    # Build lookup hash of existing trips for this user
     existing_trips_lookup = {}
     user.trips.select(:name, :started_at, :ended_at).each do |trip|
-      # Normalize timestamp values for consistent comparison
       key = [trip.name, normalize_timestamp(trip.started_at), normalize_timestamp(trip.ended_at)]
       existing_trips_lookup[key] = true
     end

-    # Filter out trips that already exist
     filtered_trips = trips.reject do |trip|
-      # Normalize timestamp values for consistent comparison
       key = [trip[:name], normalize_timestamp(trip[:started_at]), normalize_timestamp(trip[:ended_at])]
       if existing_trips_lookup[key]
         Rails.logger.debug "Trip already exists: #{trip[:name]}"
@@ -110,10 +98,8 @@ class Users::ImportData::Trips
   def normalize_timestamp(timestamp)
     case timestamp
     when String
-      # Parse string and convert to iso8601 format for consistent comparison
       Time.parse(timestamp).utc.iso8601
     when Time, DateTime
-      # Convert time objects to iso8601 format for consistent comparison
       timestamp.utc.iso8601
     else
       timestamp.to_s
@@ -127,7 +113,6 @@ class Users::ImportData::Trips
     trips.each_slice(BATCH_SIZE) do |batch|
       begin
-        # Use upsert_all to efficiently bulk insert trips
         result = Trip.upsert_all(
           batch,
           returning: %w[id],
@@ -140,10 +125,7 @@ class Users::ImportData::Trips
         Rails.logger.debug "Processed batch of #{batch.size} trips, created #{batch_created}, total created: #{total_created}"
       rescue StandardError => e
-        Rails.logger.error "Failed to process trip batch: #{e.message}"
-        Rails.logger.error "Batch size: #{batch.size}"
-        Rails.logger.error "Backtrace: #{e.backtrace.first(3).join('\n')}"
-        # Continue with next batch instead of failing completely
+        ExceptionReporter.call(e, 'Failed to process trip batch')
       end
     end
@@ -151,27 +133,37 @@ class Users::ImportData::Trips
   end

   def valid_trip_data?(trip_data)
-    # Check for required fields
     return false unless trip_data.is_a?(Hash)

-    unless trip_data['name'].present?
-      Rails.logger.error "Failed to create trip: Validation failed: Name can't be blank"
-      return false
-    end
-
-    unless trip_data['started_at'].present?
-      Rails.logger.error "Failed to create trip: Validation failed: Started at can't be blank"
-      return false
-    end
-
-    unless trip_data['ended_at'].present?
-      Rails.logger.error "Failed to create trip: Validation failed: Ended at can't be blank"
-      return false
-    end
+    return false unless validate_trip_name(trip_data)
+    return false unless validate_trip_started_at(trip_data)
+    return false unless validate_trip_ended_at(trip_data)

     true
   rescue StandardError => e
     Rails.logger.debug "Trip validation failed: #{e.message} for data: #{trip_data.inspect}"
     false
   end
+
+  # Each helper logs the failed validation and returns a boolean; no exception
+  # object is in scope here, so plain logging is used instead of ExceptionReporter.
+  def validate_trip_name(trip_data)
+    return true if trip_data['name'].present?
+
+    Rails.logger.error "Failed to create trip: Validation failed: Name can't be blank"
+    false
+  end
+
+  def validate_trip_started_at(trip_data)
+    return true if trip_data['started_at'].present?
+
+    Rails.logger.error "Failed to create trip: Validation failed: Started at can't be blank"
+    false
+  end
+
+  def validate_trip_ended_at(trip_data)
+    return true if trip_data['ended_at'].present?
+
+    Rails.logger.error "Failed to create trip: Validation failed: Ended at can't be blank"
+    false
+  end
 end

View file

@@ -16,7 +16,6 @@ class Users::ImportData::Visits
     visits_data.each do |visit_data|
       next unless visit_data.is_a?(Hash)

-      # Check if visit already exists (match by name, timestamps, and place reference)
       existing_visit = find_existing_visit(visit_data)

       if existing_visit
@@ -24,13 +23,12 @@ class Users::ImportData::Visits
         next
       end

-      # Create new visit
       begin
         visit_record = create_visit_record(visit_data)
         visits_created += 1
         Rails.logger.debug "Created visit: #{visit_record.name}"
       rescue ActiveRecord::RecordInvalid => e
-        Rails.logger.error "Failed to create visit: #{e.message}"
+        ExceptionReporter.call(e, 'Failed to create visit')
+
         next
       end
     end
@@ -59,7 +57,6 @@ class Users::ImportData::Visits
   def prepare_visit_attributes(visit_data)
     attributes = visit_data.except('place_reference')

-    # Find and assign place if referenced
     if visit_data['place_reference']
       place = find_referenced_place(visit_data['place_reference'])
       attributes[:place] = place if place
@@ -75,7 +72,6 @@ class Users::ImportData::Visits
     latitude = place_reference['latitude'].to_f
     longitude = place_reference['longitude'].to_f

-    # Find place by name and coordinates (global search since places are not user-specific)
     place = Place.find_by(name: name) ||
             Place.where("latitude = ? AND longitude = ?", latitude, longitude).first

View file

@@ -88,7 +88,7 @@ Rails.application.configure do
   hosts = ENV.fetch('APPLICATION_HOSTS', 'localhost').split(',')

-  config.action_mailer.default_url_options = { host: ENV['SMTP_DOMAIN'] || hosts.first }
+  config.action_mailer.default_url_options = { host: ENV['SMTP_DOMAIN'] || hosts.first, port: ENV.fetch('PORT', 3000) }

   config.hosts.concat(hosts) if hosts.present?

View file

@@ -6,5 +6,7 @@ class AddStatusToImports < ActiveRecord::Migration[8.0]
   def change
     add_column :imports, :status, :integer, default: 0, null: false
     add_index :imports, :status, algorithm: :concurrently
+
+    Import.update_all(status: :completed)
   end
 end
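One caveat on this migration: Postgres cannot build an index with `algorithm: :concurrently` inside a transaction, so a migration of this shape normally declares `disable_ddl_transaction!`; whether this file does is outside the visible hunk. A sketch of the usual full form, under that assumption:

```ruby
# Assumed full shape; only the `change` body is visible in the hunk above.
class AddStatusToImports < ActiveRecord::Migration[8.0]
  disable_ddl_transaction! # required for algorithm: :concurrently on Postgres

  def change
    add_column :imports, :status, :integer, default: 0, null: false
    add_index :imports, :status, algorithm: :concurrently

    # Backfill: pre-existing imports are marked completed (enum symbol is cast by Rails)
    Import.update_all(status: :completed)
  end
end
```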

View file

@ -9,10 +9,8 @@ RSpec.describe Users::ImportDataJob, type: :job do
let(:job) { described_class.new } let(:job) { described_class.new }
before do before do
# Create a mock ZIP file
FileUtils.touch(archive_path) FileUtils.touch(archive_path)
# Mock the import file attachment
allow(import).to receive(:file).and_return( allow(import).to receive(:file).and_return(
double('ActiveStorage::Attached::One', double('ActiveStorage::Attached::One',
download: proc { |&block| download: proc { |&block|
@ -29,7 +27,6 @@ RSpec.describe Users::ImportDataJob, type: :job do
describe '#perform' do describe '#perform' do
context 'when import is successful' do context 'when import is successful' do
before do before do
# Mock the import service
import_service = instance_double(Users::ImportData) import_service = instance_double(Users::ImportData)
allow(Users::ImportData).to receive(:new).and_return(import_service) allow(Users::ImportData).to receive(:new).and_return(import_service)
allow(import_service).to receive(:import).and_return({ allow(import_service).to receive(:import).and_return({
@ -46,7 +43,6 @@ RSpec.describe Users::ImportDataJob, type: :job do
files_restored: 7 files_restored: 7
}) })
# Mock file operations
allow(File).to receive(:exist?).and_return(true) allow(File).to receive(:exist?).and_return(true)
allow(File).to receive(:delete) allow(File).to receive(:delete)
allow(Rails.logger).to receive(:info) allow(Rails.logger).to receive(:info)
@ -84,21 +80,17 @@ RSpec.describe Users::ImportDataJob, type: :job do
let(:error) { StandardError.new(error_message) } let(:error) { StandardError.new(error_message) }
before do before do
# Mock the import service to raise an error
import_service = instance_double(Users::ImportData) import_service = instance_double(Users::ImportData)
allow(Users::ImportData).to receive(:new).and_return(import_service) allow(Users::ImportData).to receive(:new).and_return(import_service)
allow(import_service).to receive(:import).and_raise(error) allow(import_service).to receive(:import).and_raise(error)
# Mock notification creation
notification_service = instance_double(::Notifications::Create, call: true) notification_service = instance_double(::Notifications::Create, call: true)
allow(::Notifications::Create).to receive(:new).and_return(notification_service) allow(::Notifications::Create).to receive(:new).and_return(notification_service)
# Mock file operations
allow(File).to receive(:exist?).and_return(true) allow(File).to receive(:exist?).and_return(true)
allow(File).to receive(:delete) allow(File).to receive(:delete)
allow(Rails.logger).to receive(:info) allow(Rails.logger).to receive(:info)
# Mock ExceptionReporter
allow(ExceptionReporter).to receive(:call) allow(ExceptionReporter).to receive(:call)
end end
@ -149,12 +141,10 @@ RSpec.describe Users::ImportDataJob, type: :job do
let(:error) { StandardError.new(error_message) } let(:error) { StandardError.new(error_message) }
before do before do
# Mock file download to fail
allow(import).to receive(:file).and_return( allow(import).to receive(:file).and_return(
double('ActiveStorage::Attached::One', download: proc { raise error }) double('ActiveStorage::Attached::One', download: proc { raise error })
) )
# Mock notification creation
notification_service = instance_double(::Notifications::Create, call: true) notification_service = instance_double(::Notifications::Create, call: true)
allow(::Notifications::Create).to receive(:new).and_return(notification_service) allow(::Notifications::Create).to receive(:new).and_return(notification_service)
end end
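
The repeated stubbing in these before blocks boils down to one pattern, sketched here with an RSpec double standing in for the attachment (names mirror the spec; the block implementation is illustrative, not the spec's exact body):

# Swap the real ActiveStorage attachment for a double so the job can be
# exercised without touching storage; `download` yields the archive bytes
# to the caller's block, like the real streaming API, or returns them
# when no block is given.
allow(import).to receive(:file).and_return(
  double('ActiveStorage::Attached::One').tap do |file|
    allow(file).to receive(:download) do |&block|
      data = File.binread(archive_path)
      block ? block.call(data) : data
    end
  end
)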

View file

@ -8,6 +8,8 @@ RSpec.describe ReverseGeocoding::Points::FetchData do
let(:point) { create(:point) } let(:point) { create(:point) }
context 'when Geocoder returns city and country' do context 'when Geocoder returns city and country' do
let!(:germany) { create(:country, name: 'Germany', iso_a2: 'DE', iso_a3: 'DEU') }
before do before do
allow(Geocoder).to receive(:search).and_return( allow(Geocoder).to receive(:search).and_return(
[ [
@ -27,10 +29,10 @@ RSpec.describe ReverseGeocoding::Points::FetchData do
it 'updates point with city and country' do it 'updates point with city and country' do
expect { fetch_data }.to change { point.reload.city } expect { fetch_data }.to change { point.reload.city }
.from(nil).to('Berlin') .from(nil).to('Berlin')
.and change { point.reload.country_id }.from(nil).to(be_present) .and change { point.reload.country_id }.from(nil).to(germany.id)
end end
it 'creates country with correct ISO codes' do it 'finds the existing country' do
fetch_data fetch_data
country = point.reload.country country = point.reload.country
expect(country.name).to eq('Germany') expect(country.name).to eq('Germany')
@ -82,28 +84,27 @@ RSpec.describe ReverseGeocoding::Points::FetchData do
end end
end end
context 'when Geocoder returns country name without ISO code' do context 'when Geocoder returns country name that does not exist in database' do
before do before do
allow(Geocoder).to receive(:search).and_return( allow(Geocoder).to receive(:search).and_return(
[ [
double( double(
city: 'Paris', city: 'Paris',
country: 'France', country: 'NonExistentCountry',
data: { data: {
'address' => 'Address', 'address' => 'Address',
'properties' => { 'city' => 'Paris' } # No countrycode property 'properties' => { 'city' => 'Paris' }
} }
) )
] ]
) )
end end
it 'creates country with correct ISO codes from country name mapping' do it 'does not set country_id when country is not found' do
fetch_data expect { fetch_data }.to change { point.reload.city }
country = point.reload.country .from(nil).to('Paris')
expect(country.name).to eq('France')
expect(country.iso_a2).to eq('FR') expect(point.reload.country_id).to be_nil
expect(country.iso_a3).to eq('FRA')
end end
end end
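
These spec changes pin down a behaviour change in the service: during reverse geocoding, countries are now looked up against existing records and never created. A sketch of a find-only resolution; the method name and lookup keys are assumptions for illustration, not the exact service internals:

# Resolve a geocoder result against pre-seeded Country records; return
# nil (leaving point.country_id unset) when nothing matches.
def resolve_country(geocoder_result)
  Country.find_by(name: geocoder_result.country) ||
    Country.find_by(iso_a2: geocoder_result.data.dig('properties', 'countrycode'))
end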

View file

@ -65,13 +65,12 @@ RSpec.describe Users::ImportData::Points, type: :service do
] ]
end end
it 'creates the country and assigns it' do it 'does not create country and leaves country_id nil' do
expect { service.call }.to change(Country, :count).by(1) expect { service.call }.not_to change(Country, :count)
point = user.tracked_points.last point = user.tracked_points.last
expect(point.country.name).to eq('NewCountry') expect(point.country_id).to be_nil
expect(point.country.iso_a2).to eq('NC') expect(point.city).to eq('Berlin')
expect(point.country.iso_a3).to eq('NCO')
end end
end end
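
The same find-only rule now applies on import, which is what this expectation change captures; in concrete terms, using the records from the spec:

# A point whose country_info names a country missing from the database
# is still imported, just without a country association.
point = user.tracked_points.last
point.city       # => "Berlin"
point.country_id # => nil (unknown countries are no longer auto-created)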

View file

@ -1,92 +0,0 @@
Run options: include {locations: {"./spec/services/users/export_import_integration_spec.rb" => [21]}}
Created dataset with 12 points
=== DEBUGGING EXPORT DATA ===
Export counts: {"areas" => 3, "imports" => 2, "exports" => 2, "trips" => 2, "stats" => 2, "notifications" => 4, "points" => 12, "visits" => 3, "places" => 2}
Points in export: 12
Places in export: 2
First point sample: {"timestamp" => 1714559220, "longitude" => -73.9851, "latitude" => 40.7589, "import_reference" => {"name" => "March 2024 Data", "source" => 0, "created_at" => "2025-06-30T16:10:46.550Z"}, "country_info" => {"name" => "United States", "iso_a2" => "US", "iso_a3" => "USA"}, "visit_reference" => {"name" => "Work Visit", "started_at" => "2025-06-30T16:10:46.711Z", "ended_at" => "2025-06-30T17:10:46.711Z"}}
First place sample: {"name" => "Office Building", "latitude" => "40.7589", "longitude" => "-73.9851", "source" => "manual"}
Imports in export: 2
Countries referenced: ["United States", "Canada"]
=== END DEBUG ===
Import stats: {settings_updated: true, areas_created: 3, places_created: 0, imports_created: 2, exports_created: 2, trips_created: 2, stats_created: 2, notifications_created: 4, visits_created: 3, points_created: 0, files_restored: 3}
Original counts: {areas: 3, imports: 2, exports: 2, trips: 2, stats: 2, notifications: 5, points: 12, visits: 3, places: 2}
Target counts: {areas: 3, imports: 2, exports: 2, trips: 2, stats: 2, notifications: 5, points: 0, visits: 3, places: 2}
F
/Users/frey/.rvm/rubies/ruby-3.4.1/bin/ruby -I/Users/frey/.rvm/gems/ruby-3.4.1/gems/rspec-core-3.13.3/lib:/Users/frey/.rvm/gems/ruby-3.4.1/gems/rspec-support-3.13.3/lib /Users/frey/.rvm/gems/ruby-3.4.1/gems/rspec-core-3.13.3/exe/rspec --pattern spec/swagger/\*\*/\*_spec.rb --format Rswag::Specs::SwaggerFormatter --dry-run --order defined
Generating Swagger docs ...
Swagger doc generated at /Users/frey/projects/dawarich/dawarich/swagger/v1/swagger.yaml
Top 10 slowest examples (0.00002 seconds, 0.6% of total time):
Areas API /api/v1/areas post area created returns a 201 response
0.00001 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
Stats API /api/v1/stats get stats found returns a 200 response
0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
Areas API /api/v1/areas post invalid request returns a 422 response
0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
Areas API /api/v1/areas/{id} delete area deleted returns a 200 response
0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
Health API /api/v1/health get Healthy returns a 200 response
0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
Points API /api/v1/points get points found returns a 200 response
0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
Users API /api/v1/users/me get user found returns a 200 response
0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
Settings API /api/v1/settings get settings found returns a 200 response
0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
Settings API /api/v1/settings patch settings updated returns a 200 response
0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
Points API /api/v1/points/{id} delete point deleted returns a 200 response
0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
Top 10 slowest example groups:
Health API
0.00039 seconds average (0.00039 seconds / 1 example) ./spec/swagger/api/v1/health_controller_spec.rb:5
Points API
0.00016 seconds average (0.00065 seconds / 4 examples) ./spec/swagger/api/v1/points_controller_spec.rb:5
Areas API
0.00013 seconds average (0.00052 seconds / 4 examples) ./spec/swagger/api/v1/areas_controller_spec.rb:5
Stats API
0.00013 seconds average (0.00013 seconds / 1 example) ./spec/swagger/api/v1/stats_controller_spec.rb:5
Users API
0.00012 seconds average (0.00012 seconds / 1 example) ./spec/swagger/api/v1/users_controller_spec.rb:5
Settings API
0.00011 seconds average (0.00021 seconds / 2 examples) ./spec/swagger/api/v1/settings_controller_spec.rb:5
Overland Batches API
0.0001 seconds average (0.0002 seconds / 2 examples) ./spec/swagger/api/v1/overland/batches_controller_spec.rb:5
Api::V1::Countries::VisitedCities
0.00009 seconds average (0.00017 seconds / 2 examples) ./spec/swagger/api/v1/countries/visited_cities_spec.rb:5
OwnTracks Points API
0.00009 seconds average (0.00017 seconds / 2 examples) ./spec/swagger/api/v1/owntracks/points_controller_spec.rb:5
Api::V1::PhotosController
0.00008 seconds average (0.00025 seconds / 3 examples) ./spec/swagger/api/v1/photos_controller_spec.rb:5
Finished in 0.00388 seconds (files took 1.87 seconds to load)
24 examples, 0 failures
Coverage report generated for RSpec to /Users/frey/projects/dawarich/dawarich/coverage.
Line Coverage: 61.36% (670 / 1092)
Failures:
1) Users Export-Import Integration complete export-import cycle exports and imports all user data while preserving relationships
Failure/Error: expect(target_counts[:points]).to eq(original_counts[:points])
Expected 0 to eq 12.
# ./spec/services/users/export_import_integration_spec.rb:71:in 'block (3 levels) in <top (required)>'
# /Users/frey/.rvm/gems/ruby-3.4.1/gems/webmock-3.25.1/lib/webmock/rspec.rb:39:in 'block (2 levels) in <top (required)>'
Top 1 slowest examples (0.67919 seconds, 16.2% of total time):
Users Export-Import Integration complete export-import cycle exports and imports all user data while preserving relationships
0.67919 seconds ./spec/services/users/export_import_integration_spec.rb:21
Finished in 4.18 seconds (files took 2.21 seconds to load)
1 example, 1 failure
Failed examples:
rspec ./spec/services/users/export_import_integration_spec.rb:21 # Users Export-Import Integration complete export-import cycle exports and imports all user data while preserving relationships
Coverage report generated for RSpec to /Users/frey/projects/dawarich/dawarich/coverage.
Line Coverage: 65.56% (1593 / 2430)
Stopped processing SimpleCov as a previous error not related to SimpleCov has been detected