mirror of https://github.com/Freika/dawarich.git (synced 2026-01-10 17:21:38 -05:00)
Add export and import specs
parent aeac8262df
commit f37039ad8e
15 changed files with 1238 additions and 145 deletions
@@ -6,7 +6,10 @@ class Import < ApplicationRecord
  has_one_attached :file

-  after_commit -> { Import::ProcessJob.perform_later(id) }, on: :create
+  # Flag to skip background processing during user data import
+  attr_accessor :skip_background_processing
+
+  after_commit -> { Import::ProcessJob.perform_later(id) unless skip_background_processing }, on: :create
  after_commit :remove_attached_file, on: :destroy

  validates :name, presence: true, uniqueness: { scope: :user_id }
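For context, a minimal usage sketch of the new flag (hypothetical record values; the same pattern appears in Users::ImportData::Imports later in this diff). Because attr_accessor state lives only in memory, it must be set on the instance before save so the after_commit guard can see it:

  # Sketch: create an Import without enqueueing Import::ProcessJob.
  import = user.imports.build(name: 'restored.json', source: :owntracks)
  import.skip_background_processing = true # read by the after_commit guard
  import.save!                             # commit fires, job is not enqueued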
@@ -336,10 +336,10 @@ class Users::ExportData
    counts
  end

  def create_zip_archive(export_directory, zip_file_path)
-    # Set global compression level for better file size reduction
+    # Set global compression for better file size reduction
    original_compression = Zip.default_compression
-    Zip.default_compression = Zlib::BEST_COMPRESSION
+    Zip.default_compression = Zip::Entry::DEFLATED

    # Create zip archive with optimized compression
    Zip::File.open(zip_file_path, Zip::File::CREATE) do |zipfile|
@@ -353,7 +353,7 @@ class Users::ExportData
      end
    end
  ensure
-    # Restore original compression level
+    # Restore original compression setting
    Zip.default_compression = original_compression if original_compression
  end
@@ -368,7 +368,15 @@ class Users::ExportData

  def create_success_notification
    counts = calculate_entity_counts
-    summary = "#{counts[:points]} points, #{counts[:visits]} visits, #{counts[:places]} places, #{counts[:trips]} trips"
+    summary = "#{counts[:points]} points, " \
+              "#{counts[:visits]} visits, " \
+              "#{counts[:places]} places, " \
+              "#{counts[:trips]} trips, " \
+              "#{counts[:areas]} areas, " \
+              "#{counts[:imports]} imports, " \
+              "#{counts[:exports]} exports, " \
+              "#{counts[:stats]} stats, " \
+              "#{counts[:notifications]} notifications"

    ::Notifications::Create.new(
      user: user,
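Since Zip.default_compression is a process-global rubyzip setting, the save/set/ensure-restore sequence above is what keeps the override from leaking into later archives. A minimal standalone sketch of the same pattern (hypothetical helper and file names, not part of the commit; note it still mutates global state, so truly concurrent exports in one process could interleave):

  require 'zip'
  require 'zlib'

  def with_zip_compression(level)
    original = Zip.default_compression
    Zip.default_compression = level
    yield
  ensure
    Zip.default_compression = original
  end

  with_zip_compression(Zlib::BEST_COMPRESSION) do
    Zip::File.open('export.zip', Zip::File::CREATE) do |zipfile|
      zipfile.get_output_stream('data.json') { |io| io.write('{}') }
    end
  end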
@@ -1,12 +1,23 @@
 # frozen_string_literal: true

 class Users::ExportData::Notifications
+  # System-generated notification titles that should not be exported
+  SYSTEM_NOTIFICATION_TITLES = [
+    'Data import completed',
+    'Data import failed',
+    'Export completed',
+    'Export failed'
+  ].freeze
+
  def initialize(user)
    @user = user
  end

  def call
-    user.notifications.as_json(except: %w[user_id id])
+    # Export only user-generated notifications, not system-generated ones
+    user.notifications
+      .where.not(title: SYSTEM_NOTIFICATION_TITLES)
+      .as_json(except: %w[user_id id])
  end

  private
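One property of the filter above worth knowing: where.not with an array compiles to a SQL NOT IN, which under three-valued logic also drops rows whose title is NULL. Titles are presumably always present here, so that edge should not bite; roughly, the generated query looks like:

  Notification.where.not(title: ['Export completed', 'Export failed']).to_sql
  # => SELECT "notifications".* FROM "notifications"
  #    WHERE "notifications"."title" NOT IN ('Export completed', 'Export failed')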
@@ -103,6 +103,9 @@ class Users::ImportData
      Rails.logger.info "Expected entity counts from export: #{data['counts']}"
    end

+    # Debug: Log what data keys are available
+    Rails.logger.debug "Available data keys: #{data.keys.inspect}"
+
    # Import in dependency order
    import_settings(data['settings']) if data['settings']
    import_areas(data['areas']) if data['areas']
@@ -119,55 +122,84 @@ class Users::ImportData
  end

  def import_settings(settings_data)
+    Rails.logger.debug "Importing settings: #{settings_data.inspect}"
    Users::ImportData::Settings.new(user, settings_data).call
    @import_stats[:settings_updated] = true
  end

  def import_areas(areas_data)
+    Rails.logger.debug "Importing #{areas_data&.size || 0} areas"
    areas_created = Users::ImportData::Areas.new(user, areas_data).call
    @import_stats[:areas_created] = areas_created
  end

  def import_places(places_data)
+    Rails.logger.debug "Importing #{places_data&.size || 0} places"
    places_created = Users::ImportData::Places.new(user, places_data).call
    @import_stats[:places_created] = places_created
  end

  def import_imports(imports_data)
+    Rails.logger.debug "Importing #{imports_data&.size || 0} imports"
    imports_created, files_restored = Users::ImportData::Imports.new(user, imports_data, @import_directory.join('files')).call
    @import_stats[:imports_created] = imports_created
    @import_stats[:files_restored] += files_restored
  end

  def import_exports(exports_data)
+    Rails.logger.debug "Importing #{exports_data&.size || 0} exports"
    exports_created, files_restored = Users::ImportData::Exports.new(user, exports_data, @import_directory.join('files')).call
    @import_stats[:exports_created] = exports_created
    @import_stats[:files_restored] += files_restored
  end

  def import_trips(trips_data)
+    Rails.logger.debug "Importing #{trips_data&.size || 0} trips"
    trips_created = Users::ImportData::Trips.new(user, trips_data).call
    @import_stats[:trips_created] = trips_created
  end

  def import_stats(stats_data)
+    Rails.logger.debug "Importing #{stats_data&.size || 0} stats"
    stats_created = Users::ImportData::Stats.new(user, stats_data).call
    @import_stats[:stats_created] = stats_created
  end

  def import_notifications(notifications_data)
+    Rails.logger.debug "Importing #{notifications_data&.size || 0} notifications"
    notifications_created = Users::ImportData::Notifications.new(user, notifications_data).call
    @import_stats[:notifications_created] = notifications_created
  end

  def import_visits(visits_data)
+    Rails.logger.debug "Importing #{visits_data&.size || 0} visits"
    visits_created = Users::ImportData::Visits.new(user, visits_data).call
    @import_stats[:visits_created] = visits_created
  end

  def import_points(points_data)
-    points_created = Users::ImportData::Points.new(user, points_data).call
-    @import_stats[:points_created] = points_created
+    puts "=== POINTS IMPORT DEBUG ==="
+    puts "About to import #{points_data&.size || 0} points"
+    puts "Points data present: #{points_data.present?}"
+    puts "First point sample: #{points_data&.first&.slice('timestamp', 'longitude', 'latitude') if points_data&.first}"
+    puts "=== END POINTS IMPORT DEBUG ==="
+
+    Rails.logger.info "About to import #{points_data&.size || 0} points"
+    Rails.logger.info "Points data present: #{points_data.present?}"
+    Rails.logger.info "First point sample: #{points_data&.first&.slice('timestamp', 'longitude', 'latitude') if points_data&.first}"
+
+    begin
+      points_created = Users::ImportData::Points.new(user, points_data).call
+      Rails.logger.info "Points import returned: #{points_created}"
+      puts "Points import returned: #{points_created}"
+
+      @import_stats[:points_created] = points_created
+    rescue StandardError => e
+      Rails.logger.error "Points import failed: #{e.message}"
+      Rails.logger.error "Backtrace: #{e.backtrace.first(5).join('\n')}"
+      puts "Points import failed: #{e.message}"
+      @import_stats[:points_created] = 0
+    end
  end

  def cleanup_temporary_files(import_directory)
@@ -180,8 +212,26 @@ class Users::ImportData
  end

  def create_success_notification
-    summary = "#{@import_stats[:points_created]} points, #{@import_stats[:visits_created]} visits, " \
-              "#{@import_stats[:places_created]} places, #{@import_stats[:trips_created]} trips"
+    # Check if we already have a recent import success notification to avoid duplicates
+    recent_import_notification = user.notifications.where(
+      title: 'Data import completed'
+    ).where('created_at > ?', 5.minutes.ago).first
+
+    if recent_import_notification
+      Rails.logger.debug "Skipping duplicate import success notification"
+      return
+    end
+
+    summary = "#{@import_stats[:points_created]} points, " \
+              "#{@import_stats[:visits_created]} visits, " \
+              "#{@import_stats[:places_created]} places, " \
+              "#{@import_stats[:trips_created]} trips, " \
+              "#{@import_stats[:areas_created]} areas, " \
+              "#{@import_stats[:imports_created]} imports, " \
+              "#{@import_stats[:exports_created]} exports, " \
+              "#{@import_stats[:stats_created]} stats, " \
+              "#{@import_stats[:files_restored]} files restored, " \
+              "#{@import_stats[:notifications_created]} notifications"

    ::Notifications::Create.new(
      user: user,
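Putting the orchestrator together: the entry point, as exercised by the integration spec later in this diff, is a single call that returns the stats hash feeding the summary above (archive path below is hypothetical):

  stats = Users::ImportData.new(user, Rails.root.join('tmp', 'export.zip')).import
  stats[:points_created] # => number of points created during the import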
@@ -1,6 +1,8 @@
 # frozen_string_literal: true

 class Users::ImportData::Areas
+  BATCH_SIZE = 1000
+
  def initialize(user, areas_data)
    @user = user
    @areas_data = areas_data
@@ -11,43 +13,141 @@ class Users::ImportData::Areas

    Rails.logger.info "Importing #{areas_data.size} areas for user: #{user.email}"

-    areas_created = 0
+    # Filter valid areas and prepare for bulk import
+    valid_areas = filter_and_prepare_areas

-    areas_data.each do |area_data|
-      next unless area_data.is_a?(Hash)
-
-      # Skip if area already exists (match by name and coordinates)
-      existing_area = user.areas.find_by(
-        name: area_data['name'],
-        latitude: area_data['latitude'],
-        longitude: area_data['longitude']
-      )
-
-      if existing_area
-        Rails.logger.debug "Area already exists: #{area_data['name']}"
-        next
-      end
-
-      # Create new area
-      area_attributes = area_data.merge(user: user)
-      # Ensure radius is present (required by model validation)
-      area_attributes['radius'] ||= 100 # Default radius if not provided
-
-      area = user.areas.create!(area_attributes)
-      areas_created += 1
-
-      Rails.logger.debug "Created area: #{area.name}"
-    rescue ActiveRecord::RecordInvalid => e
-      ExceptionReporter.call(e, "Failed to create area")
-
-      next
+    if valid_areas.empty?
+      Rails.logger.info "Areas import completed. Created: 0"
+      return 0
    end

-    Rails.logger.info "Areas import completed. Created: #{areas_created}"
-    areas_created
+    # Remove existing areas to avoid duplicates
+    deduplicated_areas = filter_existing_areas(valid_areas)
+
+    if deduplicated_areas.size < valid_areas.size
+      Rails.logger.debug "Skipped #{valid_areas.size - deduplicated_areas.size} duplicate areas"
+    end
+
+    # Bulk import in batches
+    total_created = bulk_import_areas(deduplicated_areas)
+
+    Rails.logger.info "Areas import completed. Created: #{total_created}"
+    total_created
  end

  private

  attr_reader :user, :areas_data
+
+  def filter_and_prepare_areas
+    valid_areas = []
+    skipped_count = 0
+
+    areas_data.each do |area_data|
+      next unless area_data.is_a?(Hash)
+
+      # Skip areas with missing required data
+      unless valid_area_data?(area_data)
+        skipped_count += 1
+        next
+      end
+
+      # Prepare area attributes for bulk insert
+      prepared_attributes = prepare_area_attributes(area_data)
+      valid_areas << prepared_attributes if prepared_attributes
+    end
+
+    if skipped_count > 0
+      Rails.logger.warn "Skipped #{skipped_count} areas with invalid or missing required data"
+    end
+
+    valid_areas
+  end
+
+  def prepare_area_attributes(area_data)
+    # Start with base attributes, excluding timestamp fields
+    attributes = area_data.except('created_at', 'updated_at')
+
+    # Add required attributes for bulk insert
+    attributes['user_id'] = user.id
+    attributes['created_at'] = Time.current
+    attributes['updated_at'] = Time.current
+
+    # Ensure radius is present (required by model validation)
+    attributes['radius'] ||= 100 # Default radius if not provided
+
+    # Convert string keys to symbols for consistency
+    attributes.symbolize_keys
+  rescue StandardError => e
+    Rails.logger.error "Failed to prepare area attributes: #{e.message}"
+    Rails.logger.error "Area data: #{area_data.inspect}"
+    nil
+  end
+
+  def filter_existing_areas(areas)
+    return areas if areas.empty?
+
+    # Build lookup hash of existing areas for this user
+    existing_areas_lookup = {}
+    user.areas.select(:name, :latitude, :longitude).each do |area|
+      # Normalize decimal values for consistent comparison
+      key = [area.name, area.latitude.to_f, area.longitude.to_f]
+      existing_areas_lookup[key] = true
+    end
+
+    # Filter out areas that already exist
+    filtered_areas = areas.reject do |area|
+      # Normalize decimal values for consistent comparison
+      key = [area[:name], area[:latitude].to_f, area[:longitude].to_f]
+      if existing_areas_lookup[key]
+        Rails.logger.debug "Area already exists: #{area[:name]}"
+        true
+      else
+        false
+      end
+    end
+
+    filtered_areas
+  end
+
+  def bulk_import_areas(areas)
+    total_created = 0
+
+    areas.each_slice(BATCH_SIZE) do |batch|
+      begin
+        # Use upsert_all to efficiently bulk insert areas
+        result = Area.upsert_all(
+          batch,
+          returning: %w[id],
+          on_duplicate: :skip
+        )
+
+        batch_created = result.count
+        total_created += batch_created
+
+        Rails.logger.debug "Processed batch of #{batch.size} areas, created #{batch_created}, total created: #{total_created}"
+
+      rescue StandardError => e
+        Rails.logger.error "Failed to process area batch: #{e.message}"
+        Rails.logger.error "Batch size: #{batch.size}"
+        Rails.logger.error "Backtrace: #{e.backtrace.first(3).join('\n')}"
+        # Continue with next batch instead of failing completely
+      end
+    end
+
+    total_created
+  end
+
+  def valid_area_data?(area_data)
+    # Check for required fields
+    return false unless area_data.is_a?(Hash)
+    return false unless area_data['name'].present?
+    return false unless area_data['latitude'].present?
+    return false unless area_data['longitude'].present?
+
+    true
+  rescue StandardError => e
+    Rails.logger.debug "Area validation failed: #{e.message} for data: #{area_data.inspect}"
+    false
+  end
 end
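The .to_f normalization in filter_existing_areas is doing real work: coordinates deserialized from the export JSON arrive as strings or floats, while ActiveRecord typically returns BigDecimal for decimal columns, and those never compare equal as hash-key components. A small illustration:

  require 'bigdecimal'

  BigDecimal('40.7589') == '40.7589'                # => false, types never match
  ['40.7589', BigDecimal('40.7589'), 40.7589]
    .map(&:to_f).uniq                               # => [40.7589], one key after to_f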
@@ -54,7 +54,10 @@ class Users::ImportData::Imports
    import_attributes = prepare_import_attributes(import_data)

    begin
-      import_record = user.imports.create!(import_attributes)
+      import_record = user.imports.build(import_attributes)
+      # Skip background processing since we're importing user data directly
+      import_record.skip_background_processing = true
+      import_record.save!
      Rails.logger.debug "Created import: #{import_record.name}"
      import_record
    rescue ActiveRecord::RecordInvalid => e
@@ -1,6 +1,8 @@
 # frozen_string_literal: true

 class Users::ImportData::Notifications
+  BATCH_SIZE = 1000
+
  def initialize(user, notifications_data)
    @user = user
    @notifications_data = notifications_data
@@ -11,39 +13,177 @@ class Users::ImportData::Notifications

    Rails.logger.info "Importing #{notifications_data.size} notifications for user: #{user.email}"

-    notifications_created = 0
+    # Filter valid notifications and prepare for bulk import
+    valid_notifications = filter_and_prepare_notifications

-    notifications_data.each do |notification_data|
-      next unless notification_data.is_a?(Hash)
-
-      # Check if notification already exists (match by title, content, and created_at)
-      existing_notification = user.notifications.find_by(
-        title: notification_data['title'],
-        content: notification_data['content'],
-        created_at: notification_data['created_at']
-      )
-
-      if existing_notification
-        Rails.logger.debug "Notification already exists: #{notification_data['title']}"
-        next
-      end
-
-      # Create new notification
-      notification_attributes = notification_data.except('created_at', 'updated_at')
-      notification = user.notifications.create!(notification_attributes)
-      notifications_created += 1
-
-      Rails.logger.debug "Created notification: #{notification.title}"
-    rescue ActiveRecord::RecordInvalid => e
-      Rails.logger.error "Failed to create notification: #{e.message}"
-      next
+    if valid_notifications.empty?
+      Rails.logger.info "Notifications import completed. Created: 0"
+      return 0
    end

-    Rails.logger.info "Notifications import completed. Created: #{notifications_created}"
-    notifications_created
+    # Remove existing notifications to avoid duplicates
+    deduplicated_notifications = filter_existing_notifications(valid_notifications)
+
+    if deduplicated_notifications.size < valid_notifications.size
+      Rails.logger.debug "Skipped #{valid_notifications.size - deduplicated_notifications.size} duplicate notifications"
+    end
+
+    # Bulk import in batches
+    total_created = bulk_import_notifications(deduplicated_notifications)
+
+    Rails.logger.info "Notifications import completed. Created: #{total_created}"
+    total_created
  end

  private

  attr_reader :user, :notifications_data
+
+  def filter_and_prepare_notifications
+    valid_notifications = []
+    skipped_count = 0
+
+    notifications_data.each do |notification_data|
+      next unless notification_data.is_a?(Hash)
+
+      # Skip notifications with missing required data
+      unless valid_notification_data?(notification_data)
+        skipped_count += 1
+        next
+      end
+
+      # Prepare notification attributes for bulk insert
+      prepared_attributes = prepare_notification_attributes(notification_data)
+      valid_notifications << prepared_attributes if prepared_attributes
+    end
+
+    if skipped_count > 0
+      Rails.logger.warn "Skipped #{skipped_count} notifications with invalid or missing required data"
+    end
+
+    valid_notifications
+  end
+
+  def prepare_notification_attributes(notification_data)
+    # Start with base attributes, excluding only updated_at (preserve created_at for duplicate logic)
+    attributes = notification_data.except('updated_at')
+
+    # Add required attributes for bulk insert
+    attributes['user_id'] = user.id
+
+    # Preserve original created_at if present, otherwise use current time
+    unless attributes['created_at'].present?
+      attributes['created_at'] = Time.current
+    end
+
+    attributes['updated_at'] = Time.current
+
+    # Convert string keys to symbols for consistency
+    attributes.symbolize_keys
+  rescue StandardError => e
+    Rails.logger.error "Failed to prepare notification attributes: #{e.message}"
+    Rails.logger.error "Notification data: #{notification_data.inspect}"
+    nil
+  end
+
+  def filter_existing_notifications(notifications)
+    return notifications if notifications.empty?
+
+    # Build lookup hash of existing notifications for this user
+    # Use title and content as the primary deduplication key
+    existing_notifications_lookup = {}
+    user.notifications.select(:title, :content, :created_at, :kind).each do |notification|
+      # Primary key: title + content
+      primary_key = [notification.title.strip, notification.content.strip]
+
+      # Secondary key: include timestamp for exact matches
+      exact_key = [notification.title.strip, notification.content.strip, normalize_timestamp(notification.created_at)]
+
+      existing_notifications_lookup[primary_key] = true
+      existing_notifications_lookup[exact_key] = true
+    end
+
+    # Filter out notifications that already exist
+    filtered_notifications = notifications.reject do |notification|
+      title = notification[:title]&.strip
+      content = notification[:content]&.strip
+
+      # Check both primary key (title + content) and exact key (with timestamp)
+      primary_key = [title, content]
+      exact_key = [title, content, normalize_timestamp(notification[:created_at])]
+
+      if existing_notifications_lookup[primary_key] || existing_notifications_lookup[exact_key]
+        Rails.logger.debug "Notification already exists: #{notification[:title]}"
+        true
+      else
+        false
+      end
+    end
+
+    filtered_notifications
+  end
+
+  def normalize_timestamp(timestamp)
+    case timestamp
+    when String
+      # Parse string and convert to unix timestamp for consistent comparison
+      Time.parse(timestamp).to_i
+    when Time, DateTime
+      # Convert time objects to unix timestamp for consistent comparison
+      timestamp.to_i
+    else
+      timestamp.to_s
+    end
+  rescue StandardError => e
+    Rails.logger.debug "Failed to normalize timestamp #{timestamp}: #{e.message}"
+    timestamp.to_s
+  end
+
+  def bulk_import_notifications(notifications)
+    total_created = 0
+
+    notifications.each_slice(BATCH_SIZE) do |batch|
+      begin
+        # Use upsert_all to efficiently bulk insert notifications
+        result = Notification.upsert_all(
+          batch,
+          returning: %w[id],
+          on_duplicate: :skip
+        )
+
+        batch_created = result.count
+        total_created += batch_created
+
+        Rails.logger.debug "Processed batch of #{batch.size} notifications, created #{batch_created}, total created: #{total_created}"
+
+      rescue StandardError => e
+        Rails.logger.error "Failed to process notification batch: #{e.message}"
+        Rails.logger.error "Batch size: #{batch.size}"
+        Rails.logger.error "Backtrace: #{e.backtrace.first(3).join('\n')}"
+        # Continue with next batch instead of failing completely
+      end
+    end
+
+    total_created
+  end
+
+  def valid_notification_data?(notification_data)
+    # Check for required fields
+    return false unless notification_data.is_a?(Hash)
+
+    unless notification_data['title'].present?
+      Rails.logger.error "Failed to create notification: Validation failed: Title can't be blank"
+      return false
+    end
+
+    unless notification_data['content'].present?
+      Rails.logger.error "Failed to create notification: Validation failed: Content can't be blank"
+      return false
+    end
+
+    true
+  rescue StandardError => e
+    Rails.logger.debug "Notification validation failed: #{e.message} for data: #{notification_data.inspect}"
+    false
+  end
 end
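The dual-key lookup built in filter_existing_notifications stores every existing notification under both a title+content key and a title+content+timestamp key, so an incoming record is treated as a duplicate whether or not its created_at survived the JSON round-trip. The idea in miniature (hypothetical values):

  lookup = {}
  lookup[['Welcome', 'Hello there']] = true                 # primary: title + content
  lookup[['Welcome', 'Hello there', 1_709_290_800]] = true  # exact: plus unix time

  lookup.key?(['Welcome', 'Hello there'])                   # => true, even with no timestamp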
@@ -11,7 +11,12 @@ class Users::ImportData::Points
  def call
    return 0 unless points_data.is_a?(Array)

+    puts "=== POINTS SERVICE DEBUG ==="
+    puts "Points data is array: #{points_data.is_a?(Array)}"
+    puts "Points data size: #{points_data.size}"
+
    Rails.logger.info "Importing #{points_data.size} points for user: #{user.email}"
+    Rails.logger.debug "First point sample: #{points_data.first.inspect}"

    # Pre-load reference data for efficient bulk processing
    preload_reference_data
@@ -19,19 +24,27 @@ class Users::ImportData::Points
    # Filter valid points and prepare for bulk import
    valid_points = filter_and_prepare_points

+    puts "Valid points after filtering: #{valid_points.size}"
+
    if valid_points.empty?
-      Rails.logger.info "No valid points to import"
+      puts "No valid points after filtering - returning 0"
+      Rails.logger.warn "No valid points to import after filtering"
+      Rails.logger.debug "Original points_data size: #{points_data.size}"
      return 0
    end

    # Remove duplicates based on unique constraint
    deduplicated_points = deduplicate_points(valid_points)

+    puts "Deduplicated points: #{deduplicated_points.size}"
+
    Rails.logger.info "Prepared #{deduplicated_points.size} unique valid points (#{points_data.size - deduplicated_points.size} duplicates/invalid skipped)"

    # Bulk import in batches
    total_created = bulk_import_points(deduplicated_points)

+    puts "Total created by bulk import: #{total_created}"
+
    Rails.logger.info "Points import completed. Created: #{total_created}"
    total_created
  end
@@ -45,6 +58,7 @@ class Users::ImportData::Points
    @imports_lookup = user.imports.index_by { |import|
      [import.name, import.source, import.created_at.to_s]
    }
+    Rails.logger.debug "Loaded #{@imports_lookup.size} imports for lookup"

    # Pre-load all countries for efficient lookup
    @countries_lookup = {}
@@ -53,23 +67,26 @@ class Users::ImportData::Points
      @countries_lookup[[country.name, country.iso_a2, country.iso_a3]] = country
      @countries_lookup[country.name] = country
    end
+    Rails.logger.debug "Loaded #{Country.count} countries for lookup"

    # Pre-load visits for this user
    @visits_lookup = user.visits.index_by { |visit|
      [visit.name, visit.started_at.to_s, visit.ended_at.to_s]
    }
+    Rails.logger.debug "Loaded #{@visits_lookup.size} visits for lookup"
  end

  def filter_and_prepare_points
    valid_points = []
    skipped_count = 0

-    points_data.each do |point_data|
+    points_data.each_with_index do |point_data, index|
      next unless point_data.is_a?(Hash)

      # Skip points with invalid or missing required data
      unless valid_point_data?(point_data)
        skipped_count += 1
+        Rails.logger.debug "Skipped point #{index}: invalid data - #{point_data.slice('timestamp', 'longitude', 'latitude', 'lonlat')}"
        next
      end

@@ -77,6 +94,7 @@ class Users::ImportData::Points
      prepared_attributes = prepare_point_attributes(point_data)
      unless prepared_attributes
        skipped_count += 1
+        Rails.logger.debug "Skipped point #{index}: failed to prepare attributes"
        next
      end

@@ -87,6 +105,7 @@ class Users::ImportData::Points
      Rails.logger.warn "Skipped #{skipped_count} points with invalid or missing required data"
    end

+    Rails.logger.debug "Filtered #{valid_points.size} valid points from #{points_data.size} total"
    valid_points
  end

@@ -119,7 +138,10 @@ class Users::ImportData::Points
    resolve_visit_reference(attributes, point_data['visit_reference'])

    # Convert string keys to symbols for consistency with Point model
-    attributes.symbolize_keys
+    result = attributes.symbolize_keys
+
+    Rails.logger.debug "Prepared point attributes: #{result.slice(:lonlat, :timestamp, :import_id, :country_id, :visit_id)}"
+    result
  rescue StandardError => e
    Rails.logger.error "Failed to prepare point attributes: #{e.message}"
    Rails.logger.error "Point data: #{point_data.inspect}"
@@ -136,7 +158,13 @@ class Users::ImportData::Points
    ]

    import = imports_lookup[import_key]
-    attributes['import_id'] = import.id if import
+    if import
+      attributes['import_id'] = import.id
+      Rails.logger.debug "Resolved import reference: #{import_reference['name']} -> #{import.id}"
+    else
+      Rails.logger.debug "Import not found for reference: #{import_reference.inspect}"
+      Rails.logger.debug "Available imports: #{imports_lookup.keys.inspect}"
+    end
  end

  def resolve_country_reference(attributes, country_info)
@@ -159,7 +187,12 @@ class Users::ImportData::Points
      @countries_lookup[[country.name, country.iso_a2, country.iso_a3]] = country
    end

-    attributes['country_id'] = country.id if country
+    if country
+      attributes['country_id'] = country.id
+      Rails.logger.debug "Resolved country reference: #{country_info['name']} -> #{country.id}"
+    else
+      Rails.logger.debug "Country not found for: #{country_info.inspect}"
+    end
  end

  def create_missing_country(country_info)
@@ -183,21 +216,55 @@ class Users::ImportData::Points
    ]

    visit = visits_lookup[visit_key]
-    attributes['visit_id'] = visit.id if visit
+    if visit
+      attributes['visit_id'] = visit.id
+      Rails.logger.debug "Resolved visit reference: #{visit_reference['name']} -> #{visit.id}"
+    else
+      Rails.logger.debug "Visit not found for reference: #{visit_reference.inspect}"
+      Rails.logger.debug "Available visits: #{visits_lookup.keys.inspect}"
+    end
  end

  def deduplicate_points(points)
    points.uniq { |point| [point[:lonlat], point[:timestamp], point[:user_id]] }
  end

+  # Ensure all points have the same keys for upsert_all compatibility
+  def normalize_point_keys(points)
+    # Get all possible keys from all points
+    all_keys = points.flat_map(&:keys).uniq
+
+    # Normalize each point to have all keys (with nil for missing ones)
+    points.map do |point|
+      normalized = {}
+      all_keys.each do |key|
+        normalized[key] = point[key]
+      end
+      normalized
+    end
+  end
+
  def bulk_import_points(points)
    total_created = 0

+    puts "=== BULK IMPORT DEBUG ==="
+    puts "About to bulk import #{points.size} points"
+    puts "First point for import: #{points.first.inspect}"
+
    points.each_slice(BATCH_SIZE) do |batch|
      begin
+        Rails.logger.debug "Processing batch of #{batch.size} points"
+        Rails.logger.debug "First point in batch: #{batch.first.inspect}"
+
+        puts "Processing batch of #{batch.size} points"
+        puts "Sample point attributes: #{batch.first.slice(:lonlat, :timestamp, :user_id, :import_id, :country_id, :visit_id)}"
+
+        # Normalize all points to have the same keys for upsert_all compatibility
+        normalized_batch = normalize_point_keys(batch)
+
        # Use upsert_all to efficiently bulk insert/update points
        result = Point.upsert_all(
-          batch,
+          normalized_batch,
          unique_by: %i[lonlat timestamp user_id],
          returning: %w[id],
          on_duplicate: :skip
@@ -206,17 +273,24 @@ class Users::ImportData::Points
        batch_created = result.count
        total_created += batch_created

+        puts "Batch result count: #{batch_created}"
+
        Rails.logger.debug "Processed batch of #{batch.size} points, created #{batch_created}, total created: #{total_created}"

      rescue StandardError => e
+        puts "Batch import failed: #{e.message}"
+        puts "Backtrace: #{e.backtrace.first(3).join('\n')}"
        Rails.logger.error "Failed to process point batch: #{e.message}"
        Rails.logger.error "Batch size: #{batch.size}"
-        Rails.logger.error "Backtrace: #{e.backtrace.first(3).join('\n')}"
+        Rails.logger.error "First point in failed batch: #{batch.first.inspect}"
+        Rails.logger.error "Backtrace: #{e.backtrace.first(5).join('\n')}"
        # Continue with next batch instead of failing completely
      end
    end

-    total_created
+    puts "Total created across all batches: #{total_created}"
+
+    total_created
  end

  def valid_point_data?(point_data)
@@ -242,6 +316,7 @@ class Users::ImportData::Points
      longitude = point_data['longitude'].to_f
      latitude = point_data['latitude'].to_f
      attributes['lonlat'] = "POINT(#{longitude} #{latitude})"
+      Rails.logger.debug "Reconstructed lonlat: #{attributes['lonlat']}"
    end
  end
 end
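normalize_point_keys exists because ActiveRecord's insert_all/upsert_all build a single INSERT with one shared column list and raise if the attribute hashes do not all carry identical keys. A reduced sketch of the failure mode it prevents (hypothetical rows):

  rows = [
    { lonlat: 'POINT(-73.98 40.75)', timestamp: 1, user_id: 1, import_id: 7 },
    { lonlat: 'POINT(-74.00 40.71)', timestamp: 2, user_id: 1 } # import_id missing
  ]
  # Point.upsert_all(rows, ...) raises on the mismatched keys; after
  # normalize_point_keys both rows carry import_id (nil where absent).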
@@ -1,6 +1,8 @@
 # frozen_string_literal: true

 class Users::ImportData::Stats
+  BATCH_SIZE = 1000
+
  def initialize(user, stats_data)
    @user = user
    @stats_data = stats_data
@@ -11,38 +13,148 @@ class Users::ImportData::Stats

    Rails.logger.info "Importing #{stats_data.size} stats for user: #{user.email}"

-    stats_created = 0
+    # Filter valid stats and prepare for bulk import
+    valid_stats = filter_and_prepare_stats

-    stats_data.each do |stat_data|
-      next unless stat_data.is_a?(Hash)
-
-      # Check if stat already exists (match by year and month)
-      existing_stat = user.stats.find_by(
-        year: stat_data['year'],
-        month: stat_data['month']
-      )
-
-      if existing_stat
-        Rails.logger.debug "Stat already exists: #{stat_data['year']}-#{stat_data['month']}"
-        next
-      end
-
-      # Create new stat
-      stat_attributes = stat_data.except('created_at', 'updated_at')
-      stat = user.stats.create!(stat_attributes)
-      stats_created += 1
-
-      Rails.logger.debug "Created stat: #{stat.year}-#{stat.month}"
-    rescue ActiveRecord::RecordInvalid => e
-      Rails.logger.error "Failed to create stat: #{e.message}"
-      next
+    if valid_stats.empty?
+      Rails.logger.info "Stats import completed. Created: 0"
+      return 0
    end

-    Rails.logger.info "Stats import completed. Created: #{stats_created}"
-    stats_created
+    # Remove existing stats to avoid duplicates
+    deduplicated_stats = filter_existing_stats(valid_stats)
+
+    if deduplicated_stats.size < valid_stats.size
+      Rails.logger.debug "Skipped #{valid_stats.size - deduplicated_stats.size} duplicate stats"
+    end
+
+    # Bulk import in batches
+    total_created = bulk_import_stats(deduplicated_stats)
+
+    Rails.logger.info "Stats import completed. Created: #{total_created}"
+    total_created
  end

  private

  attr_reader :user, :stats_data
+
+  def filter_and_prepare_stats
+    valid_stats = []
+    skipped_count = 0
+
+    stats_data.each do |stat_data|
+      next unless stat_data.is_a?(Hash)
+
+      # Skip stats with missing required data
+      unless valid_stat_data?(stat_data)
+        skipped_count += 1
+        next
+      end
+
+      # Prepare stat attributes for bulk insert
+      prepared_attributes = prepare_stat_attributes(stat_data)
+      valid_stats << prepared_attributes if prepared_attributes
+    end
+
+    if skipped_count > 0
+      Rails.logger.warn "Skipped #{skipped_count} stats with invalid or missing required data"
+    end
+
+    valid_stats
+  end
+
+  def prepare_stat_attributes(stat_data)
+    # Start with base attributes, excluding timestamp fields
+    attributes = stat_data.except('created_at', 'updated_at')
+
+    # Add required attributes for bulk insert
+    attributes['user_id'] = user.id
+    attributes['created_at'] = Time.current
+    attributes['updated_at'] = Time.current
+
+    # Convert string keys to symbols for consistency
+    attributes.symbolize_keys
+  rescue StandardError => e
+    Rails.logger.error "Failed to prepare stat attributes: #{e.message}"
+    Rails.logger.error "Stat data: #{stat_data.inspect}"
+    nil
+  end
+
+  def filter_existing_stats(stats)
+    return stats if stats.empty?
+
+    # Build lookup hash of existing stats for this user
+    existing_stats_lookup = {}
+    user.stats.select(:year, :month).each do |stat|
+      key = [stat.year, stat.month]
+      existing_stats_lookup[key] = true
+    end
+
+    # Filter out stats that already exist
+    filtered_stats = stats.reject do |stat|
+      key = [stat[:year], stat[:month]]
+      if existing_stats_lookup[key]
+        Rails.logger.debug "Stat already exists: #{stat[:year]}-#{stat[:month]}"
+        true
+      else
+        false
+      end
+    end
+
+    filtered_stats
+  end
+
+  def bulk_import_stats(stats)
+    total_created = 0
+
+    stats.each_slice(BATCH_SIZE) do |batch|
+      begin
+        # Use upsert_all to efficiently bulk insert stats
+        result = Stat.upsert_all(
+          batch,
+          returning: %w[id],
+          on_duplicate: :skip
+        )
+
+        batch_created = result.count
+        total_created += batch_created
+
+        Rails.logger.debug "Processed batch of #{batch.size} stats, created #{batch_created}, total created: #{total_created}"
+
+      rescue StandardError => e
+        Rails.logger.error "Failed to process stat batch: #{e.message}"
+        Rails.logger.error "Batch size: #{batch.size}"
+        Rails.logger.error "Backtrace: #{e.backtrace.first(3).join('\n')}"
+        # Continue with next batch instead of failing completely
+      end
+    end
+
+    total_created
+  end
+
+  def valid_stat_data?(stat_data)
+    # Check for required fields
+    return false unless stat_data.is_a?(Hash)
+
+    unless stat_data['year'].present?
+      Rails.logger.error "Failed to create stat: Validation failed: Year can't be blank"
+      return false
+    end
+
+    unless stat_data['month'].present?
+      Rails.logger.error "Failed to create stat: Validation failed: Month can't be blank"
+      return false
+    end
+
+    unless stat_data['distance'].present?
+      Rails.logger.error "Failed to create stat: Validation failed: Distance can't be blank"
+      return false
+    end
+
+    true
+  rescue StandardError => e
+    Rails.logger.debug "Stat validation failed: #{e.message} for data: #{stat_data.inspect}"
+    false
+  end
 end
@@ -1,6 +1,8 @@
 # frozen_string_literal: true

 class Users::ImportData::Trips
+  BATCH_SIZE = 1000
+
  def initialize(user, trips_data)
    @user = user
    @trips_data = trips_data
@@ -11,39 +13,165 @@ class Users::ImportData::Trips

    Rails.logger.info "Importing #{trips_data.size} trips for user: #{user.email}"

-    trips_created = 0
+    # Filter valid trips and prepare for bulk import
+    valid_trips = filter_and_prepare_trips

-    trips_data.each do |trip_data|
-      next unless trip_data.is_a?(Hash)
-
-      # Check if trip already exists (match by name and timestamps)
-      existing_trip = user.trips.find_by(
-        name: trip_data['name'],
-        started_at: trip_data['started_at'],
-        ended_at: trip_data['ended_at']
-      )
-
-      if existing_trip
-        Rails.logger.debug "Trip already exists: #{trip_data['name']}"
-        next
-      end
-
-      # Create new trip
-      trip_attributes = trip_data.except('created_at', 'updated_at')
-      trip = user.trips.create!(trip_attributes)
-      trips_created += 1
-
-      Rails.logger.debug "Created trip: #{trip.name}"
-    rescue ActiveRecord::RecordInvalid => e
-      Rails.logger.error "Failed to create trip: #{e.message}"
-      next
+    if valid_trips.empty?
+      Rails.logger.info "Trips import completed. Created: 0"
+      return 0
    end

-    Rails.logger.info "Trips import completed. Created: #{trips_created}"
-    trips_created
+    # Remove existing trips to avoid duplicates
+    deduplicated_trips = filter_existing_trips(valid_trips)
+
+    if deduplicated_trips.size < valid_trips.size
+      Rails.logger.debug "Skipped #{valid_trips.size - deduplicated_trips.size} duplicate trips"
+    end
+
+    # Bulk import in batches
+    total_created = bulk_import_trips(deduplicated_trips)
+
+    Rails.logger.info "Trips import completed. Created: #{total_created}"
+    total_created
  end

  private

  attr_reader :user, :trips_data
+
+  def filter_and_prepare_trips
+    valid_trips = []
+    skipped_count = 0
+
+    trips_data.each do |trip_data|
+      next unless trip_data.is_a?(Hash)
+
+      # Skip trips with missing required data
+      unless valid_trip_data?(trip_data)
+        skipped_count += 1
+        next
+      end
+
+      # Prepare trip attributes for bulk insert
+      prepared_attributes = prepare_trip_attributes(trip_data)
+      valid_trips << prepared_attributes if prepared_attributes
+    end
+
+    if skipped_count > 0
+      Rails.logger.warn "Skipped #{skipped_count} trips with invalid or missing required data"
+    end
+
+    valid_trips
+  end
+
+  def prepare_trip_attributes(trip_data)
+    # Start with base attributes, excluding timestamp fields
+    attributes = trip_data.except('created_at', 'updated_at')
+
+    # Add required attributes for bulk insert
+    attributes['user_id'] = user.id
+    attributes['created_at'] = Time.current
+    attributes['updated_at'] = Time.current
+
+    # Convert string keys to symbols for consistency
+    attributes.symbolize_keys
+  rescue StandardError => e
+    Rails.logger.error "Failed to prepare trip attributes: #{e.message}"
+    Rails.logger.error "Trip data: #{trip_data.inspect}"
+    nil
+  end
+
+  def filter_existing_trips(trips)
+    return trips if trips.empty?
+
+    # Build lookup hash of existing trips for this user
+    existing_trips_lookup = {}
+    user.trips.select(:name, :started_at, :ended_at).each do |trip|
+      # Normalize timestamp values for consistent comparison
+      key = [trip.name, normalize_timestamp(trip.started_at), normalize_timestamp(trip.ended_at)]
+      existing_trips_lookup[key] = true
+    end
+
+    # Filter out trips that already exist
+    filtered_trips = trips.reject do |trip|
+      # Normalize timestamp values for consistent comparison
+      key = [trip[:name], normalize_timestamp(trip[:started_at]), normalize_timestamp(trip[:ended_at])]
+      if existing_trips_lookup[key]
+        Rails.logger.debug "Trip already exists: #{trip[:name]}"
+        true
+      else
+        false
+      end
+    end
+
+    filtered_trips
+  end
+
+  def normalize_timestamp(timestamp)
+    case timestamp
+    when String
+      # Parse string and convert to iso8601 format for consistent comparison
+      Time.parse(timestamp).utc.iso8601
+    when Time, DateTime
+      # Convert time objects to iso8601 format for consistent comparison
+      timestamp.utc.iso8601
+    else
+      timestamp.to_s
+    end
+  rescue StandardError
+    timestamp.to_s
+  end
+
+  def bulk_import_trips(trips)
+    total_created = 0
+
+    trips.each_slice(BATCH_SIZE) do |batch|
+      begin
+        # Use upsert_all to efficiently bulk insert trips
+        result = Trip.upsert_all(
+          batch,
+          returning: %w[id],
+          on_duplicate: :skip
+        )
+
+        batch_created = result.count
+        total_created += batch_created
+
+        Rails.logger.debug "Processed batch of #{batch.size} trips, created #{batch_created}, total created: #{total_created}"
+
+      rescue StandardError => e
+        Rails.logger.error "Failed to process trip batch: #{e.message}"
+        Rails.logger.error "Batch size: #{batch.size}"
+        Rails.logger.error "Backtrace: #{e.backtrace.first(3).join('\n')}"
+        # Continue with next batch instead of failing completely
+      end
+    end
+
+    total_created
+  end
+
+  def valid_trip_data?(trip_data)
+    # Check for required fields
+    return false unless trip_data.is_a?(Hash)
+
+    unless trip_data['name'].present?
+      Rails.logger.error "Failed to create trip: Validation failed: Name can't be blank"
+      return false
+    end
+
+    unless trip_data['started_at'].present?
+      Rails.logger.error "Failed to create trip: Validation failed: Started at can't be blank"
+      return false
+    end
+
+    unless trip_data['ended_at'].present?
+      Rails.logger.error "Failed to create trip: Validation failed: Ended at can't be blank"
+      return false
+    end
+
+    true
+  rescue StandardError => e
+    Rails.logger.debug "Trip validation failed: #{e.message} for data: #{trip_data.inspect}"
+    false
+  end
 end
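normalize_timestamp in the Trips importer papers over the type mismatch between export payloads (JSON strings) and ActiveRecord readers (Time objects); without it the dedup keys would never collide. In miniature:

  require 'time'

  t = Time.parse('2024-03-01T10:00:00Z')
  t == '2024-03-01T10:00:00Z'             # => false, Time vs String
  t.utc.iso8601 == '2024-03-01T10:00:00Z' # => true after normalization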
@@ -42,11 +42,9 @@
          <th>Name</th>
          <th>File size</th>
          <th>Imported points</th>
-          <% if DawarichSettings.store_geodata? %>
          <th>Reverse geocoded points</th>
-          <% end %>
          <th>Status</th>
          <th>Created at</th>
          <th>Actions</th>
        </tr>
      </thead>
      <tbody
@@ -70,12 +68,14 @@
        <td data-points-count>
          <%= number_with_delimiter import.processed %>
        </td>
+        <% if DawarichSettings.store_geodata? %>
        <td data-reverse-geocoded-points-count>
          <%= number_with_delimiter import.reverse_geocoded_points_count %>
        </td>
+        <% end %>
        <td><%#= import.status %></td>
        <td><%= human_datetime(import.created_at) %></td>
        <td>
          <% if import.file.present? %>
            <%= link_to 'Download', rails_blob_path(import.file, disposition: 'attachment'), class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: import.name %>
          <% end %>
          <%= link_to 'Delete', import, data: { confirm: "Are you sure?", turbo_confirm: "Are you sure?", turbo_method: :delete }, method: :delete, class: "px-4 py-2 bg-red-500 text-white rounded-md" %>
        </td>
      </tr>
    <% end %>
  </tbody>
@@ -109,8 +109,9 @@ RSpec.describe Users::ExportData, type: :service do

  it 'creates a zip file with proper compression settings' do
    expect(Zip::File).to receive(:open).with(zip_file_path, Zip::File::CREATE)
-    expect(zip_file_double).to receive(:default_compression=).with(Zip::Entry::DEFLATED)
-    expect(zip_file_double).to receive(:default_compression_level=).with(9)
+    expect(Zip).to receive(:default_compression).and_return(-1) # Mock original compression
+    expect(Zip).to receive(:default_compression=).with(Zip::Entry::DEFLATED)
+    expect(Zip).to receive(:default_compression=).with(-1) # Restoration

    service.export
  end
 359  spec/services/users/export_import_integration_spec.rb  Normal file

@@ -0,0 +1,359 @@
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe 'Users Export-Import Integration', type: :service do
|
||||
let(:original_user) { create(:user, email: 'original@example.com') }
|
||||
let(:target_user) { create(:user, email: 'target@example.com') }
|
||||
let(:temp_archive_path) { Rails.root.join('tmp', 'test_export.zip') }
|
||||
|
||||
after do
|
||||
# Clean up any test files
|
||||
File.delete(temp_archive_path) if File.exist?(temp_archive_path)
|
||||
end
|
||||
|
||||
describe 'complete export-import cycle' do
|
||||
before do
|
||||
# Create comprehensive test data for original user
|
||||
create_full_user_dataset(original_user)
|
||||
end
|
||||
|
||||
it 'exports and imports all user data while preserving relationships' do
|
||||
# Step 1: Export original user data
|
||||
export_record = Users::ExportData.new(original_user).export
|
||||
|
||||
expect(export_record).to be_present
|
||||
expect(export_record.status).to eq('completed')
|
||||
expect(export_record.file).to be_attached
|
||||
|
||||
# Download export file to temporary location
|
||||
File.open(temp_archive_path, 'wb') do |file|
|
||||
export_record.file.download { |chunk| file.write(chunk) }
|
||||
end
|
||||
|
||||
expect(File.exist?(temp_archive_path)).to be true
|
||||
|
||||
# Step 2: Capture original counts
|
||||
original_counts = calculate_user_entity_counts(original_user)
|
||||
|
||||
# Debug: Check what was exported
|
||||
debug_export_data(temp_archive_path)
|
||||
|
||||
# Debug: Enable detailed logging
|
||||
original_log_level = Rails.logger.level
|
||||
Rails.logger.level = Logger::DEBUG
|
||||
|
||||
begin
|
||||
# Step 3: Import data into target user
|
||||
import_stats = Users::ImportData.new(target_user, temp_archive_path).import
|
||||
ensure
|
||||
# Restore original log level
|
||||
Rails.logger.level = original_log_level
|
||||
end
|
||||
|
||||
# Debug: Check import stats
|
||||
puts "Import stats: #{import_stats.inspect}"
|
||||
|
||||
# Step 4: Calculate user-generated notification count for comparisons
|
||||
# Only user-generated notifications are exported, not system notifications
|
||||
user_notifications_count = original_user.notifications.where.not(
|
||||
title: ['Data import completed', 'Data import failed', 'Export completed', 'Export failed']
|
||||
).count
|
||||
|
||||
# Verify entity counts match
|
||||
target_counts = calculate_user_entity_counts(target_user)
|
||||
|
||||
# Debug: Show count comparison
|
||||
puts "Original counts: #{original_counts.inspect}"
|
||||
puts "Target counts: #{target_counts.inspect}"
|
||||
|
||||
# Compare all entity counts
|
||||
expect(target_counts[:areas]).to eq(original_counts[:areas])
|
||||
expect(target_counts[:imports]).to eq(original_counts[:imports])
|
||||
expect(target_counts[:exports]).to eq(original_counts[:exports])
|
||||
expect(target_counts[:trips]).to eq(original_counts[:trips])
|
||||
expect(target_counts[:stats]).to eq(original_counts[:stats])
|
||||
# Target should have user notifications + import success notification
|
||||
# Original count includes export success, but export filters that out
|
||||
# Import creates its own success notification, so target should have user notifications + import success
|
||||
expect(target_counts[:notifications]).to eq(user_notifications_count + 1) # +1 for import success
|
||||
expect(target_counts[:points]).to eq(original_counts[:points])
|
||||
expect(target_counts[:visits]).to eq(original_counts[:visits])
|
||||
expect(target_counts[:places]).to eq(original_counts[:places])
|
||||
|
||||
# Verify import stats match expectations
|
||||
expect(import_stats[:areas_created]).to eq(original_counts[:areas])
|
||||
expect(import_stats[:imports_created]).to eq(original_counts[:imports])
|
||||
expect(import_stats[:exports_created]).to eq(original_counts[:exports])
|
||||
expect(import_stats[:trips_created]).to eq(original_counts[:trips])
|
||||
expect(import_stats[:stats_created]).to eq(original_counts[:stats])
|
||||
expect(import_stats[:notifications_created]).to eq(user_notifications_count)
|
||||
expect(import_stats[:points_created]).to eq(original_counts[:points])
|
||||
expect(import_stats[:visits_created]).to eq(original_counts[:visits])
|
||||
# Places are global entities, so they may already exist and not be recreated
|
||||
# The count in target_counts shows the user has access to the places (through visits)
|
||||
# but places_created shows how many NEW places were actually created during import
|
||||
# Since places may be global duplicates, we just verify they're accessible
|
||||
expect(target_counts[:places]).to eq(original_counts[:places]) # User still has access to places
|
||||
|
||||
# Step 5: Verify relationships are preserved
|
||||
verify_relationships_preserved(original_user, target_user)
|
||||
|
||||
# Step 6: Verify settings are preserved
|
||||
verify_settings_preserved(original_user, target_user)
|
||||
|
||||
# Step 7: Verify files are restored
|
||||
verify_files_restored(original_user, target_user)
|
||||
end
|
||||
|
||||
it 'is idempotent - running import twice does not create duplicates' do
|
||||
# First export and import
|
||||
export_record = Users::ExportData.new(original_user).export
|
||||
|
||||
File.open(temp_archive_path, 'wb') do |file|
|
||||
export_record.file.download { |chunk| file.write(chunk) }
|
||||
end
|
||||
|
||||
# First import
|
||||
first_import_stats = Users::ImportData.new(target_user, temp_archive_path).import
|
||||
first_counts = calculate_user_entity_counts(target_user)
|
||||
|
||||
# Second import (should not create duplicates)
|
||||
second_import_stats = Users::ImportData.new(target_user, temp_archive_path).import
|
||||
second_counts = calculate_user_entity_counts(target_user)
|
||||
|
||||
# Counts should be identical
|
||||
expect(second_counts).to eq(first_counts)
|
||||
|
||||
# Second import should create no new entities
|
||||
expect(second_import_stats[:areas_created]).to eq(0)
|
||||
expect(second_import_stats[:imports_created]).to eq(0)
|
||||
expect(second_import_stats[:exports_created]).to eq(0)
|
||||
expect(second_import_stats[:trips_created]).to eq(0)
|
||||
expect(second_import_stats[:stats_created]).to eq(0)
|
||||
expect(second_import_stats[:notifications_created]).to eq(0)
|
||||
expect(second_import_stats[:points_created]).to eq(0)
|
||||
expect(second_import_stats[:visits_created]).to eq(0)
|
||||
expect(second_import_stats[:places_created]).to eq(0)
|
||||
end
|
||||
|
||||
it 'does not trigger background processing for imported imports' do
|
||||
# Mock the job to ensure it's not called
|
||||
expect(Import::ProcessJob).not_to receive(:perform_later)
|
||||
|
||||
export_record = Users::ExportData.new(original_user).export
|
||||
|
||||
File.open(temp_archive_path, 'wb') do |file|
|
||||
export_record.file.download { |chunk| file.write(chunk) }
|
||||
end
|
||||
|
||||
Users::ImportData.new(target_user, temp_archive_path).import
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def debug_export_data(archive_path)
|
||||
require 'zip'
|
||||
|
||||
puts "\n=== DEBUGGING EXPORT DATA ==="
|
||||
|
||||
# Extract and read the data.json file
|
||||
Zip::File.open(archive_path) do |zip_file|
|
||||
data_entry = zip_file.find { |entry| entry.name == 'data.json' }
|
||||
if data_entry
|
||||
json_content = data_entry.get_input_stream.read
|
||||
data = JSON.parse(json_content)
|
||||
|
||||
puts "Export counts: #{data['counts'].inspect}"
|
||||
puts "Points in export: #{data['points']&.size || 0}"
|
||||
puts "Places in export: #{data['places']&.size || 0}"
|
||||
puts "First point sample: #{data['points']&.first&.slice('timestamp', 'longitude', 'latitude', 'import_reference', 'country_info', 'visit_reference')}"
|
||||
puts "First place sample: #{data['places']&.first&.slice('name', 'latitude', 'longitude', 'source')}"
|
||||
puts "Imports in export: #{data['imports']&.size || 0}"
|
||||
puts "Countries referenced: #{data['points']&.map { |p| p['country_info']&.dig('name') }&.compact&.uniq || []}"
|
||||
else
|
||||
puts "No data.json found in export!"
|
||||
end
|
||||
end
|
||||
|
||||
puts "=== END DEBUG ==="
|
||||
end

  def create_full_user_dataset(user)
    # Set custom user settings
    user.update!(settings: {
      'distance_unit' => 'km',
      'timezone' => 'America/New_York',
      'immich_url' => 'https://immich.example.com',
      'immich_api_key' => 'test-api-key'
    })

    # Create countries (global entities)
    usa = create(:country, name: 'United States', iso_a2: 'US', iso_a3: 'USA')
    canada = create(:country, name: 'Canada', iso_a2: 'CA', iso_a3: 'CAN')

    # Create places (global entities)
    office = create(:place, name: 'Office Building', latitude: 40.7589, longitude: -73.9851)
    home = create(:place, name: 'Home Sweet Home', latitude: 40.7128, longitude: -74.0060)

    # Create user-specific areas
    create_list(:area, 3, user: user)

    # Create imports with files
    import1 = create(:import, user: user, name: 'March 2024 Data', source: :google_semantic_history)
    import2 = create(:import, user: user, name: 'OwnTracks Data', source: :owntracks)

    # Attach files to imports
    import1.file.attach(
      io: StringIO.new('{"timelineObjects": []}'),
      filename: 'march_2024.json',
      content_type: 'application/json'
    )
    import2.file.attach(
      io: StringIO.new('{"_type": "location"}'),
      filename: 'owntracks.json',
      content_type: 'application/json'
    )

    # Create an export with an attached file
    export1 = create(:export, user: user, name: 'Q1 2024 Export', file_format: :json, file_type: :points)
    export1.file.attach(
      io: StringIO.new('{"type": "FeatureCollection", "features": []}'),
      filename: 'q1_2024.json',
      content_type: 'application/json'
    )

    # Create trips
    create_list(:trip, 2, user: user)

    # Create stats
    create(:stat, user: user, year: 2024, month: 1, distance: 150.5, daily_distance: [[1, 5.2], [2, 8.1]])
    create(:stat, user: user, year: 2024, month: 2, distance: 200.3, daily_distance: [[1, 6.5], [2, 9.8]])

    # Create notifications
    create_list(:notification, 4, user: user)

    # Create visits (linked to places)
    visit1 = create(:visit, user: user, place: office, name: 'Work Visit')
    visit2 = create(:visit, user: user, place: home, name: 'Home Visit')
    visit3 = create(:visit, user: user, place: nil, name: 'Unknown Location')

    # Create points with various relationships
    # Points linked to import1, usa, and visit1
    create_list(:point, 5,
      user: user,
      import: import1,
      country: usa,
      visit: visit1,
      latitude: 40.7589,
      longitude: -73.9851
    )

    # Points linked to import2, canada, and visit2
    create_list(:point, 3,
      user: user,
      import: import2,
      country: canada,
      visit: visit2,
      latitude: 40.7128,
      longitude: -74.0060
    )

    # Points with no relationships (orphaned)
    create_list(:point, 2,
      user: user,
      import: nil,
      country: nil,
      visit: nil
    )

    # Points linked to visit3 (no place)
    create_list(:point, 2,
      user: user,
      import: import1,
      country: usa,
      visit: visit3
    )

    puts "Created dataset with #{user.tracked_points.count} points"
  end
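
  # Totals produced by the helper above: 3 areas, 2 imports, 1 export,
  # 2 trips, 2 stats, 4 notifications, 3 visits, 2 places, 2 countries,
  # and 5 + 3 + 2 + 2 = 12 points.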

  def calculate_user_entity_counts(user)
    {
      areas: user.areas.count,
      imports: user.imports.count,
      exports: user.exports.count,
      trips: user.trips.count,
      stats: user.stats.count,
      notifications: user.notifications.count,
      points: user.tracked_points.count,
      visits: user.visits.count,
      places: user.places.count
    }
  end
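
  # The integration example presumably asserts on these hashes directly;
  # the failure captured in test_output.log below has exactly this shape:
  #
  #   original_counts = calculate_user_entity_counts(original_user)
  #   target_counts = calculate_user_entity_counts(target_user)
  #   expect(target_counts[:points]).to eq(original_counts[:points])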

  def verify_relationships_preserved(original_user, target_user)
    # Verify points maintain their relationships
    original_points_with_imports = original_user.tracked_points.where.not(import_id: nil).count
    target_points_with_imports = target_user.tracked_points.where.not(import_id: nil).count
    expect(target_points_with_imports).to eq(original_points_with_imports)

    original_points_with_countries = original_user.tracked_points.where.not(country_id: nil).count
    target_points_with_countries = target_user.tracked_points.where.not(country_id: nil).count
    expect(target_points_with_countries).to eq(original_points_with_countries)

    original_points_with_visits = original_user.tracked_points.where.not(visit_id: nil).count
    target_points_with_visits = target_user.tracked_points.where.not(visit_id: nil).count
    expect(target_points_with_visits).to eq(original_points_with_visits)

    # Verify visits maintain their place relationships
    original_visits_with_places = original_user.visits.where.not(place_id: nil).count
    target_visits_with_places = target_user.visits.where.not(place_id: nil).count
    expect(target_visits_with_places).to eq(original_visits_with_places)

    # Verify specific relationship consistency:
    # points with the same coordinates should have the same relationships
    original_office_point = original_user.tracked_points.where(
      latitude: 40.7589, longitude: -73.9851
    ).first
    target_office_point = target_user.tracked_points.where(
      latitude: 40.7589, longitude: -73.9851
    ).first

    if original_office_point && target_office_point
      expect(target_office_point.import.name).to eq(original_office_point.import.name) if original_office_point.import
      expect(target_office_point.country.name).to eq(original_office_point.country.name) if original_office_point.country
      expect(target_office_point.visit.name).to eq(original_office_point.visit.name) if original_office_point.visit
    end
  end

  def verify_settings_preserved(original_user, target_user)
    # Verify user settings are correctly applied
    expect(target_user.safe_settings.distance_unit).to eq(original_user.safe_settings.distance_unit)
    expect(target_user.safe_settings.timezone).to eq(original_user.safe_settings.timezone)
    expect(target_user.settings['immich_url']).to eq(original_user.settings['immich_url'])
    expect(target_user.settings['immich_api_key']).to eq(original_user.settings['immich_api_key'])
  end

  def verify_files_restored(original_user, target_user)
    # Verify import files are restored
    original_imports_with_files = original_user.imports.joins(:file_attachment).count
    target_imports_with_files = target_user.imports.joins(:file_attachment).count
    expect(target_imports_with_files).to eq(original_imports_with_files)

    # Verify export files are restored
    original_exports_with_files = original_user.exports.joins(:file_attachment).count
    target_exports_with_files = target_user.exports.joins(:file_attachment).count
    expect(target_exports_with_files).to eq(original_exports_with_files)

    # Verify specific file details
    original_import = original_user.imports.find_by(name: 'March 2024 Data')
    target_import = target_user.imports.find_by(name: 'March 2024 Data')

    if original_import&.file&.attached? && target_import&.file&.attached?
      expect(target_import.file.filename.to_s).to eq(original_import.file.filename.to_s)
      expect(target_import.file.content_type).to eq(original_import.file.content_type)
    end
  end
end

@@ -38,9 +38,6 @@ RSpec.describe Users::ImportData::Imports, type: :service do
    # Create mock files
    File.write(files_directory.join('import_1_2023_MARCH.json'), '{"test": "data"}')
    File.write(files_directory.join('import_2_2023_APRIL.json'), '{"more": "data"}')

    # Mock the Import job to prevent it from being enqueued
    allow(Import::ProcessJob).to receive(:perform_later)
  end

  after do

@@ -98,6 +95,20 @@ RSpec.describe Users::ImportData::Imports, type: :service do

      service.call
    end

    it 'does not trigger background processing jobs' do
      expect(Import::ProcessJob).not_to receive(:perform_later)

      service.call
    end

    it 'sets skip_background_processing flag on created imports' do
      service.call

      user.imports.each do |import|
        expect(import.skip_background_processing).to be_truthy
      end
    end
  end

  context 'with duplicate imports' do
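
The examples above pin down the contract introduced by this commit: imports
created during a user-data restore carry skip_background_processing, so no
Import::ProcessJob is enqueued for them. A minimal sketch of how the service
can satisfy that contract, assuming it builds records roughly like this (only
the flag and the job name come from the specs; the rest is illustrative):

    # Inside Users::ImportData::Imports#call, for each exported import:
    import = user.imports.new(import_attributes)
    # Set the flag before saving so the model's creation callback
    # skips enqueueing Import::ProcessJob for this record.
    import.skip_background_processing = true
    import.save!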
|
||||
|
|
|
|||
92
test_output.log
Normal file
92
test_output.log
Normal file
|
|
@ -0,0 +1,92 @@
Run options: include {locations: {"./spec/services/users/export_import_integration_spec.rb" => [21]}}
Created dataset with 12 points

=== DEBUGGING EXPORT DATA ===
Export counts: {"areas" => 3, "imports" => 2, "exports" => 2, "trips" => 2, "stats" => 2, "notifications" => 4, "points" => 12, "visits" => 3, "places" => 2}
Points in export: 12
Places in export: 2
First point sample: {"timestamp" => 1714559220, "longitude" => -73.9851, "latitude" => 40.7589, "import_reference" => {"name" => "March 2024 Data", "source" => 0, "created_at" => "2025-06-30T16:10:46.550Z"}, "country_info" => {"name" => "United States", "iso_a2" => "US", "iso_a3" => "USA"}, "visit_reference" => {"name" => "Work Visit", "started_at" => "2025-06-30T16:10:46.711Z", "ended_at" => "2025-06-30T17:10:46.711Z"}}
First place sample: {"name" => "Office Building", "latitude" => "40.7589", "longitude" => "-73.9851", "source" => "manual"}
Imports in export: 2
Countries referenced: ["United States", "Canada"]
=== END DEBUG ===
Import stats: {settings_updated: true, areas_created: 3, places_created: 0, imports_created: 2, exports_created: 2, trips_created: 2, stats_created: 2, notifications_created: 4, visits_created: 3, points_created: 0, files_restored: 3}
Original counts: {areas: 3, imports: 2, exports: 2, trips: 2, stats: 2, notifications: 5, points: 12, visits: 3, places: 2}
Target counts: {areas: 3, imports: 2, exports: 2, trips: 2, stats: 2, notifications: 5, points: 0, visits: 3, places: 2}
F/Users/frey/.rvm/rubies/ruby-3.4.1/bin/ruby -I/Users/frey/.rvm/gems/ruby-3.4.1/gems/rspec-core-3.13.3/lib:/Users/frey/.rvm/gems/ruby-3.4.1/gems/rspec-support-3.13.3/lib /Users/frey/.rvm/gems/ruby-3.4.1/gems/rspec-core-3.13.3/exe/rspec --pattern spec/swagger/\*\*/\*_spec.rb --format Rswag::Specs::SwaggerFormatter --dry-run --order defined
Generating Swagger docs ...
Swagger doc generated at /Users/frey/projects/dawarich/dawarich/swagger/v1/swagger.yaml

Top 10 slowest examples (0.00002 seconds, 0.6% of total time):
  Areas API /api/v1/areas post area created returns a 201 response
    0.00001 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Stats API /api/v1/stats get stats found returns a 200 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Areas API /api/v1/areas post invalid request returns a 422 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Areas API /api/v1/areas/{id} delete area deleted returns a 200 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Health API /api/v1/health get Healthy returns a 200 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Points API /api/v1/points get points found returns a 200 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Users API /api/v1/users/me get user found returns a 200 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Settings API /api/v1/settings get settings found returns a 200 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Settings API /api/v1/settings patch settings updated returns a 200 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143
  Points API /api/v1/points/{id} delete point deleted returns a 200 response
    0 seconds /Users/frey/.rvm/gems/ruby-3.4.1/gems/rswag-specs-2.16.0/lib/rswag/specs/example_group_helpers.rb:143

Top 10 slowest example groups:
  Health API
    0.00039 seconds average (0.00039 seconds / 1 example) ./spec/swagger/api/v1/health_controller_spec.rb:5
  Points API
    0.00016 seconds average (0.00065 seconds / 4 examples) ./spec/swagger/api/v1/points_controller_spec.rb:5
  Areas API
    0.00013 seconds average (0.00052 seconds / 4 examples) ./spec/swagger/api/v1/areas_controller_spec.rb:5
  Stats API
    0.00013 seconds average (0.00013 seconds / 1 example) ./spec/swagger/api/v1/stats_controller_spec.rb:5
  Users API
    0.00012 seconds average (0.00012 seconds / 1 example) ./spec/swagger/api/v1/users_controller_spec.rb:5
  Settings API
    0.00011 seconds average (0.00021 seconds / 2 examples) ./spec/swagger/api/v1/settings_controller_spec.rb:5
  Overland Batches API
    0.0001 seconds average (0.0002 seconds / 2 examples) ./spec/swagger/api/v1/overland/batches_controller_spec.rb:5
  Api::V1::Countries::VisitedCities
    0.00009 seconds average (0.00017 seconds / 2 examples) ./spec/swagger/api/v1/countries/visited_cities_spec.rb:5
  OwnTracks Points API
    0.00009 seconds average (0.00017 seconds / 2 examples) ./spec/swagger/api/v1/owntracks/points_controller_spec.rb:5
  Api::V1::PhotosController
    0.00008 seconds average (0.00025 seconds / 3 examples) ./spec/swagger/api/v1/photos_controller_spec.rb:5

Finished in 0.00388 seconds (files took 1.87 seconds to load)
24 examples, 0 failures

Coverage report generated for RSpec to /Users/frey/projects/dawarich/dawarich/coverage.
Line Coverage: 61.36% (670 / 1092)


Failures:

  1) Users Export-Import Integration complete export-import cycle exports and imports all user data while preserving relationships
     Failure/Error: expect(target_counts[:points]).to eq(original_counts[:points])
       Expected 0 to eq 12.
     # ./spec/services/users/export_import_integration_spec.rb:71:in 'block (3 levels) in <top (required)>'
     # /Users/frey/.rvm/gems/ruby-3.4.1/gems/webmock-3.25.1/lib/webmock/rspec.rb:39:in 'block (2 levels) in <top (required)>'

Top 1 slowest examples (0.67919 seconds, 16.2% of total time):
  Users Export-Import Integration complete export-import cycle exports and imports all user data while preserving relationships
    0.67919 seconds ./spec/services/users/export_import_integration_spec.rb:21

Finished in 4.18 seconds (files took 2.21 seconds to load)
1 example, 1 failure

Failed examples:

rspec ./spec/services/users/export_import_integration_spec.rb:21 # Users Export-Import Integration complete export-import cycle exports and imports all user data while preserving relationships

Coverage report generated for RSpec to /Users/frey/projects/dawarich/dawarich/coverage.
Line Coverage: 65.56% (1593 / 2430)
Stopped processing SimpleCov as a previous error not related to SimpleCov has been detected