Merge pull request #1830 from Freika/dev

0.33.1
This commit is contained in:
Evgenii Burmakin 2025-10-07 22:26:01 +02:00 committed by GitHub
commit 493db274f5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
28 changed files with 1096 additions and 572 deletions

View file

@ -1 +1 @@
0.33.0
0.33.1

View file

@ -4,6 +4,24 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
# [0.33.1]
## Changed
- On the Trip page, instead of a list of visited countries, only the number of countries is shown. Clicking it opens a modal with the list of countries visited during the trip. #1731
## Fixed
- `GET /api/v1/stats` endpoint now correctly returns 0 instead of null if no points were tracked in the requested period.
- User import data is now streamed instead of being loaded into memory all at once. This should prevent large imports from exhausting memory or hitting IO limits while reading export archives.
- Popup for manual visit creation now looks better in both light and dark modes. #1835
- Fixed a bug where visit circles were not interactive on the map page. #1833
- Fixed a bug with stats sharing settings not being filled. #1826
- Fixed a bug where user could not be deleted due to counter cache on points. #1818
- Introduced `apt-get upgrade` before installing new packages in the Docker image to prevent vulnerabilities. #1793
- Fixed time shift when creating visits manually. #1679
- Provide default map layer if user settings are not set.
# [0.33.0] - 2025-09-29
## Fixed

File diff suppressed because one or more lines are too long

View file

@ -1,6 +1,5 @@
// Configure your import map in config/importmap.rb. Read more: https://github.com/rails/importmap-rails
import "@rails/ujs"
import "@rails/actioncable"
import "controllers"
import "@hotwired/turbo-rails"
@ -13,5 +12,3 @@ import "./channels"
import "trix"
import "@rails/actiontext"
Rails.start()

View file

@ -197,39 +197,45 @@ export default class extends Controller {
const startTime = formatDateTime(now);
const endTime = formatDateTime(oneHourLater);
// Create form HTML
// Create form HTML using DaisyUI classes for automatic theme support
const formHTML = `
<div class="visit-form" style="min-width: 280px;">
<h3 style="margin-top: 0; margin-bottom: 15px; font-size: 16px; color: #333;">Add New Visit</h3>
<h3 class="text-base font-semibold mb-4">Add New Visit</h3>
<form id="add-visit-form" style="display: flex; flex-direction: column; gap: 10px;">
<div>
<label for="visit-name" style="display: block; margin-bottom: 5px; font-weight: bold; font-size: 14px;">Name:</label>
<form id="add-visit-form" class="space-y-3">
<div class="form-control">
<label for="visit-name" class="label">
<span class="label-text font-medium">Name:</span>
</label>
<input type="text" id="visit-name" name="name" required
style="width: 100%; padding: 8px; border: 1px solid #ccc; border-radius: 4px; font-size: 14px;"
class="input input-bordered w-full"
placeholder="Enter visit name">
</div>
<div>
<label for="visit-start" style="display: block; margin-bottom: 5px; font-weight: bold; font-size: 14px;">Start Time:</label>
<div class="form-control">
<label for="visit-start" class="label">
<span class="label-text font-medium">Start Time:</span>
</label>
<input type="datetime-local" id="visit-start" name="started_at" required value="${startTime}"
style="width: 100%; padding: 8px; border: 1px solid #ccc; border-radius: 4px; font-size: 14px;">
class="input input-bordered w-full">
</div>
<div>
<label for="visit-end" style="display: block; margin-bottom: 5px; font-weight: bold; font-size: 14px;">End Time:</label>
<div class="form-control">
<label for="visit-end" class="label">
<span class="label-text font-medium">End Time:</span>
</label>
<input type="datetime-local" id="visit-end" name="ended_at" required value="${endTime}"
style="width: 100%; padding: 8px; border: 1px solid #ccc; border-radius: 4px; font-size: 14px;">
class="input input-bordered w-full">
</div>
<input type="hidden" name="latitude" value="${lat}">
<input type="hidden" name="longitude" value="${lng}">
<div style="display: flex; gap: 10px; margin-top: 15px;">
<button type="submit" style="flex: 1; background: #28a745; color: white; border: none; padding: 10px; border-radius: 4px; cursor: pointer; font-weight: bold;">
<div class="flex gap-2 mt-4">
<button type="submit" class="btn btn-success flex-1">
Create Visit
</button>
<button type="button" id="cancel-visit" style="flex: 1; background: #dc3545; color: white; border: none; padding: 10px; border-radius: 4px; cursor: pointer; font-weight: bold;">
<button type="button" id="cancel-visit" class="btn btn-error flex-1">
Cancel
</button>
</div>

View file

@ -167,25 +167,28 @@ export default class extends BaseController {
// Create a proper Leaflet layer for fog
this.fogOverlay = new (createFogOverlay())();
// Create custom pane for areas
// Create custom panes with proper z-index ordering
// Leaflet default panes: tilePane=200, overlayPane=400, shadowPane=500, markerPane=600, tooltipPane=650, popupPane=700
// Areas pane - below visits so they don't block interaction
this.map.createPane('areasPane');
this.map.getPane('areasPane').style.zIndex = 650;
this.map.getPane('areasPane').style.pointerEvents = 'all';
this.map.getPane('areasPane').style.zIndex = 605; // Above markerPane but below visits
this.map.getPane('areasPane').style.pointerEvents = 'none'; // Don't block clicks, let them pass through
// Create custom panes for visits
// Note: We'll still create visitsPane for backward compatibility
// Legacy visits pane for backward compatibility
this.map.createPane('visitsPane');
this.map.getPane('visitsPane').style.zIndex = 600;
this.map.getPane('visitsPane').style.pointerEvents = 'all';
// Create separate panes for confirmed and suggested visits
this.map.createPane('confirmedVisitsPane');
this.map.getPane('confirmedVisitsPane').style.zIndex = 450;
this.map.getPane('confirmedVisitsPane').style.pointerEvents = 'all';
this.map.getPane('visitsPane').style.zIndex = 615;
this.map.getPane('visitsPane').style.pointerEvents = 'auto';
// Suggested visits pane - interactive layer
this.map.createPane('suggestedVisitsPane');
this.map.getPane('suggestedVisitsPane').style.zIndex = 460;
this.map.getPane('suggestedVisitsPane').style.pointerEvents = 'all';
this.map.getPane('suggestedVisitsPane').style.zIndex = 610;
this.map.getPane('suggestedVisitsPane').style.pointerEvents = 'auto';
// Confirmed visits pane - on top of suggested, interactive
this.map.createPane('confirmedVisitsPane');
this.map.getPane('confirmedVisitsPane').style.zIndex = 620;
this.map.getPane('confirmedVisitsPane').style.pointerEvents = 'auto';
// Initialize areasLayer as a feature group and add it to the map immediately
this.areasLayer = new L.FeatureGroup();

View file

@ -50,9 +50,17 @@ export function createAllMapLayers(map, selectedLayerName, selfHosted) {
const layers = {};
const mapsConfig = selfHosted === "true" ? rasterMapsConfig : vectorMapsConfig;
// Determine the default layer based on self-hosted mode
const defaultLayerName = selfHosted === "true" ? "OpenStreetMap" : "Light";
// If selectedLayerName is null/undefined or doesn't exist in config, use default
const layerToSelect = selectedLayerName && mapsConfig[selectedLayerName]
? selectedLayerName
: defaultLayerName;
Object.keys(mapsConfig).forEach(layerKey => {
// Create the layer and add it to the map if it's the user's selected layer
const layer = createMapLayer(map, selectedLayerName, layerKey, selfHosted);
// Create the layer and add it to the map if it's the selected/default layer
const layer = createMapLayer(map, layerToSelect, layerKey, selfHosted);
layers[layerKey] = layer;
});

View file

@ -12,14 +12,17 @@ export class VisitsManager {
this.userTheme = userTheme;
// Create custom panes for different visit types
if (!map.getPane('confirmedVisitsPane')) {
map.createPane('confirmedVisitsPane');
map.getPane('confirmedVisitsPane').style.zIndex = 450; // Above default overlay pane (400)
}
// Leaflet default panes: tilePane=200, overlayPane=400, shadowPane=500, markerPane=600, tooltipPane=650, popupPane=700
if (!map.getPane('suggestedVisitsPane')) {
map.createPane('suggestedVisitsPane');
map.getPane('suggestedVisitsPane').style.zIndex = 460; // Below confirmed visits but above base layers
map.getPane('suggestedVisitsPane').style.zIndex = 610; // Above markerPane (600), below tooltipPane (650)
map.getPane('suggestedVisitsPane').style.pointerEvents = 'auto'; // Ensure interactions work
}
if (!map.getPane('confirmedVisitsPane')) {
map.createPane('confirmedVisitsPane');
map.getPane('confirmedVisitsPane').style.zIndex = 620; // Above suggested visits
map.getPane('confirmedVisitsPane').style.pointerEvents = 'auto'; // Ensure interactions work
}
this.visitCircles = L.layerGroup();
@ -1324,38 +1327,31 @@ export class VisitsManager {
// Create popup content with form and dropdown
const defaultName = visit.name;
const popupContent = `
<div class="p-4 bg-base-100 text-base-content rounded-lg shadow-lg">
<div class="mb-4">
<div class="text-sm mb-2 text-base-content/80 font-medium">
${dateTimeDisplay.trim()}
</div>
<div class="space-y-1">
<div class="text-sm text-base-content/60">
Duration: ${durationText}
</div>
<div class="text-sm ${statusColorClass} font-semibold">
Status: ${visit.status.charAt(0).toUpperCase() + visit.status.slice(1)}
</div>
<div class="text-xs text-base-content/50 font-mono">
${visit.place.latitude}, ${visit.place.longitude}
</div>
</div>
<div style="min-width: 280px;">
<h3 class="text-base font-semibold mb-3">${dateTimeDisplay.trim()}</h3>
<div class="space-y-1 mb-4 text-sm">
<div>Duration: ${durationText}</div>
<div class="${statusColorClass} font-semibold">Status: ${visit.status.charAt(0).toUpperCase() + visit.status.slice(1)}</div>
<div class="text-xs opacity-60 font-mono">${visit.place.latitude}, ${visit.place.longitude}</div>
</div>
<form class="visit-name-form space-y-3" data-visit-id="${visit.id}">
<div class="form-control">
<label class="label">
<span class="label-text text-sm font-medium">Visit Name</span>
<span class="label-text font-medium">Visit Name:</span>
</label>
<input type="text"
class="input input-bordered input-sm w-full bg-base-200 text-base-content placeholder:text-base-content/50"
class="input input-bordered w-full"
value="${defaultName}"
placeholder="Enter visit name">
</div>
<div class="form-control">
<label class="label">
<span class="label-text text-sm font-medium">Location</span>
<span class="label-text font-medium">Location:</span>
</label>
<select class="select select-bordered select-sm text-xs w-full bg-base-200 text-base-content" name="place">
<select class="select select-bordered w-full" name="place">
${possiblePlaces.length > 0 ? possiblePlaces.map(place => `
<option value="${place.id}" ${place.id === visit.place.id ? 'selected' : ''}>
${place.name}
@ -1367,36 +1363,24 @@ export class VisitsManager {
`}
</select>
</div>
<div class="flex gap-2 mt-4 pt-2 border-t border-base-300">
<button type="submit" class="btn btn-sm btn-primary flex-1">
<svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M5 13l4 4L19 7"></path>
</svg>
<div class="grid grid-cols-3 gap-2">
<button type="submit" class="btn btn-primary btn-sm">
Save
</button>
${visit.status !== 'confirmed' ? `
<button type="button" class="btn btn-sm btn-success confirm-visit" data-id="${visit.id}">
<svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12l2 2 4-4"></path>
</svg>
<button type="button" class="btn btn-success btn-sm confirm-visit" data-id="${visit.id}">
Confirm
</button>
<button type="button" class="btn btn-sm btn-error decline-visit" data-id="${visit.id}">
<svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"></path>
</svg>
<button type="button" class="btn btn-error btn-sm decline-visit" data-id="${visit.id}">
Decline
</button>
` : ''}
</div>
<div class="mt-2">
<button type="button" class="btn btn-sm btn-outline btn-error w-full delete-visit" data-id="${visit.id}">
<svg class="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16"></path>
</svg>
Delete Visit
</button>
` : '<div class="col-span-2"></div>'}
</div>
<button type="button" class="btn btn-outline btn-error btn-sm w-full delete-visit" data-id="${visit.id}">
Delete Visit
</button>
</form>
</div>
`;

View file

@ -33,14 +33,14 @@ class Stat < ApplicationRecord
end
def sharing_enabled?
sharing_settings['enabled'] == true
sharing_settings.try(:[], 'enabled') == true
end
def sharing_expired?
expiration = sharing_settings['expiration']
expiration = sharing_settings.try(:[], 'expiration')
return false if expiration.blank?
expires_at_value = sharing_settings['expires_at']
expires_at_value = sharing_settings.try(:[], 'expires_at')
return true if expires_at_value.blank?
expires_at = begin

View file

@ -10,7 +10,7 @@ class StatsSerializer
def call
{
totalDistanceKm: total_distance_km,
totalPointsTracked: user.points_count,
totalPointsTracked: user.points_count.to_i,
totalReverseGeocodedPoints: reverse_geocoded_points,
totalCountriesVisited: user.countries_visited.count,
totalCitiesVisited: user.cities_visited.count,

View file

@ -1,6 +1,7 @@
# frozen_string_literal: true
require 'zip'
require 'oj'
# Users::ImportData - Imports complete user data from exported archive
#
@ -22,6 +23,9 @@ require 'zip'
# Files are restored to their original locations and properly attached to records.
class Users::ImportData
STREAM_BATCH_SIZE = 5000
STREAMED_SECTIONS = %w[places visits points].freeze
def initialize(user, archive_path)
@user = user
@archive_path = archive_path
@ -46,10 +50,7 @@ class Users::ImportData
ActiveRecord::Base.transaction do
extract_archive
data = load_json_data
import_in_correct_order(data)
process_archive_data
create_success_notification
@import_stats
@ -73,14 +74,10 @@ class Users::ImportData
zip_file.each do |entry|
next if entry.directory?
# Sanitize entry name to prevent path traversal attacks
sanitized_name = sanitize_zip_entry_name(entry.name)
next if sanitized_name.nil?
# Compute absolute destination path
extraction_path = File.expand_path(File.join(@import_directory, sanitized_name))
# Verify the extraction path is within the import directory
safe_import_dir = File.expand_path(@import_directory) + File::SEPARATOR
unless extraction_path.start_with?(safe_import_dir) || extraction_path == File.expand_path(@import_directory)
Rails.logger.warn "Skipping potentially malicious ZIP entry: #{entry.name} (would extract to #{extraction_path})"
@ -90,24 +87,19 @@ class Users::ImportData
Rails.logger.debug "Extracting #{entry.name} to #{extraction_path}"
FileUtils.mkdir_p(File.dirname(extraction_path))
# Use destination_directory parameter for rubyzip 3.x compatibility
entry.extract(sanitized_name, destination_directory: @import_directory)
end
end
end
def sanitize_zip_entry_name(entry_name)
# Remove leading slashes, backslashes, and dots
sanitized = entry_name.gsub(%r{^[/\\]+}, '')
# Reject entries with path traversal attempts
if sanitized.include?('..') || sanitized.start_with?('/') || sanitized.start_with?('\\')
Rails.logger.warn "Rejecting potentially malicious ZIP entry name: #{entry_name}"
return nil
end
# Reject absolute paths
if Pathname.new(sanitized).absolute?
Rails.logger.warn "Rejecting absolute path in ZIP entry: #{entry_name}"
return nil
@ -116,52 +108,188 @@ class Users::ImportData
sanitized
end
def load_json_data
json_path = @import_directory.join('data.json')
unless File.exist?(json_path)
raise StandardError, "Data file not found in archive: data.json"
end
JSON.parse(File.read(json_path))
rescue JSON::ParserError => e
raise StandardError, "Invalid JSON format in data file: #{e.message}"
end
def import_in_correct_order(data)
def process_archive_data
Rails.logger.info "Starting data import for user: #{user.email}"
if data['counts']
Rails.logger.info "Expected entity counts from export: #{data['counts']}"
json_path = @import_directory.join('data.json')
unless File.exist?(json_path)
raise StandardError, 'Data file not found in archive: data.json'
end
Rails.logger.debug "Available data keys: #{data.keys.inspect}"
initialize_stream_state
import_settings(data['settings']) if data['settings']
import_areas(data['areas']) if data['areas']
handler = ::JsonStreamHandler.new(self)
parser = Oj::Parser.new(:saj, handler: handler)
# Import places first to ensure they're available for visits
places_imported = import_places(data['places']) if data['places']
Rails.logger.info "Places import phase completed: #{places_imported} places imported"
import_imports(data['imports']) if data['imports']
import_exports(data['exports']) if data['exports']
import_trips(data['trips']) if data['trips']
import_stats(data['stats']) if data['stats']
import_notifications(data['notifications']) if data['notifications']
# Import visits after places to ensure proper place resolution
visits_imported = import_visits(data['visits']) if data['visits']
Rails.logger.info "Visits import phase completed: #{visits_imported} visits imported"
import_points(data['points']) if data['points']
# Final validation check
if data['counts']
validate_import_completeness(data['counts'])
File.open(json_path, 'rb') do |io|
parser.load(io)
end
finalize_stream_processing
rescue Oj::ParseError => e
raise StandardError, "Invalid JSON format in data file: #{e.message}"
rescue IOError => e
raise StandardError, "Failed to read JSON data: #{e.message}"
end
# Resets per-run streaming state before data.json is parsed.
def initialize_stream_state
# Entity counts declared by the export; used for validation after import.
@expected_counts = nil
# Place hashes buffered in memory, flushed in STREAM_BATCH_SIZE batches.
@places_batch = []
# Open NDJSON buffer file handles, keyed by section (:visits, :points).
@stream_writers = {}
# Paths of the NDJSON buffer files, kept for the replay phase.
@stream_temp_paths = {}
end
# Completes the import once the JSON stream has been fully parsed:
# flushes any pending places, closes the NDJSON buffers, then replays
# the buffered visits and points into the database.
def finalize_stream_processing
flush_places_batch
close_stream_writer(:visits)
close_stream_writer(:points)
# Visits are replayed before points — presumably so points can resolve
# visit references; TODO confirm against Users::ImportData::Points.
process_visits_stream
process_points_stream
Rails.logger.info "Data import completed. Stats: #{@import_stats}"
# Completeness is only checked when the export declared expected counts.
validate_import_completeness(@expected_counts) if @expected_counts.present?
end
# Dispatches a fully-parsed, non-streamed top-level JSON section to its
# importer. Streamed sections (see STREAMED_SECTIONS) are delivered
# element-by-element via handle_stream_value instead and only reach the
# else branch here.
def handle_section(key, value)
case key
when 'counts'
# Only a Hash is accepted; anything else leaves @expected_counts nil.
@expected_counts = value if value.is_a?(Hash)
Rails.logger.info "Expected entity counts from export: #{@expected_counts}" if @expected_counts
when 'settings'
# Settings are skipped when blank, unlike the sections below.
import_settings(value) if value.present?
when 'areas'
import_areas(value)
when 'imports'
import_imports(value)
when 'exports'
import_exports(value)
when 'trips'
import_trips(value)
when 'stats'
import_stats_section(value)
when 'notifications'
import_notifications(value)
else
# Streamed sections are expected here; anything else is logged once.
Rails.logger.debug "Unhandled non-stream section #{key}" unless STREAMED_SECTIONS.include?(key)
end
end
# Receives one element of a streamed array section as it is parsed.
# Places are imported in batches immediately; visits and points are
# buffered to disk (NDJSON) and replayed after parsing completes.
def handle_stream_value(section, value)
case section
when 'places'
queue_place_for_import(value)
when 'visits'
append_to_stream(:visits, value)
when 'points'
append_to_stream(:points, value)
else
Rails.logger.debug "Received stream value for unknown section #{section}"
end
end
# Called when a streamed section's array has been fully consumed;
# flushes the in-memory places batch or closes the on-disk buffer.
def finish_stream(section)
case section
when 'places'
flush_places_batch
when 'visits'
close_stream_writer(:visits)
when 'points'
close_stream_writer(:points)
end
end
# Buffers one place hash, importing the batch once it reaches
# STREAM_BATCH_SIZE. Non-Hash values are silently ignored.
def queue_place_for_import(place_data)
return unless place_data.is_a?(Hash)
@places_batch << place_data
if @places_batch.size >= STREAM_BATCH_SIZE
import_places_batch(@places_batch)
@places_batch.clear
end
end
# Imports any places still buffered; no-op when the batch is empty.
def flush_places_batch
return if @places_batch.blank?
import_places_batch(@places_batch)
@places_batch.clear
end
# Imports one batch of places and accumulates the created count.
# batch.dup protects the importer from the caller clearing the array.
def import_places_batch(batch)
Rails.logger.debug "Importing places batch of size #{batch.size}"
places_created = Users::ImportData::Places.new(user, batch.dup).call.to_i
@import_stats[:places_created] += places_created
end
# Serializes one value as a JSON line into the section's buffer file.
# NOTE(review): the guard also drops a literal false/nil value; harmless
# if stream values are always hashes — confirm against the export format.
def append_to_stream(section, value)
return unless value
writer = stream_writer(section)
writer.puts(Oj.dump(value, mode: :compat))
end
# Lazily opens (and memoizes) the NDJSON buffer handle for a section.
def stream_writer(section)
@stream_writers[section] ||= begin
path = stream_temp_path(section)
Rails.logger.debug "Creating stream buffer for #{section} at #{path}"
File.open(path, 'w')
end
end
# Memoized buffer-file path inside the extracted import directory.
def stream_temp_path(section)
@stream_temp_paths[section] ||= @import_directory.join("stream_#{section}.ndjson")
end
# Closes a section's buffer handle if one was opened. The handle is
# always removed from the registry, even if #close raises.
def close_stream_writer(section)
@stream_writers[section]&.close
ensure
@stream_writers.delete(section)
end
# Replays the buffered visits NDJSON file, importing in batches of
# STREAM_BATCH_SIZE. No-op when no visits were buffered.
def process_visits_stream
path = @stream_temp_paths[:visits]
return unless path&.exist?
Rails.logger.info 'Importing visits from streamed buffer'
batch = []
File.foreach(path) do |line|
line = line.strip
next if line.blank?
batch << Oj.load(line)
if batch.size >= STREAM_BATCH_SIZE
import_visits_batch(batch)
batch = []
end
end
# Import whatever remains after the last full batch.
import_visits_batch(batch) if batch.any?
end
# Imports one batch of visits and accumulates the created count.
def import_visits_batch(batch)
visits_created = Users::ImportData::Visits.new(user, batch).call.to_i
@import_stats[:visits_created] += visits_created
end
# Replays the buffered points NDJSON file through the streaming Points
# importer; batching happens inside the importer itself. No-op when no
# points were buffered.
def process_points_stream
path = @stream_temp_paths[:points]
return unless path&.exist?
Rails.logger.info 'Importing points from streamed buffer'
importer = Users::ImportData::Points.new(user, nil, batch_size: STREAM_BATCH_SIZE)
File.foreach(path) do |line|
line = line.strip
next if line.blank?
importer.add(Oj.load(line))
end
# finalize flushes the last partial batch and returns the total created.
@import_stats[:points_created] = importer.finalize.to_i
end
def import_settings(settings_data)
@ -172,67 +300,40 @@ class Users::ImportData
def import_areas(areas_data)
Rails.logger.debug "Importing #{areas_data&.size || 0} areas"
areas_created = Users::ImportData::Areas.new(user, areas_data).call
@import_stats[:areas_created] = areas_created
end
def import_places(places_data)
Rails.logger.debug "Importing #{places_data&.size || 0} places"
places_created = Users::ImportData::Places.new(user, places_data).call
@import_stats[:places_created] = places_created
places_created
areas_created = Users::ImportData::Areas.new(user, areas_data).call.to_i
@import_stats[:areas_created] += areas_created
end
def import_imports(imports_data)
Rails.logger.debug "Importing #{imports_data&.size || 0} imports"
imports_created, files_restored = Users::ImportData::Imports.new(user, imports_data, @import_directory.join('files')).call
@import_stats[:imports_created] = imports_created
@import_stats[:files_restored] += files_restored
@import_stats[:imports_created] += imports_created.to_i
@import_stats[:files_restored] += files_restored.to_i
end
def import_exports(exports_data)
Rails.logger.debug "Importing #{exports_data&.size || 0} exports"
exports_created, files_restored = Users::ImportData::Exports.new(user, exports_data, @import_directory.join('files')).call
@import_stats[:exports_created] = exports_created
@import_stats[:files_restored] += files_restored
@import_stats[:exports_created] += exports_created.to_i
@import_stats[:files_restored] += files_restored.to_i
end
def import_trips(trips_data)
Rails.logger.debug "Importing #{trips_data&.size || 0} trips"
trips_created = Users::ImportData::Trips.new(user, trips_data).call
@import_stats[:trips_created] = trips_created
trips_created = Users::ImportData::Trips.new(user, trips_data).call.to_i
@import_stats[:trips_created] += trips_created
end
def import_stats(stats_data)
def import_stats_section(stats_data)
Rails.logger.debug "Importing #{stats_data&.size || 0} stats"
stats_created = Users::ImportData::Stats.new(user, stats_data).call
@import_stats[:stats_created] = stats_created
stats_created = Users::ImportData::Stats.new(user, stats_data).call.to_i
@import_stats[:stats_created] += stats_created
end
def import_notifications(notifications_data)
Rails.logger.debug "Importing #{notifications_data&.size || 0} notifications"
notifications_created = Users::ImportData::Notifications.new(user, notifications_data).call
@import_stats[:notifications_created] = notifications_created
end
def import_visits(visits_data)
Rails.logger.debug "Importing #{visits_data&.size || 0} visits"
visits_created = Users::ImportData::Visits.new(user, visits_data).call
@import_stats[:visits_created] = visits_created
visits_created
end
def import_points(points_data)
Rails.logger.info "About to import #{points_data&.size || 0} points"
begin
points_created = Users::ImportData::Points.new(user, points_data).call
@import_stats[:points_created] = points_created
rescue StandardError => e
ExceptionReporter.call(e, 'Points import failed')
@import_stats[:points_created] = 0
end
notifications_created = Users::ImportData::Notifications.new(user, notifications_data).call.to_i
@import_stats[:notifications_created] += notifications_created
end
def cleanup_temporary_files(import_directory)
@ -246,15 +347,15 @@ class Users::ImportData
def create_success_notification
summary = "#{@import_stats[:points_created]} points, " \
"#{@import_stats[:visits_created]} visits, " \
"#{@import_stats[:places_created]} places, " \
"#{@import_stats[:trips_created]} trips, " \
"#{@import_stats[:areas_created]} areas, " \
"#{@import_stats[:imports_created]} imports, " \
"#{@import_stats[:exports_created]} exports, " \
"#{@import_stats[:stats_created]} stats, " \
"#{@import_stats[:files_restored]} files restored, " \
"#{@import_stats[:notifications_created]} notifications"
"#{@import_stats[:visits_created]} visits, " \
"#{@import_stats[:places_created]} places, " \
"#{@import_stats[:trips_created]} trips, " \
"#{@import_stats[:areas_created]} areas, " \
"#{@import_stats[:imports_created]} imports, " \
"#{@import_stats[:exports_created]} exports, " \
"#{@import_stats[:stats_created]} stats, " \
"#{@import_stats[:files_restored]} files restored, " \
"#{@import_stats[:notifications_created]} notifications"
::Notifications::Create.new(
user: user,
@ -274,7 +375,7 @@ class Users::ImportData
end
def validate_import_completeness(expected_counts)
Rails.logger.info "Validating import completeness..."
Rails.logger.info 'Validating import completeness...'
discrepancies = []
@ -291,7 +392,7 @@ class Users::ImportData
if discrepancies.any?
Rails.logger.warn "Import completed with discrepancies: #{discrepancies.join(', ')}"
else
Rails.logger.info "Import validation successful - all entities imported correctly"
Rails.logger.info 'Import validation successful - all entities imported correctly'
end
end
end

View file

@ -1,32 +1,67 @@
# frozen_string_literal: true
class Users::ImportData::Places
def initialize(user, places_data)
BATCH_SIZE = 5000
def initialize(user, places_data = nil, batch_size: BATCH_SIZE, logger: Rails.logger)
@user = user
@places_data = places_data
@batch_size = batch_size
@logger = logger
@buffer = []
@created = 0
end
def call
return 0 unless places_data.is_a?(Array)
return 0 unless places_data.respond_to?(:each)
Rails.logger.info "Importing #{places_data.size} places for user: #{user.email}"
logger.info "Importing #{collection_description(places_data)} places for user: #{user.email}"
places_created = 0
places_data.each do |place_data|
next unless place_data.is_a?(Hash)
place = find_or_create_place_for_import(place_data)
places_created += 1 if place&.respond_to?(:previously_new_record?) && place.previously_new_record?
enumerate(places_data) do |place_data|
add(place_data)
end
Rails.logger.info "Places import completed. Created: #{places_created}"
places_created
finalize
end
# Streaming entry point: buffers one place hash and flushes when the
# buffer reaches batch_size. Non-Hash input is silently ignored.
def add(place_data)
return unless place_data.is_a?(Hash)
@buffer << place_data
flush_batch if @buffer.size >= batch_size
end
# Flushes any remaining buffered places and returns the total created.
def finalize
flush_batch
logger.info "Places import completed. Created: #{@created}"
@created
end
private
attr_reader :user, :places_data
attr_reader :user, :places_data, :batch_size, :logger
# Thin wrapper around #each used by #call; presumably a seam so
# alternative iteration strategies can override it — confirm intent.
def enumerate(collection, &block)
collection.each(&block)
end
# Size label for log messages: the collection's size when it responds
# to #size, otherwise the literal 'streamed'.
def collection_description(collection)
return collection.size if collection.respond_to?(:size)
'streamed'
end
# Imports every buffered place, counting only records that were newly
# created (previously_new_record? is attached as a singleton method by
# find_or_create_place_for_import). The buffer is cleared afterwards.
def flush_batch
return if @buffer.empty?
logger.debug "Processing places batch of #{@buffer.size}"
@buffer.each do |place_data|
place = find_or_create_place_for_import(place_data)
@created += 1 if place&.respond_to?(:previously_new_record?) && place.previously_new_record?
end
@buffer.clear
end
def find_or_create_place_for_import(place_data)
name = place_data['name']
@ -34,14 +69,12 @@ class Users::ImportData::Places
longitude = place_data['longitude']&.to_f
unless name.present? && latitude.present? && longitude.present?
Rails.logger.debug "Skipping place with missing required data: #{place_data.inspect}"
logger.debug "Skipping place with missing required data: #{place_data.inspect}"
return nil
end
Rails.logger.debug "Processing place for import: #{name} at (#{latitude}, #{longitude})"
logger.debug "Processing place for import: #{name} at (#{latitude}, #{longitude})"
# During import, we prioritize data integrity for the importing user
# First try exact match (name + coordinates)
existing_place = Place.where(
name: name,
latitude: latitude,
@ -49,31 +82,29 @@ class Users::ImportData::Places
).first
if existing_place
Rails.logger.debug "Found exact place match: #{name} at (#{latitude}, #{longitude}) -> existing place ID #{existing_place.id}"
logger.debug "Found exact place match: #{name} at (#{latitude}, #{longitude}) -> existing place ID #{existing_place.id}"
existing_place.define_singleton_method(:previously_new_record?) { false }
return existing_place
end
Rails.logger.debug "No exact match found for #{name} at (#{latitude}, #{longitude}). Creating new place."
logger.debug "No exact match found for #{name} at (#{latitude}, #{longitude}). Creating new place."
# If no exact match, create a new place to ensure data integrity
# This prevents data loss during import even if similar places exist
place_attributes = place_data.except('created_at', 'updated_at', 'latitude', 'longitude')
place_attributes['lonlat'] = "POINT(#{longitude} #{latitude})"
place_attributes['latitude'] = latitude
place_attributes['longitude'] = longitude
place_attributes.delete('user')
Rails.logger.debug "Creating place with attributes: #{place_attributes.inspect}"
logger.debug "Creating place with attributes: #{place_attributes.inspect}"
begin
place = Place.create!(place_attributes)
place.define_singleton_method(:previously_new_record?) { true }
Rails.logger.debug "Created place during import: #{place.name} (ID: #{place.id})"
logger.debug "Created place during import: #{place.name} (ID: #{place.id})"
place
rescue ActiveRecord::RecordInvalid => e
Rails.logger.error "Failed to create place: #{place_data.inspect}, error: #{e.message}"
logger.error "Failed to create place: #{place_data.inspect}, error: #{e.message}"
nil
end
end

View file

@ -1,96 +1,163 @@
# frozen_string_literal: true
class Users::ImportData::Points
BATCH_SIZE = 1000
require 'time'
def initialize(user, points_data)
class Users::ImportData::Points
BATCH_SIZE = 5000
def initialize(user, points_data = nil, batch_size: BATCH_SIZE, logger: Rails.logger)
@user = user
@points_data = points_data
@batch_size = batch_size
@logger = logger
@buffer = []
@total_created = 0
@processed_count = 0
@skipped_count = 0
@preloaded = false
@imports_lookup = {}
@countries_lookup = {}
@visits_lookup = {}
end
def call
return 0 unless points_data.is_a?(Array)
return 0 unless points_data.respond_to?(:each)
Rails.logger.info "Importing #{points_data.size} points for user: #{user.email}"
Rails.logger.debug "First point sample: #{points_data.first.inspect}"
logger.info "Importing #{collection_description(points_data)} points for user: #{user.email}"
preload_reference_data
valid_points = filter_and_prepare_points
if valid_points.empty?
Rails.logger.warn "No valid points to import after filtering"
Rails.logger.debug "Original points_data size: #{points_data.size}"
return 0
enumerate(points_data) do |point_data|
add(point_data)
end
deduplicated_points = deduplicate_points(valid_points)
finalize
end
Rails.logger.info "Prepared #{deduplicated_points.size} unique valid points (#{points_data.size - deduplicated_points.size} duplicates/invalid skipped)"
# Allows streamed usage by pushing a single point at a time.
def add(point_data)
preload_reference_data unless @preloaded
total_created = bulk_import_points(deduplicated_points)
if valid_point_data?(point_data)
prepared_attributes = prepare_point_attributes(point_data)
Rails.logger.info "Points import completed. Created: #{total_created}"
total_created
if prepared_attributes
@buffer << prepared_attributes
@processed_count += 1
flush_batch if @buffer.size >= batch_size
else
@skipped_count += 1
end
else
@skipped_count += 1
logger.debug "Skipped point: invalid data - #{point_data.inspect}"
end
end
# Flushes any buffered rows and reports the final tally.
# Safe to call when nothing was added: preload_reference_data returns early
# once @preloaded is set, and flush_batch is a no-op on an empty buffer.
# Returns the number of rows actually inserted (duplicate rows are skipped
# by the upsert, so this can be lower than @processed_count).
def finalize
  preload_reference_data unless @preloaded
  flush_batch
  logger.info "Points import completed. Created: #{@total_created}. Processed #{@processed_count} valid points, skipped #{@skipped_count}."
  @total_created
end
private
attr_reader :user, :points_data, :imports_lookup, :countries_lookup, :visits_lookup
attr_reader :user, :points_data, :batch_size, :logger, :imports_lookup, :countries_lookup, :visits_lookup
# Yields every element of +collection+ to the given block.
# NOTE(review): presumably kept as an indirection point so streamed sources
# can plug in a different enumeration strategy — confirm with callers.
def enumerate(collection, &block)
  collection.each { |element| block.call(element) }
end
# Describes the input for log lines: the element count when the source can
# report one, otherwise the literal tag 'streamed' (e.g. for lazy readers).
def collection_description(collection)
  collection.respond_to?(:size) ? collection.size : 'streamed'
end
# Bulk-inserts the buffered point attributes in a single upsert.
# Rows colliding on (lonlat, timestamp, user_id) are skipped, so re-importing
# the same archive does not duplicate points. The buffer is always cleared
# (ensure), even when the upsert fails, so one bad batch cannot wedge the
# stream or be retried forever.
def flush_batch
  return if @buffer.empty?

  logger.debug "Processing batch of #{@buffer.size} points"
  logger.debug "First point in batch: #{@buffer.first.inspect}"

  # Give every row an identical key set — upsert_all expects homogeneous rows.
  normalized_batch = normalize_point_keys(@buffer)

  begin
    result = Point.upsert_all(
      normalized_batch,
      unique_by: %i[lonlat timestamp user_id],
      returning: %w[id],
      on_duplicate: :skip
    )
    # result may be nil depending on adapter/returning support; nil.to_i == 0.
    batch_created = result&.count.to_i
    @total_created += batch_created
    logger.debug "Processed batch of #{@buffer.size} points, created #{batch_created}, total created: #{@total_created}"
  rescue StandardError => e
    logger.error "Failed to process point batch: #{e.message}"
    logger.error "Batch size: #{@buffer.size}"
    logger.error "First point in failed batch: #{@buffer.first.inspect}"
    # Fix: '\n' (single-quoted) is a literal backslash-n, which glued the
    # backtrace onto one line; "\n" emits real newlines. backtrace can also
    # be nil, which would raise inside this rescue — guard with &.
    logger.error "Backtrace: #{e.backtrace&.first(5)&.join("\n")}"
  ensure
    @buffer.clear
  end
end
def preload_reference_data
return if @preloaded
logger.debug 'Preloading reference data for points import'
@imports_lookup = {}
user.imports.each do |import|
user.imports.reload.each do |import|
string_key = [import.name, import.source, import.created_at.utc.iso8601]
integer_key = [import.name, Import.sources[import.source], import.created_at.utc.iso8601]
@imports_lookup[string_key] = import
@imports_lookup[integer_key] = import
end
Rails.logger.debug "Loaded #{user.imports.size} imports with #{@imports_lookup.size} lookup keys"
logger.debug "Loaded #{user.imports.size} imports with #{@imports_lookup.size} lookup keys"
@countries_lookup = {}
Country.all.each do |country|
@countries_lookup[[country.name, country.iso_a2, country.iso_a3]] = country
@countries_lookup[country.name] = country
end
Rails.logger.debug "Loaded #{Country.count} countries for lookup"
logger.debug "Loaded #{Country.count} countries for lookup"
@visits_lookup = user.visits.index_by { |visit|
@visits_lookup = user.visits.reload.index_by do |visit|
[visit.name, visit.started_at.utc.iso8601, visit.ended_at.utc.iso8601]
}
Rails.logger.debug "Loaded #{@visits_lookup.size} visits for lookup"
end
logger.debug "Loaded #{@visits_lookup.size} visits for lookup"
@preloaded = true
end
def filter_and_prepare_points
valid_points = []
skipped_count = 0
def normalize_point_keys(points)
all_keys = points.flat_map(&:keys).uniq
points_data.each_with_index do |point_data, index|
next unless point_data.is_a?(Hash)
unless valid_point_data?(point_data)
skipped_count += 1
Rails.logger.debug "Skipped point #{index}: invalid data - #{point_data.slice('timestamp', 'longitude', 'latitude', 'lonlat')}"
next
points.map do |point|
all_keys.each_with_object({}) do |key, normalized|
normalized[key] = point[key]
end
prepared_attributes = prepare_point_attributes(point_data)
unless prepared_attributes
skipped_count += 1
Rails.logger.debug "Skipped point #{index}: failed to prepare attributes"
next
end
valid_points << prepared_attributes
end
end
if skipped_count > 0
Rails.logger.warn "Skipped #{skipped_count} points with invalid or missing required data"
end
def valid_point_data?(point_data)
return false unless point_data.is_a?(Hash)
return false unless point_data['timestamp'].present?
Rails.logger.debug "Filtered #{valid_points.size} valid points from #{points_data.size} total"
valid_points
has_lonlat = point_data['lonlat'].present? && point_data['lonlat'].is_a?(String) && point_data['lonlat'].start_with?('POINT(')
has_coordinates = point_data['longitude'].present? && point_data['latitude'].present?
has_lonlat || has_coordinates
rescue StandardError => e
logger.debug "Point validation failed: #{e.message} for data: #{point_data.inspect}"
false
end
def prepare_point_attributes(point_data)
@ -118,15 +185,14 @@ class Users::ImportData::Points
result = attributes.symbolize_keys
Rails.logger.debug "Prepared point attributes: #{result.slice(:lonlat, :timestamp, :import_id, :country_id, :visit_id)}"
logger.debug "Prepared point attributes: #{result.slice(:lonlat, :timestamp, :import_id, :country_id, :visit_id)}"
result
rescue StandardError => e
ExceptionReporter.call(e, 'Failed to prepare point attributes')
nil
end
def resolve_import_reference(attributes, import_reference)
def resolve_import_reference(attributes, import_reference)
return unless import_reference.is_a?(Hash)
created_at = normalize_timestamp_for_lookup(import_reference['created_at'])
@ -140,10 +206,10 @@ class Users::ImportData::Points
import = imports_lookup[import_key]
if import
attributes['import_id'] = import.id
Rails.logger.debug "Resolved import reference: #{import_reference['name']} -> #{import.id}"
logger.debug "Resolved import reference: #{import_reference['name']} -> #{import.id}"
else
Rails.logger.debug "Import not found for reference: #{import_reference.inspect}"
Rails.logger.debug "Available imports: #{imports_lookup.keys.inspect}"
logger.debug "Import not found for reference: #{import_reference.inspect}"
logger.debug "Available imports: #{imports_lookup.keys.inspect}"
end
end
@ -159,14 +225,12 @@ class Users::ImportData::Points
if country
attributes['country_id'] = country.id
Rails.logger.debug "Resolved country reference: #{country_info['name']} -> #{country.id}"
logger.debug "Resolved country reference: #{country_info['name']} -> #{country.id}"
else
Rails.logger.debug "Country not found for: #{country_info.inspect}"
logger.debug "Country not found for: #{country_info.inspect}"
end
end
def resolve_visit_reference(attributes, visit_reference)
return unless visit_reference.is_a?(Hash)
@ -182,84 +246,19 @@ class Users::ImportData::Points
visit = visits_lookup[visit_key]
if visit
attributes['visit_id'] = visit.id
Rails.logger.debug "Resolved visit reference: #{visit_reference['name']} -> #{visit.id}"
logger.debug "Resolved visit reference: #{visit_reference['name']} -> #{visit.id}"
else
Rails.logger.debug "Visit not found for reference: #{visit_reference.inspect}"
Rails.logger.debug "Available visits: #{visits_lookup.keys.inspect}"
logger.debug "Visit not found for reference: #{visit_reference.inspect}"
logger.debug "Available visits: #{visits_lookup.keys.inspect}"
end
end
def deduplicate_points(points)
points.uniq { |point| [point[:lonlat], point[:timestamp], point[:user_id]] }
end
def normalize_point_keys(points)
all_keys = points.flat_map(&:keys).uniq
# Normalize each point to have all keys (with nil for missing ones)
points.map do |point|
normalized = {}
all_keys.each do |key|
normalized[key] = point[key]
end
normalized
end
end
def bulk_import_points(points)
total_created = 0
points.each_slice(BATCH_SIZE) do |batch|
begin
Rails.logger.debug "Processing batch of #{batch.size} points"
Rails.logger.debug "First point in batch: #{batch.first.inspect}"
normalized_batch = normalize_point_keys(batch)
result = Point.upsert_all(
normalized_batch,
unique_by: %i[lonlat timestamp user_id],
returning: %w[id],
on_duplicate: :skip
)
batch_created = result.count
total_created += batch_created
Rails.logger.debug "Processed batch of #{batch.size} points, created #{batch_created}, total created: #{total_created}"
rescue StandardError => e
Rails.logger.error "Failed to process point batch: #{e.message}"
Rails.logger.error "Batch size: #{batch.size}"
Rails.logger.error "First point in failed batch: #{batch.first.inspect}"
Rails.logger.error "Backtrace: #{e.backtrace.first(5).join('\n')}"
end
end
total_created
end
def valid_point_data?(point_data)
return false unless point_data.is_a?(Hash)
return false unless point_data['timestamp'].present?
has_lonlat = point_data['lonlat'].present? && point_data['lonlat'].is_a?(String) && point_data['lonlat'].start_with?('POINT(')
has_coordinates = point_data['longitude'].present? && point_data['latitude'].present?
return false unless has_lonlat || has_coordinates
true
rescue StandardError => e
Rails.logger.debug "Point validation failed: #{e.message} for data: #{point_data.inspect}"
false
end
def ensure_lonlat_field(attributes, point_data)
if attributes['lonlat'].blank? && point_data['longitude'].present? && point_data['latitude'].present?
longitude = point_data['longitude'].to_f
latitude = point_data['latitude'].to_f
attributes['lonlat'] = "POINT(#{longitude} #{latitude})"
Rails.logger.debug "Reconstructed lonlat: #{attributes['lonlat']}"
logger.debug "Reconstructed lonlat: #{attributes['lonlat']}"
end
end
@ -275,7 +274,7 @@ class Users::ImportData::Points
timestamp.to_s
end
rescue StandardError => e
Rails.logger.debug "Failed to normalize timestamp #{timestamp}: #{e.message}"
logger.debug "Failed to normalize timestamp #{timestamp}: #{e.message}"
timestamp.to_s
end
end

View file

@ -71,16 +71,16 @@ module Visits
end
def create_visit(place)
started_at = DateTime.parse(params[:started_at])
ended_at = DateTime.parse(params[:ended_at])
duration_minutes = (ended_at - started_at) * 24 * 60
started_at = Time.zone.parse(params[:started_at])
ended_at = Time.zone.parse(params[:ended_at])
duration_minutes = ((ended_at - started_at) / 60).to_i
@visit = user.visits.create!(
name: params[:name],
place: place,
started_at: started_at,
ended_at: ended_at,
duration: duration_minutes.to_i,
duration: duration_minutes,
status: :confirmed
)

View file

@ -11,12 +11,13 @@
<div class="stat-value text-lg"><%= trip_duration(trip) %></div>
</div>
</div>
<div class="card bg-base-200 shadow-lg">
<div class="card bg-base-200 shadow-lg cursor-pointer hover:bg-base-300 transition-colors"
onclick="countries_modal_<%= trip.id %>.showModal()">
<div class="card-body p-4">
<div class="stat-title text-xs">Countries</div>
<div class="stat-value text-lg">
<% if trip.visited_countries.any? %>
<%= trip.visited_countries.join(', ') %>
<%= trip.visited_countries.count %>
<% else %>
<span class="loading loading-dots loading-sm"></span>
<% end %>
@ -24,3 +25,27 @@
</div>
</div>
</div>
<!-- Countries Modal -->
<dialog id="countries_modal_<%= trip.id %>" class="modal">
<div class="modal-box">
<form method="dialog">
<button class="btn btn-sm btn-circle btn-ghost absolute right-2 top-2">✕</button>
</form>
<h3 class="font-bold text-lg mb-4">Visited Countries</h3>
<% if trip.visited_countries.any? %>
<div class="space-y-2">
<% trip.visited_countries.sort.each do |country| %>
<div class="p-3 bg-base-200 rounded-lg">
<%= country %>
</div>
<% end %>
</div>
<% else %>
<p class="text-base-content/70">No countries data available yet.</p>
<% end %>
</div>
<form method="dialog" class="modal-backdrop">
<button>close</button>
</form>
</dialog>

View file

@ -29,7 +29,7 @@ Rails.application.configure do
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
config.assets.compile = true
config.assets.content_type = {
geojson: 'application/geo+json'

View file

@ -29,7 +29,7 @@ Rails.application.configure do
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
config.assets.compile = true
config.assets.content_type = {
geojson: 'application/geo+json'

View file

@ -3,6 +3,7 @@
# Pin npm packages by running ./bin/importmap
pin_all_from 'app/javascript/channels', under: 'channels'
pin_all_from 'app/javascript/maps', under: 'maps'
pin 'application', preload: true
pin '@rails/actioncable', to: 'actioncable.esm.js'

View file

@ -6,6 +6,16 @@
# to asset_path in the _favicon.html.erb partial.
Rails.application.config.assets.configure do |env|
mime_type = 'application/manifest+json'
extensions = ['.webmanifest']
if Sprockets::VERSION.to_i >= 4
extensions << '.webmanifest.erb'
env.register_preprocessor(mime_type, Sprockets::ERBProcessor)
end
env.register_mime_type(mime_type, extensions: extensions)
# Register .webmanifest files with the correct MIME type
env.register_mime_type 'application/manifest+json', extensions: ['.webmanifest']
# env.register_mime_type 'application/manifest+json', extensions: ['.webmanifest']
end

View file

@ -12,7 +12,9 @@ ENV SIDEKIQ_PASSWORD=password
# Resolving sqlite3 error
ENV PGSSENCMODE=disable
RUN apt-get update -qq && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
RUN apt-get update -qq \
&& DEBIAN_FRONTEND=noninteractive apt-get upgrade -y -qq \
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
curl \
wget \
build-essential \

View file

@ -7,7 +7,9 @@ ENV RAILS_LOG_TO_STDOUT=true
ENV RAILS_PORT=3000
ENV RAILS_ENV=production
RUN apt-get update -qq && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
RUN apt-get update -qq \
&& DEBIAN_FRONTEND=noninteractive apt-get upgrade -y -qq \
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
curl \
wget \
build-essential \

View file

@ -0,0 +1,79 @@
# frozen_string_literal: true
# Streaming JSON handler relays sections and streamed values back to the importer instance.
# SAX-style handler for Oj.saj_parse. It rebuilds nested hashes/arrays on an
# explicit stack and relays completed values to +processor+:
#   * top-level keys listed in Users::ImportData::STREAMED_SECTIONS have their
#     array elements forwarded one at a time (handle_stream_value) instead of
#     being accumulated in memory, followed by finish_stream;
#   * every other top-level key is buffered fully and delivered once via
#     handle_section.
class JsonStreamHandler < Oj::Saj
  # Stack frames: a hash being built (root marks the document's top object),
  # an array being built, or a streamed top-level array (no accumulation).
  HashState = Struct.new(:hash, :root, :key)
  ArrayState = Struct.new(:array, :key)
  StreamState = Struct.new(:key)

  # processor — object receiving handle_section / handle_stream_value /
  # finish_stream (sent via #send because those methods are private there).
  def initialize(processor)
    super()
    @processor = processor
    @stack = []
  end

  # Opens a hash; it is the document root iff the stack is empty.
  def hash_start(key = nil, *_)
    state = HashState.new({}, @stack.empty?, normalize_key(key))
    @stack << state
  end

  # Closes the current hash and hands it to its parent frame.
  # Falls back to the key recorded at hash_start when Oj omits it here.
  def hash_end(key = nil, *_)
    state = @stack.pop
    value = state.hash
    parent = @stack.last
    dispatch_to_parent(parent, value, normalize_key(key) || state.key)
  end

  # Opens an array. Direct children of the root hash whose key is a streamed
  # section get a StreamState (elements relayed immediately); everything else
  # is accumulated in an ArrayState.
  def array_start(key = nil, *_)
    normalized_key = normalize_key(key)
    parent = @stack.last
    if parent.is_a?(HashState) && parent.root && @stack.size == 1 && Users::ImportData::STREAMED_SECTIONS.include?(normalized_key)
      @stack << StreamState.new(normalized_key)
    else
      @stack << ArrayState.new([], normalized_key)
    end
  end

  # Closes the current array: streamed sections signal finish_stream,
  # buffered arrays are dispatched to their parent like any other value.
  def array_end(key = nil, *_)
    state = @stack.pop
    case state
    when StreamState
      @processor.send(:finish_stream, state.key)
    when ArrayState
      value = state.array
      parent = @stack.last
      dispatch_to_parent(parent, value, normalize_key(key) || state.key)
    end
  end

  # Scalar value callback (strings, numbers, booleans, nulls).
  def add_value(value, key)
    parent = @stack.last
    dispatch_to_parent(parent, value, normalize_key(key))
  end

  private

  # Oj may hand keys through as symbols or nil; normalize to String (or nil).
  def normalize_key(key)
    key&.to_s
  end

  # Routes a finished value into the frame that owns it:
  #   root hash (stack depth 1)  -> handle_section(key, value)
  #   nested hash                -> stored under key
  #   array                      -> appended
  #   streamed section           -> handle_stream_value(section_key, value)
  # A nil parent (value above the root) is dropped.
  def dispatch_to_parent(parent, value, key)
    return unless parent

    case parent
    when HashState
      if parent.root && @stack.size == 1
        @processor.send(:handle_section, key, value)
      else
        parent.hash[key] = value
      end
    when ArrayState
      parent.array << value
    when StreamState
      @processor.send(:handle_stream_value, parent.key, value)
    end
  end
end

View file

@ -38,6 +38,6 @@ namespace :webmanifest do
end
# Hook to automatically generate webmanifest after assets:precompile
Rake::Task['assets:precompile'].enhance do
Rake::Task['webmanifest:generate'].invoke
end
# Rake::Task['assets:precompile'].enhance do
# Rake::Task['webmanifest:generate'].invoke
# end

View file

@ -0,0 +1,58 @@
# frozen_string_literal: true
require 'rails_helper'
require 'oj'
# Drives JsonStreamHandler through a realistic export payload and verifies the
# split between streamed sections (places/visits/points: element-by-element via
# handle_stream_value + finish_stream) and buffered sections (counts/settings/
# areas: delivered whole via handle_section).
RSpec.describe JsonStreamHandler do
  let(:processor) { double('StreamProcessor') }
  let(:handler) { described_class.new(processor) }

  # Minimal export document: two buffered hash sections, one buffered array
  # section, and one entry per streamed section.
  let(:payload) do
    {
      'counts' => { 'places' => 2, 'visits' => 1, 'points' => 1 },
      'settings' => { 'theme' => 'dark' },
      'areas' => [{ 'name' => 'Home' }],
      'places' => [
        { 'name' => 'Cafe', 'latitude' => 1.0, 'longitude' => 2.0 },
        { 'name' => 'Library', 'latitude' => 3.0, 'longitude' => 4.0 }
      ],
      'visits' => [
        {
          'name' => 'Morning Coffee',
          'started_at' => '2025-01-01T09:00:00Z',
          'ended_at' => '2025-01-01T10:00:00Z'
        }
      ],
      'points' => [
        { 'timestamp' => 1, 'lonlat' => 'POINT(2 1)' }
      ]
    }
  end

  before do
    allow(processor).to receive(:handle_section)
    allow(processor).to receive(:handle_stream_value)
    allow(processor).to receive(:finish_stream)
  end

  it 'streams configured sections and delegates other values immediately' do
    # Round-trip through Oj so the handler sees real SAX callbacks.
    Oj.saj_parse(handler, Oj.dump(payload, mode: :compat))

    # Buffered sections arrive whole.
    expect(processor).to have_received(:handle_section).with('counts', hash_including('places' => 2))
    expect(processor).to have_received(:handle_section).with('settings', hash_including('theme' => 'dark'))
    expect(processor).to have_received(:handle_section).with('areas', [hash_including('name' => 'Home')])
    # Streamed sections arrive one element at a time, then finish.
    expect(processor).to have_received(:handle_stream_value).with('places', hash_including('name' => 'Cafe'))
    expect(processor).to have_received(:handle_stream_value).with('places', hash_including('name' => 'Library'))
    expect(processor).to have_received(:handle_stream_value).with('visits', hash_including('name' => 'Morning Coffee'))
    expect(processor).to have_received(:handle_stream_value).with('points', hash_including('timestamp' => 1))
    expect(processor).to have_received(:finish_stream).with('places')
    expect(processor).to have_received(:finish_stream).with('visits')
    expect(processor).to have_received(:finish_stream).with('points')
    # Streamed sections must never be delivered as whole sections.
    expect(processor).not_to have_received(:handle_section).with('places', anything)
    expect(processor).not_to have_received(:handle_section).with('visits', anything)
    expect(processor).not_to have_received(:handle_section).with('points', anything)
  end
end

View file

@ -262,5 +262,223 @@ RSpec.describe Stat, type: :model do
end
end
end
# Covers the three sharing predicates on Stat, driven purely by the
# sharing_settings JSON column: sharing_enabled? (strict boolean check),
# sharing_expired? (expiration/expires_at interplay, including malformed
# values), and public_accessible? (conjunction of the two).
describe 'sharing settings' do
  let(:user) { create(:user) }
  let(:stat) { create(:stat, year: 2024, month: 6, user: user) }

  describe '#sharing_enabled?' do
    # update_column bypasses validations/casting so the column is truly NULL.
    context 'when sharing_settings is nil' do
      before { stat.update_column(:sharing_settings, nil) }

      it 'returns false' do
        expect(stat.sharing_enabled?).to be false
      end
    end

    context 'when sharing_settings is empty hash' do
      before { stat.update(sharing_settings: {}) }

      it 'returns false' do
        expect(stat.sharing_enabled?).to be false
      end
    end

    context 'when enabled is false' do
      before { stat.update(sharing_settings: { 'enabled' => false }) }

      it 'returns false' do
        expect(stat.sharing_enabled?).to be false
      end
    end

    context 'when enabled is true' do
      before { stat.update(sharing_settings: { 'enabled' => true }) }

      it 'returns true' do
        expect(stat.sharing_enabled?).to be true
      end
    end

    # Only the boolean true counts — truthy strings must not enable sharing.
    context 'when enabled is a string "true"' do
      before { stat.update(sharing_settings: { 'enabled' => 'true' }) }

      it 'returns false (strict boolean check)' do
        expect(stat.sharing_enabled?).to be false
      end
    end
  end

  describe '#sharing_expired?' do
    context 'when sharing_settings is nil' do
      before { stat.update_column(:sharing_settings, nil) }

      it 'returns false' do
        expect(stat.sharing_expired?).to be false
      end
    end

    # No expiration configured means the share never expires.
    context 'when expiration is blank' do
      before { stat.update(sharing_settings: { 'enabled' => true }) }

      it 'returns false' do
        expect(stat.sharing_expired?).to be false
      end
    end

    # An expiration without a computed expires_at is treated as expired
    # (fail closed rather than sharing indefinitely).
    context 'when expiration is present but expires_at is blank' do
      before do
        stat.update(sharing_settings: {
          'enabled' => true,
          'expiration' => '1h'
        })
      end

      it 'returns true' do
        expect(stat.sharing_expired?).to be true
      end
    end

    context 'when expires_at is in the future' do
      before do
        stat.update(sharing_settings: {
          'enabled' => true,
          'expiration' => '1h',
          'expires_at' => 1.hour.from_now.iso8601
        })
      end

      it 'returns false' do
        expect(stat.sharing_expired?).to be false
      end
    end

    context 'when expires_at is in the past' do
      before do
        stat.update(sharing_settings: {
          'enabled' => true,
          'expiration' => '1h',
          'expires_at' => 1.hour.ago.iso8601
        })
      end

      it 'returns true' do
        expect(stat.sharing_expired?).to be true
      end
    end

    # Boundary case: still inside the window by a whisker.
    context 'when expires_at is 1 second in the future' do
      before do
        stat.update(sharing_settings: {
          'enabled' => true,
          'expiration' => '1h',
          'expires_at' => 1.second.from_now.iso8601
        })
      end

      it 'returns false (not yet expired)' do
        expect(stat.sharing_expired?).to be false
      end
    end

    # Unparseable timestamps fail closed.
    context 'when expires_at is invalid date string' do
      before do
        stat.update(sharing_settings: {
          'enabled' => true,
          'expiration' => '1h',
          'expires_at' => 'invalid-date'
        })
      end

      it 'returns true (treats as expired)' do
        expect(stat.sharing_expired?).to be true
      end
    end

    context 'when expires_at is nil' do
      before do
        stat.update(sharing_settings: {
          'enabled' => true,
          'expiration' => '1h',
          'expires_at' => nil
        })
      end

      it 'returns true' do
        expect(stat.sharing_expired?).to be true
      end
    end

    context 'when expires_at is empty string' do
      before do
        stat.update(sharing_settings: {
          'enabled' => true,
          'expiration' => '1h',
          'expires_at' => ''
        })
      end

      it 'returns true' do
        expect(stat.sharing_expired?).to be true
      end
    end
  end

  # public_accessible? == enabled AND not expired.
  describe '#public_accessible?' do
    context 'when sharing_settings is nil' do
      before { stat.update_column(:sharing_settings, nil) }

      it 'returns false' do
        expect(stat.public_accessible?).to be false
      end
    end

    context 'when sharing is not enabled' do
      before { stat.update(sharing_settings: { 'enabled' => false }) }

      it 'returns false' do
        expect(stat.public_accessible?).to be false
      end
    end

    context 'when sharing is enabled but expired' do
      before do
        stat.update(sharing_settings: {
          'enabled' => true,
          'expiration' => '1h',
          'expires_at' => 1.hour.ago.iso8601
        })
      end

      it 'returns false' do
        expect(stat.public_accessible?).to be false
      end
    end

    context 'when sharing is enabled and not expired' do
      before do
        stat.update(sharing_settings: {
          'enabled' => true,
          'expiration' => '1h',
          'expires_at' => 1.hour.from_now.iso8601
        })
      end

      it 'returns true' do
        expect(stat.public_accessible?).to be true
      end
    end

    context 'when sharing is enabled with no expiration' do
      before do
        stat.update(sharing_settings: { 'enabled' => true })
      end

      it 'returns true' do
        expect(stat.public_accessible?).to be true
      end
    end
  end
end
end
end

View file

@ -0,0 +1,54 @@
# frozen_string_literal: true
require 'rails_helper'
# Exercises the streaming Places importer: buffering via #add, explicit and
# automatic flushing, idempotent #finalize, and skipping of invalid records.
RSpec.describe Users::ImportData::Places do
  let(:user) { create(:user) }
  # Null logger so the service's info/debug/error calls don't pollute output.
  let(:logger) { instance_double(Logger, info: nil, debug: nil, error: nil) }
  # points_data is nil — records are fed one at a time through #add.
  let(:service) { described_class.new(user, nil, logger: logger) }

  describe '#add / #finalize' do
    it 'creates places in batches and tracks total created' do
      2.times do |index|
        service.add(
          'name' => "Place #{index}",
          'latitude' => 10.0 + index,
          'longitude' => 20.0 + index
        )
      end

      expect { service.finalize }.to change(Place, :count).by(2)
      # Finalize is idempotent: the second call reports the same total
      # without creating anything new.
      expect { expect(service.finalize).to eq(2) }.not_to change(Place, :count)
    end

    it 'flushes automatically when the buffer reaches the batch size' do
      stub_const('Users::ImportData::Places::BATCH_SIZE', 2)
      logger_double = instance_double(Logger)
      allow(logger_double).to receive(:info)
      allow(logger_double).to receive(:debug)
      allow(logger_double).to receive(:error)

      buffered_service = described_class.new(user, nil, batch_size: 2, logger: logger_double)

      # First record only buffers...
      buffered_service.add('name' => 'First', 'latitude' => 1, 'longitude' => 2)
      expect(Place.count).to eq(0)
      # ...second record hits the batch size and triggers the flush.
      buffered_service.add('name' => 'Second', 'latitude' => 3, 'longitude' => 4)
      expect(Place.count).to eq(2)

      expect(buffered_service.finalize).to eq(2)
      expect { buffered_service.finalize }.not_to change(Place, :count)
    end

    it 'skips invalid records and logs debug messages' do
      allow(logger).to receive(:debug)

      service.add('name' => 'Valid', 'latitude' => 1, 'longitude' => 2)
      # Missing latitude/longitude — must be skipped, not raise.
      service.add('name' => 'Missing coords')

      expect(service.finalize).to eq(1)
      expect(logger).to have_received(:debug).with(/Skipping place with missing required data/)
    end
  end
end

View file

@ -1,6 +1,8 @@
# frozen_string_literal: true
require 'rails_helper'
require 'tmpdir'
require 'oj'
RSpec.describe Users::ImportData, type: :service do
let(:user) { create(:user) }
@ -12,122 +14,59 @@ RSpec.describe Users::ImportData, type: :service do
allow(Time).to receive(:current).and_return(Time.zone.at(1234567890))
allow(FileUtils).to receive(:mkdir_p)
allow(FileUtils).to receive(:rm_rf)
allow(File).to receive(:directory?).and_return(true)
allow_any_instance_of(Pathname).to receive(:exist?).and_return(true)
end
describe '#import' do
let(:sample_data) do
{
'counts' => {
'areas' => 2,
'places' => 3,
'imports' => 1,
'exports' => 1,
'trips' => 2,
'stats' => 1,
'notifications' => 2,
'visits' => 4,
'points' => 1000
},
'settings' => { 'theme' => 'dark' },
'areas' => [{ 'name' => 'Home', 'latitude' => '40.7128', 'longitude' => '-74.0060' }],
'places' => [{ 'name' => 'Office', 'latitude' => '40.7589', 'longitude' => '-73.9851' }],
'imports' => [{ 'name' => 'test.json', 'source' => 'owntracks' }],
'exports' => [{ 'name' => 'export.json', 'status' => 'completed' }],
'trips' => [{ 'name' => 'Trip to NYC', 'distance' => 100.5 }],
'stats' => [{ 'year' => 2024, 'month' => 1, 'distance' => 456.78 }],
'notifications' => [{ 'title' => 'Test', 'content' => 'Test notification' }],
'visits' => [{ 'name' => 'Work Visit', 'duration' => 3600 }],
'points' => [{ 'latitude' => 40.7128, 'longitude' => -74.0060, 'timestamp' => 1234567890 }]
}
end
let(:notification_double) { instance_double(::Notifications::Create, call: true) }
before do
# Mock ZIP file extraction
zipfile_mock = double('ZipFile')
allow(zipfile_mock).to receive(:each)
allow(Zip::File).to receive(:open).with(archive_path).and_yield(zipfile_mock)
# Mock JSON loading and File operations
allow(File).to receive(:exist?).and_return(false)
allow(File).to receive(:exist?).with(import_directory.join('data.json')).and_return(true)
allow(File).to receive(:read).with(import_directory.join('data.json')).and_return(sample_data.to_json)
# Mock all import services
allow(Users::ImportData::Settings).to receive(:new).and_return(double(call: true))
allow(Users::ImportData::Areas).to receive(:new).and_return(double(call: 2))
allow(Users::ImportData::Places).to receive(:new).and_return(double(call: 3))
allow(Users::ImportData::Imports).to receive(:new).and_return(double(call: [1, 5]))
allow(Users::ImportData::Exports).to receive(:new).and_return(double(call: [1, 2]))
allow(Users::ImportData::Trips).to receive(:new).and_return(double(call: 2))
allow(Users::ImportData::Stats).to receive(:new).and_return(double(call: 1))
allow(Users::ImportData::Notifications).to receive(:new).and_return(double(call: 2))
allow(Users::ImportData::Visits).to receive(:new).and_return(double(call: 4))
allow(Users::ImportData::Points).to receive(:new).and_return(double(call: 1000))
# Mock notifications
allow(::Notifications::Create).to receive(:new).and_return(double(call: true))
# Mock cleanup
allow(::Notifications::Create).to receive(:new).and_return(notification_double)
allow(service).to receive(:cleanup_temporary_files)
allow_any_instance_of(Pathname).to receive(:exist?).and_return(true)
end
context 'when import is successful' do
it 'creates import directory' do
context 'when import succeeds' do
before do
allow(service).to receive(:extract_archive)
allow(service).to receive(:process_archive_data) do
stats = service.instance_variable_get(:@import_stats)
stats[:settings_updated] = true
stats[:areas_created] = 2
stats[:places_created] = 3
stats[:imports_created] = 1
stats[:exports_created] = 1
stats[:trips_created] = 2
stats[:stats_created] = 1
stats[:notifications_created] = 2
stats[:visits_created] = 4
stats[:points_created] = 1000
stats[:files_restored] = 7
end
end
it 'creates the import directory' do
expect(FileUtils).to receive(:mkdir_p).with(import_directory)
service.import
end
it 'extracts the archive' do
expect(Zip::File).to receive(:open).with(archive_path)
it 'extracts the archive and processes data' do
expect(service).to receive(:extract_archive).ordered
expect(service).to receive(:process_archive_data).ordered
service.import
end
it 'loads JSON data from extracted files' do
expect(File).to receive(:exist?).with(import_directory.join('data.json'))
expect(File).to receive(:read).with(import_directory.join('data.json'))
service.import
end
it 'calls all import services in correct order' do
expect(Users::ImportData::Settings).to receive(:new).with(user, sample_data['settings']).ordered
expect(Users::ImportData::Areas).to receive(:new).with(user, sample_data['areas']).ordered
expect(Users::ImportData::Places).to receive(:new).with(user, sample_data['places']).ordered
expect(Users::ImportData::Imports).to receive(:new).with(user, sample_data['imports'], import_directory.join('files')).ordered
expect(Users::ImportData::Exports).to receive(:new).with(user, sample_data['exports'], import_directory.join('files')).ordered
expect(Users::ImportData::Trips).to receive(:new).with(user, sample_data['trips']).ordered
expect(Users::ImportData::Stats).to receive(:new).with(user, sample_data['stats']).ordered
expect(Users::ImportData::Notifications).to receive(:new).with(user, sample_data['notifications']).ordered
expect(Users::ImportData::Visits).to receive(:new).with(user, sample_data['visits']).ordered
expect(Users::ImportData::Points).to receive(:new).with(user, sample_data['points']).ordered
service.import
end
it 'creates success notification with import stats' do
it 'creates a success notification with summary' do
expect(::Notifications::Create).to receive(:new).with(
user: user,
title: 'Data import completed',
content: match(/1000 points.*4 visits.*3 places.*2 trips/),
content: include('1000 points, 4 visits, 3 places, 2 trips'),
kind: :info
)
service.import
end
it 'cleans up temporary files' do
expect(service).to receive(:cleanup_temporary_files).with(import_directory)
service.import
end
it 'returns import statistics' do
result = service.import
expect(result).to include(
settings_updated: true,
areas_created: 2,
@ -142,53 +81,18 @@ RSpec.describe Users::ImportData, type: :service do
files_restored: 7
)
end
it 'logs expected counts if available' do
allow(Rails.logger).to receive(:info) # Allow other log messages
expect(Rails.logger).to receive(:info).with(/Expected entity counts from export:/)
service.import
end
end
context 'when JSON file is missing' do
context 'when an error happens during processing' do
let(:error_message) { 'boom' }
before do
allow(File).to receive(:exist?).and_return(false)
allow(File).to receive(:exist?).with(import_directory.join('data.json')).and_return(false)
allow(service).to receive(:extract_archive)
allow(service).to receive(:process_archive_data).and_raise(StandardError, error_message)
allow(ExceptionReporter).to receive(:call)
end
it 'raises an error' do
expect { service.import }.to raise_error(StandardError, 'Data file not found in archive: data.json')
end
end
context 'when JSON is invalid' do
before do
allow(File).to receive(:exist?).and_return(false)
allow(File).to receive(:exist?).with(import_directory.join('data.json')).and_return(true)
allow(File).to receive(:read).with(import_directory.join('data.json')).and_return('invalid json')
allow(ExceptionReporter).to receive(:call)
end
it 'raises a JSON parse error' do
expect { service.import }.to raise_error(StandardError, /Invalid JSON format in data file/)
end
end
context 'when an error occurs during import' do
let(:error_message) { 'Something went wrong' }
before do
allow(File).to receive(:exist?).and_return(false)
allow(File).to receive(:exist?).with(import_directory.join('data.json')).and_return(true)
allow(File).to receive(:read).with(import_directory.join('data.json')).and_return(sample_data.to_json)
allow(Users::ImportData::Settings).to receive(:new).and_raise(StandardError, error_message)
allow(ExceptionReporter).to receive(:call)
allow(::Notifications::Create).to receive(:new).and_return(double(call: true))
end
it 'creates failure notification' do
it 'creates a failure notification and re-raises the error' do
expect(::Notifications::Create).to receive(:new).with(
user: user,
title: 'Data import failed',
# @ -198,100 +102,99 @@ RSpec.describe Users::ImportData, type: :service do  (diff hunk header — viewer artifact, not source)
expect { service.import }.to raise_error(StandardError, error_message)
end
# Errors must be routed through the central ExceptionReporter before re-raising.
it 'reports error via ExceptionReporter' do
expect(ExceptionReporter).to receive(:call).with(
an_instance_of(StandardError),
'Data import failed'
)
expect { service.import }.to raise_error(StandardError, error_message)
end
# Cleanup must run even on the failure path (ensure-style behavior).
it 'still cleans up temporary files' do
expect(service).to receive(:cleanup_temporary_files)
expect { service.import }.to raise_error(StandardError, error_message)
end
it 're-raises the error' do
expect { service.import }.to raise_error(StandardError, error_message)
end
end
# A payload containing only 'settings' should invoke just the Settings
# importer and skip the importers for absent sections.
context 'when data sections are missing' do
let(:minimal_data) { { 'settings' => { 'theme' => 'dark' } } }
before do
# Reset JSON file mocking
allow(File).to receive(:exist?).and_return(false)
allow(File).to receive(:exist?).with(import_directory.join('data.json')).and_return(true)
allow(File).to receive(:read).with(import_directory.join('data.json')).and_return(minimal_data.to_json)
# Only expect Settings to be called
allow(Users::ImportData::Settings).to receive(:new).and_return(double(call: true))
allow(::Notifications::Create).to receive(:new).and_return(double(call: true))
end
it 'only imports available sections' do
expect(Users::ImportData::Settings).to receive(:new).with(user, minimal_data['settings'])
expect(Users::ImportData::Areas).not_to receive(:new)
expect(Users::ImportData::Places).not_to receive(:new)
service.import
end
end
end
# NOTE(review): from here to the end of this spec the diff interleaves the
# old '#cleanup_temporary_files' examples with the new '#process_archive_data'
# examples — block nesting is not balanced in this view.
describe 'private methods' do
describe '#cleanup_temporary_files' do
context 'when directory exists' do
before do
allow(File).to receive(:directory?).and_return(true)
allow(Rails.logger).to receive(:info)
end
describe '#process_archive_data' do
# Real temp dir on disk: the streaming reader needs an actual data.json file.
let(:tmp_dir) { Pathname.new(Dir.mktmpdir) }
let(:json_path) { tmp_dir.join('data.json') }
# Accumulators used by the stubs below to capture streamed batches.
let(:places_calls) { [] }
let(:visits_batches) { [] }
let(:points_ingested) { [] }
let(:points_importer) do
# finalize: 2 — the importer reports two points created after streaming.
instance_double(Users::ImportData::Points, add: nil, finalize: 2)
end
# NOTE(review): the next three lines splice an old cleanup example into the
# new before-block opener — diff interleaving, verify against merged file.
it 'removes the directory' do
expect(FileUtils).to receive(:rm_rf).with(import_directory)
before do
# Minimal export payload: empty sections plus two places, two visits and
# two points, with matching 'counts' metadata.
payload = {
'counts' => { 'places' => 2, 'visits' => 2, 'points' => 2 },
'settings' => { 'theme' => 'dark' },
'areas' => [],
'imports' => [],
'exports' => [],
'trips' => [],
'stats' => [],
'notifications' => [],
'places' => [
{ 'name' => 'Cafe', 'latitude' => 1.0, 'longitude' => 2.0 },
{ 'name' => 'Library', 'latitude' => 3.0, 'longitude' => 4.0 }
],
'visits' => [
{
'name' => 'Morning Coffee',
'started_at' => '2025-01-01T09:00:00Z',
'ended_at' => '2025-01-01T10:00:00Z'
},
{
'name' => 'Study Time',
'started_at' => '2025-01-01T12:00:00Z',
'ended_at' => '2025-01-01T14:00:00Z'
}
],
'points' => [
{ 'timestamp' => 1, 'lonlat' => 'POINT(2 1)' },
{ 'timestamp' => 2, 'lonlat' => 'POINT(4 3)' }
]
}
service.send(:cleanup_temporary_files, import_directory)
end
# :compat mode keeps Oj output interchangeable with plain JSON.
File.write(json_path, Oj.dump(payload, mode: :compat))
it 'logs the cleanup' do
expect(Rails.logger).to receive(:info).with("Cleaning up temporary import directory: #{import_directory}")
# Point the service at the temp dir so #process_archive_data reads our file.
service.instance_variable_set(:@import_directory, tmp_dir)
service.send(:cleanup_temporary_files, import_directory)
end
# Stub every section importer; scalar-count importers return fixed values,
# Imports/Exports return [created, skipped]-style pairs.
allow(Users::ImportData::Settings).to receive(:new).and_return(double(call: true))
allow(Users::ImportData::Areas).to receive(:new).and_return(double(call: 0))
allow(Users::ImportData::Imports).to receive(:new).and_return(double(call: [0, 0]))
allow(Users::ImportData::Exports).to receive(:new).and_return(double(call: [0, 0]))
allow(Users::ImportData::Trips).to receive(:new).and_return(double(call: 0))
allow(Users::ImportData::Stats).to receive(:new).and_return(double(call: 0))
allow(Users::ImportData::Notifications).to receive(:new).and_return(double(call: 0))
# Capture each streamed places batch and report its size as created count.
allow(Users::ImportData::Places).to receive(:new) do |_, batch|
places_calls << batch
double(call: batch.size)
end
# NOTE(review): the two cleanup contexts below are old-hunk lines spliced
# between the new streaming stubs — diff interleaving.
context 'when cleanup fails' do
before do
allow(File).to receive(:directory?).and_return(true)
allow(FileUtils).to receive(:rm_rf).and_raise(StandardError, 'Permission denied')
allow(ExceptionReporter).to receive(:call)
end
it 'reports error via ExceptionReporter but does not re-raise' do
expect(ExceptionReporter).to receive(:call).with(
an_instance_of(StandardError),
'Failed to cleanup temporary files'
)
expect { service.send(:cleanup_temporary_files, import_directory) }.not_to raise_error
end
# Capture each streamed visits batch, mirroring the places stub above.
allow(Users::ImportData::Visits).to receive(:new) do |_, batch|
visits_batches << batch
double(call: batch.size)
end
context 'when directory does not exist' do
before do
allow(File).to receive(:directory?).and_return(false)
end
it 'does not attempt cleanup' do
expect(FileUtils).not_to receive(:rm_rf)
service.send(:cleanup_temporary_files, import_directory)
end
# Record each point handed to the streaming importer via #add.
allow(points_importer).to receive(:add) do |point|
points_ingested << point
end
# Streaming mode: points_data is nil and batch_size comes from the
# service's STREAM_BATCH_SIZE constant.
allow(Users::ImportData::Points).to receive(:new) do |_, points_data, batch_size:|
expect(points_data).to be_nil
expect(batch_size).to eq(described_class::STREAM_BATCH_SIZE)
points_importer
end
end
end
after do
FileUtils.remove_entry(tmp_dir)
end
it 'streams sections and updates import stats' do
service.send(:process_archive_data)
expect(places_calls.flatten.map { |place| place['name'] }).to contain_exactly('Cafe', 'Library')
expect(visits_batches.flatten.map { |visit| visit['name'] }).to contain_exactly('Morning Coffee', 'Study Time')
expect(points_ingested.map { |point| point['timestamp'] }).to eq([1, 2])
stats = service.instance_variable_get(:@import_stats)
expect(stats[:places_created]).to eq(2)
expect(stats[:visits_created]).to eq(2)
expect(stats[:points_created]).to eq(2)
end
end
end

# (diff viewer artifact: "View file" boundary between spec files)
# @ -166,6 +166,31 @@ RSpec.describe Visits::Create do  (diff hunk header — viewer artifact, not source)
expect(service.visit.duration).to eq(36 * 60) # 36 hours in minutes
end
end
# Regression coverage for #1679: datetime-local form values carry no zone
# suffix, so Visits::Create must interpret them in the app timezone instead
# of shifting them through UTC.
context 'when datetime-local input is provided without timezone' do
subject(:service) { described_class.new(user, params) }

let(:params) do
valid_params.merge(started_at: '2023-12-01T19:54', ended_at: '2023-12-01T20:54')
end

it 'parses the datetime in the application timezone' do
service.call
visit = service.visit
# Hour/minute must survive parsing unchanged — no timezone shift.
expect([visit.started_at.hour, visit.started_at.min]).to eq([19, 54])
expect([visit.ended_at.hour, visit.ended_at.min]).to eq([20, 54])
end

it 'calculates correct duration' do
service.call
expect(service.visit.duration).to eq(60) # 1 hour in minutes
end
end
end
end
end