Evgenii Burmakin 2025-12-27 12:33:57 +00:00 committed by GitHub
commit 8413e1109d
37 changed files with 913 additions and 96 deletions


@ -1 +1 @@
0.36.4
0.36.5


@ -4,7 +4,23 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
# [0.36.4] - Unreleased
# [0.36.5] - Unreleased
## Changed
- Deleting an import is now processed in the background to prevent request timeouts for large imports.
## Fixed
- Deleting an import no longer results in a negative points count for the user.
- Updating stats now works correctly. #2022
- Validate trip start date to be earlier than end date. #2057
- Fog of war radius slider in map v2 settings is now respected correctly. #2041
- Applying changes in map v2 settings now works correctly. #2041
- Invalidate stats cache on recalculation and other operations that change stats data.
# [0.36.4] - 2025-12-26
## Fixed
@ -14,6 +30,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
- Disable Family::Invitations::CleanupJob when no invitations are in the database. #2043
- User can now enable family layer in Maps v2 and center on family members by clicking their emails. #2036
# [0.36.3] - 2025-12-14
## Added


@ -108,12 +108,12 @@ GEM
aws-eventstream (~> 1, >= 1.0.2)
base64 (0.3.0)
bcrypt (3.1.20)
benchmark (0.4.1)
benchmark (0.5.0)
bigdecimal (3.3.1)
bindata (2.5.1)
bootsnap (1.18.6)
msgpack (~> 1.2)
brakeman (7.1.0)
brakeman (7.1.1)
racc
builder (3.3.0)
bundler-audit (0.9.2)
@ -133,8 +133,8 @@ GEM
chunky_png (1.4.0)
coderay (1.1.3)
concurrent-ruby (1.3.5)
connection_pool (2.5.4)
crack (1.0.0)
connection_pool (2.5.5)
crack (1.0.1)
bigdecimal
rexml
crass (1.0.6)
@ -166,7 +166,7 @@ GEM
drb (2.2.3)
email_validator (2.2.4)
activemodel
erb (5.1.3)
erb (6.0.0)
erubi (1.13.1)
et-orbi (1.4.0)
tzinfo
@ -208,7 +208,7 @@ GEM
ffi (~> 1.9)
rgeo-geojson (~> 2.1)
zeitwerk (~> 2.5)
hashdiff (1.1.2)
hashdiff (1.2.1)
hashie (5.0.0)
httparty (0.23.1)
csv
@ -221,7 +221,7 @@ GEM
activesupport (>= 6.0.0)
railties (>= 6.0.0)
io-console (0.8.1)
irb (1.15.2)
irb (1.15.3)
pp (>= 0.6.0)
rdoc (>= 4.0.0)
reline (>= 0.4.2)
@ -272,7 +272,7 @@ GEM
method_source (1.1.0)
mini_mime (1.1.5)
mini_portile2 (2.8.9)
minitest (5.26.0)
minitest (5.26.2)
msgpack (1.7.3)
multi_json (1.15.0)
multi_xml (0.7.1)
@ -379,14 +379,14 @@ GEM
psych (5.2.6)
date
stringio
public_suffix (6.0.1)
public_suffix (6.0.2)
puma (7.1.0)
nio4r (~> 2.0)
pundit (2.5.2)
activesupport (>= 3.0.0)
raabro (1.4.0)
racc (1.8.1)
rack (3.2.3)
rack (3.2.4)
rack-oauth2 (2.3.0)
activesupport
attr_required
@ -440,16 +440,16 @@ GEM
zeitwerk (~> 2.6)
rainbow (3.1.1)
rake (13.3.1)
rdoc (6.15.0)
rdoc (6.16.1)
erb
psych (>= 4.0.0)
tsort
redis (5.4.0)
redis (5.4.1)
redis-client (>= 0.22.0)
redis-client (0.24.0)
redis-client (0.26.1)
connection_pool
regexp_parser (2.11.3)
reline (0.6.2)
reline (0.6.3)
io-console (~> 0.5)
request_store (1.7.0)
rack (>= 1.4)
@ -525,10 +525,10 @@ GEM
rexml (~> 3.2, >= 3.2.5)
rubyzip (>= 1.2.2, < 4.0)
websocket (~> 1.0)
sentry-rails (6.0.0)
sentry-rails (6.1.1)
railties (>= 5.2.0)
sentry-ruby (~> 6.0.0)
sentry-ruby (6.0.0)
sentry-ruby (~> 6.1.1)
sentry-ruby (6.1.1)
bigdecimal
concurrent-ruby (~> 1.0, >= 1.0.2)
shoulda-matchers (6.5.0)
@ -565,7 +565,7 @@ GEM
stackprof (0.2.27)
stimulus-rails (1.3.4)
railties (>= 6.0.0)
stringio (3.1.7)
stringio (3.1.8)
strong_migrations (2.5.1)
activerecord (>= 7.1)
super_diff (0.17.0)
@ -589,7 +589,7 @@ GEM
thor (1.4.0)
timeout (0.4.4)
tsort (0.2.0)
turbo-rails (2.0.17)
turbo-rails (2.0.20)
actionpack (>= 7.1.0)
railties (>= 7.1.0)
tzinfo (2.0.6)
@ -598,7 +598,7 @@ GEM
unicode-display_width (3.2.0)
unicode-emoji (~> 4.1)
unicode-emoji (4.1.0)
uri (1.0.4)
uri (1.1.1)
useragent (0.16.11)
validate_url (1.0.15)
activemodel (>= 3.0.0)
@ -610,7 +610,7 @@ GEM
activesupport
faraday (~> 2.0)
faraday-follow_redirects
webmock (3.25.1)
webmock (3.26.1)
addressable (>= 2.8.0)
crack (>= 0.3.2)
hashdiff (>= 0.4.0, < 2.0.0)


@ -13,6 +13,7 @@ class Api::V1::PointsController < ApiController
points = current_api_user
.points
.without_raw_data
.where(timestamp: start_at..end_at)
# Filter by geographic bounds if provided


@ -78,9 +78,13 @@ class ImportsController < ApplicationController
end
def destroy
Imports::Destroy.new(current_user, @import).call
@import.deleting!
Imports::DestroyJob.perform_later(@import.id)
redirect_to imports_url, notice: 'Import was successfully destroyed.', status: :see_other
respond_to do |format|
format.html { redirect_to imports_url, notice: 'Import is being deleted.', status: :see_other }
format.turbo_stream
end
end
private


@ -11,9 +11,57 @@ export default class extends BaseController {
connect() {
console.log("Datetime controller connected")
this.debounceTimer = null;
// Add validation listeners
if (this.hasStartedAtTarget && this.hasEndedAtTarget) {
// Validate on change to set validation state
this.startedAtTarget.addEventListener('change', () => this.validateDates())
this.endedAtTarget.addEventListener('change', () => this.validateDates())
// Validate on blur to set validation state
this.startedAtTarget.addEventListener('blur', () => this.validateDates())
this.endedAtTarget.addEventListener('blur', () => this.validateDates())
// Add form submit validation
const form = this.element.closest('form')
if (form) {
form.addEventListener('submit', (e) => {
if (!this.validateDates()) {
e.preventDefault()
this.endedAtTarget.reportValidity()
}
})
}
}
}
async updateCoordinates(event) {
validateDates(showPopup = false) {
const startDate = new Date(this.startedAtTarget.value)
const endDate = new Date(this.endedAtTarget.value)
// Clear any existing custom validity
this.startedAtTarget.setCustomValidity('')
this.endedAtTarget.setCustomValidity('')
// If either date can't be parsed, skip the range check
if (isNaN(startDate.getTime()) || isNaN(endDate.getTime())) {
return true
}
// Validate that start date is before end date
if (startDate >= endDate) {
const errorMessage = 'Start date must be earlier than end date'
this.endedAtTarget.setCustomValidity(errorMessage)
if (showPopup) {
this.endedAtTarget.reportValidity()
}
return false
}
return true
}
async updateCoordinates() {
// Clear any existing timeout
if (this.debounceTimer) {
clearTimeout(this.debounceTimer);
@ -25,6 +73,11 @@ export default class extends BaseController {
const endedAt = this.endedAtTarget.value
const apiKey = this.apiKeyTarget.value
// Validate dates before making the API call (no popup here; the submit handler reports validity)
if (!this.validateDates(false)) {
return
}
if (startedAt && endedAt) {
try {
const params = new URLSearchParams({


@ -26,16 +26,23 @@ export default class extends BaseController {
received: (data) => {
const row = this.element.querySelector(`tr[data-import-id="${data.import.id}"]`);
if (row) {
const pointsCell = row.querySelector('[data-points-count]');
if (pointsCell) {
pointsCell.textContent = new Intl.NumberFormat().format(data.import.points_count);
}
if (!row) return;
const statusCell = row.querySelector('[data-status-display]');
if (statusCell && data.import.status) {
statusCell.textContent = data.import.status;
}
// Handle deletion complete - remove the row
if (data.action === 'delete') {
row.remove();
return;
}
// Handle status and points updates
const pointsCell = row.querySelector('[data-points-count]');
if (pointsCell && data.import.points_count !== undefined) {
pointsCell.textContent = new Intl.NumberFormat().format(data.import.points_count);
}
const statusCell = row.querySelector('[data-status-display]');
if (statusCell && data.import.status) {
statusCell.textContent = data.import.status;
}
}
}


@ -270,7 +270,7 @@ export class LayerManager {
// Always create fog layer for backward compatibility
if (!this.layers.fogLayer) {
this.layers.fogLayer = new FogLayer(this.map, {
clearRadius: 1000,
clearRadius: this.settings.fogOfWarRadius || 1000,
visible: this.settings.fogEnabled || false
})
this.layers.fogLayer.add(pointsGeoJSON)


@ -59,7 +59,8 @@ export class SettingsController {
Object.entries(toggleMap).forEach(([targetName, settingKey]) => {
const target = `${targetName}Target`
if (controller[target]) {
const hasTarget = `has${targetName.charAt(0).toUpperCase()}${targetName.slice(1)}Target`
if (controller[hasTarget]) {
controller[target].checked = this.settings[settingKey]
}
})
@ -75,7 +76,7 @@ export class SettingsController {
}
// Show/hide family members list based on initial toggle state
if (controller.hasFamilyToggleTarget && controller.hasFamilyMembersListTarget) {
if (controller.hasFamilyToggleTarget && controller.hasFamilyMembersListTarget && controller.familyToggleTarget) {
controller.familyMembersListTarget.style.display = controller.familyToggleTarget.checked ? 'block' : 'none'
}
@ -244,8 +245,8 @@ export class SettingsController {
if (settings.fogOfWarRadius) {
fogLayer.clearRadius = settings.fogOfWarRadius
}
// Redraw fog layer
if (fogLayer.visible) {
// Redraw fog layer if it has data and is visible
if (fogLayer.visible && fogLayer.data) {
await fogLayer.update(fogLayer.data)
}
}


@ -12,9 +12,11 @@ export class FogLayer {
this.ctx = null
this.clearRadius = options.clearRadius || 1000 // meters
this.points = []
this.data = null // Store original data for updates
}
add(data) {
this.data = data // Store for later updates
this.points = data.features || []
this.createCanvas()
if (this.visible) {
@ -24,6 +26,7 @@ export class FogLayer {
}
update(data) {
this.data = data // Store for later updates
this.points = data.features || []
this.render()
}
@ -78,6 +81,7 @@ export class FogLayer {
// Clear circles around visited points
this.ctx.globalCompositeOperation = 'destination-out'
this.ctx.fillStyle = 'rgba(0, 0, 0, 1)' // Fully opaque to completely clear fog
this.points.forEach(feature => {
const coords = feature.geometry.coordinates


@ -3,14 +3,10 @@ import { BaseLayer } from './base_layer'
/**
* Heatmap layer showing point density
* Uses MapLibre's native heatmap for performance
* Fixed radius: 20 pixels
*/
export class HeatmapLayer extends BaseLayer {
constructor(map, options = {}) {
super(map, { id: 'heatmap', ...options })
this.radius = 20 // Fixed radius
this.weight = options.weight || 1
this.intensity = 1 // Fixed intensity
this.opacity = options.opacity || 0.6
}
@ -31,53 +27,52 @@ export class HeatmapLayer extends BaseLayer {
type: 'heatmap',
source: this.sourceId,
paint: {
// Increase weight as diameter increases
'heatmap-weight': [
'interpolate',
['linear'],
['get', 'weight'],
0, 0,
6, 1
],
// Fixed weight
'heatmap-weight': 1,
// Increase intensity as zoom increases
// low intensity to view major clusters
'heatmap-intensity': [
'interpolate',
['linear'],
['zoom'],
0, this.intensity,
9, this.intensity * 3
0, 0.01,
10, 0.1,
15, 0.3
],
// Color ramp from blue to red
// Color ramp
'heatmap-color': [
'interpolate',
['linear'],
['heatmap-density'],
0, 'rgba(33,102,172,0)',
0.2, 'rgb(103,169,207)',
0.4, 'rgb(209,229,240)',
0.6, 'rgb(253,219,199)',
0.8, 'rgb(239,138,98)',
0, 'rgba(0,0,0,0)',
0.4, 'rgba(0,0,0,0)',
0.65, 'rgba(33,102,172,0.4)',
0.7, 'rgb(103,169,207)',
0.8, 'rgb(209,229,240)',
0.9, 'rgb(253,219,199)',
0.95, 'rgb(239,138,98)',
1, 'rgb(178,24,43)'
],
// Fixed radius adjusted by zoom level
// Radius in pixels, exponential growth
'heatmap-radius': [
'interpolate',
['linear'],
['exponential', 2],
['zoom'],
0, this.radius,
9, this.radius * 3
10, 5,
15, 10,
20, 160
],
// Transition from heatmap to circle layer by zoom level
// Visible when zoomed in, fades when zoomed out
'heatmap-opacity': [
'interpolate',
['linear'],
['zoom'],
7, this.opacity,
9, 0
0, 0.3,
10, this.opacity,
15, this.opacity
]
}
}


@ -0,0 +1,46 @@
# frozen_string_literal: true
class Imports::DestroyJob < ApplicationJob
queue_as :default
def perform(import_id)
import = Import.find_by(id: import_id)
return unless import
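# Mark the import as deleting up front so the UI can reflect progress right away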
import.deleting!
broadcast_status_update(import)
Imports::Destroy.new(import.user, import).call
broadcast_deletion_complete(import)
rescue ActiveRecord::RecordNotFound
Rails.logger.warn "Import #{import_id} not found, may have already been deleted"
end
private
def broadcast_status_update(import)
ImportsChannel.broadcast_to(
import.user,
{
action: 'status_update',
import: {
id: import.id,
status: import.status
}
}
)
end
def broadcast_deletion_complete(import)
ImportsChannel.broadcast_to(
import.user,
{
action: 'delete',
import: {
id: import.id
}
}
)
end
end


@ -6,8 +6,15 @@ class Points::NightlyReverseGeocodingJob < ApplicationJob
def perform
return unless DawarichSettings.reverse_geocoding_enabled?
processed_user_ids = Set.new
Point.not_reverse_geocoded.find_each(batch_size: 1000) do |point|
point.async_reverse_geocode
processed_user_ids.add(point.user_id)
end
processed_user_ids.each do |user_id|
Cache::InvalidateUserCaches.new(user_id).call
end
end
end


@ -17,7 +17,7 @@ class Import < ApplicationRecord
validate :file_size_within_limit, if: -> { user.trial? }
validate :import_count_within_limit, if: -> { user.trial? }
enum :status, { created: 0, processing: 1, completed: 2, failed: 3 }
enum :status, { created: 0, processing: 1, completed: 2, failed: 3, deleting: 4 }
enum :source, {
google_semantic_history: 0, owntracks: 1, google_records: 2,


@ -9,6 +9,7 @@ class Trip < ApplicationRecord
belongs_to :user
validates :name, :started_at, :ended_at, presence: true
validate :started_at_before_ended_at
after_create :enqueue_calculation_jobs
after_update :enqueue_calculation_jobs, if: -> { saved_change_to_started_at? || saved_change_to_ended_at? }
@ -47,4 +48,11 @@ class Trip < ApplicationRecord
# to show all photos in the same height
vertical_photos.count > horizontal_photos.count ? vertical_photos : horizontal_photos
end
def started_at_before_ended_at
return if started_at.blank? || ended_at.blank?
return unless started_at >= ended_at
errors.add(:ended_at, 'must be after start date')
end
end


@ -36,8 +36,8 @@ class Cache::Clean
def delete_countries_cities_cache
User.find_each do |user|
Rails.cache.delete("dawarich/user_#{user.id}_countries")
Rails.cache.delete("dawarich/user_#{user.id}_cities")
Rails.cache.delete("dawarich/user_#{user.id}_countries_visited")
Rails.cache.delete("dawarich/user_#{user.id}_cities_visited")
end
end
end


@ -0,0 +1,34 @@
# frozen_string_literal: true
class Cache::InvalidateUserCaches
# Invalidates user-specific caches that depend on point data.
# This should be called after:
# - Reverse geocoding operations (updates country/city data)
# - Stats calculations (updates geocoding stats)
# - Bulk point imports/updates
def initialize(user_id)
@user_id = user_id
end
def call
invalidate_countries_visited
invalidate_cities_visited
invalidate_points_geocoded_stats
end
def invalidate_countries_visited
Rails.cache.delete("dawarich/user_#{user_id}_countries_visited")
end
def invalidate_cities_visited
Rails.cache.delete("dawarich/user_#{user_id}_cities_visited")
end
def invalidate_points_geocoded_stats
Rails.cache.delete("dawarich/user_#{user_id}_points_geocoded_stats")
end
private
attr_reader :user_id
end
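For reference, a minimal usage sketch; the calling context is illustrative, only the service API below comes from the diff:
# After a bulk operation that changed a user's points:
Cache::InvalidateUserCaches.new(user.id).call
# Or drop a single key selectively:
Cache::InvalidateUserCaches.new(user.id).invalidate_countries_visited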


@ -9,11 +9,15 @@ class Imports::Destroy
end
def call
points_count = @import.points_count
ActiveRecord::Base.transaction do
@import.points.delete_all
@import.points.destroy_all
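# destroy_all runs per-point callbacks that delete_all skipped; per the changelog, this is what keeps the user's points count from going negative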
@import.destroy!
end
Rails.logger.info "Import #{@import.id} deleted with #{points_count} points"
Stats::BulkCalculator.new(@user.id).call
end
end


@ -11,8 +11,7 @@ class Points::Create
def call
data = Points::Params.new(params, user.id).call
# Deduplicate points based on the unique constraint; #to_i normalizes timestamps so Time and Integer values compare equal
deduplicated_data = data.uniq { |point| [point[:lonlat], point[:timestamp], point[:user_id]] }
deduplicated_data = data.uniq { |point| [point[:lonlat], point[:timestamp].to_i, point[:user_id]] }
created_points = []


@ -26,7 +26,7 @@ class Stats::CalculateMonth
def start_timestamp = DateTime.new(year, month, 1).to_i
def end_timestamp
DateTime.new(year, month, -1).to_i # -1 returns last day of month
DateTime.new(year, month, -1).to_i
end
def update_month_stats(year, month)
@ -42,6 +42,8 @@ class Stats::CalculateMonth
)
stat.save!
Cache::InvalidateUserCaches.new(user.id).call
end
end


@ -53,8 +53,8 @@ class Stats::HexagonCalculator
# Try with lower resolution (larger hexagons)
lower_resolution = [h3_resolution - 2, 0].max
Rails.logger.info "Recalculating with lower H3 resolution: #{lower_resolution}"
# Create a new instance with lower resolution for recursion
return self.class.new(user.id, year, month).calculate_hexagons(lower_resolution)
# Recursively call with lower resolution
return calculate_hexagons(lower_resolution)
end
Rails.logger.info "Generated #{h3_hash.size} H3 hexagons at resolution #{h3_resolution} for user #{user.id}"


@ -0,0 +1,24 @@
<%= turbo_stream.replace "import-#{@import.id}" do %>
<tr data-import-id="<%= @import.id %>"
id="import-<%= @import.id %>"
data-points-total="<%= @import.processed %>"
class="hover">
<td>
<%= @import.name %> (<%= @import.source %>)
&nbsp;
<%= link_to '🗺️', map_path(import_id: @import.id) %>
&nbsp;
<%= link_to '📋', points_path(import_id: @import.id) %>
</td>
<td><%= number_to_human_size(@import.file&.byte_size) || 'N/A' %></td>
<td data-points-count>
<%= number_with_delimiter @import.processed %>
</td>
<td data-status-display>deleting</td>
<td><%= human_datetime(@import.created_at) %></td>
<td class="whitespace-nowrap">
<span class="loading loading-spinner loading-sm"></span>
<span class="text-sm text-gray-500">Deleting...</span>
</td>
</tr>
<% end %>


@ -72,10 +72,15 @@
<td data-status-display><%= import.status %></td>
<td><%= human_datetime(import.created_at) %></td>
<td class="whitespace-nowrap">
<% if import.file.present? %>
<%= link_to 'Download', rails_blob_path(import.file, disposition: 'attachment'), class: "btn btn-outline btn-sm btn-info", download: import.name %>
<% if import.deleting? %>
<span class="loading loading-spinner loading-sm"></span>
<span class="text-sm text-gray-500">Deleting...</span>
<% else %>
<% if import.file.present? %>
<%= link_to 'Download', rails_blob_path(import.file, disposition: 'attachment'), class: "btn btn-outline btn-sm btn-info", download: import.name %>
<% end %>
<%= link_to 'Delete', import, data: { turbo_confirm: "Are you sure?", turbo_method: :delete }, method: :delete, class: "btn btn-outline btn-sm btn-error" %>
<% end %>
<%= link_to 'Delete', import, data: { turbo_confirm: "Are you sure?", turbo_method: :delete }, method: :delete, class: "btn btn-outline btn-sm btn-error" %>
</td>
</tr>
<% end %>


@ -37,6 +37,8 @@ module Dawarich
config.active_job.queue_adapter = :sidekiq
config.action_mailer.preview_paths << "#{Rails.root.join('spec/mailers/previews')}"
config.action_mailer.preview_paths << Rails.root.join('spec/mailers/previews').to_s
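# Gzip-compress HTTP responses (Rack::Deflater ships with Rack itself)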
config.middleware.use Rack::Deflater
end
end


@ -3,16 +3,55 @@
class AddCompositeIndexToStats < ActiveRecord::Migration[8.0]
disable_ddl_transaction!
BATCH_SIZE = 1000
def change
# Add composite index for the most common stats lookup pattern:
# Stat.find_or_initialize_by(year:, month:, user:)
# This query is called on EVERY stats calculation
#
# Using algorithm: :concurrently to avoid locking the table during index creation
# This is crucial for production deployments with existing data
total_duplicates = execute(<<-SQL.squish).first['count'].to_i
SELECT COUNT(*) as count
FROM stats s1
WHERE EXISTS (
SELECT 1 FROM stats s2
WHERE s2.user_id = s1.user_id
AND s2.year = s1.year
AND s2.month = s1.month
AND s2.id > s1.id
)
SQL
if total_duplicates.positive?
Rails.logger.info(
"Found #{total_duplicates} duplicate stats records. Starting cleanup in batches of #{BATCH_SIZE}..."
)
end
deleted_count = 0
loop do
# NOTE: PostgreSQL's DELETE has no LIMIT clause, so batching goes through an id subquery
batch_deleted = execute(<<-SQL.squish).cmd_tuples
DELETE FROM stats
WHERE id IN (
SELECT s1.id FROM stats s1
WHERE EXISTS (
SELECT 1 FROM stats s2
WHERE s2.user_id = s1.user_id
AND s2.year = s1.year
AND s2.month = s1.month
AND s2.id > s1.id
)
LIMIT #{BATCH_SIZE}
)
SQL
break if batch_deleted.zero?
deleted_count += batch_deleted
Rails.logger.info("Cleaned up #{deleted_count}/#{total_duplicates} duplicate stats records")
end
Rails.logger.info("Completed cleanup: removed #{deleted_count} duplicate stats records") if deleted_count.positive?
add_index :stats, %i[user_id year month],
name: 'index_stats_on_user_id_year_month',
unique: true,
algorithm: :concurrently
algorithm: :concurrently,
if_not_exists: true
BulkStatsCalculatingJob.perform_later
end
end


@ -0,0 +1,12 @@
# frozen_string_literal: true
class AddCompositeIndexToPointsUserIdTimestamp < ActiveRecord::Migration[8.0]
disable_ddl_transaction!
def change
add_index :points, %i[user_id timestamp],
order: { timestamp: :desc },
algorithm: :concurrently,
if_not_exists: true
end
end
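For context, a composite (user_id, timestamp DESC) btree serves both per-user range filters and newest-first lookups. An illustrative sketch of queries this index can back (assumed, not from the diff):
# Range filter, as in Api::V1::PointsController above:
user.points.where(timestamp: start_at..end_at)
# Newest points first:
user.points.order(timestamp: :desc).limit(100)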

db/schema.rb (generated)

@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema[8.0].define(version: 2025_12_10_193532) do
ActiveRecord::Schema[8.0].define(version: 2025_12_26_170919) do
# These are extensions that must be enabled in order to support this database
enable_extension "pg_catalog.plpgsql"
enable_extension "postgis"
@ -249,6 +249,7 @@ ActiveRecord::Schema[8.0].define(version: 2025_12_10_193532) do
t.index ["user_id", "country_name"], name: "idx_points_user_country_name"
t.index ["user_id", "reverse_geocoded_at"], name: "index_points_on_user_id_and_reverse_geocoded_at", where: "(reverse_geocoded_at IS NOT NULL)"
t.index ["user_id", "timestamp", "track_id"], name: "idx_points_track_generation"
t.index ["user_id", "timestamp"], name: "index_points_on_user_id_and_timestamp", order: { timestamp: :desc }
t.index ["user_id"], name: "index_points_on_user_id"
t.index ["visit_id"], name: "index_points_on_visit_id"
end


@ -36,6 +36,81 @@ test.describe('Advanced Layers', () => {
expect(await fogToggle.isChecked()).toBe(true)
})
test('fog radius setting can be changed and applied', async ({ page }) => {
// Enable fog layer first
await page.click('button[title="Open map settings"]')
await page.waitForTimeout(400)
await page.click('button[data-tab="layers"]')
await page.waitForTimeout(300)
const fogToggle = page.locator('label:has-text("Fog of War")').first().locator('input.toggle')
await fogToggle.check()
await page.waitForTimeout(500)
// Go to advanced settings tab
await page.click('button[data-tab="settings"]')
await page.waitForTimeout(300)
// Find fog radius slider
const fogRadiusSlider = page.locator('input[name="fogOfWarRadius"]')
await expect(fogRadiusSlider).toBeVisible()
// Change the slider value using evaluate to trigger input event
await fogRadiusSlider.evaluate((slider) => {
slider.value = '500'
slider.dispatchEvent(new Event('input', { bubbles: true }))
})
await page.waitForTimeout(200)
// Verify display value updated
const displayValue = page.locator('[data-maps--maplibre-target="fogRadiusValue"]')
await expect(displayValue).toHaveText('500m')
// Verify slider value was set
expect(await fogRadiusSlider.inputValue()).toBe('500')
// Click Apply Settings button
const applyButton = page.locator('button:has-text("Apply Settings")')
await applyButton.click()
await page.waitForTimeout(500)
// Verify no errors in console
const consoleErrors = []
page.on('console', msg => {
if (msg.type() === 'error') consoleErrors.push(msg.text())
})
await page.waitForTimeout(500)
expect(consoleErrors.filter(e => e.includes('fog_layer'))).toHaveLength(0)
})
test('fog settings can be applied without errors when fog layer is not visible', async ({ page }) => {
await page.click('button[title="Open map settings"]')
await page.waitForTimeout(400)
await page.click('button[data-tab="settings"]')
await page.waitForTimeout(300)
// Change fog radius slider without enabling fog layer
const fogRadiusSlider = page.locator('input[name="fogOfWarRadius"]')
await fogRadiusSlider.evaluate((slider) => {
slider.value = '750'
slider.dispatchEvent(new Event('input', { bubbles: true }))
})
await page.waitForTimeout(200)
// Click Apply Settings - this should not throw an error
const applyButton = page.locator('button:has-text("Apply Settings")')
await applyButton.click()
await page.waitForTimeout(500)
// Verify no JavaScript errors occurred
const consoleErrors = []
page.on('console', msg => {
if (msg.type() === 'error') consoleErrors.push(msg.text())
})
await page.waitForTimeout(500)
expect(consoleErrors.filter(e => e.includes('undefined') || e.includes('fog'))).toHaveLength(0)
})
})
test.describe('Scratch Map', () => {

e2e/v2/trips.spec.js (new file)

@ -0,0 +1,100 @@
import { test, expect } from '@playwright/test'
import { closeOnboardingModal } from '../helpers/navigation.js'
test.describe('Trips Date Validation', () => {
test.beforeEach(async ({ page }) => {
await page.goto('/trips/new')
await closeOnboardingModal(page)
});
test('validates that start date is earlier than end date on new trip form', async ({ page }) => {
// Wait for the form to load
await page.waitForSelector('input[name="trip[started_at]"]')
// Fill in trip name
await page.fill('input[name="trip[name]"]', 'Test Trip')
// Set end date before start date
await page.fill('input[name="trip[started_at]"]', '2024-12-25T10:00')
await page.fill('input[name="trip[ended_at]"]', '2024-12-20T10:00')
// Get the current URL to verify we stay on the same page
const currentUrl = page.url()
// Try to submit the form
const submitButton = page.locator('input[type="submit"], button[type="submit"]')
await submitButton.click()
// Wait a bit for potential navigation
await page.waitForTimeout(500)
// Verify we're still on the same page (form wasn't submitted)
expect(page.url()).toBe(currentUrl)
// Verify the dates are still there (form wasn't cleared)
const startValue = await page.locator('input[name="trip[started_at]"]').inputValue()
const endValue = await page.locator('input[name="trip[ended_at]"]').inputValue()
expect(startValue).toBe('2024-12-25T10:00')
expect(endValue).toBe('2024-12-20T10:00')
});
test('allows valid date range on new trip form', async ({ page }) => {
// Wait for the form to load
await page.waitForSelector('input[name="trip[started_at]"]')
// Fill in trip name
await page.fill('input[name="trip[name]"]', 'Valid Test Trip')
// Set valid date range (start before end)
await page.fill('input[name="trip[started_at]"]', '2024-12-20T10:00')
await page.fill('input[name="trip[ended_at]"]', '2024-12-25T10:00')
// Trigger blur to validate
await page.locator('input[name="trip[ended_at]"]').blur()
// Give the validation time to run
await page.waitForTimeout(200)
// Check that the end date field has no validation error
const endDateInput = page.locator('input[name="trip[ended_at]"]')
const validationMessage = await endDateInput.evaluate(el => el.validationMessage)
const isValid = await endDateInput.evaluate(el => el.validity.valid)
expect(validationMessage).toBe('')
expect(isValid).toBe(true)
});
test('validates dates when updating end date to be earlier than start date', async ({ page }) => {
// Wait for the form to load
await page.waitForSelector('input[name="trip[started_at]"]')
// Fill in trip name
await page.fill('input[name="trip[name]"]', 'Test Trip')
// First set a valid range
await page.fill('input[name="trip[started_at]"]', '2024-12-20T10:00')
await page.fill('input[name="trip[ended_at]"]', '2024-12-25T10:00')
// Now change start date to be after end date
await page.fill('input[name="trip[started_at]"]', '2024-12-26T10:00')
// Get the current URL to verify we stay on the same page
const currentUrl = page.url()
// Try to submit the form
const submitButton = page.locator('input[type="submit"], button[type="submit"]')
await submitButton.click()
// Wait a bit for potential navigation
await page.waitForTimeout(500)
// Verify we're still on the same page (form wasn't submitted)
expect(page.url()).toBe(currentUrl)
// Verify the dates are still there (form wasn't cleared)
const startValue = await page.locator('input[name="trip[started_at]"]').inputValue()
const endValue = await page.locator('input[name="trip[ended_at]"]').inputValue()
expect(startValue).toBe('2024-12-26T10:00')
expect(endValue).toBe('2024-12-25T10:00')
});
});


@ -0,0 +1,192 @@
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe Imports::DestroyJob, type: :job do
describe '#perform' do
let(:user) { create(:user) }
let(:import) { create(:import, user: user, status: :completed) }
describe 'queue configuration' do
it 'uses the default queue' do
expect(described_class.queue_name).to eq('default')
end
end
context 'when import exists' do
before do
create_list(:point, 3, user: user, import: import)
end
it 'changes import status to deleting and deletes it' do
expect(import).not_to be_deleting
import_id = import.id
described_class.perform_now(import_id)
expect(Import.find_by(id: import_id)).to be_nil
end
it 'calls the Imports::Destroy service' do
destroy_service = instance_double(Imports::Destroy)
allow(Imports::Destroy).to receive(:new).with(user, import).and_return(destroy_service)
allow(destroy_service).to receive(:call)
described_class.perform_now(import.id)
expect(Imports::Destroy).to have_received(:new).with(user, import)
expect(destroy_service).to have_received(:call)
end
it 'broadcasts status update to the user' do
allow(ImportsChannel).to receive(:broadcast_to)
described_class.perform_now(import.id)
expect(ImportsChannel).to have_received(:broadcast_to).with(
user,
hash_including(
action: 'status_update',
import: hash_including(
id: import.id,
status: 'deleting'
)
)
).at_least(:once)
end
it 'broadcasts deletion complete to the user' do
allow(ImportsChannel).to receive(:broadcast_to)
described_class.perform_now(import.id)
expect(ImportsChannel).to have_received(:broadcast_to).with(
user,
hash_including(
action: 'delete',
import: hash_including(id: import.id)
)
).at_least(:once)
end
it 'broadcasts both status update and deletion messages' do
allow(ImportsChannel).to receive(:broadcast_to)
described_class.perform_now(import.id)
expect(ImportsChannel).to have_received(:broadcast_to).twice
end
it 'deletes the import and its points' do
import_id = import.id
point_ids = import.points.pluck(:id)
described_class.perform_now(import_id)
expect(Import.find_by(id: import_id)).to be_nil
expect(Point.where(id: point_ids)).to be_empty
end
end
context 'when import does not exist' do
let(:non_existent_id) { 999_999 }
it 'does not raise an error' do
expect { described_class.perform_now(non_existent_id) }.not_to raise_error
end
it 'does not call the Imports::Destroy service' do
expect(Imports::Destroy).not_to receive(:new)
described_class.perform_now(non_existent_id)
end
it 'does not broadcast any messages' do
expect(ImportsChannel).not_to receive(:broadcast_to)
described_class.perform_now(non_existent_id)
end
it 'returns early without logging' do
allow(Rails.logger).to receive(:warn)
described_class.perform_now(non_existent_id)
expect(Rails.logger).not_to have_received(:warn)
end
end
context 'when import is deleted during job execution' do
it 'handles RecordNotFound gracefully' do
allow(Import).to receive(:find_by).with(id: import.id).and_return(import)
allow(import).to receive(:deleting!).and_raise(ActiveRecord::RecordNotFound)
expect { described_class.perform_now(import.id) }.not_to raise_error
end
it 'logs a warning when RecordNotFound is raised' do
allow(Import).to receive(:find_by).with(id: import.id).and_return(import)
allow(import).to receive(:deleting!).and_raise(ActiveRecord::RecordNotFound)
allow(Rails.logger).to receive(:warn)
described_class.perform_now(import.id)
expect(Rails.logger).to have_received(:warn).with(/Import #{import.id} not found/)
end
end
context 'when broadcast fails' do
before do
allow(ImportsChannel).to receive(:broadcast_to).and_raise(StandardError, 'Broadcast error')
end
it 'allows the error to propagate' do
expect { described_class.perform_now(import.id) }.to raise_error(StandardError, 'Broadcast error')
end
end
context 'when Imports::Destroy service fails' do
before do
allow_any_instance_of(Imports::Destroy).to receive(:call).and_raise(StandardError, 'Destroy failed')
end
it 'allows the error to propagate' do
expect { described_class.perform_now(import.id) }.to raise_error(StandardError, 'Destroy failed')
end
it 'has already set status to deleting before service is called' do
expect do
described_class.perform_now(import.id)
rescue StandardError
nil # the failure is expected; this example only asserts on the status change
end.to change { import.reload.status }.to('deleting')
end
end
context 'with multiple imports for different users' do
let(:user2) { create(:user) }
let(:import2) { create(:import, user: user2, status: :completed) }
it 'only broadcasts to the correct user' do
expect(ImportsChannel).to receive(:broadcast_to).with(user, anything).twice
expect(ImportsChannel).not_to receive(:broadcast_to).with(user2, anything)
described_class.perform_now(import.id)
end
end
context 'job enqueuing' do
it 'can be enqueued' do
expect do
described_class.perform_later(import.id)
end.to have_enqueued_job(described_class).with(import.id)
end
it 'can be performed later with correct arguments' do
expect do
described_class.perform_later(import.id)
end.to have_enqueued_job(described_class).on_queue('default').with(import.id)
end
end
end
end


@ -7,7 +7,6 @@ RSpec.describe Points::NightlyReverseGeocodingJob, type: :job do
let(:user) { create(:user) }
before do
# Clear any existing jobs and points to ensure test isolation
ActiveJob::Base.queue_adapter.enqueued_jobs.clear
Point.delete_all
end
@ -62,18 +61,22 @@ RSpec.describe Points::NightlyReverseGeocodingJob, type: :job do
end
context 'with points needing reverse geocoding' do
let(:user2) { create(:user) }
let!(:point_without_geocoding1) do
create(:point, user: user, reverse_geocoded_at: nil)
end
let!(:point_without_geocoding2) do
create(:point, user: user, reverse_geocoded_at: nil)
end
let!(:point_without_geocoding3) do
create(:point, user: user2, reverse_geocoded_at: nil)
end
let!(:geocoded_point) do
create(:point, user: user, reverse_geocoded_at: 1.day.ago)
end
it 'processes all points that need reverse geocoding' do
expect { described_class.perform_now }.to have_enqueued_job(ReverseGeocodingJob).exactly(2).times
expect { described_class.perform_now }.to have_enqueued_job(ReverseGeocodingJob).exactly(3).times
end
it 'enqueues jobs with correct parameters' do
@ -82,6 +85,8 @@ RSpec.describe Points::NightlyReverseGeocodingJob, type: :job do
.with('Point', point_without_geocoding1.id)
.and have_enqueued_job(ReverseGeocodingJob)
.with('Point', point_without_geocoding2.id)
.and have_enqueued_job(ReverseGeocodingJob)
.with('Point', point_without_geocoding3.id)
end
it 'uses find_each with correct batch size' do
@ -93,6 +98,47 @@ RSpec.describe Points::NightlyReverseGeocodingJob, type: :job do
expect(relation_mock).to have_received(:find_each).with(batch_size: 1000)
end
it 'invalidates caches for all affected users' do
allow(Cache::InvalidateUserCaches).to receive(:new).and_call_original
described_class.perform_now
# Verify that cache invalidation service was instantiated for both users
expect(Cache::InvalidateUserCaches).to have_received(:new).with(user.id)
expect(Cache::InvalidateUserCaches).to have_received(:new).with(user2.id)
end
it 'invalidates caches for the correct users' do
cache_service1 = instance_double(Cache::InvalidateUserCaches)
cache_service2 = instance_double(Cache::InvalidateUserCaches)
allow(Cache::InvalidateUserCaches).to receive(:new).with(user.id).and_return(cache_service1)
allow(Cache::InvalidateUserCaches).to receive(:new).with(user2.id).and_return(cache_service2)
allow(cache_service1).to receive(:call)
allow(cache_service2).to receive(:call)
described_class.perform_now
expect(cache_service1).to have_received(:call)
expect(cache_service2).to have_received(:call)
end
it 'does not invalidate caches multiple times for the same user' do
cache_service = instance_double(Cache::InvalidateUserCaches)
allow(Cache::InvalidateUserCaches).to receive(:new).with(user.id).and_return(cache_service)
allow(Cache::InvalidateUserCaches).to receive(:new).with(user2.id).and_return(
instance_double(
Cache::InvalidateUserCaches, call: nil
)
)
allow(cache_service).to receive(:call)
described_class.perform_now
expect(cache_service).to have_received(:call).once
end
end
end


@ -7,6 +7,33 @@ RSpec.describe Trip, type: :model do
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_presence_of(:started_at) }
it { is_expected.to validate_presence_of(:ended_at) }
context 'date range validation' do
let(:user) { create(:user) }
it 'is valid when started_at is before ended_at' do
trip = build(:trip, user: user, started_at: 1.day.ago, ended_at: Time.current)
expect(trip).to be_valid
end
it 'is invalid when started_at is after ended_at' do
trip = build(:trip, user: user, started_at: Time.current, ended_at: 1.day.ago)
expect(trip).not_to be_valid
expect(trip.errors[:ended_at]).to include('must be after start date')
end
it 'is invalid when started_at equals ended_at' do
time = Time.current
trip = build(:trip, user: user, started_at: time, ended_at: time)
expect(trip).not_to be_valid
expect(trip.errors[:ended_at]).to include('must be after start date')
end
it 'does not add a date range error when dates are blank' do
trip = Trip.new(user: user, name: 'Test Trip')
trip.valid?
expect(trip.errors[:ended_at]).not_to include('must be after start date')
end
end
end
describe 'associations' do


@ -223,9 +223,10 @@ RSpec.describe 'Imports', type: :request do
it 'enqueues the destroy job and marks the import as deleting' do
expect do
delete import_path(import)
end.to change(user.imports, :count).by(-1)
end.to have_enqueued_job(Imports::DestroyJob).with(import.id)
expect(response).to redirect_to(imports_path)
expect(import.reload).to be_deleting
end
end
end


@ -12,10 +12,10 @@ RSpec.describe Cache::Clean do
let(:user_2_years_tracked_key) { "dawarich/user_#{user2.id}_years_tracked" }
let(:user_1_points_geocoded_stats_key) { "dawarich/user_#{user1.id}_points_geocoded_stats" }
let(:user_2_points_geocoded_stats_key) { "dawarich/user_#{user2.id}_points_geocoded_stats" }
let(:user_1_countries_key) { "dawarich/user_#{user1.id}_countries" }
let(:user_2_countries_key) { "dawarich/user_#{user2.id}_countries" }
let(:user_1_cities_key) { "dawarich/user_#{user1.id}_cities" }
let(:user_2_cities_key) { "dawarich/user_#{user2.id}_cities" }
let(:user_1_countries_key) { "dawarich/user_#{user1.id}_countries_visited" }
let(:user_2_countries_key) { "dawarich/user_#{user2.id}_countries_visited" }
let(:user_1_cities_key) { "dawarich/user_#{user1.id}_cities_visited" }
let(:user_2_cities_key) { "dawarich/user_#{user2.id}_cities_visited" }
before do
# Set up cache entries that should be cleaned


@ -0,0 +1,57 @@
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe Cache::InvalidateUserCaches do
let(:user) { create(:user) }
let(:service) { described_class.new(user.id) }
describe '#call' do
it 'invalidates all user-related caches' do
Rails.cache.write("dawarich/user_#{user.id}_countries_visited", %w[USA Canada])
Rails.cache.write("dawarich/user_#{user.id}_cities_visited", ['New York', 'Toronto'])
Rails.cache.write("dawarich/user_#{user.id}_points_geocoded_stats", { geocoded: 100, without_data: 10 })
expect(Rails.cache.read("dawarich/user_#{user.id}_countries_visited")).to eq(%w[USA Canada])
expect(Rails.cache.read("dawarich/user_#{user.id}_cities_visited")).to eq(['New York', 'Toronto'])
expect(Rails.cache.read("dawarich/user_#{user.id}_points_geocoded_stats")).to eq({ geocoded: 100,
without_data: 10 })
service.call
expect(Rails.cache.read("dawarich/user_#{user.id}_countries_visited")).to be_nil
expect(Rails.cache.read("dawarich/user_#{user.id}_cities_visited")).to be_nil
expect(Rails.cache.read("dawarich/user_#{user.id}_points_geocoded_stats")).to be_nil
end
end
describe '#invalidate_countries_visited' do
it 'deletes the countries_visited cache' do
Rails.cache.write("dawarich/user_#{user.id}_countries_visited", %w[USA Canada])
service.invalidate_countries_visited
expect(Rails.cache.read("dawarich/user_#{user.id}_countries_visited")).to be_nil
end
end
describe '#invalidate_cities_visited' do
it 'deletes the cities_visited cache' do
Rails.cache.write("dawarich/user_#{user.id}_cities_visited", ['New York', 'Toronto'])
service.invalidate_cities_visited
expect(Rails.cache.read("dawarich/user_#{user.id}_cities_visited")).to be_nil
end
end
describe '#invalidate_points_geocoded_stats' do
it 'deletes the points_geocoded_stats cache' do
Rails.cache.write("dawarich/user_#{user.id}_points_geocoded_stats", { geocoded: 100, without_data: 10 })
service.invalidate_points_geocoded_stats
expect(Rails.cache.read("dawarich/user_#{user.id}_points_geocoded_stats")).to be_nil
end
end
end


@ -201,6 +201,27 @@ RSpec.describe Stats::CalculateMonth do
end
end
end
context 'when invalidating caches' do
it 'invalidates user caches after updating stats' do
cache_service = instance_double(Cache::InvalidateUserCaches)
allow(Cache::InvalidateUserCaches).to receive(:new).with(user.id).and_return(cache_service)
allow(cache_service).to receive(:call)
calculate_stats
expect(cache_service).to have_received(:call)
end
it 'does not invalidate caches when there are no points' do
new_user = create(:user)
service = described_class.new(new_user.id, year, month)
expect(Cache::InvalidateUserCaches).not_to receive(:new)
service.call
end
end
end
end
end


@ -62,6 +62,39 @@ RSpec.describe Stats::HexagonCalculator do
expect(total_points).to eq(2)
end
context 'when there are too many hexagons' do
let(:h3_resolution) { 15 } # Very high resolution to trigger MAX_HEXAGONS
before do
# Stub to simulate too many hexagons on first call, then acceptable on second
allow_any_instance_of(described_class).to receive(:calculate_h3_indexes).and_call_original
call_count = 0
allow_any_instance_of(described_class).to receive(:calculate_h3_indexes) do |instance, points, resolution|
call_count += 1
if call_count == 1
# First call: return too many hexagons
Hash.new.tap do |hash|
(described_class::MAX_HEXAGONS + 1).times do |i|
hash[i.to_s(16)] = [1, timestamp1, timestamp1]
end
end
else
# Second call with lower resolution: return acceptable amount
{ '8c2a1072b3f1fff' => [2, timestamp1, timestamp2] }
end
end
end
it 'recursively reduces resolution when too many hexagons are generated' do
result = calculate_hexagons
expect(result).to be_an(Array)
expect(result).not_to be_empty
# Should have successfully reduced the hexagon count
expect(result.size).to be < described_class::MAX_HEXAGONS
end
end
context 'when H3 raises an error' do
before do
allow(H3).to receive(:from_geo_coordinates).and_raise(StandardError, 'H3 error')