mirror of
https://github.com/Freika/dawarich.git
synced 2026-01-10 01:01:39 -05:00
Compare commits
3 commits
3d2348db08
...
2053248323
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2053248323 | ||
|
|
29f81738df | ||
|
|
174a4bf5b6 |
42 changed files with 1045 additions and 164 deletions
|
|
@ -1 +1 @@
|
|||
0.37.1
|
||||
0.37.2
|
||||
|
|
|
|||
10
CHANGELOG.md
10
CHANGELOG.md
|
|
@ -4,6 +4,16 @@ All notable changes to this project will be documented in this file.
|
|||
The format is based on [Keep a Changelog](http://keepachangelog.com/)
|
||||
and this project adheres to [Semantic Versioning](http://semver.org/).
|
||||
|
||||
# [0.37.2] - 2026-01-04
|
||||
|
||||
## Fixed
|
||||
|
||||
- Months are now correctly ordered (Jan-Dec) in the year-end digest chart instead of being sorted alphabetically.
|
||||
- Time spent in a country and city is now calculated correctly for the year-end digest email. #2104
|
||||
- Updated Trix to fix a XSS vulnerability. #2102
|
||||
- Map v2 UI no longer blocks when Immich/Photoprism integration has a bad URL or is unreachable. Added 10-second timeout to photo API requests and improved error handling to prevent UI freezing during initial load. #2085
|
||||
- In Map v2 settings, you can now enable map to be rendered as a globe.
|
||||
|
||||
# [0.37.1] - 2025-12-30
|
||||
|
||||
## Fixed
|
||||
|
|
|
|||
3
Gemfile
3
Gemfile
|
|
@ -12,6 +12,7 @@ gem 'aws-sdk-kms', '~> 1.96.0', require: false
|
|||
gem 'aws-sdk-s3', '~> 1.177.0', require: false
|
||||
gem 'bootsnap', require: false
|
||||
gem 'chartkick'
|
||||
gem 'connection_pool', '< 3' # Pin to 2.x - version 3.0+ has breaking API changes with Rails RedisCacheStore
|
||||
gem 'data_migrate'
|
||||
gem 'devise'
|
||||
gem 'foreman'
|
||||
|
|
@ -48,7 +49,7 @@ gem 'rswag-ui'
|
|||
gem 'rubyzip', '~> 3.2'
|
||||
gem 'sentry-rails', '>= 5.27.0'
|
||||
gem 'sentry-ruby'
|
||||
gem 'sidekiq', '>= 8.0.5'
|
||||
gem 'sidekiq', '8.0.10' # Pin to 8.0.x - sidekiq 8.1+ requires connection_pool 3.0+ which has breaking changes with Rails
|
||||
gem 'sidekiq-cron', '>= 2.3.1'
|
||||
gem 'sidekiq-limit_fetch'
|
||||
gem 'sprockets-rails'
|
||||
|
|
|
|||
48
Gemfile.lock
48
Gemfile.lock
|
|
@ -109,7 +109,7 @@ GEM
|
|||
base64 (0.3.0)
|
||||
bcrypt (3.1.20)
|
||||
benchmark (0.5.0)
|
||||
bigdecimal (3.3.1)
|
||||
bigdecimal (4.0.1)
|
||||
bindata (2.5.1)
|
||||
bootsnap (1.18.6)
|
||||
msgpack (~> 1.2)
|
||||
|
|
@ -129,10 +129,10 @@ GEM
|
|||
rack-test (>= 0.6.3)
|
||||
regexp_parser (>= 1.5, < 3.0)
|
||||
xpath (~> 3.2)
|
||||
chartkick (5.2.0)
|
||||
chartkick (5.2.1)
|
||||
chunky_png (1.4.0)
|
||||
coderay (1.1.3)
|
||||
concurrent-ruby (1.3.5)
|
||||
concurrent-ruby (1.3.6)
|
||||
connection_pool (2.5.5)
|
||||
crack (1.0.1)
|
||||
bigdecimal
|
||||
|
|
@ -215,7 +215,7 @@ GEM
|
|||
csv
|
||||
mini_mime (>= 1.0.0)
|
||||
multi_xml (>= 0.5.2)
|
||||
i18n (1.14.7)
|
||||
i18n (1.14.8)
|
||||
concurrent-ruby (~> 1.0)
|
||||
importmap-rails (2.2.2)
|
||||
actionpack (>= 6.0.0)
|
||||
|
|
@ -227,7 +227,7 @@ GEM
|
|||
rdoc (>= 4.0.0)
|
||||
reline (>= 0.4.2)
|
||||
jmespath (1.6.2)
|
||||
json (2.15.0)
|
||||
json (2.18.0)
|
||||
json-jwt (1.17.0)
|
||||
activesupport (>= 4.2)
|
||||
aes_key_wrap
|
||||
|
|
@ -273,11 +273,12 @@ GEM
|
|||
method_source (1.1.0)
|
||||
mini_mime (1.1.5)
|
||||
mini_portile2 (2.8.9)
|
||||
minitest (5.26.2)
|
||||
minitest (6.0.1)
|
||||
prism (~> 1.5)
|
||||
msgpack (1.7.3)
|
||||
multi_json (1.15.0)
|
||||
multi_xml (0.7.1)
|
||||
bigdecimal (~> 3.1)
|
||||
multi_xml (0.8.0)
|
||||
bigdecimal (>= 3.1, < 5)
|
||||
net-http (0.6.0)
|
||||
uri
|
||||
net-imap (0.5.12)
|
||||
|
|
@ -356,7 +357,7 @@ GEM
|
|||
json
|
||||
yaml
|
||||
parallel (1.27.0)
|
||||
parser (3.3.9.0)
|
||||
parser (3.3.10.0)
|
||||
ast (~> 2.4.1)
|
||||
racc
|
||||
patience_diff (1.2.0)
|
||||
|
|
@ -369,7 +370,7 @@ GEM
|
|||
pp (0.6.3)
|
||||
prettyprint
|
||||
prettyprint (0.2.0)
|
||||
prism (1.5.1)
|
||||
prism (1.7.0)
|
||||
prometheus_exporter (2.2.0)
|
||||
webrick
|
||||
pry (0.15.2)
|
||||
|
|
@ -462,7 +463,7 @@ GEM
|
|||
tsort
|
||||
redis (5.4.1)
|
||||
redis-client (>= 0.22.0)
|
||||
redis-client (0.26.1)
|
||||
redis-client (0.26.2)
|
||||
connection_pool
|
||||
regexp_parser (2.11.3)
|
||||
reline (0.6.3)
|
||||
|
|
@ -512,7 +513,7 @@ GEM
|
|||
rswag-ui (2.17.0)
|
||||
actionpack (>= 5.2, < 8.2)
|
||||
railties (>= 5.2, < 8.2)
|
||||
rubocop (1.81.1)
|
||||
rubocop (1.82.1)
|
||||
json (~> 2.3)
|
||||
language_server-protocol (~> 3.17.0.2)
|
||||
lint_roller (~> 1.1.0)
|
||||
|
|
@ -520,20 +521,20 @@ GEM
|
|||
parser (>= 3.3.0.2)
|
||||
rainbow (>= 2.2.2, < 4.0)
|
||||
regexp_parser (>= 2.9.3, < 3.0)
|
||||
rubocop-ast (>= 1.47.1, < 2.0)
|
||||
rubocop-ast (>= 1.48.0, < 2.0)
|
||||
ruby-progressbar (~> 1.7)
|
||||
unicode-display_width (>= 2.4.0, < 4.0)
|
||||
rubocop-ast (1.47.1)
|
||||
rubocop-ast (1.49.0)
|
||||
parser (>= 3.3.7.2)
|
||||
prism (~> 1.4)
|
||||
rubocop-rails (2.33.4)
|
||||
prism (~> 1.7)
|
||||
rubocop-rails (2.34.2)
|
||||
activesupport (>= 4.2.0)
|
||||
lint_roller (~> 1.1)
|
||||
rack (>= 1.1)
|
||||
rubocop (>= 1.75.0, < 2.0)
|
||||
rubocop-ast (>= 1.44.0, < 2.0)
|
||||
ruby-progressbar (1.13.0)
|
||||
rubyzip (3.2.0)
|
||||
rubyzip (3.2.2)
|
||||
securerandom (0.4.1)
|
||||
selenium-webdriver (4.35.0)
|
||||
base64 (~> 0.2)
|
||||
|
|
@ -541,15 +542,15 @@ GEM
|
|||
rexml (~> 3.2, >= 3.2.5)
|
||||
rubyzip (>= 1.2.2, < 4.0)
|
||||
websocket (~> 1.0)
|
||||
sentry-rails (6.1.1)
|
||||
sentry-rails (6.2.0)
|
||||
railties (>= 5.2.0)
|
||||
sentry-ruby (~> 6.1.1)
|
||||
sentry-ruby (6.1.1)
|
||||
sentry-ruby (~> 6.2.0)
|
||||
sentry-ruby (6.2.0)
|
||||
bigdecimal
|
||||
concurrent-ruby (~> 1.0, >= 1.0.2)
|
||||
shoulda-matchers (6.5.0)
|
||||
activesupport (>= 5.2.0)
|
||||
sidekiq (8.0.8)
|
||||
sidekiq (8.0.10)
|
||||
connection_pool (>= 2.5.0)
|
||||
json (>= 2.9.0)
|
||||
logger (>= 1.6.2)
|
||||
|
|
@ -613,7 +614,7 @@ GEM
|
|||
unicode (0.4.4.5)
|
||||
unicode-display_width (3.2.0)
|
||||
unicode-emoji (~> 4.1)
|
||||
unicode-emoji (4.1.0)
|
||||
unicode-emoji (4.2.0)
|
||||
uri (1.1.1)
|
||||
useragent (0.16.11)
|
||||
validate_url (1.0.15)
|
||||
|
|
@ -662,6 +663,7 @@ DEPENDENCIES
|
|||
bundler-audit
|
||||
capybara
|
||||
chartkick
|
||||
connection_pool (< 3)
|
||||
data_migrate
|
||||
database_consistency (>= 2.0.5)
|
||||
debug
|
||||
|
|
@ -711,7 +713,7 @@ DEPENDENCIES
|
|||
sentry-rails (>= 5.27.0)
|
||||
sentry-ruby
|
||||
shoulda-matchers
|
||||
sidekiq (>= 8.0.5)
|
||||
sidekiq (= 8.0.10)
|
||||
sidekiq-cron (>= 2.3.1)
|
||||
sidekiq-limit_fetch
|
||||
simplecov
|
||||
|
|
|
|||
|
|
@ -31,7 +31,7 @@ class Api::V1::SettingsController < ApiController
|
|||
:preferred_map_layer, :points_rendering_mode, :live_map_enabled,
|
||||
:immich_url, :immich_api_key, :photoprism_url, :photoprism_api_key,
|
||||
:speed_colored_routes, :speed_color_scale, :fog_of_war_threshold,
|
||||
:maps_v2_style, :maps_maplibre_style,
|
||||
:maps_v2_style, :maps_maplibre_style, :globe_projection,
|
||||
enabled_map_layers: []
|
||||
)
|
||||
end
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ class Users::DigestsController < ApplicationController
|
|||
|
||||
before_action :authenticate_user!
|
||||
before_action :authenticate_active_user!, only: [:create]
|
||||
before_action :set_digest, only: [:show]
|
||||
before_action :set_digest, only: %i[show destroy]
|
||||
|
||||
def index
|
||||
@digests = current_user.digests.yearly.order(year: :desc)
|
||||
|
|
@ -30,6 +30,12 @@ class Users::DigestsController < ApplicationController
|
|||
end
|
||||
end
|
||||
|
||||
def destroy
|
||||
year = @digest.year
|
||||
@digest.destroy!
|
||||
redirect_to users_digests_path, notice: "Year-end digest for #{year} has been deleted", status: :see_other
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def set_digest
|
||||
|
|
@ -42,7 +48,7 @@ class Users::DigestsController < ApplicationController
|
|||
tracked_years = current_user.stats.select(:year).distinct.pluck(:year)
|
||||
existing_digests = current_user.digests.yearly.pluck(:year)
|
||||
|
||||
(tracked_years - existing_digests).sort.reverse
|
||||
(tracked_years - existing_digests - [Time.current.year]).sort.reverse
|
||||
end
|
||||
|
||||
def valid_year?(year)
|
||||
|
|
|
|||
|
|
@ -2,6 +2,27 @@
|
|||
|
||||
module Users
|
||||
module DigestsHelper
|
||||
PROGRESS_COLORS = %w[
|
||||
progress-primary progress-secondary progress-accent
|
||||
progress-info progress-success progress-warning
|
||||
].freeze
|
||||
|
||||
def progress_color_for_index(index)
|
||||
PROGRESS_COLORS[index % PROGRESS_COLORS.length]
|
||||
end
|
||||
|
||||
def city_progress_value(city_count, max_cities)
|
||||
return 0 unless max_cities&.positive?
|
||||
|
||||
(city_count.to_f / max_cities * 100).round
|
||||
end
|
||||
|
||||
def max_cities_count(toponyms)
|
||||
return 0 if toponyms.blank?
|
||||
|
||||
toponyms.map { |country| country['cities']&.length || 0 }.max
|
||||
end
|
||||
|
||||
def distance_with_unit(distance_meters, unit)
|
||||
value = Users::Digest.convert_distance(distance_meters, unit).round
|
||||
"#{number_with_delimiter(value)} #{unit}"
|
||||
|
|
|
|||
|
|
@ -56,22 +56,36 @@ export class DataLoader {
|
|||
}
|
||||
data.visitsGeoJSON = this.visitsToGeoJSON(data.visits)
|
||||
|
||||
// Fetch photos
|
||||
try {
|
||||
console.log('[Photos] Fetching photos from:', startDate, 'to', endDate)
|
||||
data.photos = await this.api.fetchPhotos({
|
||||
start_at: startDate,
|
||||
end_at: endDate
|
||||
})
|
||||
console.log('[Photos] Fetched photos:', data.photos.length, 'photos')
|
||||
console.log('[Photos] Sample photo:', data.photos[0])
|
||||
} catch (error) {
|
||||
console.error('[Photos] Failed to fetch photos:', error)
|
||||
// Fetch photos - only if photos layer is enabled and integration is configured
|
||||
// Skip API call if photos are disabled to avoid blocking on failed integrations
|
||||
if (this.settings.photosEnabled) {
|
||||
try {
|
||||
console.log('[Photos] Fetching photos from:', startDate, 'to', endDate)
|
||||
// Use Promise.race to enforce a client-side timeout
|
||||
const photosPromise = this.api.fetchPhotos({
|
||||
start_at: startDate,
|
||||
end_at: endDate
|
||||
})
|
||||
const timeoutPromise = new Promise((_, reject) =>
|
||||
setTimeout(() => reject(new Error('Photo fetch timeout')), 15000) // 15 second timeout
|
||||
)
|
||||
|
||||
data.photos = await Promise.race([photosPromise, timeoutPromise])
|
||||
console.log('[Photos] Fetched photos:', data.photos.length, 'photos')
|
||||
console.log('[Photos] Sample photo:', data.photos[0])
|
||||
} catch (error) {
|
||||
console.warn('[Photos] Failed to fetch photos (non-blocking):', error.message)
|
||||
data.photos = []
|
||||
}
|
||||
} else {
|
||||
console.log('[Photos] Photos layer disabled, skipping fetch')
|
||||
data.photos = []
|
||||
}
|
||||
data.photosGeoJSON = this.photosToGeoJSON(data.photos)
|
||||
console.log('[Photos] Converted to GeoJSON:', data.photosGeoJSON.features.length, 'features')
|
||||
console.log('[Photos] Sample feature:', data.photosGeoJSON.features[0])
|
||||
if (data.photosGeoJSON.features.length > 0) {
|
||||
console.log('[Photos] Sample feature:', data.photosGeoJSON.features[0])
|
||||
}
|
||||
|
||||
// Fetch areas
|
||||
try {
|
||||
|
|
|
|||
|
|
@ -16,17 +16,35 @@ export class MapInitializer {
|
|||
mapStyle = 'streets',
|
||||
center = [0, 0],
|
||||
zoom = 2,
|
||||
showControls = true
|
||||
showControls = true,
|
||||
globeProjection = false
|
||||
} = settings
|
||||
|
||||
const style = await getMapStyle(mapStyle)
|
||||
|
||||
const map = new maplibregl.Map({
|
||||
const mapOptions = {
|
||||
container,
|
||||
style,
|
||||
center,
|
||||
zoom
|
||||
})
|
||||
}
|
||||
|
||||
const map = new maplibregl.Map(mapOptions)
|
||||
|
||||
// Set globe projection after map loads
|
||||
if (globeProjection === true || globeProjection === 'true') {
|
||||
map.on('load', () => {
|
||||
map.setProjection({ type: 'globe' })
|
||||
|
||||
// Add atmosphere effect
|
||||
map.setSky({
|
||||
'atmosphere-blend': [
|
||||
'interpolate', ['linear'], ['zoom'],
|
||||
0, 1, 5, 1, 7, 0
|
||||
]
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
if (showControls) {
|
||||
map.addControl(new maplibregl.NavigationControl(), 'top-right')
|
||||
|
|
|
|||
|
|
@ -91,6 +91,11 @@ export class SettingsController {
|
|||
mapStyleSelect.value = this.settings.mapStyle || 'light'
|
||||
}
|
||||
|
||||
// Sync globe projection toggle
|
||||
if (controller.hasGlobeToggleTarget) {
|
||||
controller.globeToggleTarget.checked = this.settings.globeProjection || false
|
||||
}
|
||||
|
||||
// Sync fog of war settings
|
||||
const fogRadiusInput = controller.element.querySelector('input[name="fogOfWarRadius"]')
|
||||
if (fogRadiusInput) {
|
||||
|
|
@ -178,6 +183,22 @@ export class SettingsController {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Toggle globe projection
|
||||
* Requires page reload to apply since projection is set at map initialization
|
||||
*/
|
||||
async toggleGlobe(event) {
|
||||
const enabled = event.target.checked
|
||||
await SettingsManager.updateSetting('globeProjection', enabled)
|
||||
|
||||
Toast.info('Globe view will be applied after page reload')
|
||||
|
||||
// Prompt user to reload
|
||||
if (confirm('Globe view requires a page reload to take effect. Reload now?')) {
|
||||
window.location.reload()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update route opacity in real-time
|
||||
*/
|
||||
|
|
|
|||
|
|
@ -64,6 +64,8 @@ export default class extends Controller {
|
|||
'speedColoredToggle',
|
||||
'speedColorScaleContainer',
|
||||
'speedColorScaleInput',
|
||||
// Globe projection
|
||||
'globeToggle',
|
||||
// Family members
|
||||
'familyMembersList',
|
||||
'familyMembersContainer',
|
||||
|
|
@ -147,7 +149,8 @@ export default class extends Controller {
|
|||
*/
|
||||
async initializeMap() {
|
||||
this.map = await MapInitializer.initialize(this.containerTarget, {
|
||||
mapStyle: this.settings.mapStyle
|
||||
mapStyle: this.settings.mapStyle,
|
||||
globeProjection: this.settings.globeProjection
|
||||
})
|
||||
}
|
||||
|
||||
|
|
@ -243,6 +246,7 @@ export default class extends Controller {
|
|||
updateFogThresholdDisplay(event) { return this.settingsController.updateFogThresholdDisplay(event) }
|
||||
updateMetersBetweenDisplay(event) { return this.settingsController.updateMetersBetweenDisplay(event) }
|
||||
updateMinutesBetweenDisplay(event) { return this.settingsController.updateMinutesBetweenDisplay(event) }
|
||||
toggleGlobe(event) { return this.settingsController.toggleGlobe(event) }
|
||||
|
||||
// Area Selection Manager methods
|
||||
startSelectArea() { return this.areaSelectionManager.startSelectArea() }
|
||||
|
|
|
|||
|
|
@ -14,7 +14,8 @@ const DEFAULT_SETTINGS = {
|
|||
minutesBetweenRoutes: 60,
|
||||
pointsRenderingMode: 'raw',
|
||||
speedColoredRoutes: false,
|
||||
speedColorScale: '0:#00ff00|15:#00ffff|30:#ff00ff|50:#ffff00|100:#ff3300'
|
||||
speedColorScale: '0:#00ff00|15:#00ffff|30:#ff00ff|50:#ffff00|100:#ff3300',
|
||||
globeProjection: false
|
||||
}
|
||||
|
||||
// Mapping between v2 layer names and v1 layer names in enabled_map_layers array
|
||||
|
|
@ -41,7 +42,8 @@ const BACKEND_SETTINGS_MAP = {
|
|||
minutesBetweenRoutes: 'minutes_between_routes',
|
||||
pointsRenderingMode: 'points_rendering_mode',
|
||||
speedColoredRoutes: 'speed_colored_routes',
|
||||
speedColorScale: 'speed_color_scale'
|
||||
speedColorScale: 'speed_color_scale',
|
||||
globeProjection: 'globe_projection'
|
||||
}
|
||||
|
||||
export class SettingsManager {
|
||||
|
|
@ -152,6 +154,8 @@ export class SettingsManager {
|
|||
value = parseInt(value) || DEFAULT_SETTINGS.minutesBetweenRoutes
|
||||
} else if (frontendKey === 'speedColoredRoutes') {
|
||||
value = value === true || value === 'true'
|
||||
} else if (frontendKey === 'globeProjection') {
|
||||
value = value === true || value === 'true'
|
||||
}
|
||||
|
||||
frontendSettings[frontendKey] = value
|
||||
|
|
@ -219,6 +223,8 @@ export class SettingsManager {
|
|||
value = parseInt(value).toString()
|
||||
} else if (frontendKey === 'speedColoredRoutes') {
|
||||
value = Boolean(value)
|
||||
} else if (frontendKey === 'globeProjection') {
|
||||
value = Boolean(value)
|
||||
}
|
||||
|
||||
backendSettings[backendKey] = value
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@ class Users::Digests::CalculatingJob < ApplicationJob
|
|||
queue_as :digests
|
||||
|
||||
def perform(user_id, year)
|
||||
recalculate_monthly_stats(user_id, year)
|
||||
Users::Digests::CalculateYear.new(user_id, year).call
|
||||
rescue StandardError => e
|
||||
create_digest_failed_notification(user_id, e)
|
||||
|
|
@ -11,6 +12,12 @@ class Users::Digests::CalculatingJob < ApplicationJob
|
|||
|
||||
private
|
||||
|
||||
def recalculate_monthly_stats(user_id, year)
|
||||
(1..12).each do |month|
|
||||
Stats::CalculateMonth.new(user_id, year, month).call
|
||||
end
|
||||
end
|
||||
|
||||
def create_digest_failed_notification(user_id, error)
|
||||
user = User.find(user_id)
|
||||
|
||||
|
|
|
|||
|
|
@ -45,18 +45,13 @@ class User < ApplicationRecord # rubocop:disable Metrics/ClassLength
|
|||
|
||||
def countries_visited
|
||||
Rails.cache.fetch("dawarich/user_#{id}_countries_visited", expires_in: 1.day) do
|
||||
points
|
||||
.without_raw_data
|
||||
.where.not(country_name: [nil, ''])
|
||||
.distinct
|
||||
.pluck(:country_name)
|
||||
.compact
|
||||
countries_visited_uncached
|
||||
end
|
||||
end
|
||||
|
||||
def cities_visited
|
||||
Rails.cache.fetch("dawarich/user_#{id}_cities_visited", expires_in: 1.day) do
|
||||
points.where.not(city: [nil, '']).distinct.pluck(:city).compact
|
||||
cities_visited_uncached
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -139,17 +134,47 @@ class User < ApplicationRecord # rubocop:disable Metrics/ClassLength
|
|||
Time.zone.name
|
||||
end
|
||||
|
||||
# Aggregate countries from all stats' toponyms
|
||||
# This is more accurate than raw point queries as it uses processed data
|
||||
def countries_visited_uncached
|
||||
points
|
||||
.without_raw_data
|
||||
.where.not(country_name: [nil, ''])
|
||||
.distinct
|
||||
.pluck(:country_name)
|
||||
.compact
|
||||
countries = Set.new
|
||||
|
||||
stats.find_each do |stat|
|
||||
toponyms = stat.toponyms
|
||||
next unless toponyms.is_a?(Array)
|
||||
|
||||
toponyms.each do |toponym|
|
||||
next unless toponym.is_a?(Hash)
|
||||
|
||||
countries.add(toponym['country']) if toponym['country'].present?
|
||||
end
|
||||
end
|
||||
|
||||
countries.to_a.sort
|
||||
end
|
||||
|
||||
# Aggregate cities from all stats' toponyms
|
||||
# This respects MIN_MINUTES_SPENT_IN_CITY since toponyms are already filtered
|
||||
def cities_visited_uncached
|
||||
points.where.not(city: [nil, '']).distinct.pluck(:city).compact
|
||||
cities = Set.new
|
||||
|
||||
stats.find_each do |stat|
|
||||
toponyms = stat.toponyms
|
||||
next unless toponyms.is_a?(Array)
|
||||
|
||||
toponyms.each do |toponym|
|
||||
next unless toponym.is_a?(Hash)
|
||||
next unless toponym['cities'].is_a?(Array)
|
||||
|
||||
toponym['cities'].each do |city|
|
||||
next unless city.is_a?(Hash)
|
||||
|
||||
cities.add(city['city']) if city['city'].present?
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
cities.to_a.sort
|
||||
end
|
||||
|
||||
def home_place_coordinates
|
||||
|
|
|
|||
|
|
@ -132,6 +132,11 @@ class Users::Digest < ApplicationRecord
|
|||
(all_time_stats['total_distance'] || 0).to_i
|
||||
end
|
||||
|
||||
def untracked_days
|
||||
days_in_year = Date.leap?(year) ? 366 : 365
|
||||
[days_in_year - total_tracked_days, 0].max.round(1)
|
||||
end
|
||||
|
||||
def distance_km
|
||||
distance.to_f / 1000
|
||||
end
|
||||
|
|
@ -151,4 +156,15 @@ class Users::Digest < ApplicationRecord
|
|||
def generate_sharing_uuid
|
||||
self.sharing_uuid ||= SecureRandom.uuid
|
||||
end
|
||||
|
||||
def total_tracked_days
|
||||
(total_tracked_minutes / 1440.0).round(1)
|
||||
end
|
||||
|
||||
def total_tracked_minutes
|
||||
# Use total_country_minutes if available (new digests),
|
||||
# fall back to summing top_countries_by_time (existing digests)
|
||||
time_spent_by_location['total_country_minutes'] ||
|
||||
top_countries_by_time.sum { |country| country['minutes'].to_i }
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -42,7 +42,8 @@ class Api::UserSerializer
|
|||
photoprism_url: user.safe_settings.photoprism_url,
|
||||
visits_suggestions_enabled: user.safe_settings.visits_suggestions_enabled?,
|
||||
speed_color_scale: user.safe_settings.speed_color_scale,
|
||||
fog_of_war_threshold: user.safe_settings.fog_of_war_threshold
|
||||
fog_of_war_threshold: user.safe_settings.fog_of_war_threshold,
|
||||
globe_projection: user.safe_settings.globe_projection
|
||||
}
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -49,6 +49,17 @@ class CountriesAndCities
|
|||
end
|
||||
|
||||
def calculate_duration_in_minutes(timestamps)
|
||||
((timestamps.max - timestamps.min).to_i / 60)
|
||||
return 0 if timestamps.size < 2
|
||||
|
||||
sorted = timestamps.sort
|
||||
total_minutes = 0
|
||||
gap_threshold_seconds = ::MIN_MINUTES_SPENT_IN_CITY * 60
|
||||
|
||||
sorted.each_cons(2) do |prev_ts, curr_ts|
|
||||
interval_seconds = curr_ts - prev_ts
|
||||
total_minutes += (interval_seconds / 60) if interval_seconds < gap_threshold_seconds
|
||||
end
|
||||
|
||||
total_minutes
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -31,7 +31,10 @@ class Immich::RequestPhotos
|
|||
while page <= max_pages
|
||||
response = JSON.parse(
|
||||
HTTParty.post(
|
||||
immich_api_base_url, headers: headers, body: request_body(page)
|
||||
immich_api_base_url,
|
||||
headers: headers,
|
||||
body: request_body(page),
|
||||
timeout: 10
|
||||
).body
|
||||
)
|
||||
Rails.logger.debug('==== IMMICH RESPONSE ====')
|
||||
|
|
@ -46,6 +49,9 @@ class Immich::RequestPhotos
|
|||
end
|
||||
|
||||
data.flatten
|
||||
rescue HTTParty::Error, Net::OpenTimeout, Net::ReadTimeout => e
|
||||
Rails.logger.error("Immich photo fetch failed: #{e.message}")
|
||||
[]
|
||||
end
|
||||
|
||||
def headers
|
||||
|
|
|
|||
|
|
@ -43,13 +43,17 @@ class Photoprism::RequestPhotos
|
|||
end
|
||||
|
||||
data.flatten
|
||||
rescue HTTParty::Error, Net::OpenTimeout, Net::ReadTimeout => e
|
||||
Rails.logger.error("Photoprism photo fetch failed: #{e.message}")
|
||||
[]
|
||||
end
|
||||
|
||||
def fetch_page(offset)
|
||||
response = HTTParty.get(
|
||||
photoprism_api_base_url,
|
||||
headers: headers,
|
||||
query: request_params(offset)
|
||||
query: request_params(offset),
|
||||
timeout: 10
|
||||
)
|
||||
|
||||
if response.code != 200
|
||||
|
|
|
|||
|
|
@ -3,6 +3,8 @@
|
|||
module Users
|
||||
module Digests
|
||||
class CalculateYear
|
||||
MINUTES_PER_DAY = 1440
|
||||
|
||||
def initialize(user_id, year)
|
||||
@user = ::User.find(user_id)
|
||||
@year = year.to_i
|
||||
|
|
@ -50,7 +52,7 @@ module Users
|
|||
next unless toponym.is_a?(Hash)
|
||||
|
||||
country = toponym['country']
|
||||
next unless country.present?
|
||||
next if country.blank?
|
||||
|
||||
if toponym['cities'].is_a?(Array)
|
||||
toponym['cities'].each do |city|
|
||||
|
|
@ -64,7 +66,7 @@ module Users
|
|||
end
|
||||
end
|
||||
|
||||
country_cities.sort_by { |country, _| country }.map do |country, cities|
|
||||
country_cities.sort_by { |_country, cities| -cities.size }.map do |country, cities|
|
||||
{
|
||||
'country' => country,
|
||||
'cities' => cities.to_a.sort.map { |city| { 'city' => city } }
|
||||
|
|
@ -88,35 +90,120 @@ module Users
|
|||
end
|
||||
|
||||
def calculate_time_spent
|
||||
country_time = Hash.new(0)
|
||||
city_time = Hash.new(0)
|
||||
country_minutes = calculate_actual_country_minutes
|
||||
|
||||
monthly_stats.each do |stat|
|
||||
toponyms = stat.toponyms
|
||||
next unless toponyms.is_a?(Array)
|
||||
{
|
||||
'countries' => format_top_countries(country_minutes),
|
||||
'cities' => calculate_city_time_spent,
|
||||
'total_country_minutes' => country_minutes.values.sum
|
||||
}
|
||||
end
|
||||
|
||||
toponyms.each do |toponym|
|
||||
next unless toponym.is_a?(Hash)
|
||||
def format_top_countries(country_minutes)
|
||||
country_minutes
|
||||
.sort_by { |_, minutes| -minutes }
|
||||
.first(10)
|
||||
.map { |name, minutes| { 'name' => name, 'minutes' => minutes } }
|
||||
end
|
||||
|
||||
country = toponym['country']
|
||||
next unless toponym['cities'].is_a?(Array)
|
||||
def calculate_actual_country_minutes
|
||||
points_by_date = group_points_by_date
|
||||
country_minutes = Hash.new(0)
|
||||
|
||||
toponym['cities'].each do |city|
|
||||
next unless city.is_a?(Hash)
|
||||
points_by_date.each do |_date, day_points|
|
||||
countries_on_day = day_points.map(&:country_name).uniq
|
||||
|
||||
stayed_for = city['stayed_for'].to_i
|
||||
city_name = city['city']
|
||||
|
||||
country_time[country] += stayed_for if country.present?
|
||||
city_time[city_name] += stayed_for if city_name.present?
|
||||
end
|
||||
if countries_on_day.size == 1
|
||||
# Single country day - assign full day
|
||||
country_minutes[countries_on_day.first] += MINUTES_PER_DAY
|
||||
else
|
||||
# Multi-country day - calculate proportional time
|
||||
calculate_proportional_time(day_points, country_minutes)
|
||||
end
|
||||
end
|
||||
|
||||
{
|
||||
'countries' => country_time.sort_by { |_, v| -v }.first(10).map { |name, minutes| { 'name' => name, 'minutes' => minutes } },
|
||||
'cities' => city_time.sort_by { |_, v| -v }.first(10).map { |name, minutes| { 'name' => name, 'minutes' => minutes } }
|
||||
}
|
||||
country_minutes
|
||||
end
|
||||
|
||||
def group_points_by_date
|
||||
points = fetch_year_points_with_country_ordered
|
||||
|
||||
points.group_by do |point|
|
||||
Time.zone.at(point.timestamp).to_date
|
||||
end
|
||||
end
|
||||
|
||||
def calculate_proportional_time(day_points, country_minutes)
|
||||
country_spans = Hash.new(0)
|
||||
points_by_country = day_points.group_by(&:country_name)
|
||||
|
||||
points_by_country.each do |country, country_points|
|
||||
timestamps = country_points.map(&:timestamp)
|
||||
span_seconds = timestamps.max - timestamps.min
|
||||
# Minimum 60 seconds (1 min) for single-point countries
|
||||
country_spans[country] = [span_seconds, 60].max
|
||||
end
|
||||
|
||||
total_spans = country_spans.values.sum.to_f
|
||||
|
||||
country_spans.each do |country, span|
|
||||
proportional_minutes = (span / total_spans * MINUTES_PER_DAY).round
|
||||
country_minutes[country] += proportional_minutes
|
||||
end
|
||||
end
|
||||
|
||||
def fetch_year_points_with_country_ordered
|
||||
start_of_year = Time.zone.local(year, 1, 1, 0, 0, 0)
|
||||
end_of_year = start_of_year.end_of_year
|
||||
|
||||
user.points
|
||||
.without_raw_data
|
||||
.where('timestamp >= ? AND timestamp <= ?', start_of_year.to_i, end_of_year.to_i)
|
||||
.where.not(country_name: [nil, ''])
|
||||
.select(:country_name, :timestamp)
|
||||
.order(timestamp: :asc)
|
||||
end
|
||||
|
||||
def calculate_city_time_spent
|
||||
city_time = aggregate_city_time_from_monthly_stats
|
||||
|
||||
city_time
|
||||
.sort_by { |_, minutes| -minutes }
|
||||
.first(10)
|
||||
.map { |name, minutes| { 'name' => name, 'minutes' => minutes } }
|
||||
end
|
||||
|
||||
def aggregate_city_time_from_monthly_stats
|
||||
city_time = Hash.new(0)
|
||||
|
||||
monthly_stats.each do |stat|
|
||||
process_stat_toponyms(stat, city_time)
|
||||
end
|
||||
|
||||
city_time
|
||||
end
|
||||
|
||||
def process_stat_toponyms(stat, city_time)
|
||||
toponyms = stat.toponyms
|
||||
return unless toponyms.is_a?(Array)
|
||||
|
||||
toponyms.each do |toponym|
|
||||
process_toponym_cities(toponym, city_time)
|
||||
end
|
||||
end
|
||||
|
||||
def process_toponym_cities(toponym, city_time)
|
||||
return unless toponym.is_a?(Hash)
|
||||
return unless toponym['cities'].is_a?(Array)
|
||||
|
||||
toponym['cities'].each do |city|
|
||||
next unless city.is_a?(Hash)
|
||||
|
||||
stayed_for = city['stayed_for'].to_i
|
||||
city_name = city['city']
|
||||
|
||||
city_time[city_name] += stayed_for if city_name.present?
|
||||
end
|
||||
end
|
||||
|
||||
def calculate_first_time_visits
|
||||
|
|
@ -129,8 +216,8 @@ module Users
|
|||
|
||||
def calculate_all_time_stats
|
||||
{
|
||||
'total_countries' => user.countries_visited.count,
|
||||
'total_cities' => user.cities_visited.count,
|
||||
'total_countries' => user.countries_visited_uncached.size,
|
||||
'total_cities' => user.cities_visited_uncached.size,
|
||||
'total_distance' => user.stats.sum(:distance).to_s
|
||||
}
|
||||
end
|
||||
|
|
|
|||
|
|
@ -35,7 +35,7 @@ class Users::ExportData::Points
|
|||
|
||||
output_file.write('[')
|
||||
|
||||
user.points.find_in_batches(batch_size: BATCH_SIZE).with_index do |batch, batch_index|
|
||||
user.points.find_in_batches(batch_size: BATCH_SIZE).with_index do |batch, _batch_index|
|
||||
batch_sql = build_batch_query(batch.map(&:id))
|
||||
result = ActiveRecord::Base.connection.exec_query(batch_sql, 'Points Export Batch')
|
||||
|
||||
|
|
@ -188,13 +188,13 @@ class Users::ExportData::Points
|
|||
}
|
||||
end
|
||||
|
||||
if row['visit_name']
|
||||
point_hash['visit_reference'] = {
|
||||
'name' => row['visit_name'],
|
||||
'started_at' => row['visit_started_at'],
|
||||
'ended_at' => row['visit_ended_at']
|
||||
}
|
||||
end
|
||||
return unless row['visit_name']
|
||||
|
||||
point_hash['visit_reference'] = {
|
||||
'name' => row['visit_name'],
|
||||
'started_at' => row['visit_started_at'],
|
||||
'ended_at' => row['visit_ended_at']
|
||||
}
|
||||
end
|
||||
|
||||
def log_progress(processed, total)
|
||||
|
|
|
|||
|
|
@ -22,7 +22,8 @@ class Users::SafeSettings
|
|||
'visits_suggestions_enabled' => 'true',
|
||||
'enabled_map_layers' => %w[Routes Heatmap],
|
||||
'maps_maplibre_style' => 'light',
|
||||
'digest_emails_enabled' => true
|
||||
'digest_emails_enabled' => true,
|
||||
'globe_projection' => false
|
||||
}.freeze
|
||||
|
||||
def initialize(settings = {})
|
||||
|
|
@ -52,7 +53,8 @@ class Users::SafeSettings
|
|||
speed_color_scale: speed_color_scale,
|
||||
fog_of_war_threshold: fog_of_war_threshold,
|
||||
enabled_map_layers: enabled_map_layers,
|
||||
maps_maplibre_style: maps_maplibre_style
|
||||
maps_maplibre_style: maps_maplibre_style,
|
||||
globe_projection: globe_projection
|
||||
}
|
||||
end
|
||||
# rubocop:enable Metrics/MethodLength
|
||||
|
|
@ -141,6 +143,10 @@ class Users::SafeSettings
|
|||
settings['maps_maplibre_style']
|
||||
end
|
||||
|
||||
def globe_projection
|
||||
ActiveModel::Type::Boolean.new.cast(settings['globe_projection'])
|
||||
end
|
||||
|
||||
def digest_emails_enabled?
|
||||
value = settings['digest_emails_enabled']
|
||||
return true if value.nil?
|
||||
|
|
|
|||
|
|
@ -365,6 +365,19 @@
|
|||
</select>
|
||||
</div>
|
||||
|
||||
<!-- Globe Projection -->
|
||||
<div class="form-control">
|
||||
<label class="label cursor-pointer justify-start gap-3">
|
||||
<input type="checkbox"
|
||||
name="globeProjection"
|
||||
class="toggle toggle-primary"
|
||||
data-maps--maplibre-target="globeToggle"
|
||||
data-action="change->maps--maplibre#toggleGlobe" />
|
||||
<span class="label-text font-medium">Globe View</span>
|
||||
</label>
|
||||
<p class="text-sm text-base-content/60 mt-1">Render map as a 3D globe (requires page reload)</p>
|
||||
</div>
|
||||
|
||||
<div class="divider"></div>
|
||||
|
||||
<!-- Route Opacity -->
|
||||
|
|
|
|||
|
|
@ -79,7 +79,7 @@
|
|||
</h2>
|
||||
<div class="w-full h-48 bg-base-200 rounded-lg p-4 relative">
|
||||
<%= column_chart(
|
||||
@digest.monthly_distances.sort.map { |month, distance_meters|
|
||||
@digest.monthly_distances.sort_by { |month, _| month.to_i }.map { |month, distance_meters|
|
||||
[Date::ABBR_MONTHNAMES[month.to_i], Users::Digest.convert_distance(distance_meters.to_i, @distance_unit).round]
|
||||
},
|
||||
height: '200px',
|
||||
|
|
|
|||
|
|
@ -101,7 +101,7 @@
|
|||
</h2>
|
||||
<div class="w-full h-64 bg-base-100 rounded-lg p-4">
|
||||
<%= column_chart(
|
||||
@digest.monthly_distances.sort.map { |month, distance_meters|
|
||||
@digest.monthly_distances.sort_by { |month, _| month.to_i }.map { |month, distance_meters|
|
||||
[Date::ABBR_MONTHNAMES[month.to_i], Users::Digest.convert_distance(distance_meters.to_i, @distance_unit).round]
|
||||
},
|
||||
height: '250px',
|
||||
|
|
@ -142,6 +142,19 @@
|
|||
<span class="text-gray-600"><%= format_time_spent(country['minutes']) %></span>
|
||||
</div>
|
||||
<% end %>
|
||||
|
||||
<% if @digest.untracked_days > 0 %>
|
||||
<div class="flex justify-between items-center p-3 bg-base-100 rounded-lg border-2 border-dashed border-gray-200">
|
||||
<div class="flex items-center gap-3">
|
||||
<span class="badge badge-lg badge-ghost">?</span>
|
||||
<span class="text-gray-500 italic">No tracking data</span>
|
||||
</div>
|
||||
<span class="text-gray-500"><%= pluralize(@digest.untracked_days.round, 'day') %></span>
|
||||
</div>
|
||||
<p class="text-sm text-gray-500 mt-2 flex items-center justify-center gap-2">
|
||||
<%= icon 'lightbulb' %> Track more in <%= @digest.year + 1 %> to see a fuller picture of your travels!
|
||||
</p>
|
||||
<% end %>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
@ -155,14 +168,7 @@
|
|||
</h2>
|
||||
<div class="space-y-4 w-full">
|
||||
<% if @digest.toponyms.present? %>
|
||||
<% max_cities = @digest.toponyms.map { |country| country['cities']&.length || 0 }.max %>
|
||||
<% progress_colors = ['progress-primary', 'progress-secondary', 'progress-accent', 'progress-info', 'progress-success', 'progress-warning'] %>
|
||||
|
||||
<% @digest.toponyms.each_with_index do |country, index| %>
|
||||
<% cities_count = country['cities']&.length || 0 %>
|
||||
<% progress_value = max_cities&.positive? ? (cities_count.to_f / max_cities * 100).round : 0 %>
|
||||
<% color_class = progress_colors[index % progress_colors.length] %>
|
||||
|
||||
<div class="space-y-2">
|
||||
<div class="flex justify-between items-center">
|
||||
<span class="font-semibold">
|
||||
|
|
@ -170,10 +176,10 @@
|
|||
<%= country['country'] %>
|
||||
</span>
|
||||
<span class="text-sm">
|
||||
<%= pluralize(cities_count, 'city') %>
|
||||
<%= pluralize(country['cities']&.length || 0, 'city') %>
|
||||
</span>
|
||||
</div>
|
||||
<progress class="progress <%= color_class %> w-full" value="<%= progress_value %>" max="100"></progress>
|
||||
<progress class="progress <%= progress_color_for_index(index) %> w-full" value="<%= city_progress_value(country['cities']&.length || 0, max_cities_count(@digest.toponyms)) %>" max="100"></progress>
|
||||
</div>
|
||||
<% end %>
|
||||
<% else %>
|
||||
|
|
@ -214,6 +220,12 @@
|
|||
<button class="btn btn-outline" onclick="sharing_modal.showModal()">
|
||||
<%= icon 'share' %> Share
|
||||
</button>
|
||||
<%= button_to users_digest_path(year: @digest.year),
|
||||
method: :delete,
|
||||
class: 'btn btn-outline btn-error',
|
||||
data: { turbo_confirm: "Are you sure you want to delete the #{@digest.year} digest? This cannot be undone." } do %>
|
||||
<%= icon 'trash-2' %> Delete
|
||||
<% end %>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
|
|
|||
|
|
@ -250,13 +250,24 @@
|
|||
<div class="stat-card">
|
||||
<div class="stat-label">Where You Spent the Most Time</div>
|
||||
<ul class="location-list">
|
||||
<% @digest.top_countries_by_time.take(3).each do |country| %>
|
||||
<% @digest.top_countries_by_time.take(5).each do |country| %>
|
||||
<li>
|
||||
<span><%= country_flag(country['name']) %> <%= country['name'] %></span>
|
||||
<span><%= format_time_spent(country['minutes']) %></span>
|
||||
</li>
|
||||
<% end %>
|
||||
<% if @digest.untracked_days > 0 %>
|
||||
<li style="border-top: 2px dashed #e2e8f0; padding-top: 12px; margin-top: 4px;">
|
||||
<span style="color: #94a3b8; font-style: italic;">No tracking data</span>
|
||||
<span style="color: #94a3b8;"><%= pluralize(@digest.untracked_days.round, 'day') %></span>
|
||||
</li>
|
||||
<% end %>
|
||||
</ul>
|
||||
<% if @digest.untracked_days > 0 %>
|
||||
<p style="color: #64748b; font-size: 13px; margin-top: 12px;">
|
||||
💡 Track more in <%= @digest.year + 1 %> to see a fuller picture of your travels!
|
||||
</p>
|
||||
<% end %>
|
||||
</div>
|
||||
<% end %>
|
||||
|
||||
|
|
|
|||
|
|
@ -101,8 +101,8 @@ Rails.application.routes.draw do
|
|||
|
||||
# User digests routes (yearly/monthly digest reports)
|
||||
scope module: 'users' do
|
||||
resources :digests, only: %i[index create], param: :year, as: :users_digests
|
||||
get 'digests/:year', to: 'digests#show', as: :users_digest, constraints: { year: /\d{4}/ }
|
||||
resources :digests, only: %i[index create show destroy], param: :year, as: :users_digests,
|
||||
constraints: { year: /\d{4}/ }
|
||||
end
|
||||
get 'shared/digest/:uuid', to: 'shared/digests#show', as: :shared_users_digest
|
||||
patch 'digests/:year/sharing',
|
||||
|
|
|
|||
|
|
@ -3,21 +3,19 @@ class InstallRailsPulseTables < ActiveRecord::Migration[8.0]
|
|||
def change
|
||||
# Load and execute the Rails Pulse schema directly
|
||||
# This ensures the migration is always in sync with the schema file
|
||||
schema_file = File.join(::Rails.root.to_s, "db/rails_pulse_schema.rb")
|
||||
schema_file = Rails.root.join('db/rails_pulse_schema.rb').to_s
|
||||
|
||||
if File.exist?(schema_file)
|
||||
say "Loading Rails Pulse schema from db/rails_pulse_schema.rb"
|
||||
raise 'Rails Pulse schema file not found at db/rails_pulse_schema.rb' unless File.exist?(schema_file)
|
||||
|
||||
# Load the schema file to define RailsPulse::Schema
|
||||
load schema_file
|
||||
say 'Loading Rails Pulse schema from db/rails_pulse_schema.rb'
|
||||
|
||||
# Execute the schema in the context of this migration
|
||||
RailsPulse::Schema.call(connection)
|
||||
# Load the schema file to define RailsPulse::Schema
|
||||
load schema_file
|
||||
|
||||
say "Rails Pulse tables created successfully"
|
||||
say "The schema file db/rails_pulse_schema.rb remains as your single source of truth"
|
||||
else
|
||||
raise "Rails Pulse schema file not found at db/rails_pulse_schema.rb"
|
||||
end
|
||||
# Execute the schema in the context of this migration
|
||||
RailsPulse::Schema.call(connection)
|
||||
|
||||
say 'Rails Pulse tables created successfully'
|
||||
say 'The schema file db/rails_pulse_schema.rb remains as your single source of truth'
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -0,0 +1,21 @@
|
|||
class AddIndexesToPointsForStatsQuery < ActiveRecord::Migration[8.0]
|
||||
disable_ddl_transaction!
|
||||
|
||||
def change
|
||||
# Index for counting reverse geocoded points
|
||||
# This speeds up: COUNT(reverse_geocoded_at)
|
||||
add_index :points, [:user_id, :reverse_geocoded_at],
|
||||
where: "reverse_geocoded_at IS NOT NULL",
|
||||
algorithm: :concurrently,
|
||||
if_not_exists: true,
|
||||
name: 'index_points_on_user_id_and_reverse_geocoded_at'
|
||||
|
||||
# Index for finding points with empty geodata
|
||||
# This speeds up: COUNT(CASE WHEN geodata = '{}'::jsonb THEN 1 END)
|
||||
add_index :points, [:user_id, :geodata],
|
||||
where: "geodata = '{}'::jsonb",
|
||||
algorithm: :concurrently,
|
||||
if_not_exists: true,
|
||||
name: 'index_points_on_user_id_and_empty_geodata'
|
||||
end
|
||||
end
|
||||
4
db/schema.rb
generated
4
db/schema.rb
generated
|
|
@ -10,7 +10,7 @@
|
|||
#
|
||||
# It's strongly recommended that you check this file into your version control system.
|
||||
|
||||
ActiveRecord::Schema[8.0].define(version: 2025_12_28_163703) do
|
||||
ActiveRecord::Schema[8.0].define(version: 2026_01_03_114630) do
|
||||
# These are extensions that must be enabled in order to support this database
|
||||
enable_extension "pg_catalog.plpgsql"
|
||||
enable_extension "postgis"
|
||||
|
|
@ -260,6 +260,7 @@ ActiveRecord::Schema[8.0].define(version: 2025_12_28_163703) do
|
|||
t.index ["track_id"], name: "index_points_on_track_id"
|
||||
t.index ["user_id", "city"], name: "idx_points_user_city"
|
||||
t.index ["user_id", "country_name"], name: "idx_points_user_country_name"
|
||||
t.index ["user_id", "geodata"], name: "index_points_on_user_id_and_empty_geodata", where: "(geodata = '{}'::jsonb)"
|
||||
t.index ["user_id", "reverse_geocoded_at"], name: "index_points_on_user_id_and_reverse_geocoded_at", where: "(reverse_geocoded_at IS NOT NULL)"
|
||||
t.index ["user_id", "timestamp", "track_id"], name: "idx_points_track_generation"
|
||||
t.index ["user_id", "timestamp"], name: "idx_points_user_visit_null_timestamp", where: "(visit_id IS NULL)"
|
||||
|
|
@ -521,6 +522,7 @@ ActiveRecord::Schema[8.0].define(version: 2025_12_28_163703) do
|
|||
add_foreign_key "notifications", "users"
|
||||
add_foreign_key "place_visits", "places"
|
||||
add_foreign_key "place_visits", "visits"
|
||||
add_foreign_key "points", "points_raw_data_archives", column: "raw_data_archive_id", name: "fk_rails_points_raw_data_archives", on_delete: :nullify, validate: false
|
||||
add_foreign_key "points", "points_raw_data_archives", column: "raw_data_archive_id", on_delete: :nullify
|
||||
add_foreign_key "points", "users"
|
||||
add_foreign_key "points", "visits"
|
||||
|
|
|
|||
244
docs/How_to_install_Dawarich_on_Unraid.md
Normal file
244
docs/How_to_install_Dawarich_on_Unraid.md
Normal file
|
|
@ -0,0 +1,244 @@
|
|||
# How to install Dawarich on Unraid
|
||||
|
||||
> [!WARNING]
|
||||
> **Do not use autoupdate** and **do not update** any Dawarich container **without** [**backing up your data**](https://dawarich.app/docs/tutorials/backup-and-restore) first and checking for breaking changes in the [updating guides](https://dawarich.app/docs/updating-guides)!
|
||||
>
|
||||
> *Dawarich is still in beta and a rapidly evolving project, and some changes may break compatibility with older versions.*
|
||||
|
||||
This guide is written for:
|
||||
|
||||
- Unraid OS 7.1.4
|
||||
- Dawarich 0.33.0
|
||||
|
||||
## Installation methods: CA Templates vs. Docker Compose
|
||||
|
||||
For Dawarich to run 4 docker containers are required:
|
||||
|
||||
- `dawarich_db` - PostgreSQL database
|
||||
- `dawarich_redis` - Redis database
|
||||
- `dawarich_app` - Dawarich web application
|
||||
- `dawarich_sidekiq` - Sidekiq worker (for background jobs)
|
||||
|
||||
> [!NOTE]
|
||||
> Some containers depend on others to be running first. Therefore this guide will follow this order: `dawarich_db` >> `dawarich_redis` >> `dawarich_app` >> `dawarich_sidekiq`.
|
||||
|
||||
[Usually](https://dawarich.app/docs/intro/) all 4 containers are created and started together using [Docker Compose](https://docs.docker.com/compose/). Unraid [does not support Docker Compose natively](https://docs.unraid.net/unraid-os/using-unraid-to/run-docker-containers/overview/). Instead, it uses its own implementation of `DockerMan` for managing Docker containers via [Community Applications (CA)](https://docs.unraid.net/unraid-os/using-unraid-to/run-docker-containers/community-applications/) plugin.
|
||||
|
||||
However, there is a [Docker Compose Manager](https://forums.unraid.net/topic/114415-plugin-docker-compose-manager/) plugin that can be used to [setup and run Dawarich using Docker Compose](https://github.com/Freika/dawarich/discussions/150). This method is not covered in this guide.
|
||||
|
||||
*Feel free to contribute a PR if you want to add it.*
|
||||
|
||||
## Support for Unraid CA Templates
|
||||
|
||||
> [!IMPORTANT]
|
||||
> Since [Freika is not maintaining the Unraid CA templates](https://github.com/Freika/dawarich/issues/1382), all Unraid-related issues should be raised in the appropriate repositories or Unraid forum threads:
|
||||
>
|
||||
> - `dawarich_db` & `dawarich_redis` [Github](https://github.com/pa7rickstar/unraid_templates) and [Unraid forum](https://forums.unraid.net/topic/193769-support-pa7rickstar-docker-templates/)
|
||||
> - `dawarich_app` & `dawarich_sidekiq` [Github](https://github.com/nwithan8/unraid_templates) and [Unraid forum](https://forums.unraid.net/topic/133764-support-grtgbln-docker-templates/)
|
||||
|
||||
There is an official [PostGIS](https://hub.docker.com/r/postgis/postgis) CA you can use for `dawarich_db` and an official [redis](https://forums.unraid.net/topic/89502-support-a75g-repo/) CA you can use for `dawarich_redis`. However, if you don’t want to set up the correct volume paths, environment variables, health-checks and arguments by yourself, [Pa7rickStar](https://github.com/pa7rickstar) has created CA for [`dawarich_db`](https://github.com/pa7rickstar/unraid_templates/blob/main/templates/dawarich_db.xml) and [`dawarich_redis`](https://github.com/pa7rickstar/unraid_templates/blob/main/templates/dawarich_redis.xml) which are preconfigured for an easy Dawarich installation.
|
||||
|
||||
For [`dawarich_app`](https://github.com/nwithan8/unraid_templates/discussions/273) and [`dawarich_sidekiq`](https://github.com/nwithan8/unraid_templates/discussions/310) [grtgbln](https://forums.unraid.net/profile/81372-grtgbln/) aka. [nwithan8 on Github](https://github.com/nwithan8) is [maintaining](https://github.com/Freika/dawarich/issues/928#issuecomment-2749287192) Unraid CA templates.
|
||||
|
||||
> [!NOTE]
|
||||
> All 4 CA use the official docker hub repositories of redis, PostGIS, Dawarich and Sidekiq.
|
||||
|
||||
## Installation
|
||||
|
||||
> [!IMPORTANT]
|
||||
> This guide assumes you will name the containers `dawarich_redis`, `dawarich_db`, `dawarich_app` and `dawarich_sidekiq`. You can use other names, but make sure to adjust the settings accordingly or use IP addresses and ports instead.
|
||||
|
||||
### 1. (Optional) Setup user-defined bridge network
|
||||
|
||||
The [docker-compose file](https://github.com/Freika/dawarich/blob/master/docker/docker-compose.yml) usually used to set up Dawarich creates a user-defined bridge network for Dawarich containers so they are isolated in their own network and are still able to communicate with each other. This step is optional, but [it is a good practice](https://trash-guides.info/File-and-Folder-Structure/How-to-set-up/Unraid/#setting-up-the-containers) to do so.
|
||||
|
||||
> [!NOTE]
|
||||
> Check out this [video on YouTube](https://www.youtube.com/watch?v=bKFMS5C4CG0) if you want to learn how different network drivers work in Docker.
|
||||
|
||||
#### 1. Set Unraid to preserve user-defined networks
|
||||
|
||||
By default user created networks are removed from Unraid when Docker is being restarted. This is done to prevent potential conflicts with the automatic generation of custom networks. If you want to use a user-defined bridge network for Dawarich containers, you need to change this behavior. Go to `Settings` -> `Docker` -> enable `Advanced View` and set `Preserve user defined networks` to `Yes`.
|
||||
Docker has to be stopped so that the setting can be changed.
|
||||
|
||||
> [!WARNING]
|
||||
> Change this setting to preserve user defined networks, but it is the responsibility of the user to ensure these entries work correctly and are conflict free.
|
||||
|
||||
#### 2. Create the user-defined bridge network
|
||||
|
||||
To create a user-defined bridge network called `dawarich`, open the terminal on your Unraid server and run:
|
||||
|
||||
```bash
|
||||
docker network create dawarich
|
||||
```
|
||||
|
||||
> [!NOTE]
|
||||
> You can check if the network was created successfully by running:
|
||||
>
|
||||
> ```bash
|
||||
> docker network ls
|
||||
> ```
|
||||
|
||||
### 2. Install `dawarich_db` container
|
||||
|
||||
Install the `dawarich_db` CA template from `Pa7rickStar's Repository`.
|
||||
|
||||
- The container Name `dawarich_db` will be used by other containers instead of an IP address and port. If you use this method, you don't need to set the `Database port` in this template (there is also no need to access the database directly).
|
||||
- You can leave the `Extra Parameters` as is.
|
||||
- `--restart=always` in the `Extra Parameters` field (you have to turn on `ADVANCED VIEW` in the top right corner to see this field) will make sure the container is restarted automatically if it crashes.
|
||||
> [!NOTE]
|
||||
> This will cause the container to start after you boot the host [even if autostart is set to off](https://forums.unraid.net/topic/57181-docker-faq/page/2/#findComment-600177).
|
||||
- If you have set up a user-defined bridge network in the first step, select it under `Network Type`. Otherwise, leave it at `bridge`.
|
||||
- The default `Database username` is fine. You should set a strong `Database password`.
|
||||
> [!NOTE]
|
||||
> You can change the `Database password` without having the old one from the Unraid (host) Terminal by running:
|
||||
>
|
||||
> ```bash
|
||||
> docker exec -it dawarich_db \
|
||||
> psql -U postgres -d postgres -c "ALTER ROLE postgres WITH PASSWORD 'NEW_STRONG_PASSWORD';"
|
||||
>```
|
||||
>
|
||||
> Replace `NEW_STRONG_PASSWORD` with your new password and keep the `''`.
|
||||
|
||||
### 3. Install `dawarich_redis` container
|
||||
|
||||
Install the `dawarich_redis` CA template from `Pa7rickStar's Repository`.
|
||||
|
||||
- The container Name `dawarich_redis` will be used by other containers instead of an IP address.
|
||||
- `--restart=always` in the `Extra Parameters` field (you have to turn on `ADVANCED VIEW` in the top right corner to see this field) will make sure the container is restarted automatically if it crashes.
|
||||
> [!NOTE]
|
||||
> This will cause the container to start after you boot the host [even if autostart is set to off](https://forums.unraid.net/topic/57181-docker-faq/page/2/#findComment-600177).
|
||||
- If you have set up a user-defined bridge network in the first step, select it under `Network Type`. Otherwise, leave it at `bridge`.
|
||||
- If you have no port conflicts, leave the `Redis Port` at default value. Otherwise, change it to a free port. This port has to be used later in the `dawarich_app` and `dawarich_sidekiq` containers.
|
||||
|
||||
### 4. Install `dawarich_app` container
|
||||
|
||||
Install the `dawarich_app` CA template from `grtgbln's Repository`.
|
||||
|
||||
- You do not need to change the container Name to `dawarich_app` as other containers won't establish a connection by themselves.
|
||||
- Set `Extra Parameters` (you have to turn on `ADVANCED VIEW` in the top right corner to see this field) to:
|
||||
|
||||
```bash
|
||||
--entrypoint=web-entrypoint.sh --restart=on-failure --health-cmd='wget -qO- http://127.0.0.1:3000/api/v1/health | grep -q "\"status\"[[:space:]]*:[[:space:]]*\"ok\"" || exit 1' --health-interval=10s --health-retries=30 --health-start-period=30s --health-timeout=10s
|
||||
```
|
||||
|
||||
> [!NOTE]
|
||||
> The `--restart=on-failure` parameter will make sure the container is restarted automatically if it crashes. This *might* cause the container to start after you boot the host [even if autostart is set to off](https://forums.unraid.net/topic/57181-docker-faq/page/2/#findComment-600177).
|
||||
- If you have set up a user-defined bridge network in the first step, select it under `Network Type`. Otherwise, leave it at `bridge`.
|
||||
- If you have no port conflicts, leave the `Web Port` at default value. Otherwise, change it to a free port. This port will be used to access the Dawarich web interface. In this case make sure to set the same port for `WebUI` (default value is `http://[IP]:[PORT:3000]/`).
|
||||
- If you haven't changed any file paths in the previous containers, you can leave all the paths at default values. Otherwise, set the correct paths.
|
||||
- Set the `Redis URL` to `redis://dawarich_redis:6379/0` if you are using the container name `dawarich_redis` and the default port in the redis container.
|
||||
- Set the `PostGIS - Host` to `dawarich_db` if you are using the container name `dawarich_db`. Otherwise use the IP address.
|
||||
- Set `PostGIS - Username`, `PostGIS - Password` and `PostGIS - Database` to the same values you used in the setup of your `dawarich_db` container.
|
||||
- For any other settings refer to the [official documentation for environment variables and settings](https://dawarich.app/docs/environment-variables-and-settings).
|
||||
|
||||
> [!WARNING]
|
||||
> The CA template sets `PHOTON_API_HOST` to `photon.komoot.io` and `STORE_GEODATA` to `true` by default. This means the container will try to translate your location data (longitude, latitude) to addresses, cities etc. and [save the result in the database](https://github.com/Freika/dawarich/discussions/1457). In order to do so, the app will [send your data to the service provider, which might raise privacy concerns](https://dawarich.app/docs/tutorials/reverse-geocoding/). If you don't want this behavior you should leave `PHOTON_API_HOST` empty! You could also [set up your own reverse geocoding service](#setup-reverse-geocoding).
|
||||
|
||||
### 5. Install `dawarich_sidekiq` container
|
||||
|
||||
Install the `dawarich_sidekiq` CA template from `grtgbln's Repository`.
|
||||
|
||||
- The same notes as for the `dawarich_app` container apply here.
|
||||
- Set `Extra Parameters` (you have to turn on `ADVANCED VIEW` in the top right corner to see this field) to:
|
||||
|
||||
```bash
|
||||
--entrypoint=sidekiq-entrypoint.sh --restart=on-failure --health-cmd='pgrep -f sidekiq >/dev/null || exit 1' --health-interval=10s --health-retries=30 --health-start-period=30s --health-timeout=10s
|
||||
```
|
||||
|
||||
> [!NOTE]
|
||||
> The `--restart=on-failure` parameter will make sure the container is restarted automatically if it crashes. This *might* cause the container to start after you boot the host [even if autostart is set to off](https://forums.unraid.net/topic/57181-docker-faq/page/2/#findComment-600177).
|
||||
|
||||
> [!WARNING]
|
||||
> The CA template sets `PHOTON_API_HOST` to `photon.komoot.io` and `STORE_GEODATA` to `true` by default. This means the container will try to translate your location data (longitude, latitude) to addresses, cities etc. and [save the result in the database](https://github.com/Freika/dawarich/discussions/1457). In order to do so, the app will [send your data to the service provider, which might raise privacy concerns](https://dawarich.app/docs/tutorials/reverse-geocoding/). If you don't want this behavior you should leave `PHOTON_API_HOST` empty! You could also [set up your own reverse geocoding service](#setup-reverse-geocoding).
|
||||
|
||||
## Post installation
|
||||
|
||||
### 1. Starting the containers
|
||||
|
||||
The containers should start automatically when you are setting them up for the first time. If not, start them manually in the Unraid web interface. Use the correct order: `dawarich_db` >> `dawarich_redis` >> `dawarich_app` >> `dawarich_sidekiq`.
|
||||
|
||||
### 2. Health checks
|
||||
|
||||
According to the [Unraid documentation](https://docs.unraid.net/unraid-os/using-unraid-to/run-docker-containers/overview/#health-checks), colored health indicators next to each container’s icon are shown in the Unraid web interface when health checks are configured in the containers. Depending on the selected theme the container health might be indicated by text in the `uptime` column instead.
|
||||
|
||||
You can check the health status of the containers from the Unraid (host) Terminal:
|
||||
|
||||
```bash
|
||||
docker ps --format 'table {{.Names}}\t{{.Status}}'
|
||||
```
|
||||
|
||||
This should show something like this:
|
||||
|
||||
```bash
|
||||
root@tower# docker ps --format 'table {{.Names}}\t{{.Status}}'
|
||||
NAMES STATUS
|
||||
dawarich_sidekiq Up About a minute (healthy)
|
||||
dawarich_app Up About a minute (healthy)
|
||||
dawarich_db Up About a minute (healthy)
|
||||
dawarich_redis Up About a minute (healthy)
|
||||
```
|
||||
|
||||
If not, you can check the health status of each container individually:
|
||||
|
||||
```bash
|
||||
docker inspect --format '{{json .State.Health}}' dawarich_db | jq
|
||||
docker inspect --format '{{json .State.Health}}' dawarich_redis | jq
|
||||
docker inspect --format '{{json .State.Health}}' dawarich_app | jq
|
||||
docker inspect --format '{{json .State.Health}}' dawarich_sidekiq | jq
|
||||
```
|
||||
|
||||
> [!NOTE]
|
||||
> There is a difference between `liveness` and `readiness` probes. Simply put:
|
||||
>
|
||||
> - `liveness` = "is the process up?"
|
||||
> - `readiness` = "can it do useful work?"
|
||||
>
|
||||
> The health checks configured in the `dawarich_app` and `dawarich_sidekiq` containers are `liveness` probes. This means that they will show `healthy` as long as the main process is running, even if the application is not fully started yet. So it might take a while until Dawarich is actually ready to use, even if the health check shows `healthy`. This also means that the health check will show `healthy` even if the application is not fully functional (e.g. if it can not connect to the database). You should check the logs of the `dawarich_app` container for any errors if you suspect that something is wrong.
|
||||
|
||||
### 3. Check the logs
|
||||
|
||||
You should check the Logs of each container for any errors.
|
||||
|
||||
> [!NOTE]
|
||||
> You might see this warning in the `dawarich_redis` container:
|
||||
>
|
||||
> ```bash
|
||||
> # WARNING Memory overcommit must be enabled! Without it, a background save or replication may fail under low memory condition. Being disabled, it can also cause failures without low memory condition, see https://github.com/jemalloc/jemalloc/issues/1328. To fix this issue add 'vm.overcommit_memory = 1' to /etc/sysctl.conf and then reboot or run the command 'sysctl vm.overcommit_memory=1' for this to take effect.
|
||||
> ```
|
||||
>
|
||||
> The `sysctl vm.overcommit_memory=1` command referenced there has to be run on the Unraid host (not in the container). As of now the author of this guide cannot confidently advise on this, so please check the [Unraid forum](https://forums.unraid.net/) for help.
|
||||
|
||||
## Setup Reverse Geocoding
|
||||
|
||||
> [!NOTE]
|
||||
> Please check out the Dawarich [docs on reverse geocoding](https://dawarich.app/docs/tutorials/reverse-geocoding).
|
||||
|
||||
### 1. Install `Photon` container
|
||||
|
||||
If you want to [set up your own reverse geocoding service](https://dawarich.app/docs/tutorials/reverse-geocoding/#setting-up-your-own-reverse-geocoding-service) install the `Photon` CA template from `Pa7rickStar's Repository` and change the [environment variables](https://github.com/rtuszik/photon-docker?tab=readme-ov-file#configuration-options) to your liking.
|
||||
|
||||
- To reduce the load on the official Photon servers you can use the [community mirrors](https://github.com/rtuszik/photon-docker?tab=readme-ov-file#community-mirrors).
|
||||
- The default value for `REGION` is `planet` which might be more than you need.
|
||||
|
||||
> [!WARNING]
|
||||
> Large file sizes! This might take more than 200GB depending on the selected region. See here for the [available regions](https://github.com/rtuszik/photon-docker#available-regions).
|
||||
|
||||
### 2. Post installation
|
||||
|
||||
Check the logs after the container started. Photon should download the index files for the `REGION` you set.
|
||||
|
||||
After the index files are downloaded and Photon is ready, you can check that it is working by opening the following URL in a web browser:
|
||||
|
||||
```zsh
|
||||
http://localhost:[PORT]/api?q=Berlin
|
||||
```
|
||||
|
||||
### 3. Configure Photon for Dawarich
|
||||
|
||||
In your `dawarich_app` and `dawarich_sidekiq` containers:
|
||||
|
||||
- Set the `Photon API - Host` to `[IP]:[PORT]` of your `Photon` container (without the `[]`).
|
||||
- Set `Photon API - Use HTTPS` to `false`.
|
||||
- Restart the containers in the [correct order](#1-starting-the-containers).
|
||||
|
||||
*2025-10-07 by [Pa7rickStar](https://github.com/Pa7rickStar) with contributions from [nwithan8](https://github.com/nwithan8).*
|
||||
|
|
@ -1,4 +1,5 @@
|
|||
import { test as setup, expect } from '@playwright/test';
|
||||
import { disableGlobeProjection } from '../v2/helpers/setup.js';
|
||||
|
||||
const authFile = 'e2e/temp/.auth/user.json';
|
||||
|
||||
|
|
@ -19,6 +20,9 @@ setup('authenticate', async ({ page }) => {
|
|||
// Wait for successful navigation to map (v1 or v2 depending on user preference)
|
||||
await page.waitForURL(/\/map(\/v[12])?/, { timeout: 10000 });
|
||||
|
||||
// Disable globe projection to ensure consistent E2E test behavior
|
||||
await disableGlobeProjection(page);
|
||||
|
||||
// Save authentication state
|
||||
await page.context().storageState({ path: authFile });
|
||||
});
|
||||
|
|
|
|||
|
|
@ -2,6 +2,33 @@
|
|||
* Helper functions for Maps V2 E2E tests
|
||||
*/
|
||||
|
||||
/**
|
||||
* Disable globe projection setting via API
|
||||
* This ensures consistent map rendering for E2E tests
|
||||
* @param {Page} page - Playwright page object
|
||||
*/
|
||||
export async function disableGlobeProjection(page) {
|
||||
// Get API key from the page (requires being logged in)
|
||||
const apiKey = await page.evaluate(() => {
|
||||
const metaTag = document.querySelector('meta[name="api-key"]');
|
||||
return metaTag?.content;
|
||||
});
|
||||
|
||||
if (apiKey) {
|
||||
await page.request.patch('/api/v1/settings', {
|
||||
headers: {
|
||||
'Authorization': `Bearer ${apiKey}`,
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
data: {
|
||||
settings: {
|
||||
globe_projection: false
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Navigate to Maps V2 page
|
||||
* @param {Page} page - Playwright page object
|
||||
|
|
|
|||
18
package-lock.json
generated
18
package-lock.json
generated
|
|
@ -11,7 +11,7 @@
|
|||
"leaflet": "^1.9.4",
|
||||
"maplibre-gl": "^5.13.0",
|
||||
"postcss": "^8.4.49",
|
||||
"trix": "^2.1.15"
|
||||
"trix": "^2.1.16"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@playwright/test": "^1.56.1",
|
||||
|
|
@ -575,12 +575,14 @@
|
|||
"license": "ISC"
|
||||
},
|
||||
"node_modules/trix": {
|
||||
"version": "2.1.15",
|
||||
"resolved": "https://registry.npmjs.org/trix/-/trix-2.1.15.tgz",
|
||||
"integrity": "sha512-LoaXWczdTUV8+3Box92B9b1iaDVbxD14dYemZRxi3PwY+AuDm97BUJV2aHLBUFPuDABhxp0wzcbf0CxHCVmXiw==",
|
||||
"license": "MIT",
|
||||
"version": "2.1.16",
|
||||
"resolved": "https://registry.npmjs.org/trix/-/trix-2.1.16.tgz",
|
||||
"integrity": "sha512-XtZgWI+oBvLzX7CWnkIf+ZWC+chL+YG/TkY43iMTV0Zl+CJjn18B1GJUCEWJ8qgfpcyMBuysnNAfPWiv2sV14A==",
|
||||
"dependencies": {
|
||||
"dompurify": "^3.2.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 18"
|
||||
}
|
||||
},
|
||||
"node_modules/undici-types": {
|
||||
|
|
@ -986,9 +988,9 @@
|
|||
"integrity": "sha512-gRa9gwYU3ECmQYv3lslts5hxuIa90veaEcxDYuu3QGOIAEM2mOZkVHp48ANJuu1CURtRdHKUBY5Lm1tHV+sD4g=="
|
||||
},
|
||||
"trix": {
|
||||
"version": "2.1.15",
|
||||
"resolved": "https://registry.npmjs.org/trix/-/trix-2.1.15.tgz",
|
||||
"integrity": "sha512-LoaXWczdTUV8+3Box92B9b1iaDVbxD14dYemZRxi3PwY+AuDm97BUJV2aHLBUFPuDABhxp0wzcbf0CxHCVmXiw==",
|
||||
"version": "2.1.16",
|
||||
"resolved": "https://registry.npmjs.org/trix/-/trix-2.1.16.tgz",
|
||||
"integrity": "sha512-XtZgWI+oBvLzX7CWnkIf+ZWC+chL+YG/TkY43iMTV0Zl+CJjn18B1GJUCEWJ8qgfpcyMBuysnNAfPWiv2sV14A==",
|
||||
"requires": {
|
||||
"dompurify": "^3.2.5"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@
|
|||
"leaflet": "^1.9.4",
|
||||
"maplibre-gl": "^5.13.0",
|
||||
"postcss": "^8.4.49",
|
||||
"trix": "^2.1.15"
|
||||
"trix": "^2.1.16"
|
||||
},
|
||||
"engines": {
|
||||
"node": "18.17.1",
|
||||
|
|
|
|||
|
|
@ -163,12 +163,16 @@ RSpec.describe User, type: :model do
|
|||
describe '#countries_visited' do
|
||||
subject { user.countries_visited }
|
||||
|
||||
let!(:point1) { create(:point, user:, country_name: 'Germany') }
|
||||
let!(:point2) { create(:point, user:, country_name: 'France') }
|
||||
let!(:point3) { create(:point, user:, country_name: nil) }
|
||||
let!(:point4) { create(:point, user:, country_name: '') }
|
||||
let!(:stat) do
|
||||
create(:stat, user:, toponyms: [
|
||||
{ 'country' => 'Germany', 'cities' => [{ 'city' => 'Berlin', 'stayed_for' => 120 }] },
|
||||
{ 'country' => 'France', 'cities' => [{ 'city' => 'Paris', 'stayed_for' => 90 }] },
|
||||
{ 'country' => nil, 'cities' => [] },
|
||||
{ 'country' => '', 'cities' => [] }
|
||||
])
|
||||
end
|
||||
|
||||
it 'returns array of countries' do
|
||||
it 'returns array of countries from stats toponyms' do
|
||||
expect(subject).to include('Germany', 'France')
|
||||
expect(subject.count).to eq(2)
|
||||
end
|
||||
|
|
@ -181,12 +185,18 @@ RSpec.describe User, type: :model do
|
|||
describe '#cities_visited' do
|
||||
subject { user.cities_visited }
|
||||
|
||||
let!(:point1) { create(:point, user:, city: 'Berlin') }
|
||||
let!(:point2) { create(:point, user:, city: 'Paris') }
|
||||
let!(:point3) { create(:point, user:, city: nil) }
|
||||
let!(:point4) { create(:point, user:, city: '') }
|
||||
let!(:stat) do
|
||||
create(:stat, user:, toponyms: [
|
||||
{ 'country' => 'Germany', 'cities' => [
|
||||
{ 'city' => 'Berlin', 'stayed_for' => 120 },
|
||||
{ 'city' => nil, 'stayed_for' => 60 },
|
||||
{ 'city' => '', 'stayed_for' => 60 }
|
||||
] },
|
||||
{ 'country' => 'France', 'cities' => [{ 'city' => 'Paris', 'stayed_for' => 90 }] }
|
||||
])
|
||||
end
|
||||
|
||||
it 'returns array of cities' do
|
||||
it 'returns array of cities from stats toponyms' do
|
||||
expect(subject).to include('Berlin', 'Paris')
|
||||
expect(subject.count).to eq(2)
|
||||
end
|
||||
|
|
@ -210,11 +220,15 @@ RSpec.describe User, type: :model do
|
|||
describe '#total_countries' do
|
||||
subject { user.total_countries }
|
||||
|
||||
let!(:point1) { create(:point, user:, country_name: 'Germany') }
|
||||
let!(:point2) { create(:point, user:, country_name: 'France') }
|
||||
let!(:point3) { create(:point, user:, country_name: nil) }
|
||||
let!(:stat) do
|
||||
create(:stat, user:, toponyms: [
|
||||
{ 'country' => 'Germany', 'cities' => [] },
|
||||
{ 'country' => 'France', 'cities' => [] },
|
||||
{ 'country' => nil, 'cities' => [] }
|
||||
])
|
||||
end
|
||||
|
||||
it 'returns number of countries' do
|
||||
it 'returns number of countries from stats toponyms' do
|
||||
expect(subject).to eq(2)
|
||||
end
|
||||
end
|
||||
|
|
@ -222,11 +236,17 @@ RSpec.describe User, type: :model do
|
|||
describe '#total_cities' do
|
||||
subject { user.total_cities }
|
||||
|
||||
let!(:point1) { create(:point, user:, city: 'Berlin') }
|
||||
let!(:point2) { create(:point, user:, city: 'Paris') }
|
||||
let!(:point3) { create(:point, user:, city: nil) }
|
||||
let!(:stat) do
|
||||
create(:stat, user:, toponyms: [
|
||||
{ 'country' => 'Germany', 'cities' => [
|
||||
{ 'city' => 'Berlin', 'stayed_for' => 120 },
|
||||
{ 'city' => 'Paris', 'stayed_for' => 90 },
|
||||
{ 'city' => nil, 'stayed_for' => 60 }
|
||||
] }
|
||||
])
|
||||
end
|
||||
|
||||
it 'returns number of cities' do
|
||||
it 'returns number of cities from stats toponyms' do
|
||||
expect(subject).to eq(2)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -27,6 +27,14 @@ RSpec.describe '/digests', type: :request do
|
|||
expect(response.status).to eq(302)
|
||||
end
|
||||
end
|
||||
|
||||
describe 'DELETE /destroy' do
|
||||
it 'redirects to the sign in page' do
|
||||
delete users_digest_url(year: 2024)
|
||||
|
||||
expect(response).to redirect_to(new_user_session_path)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user is signed in' do
|
||||
|
|
@ -137,5 +145,40 @@ RSpec.describe '/digests', type: :request do
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'DELETE /destroy' do
|
||||
let!(:digest) { create(:users_digest, user:, year: 2024) }
|
||||
|
||||
it 'deletes the digest' do
|
||||
expect do
|
||||
delete users_digest_url(year: 2024)
|
||||
end.to change(Users::Digest, :count).by(-1)
|
||||
end
|
||||
|
||||
it 'redirects with success notice' do
|
||||
delete users_digest_url(year: 2024)
|
||||
|
||||
expect(response).to redirect_to(users_digests_path)
|
||||
expect(flash[:notice]).to eq('Year-end digest for 2024 has been deleted')
|
||||
end
|
||||
|
||||
it 'returns not found for non-existent digest' do
|
||||
delete users_digest_url(year: 2020)
|
||||
|
||||
expect(response).to redirect_to(users_digests_path)
|
||||
expect(flash[:alert]).to eq('Digest not found')
|
||||
end
|
||||
|
||||
it 'cannot delete another user digest' do
|
||||
other_user = create(:user)
|
||||
other_digest = create(:users_digest, user: other_user, year: 2023)
|
||||
|
||||
delete users_digest_url(year: 2023)
|
||||
|
||||
expect(response).to redirect_to(users_digests_path)
|
||||
expect(flash[:alert]).to eq('Digest not found')
|
||||
expect(other_digest.reload).to be_present
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -79,6 +79,58 @@ RSpec.describe CountriesAndCities do
|
|||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when points have a gap larger than threshold (passing through)' do
|
||||
let(:points) do
|
||||
[
|
||||
# User in Berlin at 9:00, leaves, returns at 11:00
|
||||
create(:point, city: 'Berlin', country: 'Germany', timestamp:),
|
||||
create(:point, city: 'Berlin', country: 'Germany', timestamp: timestamp + 15.minutes),
|
||||
# 105-minute gap here (user left the city)
|
||||
create(:point, city: 'Berlin', country: 'Germany', timestamp: timestamp + 120.minutes),
|
||||
create(:point, city: 'Berlin', country: 'Germany', timestamp: timestamp + 130.minutes)
|
||||
]
|
||||
end
|
||||
|
||||
it 'only counts time between consecutive points within threshold' do
|
||||
# Old logic would count 130 minutes (span from first to last)
|
||||
# New logic counts: 15 min (0->15) + 10 min (120->130) = 25 minutes
|
||||
# Since 25 < 60, Berlin should be filtered out
|
||||
expect(countries_and_cities).to eq(
|
||||
[
|
||||
CountriesAndCities::CountryData.new(
|
||||
country: 'Germany',
|
||||
cities: []
|
||||
)
|
||||
]
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when points span a long time but have continuous presence' do
|
||||
let(:points) do
|
||||
# Points every 30 minutes for 2.5 hours = continuous presence
|
||||
(0..5).map do |i|
|
||||
create(:point, city: 'Berlin', country: 'Germany', timestamp: timestamp + (i * 30).minutes)
|
||||
end
|
||||
end
|
||||
|
||||
it 'counts the full duration when all intervals are within threshold' do
|
||||
# 5 intervals of 30 minutes each = 150 minutes total
|
||||
expect(countries_and_cities).to eq(
|
||||
[
|
||||
CountriesAndCities::CountryData.new(
|
||||
country: 'Germany',
|
||||
cities: [
|
||||
CountriesAndCities::CityData.new(
|
||||
city: 'Berlin', points: 6, timestamp: (timestamp + 150.minutes).to_i, stayed_for: 150
|
||||
)
|
||||
]
|
||||
)
|
||||
]
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -61,16 +61,18 @@ RSpec.describe Points::RawData::Verifier do
|
|||
end.not_to change { archive.reload.verified_at }
|
||||
end
|
||||
|
||||
it 'detects deleted points' do
|
||||
it 'still verifies successfully when points are deleted from database' do
|
||||
# Force archive creation first
|
||||
archive_id = archive.id
|
||||
|
||||
# Then delete one point from database
|
||||
points.first.destroy
|
||||
|
||||
# Verification should still succeed - deleted points are acceptable
|
||||
# (users should be able to delete their data without failing archive verification)
|
||||
expect do
|
||||
verifier.verify_specific_archive(archive_id)
|
||||
end.not_to change { archive.reload.verified_at }
|
||||
end.to change { archive.reload.verified_at }.from(nil)
|
||||
end
|
||||
|
||||
it 'detects raw_data mismatch between archive and database' do
|
||||
|
|
|
|||
|
|
@ -155,10 +155,14 @@ RSpec.describe Stats::CalculateMonth do
|
|||
context 'when user visited multiple cities with mixed durations' do
|
||||
let!(:mixed_points) do
|
||||
[
|
||||
# Berlin: 70 minutes (should be included)
|
||||
# Berlin: 70 minutes with continuous presence (should be included)
|
||||
# Points every 35 minutes: 0, 35, 70 = 70 min total
|
||||
create(:point, user:, import:, timestamp: timestamp_base,
|
||||
city: 'Berlin', country_name: 'Germany',
|
||||
lonlat: 'POINT(13.404954 52.520008)'),
|
||||
create(:point, user:, import:, timestamp: timestamp_base + 35.minutes,
|
||||
city: 'Berlin', country_name: 'Germany',
|
||||
lonlat: 'POINT(13.404954 52.520008)'),
|
||||
create(:point, user:, import:, timestamp: timestamp_base + 70.minutes,
|
||||
city: 'Berlin', country_name: 'Germany',
|
||||
lonlat: 'POINT(13.404954 52.520008)'),
|
||||
|
|
@ -171,10 +175,17 @@ RSpec.describe Stats::CalculateMonth do
|
|||
city: 'Prague', country_name: 'Czech Republic',
|
||||
lonlat: 'POINT(14.4378 50.0755)'),
|
||||
|
||||
# Vienna: 90 minutes (should be included)
|
||||
# Vienna: 90 minutes with continuous presence (should be included)
|
||||
# Points every 30 minutes: 150, 180, 210, 240 = 90 min total
|
||||
create(:point, user:, import:, timestamp: timestamp_base + 150.minutes,
|
||||
city: 'Vienna', country_name: 'Austria',
|
||||
lonlat: 'POINT(16.3738 48.2082)'),
|
||||
create(:point, user:, import:, timestamp: timestamp_base + 180.minutes,
|
||||
city: 'Vienna', country_name: 'Austria',
|
||||
lonlat: 'POINT(16.3738 48.2082)'),
|
||||
create(:point, user:, import:, timestamp: timestamp_base + 210.minutes,
|
||||
city: 'Vienna', country_name: 'Austria',
|
||||
lonlat: 'POINT(16.3738 48.2082)'),
|
||||
create(:point, user:, import:, timestamp: timestamp_base + 240.minutes,
|
||||
city: 'Vienna', country_name: 'Austria',
|
||||
lonlat: 'POINT(16.3738 48.2082)')
|
||||
|
|
|
|||
|
|
@ -76,19 +76,169 @@ RSpec.describe Users::Digests::CalculateYear do
|
|||
expect(calculate_digest.monthly_distances['3']).to eq('0') # Missing month
|
||||
end
|
||||
|
||||
it 'calculates time spent by location' do
|
||||
it 'calculates time spent by location using hybrid day-based approach' do
|
||||
# Create points to test hybrid calculation
|
||||
# Jan 1: single country day (Germany) -> full 1440 minutes
|
||||
jan_1_10am = Time.zone.local(2024, 1, 1, 10, 0, 0).to_i
|
||||
jan_1_11am = Time.zone.local(2024, 1, 1, 11, 0, 0).to_i
|
||||
jan_1_12pm = Time.zone.local(2024, 1, 1, 12, 0, 0).to_i
|
||||
# Feb 1: single country day (France) -> full 1440 minutes
|
||||
feb_1_10am = Time.zone.local(2024, 2, 1, 10, 0, 0).to_i
|
||||
|
||||
create(:point, user: user, timestamp: jan_1_10am, country_name: 'Germany', city: 'Berlin')
|
||||
create(:point, user: user, timestamp: jan_1_11am, country_name: 'Germany', city: 'Berlin')
|
||||
create(:point, user: user, timestamp: jan_1_12pm, country_name: 'Germany', city: 'Munich')
|
||||
create(:point, user: user, timestamp: feb_1_10am, country_name: 'France', city: 'Paris')
|
||||
|
||||
countries = calculate_digest.time_spent_by_location['countries']
|
||||
cities = calculate_digest.time_spent_by_location['cities']
|
||||
|
||||
expect(countries.first['name']).to eq('Germany')
|
||||
expect(countries.first['minutes']).to eq(720) # 480 + 240
|
||||
# Germany: 1 full day = 1440 minutes
|
||||
germany_country = countries.find { |c| c['name'] == 'Germany' }
|
||||
expect(germany_country['minutes']).to eq(1440)
|
||||
|
||||
# France: 1 full day = 1440 minutes
|
||||
france_country = countries.find { |c| c['name'] == 'France' }
|
||||
expect(france_country['minutes']).to eq(1440)
|
||||
|
||||
# Cities: based on stayed_for from monthly stats (sum across months)
|
||||
expect(cities.first['name']).to eq('Berlin')
|
||||
expect(cities.first['minutes']).to eq(480)
|
||||
end
|
||||
|
||||
it 'calculates all time stats' do
|
||||
expect(calculate_digest.all_time_stats['total_distance']).to eq('125000')
|
||||
end
|
||||
|
||||
context 'when user visits same country across multiple months' do
|
||||
it 'counts each day as a full day for single-country days' do
|
||||
# Create hourly points across multiple days in March and July
|
||||
mar_start = Time.zone.local(2024, 3, 1, 10, 0, 0).to_i
|
||||
jul_start = Time.zone.local(2024, 7, 1, 10, 0, 0).to_i
|
||||
|
||||
# Create 3 days of hourly points in March
|
||||
3.times do |day|
|
||||
3.times do |hour|
|
||||
timestamp = mar_start + (day * 24 * 60 * 60) + (hour * 60 * 60)
|
||||
create(:point, user: user, timestamp: timestamp, country_name: 'Germany', city: 'Berlin')
|
||||
end
|
||||
end
|
||||
|
||||
# Create 3 days of hourly points in July
|
||||
3.times do |day|
|
||||
3.times do |hour|
|
||||
timestamp = jul_start + (day * 24 * 60 * 60) + (hour * 60 * 60)
|
||||
create(:point, user: user, timestamp: timestamp, country_name: 'Germany', city: 'Munich')
|
||||
end
|
||||
end
|
||||
|
||||
# Create the monthly stats
|
||||
create(:stat, user: user, year: 2024, month: 3, distance: 10_000, toponyms: [
|
||||
{ 'country' => 'Germany', 'cities' => [
|
||||
{ 'city' => 'Berlin', 'stayed_for' => 14_400 }
|
||||
] }
|
||||
])
|
||||
|
||||
create(:stat, user: user, year: 2024, month: 7, distance: 15_000, toponyms: [
|
||||
{ 'country' => 'Germany', 'cities' => [
|
||||
{ 'city' => 'Munich', 'stayed_for' => 14_400 }
|
||||
] }
|
||||
])
|
||||
|
||||
digest = calculate_digest
|
||||
countries = digest.time_spent_by_location['countries']
|
||||
germany = countries.find { |c| c['name'] == 'Germany' }
|
||||
|
||||
# Each single-country day = 1440 minutes
|
||||
# 6 days total (3 in March + 3 in July) = 6 * 1440 = 8640 minutes
|
||||
expect(germany['minutes']).to eq(6 * 1440)
|
||||
|
||||
# Total should equal exactly 6 days
|
||||
total_days = germany['minutes'] / 1440.0
|
||||
expect(total_days).to eq(6)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when there are large gaps between points on same day' do
|
||||
it 'still counts the full day for single-country day' do
|
||||
point_1 = Time.zone.local(2024, 1, 1, 10, 0, 0).to_i
|
||||
point_2 = Time.zone.local(2024, 1, 1, 12, 0, 0).to_i # 2 hours later
|
||||
point_3 = Time.zone.local(2024, 1, 1, 18, 0, 0).to_i # 6 hours later
|
||||
|
||||
create(:point, user: user, timestamp: point_1, country_name: 'Germany')
|
||||
create(:point, user: user, timestamp: point_2, country_name: 'Germany')
|
||||
create(:point, user: user, timestamp: point_3, country_name: 'Germany')
|
||||
|
||||
digest = calculate_digest
|
||||
germany = digest.time_spent_by_location['countries'].find { |c| c['name'] == 'Germany' }
|
||||
|
||||
# Hybrid approach: single-country day = full 1440 minutes
|
||||
# regardless of gaps between points
|
||||
expect(germany['minutes']).to eq(1440)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when transitioning between countries on same day' do
|
||||
it 'calculates proportional time based on time spans' do
|
||||
# Multi-country day: Germany 10:00-10:30, France 11:00-11:30
|
||||
point_1 = Time.zone.local(2024, 1, 1, 10, 0, 0).to_i
|
||||
point_2 = Time.zone.local(2024, 1, 1, 10, 30, 0).to_i # In Germany
|
||||
point_3 = Time.zone.local(2024, 1, 1, 11, 0, 0).to_i # Now in France
|
||||
point_4 = Time.zone.local(2024, 1, 1, 11, 30, 0).to_i # Still in France
|
||||
|
||||
create(:point, user: user, timestamp: point_1, country_name: 'Germany')
|
||||
create(:point, user: user, timestamp: point_2, country_name: 'Germany')
|
||||
create(:point, user: user, timestamp: point_3, country_name: 'France')
|
||||
create(:point, user: user, timestamp: point_4, country_name: 'France')
|
||||
|
||||
digest = calculate_digest
|
||||
countries = digest.time_spent_by_location['countries']
|
||||
|
||||
germany = countries.find { |c| c['name'] == 'Germany' }
|
||||
france = countries.find { |c| c['name'] == 'France' }
|
||||
|
||||
# Germany span: 10:30 - 10:00 = 30 min = 1800 seconds
|
||||
# France span: 11:30 - 11:00 = 30 min = 1800 seconds
|
||||
# Total spans = 3600 seconds
|
||||
# Each country gets 50% of 1440 = 720 minutes
|
||||
expect(germany['minutes']).to eq(720)
|
||||
expect(france['minutes']).to eq(720)
|
||||
# Total = 1440 (exactly one day)
|
||||
expect(germany['minutes'] + france['minutes']).to eq(1440)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when visiting multiple countries on same day' do
|
||||
it 'calculates proportional time and never exceeds one day total' do
|
||||
# This tests the fix for the original bug: border crossing should not count double
|
||||
# France: 8am-9am (1 hour span = 3600 seconds)
|
||||
# Germany: 10am-11am (1 hour span = 3600 seconds)
|
||||
jan_1_8am = Time.zone.local(2024, 1, 1, 8, 0, 0).to_i
|
||||
jan_1_9am = Time.zone.local(2024, 1, 1, 9, 0, 0).to_i
|
||||
jan_1_10am = Time.zone.local(2024, 1, 1, 10, 0, 0).to_i # Border crossing
|
||||
jan_1_11am = Time.zone.local(2024, 1, 1, 11, 0, 0).to_i
|
||||
|
||||
create(:point, user: user, timestamp: jan_1_8am, country_name: 'France')
|
||||
create(:point, user: user, timestamp: jan_1_9am, country_name: 'France')
|
||||
create(:point, user: user, timestamp: jan_1_10am, country_name: 'Germany')
|
||||
create(:point, user: user, timestamp: jan_1_11am, country_name: 'Germany')
|
||||
|
||||
digest = calculate_digest
|
||||
countries = digest.time_spent_by_location['countries']
|
||||
|
||||
france = countries.find { |c| c['name'] == 'France' }
|
||||
germany = countries.find { |c| c['name'] == 'Germany' }
|
||||
|
||||
# France span: 3600 seconds, Germany span: 3600 seconds
|
||||
# Total spans: 7200 seconds
|
||||
# Each gets 50% of 1440 = 720 minutes
|
||||
expect(france['minutes']).to eq(720)
|
||||
expect(germany['minutes']).to eq(720)
|
||||
# Total = 1440 (exactly one day) - NOT 2 days as the bug would have caused
|
||||
expect(france['minutes'] + germany['minutes']).to eq(1440)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when digest already exists' do
|
||||
let!(:existing_digest) do
|
||||
create(:users_digest, user: user, year: 2024, period_type: :yearly, distance: 10_000)
|
||||
|
|
|
|||
|
|
@ -31,7 +31,8 @@ RSpec.describe Users::SafeSettings do
|
|||
speed_color_scale: nil,
|
||||
fog_of_war_threshold: nil,
|
||||
enabled_map_layers: %w[Routes Heatmap],
|
||||
maps_maplibre_style: 'light'
|
||||
maps_maplibre_style: 'light',
|
||||
globe_projection: false
|
||||
}
|
||||
)
|
||||
end
|
||||
|
|
@ -82,7 +83,8 @@ RSpec.describe Users::SafeSettings do
|
|||
'visits_suggestions_enabled' => false,
|
||||
'enabled_map_layers' => %w[Points Routes Areas Photos],
|
||||
'maps_maplibre_style' => 'light',
|
||||
'digest_emails_enabled' => true
|
||||
'digest_emails_enabled' => true,
|
||||
'globe_projection' => false
|
||||
}
|
||||
)
|
||||
end
|
||||
|
|
@ -110,7 +112,8 @@ RSpec.describe Users::SafeSettings do
|
|||
speed_color_scale: nil,
|
||||
fog_of_war_threshold: nil,
|
||||
enabled_map_layers: %w[Points Routes Areas Photos],
|
||||
maps_maplibre_style: 'light'
|
||||
maps_maplibre_style: 'light',
|
||||
globe_projection: false
|
||||
}
|
||||
)
|
||||
end
|
||||
|
|
|
|||
Loading…
Reference in a new issue