Mirror of https://github.com/Freika/dawarich.git (synced 2026-01-10 17:21:38 -05:00)

Compare commits: 8 commits, b4c2def2be ... 594b7ffdd2

| Author | SHA1 | Date |
|---|---|---|
| | 594b7ffdd2 | |
| | a4b9ed1087 | |
| | 966dc01651 | |
| | 96a78881f1 | |
| | aa3bf93a45 | |
| | 24cbabf3b7 | |
| | 8ecb2e3765 | |
| | b037be3299 | |

15 changed files with 261 additions and 94 deletions

@@ -1 +1 @@
-0.37.1
+0.37.2

@@ -4,6 +4,15 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](http://keepachangelog.com/)
 and this project adheres to [Semantic Versioning](http://semver.org/).

+# [0.37.2] - 2026-01-03
+
+## Fixed
+
+- Months are now correctly ordered (Jan-Dec) in the year-end digest chart instead of being sorted alphabetically.
+- Time spent in a country and city is now calculated correctly for the year-end digest email. #2104
+- Updated Trix to fix an XSS vulnerability. #2102
+- Map v2 UI no longer blocks when the Immich/Photoprism integration has a bad URL or is unreachable. Added a 10-second timeout to photo API requests and improved error handling to prevent the UI from freezing during the initial load. #2085
+
 # [0.37.1] - 2025-12-30

 ## Fixed
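
The first changelog entry above is a plain sorting fix: the digest view sorts month keys that are numeric strings, so a lexicographic sort puts '10'..'12' before '2'. A minimal sketch of the difference, with hypothetical data (the actual view change appears further down in this diff):

```ruby
# Hypothetical monthly_distances hash: month number (as a string) => metres.
monthly_distances = { '1' => 7_000, '2' => 13_000, '10' => 42_000 }

# Hash#sort compares the string keys, so '10' sorts before '2'.
monthly_distances.sort.map(&:first)
# => ["1", "10", "2"]

# Sorting by the numeric value of the key restores calendar order (Jan-Dec),
# which is what the digest views below switch to before building the chart.
monthly_distances.sort_by { |month, _| month.to_i }.map(&:first)
# => ["1", "2", "10"]
```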

Gemfile.lock (57 lines changed)

@@ -109,7 +109,7 @@ GEM
     base64 (0.3.0)
     bcrypt (3.1.20)
     benchmark (0.5.0)
-    bigdecimal (3.3.1)
+    bigdecimal (4.0.1)
     bindata (2.5.1)
     bootsnap (1.18.6)
       msgpack (~> 1.2)

@@ -129,11 +129,11 @@ GEM
       rack-test (>= 0.6.3)
       regexp_parser (>= 1.5, < 3.0)
       xpath (~> 3.2)
-    chartkick (5.2.0)
+    chartkick (5.2.1)
     chunky_png (1.4.0)
     coderay (1.1.3)
-    concurrent-ruby (1.3.5)
-    connection_pool (2.5.5)
+    concurrent-ruby (1.3.6)
+    connection_pool (3.0.2)
     crack (1.0.1)
       bigdecimal
       rexml

@@ -215,7 +215,7 @@ GEM
       csv
       mini_mime (>= 1.0.0)
       multi_xml (>= 0.5.2)
-    i18n (1.14.7)
+    i18n (1.14.8)
       concurrent-ruby (~> 1.0)
     importmap-rails (2.2.2)
       actionpack (>= 6.0.0)

@@ -227,7 +227,7 @@ GEM
       rdoc (>= 4.0.0)
       reline (>= 0.4.2)
     jmespath (1.6.2)
-    json (2.15.0)
+    json (2.18.0)
     json-jwt (1.17.0)
       activesupport (>= 4.2)
       aes_key_wrap

@@ -273,11 +273,12 @@ GEM
     method_source (1.1.0)
     mini_mime (1.1.5)
     mini_portile2 (2.8.9)
-    minitest (5.26.2)
+    minitest (6.0.1)
+      prism (~> 1.5)
     msgpack (1.7.3)
     multi_json (1.15.0)
-    multi_xml (0.7.1)
-      bigdecimal (~> 3.1)
+    multi_xml (0.8.0)
+      bigdecimal (>= 3.1, < 5)
     net-http (0.6.0)
       uri
     net-imap (0.5.12)

@@ -356,7 +357,7 @@ GEM
       json
       yaml
     parallel (1.27.0)
-    parser (3.3.9.0)
+    parser (3.3.10.0)
       ast (~> 2.4.1)
       racc
     patience_diff (1.2.0)

@@ -369,7 +370,7 @@ GEM
     pp (0.6.3)
       prettyprint
     prettyprint (0.2.0)
-    prism (1.5.1)
+    prism (1.7.0)
     prometheus_exporter (2.2.0)
       webrick
     pry (0.15.2)

@@ -462,7 +463,7 @@ GEM
       tsort
     redis (5.4.1)
       redis-client (>= 0.22.0)
-    redis-client (0.26.1)
+    redis-client (0.26.2)
       connection_pool
     regexp_parser (2.11.3)
     reline (0.6.3)

@@ -512,7 +513,7 @@ GEM
     rswag-ui (2.17.0)
       actionpack (>= 5.2, < 8.2)
       railties (>= 5.2, < 8.2)
-    rubocop (1.81.1)
+    rubocop (1.82.1)
       json (~> 2.3)
       language_server-protocol (~> 3.17.0.2)
       lint_roller (~> 1.1.0)

@@ -520,20 +521,20 @@ GEM
       parser (>= 3.3.0.2)
       rainbow (>= 2.2.2, < 4.0)
       regexp_parser (>= 2.9.3, < 3.0)
-      rubocop-ast (>= 1.47.1, < 2.0)
+      rubocop-ast (>= 1.48.0, < 2.0)
       ruby-progressbar (~> 1.7)
       unicode-display_width (>= 2.4.0, < 4.0)
-    rubocop-ast (1.47.1)
+    rubocop-ast (1.49.0)
       parser (>= 3.3.7.2)
-      prism (~> 1.4)
-    rubocop-rails (2.33.4)
+      prism (~> 1.7)
+    rubocop-rails (2.34.2)
       activesupport (>= 4.2.0)
       lint_roller (~> 1.1)
       rack (>= 1.1)
       rubocop (>= 1.75.0, < 2.0)
       rubocop-ast (>= 1.44.0, < 2.0)
     ruby-progressbar (1.13.0)
-    rubyzip (3.2.0)
+    rubyzip (3.2.2)
     securerandom (0.4.1)
     selenium-webdriver (4.35.0)
       base64 (~> 0.2)

@@ -541,20 +542,20 @@ GEM
       rexml (~> 3.2, >= 3.2.5)
       rubyzip (>= 1.2.2, < 4.0)
       websocket (~> 1.0)
-    sentry-rails (6.1.1)
+    sentry-rails (6.2.0)
       railties (>= 5.2.0)
-      sentry-ruby (~> 6.1.1)
-    sentry-ruby (6.1.1)
+      sentry-ruby (~> 6.2.0)
+    sentry-ruby (6.2.0)
       bigdecimal
       concurrent-ruby (~> 1.0, >= 1.0.2)
     shoulda-matchers (6.5.0)
       activesupport (>= 5.2.0)
-    sidekiq (8.0.8)
-      connection_pool (>= 2.5.0)
-      json (>= 2.9.0)
-      logger (>= 1.6.2)
-      rack (>= 3.1.0)
-      redis-client (>= 0.23.2)
+    sidekiq (8.1.0)
+      connection_pool (>= 3.0.0)
+      json (>= 2.16.0)
+      logger (>= 1.7.0)
+      rack (>= 3.2.0)
+      redis-client (>= 0.26.0)
     sidekiq-cron (2.3.1)
       cronex (>= 0.13.0)
       fugit (~> 1.8, >= 1.11.1)

@@ -613,7 +614,7 @@ GEM
     unicode (0.4.4.5)
     unicode-display_width (3.2.0)
       unicode-emoji (~> 4.1)
-    unicode-emoji (4.1.0)
+    unicode-emoji (4.2.0)
     uri (1.1.1)
     useragent (0.16.11)
     validate_url (1.0.15)

@@ -56,22 +56,36 @@ export class DataLoader
     }
     data.visitsGeoJSON = this.visitsToGeoJSON(data.visits)

-    // Fetch photos
-    try {
-      console.log('[Photos] Fetching photos from:', startDate, 'to', endDate)
-      data.photos = await this.api.fetchPhotos({
-        start_at: startDate,
-        end_at: endDate
-      })
-      console.log('[Photos] Fetched photos:', data.photos.length, 'photos')
-      console.log('[Photos] Sample photo:', data.photos[0])
-    } catch (error) {
-      console.error('[Photos] Failed to fetch photos:', error)
+    // Fetch photos - only if photos layer is enabled and integration is configured
+    // Skip API call if photos are disabled to avoid blocking on failed integrations
+    if (this.settings.photosEnabled) {
+      try {
+        console.log('[Photos] Fetching photos from:', startDate, 'to', endDate)
+        // Use Promise.race to enforce a client-side timeout
+        const photosPromise = this.api.fetchPhotos({
+          start_at: startDate,
+          end_at: endDate
+        })
+        const timeoutPromise = new Promise((_, reject) =>
+          setTimeout(() => reject(new Error('Photo fetch timeout')), 15000) // 15 second timeout
+        )
+
+        data.photos = await Promise.race([photosPromise, timeoutPromise])
+        console.log('[Photos] Fetched photos:', data.photos.length, 'photos')
+        console.log('[Photos] Sample photo:', data.photos[0])
+      } catch (error) {
+        console.warn('[Photos] Failed to fetch photos (non-blocking):', error.message)
+        data.photos = []
+      }
+    } else {
+      console.log('[Photos] Photos layer disabled, skipping fetch')
       data.photos = []
     }
     data.photosGeoJSON = this.photosToGeoJSON(data.photos)
     console.log('[Photos] Converted to GeoJSON:', data.photosGeoJSON.features.length, 'features')
-    console.log('[Photos] Sample feature:', data.photosGeoJSON.features[0])
+    if (data.photosGeoJSON.features.length > 0) {
+      console.log('[Photos] Sample feature:', data.photosGeoJSON.features[0])
+    }

     // Fetch areas
     try {

@@ -31,7 +31,10 @@ class Immich::RequestPhotos
     while page <= max_pages
       response = JSON.parse(
         HTTParty.post(
-          immich_api_base_url, headers: headers, body: request_body(page)
+          immich_api_base_url,
+          headers: headers,
+          body: request_body(page),
+          timeout: 10
         ).body
       )
       Rails.logger.debug('==== IMMICH RESPONSE ====')

@@ -46,6 +49,9 @@ class Immich::RequestPhotos
     end

     data.flatten
+  rescue HTTParty::Error, Net::OpenTimeout, Net::ReadTimeout => e
+    Rails.logger.error("Immich photo fetch failed: #{e.message}")
+    []
   end

   def headers

@@ -43,13 +43,17 @@ class Photoprism::RequestPhotos
     end

     data.flatten
+  rescue HTTParty::Error, Net::OpenTimeout, Net::ReadTimeout => e
+    Rails.logger.error("Photoprism photo fetch failed: #{e.message}")
+    []
   end

   def fetch_page(offset)
     response = HTTParty.get(
       photoprism_api_base_url,
       headers: headers,
-      query: request_params(offset)
+      query: request_params(offset),
+      timeout: 10
     )

     if response.code != 200
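
Both service changes above use the same pattern: pass HTTParty's `timeout` option so a dead or unreachable host fails after 10 seconds instead of hanging, and rescue the timeout errors at the method level so callers get an empty array rather than an exception. A minimal standalone sketch of that pattern (hypothetical method and URL, assuming a Rails environment where HTTParty and Rails.logger are available):

```ruby
# Hypothetical helper, not the app's actual service class.
def fetch_photos_json(url, headers)
  # :timeout applies to both opening the connection and reading the response;
  # Net::OpenTimeout / Net::ReadTimeout are raised when either limit is hit.
  response = HTTParty.get(url, headers: headers, timeout: 10)
  JSON.parse(response.body)
rescue HTTParty::Error, Net::OpenTimeout, Net::ReadTimeout => e
  Rails.logger.error("Photo fetch failed: #{e.message}")
  [] # an unreachable integration now means "no photos", not a crashed request
end
```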

@@ -88,35 +88,86 @@ module Users
     end

     def calculate_time_spent
-      country_time = Hash.new(0)
+      {
+        'countries' => calculate_country_time_spent,
+        'cities' => calculate_city_time_spent
+      }
+    end
+
+    def calculate_country_time_spent
+      country_days = build_country_days_map
+
+      # Convert days to minutes (days * 24 * 60) and return top 10
+      country_days
+        .transform_values { |days| days.size * 24 * 60 }
+        .sort_by { |_, minutes| -minutes }
+        .first(10)
+        .map { |name, minutes| { 'name' => name, 'minutes' => minutes } }
+    end
+
+    def build_country_days_map
+      year_points = fetch_year_points_with_country
+      country_days = Hash.new { |h, k| h[k] = Set.new }
+
+      year_points.each do |point|
+        date = Time.zone.at(point.timestamp).to_date
+        country_days[point.country_name].add(date)
+      end
+
+      country_days
+    end
+
+    def fetch_year_points_with_country
+      start_of_year = Time.zone.local(year, 1, 1, 0, 0, 0)
+      end_of_year = start_of_year.end_of_year
+
+      user.points
+        .without_raw_data
+        .where('timestamp >= ? AND timestamp <= ?', start_of_year.to_i, end_of_year.to_i)
+        .where.not(country_name: [nil, ''])
+        .select(:country_name, :timestamp)
+    end
+
+    def calculate_city_time_spent
+      city_time = aggregate_city_time_from_monthly_stats
+
+      city_time
+        .sort_by { |_, minutes| -minutes }
+        .first(10)
+        .map { |name, minutes| { 'name' => name, 'minutes' => minutes } }
+    end
+
+    def aggregate_city_time_from_monthly_stats
       city_time = Hash.new(0)

       monthly_stats.each do |stat|
-        toponyms = stat.toponyms
-        next unless toponyms.is_a?(Array)
-
-        toponyms.each do |toponym|
-          next unless toponym.is_a?(Hash)
-
-          country = toponym['country']
-          next unless toponym['cities'].is_a?(Array)
-
-          toponym['cities'].each do |city|
-            next unless city.is_a?(Hash)
-
-            stayed_for = city['stayed_for'].to_i
-            city_name = city['city']
-
-            country_time[country] += stayed_for if country.present?
-            city_time[city_name] += stayed_for if city_name.present?
-          end
-        end
+        process_stat_toponyms(stat, city_time)
       end

-      {
-        'countries' => country_time.sort_by { |_, v| -v }.first(10).map { |name, minutes| { 'name' => name, 'minutes' => minutes } },
-        'cities' => city_time.sort_by { |_, v| -v }.first(10).map { |name, minutes| { 'name' => name, 'minutes' => minutes } }
-      }
+      city_time
+    end
+
+    def process_stat_toponyms(stat, city_time)
+      toponyms = stat.toponyms
+      return unless toponyms.is_a?(Array)
+
+      toponyms.each do |toponym|
+        process_toponym_cities(toponym, city_time)
+      end
+    end
+
+    def process_toponym_cities(toponym, city_time)
+      return unless toponym.is_a?(Hash)
+      return unless toponym['cities'].is_a?(Array)
+
+      toponym['cities'].each do |city|
+        next unless city.is_a?(Hash)
+
+        stayed_for = city['stayed_for'].to_i
+        city_name = city['city']
+
+        city_time[city_name] += stayed_for if city_name.present?
+      end
     end

     def calculate_first_time_visits
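
The refactor above changes how country time is computed: instead of summing the per-city `stayed_for` values from monthly stats (which can inflate totals when the same day appears in several months' toponyms), it collects the set of unique calendar days per country and converts days to minutes. A self-contained sketch of that aggregation in plain Ruby, with hypothetical point data:

```ruby
require 'set'

# Hypothetical points: [unix timestamp, country_name]
points = [
  [Time.utc(2024, 3, 1, 9).to_i,  'Germany'],
  [Time.utc(2024, 3, 1, 18).to_i, 'Germany'], # same calendar day, counted once
  [Time.utc(2024, 3, 2, 9).to_i,  'Germany'],
  [Time.utc(2024, 7, 1, 9).to_i,  'France']
]

# One Set of dates per country; a Set ignores duplicate days automatically.
country_days = Hash.new { |hash, key| hash[key] = Set.new }
points.each do |timestamp, country|
  country_days[country].add(Time.at(timestamp).utc.to_date)
end

# Days to minutes, mirroring the "days.size * 24 * 60" step above.
country_days.transform_values { |days| days.size * 24 * 60 }
# => {"Germany"=>2880, "France"=>1440}
```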

@@ -79,7 +79,7 @@
   </h2>
   <div class="w-full h-48 bg-base-200 rounded-lg p-4 relative">
     <%= column_chart(
-      @digest.monthly_distances.sort.map { |month, distance_meters|
+      @digest.monthly_distances.sort_by { |month, _| month.to_i }.map { |month, distance_meters|
         [Date::ABBR_MONTHNAMES[month.to_i], Users::Digest.convert_distance(distance_meters.to_i, @distance_unit).round]
       },
       height: '200px',

@@ -101,7 +101,7 @@
   </h2>
   <div class="w-full h-64 bg-base-100 rounded-lg p-4">
     <%= column_chart(
-      @digest.monthly_distances.sort.map { |month, distance_meters|
+      @digest.monthly_distances.sort_by { |month, _| month.to_i }.map { |month, distance_meters|
         [Date::ABBR_MONTHNAMES[month.to_i], Users::Digest.convert_distance(distance_meters.to_i, @distance_unit).round]
       },
       height: '250px',

@@ -3,21 +3,19 @@ class InstallRailsPulseTables < ActiveRecord::Migration[8.0]
   def change
     # Load and execute the Rails Pulse schema directly
     # This ensures the migration is always in sync with the schema file
-    schema_file = File.join(::Rails.root.to_s, "db/rails_pulse_schema.rb")
-
-    if File.exist?(schema_file)
-      say "Loading Rails Pulse schema from db/rails_pulse_schema.rb"
-
-      # Load the schema file to define RailsPulse::Schema
-      load schema_file
-
-      # Execute the schema in the context of this migration
-      RailsPulse::Schema.call(connection)
-
-      say "Rails Pulse tables created successfully"
-      say "The schema file db/rails_pulse_schema.rb remains as your single source of truth"
-    else
-      raise "Rails Pulse schema file not found at db/rails_pulse_schema.rb"
-    end
+    schema_file = Rails.root.join('db/rails_pulse_schema.rb').to_s
+
+    raise 'Rails Pulse schema file not found at db/rails_pulse_schema.rb' unless File.exist?(schema_file)
+
+    say 'Loading Rails Pulse schema from db/rails_pulse_schema.rb'
+
+    # Load the schema file to define RailsPulse::Schema
+    load schema_file
+
+    # Execute the schema in the context of this migration
+    RailsPulse::Schema.call(connection)
+
+    say 'Rails Pulse tables created successfully'
+    say 'The schema file db/rails_pulse_schema.rb remains as your single source of truth'
   end
 end

@@ -0,0 +1,21 @@
+class AddIndexesToPointsForStatsQuery < ActiveRecord::Migration[8.0]
+  disable_ddl_transaction!
+
+  def change
+    # Index for counting reverse geocoded points
+    # This speeds up: COUNT(reverse_geocoded_at)
+    add_index :points, [:user_id, :reverse_geocoded_at],
+              where: "reverse_geocoded_at IS NOT NULL",
+              algorithm: :concurrently,
+              if_not_exists: true,
+              name: 'index_points_on_user_id_and_reverse_geocoded_at'
+
+    # Index for finding points with empty geodata
+    # This speeds up: COUNT(CASE WHEN geodata = '{}'::jsonb THEN 1 END)
+    add_index :points, [:user_id, :geodata],
+              where: "geodata = '{}'::jsonb",
+              algorithm: :concurrently,
+              if_not_exists: true,
+              name: 'index_points_on_user_id_and_empty_geodata'
+  end
+end
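
The new migration above builds both indexes as partial indexes with `algorithm: :concurrently`, which is why `disable_ddl_transaction!` is needed (Postgres cannot run CREATE INDEX CONCURRENTLY inside a transaction). For reference, these are roughly the per-user counts the two indexes are shaped for; a hedged sketch assuming the points table is backed by a `Point` model, not the app's exact stats query:

```ruby
# Served by the partial index on [:user_id, :reverse_geocoded_at]
# (WHERE reverse_geocoded_at IS NOT NULL).
Point.where(user_id: user.id).where.not(reverse_geocoded_at: nil).count

# Served by the partial index on [:user_id, :geodata]
# (WHERE geodata = '{}'::jsonb).
Point.where(user_id: user.id).where("geodata = '{}'::jsonb").count
```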

db/schema.rb (generated, 3 lines changed)

@@ -10,7 +10,7 @@
 #
 # It's strongly recommended that you check this file into your version control system.

-ActiveRecord::Schema[8.0].define(version: 2025_12_28_163703) do
+ActiveRecord::Schema[8.0].define(version: 2026_01_03_114630) do
   # These are extensions that must be enabled in order to support this database
   enable_extension "pg_catalog.plpgsql"
   enable_extension "postgis"

@@ -260,6 +260,7 @@ ActiveRecord::Schema[8.0].define(version: 2025_12_28_163703) do
     t.index ["track_id"], name: "index_points_on_track_id"
     t.index ["user_id", "city"], name: "idx_points_user_city"
     t.index ["user_id", "country_name"], name: "idx_points_user_country_name"
+    t.index ["user_id", "geodata"], name: "index_points_on_user_id_and_empty_geodata", where: "(geodata = '{}'::jsonb)"
     t.index ["user_id", "reverse_geocoded_at"], name: "index_points_on_user_id_and_reverse_geocoded_at", where: "(reverse_geocoded_at IS NOT NULL)"
     t.index ["user_id", "timestamp", "track_id"], name: "idx_points_track_generation"
     t.index ["user_id", "timestamp"], name: "idx_points_user_visit_null_timestamp", where: "(visit_id IS NULL)"

package-lock.json (generated, 18 lines changed)

@@ -11,7 +11,7 @@
         "leaflet": "^1.9.4",
         "maplibre-gl": "^5.13.0",
         "postcss": "^8.4.49",
-        "trix": "^2.1.15"
+        "trix": "^2.1.16"
       },
       "devDependencies": {
         "@playwright/test": "^1.56.1",

@@ -575,12 +575,14 @@
       "license": "ISC"
     },
     "node_modules/trix": {
-      "version": "2.1.15",
-      "resolved": "https://registry.npmjs.org/trix/-/trix-2.1.15.tgz",
-      "integrity": "sha512-LoaXWczdTUV8+3Box92B9b1iaDVbxD14dYemZRxi3PwY+AuDm97BUJV2aHLBUFPuDABhxp0wzcbf0CxHCVmXiw==",
-      "license": "MIT",
+      "version": "2.1.16",
+      "resolved": "https://registry.npmjs.org/trix/-/trix-2.1.16.tgz",
+      "integrity": "sha512-XtZgWI+oBvLzX7CWnkIf+ZWC+chL+YG/TkY43iMTV0Zl+CJjn18B1GJUCEWJ8qgfpcyMBuysnNAfPWiv2sV14A==",
       "dependencies": {
         "dompurify": "^3.2.5"
+      },
+      "engines": {
+        "node": ">= 18"
       }
     },
     "node_modules/undici-types": {

@@ -986,9 +988,9 @@
       "integrity": "sha512-gRa9gwYU3ECmQYv3lslts5hxuIa90veaEcxDYuu3QGOIAEM2mOZkVHp48ANJuu1CURtRdHKUBY5Lm1tHV+sD4g=="
     },
     "trix": {
-      "version": "2.1.15",
-      "resolved": "https://registry.npmjs.org/trix/-/trix-2.1.15.tgz",
-      "integrity": "sha512-LoaXWczdTUV8+3Box92B9b1iaDVbxD14dYemZRxi3PwY+AuDm97BUJV2aHLBUFPuDABhxp0wzcbf0CxHCVmXiw==",
+      "version": "2.1.16",
+      "resolved": "https://registry.npmjs.org/trix/-/trix-2.1.16.tgz",
+      "integrity": "sha512-XtZgWI+oBvLzX7CWnkIf+ZWC+chL+YG/TkY43iMTV0Zl+CJjn18B1GJUCEWJ8qgfpcyMBuysnNAfPWiv2sV14A==",
       "requires": {
         "dompurify": "^3.2.5"
       }

@@ -6,7 +6,7 @@
     "leaflet": "^1.9.4",
     "maplibre-gl": "^5.13.0",
     "postcss": "^8.4.49",
-    "trix": "^2.1.15"
+    "trix": "^2.1.16"
   },
   "engines": {
     "node": "18.17.1",

@@ -77,18 +77,78 @@ RSpec.describe Users::Digests::CalculateYear do
     end

     it 'calculates time spent by location' do
+      # Create points to enable country time calculation based on unique days
+      jan_1 = Time.zone.local(2024, 1, 1, 10, 0, 0).to_i
+      jan_2 = Time.zone.local(2024, 1, 2, 10, 0, 0).to_i
+      feb_1 = Time.zone.local(2024, 2, 1, 10, 0, 0).to_i
+
+      create(:point, user: user, timestamp: jan_1, country_name: 'Germany', city: 'Berlin')
+      create(:point, user: user, timestamp: jan_2, country_name: 'Germany', city: 'Munich')
+      create(:point, user: user, timestamp: feb_1, country_name: 'France', city: 'Paris')
+
       countries = calculate_digest.time_spent_by_location['countries']
       cities = calculate_digest.time_spent_by_location['cities']

-      expect(countries.first['name']).to eq('Germany')
-      expect(countries.first['minutes']).to eq(720) # 480 + 240
+      # Countries: based on unique days (2 days in Germany, 1 day in France)
+      germany_country = countries.find { |c| c['name'] == 'Germany' }
+      expect(germany_country['minutes']).to eq(2 * 24 * 60) # 2 days = 2880 minutes
+
+      # Cities: based on stayed_for from monthly stats (sum across months)
       expect(cities.first['name']).to eq('Berlin')
+      expect(cities.first['minutes']).to eq(480)
     end

     it 'calculates all time stats' do
       expect(calculate_digest.all_time_stats['total_distance']).to eq('125000')
     end

+    context 'when user visits same country across multiple months' do
+      it 'does not double-count days' do
+        # Create a user who was in Germany for 10 days in March and 10 days in July
+        # If we summed the stayed_for values from cities, we might get inflated numbers
+        # The fix counts unique days to prevent exceeding 365 days per year
+        mar_start = Time.zone.local(2024, 3, 1, 10, 0, 0).to_i
+        jul_start = Time.zone.local(2024, 7, 1, 10, 0, 0).to_i
+
+        # Create 10 days of points in March
+        10.times do |i|
+          timestamp = mar_start + (i * 24 * 60 * 60)
+          create(:point, user: user, timestamp: timestamp, country_name: 'Germany', city: 'Berlin')
+        end
+
+        # Create 10 days of points in July
+        10.times do |i|
+          timestamp = jul_start + (i * 24 * 60 * 60)
+          create(:point, user: user, timestamp: timestamp, country_name: 'Germany', city: 'Munich')
+        end
+
+        # Create the monthly stats (simulating what would be created by the stats calculation)
+        create(:stat, user: user, year: 2024, month: 3, distance: 10_000, toponyms: [
+          { 'country' => 'Germany', 'cities' => [
+            { 'city' => 'Berlin', 'stayed_for' => 14_400 } # 10 days in minutes
+          ] }
+        ])
+
+        create(:stat, user: user, year: 2024, month: 7, distance: 15_000, toponyms: [
+          { 'country' => 'Germany', 'cities' => [
+            { 'city' => 'Munich', 'stayed_for' => 14_400 } # 10 days in minutes
+          ] }
+        ])
+
+        digest = calculate_digest
+        countries = digest.time_spent_by_location['countries']
+        germany = countries.find { |c| c['name'] == 'Germany' }
+
+        # Should be 20 days total (10 unique days in Mar + 10 unique days in Jul)
+        expected_minutes = 20 * 24 * 60 # 28,800 minutes
+        expect(germany['minutes']).to eq(expected_minutes)
+
+        # Verify this is less than 365 days (the bug would cause inflated numbers)
+        total_days = germany['minutes'] / (24 * 60)
+        expect(total_days).to be <= 365
+      end
+    end
+
     context 'when digest already exists' do
       let!(:existing_digest) do
         create(:users_digest, user: user, year: 2024, period_type: :yearly, distance: 10_000)