Mirror of https://github.com/Freika/dawarich.git (synced 2026-01-11 01:31:39 -05:00)
Merge 1b504ff757 into 322ae311cf
Commit 6770bd5e12
14 changed files with 189 additions and 42 deletions
@@ -12,6 +12,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
 - Time spent in a country and city is now calculated correctly for the year-end digest email. #2104
 - Updated Trix to fix an XSS vulnerability. #2102
 - Map v2 UI no longer blocks when Immich/Photoprism integration has a bad URL or is unreachable. Added a 10-second timeout to photo API requests and improved error handling to prevent UI freezing during initial load. #2085
 
+## Added
+- In Map v2 settings, you can now enable the map to be rendered as a globe.
 
 # [0.37.1] - 2025-12-30
@@ -16,11 +16,11 @@ module Api
         include_untagged = tag_ids.include?('untagged')
 
         if numeric_tag_ids.any? && include_untagged
-          # Both tagged and untagged: return union (OR logic)
-          tagged = current_api_user.places.includes(:tags, :visits).with_tags(numeric_tag_ids)
-          untagged = current_api_user.places.includes(:tags, :visits).without_tags
-          @places = Place.from("(#{tagged.to_sql} UNION #{untagged.to_sql}) AS places")
-                         .includes(:tags, :visits)
+          # Both tagged and untagged: use OR logic to preserve eager loading
+          tagged_ids = current_api_user.places.with_tags(numeric_tag_ids).pluck(:id)
+          untagged_ids = current_api_user.places.without_tags.pluck(:id)
+          combined_ids = (tagged_ids + untagged_ids).uniq
+          @places = current_api_user.places.includes(:tags, :visits).where(id: combined_ids)
         elsif numeric_tag_ids.any?
           # Only tagged places with ANY of the selected tags (OR logic)
           @places = @places.with_tags(numeric_tag_ids)
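
Note for readers: the `with_tags` / `without_tags` scopes aren't part of this diff. A minimal sketch of what the new code assumes they look like on `Place` (the `taggings` join table is an assumption):

    class Place < ApplicationRecord
      has_many :taggings, dependent: :destroy
      has_many :tags, through: :taggings
      has_many :visits

      # Places carrying ANY of the given tag ids (OR logic).
      scope :with_tags, ->(tag_ids) { joins(:taggings).where(taggings: { tag_id: tag_ids }).distinct }

      # Places with no tags at all (Rails 6.1+ `where.missing`).
      scope :without_tags, -> { where.missing(:taggings) }
    end

Plucking ids and re-querying with `where(id: combined_ids)` keeps `includes(:tags, :visits)` effective; the new comment suggests the old `Place.from(... UNION ...)` form is where eager loading broke down.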
@@ -30,6 +30,16 @@ module Api
           end
         end
 
+        # Support optional pagination (backward compatible - returns all if no page param)
+        if params[:page].present?
+          per_page = [params[:per_page]&.to_i || 100, 500].min
+          @places = @places.page(params[:page]).per(per_page)
+
+          response.set_header('X-Current-Page', @places.current_page.to_s)
+          response.set_header('X-Total-Pages', @places.total_pages.to_s)
+          response.set_header('X-Total-Count', @places.total_count.to_s)
+        end
+
         render json: @places.map { |place| serialize_place(place) }
       end
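
The `page`/`per` calls imply a pagination gem such as Kaminari (an assumption; the Gemfile isn't in this diff). Consumers can walk the pages by reading the new response headers; a rough client sketch (the host and auth header are placeholders):

    require 'net/http'
    require 'json'

    def fetch_all_places(api_key)
      places = []
      page = 1

      loop do
        uri = URI("https://dawarich.example.com/api/v1/places?page=#{page}&per_page=500")
        request = Net::HTTP::Get.new(uri, 'Authorization' => "Bearer #{api_key}")
        response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
        raise "HTTP #{response.code}" unless response.is_a?(Net::HTTPSuccess)

        places.concat(JSON.parse(response.body))
        break if page >= response['X-Total-Pages'].to_i

        page += 1
      end

      places
    end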
@@ -120,7 +130,7 @@ module Api
          note: place.note,
          icon: place.tags.first&.icon,
          color: place.tags.first&.color,
-         visits_count: place.visits.count,
+         visits_count: place.visits.size,
          created_at: place.created_at,
          tags: place.tags.map do |tag|
            {
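
The `count` → `size` swap is the classic eager-loading fix: `count` always issues a `SELECT COUNT(*)`, while `size` uses records already loaded by `includes(:visits)`. Illustrated:

    places = current_api_user.places.includes(:tags, :visits)

    places.each do |place|
      place.visits.count # one COUNT query per place, defeating the preload
      place.visits.size  # answered from the preloaded association, no query
    end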
@@ -3,6 +3,17 @@
 class Api::V1::VisitsController < ApiController
   def index
     visits = Visits::Finder.new(current_api_user, params).call
+
+    # Support optional pagination (backward compatible - returns all if no page param)
+    if params[:page].present?
+      per_page = [params[:per_page]&.to_i || 100, 500].min
+      visits = visits.page(params[:page]).per(per_page)
+
+      response.set_header('X-Current-Page', visits.current_page.to_s)
+      response.set_header('X-Total-Pages', visits.total_pages.to_s)
+      response.set_header('X-Total-Count', visits.total_count.to_s)
+    end
+
     serialized_visits = visits.map do |visit|
       Api::VisitSerializer.new(visit).call
     end
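
A request spec for the backward-compatible contract could look like the sketch below (the path, auth style, and factories are assumptions patterned after the user settings spec at the end of this diff):

    RSpec.describe 'Api::V1::Visits', type: :request do
      let(:user) { create(:user) }
      let(:headers) { { 'Authorization' => "Bearer #{user.api_key}" } }

      it 'returns everything when no page param is given' do
        get '/api/v1/visits', headers: headers

        expect(response).to have_http_status(:ok)
        expect(response.headers['X-Total-Pages']).to be_nil
      end

      it 'caps per_page at 500 and sets pagination headers' do
        get '/api/v1/visits', params: { page: 1, per_page: 9999 }, headers: headers

        expect(response.headers['X-Current-Page']).to eq('1')
        expect(response.headers['X-Total-Count']).to be_present
      end
    end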
@@ -41,19 +41,31 @@ class Map::LeafletController < ApplicationController
   end
 
   def calculate_distance
-    return 0 if @coordinates.size < 2
-
-    total_distance = 0
-
-    @coordinates.each_cons(2) do
-      distance_km = Geocoder::Calculations.distance_between(
-        [_1[0], _1[1]], [_2[0], _2[1]], units: :km
-      )
-
-      total_distance += distance_km
-    end
-
-    total_distance.round
+    return 0 if @points.count(:id) < 2
+
+    # Use a PostGIS window function for efficient distance calculation:
+    # one database round-trip instead of O(n) pairwise iteration in Ruby
+    sql = <<~SQL.squish
+      SELECT COALESCE(SUM(distance_m) / 1000.0, 0) as total_km FROM (
+        SELECT ST_Distance(
+          lonlat::geography,
+          LAG(lonlat::geography) OVER (ORDER BY timestamp)
+        ) as distance_m
+        FROM points
+        WHERE user_id = :user_id
+          AND timestamp >= :start_at
+          AND timestamp <= :end_at
+      ) distances
+    SQL
+
+    result = Point.connection.select_value(
+      ActiveRecord::Base.sanitize_sql_array([
+        sql,
+        { user_id: current_user.id, start_at: start_at, end_at: end_at }
+      ])
+    )
+
+    result&.to_f&.round || 0
   end
 
   def parsed_start_at
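
Two details worth calling out: casting `lonlat` to `geography` makes `ST_Distance` return meters on the WGS84 spheroid, and `LAG(...) OVER (ORDER BY timestamp)` pairs each point with its predecessor (NULL for the first row, which `SUM` skips). A quick console check against the old Geocoder path (the coordinates here are arbitrary):

    # Berlin and Paris, roughly 878 km apart.
    haversine_km = Geocoder::Calculations.distance_between(
      [52.52, 13.405], [48.8566, 2.3522], units: :km
    )

    postgis_m = ActiveRecord::Base.connection.select_value(<<~SQL)
      SELECT ST_Distance(
        ST_SetSRID(ST_MakePoint(13.405, 52.52), 4326)::geography,
        ST_SetSRID(ST_MakePoint(2.3522, 48.8566), 4326)::geography
      )
    SQL

    # Haversine assumes a sphere, so expect agreement within ~0.5%.
    puts "geocoder: #{haversine_km.round(1)} km, postgis: #{(postgis_m.to_f / 1000).round(1)} km"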
@@ -80,8 +80,14 @@ class StatsController < ApplicationController
   end
 
   def build_stats
-    current_user.stats.group_by(&:year).transform_values do |stats|
-      stats.sort_by(&:updated_at).reverse
-    end.sort.reverse
+    # Select only the columns we need - avoids loading large JSONB fields;
+    # daily_distance and h3_hex_ids are never needed on the index page
+    columns = [:id, :year, :month, :distance, :updated_at, :user_id]
+    columns << :toponyms if DawarichSettings.reverse_geocoding_enabled?
+
+    current_user.stats
+                .select(columns)
+                .order(year: :desc, updated_at: :desc)
+                .group_by(&:year)
   end
 end
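
`group_by(&:year)` on the ordered relation preserves insertion order, so the result still iterates newest year first with months ordered by recency, matching the old `sort.reverse` behavior. Roughly the shape the index view consumes:

    build_stats.each do |year, stats|
      total_km = stats.sum(&:distance) / 1000.0 # distance is stored in meters, per the preheating job below
      puts "#{year}: #{total_km.round} km over #{stats.size} months"
    end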
@@ -73,10 +73,17 @@ export class ApiClient
   }
 
   /**
-   * Fetch visits for date range
+   * Fetch visits for date range (paginated)
+   * @param {Object} options - { start_at, end_at, page, per_page }
+   * @returns {Promise<Object>} { visits, currentPage, totalPages }
    */
-  async fetchVisits({ start_at, end_at }) {
-    const params = new URLSearchParams({ start_at, end_at })
+  async fetchVisitsPage({ start_at, end_at, page = 1, per_page = 500 }) {
+    const params = new URLSearchParams({
+      start_at,
+      end_at,
+      page: page.toString(),
+      per_page: per_page.toString()
+    })
 
     const response = await fetch(`${this.baseURL}/visits?${params}`, {
       headers: this.getHeaders()

@@ -86,20 +93,63 @@ export class ApiClient
       throw new Error(`Failed to fetch visits: ${response.statusText}`)
     }
 
-    return response.json()
+    const visits = await response.json()
+
+    return {
+      visits,
+      currentPage: parseInt(response.headers.get('X-Current-Page') || '1'),
+      totalPages: parseInt(response.headers.get('X-Total-Pages') || '1')
+    }
   }
 
   /**
-   * Fetch places optionally filtered by tags
+   * Fetch all visits for date range (handles pagination)
+   * @param {Object} options - { start_at, end_at, onProgress }
+   * @returns {Promise<Array>} All visits
    */
-  async fetchPlaces({ tag_ids = [] } = {}) {
-    const params = new URLSearchParams()
+  async fetchVisits({ start_at, end_at, onProgress = null }) {
+    const allVisits = []
+    let page = 1
+    let totalPages = 1
+
+    do {
+      const { visits, currentPage, totalPages: total } =
+        await this.fetchVisitsPage({ start_at, end_at, page, per_page: 500 })
+
+      allVisits.push(...visits)
+      totalPages = total
+      page++
+
+      if (onProgress) {
+        const progress = totalPages > 0 ? currentPage / totalPages : 1.0
+        onProgress({
+          loaded: allVisits.length,
+          currentPage,
+          totalPages,
+          progress
+        })
+      }
+    } while (page <= totalPages)
+
+    return allVisits
+  }
+
+  /**
+   * Fetch places (paginated)
+   * @param {Object} options - { tag_ids, page, per_page }
+   * @returns {Promise<Object>} { places, currentPage, totalPages }
+   */
+  async fetchPlacesPage({ tag_ids = [], page = 1, per_page = 500 } = {}) {
+    const params = new URLSearchParams({
+      page: page.toString(),
+      per_page: per_page.toString()
+    })
+
     if (tag_ids && tag_ids.length > 0) {
       tag_ids.forEach(id => params.append('tag_ids[]', id))
     }
 
-    const url = `${this.baseURL}/places${params.toString() ? '?' + params.toString() : ''}`
+    const url = `${this.baseURL}/places?${params.toString()}`
 
     const response = await fetch(url, {
       headers: this.getHeaders()

@@ -109,7 +159,45 @@ export class ApiClient
       throw new Error(`Failed to fetch places: ${response.statusText}`)
     }
 
-    return response.json()
+    const places = await response.json()
+
+    return {
+      places,
+      currentPage: parseInt(response.headers.get('X-Current-Page') || '1'),
+      totalPages: parseInt(response.headers.get('X-Total-Pages') || '1')
+    }
+  }
+
+  /**
+   * Fetch all places optionally filtered by tags (handles pagination)
+   * @param {Object} options - { tag_ids, onProgress }
+   * @returns {Promise<Array>} All places
+   */
+  async fetchPlaces({ tag_ids = [], onProgress = null } = {}) {
+    const allPlaces = []
+    let page = 1
+    let totalPages = 1
+
+    do {
+      const { places, currentPage, totalPages: total } =
+        await this.fetchPlacesPage({ tag_ids, page, per_page: 500 })
+
+      allPlaces.push(...places)
+      totalPages = total
+      page++
+
+      if (onProgress) {
+        const progress = totalPages > 0 ? currentPage / totalPages : 1.0
+        onProgress({
+          loaded: allPlaces.length,
+          currentPage,
+          totalPages,
+          progress
+        })
+      }
+    } while (page <= totalPages)
+
+    return allPlaces
   }
 
   /**
app/jobs/cache/preheating_job.rb (vendored, 8 changed lines)
@@ -28,6 +28,14 @@ class Cache::PreheatingJob < ApplicationJob
         user.cities_visited_uncached,
         expires_in: 1.day
       )
+
+      # Preheat the total_distance cache
+      total_distance_meters = user.stats.sum(:distance)
+      Rails.cache.write(
+        "dawarich/user_#{user.id}_total_distance",
+        Stat.convert_distance(total_distance_meters, user.safe_settings.distance_unit),
+        expires_in: 1.day
+      )
     end
   end
 end
@@ -56,8 +56,10 @@ class User < ApplicationRecord # rubocop:disable Metrics/ClassLength
   end
 
   def total_distance
-    total_distance_meters = stats.sum(:distance)
-    Stat.convert_distance(total_distance_meters, safe_settings.distance_unit)
+    Rails.cache.fetch("dawarich/user_#{id}_total_distance", expires_in: 1.day) do
+      total_distance_meters = stats.sum(:distance)
+      Stat.convert_distance(total_distance_meters, safe_settings.distance_unit)
+    end
   end
 
   def total_countries
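
One design note: the `dawarich/user_#{id}_total_distance` key is now spelled out by hand in four places (the job above, this model, and the two cache services below). A shared helper would keep them from drifting; a possible sketch, not part of this changeset:

    # Hypothetical helper, not in this PR.
    module CacheKeys
      def self.total_distance(user_id)
        "dawarich/user_#{user_id}_total_distance"
      end
    end

    # Usage in User#total_distance would become:
    # Rails.cache.fetch(CacheKeys.total_distance(id), expires_in: 1.day) { ... }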
app/services/cache/clean.rb (vendored, 7 changed lines)
@@ -9,6 +9,7 @@ class Cache::Clean
     delete_years_tracked_cache
     delete_points_geocoded_stats_cache
     delete_countries_cities_cache
+    delete_total_distance_cache
     Rails.logger.info('Cache cleaned')
   end

@@ -40,5 +41,11 @@ class Cache::Clean
       Rails.cache.delete("dawarich/user_#{user.id}_cities_visited")
     end
   end
+
+  def delete_total_distance_cache
+    User.find_each do |user|
+      Rails.cache.delete("dawarich/user_#{user.id}_total_distance")
+    end
+  end
 end
app/services/cache/invalidate_user_caches.rb (vendored, 5 changed lines)
@@ -14,6 +14,7 @@ class Cache::InvalidateUserCaches
     invalidate_countries_visited
     invalidate_cities_visited
     invalidate_points_geocoded_stats
+    invalidate_total_distance
   end
 
   def invalidate_countries_visited

@@ -28,6 +29,10 @@ class Cache::InvalidateUserCaches
     Rails.cache.delete("dawarich/user_#{user_id}_points_geocoded_stats")
   end
 
+  def invalidate_total_distance
+    Rails.cache.delete("dawarich/user_#{user_id}_total_distance")
+  end
+
   private
 
   attr_reader :user_id
@@ -50,11 +50,13 @@ class Stats::CalculateMonth
   def points
     return @points if defined?(@points)
 
+    # Select all needed columns to avoid duplicate queries:
+    # used for both distance calculation and toponyms extraction
     @points = user
               .points
               .without_raw_data
               .where(timestamp: start_timestamp..end_timestamp)
-              .select(:lonlat, :timestamp)
+              .select(:lonlat, :timestamp, :city, :country_name)
              .order(timestamp: :asc)
   end
 

@@ -63,14 +65,8 @@ class Stats::CalculateMonth
   end
 
   def toponyms
-    toponym_points =
-      user
-        .points
-        .without_raw_data
-        .where(timestamp: start_timestamp..end_timestamp)
-        .select(:city, :country_name, :timestamp)
-
-    CountriesAndCities.new(toponym_points).call
+    # Reuse the already-loaded points instead of making a duplicate query
+    CountriesAndCities.new(points).call
   end
 
   def create_stats_update_failed_notification(user, error)
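
The reuse works because an ActiveRecord relation caches its records once loaded: the first enumeration (in the distance calculation) runs the single SELECT, and `toponyms` then iterates the same loaded array. A minimal illustration using the names from this diff:

    points = user.points
                 .without_raw_data
                 .where(timestamp: start_timestamp..end_timestamp)
                 .select(:lonlat, :timestamp, :city, :country_name)

    points.to_a                 # first access executes the query and caches the records
    points.map(&:city)          # served from the cached records; no second query
    points.map(&:country_name)  # likewise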
@@ -10,7 +10,7 @@ module Visits
 
     def call
       Visit
-        .includes(:place)
+        .includes(:place, :area)
         .where(user:)
         .where('started_at >= ? AND ended_at <= ?', start_at, end_at)
         .order(started_at: :desc)
|
||||||
|
|
||||||
def call
|
def call
|
||||||
Visit
|
Visit
|
||||||
.includes(:place)
|
.includes(:place, :area)
|
||||||
.where(user:)
|
.where(user:)
|
||||||
.joins(:place)
|
.joins(:place)
|
||||||
.where(
|
.where(
|
||||||
|
|
|
||||||
|
|
@@ -27,7 +27,7 @@ RSpec.describe 'Api::V1::Users', type: :request do
           speed_colored_routes points_rendering_mode minutes_between_routes
           time_threshold_minutes merge_threshold_minutes live_map_enabled
           route_opacity immich_url photoprism_url visits_suggestions_enabled
-          speed_color_scale fog_of_war_threshold
+          speed_color_scale fog_of_war_threshold globe_projection
         ])
       end
     end