diff --git a/CHANGELOG.md b/CHANGELOG.md
index b150176a..cdb8c5c3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -11,7 +11,12 @@ Geodata on demand
- [x] Introduce a `STORE_GEODATA` environment variable to control whether to store geodata in the database.
- [ ] When `STORE_GEODATA` is disabled, each feature that uses geodata will now make a direct request to the geocoding service to calculate required data.
+  Geodata is used by the following features:
+  - [ ] Fetching geodata for places
+  - [x] Fetching countries for a trip
+  - [ ] Suggesting a place name for a visit
- [ ] When `STORE_GEODATA` is disabled, points are not being reverse geocoded on creation.
+- [ ] When `STORE_GEODATA` is disabled, countries for a trip are pulled directly from the geocoding service.
- [ ] When `STORE_GEODATA` is enabled, points are being reverse geocoded upon creation and stored in the database.
- [ ] Each feature that uses geodata will check if an entity (point, place, etc.) has geodata stored in the database and use it if available. If not, it will make a direct request to the geocoding service to calculate required data.
diff --git a/Gemfile b/Gemfile
index e651c4df..1759ef3f 100644
--- a/Gemfile
+++ b/Gemfile
@@ -30,6 +30,8 @@ gem 'rails', '~> 8.0'
gem 'rexml'
gem 'rgeo'
gem 'rgeo-activerecord'
+gem 'rgeo-geojson'
+gem 'parallel'
gem 'rswag-api'
gem 'rswag-ui'
gem 'sentry-ruby'
diff --git a/Gemfile.lock b/Gemfile.lock
index 1128ba27..284b3dba 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -219,6 +219,7 @@ GEM
mini_portile2 (2.8.8)
minitest (5.25.5)
msgpack (1.7.3)
+ multi_json (1.15.0)
multi_xml (0.7.1)
bigdecimal (~> 3.1)
net-imap (0.5.8)
@@ -339,6 +340,9 @@ GEM
rgeo-activerecord (8.0.0)
activerecord (>= 7.0)
rgeo (>= 3.0)
+ rgeo-geojson (2.2.0)
+ multi_json (~> 1.15)
+ rgeo (>= 1.0.0)
rspec-core (3.13.3)
rspec-support (~> 3.13.0)
rspec-expectations (3.13.3)
@@ -502,6 +506,7 @@ DEPENDENCIES
kaminari
lograge
oj
+ parallel
pg
prometheus_exporter
pry-byebug
@@ -513,6 +518,7 @@ DEPENDENCIES
rexml
rgeo
rgeo-activerecord
+ rgeo-geojson
rspec-rails
rswag-api
rswag-specs
diff --git a/app/jobs/trips/create_path_job.rb b/app/jobs/trips/create_path_job.rb
index d64a39ec..5c73dc90 100644
--- a/app/jobs/trips/create_path_job.rb
+++ b/app/jobs/trips/create_path_job.rb
@@ -6,7 +6,7 @@ class Trips::CreatePathJob < ApplicationJob
def perform(trip_id)
trip = Trip.find(trip_id)
- trip.calculate_path_and_distance
+ trip.calculate_trip_data
trip.save!
end
diff --git a/app/models/trip.rb b/app/models/trip.rb
index 098feb82..caab72ab 100644
--- a/app/models/trip.rb
+++ b/app/models/trip.rb
@@ -7,11 +7,12 @@ class Trip < ApplicationRecord
validates :name, :started_at, :ended_at, presence: true
- before_save :calculate_path_and_distance
+ before_save :calculate_trip_data
- def calculate_path_and_distance
+ def calculate_trip_data
calculate_path
calculate_distance
+ calculate_countries
end
def points
@@ -19,7 +20,9 @@ class Trip < ApplicationRecord
end
def countries
- points.pluck(:country).uniq.compact
+ return points.pluck(:country).uniq.compact if DawarichSettings.store_geodata?
+
+ visited_countries
end
def photo_previews
@@ -56,4 +59,10 @@ class Trip < ApplicationRecord
self.distance = distance.round
end
+
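+  # Trips::Countries returns a hash of country code => point count,
+  # stored in the visited_countries JSONB column.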
+ def calculate_countries
+ countries = Trips::Countries.new(self).call
+
+ self.visited_countries = countries
+ end
end
diff --git a/app/services/reverse_geocoding/places/fetch_data.rb b/app/services/reverse_geocoding/places/fetch_data.rb
index 733ddde7..72195437 100644
--- a/app/services/reverse_geocoding/places/fetch_data.rb
+++ b/app/services/reverse_geocoding/places/fetch_data.rb
@@ -17,10 +17,19 @@ class ReverseGeocoding::Places::FetchData
return
end
- first_place = reverse_geocoded_places.shift
+ places = reverse_geocoded_places
+ first_place = places.shift
update_place(first_place)
- reverse_geocoded_places.each { |reverse_geocoded_place| fetch_and_create_place(reverse_geocoded_place) }
+ # Extract all osm_ids for preloading
+ osm_ids = places.map { |place| place.data['properties']['osm_id'].to_s }
+
+ # Preload all existing places with these osm_ids in a single query
+ existing_places = Place.where("geodata->'properties'->>'osm_id' IN (?)", osm_ids)
+ .index_by { |p| p.geodata.dig('properties', 'osm_id').to_s }
+
+ # Process with preloaded data
+ places.each { |reverse_geocoded_place| fetch_and_create_place(reverse_geocoded_place, existing_places) }
end
private
@@ -41,9 +50,9 @@ class ReverseGeocoding::Places::FetchData
)
end
- def fetch_and_create_place(reverse_geocoded_place)
+ def fetch_and_create_place(reverse_geocoded_place, existing_places = nil)
data = reverse_geocoded_place.data
- new_place = find_place(data)
+ new_place = find_place(data, existing_places)
new_place.name = place_name(data)
new_place.city = data['properties']['city']
@@ -57,12 +66,17 @@ class ReverseGeocoding::Places::FetchData
new_place.save!
end
- def find_place(place_data)
- found_place = Place.where(
- "geodata->'properties'->>'osm_id' = ?", place_data['properties']['osm_id'].to_s
- ).first
+ def find_place(place_data, existing_places = nil)
+ osm_id = place_data['properties']['osm_id'].to_s
- return found_place if found_place.present?
+ # Use the preloaded data if available
+ if existing_places
+ return existing_places[osm_id] if existing_places[osm_id].present?
+ else
+ # Fall back to individual query if no preloaded data
+ found_place = Place.where("geodata->'properties'->>'osm_id' = ?", osm_id).first
+ return found_place if found_place.present?
+ end
Place.find_or_initialize_by(
lonlat: "POINT(#{place_data['geometry']['coordinates'][0].to_f.round(5)} #{place_data['geometry']['coordinates'][1].to_f.round(5)})",
diff --git a/app/services/trips/countries.rb b/app/services/trips/countries.rb
new file mode 100644
index 00000000..2ed18cbb
--- /dev/null
+++ b/app/services/trips/countries.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'rgeo/geo_json'
+require 'rgeo'
+require 'json'
+require 'geocoder'
+
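+# Builds a tally of how many of a trip's points fall in each country by
+# reverse geocoding every point through Geocoder, processing the points in
+# parallel threads.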
+class Trips::Countries
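+  # GeoJSON file with country boundary polygons, decoded into RGeo features in the initializer.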
+ FILE_PATH = Rails.root.join('lib/assets/countries.json')
+
+ def initialize(trip, batch_count = 2)
+ @trip = trip
+ @batch_count = batch_count
+ @factory = RGeo::Geographic.spherical_factory
+ @file = File.read(FILE_PATH)
+ @countries_features =
+ RGeo::GeoJSON.decode(@file, json_parser: :json, geo_factory: @factory)
+ end
+
+ def call
+ all_points = @trip.points.to_a
+ total_points = all_points.size
+
+ # Return empty hash if no points
+ return {} if total_points.zero?
+
+ batches = split_into_batches(all_points, @batch_count)
+ threads_results = process_batches_in_threads(batches, total_points)
+ country_counts = merge_thread_results(threads_results)
+
+ log_results(country_counts, total_points)
+ country_counts.sort_by { |_country, count| -count }.to_h
+ end
+
+ private
+
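+  # Splits the points into @batch_count roughly equal batches, one per worker thread.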
+ def split_into_batches(points, batch_count)
+ batch_count = [batch_count, 1].max # Ensure batch_count is at least 1
+ batch_size = (points.size / batch_count.to_f).ceil
+ points.each_slice(batch_size).to_a
+ end
+
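+  # Runs process_batch for each batch in its own thread and collects the per-batch country counts.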
+ def process_batches_in_threads(batches, total_points)
+ threads_results = []
+ threads = []
+
+    start_index = 1
+
+    batches.each do |batch|
+      batch_start_index = start_index
+      threads << Thread.new do
+        threads_results << process_batch(batch, batch_start_index, total_points)
+      end
+      start_index += batch.size
+    end
+
+ threads.each(&:join)
+ threads_results
+ end
+
+ def merge_thread_results(threads_results)
+ country_counts = {}
+
+ threads_results.each do |result|
+ result.each do |country, count|
+ country_counts[country] ||= 0
+ country_counts[country] += count
+ end
+ end
+
+ country_counts
+ end
+
+ def log_results(country_counts, total_points)
+ total_counted = country_counts.values.sum
+ Rails.logger.info("Processed #{total_points} points and found #{country_counts.size} countries")
+ Rails.logger.info("Points counted: #{total_counted} out of #{total_points}")
+ end
+
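+  # Tallies country codes for every point in a single batch.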
+ def process_batch(points, start_index, total_points)
+ country_counts = {}
+
+ points.each_with_index do |point, idx|
+ current_index = start_index + idx
+ country_code = geocode_point(point, current_index, total_points)
+ next unless country_code
+
+ country_counts[country_code] ||= 0
+ country_counts[country_code] += 1
+ end
+
+ country_counts
+ end
+
+ def geocode_point(point, current_index, total_points)
+ lonlat = point.lonlat
+ return nil unless lonlat
+
+ latitude = lonlat.y
+ longitude = lonlat.x
+
+ log_processing_point(current_index, total_points, latitude, longitude)
+ country_code = fetch_country_code(latitude, longitude)
+ log_found_country(country_code, latitude, longitude) if country_code
+
+ country_code
+ end
+
+ def log_processing_point(current_index, total_points, latitude, longitude)
+ thread_id = Thread.current.object_id
+ Rails.logger.info(
+ "Thread #{thread_id}: Processing point #{current_index} of #{total_points}: lat=#{latitude}, lon=#{longitude}"
+ )
+ end
+
+ def log_found_country(country_code, latitude, longitude)
+ thread_id = Thread.current.object_id
+ Rails.logger.info("Thread #{thread_id}: Found country: #{country_code} for point at #{latitude}, #{longitude}")
+ end
+
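+  # Reverse geocodes a coordinate pair and returns the country code from the
+  # result, or nil (with a logged error) when the lookup fails or returns nothing.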
+ def fetch_country_code(latitude, longitude)
+ results = Geocoder.search([latitude, longitude], limit: 1)
+ return nil unless results.any?
+
+ result = results.first
+ result.data['properties']['countrycode']
+ rescue StandardError => e
+ Rails.logger.error("Error geocoding point: #{e.message}")
+ nil
+ end
+end
diff --git a/app/services/visits/names/fetcher.rb b/app/services/visits/names/fetcher.rb
index 6a3ed157..7e970551 100644
--- a/app/services/visits/names/fetcher.rb
+++ b/app/services/visits/names/fetcher.rb
@@ -27,8 +27,7 @@ module Visits
def build_place_name
return nil if geocoder_results.first&.data.blank?
- properties = geocoder_results.first.data['properties']
- return nil unless properties.present?
+ return nil if properties.blank?
# First try the direct properties approach
name = Visits::Names::Builder.build_from_properties(properties)
@@ -51,6 +50,10 @@ module Visits
}
end.compact
end
+
+ def properties
+ @properties ||= geocoder_results.first.data['properties']
+ end
end
end
end
diff --git a/app/views/home/index.html.erb b/app/views/home/index.html.erb
index f290fc01..259b784d 100644
--- a/app/views/home/index.html.erb
+++ b/app/views/home/index.html.erb
@@ -8,8 +8,10 @@
The only location history tracker you'll ever need.
- <%= link_to 'Sign up', new_user_registration_path, class: "rounded-lg py-3 px-5 my-3 bg-blue-600 text-white block font-medium" %>
- or
+ <% if !DawarichSettings.self_hosted? %>
+ <%= link_to 'Sign up', new_user_registration_path, class: "rounded-lg py-3 px-5 my-3 bg-blue-600 text-white block font-medium" %>
+ or
+ <% end %>
<%= link_to 'Sign in', new_user_session_path, class: "rounded-lg py-3 px-5 bg-neutral text-neutral-content block font-medium" %>
diff --git a/db/migrate/20250513164521_add_visited_countries_to_trips.rb b/db/migrate/20250513164521_add_visited_countries_to_trips.rb
new file mode 100644
index 00000000..27072d3e
--- /dev/null
+++ b/db/migrate/20250513164521_add_visited_countries_to_trips.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+class AddVisitedCountriesToTrips < ActiveRecord::Migration[8.0]
+ def change
+ add_column :trips, :visited_countries, :jsonb, default: []
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index 68eac3a8..85f06f62 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -10,264 +10,262 @@
#
# It's strongly recommended that you check this file into your version control system.
-ActiveRecord::Schema[8.0].define(version: 20_250_404_182_437) do
+ActiveRecord::Schema[8.0].define(version: 2025_05_13_164521) do
# These are extensions that must be enabled in order to support this database
- enable_extension 'pg_catalog.plpgsql'
- enable_extension 'postgis'
+ enable_extension "pg_catalog.plpgsql"
+ enable_extension "postgis"
- create_table 'action_text_rich_texts', force: :cascade do |t|
- t.string 'name', null: false
- t.text 'body'
- t.string 'record_type', null: false
- t.bigint 'record_id', null: false
- t.datetime 'created_at', null: false
- t.datetime 'updated_at', null: false
- t.index %w[record_type record_id name], name: 'index_action_text_rich_texts_uniqueness', unique: true
+ create_table "action_text_rich_texts", force: :cascade do |t|
+ t.string "name", null: false
+ t.text "body"
+ t.string "record_type", null: false
+ t.bigint "record_id", null: false
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.index ["record_type", "record_id", "name"], name: "index_action_text_rich_texts_uniqueness", unique: true
end
- create_table 'active_storage_attachments', force: :cascade do |t|
- t.string 'name', null: false
- t.string 'record_type', null: false
- t.bigint 'record_id', null: false
- t.bigint 'blob_id', null: false
- t.datetime 'created_at', null: false
- t.index ['blob_id'], name: 'index_active_storage_attachments_on_blob_id'
- t.index %w[record_type record_id name blob_id], name: 'index_active_storage_attachments_uniqueness',
-unique: true
+ create_table "active_storage_attachments", force: :cascade do |t|
+ t.string "name", null: false
+ t.string "record_type", null: false
+ t.bigint "record_id", null: false
+ t.bigint "blob_id", null: false
+ t.datetime "created_at", null: false
+ t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id"
+ t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true
end
- create_table 'active_storage_blobs', force: :cascade do |t|
- t.string 'key', null: false
- t.string 'filename', null: false
- t.string 'content_type'
- t.text 'metadata'
- t.string 'service_name', null: false
- t.bigint 'byte_size', null: false
- t.string 'checksum'
- t.datetime 'created_at', null: false
- t.index ['key'], name: 'index_active_storage_blobs_on_key', unique: true
+ create_table "active_storage_blobs", force: :cascade do |t|
+ t.string "key", null: false
+ t.string "filename", null: false
+ t.string "content_type"
+ t.text "metadata"
+ t.string "service_name", null: false
+ t.bigint "byte_size", null: false
+ t.string "checksum"
+ t.datetime "created_at", null: false
+ t.index ["key"], name: "index_active_storage_blobs_on_key", unique: true
end
- create_table 'active_storage_variant_records', force: :cascade do |t|
- t.bigint 'blob_id', null: false
- t.string 'variation_digest', null: false
- t.index %w[blob_id variation_digest], name: 'index_active_storage_variant_records_uniqueness', unique: true
+ create_table "active_storage_variant_records", force: :cascade do |t|
+ t.bigint "blob_id", null: false
+ t.string "variation_digest", null: false
+ t.index ["blob_id", "variation_digest"], name: "index_active_storage_variant_records_uniqueness", unique: true
end
- create_table 'areas', force: :cascade do |t|
- t.string 'name', null: false
- t.bigint 'user_id', null: false
- t.decimal 'longitude', precision: 10, scale: 6, null: false
- t.decimal 'latitude', precision: 10, scale: 6, null: false
- t.integer 'radius', null: false
- t.datetime 'created_at', null: false
- t.datetime 'updated_at', null: false
- t.index ['user_id'], name: 'index_areas_on_user_id'
+ create_table "areas", force: :cascade do |t|
+ t.string "name", null: false
+ t.bigint "user_id", null: false
+ t.decimal "longitude", precision: 10, scale: 6, null: false
+ t.decimal "latitude", precision: 10, scale: 6, null: false
+ t.integer "radius", null: false
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.index ["user_id"], name: "index_areas_on_user_id"
end
- create_table 'data_migrations', primary_key: 'version', id: :string, force: :cascade do |t|
+ create_table "data_migrations", primary_key: "version", id: :string, force: :cascade do |t|
end
- create_table 'exports', force: :cascade do |t|
- t.string 'name', null: false
- t.string 'url'
- t.integer 'status', default: 0, null: false
- t.bigint 'user_id', null: false
- t.datetime 'created_at', null: false
- t.datetime 'updated_at', null: false
- t.integer 'file_format', default: 0
- t.datetime 'start_at'
- t.datetime 'end_at'
- t.index ['status'], name: 'index_exports_on_status'
- t.index ['user_id'], name: 'index_exports_on_user_id'
+ create_table "exports", force: :cascade do |t|
+ t.string "name", null: false
+ t.string "url"
+ t.integer "status", default: 0, null: false
+ t.bigint "user_id", null: false
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.integer "file_format", default: 0
+ t.datetime "start_at"
+ t.datetime "end_at"
+ t.index ["status"], name: "index_exports_on_status"
+ t.index ["user_id"], name: "index_exports_on_user_id"
end
- create_table 'imports', force: :cascade do |t|
- t.string 'name', null: false
- t.bigint 'user_id', null: false
- t.integer 'source', default: 0
- t.datetime 'created_at', null: false
- t.datetime 'updated_at', null: false
- t.integer 'raw_points', default: 0
- t.integer 'doubles', default: 0
- t.integer 'processed', default: 0
- t.jsonb 'raw_data'
- t.integer 'points_count', default: 0
- t.index ['source'], name: 'index_imports_on_source'
- t.index ['user_id'], name: 'index_imports_on_user_id'
+ create_table "imports", force: :cascade do |t|
+ t.string "name", null: false
+ t.bigint "user_id", null: false
+ t.integer "source", default: 0
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.integer "raw_points", default: 0
+ t.integer "doubles", default: 0
+ t.integer "processed", default: 0
+ t.jsonb "raw_data"
+ t.integer "points_count", default: 0
+ t.index ["source"], name: "index_imports_on_source"
+ t.index ["user_id"], name: "index_imports_on_user_id"
end
- create_table 'notifications', force: :cascade do |t|
- t.string 'title', null: false
- t.text 'content', null: false
- t.bigint 'user_id', null: false
- t.integer 'kind', default: 0, null: false
- t.datetime 'read_at'
- t.datetime 'created_at', null: false
- t.datetime 'updated_at', null: false
- t.index ['kind'], name: 'index_notifications_on_kind'
- t.index ['user_id'], name: 'index_notifications_on_user_id'
+ create_table "notifications", force: :cascade do |t|
+ t.string "title", null: false
+ t.text "content", null: false
+ t.bigint "user_id", null: false
+ t.integer "kind", default: 0, null: false
+ t.datetime "read_at"
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.index ["kind"], name: "index_notifications_on_kind"
+ t.index ["user_id"], name: "index_notifications_on_user_id"
end
- create_table 'place_visits', force: :cascade do |t|
- t.bigint 'place_id', null: false
- t.bigint 'visit_id', null: false
- t.datetime 'created_at', null: false
- t.datetime 'updated_at', null: false
- t.index ['place_id'], name: 'index_place_visits_on_place_id'
- t.index ['visit_id'], name: 'index_place_visits_on_visit_id'
+ create_table "place_visits", force: :cascade do |t|
+ t.bigint "place_id", null: false
+ t.bigint "visit_id", null: false
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.index ["place_id"], name: "index_place_visits_on_place_id"
+ t.index ["visit_id"], name: "index_place_visits_on_visit_id"
end
- create_table 'places', force: :cascade do |t|
- t.string 'name', null: false
- t.decimal 'longitude', precision: 10, scale: 6, null: false
- t.decimal 'latitude', precision: 10, scale: 6, null: false
- t.string 'city'
- t.string 'country'
- t.integer 'source', default: 0
- t.jsonb 'geodata', default: {}, null: false
- t.datetime 'reverse_geocoded_at'
- t.datetime 'created_at', null: false
- t.datetime 'updated_at', null: false
- t.geography 'lonlat', limit: { srid: 4326, type: 'st_point', geographic: true }
- t.index ['lonlat'], name: 'index_places_on_lonlat', using: :gist
+ create_table "places", force: :cascade do |t|
+ t.string "name", null: false
+ t.decimal "longitude", precision: 10, scale: 6, null: false
+ t.decimal "latitude", precision: 10, scale: 6, null: false
+ t.string "city"
+ t.string "country"
+ t.integer "source", default: 0
+ t.jsonb "geodata", default: {}, null: false
+ t.datetime "reverse_geocoded_at"
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.geography "lonlat", limit: {srid: 4326, type: "st_point", geographic: true}
+ t.index ["lonlat"], name: "index_places_on_lonlat", using: :gist
end
- create_table 'points', force: :cascade do |t|
- t.integer 'battery_status'
- t.string 'ping'
- t.integer 'battery'
- t.string 'tracker_id'
- t.string 'topic'
- t.integer 'altitude'
- t.decimal 'longitude', precision: 10, scale: 6
- t.string 'velocity'
- t.integer 'trigger'
- t.string 'bssid'
- t.string 'ssid'
- t.integer 'connection'
- t.integer 'vertical_accuracy'
- t.integer 'accuracy'
- t.integer 'timestamp'
- t.decimal 'latitude', precision: 10, scale: 6
- t.integer 'mode'
- t.text 'inrids', default: [], array: true
- t.text 'in_regions', default: [], array: true
- t.jsonb 'raw_data', default: {}
- t.bigint 'import_id'
- t.string 'city'
- t.string 'country'
- t.datetime 'created_at', null: false
- t.datetime 'updated_at', null: false
- t.bigint 'user_id'
- t.jsonb 'geodata', default: {}, null: false
- t.bigint 'visit_id'
- t.datetime 'reverse_geocoded_at'
- t.decimal 'course', precision: 8, scale: 5
- t.decimal 'course_accuracy', precision: 8, scale: 5
- t.string 'external_track_id'
- t.geography 'lonlat', limit: { srid: 4326, type: 'st_point', geographic: true }
- t.index ['altitude'], name: 'index_points_on_altitude'
- t.index ['battery'], name: 'index_points_on_battery'
- t.index ['battery_status'], name: 'index_points_on_battery_status'
- t.index ['city'], name: 'index_points_on_city'
- t.index ['connection'], name: 'index_points_on_connection'
- t.index ['country'], name: 'index_points_on_country'
- t.index ['external_track_id'], name: 'index_points_on_external_track_id'
- t.index ['geodata'], name: 'index_points_on_geodata', using: :gin
- t.index ['import_id'], name: 'index_points_on_import_id'
- t.index %w[latitude longitude], name: 'index_points_on_latitude_and_longitude'
- t.index %w[lonlat timestamp user_id], name: 'index_points_on_lonlat_timestamp_user_id', unique: true
- t.index ['lonlat'], name: 'index_points_on_lonlat', using: :gist
- t.index ['reverse_geocoded_at'], name: 'index_points_on_reverse_geocoded_at'
- t.index ['timestamp'], name: 'index_points_on_timestamp'
- t.index ['trigger'], name: 'index_points_on_trigger'
- t.index ['user_id'], name: 'index_points_on_user_id'
- t.index ['visit_id'], name: 'index_points_on_visit_id'
+ create_table "points", force: :cascade do |t|
+ t.integer "battery_status"
+ t.string "ping"
+ t.integer "battery"
+ t.string "tracker_id"
+ t.string "topic"
+ t.integer "altitude"
+ t.decimal "longitude", precision: 10, scale: 6
+ t.string "velocity"
+ t.integer "trigger"
+ t.string "bssid"
+ t.string "ssid"
+ t.integer "connection"
+ t.integer "vertical_accuracy"
+ t.integer "accuracy"
+ t.integer "timestamp"
+ t.decimal "latitude", precision: 10, scale: 6
+ t.integer "mode"
+ t.text "inrids", default: [], array: true
+ t.text "in_regions", default: [], array: true
+ t.jsonb "raw_data", default: {}
+ t.bigint "import_id"
+ t.string "city"
+ t.string "country"
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.bigint "user_id"
+ t.jsonb "geodata", default: {}, null: false
+ t.bigint "visit_id"
+ t.datetime "reverse_geocoded_at"
+ t.decimal "course", precision: 8, scale: 5
+ t.decimal "course_accuracy", precision: 8, scale: 5
+ t.string "external_track_id"
+ t.geography "lonlat", limit: {srid: 4326, type: "st_point", geographic: true}
+ t.index ["altitude"], name: "index_points_on_altitude"
+ t.index ["battery"], name: "index_points_on_battery"
+ t.index ["battery_status"], name: "index_points_on_battery_status"
+ t.index ["city"], name: "index_points_on_city"
+ t.index ["connection"], name: "index_points_on_connection"
+ t.index ["country"], name: "index_points_on_country"
+ t.index ["external_track_id"], name: "index_points_on_external_track_id"
+ t.index ["geodata"], name: "index_points_on_geodata", using: :gin
+ t.index ["import_id"], name: "index_points_on_import_id"
+ t.index ["latitude", "longitude"], name: "index_points_on_latitude_and_longitude"
+ t.index ["lonlat", "timestamp", "user_id"], name: "index_points_on_lonlat_timestamp_user_id", unique: true
+ t.index ["lonlat"], name: "index_points_on_lonlat", using: :gist
+ t.index ["reverse_geocoded_at"], name: "index_points_on_reverse_geocoded_at"
+ t.index ["timestamp"], name: "index_points_on_timestamp"
+ t.index ["trigger"], name: "index_points_on_trigger"
+ t.index ["user_id"], name: "index_points_on_user_id"
+ t.index ["visit_id"], name: "index_points_on_visit_id"
end
- create_table 'stats', force: :cascade do |t|
- t.integer 'year', null: false
- t.integer 'month', null: false
- t.integer 'distance', null: false
- t.jsonb 'toponyms'
- t.datetime 'created_at', null: false
- t.datetime 'updated_at', null: false
- t.bigint 'user_id', null: false
- t.jsonb 'daily_distance', default: {}
- t.index ['distance'], name: 'index_stats_on_distance'
- t.index ['month'], name: 'index_stats_on_month'
- t.index ['user_id'], name: 'index_stats_on_user_id'
- t.index ['year'], name: 'index_stats_on_year'
+ create_table "stats", force: :cascade do |t|
+ t.integer "year", null: false
+ t.integer "month", null: false
+ t.integer "distance", null: false
+ t.jsonb "toponyms"
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.bigint "user_id", null: false
+ t.jsonb "daily_distance", default: {}
+ t.index ["distance"], name: "index_stats_on_distance"
+ t.index ["month"], name: "index_stats_on_month"
+ t.index ["user_id"], name: "index_stats_on_user_id"
+ t.index ["year"], name: "index_stats_on_year"
end
- create_table 'trips', force: :cascade do |t|
- t.string 'name', null: false
- t.datetime 'started_at', null: false
- t.datetime 'ended_at', null: false
- t.integer 'distance'
- t.bigint 'user_id', null: false
- t.datetime 'created_at', null: false
- t.datetime 'updated_at', null: false
- t.geometry 'path', limit: { srid: 3857, type: 'line_string' }
- t.index ['user_id'], name: 'index_trips_on_user_id'
+ create_table "trips", force: :cascade do |t|
+ t.string "name", null: false
+ t.datetime "started_at", null: false
+ t.datetime "ended_at", null: false
+ t.integer "distance"
+ t.bigint "user_id", null: false
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.geometry "path", limit: {srid: 3857, type: "line_string"}
+ t.jsonb "visited_countries", default: []
+ t.index ["user_id"], name: "index_trips_on_user_id"
end
- create_table 'users', force: :cascade do |t|
- t.string 'email', default: '', null: false
- t.string 'encrypted_password', default: '', null: false
- t.string 'reset_password_token'
- t.datetime 'reset_password_sent_at'
- t.datetime 'remember_created_at'
- t.datetime 'created_at', null: false
- t.datetime 'updated_at', null: false
- t.string 'api_key', default: '', null: false
- t.string 'theme', default: 'dark', null: false
- t.jsonb 'settings',
- default: { 'fog_of_war_meters' => '100', 'meters_between_routes' => '1000',
-'minutes_between_routes' => '60' }
- t.boolean 'admin', default: false
- t.integer 'sign_in_count', default: 0, null: false
- t.datetime 'current_sign_in_at'
- t.datetime 'last_sign_in_at'
- t.string 'current_sign_in_ip'
- t.string 'last_sign_in_ip'
- t.integer 'status', default: 0
- t.datetime 'active_until'
- t.index ['email'], name: 'index_users_on_email', unique: true
- t.index ['reset_password_token'], name: 'index_users_on_reset_password_token', unique: true
+ create_table "users", force: :cascade do |t|
+ t.string "email", default: "", null: false
+ t.string "encrypted_password", default: "", null: false
+ t.string "reset_password_token"
+ t.datetime "reset_password_sent_at"
+ t.datetime "remember_created_at"
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.string "api_key", default: "", null: false
+ t.string "theme", default: "dark", null: false
+ t.jsonb "settings", default: {"fog_of_war_meters" => "100", "meters_between_routes" => "1000", "minutes_between_routes" => "60"}
+ t.boolean "admin", default: false
+ t.integer "sign_in_count", default: 0, null: false
+ t.datetime "current_sign_in_at"
+ t.datetime "last_sign_in_at"
+ t.string "current_sign_in_ip"
+ t.string "last_sign_in_ip"
+ t.integer "status", default: 0
+ t.datetime "active_until"
+ t.index ["email"], name: "index_users_on_email", unique: true
+ t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
end
- add_check_constraint 'users', 'admin IS NOT NULL', name: 'users_admin_null', validate: false
+ add_check_constraint "users", "admin IS NOT NULL", name: "users_admin_null", validate: false
- create_table 'visits', force: :cascade do |t|
- t.bigint 'area_id'
- t.bigint 'user_id', null: false
- t.datetime 'started_at', null: false
- t.datetime 'ended_at', null: false
- t.integer 'duration', null: false
- t.string 'name', null: false
- t.integer 'status', default: 0, null: false
- t.datetime 'created_at', null: false
- t.datetime 'updated_at', null: false
- t.bigint 'place_id'
- t.index ['area_id'], name: 'index_visits_on_area_id'
- t.index ['place_id'], name: 'index_visits_on_place_id'
- t.index ['started_at'], name: 'index_visits_on_started_at'
- t.index ['user_id'], name: 'index_visits_on_user_id'
+ create_table "visits", force: :cascade do |t|
+ t.bigint "area_id"
+ t.bigint "user_id", null: false
+ t.datetime "started_at", null: false
+ t.datetime "ended_at", null: false
+ t.integer "duration", null: false
+ t.string "name", null: false
+ t.integer "status", default: 0, null: false
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.bigint "place_id"
+ t.index ["area_id"], name: "index_visits_on_area_id"
+ t.index ["place_id"], name: "index_visits_on_place_id"
+ t.index ["started_at"], name: "index_visits_on_started_at"
+ t.index ["user_id"], name: "index_visits_on_user_id"
end
- add_foreign_key 'active_storage_attachments', 'active_storage_blobs', column: 'blob_id'
- add_foreign_key 'active_storage_variant_records', 'active_storage_blobs', column: 'blob_id'
- add_foreign_key 'areas', 'users'
- add_foreign_key 'notifications', 'users'
- add_foreign_key 'place_visits', 'places'
- add_foreign_key 'place_visits', 'visits'
- add_foreign_key 'points', 'users'
- add_foreign_key 'points', 'visits'
- add_foreign_key 'stats', 'users'
- add_foreign_key 'trips', 'users'
- add_foreign_key 'visits', 'areas'
- add_foreign_key 'visits', 'places'
- add_foreign_key 'visits', 'users'
+ add_foreign_key "active_storage_attachments", "active_storage_blobs", column: "blob_id"
+ add_foreign_key "active_storage_variant_records", "active_storage_blobs", column: "blob_id"
+ add_foreign_key "areas", "users"
+ add_foreign_key "notifications", "users"
+ add_foreign_key "place_visits", "places"
+ add_foreign_key "place_visits", "visits"
+ add_foreign_key "points", "users"
+ add_foreign_key "points", "visits"
+ add_foreign_key "stats", "users"
+ add_foreign_key "trips", "users"
+ add_foreign_key "visits", "areas"
+ add_foreign_key "visits", "places"
+ add_foreign_key "visits", "users"
end
diff --git a/spec/models/trip_spec.rb b/spec/models/trip_spec.rb
index f56daf20..7d17117d 100644
--- a/spec/models/trip_spec.rb
+++ b/spec/models/trip_spec.rb
@@ -3,6 +3,10 @@
require 'rails_helper'
RSpec.describe Trip, type: :model do
+ before do
+ allow_any_instance_of(Trips::Countries).to receive(:call).and_return([])
+ end
+
describe 'validations' do
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_presence_of(:started_at) }
@@ -25,6 +29,26 @@ RSpec.describe Trip, type: :model do
it 'sets the path' do
expect(trip.path).to be_present
end
+
+ context 'when DawarichSettings.store_geodata is enabled' do
+ before do
+ allow(DawarichSettings).to receive(:store_geodata?).and_return(true)
+ end
+
+ it 'sets the countries' do
+ expect(trip.countries).to eq(trip.points.pluck(:country).uniq.compact)
+ end
+ end
+
+    context 'when DawarichSettings.store_geodata is disabled' do
+      before do
+        allow(DawarichSettings).to receive(:store_geodata?).and_return(false)
+      end
+
+      it 'sets the visited countries' do
+ countries_service = instance_double(Trips::Countries, call: [])
+ expect(Trips::Countries).to receive(:new).with(trip).and_return(countries_service)
+ expect(countries_service).to receive(:call)
+
+ trip.save
+ end
+ end
end
describe '#countries' do
diff --git a/spec/rails_helper.rb b/spec/rails_helper.rb
index ed2d0de6..15d0ef7c 100644
--- a/spec/rails_helper.rb
+++ b/spec/rails_helper.rb
@@ -38,6 +38,7 @@ RSpec.configure do |config|
config.before do
ActiveJob::Base.queue_adapter = :test
+ allow(DawarichSettings).to receive(:store_geodata?).and_return(true)
end
config.after(:suite) do
diff --git a/spec/services/trips/countries_spec.rb b/spec/services/trips/countries_spec.rb
new file mode 100644
index 00000000..107e98fc
--- /dev/null
+++ b/spec/services/trips/countries_spec.rb
@@ -0,0 +1,108 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+RSpec.describe Trips::Countries do
+ let(:trip) { instance_double('Trip') }
+ let(:point1) { instance_double('Point', lonlat: factory.point(10.0, 50.0)) }
+ let(:point2) { instance_double('Point', lonlat: factory.point(20.0, 60.0)) }
+ let(:point3) { instance_double('Point', lonlat: factory.point(30.0, 70.0)) }
+ let(:point4) { instance_double('Point', lonlat: nil) }
+ let(:factory) { RGeo::Geographic.spherical_factory }
+ let(:points) { [point1, point2, point3, point4] }
+
+ let(:geo_json_content) do
+ {
+ type: 'FeatureCollection',
+ features: [
+ {
+ type: 'Feature',
+ properties: { ADMIN: 'Germany', ISO_A3: 'DEU', ISO_A2: 'DE' },
+ geometry: { type: 'MultiPolygon', coordinates: [] }
+ }
+ ]
+ }.to_json
+ end
+
+ before do
+ allow(trip).to receive(:points).and_return(points)
+ allow(File).to receive(:read).with(Trips::Countries::FILE_PATH).and_return(geo_json_content)
+
+ # Explicitly stub all Geocoder calls with specific coordinates
+ stub_request(:get, 'https://photon.dawarich.app/reverse?lang=en&lat=50.0&limit=1&lon=10.0')
+ .to_return(
+ status: 200,
+ body: {
+ type: 'FeatureCollection',
+ features: [{ type: 'Feature', properties: { countrycode: 'DE' } }]
+ }.to_json
+ )
+
+ stub_request(:get, 'https://photon.dawarich.app/reverse?lang=en&lat=60.0&limit=1&lon=20.0')
+ .to_return(
+ status: 200,
+ body: {
+ type: 'FeatureCollection',
+ features: [{ type: 'Feature', properties: { countrycode: 'SE' } }]
+ }.to_json
+ )
+
+ stub_request(:get, 'https://photon.dawarich.app/reverse?lang=en&lat=70.0&limit=1&lon=30.0')
+ .to_return(
+ status: 200,
+ body: {
+ type: 'FeatureCollection',
+ features: [{ type: 'Feature', properties: { countrycode: 'FI' } }]
+ }.to_json
+ )
+
+ allow(Rails.logger).to receive(:info)
+ allow(Rails.logger).to receive(:error)
+ end
+
+ describe '#call' do
+ it 'returns a hash with country counts' do
+ allow(Thread).to receive(:new).and_yield
+
+ result = described_class.new(trip).call
+
+ expect(result).to be_a(Hash)
+ expect(result.keys).to match_array(%w[DE SE FI])
+ expect(result.values.sum).to eq(3)
+ end
+
+ it 'handles points without coordinates' do
+ allow(Thread).to receive(:new).and_yield
+
+ result = described_class.new(trip).call
+
+ expect(result.values.sum).to eq(3) # Should only count the 3 valid points
+ end
+
+ it 'processes batches in multiple threads' do
+ expect(Thread).to receive(:new).at_least(:twice).and_yield
+
+ described_class.new(trip).call
+ end
+
+ it 'sorts countries by count in descending order' do
+ allow(Thread).to receive(:new).and_yield
+ allow(points).to receive(:to_a).and_return([point1, point1, point2, point3, point4])
+
+ # Make sure we have a stub for the duplicated point
+ stub_request(:get, 'https://photon.dawarich.app/reverse?lang=en&lat=50.0&limit=1&lon=10.0')
+ .to_return(
+ status: 200,
+ body: {
+ type: 'FeatureCollection',
+ features: [{ type: 'Feature', properties: { countrycode: 'DE' } }]
+ }.to_json
+ )
+
+ result = described_class.new(trip).call
+
+ expect(result.keys.first).to eq('DE')
+ expect(result['DE']).to eq(2)
+ end
+ end
+end
diff --git a/spec/support/geocoder_stubs.rb b/spec/support/geocoder_stubs.rb
new file mode 100644
index 00000000..8e34dc2d
--- /dev/null
+++ b/spec/support/geocoder_stubs.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+# Stub all Geocoder requests in tests
+RSpec.configure do |config|
+ config.before(:each) do
+ # Create a generic stub for all Geocoder requests
+ stub_request(:any, %r{photon\.dawarich\.app/reverse}).to_return(
+ status: 200,
+ body: {
+ type: 'FeatureCollection',
+ features: [
+ {
+ type: 'Feature',
+ properties: {
+ name: 'Test Location',
+ countrycode: 'US',
+ country: 'United States',
+ state: 'New York'
+ },
+ geometry: {
+ coordinates: [-73.9, 40.7],
+ type: 'Point'
+ }
+ }
+ ]
+ }.to_json,
+ headers: { 'Content-Type' => 'application/json' }
+ )
+ end
+end