Mirror of https://github.com/Freika/dawarich.git
Synced 2026-01-11 09:41:40 -05:00

Merge pull request #1177 from Freika/feature/store-geodata

Feature/store geodata

This commit is contained in: commit 0501c15ab6

43 changed files with 1143 additions and 372 deletions
CHANGELOG.md (28 lines changed)

@@ -5,12 +5,38 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/)
 and this project adheres to [Semantic Versioning](http://semver.org/).

-# 0.26.1 - 2025-05-12
+# 0.26.1 - 2025-05-15
+
+## Geodata on demand
+
+This release introduces a new environment variable, `STORE_GEODATA`, which controls whether geodata is stored in the database.
+
+When `STORE_GEODATA` is disabled, each feature that needs geodata makes a direct request to the geocoding service to calculate the required data.
+
+Geodata is used for:
+
+- Fetching geodata for places
+- Fetching countries for a trip
+- Suggesting a place name for a visit
+
+If you prefer the old behavior, set `STORE_GEODATA` to `true`. Starting with this release, it defaults to `false`.
+
+If you're running your own Photon instance, you can safely set `STORE_GEODATA` to `false`. Otherwise it's better to keep it enabled, so Dawarich can reuse the geodata it has already stored for its calculations.
+
+## Added
+
+- The map page now has buttons to go to the previous and next day. #296 #631 #904
+
+## Changed
+
+- Reverse geocoding now runs as an on-demand job instead of storing its result in the database.
+- Stats cards now show the last update time. #733

 ## Fixed

 - Fixed a bug with an attempt to write points with the same lonlat and timestamp from the iOS app. #1170
 - Importing GeoJSON files now saves velocity if it was stored in either the `velocity` or `speed` property.
+- `rake points:migrate_to_lonlat` should work properly now. #1083 #1161

 # 0.26.0 - 2025-05-08
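The changelog entry above can be summarized in one place: the flag is read once from the environment, exposed through `DawarichSettings`, and used to gate reverse-geocoding callbacks. A condensed sketch pieced together from the changes further down in this diff (the `class << self` nesting in `DawarichSettings` is assumed; the individual lines come from the initializer, the settings class and the `Point` model changes below):

    # Initializer: read the flag once at boot; storing geodata is off by default.
    STORE_GEODATA = ENV.fetch('STORE_GEODATA', 'false') == 'true'

    class DawarichSettings
      class << self
        def store_geodata?
          @store_geodata ||= STORE_GEODATA
        end
      end
    end

    class Point < ApplicationRecord
      # New points are only queued for reverse geocoding when storage is enabled.
      after_create :async_reverse_geocode, if: -> { DawarichSettings.store_geodata? }
    end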
Gemfile (1 line added)

@@ -30,6 +30,7 @@ gem 'rails', '~> 8.0'
 gem 'rexml'
 gem 'rgeo'
 gem 'rgeo-activerecord'
+gem 'rgeo-geojson'
 gem 'rswag-api'
 gem 'rswag-ui'
 gem 'sentry-ruby'

Gemfile.lock

@@ -219,6 +219,7 @@ GEM
     mini_portile2 (2.8.8)
     minitest (5.25.5)
     msgpack (1.7.3)
+    multi_json (1.15.0)
     multi_xml (0.7.1)
       bigdecimal (~> 3.1)
     net-imap (0.5.8)

@@ -339,6 +340,9 @@ GEM
     rgeo-activerecord (8.0.0)
       activerecord (>= 7.0)
       rgeo (>= 3.0)
+    rgeo-geojson (2.2.0)
+      multi_json (~> 1.15)
+      rgeo (>= 1.0.0)
     rspec-core (3.13.3)
       rspec-support (~> 3.13.0)
     rspec-expectations (3.13.3)

@@ -513,6 +517,7 @@ DEPENDENCIES
   rexml
   rgeo
   rgeo-activerecord
+  rgeo-geojson
   rspec-rails
   rswag-api
   rswag-specs
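The new `rgeo-geojson` dependency is what the `Trips::Countries` service introduced later in this diff uses to decode country polygons from a bundled GeoJSON file. A minimal decode sketch mirroring that service (it assumes the Rails environment is loaded so `Rails.root` resolves, and that `lib/assets/countries.json` exists as the service expects):

    factory = RGeo::Geographic.spherical_factory
    geojson = File.read(Rails.root.join('lib/assets/countries.json'))

    # Returns an RGeo::GeoJSON::FeatureCollection; each feature carries a country geometry.
    countries = RGeo::GeoJSON.decode(geojson, json_parser: :json, geo_factory: factory)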
File diff suppressed because one or more lines are too long
StatsController

@@ -5,7 +5,7 @@ class StatsController < ApplicationController
   before_action :authenticate_active_user!, only: %i[update update_all]

   def index
-    @stats = current_user.stats.group_by(&:year).sort.reverse
+    @stats = current_user.stats.group_by(&:year).transform_values { |stats| stats.sort_by(&:updated_at).reverse }.sort.reverse
     @points_total = current_user.tracked_points.count
     @points_reverse_geocoded = current_user.total_reverse_geocoded_points
     @points_reverse_geocoded_without_data = current_user.total_reverse_geocoded_points_without_data
Trips::CreatePathJob

@@ -6,7 +6,7 @@ class Trips::CreatePathJob < ApplicationJob
   def perform(trip_id)
     trip = Trip.find(trip_id)

-    trip.calculate_path_and_distance
+    trip.calculate_trip_data

     trip.save!
   end
Place model

@@ -22,29 +22,19 @@ class Place < ApplicationRecord
     lonlat.y
   end

-  def async_reverse_geocode
-    return unless DawarichSettings.reverse_geocoding_enabled?
-
-    ReverseGeocodingJob.perform_later(self.class.to_s, id)
-  end
-
-  def reverse_geocoded?
-    geodata.present?
-  end
-
   def osm_id
-    geodata['properties']['osm_id']
+    geodata.dig('properties', 'osm_id')
   end

   def osm_key
-    geodata['properties']['osm_key']
+    geodata.dig('properties', 'osm_key')
   end

   def osm_value
-    geodata['properties']['osm_value']
+    geodata.dig('properties', 'osm_value')
   end

   def osm_type
-    geodata['properties']['osm_type']
+    geodata.dig('properties', 'osm_type')
   end
 end
Point model

@@ -28,7 +28,7 @@ class Point < ApplicationRecord
   scope :visited, -> { where.not(visit_id: nil) }
   scope :not_visited, -> { where(visit_id: nil) }

-  after_create :async_reverse_geocode
+  after_create :async_reverse_geocode, if: -> { DawarichSettings.store_geodata? }
   after_create_commit :broadcast_coordinates

   def self.without_raw_data
Trip model

@@ -7,11 +7,12 @@ class Trip < ApplicationRecord
   validates :name, :started_at, :ended_at, presence: true

-  before_save :calculate_path_and_distance
+  before_save :calculate_trip_data

-  def calculate_path_and_distance
+  def calculate_trip_data
     calculate_path
     calculate_distance
+    calculate_countries
   end

   def points

@@ -19,7 +20,9 @@ class Trip < ApplicationRecord
   end

   def countries
-    points.pluck(:country).uniq.compact
+    return points.pluck(:country).uniq.compact if DawarichSettings.store_geodata?
+
+    visited_countries
   end

   def photo_previews

@@ -56,4 +59,10 @@ class Trip < ApplicationRecord
     self.distance = distance.round
   end
+
+  def calculate_countries
+    countries = Trips::Countries.new(self).call
+
+    self.visited_countries = countries
+  end
 end
Visit model

@@ -12,10 +12,6 @@ class Visit < ApplicationRecord
   enum :status, { suggested: 0, confirmed: 1, declined: 2 }

-  def reverse_geocoded?
-    place.geodata.present?
-  end
-
   def coordinates
     points.pluck(:latitude, :longitude).map { [_1[0].to_f, _1[1].to_f] }
   end
Api::PlaceSerializer

@@ -7,14 +7,16 @@ class Api::PlaceSerializer
   def call
     {
       id: place.id,
       name: place.name,
       longitude: place.lon,
       latitude: place.lat,
       city: place.city,
       country: place.country,
       source: place.source,
       geodata: place.geodata,
+      created_at: place.created_at,
+      updated_at: place.updated_at,
       reverse_geocoded_at: place.reverse_geocoded_at
     }
   end
Jobs::Create

@@ -21,8 +21,6 @@ class Jobs::Create
       raise InvalidJobName, 'Invalid job name'
     end

-    points.find_each(batch_size: 1_000) do |point|
-      point.async_reverse_geocode
-    end
+    points.find_each(&:async_reverse_geocode)
   end
 end
ReverseGeocoding::Places::FetchData

@@ -17,10 +17,19 @@ class ReverseGeocoding::Places::FetchData
       return
     end

-    first_place = reverse_geocoded_places.shift
+    places = reverse_geocoded_places
+    first_place = places.shift
     update_place(first_place)

-    reverse_geocoded_places.each { |reverse_geocoded_place| fetch_and_create_place(reverse_geocoded_place) }
+    # Extract all osm_ids for preloading
+    osm_ids = places.map { |place| place.data['properties']['osm_id'].to_s }
+
+    # Preload all existing places with these osm_ids in a single query
+    existing_places = Place.where("geodata->'properties'->>'osm_id' IN (?)", osm_ids)
+                           .index_by { |p| p.geodata.dig('properties', 'osm_id').to_s }
+
+    # Process with preloaded data
+    places.each { |reverse_geocoded_place| fetch_and_create_place(reverse_geocoded_place, existing_places) }
   end

   private

@@ -41,9 +50,9 @@ class ReverseGeocoding::Places::FetchData
     )
   end

-  def fetch_and_create_place(reverse_geocoded_place)
+  def fetch_and_create_place(reverse_geocoded_place, existing_places = nil)
     data = reverse_geocoded_place.data
-    new_place = find_place(data)
+    new_place = find_place(data, existing_places)

     new_place.name = place_name(data)
     new_place.city = data['properties']['city']

@@ -57,16 +66,17 @@ class ReverseGeocoding::Places::FetchData
     new_place.save!
   end

-  def reverse_geocoded?
-    place.geodata.present?
-  end
-
-  def find_place(place_data)
-    found_place = Place.where(
-      "geodata->'properties'->>'osm_id' = ?", place_data['properties']['osm_id'].to_s
-    ).first
-
-    return found_place if found_place.present?
+  def find_place(place_data, existing_places = nil)
+    osm_id = place_data['properties']['osm_id'].to_s
+
+    # Use the preloaded data if available
+    if existing_places
+      return existing_places[osm_id] if existing_places[osm_id].present?
+    else
+      # Fall back to individual query if no preloaded data
+      found_place = Place.where("geodata->'properties'->>'osm_id' = ?", osm_id).first
+      return found_place if found_place.present?
+    end

     Place.find_or_initialize_by(
       lonlat: "POINT(#{place_data['geometry']['coordinates'][0].to_f.round(5)} #{place_data['geometry']['coordinates'][1].to_f.round(5)})",
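The essence of the change above is replacing one `Place` lookup per geocoded result with a single preloading query keyed by `osm_id`. A stripped-down illustration of the same pattern (names follow the diff; the surrounding loop is illustrative):

    osm_ids = places.map { |place| place.data['properties']['osm_id'].to_s }

    # One query for all candidates, indexed for O(1) lookup inside the loop.
    existing_places = Place.where("geodata->'properties'->>'osm_id' IN (?)", osm_ids)
                           .index_by { |p| p.geodata.dig('properties', 'osm_id').to_s }

    places.each do |reverse_geocoded_place|
      osm_id = reverse_geocoded_place.data['properties']['osm_id'].to_s
      place = existing_places[osm_id] # nil here falls back to Place.find_or_initialize_by
    end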
app/services/trips/countries.rb (new file, 92 lines)

# frozen_string_literal: true

class Trips::Countries
  FILE_PATH = Rails.root.join('lib/assets/countries.json')

  def initialize(trip, batch_count = 2)
    @trip = trip
    @batch_count = batch_count
    @factory = RGeo::Geographic.spherical_factory
    @file = File.read(FILE_PATH)
    @countries_features =
      RGeo::GeoJSON.decode(@file, json_parser: :json, geo_factory: @factory)
  end

  def call
    all_points = @trip.points.to_a
    total_points = all_points.size

    # Return empty hash if no points
    return {} if total_points.zero?

    batches = split_into_batches(all_points, @batch_count)
    threads_results = process_batches_in_threads(batches, total_points)

    merge_thread_results(threads_results).uniq.compact
  end

  private

  def split_into_batches(points, batch_count)
    batch_count = [batch_count, 1].max # Ensure batch_count is at least 1
    batch_size = (points.size / batch_count.to_f).ceil
    points.each_slice(batch_size).to_a
  end

  def process_batches_in_threads(batches, total_points)
    threads_results = []
    threads = []

    batches.each do |batch|
      threads << Thread.new do
        threads_results << process_batch(batch)
      end
    end

    threads.each(&:join)
    threads_results
  end

  def merge_thread_results(threads_results)
    countries = []

    threads_results.each do |result|
      countries.concat(result)
    end

    countries
  end

  def process_batch(points)
    points.map do |point|
      country_code = geocode_point(point)
      next unless country_code

      country_code
    end
  end

  def geocode_point(point)
    lonlat = point.lonlat
    return nil unless lonlat

    latitude = lonlat.y
    longitude = lonlat.x

    fetch_country_code(latitude, longitude)
  end

  def fetch_country_code(latitude, longitude)
    results = Geocoder.search([latitude, longitude], limit: 1)
    return nil unless results.any?

    result = results.first
    result.data['properties']['countrycode']
  rescue StandardError => e
    Rails.logger.error("Error geocoding point: #{e.message}")

    ExceptionReporter.call(e)

    nil
  end
end
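A minimal usage sketch for the new service, matching how `Trip#calculate_trip_data` ends up invoking it via `calculate_countries` in the model change above (`trip_id` is assumed to reference an existing trip with points; the return value shown is illustrative):

    trip = Trip.find(trip_id)
    Trips::Countries.new(trip).call   # => unique country codes, e.g. ["DE", "NL"]

    # In the model this runs inside the before_save callback:
    # self.visited_countries = Trips::Countries.new(self).call

The second constructor argument (`batch_count`, default 2) controls how many threads the point list is split across; each batch resolves its country codes with per-point `Geocoder.search` lookups.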
Visit detection service (module Visits)

@@ -7,10 +7,11 @@ module Visits
     MAXIMUM_VISIT_GAP = 30.minutes
     MINIMUM_POINTS_FOR_VISIT = 2

-    attr_reader :points
+    attr_reader :points, :place_name_suggester

     def initialize(points)
       @points = points
+      @place_name_suggester = Visits::Names::Suggester
     end

     def detect_potential_visits

@@ -89,7 +90,7 @@ module Visits
         center_lat: center[0],
         center_lon: center[1],
         radius: calculate_visit_radius(points, center),
-        suggested_name: suggest_place_name(points)
+        suggested_name: suggest_place_name(points) || fetch_place_name(center)
       )
     end

@@ -111,48 +112,11 @@ module Visits
     end

     def suggest_place_name(points)
-      # Get points with geodata
-      geocoded_points = points.select { |p| p.geodata.present? && !p.geodata.empty? }
-      return nil if geocoded_points.empty?
-
-      # Extract all features from points' geodata
-      features = geocoded_points.flat_map do |point|
-        next [] unless point.geodata['features'].is_a?(Array)
-
-        point.geodata['features']
-      end.compact
-
-      return nil if features.empty?
-
-      # Group features by type and count occurrences
-      feature_counts = features.group_by { |f| f.dig('properties', 'type') }
-                               .transform_values(&:size)
-
-      # Find the most common feature type
-      most_common_type = feature_counts.max_by { |_, count| count }&.first
-      return nil unless most_common_type
-
-      # Get all features of the most common type
-      common_features = features.select { |f| f.dig('properties', 'type') == most_common_type }
-
-      # Group these features by name and get the most common one
-      name_counts = common_features.group_by { |f| f.dig('properties', 'name') }
-                                   .transform_values(&:size)
-      most_common_name = name_counts.max_by { |_, count| count }&.first
-
-      return if most_common_name.blank?
-
-      # If we have a name, try to get additional context
-      feature = common_features.find { |f| f.dig('properties', 'name') == most_common_name }
-      properties = feature['properties']
-
-      # Build a more descriptive name if possible
-      [
-        most_common_name,
-        properties['street'],
-        properties['city'],
-        properties['state']
-      ].compact.uniq.join(', ')
+      place_name_suggester.new(points).call
+    end
+
+    def fetch_place_name(center)
+      Visits::Names::Fetcher.new(center).call
     end
   end
 end
app/services/visits/names/builder.rb (new file, 68 lines)

# frozen_string_literal: true

module Visits
  module Names
    # Builds descriptive names for places from geodata features
    class Builder
      def self.build_from_properties(properties)
        return nil if properties.blank?

        name_components = [
          properties['name'],
          properties['street'],
          properties['housenumber'],
          properties['city'],
          properties['state']
        ].compact.reject(&:empty?).uniq

        name_components.any? ? name_components.join(', ') : nil
      end

      def initialize(features, feature_type, name)
        @features = features
        @feature_type = feature_type
        @name = name
      end

      def call
        return nil if features.blank? || feature_type.blank? || name.blank?
        return nil unless feature

        [
          name,
          properties['street'],
          properties['city'],
          properties['state']
        ].compact.uniq.join(', ')
      end

      private

      attr_reader :features, :feature_type, :name

      def feature
        @feature ||= find_feature
      end

      def find_feature
        features.find do |f|
          f.dig('properties', 'type') == feature_type &&
            f.dig('properties', 'name') == name
        end || find_feature_by_osm_value
      end

      def find_feature_by_osm_value
        features.find do |f|
          f.dig('properties', 'osm_value') == feature_type &&
            f.dig('properties', 'name') == name
        end
      end

      def properties
        return {} unless feature && feature['properties'].is_a?(Hash)

        feature['properties']
      end
    end
  end
end
app/services/visits/names/fetcher.rb (new file, 59 lines)

# frozen_string_literal: true

module Visits
  module Names
    # Fetches names for places from reverse geocoding API
    class Fetcher
      def initialize(center)
        @center = center
      end

      def call
        return nil if geocoder_results.blank?

        build_place_name
      end

      private

      attr_reader :center

      def geocoder_results
        @geocoder_results ||= Geocoder.search(
          center, limit: 10, distance_sort: true, radius: 1, units: ::DISTANCE_UNIT
        )
      end

      def build_place_name
        return nil if geocoder_results.first&.data.blank?

        return nil if properties.blank?

        # First try the direct properties approach
        name = Visits::Names::Builder.build_from_properties(properties)
        return name if name.present?

        # Fall back to the instance-based approach
        return nil unless properties['name'] && properties['osm_value']

        Visits::Names::Builder.new(
          features,
          properties['osm_value'],
          properties['name']
        ).call
      end

      def features
        geocoder_results.map do |result|
          {
            'properties' => result.data['properties']
          }
        end.compact
      end

      def properties
        @properties ||= geocoder_results.first.data['properties']
      end
    end
  end
end
app/services/visits/names/suggester.rb (new file, 59 lines)

# frozen_string_literal: true

module Visits
  module Names
    # Suggests names for places based on geodata from tracked points
    class Suggester
      def initialize(points)
        @points = points
      end

      def call
        geocoded_points = extract_geocoded_points(points)
        return nil if geocoded_points.empty?

        features = extract_features(geocoded_points)
        return nil if features.empty?

        most_common_type = find_most_common_feature_type(features)
        return nil unless most_common_type

        most_common_name = find_most_common_name(features, most_common_type)
        return nil if most_common_name.blank?

        Visits::Names::Builder.new(
          features, most_common_type, most_common_name
        ).call
      end

      private

      attr_reader :points

      def extract_geocoded_points(points)
        points.select { |p| p.geodata.present? && !p.geodata.empty? }
      end

      def extract_features(geocoded_points)
        geocoded_points.flat_map do |point|
          next [] unless point.geodata['features'].is_a?(Array)

          point.geodata['features']
        end.compact
      end

      def find_most_common_feature_type(features)
        feature_counts = features.group_by { |f| f.dig('properties', 'type') }
                                 .transform_values(&:size)
        feature_counts.max_by { |_, count| count }&.first
      end

      def find_most_common_name(features, feature_type)
        common_features = features.select { |f| f.dig('properties', 'type') == feature_type }
        name_counts = common_features.group_by { |f| f.dig('properties', 'name') }
                                     .transform_values(&:size)
        name_counts.max_by { |_, count| count }&.first
      end
    end
  end
end
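Together with the detector change earlier in this diff, name suggestion now tries stored geodata first and only then queries the geocoder. A compact sketch of that flow (`points` and `center` follow the detector's variable names; `center` is a `[lat, lon]` pair as used there):

    # 1. Derive a name from geodata already stored on the visit's points.
    name = Visits::Names::Suggester.new(points).call

    # 2. If the points carry no stored geodata (e.g. STORE_GEODATA=false),
    #    query the geocoding service for features around the visit center.
    name ||= Visits::Names::Fetcher.new(center).call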
Visit place suggestion service (module Visits)

@@ -51,7 +51,7 @@ module Visits
       return existing_by_location if existing_by_location

       # Then try by name if available
-      return nil unless name.present?
+      return nil if name.blank?

       Place.where(name: name)
            .near([lat, lon], SEARCH_RADIUS, :m)

@@ -64,16 +64,13 @@ module Visits
       lon = visit_data[:center_lon]

       # Get places from points' geodata
-      places_from_points = extract_places_from_points(visit_data[:points], lat, lon)
-
-      # Get places from external API
-      places_from_api = fetch_places_from_api(lat, lon)
+      places_from_points = extract_places_from_points(visit_data[:points])

       # Combine and deduplicate by name
       combined_places = []

       # Add API places first (usually better quality)
-      places_from_api.each do |api_place|
+      reverse_geocoded_places(lat, lon).each do |api_place|
         combined_places << api_place unless place_name_exists?(combined_places, api_place.name)
       end

@@ -86,7 +83,7 @@ module Visits
     end

     # Step 3: Extract places from points
-    def extract_places_from_points(points, center_lat, center_lon)
+    def extract_places_from_points(points)
       return [] if points.blank?

       # Filter points with geodata

@@ -101,7 +98,7 @@ module Visits
         places << place if place
       end

-      places.uniq { |place| place.name }
+      places.uniq(&:name)
     end

     # Step 4: Create place from point

@@ -141,7 +138,7 @@ module Visits
     end

     # Step 5: Fetch places from API
-    def fetch_places_from_api(lat, lon)
+    def reverse_geocoded_places(lat, lon)
       # Get broader search results from Geocoder
       geocoder_results = Geocoder.search([lat, lon], units: :km, limit: 20, distance_sort: true)
       return [] if geocoder_results.blank?

@@ -228,15 +225,22 @@ module Visits
     # Helper methods

     def build_place_name(properties)
-      name_components = [
-        properties['name'],
-        properties['street'],
-        properties['housenumber'],
-        properties['postcode'],
-        properties['city']
-      ].compact.reject(&:empty?).uniq
-
-      name_components.any? ? name_components.join(', ') : Place::DEFAULT_NAME
+      # First try building with our name builder
+      built_name = Visits::Names::Builder.build_from_properties(properties)
+      return built_name if built_name.present?
+
+      # Try using the instance-based approach as a fallback
+      features = [{ 'properties' => properties }]
+      feature_type = properties['type'] || properties['osm_value']
+      name = properties['name']
+
+      if feature_type.present? && name.present?
+        built_name = Visits::Names::Builder.new(features, feature_type, name).call
+        return built_name if built_name.present?
+      end
+
+      # Fallback to the default name if all else fails
+      Place::DEFAULT_NAME
     end

     def place_name_exists?(places, name)
Landing page view (ERB)

@@ -8,8 +8,10 @@
     </h1>
     <p class="py-6 text-3xl">The only location history tracker you'll ever need.</p>

-    <%= link_to 'Sign up', new_user_registration_path, class: "rounded-lg py-3 px-5 my-3 bg-blue-600 text-white block font-medium" %>
-    <div class="divider">or</div>
+    <% if !DawarichSettings.self_hosted? %>
+      <%= link_to 'Sign up', new_user_registration_path, class: "rounded-lg py-3 px-5 my-3 bg-blue-600 text-white block font-medium" %>
+      <div class="divider">or</div>
+    <% end %>
     <%= link_to 'Sign in', new_user_session_path, class: "rounded-lg py-3 px-5 bg-neutral text-neutral-content block font-medium" %>
   </div>
 </div>
Map page view (ERB)

@@ -5,18 +5,36 @@
 <div class="flex flex-col space-y-4 mb-4 w-full">
   <%= form_with url: map_path(import_id: params[:import_id]), method: :get do |f| %>
     <div class="flex flex-col space-y-4 sm:flex-row sm:space-y-0 sm:space-x-4 sm:items-end">
-      <div class="w-full sm:w-2/12 md:w-1/12 lg:w-3/12">
+      <div class="w-full sm:w-1/12 md:w-1/12 lg:w-1/12">
+        <div class="flex flex-col space-y-2">
+          <span class="tooltip" data-tip="<%= human_date(@start_at - 1.day) %>">
+            <%= link_to map_path(start_at: @start_at - 1.day, end_at: @end_at - 1.day, import_id: params[:import_id]), class: "btn btn-neutral hover:btn-ghost w-full" do %>
+              ◀️
+            <% end %>
+          </span>
+        </div>
+      </div>
+      <div class="w-full sm:w-2/12 md:w-1/12 lg:w-2/12">
         <div class="flex flex-col space-y-2">
           <%= f.label :start_at, class: "text-sm font-semibold" %>
           <%= f.datetime_local_field :start_at, include_seconds: false, class: "input input-bordered hover:cursor-pointer hover:input-primary", value: @start_at %>
         </div>
       </div>
-      <div class="w-full sm:w-2/12 md:w-1/12 lg:w-3/12">
+      <div class="w-full sm:w-2/12 md:w-1/12 lg:w-2/12">
        <div class="flex flex-col space-y-2">
          <%= f.label :end_at, class: "text-sm font-semibold" %>
          <%= f.datetime_local_field :end_at, include_seconds: false, class: "input input-bordered hover:cursor-pointer hover:input-primary", value: @end_at %>
        </div>
       </div>
+      <div class="w-full sm:w-1/12 md:w-1/12 lg:w-1/12">
+        <div class="flex flex-col space-y-2">
+          <span class="tooltip" data-tip="<%= human_date(@start_at + 1.day) %>">
+            <%= link_to map_path(start_at: @start_at + 1.day, end_at: @end_at + 1.day, import_id: params[:import_id]), class: "btn btn-neutral hover:btn-ghost w-full" do %>
+              ▶️
+            <% end %>
+          </span>
+        </div>
+      </div>
       <div class="w-full sm:w-6/12 md:w-2/12 lg:w-1/12">
         <div class="flex flex-col space-y-2">
           <%= f.submit "Search", class: "btn btn-primary hover:btn-info" %>
Stats: reverse-geocoded points partial (ERB)

@@ -1,14 +1,16 @@
-<div class="stat text-center">
-  <div class="stat-value text-secondary">
-    <%= number_with_delimiter @points_reverse_geocoded %>
-  </div>
-  <div class="stat-title">Reverse geocoded points</div>
-  <div class="stat-title">
-    <span class="tooltip underline decoration-dotted" data-tip="Points that were reverse geocoded but had no data">
-      <%= number_with_delimiter @points_reverse_geocoded_without_data %> points without data
-    </span>
-  </div>
-</div>
+<% if DawarichSettings.store_geodata? %>
+  <div class="stat text-center">
+    <div class="stat-value text-secondary">
+      <%= number_with_delimiter @points_reverse_geocoded %>
+    </div>
+    <div class="stat-title">Reverse geocoded points</div>
+    <div class="stat-title">
+      <span class="tooltip underline decoration-dotted" data-tip="Points that were reverse geocoded but had no data">
+        <%= number_with_delimiter @points_reverse_geocoded_without_data %> points without data
+      </span>
+    </div>
+  </div>
+<% end %>

 <div class="stat text-center">
   <div class="stat-value text-warning underline hover:no-underline hover:cursor-pointer" onclick="countries_visited.showModal()">
@ -7,11 +7,12 @@
|
||||||
<% end %>
|
<% end %>
|
||||||
</h2>
|
</h2>
|
||||||
|
|
||||||
<div class="flex items-center gap-2">
|
<div class="gap-2">
|
||||||
<%= link_to '[Update]', update_year_month_stats_path(stat.year, stat.month), data: { turbo_method: :put }, class: 'text-sm text-gray-500 hover:underline' %>
|
<span class='text-xs text-gray-500'>Last update <%= human_date(stat.updated_at) %></span>
|
||||||
|
<%= link_to '🔄', update_year_month_stats_path(stat.year, stat.month), data: { turbo_method: :put }, class: 'text-sm text-gray-500 hover:underline' %>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<p><%= stat.distance %><%= DISTANCE_UNIT %></p>
|
<p><%= number_with_delimiter stat.distance %><%= DISTANCE_UNIT %></p>
|
||||||
<% if DawarichSettings.reverse_geocoding_enabled? %>
|
<% if DawarichSettings.reverse_geocoding_enabled? %>
|
||||||
<div class="card-actions justify-end">
|
<div class="card-actions justify-end">
|
||||||
<%= countries_and_cities_stat_for_month(stat) %>
|
<%= countries_and_cities_stat_for_month(stat) %>
|
||||||
|
|
|
||||||
Stats: year header (ERB)

@@ -32,7 +32,10 @@
     <%= link_to year, "/stats/#{year}", class: 'underline hover:no-underline' %>
     <%= link_to '[Map]', map_url(year_timespan(year)), class: 'underline hover:no-underline' %>
   </div>
-  <%= link_to '[Update]', update_year_month_stats_path(year, :all), data: { turbo_method: :put }, class: 'text-sm text-gray-500 hover:underline' %>
+  <div class="gap-2">
+    <span class='text-xs text-gray-500'>Last updated: <%= human_date(stats.first.updated_at) %></span>
+    <%= link_to '🔄', update_year_month_stats_path(year, :all), data: { turbo_method: :put }, class: 'text-sm text-gray-500 hover:underline' %>
+  </div>
 </h2>
 <p>
   <% cache [current_user, 'year_distance_stat', year], skip_digest: true do %>
Reverse-geocoding initializer

@@ -17,6 +17,7 @@ NOMINATIM_API_KEY = ENV.fetch('NOMINATIM_API_KEY', nil)
 NOMINATIM_API_USE_HTTPS = ENV.fetch('NOMINATIM_API_USE_HTTPS', 'true') == 'true'

 GEOAPIFY_API_KEY = ENV.fetch('GEOAPIFY_API_KEY', nil)
+STORE_GEODATA = ENV.fetch('STORE_GEODATA', 'false') == 'true'
 # /Reverse geocoding settings

 SENTRY_DSN = ENV.fetch('SENTRY_DSN', nil)
DawarichSettings

@@ -32,5 +32,9 @@ class DawarichSettings
     def nominatim_enabled?
       @nominatim_enabled ||= NOMINATIM_API_HOST.present?
     end
+
+    def store_geodata?
+      @store_geodata ||= STORE_GEODATA
+    end
   end
 end
New migration: AddVisitedCountriesToTrips (7 lines)

# frozen_string_literal: true

class AddVisitedCountriesToTrips < ActiveRecord::Migration[8.0]
  def change
    add_column :trips, :visited_countries, :jsonb, default: []
  end
end
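With the migration in place, every trip carries its own country list independent of per-point geodata. A small usage sketch against a migrated database (the values shown are illustrative):

    trip = Trip.last
    trip.visited_countries   # => [] by default; filled by Trip#calculate_countries on save,
                             #    e.g. ["DE", "NL"]

    # Trip#countries prefers per-point data when geodata storage is enabled,
    # otherwise it returns this stored list (see the Trip model change above).
    trip.countries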
db/schema.rb (generated, 448 lines changed)

The schema file was regenerated: the version is bumped and the whole file is rewritten in Rails' default style (double-quoted strings and inline arrays instead of the previous single quotes and %w[] literals). The table definitions for action_text_rich_texts, active_storage_attachments, active_storage_blobs, active_storage_variant_records, areas, data_migrations, exports, imports, notifications, place_visits, places, points, stats, users and visits, the users check constraint, and the foreign keys are only requoted and reflowed; their columns and indexes are unchanged. The substantive changes are the version line and the new trips column:

@@ -10,264 +10,262 @@
-ActiveRecord::Schema[8.0].define(version: 20_250_404_182_437) do
+ActiveRecord::Schema[8.0].define(version: 2025_05_13_164521) do

   create_table "trips", force: :cascade do |t|
     t.string "name", null: false
     t.datetime "started_at", null: false
     t.datetime "ended_at", null: false
     t.integer "distance"
     t.bigint "user_id", null: false
     t.datetime "created_at", null: false
     t.datetime "updated_at", null: false
     t.geometry "path", limit: {srid: 3857, type: "line_string"}
+    t.jsonb "visited_countries", default: []
     t.index ["user_id"], name: "index_trips_on_user_id"
   end
|
add_foreign_key "trips", "users"
|
||||||
add_foreign_key 'visits', 'areas'
|
add_foreign_key "visits", "areas"
|
||||||
add_foreign_key 'visits', 'places'
|
add_foreign_key "visits", "places"
|
||||||
add_foreign_key 'visits', 'users'
|
add_foreign_key "visits", "users"
|
||||||
end
|
end
|
||||||
|
|
|
||||||
|
|
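The one substantive schema change above is the new `trips.visited_countries` column; the rest of the regenerated dump only switches quoting style. For orientation, a minimal migration sketch that would produce that column (class name and Rails version tag are assumptions, not taken from this PR):

# Sketch (not part of this PR): a migration matching the trips.visited_countries
# column shown in the schema above.
class AddVisitedCountriesToTrips < ActiveRecord::Migration[8.0]
  def change
    add_column :trips, :visited_countries, :jsonb, default: []
  end
end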
@@ -70,6 +70,7 @@ services:
       PROMETHEUS_EXPORTER_PORT: 9394
       SECRET_KEY_BASE: 1234567890
       RAILS_LOG_TO_STDOUT: "true"
+      STORE_GEODATA: "false"
     logging:
       driver: "json-file"
       options:

@@ -124,6 +125,7 @@ services:
       PROMETHEUS_EXPORTER_PORT: 9394
       SECRET_KEY_BASE: 1234567890
       RAILS_LOG_TO_STDOUT: "true"
+      STORE_GEODATA: "false"
     logging:
       driver: "json-file"
       options:
@@ -70,6 +70,7 @@ services:
       PROMETHEUS_EXPORTER_HOST: 0.0.0.0
       PROMETHEUS_EXPORTER_PORT: 9394
       SELF_HOSTED: "true"
+      STORE_GEODATA: "false"
     logging:
       driver: "json-file"
       options:

@@ -122,6 +123,7 @@ services:
       PROMETHEUS_EXPORTER_HOST: dawarich_app
       PROMETHEUS_EXPORTER_PORT: 9394
       SELF_HOSTED: "true"
+      STORE_GEODATA: "false"
     logging:
       driver: "json-file"
       options:
@@ -7,6 +7,7 @@ MIN_MINUTES_SPENT_IN_CITY=60
 APPLICATION_HOSTS=dawarich.example.synology.me
 TIME_ZONE=Europe/Berlin
 BACKGROUND_PROCESSING_CONCURRENCY=10
+STORE_GEODATA=false

 ###################################################################################
 # Database
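The compose files and the example `.env` above all ship `STORE_GEODATA` defaulted to false. The specs further down stub `DawarichSettings.store_geodata?`, so the flag is evidently surfaced through that settings module; a minimal sketch of such a reader, assuming the implementation simply compares the variable to 'true':

# Sketch (not part of this PR): the method name comes from the specs below,
# the body is an assumption about how the env var is read.
module DawarichSettings
  def self.store_geodata?
    ENV.fetch('STORE_GEODATA', 'false').to_s.strip.downcase == 'true'
  end
end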
@@ -5,9 +5,7 @@ namespace :points do
   task migrate_to_lonlat: :environment do
     puts 'Updating points to use lonlat...'

-    points =
-      Point.where(longitude: nil, latitude: nil)
-        .select(:id, :longitude, :latitude, :raw_data, :user_id, :timestamp)
+    points = Point.where(longitude: nil, latitude: nil).without_raw_data

     points.find_each do |point|
       Points::RawDataLonlatExtractor.new(point).call
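For reference, the task above is the one invoked as `rake points:migrate_to_lonlat`; in a dockerized setup that would typically look like `docker compose exec dawarich_app bundle exec rake points:migrate_to_lonlat` (service name assumed).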
@@ -21,15 +21,6 @@ RSpec.describe Place, type: :model do
   describe 'methods' do
     let(:place) { create(:place, :with_geodata) }

-    describe '#async_reverse_geocode' do
-      before { allow(DawarichSettings).to receive(:reverse_geocoding_enabled?).and_return(true) }
-
-      it 'updates address' do
-        expect { place.async_reverse_geocode }.to have_enqueued_job(ReverseGeocodingJob).with('Place', place.id)
-      end
-    end
-
     describe '#osm_id' do
       it 'returns the osm_id' do
         expect(place.osm_id).to eq(5_762_449_774)
@@ -45,7 +45,10 @@ RSpec.describe Point, type: :model do
   describe '#async_reverse_geocode' do
     let(:point) { build(:point) }

-    before { allow(DawarichSettings).to receive(:reverse_geocoding_enabled?).and_return(true) }
+    before do
+      allow(DawarichSettings).to receive(:reverse_geocoding_enabled?).and_return(true)
+      allow(DawarichSettings).to receive(:store_geodata?).and_return(true)
+    end

     it 'enqueues ReverseGeocodeJob with correct arguments' do
       point.save

@@ -61,6 +64,16 @@ RSpec.describe Point, type: :model do
         expect { point.async_reverse_geocode }.to have_enqueued_job(ReverseGeocodingJob)
       end
     end

+    context 'when reverse geocoding is disabled' do
+      before do
+        allow(DawarichSettings).to receive(:reverse_geocoding_enabled?).and_return(false)
+      end
+
+      it 'does not enqueue ReverseGeocodeJob' do
+        expect { point.save }.not_to have_enqueued_job(ReverseGeocodingJob)
+      end
+    end
   end

   describe '#lon' do
@@ -3,6 +3,10 @@
 require 'rails_helper'

 RSpec.describe Trip, type: :model do
+  before do
+    allow_any_instance_of(Trips::Countries).to receive(:call).and_return([])
+  end
+
   describe 'validations' do
     it { is_expected.to validate_presence_of(:name) }
     it { is_expected.to validate_presence_of(:started_at) }

@@ -25,6 +29,26 @@ RSpec.describe Trip, type: :model do
     it 'sets the path' do
       expect(trip.path).to be_present
     end

+    context 'when DawarichSettings.store_geodata is enabled' do
+      before do
+        allow(DawarichSettings).to receive(:store_geodata?).and_return(true)
+      end
+
+      it 'sets the countries' do
+        expect(trip.countries).to eq(trip.points.pluck(:country).uniq.compact)
+      end
+    end
+
+    context 'when DawarichSettings.store_geodata is disabled' do
+      it 'sets the visited countries' do
+        countries_service = instance_double(Trips::Countries, call: [])
+        expect(Trips::Countries).to receive(:new).with(trip).and_return(countries_service)
+        expect(countries_service).to receive(:call)
+
+        trip.save
+      end
+    end
   end

   describe '#countries' do
@@ -38,6 +38,7 @@ RSpec.configure do |config|

   config.before do
     ActiveJob::Base.queue_adapter = :test
+    allow(DawarichSettings).to receive(:store_geodata?).and_return(true)
   end

   config.after(:suite) do
@@ -4,7 +4,10 @@ require 'rails_helper'

 RSpec.describe Jobs::Create do
   describe '#call' do
-    before { allow(DawarichSettings).to receive(:reverse_geocoding_enabled?).and_return(true) }
+    before do
+      allow(DawarichSettings).to receive(:reverse_geocoding_enabled?).and_return(true)
+      allow(DawarichSettings).to receive(:store_geodata?).and_return(true)
+    end

     context 'when job_name is start_reverse_geocoding' do
       let(:user) { create(:user) }
@@ -9,8 +9,11 @@ RSpec.describe ReverseGeocoding::Points::FetchData do

   context 'when Geocoder returns city and country' do
     before do
-      allow(Geocoder).to receive(:search).and_return([double(city: 'City', country: 'Country',
-                                                             data: { 'address' => 'Address' })])
+      allow(Geocoder).to receive(:search).and_return(
+        [
+          double(city: 'City', country: 'Country', data: { 'address' => 'Address' })
+        ]
+      )
     end

   context 'when point does not have city and country' do
spec/services/trips/countries_spec.rb (new file, 93 lines):

# frozen_string_literal: true

require 'rails_helper'

RSpec.describe Trips::Countries do
  let(:trip) { instance_double('Trip') }
  let(:point1) { instance_double('Point', lonlat: factory.point(10.0, 50.0)) }
  let(:point2) { instance_double('Point', lonlat: factory.point(20.0, 60.0)) }
  let(:point3) { instance_double('Point', lonlat: factory.point(30.0, 70.0)) }
  let(:point4) { instance_double('Point', lonlat: nil) }
  let(:factory) { RGeo::Geographic.spherical_factory }
  let(:points) { [point1, point2, point3, point4] }

  let(:geo_json_content) do
    {
      type: 'FeatureCollection',
      features: [
        {
          type: 'Feature',
          properties: { ADMIN: 'Germany', ISO_A3: 'DEU', ISO_A2: 'DE' },
          geometry: { type: 'MultiPolygon', coordinates: [] }
        }
      ]
    }.to_json
  end

  before do
    allow(trip).to receive(:points).and_return(points)
    allow(File).to receive(:read).with(Trips::Countries::FILE_PATH).and_return(geo_json_content)

    # Explicitly stub all Geocoder calls with specific coordinates
    allow(Geocoder).to receive(:search).and_return(
      [double(data: { 'properties' => { 'countrycode' => 'DE' } })]
    )
    allow(Geocoder).to receive(:search).with([50.0, 10.0], limit: 1).and_return(
      [double(data: { 'properties' => { 'countrycode' => 'DE' } })]
    )
    allow(Geocoder).to receive(:search).with([60.0, 20.0], limit: 1).and_return(
      [double(data: { 'properties' => { 'countrycode' => 'SE' } })]
    )
    allow(Geocoder).to receive(:search).with([70.0, 30.0], limit: 1).and_return(
      [double(data: { 'properties' => { 'countrycode' => 'FI' } })]
    )

    allow(Rails.logger).to receive(:info)
    allow(Rails.logger).to receive(:error)
  end

  describe '#call' do
    it 'returns a hash with country counts' do
      allow(Thread).to receive(:new).and_yield

      result = described_class.new(trip).call

      expect(result).to match_array(%w[DE SE FI])
    end

    it 'handles points without coordinates' do
      allow(Thread).to receive(:new).and_yield

      result = described_class.new(trip).call

      expect(result.size).to eq(3) # Should only count the 3 valid points
    end

    it 'processes batches in multiple threads' do
      expect(Thread).to receive(:new).at_least(:twice).and_yield

      described_class.new(trip).call
    end

    it 'sorts countries by count in descending order' do
      allow(Thread).to receive(:new).and_yield
      allow(points).to receive(:to_a).and_return([point1, point1, point2, point3, point4])

      result = described_class.new(trip).call

      expect(result.first).to eq('DE')
    end

    context 'when an error occurs' do
      before do
        allow(Geocoder).to receive(:search).and_raise(Geocoder::Error, 'Error')
      end

      it 'calls the exception reporter' do
        expect(ExceptionReporter).to receive(:call).with(Geocoder::Error).at_least(3).times

        described_class.new(trip).call
      end
    end
  end
end
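The spec above pins down the observable behaviour of `Trips::Countries`: skip points without coordinates, reverse geocode the rest in threaded batches via `Geocoder.search([lat, lon], limit: 1)`, report `Geocoder::Error` through `ExceptionReporter`, and return country codes ordered by how often they occur. A rough sketch of that flow follows; the batch size is assumed, and the `FILE_PATH` GeoJSON fallback and logging that the spec stubs are omitted, so this is not the PR's implementation:

# Sketch (not part of this PR): threaded, on-demand country lookup for a trip's points.
module Trips
  class Countries
    BATCH_SIZE = 200 # assumed; the spec only requires more than one batch

    def initialize(trip)
      @trip = trip
    end

    def call
      counts = Hash.new(0)
      mutex = Mutex.new

      threads = @trip.points.to_a.select(&:lonlat).each_slice(BATCH_SIZE).map do |batch|
        Thread.new do
          batch.each do |point|
            code = country_code_for(point)
            mutex.synchronize { counts[code] += 1 } if code
          end
        end
      end
      threads.each(&:join)

      # Country codes, most frequently visited first
      counts.sort_by { |_code, count| -count }.map(&:first)
    end

    private

    # Geocoder expects [lat, lon]; lonlat is a PostGIS point with x = lon, y = lat.
    def country_code_for(point)
      result = Geocoder.search([point.lonlat.y, point.lonlat.x], limit: 1).first
      result&.data&.dig('properties', 'countrycode')
    rescue Geocoder::Error => e
      ExceptionReporter.call(e)
      nil
    end
  end
end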
spec/services/visits/names/builder_spec.rb (new file, 126 lines):

# frozen_string_literal: true

require 'rails_helper'

RSpec.describe Visits::Names::Builder do
  describe '.build_from_properties' do
    it 'builds a name from all available properties' do
      properties = {
        'name' => 'Coffee Shop',
        'street' => 'Main St',
        'housenumber' => '123',
        'city' => 'New York',
        'state' => 'NY'
      }

      result = described_class.build_from_properties(properties)
      expect(result).to eq('Coffee Shop, Main St, 123, New York, NY')
    end

    it 'handles missing properties' do
      properties = {
        'name' => 'Coffee Shop',
        'city' => 'New York',
        'state' => 'NY'
      }

      result = described_class.build_from_properties(properties)
      expect(result).to eq('Coffee Shop, New York, NY')
    end

    it 'deduplicates components' do
      properties = {
        'name' => 'New York Cafe',
        'city' => 'New York',
        'state' => 'NY'
      }

      result = described_class.build_from_properties(properties)
      expect(result).to eq('New York Cafe, New York, NY')
    end

    it 'returns nil for empty properties' do
      result = described_class.build_from_properties({})
      expect(result).to be_nil
    end

    it 'returns nil for nil properties' do
      result = described_class.build_from_properties(nil)
      expect(result).to be_nil
    end
  end

  describe '#call' do
    subject { described_class.new(features, feature_type, name).call }

    let(:feature_type) { 'amenity' }
    let(:name) { 'Coffee Shop' }
    let(:features) do
      [
        {
          'properties' => {
            'type' => 'amenity',
            'name' => 'Coffee Shop',
            'street' => '123 Main St',
            'city' => 'San Francisco',
            'state' => 'CA'
          }
        },
        {
          'properties' => {
            'type' => 'park',
            'name' => 'Central Park',
            'city' => 'New York',
            'state' => 'NY'
          }
        }
      ]
    end

    it 'returns a descriptive name with all available components' do
      expect(subject).to eq('Coffee Shop, 123 Main St, San Francisco, CA')
    end

    context 'when feature uses osm_value instead of type' do
      let(:features) do
        [
          {
            'properties' => {
              'osm_value' => 'amenity',
              'name' => 'Coffee Shop',
              'street' => '123 Main St',
              'city' => 'San Francisco',
              'state' => 'CA'
            }
          }
        ]
      end

      it 'finds the feature using osm_value' do
        expect(subject).to eq('Coffee Shop, 123 Main St, San Francisco, CA')
      end
    end

    context 'when no matching feature is found' do
      let(:name) { 'Non-existent Shop' }

      it 'returns nil' do
        expect(subject).to be_nil
      end
    end

    context 'with empty inputs' do
      it 'returns nil for empty features' do
        expect(described_class.new([], feature_type, name).call).to be_nil
      end

      it 'returns nil for blank feature_type' do
        expect(described_class.new(features, '', name).call).to be_nil
      end

      it 'returns nil for blank name' do
        expect(described_class.new(features, feature_type, '').call).to be_nil
      end
    end
  end
end
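Read together, these examples describe a builder that joins `name`, `street`, `housenumber`, `city` and `state` (skipping blanks, deduplicating exact repeats) and, in its instance form, first locates the feature whose `type` or `osm_value` and `name` match. A compact sketch consistent with those examples; the real class may be structured differently:

# Sketch (not part of this PR): builds a human-readable place name from Photon
# feature properties, mirroring the behaviour the spec above exercises.
module Visits
  module Names
    class Builder
      COMPONENTS = %w[name street housenumber city state].freeze

      def self.build_from_properties(properties)
        return nil if properties.blank?

        parts = COMPONENTS.filter_map { |key| properties[key].presence }.uniq
        parts.presence&.join(', ')
      end

      def initialize(features, feature_type, name)
        @features = features
        @feature_type = feature_type
        @name = name
      end

      def call
        return nil if @features.blank? || @feature_type.blank? || @name.blank?

        # Find the feature whose type (or osm_value) and name match the request.
        feature = @features.find do |f|
          props = f['properties'] || {}
          [props['type'], props['osm_value']].include?(@feature_type) && props['name'] == @name
        end

        self.class.build_from_properties(feature&.dig('properties'))
      end
    end
  end
end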
spec/services/visits/names/suggester_spec.rb (new file, 173 lines):

# frozen_string_literal: true

require 'rails_helper'

RSpec.describe Visits::Names::Suggester do
  subject(:suggester) { described_class.new(points) }

  describe '#call' do
    context 'when no points have geodata' do
      let(:points) do
        [
          double('Point', geodata: nil),
          double('Point', geodata: {})
        ]
      end

      it 'returns nil' do
        expect(suggester.call).to be_nil
      end
    end

    context 'when points have geodata but no features' do
      let(:points) do
        [
          double('Point', geodata: { 'features' => [] })
        ]
      end

      it 'returns nil' do
        expect(suggester.call).to be_nil
      end
    end

    context 'when features exist but with different types' do
      let(:points) do
        [
          double(
            'Point',
            geodata: {
              'features' => [
                { 'properties' => { 'type' => 'cafe', 'name' => 'Coffee Shop' } },
                { 'properties' => { 'type' => 'restaurant', 'name' => 'Pizza Place' } }
              ]
            }
          )
        ]
      end

      it 'returns the name of the most common type' do
        expect(suggester.call).to eq('Coffee Shop')
      end
    end

    context 'when features have a common type but different names' do
      let(:points) do
        [
          double(
            'Point',
            geodata: {
              'features' => [
                { 'properties' => { 'type' => 'park', 'name' => 'Central Park' } }
              ]
            }
          ),
          double(
            'Point',
            geodata: {
              'features' => [
                { 'properties' => { 'type' => 'park', 'name' => 'City Park' } }
              ]
            }
          ),
          double(
            'Point',
            geodata: {
              'features' => [
                { 'properties' => { 'type' => 'park', 'name' => 'Central Park' } }
              ]
            }
          )
        ]
      end

      it 'returns the most common name' do
        expect(suggester.call).to eq('Central Park')
      end
    end

    context 'when a complete place can be built' do
      let(:points) do
        [
          double(
            'Point',
            geodata: {
              'features' => [
                {
                  'properties' => {
                    'type' => 'cafe',
                    'name' => 'Starbucks',
                    'street' => '123 Main St',
                    'city' => 'San Francisco',
                    'state' => 'CA'
                  }
                }
              ]
            }
          )
        ]
      end

      it 'returns a descriptive name with all components' do
        expect(suggester.call).to eq('Starbucks, 123 Main St, San Francisco, CA')
      end
    end

    context 'when only partial place details are available' do
      let(:points) do
        [
          double(
            'Point',
            geodata: {
              'features' => [
                {
                  'properties' => {
                    'type' => 'cafe',
                    'name' => 'Starbucks',
                    'city' => 'San Francisco'
                    # No street or state
                  }
                }
              ]
            }
          )
        ]
      end

      it 'returns a name with available components' do
        expect(suggester.call).to eq('Starbucks, San Francisco')
      end
    end

    context 'when points have geodata with non-array features' do
      let(:points) do
        [
          double('Point', geodata: { 'features' => 'not an array' })
        ]
      end

      it 'returns nil' do
        expect(suggester.call).to be_nil
      end
    end

    context 'when most common name is blank' do
      let(:points) do
        [
          double(
            'Point',
            geodata: {
              'features' => [
                { 'properties' => { 'type' => 'road', 'name' => '' } }
              ]
            }
          )
        ]
      end

      it 'returns nil' do
        expect(suggester.call).to be_nil
      end
    end
  end
end
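The suggester examples imply a two-step heuristic over the points' stored geodata: pick the most frequent feature `type`, then the most frequent `name` within that type, and build a descriptive label from the matching feature's properties. A sketch under those assumptions (the delegation to `Visits::Names::Builder` is a guess at how the full address gets assembled):

# Sketch (not part of this PR): suggests a visit name from the points' geodata features.
module Visits
  module Names
    class Suggester
      def initialize(points)
        @points = points
      end

      def call
        # Collect every feature from every point, tolerating nil or malformed geodata.
        features = @points.flat_map do |point|
          list = point.geodata&.dig('features')
          list.is_a?(Array) ? list : []
        end
        return nil if features.empty?

        # Most frequent feature type wins, then the most frequent name within it.
        _common_type, typed = features.group_by { |f| f.dig('properties', 'type') }
                                      .max_by { |_type, list| list.size }
        common_name = typed.map { |f| f.dig('properties', 'name') }
                           .tally.max_by { |_name, count| count }&.first
        return nil if common_name.blank?

        Builder.new(features, typed.first.dig('properties', 'type'), common_name).call || common_name
      end
    end
  end
end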
@@ -75,7 +75,7 @@ RSpec.describe Visits::PlaceFinder do

     before do
       allow(Geocoder).to receive(:search).and_return([])
-      allow(subject).to receive(:fetch_places_from_api).and_return([])
+      allow(subject).to receive(:reverse_geocoded_places).and_return([])
     end

     it 'extracts and creates places from point geodata' do
spec/support/geocoder_stubs.rb (new file, 22 lines):

# frozen_string_literal: true

# Stub all Geocoder requests in tests
RSpec.configure do |config|
  config.before(:each) do
    # Create a generic stub for all Geocoder requests
    allow(Geocoder).to receive(:search).and_return(
      [
        double(
          data: {
            'properties' => {
              'countrycode' => 'US',
              'country' => 'United States',
              'state' => 'New York',
              'name' => 'Test Location'
            }
          }
        )
      ]
    )
  end
end
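One note for readers of the specs above: this generic `Geocoder.search` stub runs in a global `before(:each)`, while individual specs such as the `Trips::Countries` one re-stub the same message later with `.with(...)` constraints; because stubs defined later take precedence in rspec-mocks, those constrained coordinate stubs win for matching calls and this generic return value covers everything else.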