mirror of https://github.com/Freika/dawarich.git (synced 2026-01-13 02:31:39 -05:00)

commit f9750133df
Merge remote-tracking branch 'origin/master' into visit_detection

64 changed files with 4597 additions and 166 deletions

@@ -1 +1 @@
0.9.9
0.11.2

.circleci/config.yml (new file, 42 lines)
@@ -0,0 +1,42 @@
version: 2.1

orbs:
  ruby: circleci/ruby@2.1.1
  browser-tools: circleci/browser-tools@1.2.3

jobs:
  test:
    docker:
      - image: cimg/ruby:3.2.3
        environment:
          RAILS_ENV: test
      - image: circleci/postgres:13.3
        environment:
          POSTGRES_USER: postgres
          POSTGRES_DB: test_database
          POSTGRES_PASSWORD: mysecretpassword
      - image: redis:7.0

    steps:
      - checkout
      - run:
          name: Install Bundler
          command: gem install bundler
      - run:
          name: Bundle Install
          command: bundle install --jobs=4 --retry=3
      - run:
          name: Database Setup
          command: |
            bundle exec rails db:create
            bundle exec rails db:schema:load
      - run:
          name: Run RSpec tests
          command: bundle exec rspec
      - store_artifacts:
          path: coverage

workflows:
  rspec:
    jobs:
      - test

@@ -4,4 +4,4 @@ DATABASE_PASSWORD=password
DATABASE_NAME=dawarich_development
DATABASE_PORT=5432
REDIS_URL=redis://localhost:6379/1
PHOTON_API_HOST='photon.chibi.rodeo'
PHOTON_API_HOST='photon.komoot.io'

CHANGELOG.md (87 lines changed)
@@ -30,6 +30,93 @@ The visit suggestion release.
- Visits suggestion functionality. Read more on that in the release description
- Tabs on the Visits page to allow the user to switch between confirmed, declined and suggested visits

## [0.11.2] — 2024-08-22

### Changed

### Fixed

- Dawarich exports were failing when imported back into Dawarich.
- The Imports page with a lot of imports should now load faster.

## [0.11.1] — 2024-08-21

### Changed

- The `/api/v1/points` endpoint now returns 100 points by default. You can specify the number of points to return by passing the `per_page` query parameter. Example: `/api/v1/points?per_page=50` will return 50 points. A `page` query parameter is also available to paginate the results. Example: `/api/v1/points?per_page=50&page=2` will return the second page of 50 points.
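
For illustration only (not part of the commit itself), a minimal sketch of paging through this endpoint with the `httparty` gem the app already bundles; the host and API key are placeholders, and passing the key as an `api_key` query parameter follows the request specs added in this commit:

```ruby
require 'httparty'
require 'json'

# Fetch the second page of 50 points from a local Dawarich instance.
response = HTTParty.get(
  'http://localhost:3000/api/v1/points',
  query: { api_key: 'YOUR_API_KEY', per_page: 50, page: 2 }
)

points = JSON.parse(response.body)
puts points.size # => at most 50
```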

## [0.11.0] — 2024-08-21

### Added

- A user can now trigger the import of their geodata from Immich to Dawarich by clicking the "Import Immich data" button on the Imports page.
- A user can now provide a URL and an API key for their Immich instance and then trigger the import of their geodata from Immich to Dawarich. This can be done on the Settings page.

### Changed

- Table columns on the Exports page were reordered to make it more user-friendly.
- Exports are now named with the pattern "export_from_dd.mm.yyyy_to_dd.mm.yyyy.json", where "dd.mm.yyyy" is the date range of the export.
- Notifications about any error will now include the stacktrace.

## [0.10.0] — 2024-08-20

### Added

- The `api/v1/stats` endpoint to get stats for the user, with Swagger docs.
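
Similarly, an illustrative sketch (host and API key are placeholders) of reading this endpoint with `httparty`; the `totalDistanceKm` and `yearlyStats` keys match the StatsSerializer introduced in this commit:

```ruby
require 'httparty'
require 'json'

# Fetch aggregated stats for the API user.
stats = JSON.parse(
  HTTParty.get(
    'http://localhost:3000/api/v1/stats',
    query: { api_key: 'YOUR_API_KEY' }
  ).body
)

puts stats['totalDistanceKm']
puts stats['yearlyStats'].map { |y| y['year'] } # => e.g. [2021, 2020]
```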

### Fixed

- Redis and DB containers are now automatically restarted if they fail. Update your `docker-compose.yml` if necessary:

```diff
services:
  dawarich_redis:
    image: redis:7.0-alpine
    command: redis-server
    networks:
      - dawarich
    volumes:
      - shared_data:/var/shared/redis
+   restart: always
  dawarich_db:
    image: postgres:14.2-alpine
    container_name: dawarich_db
    volumes:
      - db_data:/var/lib/postgresql/data
      - shared_data:/var/shared
    networks:
      - dawarich
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: password
+   restart: always
```

See the [PR](https://github.com/Freika/dawarich/pull/185) or the Swagger docs (`/api-docs`) for more information.

## [0.9.12] — 2024-08-15

### Fixed

- OwnTracks points are now saved to the database with their full attributes
- Existing OwnTracks points are also backfilled with the missing data
- Definition of "reverse geocoded points" is now correctly based on the number of points that have full reverse geocoding data instead of the number of points that have only country and city
- Fixed a bug in the GPX importing script ([thanks, bluemax!](https://github.com/Freika/dawarich/pull/126))

## [0.9.11] — 2024-08-14

### Fixed

- A bug where an attempt to import a Google Records.json file was failing due to the wrong object being passed to a background worker

## [0.9.10] — 2024-08-14

### Added

- PHOTON_API_HOST env variable to set the host of the Photon API. It allows you to use your own Photon API instance instead of the default one.

## [0.9.9] — 2024-07-30

### Added

Gemfile (4 lines changed)
@@ -9,7 +9,7 @@ gem 'chartkick'
gem 'data_migrate'
gem 'devise'
gem 'geocoder'
gem 'google_places'
gem 'httparty'
gem 'importmap-rails'
gem 'kaminari'
gem 'lograge'
@@ -43,7 +43,7 @@ end
group :test do
  gem 'fakeredis'
  gem 'shoulda-matchers'
  gem 'simplecov'
  gem 'simplecov', require: false
  gem 'super_diff'
  gem 'webmock'
end

Gemfile.lock (55 lines changed)
@@ -82,13 +82,13 @@ GEM
    base64 (0.2.0)
    bcrypt (3.1.20)
    bigdecimal (3.1.8)
    bootsnap (1.18.3)
    bootsnap (1.18.4)
      msgpack (~> 1.2)
    builder (3.3.0)
    byebug (11.1.3)
    chartkick (5.0.7)
    coderay (1.1.3)
    concurrent-ruby (1.3.3)
    concurrent-ruby (1.3.4)
    connection_pool (2.4.1)
    content_disposition (1.0.0)
    crack (1.0.0)
@@ -186,7 +186,7 @@ GEM
    marcel (1.0.4)
    method_source (1.1.0)
    mini_mime (1.1.5)
    minitest (5.24.1)
    minitest (5.25.1)
    msgpack (1.7.2)
    multi_xml (0.7.1)
      bigdecimal (~> 3.1)
@@ -201,29 +201,31 @@ GEM
    net-smtp (0.5.0)
      net-protocol
    nio4r (2.7.3)
    nokogiri (1.16.6-aarch64-linux)
    nokogiri (1.16.7-aarch64-linux)
      racc (~> 1.4)
    nokogiri (1.16.6-arm-linux)
    nokogiri (1.16.7-arm-linux)
      racc (~> 1.4)
    nokogiri (1.16.6-arm64-darwin)
    nokogiri (1.16.7-arm64-darwin)
      racc (~> 1.4)
    nokogiri (1.16.6-x86-linux)
    nokogiri (1.16.7-x86-linux)
      racc (~> 1.4)
    nokogiri (1.16.6-x86_64-darwin)
    nokogiri (1.16.7-x86_64-darwin)
      racc (~> 1.4)
    nokogiri (1.16.6-x86_64-linux)
    nokogiri (1.16.7-x86_64-linux)
      racc (~> 1.4)
    oj (3.16.4)
    oj (3.16.5)
      bigdecimal (>= 3.0)
      ostruct (>= 0.2)
    optimist (3.1.0)
    orm_adapter (0.5.0)
    ostruct (0.6.0)
    parallel (1.25.1)
    parser (3.3.3.0)
      ast (~> 2.4.1)
      racc
    patience_diff (1.2.0)
      optimist (~> 3.0)
    pg (1.5.6)
    pg (1.5.7)
    pry (0.14.2)
      coderay (~> 1.1)
      method_source (~> 1.0)
@@ -240,7 +242,7 @@ GEM
    pundit (2.3.2)
      activesupport (>= 3.0.0)
    raabro (1.4.0)
    racc (1.8.0)
    racc (1.8.1)
    rack (3.1.7)
    rack-session (2.0.0)
      rack (>= 3.0.0)
@@ -304,7 +306,7 @@ GEM
    rspec-mocks (3.13.1)
      diff-lcs (>= 1.2.0, < 2.0)
      rspec-support (~> 3.13.0)
    rspec-rails (6.1.3)
    rspec-rails (6.1.4)
      actionpack (>= 6.1)
      activesupport (>= 6.1)
      railties (>= 6.1)
@@ -313,9 +315,9 @@ GEM
      rspec-mocks (~> 3.13)
      rspec-support (~> 3.13)
    rspec-support (3.13.1)
    rswag-api (2.13.0)
      activesupport (>= 3.1, < 7.2)
      railties (>= 3.1, < 7.2)
    rswag-api (2.14.0)
      activesupport (>= 5.2, < 8.0)
      railties (>= 5.2, < 8.0)
    rswag-specs (2.13.0)
      activesupport (>= 3.1, < 7.2)
      json-schema (>= 2.2, < 5.0)
@@ -343,7 +345,7 @@ GEM
      rubocop (>= 1.33.0, < 2.0)
      rubocop-ast (>= 1.31.1, < 2.0)
    ruby-progressbar (1.13.0)
    shoulda-matchers (6.2.0)
    shoulda-matchers (6.3.0)
      activesupport (>= 5.2.0)
    shrine (3.6.0)
      content_disposition (~> 1.0)
@@ -366,11 +368,11 @@ GEM
    sprockets (4.2.1)
      concurrent-ruby (~> 1.0)
      rack (>= 2.2.4, < 4)
    sprockets-rails (3.5.1)
    sprockets-rails (3.5.2)
      actionpack (>= 6.1)
      activesupport (>= 6.1)
      sprockets (>= 3.0.0)
    stimulus-rails (1.3.3)
    stimulus-rails (1.3.4)
      railties (>= 6.0.0)
    stringio (3.1.1)
    strscan (3.1.0)
@@ -378,17 +380,17 @@ GEM
      attr_extras (>= 6.2.4)
      diff-lcs
      patience_diff
    tailwindcss-rails (2.6.3)
    tailwindcss-rails (2.7.3)
      railties (>= 7.0.0)
    tailwindcss-rails (2.6.3-aarch64-linux)
    tailwindcss-rails (2.7.3-aarch64-linux)
      railties (>= 7.0.0)
    tailwindcss-rails (2.6.3-arm-linux)
    tailwindcss-rails (2.7.3-arm-linux)
      railties (>= 7.0.0)
    tailwindcss-rails (2.6.3-arm64-darwin)
    tailwindcss-rails (2.7.3-arm64-darwin)
      railties (>= 7.0.0)
    tailwindcss-rails (2.6.3-x86_64-darwin)
    tailwindcss-rails (2.7.3-x86_64-darwin)
      railties (>= 7.0.0)
    tailwindcss-rails (2.6.3-x86_64-linux)
    tailwindcss-rails (2.7.3-x86_64-linux)
      railties (>= 7.0.0)
    thor (1.3.1)
    timeout (0.4.1)
@@ -409,7 +411,7 @@ GEM
    websocket-driver (0.7.6)
      websocket-extensions (>= 0.1.0)
    websocket-extensions (0.1.5)
    zeitwerk (2.6.16)
    zeitwerk (2.6.17)

PLATFORMS
  aarch64-linux
@@ -432,6 +434,7 @@ DEPENDENCIES
  foreman
  geocoder
  google_places
  httparty
  importmap-rails
  kaminari
  lograge

README.md (18 lines changed)
@@ -1,7 +1,9 @@
# Dawarich

[](https://discord.gg/pHsBjpt5J8) | [](https://ko-fi.com/H2H3IDYDD) | [](https://www.patreon.com/freika)
[](https://discord.gg/pHsBjpt5J8) | [](https://ko-fi.com/H2H3IDYDD) | [](https://www.patreon.com/freika) | [0x6bAd13667692632f1bF926cA9B421bEe7EaEB8D4](https://etherscan.io/address/0x6bAd13667692632f1bF926cA9B421bEe7EaEB8D4)

[](https://app.circleci.com/pipelines/github/Freika/dawarich)

## Screenshots

@@ -50,14 +52,14 @@ To import your Google Maps Timeline data, download your location history from [G
## How-to's

- [How to set up reverse proxy](docs/how_to_setup_reverse_proxy.md)
- [How to import Google Takeout to Dawarich](https://github.com/Freika/dawarich/wiki/How-to-import-your-Google-Takeout-data)
- [How to import Google Semantic History to Dawarich](https://github.com/Freika/dawarich/wiki/How-to-import-your-Google-Semantic-History-data)
- [How to import Google Maps Timeline Data to Dawarich](https://github.com/Freika/dawarich/wiki/How-to-import-your-Google-Maps-Timeline-data)
- [How to track your location to Dawarich with Overland](https://github.com/Freika/dawarich/wiki/How-to-track-your-location-to-Dawarich-with-Overland)
- [How to track your location to Dawarich with OwnTracks](https://github.com/Freika/dawarich/wiki/How-to-track-your-location-to-Dawarich-with-OwnTracks)
- [How to export your data from Dawarich](https://github.com/Freika/dawarich/wiki/How-to-export-your-data-from-Dawarich)
- [How to import Google Takeout to Dawarich](https://dawarich.app/docs/tutorials/import-existing-data#sources-of-data)
- [How to import Google Semantic History to Dawarich](https://dawarich.app/docs/tutorials/import-existing-data#semantic-location-history)
- [How to import Google Maps Timeline Data to Dawarich](https://dawarich.app/docs/tutorials/import-existing-data#recordsjson)
- [How to track your location to Dawarich with Overland](https://dawarich.app/docs/tutorials/track-your-location#overland)
- [How to track your location to Dawarich with OwnTracks](https://dawarich.app/docs/tutorials/track-your-location#owntracks)
- [How to export your data from Dawarich](https://dawarich.app/docs/tutorials/export-your-data)

More guides can be found in the [Wiki](https://github.com/Freika/dawarich/wiki#how-tos)
More guides can be found in the [Docs](https://dawarich.app/docs/intro)

## Features

File diff suppressed because one or more lines are too long

@@ -8,7 +8,12 @@ class Api::V1::PointsController < ApplicationController
    start_at = params[:start_at]&.to_datetime&.to_i
    end_at = params[:end_at]&.to_datetime&.to_i || Time.zone.now.to_i

    points = current_api_user.tracked_points.where(timestamp: start_at..end_at)
    points = current_api_user
             .tracked_points
             .where(timestamp: start_at..end_at)
             .order(:timestamp)
             .page(params[:page])
             .per(params[:per_page] || 100)

    render json: points
  end

app/controllers/api/v1/stats_controller.rb (new file, 10 lines)
@@ -0,0 +1,10 @@
# frozen_string_literal: true

class Api::V1::StatsController < ApplicationController
  skip_forgery_protection
  before_action :authenticate_api_key

  def index
    render json: StatsSerializer.new(current_api_user).call
  end
end
|
@ -9,7 +9,7 @@ class ExportsController < ApplicationController
|
|||
end
|
||||
|
||||
def create
|
||||
export_name = "#{params[:start_at].to_date}_#{params[:end_at].to_date}"
|
||||
export_name = "export_from_#{params[:start_at].to_date}_to_#{params[:end_at].to_date}"
|
||||
export = current_user.exports.create(name: export_name, status: :created)
|
||||
|
||||
ExportJob.perform_later(export.id, params[:start_at], params[:end_at])
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ class Settings::BackgroundJobsController < ApplicationController
|
|||
end
|
||||
|
||||
def create
|
||||
EnqueueReverseGeocodingJob.perform_later(params[:job_name], current_user.id)
|
||||
EnqueueBackgroundJob.perform_later(params[:job_name], current_user.id)
|
||||
|
||||
flash.now[:notice] = 'Job was successfully created.'
|
||||
|
||||
|
|
|
|||
|
|
@ -3,8 +3,7 @@
|
|||
class SettingsController < ApplicationController
|
||||
before_action :authenticate_user!
|
||||
|
||||
def index
|
||||
end
|
||||
def index; end
|
||||
|
||||
def update
|
||||
current_user.update(settings: settings_params)
|
||||
|
|
@ -31,7 +30,8 @@ class SettingsController < ApplicationController
|
|||
def settings_params
|
||||
params.require(:settings).permit(
|
||||
:meters_between_routes, :minutes_between_routes, :fog_of_war_meters,
|
||||
:time_threshold_minutes, :merge_threshold_minutes, :route_opacity
|
||||
:time_threshold_minutes, :merge_threshold_minutes, :route_opacity,
|
||||
:immich_url, :immich_api_key
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
16
app/jobs/enqueue_background_job.rb
Normal file
16
app/jobs/enqueue_background_job.rb
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class EnqueueBackgroundJob < ApplicationJob
|
||||
queue_as :reverse_geocoding
|
||||
|
||||
def perform(job_name, user_id)
|
||||
case job_name
|
||||
when 'start_immich_import'
|
||||
ImportImmichGeodataJob.perform_later(user_id)
|
||||
when 'start_reverse_geocoding', 'continue_reverse_geocoding'
|
||||
Jobs::Create.new(job_name, user_id).call
|
||||
else
|
||||
raise ArgumentError, "Unknown job name: #{job_name}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class EnqueueReverseGeocodingJob < ApplicationJob
|
||||
queue_as :reverse_geocoding
|
||||
|
||||
def perform(job_name, user_id)
|
||||
Jobs::Create.new(job_name, user_id).call
|
||||
end
|
||||
end
|
||||
11
app/jobs/import_immich_geodata_job.rb
Normal file
11
app/jobs/import_immich_geodata_job.rb
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class ImportImmichGeodataJob < ApplicationJob
|
||||
queue_as :imports
|
||||
|
||||
def perform(user_id)
|
||||
user = User.find(user_id)
|
||||
|
||||
Immich::ImportGeodata.new(user).call
|
||||
end
|
||||
end
|
||||
|
|
@ -13,33 +13,25 @@ class ImportJob < ApplicationJob
|
|||
raw_points: result[:raw_points], doubles: result[:doubles], processed: result[:processed]
|
||||
)
|
||||
|
||||
Notifications::Create.new(
|
||||
user:,
|
||||
kind: :info,
|
||||
title: 'Import finished',
|
||||
content: "Import \"#{import.name}\" successfully finished."
|
||||
).call
|
||||
create_import_finished_notification(import, user)
|
||||
|
||||
schedule_stats_creating(user_id)
|
||||
schedule_visit_suggesting(user_id, import)
|
||||
rescue StandardError => e
|
||||
Notifications::Create.new(
|
||||
user:,
|
||||
kind: :error,
|
||||
title: 'Import failed',
|
||||
content: "Import \"#{import.name}\" failed: #{e.message}"
|
||||
).call
|
||||
create_import_failed_notification(import, user, e)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def parser(source)
|
||||
# Bad classes naming by the way, they are not parsers, they are point creators
|
||||
case source
|
||||
when 'google_semantic_history' then GoogleMaps::SemanticHistoryParser
|
||||
when 'google_records' then GoogleMaps::RecordsParser
|
||||
when 'google_phone_takeout' then GoogleMaps::PhoneTakeoutParser
|
||||
when 'owntracks' then OwnTracks::ExportParser
|
||||
when 'gpx' then Gpx::TrackParser
|
||||
when 'immich_api' then Immich::ImportParser
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -54,4 +46,22 @@ class ImportJob < ApplicationJob
|
|||
|
||||
VisitSuggestingJob.perform_later(user_ids: [user_id], start_at:, end_at:)
|
||||
end
|
||||
|
||||
def create_import_finished_notification(import, user)
|
||||
Notifications::Create.new(
|
||||
user:,
|
||||
kind: :info,
|
||||
title: 'Import finished',
|
||||
content: "Import \"#{import.name}\" successfully finished."
|
||||
).call
|
||||
end
|
||||
|
||||
def create_import_failed_notification(import, user, error)
|
||||
Notifications::Create.new(
|
||||
user:,
|
||||
kind: :error,
|
||||
title: 'Import failed',
|
||||
content: "Import \"#{import.name}\" failed: #{error.message}, stacktrace: #{error.backtrace.join("\n")}"
|
||||
).call
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -8,5 +8,8 @@ class Import < ApplicationRecord
|
|||
|
||||
include ImportUploader::Attachment(:raw)
|
||||
|
||||
enum source: { google_semantic_history: 0, owntracks: 1, google_records: 2, google_phone_takeout: 3, gpx: 4 }
|
||||
enum source: {
|
||||
google_semantic_history: 0, owntracks: 1, google_records: 2,
|
||||
google_phone_takeout: 3, gpx: 4, immich_api: 5
|
||||
}
|
||||
end
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
class Point < ApplicationRecord
|
||||
reverse_geocoded_by :latitude, :longitude
|
||||
|
||||
belongs_to :import, optional: true
|
||||
belongs_to :import, optional: true, counter_cache: true
|
||||
belongs_to :visit, optional: true
|
||||
belongs_to :user
|
||||
|
||||
|
|
@ -15,10 +15,10 @@ class Point < ApplicationRecord
|
|||
report_location_message_event: 4, manual_event: 5, timer_based_event: 6,
|
||||
settings_monitoring_event: 7
|
||||
}, _suffix: true
|
||||
enum connection: { mobile: 0, wifi: 1, offline: 2 }, _suffix: true
|
||||
enum connection: { mobile: 0, wifi: 1, offline: 2, unknown: 4 }, _suffix: true
|
||||
|
||||
scope :reverse_geocoded, -> { where.not(city: nil, country: nil) }
|
||||
scope :not_reverse_geocoded, -> { where(city: nil, country: nil) }
|
||||
scope :reverse_geocoded, -> { where.not(geodata: {}) }
|
||||
scope :not_reverse_geocoded, -> { where(geodata: {}) }
|
||||
scope :visited, -> { where.not(visit_id: nil) }
|
||||
scope :not_visited, -> { where(visit_id: nil) }
|
||||
|
||||
|
|
|
|||
54
app/serializers/stats_serializer.rb
Normal file
54
app/serializers/stats_serializer.rb
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class StatsSerializer
|
||||
attr_reader :user
|
||||
|
||||
def initialize(user)
|
||||
@user = user
|
||||
end
|
||||
|
||||
def call
|
||||
{
|
||||
totalDistanceKm: total_distance,
|
||||
totalPointsTracked: user.tracked_points.count,
|
||||
totalReverseGeocodedPoints: reverse_geocoded_points,
|
||||
totalCountriesVisited: user.countries_visited.count,
|
||||
totalCitiesVisited: user.cities_visited.count,
|
||||
yearlyStats: yearly_stats
|
||||
}.to_json
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def total_distance
|
||||
user.stats.sum(:distance)
|
||||
end
|
||||
|
||||
def reverse_geocoded_points
|
||||
user.tracked_points.reverse_geocoded.count
|
||||
end
|
||||
|
||||
def yearly_stats
|
||||
user.stats.group_by(&:year).sort.reverse.map do |year, stats|
|
||||
{
|
||||
year:,
|
||||
totalDistanceKm: stats.sum(&:distance),
|
||||
totalCountriesVisited: user.countries_visited.count,
|
||||
totalCitiesVisited: user.cities_visited.count,
|
||||
monthlyDistanceKm: monthly_distance(year, stats)
|
||||
}
|
||||
end
|
||||
end
|
||||
|
||||
def monthly_distance(year, stats)
|
||||
months = {}
|
||||
|
||||
(1..12).each { |month| months[Date::MONTHNAMES[month]&.downcase] = distance(month, year, stats) }
|
||||
|
||||
months
|
||||
end
|
||||
|
||||
def distance(month, year, stats)
|
||||
stats.find { _1.month == month && _1.year == year }&.distance.to_i
|
||||
end
|
||||
end
|
||||
|
|
@ -29,7 +29,12 @@ class CreateStats
|
|||
|
||||
Notifications::Create.new(user:, kind: :info, title: 'Stats updated', content: 'Stats updated').call
|
||||
rescue StandardError => e
|
||||
Notifications::Create.new(user:, kind: :error, title: 'Stats update failed', content: e.message).call
|
||||
Notifications::Create.new(
|
||||
user:,
|
||||
kind: :error,
|
||||
title: 'Stats update failed',
|
||||
content: "#{e.message}, stacktrace: #{e.backtrace.join("\n")}"
|
||||
).call
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -11,11 +11,11 @@ class Exports::Create
|
|||
def call
|
||||
export.update!(status: :processing)
|
||||
|
||||
pp "====Exporting data for #{user.email} from #{start_at} to #{end_at}"
|
||||
Rails.logger.debug "====Exporting data for #{user.email} from #{start_at} to #{end_at}"
|
||||
|
||||
points = time_framed_points
|
||||
|
||||
pp "====Exporting #{points.size} points"
|
||||
Rails.logger.debug "====Exporting #{points.size} points"
|
||||
|
||||
data = ::ExportSerializer.new(points, user.email).call
|
||||
file_path = Rails.root.join('public', 'exports', "#{export.name}.json")
|
||||
|
|
@ -24,21 +24,11 @@ class Exports::Create
|
|||
|
||||
export.update!(status: :completed, url: "exports/#{export.name}.json")
|
||||
|
||||
Notifications::Create.new(
|
||||
user:,
|
||||
kind: :info,
|
||||
title: 'Export finished',
|
||||
content: "Export \"#{export.name}\" successfully finished."
|
||||
).call
|
||||
create_export_finished_notification
|
||||
rescue StandardError => e
|
||||
Rails.logger.error("====Export failed to create: #{e.message}")
|
||||
|
||||
Notifications::Create.new(
|
||||
user:,
|
||||
kind: :error,
|
||||
title: 'Export failed',
|
||||
content: "Export \"#{export.name}\" failed: #{e.message}"
|
||||
).call
|
||||
create_failed_export_notification(e)
|
||||
|
||||
export.update!(status: :failed)
|
||||
end
|
||||
|
|
@ -50,7 +40,24 @@ class Exports::Create
|
|||
def time_framed_points
|
||||
user
|
||||
.tracked_points
|
||||
.without_raw_data
|
||||
.where('timestamp >= ? AND timestamp <= ?', start_at.to_i, end_at.to_i)
|
||||
end
|
||||
|
||||
def create_export_finished_notification
|
||||
Notifications::Create.new(
|
||||
user:,
|
||||
kind: :info,
|
||||
title: 'Export finished',
|
||||
content: "Export \"#{export.name}\" successfully finished."
|
||||
).call
|
||||
end
|
||||
|
||||
def create_failed_export_notification(error)
|
||||
Notifications::Create.new(
|
||||
user:,
|
||||
kind: :error,
|
||||
title: 'Export failed',
|
||||
content: "Export \"#{export.name}\" failed: #{error.message}, stacktrace: #{error.backtrace.join("\n")}"
|
||||
).call
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -10,22 +10,35 @@ class Gpx::TrackParser
|
|||
end
|
||||
|
||||
def call
|
||||
segments = json['gpx']['trk']['trkseg']
|
||||
tracks = json['gpx']['trk']
|
||||
tracks_arr = tracks.is_a?(Array) ? tracks : [tracks]
|
||||
|
||||
if segments.is_a?(Array)
|
||||
segments.each do |segment|
|
||||
segment['trkpt'].each { create_point(_1) }
|
||||
end
|
||||
else
|
||||
segments['trkpt'].each { create_point(_1) }
|
||||
end
|
||||
tracks_arr
|
||||
.map { parse_track(_1) }
|
||||
.flatten
|
||||
.reduce { |result, points| result.merge(points) { _2 + _3 } }
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def parse_track(track)
|
||||
segments = track['trkseg']
|
||||
segments_arr = segments.is_a?(Array) ? segments : [segments]
|
||||
|
||||
segments_arr.map do |segment|
|
||||
trackpoints = segment['trkpt']
|
||||
|
||||
points = trackpoints.reduce(0) { _1 + create_point(_2) }
|
||||
doubles = trackpoints.size - points
|
||||
processed = points + doubles
|
||||
|
||||
{ raw_points: trackpoints.size, points:, doubles:, processed: }
|
||||
end
|
||||
end
|
||||
|
||||
def create_point(point)
|
||||
return if point['lat'].blank? || point['lon'].blank? || point['time'].blank?
|
||||
return if point_exists?(point)
|
||||
return 0 if point['lat'].blank? || point['lon'].blank? || point['time'].blank?
|
||||
return 0 if point_exists?(point)
|
||||
|
||||
Point.create(
|
||||
latitude: point['lat'].to_d,
|
||||
|
|
@ -33,8 +46,11 @@ class Gpx::TrackParser
|
|||
altitude: point['ele'].to_i,
|
||||
timestamp: Time.parse(point['time']).to_i,
|
||||
import_id: import.id,
|
||||
raw_data: point,
|
||||
user_id:
|
||||
)
|
||||
|
||||
1
|
||||
end
|
||||
|
||||
def point_exists?(point)
|
||||
|
|
|
|||
102
app/services/immich/import_geodata.rb
Normal file
102
app/services/immich/import_geodata.rb
Normal file
|
|
@ -0,0 +1,102 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Immich::ImportGeodata
|
||||
attr_reader :user, :immich_api_base_url, :immich_api_key
|
||||
|
||||
def initialize(user)
|
||||
@user = user
|
||||
@immich_api_base_url = "#{user.settings['immich_url']}/api"
|
||||
@immich_api_key = user.settings['immich_api_key']
|
||||
end
|
||||
|
||||
def call
|
||||
raise ArgumentError, 'Immich API key is missing' if immich_api_key.blank?
|
||||
raise ArgumentError, 'Immich URL is missing' if user.settings['immich_url'].blank?
|
||||
|
||||
immich_data = retrieve_immich_data
|
||||
immich_data_json = parse_immich_data(immich_data)
|
||||
file_name = file_name(immich_data_json)
|
||||
import = user.imports.find_or_initialize_by(name: file_name, source: :immich_api)
|
||||
|
||||
create_import_failed_notification and return unless import.new_record?
|
||||
|
||||
import.raw_data = immich_data_json
|
||||
import.save!
|
||||
ImportJob.perform_later(user.id, import.id)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def headers
|
||||
{
|
||||
'x-api-key' => immich_api_key,
|
||||
'accept' => 'application/json'
|
||||
}
|
||||
end
|
||||
|
||||
def retrieve_immich_data
|
||||
(1..12).flat_map do |month_number|
|
||||
(1..31).map do |day|
|
||||
url = "#{immich_api_base_url}/assets/memory-lane?day=#{day}&month=#{month_number}"
|
||||
|
||||
JSON.parse(HTTParty.get(url, headers:).body)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def valid?(asset)
|
||||
asset.dig('exifInfo', 'latitude') &&
|
||||
asset.dig('exifInfo', 'longitude') &&
|
||||
asset.dig('exifInfo', 'dateTimeOriginal')
|
||||
end
|
||||
|
||||
def parse_immich_data(immich_data)
|
||||
geodata = []
|
||||
|
||||
immich_data.each do |memory_lane|
|
||||
log_no_data and next if memory_lane_invalid?(memory_lane)
|
||||
|
||||
assets = extract_assets(memory_lane)
|
||||
|
||||
assets.each { |asset| geodata << extract_geodata(asset) if valid?(asset) }
|
||||
end
|
||||
|
||||
geodata.sort_by { |data| data[:timestamp] }
|
||||
end
|
||||
|
||||
def memory_lane_invalid?(memory_lane)
|
||||
memory_lane.is_a?(Hash) && memory_lane['statusCode'] == 404
|
||||
end
|
||||
|
||||
def extract_assets(memory_lane)
|
||||
memory_lane.flat_map { |lane| lane['assets'] }.compact
|
||||
end
|
||||
|
||||
def extract_geodata(asset)
|
||||
{
|
||||
latitude: asset.dig('exifInfo', 'latitude'),
|
||||
longitude: asset.dig('exifInfo', 'longitude'),
|
||||
timestamp: Time.zone.parse(asset.dig('exifInfo', 'dateTimeOriginal')).to_i
|
||||
}
|
||||
end
|
||||
|
||||
def log_no_data
|
||||
Rails.logger.debug 'No data found'
|
||||
end
|
||||
|
||||
def create_import_failed_notification
|
||||
Notifications::Create.new(
|
||||
user:,
|
||||
kind: :info,
|
||||
title: 'Import was not created',
|
||||
content: 'Import with the same name already exists. If you want to proceed, delete the existing import and try again.'
|
||||
).call
|
||||
end
|
||||
|
||||
def file_name(immich_data_json)
|
||||
from = Time.zone.at(immich_data_json.first[:timestamp]).to_date
|
||||
to = Time.zone.at(immich_data_json.last[:timestamp]).to_date
|
||||
|
||||
"immich-geodata-#{user.email}-from-#{from}-to-#{to}.json"
|
||||
end
|
||||
end
|
||||
40
app/services/immich/import_parser.rb
Normal file
40
app/services/immich/import_parser.rb
Normal file
|
|
@ -0,0 +1,40 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Immich::ImportParser
|
||||
attr_reader :import, :json, :user_id
|
||||
|
||||
def initialize(import, user_id)
|
||||
@import = import
|
||||
@json = import.raw_data
|
||||
@user_id = user_id
|
||||
end
|
||||
|
||||
def call
|
||||
json.each { |point| create_point(point) }
|
||||
end
|
||||
|
||||
def create_point(point)
|
||||
return 0 if point['latitude'].blank? || point['longitude'].blank? || point['timestamp'].blank?
|
||||
return 0 if point_exists?(point, point['timestamp'])
|
||||
|
||||
Point.create(
|
||||
latitude: point['latitude'].to_d,
|
||||
longitude: point['longitude'].to_d,
|
||||
timestamp: point['timestamp'],
|
||||
raw_data: point,
|
||||
import_id: import.id,
|
||||
user_id:
|
||||
)
|
||||
|
||||
1
|
||||
end
|
||||
|
||||
def point_exists?(point, timestamp)
|
||||
Point.exists?(
|
||||
latitude: point['latitude'].to_d,
|
||||
longitude: point['longitude'].to_d,
|
||||
timestamp:,
|
||||
user_id:
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
@ -22,16 +22,12 @@ class OwnTracks::ExportParser
|
|||
user_id:
|
||||
)
|
||||
|
||||
Point.create(
|
||||
latitude: point_data[:latitude],
|
||||
longitude: point_data[:longitude],
|
||||
timestamp: point_data[:timestamp],
|
||||
raw_data: point_data[:raw_data],
|
||||
topic: point_data[:topic],
|
||||
tracker_id: point_data[:tracker_id],
|
||||
import_id: import.id,
|
||||
user_id:
|
||||
)
|
||||
point = Point.new(point_data).tap do |p|
|
||||
p.user_id = user_id
|
||||
p.import_id = import.id
|
||||
end
|
||||
|
||||
point.save
|
||||
|
||||
points += 1
|
||||
end
|
||||
|
|
|
|||
|
|
@ -11,22 +11,22 @@ class OwnTracks::Params
|
|||
{
|
||||
latitude: params[:lat],
|
||||
longitude: params[:lon],
|
||||
battery_status: battery_status,
|
||||
battery: params[:batt],
|
||||
ping: params[:p],
|
||||
altitude: params[:alt],
|
||||
accuracy: params[:acc],
|
||||
vertical_accuracy: params[:vac],
|
||||
velocity: params[:vel],
|
||||
connection: connection,
|
||||
ssid: params[:SSID],
|
||||
bssid: params[:BSSID],
|
||||
trigger: trigger,
|
||||
tracker_id: params[:tid],
|
||||
timestamp: params[:tst].to_i,
|
||||
inrids: params[:inrids],
|
||||
in_regions: params[:inregions],
|
||||
topic: params[:topic],
|
||||
battery_status:,
|
||||
connection:,
|
||||
trigger:,
|
||||
raw_data: params.deep_stringify_keys
|
||||
}
|
||||
end
|
||||
|
|
@ -36,18 +36,18 @@ class OwnTracks::Params
|
|||
def battery_status
|
||||
return 'unknown' if params[:bs].nil?
|
||||
|
||||
case params[:bs]
|
||||
when 'u' then 'unplugged'
|
||||
when 'c' then 'charging'
|
||||
when 'f' then 'full'
|
||||
case params[:bs].to_i
|
||||
when 1 then 'unplugged'
|
||||
when 2 then 'charging'
|
||||
when 3 then 'full'
|
||||
else 'unknown'
|
||||
end
|
||||
end
|
||||
|
||||
def trigger
|
||||
return 'unknown' if params[:m].nil?
|
||||
return 'unknown' if params[:t].nil?
|
||||
|
||||
case params[:m]
|
||||
case params[:t]
|
||||
when 'p' then 'background_event'
|
||||
when 'c' then 'circular_region_event'
|
||||
when 'b' then 'beacon_event'
|
||||
|
|
|
|||
|
|
@ -21,6 +21,6 @@ class ReverseGeocoding::Points::FetchData
|
|||
private
|
||||
|
||||
def reverse_geocoded?
|
||||
point.city.present? && point.country.present? || point.geodata.present?
|
||||
point.geodata.present?
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ class Tasks::Imports::GoogleRecords
|
|||
private
|
||||
|
||||
def create_import
|
||||
@user.imports.create(name: @file_path, source: :google_records)
|
||||
@user.imports.create(name: @file_path, source: :google_records).id
|
||||
end
|
||||
|
||||
def read_file
|
||||
|
|
|
|||
|
|
@ -28,15 +28,16 @@
|
|||
<thead>
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Created at</th>
|
||||
<th>Status</th>
|
||||
<th>Actions</th>
|
||||
<th>Created at</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<% @exports.each do |export| %>
|
||||
<tr>
|
||||
<td><%= export.name %></td>
|
||||
<td><%= export.created_at.strftime('%Y-%m-%d %H:%M:%S') %></td>
|
||||
<td><%= export.status %></td>
|
||||
<td>
|
||||
<% if export.completed? %>
|
||||
|
|
@ -44,7 +45,6 @@
|
|||
<% end %>
|
||||
<%= link_to 'Delete', export, data: { confirm: "Are you sure?", turbo_confirm: "Are you sure?", turbo_method: :delete }, method: :delete, class: "px-4 py-2 bg-red-500 text-white rounded-md" %>
|
||||
</td>
|
||||
<td><%= export.created_at.strftime('%Y-%m-%d %H:%M:%S') %></td>
|
||||
</tr>
|
||||
<% end %>
|
||||
</tbody>
|
||||
|
|
|
|||
|
|
@ -1,9 +1,15 @@
|
|||
<% content_for :title, 'Imports' %>
|
||||
|
||||
<div class="w-full">
|
||||
<div class="flex justify-between items-center">
|
||||
<div class="flex justify-between items-center mb-3">
|
||||
<h1 class="font-bold text-4xl">Imports</h1>
|
||||
<%= link_to "New import", new_import_path, class: "rounded-lg py-3 px-5 bg-blue-600 text-white block font-medium" %>
|
||||
|
||||
<% if current_user.settings['immich_url'] && current_user.settings['immich_api_key'] %>
|
||||
<%= link_to 'Import Immich data', settings_background_jobs_path(job_name: 'start_immich_import'), method: :post, data: { confirm: 'Are you sure?', turbo_confirm: 'Are you sure?', turbo_method: :post }, class: 'rounded-lg py-3 px-5 bg-blue-600 text-white block font-medium' %>
|
||||
<% else %>
|
||||
<a href='' class="rounded-lg py-3 px-5 bg-blue-900 text-gray block font-medium tooltip cursor-not-allowed" data-tip="You need to provide your Immich instance data in the Settings">Import Immich data</a>
|
||||
<% end %>
|
||||
</div>
|
||||
|
||||
<div id="imports" class="min-w-full">
|
||||
|
|
@ -40,7 +46,7 @@
|
|||
<%= link_to import.name, import, class: 'underline hover:no-underline' %> (<%= import.source %>)
|
||||
</td>
|
||||
<td>
|
||||
<%= "#{number_with_delimiter import.points.size}" %>
|
||||
<%= "#{number_with_delimiter import.points_count}" %>
|
||||
</td>
|
||||
<td><%= import.created_at.strftime("%d.%m.%Y, %H:%M") %></td>
|
||||
</tr>
|
||||
|
|
|
|||
|
|
@ -164,6 +164,14 @@
|
|||
<% end %>
|
||||
<%= f.number_field :route_opacity, value: current_user.settings['route_opacity'], class: "input input-bordered" %>
|
||||
</div>
|
||||
<div class="form-control my-2">
|
||||
<%= f.label :immich_url %>
|
||||
<%= f.text_field :immich_url, value: current_user.settings['immich_url'], class: "input input-bordered", placeholder: 'http://192.168.0.1:2283' %>
|
||||
</div>
|
||||
<div class="form-control my-2">
|
||||
<%= f.label :immich_api_key %>
|
||||
<%= f.text_field :immich_api_key, value: current_user.settings['immich_api_key'], class: "input input-bordered", placeholder: 'xxxxxxxxxxxxxx' %>
|
||||
</div>
|
||||
<div class="form-control my-2">
|
||||
<%= f.submit "Update", class: "btn btn-primary" %>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -1,23 +1,18 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
config = {
|
||||
# geocoding service request timeout, in seconds (default 3):
|
||||
timeout: 10,
|
||||
|
||||
# set default units to kilometers:
|
||||
settings = {
|
||||
timeout: 5,
|
||||
units: :km,
|
||||
|
||||
# caching (see Caching section below for details):
|
||||
cache: Redis.new,
|
||||
cache_options: {
|
||||
expiration: 1.day # Defaults to `nil`
|
||||
# prefix: "another_key:" # Defaults to `geocoder:`
|
||||
},
|
||||
always_raise: :all,
|
||||
|
||||
use_https: false,
|
||||
lookup: :photon,
|
||||
photon: { host: 'photon.chibi.rodeo' }
|
||||
cache_options: {
|
||||
expiration: 1.day
|
||||
}
|
||||
}
|
||||
|
||||
Geocoder.configure(config)
|
||||
if defined?(PHOTON_API_HOST)
|
||||
settings[:lookup] = :photon
|
||||
settings[:photon] = { host: PHOTON_API_HOST }
|
||||
end
|
||||
|
||||
Geocoder.configure(settings)
|
||||
|
|
|
|||
|
|
@ -58,6 +58,7 @@ Rails.application.routes.draw do
|
|||
resources :areas, only: %i[index create update destroy]
|
||||
resources :points, only: %i[index destroy]
|
||||
resources :visits, only: %i[update]
|
||||
resources :stats, only: :index
|
||||
|
||||
namespace :overland do
|
||||
resources :batches, only: :create
|
||||
|
|
|
|||
62
db/data/20240815174852_add_owntracks_points_data.rb
Normal file
62
db/data/20240815174852_add_owntracks_points_data.rb
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddOwntracksPointsData < ActiveRecord::Migration[7.1]
|
||||
def up
|
||||
Rails.logger.info("Updating #{Import.owntracks.count} owntracks imports points")
|
||||
|
||||
import_points = 0
|
||||
Import.owntracks.each do |import|
|
||||
import.points.each do |point|
|
||||
params = OwnTracks::Params.new(point.raw_data).call
|
||||
|
||||
update_point(point, params)
|
||||
|
||||
import_points += 1
|
||||
end
|
||||
end
|
||||
|
||||
Rails.logger.info("#{import_points} points updated from owntracks imports")
|
||||
|
||||
# Getting points by owntracks-specific data
|
||||
points = Point.where("raw_data -> 'm' is not null and raw_data -> 'acc' is not null")
|
||||
|
||||
Rails.logger.info("Updating #{points.count} points")
|
||||
|
||||
points_updated = 0
|
||||
points.each do |point|
|
||||
params = OwnTracks::Params.new(point.raw_data).call
|
||||
|
||||
update_point(point, params)
|
||||
|
||||
points_updated += 1
|
||||
end
|
||||
|
||||
Rails.logger.info("#{points_updated} points updated")
|
||||
end
|
||||
|
||||
def down
|
||||
raise ActiveRecord::IrreversibleMigration
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def update_point(point, params)
|
||||
point.update!(
|
||||
battery: params[:battery],
|
||||
ping: params[:ping],
|
||||
altitude: params[:altitude],
|
||||
accuracy: params[:accuracy],
|
||||
vertical_accuracy: params[:vertical_accuracy],
|
||||
velocity: params[:velocity],
|
||||
ssid: params[:ssid],
|
||||
bssid: params[:bssid],
|
||||
tracker_id: params[:tracker_id],
|
||||
inrids: params[:inrids],
|
||||
in_regions: params[:in_regions],
|
||||
topic: params[:topic],
|
||||
battery_status: params[:battery_status],
|
||||
connection: params[:connection],
|
||||
trigger: params[:trigger]
|
||||
)
|
||||
end
|
||||
end
|
||||
13
db/data/20240822094532_add_counter_cache_to_imports.rb
Normal file
13
db/data/20240822094532_add_counter_cache_to_imports.rb
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddCounterCacheToImports < ActiveRecord::Migration[7.1]
|
||||
def up
|
||||
Import.find_each do |import|
|
||||
Import.reset_counters(import.id, :points)
|
||||
end
|
||||
end
|
||||
|
||||
def down
|
||||
raise ActiveRecord::IrreversibleMigration
|
||||
end
|
||||
end
|
||||
7
db/migrate/20240822092405_add_points_count_to_imports.rb
Normal file
7
db/migrate/20240822092405_add_points_count_to_imports.rb
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class AddPointsCountToImports < ActiveRecord::Migration[7.1]
|
||||
def change
|
||||
add_column :imports, :points_count, :integer, default: 0
|
||||
end
|
||||
end
|
||||
3
db/schema.rb
generated
3
db/schema.rb
generated
|
|
@ -10,7 +10,7 @@
|
|||
#
|
||||
# It's strongly recommended that you check this file into your version control system.
|
||||
|
||||
ActiveRecord::Schema[7.1].define(version: 2024_08_08_121027) do
|
||||
ActiveRecord::Schema[7.1].define(version: 2024_08_22_092405) do
|
||||
# These are extensions that must be enabled in order to support this database
|
||||
enable_extension "plpgsql"
|
||||
|
||||
|
|
@ -77,6 +77,7 @@ ActiveRecord::Schema[7.1].define(version: 2024_08_08_121027) do
|
|||
t.integer "doubles", default: 0
|
||||
t.integer "processed", default: 0
|
||||
t.jsonb "raw_data"
|
||||
t.integer "points_count", default: 0
|
||||
t.index ["source"], name: "index_imports_on_source"
|
||||
t.index ["user_id"], name: "index_imports_on_user_id"
|
||||
end
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ services:
|
|||
- dawarich
|
||||
volumes:
|
||||
- shared_data:/var/shared/redis
|
||||
restart: always
|
||||
dawarich_db:
|
||||
image: postgres:14.2-alpine
|
||||
container_name: dawarich_db
|
||||
|
|
@ -20,6 +21,7 @@ services:
|
|||
environment:
|
||||
POSTGRES_USER: postgres
|
||||
POSTGRES_PASSWORD: password
|
||||
restart: always
|
||||
dawarich_app:
|
||||
image: freikin/dawarich:latest
|
||||
container_name: dawarich_app
|
||||
|
|
|
|||
|
|
@ -26,5 +26,29 @@ FactoryBot.define do
|
|||
city { nil }
|
||||
country { nil }
|
||||
user
|
||||
|
||||
trait :with_geodata do
|
||||
geodata do
|
||||
{
|
||||
'type' => 'Feature',
|
||||
'geometry' => { 'type' => 'Point', 'coordinates' => [37.6177036, 55.755847] },
|
||||
'properties' => {
|
||||
'city' => 'Moscow',
|
||||
'name' => 'Kilometre zero',
|
||||
'type' => 'house',
|
||||
'state' => 'Moscow',
|
||||
'osm_id' => 583_204_619,
|
||||
'street' => 'Манежная площадь',
|
||||
'country' => 'Russia',
|
||||
'osm_key' => 'tourism',
|
||||
'district' => 'Tverskoy',
|
||||
'osm_type' => 'N',
|
||||
'postcode' => '103265',
|
||||
'osm_value' => 'attraction',
|
||||
'countrycode' => 'RU'
|
||||
}
|
||||
}
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ FactoryBot.define do
|
|||
year { 1 }
|
||||
month { 1 }
|
||||
distance { 1 }
|
||||
user
|
||||
toponyms do
|
||||
[
|
||||
{
|
||||
|
|
|
|||
3291
spec/fixtures/files/gpx/gpx_track_multiple_tracks.gpx
vendored
Normal file
3291
spec/fixtures/files/gpx/gpx_track_multiple_tracks.gpx
vendored
Normal file
File diff suppressed because it is too large
Load diff
12
spec/fixtures/files/immich/geodata.json
vendored
Normal file
12
spec/fixtures/files/immich/geodata.json
vendored
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
[
|
||||
{
|
||||
"latitude": 59.0000,
|
||||
"longitude": 30.0000,
|
||||
"timestamp": 978296400
|
||||
},
|
||||
{
|
||||
"latitude": 55.0001,
|
||||
"longitude": 37.0001,
|
||||
"timestamp": 978296400
|
||||
}
|
||||
]
|
||||
24
spec/fixtures/files/immich/response.json
vendored
Normal file
24
spec/fixtures/files/immich/response.json
vendored
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
[
|
||||
[
|
||||
{
|
||||
"assets": [
|
||||
{
|
||||
"exifInfo": {
|
||||
"dateTimeOriginal": "2022-12-31T23:17:06.170Z",
|
||||
"latitude": 52.0000,
|
||||
"longitude": 13.0000
|
||||
}
|
||||
},
|
||||
{
|
||||
"exifInfo": {
|
||||
"dateTimeOriginal": "2022-12-31T23:21:53.140Z",
|
||||
"latitude": 52.0000,
|
||||
"longitude": 13.0000
|
||||
}
|
||||
}
|
||||
],
|
||||
"title": "1 year ago",
|
||||
"yearsAgo": 1
|
||||
}
|
||||
]
|
||||
]
|
||||
|
|
@ -8,9 +8,11 @@ RSpec.describe AreaVisitsCalculationSchedulingJob, type: :job do
|
|||
let(:user) { create(:user) }
|
||||
|
||||
it 'calls the AreaVisitsCalculationService' do
|
||||
expect(AreaVisitsCalculatingJob).to receive(:perform_later).with(user.id).and_call_original
|
||||
Sidekiq::Testing.inline! do
|
||||
expect(AreaVisitsCalculatingJob).to receive(:perform_later).with(user.id).and_call_original
|
||||
|
||||
described_class.new.perform
|
||||
described_class.new.perform
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -2,13 +2,13 @@
|
|||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe EnqueueReverseGeocodingJob, type: :job do
|
||||
RSpec.describe EnqueueBackgroundJob, type: :job do
|
||||
let(:job_name) { 'start_reverse_geocoding' }
|
||||
let(:user_id) { 1 }
|
||||
|
||||
it 'calls job creation service' do
|
||||
expect(Jobs::Create).to receive(:new).with(job_name, user_id).and_return(double(call: nil))
|
||||
|
||||
EnqueueReverseGeocodingJob.perform_now(job_name, user_id)
|
||||
EnqueueBackgroundJob.perform_now(job_name, user_id)
|
||||
end
|
||||
end
|
||||
15
spec/jobs/import_immich_geodata_job_spec.rb
Normal file
15
spec/jobs/import_immich_geodata_job_spec.rb
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe ImportImmichGeodataJob, type: :job do
|
||||
describe '#perform' do
|
||||
let(:user) { create(:user) }
|
||||
|
||||
it 'calls Immich::ImportGeodata' do
|
||||
expect_any_instance_of(Immich::ImportGeodata).to receive(:call)
|
||||
|
||||
ImportImmichGeodataJob.perform_now(user.id)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -1,3 +1,5 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe Import, type: :model do
|
||||
|
|
@ -13,7 +15,8 @@ RSpec.describe Import, type: :model do
|
|||
owntracks: 1,
|
||||
google_records: 2,
|
||||
google_phone_takeout: 3,
|
||||
gpx: 4
|
||||
gpx: 4,
|
||||
immich_api: 5
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ RSpec.describe Point, type: :model do
|
|||
|
||||
describe 'scopes' do
|
||||
describe '.reverse_geocoded' do
|
||||
let(:point) { create(:point, country: 'Country', city: 'City') }
|
||||
let(:point) { create(:point, :with_geodata) }
|
||||
let(:point_without_address) { create(:point, city: nil, country: nil) }
|
||||
|
||||
it 'returns points with reverse geocoded address' do
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ abort('The Rails environment is running in production mode!') if Rails.env.produ
|
|||
require 'rspec/rails'
|
||||
require 'rswag/specs'
|
||||
require 'sidekiq/testing'
|
||||
require 'super_diff/rspec-rails'
|
||||
|
||||
require 'rake'
|
||||
|
||||
|
|
|
|||
36
spec/requests/api/v1/points_spec.rb
Normal file
36
spec/requests/api/v1/points_spec.rb
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe 'Api::V1::Points', type: :request do
|
||||
let!(:user) { create(:user) }
|
||||
let!(:points) { create_list(:point, 150, user:) }
|
||||
|
||||
describe 'GET /index' do
|
||||
it 'renders a successful response' do
|
||||
get api_v1_points_url(api_key: user.api_key)
|
||||
|
||||
expect(response).to be_successful
|
||||
end
|
||||
|
||||
it 'returns a list of points' do
|
||||
get api_v1_points_url(api_key: user.api_key)
|
||||
|
||||
expect(response).to have_http_status(:ok)
|
||||
|
||||
json_response = JSON.parse(response.body)
|
||||
|
||||
expect(json_response.size).to eq(100)
|
||||
end
|
||||
|
||||
it 'returns a list of points with pagination' do
|
||||
get api_v1_points_url(api_key: user.api_key, page: 2, per_page: 10)
|
||||
|
||||
expect(response).to have_http_status(:ok)
|
||||
|
||||
json_response = JSON.parse(response.body)
|
||||
|
||||
expect(json_response.size).to eq(10)
|
||||
end
|
||||
end
|
||||
end
|
||||
78
spec/requests/api/v1/stats_spec.rb
Normal file
78
spec/requests/api/v1/stats_spec.rb
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe 'Api::V1::Stats', type: :request do
|
||||
let(:user) { create(:user) }
|
||||
|
||||
describe 'GET /index' do
|
||||
let!(:user) { create(:user) }
|
||||
let!(:stats_in_2020) { create_list(:stat, 12, year: 2020, user:) }
|
||||
let!(:stats_in_2021) { create_list(:stat, 12, year: 2021, user:) }
|
||||
let!(:points_in_2020) { create_list(:point, 85, :with_geodata, timestamp: Time.zone.local(2020), user:) }
|
||||
let!(:points_in_2021) { create_list(:point, 95, timestamp: Time.zone.local(2021), user:) }
|
||||
let(:expected_json) do
|
||||
{
|
||||
totalDistanceKm: stats_in_2020.map(&:distance).sum + stats_in_2021.map(&:distance).sum,
|
||||
totalPointsTracked: points_in_2020.count + points_in_2021.count,
|
||||
totalReverseGeocodedPoints: points_in_2020.count,
|
||||
totalCountriesVisited: 1,
|
||||
totalCitiesVisited: 1,
|
||||
yearlyStats: [
|
||||
{
|
||||
year: 2021,
|
||||
totalDistanceKm: 12,
|
||||
totalCountriesVisited: 1,
|
||||
totalCitiesVisited: 1,
|
||||
monthlyDistanceKm: {
|
||||
january: 1,
|
||||
february: 0,
|
||||
march: 0,
|
||||
april: 0,
|
||||
may: 0,
|
||||
june: 0,
|
||||
july: 0,
|
||||
august: 0,
|
||||
september: 0,
|
||||
october: 0,
|
||||
november: 0,
|
||||
december: 0
|
||||
}
|
||||
},
|
||||
{
|
||||
year: 2020,
|
||||
totalDistanceKm: 12,
|
||||
totalCountriesVisited: 1,
|
||||
totalCitiesVisited: 1,
|
||||
monthlyDistanceKm: {
|
||||
january: 1,
|
||||
february: 0,
|
||||
march: 0,
|
||||
april: 0,
|
||||
may: 0,
|
||||
june: 0,
|
||||
july: 0,
|
||||
august: 0,
|
||||
september: 0,
|
||||
october: 0,
|
||||
november: 0,
|
||||
december: 0
|
||||
}
|
||||
}
|
||||
]
|
||||
}.to_json
|
||||
end
|
||||
|
||||
it 'renders a successful response' do
|
||||
get api_v1_areas_url(api_key: user.api_key)
|
||||
expect(response).to be_successful
|
||||
end
|
||||
|
||||
it 'returns the stats' do
|
||||
get api_v1_stats_url(api_key: user.api_key)
|
||||
|
||||
expect(response).to be_successful
|
||||
expect(response.body).to eq(expected_json)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -46,7 +46,7 @@ RSpec.describe '/settings/background_jobs', type: :request do
|
|||
it 'enqueues a new job' do
|
||||
expect do
|
||||
post settings_background_jobs_url, params:
|
||||
end.to have_enqueued_job(EnqueueReverseGeocodingJob)
|
||||
end.to have_enqueued_job(EnqueueBackgroundJob)
|
||||
end
|
||||
|
||||
it 'redirects to the created settings_background_job' do
|
||||
|
|
|
|||
|
|
@ -27,9 +27,9 @@ RSpec.describe '/settings/users', type: :request do
|
|||
context 'when user is an admin' do
|
||||
let!(:admin) { create(:user, :admin) }
|
||||
|
||||
before { sign_in admin }
|
||||
|
||||
describe 'POST /create' do
|
||||
before { sign_in admin }
|
||||
|
||||
context 'with valid parameters' do
|
||||
it 'creates a new User' do
|
||||
expect do
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe ExportSerializer do
|
||||
|
|
|
|||
90
spec/serializers/stats_serializer_spec.rb
Normal file
90
spec/serializers/stats_serializer_spec.rb
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe StatsSerializer do
|
||||
describe '#call' do
|
||||
subject(:serializer) { described_class.new(user).call }
|
||||
|
||||
let!(:user) { create(:user) }
|
||||
|
||||
context 'when the user has no stats' do
|
||||
let(:expected_json) do
|
||||
{
|
||||
"totalDistanceKm": 0,
|
||||
"totalPointsTracked": 0,
|
||||
"totalReverseGeocodedPoints": 0,
|
||||
"totalCountriesVisited": 0,
|
||||
"totalCitiesVisited": 0,
|
||||
"yearlyStats": []
|
||||
}.to_json
|
||||
end
|
||||
|
||||
it 'returns the expected JSON' do
|
||||
expect(serializer).to eq(expected_json)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when the user has stats' do
|
||||
let!(:stats_in_2020) { create_list(:stat, 12, year: 2020, user:) }
|
||||
let!(:stats_in_2021) { create_list(:stat, 12, year: 2021, user:) }
|
||||
let!(:points_in_2020) { create_list(:point, 85, :with_geodata, timestamp: Time.zone.local(2020), user:) }
|
||||
let!(:points_in_2021) { create_list(:point, 95, timestamp: Time.zone.local(2021), user:) }
|
||||
let(:expected_json) do
|
||||
{
|
||||
"totalDistanceKm": stats_in_2020.map(&:distance).sum + stats_in_2021.map(&:distance).sum,
|
||||
"totalPointsTracked": points_in_2020.count + points_in_2021.count,
|
||||
"totalReverseGeocodedPoints": points_in_2020.count,
|
||||
"totalCountriesVisited": 1,
|
||||
"totalCitiesVisited": 1,
|
||||
"yearlyStats": [
|
||||
{
|
||||
"year": 2021,
|
||||
"totalDistanceKm": 12,
|
||||
"totalCountriesVisited": 1,
|
||||
"totalCitiesVisited": 1,
|
||||
"monthlyDistanceKm": {
|
||||
"january": 1,
|
||||
"february": 0,
|
||||
"march": 0,
|
||||
"april": 0,
|
||||
"may": 0,
|
||||
"june": 0,
|
||||
"july": 0,
|
||||
"august": 0,
|
||||
"september": 0,
|
||||
"october": 0,
|
||||
"november": 0,
|
||||
"december": 0
|
||||
}
|
||||
},
|
||||
{
|
||||
"year": 2020,
|
||||
"totalDistanceKm": 12,
|
||||
"totalCountriesVisited": 1,
|
||||
"totalCitiesVisited": 1,
|
||||
"monthlyDistanceKm": {
|
||||
"january": 1,
|
||||
"february": 0,
|
||||
"march": 0,
|
||||
"april": 0,
|
||||
"may": 0,
|
||||
"june": 0,
|
||||
"july": 0,
|
||||
"august": 0,
|
||||
"september": 0,
|
||||
"october": 0,
|
||||
"november": 0,
|
||||
"december": 0
|
||||
}
|
||||
}
|
||||
]
|
||||
}.to_json
|
||||
end
|
||||
|
||||
it 'returns the expected JSON' do
|
||||
expect(serializer).to eq(expected_json)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -15,6 +15,7 @@ RSpec.describe Gpx::TrackParser do
|
|||
context 'when file has a single segment' do
|
||||
it 'creates points' do
|
||||
expect { parser }.to change { Point.count }.by(301)
|
||||
expect(parser).to eq({ doubles: 4, points: 301, processed: 305, raw_points: 305 })
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -23,8 +24,18 @@ RSpec.describe Gpx::TrackParser do
|
|||
|
||||
it 'creates points' do
|
||||
expect { parser }.to change { Point.count }.by(558)
|
||||
expect(parser).to eq({ doubles: 0, points: 558, processed: 558, raw_points: 558 })
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when file has multiple tracks' do
|
||||
let(:file_path) { Rails.root.join('spec/fixtures/files/gpx/gpx_track_multiple_tracks.gpx') }
|
||||
|
||||
it 'creates points' do
|
||||
expect { parser }.to change { Point.count }.by(407)
|
||||
expect(parser).to eq({ doubles: 0, points: 407, processed: 407, raw_points: 407 })
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
92
spec/services/immich/import_geodata_spec.rb
Normal file
92
spec/services/immich/import_geodata_spec.rb
Normal file
|
|
@@ -0,0 +1,92 @@
# frozen_string_literal: true

require 'rails_helper'

RSpec.describe Immich::ImportGeodata do
describe '#call' do
subject(:service) { described_class.new(user).call }

let(:user) do
create(:user, settings: { 'immich_url' => 'http://immich.app', 'immich_api_key' => '123456' })
end
let(:immich_data) do
[
{
"assets": [
{
"exifInfo": {
"dateTimeOriginal": '2022-12-31T23:17:06.170Z',
"latitude": 52.0000,
"longitude": 13.0000
}
},
{
"exifInfo": {
"dateTimeOriginal": '2022-12-31T23:21:53.140Z',
"latitude": 52.0000,
"longitude": 13.0000
}
}
],
"title": '1 year ago',
"yearsAgo": 1
}
].to_json
end

context 'when user has immich_url and immich_api_key' do
before do
stub_request(
:any,
%r{http://immich\.app/api/assets/memory-lane\?day=(1[0-9]|2[0-9]|3[01]|[1-9])&month=(1[0-2]|[1-9])}
).to_return(status: 200, body: immich_data, headers: {})
end

it 'creates import' do
expect { service }.to change { Import.count }.by(1)
end

it 'enqueues ImportJob' do
expect(ImportJob).to receive(:perform_later)

service
end

context 'when import already exists' do
before { service }

it 'does not create new import' do
expect { service }.not_to(change { Import.count })
end

it 'does not enqueue ImportJob' do
expect(ImportJob).to_not receive(:perform_later)

service
end
end
end

context 'when user has no immich_url' do
before do
user.settings['immich_url'] = nil
user.save
end

it 'raises ArgumentError' do
expect { service }.to raise_error(ArgumentError, 'Immich URL is missing')
end
end

context 'when user has no immich_api_key' do
before do
user.settings['immich_api_key'] = nil
user.save
end

it 'raises ArgumentError' do
expect { service }.to raise_error(ArgumentError, 'Immich API key is missing')
end
end
end
end
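For orientation, here is a rough sketch of the service shape these examples imply. It is not the repository's implementation: the class internals, the `x-api-key` header, the day/month iteration and the Import/ImportJob call signatures are assumptions; only the memory-lane URL pattern, the missing-setting errors, the single Import record and the ImportJob enqueue come from the spec above.

```ruby
# Hypothetical sketch inferred from the spec above; names outside the spec
# (method names, header, Import/ImportJob arguments) are assumptions.
require 'json'
require 'net/http'

module Immich
  class ImportGeodataSketch
    def initialize(user)
      @user = user
    end

    def call
      url     = @user.settings['immich_url']
      api_key = @user.settings['immich_api_key']

      raise ArgumentError, 'Immich URL is missing' if url.nil?
      raise ArgumentError, 'Immich API key is missing' if api_key.nil?

      # One request per day/month combination of Immich's "memory lane"
      # endpoint, matching the URL pattern stubbed in the spec.
      geodata = (1..12).flat_map do |month|
        (1..31).flat_map do |day|
          uri = URI("#{url}/api/assets/memory-lane?day=#{day}&month=#{month}")
          request = Net::HTTP::Get.new(uri)
          request['x-api-key'] = api_key # header name is an assumption
          response = Net::HTTP.start(uri.hostname, uri.port) { |http| http.request(request) }
          JSON.parse(response.body)
        end
      end

      # Re-running with the same data should be a no-op, hence the
      # find-or-initialize guard (the exact dedup rule is assumed).
      import = Import.find_or_initialize_by(user_id: @user.id, name: 'immich')
      return if import.persisted?

      import.update!(raw_data: geodata)
      ImportJob.perform_later(@user.id, import.id) # argument shape is assumed
    end
  end
end
```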
48 spec/services/immich/import_parser_spec.rb Normal file
@@ -0,0 +1,48 @@
# frozen_string_literal: true

require 'rails_helper'

RSpec.describe Immich::ImportParser do
describe '#call' do
subject(:service) { described_class.new(import, user.id).call }

let(:user) do
create(:user, settings: { 'immich_url' => 'http://immich.app', 'immich_api_key' => '123456' })
end

let(:immich_data) do
JSON.parse(File.read(Rails.root.join('spec/fixtures/files/immich/geodata.json')))
end
let(:import) { create(:import, user:, raw_data: immich_data) }

context 'when there are no points' do
it 'creates new points' do
expect { service }.to change { Point.count }.by(2)
end

it 'creates points with correct attributes' do
service

expect(Point.first.latitude.to_f).to eq(59.0000)
expect(Point.first.longitude.to_f).to eq(30.0000)
expect(Point.first.timestamp).to eq(978_296_400)
expect(Point.first.import_id).to eq(import.id)

expect(Point.second.latitude.to_f).to eq(55.0001)
expect(Point.second.longitude.to_f).to eq(37.0001)
expect(Point.second.timestamp).to eq(978_296_400)
expect(Point.second.import_id).to eq(import.id)
end
end

context 'when there are points with the same coordinates' do
let!(:existing_point) do
create(:point, latitude: 59.0000, longitude: 30.0000, timestamp: 978_296_400, user:)
end

it 'creates only new points' do
expect { service }.to change { Point.count }.by(1)
end
end
end
end
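A hedged sketch of the mapping these examples check, i.e. how one Immich asset's exifInfo could become a Point; everything beyond the attributes asserted above (helper names, the dedup query) is assumed.

```ruby
# Hypothetical helpers matching the attributes asserted above; the real
# Immich::ImportParser may be structured differently.
require 'date'

def point_attributes(asset, import_id, user_id)
  exif = asset['exifInfo']
  {
    latitude:  exif['latitude'],
    longitude: exif['longitude'],
    # EXIF timestamps arrive as ISO 8601 strings; points store Unix epochs.
    timestamp: DateTime.parse(exif['dateTimeOriginal']).to_time.to_i,
    import_id: import_id,
    user_id:   user_id
  }
end

def import_points(raw_data, import, user_id)
  raw_data.flat_map { |memory| memory['assets'] }.each do |asset|
    attrs = point_attributes(asset, import.id, user_id)

    # Skipping exact duplicates is why the "same coordinates" example above
    # creates only one new record.
    next if Point.exists?(attrs.slice(:latitude, :longitude, :timestamp, :user_id))

    Point.create!(attrs)
  end
end
```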
@@ -13,6 +13,59 @@ RSpec.describe OwnTracks::ExportParser do
it 'creates points' do
expect { parser }.to change { Point.count }.by(9)
end

it 'correctly writes attributes' do
parser

expect(Point.first.attributes).to include(
'latitude' => 40.7128,
'longitude' => -74.006,
'battery_status' => 'charging',
'battery' => 85,
'ping' => nil,
'altitude' => 41,
'accuracy' => 8,
'vertical_accuracy' => 3,
'velocity' => nil,
'connection' => 'wifi',
'ssid' => 'Home Wifi',
'bssid' => 'b0:f2:8:45:94:33',
'trigger' => 'background_event',
'tracker_id' => 'RO',
'timestamp' => 1_706_965_203,
'inrids' => ['5f1d1b'],
'in_regions' => ['home'],
'topic' => 'owntracks/test/iPhone 12 Pro',
'visit_id' => nil,
'user_id' => user.id,
'country' => nil,
'raw_data' => {
'batt' => 85,
'lon' => -74.006,
'acc' => 8,
'bs' => 2,
'inrids' => ['5f1d1b'],
'BSSID' => 'b0:f2:8:45:94:33',
'SSID' => 'Home Wifi',
'vac' => 3,
'inregions' => ['home'],
'lat' => 40.7128,
'topic' => 'owntracks/test/iPhone 12 Pro',
't' => 'p',
'conn' => 'w',
'm' => 1,
'tst' => 1_706_965_203,
'alt' => 41,
'_type' => 'location',
'tid' => 'RO',
'_http' => true,
'ghash' => 'u33d773',
'isorcv' => '2024-02-03T13:00:03Z',
'isotst' => '2024-02-03T13:00:03Z',
'disptst' => '2024-02-03 13:00:03'
}
)
end
end
end
end
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
require 'rails_helper'

RSpec.describe OwnTracks::Params do

@@ -15,7 +17,7 @@ RSpec.describe OwnTracks::Params do
{
latitude: 40.7128,
longitude: -74.006,
- battery_status: 'unknown',
+ battery_status: 'charging',
battery: 85,
ping: nil,
altitude: 41,

@@ -25,9 +27,9 @@ RSpec.describe OwnTracks::Params do
connection: 'wifi',
ssid: 'Home Wifi',
bssid: 'b0:f2:8:45:94:33',
- trigger: 'unknown',
+ trigger: 'background_event',
tracker_id: 'RO',
- timestamp: 1706965203,
+ timestamp: 1_706_965_203,
inrids: ['5f1d1b'],
in_regions: ['home'],
topic: 'owntracks/test/iPhone 12 Pro',

@@ -46,7 +48,7 @@ RSpec.describe OwnTracks::Params do
't' => 'p',
'conn' => 'w',
'm' => 1,
- 'tst' => 1706965203,
+ 'tst' => 1_706_965_203,
'alt' => 41,
'_type' => 'location',
'tid' => 'RO',

@@ -64,7 +66,7 @@ RSpec.describe OwnTracks::Params do
end

context 'when battery status is unplugged' do
- let(:raw_point_params) { super().merge(bs: 'u') }
+ let(:raw_point_params) { super().merge(bs: 1) }

it 'returns parsed params' do
expect(params[:battery_status]).to eq('unplugged')

@@ -72,7 +74,7 @@ RSpec.describe OwnTracks::Params do
end

context 'when battery status is charging' do
- let(:raw_point_params) { super().merge(bs: 'c') }
+ let(:raw_point_params) { super().merge(bs: 2) }

it 'returns parsed params' do
expect(params[:battery_status]).to eq('charging')

@@ -80,7 +82,7 @@ RSpec.describe OwnTracks::Params do
end

context 'when battery status is full' do
- let(:raw_point_params) { super().merge(bs: 'f') }
+ let(:raw_point_params) { super().merge(bs: 3) }

it 'returns parsed params' do
expect(params[:battery_status]).to eq('full')

@@ -96,7 +98,7 @@ RSpec.describe OwnTracks::Params do
end

context 'when trigger is circular_region_event' do
- let(:raw_point_params) { super().merge(m: 'c') }
+ let(:raw_point_params) { super().merge(t: 'c') }

it 'returns parsed params' do
expect(params[:trigger]).to eq('circular_region_event')

@@ -104,7 +106,7 @@ RSpec.describe OwnTracks::Params do
end

context 'when trigger is beacon_event' do
- let(:raw_point_params) { super().merge(m: 'b') }
+ let(:raw_point_params) { super().merge(t: 'b') }

it 'returns parsed params' do
expect(params[:trigger]).to eq('beacon_event')

@@ -112,7 +114,7 @@ RSpec.describe OwnTracks::Params do
end

context 'when trigger is report_location_message_event' do
- let(:raw_point_params) { super().merge(m: 'r') }
+ let(:raw_point_params) { super().merge(t: 'r') }

it 'returns parsed params' do
expect(params[:trigger]).to eq('report_location_message_event')

@@ -120,7 +122,7 @@ RSpec.describe OwnTracks::Params do
end

context 'when trigger is manual_event' do
- let(:raw_point_params) { super().merge(m: 'u') }
+ let(:raw_point_params) { super().merge(t: 'u') }

it 'returns parsed params' do
expect(params[:trigger]).to eq('manual_event')

@@ -128,7 +130,7 @@ RSpec.describe OwnTracks::Params do
end

context 'when trigger is timer_based_event' do
- let(:raw_point_params) { super().merge(m: 't') }
+ let(:raw_point_params) { super().merge(t: 't') }

it 'returns parsed params' do
expect(params[:trigger]).to eq('timer_based_event')

@@ -136,7 +138,7 @@ RSpec.describe OwnTracks::Params do
end

context 'when trigger is settings_monitoring_event' do
- let(:raw_point_params) { super().merge(m: 'v') }
+ let(:raw_point_params) { super().merge(t: 'v') }

it 'returns parsed params' do
expect(params[:trigger]).to eq('settings_monitoring_event')

@@ -184,7 +186,7 @@ RSpec.describe OwnTracks::Params do
end

context 'when trigger is unknown' do
- let(:raw_point_params) { super().merge(m: 'unknown') }
+ before { raw_point_params[:t] = 'unknown' }

it 'returns parsed params' do
expect(params[:trigger]).to eq('unknown')
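Taken together, the new numeric battery-status codes and single-letter trigger codes amount to two small lookup tables. A minimal sketch of that mapping, assuming plain hash constants rather than whatever structure OwnTracks::Params actually uses (the 'unknown' fallback is asserted above only for triggers and assumed for battery status):

```ruby
# Hypothetical lookup tables for the OwnTracks payload codes exercised above;
# the actual parser may map them differently.
BATTERY_STATUSES = { 1 => 'unplugged', 2 => 'charging', 3 => 'full' }.freeze

TRIGGERS = {
  'p' => 'background_event',
  'c' => 'circular_region_event',
  'b' => 'beacon_event',
  'r' => 'report_location_message_event',
  'u' => 'manual_event',
  't' => 'timer_based_event',
  'v' => 'settings_monitoring_event'
}.freeze

def battery_status(bs)
  BATTERY_STATUSES.fetch(bs, 'unknown') # fallback is an assumption
end

def trigger(t)
  TRIGGERS.fetch(t, 'unknown') # matches the 'trigger is unknown' example
end
```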
@@ -32,11 +32,11 @@ RSpec.describe ReverseGeocoding::Points::FetchData do
end

context 'when point has city and country' do
- let(:point) { create(:point, city: 'City', country: 'Country') }
+ let(:point) { create(:point, :with_geodata) }

before do
allow(Geocoder).to receive(:search).and_return(
- [double(city: 'Another city', country: 'Some country')]
+ [double(geodata: { 'address' => 'Address' }, city: 'City', country: 'Country')]
)
end
68 spec/swagger/api/v1/stats_controller_spec.rb Normal file
@@ -0,0 +1,68 @@
# frozen_string_literal: true

require 'swagger_helper'

describe 'Stats API', type: :request do
path '/api/v1/stats' do
get 'Retrieves all stats' do
tags 'Stats'
produces 'application/json'
parameter name: :api_key, in: :query, type: :string, required: true, description: 'API Key'
response '200', 'stats found' do
schema type: :object,
properties: {
totalDistanceKm: { type: :number },
totalPointsTracked: { type: :number },
totalReverseGeocodedPoints: { type: :number },
totalCountriesVisited: { type: :number },
totalCitiesVisited: { type: :number },
yearlyStats: {
type: :array,
items: {
type: :object,
properties: {
year: { type: :integer },
totalDistanceKm: { type: :number },
totalCountriesVisited: { type: :number },
totalCitiesVisited: { type: :number },
monthlyDistanceKm: {
type: :object,
properties: {
january: { type: :number },
february: { type: :number },
march: { type: :number },
april: { type: :number },
may: { type: :number },
june: { type: :number },
july: { type: :number },
august: { type: :number },
september: { type: :number },
october: { type: :number },
november: { type: :number },
december: { type: :number }
}
}
},
required: %w[
year totalDistanceKm totalCountriesVisited totalCitiesVisited monthlyDistanceKm
]
}
}
},
required: %w[
totalDistanceKm totalPointsTracked totalReverseGeocodedPoints totalCountriesVisited
totalCitiesVisited yearlyStats
]

let!(:user) { create(:user) }
let!(:stats_in_2020) { create_list(:stat, 12, year: 2020, user:) }
let!(:stats_in_2021) { create_list(:stat, 12, year: 2021, user:) }
let!(:points_in_2020) { create_list(:point, 85, :with_geodata, timestamp: Time.zone.local(2020), user:) }
let!(:points_in_2021) { create_list(:point, 95, timestamp: Time.zone.local(2021), user:) }
let(:api_key) { user.api_key }

run_test!
end
end
end
end
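The rswag spec above doubles as documentation for GET /api/v1/stats. A minimal client sketch, assuming a locally reachable instance and a valid API key (host and key below are placeholders):

```ruby
# Placeholder host and API key; the endpoint path, api_key parameter and
# response keys come from the documented schema above.
require 'json'
require 'net/http'

uri = URI('http://localhost:3000/api/v1/stats?api_key=YOUR_API_KEY')
response = Net::HTTP.get_response(uri)
stats = JSON.parse(response.body)

puts stats['totalDistanceKm']
stats['yearlyStats'].each { |year| puts "#{year['year']}: #{year['totalDistanceKm']} km" }
```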
@@ -415,6 +415,89 @@ paths:
      responses:
        '200':
          description: point deleted
  "/api/v1/stats":
    get:
      summary: Retrieves all stats
      tags:
      - Stats
      parameters:
      - name: api_key
        in: query
        required: true
        description: API Key
        schema:
          type: string
      responses:
        '200':
          description: stats found
          content:
            application/json:
              schema:
                type: object
                properties:
                  totalDistanceKm:
                    type: number
                  totalPointsTracked:
                    type: number
                  totalReverseGeocodedPoints:
                    type: number
                  totalCountriesVisited:
                    type: number
                  totalCitiesVisited:
                    type: number
                  yearlyStats:
                    type: array
                    items:
                      type: object
                      properties:
                        year:
                          type: integer
                        totalDistanceKm:
                          type: number
                        totalCountriesVisited:
                          type: number
                        totalCitiesVisited:
                          type: number
                        monthlyDistanceKm:
                          type: object
                          properties:
                            january:
                              type: number
                            february:
                              type: number
                            march:
                              type: number
                            april:
                              type: number
                            may:
                              type: number
                            june:
                              type: number
                            july:
                              type: number
                            august:
                              type: number
                            september:
                              type: number
                            october:
                              type: number
                            november:
                              type: number
                            december:
                              type: number
                      required:
                      - year
                      - totalDistanceKm
                      - totalCountriesVisited
                      - totalCitiesVisited
                      - monthlyDistanceKm
                required:
                - totalDistanceKm
                - totalPointsTracked
                - totalReverseGeocodedPoints
                - totalCountriesVisited
                - totalCitiesVisited
                - yearlyStats
servers:
- url: http://{defaultHost}
  variables: