Merge remote-tracking branch 'origin/dev' into chore/database-user-constraints

This commit is contained in:
Eugene Burmakin 2025-01-22 15:03:55 +01:00
commit be779fe333
124 changed files with 3135 additions and 867 deletions

View file

@ -1 +1 @@
0.21.2
0.23.4

View file

@ -1,8 +1,8 @@
# Basis-Image für Ruby und Node.js
# Base-Image for Ruby and Node.js
FROM ruby:3.3.4-alpine
ENV APP_PATH=/var/app
ENV BUNDLE_VERSION=2.5.9
ENV BUNDLE_VERSION=2.5.21
ENV BUNDLE_PATH=/usr/local/bundle/gems
ENV TMP_PATH=/tmp/
ENV RAILS_LOG_TO_STDOUT=true
@ -27,7 +27,7 @@ RUN apk -U add --no-cache \
&& rm -rf /var/cache/apk/* \
&& mkdir -p $APP_PATH
RUN gem update --system 3.5.7 && gem install bundler --version "$BUNDLE_VERSION" \
RUN gem update --system 3.6.2 && gem install bundler --version "$BUNDLE_VERSION" \
&& rm -rf $GEM_HOME/cache/*
# FIXME It would be a good idea to use a user other than root, but this leads to permission errors on export and possibly elsewhere.

View file

@ -8,7 +8,6 @@ services:
container_name: dawarich_dev
volumes:
- "${PWD}:/var/app:cached"
- dawarich_gem_cache_app:/usr/local/bundle/gems_app
- dawarich_public:/var/app/public
- dawarich_watched:/var/app/tmp/imports/watched
networks:
@ -26,19 +25,16 @@ services:
DATABASE_PASSWORD: password
DATABASE_NAME: dawarich_development
MIN_MINUTES_SPENT_IN_CITY: 60
APPLICATION_HOST: localhost
APPLICATION_HOSTS: localhost
TIME_ZONE: Europe/London
APPLICATION_PROTOCOL: http
DISTANCE_UNIT: km
PHOTON_API_HOST: photon.komoot.io
PHOTON_API_USE_HTTPS: true
PROMETHEUS_EXPORTER_ENABLED: false
PROMETHEUS_EXPORTER_HOST: 0.0.0.0
PROMETHEUS_EXPORTER_PORT: 9394
ENABLE_TELEMETRY: false # More on telemetry: https://dawarich.app/docs/tutorials/telemetry
dawarich_redis:
image: redis:7.0-alpine
image: redis:7.4-alpine
container_name: dawarich_redis
command: redis-server
networks:
@ -53,7 +49,7 @@ services:
start_period: 30s
timeout: 10s
dawarich_db:
image: postgres:14.2-alpine
image: postgres:17-alpine
container_name: dawarich_db
volumes:
- dawarich_db_data:/var/lib/postgresql/data
@ -72,8 +68,6 @@ services:
POSTGRES_PASSWORD: password
volumes:
dawarich_db_data:
dawarich_gem_cache_app:
dawarich_gem_cache_sidekiq:
dawarich_shared:
dawarich_public:
dawarich_watched:

View file

@ -4,5 +4,4 @@ DATABASE_PASSWORD=password
DATABASE_NAME=dawarich_development
DATABASE_PORT=5432
REDIS_URL=redis://localhost:6379/1
PHOTON_API_HOST='photon.komoot.io'
DISTANCE_UNIT='km'

View file

@ -7,11 +7,14 @@ assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**OS & Hardware**
Provide your software and hardware specs
**Version**
Include version of Dawarich you're experiencing problem on.
Provide the version of Dawarich you're experiencing the problem on.
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:

View file

@ -15,34 +15,58 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v4
with:
ref: ${{ github.event.inputs.branch || github.ref_name }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
uses: docker/setup-buildx-action@v3
- name: Cache Docker layers
uses: actions/cache@v2
uses: actions/cache@v4
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Install dependencies
run: npm install
- name: Login to Docker Hub
uses: docker/login-action@v3.1.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set Docker tags
id: docker_meta
run: |
VERSION=${GITHUB_REF#refs/tags/}
TAGS="freikin/dawarich:${VERSION}"
# Add :rc tag for pre-releases
if [ "${{ github.event.release.prerelease }}" = "true" ]; then
TAGS="${TAGS},freikin/dawarich:rc"
fi
# Add :latest tag only if release is not a pre-release
if [ "${{ github.event.release.prerelease }}" != "true" ]; then
TAGS="${TAGS},freikin/dawarich:latest"
fi
echo "tags=${TAGS}" >> $GITHUB_OUTPUT
- name: Build and push
uses: docker/build-push-action@v2
uses: docker/build-push-action@v5
with:
context: .
file: ./Dockerfile
file: ./docker/Dockerfile.dev
push: true
tags: freikin/dawarich:latest,freikin/dawarich:${{ github.event.inputs.branch || github.ref_name }}
tags: ${{ steps.docker_meta.outputs.tags }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache

View file

@ -5,8 +5,237 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
# 0.23.4 - 2025-01-22
### Added
- A test for building rc Docker image.
### Fixed
- Fix authentication to `GET /api/v1/countries/visited_cities` with header `Authorization: Bearer YOUR_API_KEY` instead of `api_key` query param. #679
- Fix a bug where a gpx file with empty tracks was not being imported. #646
- Fix a bug where rc version was being checked as a stable release. #711
# 0.23.3 - 2025-01-21
### Changed
- Synology-related files are now up to date. #684
### Fixed
- Drastically improved performance for Google's Records.json import. It will now take less than 5 minutes to import 500,000 points, which previously took a few hours.
### Fixed
- Add index only if it doesn't exist.
# 0.23.1 - 2025-01-21
### Fixed
- Renamed unique index on points to `unique_points_lat_long_timestamp_user_id_index` to fix naming conflict with `unique_points_index`.
# 0.23.0 - 2025-01-20
## ⚠️ IMPORTANT ⚠️
This release includes a data migration to remove duplicated points from the database. It will not remove anything except for duplicates from the `points` table, but please make sure to create a [backup](https://dawarich.app/docs/tutorials/backup-and-restore) before updating to this version.
### Added
- `POST /api/v1/points/create` endpoint added.
- An index to guarantee uniqueness of points across `latitude`, `longitude`, `timestamp` and `user_id` values. This is introduced to make sure no duplicates will be created in the database in addition to previously existing validations.
- `GET /api/v1/users/me` endpoint added to get current user.
# 0.22.4 - 2025-01-20
### Added
- You can now drag-n-drop a point on the map to update its position. Enable the "Points" layer on the map to see the points.
- `PATCH /api/v1/points/:id` endpoint added to update a point. It only accepts `latitude` and `longitude` params. #51 #503
### Changed
- Run seeds even in prod env so Unraid users could have default user.
- Precompile assets in production env using dummy secret key base.
### Fixed
- Fixed a bug where route wasn't highlighted when it was hovered or clicked.
# 0.22.3 - 2025-01-14
### Changed
- The Map now uses a canvas to draw polylines, points and fog of war. This should improve performance in browser with a lot of points and polylines.
# 0.22.2 - 2025-01-13
✨ The Fancy Routes release ✨
### Added
- In the Map Settings (the cog icon in the top left corner of the map), you can now enable/disable the Fancy Routes feature. Simply said, it will color your routes based on the speed of each segment.
- Hovering over a polyline now shows the speed of the segment. Move cursor over a polyline to see the speed of different segments.
- Distance and points number in the custom control to the map.
### Changed
- The name of the "Polylines" feature is now "Routes".
⚠️ Important note on the Prometheus monitoring ⚠️
In the previous release, `bin/dev` command in the default `docker-compose.yml` file was replaced with `bin/rails server -p 3000 -b ::`, but this way Dawarich won't be able to start Prometheus Exporter. If you want to use Prometheus monitoring, you need to use `bin/dev` command instead.
Example:
```diff
dawarich_app:
image: freikin/dawarich:latest
...
- command: ['bin/rails', 'server', '-p', '3000', '-b', '::']
+ command: ['bin/dev']
```
# 0.22.1 - 2025-01-09
### Removed
- Gems caching volume from the `docker-compose.yml` file.
To update existing `docker-compose.yml` to new changes, refer to the following:
```diff
dawarich_app:
image: freikin/dawarich:latest
...
volumes:
- - dawarich_gem_cache_app:/usr/local/bundle/gems
...
dawarich_sidekiq:
image: freikin/dawarich:latest
...
volumes:
- - dawarich_gem_cache_app:/usr/local/bundle/gems
...
volumes:
dawarich_db_data:
- dawarich_gem_cache_app:
- dawarich_gem_cache_sidekiq:
dawarich_shared:
dawarich_public:
dawarich_watched:
```
### Changed
- `GET /api/v1/health` endpoint now returns a `X-Dawarich-Response: Hey, I'm alive and authenticated!` header if the user is authenticated.
# 0.22.0 - 2025-01-09
⚠️ This release introduces a breaking change. ⚠️
Please read these release notes carefully before upgrading.
Docker-related files were moved to the `docker` directory and some of them were renamed. Before upgrading, study carefully changes in the `docker/docker-compose.yml` file and update your docker-compose file accordingly, so it uses the new files and commands. Copying `docker/docker-compose.yml` blindly may lead to errors.
No volumes were removed or renamed, so with a proper docker-compose file, you should be able to upgrade without any issues.
To update existing `docker-compose.yml` to new changes, refer to the following:
```diff
dawarich_app:
image: freikin/dawarich:latest
...
- entrypoint: dev-entrypoint.sh
- command: ['bin/dev']
+ entrypoint: web-entrypoint.sh
+ command: ['bin/rails', 'server', '-p', '3000', '-b', '::']
...
dawarich_sidekiq:
image: freikin/dawarich:latest
...
- entrypoint: dev-entrypoint.sh
- command: ['bin/dev']
+ entrypoint: sidekiq-entrypoint.sh
+ command: ['bundle', 'exec', 'sidekiq']
```
Although `docker-compose.production.yml` was added, it's not being used by default. It's just an example of how to configure Dawarich for production. The default `docker-compose.yml` file is still recommended for running the app.
### Changed
- All docker-related files were moved to the `docker` directory.
- Default memory limit for `dawarich_app` and `dawarich_sidekiq` services was increased to 4GB.
- `dawarich_app` and `dawarich_sidekiq` services now use separate entrypoint scripts.
- Gems (dependency libraries) are now being shipped as part of the Dawarich Docker image.
### Fixed
- Visit suggesting job does nothing if user has no tracked points.
- `BulkStatsCalculationJob` now being called without arguments in the data migration.
### Added
- A proper production Dockerfile, docker-compose and env files.
# 0.21.6 - 2025-01-07
### Changed
- Disabled visit suggesting job after import.
- Improved performance of the `User#years_tracked` method.
### Fixed
- Inconsistent password for the `dawarich_db` service in `docker-compose_mounted_volumes.yml`. #605
- Points are now being rendered with higher z-index than polylines. #577
- Run cache cleaning and preheating jobs only on server start. #594
# 0.21.5 - 2025-01-07
You may now use Geoapify API for reverse geocoding. To obtain an API key, sign up at https://myprojects.geoapify.com/ and create a new project. Make sure you have read and understood the [pricing policy](https://www.geoapify.com/pricing) and [Terms and Conditions](https://www.geoapify.com/terms-and-conditions/).
### Added
- Geoapify API support for reverse geocoding. Provide `GEOAPIFY_API_KEY` env var to use it.
### Removed
- Photon ENV vars from the `.env.development` and docker-compose.yml files.
- `APPLICATION_HOST` env var.
- `REVERSE_GEOCODING_ENABLED` env var.
# 0.21.4 - 2025-01-05
### Fixed
- Fixed a bug where Photon API for patreon supporters was not being used for reverse geocoding.
# 0.21.3 - 2025-01-04
### Added
- A notification about Photon API being under heavy load.
### Removed
- The notification about telemetry being enabled.
### Reverted
- ~~Imported points will now be reverse geocoded only after import is finished.~~
# 0.21.2 - 2024-12-25
### Added
- Logging for Immich responses.
- Watcher now supports all data formats that can be imported via web interface.
### Changed
- Imported points will now be reverse geocoded only after import is finished.

View file

@ -1,49 +0,0 @@
FROM ruby:3.3.4-alpine
ENV APP_PATH=/var/app
ENV BUNDLE_VERSION=2.5.9
ENV BUNDLE_PATH=/usr/local/bundle/gems
ENV TMP_PATH=/tmp/
ENV RAILS_LOG_TO_STDOUT=true
ENV RAILS_PORT=3000
# Install dependencies for application
RUN apk -U add --no-cache \
build-base \
git \
postgresql-dev \
postgresql-client \
libxml2-dev \
libxslt-dev \
nodejs \
yarn \
imagemagick \
tzdata \
less \
yaml-dev \
# gcompat for nokogiri on mac m1
gcompat \
&& rm -rf /var/cache/apk/* \
&& mkdir -p $APP_PATH
RUN gem install bundler --version "$BUNDLE_VERSION" \
&& rm -rf $GEM_HOME/cache/*
# Navigate to app directory
WORKDIR $APP_PATH
COPY Gemfile Gemfile.lock vendor .ruby-version ./
# Install missing gems
RUN bundle config set --local path 'vendor/bundle' \
&& bundle install --jobs 20 --retry 5
COPY . ./
# Copy entrypoint scripts and grant execution permissions
COPY ./dev-docker-entrypoint.sh /usr/local/bin/dev-entrypoint.sh
RUN chmod +x /usr/local/bin/dev-entrypoint.sh
EXPOSE $RAILS_PORT
ENTRYPOINT [ "bundle", "exec" ]

View file

@ -1,41 +0,0 @@
FROM ruby:3.3.4-alpine
ENV APP_PATH=/var/app
ENV BUNDLE_VERSION=2.5.9
ENV BUNDLE_PATH=/usr/local/bundle/gems
ENV TMP_PATH=/tmp/
ENV RAILS_LOG_TO_STDOUT=true
ENV RAILS_PORT=3000
# install dependencies for application
RUN apk -U add --no-cache \
build-base \
git \
postgresql-dev \
postgresql-client \
libxml2-dev \
libxslt-dev \
nodejs \
yarn \
imagemagick \
tzdata \
less \
# gcompat for nokogiri on mac m1
gcompat \
&& rm -rf /var/cache/apk/* \
&& mkdir -p $APP_PATH
RUN gem install bundler --version "$BUNDLE_VERSION" \
&& rm -rf $GEM_HOME/cache/*
# copy entrypoint scripts and grant execution permissions
COPY ./dev-docker-entrypoint.sh /usr/local/bin/dev-entrypoint.sh
COPY ./test-docker-entrypoint.sh /usr/local/bin/test-entrypoint.sh
RUN chmod +x /usr/local/bin/dev-entrypoint.sh && chmod +x /usr/local/bin/test-entrypoint.sh
# navigate to app directory
WORKDIR $APP_PATH
EXPOSE $RAILS_PORT
ENTRYPOINT [ "bundle", "exec" ]

View file

@ -88,15 +88,15 @@ GEM
base64 (0.2.0)
bcrypt (3.1.20)
benchmark (0.4.0)
bigdecimal (3.1.8)
bigdecimal (3.1.9)
bootsnap (1.18.4)
msgpack (~> 1.2)
builder (3.3.0)
byebug (11.1.3)
chartkick (5.1.2)
coderay (1.1.3)
concurrent-ruby (1.3.4)
connection_pool (2.4.1)
concurrent-ruby (1.3.5)
connection_pool (2.5.0)
content_disposition (1.0.0)
crack (1.0.0)
bigdecimal
@ -156,7 +156,7 @@ GEM
csv
mini_mime (>= 1.0.0)
multi_xml (>= 0.5.2)
i18n (1.14.6)
i18n (1.14.7)
concurrent-ruby (~> 1.0)
importmap-rails (2.1.0)
actionpack (>= 6.0.0)
@ -166,7 +166,7 @@ GEM
irb (1.14.3)
rdoc (>= 4.0.0)
reline (>= 0.4.2)
json (2.7.4)
json (2.9.1)
json-schema (5.0.1)
addressable (~> 2.8)
kaminari (1.2.2)
@ -182,13 +182,13 @@ GEM
kaminari-core (= 1.2.2)
kaminari-core (1.2.2)
language_server-protocol (3.17.0.3)
logger (1.6.4)
logger (1.6.5)
lograge (0.14.0)
actionpack (>= 4)
activesupport (>= 4)
railties (>= 4)
request_store (~> 1.0)
loofah (2.23.1)
loofah (2.24.0)
crass (~> 1.0.2)
nokogiri (>= 1.12.0)
mail (2.8.1)
@ -199,6 +199,7 @@ GEM
marcel (1.0.4)
method_source (1.1.0)
mini_mime (1.1.5)
mini_portile2 (2.8.8)
minitest (5.25.4)
msgpack (1.7.3)
multi_xml (0.7.1)
@ -213,26 +214,27 @@ GEM
net-smtp (0.5.0)
net-protocol
nio4r (2.7.4)
nokogiri (1.17.2-aarch64-linux)
nokogiri (1.18.2)
mini_portile2 (~> 2.8.2)
racc (~> 1.4)
nokogiri (1.17.2-arm-linux)
nokogiri (1.18.2-aarch64-linux-gnu)
racc (~> 1.4)
nokogiri (1.17.2-arm64-darwin)
nokogiri (1.18.2-arm-linux-gnu)
racc (~> 1.4)
nokogiri (1.17.2-x86-linux)
nokogiri (1.18.2-arm64-darwin)
racc (~> 1.4)
nokogiri (1.17.2-x86_64-darwin)
nokogiri (1.18.2-x86_64-darwin)
racc (~> 1.4)
nokogiri (1.17.2-x86_64-linux)
nokogiri (1.18.2-x86_64-linux-gnu)
racc (~> 1.4)
oj (3.16.8)
oj (3.16.9)
bigdecimal (>= 3.0)
ostruct (>= 0.2)
optimist (3.2.0)
orm_adapter (0.5.0)
ostruct (0.6.1)
parallel (1.26.3)
parser (3.3.5.0)
parser (3.3.7.0)
ast (~> 2.4.1)
racc
patience_diff (1.2.0)
@ -248,7 +250,7 @@ GEM
pry (>= 0.13, < 0.15)
pry-rails (0.3.11)
pry (>= 0.13.0)
psych (5.2.2)
psych (5.2.3)
date
stringio
public_suffix (6.0.1)
@ -259,9 +261,10 @@ GEM
raabro (1.4.0)
racc (1.8.1)
rack (3.1.8)
rack-session (2.0.0)
rack-session (2.1.0)
base64 (>= 0.1.0)
rack (>= 3.0.0)
rack-test (2.1.0)
rack-test (2.2.0)
rack (>= 1.3)
rackup (2.2.1)
rack (>= 3)
@ -296,13 +299,13 @@ GEM
zeitwerk (~> 2.6)
rainbow (3.1.1)
rake (13.2.1)
rdoc (6.10.0)
rdoc (6.11.0)
psych (>= 4.0.0)
redis (5.3.0)
redis-client (>= 0.22.0)
redis-client (0.23.0)
redis-client (0.23.2)
connection_pool
regexp_parser (2.9.2)
regexp_parser (2.10.0)
reline (0.6.0)
io-console (~> 0.5)
request_store (1.7.0)
@ -339,19 +342,19 @@ GEM
rswag-ui (2.16.0)
actionpack (>= 5.2, < 8.1)
railties (>= 5.2, < 8.1)
rubocop (1.67.0)
rubocop (1.70.0)
json (~> 2.3)
language_server-protocol (>= 3.17.0)
parallel (~> 1.10)
parser (>= 3.3.0.2)
rainbow (>= 2.2.2, < 4.0)
regexp_parser (>= 2.4, < 3.0)
rubocop-ast (>= 1.32.2, < 2.0)
regexp_parser (>= 2.9.3, < 3.0)
rubocop-ast (>= 1.36.2, < 2.0)
ruby-progressbar (~> 1.7)
unicode-display_width (>= 2.4.0, < 3.0)
rubocop-ast (1.32.3)
unicode-display_width (>= 2.4.0, < 4.0)
rubocop-ast (1.37.0)
parser (>= 3.3.1.0)
rubocop-rails (2.27.0)
rubocop-rails (2.29.0)
activesupport (>= 4.2.0)
rack (>= 1.1)
rubocop (>= 1.52.0, < 2.0)
@ -368,7 +371,7 @@ GEM
logger
rack (>= 2.2.4)
redis-client (>= 0.22.2)
sidekiq-cron (2.0.1)
sidekiq-cron (2.1.0)
cronex (>= 0.13.0)
fugit (~> 1.8, >= 1.11.1)
globalid (>= 1.0.1)
@ -393,19 +396,19 @@ GEM
stringio (3.1.2)
strong_migrations (2.1.0)
activerecord (>= 6.1)
super_diff (0.14.0)
super_diff (0.15.0)
attr_extras (>= 6.2.4)
diff-lcs
patience_diff
tailwindcss-rails (3.0.0)
tailwindcss-rails (3.3.0)
railties (>= 7.0.0)
tailwindcss-ruby
tailwindcss-ruby (3.4.14)
tailwindcss-ruby (3.4.14-aarch64-linux)
tailwindcss-ruby (3.4.14-arm-linux)
tailwindcss-ruby (3.4.14-arm64-darwin)
tailwindcss-ruby (3.4.14-x86_64-darwin)
tailwindcss-ruby (3.4.14-x86_64-linux)
tailwindcss-ruby (3.4.17)
tailwindcss-ruby (3.4.17-aarch64-linux)
tailwindcss-ruby (3.4.17-arm-linux)
tailwindcss-ruby (3.4.17-arm64-darwin)
tailwindcss-ruby (3.4.17-x86_64-darwin)
tailwindcss-ruby (3.4.17-x86_64-linux)
thor (1.3.2)
timeout (0.4.2)
turbo-rails (2.0.11)
@ -414,7 +417,9 @@ GEM
tzinfo (2.0.6)
concurrent-ruby (~> 1.0)
unicode (0.4.4.5)
unicode-display_width (2.6.0)
unicode-display_width (3.1.4)
unicode-emoji (~> 4.0, >= 4.0.4)
unicode-emoji (4.0.4)
uri (1.0.2)
useragent (0.16.11)
warden (1.2.9)

File diff suppressed because one or more lines are too long

View file

@ -1,21 +0,0 @@
/* Ensure fog overlay is positioned relative to the map container */
#fog {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
background: rgba(0, 0, 0, 0.8); /* Adjust the opacity here */
pointer-events: none;
mix-blend-mode: multiply;
z-index: 1000;
}
.unfogged-circle {
position: absolute;
pointer-events: none;
border-radius: 50%;
background: white;
mix-blend-mode: destination-out;
filter: blur(3px); /* Apply no blur to the circles */
}

View file

@ -4,8 +4,12 @@ class Api::V1::HealthController < ApiController
skip_before_action :authenticate_api_key
def index
response.set_header('X-Dawarich-Response', 'Hey, I\'m alive!')
if current_api_user
response.set_header('X-Dawarich-Response', 'Hey, I\'m alive and authenticated!')
else
response.set_header('X-Dawarich-Response', 'Hey, I\'m alive!')
end
render json: { status: 'ok' }
end
end

View file

@ -21,6 +21,20 @@ class Api::V1::PointsController < ApiController
render json: serialized_points
end
def create
Points::CreateJob.perform_later(batch_params, current_api_user.id)
render json: { message: 'Points are being processed' }
end
def update
point = current_api_user.tracked_points.find(params[:id])
point.update(point_params)
render json: point_serializer.new(point).call
end
def destroy
point = current_api_user.tracked_points.find(params[:id])
point.destroy
@ -30,6 +44,14 @@ class Api::V1::PointsController < ApiController
private
def point_params
params.require(:point).permit(:latitude, :longitude)
end
def batch_params
params.permit(locations: [:type, { geometry: {}, properties: {} }], batch: {})
end
def point_serializer
params[:slim] == 'true' ? Api::SlimPointSerializer : Api::PointSerializer
end

View file

@ -27,7 +27,8 @@ class Api::V1::SettingsController < ApiController
:meters_between_routes, :minutes_between_routes, :fog_of_war_meters,
:time_threshold_minutes, :merge_threshold_minutes, :route_opacity,
:preferred_map_layer, :points_rendering_mode, :live_map_enabled,
:immich_url, :immich_api_key, :photoprism_url, :photoprism_api_key
:immich_url, :immich_api_key, :photoprism_url, :photoprism_api_key,
:speed_colored_routes
)
end
end

View file

@ -0,0 +1,7 @@
# frozen_string_literal: true
# Exposes information about the currently authenticated API user.
class Api::V1::UsersController < ApiController
# GET /api/v1/users/me
# Renders the authenticated user (current_api_user, supplied by
# ApiController's API-key authentication) as JSON under the "user" key.
# NOTE(review): this serializes the full user record — confirm no sensitive
# attributes (password digest, API key) leak; a dedicated serializer may be safer.
def me
render json: { user: current_api_user }
end
end

View file

@ -6,7 +6,6 @@ class MapController < ApplicationController
def index
@points = points.where('timestamp >= ? AND timestamp <= ?', start_at, end_at)
@countries_and_cities = CountriesAndCities.new(@points).call
@coordinates =
@points.pluck(:latitude, :longitude, :battery, :altitude, :timestamp, :velocity, :id, :country)
.map { [_1.to_f, _2.to_f, _3.to_s, _4.to_s, _5.to_s, _6.to_s, _7.to_s, _8.to_s] }
@ -14,6 +13,7 @@ class MapController < ApplicationController
@start_at = Time.zone.at(start_at)
@end_at = Time.zone.at(end_at)
@years = (@start_at.year..@end_at.year).to_a
@points_number = @coordinates.count
end
private
@ -36,7 +36,7 @@ class MapController < ApplicationController
@distance ||= 0
@coordinates.each_cons(2) do
@distance += Geocoder::Calculations.distance_between([_1[0], _1[1]], [_2[0], _2[1]])
@distance += Geocoder::Calculations.distance_between([_1[0], _1[1]], [_2[0], _2[1]], units: DISTANCE_UNIT)
end
@distance.round(1)

View file

@ -5,27 +5,34 @@ import consumer from "../channels/consumer";
import { createMarkersArray } from "../maps/markers";
import { createPolylinesLayer } from "../maps/polylines";
import { updatePolylinesOpacity } from "../maps/polylines";
import {
createPolylinesLayer,
updatePolylinesOpacity,
updatePolylinesColors,
calculateSpeed,
getSpeedColor
} from "../maps/polylines";
import { fetchAndDrawAreas } from "../maps/areas";
import { handleAreaCreated } from "../maps/areas";
import { showFlashMessage } from "../maps/helpers";
import { fetchAndDisplayPhotos } from '../maps/helpers';
import { showFlashMessage, fetchAndDisplayPhotos, debounce } from "../maps/helpers";
import { osmMapLayer } from "../maps/layers";
import { osmHotMapLayer } from "../maps/layers";
import { OPNVMapLayer } from "../maps/layers";
import { openTopoMapLayer } from "../maps/layers";
import { cyclOsmMapLayer } from "../maps/layers";
import { esriWorldStreetMapLayer } from "../maps/layers";
import { esriWorldTopoMapLayer } from "../maps/layers";
import { esriWorldImageryMapLayer } from "../maps/layers";
import { esriWorldGrayCanvasMapLayer } from "../maps/layers";
import {
osmMapLayer,
osmHotMapLayer,
OPNVMapLayer,
openTopoMapLayer,
cyclOsmMapLayer,
esriWorldStreetMapLayer,
esriWorldTopoMapLayer,
esriWorldImageryMapLayer,
esriWorldGrayCanvasMapLayer
} from "../maps/layers";
import { countryCodesMap } from "../maps/country_codes";
import "leaflet-draw";
import { initializeFogCanvas, drawFogCanvas, createFogOverlay } from "../maps/fog_of_war";
export default class extends Controller {
static targets = ["container"];
@ -48,11 +55,41 @@ export default class extends Controller {
this.pointsRenderingMode = this.userSettings.points_rendering_mode || "raw";
this.liveMapEnabled = this.userSettings.live_map_enabled || false;
this.countryCodesMap = countryCodesMap();
this.speedColoredPolylines = this.userSettings.speed_colored_routes || false;
this.center = this.markers[this.markers.length - 1] || [52.514568, 13.350111];
this.map = L.map(this.containerTarget).setView([this.center[0], this.center[1]], 14);
// Add scale control
L.control.scale({
position: 'bottomright',
imperial: this.distanceUnit === 'mi',
metric: this.distanceUnit === 'km',
maxWidth: 120
}).addTo(this.map)
// Add stats control
const StatsControl = L.Control.extend({
options: {
position: 'bottomright'
},
onAdd: (map) => {
const div = L.DomUtil.create('div', 'leaflet-control-stats');
const distance = this.element.dataset.distance || '0';
const pointsNumber = this.element.dataset.points_number || '0';
const unit = this.distanceUnit === 'mi' ? 'mi' : 'km';
div.innerHTML = `${distance} ${unit} | ${pointsNumber} points`;
div.style.backgroundColor = 'white';
div.style.padding = '0 5px';
div.style.marginRight = '5px';
div.style.display = 'inline-block';
return div;
}
});
new StatsControl().addTo(this.map);
// Set the maximum bounds to prevent infinite scroll
var southWest = L.latLng(-120, -210);
var northEast = L.latLng(120, 210);
@ -60,13 +97,16 @@ export default class extends Controller {
this.map.setMaxBounds(bounds);
this.markersArray = createMarkersArray(this.markers, this.userSettings);
this.markersArray = createMarkersArray(this.markers, this.userSettings, this.apiKey);
this.markersLayer = L.layerGroup(this.markersArray);
this.heatmapMarkers = this.markersArray.map((element) => [element._latlng.lat, element._latlng.lng, 0.2]);
this.polylinesLayer = createPolylinesLayer(this.markers, this.map, this.timezone, this.routeOpacity, this.userSettings, this.distanceUnit);
this.heatmapLayer = L.heatLayer(this.heatmapMarkers, { radius: 20 }).addTo(this.map);
this.fogOverlay = L.layerGroup(); // Initialize fog layer
// Create a proper Leaflet layer for fog
this.fogOverlay = createFogOverlay();
this.areasLayer = L.layerGroup(); // Initialize areas layer
this.photoMarkers = L.layerGroup();
@ -76,26 +116,53 @@ export default class extends Controller {
this.addSettingsButton();
}
// Initialize layers for the layer control
const controlsLayer = {
Points: this.markersLayer,
Polylines: this.polylinesLayer,
Routes: this.polylinesLayer,
Heatmap: this.heatmapLayer,
"Fog of War": this.fogOverlay,
"Fog of War": new this.fogOverlay(),
"Scratch map": this.scratchLayer,
Areas: this.areasLayer,
Photos: this.photoMarkers
};
// Add scale control to bottom right
L.control.scale({
position: 'bottomright',
imperial: this.distanceUnit === 'mi',
metric: this.distanceUnit === 'km',
maxWidth: 120
}).addTo(this.map)
// Initialize layer control first
this.layerControl = L.control.layers(this.baseMaps(), controlsLayer).addTo(this.map);
// Add the toggle panel button
this.addTogglePanelButton();
// Check if we should open the panel based on localStorage or URL params
const urlParams = new URLSearchParams(window.location.search);
const isPanelOpen = localStorage.getItem('mapPanelOpen') === 'true';
const hasDateParams = urlParams.has('start_at') && urlParams.has('end_at');
// Always create the panel first
this.toggleRightPanel();
// Then hide it if it shouldn't be open
if (!isPanelOpen && !hasDateParams) {
const panel = document.querySelector('.leaflet-right-panel');
if (panel) {
panel.style.display = 'none';
localStorage.setItem('mapPanelOpen', 'false');
}
}
// Update event handlers
this.map.on('moveend', () => {
if (document.getElementById('fog')) {
this.updateFog(this.markers, this.clearFogRadius);
}
});
this.map.on('zoomend', () => {
if (document.getElementById('fog')) {
this.updateFog(this.markers, this.clearFogRadius);
}
});
// Fetch and draw areas when the map is loaded
fetchAndDrawAreas(this.areasLayer, this.apiKey);
@ -173,26 +240,6 @@ export default class extends Controller {
if (this.liveMapEnabled) {
this.setupSubscription();
}
// Add the toggle panel button
this.addTogglePanelButton();
// Check if we should open the panel based on localStorage or URL params
const urlParams = new URLSearchParams(window.location.search);
const isPanelOpen = localStorage.getItem('mapPanelOpen') === 'true';
const hasDateParams = urlParams.has('start_at') && urlParams.has('end_at');
// Always create the panel first
this.toggleRightPanel();
// Then hide it if it shouldn't be open
if (!isPanelOpen && !hasDateParams) {
const panel = document.querySelector('.leaflet-right-panel');
if (panel) {
panel.style.display = 'none';
localStorage.setItem('mapPanelOpen', 'false');
}
}
}
disconnect() {
@ -439,7 +486,7 @@ export default class extends Controller {
this.map.removeControl(this.layerControl);
const controlsLayer = {
Points: this.markersLayer,
Polylines: this.polylinesLayer,
Routes: this.polylinesLayer,
Heatmap: this.heatmapLayer,
"Fog of War": this.fogOverlay,
"Scratch map": this.scratchLayer,
@ -491,39 +538,11 @@ export default class extends Controller {
}
updateFog(markers, clearFogRadius) {
var fog = document.getElementById('fog');
fog.innerHTML = ''; // Clear previous circles
markers.forEach((point) => {
const radiusInPixels = this.metersToPixels(this.map, clearFogRadius);
this.clearFog(point[0], point[1], radiusInPixels);
});
}
metersToPixels(map, meters) {
const zoom = map.getZoom();
const latLng = map.getCenter(); // Get map center for correct projection
const metersPerPixel = this.getMetersPerPixel(latLng.lat, zoom);
return meters / metersPerPixel;
}
getMetersPerPixel(latitude, zoom) {
const earthCircumference = 40075016.686; // Earth's circumference in meters
const metersPerPixel = earthCircumference * Math.cos(latitude * Math.PI / 180) / Math.pow(2, zoom + 8);
return metersPerPixel;
}
clearFog(lat, lng, radius) {
var fog = document.getElementById('fog');
var point = this.map.latLngToContainerPoint([lat, lng]);
var size = radius * 2;
var circle = document.createElement('div');
circle.className = 'unfogged-circle';
circle.style.width = size + 'px';
circle.style.height = size + 'px';
circle.style.left = (point.x - radius) + 'px';
circle.style.top = (point.y - radius) + 'px';
circle.style.backdropFilter = 'blur(0px)'; // Remove blur for the circles
fog.appendChild(circle);
const fog = document.getElementById('fog');
if (!fog) {
initializeFogCanvas(this.map);
}
requestAnimationFrame(() => drawFogCanvas(this.map, markers, clearFogRadius));
}
initializeDrawControl() {
@ -677,6 +696,12 @@ export default class extends Controller {
<input type="checkbox" id="live_map_enabled" name="live_map_enabled" class='w-4' style="width: 20px;" value="false" ${this.liveMapEnabledChecked(true)} />
</label>
<label for="speed_colored_routes">
Speed-colored routes
<label for="speed_colored_routes_info" class="btn-xs join-item inline">?</label>
<input type="checkbox" id="speed_colored_routes" name="speed_colored_routes" class='w-4' style="width: 20px;" ${this.speedColoredRoutesChecked()} />
</label>
<button type="submit">Update</button>
</form>
`;
@ -717,8 +742,13 @@ export default class extends Controller {
}
}
speedColoredRoutesChecked() {
return this.userSettings.speed_colored_routes ? 'checked' : '';
}
updateSettings(event) {
event.preventDefault();
console.log('Form submitted');
fetch(`/api/v1/settings?api_key=${this.apiKey}`, {
method: 'PATCH',
@ -732,12 +762,14 @@ export default class extends Controller {
time_threshold_minutes: event.target.time_threshold_minutes.value,
merge_threshold_minutes: event.target.merge_threshold_minutes.value,
points_rendering_mode: event.target.points_rendering_mode.value,
live_map_enabled: event.target.live_map_enabled.checked
live_map_enabled: event.target.live_map_enabled.checked,
speed_colored_routes: event.target.speed_colored_routes.checked
},
}),
})
.then((response) => response.json())
.then((data) => {
console.log('Settings update response:', data);
if (data.status === 'success') {
showFlashMessage('notice', data.message);
this.updateMapWithNewSettings(data.settings);
@ -748,144 +780,92 @@ export default class extends Controller {
} else {
showFlashMessage('error', data.message);
}
})
.catch(error => {
console.error('Settings update error:', error);
showFlashMessage('error', 'Failed to update settings');
});
}
updateMapWithNewSettings(newSettings) {
const currentLayerStates = this.getLayerControlStates();
// Show loading indicator
const loadingDiv = document.createElement('div');
loadingDiv.className = 'map-loading-overlay';
loadingDiv.innerHTML = '<div class="loading loading-lg">Updating map...</div>';
document.body.appendChild(loadingDiv);
// Update local state with new settings
this.clearFogRadius = parseInt(newSettings.fog_of_war_meters) || 50;
this.routeOpacity = parseFloat(newSettings.route_opacity) || 0.6;
// Preserve existing layer instances if they exist
const preserveLayers = {
Points: this.markersLayer,
Polylines: this.polylinesLayer,
Heatmap: this.heatmapLayer,
"Fog of War": this.fogOverlay,
Areas: this.areasLayer,
};
// Clear all layers except base layers
this.map.eachLayer((layer) => {
if (!(layer instanceof L.TileLayer)) {
this.map.removeLayer(layer);
try {
// Update settings first
if (newSettings.speed_colored_routes !== this.userSettings.speed_colored_routes) {
if (this.polylinesLayer) {
updatePolylinesColors(
this.polylinesLayer,
newSettings.speed_colored_routes
);
}
}
});
// Recreate layers only if they don't exist
this.markersLayer = preserveLayers.Points || L.layerGroup(createMarkersArray(this.markers, newSettings));
this.polylinesLayer = preserveLayers.Polylines || createPolylinesLayer(this.markers, this.map, this.timezone, this.routeOpacity, this.userSettings, this.distanceUnit);
this.heatmapLayer = preserveLayers.Heatmap || L.heatLayer(this.markers.map((element) => [element[0], element[1], 0.2]), { radius: 20 });
this.fogOverlay = preserveLayers["Fog of War"] || L.layerGroup();
this.areasLayer = preserveLayers.Areas || L.layerGroup();
// Redraw areas
fetchAndDrawAreas(this.areasLayer, this.apiKey);
let fogEnabled = false;
document.getElementById('fog').style.display = 'none';
this.map.on('overlayadd', (e) => {
if (e.name === 'Fog of War') {
fogEnabled = true;
document.getElementById('fog').style.display = 'block';
this.updateFog(this.markers, this.clearFogRadius);
if (newSettings.route_opacity !== this.userSettings.route_opacity) {
const newOpacity = parseFloat(newSettings.route_opacity) || 0.6;
if (this.polylinesLayer) {
updatePolylinesOpacity(this.polylinesLayer, newOpacity);
}
}
});
this.map.on('overlayremove', (e) => {
if (e.name === 'Fog of War') {
fogEnabled = false;
document.getElementById('fog').style.display = 'none';
// Update the local settings
this.userSettings = { ...this.userSettings, ...newSettings };
this.routeOpacity = parseFloat(newSettings.route_opacity) || 0.6;
this.clearFogRadius = parseInt(newSettings.fog_of_war_meters) || 50;
// Store current layer states
const layerStates = {
Points: this.map.hasLayer(this.markersLayer),
Routes: this.map.hasLayer(this.polylinesLayer),
Heatmap: this.map.hasLayer(this.heatmapLayer),
"Fog of War": this.map.hasLayer(this.fogOverlay),
"Scratch map": this.map.hasLayer(this.scratchLayer),
Areas: this.map.hasLayer(this.areasLayer),
Photos: this.map.hasLayer(this.photoMarkers)
};
// Remove only the layer control
if (this.layerControl) {
this.map.removeControl(this.layerControl);
}
});
this.map.on('zoomend moveend', () => {
if (fogEnabled) {
this.updateFog(this.markers, this.clearFogRadius);
}
});
// Create new controls layer object
const controlsLayer = {
Points: this.markersLayer || L.layerGroup(),
Routes: this.polylinesLayer || L.layerGroup(),
Heatmap: this.heatmapLayer || L.heatLayer([]),
"Fog of War": new this.fogOverlay(),
"Scratch map": this.scratchLayer || L.layerGroup(),
Areas: this.areasLayer || L.layerGroup(),
Photos: this.photoMarkers || L.layerGroup()
};
this.addLastMarker(this.map, this.markers);
this.addEventListeners();
this.initializeDrawControl();
updatePolylinesOpacity(this.polylinesLayer, this.routeOpacity);
// Re-add the layer control in the same position
this.layerControl = L.control.layers(this.baseMaps(), controlsLayer).addTo(this.map);
this.map.on('overlayadd', (e) => {
if (e.name === 'Areas') {
this.map.addControl(this.drawControl);
}
});
// Restore layer visibility states
Object.entries(layerStates).forEach(([name, wasVisible]) => {
const layer = controlsLayer[name];
if (wasVisible && layer) {
layer.addTo(this.map);
} else if (layer && this.map.hasLayer(layer)) {
this.map.removeLayer(layer);
}
});
this.map.on('overlayremove', (e) => {
if (e.name === 'Areas') {
this.map.removeControl(this.drawControl);
}
});
this.applyLayerControlStates(currentLayerStates);
}
getLayerControlStates() {
const controls = {};
this.map.eachLayer((layer) => {
const layerName = this.getLayerName(layer);
if (layerName) {
controls[layerName] = this.map.hasLayer(layer);
}
});
return controls;
}
getLayerName(layer) {
const controlLayers = {
Points: this.markersLayer,
Polylines: this.polylinesLayer,
Heatmap: this.heatmapLayer,
"Fog of War": this.fogOverlay,
Areas: this.areasLayer,
};
for (const [name, val] of Object.entries(controlLayers)) {
if (val && val.hasLayer && layer && val.hasLayer(layer)) // Check if the group layer contains the current layer
return name;
} catch (error) {
console.error('Error updating map settings:', error);
console.error(error.stack);
} finally {
// Remove loading indicator
setTimeout(() => {
document.body.removeChild(loadingDiv);
}, 500);
}
// Direct instance matching
for (const [name, val] of Object.entries(controlLayers)) {
if (val === layer) return name;
}
return undefined; // Indicate no matching layer name found
}
applyLayerControlStates(states) {
const layerControl = {
Points: this.markersLayer,
Polylines: this.polylinesLayer,
Heatmap: this.heatmapLayer,
"Fog of War": this.fogOverlay,
Areas: this.areasLayer,
};
for (const [name, isVisible] of Object.entries(states)) {
const layer = layerControl[name];
if (isVisible && !this.map.hasLayer(layer)) {
this.map.addLayer(layer);
} else if (this.map.hasLayer(layer)) {
this.map.removeLayer(layer);
}
}
// Ensure the layer control reflects the current state
this.map.removeControl(this.layerControl);
this.layerControl = L.control.layers(this.baseMaps(), layerControl).addTo(this.map);
}
createPhotoMarker(photo) {

View file

@ -43,8 +43,9 @@ export default class extends Controller {
const polyline = L.polyline(points, {
color: 'blue',
opacity: 0.8,
weight: 3,
opacity: 0.8
zIndexOffset: 400
}).addTo(this.map)
this.map.fitBounds(polyline.getBounds(), {

View file

@ -138,7 +138,14 @@ export default class extends Controller {
addMarkers() {
this.coordinates.forEach(coord => {
const marker = L.circleMarker([coord[0], coord[1]], {radius: 4})
const marker = L.circleMarker(
[coord[0], coord[1]],
{
radius: 4,
color: coord[5] < 0 ? "orange" : "blue",
zIndexOffset: 1000
}
)
const popupContent = createPopupContent(coord, this.timezone, this.distanceUnit)
marker.bindPopup(popupContent)
@ -152,8 +159,9 @@ export default class extends Controller {
const points = this.coordinates.map(coord => [coord[0], coord[1]])
const polyline = L.polyline(points, {
color: 'blue',
opacity: 0.8,
weight: 3,
opacity: 0.8
zIndexOffset: 400
})
// Add to polylines layer instead of directly to map
this.polylinesLayer.addTo(this.map)

View file

@ -27,7 +27,15 @@ export default class extends Controller {
addMarkers() {
this.coordinates.forEach((coordinate) => {
L.circleMarker([coordinate[0], coordinate[1]], { radius: 4 }).addTo(this.map);
L.circleMarker(
[coordinate[0], coordinate[1]],
{
radius: 4,
color: coordinate[5] < 0 ? "orange" : "blue",
zIndexOffset: 1000
}
).addTo(this.map);
});
}
}

View file

@ -0,0 +1,108 @@
export function initializeFogCanvas(map) {
  // (Re)create the full-viewport fog-of-war canvas inside the map container.
  // Any previous canvas with id "fog" is discarded first so there is always
  // at most one overlay.
  const existing = document.getElementById('fog');
  if (existing) existing.remove();

  const canvas = document.createElement('canvas');
  canvas.id = 'fog';

  // Pin the canvas to the container's top-left corner, above the tiles but
  // transparent to mouse interaction.
  Object.assign(canvas.style, {
    position: 'absolute',
    top: '0',
    left: '0',
    pointerEvents: 'none',
    zIndex: '400',
  });

  // Match the current pixel size of the map viewport.
  const { x: width, y: height } = map.getSize();
  canvas.width = width;
  canvas.height = height;

  map.getContainer().appendChild(canvas);
  return canvas;
}
export function drawFogCanvas(map, markers, clearFogRadius) {
  // Repaint the fog-of-war overlay: dim the whole viewport with translucent
  // fog, then punch a soft-edged transparent hole around every visited point.
  const canvas = document.getElementById('fog');
  if (!canvas || !(canvas instanceof HTMLCanvasElement)) return;

  const ctx = canvas.getContext('2d');
  if (!ctx) return;

  const { x: width, y: height } = map.getSize();

  // Start from a clean slate, then lay down the light fog layer.
  ctx.clearRect(0, 0, width, height);
  ctx.fillStyle = 'rgba(0, 0, 0, 0.4)';
  ctx.fillRect(0, 0, width, height);

  // destination-out makes subsequent fills erase fog instead of painting.
  ctx.globalCompositeOperation = 'destination-out';

  markers.forEach((point) => {
    const containerPoint = map.latLngToContainerPoint(L.latLng(point[0], point[1]));
    const radiusPx = metersToPixels(map, clearFogRadius);

    // Fully clear out to 85% of the radius, then fade back into fog at the edge.
    const gradient = ctx.createRadialGradient(
      containerPoint.x, containerPoint.y, 0,
      containerPoint.x, containerPoint.y, radiusPx
    );
    gradient.addColorStop(0, 'rgba(255, 255, 255, 1)');
    gradient.addColorStop(0.85, 'rgba(255, 255, 255, 1)');
    gradient.addColorStop(1, 'rgba(255, 255, 255, 0)');

    ctx.fillStyle = gradient;
    ctx.beginPath();
    ctx.arc(containerPoint.x, containerPoint.y, radiusPx, 0, Math.PI * 2);
    ctx.fill();
  });

  // Restore normal compositing for any later drawing on this context.
  ctx.globalCompositeOperation = 'source-over';
}
function metersToPixels(map, meters) {
  // Convert a ground distance in metres to on-screen pixels at the map's
  // current zoom, using the resolution at the map centre as an approximation.
  const centre = map.getCenter();
  const zoomLevel = map.getZoom();
  return meters / getMetersPerPixel(centre.lat, zoomLevel);
}
function getMetersPerPixel(latitude, zoom) {
  // Ground resolution of one tile pixel at the given latitude and zoom
  // (Web Mercator: the circumference shrinks by cos(lat), halves per zoom
  // level, and a zoom-0 tile is 256 = 2^8 pixels wide).
  const EARTH_CIRCUMFERENCE_M = 40075016.686;
  const latitudeRadians = (latitude * Math.PI) / 180;
  const pixelsAroundGlobe = 2 ** (zoom + 8);
  return (EARTH_CIRCUMFERENCE_M * Math.cos(latitudeRadians)) / pixelsAroundGlobe;
}
export function createFogOverlay() {
  // Build a Leaflet layer class that manages the fog-of-war canvas lifecycle.
  // onAdd creates the canvas and keeps it aligned with the container while
  // the map is dragged; onRemove tears both down again.
  return L.Layer.extend({
    // Regular functions (not arrows) so `this` is the layer instance and the
    // drag handler can be stored on it for precise removal later.
    onAdd: function (map) {
      initializeFogCanvas(map);

      this._syncFogPosition = () => {
        const fog = document.getElementById('fog');
        if (fog) {
          // Keep the fog canvas aligned with the map container while dragging.
          const mapPos = map.getContainer().getBoundingClientRect();
          fog.style.left = `${mapPos.left}px`;
          fog.style.top = `${mapPos.top}px`;
        }
      };
      map.on('drag', this._syncFogPosition);
    },

    onRemove: function (map) {
      const fog = document.getElementById('fog');
      if (fog) {
        fog.remove();
      }
      // BUGFIX: a bare map.off('drag') would detach EVERY drag listener on
      // the map (including ones registered elsewhere). Remove only the
      // handler this layer registered.
      if (this._syncFogPosition) {
        map.off('drag', this._syncFogPosition);
        this._syncFogPosition = null;
      }
    }
  });
}

View file

@ -297,3 +297,15 @@ export function createPhotoMarker(photo, userSettings, photoMarkers, apiKey) {
photoMarkers.addLayer(marker);
}
export function debounce(func, wait) {
  // Trailing-edge debounce: every call resets the timer, and `func` fires
  // once, `wait` ms after the most recent call, with the latest arguments.
  let pending;
  return function debounced(...args) {
    clearTimeout(pending);
    pending = setTimeout(() => {
      clearTimeout(pending);
      func(...args);
    }, wait);
  };
}

View file

@ -1,24 +1,164 @@
import { createPopupContent } from "./popups";
export function createMarkersArray(markersData, userSettings) {
if (userSettings.pointsRenderingMode === "simplified") {
return createSimplifiedMarkers(markersData);
} else {
return markersData.map((marker) => {
const [lat, lon] = marker;
export function createMarkersArray(markersData, userSettings, apiKey) {
// Create a canvas renderer
const renderer = L.canvas({ padding: 0.5 });
const popupContent = createPopupContent(marker, userSettings.timezone, userSettings.distanceUnit);
let markerColor = marker[5] < 0 ? "orange" : "blue";
return L.circleMarker([lat, lon], {
radius: 4,
color: markerColor,
zIndexOffset: 1000
}).bindPopup(popupContent, { autoClose: false });
if (userSettings.pointsRenderingMode === "simplified") {
return createSimplifiedMarkers(markersData, renderer);
} else {
return markersData.map((marker, index) => {
const [lat, lon] = marker;
const pointId = marker[6]; // ID is at index 6
const markerColor = marker[5] < 0 ? "orange" : "blue";
return L.marker([lat, lon], {
icon: L.divIcon({
className: 'custom-div-icon',
html: `<div style='background-color: ${markerColor}; width: 8px; height: 8px; border-radius: 50%;'></div>`,
iconSize: [8, 8],
iconAnchor: [4, 4]
}),
draggable: true,
autoPan: true,
pointIndex: index,
pointId: pointId,
originalLat: lat,
originalLng: lon,
markerData: marker, // Store the complete marker data
renderer: renderer
}).bindPopup(createPopupContent(marker, userSettings.timezone, userSettings.distanceUnit))
.on('dragstart', function(e) {
this.closePopup();
})
.on('drag', function(e) {
const newLatLng = e.target.getLatLng();
const map = e.target._map;
const pointIndex = e.target.options.pointIndex;
const originalLat = e.target.options.originalLat;
const originalLng = e.target.options.originalLng;
// Find polylines by iterating through all map layers
map.eachLayer((layer) => {
// Check if this is a LayerGroup containing polylines
if (layer instanceof L.LayerGroup) {
layer.eachLayer((featureGroup) => {
if (featureGroup instanceof L.FeatureGroup) {
featureGroup.eachLayer((segment) => {
if (segment instanceof L.Polyline) {
const coords = segment.getLatLngs();
const tolerance = 0.0000001;
let updated = false;
// Check and update start point
if (Math.abs(coords[0].lat - originalLat) < tolerance &&
Math.abs(coords[0].lng - originalLng) < tolerance) {
coords[0] = newLatLng;
updated = true;
}
// Check and update end point
if (Math.abs(coords[1].lat - originalLat) < tolerance &&
Math.abs(coords[1].lng - originalLng) < tolerance) {
coords[1] = newLatLng;
updated = true;
}
// Only update if we found a matching endpoint
if (updated) {
segment.setLatLngs(coords);
segment.redraw();
}
}
});
}
});
}
});
// Update the marker's original position for the next drag event
e.target.options.originalLat = newLatLng.lat;
e.target.options.originalLng = newLatLng.lng;
})
.on('dragend', function(e) {
const newLatLng = e.target.getLatLng();
const pointId = e.target.options.pointId;
const pointIndex = e.target.options.pointIndex;
const originalMarkerData = e.target.options.markerData;
fetch(`/api/v1/points/${pointId}`, {
method: 'PATCH',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
'Authorization': `Bearer ${apiKey}`
},
body: JSON.stringify({
point: {
latitude: newLatLng.lat.toString(),
longitude: newLatLng.lng.toString()
}
})
})
.then(response => {
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}`);
}
return response.json();
})
.then(data => {
const map = e.target._map;
if (map && map.mapsController && map.mapsController.markers) {
const markers = map.mapsController.markers;
if (markers[pointIndex]) {
markers[pointIndex][0] = parseFloat(data.latitude);
markers[pointIndex][1] = parseFloat(data.longitude);
}
}
// Create updated marker data array
const updatedMarkerData = [
parseFloat(data.latitude),
parseFloat(data.longitude),
originalMarkerData[2], // battery
originalMarkerData[3], // altitude
originalMarkerData[4], // timestamp
originalMarkerData[5], // velocity
data.id, // id
originalMarkerData[7] // country
];
// Update the marker's stored data
e.target.options.markerData = updatedMarkerData;
// Update the popup content
if (this._popup) {
const updatedPopupContent = createPopupContent(
updatedMarkerData,
userSettings.timezone,
userSettings.distanceUnit
);
this.setPopupContent(updatedPopupContent);
}
})
.catch(error => {
console.error('Error updating point:', error);
this.setLatLng([e.target.options.originalLat, e.target.options.originalLng]);
alert('Failed to update point position. Please try again.');
});
});
});
}
}
export function createSimplifiedMarkers(markersData) {
// Helper function to check if a point is connected to a polyline endpoint
function isConnectedToPoint(latLng, originalPoint, tolerance) {
  // `originalPoint` is a [lat, lng] array; treat the two positions as
  // connected when both coordinates differ by less than `tolerance`.
  const [pointLat, pointLng] = originalPoint;
  return (
    Math.abs(latLng.lat - pointLat) < tolerance &&
    Math.abs(latLng.lng - pointLng) < tolerance
  );
}
export function createSimplifiedMarkers(markersData, renderer) {
const distanceThreshold = 50; // meters
const timeThreshold = 20000; // milliseconds (3 seconds)
@ -30,7 +170,6 @@ export function createSimplifiedMarkers(markersData) {
if (index === 0) return; // Skip the first marker
const [prevLat, prevLon, prevTimestamp] = previousMarker;
const [currLat, currLon, currTimestamp] = currentMarker;
const timeDiff = currTimestamp - prevTimestamp;
const distance = haversineDistance(prevLat, prevLon, currLat, currLon, 'km') * 1000; // Convert km to meters
@ -47,6 +186,25 @@ export function createSimplifiedMarkers(markersData) {
const [lat, lon] = marker;
const popupContent = createPopupContent(marker);
let markerColor = marker[5] < 0 ? "orange" : "blue";
return L.circleMarker([lat, lon], { radius: 4, color: markerColor }).bindPopup(popupContent);
// Use L.marker instead of L.circleMarker for better drag support
return L.marker([lat, lon], {
icon: L.divIcon({
className: 'custom-div-icon',
html: `<div style='background-color: ${markerColor}; width: 8px; height: 8px; border-radius: 50%;'></div>`,
iconSize: [8, 8],
iconAnchor: [4, 4]
}),
draggable: true,
autoPan: true
}).bindPopup(popupContent)
.on('dragstart', function(e) {
this.closePopup();
})
.on('dragend', function(e) {
const newLatLng = e.target.getLatLng();
this.setLatLng(newLatLng);
this.openPopup();
});
});
}

View file

@ -1,15 +1,152 @@
import { formatDate } from "../maps/helpers";
import { formatDistance } from "../maps/helpers";
import { getUrlParameter } from "../maps/helpers";
import { minutesToDaysHoursMinutes } from "../maps/helpers";
import { haversineDistance } from "../maps/helpers";
export function addHighlightOnHover(polyline, map, polylineCoordinates, userSettings, distanceUnit) {
const originalStyle = { color: "blue", opacity: userSettings.routeOpacity, weight: 3 };
const highlightStyle = { color: "yellow", opacity: 1, weight: 5 };
function pointToLineDistance(point, lineStart, lineEnd) {
  // Euclidean distance (in degrees; lat/lng treated as a flat plane) from
  // `point` to the segment lineStart→lineEnd. Adequate for short segments;
  // this is NOT a geodesic distance.
  // NOTE: removes a stray `polyline.setStyle(originalStyle);` statement that
  // had been left interleaved mid-function (merge/diff artifact).
  const px = point.lat, py = point.lng;
  const ax = lineStart.lat, ay = lineStart.lng;
  const bx = lineEnd.lat, by = lineEnd.lng;

  const abx = bx - ax, aby = by - ay;
  const apx = px - ax, apy = py - ay;

  // Projection parameter of the point onto the infinite line; a degenerate
  // zero-length segment falls through to the lineStart endpoint.
  const segLenSq = abx * abx + aby * aby;
  const t = segLenSq === 0 ? -1 : (apx * abx + apy * aby) / segLenSq;

  let closestX, closestY;
  if (t < 0) {
    closestX = ax;
    closestY = ay;
  } else if (t > 1) {
    closestX = bx;
    closestY = by;
  } else {
    closestX = ax + t * abx;
    closestY = ay + t * aby;
  }

  return Math.hypot(px - closestX, py - closestY);
}
export function calculateSpeed(point1, point2) {
  // Average speed in km/h between two track points. Points are arrays with
  // [lat, lng, ...] at indices 0-1 and a Unix timestamp (seconds) at index 4.
  if (!point1 || !point2 || !point1[4] || !point2[4]) {
    console.warn('Invalid points for speed calculation:', { point1, point2 });
    return 0;
  }

  const km = haversineDistance(point1[0], point1[1], point2[0], point2[1]);
  const seconds = point2[4] - point1[4];

  // No movement, or a non-positive time delta, means no measurable speed.
  if (seconds <= 0 || km <= 0) return 0;

  const SECONDS_PER_HOUR = 3600;
  const MAX_SPEED_KMH = 150; // clamp GPS glitches to a plausible ceiling
  return Math.min((km / seconds) * SECONDS_PER_HOUR, MAX_SPEED_KMH);
}
// Optimize getSpeedColor by pre-calculating color stops
const colorStops = [
{ speed: 0, color: '#00ff00' }, // Stationary/very slow (green)
{ speed: 15, color: '#00ffff' }, // Walking/jogging (cyan)
{ speed: 30, color: '#ff00ff' }, // Cycling/slow driving (magenta)
{ speed: 50, color: '#ffff00' }, // Urban driving (yellow)
{ speed: 100, color: '#ff3300' } // Highway driving (red)
].map(stop => ({
...stop,
rgb: hexToRGB(stop.color)
}));
export function getSpeedColor(speedKmh, useSpeedColors) {
  // Map a speed to a colour by linear interpolation between the
  // pre-computed module-level `colorStops`. Falls back to a uniform blue
  // when the speed-colouring preference is off.
  if (!useSpeedColors) return '#0000ff';

  for (let i = 1; i < colorStops.length; i++) {
    const upper = colorStops[i];
    if (speedKmh > upper.speed) continue;

    const lower = colorStops[i - 1];
    const t = (speedKmh - lower.speed) / (upper.speed - lower.speed);
    const channel = (key) =>
      Math.round(lower.rgb[key] + (upper.rgb[key] - lower.rgb[key]) * t);
    return `rgb(${channel('r')}, ${channel('g')}, ${channel('b')})`;
  }

  // Faster than the last stop: saturate at the top colour.
  return colorStops[colorStops.length - 1].color;
}
// Helper function to convert hex to RGB
function hexToRGB(hex) {
  // "#rrggbb" → { r, g, b } with integer channels in 0-255.
  const channelAt = (offset) => parseInt(hex.substring(offset, offset + 2), 16);
  return { r: channelAt(1), g: channelAt(3), b: channelAt(5) };
}
// Add new function for batch processing
function processInBatches(items, batchSize, processFn) {
  // Apply `processFn` to every truthy item, yielding to the event loop
  // between frames so large collections don't freeze the UI.
  // NOTE: `items` is emptied in place once everything has been processed.
  let cursor = 0;
  const total = items.length;
  const FRAME_BUDGET_MS = 32; // roughly two frames at 60fps
  const MAX_PER_FRAME = 500;

  const pump = () => {
    const frameStart = performance.now();
    let handledThisFrame = 0;

    while (cursor < total && handledThisFrame < MAX_PER_FRAME) {
      const batchEnd = Math.min(cursor + batchSize, total);
      for (let i = cursor; i < batchEnd; i++) {
        if (items[i]) processFn(items[i]); // skip holes / null entries
      }
      handledThisFrame += batchEnd - cursor;
      cursor = batchEnd;

      if (performance.now() - frameStart > FRAME_BUDGET_MS) break;
    }

    if (cursor < total) {
      setTimeout(pump, 0);
    } else {
      // Only drain the source array after all processing is complete.
      items.length = 0;
    }
  };

  pump();
}
export function addHighlightOnHover(polylineGroup, map, polylineCoordinates, userSettings, distanceUnit) {
const startPoint = polylineCoordinates[0];
const endPoint = polylineCoordinates[polylineCoordinates.length - 1];
@ -28,70 +165,234 @@ export function addHighlightOnHover(polyline, map, polylineCoordinates, userSett
const startIcon = L.divIcon({ html: "🚥", className: "emoji-icon" });
const finishIcon = L.divIcon({ html: "🏁", className: "emoji-icon" });
const isDebugMode = getUrlParameter("debug") === "true";
let popupContent = `
<strong>Start:</strong> ${firstTimestamp}<br>
<strong>End:</strong> ${lastTimestamp}<br>
<strong>Duration:</strong> ${timeOnRoute}<br>
<strong>Total Distance:</strong> ${formatDistance(totalDistance, distanceUnit)}<br>
`;
if (isDebugMode) {
const prevPoint = polylineCoordinates[0];
const nextPoint = polylineCoordinates[polylineCoordinates.length - 1];
const distanceToPrev = haversineDistance(prevPoint[0], prevPoint[1], startPoint[0], startPoint[1]);
const distanceToNext = haversineDistance(endPoint[0], endPoint[1], nextPoint[0], nextPoint[1]);
const timeBetweenPrev = Math.round((startPoint[4] - prevPoint[4]) / 60);
const timeBetweenNext = Math.round((endPoint[4] - nextPoint[4]) / 60);
const pointsNumber = polylineCoordinates.length;
popupContent += `
<strong>Prev Route:</strong> ${Math.round(distanceToPrev)}m and ${minutesToDaysHoursMinutes(timeBetweenPrev)} away<br>
<strong>Next Route:</strong> ${Math.round(distanceToNext)}m and ${minutesToDaysHoursMinutes(timeBetweenNext)} away<br>
<strong>Points:</strong> ${pointsNumber}<br>
`;
}
const startMarker = L.marker([startPoint[0], startPoint[1]], { icon: startIcon }).bindPopup(`Start: ${firstTimestamp}`);
const endMarker = L.marker([endPoint[0], endPoint[1]], { icon: finishIcon }).bindPopup(popupContent);
const startMarker = L.marker([startPoint[0], startPoint[1]], { icon: startIcon });
const endMarker = L.marker([endPoint[0], endPoint[1]], { icon: finishIcon });
let hoverPopup = null;
let clickedLayer = null;
// Add events to both group and individual polylines
polylineGroup.eachLayer((layer) => {
if (layer instanceof L.Polyline) {
layer.on("mouseover", function (e) {
handleMouseOver(e);
});
layer.on("mouseout", function (e) {
handleMouseOut(e);
});
layer.on("click", function (e) {
handleClick(e);
});
}
});
function handleMouseOver(e) {
// Handle both direct layer events and group propagated events
const layer = e.layer || e.target;
let speed = 0;
if (layer instanceof L.Polyline) {
// Get the coordinates array from the layer
const coords = layer.getLatLngs();
if (coords && coords.length >= 2) {
const startPoint = coords[0];
const endPoint = coords[coords.length - 1];
// Find the corresponding markers for these coordinates
const startMarkerData = polylineCoordinates.find(m =>
m[0] === startPoint.lat && m[1] === startPoint.lng
);
const endMarkerData = polylineCoordinates.find(m =>
m[0] === endPoint.lat && m[1] === endPoint.lng
);
// Calculate speed if we have both markers
if (startMarkerData && endMarkerData) {
speed = startMarkerData[5] || endMarkerData[5] || 0;
}
}
}
// Don't apply hover styles if this is the clicked layer
if (!clickedLayer) {
// Apply style to all segments in the group
polylineGroup.eachLayer((segment) => {
if (segment instanceof L.Polyline) {
const newStyle = {
weight: 8,
opacity: 1
};
// Only change color if speed-colored routes are not enabled
if (!userSettings.speed_colored_routes) {
newStyle.color = 'yellow'; // Highlight color
}
segment.setStyle(newStyle);
}
});
startMarker.addTo(map);
endMarker.addTo(map);
const popupContent = `
<strong>Start:</strong> ${firstTimestamp}<br>
<strong>End:</strong> ${lastTimestamp}<br>
<strong>Duration:</strong> ${timeOnRoute}<br>
<strong>Total Distance:</strong> ${formatDistance(totalDistance, distanceUnit)}<br>
<strong>Current Speed:</strong> ${Math.round(speed)} km/h
`;
if (hoverPopup) {
map.closePopup(hoverPopup);
}
hoverPopup = L.popup()
.setLatLng(e.latlng)
.setContent(popupContent)
.openOn(map);
}
}
function handleMouseOut(e) {
// If there's a clicked state, maintain it
if (clickedLayer && polylineGroup.clickedState) {
polylineGroup.eachLayer((layer) => {
if (layer instanceof L.Polyline) {
if (layer === clickedLayer || layer.options.originalPath === clickedLayer.options.originalPath) {
layer.setStyle(polylineGroup.clickedState.style);
}
}
});
return;
}
// Apply normal style only if there's no clicked layer
polylineGroup.eachLayer((layer) => {
if (layer instanceof L.Polyline) {
const originalStyle = {
weight: 3,
opacity: userSettings.route_opacity,
color: layer.options.originalColor
};
layer.setStyle(originalStyle);
}
});
if (hoverPopup && !clickedLayer) {
map.closePopup(hoverPopup);
map.removeLayer(startMarker);
map.removeLayer(endMarker);
}
}
function handleClick(e) {
const newClickedLayer = e.target;
// If clicking the same route that's already clicked, do nothing
if (clickedLayer === newClickedLayer) {
return;
}
// Store reference to previous clicked layer before updating
const previousClickedLayer = clickedLayer;
// Update clicked layer reference
clickedLayer = newClickedLayer;
// Reset previous clicked layer if it exists
if (previousClickedLayer) {
previousClickedLayer.setStyle({
weight: 3,
opacity: userSettings.route_opacity,
color: previousClickedLayer.options.originalColor
});
}
// Define style for clicked state
const clickedStyle = {
weight: 8,
opacity: 1,
color: userSettings.speed_colored_routes ? clickedLayer.options.originalColor : 'yellow'
};
// Apply style to new clicked layer
clickedLayer.setStyle(clickedStyle);
clickedLayer.bringToFront();
// Update clicked state
polylineGroup.clickedState = {
layer: clickedLayer,
style: clickedStyle
};
polyline.on("mouseover", function (e) {
polyline.setStyle(highlightStyle);
startMarker.addTo(map);
endMarker.addTo(map);
const latLng = e.latlng;
const popupContent = `
<strong>Start:</strong> ${firstTimestamp}<br>
<strong>End:</strong> ${lastTimestamp}<br>
<strong>Duration:</strong> ${timeOnRoute}<br>
<strong>Total Distance:</strong> ${formatDistance(totalDistance, distanceUnit)}<br>
<strong>Current Speed:</strong> ${Math.round(clickedLayer.options.speed || 0)} km/h
`;
if (hoverPopup) {
map.closePopup(hoverPopup);
}
hoverPopup = L.popup()
.setLatLng(latLng)
.setContent(popupContent)
.openOn(map);
.setLatLng(e.latlng)
.setContent(popupContent)
.openOn(map);
// Prevent the click event from propagating to the map
L.DomEvent.stopPropagation(e);
}
// Reset highlight when clicking elsewhere on the map
map.on('click', function () {
if (clickedLayer) {
const clickedGroup = clickedLayer.polylineGroup || polylineGroup;
clickedGroup.eachLayer((layer) => {
if (layer instanceof L.Polyline) {
layer.setStyle({
weight: 3,
opacity: userSettings.route_opacity,
color: layer.options.originalColor
});
}
});
clickedLayer = null;
clickedGroup.clickedState = null;
}
if (hoverPopup) {
map.closePopup(hoverPopup);
map.removeLayer(startMarker);
map.removeLayer(endMarker);
}
});
polyline.on("mouseout", function () {
polyline.setStyle(originalStyle);
map.closePopup(hoverPopup);
map.removeLayer(startMarker);
map.removeLayer(endMarker);
});
polyline.on("click", function () {
map.fitBounds(polyline.getBounds());
});
// Close the popup when clicking elsewhere on the map
map.on("click", function () {
map.closePopup(hoverPopup);
});
// Keep the original group events as a fallback
polylineGroup.on("mouseover", handleMouseOver);
polylineGroup.on("mouseout", handleMouseOut);
polylineGroup.on("click", handleClick);
}
export function createPolylinesLayer(markers, map, timezone, routeOpacity, userSettings, distanceUnit) {
// Create a custom pane for our polylines with higher z-index
if (!map.getPane('polylinesPane')) {
map.createPane('polylinesPane');
map.getPane('polylinesPane').style.zIndex = 450; // Above the default overlay pane (400)
}
const renderer = L.canvas({
padding: 0.5,
pane: 'polylinesPane'
});
const splitPolylines = [];
let currentPolyline = [];
const distanceThresholdMeters = parseInt(userSettings.meters_between_routes) || 500;
@ -119,27 +420,150 @@ export function createPolylinesLayer(markers, map, timezone, routeOpacity, userS
splitPolylines.push(currentPolyline);
}
return L.layerGroup(
splitPolylines.map((polylineCoordinates) => {
const latLngs = polylineCoordinates.map((point) => [point[0], point[1]]);
const polyline = L.polyline(latLngs, {
color: "blue",
opacity: 0.6,
weight: 3,
zIndexOffset: 400
// Create the layer group with the polylines
const layerGroup = L.layerGroup(
splitPolylines.map((polylineCoordinates, groupIndex) => {
const segmentGroup = L.featureGroup();
const segments = [];
for (let i = 0; i < polylineCoordinates.length - 1; i++) {
const speed = calculateSpeed(polylineCoordinates[i], polylineCoordinates[i + 1]);
const color = getSpeedColor(speed, userSettings.speed_colored_routes);
const segment = L.polyline(
[
[polylineCoordinates[i][0], polylineCoordinates[i][1]],
[polylineCoordinates[i + 1][0], polylineCoordinates[i + 1][1]]
],
{
renderer: renderer,
color: color,
originalColor: color,
opacity: routeOpacity,
weight: 3,
speed: speed,
interactive: true,
pane: 'polylinesPane',
bubblingMouseEvents: false
}
);
segments.push(segment);
segmentGroup.addLayer(segment);
}
// Add mouseover/mouseout to the entire group
segmentGroup.on('mouseover', function(e) {
L.DomEvent.stopPropagation(e);
segments.forEach(segment => {
segment.setStyle({
weight: 8,
opacity: 1
});
if (map.hasLayer(segment)) {
segment.bringToFront();
}
});
});
addHighlightOnHover(polyline, map, polylineCoordinates, userSettings, distanceUnit);
segmentGroup.on('mouseout', function(e) {
L.DomEvent.stopPropagation(e);
segments.forEach(segment => {
segment.setStyle({
weight: 3,
opacity: routeOpacity,
color: segment.options.originalColor
});
});
});
return polyline;
// Make the group interactive
segmentGroup.options.interactive = true;
segmentGroup.options.bubblingMouseEvents = false;
// Add the hover functionality to the group
addHighlightOnHover(segmentGroup, map, polylineCoordinates, userSettings, distanceUnit);
return segmentGroup;
})
).addTo(map);
);
// Add CSS to ensure our pane receives mouse events
const style = document.createElement('style');
style.textContent = `
.leaflet-polylinesPane-pane {
pointer-events: auto !important;
}
.leaflet-polylinesPane-pane canvas {
pointer-events: auto !important;
}
`;
document.head.appendChild(style);
// Add to map and return
layerGroup.addTo(map);
return layerGroup;
}
export function updatePolylinesOpacity(polylinesLayer, opacity) {
polylinesLayer.eachLayer((layer) => {
if (layer instanceof L.Polyline) {
layer.setStyle({ opacity: opacity });
export function updatePolylinesColors(polylinesLayer, useSpeedColors) {
  // Recolour every route segment: either restore the uniform blue or
  // recompute a per-segment colour from its recorded speed.
  const plainBlue = {
    color: '#0000ff',
    originalColor: '#0000ff'
  };

  // Flatten the nested layer groups into one list of polyline segments.
  const allSegments = [];
  polylinesLayer.eachLayer((group) => {
    if (!(group instanceof L.LayerGroup)) return;
    group.eachLayer((candidate) => {
      if (candidate instanceof L.Polyline) allSegments.push(candidate);
    });
  });

  // One shared style object keeps per-segment allocations (and GC churn) down.
  const speedStyle = {};

  processInBatches(allSegments, 200, (segment) => {
    try {
      if (!useSpeedColors) {
        segment.setStyle(plainBlue);
        return;
      }
      const newColor = getSpeedColor(segment.options.speed || 0, true);
      speedStyle.color = newColor;
      speedStyle.originalColor = newColor;
      segment.setStyle(speedStyle);
    } catch (error) {
      console.error('Error processing segment:', error);
    }
  });
}
/**
 * Applies a new opacity to every polyline segment contained in the layer.
 *
 * @param {L.LayerGroup} polylinesLayer - group of per-route L.LayerGroups.
 * @param {number} opacity - target opacity (0..1).
 */
export function updatePolylinesOpacity(polylinesLayer, opacity) {
  const allSegments = [];

  // Walk the nested groups and gather every polyline segment first.
  polylinesLayer.eachLayer((routeGroup) => {
    if (!(routeGroup instanceof L.LayerGroup)) return;
    routeGroup.eachLayer((candidate) => {
      if (candidate instanceof L.Polyline) allSegments.push(candidate);
    });
  });

  // Restyle in batches of 50 so large maps stay responsive.
  processInBatches(allSegments, 50, (segment) => {
    segment.setStyle({ opacity: opacity });
  });
}

View file

@ -8,12 +8,15 @@ export function createPopupContent(marker, timezone, distanceUnit) {
marker[3] = marker[3] * 3.28084;
}
// convert marker[5] from m/s to km/h and round to nearest integer
marker[5] = Math.round(marker[5] * 3.6);
return `
<strong>Timestamp:</strong> ${formatDate(marker[4], timezone)}<br>
<strong>Latitude:</strong> ${marker[0]}<br>
<strong>Longitude:</strong> ${marker[1]}<br>
<strong>Altitude:</strong> ${marker[3]}m<br>
<strong>Velocity:</strong> ${marker[5]}km/h<br>
<strong>Speed:</strong> ${marker[5]}km/h<br>
<strong>Battery:</strong> ${marker[2]}%<br>
<strong>Id:</strong> ${marker[6]}<br>
<a href="#" data-id="${marker[6]}" class="delete-point">[Delete]</a>

View file

@ -5,7 +5,11 @@ class Cache::PreheatingJob < ApplicationJob
def perform
User.find_each do |user|
Rails.cache.write("dawarich/user_#{user.id}_years_tracked", user.years_tracked, expires_in: 1.day)
Rails.cache.write(
"dawarich/user_#{user.id}_years_tracked",
user.years_tracked,
expires_in: 1.day
)
end
end
end

View file

@ -4,11 +4,10 @@ class Import::GoogleTakeoutJob < ApplicationJob
queue_as :imports
sidekiq_options retry: false
def perform(import_id, json_string)
def perform(import_id, locations, current_index)
locations_batch = Oj.load(locations)
import = Import.find(import_id)
json = Oj.load(json_string)
GoogleMaps::RecordsParser.new(import).call(json)
GoogleMaps::RecordsImporter.new(import, current_index).call(locations_batch)
end
end

View file

@ -0,0 +1,17 @@
# frozen_string_literal: true
# Background job that normalizes a raw point payload via Points::Params
# and bulk-upserts the resulting rows for the given user.
class Points::CreateJob < ApplicationJob
  queue_as :default

  # Rows per upsert statement; keeps individual statements bounded.
  SLICE_SIZE = 1000

  # params  - raw payload accepted by Points::Params.
  # user_id - owner of the resulting points.
  def perform(params, user_id)
    rows = Points::Params.new(params, user_id).call
    rows.each_slice(SLICE_SIZE) { |slice| upsert_slice(slice) }
  end

  private

  # Inserts one slice, relying on the unique (lat, lon, timestamp, user)
  # index to collapse duplicates. Skips model validations by design.
  def upsert_slice(slice)
    Point.upsert_all(
      slice,
      unique_by: %i[latitude longitude timestamp user_id],
      returning: false
    )
  end
end

View file

@ -4,7 +4,7 @@ class ReverseGeocodingJob < ApplicationJob
queue_as :reverse_geocoding
def perform(klass, id)
return unless REVERSE_GEOCODING_ENABLED
return unless DawarichSettings.reverse_geocoding_enabled?
rate_limit_for_photon_api
@ -18,8 +18,8 @@ class ReverseGeocodingJob < ApplicationJob
end
def rate_limit_for_photon_api
return unless PHOTON_API_HOST == 'photon.komoot.io'
return unless DawarichSettings.photon_enabled?
sleep 1 if PHOTON_API_HOST == 'photon.komoot.io'
sleep 1 if DawarichSettings.photon_uses_komoot_io?
end
end

View file

@ -7,6 +7,10 @@ class VisitSuggestingJob < ApplicationJob
def perform(user_ids: [], start_at: 1.day.ago, end_at: Time.current)
users = user_ids.any? ? User.where(id: user_ids) : User.all
users.find_each { Visits::Suggest.new(_1, start_at:, end_at:).call }
users.find_each do |user|
next if user.tracked_points.empty?
Visits::Suggest.new(user, start_at:, end_at:).call
end
end
end

View file

@ -4,6 +4,8 @@ class Import < ApplicationRecord
belongs_to :user
has_many :points, dependent: :destroy
delegate :count, to: :points, prefix: true
include ImportUploader::Attachment(:raw)
enum :source, {

View file

@ -13,7 +13,7 @@ class Place < ApplicationRecord
enum :source, { manual: 0, photon: 1 }
def async_reverse_geocode
return unless REVERSE_GEOCODING_ENABLED
return unless DawarichSettings.reverse_geocoding_enabled?
ReverseGeocodingJob.perform_later(self.class.to_s, id)
end

View file

@ -8,7 +8,11 @@ class Point < ApplicationRecord
belongs_to :user
validates :latitude, :longitude, :timestamp, presence: true
validates :timestamp, uniqueness: {
scope: %i[latitude longitude user_id],
message: 'already has a point at this location and time for this user',
index: true
}
enum :battery_status, { unknown: 0, unplugged: 1, charging: 2, full: 3 }, suffix: true
enum :trigger, {
unknown: 0, background_event: 1, circular_region_event: 2, beacon_event: 3,
@ -33,9 +37,8 @@ class Point < ApplicationRecord
Time.zone.at(timestamp)
end
def async_reverse_geocode(force: false)
return unless REVERSE_GEOCODING_ENABLED
return if import_id.present? && !force
def async_reverse_geocode
return unless DawarichSettings.reverse_geocoding_enabled?
ReverseGeocodingJob.perform_later(self.class.to_s, id)
end

View file

@ -71,15 +71,23 @@ class User < ApplicationRecord
def years_tracked
Rails.cache.fetch("dawarich/user_#{id}_years_tracked", expires_in: 1.day) do
tracked_points
.pluck(:timestamp)
.map { |ts| Time.zone.at(ts) }
.group_by(&:year)
.transform_values do |dates|
dates.map { |date| date.strftime('%b') }.uniq.sort
end
# Use select_all for better performance with large datasets
sql = <<-SQL
SELECT DISTINCT
EXTRACT(YEAR FROM TO_TIMESTAMP(timestamp)) AS year,
TO_CHAR(TO_TIMESTAMP(timestamp), 'Mon') AS month
FROM points
WHERE user_id = #{id}
ORDER BY year DESC, month ASC
SQL
result = ActiveRecord::Base.connection.select_all(sql)
result
.map { |r| [r['year'].to_i, r['month']] }
.group_by { |year, _| year }
.transform_values { |year_data| year_data.map { |_, month| month } }
.map { |year, months| { year: year, months: months } }
.sort_by { |entry| -entry[:year] } # Sort in descending order
end
end

View file

@ -40,7 +40,7 @@ class Visit < ApplicationRecord
end
def async_reverse_geocode
return unless REVERSE_GEOCODING_ENABLED
return unless DawarichSettings.reverse_geocoding_enabled?
return if place.blank?
ReverseGeocodingJob.perform_later('place', place_id)

View file

@ -4,6 +4,7 @@ class Cache::Clean
class << self
def call
Rails.logger.info('Cleaning cache...')
delete_control_flag
delete_version_cache
delete_years_tracked_cache
Rails.logger.info('Cache cleaned')
@ -11,6 +12,10 @@ class Cache::Clean
private
def delete_control_flag
Rails.cache.delete('cache_jobs_scheduled')
end
def delete_version_cache
Rails.cache.delete(CheckAppVersion::VERSION_CACHE_KEY)
end

View file

@ -17,7 +17,10 @@ class CheckAppVersion
def latest_version
Rails.cache.fetch(VERSION_CACHE_KEY, expires_in: 6.hours) do
JSON.parse(Net::HTTP.get(URI.parse(@repo_url)))[0]['name']
versions = JSON.parse(Net::HTTP.get(URI.parse(@repo_url)))
# Find first version that contains only numbers and dots
release_version = versions.find { |v| v['name'].match?(/^\d+\.\d+\.\d+$/) }
release_version ? release_version['name'] : APP_VERSION
end
end
end

View file

@ -0,0 +1,84 @@
# frozen_string_literal: true
# Imports batches of Google Takeout "Records.json" locations as Points,
# bulk-upserting them and broadcasting import progress along the way.
class GoogleMaps::RecordsImporter
  include Imports::Broadcaster
  # Maximum number of rows per upsert statement.
  BATCH_SIZE = 1000
  attr_reader :import, :current_index
  # import        - the Import record the points belong to.
  # current_index - offset of this chunk within the whole export file;
  #                 used for progress reporting only.
  def initialize(import, current_index = 0)
    @import = import
    @batch = []
    @current_index = current_index
  end
  # Accepts a single location hash or an array of them and inserts in
  # BATCH_SIZE slices.
  # NOTE(review): current_index is not advanced between slices, so every
  # broadcast in this call reports the same offset — confirm whether the
  # caller expects per-slice progress.
  def call(locations)
    Array(locations).each_slice(BATCH_SIZE) do |location_batch|
      batch = location_batch.map { prepare_location_data(_1) }
      bulk_insert_points(batch)
      broadcast_import_progress(import, current_index)
    end
  end
  private
  # Maps one raw Takeout record onto Point column values.
  # E7 coordinates are integer degrees scaled by 10^7.
  # rubocop:disable Metrics/MethodLength
  def prepare_location_data(location)
    {
      latitude: location['latitudeE7'].to_f / 10**7,
      longitude: location['longitudeE7'].to_f / 10**7,
      timestamp: parse_timestamp(location),
      altitude: location['altitude'],
      velocity: location['velocity'],
      raw_data: location,
      topic: 'Google Maps Timeline Export',
      tracker_id: 'google-maps-timeline-export',
      import_id: @import.id,
      user_id: @import.user_id,
      created_at: Time.current,
      updated_at: Time.current
    }
  end
  # rubocop:enable Metrics/MethodLength
  # Upserts a deduplicated batch. Errors are surfaced to the user as a
  # notification instead of failing the whole import.
  def bulk_insert_points(batch)
    unique_batch = deduplicate_batch(batch)
    # rubocop:disable Rails/SkipsModelValidations
    Point.upsert_all(
      unique_batch,
      unique_by: %i[latitude longitude timestamp user_id],
      returning: false,
      on_duplicate: :skip
    )
    # rubocop:enable Rails/SkipsModelValidations
  rescue StandardError => e
    create_notification("Failed to process location batch: #{e.message}")
  end
  # Drops rows that collide on the unique index within the same batch —
  # a single statement cannot touch the same key twice.
  def deduplicate_batch(batch)
    batch.uniq do |record|
      [
        record[:latitude].round(7),
        record[:longitude].round(7),
        record[:timestamp],
        record[:user_id]
      ]
    end
  end
  # Newer exports carry 'timestamp'; older ones 'timestampMs'.
  def parse_timestamp(location)
    Timestamps.parse_timestamp(
      location['timestamp'] || location['timestampMs']
    )
  end
  # Records an error notification for the importing user.
  def create_notification(message)
    Notification.create!(
      user: @import.user,
      title: 'Google\'s Records.json Import Error',
      content: message,
      kind: :error
    )
  end
end

View file

@ -1,44 +0,0 @@
# frozen_string_literal: true
# Parses a single Google Takeout Records.json entry and persists it as a
# Point, skipping records that already exist for the same user/coordinates/time.
class GoogleMaps::RecordsParser
  attr_reader :import

  def initialize(import)
    @import = import
  end

  # json - one raw location record from Records.json.
  def call(json)
    attrs = parse_json(json)
    identity = {
      latitude: attrs[:latitude],
      longitude: attrs[:longitude],
      timestamp: attrs[:timestamp],
      user_id: import.user_id
    }
    # Skip exact duplicates (same point, same moment, same user).
    return if Point.exists?(identity)

    Point.create(
      identity.merge(
        raw_data: attrs[:raw_data],
        topic: 'Google Maps Timeline Export',
        tracker_id: 'google-maps-timeline-export',
        import_id: import.id
      )
    )
  end

  private

  # Normalizes the raw JSON into point attributes; E7 coordinates are
  # integer degrees scaled by 10^7.
  def parse_json(json)
    {
      latitude: json['latitudeE7'].to_f / 10**7,
      longitude: json['longitudeE7'].to_f / 10**7,
      timestamp: Timestamps.parse_timestamp(json['timestamp'] || json['timestampMs']),
      altitude: json['altitude'],
      velocity: json['velocity'],
      raw_data: json
    }
  end
end

View file

@ -15,7 +15,7 @@ class Gpx::TrackParser
tracks = json['gpx']['trk']
tracks_arr = tracks.is_a?(Array) ? tracks : [tracks]
tracks_arr.map { parse_track(_1) }.flatten.each.with_index(1) do |point, index|
tracks_arr.map { parse_track(_1) }.flatten.compact.each.with_index(1) do |point, index|
create_point(point, index)
end
end
@ -23,6 +23,8 @@ class Gpx::TrackParser
private
def parse_track(track)
return if track['trkseg'].blank?
segments = track['trkseg']
segments_array = segments.is_a?(Array) ? segments : [segments]

View file

@ -34,7 +34,8 @@ class Immich::RequestPhotos
immich_api_base_url, headers: headers, body: request_body(page)
).body
)
Rails.logger.debug('==== IMMICH RESPONSE ====')
Rails.logger.debug(response)
items = response.dig('assets', 'items')
break if items.blank?

View file

@ -14,8 +14,7 @@ class Imports::Create
create_import_finished_notification(import, user)
schedule_stats_creating(user.id)
schedule_visit_suggesting(user.id, import)
schedule_reverse_geocoding(user.id)
# schedule_visit_suggesting(user.id, import) # Disabled until places & visits are reworked
rescue StandardError => e
create_import_failed_notification(import, user, e)
end
@ -48,10 +47,6 @@ class Imports::Create
VisitSuggestingJob.perform_later(user_ids: [user_id], start_at:, end_at:)
end
def schedule_reverse_geocoding(user_id)
EnqueueBackgroundJob.perform_later('continue_reverse_geocoding', user_id)
end
def create_import_finished_notification(import, user)
Notifications::Create.new(
user:,

View file

@ -22,7 +22,7 @@ class Jobs::Create
end
points.find_each(batch_size: 1_000) do |point|
point.async_reverse_geocode(force: true)
point.async_reverse_geocode
end
end
end

View file

@ -0,0 +1,49 @@
# frozen_string_literal: true
module Points
  # Normalizes an Overland/GeoJSON-style payload into attribute hashes
  # suitable for Point.upsert_all. Entries without coordinates or a
  # timestamp are dropped.
  class Params
    attr_reader :data, :points, :user_id

    # json    - payload containing a :locations array of GeoJSON-like features.
    # user_id - id of the owner of the resulting points.
    def initialize(json, user_id)
      @data = json.with_indifferent_access
      @points = @data[:locations]
      @user_id = user_id
    end

    # Returns an array of point attribute hashes, skipping invalid entries.
    # filter_map replaces the previous map { ... }.compact round-trip.
    def call
      points.filter_map do |point|
        next unless params_valid?(point)

        point_attributes(point)
      end
    end

    private

    # Maps one feature onto Point column values.
    # GeoJSON coordinates arrive as [longitude, latitude].
    def point_attributes(point)
      properties = point[:properties]
      {
        latitude: point[:geometry][:coordinates][1],
        longitude: point[:geometry][:coordinates][0],
        battery_status: properties[:battery_state],
        battery: battery_level(properties[:battery_level]),
        timestamp: DateTime.parse(properties[:timestamp]),
        altitude: properties[:altitude],
        tracker_id: properties[:device_id],
        velocity: properties[:speed],
        ssid: properties[:wifi],
        accuracy: properties[:horizontal_accuracy],
        vertical_accuracy: properties[:vertical_accuracy],
        course_accuracy: properties[:course_accuracy],
        course: properties[:course],
        raw_data: point,
        user_id: user_id
      }
    end

    # Converts a 0..1 battery fraction to a 1..100 integer percentage;
    # nil when missing or non-positive.
    def battery_level(level)
      value = (level.to_f * 100).to_i
      value.positive? ? value : nil
    end

    # A point is usable only with geometry coordinates and a timestamp.
    def params_valid?(point)
      point[:geometry].present? &&
        point[:geometry][:coordinates].present? &&
        point.dig(:properties, :timestamp).present?
    end
  end
end

View file

@ -12,8 +12,8 @@ class ReverseGeocoding::Places::FetchData
end
def call
if ::PHOTON_API_HOST.blank?
Rails.logger.warn('PHOTON_API_HOST is not set')
unless DawarichSettings.reverse_geocoding_enabled?
Rails.logger.warn('Reverse geocoding is not enabled')
return
end

View file

@ -1,9 +1,10 @@
# frozen_string_literal: true
# This class is named based on Google Takeout's Records.json file,
# the main source of user's location history data.
# This class is named based on Google Takeout's Records.json file
class Tasks::Imports::GoogleRecords
BATCH_SIZE = 1000 # Adjust based on your needs
def initialize(file_path, user_email)
@file_path = file_path
@user = User.find_by(email: user_email)
@ -14,10 +15,11 @@ class Tasks::Imports::GoogleRecords
import_id = create_import
log_start
file_content = read_file
json_data = Oj.load(file_content)
schedule_import_jobs(json_data, import_id)
process_file_in_batches(import_id)
log_success
rescue Oj::ParseError => e
Rails.logger.error("JSON parsing error: #{e.message}")
raise
end
private
@ -26,14 +28,25 @@ class Tasks::Imports::GoogleRecords
@user.imports.create(name: @file_path, source: :google_records).id
end
def read_file
File.read(@file_path)
end
def process_file_in_batches(import_id)
batch = []
index = 0
def schedule_import_jobs(json_data, import_id)
json_data['locations'].each do |json|
Import::GoogleTakeoutJob.perform_later(import_id, json.to_json)
Oj.load_file(@file_path, mode: :compat) do |record|
next unless record.is_a?(Hash) && record['locations']
record['locations'].each do |location|
batch << location
next unless batch.size >= BATCH_SIZE
index += BATCH_SIZE
Import::GoogleTakeoutJob.perform_later(import_id, Oj.dump(batch), index)
batch = []
end
end
Import::GoogleTakeoutJob.perform_later(import_id, Oj.dump(batch), index) if batch.any?
end
def log_start

View file

@ -20,7 +20,7 @@ class Visits::Suggest
create_visits_notification(user)
return nil unless reverse_geocoding_enabled?
return nil unless DawarichSettings.reverse_geocoding_enabled?
reverse_geocode(visits)
end
@ -68,10 +68,6 @@ class Visits::Suggest
visits.each(&:async_reverse_geocode)
end
def reverse_geocoding_enabled?
::REVERSE_GEOCODING_ENABLED && ::PHOTON_API_HOST.present?
end
def create_visits_notification(user)
content = <<~CONTENT
New visits have been suggested based on your location data from #{Time.zone.at(start_at)} to #{Time.zone.at(end_at)}. You can review them on the <a href="#{visits_path}" class="link">Visits</a> page.

View file

@ -112,3 +112,32 @@
</div>
<label class="modal-backdrop" for="points_rendering_mode_info">Close</label>
</div>
<input type="checkbox" id="speed_colored_routes_info" class="modal-toggle" />
<div class="modal focus:z-99" role="dialog">
<div class="modal-box">
<h3 class="text-lg font-bold">Speed-colored routes</h3>
<p class="py-4">
This checkbox will color the routes based on the speed of each segment.
</p>
<p class="py-4">
Uncheck this checkbox if you want to disable the speed-colored routes.
</p>
<p class="py-4">
Speed coloring is based on the following color stops:
<code>
0 km/h — green, stationary or walking
<br>
15 km/h — cyan, jogging
<br>
30 km/h — magenta, cycling
<br>
50 km/h — yellow, urban driving
<br>
100 km/h — orange-red, highway driving
</code>
</p>
</div>
<label class="modal-backdrop" for="speed_colored_routes_info">Close</label>
</div>

View file

@ -51,6 +51,8 @@
data-api_key="<%= current_user.api_key %>"
data-user_settings=<%= current_user.settings.to_json %>
data-coordinates="<%= @coordinates %>"
data-distance="<%= @distance %>"
data-points_number="<%= @points_number %>"
data-timezone="<%= Rails.configuration.time_zone %>">
<div data-maps-target="container" class="h-[25rem] rounded-lg w-full min-h-screen">
<div id="fog" class="fog"></div>

View file

@ -12,7 +12,7 @@
</div>
</div>
<p><%= stat.distance %><%= DISTANCE_UNIT %></p>
<% if REVERSE_GEOCODING_ENABLED %>
<% if DawarichSettings.reverse_geocoding_enabled? %>
<div class="card-actions justify-end">
<%= countries_and_cities_stat_for_month(stat) %>
</div>

View file

@ -16,7 +16,7 @@
<div class="stat-title">Geopoints tracked</div>
</div>
<% if REVERSE_GEOCODING_ENABLED %>
<% if DawarichSettings.reverse_geocoding_enabled? %>
<%= render 'stats/reverse_geocoding_stats' %>
<% end %>
</div>
@ -39,7 +39,7 @@
<%= number_with_delimiter year_distance_stat(year, current_user) %><%= DISTANCE_UNIT %>
<% end %>
</p>
<% if REVERSE_GEOCODING_ENABLED %>
<% if DawarichSettings.reverse_geocoding_enabled? %>
<div class="card-actions justify-end">
<%= countries_and_cities_stat_for_year(year, stats) %>
</div>

View file

@ -20,4 +20,3 @@ test:
production:
<<: *default
database: <%= ENV['DATABASE_NAME'] || 'dawarich_production' %>
url: <%= ENV['DATABASE_URL'] %>

View file

@ -6,8 +6,11 @@ require_relative 'application'
# Initialize the Rails application.
Rails.application.initialize!
# Clear the cache
Cache::CleaningJob.perform_later
# Use an atomic operation to ensure one-time execution
if defined?(Rails::Server) && Rails.cache.write('cache_jobs_scheduled', true, unless_exist: true)
# Clear the cache
Cache::CleaningJob.perform_later
# Preheat the cache
Cache::PreheatingJob.perform_later
# Preheat the cache
Cache::PreheatingJob.perform_later
end

View file

@ -86,11 +86,11 @@ Rails.application.configure do
# Raise error when a before_action's only/except options reference missing actions
config.action_controller.raise_on_missing_callback_actions = true
config.action_mailer.default_url_options = { host: ENV.fetch('APPLICATION_HOST', 'localhost'), port: 3000 }
config.hosts << ENV.fetch('APPLICATION_HOST', 'localhost')
hosts = ENV.fetch('APPLICATION_HOSTS', 'localhost')
config.hosts.concat(hosts.split(',')) if hosts.present?
hosts = ENV.fetch('APPLICATION_HOSTS', 'localhost').split(',')
config.action_mailer.default_url_options = { host: hosts.first, port: 3000 }
config.hosts.concat(hosts) if hosts.present?
config.force_ssl = ENV.fetch('APPLICATION_PROTOCOL', 'http').downcase == 'https'

View file

@ -29,7 +29,11 @@ Rails.application.configure do
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
config.assets.compile = true
config.assets.content_type = {
geojson: 'application/geo+json'
}
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.asset_host = "http://assets.example.com"
@ -53,12 +57,12 @@ Rails.application.configure do
# config.assume_ssl = true
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
config.force_ssl = true
config.force_ssl = ENV.fetch('APPLICATION_PROTOCOL', 'http').downcase == 'https'
# Log to STDOUT by default
config.logger = ActiveSupport::Logger.new($stdout)
.tap { |logger| logger.formatter = ::Logger::Formatter.new }
.then { |logger| ActiveSupport::TaggedLogging.new(logger) }
# Direct logs to STDOUT
config.logger = Logger.new($stdout)
config.lograge.enabled = true
config.lograge.formatter = Lograge::Formatters::Json.new
# Prepend all log lines with the following tags.
config.log_tags = [:request_id]
@ -99,4 +103,8 @@ Rails.application.configure do
# ]
# Skip DNS rebinding protection for the default health check endpoint.
# config.host_authorization = { exclude: ->(request) { request.path == "/up" } }
hosts = ENV.fetch('APPLICATION_HOSTS', 'localhost').split(',')
config.action_mailer.default_url_options = { host: hosts.first, port: 3000 }
config.hosts.concat(hosts) if hosts.present?
end

View file

@ -1,10 +1,17 @@
# frozen_string_literal: true
MIN_MINUTES_SPENT_IN_CITY = ENV.fetch('MIN_MINUTES_SPENT_IN_CITY', 60).to_i
REVERSE_GEOCODING_ENABLED = ENV.fetch('REVERSE_GEOCODING_ENABLED', 'true') == 'true'
PHOTON_API_HOST = ENV.fetch('PHOTON_API_HOST', nil)
PHOTON_API_USE_HTTPS = ENV.fetch('PHOTON_API_USE_HTTPS', 'true') == 'true'
DISTANCE_UNIT = ENV.fetch('DISTANCE_UNIT', 'km').to_sym
APP_VERSION = File.read('.app_version').strip
TELEMETRY_STRING = Base64.encode64('IjVFvb8j3P9-ArqhSGav9j8YcJaQiuNIzkfOPKQDk2lvKXqb8t1NSRv50oBkaKtlrB_ZRzO9NdurpMtncV_HYQ==')
TELEMETRY_URL = 'https://influxdb2.frey.today/api/v2/write'
# Reverse geocoding settings
PHOTON_API_HOST = ENV.fetch('PHOTON_API_HOST', nil)
PHOTON_API_KEY = ENV.fetch('PHOTON_API_KEY', nil)
PHOTON_API_USE_HTTPS = ENV.fetch('PHOTON_API_USE_HTTPS', 'true') == 'true'
GEOAPIFY_API_KEY = ENV.fetch('GEOAPIFY_API_KEY', nil)
# /Reverse geocoding settings

View file

@ -0,0 +1,21 @@
# frozen_string_literal: true
# Central feature flags derived from environment-backed constants
# (PHOTON_API_HOST, GEOAPIFY_API_KEY).
class DawarichSettings
  class << self
    # True when at least one reverse-geocoding backend is configured.
    # Uses defined? guards instead of ||= so that a false result is
    # memoized too (||= re-evaluates on every call when the cached
    # value is false).
    def reverse_geocoding_enabled?
      return @reverse_geocoding_enabled if defined?(@reverse_geocoding_enabled)

      @reverse_geocoding_enabled = photon_enabled? || geoapify_enabled?
    end

    # True when a Photon API host is configured.
    def photon_enabled?
      return @photon_enabled if defined?(@photon_enabled)

      @photon_enabled = PHOTON_API_HOST.present?
    end

    # True when pointing at the public komoot.io Photon instance,
    # which requires client-side rate limiting.
    def photon_uses_komoot_io?
      return @photon_uses_komoot_io if defined?(@photon_uses_komoot_io)

      @photon_uses_komoot_io = PHOTON_API_HOST == 'photon.komoot.io'
    end

    # True when a Geoapify API key is configured.
    def geoapify_enabled?
      return @geoapify_enabled if defined?(@geoapify_enabled)

      @geoapify_enabled = GEOAPIFY_API_KEY.present?
    end
  end
end

View file

@ -12,11 +12,13 @@ settings = {
}
}
if defined?(PHOTON_API_HOST)
if PHOTON_API_HOST.present?
settings[:lookup] = :photon
settings[:photon] = { use_https: PHOTON_API_USE_HTTPS, host: PHOTON_API_HOST }
settings[:http_headers] = { 'X-Api-Key' => PHOTON_API_KEY } if defined?(PHOTON_API_KEY)
elsif GEOAPIFY_API_KEY.present?
settings[:lookup] = :geoapify
settings[:api_key] = GEOAPIFY_API_KEY
end
settings[:http_headers] = { 'X-Api-Key' => PHOTON_API_KEY } if defined?(PHOTON_API_KEY)
Geocoder.configure(settings)

View file

@ -2,6 +2,7 @@
Sidekiq.configure_server do |config|
config.redis = { url: ENV['REDIS_URL'] }
config.logger = Sidekiq::Logger.new($stdout)
if ENV['PROMETHEUS_EXPORTER_ENABLED'].to_s == 'true'
require 'prometheus_exporter/instrumentation'
@ -27,4 +28,4 @@ Sidekiq.configure_client do |config|
config.redis = { url: ENV['REDIS_URL'] }
end
Sidekiq::Queue['reverse_geocoding'].limit = 1 if Sidekiq.server? && PHOTON_API_HOST == 'photon.komoot.io'
Sidekiq::Queue['reverse_geocoding'].limit = 1 if Sidekiq.server? && DawarichSettings.photon_uses_komoot_io?

View file

@ -65,9 +65,10 @@ Rails.application.routes.draw do
get 'health', to: 'health#index'
patch 'settings', to: 'settings#update'
get 'settings', to: 'settings#index'
get 'users/me', to: 'users#me'
resources :areas, only: %i[index create update destroy]
resources :points, only: %i[index destroy]
resources :points, only: %i[index create update destroy]
resources :visits, only: %i[update]
resources :stats, only: :index

View file

@ -12,7 +12,7 @@ class RemovePointsWithoutCoordinates < ActiveRecord::Migration[7.1]
Rails.logger.info 'Points without coordinates removed.'
BulkStatsCalculatingJob.perform_later(User.pluck(:id))
BulkStatsCalculatingJob.perform_later
end
def down

View file

@ -2,11 +2,12 @@
class CreateTelemetryNotification < ActiveRecord::Migration[7.2]
def up
User.find_each do |user|
Notifications::Create.new(
user:, kind: :info, title: 'Telemetry enabled', content: notification_content
).call
end
# TODO: Remove
# User.find_each do |user|
# Notifications::Create.new(
# user:, kind: :info, title: 'Telemetry enabled', content: notification_content
# ).call
# end
end
def down

View file

@ -0,0 +1,37 @@
# frozen_string_literal: true
# One-off data migration: sends every user an informational notification
# about heavy load on the public Photon API, asking them to switch to a
# self-hosted or alternative instance.
class CreatePhotonLoadNotification < ActiveRecord::Migration[8.0]
  def up
    # Fan out one notification per user; find_each batches to limit memory.
    User.find_each do |user|
      Notifications::Create.new(
        user:, kind: :info, title: '⚠️ Photon API is under heavy load', content: notification_content
      ).call
    end
  end
  def down
    # Notifications are user-visible state; deleting them could remove
    # records users already read, so this migration is one-way.
    raise ActiveRecord::IrreversibleMigration
  end
  private
  # HTML body of the notification, rendered as rich content in the UI.
  def notification_content
    <<~CONTENT
      <p>
        A few days ago <a href="https://github.com/lonvia" class="underline">@lonvia</a>, maintainer of <a href="https://photon.komoot.io" class="underline">https://photon.komoot.io</a>, the reverse-geocoding API service that Dawarich is using by default, <a href="https://github.com/Freika/dawarich/issues/614">reached me</a> to highlight a problem: Dawarich makes too many requests to https://photon.komoot.io, even with recently introduced rate-limiting to prevent more than 1 request per second.
      </p>
      <br>
      <p>
        Photon is a great service and Dawarich wouldn't be what it is now without it, but I have to ask all Dawarich users that are running it on their hardware to either switch to a <a href="https://dawarich.app/docs/tutorials/reverse-geocoding#using-photon-api-hosted-by-freika" class="underline">Photon instance</a> hosted by me (<a href="https://github.com/Freika">Freika</a>) or strongly consider hosting their <a href="https://dawarich.app/docs/tutorials/reverse-geocoding#setting-up-your-own-reverse-geocoding-service" class="underline">own Photon instance</a>. Thanks to <a href="https://github.com/rtuszik/photon-docker">@rtuszik</a>, it's pretty much <code>docker compose up -d</code>. The documentation on the website will be soon updated to also encourage setting up your own Photon instance. More reverse geocoding options will be added in the future.</p>
      <br>
      <p>Let's decrease load on https://photon.komoot.io together!</p>
      <br>
      <p>Thank you.</p>
    CONTENT
  end
end

View file

@ -0,0 +1,31 @@
# frozen_string_literal: true
# One-off cleanup: collapses points that share latitude/longitude/timestamp/
# user_id, keeping only the newest row of each group. Intended to run before
# the unique index on those columns is added.
class RemoveDuplicatePoints < ActiveRecord::Migration[8.0]
  def up
    # Find duplicate groups using a subquery
    duplicate_groups =
      Point.select('latitude, longitude, timestamp, user_id, COUNT(*) as count')
      .group('latitude, longitude, timestamp, user_id')
      .having('COUNT(*) > 1')
    puts "Duplicate groups found: #{duplicate_groups.length}"
    duplicate_groups.each do |group|
      points = Point.where(
        latitude: group.latitude,
        longitude: group.longitude,
        timestamp: group.timestamp,
        user_id: group.user_id
      ).order(id: :asc)
      # Keep the latest record and destroy all others
      # NOTE(review): destroy_all runs model callbacks per record — slower
      # than delete_all but presumably intentional; confirm callbacks are wanted.
      latest = points.last
      points.where.not(id: latest.id).destroy_all
    end
  end
  def down
    # This migration cannot be reversed
    raise ActiveRecord::IrreversibleMigration
  end
end

View file

@ -1 +1 @@
DataMigrate::Data.define(version: 20241107112451)
DataMigrate::Data.define(version: 20250120154554)

View file

@ -0,0 +1,8 @@
# frozen_string_literal: true
# Adds course (heading) and its accuracy to points, both stored as
# decimals with five digits after the point.
class AddCourseAndCourseAccuracyToPoints < ActiveRecord::Migration[8.0]
  def change
    # bulk: true combines both additions into a single ALTER TABLE.
    change_table :points, bulk: true do |t|
      t.decimal :course, precision: 8, scale: 5
      t.decimal :course_accuracy, precision: 8, scale: 5
    end
  end
end

View file

@ -0,0 +1,11 @@
# frozen_string_literal: true
# Adds an indexed external_track_id column to points, used to correlate
# points with tracks from external sources.
class AddExternalTrackIdToPoints < ActiveRecord::Migration[8.0]
  # Concurrent index builds cannot run inside a transaction.
  disable_ddl_transaction!
  def change
    add_column :points, :external_track_id, :string
    # algorithm: :concurrently avoids blocking writes on the large points table.
    add_index :points, :external_track_id, algorithm: :concurrently
  end
end

View file

@ -0,0 +1,27 @@
# frozen_string_literal: true
# Adds the unique index that backs point deduplication
# (one point per latitude/longitude/timestamp/user).
class AddUniqueIndexToPoints < ActiveRecord::Migration[8.0]
  # Concurrent index builds cannot run inside a transaction.
  disable_ddl_transaction!

  INDEX_NAME = 'unique_points_lat_long_timestamp_user_id_index'
  COLUMNS = %i[latitude longitude timestamp user_id].freeze

  def up
    # Idempotent: skip when the index was already created.
    return if index_exists?(:points, COLUMNS, name: INDEX_NAME)

    add_index :points, COLUMNS,
              unique: true,
              name: INDEX_NAME,
              algorithm: :concurrently
  end

  def down
    # Idempotent: skip when the index is already gone.
    return unless index_exists?(:points, COLUMNS, name: INDEX_NAME)

    remove_index :points, COLUMNS, name: INDEX_NAME
  end
end

9
db/schema.rb generated
View file

@ -10,9 +10,9 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema[8.0].define(version: 2024_12_26_202831) do
ActiveRecord::Schema[8.0].define(version: 2025_01_20_154555) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
enable_extension "pg_catalog.plpgsql"
create_table "action_text_rich_texts", force: :cascade do |t|
t.string "name", null: false
@ -156,14 +156,19 @@ ActiveRecord::Schema[8.0].define(version: 2024_12_26_202831) do
t.jsonb "geodata", default: {}, null: false
t.bigint "visit_id"
t.datetime "reverse_geocoded_at"
t.decimal "course", precision: 8, scale: 5
t.decimal "course_accuracy", precision: 8, scale: 5
t.string "external_track_id"
t.index ["altitude"], name: "index_points_on_altitude"
t.index ["battery"], name: "index_points_on_battery"
t.index ["battery_status"], name: "index_points_on_battery_status"
t.index ["city"], name: "index_points_on_city"
t.index ["connection"], name: "index_points_on_connection"
t.index ["country"], name: "index_points_on_country"
t.index ["external_track_id"], name: "index_points_on_external_track_id"
t.index ["geodata"], name: "index_points_on_geodata", using: :gin
t.index ["import_id"], name: "index_points_on_import_id"
t.index ["latitude", "longitude", "timestamp", "user_id"], name: "unique_points_lat_long_timestamp_user_id_index", unique: true
t.index ["latitude", "longitude"], name: "index_points_on_latitude_and_longitude"
t.index ["reverse_geocoded_at"], name: "index_points_on_reverse_geocoded_at"
t.index ["timestamp"], name: "index_points_on_timestamp"

View file

@ -1,51 +0,0 @@
#!/bin/sh
# Development web entrypoint: waits for Postgres, creates/prepares the
# database, runs data migrations and seeds, then runs the passed command.

# Clear bundler path overrides inherited from the image so the local
# bundle configuration wins.
unset BUNDLE_PATH
unset BUNDLE_BIN

set -e

echo "Environment: $RAILS_ENV"

# Default connection settings; each can be overridden via the environment.
DATABASE_HOST=${DATABASE_HOST:-"dawarich_db"}
DATABASE_PORT=${DATABASE_PORT:-5432}
DATABASE_USERNAME=${DATABASE_USERNAME:-"postgres"}
DATABASE_PASSWORD=${DATABASE_PASSWORD:-"password"}
DATABASE_NAME=${DATABASE_NAME:-"dawarich_development"}

# Remove pre-existing puma/passenger server.pid (quoted against spaces).
rm -f "$APP_PATH/tmp/pids/server.pid"

# Wait for the database to accept TCP connections.
until nc -zv "$DATABASE_HOST" "${DATABASE_PORT:-5432}"; do
  echo "Waiting for PostgreSQL to be ready..."
  sleep 1
done

# Install gems
gem update --system 3.5.7
gem install bundler --version '2.5.9'

# Create the database only if it does not already exist.
if [ "$(psql "postgres://$DATABASE_USERNAME:$DATABASE_PASSWORD@$DATABASE_HOST:$DATABASE_PORT" -XtAc "SELECT 1 FROM pg_database WHERE datname='$DATABASE_NAME'")" = '1' ]; then
  echo "Database $DATABASE_NAME already exists, skipping creation..."
else
  echo "Creating database $DATABASE_NAME..."
  bundle exec rails db:create
fi

# Run database migrations
echo "PostgreSQL is ready. Running database migrations..."
bundle exec rails db:prepare

# Run data migrations
echo "Running DATA migrations..."
bundle exec rake data:migrate

# Run seeds
echo "Running seeds..."
bundle exec rake db:seed

# Run the passed command. "$@" preserves argument boundaries; the previous
# unquoted ${@} word-split arguments containing spaces.
bundle exec "$@"

View file

@ -1,21 +0,0 @@
#!/bin/sh
# Sidekiq entrypoint: waits for Postgres to accept connections, then runs
# the passed command.
set -e

echo "Environment: $RAILS_ENV"

# Default connection settings; each can be overridden via the environment.
DATABASE_HOST=${DATABASE_HOST:-"dawarich_db"}
DATABASE_PORT=${DATABASE_PORT:-5432}
DATABASE_USER=${DATABASE_USER:-"postgres"}
DATABASE_PASSWORD=${DATABASE_PASSWORD:-"password"}
DATABASE_NAME=${DATABASE_NAME:-"dawarich_development"}

# Wait for the database to accept TCP connections.
until nc -zv "$DATABASE_HOST" "${DATABASE_PORT:-5432}"; do
  echo "Waiting for PostgreSQL to be ready..."
  sleep 1
done

# Run the passed command. "$@" preserves argument boundaries; the previous
# unquoted ${@} word-split arguments containing spaces.
bundle exec "$@"

53
docker/Dockerfile.dev Normal file
View file

@ -0,0 +1,53 @@
# Development image: Ruby 3.3 on Alpine with Node/Yarn for asset building.
FROM ruby:3.3.4-alpine
ENV APP_PATH=/var/app
ENV BUNDLE_VERSION=2.5.21
ENV BUNDLE_PATH=/usr/local/bundle/gems
ENV RAILS_LOG_TO_STDOUT=true
ENV RAILS_PORT=3000
ENV RAILS_ENV=development
# Install dependencies for application
# gcompat provides glibc compatibility for prebuilt native gems on musl.
RUN apk -U add --no-cache \
    build-base \
    git \
    postgresql-dev \
    postgresql-client \
    libxml2-dev \
    libxslt-dev \
    nodejs \
    yarn \
    imagemagick \
    tzdata \
    less \
    yaml-dev \
    gcompat \
    && mkdir -p $APP_PATH
# Update gem system and install bundler
RUN gem update --system 3.6.2 \
    && gem install bundler --version "$BUNDLE_VERSION" \
    && rm -rf $GEM_HOME/cache/*
WORKDIR $APP_PATH
# Copy dependency manifests first so gem installation is cached separately
# from application code changes.
# NOTE(review): COPY cannot reference paths outside the build context; the
# "../" prefixes assume the build runs with the repository root as context
# (e.g. docker build -f docker/Dockerfile.dev .) — confirm against compose.
COPY ../Gemfile ../Gemfile.lock ../.ruby-version ../vendor ./
# Install all gems into the image
RUN bundle config set --local path 'vendor/bundle' \
    && bundle install --jobs 4 --retry 3 \
    && rm -rf vendor/bundle/ruby/3.3.0/cache/*.gem
# Copy the rest of the application
COPY ../. ./
# Copy entrypoint scripts and grant execution permissions
COPY ./docker/web-entrypoint.sh /usr/local/bin/web-entrypoint.sh
RUN chmod +x /usr/local/bin/web-entrypoint.sh
COPY ./docker/sidekiq-entrypoint.sh /usr/local/bin/sidekiq-entrypoint.sh
RUN chmod +x /usr/local/bin/sidekiq-entrypoint.sh
EXPOSE $RAILS_PORT
# Containers run their command through bundler (e.g. "rails server").
ENTRYPOINT [ "bundle", "exec" ]

57
docker/Dockerfile.prod Normal file
View file

@ -0,0 +1,57 @@
FROM ruby:3.3.4-alpine
ENV APP_PATH=/var/app
ENV BUNDLE_VERSION=2.5.21
ENV BUNDLE_PATH=/usr/local/bundle/gems
ENV RAILS_LOG_TO_STDOUT=true
ENV RAILS_PORT=3000
ENV RAILS_ENV=production
# Install dependencies for application
RUN apk -U add --no-cache \
build-base \
git \
postgresql-dev \
postgresql-client \
libxml2-dev \
libxslt-dev \
nodejs \
yarn \
imagemagick \
tzdata \
less \
yaml-dev \
gcompat \
&& mkdir -p $APP_PATH
# Update gem system and install bundler
RUN gem update --system 3.6.2 \
&& gem install bundler --version "$BUNDLE_VERSION" \
&& rm -rf $GEM_HOME/cache/*
WORKDIR $APP_PATH
COPY ../Gemfile ../Gemfile.lock ../.ruby-version ../vendor ./
# Install production gems only
RUN bundle config set --local path 'vendor/bundle' \
&& bundle config set --local without 'development test' \
&& bundle install --jobs 4 --retry 3 \
&& rm -rf vendor/bundle/ruby/3.3.0/cache/*.gem
COPY ../. ./
# Precompile assets for production
RUN SECRET_KEY_BASE_DUMMY=1 bundle exec rake assets:precompile \
&& rm -rf node_modules tmp/cache
# Copy entrypoint scripts and grant execution permissions
COPY ./docker/web-entrypoint.sh /usr/local/bin/web-entrypoint.sh
RUN chmod +x /usr/local/bin/web-entrypoint.sh
COPY ./docker/sidekiq-entrypoint.sh /usr/local/bin/sidekiq-entrypoint.sh
RUN chmod +x /usr/local/bin/sidekiq-entrypoint.sh
EXPOSE $RAILS_PORT
ENTRYPOINT [ "bundle", "exec" ]

View file

@ -0,0 +1,156 @@
networks:
dawarich:
services:
dawarich_redis:
image: redis:7.4-alpine
container_name: dawarich_redis
command: redis-server
networks:
- dawarich
volumes:
- dawarich_redis_data:/var/shared/redis
restart: always
healthcheck:
test: [ "CMD", "redis-cli", "--raw", "incr", "ping" ]
interval: 10s
retries: 5
start_period: 30s
timeout: 10s
dawarich_db:
image: postgres:17-alpine
shm_size: 1G
container_name: dawarich_db
volumes:
- dawarich_db_data:/var/lib/postgresql/data
networks:
- dawarich
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: password
POSTGRES_DB: dawarich_production
restart: always
healthcheck:
test: [ "CMD", "pg_isready", "-U", "postgres" ]
interval: 10s
retries: 5
start_period: 30s
timeout: 10s
dawarich_app:
image: dawarich:prod
container_name: dawarich_app
volumes:
- dawarich_public:/var/app/public
- dawarich_watched:/var/app/tmp/imports/watched
networks:
- dawarich
ports:
- 3000:3000
# - 9394:9394 # Prometheus exporter, uncomment if needed
stdin_open: true
tty: true
entrypoint: web-entrypoint.sh
command: ['bin/rails', 'server', '-p', '3000', '-b', '::']
restart: on-failure
environment:
RAILS_ENV: production
REDIS_URL: redis://dawarich_redis:6379/0
DATABASE_HOST: dawarich_db
DATABASE_PORT: 5432
DATABASE_USERNAME: postgres
DATABASE_PASSWORD: password
DATABASE_NAME: dawarich_production
MIN_MINUTES_SPENT_IN_CITY: 60
APPLICATION_HOSTS: localhost,::1,127.0.0.1
TIME_ZONE: Europe/London
APPLICATION_PROTOCOL: http
DISTANCE_UNIT: km
PROMETHEUS_EXPORTER_ENABLED: false
PROMETHEUS_EXPORTER_HOST: 0.0.0.0
PROMETHEUS_EXPORTER_PORT: 9394
SECRET_KEY_BASE: 1234567890
RAILS_LOG_TO_STDOUT: "true"
logging:
driver: "json-file"
options:
max-size: "100m"
max-file: "5"
healthcheck:
test: [ "CMD-SHELL", "wget -qO - http://127.0.0.1:3000/api/v1/health | grep -q '\"status\"\\s*:\\s*\"ok\"'" ]
interval: 10s
retries: 30
start_period: 30s
timeout: 10s
depends_on:
dawarich_db:
condition: service_healthy
restart: true
dawarich_redis:
condition: service_healthy
restart: true
deploy:
resources:
limits:
cpus: '0.50' # Limit CPU usage to 50% of one core
          memory: '4G' # Limit memory usage to 4GB
dawarich_sidekiq:
image: dawarich:prod
container_name: dawarich_sidekiq
volumes:
- dawarich_public:/var/app/public
- dawarich_watched:/var/app/tmp/imports/watched
networks:
- dawarich
stdin_open: true
tty: true
entrypoint: sidekiq-entrypoint.sh
command: ['bundle', 'exec', 'sidekiq']
restart: on-failure
environment:
RAILS_ENV: production
REDIS_URL: redis://dawarich_redis:6379/0
DATABASE_HOST: dawarich_db
DATABASE_PORT: 5432
DATABASE_USERNAME: postgres
DATABASE_PASSWORD: password
DATABASE_NAME: dawarich_production
APPLICATION_HOSTS: localhost,::1,127.0.0.1
BACKGROUND_PROCESSING_CONCURRENCY: 10
APPLICATION_PROTOCOL: http
DISTANCE_UNIT: km
PROMETHEUS_EXPORTER_ENABLED: false
PROMETHEUS_EXPORTER_HOST: dawarich_app
PROMETHEUS_EXPORTER_PORT: 9394
SECRET_KEY_BASE: 1234567890
RAILS_LOG_TO_STDOUT: "true"
logging:
driver: "json-file"
options:
max-size: "100m"
max-file: "5"
healthcheck:
test: [ "CMD-SHELL", "bundle exec sidekiqmon processes | grep $${HOSTNAME}" ]
interval: 10s
retries: 30
start_period: 30s
timeout: 10s
depends_on:
dawarich_db:
condition: service_healthy
restart: true
dawarich_redis:
condition: service_healthy
restart: true
dawarich_app:
condition: service_healthy
restart: true
deploy:
resources:
limits:
cpus: '0.50' # Limit CPU usage to 50% of one core
          memory: '4G' # Limit memory usage to 4GB
volumes:
dawarich_db_data:
dawarich_redis_data:
dawarich_public:
dawarich_watched:

View file

@ -41,7 +41,6 @@ services:
image: freikin/dawarich:latest
container_name: dawarich_app
volumes:
- dawarich_gem_cache_app:/usr/local/bundle/gems
- dawarich_public:/var/app/public
- dawarich_watched:/var/app/tmp/imports/watched
networks:
@ -51,8 +50,8 @@ services:
# - 9394:9394 # Prometheus exporter, uncomment if needed
stdin_open: true
tty: true
entrypoint: dev-entrypoint.sh
command: ['bin/dev']
entrypoint: web-entrypoint.sh
command: ['bin/rails', 'server', '-p', '3000', '-b', '::']
restart: on-failure
environment:
RAILS_ENV: development
@ -62,13 +61,10 @@ services:
DATABASE_PASSWORD: password
DATABASE_NAME: dawarich_development
MIN_MINUTES_SPENT_IN_CITY: 60
APPLICATION_HOST: localhost
APPLICATION_HOSTS: localhost
TIME_ZONE: Europe/London
APPLICATION_PROTOCOL: http
DISTANCE_UNIT: km
PHOTON_API_HOST: photon.komoot.io
PHOTON_API_USE_HTTPS: true
PROMETHEUS_EXPORTER_ENABLED: false
PROMETHEUS_EXPORTER_HOST: 0.0.0.0
PROMETHEUS_EXPORTER_PORT: 9394
@ -95,19 +91,18 @@ services:
resources:
limits:
cpus: '0.50' # Limit CPU usage to 50% of one core
memory: '2G' # Limit memory usage to 2GB
memory: '4G' # Limit memory usage to 4GB
dawarich_sidekiq:
image: freikin/dawarich:latest
container_name: dawarich_sidekiq
volumes:
- dawarich_gem_cache_sidekiq:/usr/local/bundle/gems
- dawarich_public:/var/app/public
- dawarich_watched:/var/app/tmp/imports/watched
networks:
- dawarich
stdin_open: true
tty: true
entrypoint: dev-entrypoint.sh
entrypoint: sidekiq-entrypoint.sh
command: ['sidekiq']
restart: on-failure
environment:
@ -117,13 +112,10 @@ services:
DATABASE_USERNAME: postgres
DATABASE_PASSWORD: password
DATABASE_NAME: dawarich_development
APPLICATION_HOST: localhost
APPLICATION_HOSTS: localhost
BACKGROUND_PROCESSING_CONCURRENCY: 10
APPLICATION_PROTOCOL: http
DISTANCE_UNIT: km
PHOTON_API_HOST: photon.komoot.io
PHOTON_API_USE_HTTPS: true
PROMETHEUS_EXPORTER_ENABLED: false
PROMETHEUS_EXPORTER_HOST: dawarich_app
PROMETHEUS_EXPORTER_PORT: 9394
@ -153,12 +145,10 @@ services:
resources:
limits:
cpus: '0.50' # Limit CPU usage to 50% of one core
memory: '2G' # Limit memory usage to 2GB
memory: '4G' # Limit memory usage to 4GB
volumes:
dawarich_db_data:
dawarich_gem_cache_app:
dawarich_gem_cache_sidekiq:
dawarich_shared:
dawarich_public:
dawarich_watched:

View file

@ -3,10 +3,6 @@ networks:
volumes:
dawarich_gem_cache_app:
name: dawarich_gem_cache_app
dawarich_gem_cache_sidekiq:
name: dawarich_gem_cache_sidekiq
dawarich_public:
name: dawarich_public
dawarich_keydb:
@ -41,18 +37,14 @@ services:
DATABASE_PASSWORD: password
DATABASE_NAME: dawarich_development
MIN_MINUTES_SPENT_IN_CITY: 60
APPLICATION_HOST: localhost
APPLICATION_HOSTS: localhost
APPLICATION_PROTOCOL: http
DISTANCE_UNIT: km
PHOTON_API_HOST: photon.komoot.io
PHOTON_API_USE_HTTPS: true
stdin_open: true
tty: true
entrypoint: dev-entrypoint.sh
command: [ 'bin/dev' ]
volumes:
- dawarich_gem_cache_app:/usr/local/bundle/gems
- dawarich_public:/var/app/dawarich_public
- watched:/var/app/tmp/imports/watched
healthcheck:
@ -96,19 +88,15 @@ services:
DATABASE_USERNAME: postgres
DATABASE_PASSWORD: password
DATABASE_NAME: dawarich_development
APPLICATION_HOST: localhost
APPLICATION_HOSTS: localhost
BACKGROUND_PROCESSING_CONCURRENCY: 10
APPLICATION_PROTOCOL: http
DISTANCE_UNIT: km
PHOTON_API_HOST: photon.komoot.io
PHOTON_API_USE_HTTPS: true
stdin_open: true
tty: true
entrypoint: dev-entrypoint.sh
command: [ 'sidekiq' ]
volumes:
- dawarich_gem_cache_sidekiq:/usr/local/bundle/gems
- dawarich_public:/var/app/dawarich_public
- watched:/var/app/tmp/imports/watched
logging:
@ -158,7 +146,7 @@ services:
- dawarich
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: eJH3YZsVc2s6byhFwpEny
POSTGRES_PASSWORD: password
POSTGRES_DATABASE: dawarich
volumes:
- ./db:/var/lib/postgresql/data

View file

@ -0,0 +1,34 @@
#!/bin/sh
unset BUNDLE_PATH
unset BUNDLE_BIN
set -e
echo "⚠️ Starting Sidekiq in $RAILS_ENV environment ⚠️"
# Parse DATABASE_URL if present, otherwise use individual variables
if [ -n "$DATABASE_URL" ]; then
# Extract components from DATABASE_URL
DATABASE_HOST=$(echo $DATABASE_URL | awk -F[@/] '{print $4}')
DATABASE_PORT=$(echo $DATABASE_URL | awk -F[@/:] '{print $5}')
DATABASE_USERNAME=$(echo $DATABASE_URL | awk -F[:/@] '{print $4}')
DATABASE_PASSWORD=$(echo $DATABASE_URL | awk -F[:/@] '{print $5}')
else
# Use existing environment variables
DATABASE_HOST=${DATABASE_HOST}
DATABASE_PORT=${DATABASE_PORT}
DATABASE_USERNAME=${DATABASE_USERNAME}
DATABASE_PASSWORD=${DATABASE_PASSWORD}
fi
# Wait for the database to become available
echo "⏳ Waiting for database to be ready..."
until PGPASSWORD=$DATABASE_PASSWORD psql -h "$DATABASE_HOST" -p "$DATABASE_PORT" -U "$DATABASE_USERNAME" -c '\q'; do
>&2 echo "Postgres is unavailable - retrying..."
sleep 2
done
echo "✅ PostgreSQL is ready!"
# run sidekiq
bundle exec sidekiq

58
docker/web-entrypoint.sh Normal file
View file

@ -0,0 +1,58 @@
#!/bin/sh
unset BUNDLE_PATH
unset BUNDLE_BIN
set -e
echo "⚠️ Starting Rails environment: $RAILS_ENV ⚠️"
# Parse DATABASE_URL if present, otherwise use individual variables
if [ -n "$DATABASE_URL" ]; then
# Extract components from DATABASE_URL
DATABASE_HOST=$(echo $DATABASE_URL | awk -F[@/] '{print $4}')
DATABASE_PORT=$(echo $DATABASE_URL | awk -F[@/:] '{print $5}')
DATABASE_USERNAME=$(echo $DATABASE_URL | awk -F[:/@] '{print $4}')
DATABASE_PASSWORD=$(echo $DATABASE_URL | awk -F[:/@] '{print $5}')
DATABASE_NAME=$(echo $DATABASE_URL | awk -F[@/] '{print $5}')
else
# Use existing environment variables
DATABASE_HOST=${DATABASE_HOST}
DATABASE_PORT=${DATABASE_PORT}
DATABASE_USERNAME=${DATABASE_USERNAME}
DATABASE_PASSWORD=${DATABASE_PASSWORD}
DATABASE_NAME=${DATABASE_NAME}
fi
# Remove pre-existing puma/passenger server.pid
rm -f $APP_PATH/tmp/pids/server.pid
# Wait for the database to become available
echo "⏳ Waiting for database to be ready..."
until PGPASSWORD=$DATABASE_PASSWORD psql -h "$DATABASE_HOST" -p "$DATABASE_PORT" -U "$DATABASE_USERNAME" -c '\q'; do
>&2 echo "Postgres is unavailable - retrying..."
sleep 2
done
echo "✅ PostgreSQL is ready!"
# Create database if it doesn't exist
if ! PGPASSWORD=$DATABASE_PASSWORD psql -h "$DATABASE_HOST" -p "$DATABASE_PORT" -U "$DATABASE_USERNAME" -c "SELECT 1 FROM pg_database WHERE datname='$DATABASE_NAME'" | grep -q 1; then
echo "Creating database $DATABASE_NAME..."
bundle exec rails db:create
fi
# Run database migrations
echo "PostgreSQL is ready. Running database migrations..."
bundle exec rails db:migrate
# Run data migrations
echo "Running DATA migrations..."
bundle exec rake data:migrate
# if [ "$RAILS_ENV" != "production" ]; then
echo "Running seeds..."
bundle exec rails db:seed
# fi
# run passed commands
bundle exec ${@}

View file

@ -29,7 +29,7 @@ If you don't want to use dedicated share for projects installed by docker skip i
### Dawarich root folder
1. Open your [Docker root folder](#docker-root-share) in **File station**.
2. Create new folder **dawarich** and open it.
3. Create folders **redis**, **db_data**, **db_shared**, **gem_cache** and **public** in **dawarich** folder.
3. Create folders **redis**, **db_data**, **db_shared** and **public** in **dawarich** folder.
4. Copy [docker compose](synology/docker-compose.yml) and [.env](synology/.env) files from the **synology** repo folder into the **dawarich** folder on your Synology.
# Installation

View file

@ -14,7 +14,6 @@ dawarich_app:
...
environment:
...
APPLICATION_HOST: "yourhost.com" <------------------------------ Edit this
APPLICATION_HOSTS: "yourhost.com,www.yourhost.com,127.0.0.1" <-- Edit this
```
@ -25,7 +24,6 @@ dawarich_sidekiq:
...
environment:
...
APPLICATION_HOST: "yourhost.com" <------------------------------ Edit this
APPLICATION_HOSTS: "yourhost.com,www.yourhost.com,127.0.0.1" <-- Edit this
...
```
@ -48,7 +46,7 @@ server {
brotli on;
brotli_comp_level 6;
brotli_types
brotli_types
text/css
text/plain
text/xml
@ -106,24 +104,24 @@ With the above commands entered, the configuration below should work properly.
```apache
<VirtualHost *:80>
ServerName example.com
ProxyRequests Off
ProxyPreserveHost On
<Proxy *>
Require all granted
</Proxy>
Header always set X-Real-IP %{REMOTE_ADDR}s
Header always set X-Forwarded-For %{REMOTE_ADDR}s
Header always set X-Forwarded-Proto https
Header always set X-Forwarded-Server %{SERVER_NAME}s
Header always set Host %{HTTP_HOST}s
SetOutputFilter BROTLI
AddOutputFilterByType BROTLI_COMPRESS text/css text/plain text/xml text/javascript application/javascript application/json application/manifest+json application/vnd.api+json application/xml application/xhtml+xml application/rss+xml application/atom+xml application/vnd.ms-fontobject application/x-font-ttf application/x-font-opentype application/x-font-truetype image/svg+xml image/x-icon image/vnd.microsoft.icon font/ttf font/eot font/otf font/opentype
BrotliCompressionQuality 6
ProxyPass / http://127.0.0.1:3000/
ProxyPassReverse / http://127.0.0.1:3000/

View file

@ -4,10 +4,9 @@
RAILS_ENV=development
MIN_MINUTES_SPENT_IN_CITY=60
APPLICATION_HOST=dawarich.djhrum.synology.me
APPLICATION_HOSTS=dawarich.example.synology.me
TIME_ZONE=Europe/Berlin
BACKGROUND_PROCESSING_CONCURRENCY=10
MAP_CENTER=[52.520826, 13.409690]
###################################################################################
# Database

View file

@ -8,7 +8,7 @@ services:
restart: unless-stopped
volumes:
- ./redis:/var/shared/redis
dawarich_db:
image: postgres:14.2-alpine
container_name: dawarich_db
@ -28,17 +28,16 @@ services:
- dawarich_redis
stdin_open: true
tty: true
entrypoint: dev-entrypoint.sh
entrypoint: web-entrypoint.sh
command: ['bin/dev']
restart: unless-stopped
env_file:
- .env
volumes:
- ./gem_cache:/usr/local/bundle/gems
- ./public:/var/app/public
ports:
- 32568:3000
dawarich_sidekiq:
image: freikin/dawarich:latest
container_name: dawarich_sidekiq
@ -46,11 +45,10 @@ services:
- dawarich_db
- dawarich_redis
- dawarich_app
entrypoint: dev-entrypoint.sh
entrypoint: sidekiq-entrypoint.sh
command: ['sidekiq']
restart: unless-stopped
env_file:
- .env
volumes:
- ./gem_cache:/usr/local/bundle/gems
- ./public:/var/app/public

View file

@ -25,6 +25,10 @@ FactoryBot.define do
import_id { '' }
city { nil }
country { nil }
reverse_geocoded_at { nil }
course { nil }
course_accuracy { nil }
external_track_id { nil }
user
trait :with_known_location do

View file

@ -10,11 +10,15 @@ FactoryBot.define do
trait :with_points do
after(:build) do |trip|
create_list(
:point, 25,
user: trip.user,
timestamp: trip.started_at + (1..1000).to_a.sample.minutes
)
(1..25).map do |i|
create(
:point,
:with_geodata,
:reverse_geocoded,
timestamp: trip.started_at + i.minutes,
user: trip.user
)
end
end
end
end

File diff suppressed because one or more lines are too long

41
spec/fixtures/files/gpx/arc_example.gpx vendored Normal file
View file

@ -0,0 +1,41 @@
<?xml version="1.0" encoding="utf-8" standalone="no"?>
<gpx creator="Arc App" version="1.1" xmlns="http://www.topografix.com/GPX/1/1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<wpt lat="16.822590884135522" lon="100.26450188975753">
<time>2024-12-17T19:40:05+07:00</time>
<ele>89.9031832732575</ele>
<name>Topland Hotel &amp; Convention Center</name>
</wpt>
<trk>
<type>walking</type>
<trkseg />
</trk>
<trk>
<type>taxi</type>
<trkseg>
<trkpt lat="16.82179723266299" lon="100.26501096574162">
<ele>49.96302288016834</ele>
<time>2024-12-18T08:44:09+07:00</time>
</trkpt>
<trkpt lat="16.821804657654933" lon="100.26501263671403">
<ele>49.884678590538186</ele>
<time>2024-12-18T08:44:16+07:00</time>
</trkpt>
<trkpt lat="16.821831929143876" lon="100.26500741687741">
<ele>49.71960135141746</ele>
<time>2024-12-18T08:44:21+07:00</time>
</trkpt>
<trkpt lat="16.821889949418637" lon="100.26494683052165">
<ele>49.91594081568717</ele>
<time>2024-12-18T08:44:29+07:00</time>
</trkpt>
<trkpt lat="16.821914934283804" lon="100.26485762911803">
<ele>50.344669848377556</ele>
<time>2024-12-18T08:44:38+07:00</time>
</trkpt>
<trkpt lat="16.821949486294397" lon="100.26482772930362">
<ele>50.12800953488726</ele>
<time>2024-12-18T08:44:45+07:00</time>
</trkpt>
</trkseg>
</trk>
</gpx>

View file

@ -0,0 +1,136 @@
{
"locations" : [
{
"type" : "Feature",
"geometry" : {
"type" : "Point",
"coordinates" : [
-122.40530871,
37.744304130000003
]
},
"properties" : {
"horizontal_accuracy" : 5,
"track_id" : "799F32F5-89BB-45FB-A639-098B1B95B09F",
"speed_accuracy" : 0,
"vertical_accuracy" : -1,
"course_accuracy" : 0,
"altitude" : 0,
"speed" : 92.087999999999994,
"course" : 27.07,
"timestamp" : "2025-01-17T21:03:01Z",
"device_id" : "8D5D4197-245B-4619-A88B-2049100ADE46"
}
},
{
"type" : "Feature",
"properties" : {
"timestamp" : "2025-01-17T21:03:02Z",
"horizontal_accuracy" : 5,
"course" : 24.260000000000002,
"speed_accuracy" : 0,
"device_id" : "8D5D4197-245B-4619-A88B-2049100ADE46",
"vertical_accuracy" : -1,
"altitude" : 0,
"track_id" : "799F32F5-89BB-45FB-A639-098B1B95B09F",
"speed" : 92.448000000000008,
"course_accuracy" : 0
},
"geometry" : {
"type" : "Point",
"coordinates" : [
-122.40518926999999,
37.744513759999997
]
}
},
{
"type" : "Feature",
"properties" : {
"altitude" : 0,
"horizontal_accuracy" : 5,
"speed" : 123.76800000000001,
"course_accuracy" : 0,
"speed_accuracy" : 0,
"course" : 309.73000000000002,
"track_id" : "F63A3CF9-2FF8-4076-8F59-5BB1EDC23888",
"device_id" : "8D5D4197-245B-4619-A88B-2049100ADE46",
"timestamp" : "2025-01-17T21:18:38Z",
"vertical_accuracy" : -1
},
"geometry" : {
"type" : "Point",
"coordinates" : [
-122.28487643,
37.454486080000002
]
}
},
{
"type" : "Feature",
"properties" : {
"track_id" : "F63A3CF9-2FF8-4076-8F59-5BB1EDC23888",
"device_id" : "8D5D4197-245B-4619-A88B-2049100ADE46",
"speed_accuracy" : 0,
"course_accuracy" : 0,
"speed" : 123.3,
"horizontal_accuracy" : 5,
"course" : 309.38,
"altitude" : 0,
"timestamp" : "2025-01-17T21:18:39Z",
"vertical_accuracy" : -1
},
"geometry" : {
"coordinates" : [
-122.28517332,
37.454684899999997
],
"type" : "Point"
}
},
{
"geometry" : {
"coordinates" : [
-122.28547306,
37.454883219999999
],
"type" : "Point"
},
"properties" : {
"course_accuracy" : 0,
"device_id" : "8D5D4197-245B-4619-A88B-2049100ADE46",
"vertical_accuracy" : -1,
"course" : 309.73000000000002,
"speed_accuracy" : 0,
"timestamp" : "2025-01-17T21:18:40Z",
"horizontal_accuracy" : 5,
"speed" : 125.06400000000001,
"track_id" : "F63A3CF9-2FF8-4076-8F59-5BB1EDC23888",
"altitude" : 0
},
"type" : "Feature"
},
{
"geometry" : {
"type" : "Point",
"coordinates" : [
-122.28577665,
37.455080109999997
]
},
"properties" : {
"course_accuracy" : 0,
"speed_accuracy" : 0,
"speed" : 124.05600000000001,
"track_id" : "F63A3CF9-2FF8-4076-8F59-5BB1EDC23888",
"course" : 309.73000000000002,
"device_id" : "8D5D4197-245B-4619-A88B-2049100ADE46",
"altitude" : 0,
"horizontal_accuracy" : 5,
"vertical_accuracy" : -1,
"timestamp" : "2025-01-17T21:18:41Z"
},
"type" : "Feature"
}
]
}

View file

@ -9,8 +9,17 @@ RSpec.describe BulkStatsCalculatingJob, type: :job do
let(:timestamp) { DateTime.new(2024, 1, 1).to_i }
let!(:points1) { create_list(:point, 10, user_id: user1.id, timestamp:) }
let!(:points2) { create_list(:point, 10, user_id: user2.id, timestamp:) }
let!(:points1) do
(1..10).map do |i|
create(:point, user_id: user1.id, timestamp: timestamp + i.minutes)
end
end
let!(:points2) do
(1..10).map do |i|
create(:point, user_id: user2.id, timestamp: timestamp + i.minutes)
end
end
it 'enqueues Stats::CalculatingJob for each user' do
expect(Stats::CalculatingJob).to receive(:perform_later).with(user1.id, 2024, 1)

View file

@ -0,0 +1,18 @@
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe Points::CreateJob, type: :job do
describe '#perform' do
subject(:perform) { described_class.new.perform(json, user.id) }
let(:file_path) { 'spec/fixtures/files/points/geojson_example.json' }
let(:file) { File.open(file_path) }
let(:json) { JSON.parse(file.read) }
let(:user) { create(:user) }
it 'creates a point' do
expect { perform }.to change { Point.count }.by(6)
end
end
end

View file

@ -12,8 +12,8 @@ RSpec.describe ReverseGeocodingJob, type: :job do
allow(Geocoder).to receive(:search).and_return([double(city: 'City', country: 'Country')])
end
context 'when REVERSE_GEOCODING_ENABLED is false' do
before { stub_const('REVERSE_GEOCODING_ENABLED', false) }
context 'when reverse geocoding is disabled' do
before { allow(DawarichSettings).to receive(:reverse_geocoding_enabled?).and_return(false) }
it 'does not update point' do
expect { perform }.not_to(change { point.reload.city })
@ -28,8 +28,8 @@ RSpec.describe ReverseGeocodingJob, type: :job do
end
end
context 'when REVERSE_GEOCODING_ENABLED is true' do
before { stub_const('REVERSE_GEOCODING_ENABLED', true) }
context 'when reverse geocoding is enabled' do
before { allow(DawarichSettings).to receive(:reverse_geocoding_enabled?).and_return(true) }
let(:stubbed_geocoder) { OpenStruct.new(data: { city: 'City', country: 'Country' }) }

View file

@ -3,9 +3,9 @@
require 'rails_helper'
RSpec.describe VisitSuggestingJob, type: :job do
describe '#perform' do
let!(:users) { [create(:user)] }
let!(:users) { [create(:user)] }
describe '#perform' do
subject { described_class.perform_now }
before do
@ -13,10 +13,22 @@ RSpec.describe VisitSuggestingJob, type: :job do
allow_any_instance_of(Visits::Suggest).to receive(:call)
end
it 'suggests visits' do
subject
context 'when user has no tracked points' do
it 'does not suggest visits' do
subject
expect(Visits::Suggest).to have_received(:new)
expect(Visits::Suggest).not_to have_received(:new)
end
end
context 'when user has tracked points' do
let!(:tracked_point) { create(:point, user: users.first) }
it 'suggests visits' do
subject
expect(Visits::Suggest).to have_received(:new)
end
end
end
end

View file

@ -0,0 +1,100 @@
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe DawarichSettings do
before do
described_class.instance_variables.each do |ivar|
described_class.remove_instance_variable(ivar)
end
end
describe '.reverse_geocoding_enabled?' do
context 'when photon is enabled' do
before do
allow(described_class).to receive(:photon_enabled?).and_return(true)
allow(described_class).to receive(:geoapify_enabled?).and_return(false)
end
it 'returns true' do
expect(described_class.reverse_geocoding_enabled?).to be true
end
end
context 'when geoapify is enabled' do
before do
allow(described_class).to receive(:photon_enabled?).and_return(false)
allow(described_class).to receive(:geoapify_enabled?).and_return(true)
end
it 'returns true' do
expect(described_class.reverse_geocoding_enabled?).to be true
end
end
context 'when neither service is enabled' do
before do
allow(described_class).to receive(:photon_enabled?).and_return(false)
allow(described_class).to receive(:geoapify_enabled?).and_return(false)
end
it 'returns false' do
expect(described_class.reverse_geocoding_enabled?).to be false
end
end
end
describe '.photon_enabled?' do
context 'when PHOTON_API_HOST is present' do
before { stub_const('PHOTON_API_HOST', 'photon.example.com') }
it 'returns true' do
expect(described_class.photon_enabled?).to be true
end
end
context 'when PHOTON_API_HOST is blank' do
before { stub_const('PHOTON_API_HOST', '') }
it 'returns false' do
expect(described_class.photon_enabled?).to be false
end
end
end
describe '.photon_uses_komoot_io?' do
context 'when PHOTON_API_HOST is komoot.io' do
before { stub_const('PHOTON_API_HOST', 'photon.komoot.io') }
it 'returns true' do
expect(described_class.photon_uses_komoot_io?).to be true
end
end
context 'when PHOTON_API_HOST is different' do
before { stub_const('PHOTON_API_HOST', 'photon.example.com') }
it 'returns false' do
expect(described_class.photon_uses_komoot_io?).to be false
end
end
end
describe '.geoapify_enabled?' do
context 'when GEOAPIFY_API_KEY is present' do
before { stub_const('GEOAPIFY_API_KEY', 'some-api-key') }
it 'returns true' do
expect(described_class.geoapify_enabled?).to be true
end
end
context 'when GEOAPIFY_API_KEY is blank' do
before { stub_const('GEOAPIFY_API_KEY', '') }
it 'returns false' do
expect(described_class.geoapify_enabled?).to be false
end
end
end
end

View file

@ -26,7 +26,11 @@ RSpec.describe Import, type: :model do
describe '#years_and_months_tracked' do
let(:import) { create(:import) }
let(:timestamp) { Time.zone.local(2024, 11, 1) }
let!(:points) { create_list(:point, 3, import:, timestamp:) }
let!(:points) do
(1..3).map do |i|
create(:point, import:, timestamp: timestamp + i.minutes)
end
end
it 'returns years and months tracked' do
expect(import.years_and_months_tracked).to eq([[2024, 11]])

View file

@ -23,6 +23,8 @@ RSpec.describe Place, type: :model do
describe '#async_reverse_geocode' do
let(:place) { create(:place) }
before { allow(DawarichSettings).to receive(:reverse_geocoding_enabled?).and_return(true) }
it 'updates address' do
expect { place.async_reverse_geocode }.to have_enqueued_job(ReverseGeocodingJob).with('Place', place.id)
end

View file

@ -46,6 +46,8 @@ RSpec.describe Point, type: :model do
describe '#async_reverse_geocode' do
let(:point) { build(:point) }
before { allow(DawarichSettings).to receive(:reverse_geocoding_enabled?).and_return(true) }
it 'enqueues ReverseGeocodeJob with correct arguments' do
point.save
@ -56,14 +58,8 @@ RSpec.describe Point, type: :model do
context 'when point is imported' do
let(:point) { build(:point, import_id: 1) }
it 'does not enqueue ReverseGeocodeJob' do
expect { point.async_reverse_geocode }.not_to have_enqueued_job(ReverseGeocodingJob)
end
context 'when reverse geocoding is forced' do
it 'enqueues ReverseGeocodeJob' do
expect { point.async_reverse_geocode(force: true) }.to have_enqueued_job(ReverseGeocodingJob)
end
it 'enqueues ReverseGeocodeJob' do
expect { point.async_reverse_geocode }.to have_enqueued_job(ReverseGeocodingJob)
end
end
end

View file

@ -89,8 +89,14 @@ RSpec.describe Stat, type: :model do
subject { stat.points.to_a }
let(:stat) { create(:stat, year:, month: 1, user:) }
let(:timestamp) { DateTime.new(year, 1, 1, 5, 0, 0) }
let!(:points) { create_list(:point, 3, user:, timestamp:) }
let(:base_timestamp) { DateTime.new(year, 1, 1, 5, 0, 0) }
let!(:points) do
[
create(:point, user:, timestamp: base_timestamp),
create(:point, user:, timestamp: base_timestamp + 1.hour),
create(:point, user:, timestamp: base_timestamp + 2.hours)
]
end
it 'returns points' do
expect(subject).to eq(points)

View file

@ -115,7 +115,11 @@ RSpec.describe User, type: :model do
end
describe '#years_tracked' do
let!(:points) { create_list(:point, 3, user:, timestamp: DateTime.new(2024, 1, 1, 5, 0, 0)) }
let!(:points) do
(1..3).map do |i|
create(:point, user:, timestamp: DateTime.new(2024, 1, 1, 5, 0, 0) + i.minutes)
end
end
it 'returns years tracked' do
expect(user.years_tracked).to eq([{ year: 2024, months: ['Jan'] }])

View file

@ -9,6 +9,18 @@ RSpec.describe 'Api::V1::Healths', type: :request do
get '/api/v1/health'
expect(response).to have_http_status(:success)
expect(response.headers['X-Dawarich-Response']).to eq('Hey, I\'m alive!')
end
end
context 'when user is authenticated' do
let(:user) { create(:user) }
it 'returns http success' do
get '/api/v1/health', headers: { 'Authorization' => "Bearer #{user.api_key}" }
expect(response).to have_http_status(:success)
expect(response.headers['X-Dawarich-Response']).to eq('Hey, I\'m alive and authenticated!')
end
end
end

Some files were not shown because too many files have changed in this diff Show more