mirror of https://github.com/Freika/dawarich.git
synced 2026-01-10 17:21:38 -05:00

commit 7a23255db6
128 changed files with 1756 additions and 732 deletions

@@ -1 +1 @@
-0.25.3
+0.25.4

@@ -7,9 +7,9 @@ services:
      dockerfile: Dockerfile
    container_name: dawarich_dev
    volumes:
      - "${PWD}:/var/app:cached"
      - dawarich_public:/var/app/public
      - dawarich_watched:/var/app/tmp/imports/watched
      - dawarich_storage:/var/app/storage
    networks:
      - dawarich
    ports:

@@ -71,3 +71,4 @@ volumes:
  dawarich_shared:
  dawarich_public:
  dawarich_watched:
+ dawarich_storage:
CHANGELOG.md (69 lines changed)

@@ -4,6 +4,75 @@ All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).

# 0.25.4 - 2025-04-02

⚠️ This release includes a breaking change. ⚠️

Make sure to add the `dawarich_storage` volume to your `docker-compose.yml` file. Example:

```diff
...

  dawarich_app:
    image: freikin/dawarich:latest
    container_name: dawarich_app
    volumes:
      - dawarich_public:/var/app/public
      - dawarich_watched:/var/app/tmp/imports/watched
+     - dawarich_storage:/var/app/storage

...

  dawarich_sidekiq:
    image: freikin/dawarich:latest
    container_name: dawarich_sidekiq
    volumes:
      - dawarich_public:/var/app/public
      - dawarich_watched:/var/app/tmp/imports/watched
+     - dawarich_storage:/var/app/storage

volumes:
  dawarich_db_data:
  dawarich_shared:
  dawarich_public:
  dawarich_watched:
+ dawarich_storage:
```

This release changes the way import files are stored. Previously, they were kept in the `raw_data` column of the `imports` table; now they are attached to the import record via Active Storage. All new imports use the new storage. To migrate existing imports, run the `rake imports:migrate_to_new_storage` task in the container shell.

This task is optional and will not affect your points or other data. Big imports may take a while to migrate, so be patient.

You can also migrate existing exports to the new storage using the `rake exports:migrate_to_new_storage` task (in the container shell), or simply delete them.

If your hardware doesn't have enough memory to migrate the imports, you can delete your imports and re-import them.
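A minimal sketch of what such a migration task might look like — this is an assumption based on the `Import#migrate_to_new_storage` model method introduced in this commit, not the shipped task source:

```ruby
# Hypothetical sketch; the actual rake task ships with the release.
namespace :imports do
  desc 'Attach legacy raw_data imports as Active Storage files'
  task migrate_to_new_storage: :environment do
    Import.find_each do |import|
      # Import#migrate_to_new_storage (added in this commit) returns early
      # when a file is already attached, so re-running is safe.
      import.migrate_to_new_storage
    end
  end
end
```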
## Added

- Sentry can now be used for error tracking.
- Subscription management is now available in non-self-hosted mode.

## Changed

- Import files are now attached to the import record instead of being stored in the `raw_data` database column.
- Import files can now be stored in S3-compatible storage.
- Export files are now attached to the export record instead of being stored in the file system.
- Export files can now be stored in S3-compatible storage.
- Users can now import Google's Records.json file via the UI instead of using the CLI.
- Optional telemetry sending is now disabled and will be removed in the future.

## Fixed

- Moving points on the map now works correctly. #957
- The `rake points:migrate_to_lonlat` task now also reindexes the points table.
- Fixed filling the `lonlat` column for old places after reverse geocoding.
- Deleting an import now correctly recalculates stats.
- Datetimes across the app are now displayed in a human-readable format, e.g. 26 Dec 2024, 13:49. Hover over a datetime to see the ISO 8601 timestamp.

# 0.25.3 - 2025-03-22

## Fixed
Gemfile (9 lines changed)

@@ -5,6 +5,10 @@ git_source(:github) { |repo| "https://github.com/#{repo}.git" }

ruby File.read('.ruby-version').strip

+# https://meta.discourse.org/t/cant-rebuild-due-to-aws-sdk-gem-bump-and-new-aws-data-integrity-protections/354217/40
+gem 'aws-sdk-s3', '~> 1.177.0', require: false
+gem 'aws-sdk-core', '~> 3.215.1', require: false
+gem 'aws-sdk-kms', '~> 1.96.0', require: false
gem 'bootsnap', require: false
gem 'chartkick'
gem 'data_migrate'

@@ -27,7 +31,8 @@ gem 'rgeo'
gem 'rgeo-activerecord'
gem 'rswag-api'
gem 'rswag-ui'
-gem 'shrine', '~> 3.6'
+gem 'sentry-ruby'
+gem 'sentry-rails'
gem 'sidekiq'
gem 'sidekiq-cron'
gem 'sidekiq-limit_fetch'

@@ -37,8 +42,10 @@ gem 'strong_migrations'
gem 'tailwindcss-rails'
gem 'turbo-rails'
gem 'tzinfo-data', platforms: %i[mingw mswin x64_mingw jruby]
+gem 'jwt'

group :development, :test do
  gem 'brakeman', require: false
  gem 'debug', platforms: %i[mri mingw x64_mingw]
  gem 'dotenv-rails'
  gem 'factory_bot_rails'

Gemfile.lock (41 lines changed)

@@ -79,19 +79,36 @@ GEM
    public_suffix (>= 2.0.2, < 7.0)
  ast (2.4.2)
  attr_extras (7.1.0)
  aws-eventstream (1.3.2)
  aws-partitions (1.1072.0)
  aws-sdk-core (3.215.1)
    aws-eventstream (~> 1, >= 1.3.0)
    aws-partitions (~> 1, >= 1.992.0)
    aws-sigv4 (~> 1.9)
    jmespath (~> 1, >= 1.6.1)
  aws-sdk-kms (1.96.0)
    aws-sdk-core (~> 3, >= 3.210.0)
    aws-sigv4 (~> 1.5)
  aws-sdk-s3 (1.177.0)
    aws-sdk-core (~> 3, >= 3.210.0)
    aws-sdk-kms (~> 1)
    aws-sigv4 (~> 1.5)
  aws-sigv4 (1.11.0)
    aws-eventstream (~> 1, >= 1.0.2)
  base64 (0.2.0)
  bcrypt (3.1.20)
  benchmark (0.4.0)
  bigdecimal (3.1.9)
  bootsnap (1.18.4)
    msgpack (~> 1.2)
  brakeman (7.0.2)
    racc
  builder (3.3.0)
  byebug (11.1.3)
  chartkick (5.1.3)
  coderay (1.1.3)
  concurrent-ruby (1.3.5)
  connection_pool (2.5.0)
  content_disposition (1.0.0)
  crack (1.0.0)
    bigdecimal
    rexml

@@ -121,8 +138,6 @@ GEM
  dotenv-rails (3.1.7)
    dotenv (= 3.1.7)
    railties (>= 6.1)
  down (5.4.2)
    addressable (~> 2.8)
  drb (2.2.1)
  erubi (1.13.1)
  et-orbi (1.2.11)

@@ -164,9 +179,12 @@ GEM
    pp (>= 0.6.0)
    rdoc (>= 4.0.0)
    reline (>= 0.4.2)
  jmespath (1.6.2)
  json (2.10.1)
  json-schema (5.0.1)
    addressable (~> 2.8)
  jwt (2.10.1)
    base64
  kaminari (1.2.2)
    activesupport (>= 4.1.0)
    kaminari-actionview (= 1.2.2)

@@ -369,11 +387,14 @@ GEM
    rubocop-ast (>= 1.38.0, < 2.0)
  ruby-progressbar (1.13.0)
  securerandom (0.4.1)
  sentry-rails (5.23.0)
    railties (>= 5.0)
    sentry-ruby (~> 5.23.0)
  sentry-ruby (5.23.0)
    bigdecimal
    concurrent-ruby (~> 1.0, >= 1.0.2)
  shoulda-matchers (6.4.0)
    activesupport (>= 5.2.0)
  shrine (3.6.0)
    content_disposition (~> 1.0)
    down (~> 5.1)
  sidekiq (7.3.9)
    base64
    connection_pool (>= 2.3.0)

@@ -453,7 +474,11 @@ PLATFORMS

DEPENDENCIES
  activerecord-postgis-adapter
  aws-sdk-core (~> 3.215.1)
  aws-sdk-kms (~> 1.96.0)
  aws-sdk-s3 (~> 1.177.0)
  bootsnap
  brakeman
  chartkick
  data_migrate
  database_consistency

@@ -469,6 +494,7 @@ DEPENDENCIES
  groupdate
  httparty
  importmap-rails
  jwt
  kaminari
  lograge
  oj

@@ -487,8 +513,9 @@ DEPENDENCIES
  rswag-specs
  rswag-ui
  rubocop-rails
  sentry-rails
  sentry-ruby
  shoulda-matchers
  shrine (~> 3.6)
  sidekiq
  sidekiq-cron
  sidekiq-limit_fetch
@@ -32,7 +32,7 @@ class Api::V1::PointsController < ApiController
  def update
    point = current_api_user.tracked_points.find(params[:id])

-   point.update(point_params)
+   point.update(lonlat: "POINT(#{point_params[:longitude]} #{point_params[:latitude]})")

    render json: point_serializer.new(point).call
  end

@@ -13,7 +13,7 @@ class ApiController < ApplicationController
  end

  def authenticate_active_api_user!
-   render json: { error: 'User is not active' }, status: :unauthorized unless current_api_user&.active?
+   render json: { error: 'User is not active' }, status: :unauthorized unless current_api_user&.active_until&.future?

    true
  end

@@ -26,11 +26,17 @@ class ApplicationController < ActionController::Base
  end

  def authenticate_active_user!
-   return if current_user&.active?
+   return if current_user&.active_until&.future?

    redirect_to root_path, notice: 'Your account is not active.', status: :see_other
  end

+ def authenticate_non_self_hosted!
+   return unless DawarichSettings.self_hosted?
+
+   redirect_to root_path, notice: 'You are not authorized to perform this action.', status: :see_other
+ end
+
  private

  def set_self_hosted_status
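Both `authenticate_active_api_user!` and `authenticate_active_user!` now key off `active_until&.future?` instead of the old `active?` predicate. Self-hosted installs keep passing this check because `User#activate` and the `SetActiveUntilForSelfhostedUsers` data migration (at the end of this diff) set `active_until` far into the future.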
@@ -1,6 +1,8 @@
# frozen_string_literal: true

class ExportsController < ApplicationController
+ include ActiveStorage::SetCurrent
+
  before_action :authenticate_user!
  before_action :set_export, only: %i[destroy]

@@ -11,9 +13,13 @@ class ExportsController < ApplicationController
  def create
    export_name =
      "export_from_#{params[:start_at].to_date}_to_#{params[:end_at].to_date}.#{params[:file_format]}"
-   export = current_user.exports.create(name: export_name, status: :created)
-
-   ExportJob.perform_later(export.id, params[:start_at], params[:end_at], file_format: params[:file_format])
+   export = current_user.exports.create(
+     name: export_name,
+     status: :created,
+     file_format: params[:file_format],
+     start_at: params[:start_at],
+     end_at: params[:end_at]
+   )

    redirect_to exports_url, notice: 'Export was successfully initiated. Please wait until it\'s finished.'
  rescue StandardError => e

@@ -23,11 +29,7 @@ class ExportsController < ApplicationController
  end

  def destroy
-   ActiveRecord::Base.transaction do
-     @export.destroy
-
-     File.delete(Rails.root.join('public', 'exports', @export.name))
-   end
+   @export.destroy

    redirect_to exports_url, notice: 'Export was successfully destroyed.', status: :see_other
  end

@@ -37,8 +39,4 @@ class ExportsController < ApplicationController
  def set_export
    @export = current_user.exports.find(params[:id])
  end
-
- def export_params
-   params.require(:export).permit(:name, :url, :status)
- end
end
@@ -1,6 +1,8 @@
# frozen_string_literal: true

class ImportsController < ApplicationController
+ include ActiveStorage::SetCurrent
+
  before_action :authenticate_user!
  before_action :authenticate_active_user!, only: %i[new create]
  before_action :set_import, only: %i[show destroy]

@@ -9,7 +11,7 @@ class ImportsController < ApplicationController
  @imports =
    current_user
      .imports
-     .select(:id, :name, :source, :created_at, :points_count)
+     .select(:id, :name, :source, :created_at, :processed)
      .order(created_at: :desc)
      .page(params[:page])
end

@@ -23,27 +25,17 @@ class ImportsController < ApplicationController
def create
  files = import_params[:files].reject(&:blank?)

- import_ids = files.map do |file|
-   import = current_user.imports.create(
+ files.each do |file|
+   import = current_user.imports.build(
      name: file.original_filename,
      source: params[:import][:source]
    )

-   file = File.read(file)
+   import.file.attach(io: file, filename: file.original_filename, content_type: file.content_type)

-   raw_data =
-     case params[:import][:source]
-     when 'gpx' then Hash.from_xml(file)
-     when 'owntracks' then OwnTracks::RecParser.new(file).call
-     else JSON.parse(file)
-     end
-
-   import.update(raw_data:)
-   import.id
+   import.save!
  end

- import_ids.each { ImportJob.perform_later(current_user.id, _1) }
-
  redirect_to imports_url, notice: "#{files.size} files are queued to be imported in background", status: :see_other
rescue StandardError => e
  Import.where(user: current_user, name: files.map(&:original_filename)).destroy_all
app/controllers/settings/subscriptions_controller.rb (new file, 34 lines)

@@ -0,0 +1,34 @@
# frozen_string_literal: true

class Settings::SubscriptionsController < ApplicationController
  before_action :authenticate_user!
  before_action :authenticate_non_self_hosted!

  def index; end

  def subscription_callback
    token = params[:token]

    begin
      decoded_token = JWT.decode(
        token,
        ENV['JWT_SECRET_KEY'],
        true,
        { algorithm: 'HS256' }
      ).first.symbolize_keys

      unless decoded_token[:user_id] == current_user.id
        redirect_to settings_subscriptions_path, alert: 'Invalid subscription update request.'
        return
      end

      current_user.update!(status: decoded_token[:status], active_until: decoded_token[:active_until])

      redirect_to settings_subscriptions_path, notice: 'Your subscription has been updated successfully!'
    rescue JWT::DecodeError
      redirect_to settings_subscriptions_path, alert: 'Failed to verify subscription update.'
    rescue ArgumentError
      redirect_to settings_subscriptions_path, alert: 'Invalid subscription data received.'
    end
  end
end
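The token this callback verifies is minted by `User#generate_subscription_token`, shown later in this diff. A minimal round-trip sketch of the flow, assuming `ENV['JWT_SECRET_KEY']` holds the same secret on both ends:

```ruby
# Uses the ruby-jwt API exactly as the controller and model do.
payload = { user_id: 1, email: 'user@example.com', exp: 30.minutes.from_now.to_i }
token   = JWT.encode(payload, ENV['JWT_SECRET_KEY'], 'HS256')

# Verification mirrors subscription_callback; a tampered or expired token
# raises JWT::DecodeError (JWT::ExpiredSignature is a subclass).
decoded = JWT.decode(token, ENV['JWT_SECRET_KEY'], true, { algorithm: 'HS256' }).first.symbolize_keys
decoded[:user_id] # => 1
```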
@@ -115,6 +115,17 @@ module ApplicationHelper
    date.strftime('%e %B %Y')
  end

+ def human_datetime(datetime)
+   return unless datetime
+
+   content_tag(
+     :span,
+     datetime.strftime('%e %b %Y, %H:%M'),
+     class: 'tooltip',
+     data: { tip: datetime.iso8601 }
+   )
+ end
+
  def speed_text_color(speed)
    return 'text-default' if speed.to_i >= 0

@@ -126,4 +137,17 @@ module ApplicationHelper

    speed * 3.6
  end

+ def days_left(active_until)
+   return unless active_until
+
+   time_words = distance_of_time_in_words(Time.zone.now, active_until)
+
+   content_tag(
+     :span,
+     time_words,
+     class: 'tooltip',
+     data: { tip: "Expires on #{active_until.iso8601}" }
+   )
+ end
end
@@ -501,10 +501,11 @@ export default class extends BaseController {
  }

  deletePoint(id, apiKey) {
-   fetch(`/api/v1/points/${id}?api_key=${apiKey}`, {
+   fetch(`/api/v1/points/${id}`, {
      method: 'DELETE',
      headers: {
        'Content-Type': 'application/json',
+       'Authorization': `Bearer ${apiKey}`
      }
    })
    .then(response => {
@@ -2,6 +2,7 @@

class AppVersionCheckingJob < ApplicationJob
  queue_as :default
+ sidekiq_options retry: false

  def perform
    Rails.cache.delete(CheckAppVersion::VERSION_CACHE_KEY)

@@ -3,9 +3,9 @@
class ExportJob < ApplicationJob
  queue_as :exports

- def perform(export_id, start_at, end_at, file_format: :json)
+ def perform(export_id)
    export = Export.find(export_id)

-   Exports::Create.new(export:, start_at:, end_at:, file_format:).call
+   Exports::Create.new(export:).call
  end
end
app/jobs/import/process_job.rb (new file, 11 lines)

@@ -0,0 +1,11 @@
# frozen_string_literal: true

class Import::ProcessJob < ApplicationJob
  queue_as :imports

  def perform(import_id)
    import = Import.find(import_id)

    import.process!
  end
end

app/jobs/import/update_points_count_job.rb (new file, 13 lines)

@@ -0,0 +1,13 @@
# frozen_string_literal: true

class Import::UpdatePointsCountJob < ApplicationJob
  queue_as :imports

  def perform(import_id)
    import = Import.find(import_id)

    import.update(processed: import.points.count)
  rescue ActiveRecord::RecordNotFound
    nil
  end
end
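A note on the `rescue ActiveRecord::RecordNotFound` in `Import::UpdatePointsCountJob`: points enqueue this job from a destroy callback (see the `Point` model below), so by the time it runs the parent import may already be gone; rescuing to `nil` turns that case into a safe no-op.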
@@ -5,6 +5,8 @@ class Import::WatcherJob < ApplicationJob
  sidekiq_options retry: false

  def perform
+   return unless DawarichSettings.self_hosted?
+
    Imports::Watcher.new.call
  end
end

@@ -1,12 +0,0 @@
-# frozen_string_literal: true
-
-class ImportJob < ApplicationJob
-  queue_as :imports
-
-  def perform(user_id, import_id)
-    user = User.find(user_id)
-    import = user.imports.find(import_id)
-
-    import.process!
-  end
-end
@@ -59,12 +59,11 @@ module Distanceable
    return 0 if points.length < 2

    total_meters = points.each_cons(2).sum do |point1, point2|
-     connection.select_value(<<-SQL.squish)
-       SELECT ST_Distance(
-         ST_GeomFromEWKT('#{point1.lonlat}')::geography,
-         ST_GeomFromEWKT('#{point2.lonlat}')::geography
-       )
-     SQL
+     connection.select_value(
+       'SELECT ST_Distance(ST_GeomFromEWKT($1)::geography, ST_GeomFromEWKT($2)::geography)',
+       nil,
+       [point1.lonlat, point2.lonlat]
+     )
    end

    total_meters.to_f / DISTANCE_UNITS[unit.to_sym]
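As with the `Visits` bounding-box query later in this diff, the coordinate values now travel as bind parameters (`$1`/`$2`) instead of being interpolated into the SQL string, so point data can no longer alter the query text.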
@@ -4,16 +4,35 @@ class Export < ApplicationRecord
  belongs_to :user

  enum :status, { created: 0, processing: 1, completed: 2, failed: 3 }
+ enum :file_format, { json: 0, gpx: 1 }

  validates :name, presence: true

- before_destroy :delete_export_file
+ has_one_attached :file
+
+ after_commit -> { ExportJob.perform_later(id) }, on: :create
+ after_commit -> { remove_attached_file }, on: :destroy
+
+ def process!
+   Exports::Create.new(export: self).call
+ end
+
+ def migrate_to_new_storage
+   file.attach(io: File.open("public/#{url}"), filename: name)
+   update!(url: nil)
+
+   File.delete("public/#{url}")
+ rescue StandardError => e
+   Rails.logger.debug("Error migrating export #{id}: #{e.message}")
+ end

  private

- def delete_export_file
-   file_path = Rails.root.join('public', 'exports', "#{name}.json")
+ def remove_attached_file
+   file.purge_later

-   File.delete(file_path) if File.exist?(file_path)
+   File.delete("public/#{url}")
  rescue StandardError => e
    Rails.logger.debug("Error removing export #{id}: #{e.message}")
  end
end
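With the new callbacks, creating an `Export` record is all a caller needs to do — generation is enqueued automatically. A usage sketch (attribute values are illustrative):

```ruby
export = user.exports.create!(
  name: 'export_from_2025-01-01_to_2025-04-01.json',
  status: :created,
  file_format: :json,
  start_at: '2025-01-01',
  end_at: '2025-04-01'
)
# The after_commit callback enqueues ExportJob, which runs Exports::Create
# and attaches the generated file to export.file.
```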
@@ -4,9 +4,10 @@ class Import < ApplicationRecord
  belongs_to :user
  has_many :points, dependent: :destroy

- delegate :count, to: :points, prefix: true
+ has_one_attached :file

- include ImportUploader::Attachment(:raw)
+ after_commit -> { Import::ProcessJob.perform_later(id) }, on: :create
+ after_commit :remove_attached_file, on: :destroy

  enum :source, {
    google_semantic_history: 0, owntracks: 1, google_records: 2,

@@ -27,4 +28,18 @@ class Import < ApplicationRecord
    [time.year, time.month]
  end.uniq
end

+ def migrate_to_new_storage
+   return if file.attached?
+
+   raw_file = File.new(raw_data)
+
+   file.attach(io: raw_file, filename: name, content_type: 'application/json')
+ end
+
+ private
+
+ def remove_attached_file
+   file.purge_later
+ end
end
@@ -30,6 +30,7 @@ class Point < ApplicationRecord

  after_create :async_reverse_geocode
  after_create_commit :broadcast_coordinates
+ after_commit -> { Import::UpdatePointsCountJob.perform_later(import_id) }, on: :destroy, if: -> { import_id.present? }

  def self.without_raw_data
    select(column_names - ['raw_data'])
@@ -100,6 +100,22 @@ class User < ApplicationRecord
    end
  end

+ def can_subscribe?
+   active_until&.past? && !DawarichSettings.self_hosted?
+ end
+
+ def generate_subscription_token
+   payload = {
+     user_id: id,
+     email: email,
+     exp: 30.minutes.from_now.to_i
+   }
+
+   secret_key = ENV['JWT_SECRET_KEY']
+
+   JWT.encode(payload, secret_key, 'HS256')
+ end
+
  private

  def create_api_key

@@ -109,7 +125,8 @@ class User < ApplicationRecord
  end

  def activate
-   update(status: :active)
+   # TODO: Remove the `status` column in the future.
+   update(status: :active, active_until: 1000.years.from_now)
  end

  def sanitize_input

@@ -118,21 +135,22 @@ class User < ApplicationRecord
    settings.try(:[], 'maps')&.try(:[], 'url')&.strip!
  end

+ # rubocop:disable Metrics/MethodLength
  def import_sample_points
    return unless Rails.env.development? ||
                  Rails.env.production? ||
                  (Rails.env.test? && ENV['IMPORT_SAMPLE_POINTS'])

-   raw_data = Hash.from_xml(
-     File.read(Rails.root.join('lib/assets/sample_points.gpx'))
-   )
-
    import = imports.create(
      name: 'DELETE_ME_this_is_a_demo_import_DELETE_ME',
-     source: 'gpx',
-     raw_data:
+     source: 'gpx'
    )

-   ImportJob.perform_later(id, import.id)
+   import.file.attach(
+     Rack::Test::UploadedFile.new(
+       Rails.root.join('lib/assets/sample_points.gpx'), 'application/xml'
+     )
+   )
  end
+ # rubocop:enable Metrics/MethodLength
end
@@ -1,28 +1,30 @@
# frozen_string_literal: true

class Exports::Create
- def initialize(export:, start_at:, end_at:, file_format: :json)
+ def initialize(export:)
    @export = export
    @user = export.user
-   @start_at = start_at.to_datetime
-   @end_at = end_at.to_datetime
-   @file_format = file_format
+   @start_at = export.start_at
+   @end_at = export.end_at
+   @file_format = export.file_format
  end

  def call
-   export.update!(status: :processing)
+   ActiveRecord::Base.transaction do
+     export.update!(status: :processing)

-   points = time_framed_points
+     points = time_framed_points

-   data = points_data(points)
+     data = points_data(points)

-   create_export_file(data)
+     attach_export_file(data)

-   export.update!(status: :completed, url: "exports/#{export.name}")
+     export.update!(status: :completed)

-   create_export_finished_notification
+     notify_export_finished
+   end
  rescue StandardError => e
-   create_failed_export_notification(e)
+   notify_export_failed(e)

    export.update!(status: :failed)
  end

@@ -38,7 +40,7 @@ class Exports::Create
    .order(timestamp: :asc)
end

- def create_export_finished_notification
+ def notify_export_finished
    Notifications::Create.new(
      user:,
      kind: :info,

@@ -47,7 +49,7 @@ class Exports::Create
    ).call
  end

- def create_failed_export_notification(error)
+ def notify_export_failed(error)
    Notifications::Create.new(
      user:,
      kind: :error,

@@ -72,18 +74,18 @@ class Exports::Create
    Points::GpxSerializer.new(points, export.name).call
  end

- def create_export_file(data)
-   dir_path = Rails.root.join('public/exports')
-
-   FileUtils.mkdir_p(dir_path) unless Dir.exist?(dir_path)
-
-   file_path = dir_path.join(export.name)
-
-   Rails.logger.info("Creating export file at: #{file_path}")
-
-   File.open(file_path, 'w') { |file| file.write(data) }
+ def attach_export_file(data)
+   export.file.attach(io: StringIO.new(data.to_s), filename: export.name, content_type:)
  rescue StandardError => e
    Rails.logger.error("Failed to create export file: #{e.message}")
    raise
  end

+ def content_type
+   case file_format.to_sym
+   when :json then 'application/json'
+   when :gpx then 'application/gpx+xml'
+   else raise ArgumentError, "Unsupported file format: #{file_format}"
+   end
+ end
end
@@ -2,34 +2,28 @@

class Geojson::ImportParser
  include Imports::Broadcaster
+ include PointValidation

- attr_reader :import, :json, :user_id
+ attr_reader :import, :user_id

  def initialize(import, user_id)
    @import = import
-   @json = import.raw_data
    @user_id = user_id
  end

  def call
-   data = Geojson::Params.new(json).call
+   import.file.download do |file|
+     json = Oj.load(file)

-   data.each.with_index(1) do |point, index|
-     next if point_exists?(point, user_id)
+     data = Geojson::Params.new(json).call

-     Point.create!(point.merge(user_id:, import_id: import.id))
+     data.each.with_index(1) do |point, index|
+       next if point_exists?(point, user_id)

-     broadcast_import_progress(import, index)
+       Point.create!(point.merge(user_id:, import_id: import.id))
+
+       broadcast_import_progress(import, index)
+     end
    end
  end
-
- private
-
- def point_exists?(params, user_id)
-   Point.exists?(
-     lonlat: params[:lonlat],
-     timestamp: params[:timestamp],
-     user_id:
-   )
- end
end
@@ -48,13 +48,15 @@ class GoogleMaps::PhoneTakeoutParser
  raw_signals = []
  raw_array = []

- if import.raw_data.is_a?(Array)
-   raw_array = parse_raw_array(import.raw_data)
- else
-   if import.raw_data['semanticSegments']
-     semantic_segments = parse_semantic_segments(import.raw_data['semanticSegments'])
-   end
-   raw_signals = parse_raw_signals(import.raw_data['rawSignals']) if import.raw_data['rawSignals']
- end
+ import.file.download do |file|
+   json = Oj.load(file)
+
+   if json.is_a?(Array)
+     raw_array = parse_raw_array(json)
+   else
+     semantic_segments = parse_semantic_segments(json['semanticSegments']) if json['semanticSegments']
+     raw_signals = parse_raw_signals(json['rawSignals']) if json['rawSignals']
+   end
+ end

  semantic_segments + raw_signals + raw_array
@@ -1,5 +1,8 @@
# frozen_string_literal: true

+# This class is used to import Google's Records.json file
+# via the CLI, vs the UI, which uses the `GoogleMaps::RecordsStorageImporter` class.
+
class GoogleMaps::RecordsImporter
  include Imports::Broadcaster
app/services/google_maps/records_storage_importer.rb (new file, 102 lines)

@@ -0,0 +1,102 @@
# frozen_string_literal: true

# This class is used to import Google's Records.json file
# via the UI, vs the CLI, which uses the `GoogleMaps::RecordsImporter` class.

class GoogleMaps::RecordsStorageImporter
  BATCH_SIZE = 1000
  MAX_RETRIES = 3
  DOWNLOAD_TIMEOUT = 300 # 5 minutes timeout

  def initialize(import, user_id)
    @import = import
    @user = User.find_by(id: user_id)
  end

  def call
    process_file_in_batches
  rescue Oj::ParseError => e
    Rails.logger.error("JSON parsing error: #{e.message}")
    raise
  end

  private

  attr_reader :import, :user

  def process_file_in_batches
    file = download_file
    verify_file_integrity(file)
    locations = parse_file(file)
    process_locations_in_batches(locations) if locations.present?
  end

  def download_file
    retries = 0

    begin
      Timeout.timeout(DOWNLOAD_TIMEOUT) do
        import.file.download
      end
    rescue Timeout::Error => e
      retries += 1
      if retries <= MAX_RETRIES
        Rails.logger.warn("Download timeout, attempt #{retries} of #{MAX_RETRIES}")
        retry
      else
        Rails.logger.error("Download failed after #{MAX_RETRIES} attempts")
        raise
      end
    rescue StandardError => e
      Rails.logger.error("Download error: #{e.message}")
      raise
    end
  end

  def verify_file_integrity(file)
    # Verify file size
    expected_size = import.file.blob.byte_size
    actual_size = file.size

    if expected_size != actual_size
      raise "Incomplete download: expected #{expected_size} bytes, got #{actual_size} bytes"
    end

    # Verify checksum
    expected_checksum = import.file.blob.checksum
    actual_checksum = Base64.strict_encode64(Digest::MD5.digest(file))

    return unless expected_checksum != actual_checksum

    raise "Checksum mismatch: expected #{expected_checksum}, got #{actual_checksum}"
  end

  def parse_file(file)
    parsed_file = Oj.load(file, mode: :compat)
    return nil unless parsed_file.is_a?(Hash) && parsed_file['locations']

    parsed_file['locations']
  end

  def process_locations_in_batches(locations)
    batch = []
    index = 0

    locations.each do |location|
      batch << location

      next unless batch.size >= BATCH_SIZE

      process_batch(batch, index)
      index += BATCH_SIZE
      batch = []
    end

    # Process any remaining records that didn't make a full batch
    process_batch(batch, index) unless batch.empty?
  end

  def process_batch(batch, index)
    GoogleMaps::RecordsImporter.new(import, index).call(batch)
  end
end
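The manual accumulator in `process_locations_in_batches` is equivalent to slicing the array; a compact alternative, shown only to clarify the batching logic:

```ruby
locations.each_slice(BATCH_SIZE).with_index do |batch, i|
  # i * BATCH_SIZE reproduces the running offset, including for the
  # final partial batch.
  GoogleMaps::RecordsImporter.new(import, i * BATCH_SIZE).call(batch)
end
```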
@@ -13,8 +13,6 @@ class GoogleMaps::SemanticHistoryParser
  end

  def call
-   points_data = parse_json
-
    points_data.each_slice(BATCH_SIZE) do |batch|
      @current_index += batch.size
      process_batch(batch)

@@ -62,10 +60,18 @@ class GoogleMaps::SemanticHistoryParser
    )
  end

- def parse_json
-   import.raw_data['timelineObjects'].flat_map do |timeline_object|
-     parse_timeline_object(timeline_object)
-   end.compact
+ def points_data
+   data = nil
+
+   import.file.download do |f|
+     json = Oj.load(f)
+
+     data = json['timelineObjects'].flat_map do |timeline_object|
+       parse_timeline_object(timeline_object)
+     end.compact
+   end
+
+   data
  end

  def parse_timeline_object(timeline_object)
@@ -3,22 +3,25 @@
class Gpx::TrackImporter
  include Imports::Broadcaster

- attr_reader :import, :json, :user_id
+ attr_reader :import, :user_id

  def initialize(import, user_id)
    @import = import
-   @json = import.raw_data
    @user_id = user_id
  end

  def call
-   tracks = json['gpx']['trk']
-   tracks_arr = tracks.is_a?(Array) ? tracks : [tracks]
+   import.file.download do |file|
+     json = Hash.from_xml(file)

-   points = tracks_arr.map { parse_track(_1) }.flatten.compact
-   points_data = points.map.with_index(1) { |point, index| prepare_point(point, index) }.compact
+     tracks = json['gpx']['trk']
+     tracks_arr = tracks.is_a?(Array) ? tracks : [tracks]

-   bulk_insert_points(points_data)
+     points = tracks_arr.map { parse_track(_1) }.flatten.compact
+     points_data = points.map { prepare_point(_1) }.compact
+
+     bulk_insert_points(points_data)
+   end
  end

  private

@@ -32,7 +35,7 @@ class Gpx::TrackImporter
    segments_array.compact.map { |segment| segment['trkpt'] }
  end

- def prepare_point(point, index)
+ def prepare_point(point)
    return if point['lat'].blank? || point['lon'].blank? || point['time'].blank?

    {
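The `is_a?(Array)` normalization in `Gpx::TrackImporter#call` is needed because `Hash.from_xml` collapses a single repeated element into a bare `Hash` and only produces an `Array` when the element occurs more than once:

```ruby
require 'active_support/core_ext/hash/conversions'

one  = Hash.from_xml('<gpx><trk><name>a</name></trk></gpx>')
many = Hash.from_xml('<gpx><trk><name>a</name></trk><trk><name>b</name></trk></gpx>')

one['gpx']['trk']  # => {"name"=>"a"}                  (single Hash)
many['gpx']['trk'] # => [{"name"=>"a"}, {"name"=>"b"}] (Array)
```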
@@ -20,10 +20,13 @@ class Immich::ImportGeodata

  create_import_failed_notification(import.name) and return unless import.new_record?

- import.raw_data = immich_data_json
- import.save!
+ import.file.attach(
+   io: StringIO.new(immich_data_json.to_json),
+   filename: file_name,
+   content_type: 'application/json'
+ )

- ImportJob.perform_later(user.id, import.id)
+ import.save!
end

private
@@ -14,7 +14,8 @@ class Imports::Create
  create_import_finished_notification(import, user)

  schedule_stats_creating(user.id)
- # schedule_visit_suggesting(user.id, import) # Disabled until places & visits are reworked
+ schedule_visit_suggesting(user.id, import)
+ update_import_points_count(import)
rescue StandardError => e
  create_import_failed_notification(import, user, e)
end

@@ -26,6 +27,7 @@ class Imports::Create
  case source
  when 'google_semantic_history' then GoogleMaps::SemanticHistoryParser
  when 'google_phone_takeout' then GoogleMaps::PhoneTakeoutParser
+ when 'google_records' then GoogleMaps::RecordsStorageImporter
  when 'owntracks' then OwnTracks::Importer
  when 'gpx' then Gpx::TrackImporter
  when 'geojson' then Geojson::ImportParser

@@ -33,6 +35,10 @@ class Imports::Create
  end
end

+ def update_import_points_count(import)
+   Import::UpdatePointsCountJob.perform_later(import.id)
+ end
+
def schedule_stats_creating(user_id)
  import.years_and_months_tracked.each do |year, month|
    Stats::CalculatingJob.perform_later(user_id, year, month)

@@ -44,7 +50,7 @@ class Imports::Create
  start_at = Time.zone.at(points.first.timestamp)
  end_at = Time.zone.at(points.last.timestamp)

- VisitSuggestingJob.perform_later(user_ids: [user_id], start_at:, end_at:)
+ VisitSuggestingJob.perform_later(user_id:, start_at:, end_at:)
end

def create_import_finished_notification(import, user)
@@ -11,6 +11,6 @@ class Imports::Destroy
  def call
    @import.destroy!

-   BulkStatsCalculatingJob.perform_later(@user.id)
+   Stats::BulkCalculator.new(@user.id).call
  end
end
@@ -16,7 +16,7 @@ class Imports::Watcher
    file_names = file_names(user_directory_path)

    file_names.each do |file_name|
-     process_file(user, user_directory_path, file_name)
+     create_import(user, user_directory_path, file_name)
    end
  end
end

@@ -26,49 +26,29 @@ class Imports::Watcher
  def user_directories
    Dir.entries(WATCHED_DIR_PATH).select do |entry|
      path = File.join(WATCHED_DIR_PATH, entry)

      File.directory?(path) && !['.', '..'].include?(entry)
    end
  end

- def find_user(file_name)
-   email = file_name.split('_').first
-
-   User.find_by(email:)
- end
-
  def file_names(directory_path)
    Dir.entries(directory_path).select { |file| SUPPORTED_FORMATS.include?(File.extname(file)) }
  end

- def process_file(user, directory_path, file_name)
+ def create_import(user, directory_path, file_name)
    file_path = File.join(directory_path, file_name)
    import = Import.find_or_initialize_by(user:, name: file_name)

    return if import.persisted?

    import.source = source(file_name)
-   import.raw_data = raw_data(file_path, import.source)
+   import.file.attach(
+     io: File.open(file_path),
+     filename: file_name,
+     content_type: mime_type(import.source)
+   )

    import.save!
-
-   ImportJob.perform_later(user.id, import.id)
  end

- def find_or_initialize_import(user, file_name)
-   import_name = file_name.split('_')[1..].join('_')
-
-   Import.find_or_initialize_by(user:, name: import_name)
- end
-
- def set_import_attributes(import, file_path, file_name)
-   source = source(file_name)
-
-   import.source = source
-   import.raw_data = raw_data(file_path, source)
-
-   import.save!
-
-   import.id
- end
-
  def source(file_name)

@@ -89,16 +69,13 @@ class Imports::Watcher
    end
  end

- def raw_data(file_path, source)
-   file = File.read(file_path)
-
+ def mime_type(source)
    case source.to_sym
-   when :gpx
-     Hash.from_xml(file)
+   when :gpx then 'application/xml'
    when :json, :geojson, :google_phone_takeout, :google_records, :google_semantic_history
-     JSON.parse(file)
+     'application/json'
    when :owntracks
-     OwnTracks::RecParser.new(file).call
+     'application/octet-stream'
    else
      raise UnsupportedSourceError, "Unsupported source: #{source}"
    end
@@ -3,25 +3,28 @@
class OwnTracks::Importer
  include Imports::Broadcaster

- attr_reader :import, :data, :user_id
+ attr_reader :import, :user_id

  def initialize(import, user_id)
    @import = import
-   @data = import.raw_data
    @user_id = user_id
  end

  def call
-   points_data = data.map.with_index(1) do |point, index|
-     OwnTracks::Params.new(point).call.merge(
-       import_id: import.id,
-       user_id: user_id,
-       created_at: Time.current,
-       updated_at: Time.current
-     )
-   end
+   import.file.download do |file|
+     parsed_data = OwnTracks::RecParser.new(file).call

-   bulk_insert_points(points_data)
+     points_data = parsed_data.map do |point|
+       OwnTracks::Params.new(point).call.merge(
+         import_id: import.id,
+         user_id: user_id,
+         created_at: Time.current,
+         updated_at: Time.current
+       )
+     end
+
+     bulk_insert_points(points_data)
+   end
  end

  private
@@ -10,11 +10,8 @@ class OwnTracks::RecParser
  def call
    file.split("\n").map do |line|
      parts = line.split("\t")
-     if parts.size > 2 && parts[1].strip == '*'
-       JSON.parse(parts[2])
-     else
-       nil
-     end
+
+     Oj.load(parts[2]) if parts.size > 2 && parts[1].strip == '*'
    end.compact
  end
end
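OwnTracks `.rec` lines are tab-separated — a timestamp, a `*` marker, then a JSON payload — which is what the `parts[1].strip == '*'` guard checks. A small usage sketch, assuming the parser is constructed with the raw file contents as in `OwnTracks::Importer` above (the sample line is illustrative):

```ruby
line = %(2025-04-02T12:00:00Z\t*\t{"_type":"location","lat":52.52,"lon":13.405})

OwnTracks::RecParser.new(line).call
# => [{"_type"=>"location", "lat"=>52.52, "lon"=>13.405}]
```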
@@ -23,8 +23,13 @@ class Photoprism::ImportGeodata
  import = find_or_create_import(json_data)
  return create_import_failed_notification(import.name) unless import.new_record?

- import.update!(raw_data: json_data)
- ImportJob.perform_later(user.id, import.id)
+ import.file.attach(
+   io: StringIO.new(json_data.to_json),
+   filename: file_name(json_data),
+   content_type: 'application/json'
+ )
+
+ import.save!
end

def find_or_create_import(json_data)
@@ -2,7 +2,7 @@

class Photos::ImportParser
  include Imports::Broadcaster
-
+ include PointValidation
  attr_reader :import, :json, :user_id

  def initialize(import, user_id)

@@ -29,12 +29,4 @@ class Photos::ImportParser

    broadcast_import_progress(import, index)
  end
-
- def point_exists?(point, timestamp)
-   Point.exists?(
-     lonlat: "POINT(#{point['longitude']} #{point['latitude']})",
-     timestamp:,
-     user_id:
-   )
- end
end
@@ -19,6 +19,7 @@ class ReverseGeocoding::Places::FetchData

  first_place = reverse_geocoded_places.shift
  update_place(first_place)
+
  reverse_geocoded_places.each { |reverse_geocoded_place| fetch_and_create_place(reverse_geocoded_place) }
end

@@ -49,6 +50,9 @@ class ReverseGeocoding::Places::FetchData
  new_place.country = data['properties']['country']
  new_place.geodata = data
  new_place.source = :photon
+ if new_place.lonlat.blank?
+   new_place.lonlat = "POINT(#{data['geometry']['coordinates'][0]} #{data['geometry']['coordinates'][1]})"
+ end

  new_place.save!
end

@@ -88,7 +92,7 @@ class ReverseGeocoding::Places::FetchData
  limit: 10,
  distance_sort: true,
  radius: 1,
- units: ::DISTANCE_UNIT,
+ units: ::DISTANCE_UNIT
)

data.reject do |place|
@@ -12,13 +12,17 @@ module Visits
  end

  def call
-   bounding_box = "ST_MakeEnvelope(#{sw_lng}, #{sw_lat}, #{ne_lng}, #{ne_lat}, 4326)"
-
    Visit
      .includes(:place)
      .where(user:)
      .joins(:place)
-     .where("ST_Contains(#{bounding_box}, ST_SetSRID(places.lonlat::geometry, 4326))")
+     .where(
+       'ST_Contains(ST_MakeEnvelope(?, ?, ?, ?, 4326), ST_SetSRID(places.lonlat::geometry, 4326))',
+       sw_lng,
+       sw_lat,
+       ne_lng,
+       ne_lat
+     )
      .order(started_at: :desc)
  end
@@ -1,5 +0,0 @@
-# frozen_string_literal: true
-
-class ImportUploader < Shrine
-  # plugins and uploading logic
-end
@@ -37,11 +37,15 @@
<% @exports.each do |export| %>
  <tr>
    <td><%= export.name %></td>
-   <td><%= export.created_at.strftime('%Y-%m-%d %H:%M:%S') %></td>
+   <td><%= human_datetime(export.created_at) %></td>
    <td><%= export.status %></td>
    <td>
      <% if export.completed? %>
-       <%= link_to 'Download', export.url, class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: export.name %>
+       <% if export.file.present? %>
+         <%= link_to 'Download', rails_blob_path(export.file, disposition: 'attachment'), class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: export.name %>
+       <% else %>
+         <%= link_to 'Download', export.url, class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: export.name %>
+       <% end %>
      <% end %>
      <%= link_to 'Delete', export, data: { confirm: "Are you sure?", turbo_confirm: "Are you sure?", turbo_method: :delete }, method: :delete, class: "px-4 py-2 bg-red-500 text-white rounded-md" %>
    </td>
@@ -1,6 +1,6 @@
<div class="w-full mx-auto my-5">
  <div class="flex justify-between items-center mt-5 mb-5">
-   <div class="hero h-fit bg-base-200 py-20" style="background-image: url(<%= '/images/bg-image.jpg' %>);">
+   <div class="hero h-fit bg-base-200 py-20">
      <div class="hero-content text-center">
        <div class="max-w-md">
          <h1 class="text-5xl font-bold">
@@ -13,6 +13,24 @@
  <p class="text-sm mt-2">JSON files from your Takeout/Location History/Semantic Location History/YEAR</p>
  </div>
</div>
+<div class="card bordered shadow-lg p-3 hover:shadow-blue-500/50">
+  <div class="form-control">
+    <label class="label cursor-pointer space-x-3">
+      <%= form.radio_button :source, :google_records, class: "radio radio-primary" %>
+      <span class="label-text">Google Records</span>
+    </label>
+    <p class="text-sm mt-2">The Records.json file from your Google Takeout</p>
+  </div>
+</div>
+<div class="card bordered shadow-lg p-3 hover:shadow-blue-500/50">
+  <div class="form-control">
+    <label class="label cursor-pointer space-x-3">
+      <%= form.radio_button :source, :google_phone_takeout, class: "radio radio-primary" %>
+      <span class="label-text">Google Phone Takeout</span>
+    </label>
+    <p class="text-sm mt-2">A JSON file you received after your request for Takeout from your mobile device</p>
+  </div>
+</div>
<div class="card bordered shadow-lg p-3 hover:shadow-blue-500/50">
  <div class="form-control">
    <label class="label cursor-pointer space-x-3">

@@ -31,15 +49,6 @@
  <p class="text-sm mt-2">A valid GeoJSON file. For example, a file, exported from a Dawarich instance</p>
  </div>
</div>
-<div class="card bordered shadow-lg p-3 hover:shadow-blue-500/50">
-  <div class="form-control">
-    <label class="label cursor-pointer space-x-3">
-      <%= form.radio_button :source, :google_phone_takeout, class: "radio radio-primary" %>
-      <span class="label-text">Google Phone Takeout</span>
-    </label>
-    <p class="text-sm mt-2">A JSON file you received after your request for Takeout from your mobile device</p>
-  </div>
-</div>
<div class="card bordered shadow-lg p-3 hover:shadow-blue-500/50">
  <div class="form-control">
    <label class="label cursor-pointer space-x-3">
@@ -15,7 +15,7 @@
  <td>
    <%= "#{number_with_delimiter import.points.size}" %>
  </td>
- <td><%= import.created_at.strftime("%d.%m.%Y, %H:%M") %></td>
+ <td><%= human_datetime(import.created_at) %></td>
</tr>
</tbody>
</table>
@@ -53,7 +53,7 @@
<% @imports.each do |import| %>
  <tr data-import-id="<%= import.id %>"
      id="import-<%= import.id %>"
-     data-points-total="<%= import.points_count %>">
+     data-points-total="<%= import.processed %>">
    <td>
      <%= link_to import.name, import, class: 'underline hover:no-underline' %>
      (<%= import.source %>)

@@ -63,17 +63,22 @@
      <%= link_to '📋', points_path(import_id: import.id) %>
    </td>
    <td data-points-count>
-     <%= number_with_delimiter import.points_count %>
+     <%= number_with_delimiter import.processed %>
    </td>
    <td data-reverse-geocoded-points-count>
      <%= number_with_delimiter import.reverse_geocoded_points_count %>
    </td>
-   <td><%= import.created_at.strftime("%d.%m.%Y, %H:%M") %></td>
+   <td><%= human_datetime(import.created_at) %></td>
  </tr>
<% end %>
</tbody>
</table>
</div>
+<div class="flex justify-center my-5">
+  <div class='flex'>
+    <%= paginate @imports %>
+  </div>
+</div>
<% end %>
</div>
</div>
@@ -3,36 +3,6 @@
<div class="mx-auto md:w-2/3 w-full">
  <h1 class="font-bold text-4xl">New import</h1>

- <div role="alert" class="alert alert-info my-5">
-   <svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" class="stroke-current shrink-0 w-6 h-6"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"></path></svg>
-   <span>
-     <p>To import <code>Records.json</code> file from your Google Takeout Archive, use rake task.</p>
-
-     <p class='mb-3'>Import takes a while to finish, so you might want to run it in <code>screen</code> session.</p>
-
-     <p class='mt-5 mb-2'>1. Upload your Records.json file to your server</p>
-     <p class='mt-5 mb-2'>2. Copy you Records.json to the <code>tmp</code> folder:
-       <div class="mockup-code">
-         <pre data-prefix="$"><code>docker cp Records.json dawarich_app:/var/app/tmp/imports/Records.json</code></pre>
-       </div>
-     </p>
-     <p class='mt-5 mb-2'>3. Attach to the docker container:
-       <div class="mockup-code">
-         <pre data-prefix="$"><code>docker exec -it dawarich_app sh</code></pre>
-       </div>
-     </p>
-     <p class='mt-5 mb-2'>4. Run the rake task:
-       <div class="mockup-code">
-         <pre data-prefix="$"><code>bundle exec rake import:big_file['tmp/imports/Records.json','user@example.com']</code></pre>
-       </div>
-     </p>
-     <p class='mt-5 mb-2'>5. Wait patiently for process to finish</p>
-
-     <p class='mt-3'>You can monitor progress in <a href="/sidekiq" class="underline">Sidekiq UI</a></p>
-   </span>
- </div>
-
<%= render "form", import: @import %>

<%= link_to "Back to imports", imports_path, class: "btn mx-5 mb-5" %>
@@ -15,6 +15,7 @@
<%= stylesheet_link_tag "application", "data-turbo-track": "reload" %>
<%= javascript_importmap_tags %>
<%= render 'application/favicon' %>
+<%= Sentry.get_trace_propagation_meta.html_safe if Sentry.initialized? %>
</head>

<body class='min-h-screen'>
@@ -38,7 +38,7 @@
<% @places.each do |place| %>
  <tr>
    <td><%= place.name %></td>
-   <td><%= place.created_at.strftime('%Y-%m-%d %H:%M:%S') %></td>
+   <td><%= human_datetime(place.created_at) %></td>
    <td><%= "#{place.lat}, #{place.lon}" %></td>
    <td>
      <%= link_to 'Delete', place, data: { confirm: "Are you sure? Deleting a place will result in deleting all visits for this place.", turbo_confirm: "Are you sure? Deleting a place will result in deleting all visits for this place.", turbo_method: :delete }, method: :delete, class: "px-4 py-2 bg-red-500 text-white rounded-md" %>
@@ -14,7 +14,7 @@
  %>
  </td>
  <td class='<%= speed_text_color(point.velocity) %>'><%= point.velocity %></td>
- <td><%= point.recorded_at %></td>
+ <td><%= human_datetime(point.recorded_at) %></td>
  <td><%= point.lat %>, <%= point.lon %></td>
  <td></td>
</tr>
@@ -5,4 +5,7 @@
  <%= link_to 'Background Jobs', settings_background_jobs_path, role: 'tab', class: "tab #{active_tab?(settings_background_jobs_path)}" %>
<% end %>
<%= link_to 'Map', settings_maps_path, role: 'tab', class: "tab #{active_tab?(settings_maps_path)}" %>
+<% if !DawarichSettings.self_hosted? %>
+  <%= link_to 'Subscriptions', settings_subscriptions_path, role: 'tab', class: "tab #{active_tab?(settings_subscriptions_path)}" %>
+<% end %>
</div>
app/views/settings/subscriptions/index.html.erb (new file, 30 lines)

@@ -0,0 +1,30 @@
<% content_for :title, "Subscriptions" %>

<div class="min-h-content w-full my-5">
  <%= render 'settings/navigation' %>

  <div class="hero bg-base-200 min-h-80">
    <div class="hero-content text-center">
      <div class="max-w-md">
        <h1 class="text-5xl font-bold">Hello there!</h1>
        <% if current_user.active_until.future? %>
          <p class="py-6">
            You are currently subscribed to Dawarich, hurray!
          </p>

          <p>
            Your subscription will be valid for the next <span class="text-accent"><%= days_left(current_user.active_until) %></span>.
          </p>

          <%= link_to 'Manage subscription', "#{ENV['SUBSCRIPTION_URL']}/auth/dawarich?token=#{current_user.generate_subscription_token}", class: 'btn btn-primary my-4' %>
        <% else %>
          <p class="py-6">
            You are currently not subscribed to Dawarich. How about we fix that?
          </p>

          <%= link_to 'Manage subscription', "#{ENV['SUBSCRIPTION_URL']}/auth/dawarich?token=#{current_user.generate_subscription_token}", class: 'btn btn-primary my-4' %>
        <% end %>
      </div>
    </div>
  </div>
</div>
@@ -26,7 +26,7 @@
  <%= number_with_delimiter user.tracked_points.count %>
  </td>
  <td>
-   <%= user.created_at.strftime('%Y-%m-%d %H:%M:%S') %>
+   <%= human_datetime(user.created_at) %>
  </td>
</tr>
<% end %>
@@ -19,6 +19,9 @@
      </ul>
    </details>
  </li>
+ <% if user_signed_in? && current_user.can_subscribe? %>
+   <li><%= link_to 'Subscribe', "#{ENV['SUBSCRIPTION_URL']}/auth/dawarich?token=#{current_user.generate_subscription_token}", class: 'btn btn-sm btn-success' %></li>
+ <% end %>
</ul>
</div>
<%= link_to 'Dawarich', root_path, class: 'btn btn-ghost normal-case text-xl'%>

@@ -67,6 +70,10 @@
<div class="navbar-end">
  <ul class="menu menu-horizontal bg-base-100 rounded-box px-1">
    <% if user_signed_in? %>
+     <% if current_user.can_subscribe? %>
+       <li><%= link_to 'Subscribe', "#{ENV['SUBSCRIPTION_URL']}/auth/dawarich?token=#{current_user.generate_subscription_token}", class: 'btn btn-sm btn-success' %></li>
+     <% end %>
+
      <div class="dropdown dropdown-end dropdown-bottom dropdown-hover"
           data-controller="notifications"
           data-notifications-user-id-value="<%= current_user.id %>">
@@ -1,4 +1,4 @@
#!/usr/bin/env ruby

-require_relative "../config/application"
-require "importmap/commands"
+require_relative '../config/application'
+require 'importmap/commands'
@@ -98,4 +98,6 @@ Rails.application.configure do
  config.logger = Logger.new($stdout)
  config.lograge.enabled = true
  config.lograge.formatter = Lograge::Formatters::Json.new
+
+ config.active_storage.service = ENV['SELF_HOSTED'] == 'true' ? :local : :s3
end

@@ -43,7 +43,7 @@ Rails.application.configure do
  # config.action_dispatch.x_sendfile_header = "X-Accel-Redirect" # for NGINX

  # Store uploaded files on the local file system (see config/storage.yml for options).
- config.active_storage.service = :local
+ config.active_storage.service = ENV['SELF_HOSTED'] == 'true' ? :local : :s3

  config.silence_healthcheck_path = '/api/v1/health'
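In short: self-hosted installs (`SELF_HOSTED=true`) keep the local Disk service, while the hosted deployment switches to the `s3` service defined in `config/storage.yml` below, which in turn expects `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, `AWS_REGION` and `AWS_BUCKET` to be set.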
@@ -6,6 +6,7 @@ pin_all_from 'app/javascript/channels', under: 'channels'

pin 'application', preload: true
pin '@rails/actioncable', to: 'actioncable.esm.js'
+pin '@rails/activestorage', to: 'activestorage.esm.js'
pin '@hotwired/turbo-rails', to: 'turbo.min.js', preload: true
pin '@hotwired/stimulus', to: 'stimulus.min.js', preload: true
pin '@hotwired/stimulus-loading', to: 'stimulus-loading.js', preload: true

@@ -17,10 +18,8 @@ pin 'chartkick', to: 'chartkick.js'
pin 'Chart.bundle', to: 'Chart.bundle.js'
pin 'leaflet.heat' # @0.2.0
pin 'leaflet-draw' # @1.0.4
-pin '@rails/actioncable', to: 'actioncable.esm.js'
-pin_all_from 'app/javascript/channels', under: 'channels'
pin 'notifications_channel', to: 'channels/notifications_channel.js'
pin 'points_channel', to: 'channels/points_channel.js'
pin 'imports_channel', to: 'channels/imports_channel.js'
-pin "trix"
-pin "@rails/actiontext", to: "actiontext.esm.js"
+pin 'trix'
+pin '@rails/actiontext', to: 'actiontext.esm.js'
@@ -21,3 +21,5 @@ NOMINATIM_API_USE_HTTPS = ENV.fetch('NOMINATIM_API_USE_HTTPS', 'true') == 'true'

GEOAPIFY_API_KEY = ENV.fetch('GEOAPIFY_API_KEY', nil)
# /Reverse geocoding settings
+
+SENTRY_DSN = ENV.fetch('SENTRY_DSN', nil)
16
config/initializers/aws.rb
Normal file
16
config/initializers/aws.rb
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'aws-sdk-core'
|
||||
|
||||
if ENV['AWS_ACCESS_KEY_ID'] &&
|
||||
ENV['AWS_SECRET_ACCESS_KEY'] &&
|
||||
ENV['AWS_REGION'] &&
|
||||
ENV['AWS_ENDPOINT']
|
||||
Aws.config.update(
|
||||
{
|
||||
region: ENV['AWS_REGION'],
|
||||
endpoint: ENV['AWS_ENDPOINT'],
|
||||
credentials: Aws::Credentials.new(ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY'])
|
||||
}
|
||||
)
|
||||
end
|
||||
config/initializers/sentry.rb (new file, 9 lines)

@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+return unless SENTRY_DSN
+
+Sentry.init do |config|
+  config.breadcrumbs_logger = [:active_support_logger]
+  config.dsn = SENTRY_DSN
+  config.traces_sample_rate = 1.0
+end
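This initializer is a no-op unless the `SENTRY_DSN` constant defined above is set. A sketch of enabling it, with a hypothetical DSN for illustration:

```sh
# Hypothetical DSN; use the one from your own Sentry project settings.
export SENTRY_DSN=https://examplePublicKey@o0.ingest.sentry.io/0
```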
@@ -22,6 +22,11 @@ Rails.application.routes.draw do
     resources :users, only: %i[index create destroy edit update]
     resources :maps, only: %i[index]
     patch 'maps', to: 'maps#update'
+    resources :subscriptions, only: %i[index] do
+      collection do
+        get :subscription_callback
+      end
+    end
   end

   patch 'settings', to: 'settings#update'
@@ -25,11 +25,6 @@ app_version_checking_job:
   class: "AppVersionCheckingJob"
   queue: default

-telemetry_sending_job:
-  cron: "0 */1 * * *" # every 1 hour
-  class: "TelemetrySendingJob"
-  queue: default
-
 cache_preheating_job:
   cron: "0 0 * * *" # every day at 0:00
   class: "Cache::PreheatingJob"
@ -1,13 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'shrine'
|
||||
require 'shrine/storage/file_system'
|
||||
|
||||
Shrine.storages = {
|
||||
cache: Shrine::Storage::FileSystem.new('public', prefix: 'uploads/cache'), # temporary
|
||||
store: Shrine::Storage::FileSystem.new('public', prefix: 'uploads') # permanent
|
||||
}
|
||||
|
||||
Shrine.plugin :activerecord # loads Active Record integration
|
||||
Shrine.plugin :cached_attachment_data # enables retaining cached file across form redisplays
|
||||
Shrine.plugin :restore_cached_data # extracts metadata for assigned cached files
|
||||
|
|
@@ -6,13 +6,15 @@ local:
   service: Disk
   root: <%= Rails.root.join("storage") %>

-# Use bin/rails credentials:edit to set the AWS secrets (as aws:access_key_id|secret_access_key)
-# amazon:
-#   service: S3
-#   access_key_id: <%= Rails.application.credentials.dig(:aws, :access_key_id) %>
-#   secret_access_key: <%= Rails.application.credentials.dig(:aws, :secret_access_key) %>
-#   region: us-east-1
-#   bucket: your_own_bucket-<%= Rails.env %>
+# Only load S3 config if not in test environment
+<% if !Rails.env.test? && ENV['AWS_ACCESS_KEY_ID'] && ENV['AWS_SECRET_ACCESS_KEY'] && ENV['AWS_REGION'] && ENV['AWS_BUCKET'] %>
+s3:
+  service: S3
+  access_key_id: <%= ENV.fetch("AWS_ACCESS_KEY_ID") %>
+  secret_access_key: <%= ENV.fetch("AWS_SECRET_ACCESS_KEY") %>
+  region: <%= ENV.fetch("AWS_REGION") %>
+  bucket: <%= ENV.fetch("AWS_BUCKET") %>
+<% end %>

 # Remember not to checkin your GCS keyfile to a repository
 # google:
db/data/20250403204658_update_imports_points_count.rb (new file, 13 lines)

@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+class UpdateImportsPointsCount < ActiveRecord::Migration[8.0]
+  def up
+    Import.find_each do |import|
+      Import::UpdatePointsCountJob.perform_later(import.id)
+    end
+  end
+
+  def down
+    raise ActiveRecord::IrreversibleMigration
+  end
+end
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+class SetActiveUntilForSelfhostedUsers < ActiveRecord::Migration[8.0]
+  def up
+    return unless DawarichSettings.self_hosted?
+
+    # rubocop:disable Rails/SkipsModelValidations
+    User.where(active_until: nil).update_all(active_until: 1000.years.from_now)
+    # rubocop:enable Rails/SkipsModelValidations
+  end
+
+  def down
+    return unless DawarichSettings.self_hosted?
+
+    # rubocop:disable Rails/SkipsModelValidations
+    User.where.not(active_until: nil).update_all(active_until: nil)
+    # rubocop:enable Rails/SkipsModelValidations
+  end
+end
@@ -1 +1 @@
-DataMigrate::Data.define(version: 20250303194123)
+DataMigrate::Data.define(version: 20_250_404_182_629)
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 # This migration comes from active_storage (originally 20170806125915)
 class CreateActiveStorageTables < ActiveRecord::Migration[7.0]
   def change

@@ -19,7 +21,7 @@ class CreateActiveStorageTables < ActiveRecord::Migration[7.0]
         t.datetime :created_at, null: false
       end

-      t.index [ :key ], unique: true
+      t.index [:key], unique: true
     end

     create_table :active_storage_attachments, id: primary_key_type do |t|

@@ -33,7 +35,8 @@ class CreateActiveStorageTables < ActiveRecord::Migration[7.0]
         t.datetime :created_at, null: false
       end

-      t.index [ :record_type, :record_id, :name, :blob_id ], name: :index_active_storage_attachments_uniqueness, unique: true
+      t.index %i[record_type record_id name blob_id], name: :index_active_storage_attachments_uniqueness,
+              unique: true
       t.foreign_key :active_storage_blobs, column: :blob_id
     end

@@ -41,17 +44,18 @@ class CreateActiveStorageTables < ActiveRecord::Migration[7.0]
       t.belongs_to :blob, null: false, index: false, type: foreign_key_type
       t.string :variation_digest, null: false

-      t.index [ :blob_id, :variation_digest ], name: :index_active_storage_variant_records_uniqueness, unique: true
+      t.index %i[blob_id variation_digest], name: :index_active_storage_variant_records_uniqueness, unique: true
       t.foreign_key :active_storage_blobs, column: :blob_id
     end
   end

   private
-    def primary_and_foreign_key_types
-      config = Rails.configuration.generators
-      setting = config.options[config.orm][:primary_key_type]
-      primary_key_type = setting || :primary_key
-      foreign_key_type = setting || :bigint
-      [primary_key_type, foreign_key_type]
-    end
+
+  def primary_and_foreign_key_types
+    config = Rails.configuration.generators
+    setting = config.options[config.orm][:primary_key_type]
+    primary_key_type = setting || :primary_key
+    foreign_key_type = setting || :bigint
+    [primary_key_type, foreign_key_type]
+  end
 end
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddFormatStartAtEndAtToExports < ActiveRecord::Migration[8.0]
+  def change
+    add_column :exports, :file_format, :integer, default: 0
+    add_column :exports, :start_at, :datetime
+    add_column :exports, :end_at, :datetime
+  end
+end
db/migrate/20250404182437_add_active_until_to_users.rb (new file, 7 lines)

@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+class AddActiveUntilToUsers < ActiveRecord::Migration[8.0]
+  def change
+    add_column :users, :active_until, :datetime
+  end
+end
db/schema.rb (generated, 443 lines)

@@ -10,257 +10,264 @@
 #
 # It's strongly recommended that you check this file into your version control system.

-ActiveRecord::Schema[8.0].define(version: 2025_03_03_194043) do
+ActiveRecord::Schema[8.0].define(version: 20_250_404_182_437) do
   # These are extensions that must be enabled in order to support this database
-  enable_extension "pg_catalog.plpgsql"
-  enable_extension "postgis"
+  enable_extension 'pg_catalog.plpgsql'
+  enable_extension 'postgis'

-  create_table "action_text_rich_texts", force: :cascade do |t|
-    t.string "name", null: false
-    t.text "body"
-    t.string "record_type", null: false
-    t.bigint "record_id", null: false
-    t.datetime "created_at", null: false
-    t.datetime "updated_at", null: false
-    t.index ["record_type", "record_id", "name"], name: "index_action_text_rich_texts_uniqueness", unique: true
+  create_table 'action_text_rich_texts', force: :cascade do |t|
+    t.string 'name', null: false
+    t.text 'body'
+    t.string 'record_type', null: false
+    t.bigint 'record_id', null: false
+    t.datetime 'created_at', null: false
+    t.datetime 'updated_at', null: false
+    t.index %w[record_type record_id name], name: 'index_action_text_rich_texts_uniqueness', unique: true
   end

-  create_table "active_storage_attachments", force: :cascade do |t|
-    t.string "name", null: false
-    t.string "record_type", null: false
-    t.bigint "record_id", null: false
-    t.bigint "blob_id", null: false
-    t.datetime "created_at", null: false
-    t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id"
-    t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true
+  create_table 'active_storage_attachments', force: :cascade do |t|
+    t.string 'name', null: false
+    t.string 'record_type', null: false
+    t.bigint 'record_id', null: false
+    t.bigint 'blob_id', null: false
+    t.datetime 'created_at', null: false
+    t.index ['blob_id'], name: 'index_active_storage_attachments_on_blob_id'
+    t.index %w[record_type record_id name blob_id], name: 'index_active_storage_attachments_uniqueness',
+            unique: true
   end

-  create_table "active_storage_blobs", force: :cascade do |t|
-    t.string "key", null: false
-    t.string "filename", null: false
-    t.string "content_type"
-    t.text "metadata"
-    t.string "service_name", null: false
-    t.bigint "byte_size", null: false
-    t.string "checksum"
-    t.datetime "created_at", null: false
-    t.index ["key"], name: "index_active_storage_blobs_on_key", unique: true
+  create_table 'active_storage_blobs', force: :cascade do |t|
+    t.string 'key', null: false
+    t.string 'filename', null: false
+    t.string 'content_type'
+    t.text 'metadata'
+    t.string 'service_name', null: false
+    t.bigint 'byte_size', null: false
+    t.string 'checksum'
+    t.datetime 'created_at', null: false
+    t.index ['key'], name: 'index_active_storage_blobs_on_key', unique: true
   end

-  create_table "active_storage_variant_records", force: :cascade do |t|
-    t.bigint "blob_id", null: false
-    t.string "variation_digest", null: false
-    t.index ["blob_id", "variation_digest"], name: "index_active_storage_variant_records_uniqueness", unique: true
+  create_table 'active_storage_variant_records', force: :cascade do |t|
+    t.bigint 'blob_id', null: false
+    t.string 'variation_digest', null: false
+    t.index %w[blob_id variation_digest], name: 'index_active_storage_variant_records_uniqueness', unique: true
   end

-  create_table "areas", force: :cascade do |t|
-    t.string "name", null: false
-    t.bigint "user_id", null: false
-    t.decimal "longitude", precision: 10, scale: 6, null: false
-    t.decimal "latitude", precision: 10, scale: 6, null: false
-    t.integer "radius", null: false
-    t.datetime "created_at", null: false
-    t.datetime "updated_at", null: false
-    t.index ["user_id"], name: "index_areas_on_user_id"
+  create_table 'areas', force: :cascade do |t|
+    t.string 'name', null: false
+    t.bigint 'user_id', null: false
+    t.decimal 'longitude', precision: 10, scale: 6, null: false
+    t.decimal 'latitude', precision: 10, scale: 6, null: false
+    t.integer 'radius', null: false
+    t.datetime 'created_at', null: false
+    t.datetime 'updated_at', null: false
+    t.index ['user_id'], name: 'index_areas_on_user_id'
   end

-  create_table "data_migrations", primary_key: "version", id: :string, force: :cascade do |t|
+  create_table 'data_migrations', primary_key: 'version', id: :string, force: :cascade do |t|
   end

-  create_table "exports", force: :cascade do |t|
-    t.string "name", null: false
-    t.string "url"
-    t.integer "status", default: 0, null: false
-    t.bigint "user_id", null: false
-    t.datetime "created_at", null: false
-    t.datetime "updated_at", null: false
-    t.index ["status"], name: "index_exports_on_status"
-    t.index ["user_id"], name: "index_exports_on_user_id"
+  create_table 'exports', force: :cascade do |t|
+    t.string 'name', null: false
+    t.string 'url'
+    t.integer 'status', default: 0, null: false
+    t.bigint 'user_id', null: false
+    t.datetime 'created_at', null: false
+    t.datetime 'updated_at', null: false
+    t.integer 'file_format', default: 0
+    t.datetime 'start_at'
+    t.datetime 'end_at'
+    t.index ['status'], name: 'index_exports_on_status'
+    t.index ['user_id'], name: 'index_exports_on_user_id'
   end

-  create_table "imports", force: :cascade do |t|
-    t.string "name", null: false
-    t.bigint "user_id", null: false
-    t.integer "source", default: 0
-    t.datetime "created_at", null: false
-    t.datetime "updated_at", null: false
-    t.integer "raw_points", default: 0
-    t.integer "doubles", default: 0
-    t.integer "processed", default: 0
-    t.jsonb "raw_data"
-    t.integer "points_count", default: 0
-    t.index ["source"], name: "index_imports_on_source"
-    t.index ["user_id"], name: "index_imports_on_user_id"
+  create_table 'imports', force: :cascade do |t|
+    t.string 'name', null: false
+    t.bigint 'user_id', null: false
+    t.integer 'source', default: 0
+    t.datetime 'created_at', null: false
+    t.datetime 'updated_at', null: false
+    t.integer 'raw_points', default: 0
+    t.integer 'doubles', default: 0
+    t.integer 'processed', default: 0
+    t.jsonb 'raw_data'
+    t.integer 'points_count', default: 0
+    t.index ['source'], name: 'index_imports_on_source'
+    t.index ['user_id'], name: 'index_imports_on_user_id'
   end

-  create_table "notifications", force: :cascade do |t|
-    t.string "title", null: false
-    t.text "content", null: false
-    t.bigint "user_id", null: false
-    t.integer "kind", default: 0, null: false
-    t.datetime "read_at"
-    t.datetime "created_at", null: false
-    t.datetime "updated_at", null: false
-    t.index ["kind"], name: "index_notifications_on_kind"
-    t.index ["user_id"], name: "index_notifications_on_user_id"
+  create_table 'notifications', force: :cascade do |t|
+    t.string 'title', null: false
+    t.text 'content', null: false
+    t.bigint 'user_id', null: false
+    t.integer 'kind', default: 0, null: false
+    t.datetime 'read_at'
+    t.datetime 'created_at', null: false
+    t.datetime 'updated_at', null: false
+    t.index ['kind'], name: 'index_notifications_on_kind'
+    t.index ['user_id'], name: 'index_notifications_on_user_id'
   end

-  create_table "place_visits", force: :cascade do |t|
-    t.bigint "place_id", null: false
-    t.bigint "visit_id", null: false
-    t.datetime "created_at", null: false
-    t.datetime "updated_at", null: false
-    t.index ["place_id"], name: "index_place_visits_on_place_id"
-    t.index ["visit_id"], name: "index_place_visits_on_visit_id"
+  create_table 'place_visits', force: :cascade do |t|
+    t.bigint 'place_id', null: false
+    t.bigint 'visit_id', null: false
+    t.datetime 'created_at', null: false
+    t.datetime 'updated_at', null: false
+    t.index ['place_id'], name: 'index_place_visits_on_place_id'
+    t.index ['visit_id'], name: 'index_place_visits_on_visit_id'
   end

-  create_table "places", force: :cascade do |t|
-    t.string "name", null: false
-    t.decimal "longitude", precision: 10, scale: 6, null: false
-    t.decimal "latitude", precision: 10, scale: 6, null: false
-    t.string "city"
-    t.string "country"
-    t.integer "source", default: 0
-    t.jsonb "geodata", default: {}, null: false
-    t.datetime "reverse_geocoded_at"
-    t.datetime "created_at", null: false
-    t.datetime "updated_at", null: false
-    t.geography "lonlat", limit: {srid: 4326, type: "st_point", geographic: true}
-    t.index ["lonlat"], name: "index_places_on_lonlat", using: :gist
+  create_table 'places', force: :cascade do |t|
+    t.string 'name', null: false
+    t.decimal 'longitude', precision: 10, scale: 6, null: false
+    t.decimal 'latitude', precision: 10, scale: 6, null: false
+    t.string 'city'
+    t.string 'country'
+    t.integer 'source', default: 0
+    t.jsonb 'geodata', default: {}, null: false
+    t.datetime 'reverse_geocoded_at'
+    t.datetime 'created_at', null: false
+    t.datetime 'updated_at', null: false
+    t.geography 'lonlat', limit: { srid: 4326, type: 'st_point', geographic: true }
+    t.index ['lonlat'], name: 'index_places_on_lonlat', using: :gist
   end

-  create_table "points", force: :cascade do |t|
-    t.integer "battery_status"
-    t.string "ping"
-    t.integer "battery"
-    t.string "tracker_id"
-    t.string "topic"
-    t.integer "altitude"
-    t.decimal "longitude", precision: 10, scale: 6
-    t.string "velocity"
-    t.integer "trigger"
-    t.string "bssid"
-    t.string "ssid"
-    t.integer "connection"
-    t.integer "vertical_accuracy"
-    t.integer "accuracy"
-    t.integer "timestamp"
-    t.decimal "latitude", precision: 10, scale: 6
-    t.integer "mode"
-    t.text "inrids", default: [], array: true
-    t.text "in_regions", default: [], array: true
-    t.jsonb "raw_data", default: {}
-    t.bigint "import_id"
-    t.string "city"
-    t.string "country"
-    t.datetime "created_at", null: false
-    t.datetime "updated_at", null: false
-    t.bigint "user_id"
-    t.jsonb "geodata", default: {}, null: false
-    t.bigint "visit_id"
-    t.datetime "reverse_geocoded_at"
-    t.decimal "course", precision: 8, scale: 5
-    t.decimal "course_accuracy", precision: 8, scale: 5
-    t.string "external_track_id"
-    t.geography "lonlat", limit: {srid: 4326, type: "st_point", geographic: true}
-    t.index ["altitude"], name: "index_points_on_altitude"
-    t.index ["battery"], name: "index_points_on_battery"
-    t.index ["battery_status"], name: "index_points_on_battery_status"
-    t.index ["city"], name: "index_points_on_city"
-    t.index ["connection"], name: "index_points_on_connection"
-    t.index ["country"], name: "index_points_on_country"
-    t.index ["external_track_id"], name: "index_points_on_external_track_id"
-    t.index ["geodata"], name: "index_points_on_geodata", using: :gin
-    t.index ["import_id"], name: "index_points_on_import_id"
-    t.index ["latitude", "longitude"], name: "index_points_on_latitude_and_longitude"
-    t.index ["lonlat", "timestamp", "user_id"], name: "index_points_on_lonlat_timestamp_user_id", unique: true
-    t.index ["lonlat"], name: "index_points_on_lonlat", using: :gist
-    t.index ["reverse_geocoded_at"], name: "index_points_on_reverse_geocoded_at"
-    t.index ["timestamp"], name: "index_points_on_timestamp"
-    t.index ["trigger"], name: "index_points_on_trigger"
-    t.index ["user_id"], name: "index_points_on_user_id"
-    t.index ["visit_id"], name: "index_points_on_visit_id"
+  create_table 'points', force: :cascade do |t|
+    t.integer 'battery_status'
+    t.string 'ping'
+    t.integer 'battery'
+    t.string 'tracker_id'
+    t.string 'topic'
+    t.integer 'altitude'
+    t.decimal 'longitude', precision: 10, scale: 6
+    t.string 'velocity'
+    t.integer 'trigger'
+    t.string 'bssid'
+    t.string 'ssid'
+    t.integer 'connection'
+    t.integer 'vertical_accuracy'
+    t.integer 'accuracy'
+    t.integer 'timestamp'
+    t.decimal 'latitude', precision: 10, scale: 6
+    t.integer 'mode'
+    t.text 'inrids', default: [], array: true
+    t.text 'in_regions', default: [], array: true
+    t.jsonb 'raw_data', default: {}
+    t.bigint 'import_id'
+    t.string 'city'
+    t.string 'country'
+    t.datetime 'created_at', null: false
+    t.datetime 'updated_at', null: false
+    t.bigint 'user_id'
+    t.jsonb 'geodata', default: {}, null: false
+    t.bigint 'visit_id'
+    t.datetime 'reverse_geocoded_at'
+    t.decimal 'course', precision: 8, scale: 5
+    t.decimal 'course_accuracy', precision: 8, scale: 5
+    t.string 'external_track_id'
+    t.geography 'lonlat', limit: { srid: 4326, type: 'st_point', geographic: true }
+    t.index ['altitude'], name: 'index_points_on_altitude'
+    t.index ['battery'], name: 'index_points_on_battery'
+    t.index ['battery_status'], name: 'index_points_on_battery_status'
+    t.index ['city'], name: 'index_points_on_city'
+    t.index ['connection'], name: 'index_points_on_connection'
+    t.index ['country'], name: 'index_points_on_country'
+    t.index ['external_track_id'], name: 'index_points_on_external_track_id'
+    t.index ['geodata'], name: 'index_points_on_geodata', using: :gin
+    t.index ['import_id'], name: 'index_points_on_import_id'
+    t.index %w[latitude longitude], name: 'index_points_on_latitude_and_longitude'
+    t.index %w[lonlat timestamp user_id], name: 'index_points_on_lonlat_timestamp_user_id', unique: true
+    t.index ['lonlat'], name: 'index_points_on_lonlat', using: :gist
+    t.index ['reverse_geocoded_at'], name: 'index_points_on_reverse_geocoded_at'
+    t.index ['timestamp'], name: 'index_points_on_timestamp'
+    t.index ['trigger'], name: 'index_points_on_trigger'
+    t.index ['user_id'], name: 'index_points_on_user_id'
+    t.index ['visit_id'], name: 'index_points_on_visit_id'
   end

-  create_table "stats", force: :cascade do |t|
-    t.integer "year", null: false
-    t.integer "month", null: false
-    t.integer "distance", null: false
-    t.jsonb "toponyms"
-    t.datetime "created_at", null: false
-    t.datetime "updated_at", null: false
-    t.bigint "user_id", null: false
-    t.jsonb "daily_distance", default: {}
-    t.index ["distance"], name: "index_stats_on_distance"
-    t.index ["month"], name: "index_stats_on_month"
-    t.index ["user_id"], name: "index_stats_on_user_id"
-    t.index ["year"], name: "index_stats_on_year"
+  create_table 'stats', force: :cascade do |t|
+    t.integer 'year', null: false
+    t.integer 'month', null: false
+    t.integer 'distance', null: false
+    t.jsonb 'toponyms'
+    t.datetime 'created_at', null: false
+    t.datetime 'updated_at', null: false
+    t.bigint 'user_id', null: false
+    t.jsonb 'daily_distance', default: {}
+    t.index ['distance'], name: 'index_stats_on_distance'
+    t.index ['month'], name: 'index_stats_on_month'
+    t.index ['user_id'], name: 'index_stats_on_user_id'
+    t.index ['year'], name: 'index_stats_on_year'
   end

-  create_table "trips", force: :cascade do |t|
-    t.string "name", null: false
-    t.datetime "started_at", null: false
-    t.datetime "ended_at", null: false
-    t.integer "distance"
-    t.bigint "user_id", null: false
-    t.datetime "created_at", null: false
-    t.datetime "updated_at", null: false
-    t.geometry "path", limit: {srid: 3857, type: "line_string"}
-    t.index ["user_id"], name: "index_trips_on_user_id"
+  create_table 'trips', force: :cascade do |t|
+    t.string 'name', null: false
+    t.datetime 'started_at', null: false
+    t.datetime 'ended_at', null: false
+    t.integer 'distance'
+    t.bigint 'user_id', null: false
+    t.datetime 'created_at', null: false
+    t.datetime 'updated_at', null: false
+    t.geometry 'path', limit: { srid: 3857, type: 'line_string' }
+    t.index ['user_id'], name: 'index_trips_on_user_id'
   end

-  create_table "users", force: :cascade do |t|
-    t.string "email", default: "", null: false
-    t.string "encrypted_password", default: "", null: false
-    t.string "reset_password_token"
-    t.datetime "reset_password_sent_at"
-    t.datetime "remember_created_at"
-    t.datetime "created_at", null: false
-    t.datetime "updated_at", null: false
-    t.string "api_key", default: "", null: false
-    t.string "theme", default: "dark", null: false
-    t.jsonb "settings", default: {"fog_of_war_meters" => "100", "meters_between_routes" => "1000", "minutes_between_routes" => "60"}
-    t.boolean "admin", default: false
-    t.integer "sign_in_count", default: 0, null: false
-    t.datetime "current_sign_in_at"
-    t.datetime "last_sign_in_at"
-    t.string "current_sign_in_ip"
-    t.string "last_sign_in_ip"
-    t.integer "status", default: 0
-    t.index ["email"], name: "index_users_on_email", unique: true
-    t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
+  create_table 'users', force: :cascade do |t|
+    t.string 'email', default: '', null: false
+    t.string 'encrypted_password', default: '', null: false
+    t.string 'reset_password_token'
+    t.datetime 'reset_password_sent_at'
+    t.datetime 'remember_created_at'
+    t.datetime 'created_at', null: false
+    t.datetime 'updated_at', null: false
+    t.string 'api_key', default: '', null: false
+    t.string 'theme', default: 'dark', null: false
+    t.jsonb 'settings',
+            default: { 'fog_of_war_meters' => '100', 'meters_between_routes' => '1000',
+                       'minutes_between_routes' => '60' }
+    t.boolean 'admin', default: false
+    t.integer 'sign_in_count', default: 0, null: false
+    t.datetime 'current_sign_in_at'
+    t.datetime 'last_sign_in_at'
+    t.string 'current_sign_in_ip'
+    t.string 'last_sign_in_ip'
+    t.integer 'status', default: 0
+    t.datetime 'active_until'
+    t.index ['email'], name: 'index_users_on_email', unique: true
+    t.index ['reset_password_token'], name: 'index_users_on_reset_password_token', unique: true
   end

-  add_check_constraint "users", "admin IS NOT NULL", name: "users_admin_null", validate: false
+  add_check_constraint 'users', 'admin IS NOT NULL', name: 'users_admin_null', validate: false

-  create_table "visits", force: :cascade do |t|
-    t.bigint "area_id"
-    t.bigint "user_id", null: false
-    t.datetime "started_at", null: false
-    t.datetime "ended_at", null: false
-    t.integer "duration", null: false
-    t.string "name", null: false
-    t.integer "status", default: 0, null: false
-    t.datetime "created_at", null: false
-    t.datetime "updated_at", null: false
-    t.bigint "place_id"
-    t.index ["area_id"], name: "index_visits_on_area_id"
-    t.index ["place_id"], name: "index_visits_on_place_id"
-    t.index ["started_at"], name: "index_visits_on_started_at"
-    t.index ["user_id"], name: "index_visits_on_user_id"
+  create_table 'visits', force: :cascade do |t|
+    t.bigint 'area_id'
+    t.bigint 'user_id', null: false
+    t.datetime 'started_at', null: false
+    t.datetime 'ended_at', null: false
+    t.integer 'duration', null: false
+    t.string 'name', null: false
+    t.integer 'status', default: 0, null: false
+    t.datetime 'created_at', null: false
+    t.datetime 'updated_at', null: false
+    t.bigint 'place_id'
+    t.index ['area_id'], name: 'index_visits_on_area_id'
+    t.index ['place_id'], name: 'index_visits_on_place_id'
+    t.index ['started_at'], name: 'index_visits_on_started_at'
+    t.index ['user_id'], name: 'index_visits_on_user_id'
   end

-  add_foreign_key "active_storage_attachments", "active_storage_blobs", column: "blob_id"
-  add_foreign_key "active_storage_variant_records", "active_storage_blobs", column: "blob_id"
-  add_foreign_key "areas", "users"
-  add_foreign_key "notifications", "users"
-  add_foreign_key "place_visits", "places"
-  add_foreign_key "place_visits", "visits"
-  add_foreign_key "points", "users"
-  add_foreign_key "points", "visits"
-  add_foreign_key "stats", "users"
-  add_foreign_key "trips", "users"
-  add_foreign_key "visits", "areas"
-  add_foreign_key "visits", "places"
-  add_foreign_key "visits", "users"
+  add_foreign_key 'active_storage_attachments', 'active_storage_blobs', column: 'blob_id'
+  add_foreign_key 'active_storage_variant_records', 'active_storage_blobs', column: 'blob_id'
+  add_foreign_key 'areas', 'users'
+  add_foreign_key 'notifications', 'users'
+  add_foreign_key 'place_visits', 'places'
+  add_foreign_key 'place_visits', 'visits'
+  add_foreign_key 'points', 'users'
+  add_foreign_key 'points', 'visits'
+  add_foreign_key 'stats', 'users'
+  add_foreign_key 'trips', 'users'
+  add_foreign_key 'visits', 'areas'
+  add_foreign_key 'visits', 'places'
+  add_foreign_key 'visits', 'users'
 end
@@ -41,6 +41,7 @@ services:
     volumes:
       - dawarich_public:/var/app/public
       - dawarich_watched:/var/app/tmp/imports/watched
+      - dawarich_storage:/var/app/storage
     networks:
       - dawarich
     ports:

@@ -98,6 +99,7 @@ services:
     volumes:
       - dawarich_public:/var/app/public
       - dawarich_watched:/var/app/tmp/imports/watched
+      - dawarich_storage:/var/app/storage
     networks:
       - dawarich
     stdin_open: true

@@ -154,3 +156,4 @@ volumes:
   dawarich_redis_data:
   dawarich_public:
   dawarich_watched:
+  dawarich_storage:
@@ -43,6 +43,7 @@ services:
     volumes:
       - dawarich_public:/var/app/public
      - dawarich_watched:/var/app/tmp/imports/watched
+      - dawarich_storage:/var/app/storage
     networks:
       - dawarich
     ports:

@@ -98,6 +99,7 @@ services:
     volumes:
       - dawarich_public:/var/app/public
       - dawarich_watched:/var/app/tmp/imports/watched
+      - dawarich_storage:/var/app/storage
     networks:
       - dawarich
     stdin_open: true

@@ -152,3 +154,4 @@ volumes:
   dawarich_shared:
   dawarich_public:
   dawarich_watched:
+  dawarich_storage:
@@ -35,6 +35,7 @@ services:
       - .env
     volumes:
       - ./public:/var/app/public
+      - ./app_storage:/var/app/storage
     ports:
       - 32568:3000

@@ -52,3 +53,4 @@ services:
      - .env
     volumes:
       - ./public:/var/app/public
+      - ./app_storage:/var/app/storage
lib/tasks/exports.rake (new file, 13 lines)

@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+namespace :exports do
+  desc 'Migrate existing exports from file system to the new file storage'
+
+  task migrate_to_new_storage: :environment do
+    Export.find_each do |export|
+      export.migrate_to_new_storage
+    rescue StandardError => e
+      puts "Error migrating export #{export.id}: #{e.message}"
+    end
+  end
+end
@@ -1,8 +1,7 @@
 # frozen_string_literal: true

-# Usage: rake import:big_file['/path/to/file.json','user@email.com']
-
 namespace :import do
+  # Usage: rake import:big_file['/path/to/file.json','user@email.com']
   desc 'Accepts a file path and user email and imports the data into the database'

   task :big_file, %i[file_path user_email] => :environment do |_, args|
lib/tasks/imports.rake (new file, 13 lines)

@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+namespace :imports do
+  desc 'Migrate existing imports from `raw_data` to the new file storage'
+
+  task migrate_to_new_storage: :environment do
+    Import.find_each do |import|
+      import.migrate_to_new_storage
+    rescue StandardError => e
+      puts "Error migrating import #{import.id}: #{e.message}"
+    end
+  end
+end
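Both migration tasks above are one-off commands run from a shell inside the application container. A sketch for a typical compose setup, assuming the dawarich_app container name used in the compose files in this commit:

```sh
# Optional migrations; large imports may take a while to move.
docker exec -it dawarich_app bundle exec rake imports:migrate_to_new_storage
docker exec -it dawarich_app bundle exec rake exports:migrate_to_new_storage
```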
@@ -5,15 +5,25 @@ namespace :points do
   task migrate_to_lonlat: :environment do
     puts 'Updating points to use lonlat...'

-    # Use PostGIS functions to properly create geography type
-    result = ActiveRecord::Base.connection.execute(<<~SQL)
-      UPDATE points
-      SET lonlat = ST_SetSRID(ST_MakePoint(longitude, latitude), 4326)::geography
-      WHERE lonlat IS NULL
-      AND longitude IS NOT NULL
-      AND latitude IS NOT NULL;
-    SQL
-    ActiveRecord::Base.connection.execute('REINDEX TABLE points;')
-
-    puts "Successfully updated #{result.cmd_tuples} points with lonlat values"
+    ActiveRecord::Base.transaction do
+      ActiveRecord::Base.connection.execute('ALTER TABLE points DISABLE TRIGGER ALL;')
+
+      # Update the data
+      result = ActiveRecord::Base.connection.execute(<<~SQL)
+        UPDATE points
+        SET lonlat = ST_SetSRID(ST_MakePoint(longitude, latitude), 4326)::geography
+        WHERE lonlat IS NULL
+        AND longitude IS NOT NULL
+        AND latitude IS NOT NULL;
+      SQL
+
+      ActiveRecord::Base.connection.execute('ALTER TABLE points ENABLE TRIGGER ALL;')
+
+      puts "Successfully updated #{result.cmd_tuples} points with lonlat values"
+    end
+
+    ActiveRecord::Base.connection.execute('ANALYZE points;')
  end
 end
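A likely motivation for the rewrite above: disabling triggers during the bulk UPDATE skips per-row trigger work, and the final ANALYZE refreshes planner statistics instead of the heavier REINDEX. Invocation is unchanged; a sketch, assuming the same container setup as for the storage migration tasks:

```sh
# Run inside the app container, as with the tasks above.
docker exec -it dawarich_app bundle exec rake points:migrate_to_lonlat
```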
@@ -3,8 +3,8 @@
 FactoryBot.define do
   factory :export do
     name { 'export' }
-    url { 'exports/export.json' }
-    status { 1 }
+    status { :created }
+    file_format { :json }
     user
   end
 end
@@ -3,8 +3,7 @@
 FactoryBot.define do
   factory :import do
     user
-    name { 'MARCH_2024.json' }
+    name { 'owntracks_export.json' }
     source { Import.sources[:owntracks] }
-    raw_data { OwnTracks::RecParser.new(File.read('spec/fixtures/files/owntracks/2024-03.rec')).call }
   end
 end
@@ -7,6 +7,7 @@ FactoryBot.define do
     end

     status { :active }
+    active_until { 1000.years.from_now }

     password { SecureRandom.hex(8) }

@@ -25,6 +26,11 @@ FactoryBot.define do
       admin { true }
     end

+    trait :inactive do
+      status { :inactive }
+      active_until { 1.day.ago }
+    end
+
     trait :with_immich_integration do
       settings do
         {
spec/fixtures/files/google/location-history/with_activitySegment_with_startLocation.json (vendored, new file, 10 lines)

@@ -0,0 +1,10 @@
+{
+  "timelineObjects": [
+    {
+      "activitySegment": {
+        "startLocation": { "latitudeE7": 123422222, "longitudeE7": 123422222 },
+        "duration": { "startTimestamp": "2025-03-24 20:07:24 +0100" }
+      }
+    }
+  ]
+}
@@ -0,0 +1,10 @@
+{
+  "timelineObjects": [
+    {
+      "activitySegment": {
+        "startLocation": { "latitudeE7": 123466666, "longitudeE7": 123466666 },
+        "duration": { "startTimestampMs": "1742844302585" }
+      }
+    }
+  ]
+}
@@ -0,0 +1,10 @@
+{
+  "timelineObjects": [
+    {
+      "activitySegment": {
+        "startLocation": { "latitudeE7": 123455555, "longitudeE7": 123455555 },
+        "duration": { "startTimestamp": "1742844232" }
+      }
+    }
+  ]
+}
@@ -0,0 +1,10 @@
+{
+  "timelineObjects": [
+    {
+      "activitySegment": {
+        "startLocation": { "latitudeE7": 123444444, "longitudeE7": 123444444 },
+        "duration": { "startTimestamp": "1742844302585" }
+      }
+    }
+  ]
+}
@@ -0,0 +1,10 @@
+{
+  "timelineObjects": [
+    {
+      "activitySegment": {
+        "startLocation": { "latitudeE7": 123433333, "longitudeE7": 123433333 },
+        "duration": { "startTimestamp": "2025-03-24T20:20:23+01:00" }
+      }
+    }
+  ]
+}
spec/fixtures/files/google/location-history/with_activitySegment_without_startLocation.json (vendored, new file, 14 lines)

@@ -0,0 +1,14 @@
+{
+  "timelineObjects": [
+    {
+      "activitySegment": {
+        "waypointPath": {
+          "waypoints": [
+            { "latE7": 123411111, "lngE7": 123411111 }
+          ]
+        },
+        "duration": { "startTimestamp": "2025-03-24 20:07:24 +0100" }
+      }
+    }
+  ]
+}
@@ -0,0 +1,9 @@
+{
+  "timelineObjects": [
+    {
+      "activitySegment": {
+        "duration": { "startTimestamp": "2025-03-24 20:07:24 +0100" }
+      }
+    }
+  ]
+}
spec/fixtures/files/google/location-history/with_placeVisit_with_location_with_coordinates.json (vendored, new file, 10 lines)

@@ -0,0 +1,10 @@
+{
+  "timelineObjects": [
+    {
+      "placeVisit": {
+        "location": { "latitudeE7": 123477777, "longitudeE7": 123477777 },
+        "duration": { "startTimestamp": "1742844232" }
+      }
+    }
+  ]
+}
@@ -0,0 +1,10 @@
+{
+  "timelineObjects": [
+    {
+      "placeVisit": {
+        "location": { "latitudeE7": 123488888, "longitudeE7": 123488888 },
+        "duration": { "startTimestamp": "2025-03-24T20:25:02+01:00" }
+      }
+    }
+  ]
+}
@@ -0,0 +1,10 @@
+{
+  "timelineObjects": [
+    {
+      "placeVisit": {
+        "location": { "latitudeE7": 123511111, "longitudeE7": 123511111 },
+        "duration": { "startTimestamp": "1742844302585" }
+      }
+    }
+  ]
+}
@@ -0,0 +1,10 @@
+{
+  "timelineObjects": [
+    {
+      "placeVisit": {
+        "location": { "latitudeE7": 123499999, "longitudeE7": 123499999 },
+        "duration": { "startTimestamp": "1742844302" }
+      }
+    }
+  ]
+}
@@ -0,0 +1,10 @@
+{
+  "timelineObjects": [
+    {
+      "placeVisit": {
+        "location": { "latitudeE7": 123522222, "longitudeE7": 123522222 },
+        "duration": { "startTimestampMs": "1742844302585" }
+      }
+    }
+  ]
+}
@@ -0,0 +1,10 @@
+{
+  "timelineObjects": [
+    {
+      "placeVisit": {
+        "location": {},
+        "duration": { "startTimestamp": "2025-03-24 20:25:02 +0100" }
+      }
+    }
+  ]
+}
@@ -0,0 +1,10 @@
+{
+  "timelineObjects": [
+    {
+      "placeVisit": {
+        "otherCandidateLocations": [{ "latitudeE7": 123533333, "longitudeE7": 123533333 }],
+        "duration": { "startTimestamp": "2025-03-24 20:25:02 +0100" }
+      }
+    }
+  ]
+}
@@ -7,7 +7,7 @@ RSpec.describe BulkVisitsSuggestingJob, type: :job do
   let(:start_at) { 1.day.ago.beginning_of_day }
   let(:end_at) { 1.day.ago.end_of_day }
   let(:user) { create(:user) }
-  let(:inactive_user) { create(:user, status: :inactive) }
+  let(:inactive_user) { create(:user, :inactive) }
   let(:user_with_points) { create(:user) }
   let(:time_chunks) { [[start_at, end_at]] }
@@ -8,8 +8,8 @@ RSpec.describe ExportJob, type: :job do
   let(:end_at) { Time.zone.now }

   it 'calls the Exports::Create service class' do
-    expect(Exports::Create).to receive(:new).with(export:, start_at:, end_at:, file_format: :json).and_call_original
+    expect(Exports::Create).to receive(:new).with(export:).and_call_original

-    described_class.perform_now(export.id, start_at, end_at)
+    described_class.perform_now(export.id)
   end
 end
@@ -2,12 +2,17 @@

 require 'rails_helper'

-RSpec.describe ImportJob, type: :job do
+RSpec.describe Import::ProcessJob, type: :job do
   describe '#perform' do
-    subject(:perform) { described_class.new.perform(user.id, import.id) }
+    subject(:perform) { described_class.new.perform(import.id) }

     let(:user) { create(:user) }
-    let!(:import) { create(:import, user:, name: 'owntracks_export.json') }
+    let!(:import) { create(:import, user:, name: '2024-03.rec') }
+    let(:file_path) { Rails.root.join('spec/fixtures/files/owntracks/2024-03.rec') }
+
+    before do
+      import.file.attach(io: File.open(file_path), filename: '2024-03.rec', content_type: 'application/octet-stream')
+    end

     it 'creates points' do
       expect { perform }.to change { Point.count }.by(9)
Some files were not shown because too many files have changed in this diff.