diff --git a/.app_version b/.app_version index 3d9dcb1b..35aa2f3c 100644 --- a/.app_version +++ b/.app_version @@ -1 +1 @@ -0.25.3 +0.25.4 diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index e2b7aeb3..597df91b 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -7,9 +7,9 @@ services: dockerfile: Dockerfile container_name: dawarich_dev volumes: - - "${PWD}:/var/app:cached" - dawarich_public:/var/app/public - dawarich_watched:/var/app/tmp/imports/watched + - dawarich_storage:/var/app/storage networks: - dawarich ports: @@ -71,3 +71,4 @@ volumes: dawarich_shared: dawarich_public: dawarich_watched: + dawarich_storage: diff --git a/CHANGELOG.md b/CHANGELOG.md index e90314c1..bae9433d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,75 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). + +# 0.25.4 - 2025-04-02 + +⚠️ This release includes a breaking change. ⚠️ + +Make sure to add `dawarich_storage` volume to your `docker-compose.yml` file. Example: + +```diff +... + + dawarich_app: + image: freikin/dawarich:latest + container_name: dawarich_app + volumes: + - dawarich_public:/var/app/public + - dawarich_watched:/var/app/tmp/imports/watched ++ - dawarich_storage:/var/app/storage + +... + + dawarich_sidekiq: + image: freikin/dawarich:latest + container_name: dawarich_sidekiq + volumes: + - dawarich_public:/var/app/public + - dawarich_watched:/var/app/tmp/imports/watched ++ - dawarich_storage:/var/app/storage + +volumes: + dawarich_db_data: + dawarich_shared: + dawarich_public: + dawarich_watched: ++ dawarich_storage: +``` + + +In this release we're changing the way import files are being stored. Previously, they were being stored in the `raw_data` column of the `imports` table. Now, they are being attached to the import record. 
All new imports will use the new storage. To migrate existing imports, you can use the `rake imports:migrate_to_new_storage` task. Run it in the container shell. + +This is an optional task that will not affect your points or other data. +Big imports might take a while to migrate, so be patient. + +Also, you can now migrate existing exports to the new storage using the `rake exports:migrate_to_new_storage` task (in the container shell) or just delete them. + +If your hardware doesn't have enough memory to migrate the imports, you can delete your imports and re-import them. + +## Added + +- Sentry can now be used for error tracking. +- Subscription management is now available in non-self-hosted mode. + +## Changed + +- Import files are now being attached to the import record instead of being stored in the `raw_data` database column. +- Import files can now be stored in S3-compatible storage. +- Export files are now being attached to the export record instead of being stored in the file system. +- Export files can now be stored in S3-compatible storage. +- Users can now import Google's Records.json file via the UI instead of using the CLI. +- Optional telemetry sending is now disabled and will be removed in the future. + +## Fixed + +- Moving points on the map now works correctly. #957 +- `rake points:migrate_to_lonlat` task now also reindexes the points table. +- Fixed filling `lonlat` column for old places after reverse geocoding. +- Deleting an import now correctly recalculates stats. +- Datetime across the app is now being displayed in human-readable format, i.e. 26 Dec 2024, 13:49. Hover over the datetime to see the ISO 8601 timestamp. 
+ + # 0.25.3 - 2025-03-22 ## Fixed diff --git a/Gemfile b/Gemfile index 4ed5dad3..ec872bfe 100644 --- a/Gemfile +++ b/Gemfile @@ -5,6 +5,10 @@ git_source(:github) { |repo| "https://github.com/#{repo}.git" } ruby File.read('.ruby-version').strip +# https://meta.discourse.org/t/cant-rebuild-due-to-aws-sdk-gem-bump-and-new-aws-data-integrity-protections/354217/40 +gem 'aws-sdk-s3', '~> 1.177.0', require: false +gem 'aws-sdk-core', '~> 3.215.1', require: false +gem 'aws-sdk-kms', '~> 1.96.0', require: false gem 'bootsnap', require: false gem 'chartkick' gem 'data_migrate' @@ -27,7 +31,8 @@ gem 'rgeo' gem 'rgeo-activerecord' gem 'rswag-api' gem 'rswag-ui' -gem 'shrine', '~> 3.6' +gem 'sentry-ruby' +gem 'sentry-rails' gem 'sidekiq' gem 'sidekiq-cron' gem 'sidekiq-limit_fetch' @@ -37,8 +42,10 @@ gem 'strong_migrations' gem 'tailwindcss-rails' gem 'turbo-rails' gem 'tzinfo-data', platforms: %i[mingw mswin x64_mingw jruby] +gem 'jwt' group :development, :test do + gem 'brakeman', require: false gem 'debug', platforms: %i[mri mingw x64_mingw] gem 'dotenv-rails' gem 'factory_bot_rails' diff --git a/Gemfile.lock b/Gemfile.lock index adff6149..ff5a3ab7 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -79,19 +79,36 @@ GEM public_suffix (>= 2.0.2, < 7.0) ast (2.4.2) attr_extras (7.1.0) + aws-eventstream (1.3.2) + aws-partitions (1.1072.0) + aws-sdk-core (3.215.1) + aws-eventstream (~> 1, >= 1.3.0) + aws-partitions (~> 1, >= 1.992.0) + aws-sigv4 (~> 1.9) + jmespath (~> 1, >= 1.6.1) + aws-sdk-kms (1.96.0) + aws-sdk-core (~> 3, >= 3.210.0) + aws-sigv4 (~> 1.5) + aws-sdk-s3 (1.177.0) + aws-sdk-core (~> 3, >= 3.210.0) + aws-sdk-kms (~> 1) + aws-sigv4 (~> 1.5) + aws-sigv4 (1.11.0) + aws-eventstream (~> 1, >= 1.0.2) base64 (0.2.0) bcrypt (3.1.20) benchmark (0.4.0) bigdecimal (3.1.9) bootsnap (1.18.4) msgpack (~> 1.2) + brakeman (7.0.2) + racc builder (3.3.0) byebug (11.1.3) chartkick (5.1.3) coderay (1.1.3) concurrent-ruby (1.3.5) connection_pool (2.5.0) - content_disposition (1.0.0) 
crack (1.0.0) bigdecimal rexml @@ -121,8 +138,6 @@ GEM dotenv-rails (3.1.7) dotenv (= 3.1.7) railties (>= 6.1) - down (5.4.2) - addressable (~> 2.8) drb (2.2.1) erubi (1.13.1) et-orbi (1.2.11) @@ -164,9 +179,12 @@ GEM pp (>= 0.6.0) rdoc (>= 4.0.0) reline (>= 0.4.2) + jmespath (1.6.2) json (2.10.1) json-schema (5.0.1) addressable (~> 2.8) + jwt (2.10.1) + base64 kaminari (1.2.2) activesupport (>= 4.1.0) kaminari-actionview (= 1.2.2) @@ -369,11 +387,14 @@ GEM rubocop-ast (>= 1.38.0, < 2.0) ruby-progressbar (1.13.0) securerandom (0.4.1) + sentry-rails (5.23.0) + railties (>= 5.0) + sentry-ruby (~> 5.23.0) + sentry-ruby (5.23.0) + bigdecimal + concurrent-ruby (~> 1.0, >= 1.0.2) shoulda-matchers (6.4.0) activesupport (>= 5.2.0) - shrine (3.6.0) - content_disposition (~> 1.0) - down (~> 5.1) sidekiq (7.3.9) base64 connection_pool (>= 2.3.0) @@ -453,7 +474,11 @@ PLATFORMS DEPENDENCIES activerecord-postgis-adapter + aws-sdk-core (~> 3.215.1) + aws-sdk-kms (~> 1.96.0) + aws-sdk-s3 (~> 1.177.0) bootsnap + brakeman chartkick data_migrate database_consistency @@ -469,6 +494,7 @@ DEPENDENCIES groupdate httparty importmap-rails + jwt kaminari lograge oj @@ -487,8 +513,9 @@ DEPENDENCIES rswag-specs rswag-ui rubocop-rails + sentry-rails + sentry-ruby shoulda-matchers - shrine (~> 3.6) sidekiq sidekiq-cron sidekiq-limit_fetch diff --git a/app/controllers/api/v1/points_controller.rb b/app/controllers/api/v1/points_controller.rb index dc34387c..8eddebf6 100644 --- a/app/controllers/api/v1/points_controller.rb +++ b/app/controllers/api/v1/points_controller.rb @@ -32,7 +32,7 @@ class Api::V1::PointsController < ApiController def update point = current_api_user.tracked_points.find(params[:id]) - point.update(point_params) + point.update(lonlat: "POINT(#{point_params[:longitude]} #{point_params[:latitude]})") render json: point_serializer.new(point).call end diff --git a/app/controllers/api_controller.rb b/app/controllers/api_controller.rb index 868c72c0..ab8038cd 100644 --- 
a/app/controllers/api_controller.rb +++ b/app/controllers/api_controller.rb @@ -13,7 +13,7 @@ class ApiController < ApplicationController end def authenticate_active_api_user! - render json: { error: 'User is not active' }, status: :unauthorized unless current_api_user&.active? + render json: { error: 'User is not active' }, status: :unauthorized unless current_api_user&.active_until&.future? true end diff --git a/app/controllers/application_controller.rb b/app/controllers/application_controller.rb index 78071582..500b9711 100644 --- a/app/controllers/application_controller.rb +++ b/app/controllers/application_controller.rb @@ -26,11 +26,17 @@ class ApplicationController < ActionController::Base end def authenticate_active_user! - return if current_user&.active? + return if current_user&.active_until&.future? redirect_to root_path, notice: 'Your account is not active.', status: :see_other end + def authenticate_non_self_hosted! + return unless DawarichSettings.self_hosted? + + redirect_to root_path, notice: 'You are not authorized to perform this action.', status: :see_other + end + private def set_self_hosted_status diff --git a/app/controllers/exports_controller.rb b/app/controllers/exports_controller.rb index 34b239dc..d6fe19e8 100644 --- a/app/controllers/exports_controller.rb +++ b/app/controllers/exports_controller.rb @@ -1,6 +1,8 @@ # frozen_string_literal: true class ExportsController < ApplicationController + include ActiveStorage::SetCurrent + before_action :authenticate_user! 
before_action :set_export, only: %i[destroy] @@ -11,9 +13,13 @@ class ExportsController < ApplicationController def create export_name = "export_from_#{params[:start_at].to_date}_to_#{params[:end_at].to_date}.#{params[:file_format]}" - export = current_user.exports.create(name: export_name, status: :created) - - ExportJob.perform_later(export.id, params[:start_at], params[:end_at], file_format: params[:file_format]) + export = current_user.exports.create( + name: export_name, + status: :created, + file_format: params[:file_format], + start_at: params[:start_at], + end_at: params[:end_at] + ) redirect_to exports_url, notice: 'Export was successfully initiated. Please wait until it\'s finished.' rescue StandardError => e @@ -23,11 +29,7 @@ class ExportsController < ApplicationController end def destroy - ActiveRecord::Base.transaction do - @export.destroy - - File.delete(Rails.root.join('public', 'exports', @export.name)) - end + @export.destroy redirect_to exports_url, notice: 'Export was successfully destroyed.', status: :see_other end @@ -37,8 +39,4 @@ class ExportsController < ApplicationController def set_export @export = current_user.exports.find(params[:id]) end - - def export_params - params.require(:export).permit(:name, :url, :status) - end end diff --git a/app/controllers/imports_controller.rb b/app/controllers/imports_controller.rb index a6359e67..caf874a9 100644 --- a/app/controllers/imports_controller.rb +++ b/app/controllers/imports_controller.rb @@ -1,6 +1,8 @@ # frozen_string_literal: true class ImportsController < ApplicationController + include ActiveStorage::SetCurrent + before_action :authenticate_user! 
before_action :authenticate_active_user!, only: %i[new create] before_action :set_import, only: %i[show destroy] @@ -9,7 +11,7 @@ class ImportsController < ApplicationController @imports = current_user .imports - .select(:id, :name, :source, :created_at, :points_count) + .select(:id, :name, :source, :created_at, :processed) .order(created_at: :desc) .page(params[:page]) end @@ -23,27 +25,17 @@ class ImportsController < ApplicationController def create files = import_params[:files].reject(&:blank?) - import_ids = files.map do |file| - import = current_user.imports.create( + files.each do |file| + import = current_user.imports.build( name: file.original_filename, source: params[:import][:source] ) - file = File.read(file) + import.file.attach(io: file, filename: file.original_filename, content_type: file.content_type) - raw_data = - case params[:import][:source] - when 'gpx' then Hash.from_xml(file) - when 'owntracks' then OwnTracks::RecParser.new(file).call - else JSON.parse(file) - end - - import.update(raw_data:) - import.id + import.save! end - import_ids.each { ImportJob.perform_later(current_user.id, _1) } - redirect_to imports_url, notice: "#{files.size} files are queued to be imported in background", status: :see_other rescue StandardError => e Import.where(user: current_user, name: files.map(&:original_filename)).destroy_all diff --git a/app/controllers/settings/subscriptions_controller.rb b/app/controllers/settings/subscriptions_controller.rb new file mode 100644 index 00000000..05c39cbd --- /dev/null +++ b/app/controllers/settings/subscriptions_controller.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: true + +class Settings::SubscriptionsController < ApplicationController + before_action :authenticate_user! + before_action :authenticate_non_self_hosted! 
+ + def index; end + + def subscription_callback + token = params[:token] + + begin + decoded_token = JWT.decode( + token, + ENV['JWT_SECRET_KEY'], + true, + { algorithm: 'HS256' } + ).first.symbolize_keys + + unless decoded_token[:user_id] == current_user.id + redirect_to settings_subscriptions_path, alert: 'Invalid subscription update request.' + return + end + + current_user.update!(status: decoded_token[:status], active_until: decoded_token[:active_until]) + + redirect_to settings_subscriptions_path, notice: 'Your subscription has been updated successfully!' + rescue JWT::DecodeError + redirect_to settings_subscriptions_path, alert: 'Failed to verify subscription update.' + rescue ArgumentError + redirect_to settings_subscriptions_path, alert: 'Invalid subscription data received.' + end + end +end diff --git a/app/helpers/application_helper.rb b/app/helpers/application_helper.rb index a4a01a5e..c66e3262 100644 --- a/app/helpers/application_helper.rb +++ b/app/helpers/application_helper.rb @@ -115,6 +115,17 @@ module ApplicationHelper date.strftime('%e %B %Y') end + def human_datetime(datetime) + return unless datetime + + content_tag( + :span, + datetime.strftime('%e %b %Y, %H:%M'), + class: 'tooltip', + data: { tip: datetime.iso8601 } + ) + end + def speed_text_color(speed) return 'text-default' if speed.to_i >= 0 @@ -126,4 +137,17 @@ module ApplicationHelper speed * 3.6 end + + def days_left(active_until) + return unless active_until + + time_words = distance_of_time_in_words(Time.zone.now, active_until) + + content_tag( + :span, + time_words, + class: 'tooltip', + data: { tip: "Expires on #{active_until.iso8601}" } + ) + end end diff --git a/app/javascript/controllers/maps_controller.js b/app/javascript/controllers/maps_controller.js index a74aaac3..a93affb4 100644 --- a/app/javascript/controllers/maps_controller.js +++ b/app/javascript/controllers/maps_controller.js @@ -501,10 +501,11 @@ export default class extends BaseController { } deletePoint(id, 
apiKey) { - fetch(`/api/v1/points/${id}?api_key=${apiKey}`, { + fetch(`/api/v1/points/${id}`, { method: 'DELETE', headers: { 'Content-Type': 'application/json', + 'Authorization': `Bearer ${apiKey}` } }) .then(response => { diff --git a/app/jobs/app_version_checking_job.rb b/app/jobs/app_version_checking_job.rb index a6fc2d9b..2463326d 100644 --- a/app/jobs/app_version_checking_job.rb +++ b/app/jobs/app_version_checking_job.rb @@ -2,6 +2,7 @@ class AppVersionCheckingJob < ApplicationJob queue_as :default + sidekiq_options retry: false def perform Rails.cache.delete(CheckAppVersion::VERSION_CACHE_KEY) diff --git a/app/jobs/export_job.rb b/app/jobs/export_job.rb index b8872c05..ea1ae819 100644 --- a/app/jobs/export_job.rb +++ b/app/jobs/export_job.rb @@ -3,9 +3,9 @@ class ExportJob < ApplicationJob queue_as :exports - def perform(export_id, start_at, end_at, file_format: :json) + def perform(export_id) export = Export.find(export_id) - Exports::Create.new(export:, start_at:, end_at:, file_format:).call + Exports::Create.new(export:).call end end diff --git a/app/jobs/import/process_job.rb b/app/jobs/import/process_job.rb new file mode 100644 index 00000000..4529d5f7 --- /dev/null +++ b/app/jobs/import/process_job.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +class Import::ProcessJob < ApplicationJob + queue_as :imports + + def perform(import_id) + import = Import.find(import_id) + + import.process! 
+ end +end diff --git a/app/jobs/import/update_points_count_job.rb b/app/jobs/import/update_points_count_job.rb new file mode 100644 index 00000000..b0757024 --- /dev/null +++ b/app/jobs/import/update_points_count_job.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +class Import::UpdatePointsCountJob < ApplicationJob + queue_as :imports + + def perform(import_id) + import = Import.find(import_id) + + import.update(processed: import.points.count) + rescue ActiveRecord::RecordNotFound + nil + end +end diff --git a/app/jobs/import/watcher_job.rb b/app/jobs/import/watcher_job.rb index 57ae24bd..a2f6676f 100644 --- a/app/jobs/import/watcher_job.rb +++ b/app/jobs/import/watcher_job.rb @@ -5,6 +5,8 @@ class Import::WatcherJob < ApplicationJob sidekiq_options retry: false def perform + return unless DawarichSettings.self_hosted? + Imports::Watcher.new.call end end diff --git a/app/jobs/import_job.rb b/app/jobs/import_job.rb deleted file mode 100644 index a07cfa46..00000000 --- a/app/jobs/import_job.rb +++ /dev/null @@ -1,12 +0,0 @@ -# frozen_string_literal: true - -class ImportJob < ApplicationJob - queue_as :imports - - def perform(user_id, import_id) - user = User.find(user_id) - import = user.imports.find(import_id) - - import.process! 
- end -end diff --git a/app/models/concerns/distanceable.rb b/app/models/concerns/distanceable.rb index 6b2d1546..72b12792 100644 --- a/app/models/concerns/distanceable.rb +++ b/app/models/concerns/distanceable.rb @@ -59,12 +59,11 @@ module Distanceable return 0 if points.length < 2 total_meters = points.each_cons(2).sum do |point1, point2| - connection.select_value(<<-SQL.squish) - SELECT ST_Distance( - ST_GeomFromEWKT('#{point1.lonlat}')::geography, - ST_GeomFromEWKT('#{point2.lonlat}')::geography - ) - SQL + connection.select_value( + 'SELECT ST_Distance(ST_GeomFromEWKT($1)::geography, ST_GeomFromEWKT($2)::geography)', + nil, + [point1.lonlat, point2.lonlat] + ) end total_meters.to_f / DISTANCE_UNITS[unit.to_sym] diff --git a/app/models/export.rb b/app/models/export.rb index c9b4d071..03e6e432 100644 --- a/app/models/export.rb +++ b/app/models/export.rb @@ -4,16 +4,35 @@ class Export < ApplicationRecord belongs_to :user enum :status, { created: 0, processing: 1, completed: 2, failed: 3 } + enum :file_format, { json: 0, gpx: 1 } validates :name, presence: true - before_destroy :delete_export_file + has_one_attached :file + + after_commit -> { ExportJob.perform_later(id) }, on: :create + after_commit -> { remove_attached_file }, on: :destroy + + def process! 
+ Exports::Create.new(export: self).call + end + + def migrate_to_new_storage + file.attach(io: File.open("public/#{url}"), filename: name) + update!(url: nil) + + File.delete("public/#{url}") + rescue StandardError => e + Rails.logger.debug("Error migrating export #{id}: #{e.message}") + end private - def delete_export_file - file_path = Rails.root.join('public', 'exports', "#{name}.json") + def remove_attached_file + file.purge_later - File.delete(file_path) if File.exist?(file_path) + File.delete("public/#{url}") + rescue StandardError => e + Rails.logger.debug("Error removing export #{id}: #{e.message}") end end diff --git a/app/models/import.rb b/app/models/import.rb index 045e8b5f..2b302589 100644 --- a/app/models/import.rb +++ b/app/models/import.rb @@ -4,9 +4,10 @@ class Import < ApplicationRecord belongs_to :user has_many :points, dependent: :destroy - delegate :count, to: :points, prefix: true + has_one_attached :file - include ImportUploader::Attachment(:raw) + after_commit -> { Import::ProcessJob.perform_later(id) }, on: :create + after_commit :remove_attached_file, on: :destroy enum :source, { google_semantic_history: 0, owntracks: 1, google_records: 2, @@ -27,4 +28,18 @@ class Import < ApplicationRecord [time.year, time.month] end.uniq end + + def migrate_to_new_storage + return if file.attached? + + raw_file = File.new(raw_data) + + file.attach(io: raw_file, filename: name, content_type: 'application/json') + end + + private + + def remove_attached_file + file.purge_later + end end diff --git a/app/models/point.rb b/app/models/point.rb index 38970d91..a438bdb5 100644 --- a/app/models/point.rb +++ b/app/models/point.rb @@ -30,6 +30,7 @@ class Point < ApplicationRecord after_create :async_reverse_geocode after_create_commit :broadcast_coordinates + after_commit -> { Import::UpdatePointsCountJob.perform_later(import_id) }, on: :destroy, if: -> { import_id.present? 
} def self.without_raw_data select(column_names - ['raw_data']) diff --git a/app/models/user.rb b/app/models/user.rb index ee4d84f8..dc0bb532 100644 --- a/app/models/user.rb +++ b/app/models/user.rb @@ -100,6 +100,22 @@ class User < ApplicationRecord end end + def can_subscribe? + active_until&.past? && !DawarichSettings.self_hosted? + end + + def generate_subscription_token + payload = { + user_id: id, + email: email, + exp: 30.minutes.from_now.to_i + } + + secret_key = ENV['JWT_SECRET_KEY'] + + JWT.encode(payload, secret_key, 'HS256') + end + private def create_api_key @@ -109,7 +125,8 @@ class User < ApplicationRecord end def activate - update(status: :active) + # TODO: Remove the `status` column in the future. + update(status: :active, active_until: 1000.years.from_now) end def sanitize_input @@ -118,21 +135,22 @@ class User < ApplicationRecord settings.try(:[], 'maps')&.try(:[], 'url')&.strip! end + # rubocop:disable Metrics/MethodLength def import_sample_points return unless Rails.env.development? || Rails.env.production? || (Rails.env.test? 
&& ENV['IMPORT_SAMPLE_POINTS']) - raw_data = Hash.from_xml( - File.read(Rails.root.join('lib/assets/sample_points.gpx')) - ) - import = imports.create( name: 'DELETE_ME_this_is_a_demo_import_DELETE_ME', - source: 'gpx', - raw_data: + source: 'gpx' ) - ImportJob.perform_later(id, import.id) + import.file.attach( + Rack::Test::UploadedFile.new( + Rails.root.join('lib/assets/sample_points.gpx'), 'application/xml' + ) + ) end + # rubocop:enable Metrics/MethodLength end diff --git a/app/services/exports/create.rb b/app/services/exports/create.rb index 08181b4d..d885afb8 100644 --- a/app/services/exports/create.rb +++ b/app/services/exports/create.rb @@ -1,28 +1,30 @@ # frozen_string_literal: true class Exports::Create - def initialize(export:, start_at:, end_at:, file_format: :json) + def initialize(export:) @export = export @user = export.user - @start_at = start_at.to_datetime - @end_at = end_at.to_datetime - @file_format = file_format + @start_at = export.start_at + @end_at = export.end_at + @file_format = export.file_format end def call - export.update!(status: :processing) + ActiveRecord::Base.transaction do + export.update!(status: :processing) - points = time_framed_points + points = time_framed_points - data = points_data(points) + data = points_data(points) - create_export_file(data) + attach_export_file(data) - export.update!(status: :completed, url: "exports/#{export.name}") + export.update!(status: :completed) - create_export_finished_notification + notify_export_finished + end rescue StandardError => e - create_failed_export_notification(e) + notify_export_failed(e) export.update!(status: :failed) end @@ -38,7 +40,7 @@ class Exports::Create .order(timestamp: :asc) end - def create_export_finished_notification + def notify_export_finished Notifications::Create.new( user:, kind: :info, @@ -47,7 +49,7 @@ class Exports::Create ).call end - def create_failed_export_notification(error) + def notify_export_failed(error) Notifications::Create.new( user:, kind: 
:error, @@ -72,18 +74,18 @@ class Exports::Create Points::GpxSerializer.new(points, export.name).call end - def create_export_file(data) - dir_path = Rails.root.join('public/exports') - - FileUtils.mkdir_p(dir_path) unless Dir.exist?(dir_path) - - file_path = dir_path.join(export.name) - - Rails.logger.info("Creating export file at: #{file_path}") - - File.open(file_path, 'w') { |file| file.write(data) } + def attach_export_file(data) + export.file.attach(io: StringIO.new(data.to_s), filename: export.name, content_type:) rescue StandardError => e Rails.logger.error("Failed to create export file: #{e.message}") raise end + + def content_type + case file_format.to_sym + when :json then 'application/json' + when :gpx then 'application/gpx+xml' + else raise ArgumentError, "Unsupported file format: #{file_format}" + end + end end diff --git a/app/services/geojson/import_parser.rb b/app/services/geojson/import_parser.rb index 13b8651c..95edaf10 100644 --- a/app/services/geojson/import_parser.rb +++ b/app/services/geojson/import_parser.rb @@ -2,34 +2,28 @@ class Geojson::ImportParser include Imports::Broadcaster + include PointValidation - attr_reader :import, :json, :user_id + attr_reader :import, :user_id def initialize(import, user_id) @import = import - @json = import.raw_data @user_id = user_id end def call - data = Geojson::Params.new(json).call + import.file.download do |file| + json = Oj.load(file) - data.each.with_index(1) do |point, index| - next if point_exists?(point, user_id) + data = Geojson::Params.new(json).call - Point.create!(point.merge(user_id:, import_id: import.id)) + data.each.with_index(1) do |point, index| + next if point_exists?(point, user_id) - broadcast_import_progress(import, index) + Point.create!(point.merge(user_id:, import_id: import.id)) + + broadcast_import_progress(import, index) + end end end - - private - - def point_exists?(params, user_id) - Point.exists?( - lonlat: params[:lonlat], - timestamp: params[:timestamp], - user_id: - ) - 
end end diff --git a/app/services/google_maps/phone_takeout_parser.rb index a30b34d3..97d4626c 100644 --- a/app/services/google_maps/phone_takeout_parser.rb +++ b/app/services/google_maps/phone_takeout_parser.rb @@ -48,13 +48,15 @@ class GoogleMaps::PhoneTakeoutParser raw_signals = [] raw_array = [] - if import.raw_data.is_a?(Array) - raw_array = parse_raw_array(import.raw_data) - else - if import.raw_data['semanticSegments'] - semantic_segments = parse_semantic_segments(import.raw_data['semanticSegments']) + import.file.download do |file| + json = Oj.load(file) + + if json.is_a?(Array) + raw_array = parse_raw_array(json) + else + semantic_segments = parse_semantic_segments(json['semanticSegments']) if json['semanticSegments'] + raw_signals = parse_raw_signals(json['rawSignals']) if json['rawSignals'] end - raw_signals = parse_raw_signals(import.raw_data['rawSignals']) if import.raw_data['rawSignals'] end semantic_segments + raw_signals + raw_array diff --git a/app/services/google_maps/records_importer.rb index ec9555f7..3cecb1bd 100644 --- a/app/services/google_maps/records_importer.rb +++ b/app/services/google_maps/records_importer.rb @@ -1,5 +1,8 @@ # frozen_string_literal: true +# This class is used to import Google's Records.json file +# via the CLI, vs the UI, which uses the `GoogleMaps::RecordsStorageImporter` class. + class GoogleMaps::RecordsImporter include Imports::Broadcaster diff --git a/app/services/google_maps/records_storage_importer.rb new file mode 100644 index 00000000..76a7673f --- /dev/null +++ b/app/services/google_maps/records_storage_importer.rb @@ -0,0 +1,102 @@ +# frozen_string_literal: true + +# This class is used to import Google's Records.json file +# via the UI, vs the CLI, which uses the `GoogleMaps::RecordsImporter` class. 
+ +class GoogleMaps::RecordsStorageImporter + BATCH_SIZE = 1000 + MAX_RETRIES = 3 + DOWNLOAD_TIMEOUT = 300 # 5 minutes timeout + + def initialize(import, user_id) + @import = import + @user = User.find_by(id: user_id) + end + + def call + process_file_in_batches + rescue Oj::ParseError => e + Rails.logger.error("JSON parsing error: #{e.message}") + raise + end + + private + + attr_reader :import, :user + + def process_file_in_batches + file = download_file + verify_file_integrity(file) + locations = parse_file(file) + process_locations_in_batches(locations) if locations.present? + end + + def download_file + retries = 0 + + begin + Timeout.timeout(DOWNLOAD_TIMEOUT) do + import.file.download + end + rescue Timeout::Error => e + retries += 1 + if retries <= MAX_RETRIES + Rails.logger.warn("Download timeout, attempt #{retries} of #{MAX_RETRIES}") + retry + else + Rails.logger.error("Download failed after #{MAX_RETRIES} attempts") + raise + end + rescue StandardError => e + Rails.logger.error("Download error: #{e.message}") + raise + end + end + + def verify_file_integrity(file) + # Verify file size + expected_size = import.file.blob.byte_size + actual_size = file.size + + if expected_size != actual_size + raise "Incomplete download: expected #{expected_size} bytes, got #{actual_size} bytes" + end + + # Verify checksum + expected_checksum = import.file.blob.checksum + actual_checksum = Base64.strict_encode64(Digest::MD5.digest(file)) + + return unless expected_checksum != actual_checksum + + raise "Checksum mismatch: expected #{expected_checksum}, got #{actual_checksum}" + end + + def parse_file(file) + parsed_file = Oj.load(file, mode: :compat) + return nil unless parsed_file.is_a?(Hash) && parsed_file['locations'] + + parsed_file['locations'] + end + + def process_locations_in_batches(locations) + batch = [] + index = 0 + + locations.each do |location| + batch << location + + next unless batch.size >= BATCH_SIZE + + process_batch(batch, index) + index += BATCH_SIZE + 
batch = [] + end + + # Process any remaining records that didn't make a full batch + process_batch(batch, index) unless batch.empty? + end + + def process_batch(batch, index) + GoogleMaps::RecordsImporter.new(import, index).call(batch) + end +end diff --git a/app/services/google_maps/semantic_history_parser.rb b/app/services/google_maps/semantic_history_parser.rb index 77984c09..b8d38c5d 100644 --- a/app/services/google_maps/semantic_history_parser.rb +++ b/app/services/google_maps/semantic_history_parser.rb @@ -13,8 +13,6 @@ class GoogleMaps::SemanticHistoryParser end def call - points_data = parse_json - points_data.each_slice(BATCH_SIZE) do |batch| @current_index += batch.size process_batch(batch) @@ -62,10 +60,18 @@ class GoogleMaps::SemanticHistoryParser ) end - def parse_json - import.raw_data['timelineObjects'].flat_map do |timeline_object| - parse_timeline_object(timeline_object) - end.compact + def points_data + data = nil + + import.file.download do |f| + json = Oj.load(f) + + data = json['timelineObjects'].flat_map do |timeline_object| + parse_timeline_object(timeline_object) + end.compact + end + + data end def parse_timeline_object(timeline_object) diff --git a/app/services/gpx/track_importer.rb b/app/services/gpx/track_importer.rb index 62f327cc..9abd1a56 100644 --- a/app/services/gpx/track_importer.rb +++ b/app/services/gpx/track_importer.rb @@ -3,22 +3,25 @@ class Gpx::TrackImporter include Imports::Broadcaster - attr_reader :import, :json, :user_id + attr_reader :import, :user_id def initialize(import, user_id) @import = import - @json = import.raw_data @user_id = user_id end def call - tracks = json['gpx']['trk'] - tracks_arr = tracks.is_a?(Array) ? 
tracks : [tracks] + import.file.download do |file| + json = Hash.from_xml(file) - points = tracks_arr.map { parse_track(_1) }.flatten.compact - points_data = points.map.with_index(1) { |point, index| prepare_point(point, index) }.compact + tracks = json['gpx']['trk'] + tracks_arr = tracks.is_a?(Array) ? tracks : [tracks] - bulk_insert_points(points_data) + points = tracks_arr.map { parse_track(_1) }.flatten.compact + points_data = points.map { prepare_point(_1) }.compact + + bulk_insert_points(points_data) + end end private @@ -32,7 +35,7 @@ class Gpx::TrackImporter segments_array.compact.map { |segment| segment['trkpt'] } end - def prepare_point(point, index) + def prepare_point(point) return if point['lat'].blank? || point['lon'].blank? || point['time'].blank? { diff --git a/app/services/immich/import_geodata.rb b/app/services/immich/import_geodata.rb index 469761d6..770a8087 100644 --- a/app/services/immich/import_geodata.rb +++ b/app/services/immich/import_geodata.rb @@ -20,10 +20,13 @@ class Immich::ImportGeodata create_import_failed_notification(import.name) and return unless import.new_record? - import.raw_data = immich_data_json - import.save! + import.file.attach( + io: StringIO.new(immich_data_json.to_json), + filename: file_name, + content_type: 'application/json' + ) - ImportJob.perform_later(user.id, import.id) + import.save! 
end private diff --git a/app/services/imports/create.rb b/app/services/imports/create.rb index e34661b1..7ad60d36 100644 --- a/app/services/imports/create.rb +++ b/app/services/imports/create.rb @@ -14,7 +14,8 @@ class Imports::Create create_import_finished_notification(import, user) schedule_stats_creating(user.id) - # schedule_visit_suggesting(user.id, import) # Disabled until places & visits are reworked + schedule_visit_suggesting(user.id, import) + update_import_points_count(import) rescue StandardError => e create_import_failed_notification(import, user, e) end @@ -26,6 +27,7 @@ class Imports::Create case source when 'google_semantic_history' then GoogleMaps::SemanticHistoryParser when 'google_phone_takeout' then GoogleMaps::PhoneTakeoutParser + when 'google_records' then GoogleMaps::RecordsStorageImporter when 'owntracks' then OwnTracks::Importer when 'gpx' then Gpx::TrackImporter when 'geojson' then Geojson::ImportParser @@ -33,6 +35,10 @@ class Imports::Create end end + def update_import_points_count(import) + Import::UpdatePointsCountJob.perform_later(import.id) + end + def schedule_stats_creating(user_id) import.years_and_months_tracked.each do |year, month| Stats::CalculatingJob.perform_later(user_id, year, month) @@ -44,7 +50,7 @@ class Imports::Create start_at = Time.zone.at(points.first.timestamp) end_at = Time.zone.at(points.last.timestamp) - VisitSuggestingJob.perform_later(user_ids: [user_id], start_at:, end_at:) + VisitSuggestingJob.perform_later(user_id:, start_at:, end_at:) end def create_import_finished_notification(import, user) diff --git a/app/services/imports/destroy.rb b/app/services/imports/destroy.rb index 55efb008..fc0a6ff2 100644 --- a/app/services/imports/destroy.rb +++ b/app/services/imports/destroy.rb @@ -11,6 +11,6 @@ class Imports::Destroy def call @import.destroy! 
- BulkStatsCalculatingJob.perform_later(@user.id) + Stats::BulkCalculator.new(@user.id).call end end diff --git a/app/services/imports/watcher.rb b/app/services/imports/watcher.rb index de9ca262..79e0a59c 100644 --- a/app/services/imports/watcher.rb +++ b/app/services/imports/watcher.rb @@ -16,7 +16,7 @@ class Imports::Watcher file_names = file_names(user_directory_path) file_names.each do |file_name| - process_file(user, user_directory_path, file_name) + create_import(user, user_directory_path, file_name) end end end @@ -26,49 +26,29 @@ class Imports::Watcher def user_directories Dir.entries(WATCHED_DIR_PATH).select do |entry| path = File.join(WATCHED_DIR_PATH, entry) + File.directory?(path) && !['.', '..'].include?(entry) end end - def find_user(file_name) - email = file_name.split('_').first - - User.find_by(email:) - end - def file_names(directory_path) Dir.entries(directory_path).select { |file| SUPPORTED_FORMATS.include?(File.extname(file)) } end - def process_file(user, directory_path, file_name) + def create_import(user, directory_path, file_name) file_path = File.join(directory_path, file_name) import = Import.find_or_initialize_by(user:, name: file_name) return if import.persisted? import.source = source(file_name) - import.raw_data = raw_data(file_path, import.source) + import.file.attach( + io: File.open(file_path), + filename: file_name, + content_type: mime_type(import.source) + ) import.save! - - ImportJob.perform_later(user.id, import.id) - end - - def find_or_initialize_import(user, file_name) - import_name = file_name.split('_')[1..].join('_') - - Import.find_or_initialize_by(user:, name: import_name) - end - - def set_import_attributes(import, file_path, file_name) - source = source(file_name) - - import.source = source - import.raw_data = raw_data(file_path, source) - - import.save! 
- - import.id end def source(file_name) @@ -89,16 +69,13 @@ class Imports::Watcher end end - def raw_data(file_path, source) - file = File.read(file_path) - + def mime_type(source) case source.to_sym - when :gpx - Hash.from_xml(file) + when :gpx then 'application/xml' when :json, :geojson, :google_phone_takeout, :google_records, :google_semantic_history - JSON.parse(file) + 'application/json' when :owntracks - OwnTracks::RecParser.new(file).call + 'application/octet-stream' else raise UnsupportedSourceError, "Unsupported source: #{source}" end diff --git a/app/services/own_tracks/importer.rb b/app/services/own_tracks/importer.rb index 20dbc706..75cd88ab 100644 --- a/app/services/own_tracks/importer.rb +++ b/app/services/own_tracks/importer.rb @@ -3,25 +3,28 @@ class OwnTracks::Importer include Imports::Broadcaster - attr_reader :import, :data, :user_id + attr_reader :import, :user_id def initialize(import, user_id) @import = import - @data = import.raw_data @user_id = user_id end def call - points_data = data.map.with_index(1) do |point, index| - OwnTracks::Params.new(point).call.merge( - import_id: import.id, - user_id: user_id, - created_at: Time.current, - updated_at: Time.current - ) - end + import.file.download do |file| + parsed_data = OwnTracks::RecParser.new(file).call - bulk_insert_points(points_data) + points_data = parsed_data.map do |point| + OwnTracks::Params.new(point).call.merge( + import_id: import.id, + user_id: user_id, + created_at: Time.current, + updated_at: Time.current + ) + end + + bulk_insert_points(points_data) + end end private diff --git a/app/services/own_tracks/rec_parser.rb b/app/services/own_tracks/rec_parser.rb index 7e502263..7e3550af 100644 --- a/app/services/own_tracks/rec_parser.rb +++ b/app/services/own_tracks/rec_parser.rb @@ -10,11 +10,8 @@ class OwnTracks::RecParser def call file.split("\n").map do |line| parts = line.split("\t") - if parts.size > 2 && parts[1].strip == '*' - JSON.parse(parts[2]) - else - nil - end + + 
Oj.load(parts[2]) if parts.size > 2 && parts[1].strip == '*' end.compact end end diff --git a/app/services/photoprism/import_geodata.rb b/app/services/photoprism/import_geodata.rb index 182681e6..2d0e7a68 100644 --- a/app/services/photoprism/import_geodata.rb +++ b/app/services/photoprism/import_geodata.rb @@ -23,8 +23,13 @@ class Photoprism::ImportGeodata import = find_or_create_import(json_data) return create_import_failed_notification(import.name) unless import.new_record? - import.update!(raw_data: json_data) - ImportJob.perform_later(user.id, import.id) + import.file.attach( + io: StringIO.new(json_data.to_json), + filename: file_name(json_data), + content_type: 'application/json' + ) + + import.save! end def find_or_create_import(json_data) diff --git a/app/services/photos/import_parser.rb b/app/services/photos/import_parser.rb index 610681fb..b91a9ca3 100644 --- a/app/services/photos/import_parser.rb +++ b/app/services/photos/import_parser.rb @@ -2,7 +2,7 @@ class Photos::ImportParser include Imports::Broadcaster - + include PointValidation attr_reader :import, :json, :user_id def initialize(import, user_id) @@ -29,12 +29,4 @@ class Photos::ImportParser broadcast_import_progress(import, index) end - - def point_exists?(point, timestamp) - Point.exists?( - lonlat: "POINT(#{point['longitude']} #{point['latitude']})", - timestamp:, - user_id: - ) - end end diff --git a/app/services/reverse_geocoding/places/fetch_data.rb b/app/services/reverse_geocoding/places/fetch_data.rb index 3b6309a1..1390c29a 100644 --- a/app/services/reverse_geocoding/places/fetch_data.rb +++ b/app/services/reverse_geocoding/places/fetch_data.rb @@ -19,6 +19,7 @@ class ReverseGeocoding::Places::FetchData first_place = reverse_geocoded_places.shift update_place(first_place) + reverse_geocoded_places.each { |reverse_geocoded_place| fetch_and_create_place(reverse_geocoded_place) } end @@ -49,6 +50,9 @@ class ReverseGeocoding::Places::FetchData new_place.country = 
data['properties']['country'] new_place.geodata = data new_place.source = :photon + if new_place.lonlat.blank? + new_place.lonlat = "POINT(#{data['geometry']['coordinates'][0]} #{data['geometry']['coordinates'][1]})" + end new_place.save! end @@ -88,7 +92,7 @@ class ReverseGeocoding::Places::FetchData limit: 10, distance_sort: true, radius: 1, - units: ::DISTANCE_UNIT, + units: ::DISTANCE_UNIT ) data.reject do |place| diff --git a/app/services/visits/find_within_bounding_box.rb b/app/services/visits/find_within_bounding_box.rb index 74b72ed7..d5bdb74a 100644 --- a/app/services/visits/find_within_bounding_box.rb +++ b/app/services/visits/find_within_bounding_box.rb @@ -12,13 +12,17 @@ module Visits end def call - bounding_box = "ST_MakeEnvelope(#{sw_lng}, #{sw_lat}, #{ne_lng}, #{ne_lat}, 4326)" - Visit .includes(:place) .where(user:) .joins(:place) - .where("ST_Contains(#{bounding_box}, ST_SetSRID(places.lonlat::geometry, 4326))") + .where( + 'ST_Contains(ST_MakeEnvelope(?, ?, ?, ?, 4326), ST_SetSRID(places.lonlat::geometry, 4326))', + sw_lng, + sw_lat, + ne_lng, + ne_lat + ) .order(started_at: :desc) end diff --git a/app/uploaders/import_uploader.rb b/app/uploaders/import_uploader.rb deleted file mode 100644 index b4513d77..00000000 --- a/app/uploaders/import_uploader.rb +++ /dev/null @@ -1,5 +0,0 @@ -# frozen_string_literal: true - -class ImportUploader < Shrine - # plugins and uploading logic -end diff --git a/app/views/exports/index.html.erb b/app/views/exports/index.html.erb index 33888a94..eaf393b4 100644 --- a/app/views/exports/index.html.erb +++ b/app/views/exports/index.html.erb @@ -37,11 +37,15 @@ <% @exports.each do |export| %> <%= export.name %> - <%= export.created_at.strftime('%Y-%m-%d %H:%M:%S') %> + <%= human_datetime(export.created_at) %> <%= export.status %> <% if export.completed? %> - <%= link_to 'Download', export.url, class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: export.name %> + <% if export.file.present? 
%> + <%= link_to 'Download', rails_blob_path(export.file, disposition: 'attachment'), class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: export.name %> + <% else %> + <%= link_to 'Download', export.url, class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: export.name %> + <% end %> <% end %> <%= link_to 'Delete', export, data: { confirm: "Are you sure?", turbo_confirm: "Are you sure?", turbo_method: :delete }, method: :delete, class: "px-4 py-2 bg-red-500 text-white rounded-md" %> diff --git a/app/views/home/index.html.erb b/app/views/home/index.html.erb index 606d4c0d..9b223f40 100644 --- a/app/views/home/index.html.erb +++ b/app/views/home/index.html.erb @@ -1,6 +1,6 @@
-
+

diff --git a/app/views/imports/_form.html.erb b/app/views/imports/_form.html.erb index ccfa8030..2e77a631 100644 --- a/app/views/imports/_form.html.erb +++ b/app/views/imports/_form.html.erb @@ -13,6 +13,24 @@

JSON files from your Takeout/Location History/Semantic Location History/YEAR

+
+
+ +

The Records.json file from your Google Takeout

+
+
+
+
+ +

A JSON file you received after your request for Takeout from your mobile device

+
+
-
-
- -

A JSON file you received after your request for Takeout from your mobile device

-
-
+
+
+ <%= paginate @imports %> +
+
<% end %>
diff --git a/app/views/imports/new.html.erb b/app/views/imports/new.html.erb index 089a09e2..c516bc5f 100644 --- a/app/views/imports/new.html.erb +++ b/app/views/imports/new.html.erb @@ -3,36 +3,6 @@

New import

- - <%= render "form", import: @import %> <%= link_to "Back to imports", imports_path, class: "btn mx-5 mb-5" %> diff --git a/app/views/layouts/application.html.erb b/app/views/layouts/application.html.erb index f41baeda..0763304b 100644 --- a/app/views/layouts/application.html.erb +++ b/app/views/layouts/application.html.erb @@ -15,6 +15,7 @@ <%= stylesheet_link_tag "application", "data-turbo-track": "reload" %> <%= javascript_importmap_tags %> <%= render 'application/favicon' %> + <%= Sentry.get_trace_propagation_meta.html_safe if Sentry.initialized? %> diff --git a/app/views/places/index.html.erb b/app/views/places/index.html.erb index 5ed9365f..fd899884 100644 --- a/app/views/places/index.html.erb +++ b/app/views/places/index.html.erb @@ -38,7 +38,7 @@ <% @places.each do |place| %> <%= place.name %> - <%= place.created_at.strftime('%Y-%m-%d %H:%M:%S') %> + <%= human_datetime(place.created_at) %> <%= "#{place.lat}, #{place.lon}" %> <%= link_to 'Delete', place, data: { confirm: "Are you sure? Deleting a place will result in deleting all visits for this place.", turbo_confirm: "Are you sure? 
Deleting a place will result in deleting all visits for this place.", turbo_method: :delete }, method: :delete, class: "px-4 py-2 bg-red-500 text-white rounded-md" %> diff --git a/app/views/points/_point.html.erb b/app/views/points/_point.html.erb index f96f5714..6c0c238b 100644 --- a/app/views/points/_point.html.erb +++ b/app/views/points/_point.html.erb @@ -14,7 +14,7 @@ %> <%= point.velocity %> - <%= point.recorded_at %> + <%= human_datetime(point.recorded_at) %> <%= point.lat %>, <%= point.lon %> diff --git a/app/views/settings/_navigation.html.erb b/app/views/settings/_navigation.html.erb index 8b5e51e0..40ec1ddb 100644 --- a/app/views/settings/_navigation.html.erb +++ b/app/views/settings/_navigation.html.erb @@ -5,4 +5,7 @@ <%= link_to 'Background Jobs', settings_background_jobs_path, role: 'tab', class: "tab #{active_tab?(settings_background_jobs_path)}" %> <% end %> <%= link_to 'Map', settings_maps_path, role: 'tab', class: "tab #{active_tab?(settings_maps_path)}" %> + <% if !DawarichSettings.self_hosted? %> + <%= link_to 'Subscriptions', settings_subscriptions_path, role: 'tab', class: "tab #{active_tab?(settings_subscriptions_path)}" %> + <% end %>
diff --git a/app/views/settings/subscriptions/index.html.erb b/app/views/settings/subscriptions/index.html.erb new file mode 100644 index 00000000..093b58a9 --- /dev/null +++ b/app/views/settings/subscriptions/index.html.erb @@ -0,0 +1,30 @@ +<% content_for :title, "Subscriptions" %> + +
+ <%= render 'settings/navigation' %> + +
+
+
+

Hello there!

+ <% if current_user.active_until.future? %> +

+ You are currently subscribed to Dawarich, hurray! +

+ +

+ Your subscription will be valid for the next <%= days_left(current_user.active_until) %>. +

+ + <%= link_to 'Manage subscription', "#{ENV['SUBSCRIPTION_URL']}/auth/dawarich?token=#{current_user.generate_subscription_token}", class: 'btn btn-primary my-4' %> + <% else %> +

+ You are currently not subscribed to Dawarich. How about we fix that? +

+ + <%= link_to 'Manage subscription', "#{ENV['SUBSCRIPTION_URL']}/auth/dawarich?token=#{current_user.generate_subscription_token}", class: 'btn btn-primary my-4' %> + <% end %> +
+
+
+
diff --git a/app/views/settings/users/index.html.erb b/app/views/settings/users/index.html.erb index 087d0e23..dff5d2fb 100644 --- a/app/views/settings/users/index.html.erb +++ b/app/views/settings/users/index.html.erb @@ -26,7 +26,7 @@ <%= number_with_delimiter user.tracked_points.count %> - <%= user.created_at.strftime('%Y-%m-%d %H:%M:%S') %> + <%= human_datetime(user.created_at) %> <% end %> diff --git a/app/views/shared/_navbar.html.erb b/app/views/shared/_navbar.html.erb index cbbc32a4..0621b407 100644 --- a/app/views/shared/_navbar.html.erb +++ b/app/views/shared/_navbar.html.erb @@ -19,6 +19,9 @@ + <% if user_signed_in? && current_user.can_subscribe? %> +
  • <%= link_to 'Subscribe', "#{ENV['SUBSCRIPTION_URL']}/auth/dawarich?token=#{current_user.generate_subscription_token}", class: 'btn btn-sm btn-success' %>
  • + <% end %>
    <%= link_to 'Dawarich', root_path, class: 'btn btn-ghost normal-case text-xl'%> @@ -67,6 +70,10 @@