From b76602d9c8a80ce5d88494b86f7df802f7de02d0 Mon Sep 17 00:00:00 2001 From: Eugene Burmakin Date: Mon, 9 Jun 2025 13:39:25 +0200 Subject: [PATCH 1/6] Return sidekiq and redis to Dawarich --- .circleci/config.yml | 6 +- .devcontainer/docker-compose.yml | 16 +++ .env.development | 1 + .env.test | 1 + .github/workflows/ci.yml | 7 + DEVELOPMENT.md | 6 + Gemfile | 5 + Gemfile.lock | 27 ++++ app/jobs/app_version_checking_job.rb | 1 + app/jobs/area_visits_calculating_job.rb | 1 + .../area_visits_calculation_scheduling_job.rb | 1 + app/jobs/bulk_visits_suggesting_job.rb | 1 + app/jobs/import/google_takeout_job.rb | 1 + app/jobs/import/photoprism_geodata_job.rb | 1 + app/jobs/import/watcher_job.rb | 1 + app/jobs/visit_suggesting_job.rb | 1 + app/services/tasks/imports/google_records.rb | 2 +- .../notifications/_notification.html.erb | 2 +- .../settings/background_jobs/index.html.erb | 2 +- config/application.rb | 2 + config/initializers/geocoder.rb | 2 +- config/initializers/sidekiq.rb | 42 +++--- config/routes.rb | 21 ++- config/sidekiq.yml | 10 ++ docker/Dockerfile.dev | 3 + docker/Dockerfile.prod | 3 + docker/docker-compose.production.yml | 73 +++++++++- docker/docker-compose.yml | 70 +++++++++- docker/sidekiq-entrypoint.sh | 36 +++++ docs/How_to_install_Dawarich_in_k8s.md | 73 +++++++++- docs/How_to_install_Dawarich_on_Synology.md | 2 +- docs/how_to_setup_reverse_proxy.md | 11 ++ docs/synology/docker-compose.yml | 24 ++++ spec/rails_helper.rb | 1 + spec/requests/sidekiq_spec.rb | 125 ++++++++++++++++++ spec/services/imports/watcher_spec.rb | 2 + 36 files changed, 546 insertions(+), 37 deletions(-) create mode 100644 config/sidekiq.yml create mode 100644 docker/sidekiq-entrypoint.sh create mode 100644 spec/requests/sidekiq_spec.rb diff --git a/.circleci/config.yml b/.circleci/config.yml index e7410a62..13f89c17 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -16,16 +16,12 @@ jobs: DATABASE_USERNAME: postgres DATABASE_PASSWORD: mysecretpassword 
DATABASE_PORT: 5432 - QUEUE_DATABASE_HOST: localhost - QUEUE_DATABASE_NAME: dawarich_test_queue - QUEUE_DATABASE_USERNAME: postgres - QUEUE_DATABASE_PASSWORD: mysecretpassword - QUEUE_DATABASE_PORT: 5432 - image: cimg/postgres:13.3-postgis environment: POSTGRES_USER: postgres POSTGRES_DB: dawarich_test POSTGRES_PASSWORD: mysecretpassword + - image: redis:7.0 - image: selenium/standalone-chrome:latest name: chrome environment: diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index cf658aa5..eb632340 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -20,6 +20,7 @@ services: tty: true environment: RAILS_ENV: development + REDIS_URL: redis://dawarich_redis:6379/0 DATABASE_HOST: dawarich_db DATABASE_USERNAME: postgres DATABASE_PASSWORD: password @@ -40,6 +41,21 @@ services: PROMETHEUS_EXPORTER_ENABLED: false PROMETHEUS_EXPORTER_HOST: 0.0.0.0 PROMETHEUS_EXPORTER_PORT: 9394 + dawarich_redis: + image: redis:7.4-alpine + container_name: dawarich_redis + command: redis-server + networks: + - dawarich + volumes: + - dawarich_shared:/data + restart: always + healthcheck: + test: [ "CMD", "redis-cli", "--raw", "incr", "ping" ] + interval: 10s + retries: 5 + start_period: 30s + timeout: 10s dawarich_db: image: postgis/postgis:17-3.5-alpine container_name: dawarich_db diff --git a/.env.development b/.env.development index 8e85d36c..8aeb3141 100644 --- a/.env.development +++ b/.env.development @@ -3,3 +3,4 @@ DATABASE_USERNAME=postgres DATABASE_PASSWORD=password DATABASE_NAME=dawarich_development DATABASE_PORT=5432 +REDIS_URL=redis://localhost:6379/1 diff --git a/.env.test b/.env.test index fcfeae00..fea48769 100644 --- a/.env.test +++ b/.env.test @@ -3,3 +3,4 @@ DATABASE_USERNAME=postgres DATABASE_PASSWORD=password DATABASE_NAME=dawarich_test DATABASE_PORT=5432 +REDIS_URL=redis://localhost:6379/1 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0971ad40..bd322ea9 100644 --- 
a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -19,6 +19,10 @@ jobs: ports: - 5432:5432 options: --health-cmd="pg_isready" --health-interval=10s --health-timeout=5s --health-retries=3 + redis: + image: redis + ports: + - 6379:6379 steps: - name: Install packages @@ -53,12 +57,14 @@ jobs: env: RAILS_ENV: test DATABASE_URL: postgres://postgres:postgres@localhost:5432 + REDIS_URL: redis://localhost:6379/1 run: bin/rails db:setup - name: Run main tests (excluding system tests) env: RAILS_ENV: test DATABASE_URL: postgres://postgres:postgres@localhost:5432 + REDIS_URL: redis://localhost:6379/1 run: | bundle exec rspec --exclude-pattern "spec/system/**/*_spec.rb" || (cat log/test.log && exit 1) @@ -66,6 +72,7 @@ jobs: env: RAILS_ENV: test DATABASE_URL: postgres://postgres:postgres@localhost:5432 + REDIS_URL: redis://localhost:6379/1 run: | bundle exec rspec spec/system/ || (cat log/test.log && exit 1) diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index f5551a76..8b1b6a97 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -7,6 +7,12 @@ Now you can create/prepare the Database (this need to be done once): bundle exec rails db:prepare ``` +Afterwards you can run sidekiq: +```bash +bundle exec sidekiq + +``` + And in a second terminal the dawarich-app: ```bash bundle exec bin/dev diff --git a/Gemfile b/Gemfile index 504b4811..688eb883 100644 --- a/Gemfile +++ b/Gemfile @@ -28,6 +28,7 @@ gem 'activerecord-postgis-adapter' gem 'puma' gem 'pundit' gem 'rails', '~> 8.0' +gem 'redis' gem 'rexml' gem 'rgeo' gem 'rgeo-activerecord' @@ -38,6 +39,9 @@ gem 'sentry-ruby' gem 'sentry-rails' gem 'sqlite3', '~> 2.6' gem 'stackprof' +gem 'sidekiq' +gem 'sidekiq-cron' +gem 'sidekiq-limit_fetch' gem 'sprockets-rails' gem 'stimulus-rails' gem 'strong_migrations' @@ -64,6 +68,7 @@ end group :test do gem 'capybara' + gem 'fakeredis' gem 'selenium-webdriver' gem 'shoulda-matchers' gem 'simplecov', require: false diff --git a/Gemfile.lock b/Gemfile.lock index 8fe529cf..8777986d 100644 
--- a/Gemfile.lock +++ b/Gemfile.lock @@ -134,6 +134,9 @@ GEM bigdecimal rexml crass (1.0.6) + cronex (0.15.0) + tzinfo + unicode (>= 0.4.4.5) csv (3.3.4) data_migrate (11.3.0) activerecord (>= 6.1) @@ -166,6 +169,7 @@ GEM factory_bot_rails (6.4.4) factory_bot (~> 6.5) railties (>= 5.0.0) + fakeredis (0.1.4) ffaker (2.24.0) foreman (0.88.1) fugit (1.11.1) @@ -350,6 +354,10 @@ GEM rdoc (6.14.0) erb psych (>= 4.0.0) + redis (5.4.0) + redis-client (>= 0.22.0) + redis-client (0.24.0) + connection_pool regexp_parser (2.10.0) reline (0.6.1) io-console (~> 0.5) @@ -431,6 +439,19 @@ GEM concurrent-ruby (~> 1.0, >= 1.0.2) shoulda-matchers (6.5.0) activesupport (>= 5.2.0) + sidekiq (8.0.4) + connection_pool (>= 2.5.0) + json (>= 2.9.0) + logger (>= 1.6.2) + rack (>= 3.1.0) + redis-client (>= 0.23.2) + sidekiq-cron (2.3.0) + cronex (>= 0.13.0) + fugit (~> 1.8, >= 1.11.1) + globalid (>= 1.0.1) + sidekiq (>= 6.5.0) + sidekiq-limit_fetch (4.4.1) + sidekiq (>= 6) simplecov (0.22.0) docile (~> 1.1) simplecov-html (~> 0.11) @@ -492,6 +513,7 @@ GEM railties (>= 7.1.0) tzinfo (2.0.6) concurrent-ruby (~> 1.0) + unicode (0.4.4.5) unicode-display_width (3.1.4) unicode-emoji (~> 4.0, >= 4.0.4) unicode-emoji (4.0.4) @@ -537,6 +559,7 @@ DEPENDENCIES devise dotenv-rails factory_bot_rails + fakeredis ffaker foreman geocoder! 
@@ -556,6 +579,7 @@ DEPENDENCIES puma pundit rails (~> 8.0) + redis rexml rgeo rgeo-activerecord @@ -569,6 +593,9 @@ DEPENDENCIES sentry-rails sentry-ruby shoulda-matchers + sidekiq + sidekiq-cron + sidekiq-limit_fetch simplecov solid_cable (~> 3.0) solid_cache (= 1.0.7) diff --git a/app/jobs/app_version_checking_job.rb b/app/jobs/app_version_checking_job.rb index a6fc2d9b..2463326d 100644 --- a/app/jobs/app_version_checking_job.rb +++ b/app/jobs/app_version_checking_job.rb @@ -2,6 +2,7 @@ class AppVersionCheckingJob < ApplicationJob queue_as :default + sidekiq_options retry: false def perform Rails.cache.delete(CheckAppVersion::VERSION_CACHE_KEY) diff --git a/app/jobs/area_visits_calculating_job.rb b/app/jobs/area_visits_calculating_job.rb index fe74ff9d..95850286 100644 --- a/app/jobs/area_visits_calculating_job.rb +++ b/app/jobs/area_visits_calculating_job.rb @@ -2,6 +2,7 @@ class AreaVisitsCalculatingJob < ApplicationJob queue_as :default + sidekiq_options retry: false def perform(user_id) user = User.find(user_id) diff --git a/app/jobs/area_visits_calculation_scheduling_job.rb b/app/jobs/area_visits_calculation_scheduling_job.rb index a1addc82..db4c5d3e 100644 --- a/app/jobs/area_visits_calculation_scheduling_job.rb +++ b/app/jobs/area_visits_calculation_scheduling_job.rb @@ -2,6 +2,7 @@ class AreaVisitsCalculationSchedulingJob < ApplicationJob queue_as :default + sidekiq_options retry: false def perform User.find_each { AreaVisitsCalculatingJob.perform_later(_1.id) } diff --git a/app/jobs/bulk_visits_suggesting_job.rb b/app/jobs/bulk_visits_suggesting_job.rb index a1ff2ae4..54174bca 100644 --- a/app/jobs/bulk_visits_suggesting_job.rb +++ b/app/jobs/bulk_visits_suggesting_job.rb @@ -4,6 +4,7 @@ # with the default timespan of 1 day. 
class BulkVisitsSuggestingJob < ApplicationJob queue_as :visit_suggesting + sidekiq_options retry: false # Passing timespan of more than 3 years somehow results in duplicated Places def perform(start_at: 1.day.ago.beginning_of_day, end_at: 1.day.ago.end_of_day, user_ids: []) diff --git a/app/jobs/import/google_takeout_job.rb b/app/jobs/import/google_takeout_job.rb index f52e61c3..02702cf7 100644 --- a/app/jobs/import/google_takeout_job.rb +++ b/app/jobs/import/google_takeout_job.rb @@ -2,6 +2,7 @@ class Import::GoogleTakeoutJob < ApplicationJob queue_as :imports + sidekiq_options retry: false def perform(import_id, locations, current_index) locations_batch = Oj.load(locations) diff --git a/app/jobs/import/photoprism_geodata_job.rb b/app/jobs/import/photoprism_geodata_job.rb index 161667d5..7aa2d27e 100644 --- a/app/jobs/import/photoprism_geodata_job.rb +++ b/app/jobs/import/photoprism_geodata_job.rb @@ -2,6 +2,7 @@ class Import::PhotoprismGeodataJob < ApplicationJob queue_as :imports + sidekiq_options retry: false def perform(user_id) user = User.find(user_id) diff --git a/app/jobs/import/watcher_job.rb b/app/jobs/import/watcher_job.rb index f25c95a8..a2f6676f 100644 --- a/app/jobs/import/watcher_job.rb +++ b/app/jobs/import/watcher_job.rb @@ -2,6 +2,7 @@ class Import::WatcherJob < ApplicationJob queue_as :imports + sidekiq_options retry: false def perform return unless DawarichSettings.self_hosted? 
diff --git a/app/jobs/visit_suggesting_job.rb b/app/jobs/visit_suggesting_job.rb index 87a2adc7..2659d2d3 100644 --- a/app/jobs/visit_suggesting_job.rb +++ b/app/jobs/visit_suggesting_job.rb @@ -2,6 +2,7 @@ class VisitSuggestingJob < ApplicationJob queue_as :visit_suggesting + sidekiq_options retry: false # Passing timespan of more than 3 years somehow results in duplicated Places def perform(user_id:, start_at:, end_at:) diff --git a/app/services/tasks/imports/google_records.rb b/app/services/tasks/imports/google_records.rb index 7f888bc2..70b5d389 100644 --- a/app/services/tasks/imports/google_records.rb +++ b/app/services/tasks/imports/google_records.rb @@ -54,6 +54,6 @@ class Tasks::Imports::GoogleRecords end def log_success - Rails.logger.info("Imported #{@file_path} for #{@user.email} successfully! Wait for the processing to finish. You can check the status of the import in the Jobs UI (http:///jobs).") + Rails.logger.info("Imported #{@file_path} for #{@user.email} successfully! Wait for the processing to finish. You can check the status of the import in the Sidekiq UI (http:///sidekiq).") end end diff --git a/app/views/notifications/_notification.html.erb b/app/views/notifications/_notification.html.erb index 1ce36922..62a32b81 100644 --- a/app/views/notifications/_notification.html.erb +++ b/app/views/notifications/_notification.html.erb @@ -11,7 +11,7 @@ <% if notification.error? %>
- Please, when reporting a bug to Github Issues, don't forget to include logs from dawarich_app docker container. Thank you! + Please, when reporting a bug to Github Issues, don't forget to include logs from dawarich_app and dawarich_sidekiq docker containers. Thank you!
<% end %> diff --git a/app/views/settings/background_jobs/index.html.erb b/app/views/settings/background_jobs/index.html.erb index f31335ae..ebdaaa2c 100644 --- a/app/views/settings/background_jobs/index.html.erb +++ b/app/views/settings/background_jobs/index.html.erb @@ -45,7 +45,7 @@

Background Jobs Dashboard

This will open the background jobs dashboard in a new tab.

- <%= link_to 'Open Dashboard', mission_control_jobs_url, target: '_blank', class: 'btn btn-primary' %> + <%= link_to 'Open Dashboard', '/sidekiq', target: '_blank', class: 'btn btn-primary' %>
diff --git a/config/application.rb b/config/application.rb index a76fdc15..3d2dd0be 100644 --- a/config/application.rb +++ b/config/application.rb @@ -34,5 +34,7 @@ module Dawarich g.routing_specs false g.helper_specs false end + + config.active_job.queue_adapter = :sidekiq end end diff --git a/config/initializers/geocoder.rb b/config/initializers/geocoder.rb index 60b61bed..e38248d0 100644 --- a/config/initializers/geocoder.rb +++ b/config/initializers/geocoder.rb @@ -4,7 +4,7 @@ settings = { debug_mode: true, timeout: 5, units: :km, - cache: Geocoder::CacheStore::Generic.new(Rails.cache, {}), + cache: Redis.new, always_raise: :all, http_headers: { 'User-Agent' => "Dawarich #{APP_VERSION} (https://dawarich.app)" diff --git a/config/initializers/sidekiq.rb b/config/initializers/sidekiq.rb index 47d89146..6b262868 100644 --- a/config/initializers/sidekiq.rb +++ b/config/initializers/sidekiq.rb @@ -1,24 +1,30 @@ # frozen_string_literal: true -# Sidekiq.configure_server do |config| -# if ENV['PROMETHEUS_EXPORTER_ENABLED'].to_s == 'true' -# require 'prometheus_exporter/instrumentation' +Sidekiq.configure_server do |config| + config.redis = { url: ENV['REDIS_URL'] } + config.logger = Sidekiq::Logger.new($stdout) -# # Add middleware for collecting job-level metrics -# config.server_middleware do |chain| -# chain.add PrometheusExporter::Instrumentation::Sidekiq -# end + if ENV['PROMETHEUS_EXPORTER_ENABLED'].to_s == 'true' + require 'prometheus_exporter/instrumentation' + # Add middleware for collecting job-level metrics + config.server_middleware do |chain| + chain.add PrometheusExporter::Instrumentation::Sidekiq + end -# # Capture metrics for failed jobs -# config.death_handlers << PrometheusExporter::Instrumentation::Sidekiq.death_handler + # Capture metrics for failed jobs + config.death_handlers << PrometheusExporter::Instrumentation::Sidekiq.death_handler -# # Start Prometheus instrumentation -# config.on :startup do -#
PrometheusExporter::Instrumentation::SidekiqProcess.start -# PrometheusExporter::Instrumentation::SidekiqQueue.start -# PrometheusExporter::Instrumentation::SidekiqStats.start -# end -# end -# end + # Start Prometheus instrumentation + config.on :startup do + PrometheusExporter::Instrumentation::SidekiqProcess.start + PrometheusExporter::Instrumentation::SidekiqQueue.start + PrometheusExporter::Instrumentation::SidekiqStats.start + end + end +end -# Sidekiq::Queue['reverse_geocoding'].limit = 1 if Sidekiq.server? && DawarichSettings.photon_uses_komoot_io? +Sidekiq.configure_client do |config| + config.redis = { url: ENV['REDIS_URL'] } +end + +Sidekiq::Queue['reverse_geocoding'].limit = 1 if Sidekiq.server? && DawarichSettings.photon_uses_komoot_io? diff --git a/config/routes.rb b/config/routes.rb index 011cd4e3..1a03af7a 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -1,19 +1,34 @@ # frozen_string_literal: true +require 'sidekiq/web' + Rails.application.routes.draw do mount ActionCable.server => '/cable' mount Rswag::Api::Engine => '/api-docs' mount Rswag::Ui::Engine => '/api-docs' + unless DawarichSettings.self_hosted? + Sidekiq::Web.use(Rack::Auth::Basic) do |username, password| + ActiveSupport::SecurityUtils.secure_compare( + ::Digest::SHA256.hexdigest(username), + ::Digest::SHA256.hexdigest(ENV['SIDEKIQ_USERNAME']) + ) & + ActiveSupport::SecurityUtils.secure_compare( + ::Digest::SHA256.hexdigest(password), + ::Digest::SHA256.hexdigest(ENV['SIDEKIQ_PASSWORD']) + ) + end + end + authenticate :user, lambda { |u| (u.admin? && DawarichSettings.self_hosted?) || (u.admin? && ENV['SIDEKIQ_USERNAME'].present? && ENV['SIDEKIQ_PASSWORD'].present?) 
} do - mount MissionControl::Jobs::Engine, at: '/jobs' + mount Sidekiq::Web => '/sidekiq' end - # We want to return a nice error message if the user is not authorized to access Jobs - match '/jobs' => redirect { |_, request| + # We want to return a nice error message if the user is not authorized to access Sidekiq + match '/sidekiq' => redirect { |_, request| request.flash[:error] = 'You are not authorized to perform this action.' '/' }, via: :get diff --git a/config/sidekiq.yml b/config/sidekiq.yml new file mode 100644 index 00000000..7bde1468 --- /dev/null +++ b/config/sidekiq.yml @@ -0,0 +1,10 @@ +--- +:concurrency: <%= ENV.fetch("BACKGROUND_PROCESSING_CONCURRENCY", 10) %> +:queues: + - points + - default + - imports + - exports + - stats + - reverse_geocoding + - visit_suggesting diff --git a/docker/Dockerfile.dev b/docker/Dockerfile.dev index d3c7f1cd..aed33719 100644 --- a/docker/Dockerfile.dev +++ b/docker/Dockerfile.dev @@ -62,6 +62,9 @@ RUN mkdir -p $APP_PATH/tmp && touch $APP_PATH/tmp/caching-dev.txt COPY ./docker/web-entrypoint.sh /usr/local/bin/web-entrypoint.sh RUN chmod +x /usr/local/bin/web-entrypoint.sh +COPY ./docker/sidekiq-entrypoint.sh /usr/local/bin/sidekiq-entrypoint.sh +RUN chmod +x /usr/local/bin/sidekiq-entrypoint.sh + EXPOSE $RAILS_PORT ENTRYPOINT ["bundle", "exec"] diff --git a/docker/Dockerfile.prod b/docker/Dockerfile.prod index 12db5c06..e5fd1d61 100644 --- a/docker/Dockerfile.prod +++ b/docker/Dockerfile.prod @@ -61,6 +61,9 @@ RUN SECRET_KEY_BASE_DUMMY=1 bundle exec rake assets:precompile \ COPY ./docker/web-entrypoint.sh /usr/local/bin/web-entrypoint.sh RUN chmod +x /usr/local/bin/web-entrypoint.sh +COPY ./docker/sidekiq-entrypoint.sh /usr/local/bin/sidekiq-entrypoint.sh +RUN chmod +x /usr/local/bin/sidekiq-entrypoint.sh + EXPOSE $RAILS_PORT ENTRYPOINT [ "bundle", "exec" ] diff --git a/docker/docker-compose.production.yml b/docker/docker-compose.production.yml index 40ce7c74..37aeb19a 100644 --- a/docker/docker-compose.production.yml 
+++ b/docker/docker-compose.production.yml @@ -1,6 +1,21 @@ networks: dawarich: services: + dawarich_redis: + image: redis:7.4-alpine + container_name: dawarich_redis + command: redis-server + networks: + - dawarich + volumes: + - dawarich_redis_data:/var/shared/redis + restart: always + healthcheck: + test: [ "CMD", "redis-cli", "--raw", "incr", "ping" ] + interval: 10s + retries: 5 + start_period: 30s + timeout: 10s dawarich_db: image: postgis/postgis:17-3.5-alpine shm_size: 1G @@ -41,6 +56,7 @@ services: restart: on-failure environment: RAILS_ENV: production + REDIS_URL: redis://dawarich_redis:6379/0 DATABASE_HOST: dawarich_db DATABASE_PORT: 5432 DATABASE_USERNAME: postgres @@ -80,14 +96,69 @@ services: dawarich_db: condition: service_healthy restart: true + dawarich_redis: + condition: service_healthy + restart: true deploy: resources: limits: cpus: '0.50' # Limit CPU usage to 50% of one core memory: '4G' # Limit memory usage to 2GB - + dawarich_sidekiq: + image: dawarich:prod + container_name: dawarich_sidekiq + volumes: + - dawarich_public:/var/app/public + - dawarich_watched:/var/app/tmp/imports/watched + - dawarich_storage:/var/app/storage + networks: + - dawarich + stdin_open: true + tty: true + entrypoint: sidekiq-entrypoint.sh + command: ['bundle', 'exec', 'sidekiq'] + restart: on-failure + environment: + RAILS_ENV: production + REDIS_URL: redis://dawarich_redis:6379/0 + DATABASE_HOST: dawarich_db + DATABASE_PORT: 5432 + DATABASE_USERNAME: postgres + DATABASE_PASSWORD: password + DATABASE_NAME: dawarich_production + APPLICATION_HOSTS: localhost,::1,127.0.0.1 + BACKGROUND_PROCESSING_CONCURRENCY: 10 + APPLICATION_PROTOCOL: http + PROMETHEUS_EXPORTER_ENABLED: false + PROMETHEUS_EXPORTER_HOST: dawarich_app + PROMETHEUS_EXPORTER_PORT: 9394 + SECRET_KEY_BASE: 1234567890 + RAILS_LOG_TO_STDOUT: "true" + STORE_GEODATA: "true" + logging: + driver: "json-file" + options: + max-size: "100m" + max-file: "5" + healthcheck: + test: [ "CMD-SHELL", "bundle exec 
sidekiqmon processes | grep $${HOSTNAME}" ] + interval: 10s + retries: 30 + start_period: 30s + timeout: 10s + depends_on: + dawarich_db: + condition: service_healthy + restart: true + dawarich_redis: + condition: service_healthy + restart: true + dawarich_app: + condition: service_healthy + restart: true volumes: dawarich_db_data: + dawarich_redis_data: dawarich_public: dawarich_watched: dawarich_storage: diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 38faea53..220719fc 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -1,6 +1,21 @@ networks: dawarich: services: + dawarich_redis: + image: redis:7.4-alpine + container_name: dawarich_redis + command: redis-server + networks: + - dawarich + volumes: + - dawarich_shared:/data + restart: always + healthcheck: + test: [ "CMD", "redis-cli", "--raw", "incr", "ping" ] + interval: 10s + retries: 5 + start_period: 30s + timeout: 10s dawarich_db: image: postgis/postgis:17-3.5-alpine shm_size: 1G @@ -44,6 +59,7 @@ services: restart: on-failure environment: RAILS_ENV: development + REDIS_URL: redis://dawarich_redis:6379/0 DATABASE_HOST: dawarich_db DATABASE_USERNAME: postgres DATABASE_PASSWORD: password @@ -81,12 +97,64 @@ services: dawarich_db: condition: service_healthy restart: true + dawarich_redis: + condition: service_healthy + restart: true deploy: resources: limits: cpus: '0.50' # Limit CPU usage to 50% of one core memory: '4G' # Limit memory usage to 4GB - + dawarich_sidekiq: + image: freikin/dawarich:latest + container_name: dawarich_sidekiq + volumes: + - dawarich_public:/var/app/public + - dawarich_watched:/var/app/tmp/imports/watched + - dawarich_storage:/var/app/storage + networks: + - dawarich + stdin_open: true + tty: true + entrypoint: sidekiq-entrypoint.sh + command: ['sidekiq'] + restart: on-failure + environment: + RAILS_ENV: development + REDIS_URL: redis://dawarich_redis:6379/0 + DATABASE_HOST: dawarich_db + DATABASE_USERNAME: postgres + DATABASE_PASSWORD: 
password + DATABASE_NAME: dawarich_development + APPLICATION_HOSTS: localhost + BACKGROUND_PROCESSING_CONCURRENCY: 10 + APPLICATION_PROTOCOL: http + PROMETHEUS_EXPORTER_ENABLED: false + PROMETHEUS_EXPORTER_HOST: dawarich_app + PROMETHEUS_EXPORTER_PORT: 9394 + SELF_HOSTED: "true" + STORE_GEODATA: "true" + logging: + driver: "json-file" + options: + max-size: "100m" + max-file: "5" + healthcheck: + test: [ "CMD-SHELL", "bundle exec sidekiqmon processes | grep $${HOSTNAME}" ] + interval: 10s + retries: 30 + start_period: 30s + timeout: 10s + depends_on: + dawarich_db: + condition: service_healthy + restart: true + dawarich_redis: + condition: service_healthy + restart: true + dawarich_app: + condition: service_healthy + restart: true volumes: dawarich_db_data: dawarich_sqlite_data: diff --git a/docker/sidekiq-entrypoint.sh b/docker/sidekiq-entrypoint.sh new file mode 100644 index 00000000..b55f3ff0 --- /dev/null +++ b/docker/sidekiq-entrypoint.sh @@ -0,0 +1,36 @@ +#!/bin/sh + +unset BUNDLE_PATH +unset BUNDLE_BIN + +set -e + +echo "⚠️ Starting Sidekiq in $RAILS_ENV environment ⚠️" + +# Parse DATABASE_URL if present, otherwise use individual variables +if [ -n "$DATABASE_URL" ]; then + # Extract components from DATABASE_URL + DATABASE_HOST=$(echo $DATABASE_URL | awk -F[@/] '{print $4}') + DATABASE_PORT=$(echo $DATABASE_URL | awk -F[@/:] '{print $5}') + DATABASE_USERNAME=$(echo $DATABASE_URL | awk -F[:/@] '{print $4}') + DATABASE_PASSWORD=$(echo $DATABASE_URL | awk -F[:/@] '{print $5}') + DATABASE_NAME=$(echo $DATABASE_URL | awk -F[@/] '{print $5}') +else + # Use existing environment variables + DATABASE_HOST=${DATABASE_HOST} + DATABASE_PORT=${DATABASE_PORT} + DATABASE_USERNAME=${DATABASE_USERNAME} + DATABASE_PASSWORD=${DATABASE_PASSWORD} + DATABASE_NAME=${DATABASE_NAME} +fi + +# Wait for the database to become available +echo "⏳ Waiting for database to be ready..." 
+until PGPASSWORD=$DATABASE_PASSWORD psql -h "$DATABASE_HOST" -p "$DATABASE_PORT" -U "$DATABASE_USERNAME" -d "$DATABASE_NAME" -c '\q'; do + >&2 echo "Postgres is unavailable - retrying..." + sleep 2 +done +echo "✅ PostgreSQL is ready!" + +# run sidekiq +bundle exec sidekiq diff --git a/docs/How_to_install_Dawarich_in_k8s.md b/docs/How_to_install_Dawarich_in_k8s.md index 18bd316b..fa108f15 100644 --- a/docs/How_to_install_Dawarich_in_k8s.md +++ b/docs/How_to_install_Dawarich_in_k8s.md @@ -6,7 +6,7 @@ - Kubernetes cluster and basic kubectl knowledge. - Some persistent storage class prepared, in this example, Longhorn. -- Working Postgres instance. In this example Postgres lives in 'db' namespace. +- Working Postgres and Redis instances. In this example Postgres lives in 'db' namespace and Redis in 'redis' namespace. - Ngingx ingress controller with Letsencrypt integeation. - This example uses 'example.com' as a domain name, you want to change it to your own. - This will work on IPv4 and IPv6 Single Stack clusters, as well as Dual Stack deployments. @@ -80,6 +80,8 @@ spec: value: "Europe/Prague" - name: RAILS_ENV value: development + - name: REDIS_URL + value: redis://redis-master.redis.svc.cluster.local:6379/10 - name: DATABASE_HOST value: postgres-postgresql.db.svc.cluster.local - name: DATABASE_PORT @@ -126,10 +128,73 @@ spec: cpu: "2000m" ports: - containerPort: 3000 + - name: dawarich-sidekiq + env: + - name: RAILS_ENV + value: development + - name: REDIS_URL + value: redis://redis-master.redis.svc.cluster.local:6379/10 + - name: DATABASE_HOST + value: postgres-postgresql.db.svc.cluster.local + - name: DATABASE_PORT + value: "5432" + - name: DATABASE_USERNAME + value: postgres + - name: DATABASE_PASSWORD + value: Password123! 
+ - name: DATABASE_NAME + value: dawarich_development + - name: RAILS_MIN_THREADS + value: "5" + - name: RAILS_MAX_THREADS + value: "10" + - name: BACKGROUND_PROCESSING_CONCURRENCY + value: "20" + - name: APPLICATION_HOST + value: localhost + - name: APPLICATION_HOSTS + value: "dawarich.example.com, localhost" + - name: APPLICATION_PROTOCOL + value: http + - name: PHOTON_API_HOST + value: photon.komoot.io + - name: PHOTON_API_USE_HTTPS + value: "true" + image: freikin/dawarich:latest + imagePullPolicy: Always + volumeMounts: + - mountPath: /var/app/public + name: public + - mountPath: /var/app/tmp/imports/watched + name: watched + command: + - "sidekiq-entrypoint.sh" + args: + - "bundle exec sidekiq" + resources: + requests: + memory: "1Gi" + cpu: "250m" + limits: + memory: "3Gi" + cpu: "1500m" + livenessProbe: + httpGet: + path: /api/v1/health + port: 3000 + initialDelaySeconds: 60 + periodSeconds: 10 + timeoutSeconds: 5 + failureThreshold: 3 + readinessProbe: + httpGet: + path: / + port: 3000 + initialDelaySeconds: 5 + periodSeconds: 10 + timeoutSeconds: 3 + failureThreshold: 3 volumes: - - name: gem-cache - persistentVolumeClaim: - claimName: gem-cache - name: public persistentVolumeClaim: claimName: public diff --git a/docs/How_to_install_Dawarich_on_Synology.md b/docs/How_to_install_Dawarich_on_Synology.md index db2d522f..ff17f1a8 100644 --- a/docs/How_to_install_Dawarich_on_Synology.md +++ b/docs/How_to_install_Dawarich_on_Synology.md @@ -29,7 +29,7 @@ If you don't want to use dedicated share for projects installed by docker skip i ### Dawarich root folder 1. Open your [Docker root folder](#docker-root-share) in **File station**. 2. Create new folder **dawarich** and open it. -3. Create folders **db_data**, **db_shared** and **public** in **dawarich** folder. +3. Create folders **redis**, **db_data**, **db_shared** and **public** in **dawarich** folder. 4. 
Copy [docker compose](synology/docker-compose.yml) and [.env](synology/.env) files form **synology** repo folder into **dawarich** folder on your synology. # Installation diff --git a/docs/how_to_setup_reverse_proxy.md b/docs/how_to_setup_reverse_proxy.md index 95706525..efaddd2d 100644 --- a/docs/how_to_setup_reverse_proxy.md +++ b/docs/how_to_setup_reverse_proxy.md @@ -17,6 +17,17 @@ dawarich_app: APPLICATION_HOSTS: "yourhost.com,www.yourhost.com,127.0.0.1" <-- Edit this ``` +```yaml +dawarich_sidekiq: + image: freikin/dawarich:latest + container_name: dawarich_sidekiq + ... + environment: + ... + APPLICATION_HOSTS: "yourhost.com,www.yourhost.com,127.0.0.1" <-- Edit this + ... +``` + For a Synology install, refer to **[Synology Install Tutorial](How_to_install_Dawarich_on_Synology.md)**. In this page, it is explained how to set the APPLICATION_HOSTS environment variable. ### Virtual Host diff --git a/docs/synology/docker-compose.yml b/docs/synology/docker-compose.yml index 534225c5..7822b7c7 100644 --- a/docs/synology/docker-compose.yml +++ b/docs/synology/docker-compose.yml @@ -1,6 +1,13 @@ version: '3' services: + dawarich_redis: + image: redis:7.4-alpine + container_name: dawarich_redis + command: redis-server + restart: unless-stopped + volumes: + - ./redis:/var/shared/redis dawarich_db: image: postgis/postgis:17-3.5-alpine container_name: dawarich_db @@ -17,6 +24,7 @@ services: container_name: dawarich_app depends_on: - dawarich_db + - dawarich_redis stdin_open: true tty: true entrypoint: web-entrypoint.sh @@ -29,3 +37,19 @@ services: - ./app_storage:/var/app/storage ports: - 32568:3000 + + dawarich_sidekiq: + image: freikin/dawarich:latest + container_name: dawarich_sidekiq + depends_on: + - dawarich_db + - dawarich_redis + - dawarich_app + entrypoint: sidekiq-entrypoint.sh + command: ['sidekiq'] + restart: unless-stopped + env_file: + - .env + volumes: + - ./public:/var/app/public + - ./app_storage:/var/app/storage diff --git a/spec/rails_helper.rb 
b/spec/rails_helper.rb index 8cd4b1f0..4e34b6af 100644 --- a/spec/rails_helper.rb +++ b/spec/rails_helper.rb @@ -7,6 +7,7 @@ require_relative '../config/environment' abort('The Rails environment is running in production mode!') if Rails.env.production? require 'rspec/rails' require 'rswag/specs' +require 'sidekiq/testing' require 'super_diff/rspec-rails' require 'rake' diff --git a/spec/requests/sidekiq_spec.rb b/spec/requests/sidekiq_spec.rb new file mode 100644 index 00000000..0fc2d1fe --- /dev/null +++ b/spec/requests/sidekiq_spec.rb @@ -0,0 +1,125 @@ +# frozen_string_literal: true + +require 'rails_helper' +require 'sidekiq/web' + +RSpec.describe '/sidekiq', type: :request do + before do + # Allow any ENV key to be accessed and return nil by default + allow(ENV).to receive(:[]).and_return(nil) + + # Stub Sidekiq::Web with a simple Rack app for testing + allow(Sidekiq::Web).to receive(:call) do |_env| + [200, { 'Content-Type' => 'text/html' }, ['Sidekiq Web UI']] + end + end + + context 'when Dawarich is in self-hosted mode' do + before do + allow(DawarichSettings).to receive(:self_hosted?).and_return(true) + allow(ENV).to receive(:[]).with('SIDEKIQ_USERNAME').and_return(nil) + allow(ENV).to receive(:[]).with('SIDEKIQ_PASSWORD').and_return(nil) + end + + context 'when user is not authenticated' do + it 'redirects to sign in page' do + get sidekiq_url + + expect(response).to redirect_to('/users/sign_in') + end + end + + context 'when user is authenticated' do + context 'when user is not admin' do + before { sign_in create(:user) } + + it 'redirects to root page' do + get sidekiq_url + + expect(response).to redirect_to(root_url) + end + + it 'shows flash message' do + get sidekiq_url + + expect(flash[:error]).to eq('You are not authorized to perform this action.') + end + end + + context 'when user is admin' do + before { sign_in create(:user, :admin) } + + it 'renders a successful response' do + get sidekiq_url + + expect(response).to be_successful + end + end + 
end + end + + context 'when Dawarich is not in self-hosted mode' do + before do + allow(DawarichSettings).to receive(:self_hosted?).and_return(false) + allow(ENV).to receive(:[]).with('SIDEKIQ_USERNAME').and_return(nil) + allow(ENV).to receive(:[]).with('SIDEKIQ_PASSWORD').and_return(nil) + Rails.application.reload_routes! + end + + context 'when user is not authenticated' do + it 'redirects to sign in page' do + get sidekiq_url + + expect(response).to redirect_to('/users/sign_in') + end + end + + context 'when user is authenticated' do + before { sign_in create(:user, :admin) } + + it 'redirects to root page' do + get sidekiq_url + + expect(response).to redirect_to(root_url) + expect(flash[:error]).to eq('You are not authorized to perform this action.') + end + end + end + + context 'when SIDEKIQ_USERNAME and SIDEKIQ_PASSWORD are set' do + before do + allow(DawarichSettings).to receive(:self_hosted?).and_return(false) + allow(ENV).to receive(:[]).with('SIDEKIQ_USERNAME').and_return('admin') + allow(ENV).to receive(:[]).with('SIDEKIQ_PASSWORD').and_return('password') + end + + context 'when user is not authenticated' do + it 'redirects to sign in page' do + get sidekiq_url + + expect(response).to redirect_to('/users/sign_in') + end + end + + context 'when user is not admin' do + before { sign_in create(:user) } + + it 'redirects to root page' do + get sidekiq_url + + expect(response).to redirect_to(root_url) + expect(flash[:error]).to eq('You are not authorized to perform this action.') + end + end + + context 'when user is admin' do + before { sign_in create(:user, :admin) } + + it 'renders a successful response' do + get sidekiq_url + + expect(response).to be_successful + end + end + end +end diff --git a/spec/services/imports/watcher_spec.rb b/spec/services/imports/watcher_spec.rb index 15c8791a..94c04053 100644 --- a/spec/services/imports/watcher_spec.rb +++ b/spec/services/imports/watcher_spec.rb @@ -12,6 +12,8 @@ RSpec.describe Imports::Watcher do 
stub_const('Imports::Watcher::WATCHED_DIR_PATH', watched_dir_path) end + after { Sidekiq::Testing.fake! } + context 'when user exists' do let!(:user) { create(:user, email: 'user@domain.com') } From 767629b21e4051b153a9096a753fe74c10fe1740 Mon Sep 17 00:00:00 2001 From: Eugene Burmakin Date: Mon, 9 Jun 2025 13:50:43 +0200 Subject: [PATCH 2/6] Remove solid trifecta --- Gemfile | 5 - Procfile | 2 +- app/jobs/jobs/clean_finished_job.rb | 9 -- bin/jobs | 6 - config/cable.yml | 20 +-- config/cache.yml | 15 -- config/database.yml | 83 +--------- config/environments/development.rb | 3 +- config/environments/production.rb | 9 +- config/puma.rb | 9 -- config/queue.yml | 27 ---- config/recurring.yml | 34 ----- db/cable_schema.rb | 24 --- db/cache_schema.rb | 24 --- db/queue_schema.rb | 143 ------------------ docker/web-entrypoint.sh | 49 ------ spec/jobs/area_visits_calculating_job_spec.rb | 6 +- spec/services/imports/create_spec.rb | 10 +- spec/services/imports/watcher_spec.rb | 1 + 19 files changed, 29 insertions(+), 450 deletions(-) delete mode 100644 app/jobs/jobs/clean_finished_job.rb delete mode 100755 bin/jobs delete mode 100644 config/cache.yml delete mode 100644 config/queue.yml delete mode 100644 config/recurring.yml delete mode 100644 db/cable_schema.rb delete mode 100644 db/cache_schema.rb delete mode 100644 db/queue_schema.rb diff --git a/Gemfile b/Gemfile index 688eb883..832d25cd 100644 --- a/Gemfile +++ b/Gemfile @@ -20,7 +20,6 @@ gem 'httparty' gem 'importmap-rails' gem 'kaminari' gem 'lograge' -gem 'mission_control-jobs' gem 'oj' gem 'pg' gem 'prometheus_exporter' @@ -37,7 +36,6 @@ gem 'rswag-api' gem 'rswag-ui' gem 'sentry-ruby' gem 'sentry-rails' -gem 'sqlite3', '~> 2.6' gem 'stackprof' gem 'sidekiq' gem 'sidekiq-cron' @@ -45,9 +43,6 @@ gem 'sidekiq-limit_fetch' gem 'sprockets-rails' gem 'stimulus-rails' gem 'strong_migrations' -gem 'solid_cable', '~> 3.0' -gem 'solid_cache', '1.0.7' -gem 'solid_queue', '~> 1.1' gem 'tailwindcss-rails' gem 'turbo-rails' 
gem 'tzinfo-data', platforms: %i[mingw mswin x64_mingw jruby] diff --git a/Procfile b/Procfile index d6f4d818..fd4fe014 100644 --- a/Procfile +++ b/Procfile @@ -1,2 +1,2 @@ web: bundle exec puma -C config/puma.rb -worker: bundle exec bin/jobs +worker: bundle exec sidekiq -C config/sidekiq.yml diff --git a/app/jobs/jobs/clean_finished_job.rb b/app/jobs/jobs/clean_finished_job.rb deleted file mode 100644 index c5fc2037..00000000 --- a/app/jobs/jobs/clean_finished_job.rb +++ /dev/null @@ -1,9 +0,0 @@ -# frozen_string_literal: true - -class Jobs::CleanFinishedJob < ApplicationJob - queue_as :default - - def perform - SolidQueue::Job.clear_finished_in_batches - end -end diff --git a/bin/jobs b/bin/jobs deleted file mode 100755 index dcf59f30..00000000 --- a/bin/jobs +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env ruby - -require_relative "../config/environment" -require "solid_queue/cli" - -SolidQueue::Cli.start(ARGV) diff --git a/config/cable.yml b/config/cable.yml index 7ca155ef..c3738c80 100644 --- a/config/cable.yml +++ b/config/cable.yml @@ -1,21 +1,11 @@ -# Async adapter only works within the same process, so for manually triggering cable updates from a console, -# and seeing results in the browser, you must do so from the web console (running inside the dev process), -# not a terminal started via bin/rails console! Add "console" to any action or any ERB template view -# to make the web console appear. 
- -default: &default - adapter: solid_cable - connects_to: - database: - writing: cable - polling_interval: 0.1.seconds - message_retention: 1.day - development: - <<: *default + adapter: redis + url: <%= ENV['REDIS_URL'] %> test: adapter: test production: - <<: *default + adapter: redis + url: <%= ENV.fetch("REDIS_URL") { "redis://localhost:6379/1" } %> + channel_prefix: dawarich_production diff --git a/config/cache.yml b/config/cache.yml deleted file mode 100644 index 040a2f5e..00000000 --- a/config/cache.yml +++ /dev/null @@ -1,15 +0,0 @@ -default: &default - store_options: - # Cap age of oldest cache entry to fulfill retention policies - max_age: <%= 60.days.to_i %> - max_size: <%= 256.megabytes %> - namespace: <%= Rails.env %> - -development: - <<: *default - -test: - <<: *default - -production: - <<: *default diff --git a/config/database.yml b/config/database.yml index f82b2d8a..374dfa53 100644 --- a/config/database.yml +++ b/config/database.yml @@ -9,85 +9,18 @@ default: &default pool: <%= ENV.fetch("RAILS_MAX_THREADS") { 10 } %> timeout: 5000 -sqlite_default: &sqlite_default - adapter: sqlite3 - pool: <%= ENV.fetch("RAILS_MAX_THREADS") { 10 } %> - timeout: 5000 - development: - primary: - <<: *default - database: <%= ENV['DATABASE_NAME'] || 'dawarich_development' %> - queue: - <<: *default - database: <%= ENV['QUEUE_DATABASE_NAME'] || 'dawarich_development_queue' %> - password: <%= ENV['QUEUE_DATABASE_PASSWORD'] %> - username: <%= ENV['QUEUE_DATABASE_USERNAME'] %> - port: <%= ENV['QUEUE_DATABASE_PORT'] || '5432' %> - host: <%= ENV['QUEUE_DATABASE_HOST'] %> - migrations_paths: db/queue_migrate - cache: - <<: *sqlite_default - database: <%= ENV['CACHE_DATABASE_PATH'] || 'db/cache.sqlite3' %> - migrations_paths: db/cache_migrate - cable: - <<: *sqlite_default - database: <%= ENV['CABLE_DATABASE_PATH'] || 'db/cable.sqlite3' %> - migrations_paths: db/cable_migrate + <<: *default + database: <%= ENV['DATABASE_NAME'] || 'dawarich_development' %> test: - primary: - 
<<: *default - database: <%= ENV['DATABASE_NAME'] || 'dawarich_test' %> - password: <%= ENV['DATABASE_PASSWORD'] %> - queue: - <<: *default - database: <%= ENV['QUEUE_DATABASE_NAME'] || 'dawarich_test_queue' %> - password: <%= ENV['QUEUE_DATABASE_PASSWORD'] %> - username: <%= ENV['QUEUE_DATABASE_USERNAME'] %> - port: <%= ENV['QUEUE_DATABASE_PORT'] || '5432' %> - host: <%= ENV['QUEUE_DATABASE_HOST'] %> - migrations_paths: db/queue_migrate + <<: *default + database: <%= ENV['DATABASE_NAME'] || 'dawarich_test' %> production: - primary: - <<: *default - database: <%= ENV['DATABASE_NAME'] || 'dawarich_production' %> - queue: - <<: *default - database: <%= ENV['QUEUE_DATABASE_NAME'] || 'dawarich_production_queue' %> - password: <%= ENV['QUEUE_DATABASE_PASSWORD'] %> - username: <%= ENV['QUEUE_DATABASE_USERNAME'] %> - port: <%= ENV['QUEUE_DATABASE_PORT'] || '5432' %> - host: <%= ENV['QUEUE_DATABASE_HOST'] %> - migrations_paths: db/queue_migrate - cable: - <<: *sqlite_default - database: <%= ENV['CABLE_DATABASE_PATH'] || 'db/cable.sqlite3' %> - migrations_paths: db/cable_migrate - cache: - <<: *sqlite_default - database: <%= ENV['CACHE_DATABASE_PATH'] %> - migrations_paths: db/cache_migrate + <<: *default + database: <%= ENV['DATABASE_NAME'] || 'dawarich_production' %> staging: - primary: - <<: *default - database: <%= ENV['DATABASE_NAME'] || 'dawarich_staging' %> - password: <%= ENV['DATABASE_PASSWORD'] %> - queue: - <<: *default - database: <%= ENV['QUEUE_DATABASE_NAME'] || 'dawarich_staging_queue' %> - password: <%= ENV['QUEUE_DATABASE_PASSWORD'] %> - username: <%= ENV['QUEUE_DATABASE_USERNAME'] %> - port: <%= ENV['QUEUE_DATABASE_PORT'] || '5432' %> - host: <%= ENV['QUEUE_DATABASE_HOST'] %> - migrations_paths: db/queue_migrate - cache: - <<: *sqlite_default - database: <%= ENV['CACHE_DATABASE_PATH'] || 'db/cache.sqlite3' %> - migrations_paths: db/cache_migrate - cable: - <<: *sqlite_default - database: <%= ENV['CABLE_DATABASE_PATH'] || 'db/cable.sqlite3' %> - 
migrations_paths: db/cable_migrate + <<: *default + database: <%= ENV['DATABASE_NAME'] || 'dawarich_staging' %> diff --git a/config/environments/development.rb b/config/environments/development.rb index 1ee6dff5..dfad5b2f 100644 --- a/config/environments/development.rb +++ b/config/environments/development.rb @@ -26,8 +26,7 @@ Rails.application.configure do # Enable/disable caching. By default caching is disabled. # Run rails dev:cache to toggle caching. - config.cache_store = :solid_cache_store - config.solid_cache.connects_to = { database: { writing: :cache } } + config.cache_store = :redis_cache_store, { url: ENV['REDIS_URL'] } if Rails.root.join('tmp/caching-dev.txt').exist? config.action_controller.perform_caching = true diff --git a/config/environments/production.rb b/config/environments/production.rb index aac8634c..26913253 100644 --- a/config/environments/production.rb +++ b/config/environments/production.rb @@ -73,15 +73,10 @@ Rails.application.configure do config.log_level = ENV.fetch('RAILS_LOG_LEVEL', 'info') # Use a different cache store in production. - config.cache_store = :solid_cache_store - config.solid_cache.connects_to = { database: { writing: :cache } } + config.cache_store = :redis_cache_store, { url: ENV['REDIS_URL'] } # Use a real queuing backend for Active Job (and separate queues per environment). - config.active_job.queue_adapter = :solid_queue - config.solid_queue.connects_to = { database: { writing: :queue } } - config.solid_queue.silence_polling = true - config.solid_queue.logger = ActiveSupport::Logger.new($stdout) - # config.active_job.queue_name_prefix = "dawarich_production" + config.active_job.queue_adapter = :sidekiq config.action_mailer.perform_caching = false diff --git a/config/puma.rb b/config/puma.rb index d0e2b212..e0eb3db7 100644 --- a/config/puma.rb +++ b/config/puma.rb @@ -43,15 +43,6 @@ preload_app! # Allow puma to be restarted by `bin/rails restart` command. 
plugin :tmp_restart -# If env var is set or we're in development, solid_queue will run in puma -if ENV['SOLID_QUEUE_IN_PUMA'] || ENV.fetch('RAILS_ENV', 'development') == 'development' - begin - plugin :solid_queue - rescue => e - puts "Failed to load solid_queue plugin: #{e.message}" - end -end - # Prometheus exporter if ENV['PROMETHEUS_EXPORTER_ENABLED'].to_s == 'true' require 'prometheus_exporter/instrumentation' diff --git a/config/queue.yml b/config/queue.yml deleted file mode 100644 index 50e1f5b4..00000000 --- a/config/queue.yml +++ /dev/null @@ -1,27 +0,0 @@ - -default: &default - dispatchers: - - polling_interval: 1 - batch_size: 500 - workers: - - queues: "*" - threads: 3 - processes: <%= ENV['BACKGROUND_PROCESSING_CONCURRENCY'] || ENV.fetch("JOB_CONCURRENCY", 10) %> - polling_interval: 2 - - queues: imports - threads: 5 - processes: 1 - polling_interval: 1 - - queues: exports - threads: 5 - processes: 1 - polling_interval: 2 - -development: - <<: *default - -test: - <<: *default - -production: - <<: *default diff --git a/config/recurring.yml b/config/recurring.yml deleted file mode 100644 index 22f57d3f..00000000 --- a/config/recurring.yml +++ /dev/null @@ -1,34 +0,0 @@ -periodic_cleanup: - class: "Jobs::CleanFinishedJob" - queue: default - schedule: every month - -bulk_stats_calculating_job: - class: "BulkStatsCalculatingJob" - queue: stats - schedule: every hour - -area_visits_calculation_scheduling_job: - class: "AreaVisitsCalculationSchedulingJob" - queue: visit_suggesting - schedule: every day at 0:00 - -visit_suggesting_job: - class: "BulkVisitsSuggestingJob" - queue: visit_suggesting - schedule: every day at 00:05 - -watcher_job: - class: "Import::WatcherJob" - queue: imports - schedule: every hour - -app_version_checking_job: - class: "AppVersionCheckingJob" - queue: default - schedule: every 6 hours - -cache_preheating_job: - class: "Cache::PreheatingJob" - queue: default - schedule: every day at 0:00 diff --git a/db/cable_schema.rb 
b/db/cable_schema.rb deleted file mode 100644 index 55cdb550..00000000 --- a/db/cable_schema.rb +++ /dev/null @@ -1,24 +0,0 @@ -# This file is auto-generated from the current state of the database. Instead -# of editing this file, please use the migrations feature of Active Record to -# incrementally modify your database, and then regenerate this schema definition. -# -# This file is the source Rails uses to define your schema when running `bin/rails -# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to -# be faster and is potentially less error prone than running all of your -# migrations from scratch. Old migrations may fail to apply correctly if those -# migrations use external dependencies or application code. -# -# It's strongly recommended that you check this file into your version control system. - -ActiveRecord::Schema[8.0].define(version: 1) do - create_table "solid_cable_messages", force: :cascade do |t| - t.binary "channel", null: false - t.binary "payload", null: false - t.datetime "created_at", null: false - t.bigint "channel_hash", null: false - t.index ["channel"], name: "index_solid_cable_messages_on_channel" - t.index ["channel_hash"], name: "index_solid_cable_messages_on_channel_hash" - t.index ["created_at"], name: "index_solid_cable_messages_on_created_at" - t.index ["id"], name: "index_solid_cable_messages_on_id", unique: true - end -end diff --git a/db/cache_schema.rb b/db/cache_schema.rb deleted file mode 100644 index fe62ea4a..00000000 --- a/db/cache_schema.rb +++ /dev/null @@ -1,24 +0,0 @@ -# This file is auto-generated from the current state of the database. Instead -# of editing this file, please use the migrations feature of Active Record to -# incrementally modify your database, and then regenerate this schema definition. -# -# This file is the source Rails uses to define your schema when running `bin/rails -# db:schema:load`. 
When creating a new database, `bin/rails db:schema:load` tends to -# be faster and is potentially less error prone than running all of your -# migrations from scratch. Old migrations may fail to apply correctly if those -# migrations use external dependencies or application code. -# -# It's strongly recommended that you check this file into your version control system. - -ActiveRecord::Schema[8.0].define(version: 1) do - create_table "solid_cache_entries", force: :cascade do |t| - t.binary "key", null: false - t.binary "value", null: false - t.datetime "created_at", null: false - t.bigint "key_hash", null: false - t.integer "byte_size", null: false - t.index ["byte_size"], name: "index_solid_cache_entries_on_byte_size" - t.index ["key_hash", "byte_size"], name: "index_solid_cache_entries_on_key_hash_and_byte_size" - t.index ["key_hash"], name: "index_solid_cache_entries_on_key_hash", unique: true - end -end diff --git a/db/queue_schema.rb b/db/queue_schema.rb deleted file mode 100644 index 30f375a5..00000000 --- a/db/queue_schema.rb +++ /dev/null @@ -1,143 +0,0 @@ -# This file is auto-generated from the current state of the database. Instead -# of editing this file, please use the migrations feature of Active Record to -# incrementally modify your database, and then regenerate this schema definition. -# -# This file is the source Rails uses to define your schema when running `bin/rails -# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to -# be faster and is potentially less error prone than running all of your -# migrations from scratch. Old migrations may fail to apply correctly if those -# migrations use external dependencies or application code. -# -# It's strongly recommended that you check this file into your version control system. 
- -ActiveRecord::Schema[8.0].define(version: 1) do - enable_extension "pg_catalog.plpgsql" - - create_table "solid_queue_blocked_executions", force: :cascade do |t| - t.bigint "job_id", null: false - t.string "queue_name", null: false - t.integer "priority", default: 0, null: false - t.string "concurrency_key", null: false - t.datetime "expires_at", null: false - t.datetime "created_at", null: false - t.index ["concurrency_key", "priority", "job_id"], name: "index_solid_queue_blocked_executions_for_release" - t.index ["expires_at", "concurrency_key"], name: "index_solid_queue_blocked_executions_for_maintenance" - t.index ["job_id"], name: "index_solid_queue_blocked_executions_on_job_id", unique: true - end - - create_table "solid_queue_claimed_executions", force: :cascade do |t| - t.bigint "job_id", null: false - t.bigint "process_id" - t.datetime "created_at", null: false - t.index ["job_id"], name: "index_solid_queue_claimed_executions_on_job_id", unique: true - t.index ["process_id", "job_id"], name: "index_solid_queue_claimed_executions_on_process_id_and_job_id" - end - - create_table "solid_queue_failed_executions", force: :cascade do |t| - t.bigint "job_id", null: false - t.text "error" - t.datetime "created_at", null: false - t.index ["job_id"], name: "index_solid_queue_failed_executions_on_job_id", unique: true - end - - create_table "solid_queue_jobs", force: :cascade do |t| - t.string "queue_name", null: false - t.string "class_name", null: false - t.text "arguments" - t.integer "priority", default: 0, null: false - t.string "active_job_id" - t.datetime "scheduled_at" - t.datetime "finished_at" - t.string "concurrency_key" - t.datetime "created_at", null: false - t.datetime "updated_at", null: false - t.index ["active_job_id"], name: "index_solid_queue_jobs_on_active_job_id" - t.index ["class_name"], name: "index_solid_queue_jobs_on_class_name" - t.index ["finished_at"], name: "index_solid_queue_jobs_on_finished_at" - t.index ["queue_name", 
"finished_at"], name: "index_solid_queue_jobs_for_filtering" - t.index ["scheduled_at", "finished_at"], name: "index_solid_queue_jobs_for_alerting" - end - - create_table "solid_queue_pauses", force: :cascade do |t| - t.string "queue_name", null: false - t.datetime "created_at", null: false - t.index ["queue_name"], name: "index_solid_queue_pauses_on_queue_name", unique: true - end - - create_table "solid_queue_processes", force: :cascade do |t| - t.string "kind", null: false - t.datetime "last_heartbeat_at", null: false - t.bigint "supervisor_id" - t.integer "pid", null: false - t.string "hostname" - t.text "metadata" - t.datetime "created_at", null: false - t.string "name", null: false - t.index ["last_heartbeat_at"], name: "index_solid_queue_processes_on_last_heartbeat_at" - t.index ["name", "supervisor_id"], name: "index_solid_queue_processes_on_name_and_supervisor_id", unique: true - t.index ["supervisor_id"], name: "index_solid_queue_processes_on_supervisor_id" - end - - create_table "solid_queue_ready_executions", force: :cascade do |t| - t.bigint "job_id", null: false - t.string "queue_name", null: false - t.integer "priority", default: 0, null: false - t.datetime "created_at", null: false - t.index ["job_id"], name: "index_solid_queue_ready_executions_on_job_id", unique: true - t.index ["priority", "job_id"], name: "index_solid_queue_poll_all" - t.index ["queue_name", "priority", "job_id"], name: "index_solid_queue_poll_by_queue" - end - - create_table "solid_queue_recurring_executions", force: :cascade do |t| - t.bigint "job_id", null: false - t.string "task_key", null: false - t.datetime "run_at", null: false - t.datetime "created_at", null: false - t.index ["job_id"], name: "index_solid_queue_recurring_executions_on_job_id", unique: true - t.index ["task_key", "run_at"], name: "index_solid_queue_recurring_executions_on_task_key_and_run_at", unique: true - end - - create_table "solid_queue_recurring_tasks", force: :cascade do |t| - t.string "key", null: 
false - t.string "schedule", null: false - t.string "command", limit: 2048 - t.string "class_name" - t.text "arguments" - t.string "queue_name" - t.integer "priority", default: 0 - t.boolean "static", default: true, null: false - t.text "description" - t.datetime "created_at", null: false - t.datetime "updated_at", null: false - t.index ["key"], name: "index_solid_queue_recurring_tasks_on_key", unique: true - t.index ["static"], name: "index_solid_queue_recurring_tasks_on_static" - end - - create_table "solid_queue_scheduled_executions", force: :cascade do |t| - t.bigint "job_id", null: false - t.string "queue_name", null: false - t.integer "priority", default: 0, null: false - t.datetime "scheduled_at", null: false - t.datetime "created_at", null: false - t.index ["job_id"], name: "index_solid_queue_scheduled_executions_on_job_id", unique: true - t.index ["scheduled_at", "priority", "job_id"], name: "index_solid_queue_dispatch_all" - end - - create_table "solid_queue_semaphores", force: :cascade do |t| - t.string "key", null: false - t.integer "value", default: 1, null: false - t.datetime "expires_at", null: false - t.datetime "created_at", null: false - t.datetime "updated_at", null: false - t.index ["expires_at"], name: "index_solid_queue_semaphores_on_expires_at" - t.index ["key", "value"], name: "index_solid_queue_semaphores_on_key_and_value" - t.index ["key"], name: "index_solid_queue_semaphores_on_key", unique: true - end - - add_foreign_key "solid_queue_blocked_executions", "solid_queue_jobs", column: "job_id", on_delete: :cascade - add_foreign_key "solid_queue_claimed_executions", "solid_queue_jobs", column: "job_id", on_delete: :cascade - add_foreign_key "solid_queue_failed_executions", "solid_queue_jobs", column: "job_id", on_delete: :cascade - add_foreign_key "solid_queue_ready_executions", "solid_queue_jobs", column: "job_id", on_delete: :cascade - add_foreign_key "solid_queue_recurring_executions", "solid_queue_jobs", column: "job_id", on_delete: 
:cascade - add_foreign_key "solid_queue_scheduled_executions", "solid_queue_jobs", column: "job_id", on_delete: :cascade -end diff --git a/docker/web-entrypoint.sh b/docker/web-entrypoint.sh index 228d89b7..9642055f 100644 --- a/docker/web-entrypoint.sh +++ b/docker/web-entrypoint.sh @@ -31,19 +31,6 @@ export DATABASE_USERNAME export DATABASE_PASSWORD export DATABASE_NAME -# Set queue database name and connection parameters with defaults -QUEUE_DATABASE_NAME=${QUEUE_DATABASE_NAME:-"${DATABASE_NAME}_queue"} -QUEUE_DATABASE_PASSWORD=${QUEUE_DATABASE_PASSWORD:-"$DATABASE_PASSWORD"} -QUEUE_DATABASE_USERNAME=${QUEUE_DATABASE_USERNAME:-"$DATABASE_USERNAME"} -QUEUE_DATABASE_PORT=${QUEUE_DATABASE_PORT:-"$DATABASE_PORT"} -QUEUE_DATABASE_HOST=${QUEUE_DATABASE_HOST:-"$DATABASE_HOST"} - -export QUEUE_DATABASE_NAME -export QUEUE_DATABASE_PASSWORD -export QUEUE_DATABASE_USERNAME -export QUEUE_DATABASE_PORT -export QUEUE_DATABASE_HOST - # Remove pre-existing puma/passenger server.pid rm -f $APP_PATH/tmp/pids/server.pid @@ -67,34 +54,12 @@ create_database() { echo "✅ PostgreSQL database $db_name is ready!" } -# Set up SQLite database directory in the volume for cache and cable -SQLITE_DB_DIR="/dawarich_sqlite_data" -mkdir -p $SQLITE_DB_DIR -echo "Created SQLite database directory at $SQLITE_DB_DIR" - # Step 1: Database Setup echo "Setting up all required databases..." 
# Create primary PostgreSQL database create_database "$DATABASE_NAME" "$DATABASE_PASSWORD" "$DATABASE_HOST" "$DATABASE_PORT" "$DATABASE_USERNAME" -# Create PostgreSQL queue database for solid_queue -create_database "$QUEUE_DATABASE_NAME" "$QUEUE_DATABASE_PASSWORD" "$QUEUE_DATABASE_HOST" "$QUEUE_DATABASE_PORT" "$QUEUE_DATABASE_USERNAME" - -# Setup SQLite databases for cache and cable - -# Setup Cache database with SQLite -CACHE_DATABASE_PATH=${CACHE_DATABASE_PATH:-"$SQLITE_DB_DIR/${DATABASE_NAME}_cache.sqlite3"} -export CACHE_DATABASE_PATH -echo "✅ SQLite cache database configured at $CACHE_DATABASE_PATH" - -# Setup Cable database with SQLite (only for production and staging) -if [ "$RAILS_ENV" = "production" ] || [ "$RAILS_ENV" = "staging" ]; then - CABLE_DATABASE_PATH=${CABLE_DATABASE_PATH:-"$SQLITE_DB_DIR/${DATABASE_NAME}_cable.sqlite3"} - export CABLE_DATABASE_PATH - echo "✅ SQLite cable database configured at $CABLE_DATABASE_PATH" -fi - # Step 2: Run migrations for all databases echo "Running migrations for all databases..." @@ -102,20 +67,6 @@ echo "Running migrations for all databases..." echo "Running primary database migrations..." bundle exec rails db:migrate -# Run PostgreSQL queue database migrations -echo "Running queue database migrations..." -bundle exec rails db:migrate:queue - -# Run SQLite database migrations -echo "Running cache database migrations..." -bundle exec rails db:migrate:cache - -# Run cable migrations for production/staging -if [ "$RAILS_ENV" = "production" ] || [ "$RAILS_ENV" = "staging" ]; then - echo "Running cable database migrations..." - bundle exec rails db:migrate:cable -fi - # Run data migrations echo "Running DATA migrations..." 
bundle exec rake data:migrate diff --git a/spec/jobs/area_visits_calculating_job_spec.rb b/spec/jobs/area_visits_calculating_job_spec.rb index 46185a76..629c145c 100644 --- a/spec/jobs/area_visits_calculating_job_spec.rb +++ b/spec/jobs/area_visits_calculating_job_spec.rb @@ -8,9 +8,11 @@ RSpec.describe AreaVisitsCalculatingJob, type: :job do let(:area) { create(:area, user:) } it 'calls the AreaVisitsCalculationService' do - expect(Areas::Visits::Create).to receive(:new).with(user, [area]).and_call_original + Sidekiq::Testing.inline! do + expect(Areas::Visits::Create).to receive(:new).with(user, [area]).and_call_original - described_class.new.perform(user.id) + described_class.new.perform(user.id) + end end end end diff --git a/spec/services/imports/create_spec.rb b/spec/services/imports/create_spec.rb index 176043b6..69634149 100644 --- a/spec/services/imports/create_spec.rb +++ b/spec/services/imports/create_spec.rb @@ -55,12 +55,16 @@ RSpec.describe Imports::Create do context 'when import is successful' do it 'schedules stats creating' do - expect { service.call }.to \ - have_enqueued_job(Stats::CalculatingJob).with(user.id, 2024, 3) + Sidekiq::Testing.inline! do + expect { service.call }.to \ + have_enqueued_job(Stats::CalculatingJob).with(user.id, 2024, 3) + end end it 'schedules visit suggesting' do - expect { service.call }.to have_enqueued_job(VisitSuggestingJob) + Sidekiq::Testing.inline! do + expect { service.call }.to have_enqueued_job(VisitSuggestingJob) + end end end diff --git a/spec/services/imports/watcher_spec.rb b/spec/services/imports/watcher_spec.rb index 94c04053..fadbea84 100644 --- a/spec/services/imports/watcher_spec.rb +++ b/spec/services/imports/watcher_spec.rb @@ -9,6 +9,7 @@ RSpec.describe Imports::Watcher do let(:watched_dir_path) { Rails.root.join('spec/fixtures/files/watched') } before do + Sidekiq::Testing.inline! 
stub_const('Imports::Watcher::WATCHED_DIR_PATH', watched_dir_path) end From e8e4417f2d24183655bcfe2f121caa9ebe9c60d3 Mon Sep 17 00:00:00 2001 From: Eugene Burmakin Date: Mon, 9 Jun 2025 13:54:13 +0200 Subject: [PATCH 3/6] Remove gems --- Gemfile.lock | 37 ------------------------------------- 1 file changed, 37 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 8777986d..567efbdc 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -238,16 +238,6 @@ GEM mini_mime (1.1.5) mini_portile2 (2.8.9) minitest (5.25.5) - mission_control-jobs (1.0.2) - actioncable (>= 7.1) - actionpack (>= 7.1) - activejob (>= 7.1) - activerecord (>= 7.1) - importmap-rails (>= 1.2.1) - irb (~> 1.13) - railties (>= 7.1) - stimulus-rails - turbo-rails msgpack (1.7.3) multi_json (1.15.0) multi_xml (0.7.1) @@ -458,22 +448,6 @@ GEM simplecov_json_formatter (~> 0.1) simplecov-html (0.13.1) simplecov_json_formatter (0.1.4) - solid_cable (3.0.8) - actioncable (>= 7.2) - activejob (>= 7.2) - activerecord (>= 7.2) - railties (>= 7.2) - solid_cache (1.0.7) - activejob (>= 7.2) - activerecord (>= 7.2) - railties (>= 7.2) - solid_queue (1.1.5) - activejob (>= 7.1) - activerecord (>= 7.1) - concurrent-ruby (>= 1.3.1) - fugit (~> 1.11.0) - railties (>= 7.1) - thor (~> 1.3.1) sprockets (4.2.1) concurrent-ruby (~> 1.0) rack (>= 2.2.4, < 4) @@ -481,12 +455,6 @@ GEM actionpack (>= 6.1) activesupport (>= 6.1) sprockets (>= 3.0.0) - sqlite3 (2.6.0-aarch64-linux-gnu) - sqlite3 (2.6.0-arm-linux-gnu) - sqlite3 (2.6.0-arm64-darwin) - sqlite3 (2.6.0-x86-linux-gnu) - sqlite3 (2.6.0-x86_64-darwin) - sqlite3 (2.6.0-x86_64-linux-gnu) stackprof (0.2.27) stimulus-rails (1.3.4) railties (>= 6.0.0) @@ -570,7 +538,6 @@ DEPENDENCIES jwt kaminari lograge - mission_control-jobs oj pg prometheus_exporter @@ -597,11 +564,7 @@ DEPENDENCIES sidekiq-cron sidekiq-limit_fetch simplecov - solid_cable (~> 3.0) - solid_cache (= 1.0.7) - solid_queue (~> 1.1) sprockets-rails - sqlite3 (~> 2.6) stackprof stimulus-rails strong_migrations 
From 1ed01a0c0b554c872a577b991672ec2dae6b841e Mon Sep 17 00:00:00 2001 From: Eugene Burmakin Date: Mon, 9 Jun 2025 14:05:19 +0200 Subject: [PATCH 4/6] Fix some issues and clean up compose files --- .devcontainer/docker-compose.yml | 9 --------- config/environments/development.rb | 8 -------- config/initializers/geocoder.rb | 2 +- docker/docker-compose.production.yml | 9 --------- docker/docker-compose.yml | 9 --------- 5 files changed, 1 insertion(+), 36 deletions(-) diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index eb632340..b4068664 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -25,15 +25,6 @@ services: DATABASE_USERNAME: postgres DATABASE_PASSWORD: password DATABASE_NAME: dawarich_development - # PostgreSQL database name for solid_queue - QUEUE_DATABASE_HOST: dawarich_db - QUEUE_DATABASE_USERNAME: postgres - QUEUE_DATABASE_PASSWORD: password - QUEUE_DATABASE_PORT: 5432 - QUEUE_DATABASE_NAME: dawarich_development_queue - # SQLite database paths for cache and cable databases - CACHE_DATABASE_PATH: /dawarich_sqlite_data/dawarich_development_cache.sqlite3 - CABLE_DATABASE_PATH: /dawarich_sqlite_data/dawarich_development_cable.sqlite3 MIN_MINUTES_SPENT_IN_CITY: 60 APPLICATION_HOSTS: localhost TIME_ZONE: Europe/London diff --git a/config/environments/development.rb b/config/environments/development.rb index dfad5b2f..c177d97e 100644 --- a/config/environments/development.rb +++ b/config/environments/development.rb @@ -67,14 +67,6 @@ Rails.application.configure do # Highlight code that enqueued background job in logs. 
config.active_job.verbose_enqueue_logs = true - config.active_job.queue_adapter = :solid_queue - config.solid_queue.silence_polling = true - # :queue is the name of the database connection - config.solid_queue.connects_to = { database: { writing: :queue } } - - config.mission_control.jobs.http_basic_auth_enabled = false - config.solid_queue.logger = ActiveSupport::Logger.new($stdout) - # Suppress logger output for asset requests. config.assets.quiet = true diff --git a/config/initializers/geocoder.rb b/config/initializers/geocoder.rb index e38248d0..9c115fd7 100644 --- a/config/initializers/geocoder.rb +++ b/config/initializers/geocoder.rb @@ -4,7 +4,7 @@ settings = { debug_mode: true, timeout: 5, units: :km, - cache: cache: Redis.new, + cache: Redis.new, always_raise: :all, http_headers: { 'User-Agent' => "Dawarich #{APP_VERSION} (https://dawarich.app)" diff --git a/docker/docker-compose.production.yml b/docker/docker-compose.production.yml index 37aeb19a..c47c73d0 100644 --- a/docker/docker-compose.production.yml +++ b/docker/docker-compose.production.yml @@ -62,15 +62,6 @@ services: DATABASE_USERNAME: postgres DATABASE_PASSWORD: password DATABASE_NAME: dawarich_production - # PostgreSQL database name for solid_queue - QUEUE_DATABASE_NAME: dawarich_production_queue - QUEUE_DATABASE_PASSWORD: password - QUEUE_DATABASE_USERNAME: postgres - QUEUE_DATABASE_HOST: dawarich_db - QUEUE_DATABASE_PORT: 5432 - # SQLite database paths for cache and cable databases - CACHE_DATABASE_PATH: /dawarich_sqlite_data/dawarich_production_cache.sqlite3 - CABLE_DATABASE_PATH: /dawarich_sqlite_data/dawarich_production_cable.sqlite3 MIN_MINUTES_SPENT_IN_CITY: 60 APPLICATION_HOSTS: localhost,::1,127.0.0.1 TIME_ZONE: Europe/London diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 220719fc..7b4d66a6 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -64,15 +64,6 @@ services: DATABASE_USERNAME: postgres DATABASE_PASSWORD: password DATABASE_NAME: 
dawarich_development - # PostgreSQL database name for solid_queue - QUEUE_DATABASE_NAME: dawarich_development_queue - QUEUE_DATABASE_PASSWORD: password - QUEUE_DATABASE_USERNAME: postgres - QUEUE_DATABASE_HOST: dawarich_db - QUEUE_DATABASE_PORT: 5432 - # SQLite database paths for cache and cable databases - CACHE_DATABASE_PATH: /dawarich_sqlite_data/dawarich_development_cache.sqlite3 - CABLE_DATABASE_PATH: /dawarich_sqlite_data/dawarich_development_cable.sqlite3 MIN_MINUTES_SPENT_IN_CITY: 60 APPLICATION_HOSTS: localhost TIME_ZONE: Europe/London From 3d26a4962758b2932f8d4686f96efb6b8b893b4a Mon Sep 17 00:00:00 2001 From: Eugene Burmakin Date: Mon, 9 Jun 2025 14:10:49 +0200 Subject: [PATCH 5/6] Fix redis urls --- .devcontainer/docker-compose.yml | 2 +- .env.development | 2 +- .env.test | 2 +- config/cable.yml | 4 ++-- config/initializers/geocoder.rb | 2 +- config/initializers/sidekiq.rb | 2 +- docker/docker-compose.production.yml | 6 +++--- docker/docker-compose.yml | 6 +++--- 8 files changed, 13 insertions(+), 13 deletions(-) diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index b4068664..94ae2ead 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -20,7 +20,7 @@ services: tty: true environment: RAILS_ENV: development - REDIS_URL: redis://dawarich_redis:6379/0 + REDIS_URL: redis://dawarich_redis:6379 DATABASE_HOST: dawarich_db DATABASE_USERNAME: postgres DATABASE_PASSWORD: password diff --git a/.env.development b/.env.development index 8aeb3141..edab341c 100644 --- a/.env.development +++ b/.env.development @@ -3,4 +3,4 @@ DATABASE_USERNAME=postgres DATABASE_PASSWORD=password DATABASE_NAME=dawarich_development DATABASE_PORT=5432 -REDIS_URL=redis://localhost:6379/1 +REDIS_URL=redis://localhost:6379 diff --git a/.env.test b/.env.test index fea48769..f65e75de 100644 --- a/.env.test +++ b/.env.test @@ -3,4 +3,4 @@ DATABASE_USERNAME=postgres DATABASE_PASSWORD=password DATABASE_NAME=dawarich_test 
DATABASE_PORT=5432 -REDIS_URL=redis://localhost:6379/1 +REDIS_URL=redis://localhost:6379 diff --git a/config/cable.yml b/config/cable.yml index c3738c80..917fe123 100644 --- a/config/cable.yml +++ b/config/cable.yml @@ -1,11 +1,11 @@ development: adapter: redis - url: <%= ENV['REDIS_URL'] %> + url: <%= "#{ENV.fetch("REDIS_URL")}/2" %> test: adapter: test production: adapter: redis - url: <%= ENV.fetch("REDIS_URL") { "redis://localhost:6379/1" } %> + url: <%= "#{ENV.fetch("REDIS_URL")}/2" %> channel_prefix: dawarich_production diff --git a/config/initializers/geocoder.rb b/config/initializers/geocoder.rb index 9c115fd7..1c5e135c 100644 --- a/config/initializers/geocoder.rb +++ b/config/initializers/geocoder.rb @@ -4,7 +4,7 @@ settings = { debug_mode: true, timeout: 5, units: :km, - cache: Redis.new, + cache: Redis.new(url: ENV['REDIS_URL']), always_raise: :all, http_headers: { 'User-Agent' => "Dawarich #{APP_VERSION} (https://dawarich.app)" diff --git a/config/initializers/sidekiq.rb b/config/initializers/sidekiq.rb index 6b262868..66154781 100644 --- a/config/initializers/sidekiq.rb +++ b/config/initializers/sidekiq.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true Sidekiq.configure_server do |config| - config.redis = { url: ENV['REDIS_URL'] } + config.redis = { url: "#{ENV['REDIS_URL']}/1" } config.logger = Sidekiq::Logger.new($stdout) if ENV['PROMETHEUS_EXPORTER_ENABLED'].to_s == 'true' diff --git a/docker/docker-compose.production.yml b/docker/docker-compose.production.yml index c47c73d0..52b2c72c 100644 --- a/docker/docker-compose.production.yml +++ b/docker/docker-compose.production.yml @@ -56,7 +56,7 @@ services: restart: on-failure environment: RAILS_ENV: production - REDIS_URL: redis://dawarich_redis:6379/0 + REDIS_URL: redis://dawarich_redis:6379 DATABASE_HOST: dawarich_db DATABASE_PORT: 5432 DATABASE_USERNAME: postgres @@ -111,7 +111,7 @@ services: restart: on-failure environment: RAILS_ENV: production - REDIS_URL: redis://dawarich_redis:6379/0 + 
REDIS_URL: redis://dawarich_redis:6379 DATABASE_HOST: dawarich_db DATABASE_PORT: 5432 DATABASE_USERNAME: postgres @@ -132,7 +132,7 @@ services: max-size: "100m" max-file: "5" healthcheck: - test: [ "CMD-SHELL", "bundle exec sidekiqmon processes | grep $${HOSTNAME}" ] + test: [ "CMD-SHELL", "pgrep -f sidekiq" ] interval: 10s retries: 30 start_period: 30s diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 7b4d66a6..34df9468 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -59,7 +59,7 @@ services: restart: on-failure environment: RAILS_ENV: development - REDIS_URL: redis://dawarich_redis:6379/0 + REDIS_URL: redis://dawarich_redis:6379 DATABASE_HOST: dawarich_db DATABASE_USERNAME: postgres DATABASE_PASSWORD: password @@ -112,7 +112,7 @@ services: restart: on-failure environment: RAILS_ENV: development - REDIS_URL: redis://dawarich_redis:6379/0 + REDIS_URL: redis://dawarich_redis:6379 DATABASE_HOST: dawarich_db DATABASE_USERNAME: postgres DATABASE_PASSWORD: password @@ -131,7 +131,7 @@ services: max-size: "100m" max-file: "5" healthcheck: - test: [ "CMD-SHELL", "bundle exec sidekiqmon processes | grep $${HOSTNAME}" ] + test: [ "CMD-SHELL", "pgrep -f sidekiq" ] interval: 10s retries: 30 start_period: 30s From c718eba6eff9988cea8084b9886cf54a8d0874be Mon Sep 17 00:00:00 2001 From: Eugene Burmakin Date: Mon, 9 Jun 2025 16:00:34 +0200 Subject: [PATCH 6/6] Add release notes --- .app_version | 2 +- .devcontainer/docker-compose.yml | 2 - CHANGELOG.md | 130 ++++++++++++++++++++++++++- config/environments/development.rb | 2 +- config/environments/production.rb | 2 +- config/initializers/geocoder.rb | 2 +- config/initializers/sidekiq.rb | 2 +- docker/docker-compose.production.yml | 2 - docker/docker-compose.yml | 3 +- 9 files changed, 135 insertions(+), 12 deletions(-) diff --git a/.app_version b/.app_version index 69bf493e..697f087f 100644 --- a/.app_version +++ b/.app_version @@ -1 +1 @@ -0.27.5 +0.28.0 diff --git 
a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 94ae2ead..f5752cc9 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -10,7 +10,6 @@ services: - dawarich_public:/var/app/public - dawarich_watched:/var/app/tmp/imports/watched - dawarich_storage:/var/app/storage - - dawarich_sqlite_data:/dawarich_sqlite_data networks: - dawarich ports: @@ -71,4 +70,3 @@ volumes: dawarich_public: dawarich_watched: dawarich_storage: - dawarich_sqlite_data: diff --git a/CHANGELOG.md b/CHANGELOG.md index 1a4ba4fe..1f70c08b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,130 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). -# 0.27.5 - 2025-06-08 +# 0.28.0 - 2025-06-09 + +⚠️ This release includes a breaking change. ⚠️ + +_yet another, yay!_ + +Well, we're moving back to Sidekiq and Redis for background jobs and caching. Unfortunately, SolidQueue and SolidCache brought more problems than they solved. Please update your `docker-compose.yml` to use Redis and Sidekiq. + +```diff +networks: + dawarich: +services: ++ dawarich_redis: ++ image: redis:7.4-alpine ++ container_name: dawarich_redis ++ command: redis-server ++ networks: ++ - dawarich ++ volumes: ++ - dawarich_shared:/data ++ restart: always ++ healthcheck: ++ test: [ "CMD", "redis-cli", "--raw", "incr", "ping" ] ++ interval: 10s ++ retries: 5 ++ start_period: 30s ++ timeout: 10s +... + dawarich_app: + image: freikin/dawarich:latest + container_name: dawarich_app + volumes: + - dawarich_public:/var/app/public + - dawarich_watched:/var/app/tmp/imports/watched + - dawarich_storage:/var/app/storage + - dawarich_db_data:/dawarich_db_data +- - dawarich_sqlite_data:/dawarich_sqlite_data + ... 
+ restart: on-failure + environment: + RAILS_ENV: development ++ REDIS_URL: redis://dawarich_redis:6379 + DATABASE_HOST: dawarich_db + DATABASE_USERNAME: postgres + DATABASE_PASSWORD: password + DATABASE_NAME: dawarich_development +- # PostgreSQL database name for solid_queue +- QUEUE_DATABASE_NAME: dawarich_development_queue +- QUEUE_DATABASE_PASSWORD: password +- QUEUE_DATABASE_USERNAME: postgres +- QUEUE_DATABASE_HOST: dawarich_db +- QUEUE_DATABASE_PORT: 5432 +- # SQLite database paths for cache and cable databases +- CACHE_DATABASE_PATH: /dawarich_sqlite_data/dawarich_development_cache.sqlite3 +- CABLE_DATABASE_PATH: /dawarich_sqlite_data/dawarich_development_cable.sqlite3 +... + depends_on: + dawarich_db: + condition: service_healthy + restart: true ++ dawarich_redis: ++ condition: service_healthy ++ restart: true +... ++ dawarich_sidekiq: ++ image: freikin/dawarich:latest ++ container_name: dawarich_sidekiq ++ volumes: ++ - dawarich_public:/var/app/public ++ - dawarich_watched:/var/app/tmp/imports/watched ++ - dawarich_storage:/var/app/storage ++ networks: ++ - dawarich ++ stdin_open: true ++ tty: true ++ entrypoint: sidekiq-entrypoint.sh ++ command: ['sidekiq'] ++ restart: on-failure ++ environment: ++ RAILS_ENV: development ++ REDIS_URL: redis://dawarich_redis:6379 ++ DATABASE_HOST: dawarich_db ++ DATABASE_USERNAME: postgres ++ DATABASE_PASSWORD: password ++ DATABASE_NAME: dawarich_development ++ APPLICATION_HOSTS: localhost ++ BACKGROUND_PROCESSING_CONCURRENCY: 10 ++ APPLICATION_PROTOCOL: http ++ PROMETHEUS_EXPORTER_ENABLED: false ++ PROMETHEUS_EXPORTER_HOST: dawarich_app ++ PROMETHEUS_EXPORTER_PORT: 9394 ++ SELF_HOSTED: "true" ++ STORE_GEODATA: "true" ++ logging: ++ driver: "json-file" ++ options: ++ max-size: "100m" ++ max-file: "5" ++ healthcheck: ++ test: [ "CMD-SHELL", "pgrep -f sidekiq" ] ++ interval: 10s ++ retries: 30 ++ start_period: 30s ++ timeout: 10s ++ depends_on: ++ dawarich_db: ++ condition: service_healthy ++ restart: true ++ 
dawarich_redis: ++ condition: service_healthy ++ restart: true ++ dawarich_app: ++ condition: service_healthy ++ restart: true +... +volumes: + dawarich_db_data: +- dawarich_sqlite_data: + dawarich_shared: + dawarich_public: + dawarich_watched: + dawarich_storage: + +``` ## Fixed @@ -15,6 +138,11 @@ and this project adheres to [Semantic Versioning](http://semver.org/). ## Changed - Geocoder is now being installed from a private fork for debugging purposes. +- Redis is now being used for caching. +- Sidekiq is now being used for background jobs. + +## Removed +- SolidQueue, SolidCache and SolidCable are now removed. # 0.27.4 - 2025-06-06 diff --git a/config/environments/development.rb b/config/environments/development.rb index c177d97e..9d8c02c9 100644 --- a/config/environments/development.rb +++ b/config/environments/development.rb @@ -26,7 +26,7 @@ Rails.application.configure do # Enable/disable caching. By default caching is disabled. # Run rails dev:cache to toggle caching. - config.cache_store = :redis_cache_store, { url: ENV['REDIS_URL'] } + config.cache_store = :redis_cache_store, { url: "#{ENV['REDIS_URL']}/0" } if Rails.root.join('tmp/caching-dev.txt').exist? config.action_controller.perform_caching = true diff --git a/config/environments/production.rb b/config/environments/production.rb index 26913253..4d6d0330 100644 --- a/config/environments/production.rb +++ b/config/environments/production.rb @@ -73,7 +73,7 @@ Rails.application.configure do config.log_level = ENV.fetch('RAILS_LOG_LEVEL', 'info') # Use a different cache store in production. - config.cache_store = :redis_cache_store, { url: ENV['REDIS_URL'] } + config.cache_store = :redis_cache_store, { url: "#{ENV['REDIS_URL']}/0" } # Use a real queuing backend for Active Job (and separate queues per environment). 
config.active_job.queue_adapter = :sidekiq diff --git a/config/initializers/geocoder.rb b/config/initializers/geocoder.rb index 1c5e135c..4ddfe9d4 100644 --- a/config/initializers/geocoder.rb +++ b/config/initializers/geocoder.rb @@ -4,7 +4,7 @@ settings = { debug_mode: true, timeout: 5, units: :km, - cache: Redis.new(url: ENV['REDIS_URL']), + cache: Redis.new(url: "#{ENV['REDIS_URL']}/0"), always_raise: :all, http_headers: { 'User-Agent' => "Dawarich #{APP_VERSION} (https://dawarich.app)" diff --git a/config/initializers/sidekiq.rb b/config/initializers/sidekiq.rb index 66154781..49b0c98b 100644 --- a/config/initializers/sidekiq.rb +++ b/config/initializers/sidekiq.rb @@ -24,7 +24,7 @@ Sidekiq.configure_server do |config| end Sidekiq.configure_client do |config| - config.redis = { url: ENV['REDIS_URL'] } + config.redis = { url: "#{ENV['REDIS_URL']}/1" } end Sidekiq::Queue['reverse_geocoding'].limit = 1 if Sidekiq.server? && DawarichSettings.photon_uses_komoot_io? diff --git a/docker/docker-compose.production.yml b/docker/docker-compose.production.yml index 52b2c72c..5705da9b 100644 --- a/docker/docker-compose.production.yml +++ b/docker/docker-compose.production.yml @@ -43,7 +43,6 @@ services: - dawarich_watched:/var/app/tmp/imports/watched - dawarich_storage:/var/app/storage - dawarich_db_data:/dawarich_db_data - - dawarich_sqlite_data:/dawarich_sqlite_data networks: - dawarich ports: @@ -153,4 +152,3 @@ volumes: dawarich_public: dawarich_watched: dawarich_storage: - dawarich_sqlite_data: diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 34df9468..89611157 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -46,7 +46,6 @@ services: - dawarich_watched:/var/app/tmp/imports/watched - dawarich_storage:/var/app/storage - dawarich_db_data:/dawarich_db_data - - dawarich_sqlite_data:/dawarich_sqlite_data networks: - dawarich ports: @@ -146,9 +145,9 @@ services: dawarich_app: condition: service_healthy restart: true + volumes: 
dawarich_db_data: - dawarich_sqlite_data: dawarich_shared: dawarich_public: dawarich_watched: