From 35a0533b2b1adca40bd322732aa04141620d3fee Mon Sep 17 00:00:00 2001 From: Eugene Burmakin Date: Sat, 17 May 2025 23:05:52 +0200 Subject: [PATCH 1/6] Move to solid_queue --- Gemfile | 3 + Gemfile.lock | 25 +++ app/jobs/application_job.rb | 4 + app/jobs/jobs/clean_finished_job.rb | 9 ++ bin/jobs | 6 + config/cable.yml | 22 ++- config/database.yml | 52 ++++++- config/environments/development.rb | 10 +- config/environments/production.rb | 7 +- config/puma.rb | 3 + config/queue.yml | 27 ++++ config/recurring.yml | 34 +++++ config/routes.rb | 1 + db/cable_schema.rb | 26 ++++ db/queue_schema.rb | 144 ++++++++++++++++++ ..._visits_calculation_scheduling_job_spec.rb | 6 +- .../migrate_places_lonlat_job_spec.rb | 9 -- spec/jobs/visit_suggesting_job_spec.rb | 8 - spec/services/imports/create_spec.rb | 10 +- spec/services/imports/watcher_spec.rb | 1 - 20 files changed, 362 insertions(+), 45 deletions(-) create mode 100644 app/jobs/jobs/clean_finished_job.rb create mode 100755 bin/jobs create mode 100644 config/queue.yml create mode 100644 config/recurring.yml create mode 100644 db/cable_schema.rb create mode 100644 db/queue_schema.rb diff --git a/Gemfile b/Gemfile index c8fa08c2..72712274 100644 --- a/Gemfile +++ b/Gemfile @@ -20,6 +20,7 @@ gem 'httparty' gem 'importmap-rails' gem 'kaminari' gem 'lograge' +gem 'mission_control-jobs' gem 'oj' gem 'pg' gem 'prometheus_exporter' @@ -42,6 +43,8 @@ gem 'sidekiq-limit_fetch' gem 'sprockets-rails' gem 'stimulus-rails' gem 'strong_migrations' +gem 'solid_cable', '~> 3.0' +gem 'solid_queue', '~> 1.1' gem 'tailwindcss-rails' gem 'turbo-rails' gem 'tzinfo-data', platforms: %i[mingw mswin x64_mingw jruby] diff --git a/Gemfile.lock b/Gemfile.lock index 523f8e9f..bc29f3ba 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -218,6 +218,16 @@ GEM mini_mime (1.1.5) mini_portile2 (2.8.8) minitest (5.25.5) + mission_control-jobs (1.0.2) + actioncable (>= 7.1) + actionpack (>= 7.1) + activejob (>= 7.1) + activerecord (>= 7.1) + importmap-rails (>= 1.2.1) + irb (~> 1.13) + railties (>= 7.1) + stimulus-rails + turbo-rails msgpack (1.7.3) multi_json (1.15.0) multi_xml (0.7.1) @@ -420,6 +430,18 @@ GEM simplecov_json_formatter (~> 0.1) simplecov-html (0.13.1) simplecov_json_formatter (0.1.4) + solid_cable (3.0.8) + actioncable (>= 7.2) + activejob (>= 7.2) + activerecord (>= 7.2) + railties (>= 7.2) + solid_queue (1.1.5) + activejob (>= 7.1) + activerecord (>= 7.1) + concurrent-ruby (>= 1.3.1) + fugit (~> 1.11.0) + railties (>= 7.1) + thor (~> 1.3.1) sprockets (4.2.1) concurrent-ruby (~> 1.0) rack (>= 2.2.4, < 4) @@ -505,6 +527,7 @@ DEPENDENCIES jwt kaminari lograge + mission_control-jobs oj pg prometheus_exporter @@ -530,6 +553,8 @@ DEPENDENCIES sidekiq-cron sidekiq-limit_fetch simplecov + solid_cable (~> 3.0) + solid_queue (~> 1.1) sprockets-rails stackprof stimulus-rails diff --git a/app/jobs/application_job.rb b/app/jobs/application_job.rb index d394c3d1..ddeab5d8 100644 --- a/app/jobs/application_job.rb +++ b/app/jobs/application_job.rb @@ -1,7 +1,11 @@ +# frozen_string_literal: true + class ApplicationJob < ActiveJob::Base # Automatically retry jobs that encountered a deadlock # retry_on ActiveRecord::Deadlocked + retry_on Exception, wait: :polynomially_longer, attempts: 25 + # Most jobs are safe to ignore if the underlying records are no longer available # discard_on ActiveJob::DeserializationError end diff --git a/app/jobs/jobs/clean_finished_job.rb b/app/jobs/jobs/clean_finished_job.rb new file mode 100644 index 00000000..c5fc2037 --- /dev/null +++ 
b/app/jobs/jobs/clean_finished_job.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +class Jobs::CleanFinishedJob < ApplicationJob + queue_as :default + + def perform + SolidQueue::Job.clear_finished_in_batches + end +end diff --git a/bin/jobs b/bin/jobs new file mode 100755 index 00000000..dcf59f30 --- /dev/null +++ b/bin/jobs @@ -0,0 +1,6 @@ +#!/usr/bin/env ruby + +require_relative "../config/environment" +require "solid_queue/cli" + +SolidQueue::Cli.start(ARGV) diff --git a/config/cable.yml b/config/cable.yml index c3738c80..fd1a239f 100644 --- a/config/cable.yml +++ b/config/cable.yml @@ -1,11 +1,23 @@ +# Async adapter only works within the same process, so for manually triggering cable updates from a console, +# and seeing results in the browser, you must do so from the web console (running inside the dev process), +# not a terminal started via bin/rails console! Add "console" to any action or any ERB template view +# to make the web console appear. + development: - adapter: redis - url: <%= ENV['REDIS_URL'] %> + adapter: solid_cable + connects_to: + database: + writing: cable + polling_interval: 0.1.seconds + message_retention: 1.day test: adapter: test production: - adapter: redis - url: <%= ENV.fetch("REDIS_URL") { "redis://localhost:6379/1" } %> - channel_prefix: dawarich_production + adapter: solid_cable + connects_to: + database: + writing: cable + polling_interval: 0.1.seconds + message_retention: 1.day diff --git a/config/database.yml b/config/database.yml index 374dfa53..dbe7b626 100644 --- a/config/database.yml +++ b/config/database.yml @@ -10,17 +10,53 @@ default: &default timeout: 5000 development: - <<: *default - database: <%= ENV['DATABASE_NAME'] || 'dawarich_development' %> + primary: + <<: *default + database: <%= ENV['DATABASE_NAME'] || 'dawarich_development' %> + queue: + <<: *default + database: <%= ENV['DATABASE_QUEUE_NAME'] || 'dawarich_development_queue' %> + migrations_paths: db/queue_migrate + cable: + <<: *default + database: <%= ENV['DATABASE_CABLE_NAME'] || 'dawarich_development_cable' %> + migrations_paths: db/cable_migrate test: - <<: *default - database: <%= ENV['DATABASE_NAME'] || 'dawarich_test' %> + primary: + <<: *default + database: <%= ENV['DATABASE_NAME'] || 'dawarich_test' %> + queue: + <<: *default + database: <%= ENV['DATABASE_QUEUE_NAME'] || 'dawarich_test_queue' %> + migrations_paths: db/queue_migrate + cable: + <<: *default + database: <%= ENV['DATABASE_CABLE_NAME'] || 'dawarich_test_cable' %> + migrations_paths: db/cable_migrate production: - <<: *default - database: <%= ENV['DATABASE_NAME'] || 'dawarich_production' %> + primary: + <<: *default + database: <%= ENV['DATABASE_NAME'] || 'dawarich_production' %> + queue: + <<: *default + database: <%= ENV['DATABASE_QUEUE_NAME'] || 'dawarich_production_queue' %> + migrations_paths: db/queue_migrate + cable: + <<: *default + database: <%= ENV['DATABASE_CABLE_NAME'] || 'dawarich_production_cable' %> + migrations_paths: db/cable_migrate staging: - <<: *default - database: <%= ENV['DATABASE_NAME'] || 'dawarich_staging' %> + primary: + <<: *default + database: <%= ENV['DATABASE_NAME'] || 'dawarich_staging' %> + queue: + <<: *default + database: <%= ENV['DATABASE_QUEUE_NAME'] || 'dawarich_staging_queue' %> + migrations_paths: db/queue_migrate + cable: + <<: *default + database: <%= ENV['DATABASE_CABLE_NAME'] || 'dawarich_staging_cable' %> + migrations_paths: db/cable_migrate diff --git a/config/environments/development.rb b/config/environments/development.rb index 3edfc64e..ff49d090 100644 --- 
a/config/environments/development.rb +++ b/config/environments/development.rb @@ -68,6 +68,14 @@ Rails.application.configure do # Highlight code that enqueued background job in logs. config.active_job.verbose_enqueue_logs = true + config.active_job.queue_adapter = :solid_queue + config.solid_queue.silence_polling = true + # :queue is the name of the database connection + config.solid_queue.connects_to = { database: { writing: :queue } } + + config.mission_control.jobs.http_basic_auth_enabled = false + config.solid_queue.logger = ActiveSupport::Logger.new($stdout) + # Suppress logger output for asset requests. config.assets.quiet = true @@ -95,7 +103,7 @@ Rails.application.configure do config.force_ssl = ENV.fetch('APPLICATION_PROTOCOL', 'http').downcase == 'https' # Direct logs to STDOUT - config.logger = Logger.new($stdout) + config.logger = ActiveSupport::Logger.new($stdout) config.lograge.enabled = true config.lograge.formatter = Lograge::Formatters::Json.new diff --git a/config/environments/production.rb b/config/environments/production.rb index a5487d47..8b4e7dd1 100644 --- a/config/environments/production.rb +++ b/config/environments/production.rb @@ -60,7 +60,7 @@ Rails.application.configure do config.force_ssl = ENV.fetch('APPLICATION_PROTOCOL', 'http').downcase == 'https' # Direct logs to STDOUT - config.logger = Logger.new($stdout) + config.logger = ActiveSupport::Logger.new($stdout) config.lograge.enabled = true config.lograge.formatter = Lograge::Formatters::Json.new @@ -77,7 +77,10 @@ Rails.application.configure do config.cache_store = :redis_cache_store, { url: ENV['REDIS_URL'] } # Use a real queuing backend for Active Job (and separate queues per environment). - # config.active_job.queue_adapter = :resque + config.active_job.queue_adapter = :solid_queue + config.solid_queue.connects_to = { database: { writing: :queue } } + config.solid_queue.silence_polling = true + config.solid_queue.logger = ActiveSupport::Logger.new($stdout) # config.active_job.queue_name_prefix = "dawarich_production" config.action_mailer.perform_caching = false diff --git a/config/puma.rb b/config/puma.rb index e0eb3db7..9157f6ba 100644 --- a/config/puma.rb +++ b/config/puma.rb @@ -43,6 +43,9 @@ preload_app! # Allow puma to be restarted by `bin/rails restart` command. plugin :tmp_restart +# If env var is set or we're in development, solid_queue will run in puma +plugin :solid_queue if ENV['SOLID_QUEUE_IN_PUMA'] || Rails.env.development? 
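With the plugin enabled, the SolidQueue supervisor boots inside the Puma process, which matches the changelog note later in this series about running background jobs as a Puma plugin instead of a separate container. A minimal sketch for confirming it came up, assuming a Rails console connected to the queue database (SolidQueue::Process maps to the solid_queue_processes table added below):

    # Every running supervisor, dispatcher and worker keeps a heartbeat row here;
    # an empty result means neither the Puma plugin nor bin/jobs ever started.
    SolidQueue::Process.pluck(:kind, :name, :last_heartbeat_at)
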
+ # Prometheus exporter if ENV['PROMETHEUS_EXPORTER_ENABLED'].to_s == 'true' require 'prometheus_exporter/instrumentation' diff --git a/config/queue.yml b/config/queue.yml new file mode 100644 index 00000000..83b066ed --- /dev/null +++ b/config/queue.yml @@ -0,0 +1,27 @@ + +default: &default + dispatchers: + - polling_interval: 1 + batch_size: 500 + workers: + - queues: "*" + threads: 3 + processes: <%= ENV.fetch("JOB_CONCURRENCY", 1) %> + polling_interval: 2 + - queues: imports + threads: 5 + processes: 1 + polling_interval: 1 + - queues: exports + threads: 5 + processes: 1 + polling_interval: 2 + +development: + <<: *default + +test: + <<: *default + +production: + <<: *default diff --git a/config/recurring.yml b/config/recurring.yml new file mode 100644 index 00000000..22f57d3f --- /dev/null +++ b/config/recurring.yml @@ -0,0 +1,34 @@ +periodic_cleanup: + class: "Jobs::CleanFinishedJob" + queue: default + schedule: every month + +bulk_stats_calculating_job: + class: "BulkStatsCalculatingJob" + queue: stats + schedule: every hour + +area_visits_calculation_scheduling_job: + class: "AreaVisitsCalculationSchedulingJob" + queue: visit_suggesting + schedule: every day at 0:00 + +visit_suggesting_job: + class: "BulkVisitsSuggestingJob" + queue: visit_suggesting + schedule: every day at 00:05 + +watcher_job: + class: "Import::WatcherJob" + queue: imports + schedule: every hour + +app_version_checking_job: + class: "AppVersionCheckingJob" + queue: default + schedule: every 6 hours + +cache_preheating_job: + class: "Cache::PreheatingJob" + queue: default + schedule: every day at 0:00 diff --git a/config/routes.rb b/config/routes.rb index 01164189..45f77e17 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -6,6 +6,7 @@ Rails.application.routes.draw do mount ActionCable.server => '/cable' mount Rswag::Api::Engine => '/api-docs' mount Rswag::Ui::Engine => '/api-docs' + mount MissionControl::Jobs::Engine, at: '/jobs' # Protec just as sidekiq unless DawarichSettings.self_hosted? Sidekiq::Web.use(Rack::Auth::Basic) do |username, password| diff --git a/db/cable_schema.rb b/db/cable_schema.rb new file mode 100644 index 00000000..90beff61 --- /dev/null +++ b/db/cable_schema.rb @@ -0,0 +1,26 @@ +# This file is auto-generated from the current state of the database. Instead +# of editing this file, please use the migrations feature of Active Record to +# incrementally modify your database, and then regenerate this schema definition. +# +# This file is the source Rails uses to define your schema when running `bin/rails +# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to +# be faster and is potentially less error prone than running all of your +# migrations from scratch. Old migrations may fail to apply correctly if those +# migrations use external dependencies or application code. +# +# It's strongly recommended that you check this file into your version control system. 
+ +ActiveRecord::Schema[8.0].define(version: 1) do + # These are extensions that must be enabled in order to support this database + enable_extension "pg_catalog.plpgsql" + + create_table "solid_cable_messages", force: :cascade do |t| + t.binary "channel", null: false + t.binary "payload", null: false + t.datetime "created_at", null: false + t.bigint "channel_hash", null: false + t.index ["channel"], name: "index_solid_cable_messages_on_channel" + t.index ["channel_hash"], name: "index_solid_cable_messages_on_channel_hash" + t.index ["created_at"], name: "index_solid_cable_messages_on_created_at" + end +end diff --git a/db/queue_schema.rb b/db/queue_schema.rb new file mode 100644 index 00000000..089e9380 --- /dev/null +++ b/db/queue_schema.rb @@ -0,0 +1,144 @@ +# This file is auto-generated from the current state of the database. Instead +# of editing this file, please use the migrations feature of Active Record to +# incrementally modify your database, and then regenerate this schema definition. +# +# This file is the source Rails uses to define your schema when running `bin/rails +# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to +# be faster and is potentially less error prone than running all of your +# migrations from scratch. Old migrations may fail to apply correctly if those +# migrations use external dependencies or application code. +# +# It's strongly recommended that you check this file into your version control system. + +ActiveRecord::Schema[8.0].define(version: 1) do + # These are extensions that must be enabled in order to support this database + enable_extension "pg_catalog.plpgsql" + + create_table "solid_queue_blocked_executions", force: :cascade do |t| + t.bigint "job_id", null: false + t.string "queue_name", null: false + t.integer "priority", default: 0, null: false + t.string "concurrency_key", null: false + t.datetime "expires_at", null: false + t.datetime "created_at", null: false + t.index ["concurrency_key", "priority", "job_id"], name: "index_solid_queue_blocked_executions_for_release" + t.index ["expires_at", "concurrency_key"], name: "index_solid_queue_blocked_executions_for_maintenance" + t.index ["job_id"], name: "index_solid_queue_blocked_executions_on_job_id", unique: true + end + + create_table "solid_queue_claimed_executions", force: :cascade do |t| + t.bigint "job_id", null: false + t.bigint "process_id" + t.datetime "created_at", null: false + t.index ["job_id"], name: "index_solid_queue_claimed_executions_on_job_id", unique: true + t.index ["process_id", "job_id"], name: "index_solid_queue_claimed_executions_on_process_id_and_job_id" + end + + create_table "solid_queue_failed_executions", force: :cascade do |t| + t.bigint "job_id", null: false + t.text "error" + t.datetime "created_at", null: false + t.index ["job_id"], name: "index_solid_queue_failed_executions_on_job_id", unique: true + end + + create_table "solid_queue_jobs", force: :cascade do |t| + t.string "queue_name", null: false + t.string "class_name", null: false + t.text "arguments" + t.integer "priority", default: 0, null: false + t.string "active_job_id" + t.datetime "scheduled_at" + t.datetime "finished_at" + t.string "concurrency_key" + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.index ["active_job_id"], name: "index_solid_queue_jobs_on_active_job_id" + t.index ["class_name"], name: "index_solid_queue_jobs_on_class_name" + t.index ["finished_at"], name: "index_solid_queue_jobs_on_finished_at" + t.index 
["queue_name", "finished_at"], name: "index_solid_queue_jobs_for_filtering" + t.index ["scheduled_at", "finished_at"], name: "index_solid_queue_jobs_for_alerting" + end + + create_table "solid_queue_pauses", force: :cascade do |t| + t.string "queue_name", null: false + t.datetime "created_at", null: false + t.index ["queue_name"], name: "index_solid_queue_pauses_on_queue_name", unique: true + end + + create_table "solid_queue_processes", force: :cascade do |t| + t.string "kind", null: false + t.datetime "last_heartbeat_at", null: false + t.bigint "supervisor_id" + t.integer "pid", null: false + t.string "hostname" + t.text "metadata" + t.datetime "created_at", null: false + t.string "name", null: false + t.index ["last_heartbeat_at"], name: "index_solid_queue_processes_on_last_heartbeat_at" + t.index ["name", "supervisor_id"], name: "index_solid_queue_processes_on_name_and_supervisor_id", unique: true + t.index ["supervisor_id"], name: "index_solid_queue_processes_on_supervisor_id" + end + + create_table "solid_queue_ready_executions", force: :cascade do |t| + t.bigint "job_id", null: false + t.string "queue_name", null: false + t.integer "priority", default: 0, null: false + t.datetime "created_at", null: false + t.index ["job_id"], name: "index_solid_queue_ready_executions_on_job_id", unique: true + t.index ["priority", "job_id"], name: "index_solid_queue_poll_all" + t.index ["queue_name", "priority", "job_id"], name: "index_solid_queue_poll_by_queue" + end + + create_table "solid_queue_recurring_executions", force: :cascade do |t| + t.bigint "job_id", null: false + t.string "task_key", null: false + t.datetime "run_at", null: false + t.datetime "created_at", null: false + t.index ["job_id"], name: "index_solid_queue_recurring_executions_on_job_id", unique: true + t.index ["task_key", "run_at"], name: "index_solid_queue_recurring_executions_on_task_key_and_run_at", unique: true + end + + create_table "solid_queue_recurring_tasks", force: :cascade do |t| + t.string "key", null: false + t.string "schedule", null: false + t.string "command", limit: 2048 + t.string "class_name" + t.text "arguments" + t.string "queue_name" + t.integer "priority", default: 0 + t.boolean "static", default: true, null: false + t.text "description" + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.index ["key"], name: "index_solid_queue_recurring_tasks_on_key", unique: true + t.index ["static"], name: "index_solid_queue_recurring_tasks_on_static" + end + + create_table "solid_queue_scheduled_executions", force: :cascade do |t| + t.bigint "job_id", null: false + t.string "queue_name", null: false + t.integer "priority", default: 0, null: false + t.datetime "scheduled_at", null: false + t.datetime "created_at", null: false + t.index ["job_id"], name: "index_solid_queue_scheduled_executions_on_job_id", unique: true + t.index ["scheduled_at", "priority", "job_id"], name: "index_solid_queue_dispatch_all" + end + + create_table "solid_queue_semaphores", force: :cascade do |t| + t.string "key", null: false + t.integer "value", default: 1, null: false + t.datetime "expires_at", null: false + t.datetime "created_at", null: false + t.datetime "updated_at", null: false + t.index ["expires_at"], name: "index_solid_queue_semaphores_on_expires_at" + t.index ["key", "value"], name: "index_solid_queue_semaphores_on_key_and_value" + t.index ["key"], name: "index_solid_queue_semaphores_on_key", unique: true + end + + add_foreign_key "solid_queue_blocked_executions", "solid_queue_jobs", column: 
"job_id", on_delete: :cascade + add_foreign_key "solid_queue_claimed_executions", "solid_queue_jobs", column: "job_id", on_delete: :cascade + add_foreign_key "solid_queue_failed_executions", "solid_queue_jobs", column: "job_id", on_delete: :cascade + add_foreign_key "solid_queue_ready_executions", "solid_queue_jobs", column: "job_id", on_delete: :cascade + add_foreign_key "solid_queue_recurring_executions", "solid_queue_jobs", column: "job_id", on_delete: :cascade + add_foreign_key "solid_queue_scheduled_executions", "solid_queue_jobs", column: "job_id", on_delete: :cascade +end diff --git a/spec/jobs/area_visits_calculation_scheduling_job_spec.rb b/spec/jobs/area_visits_calculation_scheduling_job_spec.rb index 93fd053a..0d375e67 100644 --- a/spec/jobs/area_visits_calculation_scheduling_job_spec.rb +++ b/spec/jobs/area_visits_calculation_scheduling_job_spec.rb @@ -8,11 +8,9 @@ RSpec.describe AreaVisitsCalculationSchedulingJob, type: :job do let(:user) { create(:user) } it 'calls the AreaVisitsCalculationService' do - Sidekiq::Testing.inline! do - expect(AreaVisitsCalculatingJob).to receive(:perform_later).with(user.id).and_call_original + expect(AreaVisitsCalculatingJob).to receive(:perform_later).with(user.id).and_call_original - described_class.new.perform - end + described_class.new.perform end end end diff --git a/spec/jobs/data_migrations/migrate_places_lonlat_job_spec.rb b/spec/jobs/data_migrations/migrate_places_lonlat_job_spec.rb index 994ad142..d2771998 100644 --- a/spec/jobs/data_migrations/migrate_places_lonlat_job_spec.rb +++ b/spec/jobs/data_migrations/migrate_places_lonlat_job_spec.rb @@ -52,7 +52,6 @@ RSpec.describe DataMigrations::MigratePlacesLonlatJob, type: :job do described_class.perform_now(user.id) place1.reload - # SRID should be 4326 (WGS84) expect(place1.lonlat.srid).to eq(4326) end end @@ -64,14 +63,6 @@ RSpec.describe DataMigrations::MigratePlacesLonlatJob, type: :job do end.not_to raise_error end end - - context 'when user does not exist' do - it 'raises ActiveRecord::RecordNotFound' do - expect do - described_class.perform_now(-1) - end.to raise_error(ActiveRecord::RecordNotFound) - end - end end describe 'queue' do diff --git a/spec/jobs/visit_suggesting_job_spec.rb b/spec/jobs/visit_suggesting_job_spec.rb index 61401dd9..f6684813 100644 --- a/spec/jobs/visit_suggesting_job_spec.rb +++ b/spec/jobs/visit_suggesting_job_spec.rb @@ -63,14 +63,6 @@ RSpec.describe VisitSuggestingJob, type: :job do end end - context 'when user not found' do - it 'raises an error' do - expect do - described_class.perform_now(user_id: -1, start_at: start_at, end_at: end_at) - end.to raise_error(ActiveRecord::RecordNotFound) - end - end - context 'with string dates' do let(:string_start) { start_at.to_s } let(:string_end) { end_at.to_s } diff --git a/spec/services/imports/create_spec.rb b/spec/services/imports/create_spec.rb index 69634149..176043b6 100644 --- a/spec/services/imports/create_spec.rb +++ b/spec/services/imports/create_spec.rb @@ -55,16 +55,12 @@ RSpec.describe Imports::Create do context 'when import is successful' do it 'schedules stats creating' do - Sidekiq::Testing.inline! do - expect { service.call }.to \ - have_enqueued_job(Stats::CalculatingJob).with(user.id, 2024, 3) - end + expect { service.call }.to \ + have_enqueued_job(Stats::CalculatingJob).with(user.id, 2024, 3) end it 'schedules visit suggesting' do - Sidekiq::Testing.inline! 
do - expect { service.call }.to have_enqueued_job(VisitSuggestingJob) - end + expect { service.call }.to have_enqueued_job(VisitSuggestingJob) end end diff --git a/spec/services/imports/watcher_spec.rb b/spec/services/imports/watcher_spec.rb index ac3041c8..94c04053 100644 --- a/spec/services/imports/watcher_spec.rb +++ b/spec/services/imports/watcher_spec.rb @@ -10,7 +10,6 @@ RSpec.describe Imports::Watcher do before do stub_const('Imports::Watcher::WATCHED_DIR_PATH', watched_dir_path) - Sidekiq::Testing.inline! end after { Sidekiq::Testing.fake! } From 897cbd882c5783d999d64f4852987bf5f29b077c Mon Sep 17 00:00:00 2001 From: Eugene Burmakin Date: Fri, 30 May 2025 19:20:15 +0200 Subject: [PATCH 2/6] Update some files --- .app_version | 2 +- CHANGELOG.md | 64 ++++++++++++++++++++++++++++++++++++++++++- config/cable.yml | 7 +---- config/database.yml | 41 ++++++++++++++++----------- config/routes.rb | 6 ++-- docker/Dockerfile.dev | 4 ++- 6 files changed, 96 insertions(+), 28 deletions(-) diff --git a/.app_version b/.app_version index 30f6cf8d..f23e1f8a 100644 --- a/.app_version +++ b/.app_version @@ -1 +1 @@ -0.26.1 +0.26.8 diff --git a/CHANGELOG.md b/CHANGELOG.md index 0a549e1c..8aa5699c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,8 +4,70 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). +# 0.26.8 - 2025-05-30 -# 0.26.1 - 2025-05-15 +## Fixed + +- Enable caching in development for the docker image. + + +# 0.26.7 - 2025-05-29 + +## Fixed + +- Popups now showing distance in the correct distance unit. #1258 + +## Added + +- Bunch of system tests to cover map interactions. + + +# 0.26.6 - 2025-05-22 + +## Added + +- armv8 to docker build. #1249 + +## Changed + +- Points are now being created in the `points` queue. #1243 +- Route opacity is now being displayed as percentage in the map settings. #462 #1224 +- Exported GeoJSON file now contains coordinates as floats instead of strings, as per RFC 7946. #762 +- Fog of war now can be set to 200 meter per point. #630 +# 0.26.5 - 2025-05-20 + +## Fixed + +- Wget is back to fix healthchecks. #1241 #1231 +- Dockerfile.prod is now using slim image. #1245 +- Dockerfiles now use jemalloc with check for architecture. #1235 + +# 0.26.4 - 2025-05-19 + +## Changed + +- Docker image is now using slim image to introduce some memory optimizations. +- The trip page now looks a bit nicer. +- The "Yesterday" button on the map page was changed to "Today". #1215 +- The "Create Import" button now disabled until files are uploaded. + +# 0.26.3 - 2025-05-18 + +## Fixed + +- Fixed a bug where default distance unit was not being set for users. #1206 + + +# 0.26.2 - 2025-05-18 + +## Fixed + +- Seeds are now working properly. #1207 +- Fixed a bug where France flag was not being displayed correctly. #1204 +- Fix blank map page caused by empty default distance unit. Default distance unit is now kilometers and can be changed in Settings -> Maps. #1206 + + +# 0.26.1 - 2025-05-18 ## Geodata on demand diff --git a/config/cable.yml b/config/cable.yml index fd1a239f..d0da7a95 100644 --- a/config/cable.yml +++ b/config/cable.yml @@ -4,12 +4,7 @@ # to make the web console appear. 
development: - adapter: solid_cable - connects_to: - database: - writing: cable - polling_interval: 0.1.seconds - message_retention: 1.day + adapter: async test: adapter: test diff --git a/config/database.yml b/config/database.yml index dbe7b626..d80f85ef 100644 --- a/config/database.yml +++ b/config/database.yml @@ -15,25 +15,19 @@ development: database: <%= ENV['DATABASE_NAME'] || 'dawarich_development' %> queue: <<: *default - database: <%= ENV['DATABASE_QUEUE_NAME'] || 'dawarich_development_queue' %> + database: <%= ENV['QUEUE_DATABASE_NAME'] || 'dawarich_development_queue' %> + password: <%= ENV['QUEUE_DATABASE_PASSWORD'] %> migrations_paths: db/queue_migrate - cable: + cache: <<: *default - database: <%= ENV['DATABASE_CABLE_NAME'] || 'dawarich_development_cable' %> - migrations_paths: db/cable_migrate + database: <%= ENV['CACHE_DATABASE_NAME'] || 'dawarich_development_cache' %> + password: <%= ENV['CACHE_DATABASE_PASSWORD'] %> + migrations_paths: db/cache_migrate test: primary: <<: *default database: <%= ENV['DATABASE_NAME'] || 'dawarich_test' %> - queue: - <<: *default - database: <%= ENV['DATABASE_QUEUE_NAME'] || 'dawarich_test_queue' %> - migrations_paths: db/queue_migrate - cable: - <<: *default - database: <%= ENV['DATABASE_CABLE_NAME'] || 'dawarich_test_cable' %> - migrations_paths: db/cable_migrate production: primary: @@ -41,22 +35,37 @@ production: database: <%= ENV['DATABASE_NAME'] || 'dawarich_production' %> queue: <<: *default - database: <%= ENV['DATABASE_QUEUE_NAME'] || 'dawarich_production_queue' %> + database: <%= ENV['QUEUE_DATABASE_NAME'] || 'dawarich_production_queue' %> + password: <%= ENV['QUEUE_DATABASE_PASSWORD'] %> migrations_paths: db/queue_migrate cable: <<: *default - database: <%= ENV['DATABASE_CABLE_NAME'] || 'dawarich_production_cable' %> + database: <%= ENV['CABLE_DATABASE_NAME'] || 'dawarich_production_cable' %> + password: <%= ENV['CABLE_DATABASE_PASSWORD'] %> migrations_paths: db/cable_migrate + cache: + <<: *default + database: <%= ENV['CACHE_DATABASE_NAME'] || 'dawarich_production_cache' %> + password: <%= ENV['CACHE_DATABASE_PASSWORD'] %> + migrations_paths: db/cache_migrate staging: primary: <<: *default database: <%= ENV['DATABASE_NAME'] || 'dawarich_staging' %> + password: <%= ENV['DATABASE_PASSWORD'] %> + cache: + <<: *default + database: <%= ENV['CACHE_DATABASE_NAME'] || 'dawarich_staging_cache' %> + password: <%= ENV['CACHE_DATABASE_PASSWORD'] %> + migrations_paths: db/cache_migrate queue: <<: *default - database: <%= ENV['DATABASE_QUEUE_NAME'] || 'dawarich_staging_queue' %> + database: <%= ENV['QUEUE_DATABASE_NAME'] || 'dawarich_staging_queue' %> + password: <%= ENV['QUEUE_DATABASE_PASSWORD'] %> migrations_paths: db/queue_migrate cable: <<: *default - database: <%= ENV['DATABASE_CABLE_NAME'] || 'dawarich_staging_cable' %> + database: <%= ENV['CABLE_DATABASE_NAME'] || 'dawarich_staging_cable' %> + password: <%= ENV['CABLE_DATABASE_PASSWORD'] %> migrations_paths: db/cable_migrate diff --git a/config/routes.rb b/config/routes.rb index 45f77e17..69ff069b 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -6,7 +6,6 @@ Rails.application.routes.draw do mount ActionCable.server => '/cable' mount Rswag::Api::Engine => '/api-docs' mount Rswag::Ui::Engine => '/api-docs' - mount MissionControl::Jobs::Engine, at: '/jobs' # Protec just as sidekiq unless DawarichSettings.self_hosted? Sidekiq::Web.use(Rack::Auth::Basic) do |username, password| @@ -26,10 +25,11 @@ Rails.application.routes.draw do (u.admin? && ENV['SIDEKIQ_USERNAME'].present? 
&& ENV['SIDEKIQ_PASSWORD'].present?) } do mount Sidekiq::Web => '/sidekiq' + mount MissionControl::Jobs::Engine, at: '/jobs' end - # We want to return a nice error message if the user is not authorized to access Sidekiq - match '/sidekiq' => redirect { |_, request| + # We want to return a nice error message if the user is not authorized to access Sidekiq or Jobs + match %w[/sidekiq /jobs] => redirect { |_, request| request.flash[:error] = 'You are not authorized to perform this action.' '/' }, via: :get diff --git a/docker/Dockerfile.dev b/docker/Dockerfile.dev index a4fe1039..919527a5 100644 --- a/docker/Dockerfile.dev +++ b/docker/Dockerfile.dev @@ -45,7 +45,9 @@ RUN bundle config set --local path 'vendor/bundle' \ # Copy the rest of the application COPY ../. ./ -# Copy entrypoint scripts and grant execution permissions +# Create caching-dev.txt file to enable Rails caching in development +RUN mkdir -p $APP_PATH/tmp && touch $APP_PATH/tmp/caching-dev.txt + COPY ./docker/web-entrypoint.sh /usr/local/bin/web-entrypoint.sh RUN chmod +x /usr/local/bin/web-entrypoint.sh From a95d362b63bdfcfd4a7ae8003e8d739c405a7b4d Mon Sep 17 00:00:00 2001 From: Eugene Burmakin Date: Sat, 31 May 2025 11:57:07 +0200 Subject: [PATCH 3/6] Fix failing tests --- CHANGELOG.md | 2 +- Procfile | 2 +- config/routes.rb | 7 ++++++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 208a3df7..0bf98561 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,7 +8,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/). ## Fixed -- Enable caching in development for the docker image. +- Enable caching in development for the docker image to improve performance. # 0.26.7 - 2025-05-29 diff --git a/Procfile b/Procfile index fd4fe014..d6f4d818 100644 --- a/Procfile +++ b/Procfile @@ -1,2 +1,2 @@ web: bundle exec puma -C config/puma.rb -worker: bundle exec sidekiq -C config/sidekiq.yml +worker: bundle exec bin/jobs diff --git a/config/routes.rb b/config/routes.rb index 69ff069b..31189bc5 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -29,7 +29,12 @@ Rails.application.routes.draw do end # We want to return a nice error message if the user is not authorized to access Sidekiq or Jobs - match %w[/sidekiq /jobs] => redirect { |_, request| + match '/sidekiq' => redirect { |_, request| + request.flash[:error] = 'You are not authorized to perform this action.' + '/' + }, via: :get + + match '/jobs' => redirect { |_, request| request.flash[:error] = 'You are not authorized to perform this action.' 
'/' }, via: :get From 3a955b8e51be8a34f7d8b0cf32bc40cbd8691fab Mon Sep 17 00:00:00 2001 From: Eugene Burmakin Date: Sat, 31 May 2025 14:00:52 +0200 Subject: [PATCH 4/6] Introduce SolidCache --- Gemfile | 1 + Gemfile.lock | 5 +++++ config/cache.yml | 15 ++++++++++++++ config/environments/development.rb | 6 +++--- config/environments/production.rb | 4 ++-- config/initializers/02_version_cache.rb | 8 +++++++- config/initializers/solid_cache.rb | 11 ++++++++++ db/cache_schema.rb | 27 +++++++++++++++++++++++++ 8 files changed, 71 insertions(+), 6 deletions(-) create mode 100644 config/cache.yml create mode 100644 config/initializers/solid_cache.rb create mode 100644 db/cache_schema.rb diff --git a/Gemfile b/Gemfile index e5fd1134..ea404a18 100644 --- a/Gemfile +++ b/Gemfile @@ -44,6 +44,7 @@ gem 'sprockets-rails' gem 'stimulus-rails' gem 'strong_migrations' gem 'solid_cable', '~> 3.0' +gem 'solid_cache', '1.0.7' gem 'solid_queue', '~> 1.1' gem 'tailwindcss-rails' gem 'turbo-rails' diff --git a/Gemfile.lock b/Gemfile.lock index 35372631..091acecb 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -457,6 +457,10 @@ GEM activejob (>= 7.2) activerecord (>= 7.2) railties (>= 7.2) + solid_cache (1.0.7) + activejob (>= 7.2) + activerecord (>= 7.2) + railties (>= 7.2) solid_queue (1.1.5) activejob (>= 7.1) activerecord (>= 7.1) @@ -582,6 +586,7 @@ DEPENDENCIES sidekiq-limit_fetch simplecov solid_cable (~> 3.0) + solid_cache (= 1.0.7) solid_queue (~> 1.1) sprockets-rails stackprof diff --git a/config/cache.yml b/config/cache.yml new file mode 100644 index 00000000..040a2f5e --- /dev/null +++ b/config/cache.yml @@ -0,0 +1,15 @@ +default: &default + store_options: + # Cap age of oldest cache entry to fulfill retention policies + max_age: <%= 60.days.to_i %> + max_size: <%= 256.megabytes %> + namespace: <%= Rails.env %> + +development: + <<: *default + +test: + <<: *default + +production: + <<: *default diff --git a/config/environments/development.rb b/config/environments/development.rb index ff49d090..1ee6dff5 100644 --- a/config/environments/development.rb +++ b/config/environments/development.rb @@ -26,18 +26,18 @@ Rails.application.configure do # Enable/disable caching. By default caching is disabled. # Run rails dev:cache to toggle caching. + config.cache_store = :solid_cache_store + config.solid_cache.connects_to = { database: { writing: :cache } } + if Rails.root.join('tmp/caching-dev.txt').exist? config.action_controller.perform_caching = true config.action_controller.enable_fragment_cache_logging = true - config.cache_store = :redis_cache_store, { url: ENV['REDIS_URL'] } config.public_file_server.headers = { 'Cache-Control' => "public, max-age=#{2.days.to_i}" } else config.action_controller.perform_caching = false - - config.cache_store = :redis_cache_store, { url: ENV['REDIS_URL'] } end config.public_file_server.enabled = true diff --git a/config/environments/production.rb b/config/environments/production.rb index 8b4e7dd1..aac8634c 100644 --- a/config/environments/production.rb +++ b/config/environments/production.rb @@ -73,8 +73,8 @@ Rails.application.configure do config.log_level = ENV.fetch('RAILS_LOG_LEVEL', 'info') # Use a different cache store in production. - # config.cache_store = :mem_cache_store - config.cache_store = :redis_cache_store, { url: ENV['REDIS_URL'] } + config.cache_store = :solid_cache_store + config.solid_cache.connects_to = { database: { writing: :cache } } # Use a real queuing backend for Active Job (and separate queues per environment). 
config.active_job.queue_adapter = :solid_queue diff --git a/config/initializers/02_version_cache.rb b/config/initializers/02_version_cache.rb index c6fed3b3..bf59a9a3 100644 --- a/config/initializers/02_version_cache.rb +++ b/config/initializers/02_version_cache.rb @@ -1,3 +1,9 @@ # frozen_string_literal: true -Rails.cache.delete('dawarich/app-version-check') +# Defer cache operations until after initialization to avoid SolidCache loading issues +Rails.application.config.after_initialize do + # Skip cache clearing when running the Rails console + unless defined?(Rails::Console) || File.basename($PROGRAM_NAME) == 'rails' && ARGV.include?('console') + Rails.cache.delete('dawarich/app-version-check') if Rails.cache.respond_to?(:delete) + end +end diff --git a/config/initializers/solid_cache.rb b/config/initializers/solid_cache.rb new file mode 100644 index 00000000..15f78444 --- /dev/null +++ b/config/initializers/solid_cache.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +# Configure SolidCache +Rails.application.config.to_prepare do + # Only require the entries file as it seems the Entry class is defined there + begin + require 'solid_cache/store/entries' + rescue LoadError => e + Rails.logger.warn "Could not load SolidCache: #{e.message}" + end +end diff --git a/db/cache_schema.rb b/db/cache_schema.rb new file mode 100644 index 00000000..3cf97e44 --- /dev/null +++ b/db/cache_schema.rb @@ -0,0 +1,27 @@ +# This file is auto-generated from the current state of the database. Instead +# of editing this file, please use the migrations feature of Active Record to +# incrementally modify your database, and then regenerate this schema definition. +# +# This file is the source Rails uses to define your schema when running `bin/rails +# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to +# be faster and is potentially less error prone than running all of your +# migrations from scratch. Old migrations may fail to apply correctly if those +# migrations use external dependencies or application code. +# +# It's strongly recommended that you check this file into your version control system. + +ActiveRecord::Schema[8.0].define(version: 1) do + # These are extensions that must be enabled in order to support this database + enable_extension "pg_catalog.plpgsql" + + create_table "solid_cache_entries", force: :cascade do |t| + t.binary "key", null: false + t.binary "value", null: false + t.datetime "created_at", null: false + t.bigint "key_hash", null: false + t.integer "byte_size", null: false + t.index ["byte_size"], name: "index_solid_cache_entries_on_byte_size" + t.index ["key_hash", "byte_size"], name: "index_solid_cache_entries_on_key_hash_and_byte_size" + t.index ["key_hash"], name: "index_solid_cache_entries_on_key_hash", unique: true + end +end From 584d08da7b78f586961fce1937b1857437abcc3f Mon Sep 17 00:00:00 2001 From: Eugene Burmakin Date: Sat, 31 May 2025 14:13:51 +0200 Subject: [PATCH 5/6] Update app version and CHANGELOG.md --- .app_version | 2 +- CHANGELOG.md | 24 +++++++++++++++++++- docker/web-entrypoint.sh | 47 ++++++++++++++++++++++++++++++++-------- 3 files changed, 62 insertions(+), 11 deletions(-) diff --git a/.app_version b/.app_version index f23e1f8a..1b58cc10 100644 --- a/.app_version +++ b/.app_version @@ -1 +1 @@ -0.26.8 +0.27.0 diff --git a/CHANGELOG.md b/CHANGELOG.md index 0bf98561..b8701cd7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,12 +4,34 @@ All notable changes to this project will be documented in this file. 
The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). -# 0.26.8 - 2025-05-30 +# 0.27.0 - 2025-05-31 + +⚠️ This release includes a breaking change. ⚠️ + +This release introduces a new way to run background jobs and cache data. Before updating, make sure your Sidekiq queues (https://your_dawarich_app/sidekiq) are empty. + +Moving to SolidQueue and SolidCache will require creating new databases, which will be created automatically when you start the app. If that didn't happen, you can create them manually and set the following environment variables: + +- `QUEUE_DATABASE_NAME` - name of the queue database +- `QUEUE_DATABASE_PASSWORD` - password for the queue database +- `CACHE_DATABASE_NAME` - name of the cache database +- `CACHE_DATABASE_PASSWORD` - password for the cache database +- `CABLE_DATABASE_NAME` - name of the cable database +- `CABLE_DATABASE_PASSWORD` - password for the cable database + ## Fixed - Enable caching in development for the docker image to improve performance. +## Changed + +- SolidCache is now being used for caching instead of Redis. +- SolidQueue is now being used for background jobs instead of Sidekiq. +- SolidCable is now being used as ActionCable adapter. +- Background jobs are now being run as Puma plugin instead of separate Docker container. + + # 0.26.7 - 2025-05-29 diff --git a/docker/web-entrypoint.sh b/docker/web-entrypoint.sh index a4c275b7..48b3165b 100644 --- a/docker/web-entrypoint.sh +++ b/docker/web-entrypoint.sh @@ -27,16 +27,45 @@ fi # Remove pre-existing puma/passenger server.pid rm -f $APP_PATH/tmp/pids/server.pid -echo "Attempting to create database $DATABASE_NAME if it doesn't exist..." -PGPASSWORD=$DATABASE_PASSWORD createdb -h "$DATABASE_HOST" -p "$DATABASE_PORT" -U "$DATABASE_USERNAME" "$DATABASE_NAME" 2>/dev/null || echo "Note: Database may already exist or couldn't be created now" +# Function to check and create a database +create_database() { + local db_name=$1 + local db_password=$2 -# Wait for the database to become available -echo "⏳ Waiting for database to be ready..." -until PGPASSWORD=$DATABASE_PASSWORD psql -h "$DATABASE_HOST" -p "$DATABASE_PORT" -U "$DATABASE_USERNAME" -d "$DATABASE_NAME" -c '\q' 2>/dev/null; do - >&2 echo "Postgres is unavailable - retrying..." - sleep 2 -done -echo "✅ PostgreSQL is ready!" + echo "Attempting to create database $db_name if it doesn't exist..." + PGPASSWORD=$db_password createdb -h "$DATABASE_HOST" -p "$DATABASE_PORT" -U "$DATABASE_USERNAME" "$db_name" 2>/dev/null || echo "Note: Database $db_name may already exist or couldn't be created now" + + # Wait for the database to become available + echo "⏳ Waiting for database $db_name to be ready..." + until PGPASSWORD=$db_password psql -h "$DATABASE_HOST" -p "$DATABASE_PORT" -U "$DATABASE_USERNAME" -d "$db_name" -c '\q' 2>/dev/null; do + >&2 echo "Postgres database $db_name is unavailable - retrying..." + sleep 2 + done + echo "✅ PostgreSQL database $db_name is ready!" 
+} + +# Create and check primary database +create_database "$DATABASE_NAME" "$DATABASE_PASSWORD" + +# Handle additional databases based on environment +if [ "$RAILS_ENV" = "development" ] || [ "$RAILS_ENV" = "production" ] || [ "$RAILS_ENV" = "staging" ]; then + # Setup Queue database + QUEUE_DATABASE_NAME=${QUEUE_DATABASE_NAME:-${DATABASE_NAME}_queue} + QUEUE_DATABASE_PASSWORD=${QUEUE_DATABASE_PASSWORD:-$DATABASE_PASSWORD} + create_database "$QUEUE_DATABASE_NAME" "$QUEUE_DATABASE_PASSWORD" + + # Setup Cache database + CACHE_DATABASE_NAME=${CACHE_DATABASE_NAME:-${DATABASE_NAME}_cache} + CACHE_DATABASE_PASSWORD=${CACHE_DATABASE_PASSWORD:-$DATABASE_PASSWORD} + create_database "$CACHE_DATABASE_NAME" "$CACHE_DATABASE_PASSWORD" +fi + +# Setup Cable database (only for production and staging) +if [ "$RAILS_ENV" = "production" ] || [ "$RAILS_ENV" = "staging" ]; then + CABLE_DATABASE_NAME=${CABLE_DATABASE_NAME:-${DATABASE_NAME}_cable} + CABLE_DATABASE_PASSWORD=${CABLE_DATABASE_PASSWORD:-$DATABASE_PASSWORD} + create_database "$CABLE_DATABASE_NAME" "$CABLE_DATABASE_PASSWORD" +fi # Run database migrations echo "PostgreSQL is ready. Running database migrations..." From 5705eafacf16e304e8885d5a66ff962eefb61a08 Mon Sep 17 00:00:00 2001 From: Eugene Burmakin Date: Sat, 31 May 2025 14:20:24 +0200 Subject: [PATCH 6/6] Update CHANGELOG.md --- CHANGELOG.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b8701cd7..a807057a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,11 +12,11 @@ This release introduces a new way to run background jobs and cache data. Before Moving to SolidQueue and SolidCache will require creating new databases, which will be created automatically when you start the app. If that didn't happen, you can create them manually and set the following environment variables: -- `QUEUE_DATABASE_NAME` - name of the queue database +- `QUEUE_DATABASE_NAME` - name of the queue database (default is `dawarich_development_queue`) - `QUEUE_DATABASE_PASSWORD` - password for the queue database -- `CACHE_DATABASE_NAME` - name of the cache database +- `CACHE_DATABASE_NAME` - name of the cache database (default is `dawarich_development_cache`) - `CACHE_DATABASE_PASSWORD` - password for the cache database -- `CABLE_DATABASE_NAME` - name of the cable database +- `CABLE_DATABASE_NAME` - name of the cable database (default is `dawarich_development_cable`) - `CABLE_DATABASE_PASSWORD` - password for the cable database
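
The upgrade notes above ask for empty Sidekiq queues before updating and describe the new queue/cache/cable databases. A minimal console sketch for checking both sides of the migration, assuming a Rails console inside the running app container with the sidekiq and solid_queue gems from the Gemfile above; exact queue names and counts will differ per installation:

    # Before upgrading: every Sidekiq queue, plus the retry and scheduled sets, should report 0.
    require 'sidekiq/api'
    Sidekiq::Queue.all.map { |q| [q.name, q.size] }.to_h
    { retries: Sidekiq::RetrySet.new.size, scheduled: Sidekiq::ScheduledSet.new.size }

    # After upgrading: the entries from config/recurring.yml should be registered as static
    # recurring tasks, and freshly enqueued work should show up in solid_queue_jobs.
    SolidQueue::RecurringTask.where(static: true).pluck(:key, :schedule)
    SolidQueue::Job.order(created_at: :desc).limit(5).pluck(:class_name, :queue_name, :finished_at)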