Remove Redis and Sidekiq from Dawarich

parent 296e2c08fa
commit 6d39f4306f

43 changed files with 45 additions and 616 deletions
@@ -1 +1 @@
0.27.1
0.27.2

@@ -16,7 +16,6 @@ jobs:
POSTGRES_USER: postgres
POSTGRES_DB: test_database
POSTGRES_PASSWORD: mysecretpassword
- image: redis:7.0
- image: selenium/standalone-chrome:latest
name: chrome
environment:

@@ -19,7 +19,6 @@ services:
tty: true
environment:
RAILS_ENV: development
REDIS_URL: redis://dawarich_redis:6379/0
DATABASE_HOST: dawarich_db
DATABASE_USERNAME: postgres
DATABASE_PASSWORD: password

@@ -31,21 +30,6 @@ services:
PROMETHEUS_EXPORTER_ENABLED: false
PROMETHEUS_EXPORTER_HOST: 0.0.0.0
PROMETHEUS_EXPORTER_PORT: 9394
dawarich_redis:
image: redis:7.4-alpine
container_name: dawarich_redis
command: redis-server
networks:
- dawarich
volumes:
- dawarich_shared:/data
restart: always
healthcheck:
test: [ "CMD", "redis-cli", "--raw", "incr", "ping" ]
interval: 10s
retries: 5
start_period: 30s
timeout: 10s
dawarich_db:
image: postgis/postgis:17-3.5-alpine
container_name: dawarich_db

@@ -3,4 +3,3 @@ DATABASE_USERNAME=postgres
DATABASE_PASSWORD=password
DATABASE_NAME=dawarich_development
DATABASE_PORT=5432
REDIS_URL=redis://localhost:6379/1

@@ -3,4 +3,3 @@ DATABASE_USERNAME=postgres
DATABASE_PASSWORD=password
DATABASE_NAME=dawarich_test
DATABASE_PORT=5432
REDIS_URL=redis://localhost:6379/1
.github/ISSUE_TEMPLATE/bug_report.md (2 changes)

@@ -30,7 +30,7 @@ A clear and concise description of what you expected to happen.
If applicable, add screenshots to help explain your problem.

**Logs**
If applicable, add logs from containers `dawarich_app` and `dawarich_sidekiq` to help explain your problem.
If applicable, add logs from the `dawarich_app` container to help explain your problem.

**Additional context**
Add any other context about the problem here.
.github/workflows/ci.yml (8 changes)

@@ -20,11 +20,6 @@ jobs:
- 5432:5432
options: --health-cmd="pg_isready" --health-interval=10s --health-timeout=5s --health-retries=3

redis:
image: redis
ports:
- 6379:6379

steps:
- name: Install packages
run: sudo apt-get update && sudo apt-get install --no-install-recommends -y google-chrome-stable curl libjemalloc2 libvips postgresql-client libpq-dev

@@ -58,14 +53,12 @@ jobs:
env:
RAILS_ENV: test
DATABASE_URL: postgres://postgres:postgres@localhost:5432
REDIS_URL: redis://localhost:6379/1
run: bin/rails db:setup

- name: Run main tests (excluding system tests)
env:
RAILS_ENV: test
DATABASE_URL: postgres://postgres:postgres@localhost:5432
REDIS_URL: redis://localhost:6379/1
run: |
bundle exec rspec --exclude-pattern "spec/system/**/*_spec.rb" || (cat log/test.log && exit 1)

@@ -73,7 +66,6 @@ jobs:
env:
RAILS_ENV: test
DATABASE_URL: postgres://postgres:postgres@localhost:5432
REDIS_URL: redis://localhost:6379/1
run: |
bundle exec rspec spec/system/ || (cat log/test.log && exit 1)
CHANGELOG.md (10 changes)

@@ -4,6 +4,16 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).

# 0.27.2 - 2025-06-02

You can now safely remove Redis and Sidekiq from your `docker-compose.yml` file: both containers, their related volumes, environment variables and container dependencies.

## Removed

- Redis and Sidekiq.

# 0.27.1 - 2025-06-01

## Fixed
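For self-hosted setups, a minimal sketch of what the `services` section can look like once the Redis and Sidekiq pieces are gone is shown below. The service, image and environment names follow the ones used elsewhere in this commit; the Postgres data mount point is an assumption and not part of the diff.

```yaml
# Hypothetical slimmed-down compose file after removing Redis and Sidekiq.
services:
  dawarich_db:
    image: postgis/postgis:17-3.5-alpine
    container_name: dawarich_db
    volumes:
      - dawarich_db_data:/var/lib/postgresql/data   # assumed mount point

  dawarich_app:
    image: freikin/dawarich:latest
    container_name: dawarich_app
    environment:
      RAILS_ENV: development
      DATABASE_HOST: dawarich_db
      DATABASE_USERNAME: postgres
      DATABASE_PASSWORD: password
      DATABASE_NAME: dawarich_development
    depends_on:
      - dawarich_db          # no dawarich_redis or dawarich_sidekiq any more

volumes:
  dawarich_db_data:
```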
@@ -7,12 +7,6 @@ Now you can create/prepare the Database (this need to be done once):
bundle exec rails db:prepare
```

Afterwards you can run sidekiq:
```bash
bundle exec sidekiq
```

And in a second terminal the dawarich-app:
```bash
bundle exec bin/dev
```
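With the Sidekiq process gone, the development workflow described in this file presumably collapses to two commands, both already present in the kept lines of this hunk:

```bash
# One-time database setup
bundle exec rails db:prepare

# Start the application; no separate Sidekiq terminal is needed any more
bundle exec bin/dev
```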
Gemfile (7 changes)

@@ -38,9 +38,6 @@ gem 'sentry-ruby'
gem 'sentry-rails'
gem 'sqlite3', '~> 2.6'
gem 'stackprof'
gem 'sidekiq'
gem 'sidekiq-cron'
gem 'sidekiq-limit_fetch'
gem 'sprockets-rails'
gem 'stimulus-rails'
gem 'strong_migrations'

@@ -67,7 +64,6 @@ end

group :test do
gem 'capybara'
gem 'fakeredis'
gem 'selenium-webdriver'
gem 'shoulda-matchers'
gem 'simplecov', require: false

@@ -80,6 +76,3 @@ group :development do
gem 'foreman'
gem 'rubocop-rails', require: false
end

# Use Redis for Action Cable
gem 'redis'
Gemfile.lock (27 changes)

@@ -125,9 +125,6 @@ GEM
bigdecimal
rexml
crass (1.0.6)
cronex (0.15.0)
tzinfo
unicode (>= 0.4.4.5)
csv (3.3.4)
data_migrate (11.2.0)
activerecord (>= 6.1)

@@ -160,7 +157,6 @@ GEM
factory_bot_rails (6.4.4)
factory_bot (~> 6.5)
railties (>= 5.0.0)
fakeredis (0.1.4)
ffaker (2.24.0)
foreman (0.88.1)
fugit (1.11.1)

@@ -348,10 +344,6 @@ GEM
rdoc (6.14.0)
erb
psych (>= 4.0.0)
redis (5.4.0)
redis-client (>= 0.22.0)
redis-client (0.24.0)
connection_pool
regexp_parser (2.10.0)
reline (0.6.1)
io-console (~> 0.5)

@@ -433,19 +425,6 @@ GEM
concurrent-ruby (~> 1.0, >= 1.0.2)
shoulda-matchers (6.5.0)
activesupport (>= 5.2.0)
sidekiq (7.3.9)
base64
connection_pool (>= 2.3.0)
logger
rack (>= 2.2.4)
redis-client (>= 0.22.2)
sidekiq-cron (2.2.0)
cronex (>= 0.13.0)
fugit (~> 1.8, >= 1.11.1)
globalid (>= 1.0.1)
sidekiq (>= 6.5.0)
sidekiq-limit_fetch (4.4.1)
sidekiq (>= 6)
simplecov (0.22.0)
docile (~> 1.1)
simplecov-html (~> 0.11)

@@ -507,7 +486,6 @@ GEM
railties (>= 7.1.0)
tzinfo (2.0.6)
concurrent-ruby (~> 1.0)
unicode (0.4.4.5)
unicode-display_width (3.1.4)
unicode-emoji (~> 4.0, >= 4.0.4)
unicode-emoji (4.0.4)

@@ -553,7 +531,6 @@ DEPENDENCIES
devise
dotenv-rails
factory_bot_rails
fakeredis
ffaker
foreman
geocoder

@@ -573,7 +550,6 @@ DEPENDENCIES
puma
pundit
rails (~> 8.0)
redis
rexml
rgeo
rgeo-activerecord

@@ -587,9 +563,6 @@ DEPENDENCIES
sentry-rails
sentry-ruby
shoulda-matchers
sidekiq
sidekiq-cron
sidekiq-limit_fetch
simplecov
solid_cable (~> 3.0)
solid_cache (= 1.0.7)
@@ -6,9 +6,7 @@ class Settings::BackgroundJobsController < ApplicationController
%w[start_immich_import start_photoprism_import].include?(params[:job_name])
}

def index
@queues = Sidekiq::Queue.all
end
def index;end

def create
EnqueueBackgroundJob.perform_later(params[:job_name], current_user.id)

@@ -25,14 +23,4 @@ class Settings::BackgroundJobsController < ApplicationController

redirect_to redirect_path, notice: 'Job was successfully created.'
end

def destroy
# Clear all jobs in the queue, params[:id] contains queue name
queue = Sidekiq::Queue.new(params[:id])

queue.clear

flash.now[:notice] = 'Queue was successfully cleared.'
redirect_to settings_background_jobs_path, notice: 'Queue was successfully cleared.'
end
end
@@ -2,7 +2,6 @@

class AppVersionCheckingJob < ApplicationJob
queue_as :default
sidekiq_options retry: false

def perform
Rails.cache.delete(CheckAppVersion::VERSION_CACHE_KEY)

@@ -2,7 +2,6 @@

class AreaVisitsCalculatingJob < ApplicationJob
queue_as :default
sidekiq_options retry: false

def perform(user_id)
user = User.find(user_id)

@@ -2,7 +2,6 @@

class AreaVisitsCalculationSchedulingJob < ApplicationJob
queue_as :default
sidekiq_options retry: false

def perform
User.find_each { AreaVisitsCalculatingJob.perform_later(_1.id) }

@@ -4,7 +4,6 @@
# with the default timespan of 1 day.
class BulkVisitsSuggestingJob < ApplicationJob
queue_as :visit_suggesting
sidekiq_options retry: false

# Passing timespan of more than 3 years somehow results in duplicated Places
def perform(start_at: 1.day.ago.beginning_of_day, end_at: 1.day.ago.end_of_day, user_ids: [])

@@ -2,7 +2,6 @@

class Import::GoogleTakeoutJob < ApplicationJob
queue_as :imports
sidekiq_options retry: false

def perform(import_id, locations, current_index)
locations_batch = Oj.load(locations)

@@ -2,7 +2,6 @@

class Import::ImmichGeodataJob < ApplicationJob
queue_as :imports
sidekiq_options retry: false

def perform(user_id)
user = User.find(user_id)

@@ -2,7 +2,6 @@

class Import::PhotoprismGeodataJob < ApplicationJob
queue_as :imports
sidekiq_options retry: false

def perform(user_id)
user = User.find(user_id)

@@ -2,7 +2,6 @@

class Import::WatcherJob < ApplicationJob
queue_as :imports
sidekiq_options retry: false

def perform
return unless DawarichSettings.self_hosted?

@@ -2,7 +2,6 @@

class VisitSuggestingJob < ApplicationJob
queue_as :visit_suggesting
sidekiq_options retry: false

# Passing timespan of more than 3 years somehow results in duplicated Places
def perform(user_id:, start_at:, end_at:)
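Each of these job classes loses its `sidekiq_options retry: false` line. That is safe because with plain Active Job, retries are opt-in per job rather than enabled by default (though the backend adapter may have its own policy), so a job that should retry has to say so explicitly. A hypothetical sketch, with an illustrative class name and error not taken from this commit:

```ruby
class ExampleJob < ApplicationJob
  queue_as :default

  # Active Job does not retry on its own; opt in explicitly where retries are wanted.
  retry_on StandardError, wait: 5.seconds, attempts: 3

  # Or drop the job silently instead of retrying:
  # discard_on ActiveRecord::RecordNotFound

  def perform(user_id)
    # ...
  end
end
```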
@@ -54,6 +54,6 @@ class Tasks::Imports::GoogleRecords
end

def log_success
Rails.logger.info("Imported #{@file_path} for #{@user.email} successfully! Wait for the processing to finish. You can check the status of the import in the Sidekiq UI (http://<your-dawarich-url>/sidekiq).")
Rails.logger.info("Imported #{@file_path} for #{@user.email} successfully! Wait for the processing to finish. You can check the status of the import in the Jobs UI (http://<your-dawarich-url>/jobs).")
end
end
@@ -11,7 +11,7 @@

<% if notification.error? %>
<div class="mt-2">
Please, when reporting a bug to <a href="https://github.com/Freika/dawarich/issues" class="link hover:no-underline text-blue-600">Github Issues</a>, don't forget to include logs from <code>dawarich_app</code> and <code>dawarich_sidekiq</code> docker containers. Thank you!
Please, when reporting a bug to <a href="https://github.com/Freika/dawarich/issues" class="link hover:no-underline text-blue-600">Github Issues</a>, don't forget to include logs from <code>dawarich_app</code> docker container. Thank you!
</div>
<% end %>
</div>
@@ -39,17 +39,15 @@
</div>
</div>
</div>
</div>

<div id="settings_background_jobs" class="min-w-full">
<% @queues.each do |queue| %>
<div class="card shadow-2xl bg-base-300 p-5 m-5">
<h2 class="text-2xl font-bold"><%= queue.name %></h2>
<div class="flex justify-between items-center">
<p class="text-lg">Jobs in queue: <%= queue.size %></p>
<%= link_to 'Clear queue', settings_background_job_path(queue.name), method: :delete, data: { confirm: 'Are you sure?', turbo_confirm: 'Are you sure?', turbo_method: :delete }, class: 'btn btn-primary' %>
<div class="card bg-base-300 w-96 shadow-xl m-5">
<div class="card-body">
<h2 class="card-title">Background Jobs Dashboard</h2>
<p>This will open the background jobs dashboard in a new tab.</p>
<div class="card-actions justify-end">
<%= link_to 'Open Dashboard', mission_control_jobs_url, target: '_blank', class: 'btn btn-primary' %>
</div>
</div>
<% end %>
</div>
</div>
</div>
@@ -34,7 +34,5 @@ module Dawarich
g.routing_specs false
g.helper_specs false
end

config.active_job.queue_adapter = :sidekiq
end
end
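The explicit Sidekiq queue adapter is removed here; the hunk does not show what, if anything, replaces it. Without an explicit setting, Rails falls back to the in-process `:async` adapter. If the app moves to a database-backed backend such as Solid Queue (an assumption, not visible in this diff), the equivalent configuration would look roughly like this:

```ruby
# config/application.rb — hypothetical sketch, not taken from this commit
module Dawarich
  class Application < Rails::Application
    # Run Active Job through Solid Queue instead of Sidekiq
    config.active_job.queue_adapter = :solid_queue
  end
end
```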
@@ -3,7 +3,7 @@
settings = {
timeout: 5,
units: :km,
cache: Redis.new,
cache: Geocoder::CacheStore::Generic.new(Rails.cache, {}),
always_raise: :all,
use_https: PHOTON_API_USE_HTTPS,
http_headers: { 'User-Agent' => "Dawarich #{APP_VERSION} (https://dawarich.app)" },
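The geocoder cache switches from a dedicated Redis connection to whatever `Rails.cache` is configured as (Solid Cache is already in the dependency list). A minimal sketch of the same idea in isolation, assuming the settings hash is eventually passed to `Geocoder.configure`:

```ruby
require 'geocoder'

# Back Geocoder's lookup cache with Rails.cache instead of a separate Redis client.
Geocoder.configure(
  timeout: 5,
  units: :km,
  cache: Geocoder::CacheStore::Generic.new(Rails.cache, {}),
  always_raise: :all
)
```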
@@ -1,31 +1,24 @@
# frozen_string_literal: true

Sidekiq.configure_server do |config|
config.redis = { url: ENV['REDIS_URL'] }
config.logger = Sidekiq::Logger.new($stdout)
# Sidekiq.configure_server do |config|
# if ENV['PROMETHEUS_EXPORTER_ENABLED'].to_s == 'true'
# require 'prometheus_exporter/instrumentation'

if ENV['PROMETHEUS_EXPORTER_ENABLED'].to_s == 'true'
require 'prometheus_exporter/instrumentation'
# # Add middleware for collecting job-level metrics
# config.server_middleware do |chain|
# chain.add PrometheusExporter::Instrumentation::Sidekiq
# end

# Add middleware for collecting job-level metrics
config.server_middleware do |chain|
chain.add PrometheusExporter::Instrumentation::Sidekiq
end
# # Capture metrics for failed jobs
# config.death_handlers << PrometheusExporter::Instrumentation::Sidekiq.death_handler

# Capture metrics for failed jobs
config.death_handlers << PrometheusExporter::Instrumentation::Sidekiq.death_handler
# # Start Prometheus instrumentation
# config.on :startup do
# PrometheusExporter::Instrumentation::SidekiqProcess.start
# PrometheusExporter::Instrumentation::SidekiqQueue.start
# PrometheusExporter::Instrumentation::SidekiqStats.start
# end
# end
# end

# Start Prometheus instrumentation
config.on :startup do
PrometheusExporter::Instrumentation::SidekiqProcess.start
PrometheusExporter::Instrumentation::SidekiqQueue.start
PrometheusExporter::Instrumentation::SidekiqStats.start
end
end
end

Sidekiq.configure_client do |config|
config.redis = { url: ENV['REDIS_URL'] }
end

Sidekiq::Queue['reverse_geocoding'].limit = 1 if Sidekiq.server? && DawarichSettings.photon_uses_komoot_io?
# Sidekiq::Queue['reverse_geocoding'].limit = 1 if Sidekiq.server? && DawarichSettings.photon_uses_komoot_io?
@@ -1,39 +1,18 @@
# frozen_string_literal: true

require 'sidekiq/web'

Rails.application.routes.draw do
mount ActionCable.server => '/cable'
mount Rswag::Api::Engine => '/api-docs'
mount Rswag::Ui::Engine => '/api-docs'

unless DawarichSettings.self_hosted?
Sidekiq::Web.use(Rack::Auth::Basic) do |username, password|
ActiveSupport::SecurityUtils.secure_compare(
::Digest::SHA256.hexdigest(username),
::Digest::SHA256.hexdigest(ENV['SIDEKIQ_USERNAME'])
) &
ActiveSupport::SecurityUtils.secure_compare(
::Digest::SHA256.hexdigest(password),
::Digest::SHA256.hexdigest(ENV['SIDEKIQ_PASSWORD'])
)
end
end

authenticate :user, lambda { |u|
(u.admin? && DawarichSettings.self_hosted?) ||
(u.admin? && ENV['SIDEKIQ_USERNAME'].present? && ENV['SIDEKIQ_PASSWORD'].present?)
} do
mount Sidekiq::Web => '/sidekiq'
mount MissionControl::Jobs::Engine, at: '/jobs'
end

# We want to return a nice error message if the user is not authorized to access Sidekiq or Jobs
match '/sidekiq' => redirect { |_, request|
request.flash[:error] = 'You are not authorized to perform this action.'
'/'
}, via: :get

# We want to return a nice error message if the user is not authorized to access Jobs
match '/jobs' => redirect { |_, request|
request.flash[:error] = 'You are not authorized to perform this action.'
'/'

@@ -41,7 +20,7 @@ Rails.application.routes.draw do

resources :settings, only: :index
namespace :settings do
resources :background_jobs, only: %i[index create destroy]
resources :background_jobs, only: %i[index create]
resources :users, only: %i[index create destroy edit update]
resources :maps, only: %i[index]
patch 'maps', to: 'maps#update'
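For readability, here is a rough sketch of what the job-dashboard part of the routes file plausibly looks like after this change. The exact `authenticate` condition cannot be fully recovered from this view, so the plain admin check shown is an assumption:

```ruby
Rails.application.routes.draw do
  # Mission Control — Jobs replaces the Sidekiq web UI, for admins only
  authenticate :user, lambda { |u| u.admin? } do
    mount MissionControl::Jobs::Engine, at: '/jobs'
  end

  # Unauthorized users get a flash message and a redirect to the root path
  match '/jobs' => redirect { |_, request|
    request.flash[:error] = 'You are not authorized to perform this action.'
    '/'
  }, via: :get
end
```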
@@ -1,10 +0,0 @@
---
:concurrency: <%= ENV.fetch("BACKGROUND_PROCESSING_CONCURRENCY", 10) %>
:queues:
- points
- default
- imports
- exports
- stats
- reverse_geocoding
- visit_suggesting
@@ -60,9 +60,6 @@ RUN mkdir -p $APP_PATH/tmp && touch $APP_PATH/tmp/caching-dev.txt
COPY ./docker/web-entrypoint.sh /usr/local/bin/web-entrypoint.sh
RUN chmod +x /usr/local/bin/web-entrypoint.sh

COPY ./docker/sidekiq-entrypoint.sh /usr/local/bin/sidekiq-entrypoint.sh
RUN chmod +x /usr/local/bin/sidekiq-entrypoint.sh

EXPOSE $RAILS_PORT

ENTRYPOINT ["bundle", "exec"]

@@ -61,9 +61,6 @@ RUN SECRET_KEY_BASE_DUMMY=1 bundle exec rake assets:precompile \
COPY ./docker/web-entrypoint.sh /usr/local/bin/web-entrypoint.sh
RUN chmod +x /usr/local/bin/web-entrypoint.sh

COPY ./docker/sidekiq-entrypoint.sh /usr/local/bin/sidekiq-entrypoint.sh
RUN chmod +x /usr/local/bin/sidekiq-entrypoint.sh

EXPOSE $RAILS_PORT

ENTRYPOINT [ "bundle", "exec" ]
@@ -1,21 +1,6 @@
networks:
dawarich:
services:
dawarich_redis:
image: redis:7.4-alpine
container_name: dawarich_redis
command: redis-server
networks:
- dawarich
volumes:
- dawarich_redis_data:/var/shared/redis
restart: always
healthcheck:
test: [ "CMD", "redis-cli", "--raw", "incr", "ping" ]
interval: 10s
retries: 5
start_period: 30s
timeout: 10s
dawarich_db:
image: postgis/postgis:17-3.5-alpine
shm_size: 1G

@@ -54,7 +39,6 @@ services:
restart: on-failure
environment:
RAILS_ENV: production
REDIS_URL: redis://dawarich_redis:6379/0
DATABASE_HOST: dawarich_db
DATABASE_PORT: 5432
DATABASE_USERNAME: postgres

@@ -85,66 +69,6 @@ services:
dawarich_db:
condition: service_healthy
restart: true
dawarich_redis:
condition: service_healthy
restart: true
deploy:
resources:
limits:
cpus: '0.50' # Limit CPU usage to 50% of one core
memory: '4G' # Limit memory usage to 2GB
dawarich_sidekiq:
image: dawarich:prod
container_name: dawarich_sidekiq
volumes:
- dawarich_public:/var/app/public
- dawarich_watched:/var/app/tmp/imports/watched
- dawarich_storage:/var/app/storage
networks:
- dawarich
stdin_open: true
tty: true
entrypoint: sidekiq-entrypoint.sh
command: ['bundle', 'exec', 'sidekiq']
restart: on-failure
environment:
RAILS_ENV: production
REDIS_URL: redis://dawarich_redis:6379/0
DATABASE_HOST: dawarich_db
DATABASE_PORT: 5432
DATABASE_USERNAME: postgres
DATABASE_PASSWORD: password
DATABASE_NAME: dawarich_production
APPLICATION_HOSTS: localhost,::1,127.0.0.1
BACKGROUND_PROCESSING_CONCURRENCY: 10
APPLICATION_PROTOCOL: http
PROMETHEUS_EXPORTER_ENABLED: false
PROMETHEUS_EXPORTER_HOST: dawarich_app
PROMETHEUS_EXPORTER_PORT: 9394
SECRET_KEY_BASE: 1234567890
RAILS_LOG_TO_STDOUT: "true"
STORE_GEODATA: "true"
logging:
driver: "json-file"
options:
max-size: "100m"
max-file: "5"
healthcheck:
test: [ "CMD-SHELL", "bundle exec sidekiqmon processes | grep $${HOSTNAME}" ]
interval: 10s
retries: 30
start_period: 30s
timeout: 10s
depends_on:
dawarich_db:
condition: service_healthy
restart: true
dawarich_redis:
condition: service_healthy
restart: true
dawarich_app:
condition: service_healthy
restart: true
deploy:
resources:
limits:

@@ -153,7 +77,6 @@ services:

volumes:
dawarich_db_data:
dawarich_redis_data:
dawarich_public:
dawarich_watched:
dawarich_storage:
@@ -1,21 +1,6 @@
networks:
dawarich:
services:
dawarich_redis:
image: redis:7.0-alpine
container_name: dawarich_redis
command: redis-server
networks:
- dawarich
volumes:
- dawarich_shared:/data
restart: always
healthcheck:
test: [ "CMD", "redis-cli", "--raw", "incr", "ping" ]
interval: 10s
retries: 5
start_period: 30s
timeout: 10s
dawarich_db:
image: postgis/postgis:17-3.5-alpine
shm_size: 1G

@@ -58,7 +43,6 @@ services:
restart: on-failure
environment:
RAILS_ENV: development
REDIS_URL: redis://dawarich_redis:6379/0
DATABASE_HOST: dawarich_db
DATABASE_USERNAME: postgres
DATABASE_PASSWORD: password

@@ -91,64 +75,6 @@ services:
dawarich_db:
condition: service_healthy
restart: true
dawarich_redis:
condition: service_healthy
restart: true
deploy:
resources:
limits:
cpus: '0.50' # Limit CPU usage to 50% of one core
memory: '4G' # Limit memory usage to 4GB
dawarich_sidekiq:
image: freikin/dawarich:latest
container_name: dawarich_sidekiq
volumes:
- dawarich_public:/var/app/public
- dawarich_watched:/var/app/tmp/imports/watched
- dawarich_storage:/var/app/storage
networks:
- dawarich
stdin_open: true
tty: true
entrypoint: sidekiq-entrypoint.sh
command: ['sidekiq']
restart: on-failure
environment:
RAILS_ENV: development
REDIS_URL: redis://dawarich_redis:6379/0
DATABASE_HOST: dawarich_db
DATABASE_USERNAME: postgres
DATABASE_PASSWORD: password
DATABASE_NAME: dawarich_development
APPLICATION_HOSTS: localhost
BACKGROUND_PROCESSING_CONCURRENCY: 10
APPLICATION_PROTOCOL: http
PROMETHEUS_EXPORTER_ENABLED: false
PROMETHEUS_EXPORTER_HOST: dawarich_app
PROMETHEUS_EXPORTER_PORT: 9394
SELF_HOSTED: "true"
STORE_GEODATA: "true"
logging:
driver: "json-file"
options:
max-size: "100m"
max-file: "5"
healthcheck:
test: [ "CMD-SHELL", "bundle exec sidekiqmon processes | grep $${HOSTNAME}" ]
interval: 10s
retries: 30
start_period: 30s
timeout: 10s
depends_on:
dawarich_db:
condition: service_healthy
restart: true
dawarich_redis:
condition: service_healthy
restart: true
dawarich_app:
condition: service_healthy
restart: true
deploy:
resources:
limits:
@@ -1,36 +0,0 @@
#!/bin/sh

unset BUNDLE_PATH
unset BUNDLE_BIN

set -e

echo "⚠️ Starting Sidekiq in $RAILS_ENV environment ⚠️"

# Parse DATABASE_URL if present, otherwise use individual variables
if [ -n "$DATABASE_URL" ]; then
# Extract components from DATABASE_URL
DATABASE_HOST=$(echo $DATABASE_URL | awk -F[@/] '{print $4}')
DATABASE_PORT=$(echo $DATABASE_URL | awk -F[@/:] '{print $5}')
DATABASE_USERNAME=$(echo $DATABASE_URL | awk -F[:/@] '{print $4}')
DATABASE_PASSWORD=$(echo $DATABASE_URL | awk -F[:/@] '{print $5}')
DATABASE_NAME=$(echo $DATABASE_URL | awk -F[@/] '{print $5}')
else
# Use existing environment variables
DATABASE_HOST=${DATABASE_HOST}
DATABASE_PORT=${DATABASE_PORT}
DATABASE_USERNAME=${DATABASE_USERNAME}
DATABASE_PASSWORD=${DATABASE_PASSWORD}
DATABASE_NAME=${DATABASE_NAME}
fi

# Wait for the database to become available
echo "⏳ Waiting for database to be ready..."
until PGPASSWORD=$DATABASE_PASSWORD psql -h "$DATABASE_HOST" -p "$DATABASE_PORT" -U "$DATABASE_USERNAME" -d "$DATABASE_NAME" -c '\q'; do
>&2 echo "Postgres is unavailable - retrying..."
sleep 2
done
echo "✅ PostgreSQL is ready!"

# run sidekiq
bundle exec sidekiq
@@ -6,7 +6,7 @@

- Kubernetes cluster and basic kubectl knowledge.
- Some persistent storage class prepared, in this example, Longhorn.
- Working Postgres and Redis instances. In this example Postgres lives in 'db' namespace and Redis in 'redis' namespace.
- Working Postgres instances. In this example Postgres lives in 'db' namespace.
- Nginx ingress controller with Letsencrypt integration.
- This example uses 'example.com' as a domain name, you want to change it to your own.
- This will work on IPv4 and IPv6 Single Stack clusters, as well as Dual Stack deployments.
@@ -80,8 +80,6 @@ spec:
value: "Europe/Prague"
- name: RAILS_ENV
value: development
- name: REDIS_URL
value: redis://redis-master.redis.svc.cluster.local:6379/10
- name: DATABASE_HOST
value: postgres-postgresql.db.svc.cluster.local
- name: DATABASE_PORT

@@ -128,79 +126,10 @@ spec:
cpu: "2000m"
ports:
- containerPort: 3000
- name: dawarich-sidekiq
env:
- name: RAILS_ENV
value: development
- name: REDIS_URL
value: redis://redis-master.redis.svc.cluster.local:6379/10
- name: DATABASE_HOST
value: postgres-postgresql.db.svc.cluster.local
- name: DATABASE_PORT
value: "5432"
- name: DATABASE_USERNAME
value: postgres
- name: DATABASE_PASSWORD
value: Password123!
- name: DATABASE_NAME
value: dawarich_development
- name: RAILS_MIN_THREADS
value: "5"
- name: RAILS_MAX_THREADS
value: "10"
- name: BACKGROUND_PROCESSING_CONCURRENCY
value: "20"
- name: APPLICATION_HOST
value: localhost
- name: APPLICATION_HOSTS
value: "dawarich.example.com, localhost"
- name: APPLICATION_PROTOCOL
value: http
- name: PHOTON_API_HOST
value: photon.komoot.io
- name: PHOTON_API_USE_HTTPS
value: "true"
image: freikin/dawarich:0.16.4
imagePullPolicy: Always
volumeMounts:
- mountPath: /var/app/public
name: public
- mountPath: /var/app/tmp/imports/watched
name: watched
command:
- "sidekiq-entrypoint.sh"
args:
- "bundle exec sidekiq"
resources:
requests:
memory: "1Gi"
cpu: "250m"
limits:
memory: "3Gi"
cpu: "1500m"
livenessProbe:
httpGet:
path: /api/v1/health
port: 3000
initialDelaySeconds: 60
periodSeconds: 10
timeoutSeconds: 5
failureThreshold: 3
readinessProbe:
httpGet:
path: /
port: 3000
initialDelaySeconds: 5
periodSeconds: 10
timeoutSeconds: 3
failureThreshold: 3
volumes:
- name: gem-cache
persistentVolumeClaim:
claimName: gem-cache
- name: gem-sidekiq
persistentVolumeClaim:
claimName: gem-sidekiq
- name: public
persistentVolumeClaim:
claimName: public
@@ -29,7 +29,7 @@ If you don't want to use dedicated share for projects installed by docker skip i
### Dawarich root folder
1. Open your [Docker root folder](#docker-root-share) in **File station**.
2. Create new folder **dawarich** and open it.
3. Create folders **redis**, **db_data**, **db_shared** and **public** in **dawarich** folder.
3. Create folders **db_data**, **db_shared** and **public** in **dawarich** folder.
4. Copy [docker compose](synology/docker-compose.yml) and [.env](synology/.env) files from **synology** repo folder into **dawarich** folder on your synology.

# Installation
@@ -17,17 +17,6 @@ dawarich_app:
APPLICATION_HOSTS: "yourhost.com,www.yourhost.com,127.0.0.1" <-- Edit this
```

```yaml
dawarich_sidekiq:
image: freikin/dawarich:latest
container_name: dawarich_sidekiq
...
environment:
...
APPLICATION_HOSTS: "yourhost.com,www.yourhost.com,127.0.0.1" <-- Edit this
...
```

For a Synology install, refer to **[Synology Install Tutorial](How_to_install_Dawarich_on_Synology.md)**. In this page, it is explained how to set the APPLICATION_HOSTS environment variable.

### Virtual Host
@@ -1,14 +1,6 @@
version: '3'

services:
dawarich_redis:
image: redis:7.0-alpine
container_name: dawarich_redis
command: redis-server
restart: unless-stopped
volumes:
- ./redis:/var/shared/redis

dawarich_db:
image: postgis/postgis:17-3.5-alpine
container_name: dawarich_db

@@ -25,7 +17,6 @@ services:
container_name: dawarich_app
depends_on:
- dawarich_db
- dawarich_redis
stdin_open: true
tty: true
entrypoint: web-entrypoint.sh

@@ -38,19 +29,3 @@ services:
- ./app_storage:/var/app/storage
ports:
- 32568:3000

dawarich_sidekiq:
image: freikin/dawarich:latest
container_name: dawarich_sidekiq
depends_on:
- dawarich_db
- dawarich_redis
- dawarich_app
entrypoint: sidekiq-entrypoint.sh
command: ['sidekiq']
restart: unless-stopped
env_file:
- .env
volumes:
- ./public:/var/app/public
- ./app_storage:/var/app/storage
@@ -110,10 +110,4 @@ RSpec.describe VisitSuggestingJob, type: :job do
expect(described_class.queue_name).to eq('visit_suggesting')
end
end

describe 'sidekiq options' do
it 'has retry disabled' do
expect(described_class.sidekiq_options_hash['retry']).to be false
end
end
end
@@ -7,7 +7,6 @@ require_relative '../config/environment'
abort('The Rails environment is running in production mode!') if Rails.env.production?
require 'rspec/rails'
require 'rswag/specs'
require 'sidekiq/testing'
require 'super_diff/rspec-rails'

require 'rake'
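With `sidekiq/testing` removed from the test harness, job expectations would typically be written against Active Job's test adapter instead. A hypothetical example; the job class exists in this repo, but the specific arguments are illustrative, modelled on the controller shown earlier:

```ruby
require 'rails_helper'

RSpec.describe EnqueueBackgroundJob, type: :job do
  include ActiveJob::TestHelper

  it 'enqueues with the job name and user id' do
    expect { described_class.perform_later('start_immich_import', 1) }
      .to have_enqueued_job(described_class).with('start_immich_import', 1)
  end
end
```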
@@ -90,23 +90,6 @@ RSpec.describe '/settings/background_jobs', type: :request do
end
end
end

describe 'DELETE /destroy' do
it 'clears the Sidekiq queue' do
queue = instance_double(Sidekiq::Queue)
allow(Sidekiq::Queue).to receive(:new).and_return(queue)

expect(queue).to receive(:clear)

delete settings_background_job_url('queue_name')
end

it 'redirects to the settings_background_jobs list' do
delete settings_background_job_url('queue_name')

expect(response).to redirect_to(settings_background_jobs_url)
end
end
end
end
end
@@ -1,125 +0,0 @@
# frozen_string_literal: true

require 'rails_helper'
require 'sidekiq/web'

RSpec.describe '/sidekiq', type: :request do
before do
# Allow any ENV key to be accessed and return nil by default
allow(ENV).to receive(:[]).and_return(nil)

# Stub Sidekiq::Web with a simple Rack app for testing
allow(Sidekiq::Web).to receive(:call) do |_env|
[200, { 'Content-Type' => 'text/html' }, ['Sidekiq Web UI']]
end
end

context 'when Dawarich is in self-hosted mode' do
before do
allow(DawarichSettings).to receive(:self_hosted?).and_return(true)
allow(ENV).to receive(:[]).with('SIDEKIQ_USERNAME').and_return(nil)
allow(ENV).to receive(:[]).with('SIDEKIQ_PASSWORD').and_return(nil)
end

context 'when user is not authenticated' do
it 'redirects to sign in page' do
get sidekiq_url

expect(response).to redirect_to('/users/sign_in')
end
end

context 'when user is authenticated' do
context 'when user is not admin' do
before { sign_in create(:user) }

it 'redirects to root page' do
get sidekiq_url

expect(response).to redirect_to(root_url)
end

it 'shows flash message' do
get sidekiq_url

expect(flash[:error]).to eq('You are not authorized to perform this action.')
end
end

context 'when user is admin' do
before { sign_in create(:user, :admin) }

it 'renders a successful response' do
get sidekiq_url

expect(response).to be_successful
end
end
end
end

context 'when Dawarich is not in self-hosted mode' do
before do
allow(DawarichSettings).to receive(:self_hosted?).and_return(false)
allow(ENV).to receive(:[]).with('SIDEKIQ_USERNAME').and_return(nil)
allow(ENV).to receive(:[]).with('SIDEKIQ_PASSWORD').and_return(nil)
Rails.application.reload_routes!
end

context 'when user is not authenticated' do
it 'redirects to sign in page' do
get sidekiq_url

expect(response).to redirect_to('/users/sign_in')
end
end

context 'when user is authenticated' do
before { sign_in create(:user, :admin) }

it 'redirects to root page' do
get sidekiq_url

expect(response).to redirect_to(root_url)
expect(flash[:error]).to eq('You are not authorized to perform this action.')
end
end
end

context 'when SIDEKIQ_USERNAME and SIDEKIQ_PASSWORD are set' do
before do
allow(DawarichSettings).to receive(:self_hosted?).and_return(false)
allow(ENV).to receive(:[]).with('SIDEKIQ_USERNAME').and_return('admin')
allow(ENV).to receive(:[]).with('SIDEKIQ_PASSWORD').and_return('password')
end

context 'when user is not authenticated' do
it 'redirects to sign in page' do
get sidekiq_url

expect(response).to redirect_to('/users/sign_in')
end
end

context 'when user is not admin' do
before { sign_in create(:user) }

it 'redirects to root page' do
get sidekiq_url

expect(response).to redirect_to(root_url)
expect(flash[:error]).to eq('You are not authorized to perform this action.')
end
end

context 'when user is admin' do
before { sign_in create(:user, :admin) }

it 'renders a successful response' do
get sidekiq_url

expect(response).to be_successful
end
end
end
end
@@ -12,8 +12,6 @@ RSpec.describe Imports::Watcher do
stub_const('Imports::Watcher::WATCHED_DIR_PATH', watched_dir_path)
end

after { Sidekiq::Testing.fake! }

context 'when user exists' do
let!(:user) { create(:user, email: 'user@domain.com') }