Move import processing to a background job & allow disabling reverse geocoding

Eugene Burmakin, 2024-03-24 18:05:39 +01:00
parent 2e4390f194, commit 7292115a44
21 changed files with 183 additions and 67 deletions

.env.development

@@ -3,4 +3,4 @@ DATABASE_USERNAME=postgres
 DATABASE_PASSWORD=password
 DATABASE_NAME=dawarich_development
 DATABASE_PORT=5432
-REDIS_URL=redis://dawarich_redis:6379/1
+REDIS_URL=redis://localhost:6379/1

.env.test

@@ -3,4 +3,4 @@ DATABASE_USERNAME=postgres
 DATABASE_PASSWORD=password
 DATABASE_NAME=dawarich_test
 DATABASE_PORT=5432
-REDIS_URL=redis://dawarich_redis:6379/1
+REDIS_URL=redis://localhost:6379/1
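
Sidekiq reads REDIS_URL from the environment by default, so pointing it at localhost instead of the dawarich_redis container hostname is all a non-Docker development setup needs; no extra initializer is involved. A quick connectivity check from a Rails console (a sketch, assuming Sidekiq 7's redis-client connections):

    require 'sidekiq'

    # Returns "PONG" when redis://localhost:6379/1 is reachable.
    Sidekiq.redis { |conn| conn.call('PING') }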

Makefile

@@ -67,7 +67,7 @@ production_migrate:
 	ssh dokku_frey 'dokku run dawarich bundle exec rails db:migrate'

 build_and_push:
-	git tag -l "$(version)"
+	git tag -a "$(version)" -f -m "$(version)"
 	docker build . -t dawarich:$(version) --platform=linux/amd64
 	docker tag dawarich:$(version) registry.chibi.rodeo/dawarich:$(version)
 	docker push registry.chibi.rodeo/dawarich:$(version)

Procfile.dev

@@ -1,2 +1,3 @@
 web: bin/rails server -p 3000 -b 0.0.0.0
 css: bin/rails tailwindcss:watch
+worker: bundle exec sidekiq

app/controllers/imports_controller.rb

@@ -15,29 +15,19 @@ class ImportsController < ApplicationController
   def create
     files = import_params[:files].reject(&:blank?)
-    imports = []
-    report = ''

     files.each do |file|
-      json = JSON.parse(file.read)
-      import = current_user.imports.create(name: file.original_filename, source: params[:import][:source])
-      result = parser.new(file.path, import.id).call
+      import = current_user.imports.create(
+        name: file.original_filename,
+        source: params[:import][:source],
+      )

-      if result[:points].zero?
-        import.destroy!
-      else
-        import.update(raw_points: result[:raw_points], doubles: result[:doubles])
-        imports << import
-      end
+      import.file.attach(file)
     end

-    StatCreatingJob.perform_later(current_user.id)
-    redirect_to imports_url, notice: "#{imports.size} import files were imported successfully", status: :see_other
+    redirect_to imports_url, notice: "#{files.size} files are queued to be imported in background", status: :see_other
   rescue StandardError => e
-    imports.each { |import| import&.destroy! }
+    Import.where(user: current_user, name: files.map(&:original_filename)).destroy_all

     Rails.logger.debug e.message
     flash.now[:error] = e.message
     redirect_to new_import_path, notice: e.message, status: :unprocessable_entity

@@ -57,11 +47,4 @@ class ImportsController < ApplicationController
   def import_params
     params.require(:import).permit(:source, files: [])
   end
-
-  def parser
-    case params[:import][:source]
-    when 'google' then GoogleMaps::TimelineParser
-    when 'owntracks' then OwnTracks::ExportParser
-    end
-  end
 end
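
The create action now only persists Import records and attaches the uploaded files; all parsing moves to ImportJob, queued from the model callback further down. A hypothetical request spec for the new contract (assumes the ActiveJob test adapter and the existing OwnTracks fixture):

    it 'queues background processing instead of parsing inline' do
      file = fixture_file_upload('owntracks_export.json', 'application/json')

      expect {
        post imports_url, params: { import: { source: 'owntracks', files: [file] } }
      }.to have_enqueued_job(ImportJob) # enqueued by Import's after_create_commit
    end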

app/jobs/import_job.rb (new file, 28 lines)

@@ -0,0 +1,28 @@
class ImportJob < ApplicationJob
  queue_as :default

  def perform(user_id, import_id)
    user = User.find(user_id)
    import = user.imports.find(import_id)
    file = import.file

    sleep 3 # give Active Storage a moment to finish attaching the uploaded file

    result = parser(import.source).new(import).call

    import.update(
      raw_points: result[:raw_points], doubles: result[:doubles], processed: result[:processed]
    )

    StatCreatingJob.perform_later(user_id)
  end

  private

  def parser(source)
    case source
    when 'google' then GoogleMaps::TimelineParser
    when 'owntracks' then OwnTracks::ExportParser
    end
  end
end
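
The sleep is a pragmatic workaround: a worker can pick the job up before Active Storage has finished attaching the upload, since the enqueueing callback fires on create, before attach. A fixed delay can still lose that race; one alternative, sketched here and not part of this commit, is to re-enqueue until the attachment is visible:

    def perform(user_id, import_id)
      import = User.find(user_id).imports.find(import_id)

      # Push the job back onto the queue instead of sleeping a fixed interval.
      return retry_job(wait: 5.seconds) unless import.file.attached?

      # ... parse as above ...
    end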

app/jobs/reverse_geocoding_job.rb

@@ -1,5 +1,5 @@
 class ReverseGeocodingJob < ApplicationJob
-  queue_as :default
+  queue_as :low

   def perform(point_id)
     point = Point.find(point_id)

app/models/import.rb

@@ -2,5 +2,15 @@ class Import < ApplicationRecord
   belongs_to :user
   has_many :points, dependent: :destroy
+  has_one_attached :file

   enum source: { google: 0, owntracks: 1 }

+  after_create_commit :async_import
+
+  private
+
+  def async_import
+    ImportJob.perform_later(user.id, self.id)
+  end
 end
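
after_create_commit fires only once the INSERT transaction commits, so the job's User.find / imports.find cannot hit an uncommitted record. It does, however, fire before the controller attaches the file, which is exactly the race the sleep in ImportJob papers over. A console walkthrough (illustrative paths and names):

    import = user.imports.create!(name: 'export.json', source: :owntracks)
    # => ImportJob has already been enqueued at this point
    import.file.attach(io: File.open('tmp/export.json'), filename: 'export.json')
    # the worker waits briefly, then downloads the attachment and parses it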

app/models/point.rb

@@ -16,6 +16,8 @@ class Point < ApplicationRecord
   private

   def async_reverse_geocode
+    return unless REVERSE_GEOCODING_ENABLED
+
     ReverseGeocodingJob.perform_later(id)
   end
 end

app/services/google_maps/timeline_parser.rb

@@ -1,16 +1,11 @@
 # frozen_string_literal: true

 class GoogleMaps::TimelineParser
-  attr_reader :file_path, :file, :json, :import_id
+  attr_reader :import, :json

-  def initialize(file_path, import_id = nil)
-    @file_path = file_path
-    raise 'File not found' unless File.exist?(@file_path)
-    @file = File.read(@file_path)
-    @json = JSON.parse(@file)
-    @import_id = import_id
+  def initialize(import)
+    @import = import
+    @json = JSON.parse(import.file.download)
   end

   def call

@@ -28,15 +23,16 @@ class GoogleMaps::TimelineParser
         raw_data: point_data[:raw_data],
         topic: 'Google Maps Timeline Export',
         tracker_id: 'google-maps-timeline-export',
-        import_id: import_id
+        import_id: import.id
       )

       points += 1
     end

     doubles = points_data.size - points
+    processed = points + doubles

-    { raw_points: points_data.size, points: points, doubles: doubles }
+    { raw_points: points_data.size, points: points, doubles: doubles, processed: processed }
   end

   private
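
The parser no longer reads a path on local disk; it pulls the blob back out of Active Storage, so the Sidekiq worker needs no access to the web process's tmp files. Note that import.file.download returns the entire blob as one string, which is fine for typical exports but loads everything into memory; for very large timelines a tempfile-based variant is possible (a sketch, not part of this commit):

    # ActiveStorage::Blob#open streams the blob into a tempfile, which would let
    # a streaming JSON parser work through it chunk by chunk.
    import.file.open do |tempfile|
      json = JSON.parse(tempfile.read) # drop-in equivalent of download
    end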

app/services/own_tracks/export_parser.rb

@@ -1,16 +1,11 @@
 # frozen_string_literal: true

 class OwnTracks::ExportParser
-  attr_reader :file_path, :file, :json, :import_id
+  attr_reader :import, :json

-  def initialize(file_path, import_id = nil)
-    @file_path = file_path
-    raise 'File not found' unless File.exist?(@file_path)
-    @file = File.read(@file_path)
-    @json = JSON.parse(@file)
-    @import_id = import_id
+  def initialize(import)
+    @import = import
+    @json = JSON.parse(import.file.download)
   end

   def call

@@ -28,15 +23,16 @@ class OwnTracks::ExportParser
         raw_data: point_data[:raw_data],
         topic: point_data[:topic],
         tracker_id: point_data[:tracker_id],
-        import_id: import_id
+        import_id: import.id
       )

       points += 1
     end

     doubles = points_data.size - points
+    processed = points + doubles

-    { raw_points: points_data.size, points: points, doubles: doubles }
+    { raw_points: points_data.size, points: points, doubles: doubles, processed: processed }
   end

   private
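
The new processed counter is simply everything the parser looked at, created points plus skipped duplicates, so a finished import always satisfies processed == raw_points. Worked through for a small export:

    raw_points = 10                  # rows in the uploaded file
    points     = 8                   # newly created Point records
    doubles    = raw_points - points # => 2 already existed
    processed  = points + doubles    # => 10, equal to raw_points when done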

app/views/imports/index.html.erb

@@ -11,8 +11,7 @@
     <thead>
       <tr>
         <th>Name</th>
-        <th>Raw points</th>
-        <th>Points created</th>
+        <th>Processed</th>
         <th>Doubles</th>
         <th>Created at</th>
       </tr>

@@ -23,9 +22,11 @@
       <td>
         <%= link_to import.name, import, class: 'underline hover:no-underline' %> (<%= import.source %>)
       </td>
-      <td><%= import.raw_points %></td>
+      <td>
+        <%= "✅" if import.processed == import.raw_points %>
+        <%= "#{import.processed}/#{import.raw_points}" %>
+      </td>
       <td><%= import.doubles %></td>
-      <td><%= import.points.count %></td>
       <td><%= import.created_at.strftime("%d.%m.%Y, %H:%M") %></td>
     </tr>
   <% end %>
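
Because imports now finish asynchronously, the Processed column doubles as a progress indicator: a fraction while the job is running, a checkmark once every raw point is accounted for. The per-row logic, evaluated outside ERB for clarity:

    import = Import.new(processed: 990, raw_points: 1000)
    "#{import.processed}/#{import.raw_points}"      # => "990/1000" (still running)

    import.processed = 1000
    ("✅" if import.processed == import.raw_points) # => "✅" (finished)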

app/views/stats/index.html.erb

@@ -1,6 +1,6 @@
 <%= "#{@distance} km" if @distance %>

-<% if @countries_and_cities&.any? %>
+<% if REVERSE_GEOCODING_ENABLED && @countries_and_cities&.any? %>
   <% @countries_and_cities.each do |country| %>
     <h2 class="text-lg font-semibold mt-5">
       <%= country[:country] %> (<%= country[:cities].count %> cities)

config/application.rb

@@ -32,5 +32,7 @@ module Dawarich
       g.routing_specs false
       g.helper_specs false
     end
+
+    config.active_job.queue_adapter = :sidekiq
   end
 end
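
Without this line Rails 7 falls back to the :async adapter, which runs jobs on an in-process thread pool and loses them on restart; :sidekiq makes perform_later serialize jobs to Redis for the worker process added to Procfile.dev. Observable from a console:

    Rails.application.config.active_job.queue_adapter # => :sidekiq
    ImportJob.perform_later(user.id, import.id)       # pushes a payload to Redis instead of running inline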

config/initializers/constants.rb

@@ -1,2 +1,3 @@
 MINIMUM_POINTS_IN_CITY = ENV.fetch('MINIMUM_POINTS_IN_CITY', 5).to_i
 MAP_CENTER = ENV.fetch('MAP_CENTER', '[55.7522, 37.6156]')
+REVERSE_GEOCODING_ENABLED = ENV.fetch('REVERSE_GEOCODING_ENABLED', 'true') == 'true'
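
The flag is an exact string comparison, with geocoding enabled by default; any value other than 'true' (e.g. 'false', '0', 'no') turns it off:

    ENV['REVERSE_GEOCODING_ENABLED'] = 'false'
    ENV.fetch('REVERSE_GEOCODING_ENABLED', 'true') == 'true' # => false

    ENV.delete('REVERSE_GEOCODING_ENABLED')
    ENV.fetch('REVERSE_GEOCODING_ENABLED', 'true') == 'true' # => true (default)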

config/sidekiq.yml (new file, 4 lines)

@@ -0,0 +1,4 @@
:queues:
  - critical
  - default
  - low
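
Listing queues without weights makes Sidekiq drain them strictly in order: critical before default, and low (where reverse geocoding now lives) only when nothing else is waiting, so a geocoding backlog cannot delay imports. Jobs opt in declaratively, as ReverseGeocodingJob does above:

    class ReverseGeocodingJob < ApplicationJob
      queue_as :low # processed only when :critical and :default are empty
    end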

db/migrate/…_create_active_storage_tables.active_storage.rb (new file, 57 lines)

@@ -0,0 +1,57 @@
# This migration comes from active_storage (originally 20170806125915)
class CreateActiveStorageTables < ActiveRecord::Migration[7.0]
  def change
    # Use Active Record's configured type for primary and foreign keys
    primary_key_type, foreign_key_type = primary_and_foreign_key_types

    create_table :active_storage_blobs, id: primary_key_type do |t|
      t.string :key, null: false
      t.string :filename, null: false
      t.string :content_type
      t.text :metadata
      t.string :service_name, null: false
      t.bigint :byte_size, null: false
      t.string :checksum

      if connection.supports_datetime_with_precision?
        t.datetime :created_at, precision: 6, null: false
      else
        t.datetime :created_at, null: false
      end

      t.index [ :key ], unique: true
    end

    create_table :active_storage_attachments, id: primary_key_type do |t|
      t.string :name, null: false
      t.references :record, null: false, polymorphic: true, index: false, type: foreign_key_type
      t.references :blob, null: false, type: foreign_key_type

      if connection.supports_datetime_with_precision?
        t.datetime :created_at, precision: 6, null: false
      else
        t.datetime :created_at, null: false
      end

      t.index [ :record_type, :record_id, :name, :blob_id ], name: :index_active_storage_attachments_uniqueness, unique: true
      t.foreign_key :active_storage_blobs, column: :blob_id
    end

    create_table :active_storage_variant_records, id: primary_key_type do |t|
      t.belongs_to :blob, null: false, index: false, type: foreign_key_type
      t.string :variation_digest, null: false

      t.index [ :blob_id, :variation_digest ], name: :index_active_storage_variant_records_uniqueness, unique: true
      t.foreign_key :active_storage_blobs, column: :blob_id
    end
  end

  private

  def primary_and_foreign_key_types
    config = Rails.configuration.generators
    setting = config.options[config.orm][:primary_key_type]
    primary_key_type = setting || :primary_key
    foreign_key_type = setting || :bigint
    [primary_key_type, foreign_key_type]
  end
end

db/migrate/20240324161800_add_processed_to_imports.rb (new file, 5 lines)

@@ -0,0 +1,5 @@
class AddProcessedToImports < ActiveRecord::Migration[7.1]
  def change
    add_column :imports, :processed, :integer, default: 0
  end
end
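
Existing imports keep processed = 0 after this migration, so historical rows render as "0/<raw_points>" without a checkmark. If that matters, a one-off backfill could mark fully imported rows as done (hypothetical, not part of this commit):

    # Treat every pre-existing import as fully processed.
    Import.find_each do |import|
      import.update_columns(processed: import.raw_points)
    end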

db/schema.rb (generated)

@@ -10,10 +10,38 @@
 #
 # It's strongly recommended that you check this file into your version control system.

-ActiveRecord::Schema[7.1].define(version: 2024_03_23_190039) do
+ActiveRecord::Schema[7.1].define(version: 2024_03_24_161800) do
   # These are extensions that must be enabled in order to support this database
   enable_extension "plpgsql"

+  create_table "active_storage_attachments", force: :cascade do |t|
+    t.string "name", null: false
+    t.string "record_type", null: false
+    t.bigint "record_id", null: false
+    t.bigint "blob_id", null: false
+    t.datetime "created_at", null: false
+    t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id"
+    t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true
+  end
+
+  create_table "active_storage_blobs", force: :cascade do |t|
+    t.string "key", null: false
+    t.string "filename", null: false
+    t.string "content_type"
+    t.text "metadata"
+    t.string "service_name", null: false
+    t.bigint "byte_size", null: false
+    t.string "checksum"
+    t.datetime "created_at", null: false
+    t.index ["key"], name: "index_active_storage_blobs_on_key", unique: true
+  end
+
+  create_table "active_storage_variant_records", force: :cascade do |t|
+    t.bigint "blob_id", null: false
+    t.string "variation_digest", null: false
+    t.index ["blob_id", "variation_digest"], name: "index_active_storage_variant_records_uniqueness", unique: true
+  end
+
   create_table "imports", force: :cascade do |t|
     t.string "name", null: false
     t.bigint "user_id", null: false

@@ -22,6 +50,7 @@ ActiveRecord::Schema[7.1].define(version: 2024_03_24_161800) do
     t.datetime "updated_at", null: false
     t.integer "raw_points", default: 0
     t.integer "doubles", default: 0
+    t.integer "processed", default: 0
     t.index ["source"], name: "index_imports_on_source"
     t.index ["user_id"], name: "index_imports_on_user_id"
   end

@@ -90,5 +119,7 @@ ActiveRecord::Schema[7.1].define(version: 2024_03_24_161800) do
     t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
   end

+  add_foreign_key "active_storage_attachments", "active_storage_blobs", column: "blob_id"
+  add_foreign_key "active_storage_variant_records", "active_storage_blobs", column: "blob_id"
   add_foreign_key "stats", "users"
 end

spec/jobs/import_job_spec.rb (new file, 5 lines)

@@ -0,0 +1,5 @@
require 'rails_helper'

RSpec.describe ImportJob, type: :job do
  pending "add some examples to (or delete) #{__FILE__}"
end
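
The generated spec is left pending; a starting point could run the whole pipeline against the existing OwnTracks fixture (hypothetical, assumes FactoryBot factories and test-mode Active Storage):

    RSpec.describe ImportJob, type: :job do
      it 'parses the attached file and records counters' do
        user   = create(:user)
        import = create(:import, user: user, source: :owntracks,
                                 file: fixture_file_upload('owntracks_export.json'))

        described_class.perform_now(user.id, import.id) # note: sleeps 3s by design

        expect(import.reload.processed).to eq(import.raw_points)
      end
    end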

spec/services/own_tracks/export_parser_spec.rb

@@ -2,23 +2,17 @@ require 'rails_helper'

 RSpec.describe OwnTracks::ExportParser do
   describe '#call' do
-    subject(:parser) { described_class.new(file_path, import_id).call }
+    subject(:parser) { described_class.new(import).call }

     let(:file_path) { 'spec/fixtures/owntracks_export.json' }
-    let(:import_id) { nil }
+    let(:file) { fixture_file_upload(file_path) }
+    let(:user) { create(:user) }
+    let(:import) { create(:import, user: user, file: file, name: File.basename(file.path)) }

     context 'when file exists' do
       it 'creates points' do
         expect { parser }.to change { Point.count }.by(8)
       end
     end
-
-    context 'when file does not exist' do
-      let(:file_path) { 'spec/fixtures/not_found.json' }
-
-      it 'raises error' do
-        expect { parser }.to raise_error('File not found')
-      end
-    end
   end
 end