Mirror of https://github.com/Freika/dawarich.git
Move import processing to background job & allow to disable reverse geocoding

Parent: 2e4390f194
Commit: 7292115a44

21 changed files with 183 additions and 67 deletions
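In outline, the import pipeline after this commit works as follows (a sketch assembled from the diffs below; current_user and file are the names used in the controller diff):

    # 1. ImportsController#create no longer parses uploads inline; it creates the
    #    Import record and stores the upload as an Active Storage attachment.
    import = current_user.imports.create(name: file.original_filename, source: params[:import][:source])
    import.file.attach(file)

    # 2. The Import model's after_create_commit callback enqueues the work on Sidekiq:
    #    ImportJob.perform_later(user.id, import.id)

    # 3. ImportJob downloads the attachment, picks a parser by source
    #    (GoogleMaps::TimelineParser or OwnTracks::ExportParser), creates points,
    #    updates raw_points/doubles/processed, and enqueues StatCreatingJob.
    #    Per-point reverse geocoding now runs on the low-priority queue and can be
    #    disabled entirely with REVERSE_GEOCODING_ENABLED=false.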
@@ -3,4 +3,4 @@ DATABASE_USERNAME=postgres
 DATABASE_PASSWORD=password
 DATABASE_NAME=dawarich_development
 DATABASE_PORT=5432
-REDIS_URL=redis://dawarich_redis:6379/1
+REDIS_URL=redis://localhost:6379/1
@@ -3,4 +3,4 @@ DATABASE_USERNAME=postgres
 DATABASE_PASSWORD=password
 DATABASE_NAME=dawarich_test
 DATABASE_PORT=5432
-REDIS_URL=redis://dawarich_redis:6379/1
+REDIS_URL=redis://localhost:6379/1
Makefile

@@ -67,7 +67,7 @@ production_migrate:
 	ssh dokku_frey 'dokku run dawarich bundle exec rails db:migrate'
 
 build_and_push:
-	git tag -l "$(version)"
+	git tag -a "$(version)" -f -m "$(version)"
 	docker build . -t dawarich:$(version) --platform=linux/amd64
 	docker tag dawarich:$(version) registry.chibi.rodeo/dawarich:$(version)
 	docker push registry.chibi.rodeo/dawarich:$(version)
@@ -1,2 +1,3 @@
 web: bin/rails server -p 3000 -b 0.0.0.0
 css: bin/rails tailwindcss:watch
+worker: bundle exec sidekiq
@@ -15,29 +15,19 @@ class ImportsController < ApplicationController
 
   def create
     files = import_params[:files].reject(&:blank?)
-    imports = []
-    report = ''
 
     files.each do |file|
-      json = JSON.parse(file.read)
-      import = current_user.imports.create(name: file.original_filename, source: params[:import][:source])
-      result = parser.new(file.path, import.id).call
-
-      if result[:points].zero?
-        import.destroy!
-      else
-        import.update(raw_points: result[:raw_points], doubles: result[:doubles])
-
-        imports << import
-      end
+      import = current_user.imports.create(
+        name: file.original_filename,
+        source: params[:import][:source],
+      )
+
+      import.file.attach(file)
     end
 
-    StatCreatingJob.perform_later(current_user.id)
-
-    redirect_to imports_url, notice: "#{imports.size} import files were imported successfully", status: :see_other
+    redirect_to imports_url, notice: "#{files.size} files are queued to be imported in background", status: :see_other
   rescue StandardError => e
-    imports.each { |import| import&.destroy! }
+    Import.where(user: current_user, name: files.map(&:original_filename)).destroy_all
+    Rails.logger.debug e.message
     flash.now[:error] = e.message
 
     redirect_to new_import_path, notice: e.message, status: :unprocessable_entity

@@ -57,11 +47,4 @@ class ImportsController < ApplicationController
   def import_params
     params.require(:import).permit(:source, files: [])
   end
-
-  def parser
-    case params[:import][:source]
-    when 'google' then GoogleMaps::TimelineParser
-    when 'owntracks' then OwnTracks::ExportParser
-    end
-  end
 end
app/jobs/import_job.rb (new file)

@@ -0,0 +1,28 @@
+class ImportJob < ApplicationJob
+  queue_as :default
+
+  def perform(user_id, import_id)
+    user = User.find(user_id)
+    import = user.imports.find(import_id)
+    file = import.file
+
+    sleep 3 # It takes time to process uploaded file
+
+    result = parser(import.source).new(import).call
+
+    import.update(
+      raw_points: result[:raw_points], doubles: result[:doubles], processed: result[:processed]
+    )
+
+    StatCreatingJob.perform_later(user_id)
+  end
+
+  private
+
+  def parser(source)
+    case source
+    when 'google' then GoogleMaps::TimelineParser
+    when 'owntracks' then OwnTracks::ExportParser
+    end
+  end
+end
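Note the ordering this job relies on: it is enqueued by the Import model's after_create_commit callback (see the model diff below), while the controller attaches the file only after create returns, and the file local assigned in perform is never used. The sleep 3 presumably bridges the window before the attachment is committed. For illustration only, a re-enqueue based on Active Job's built-in delay instead of a fixed sleep (not part of this commit):

    # Illustration: retry until Active Storage has committed the attachment.
    def perform(user_id, import_id)
      user = User.find(user_id)
      import = user.imports.find(import_id)

      return ImportJob.set(wait: 3.seconds).perform_later(user_id, import_id) unless import.file.attached?

      result = parser(import.source).new(import).call
      import.update(raw_points: result[:raw_points], doubles: result[:doubles], processed: result[:processed])
      StatCreatingJob.perform_later(user_id)
    end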
@@ -1,5 +1,5 @@
 class ReverseGeocodingJob < ApplicationJob
-  queue_as :default
+  queue_as :low
 
   def perform(point_id)
     point = Point.find(point_id)
@@ -2,5 +2,15 @@ class Import < ApplicationRecord
   belongs_to :user
   has_many :points, dependent: :destroy
 
+  has_one_attached :file
+
   enum source: { google: 0, owntracks: 1 }
+
+  after_create_commit :async_import
+
+  private
+
+  def async_import
+    ImportJob.perform_later(user.id, self.id)
+  end
 end
@@ -16,6 +16,8 @@ class Point < ApplicationRecord
   private
 
   def async_reverse_geocode
+    return unless REVERSE_GEOCODING_ENABLED
+
     ReverseGeocodingJob.perform_later(id)
   end
 end
@@ -1,16 +1,11 @@
 # frozen_string_literal: true
 
 class GoogleMaps::TimelineParser
-  attr_reader :file_path, :file, :json, :import_id
+  attr_reader :import, :json
 
-  def initialize(file_path, import_id = nil)
-    @file_path = file_path
-
-    raise 'File not found' unless File.exist?(@file_path)
-
-    @file = File.read(@file_path)
-    @json = JSON.parse(@file)
-    @import_id = import_id
+  def initialize(import)
+    @import = import
+    @json = JSON.parse(import.file.download)
   end
 
   def call

@@ -28,15 +23,16 @@ class GoogleMaps::TimelineParser
       raw_data: point_data[:raw_data],
       topic: 'Google Maps Timeline Export',
       tracker_id: 'google-maps-timeline-export',
-      import_id: import_id
+      import_id: import.id
     )
 
     points += 1
   end
 
   doubles = points_data.size - points
+  processed = points + doubles
 
-  { raw_points: points_data.size, points: points, doubles: doubles }
+  { raw_points: points_data.size, points: points, doubles: doubles, processed: processed }
 end
 
 private
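Both rewritten parsers call JSON.parse(import.file.download); without a block, download returns the whole blob as a single string, so a large export is held entirely in memory. A sketch of a disk-backed alternative, for illustration only (this is not what the commit does; the same applies to OwnTracks::ExportParser below):

    # Illustration: stage the attachment on disk instead of downloading to a string.
    import.file.open do |tempfile|
      json = JSON.parse(tempfile.read) # still parsed in full, but buffered via a tempfile
    end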
@@ -1,16 +1,11 @@
 # frozen_string_literal: true
 
 class OwnTracks::ExportParser
-  attr_reader :file_path, :file, :json, :import_id
+  attr_reader :import, :json
 
-  def initialize(file_path, import_id = nil)
-    @file_path = file_path
-
-    raise 'File not found' unless File.exist?(@file_path)
-
-    @file = File.read(@file_path)
-    @json = JSON.parse(@file)
-    @import_id = import_id
+  def initialize(import)
+    @import = import
+    @json = JSON.parse(import.file.download)
   end
 
   def call

@@ -28,15 +23,16 @@ class OwnTracks::ExportParser
       raw_data: point_data[:raw_data],
       topic: point_data[:topic],
       tracker_id: point_data[:tracker_id],
-      import_id: import_id
+      import_id: import.id
     )
 
     points += 1
   end
 
   doubles = points_data.size - points
+  processed = points + doubles
 
-  { raw_points: points_data.size, points: points, doubles: doubles }
+  { raw_points: points_data.size, points: points, doubles: doubles, processed: processed }
 end
 
 private
@@ -11,8 +11,7 @@
 <thead>
   <tr>
     <th>Name</th>
-    <th>Raw points</th>
-    <th>Points created</th>
+    <th>Processed</th>
     <th>Doubles</th>
     <th>Created at</th>
   </tr>

@@ -23,9 +22,11 @@
   <td>
     <%= link_to import.name, import, class: 'underline hover:no-underline' %> (<%= import.source %>)
   </td>
-  <td><%= import.raw_points %></td>
+  <td>
+    <%= "✅" if import.processed == import.raw_points %>
+    <%= "#{import.processed}/#{import.raw_points}" %>
+  </td>
   <td><%= import.doubles %></td>
-  <td><%= import.points.count %></td>
   <td><%= import.created_at.strftime("%d.%m.%Y, %H:%M") %></td>
 </tr>
 <% end %>
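The ✅ in the new cell follows from the parser arithmetic above: processed = points + doubles, and raw_points = points_data.size, which is also points + doubles once every record has been handled, so the comparison acts as a completion flag:

    import.processed == import.raw_points # => true only once the whole file has been parsed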
@@ -1,6 +1,6 @@
 <%= "#{@distance} km" if @distance %>
 
-<% if @countries_and_cities&.any? %>
+<% if REVERSE_GEOCODING_ENABLED && @countries_and_cities&.any? %>
   <% @countries_and_cities.each do |country| %>
     <h2 class="text-lg font-semibold mt-5">
       <%= country[:country] %> (<%= country[:cities].count %> cities)
@@ -32,5 +32,7 @@ module Dawarich
       g.routing_specs false
       g.helper_specs false
     end
+
+    config.active_job.queue_adapter = :sidekiq
   end
 end
@@ -1,2 +1,3 @@
 MINIMUM_POINTS_IN_CITY = ENV.fetch('MINIMUM_POINTS_IN_CITY', 5).to_i
 MAP_CENTER = ENV.fetch('MAP_CENTER', '[55.7522, 37.6156]')
+REVERSE_GEOCODING_ENABLED = ENV.fetch('REVERSE_GEOCODING_ENABLED', 'true') == 'true'
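The flag is compared against the literal string 'true', so any other value ('false', '0', even 'TRUE') disables reverse geocoding, while leaving the variable unset keeps it enabled via the default:

    ENV['REVERSE_GEOCODING_ENABLED'] = 'false'
    ENV.fetch('REVERSE_GEOCODING_ENABLED', 'true') == 'true' # => false

    ENV.delete('REVERSE_GEOCODING_ENABLED')
    ENV.fetch('REVERSE_GEOCODING_ENABLED', 'true') == 'true' # => true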
config/sidekiq.yml (new file)

@@ -0,0 +1,4 @@
+:queues:
+  - critical
+  - default
+  - low
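Listed without weights, Sidekiq drains these queues in strict order: critical first, then default (where ImportJob runs), then low (where ReverseGeocodingJob now runs), so a geocoding backlog cannot starve imports. A quick check using Active Job's queue_name:

    ImportJob.queue_name           # => "default"
    ReverseGeocodingJob.queue_name # => "low"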
@@ -0,0 +1,57 @@
+# This migration comes from active_storage (originally 20170806125915)
+class CreateActiveStorageTables < ActiveRecord::Migration[7.0]
+  def change
+    # Use Active Record's configured type for primary and foreign keys
+    primary_key_type, foreign_key_type = primary_and_foreign_key_types
+
+    create_table :active_storage_blobs, id: primary_key_type do |t|
+      t.string :key, null: false
+      t.string :filename, null: false
+      t.string :content_type
+      t.text :metadata
+      t.string :service_name, null: false
+      t.bigint :byte_size, null: false
+      t.string :checksum
+
+      if connection.supports_datetime_with_precision?
+        t.datetime :created_at, precision: 6, null: false
+      else
+        t.datetime :created_at, null: false
+      end
+
+      t.index [ :key ], unique: true
+    end
+
+    create_table :active_storage_attachments, id: primary_key_type do |t|
+      t.string :name, null: false
+      t.references :record, null: false, polymorphic: true, index: false, type: foreign_key_type
+      t.references :blob, null: false, type: foreign_key_type
+
+      if connection.supports_datetime_with_precision?
+        t.datetime :created_at, precision: 6, null: false
+      else
+        t.datetime :created_at, null: false
+      end
+
+      t.index [ :record_type, :record_id, :name, :blob_id ], name: :index_active_storage_attachments_uniqueness, unique: true
+      t.foreign_key :active_storage_blobs, column: :blob_id
+    end
+
+    create_table :active_storage_variant_records, id: primary_key_type do |t|
+      t.belongs_to :blob, null: false, index: false, type: foreign_key_type
+      t.string :variation_digest, null: false
+
+      t.index [ :blob_id, :variation_digest ], name: :index_active_storage_variant_records_uniqueness, unique: true
+      t.foreign_key :active_storage_blobs, column: :blob_id
+    end
+  end
+
+  private
+    def primary_and_foreign_key_types
+      config = Rails.configuration.generators
+      setting = config.options[config.orm][:primary_key_type]
+      primary_key_type = setting || :primary_key
+      foreign_key_type = setting || :bigint
+      [primary_key_type, foreign_key_type]
+    end
+end
db/migrate/20240324161800_add_processed_to_imports.rb (new file)

@@ -0,0 +1,5 @@
+class AddProcessedToImports < ActiveRecord::Migration[7.1]
+  def change
+    add_column :imports, :processed, :integer, default: 0
+  end
+end
db/schema.rb (generated)

@@ -10,10 +10,38 @@
 #
 # It's strongly recommended that you check this file into your version control system.
 
-ActiveRecord::Schema[7.1].define(version: 2024_03_23_190039) do
+ActiveRecord::Schema[7.1].define(version: 2024_03_24_161800) do
   # These are extensions that must be enabled in order to support this database
   enable_extension "plpgsql"
 
+  create_table "active_storage_attachments", force: :cascade do |t|
+    t.string "name", null: false
+    t.string "record_type", null: false
+    t.bigint "record_id", null: false
+    t.bigint "blob_id", null: false
+    t.datetime "created_at", null: false
+    t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id"
+    t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true
+  end
+
+  create_table "active_storage_blobs", force: :cascade do |t|
+    t.string "key", null: false
+    t.string "filename", null: false
+    t.string "content_type"
+    t.text "metadata"
+    t.string "service_name", null: false
+    t.bigint "byte_size", null: false
+    t.string "checksum"
+    t.datetime "created_at", null: false
+    t.index ["key"], name: "index_active_storage_blobs_on_key", unique: true
+  end
+
+  create_table "active_storage_variant_records", force: :cascade do |t|
+    t.bigint "blob_id", null: false
+    t.string "variation_digest", null: false
+    t.index ["blob_id", "variation_digest"], name: "index_active_storage_variant_records_uniqueness", unique: true
+  end
+
   create_table "imports", force: :cascade do |t|
     t.string "name", null: false
     t.bigint "user_id", null: false

@@ -22,6 +50,7 @@ ActiveRecord::Schema[7.1].define(version: 2024_03_24_161800) do
     t.datetime "updated_at", null: false
     t.integer "raw_points", default: 0
     t.integer "doubles", default: 0
+    t.integer "processed", default: 0
     t.index ["source"], name: "index_imports_on_source"
     t.index ["user_id"], name: "index_imports_on_user_id"
   end

@@ -90,5 +119,7 @@ ActiveRecord::Schema[7.1].define(version: 2024_03_24_161800) do
     t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
   end
 
+  add_foreign_key "active_storage_attachments", "active_storage_blobs", column: "blob_id"
+  add_foreign_key "active_storage_variant_records", "active_storage_blobs", column: "blob_id"
   add_foreign_key "stats", "users"
 end
spec/jobs/import_job_spec.rb (new file)

@@ -0,0 +1,5 @@
+require 'rails_helper'
+
+RSpec.describe ImportJob, type: :job do
+  pending "add some examples to (or delete) #{__FILE__}"
+end
@@ -2,23 +2,17 @@ require 'rails_helper'
 
 RSpec.describe OwnTracks::ExportParser do
   describe '#call' do
-    subject(:parser) { described_class.new(file_path, import_id).call }
+    subject(:parser) { described_class.new(import.id).call }
 
     let(:file_path) { 'spec/fixtures/owntracks_export.json' }
-    let(:import_id) { nil }
+    let(:file) { fixture_file_upload(file_path) }
+    let(:user) { create(:user) }
+    let(:import) { create(:import, user: user, file: file, name: File.basename(file.path)) }
 
     context 'when file exists' do
       it 'creates points' do
        expect { parser }.to change { Point.count }.by(8)
      end
    end
-
-    context 'when file does not exist' do
-      let(:file_path) { 'spec/fixtures/not_found.json' }
-
-      it 'raises error' do
-        expect { parser }.to raise_error('File not found')
-      end
-    end
   end
 end