commit 1b8d997cde
Evgenii Burmakin, 2025-10-04 16:19:15 +02:00 (committed by GitHub)
11 changed files with 209 additions and 64 deletions


@@ -81,3 +81,8 @@ group :development do
gem 'foreman'
gem 'rubocop-rails', require: false
end
group :development, :test do
gem 'pg_query'
gem 'prosopite'
end
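Note: prosopite is a runtime N+1 query detector, and pg_query is the PostgreSQL parser it uses to fingerprint similar queries, which is why the two gems are added to the development and test groups together. A minimal sketch of what a scan catches (illustrative only; the block form follows the gem's documented API, while the explicit scan/finish pair is what the spec helper changes below actually use):

    # Illustrative, not part of this commit: block form of a Prosopite scan.
    Prosopite.scan do
      User.limit(5).each { |user| user.imports.to_a }  # same SELECT repeated with different user_id binds
    end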


@@ -185,6 +185,24 @@ GEM
raabro (~> 1.4)
globalid (1.2.1)
activesupport (>= 6.1)
google-protobuf (4.32.1)
bigdecimal
rake (>= 13)
google-protobuf (4.32.1-aarch64-linux-gnu)
bigdecimal
rake (>= 13)
google-protobuf (4.32.1-arm64-darwin)
bigdecimal
rake (>= 13)
google-protobuf (4.32.1-x86-linux-gnu)
bigdecimal
rake (>= 13)
google-protobuf (4.32.1-x86_64-darwin)
bigdecimal
rake (>= 13)
google-protobuf (4.32.1-x86_64-linux-gnu)
bigdecimal
rake (>= 13)
gpx (1.2.1)
csv
nokogiri (~> 1.7)
@@ -295,12 +313,15 @@ GEM
pg (1.6.2-arm64-darwin)
pg (1.6.2-x86_64-darwin)
pg (1.6.2-x86_64-linux)
pg_query (6.1.0)
google-protobuf (>= 3.25.3)
pp (0.6.2)
prettyprint
prettyprint (0.2.0)
prism (1.5.1)
prometheus_exporter (2.2.0)
webrick
prosopite (2.1.2)
pry (0.15.2)
coderay (~> 1.1)
method_source (~> 1.0)
@@ -566,7 +587,9 @@ DEPENDENCIES
oj
parallel
pg
pg_query
prometheus_exporter
prosopite
pry-byebug
pry-rails
puma


@@ -1,7 +1,7 @@
# frozen_string_literal: true
class Import < ApplicationRecord
belongs_to :user
belongs_to :user, counter_cache: true
has_many :points, dependent: :destroy
has_one_attached :file
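Note: counter_cache: true makes Rails keep users.imports_count (the column added by the migration later in this commit) up to date whenever an Import is created or destroyed, so size checks can read the cached value instead of issuing COUNT queries. A rough sketch of the effect (import_attributes is a placeholder):

    user = User.first
    user.imports.create!(import_attributes)  # increments users.imports_count automatically
    user.imports.size                        # reads users.imports_count, no COUNT(*) query
    user.imports.count                       # still runs COUNT(*) against the table, by design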


@@ -32,7 +32,7 @@ class Point < ApplicationRecord
scope :not_visited, -> { where(visit_id: nil) }
after_create :async_reverse_geocode, if: -> { DawarichSettings.store_geodata? && !reverse_geocoded? }
after_create :set_country
before_create :set_country, unless: -> { country_id.present? }
after_create_commit :broadcast_coordinates
# after_commit :recalculate_track, on: :update, if: -> { track.present? }
@@ -95,7 +95,6 @@ class Point < ApplicationRecord
def set_country
self.country_id = found_in_country&.id
save! if changed?
end
def recalculate_track
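Note: moving set_country from after_create to before_create (skipped when country_id is already supplied) assigns the country before the row is written, which is why the extra save! call is removed, and it lets callers bypass the lookup entirely by passing country_id, as the serializer spec below does. Sketch of the resulting flow (point_attributes and some_country_id are placeholders):

    Point.create!(point_attributes)                                      # set_country assigns country_id before the INSERT
    Point.create!(point_attributes.merge(country_id: some_country_id))   # unless: guard skips the lookup entirely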


@@ -15,14 +15,16 @@ class Users::ImportData::Imports
imports_created = 0
files_restored = 0
# Preload existing imports to avoid N+1 queries
@existing_imports_cache = load_existing_imports
imports_data.each do |import_data|
next unless import_data.is_a?(Hash)
existing_import = user.imports.find_by(
name: import_data['name'],
source: import_data['source'],
created_at: import_data['created_at']
)
# Normalize created_at for consistent cache key lookup
created_at_normalized = normalize_created_at(import_data['created_at'])
cache_key = import_cache_key(import_data['name'], import_data['source'], created_at_normalized)
existing_import = @existing_imports_cache[cache_key]
if existing_import
Rails.logger.debug "Import already exists: #{import_data['name']}"
@@ -34,9 +36,7 @@ class Users::ImportData::Imports
imports_created += 1
if import_data['file_name'] && restore_import_file(import_record, import_data)
files_restored += 1
end
files_restored += 1 if import_data['file_name'] && restore_import_file(import_record, import_data)
end
Rails.logger.info "Imports import completed. Created: #{imports_created}, Files restored: #{files_restored}"
@@ -47,6 +47,59 @@ class Users::ImportData::Imports
attr_reader :user, :imports_data, :files_directory
def load_existing_imports
# Extract import identifiers from imports_data and normalize created_at
import_keys = imports_data.select { |id| id.is_a?(Hash) && id['name'].present? && id['source'].present? }
.map do |id|
# Normalize created_at to string for consistent comparison
created_at_normalized = normalize_created_at(id['created_at'])
{ name: id['name'], source: id['source'], created_at: created_at_normalized }
end
return {} if import_keys.empty?
# Build a hash for quick lookup
cache = {}
# Build OR conditions using Arel to fetch all matching imports in a single query
arel_table = Import.arel_table
conditions = import_keys.map do |key|
condition = arel_table[:user_id].eq(user.id)
.and(arel_table[:name].eq(key[:name]))
.and(arel_table[:source].eq(key[:source]))
# Handle created_at being nil
if key[:created_at].nil?
condition.and(arel_table[:created_at].eq(nil))
else
# Parse the string back to Time for querying
condition.and(arel_table[:created_at].eq(Time.zone.parse(key[:created_at])))
end
end.reduce { |result, condition| result.or(condition) }
# Fetch all matching imports in a single query
Import.where(conditions).find_each do |import|
# Normalize created_at from database for cache key
created_at_normalized = normalize_created_at(import.created_at)
cache_key = import_cache_key(import.name, import.source, created_at_normalized)
cache[cache_key] = import
end
cache
end
def normalize_created_at(created_at)
return nil if created_at.nil?
# Convert to string in ISO8601 format for consistent comparison
time = created_at.is_a?(String) ? Time.zone.parse(created_at) : created_at
time&.iso8601
end
def import_cache_key(name, source, created_at)
"#{name}_#{source}_#{created_at}"
end
def create_import_record(import_data)
import_attributes = prepare_import_attributes(import_data)
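Note: the service now collects every (name, source, created_at) triple up front, normalizes created_at to an ISO8601 string so cache keys match whether the export stored a String or a Time, and loads all candidate imports with a single OR'ed Arel query into a hash keyed by import_cache_key. A stripped-down sketch of the map/reduce-over-Arel pattern used above (keys is placeholder data):

    keys = [{ name: 'example import', source: 'example_source' }]   # placeholder data
    table = Import.arel_table
    predicate = keys
      .map { |k| table[:name].eq(k[:name]).and(table[:source].eq(k[:source])) }
      .reduce { |acc, cond| acc.or(cond) }
    Import.where(predicate)   # one SELECT for all candidates (the service also ANDs user_id and created_at)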


@@ -13,6 +13,9 @@ class Users::ImportData::Places
places_created = 0
# Preload all existing places to avoid N+1 queries
@existing_places_cache = load_existing_places
places_data.each do |place_data|
next unless place_data.is_a?(Hash)
@@ -28,6 +31,38 @@ class Users::ImportData::Places
attr_reader :user, :places_data
def load_existing_places
# Extract all coordinates from places_data to preload relevant places
coordinates = places_data.select do |pd|
pd.is_a?(Hash) && pd['name'].present? && pd['latitude'].present? && pd['longitude'].present?
end.map { |pd| { name: pd['name'], lat: pd['latitude'].to_f, lon: pd['longitude'].to_f } }
return {} if coordinates.empty?
# Build a hash for quick lookup: "name_lat_lon" => place
cache = {}
# Build OR conditions using Arel to fetch all matching places in a single query
arel_table = Place.arel_table
conditions = coordinates.map do |coord|
arel_table[:name].eq(coord[:name])
.and(arel_table[:latitude].eq(coord[:lat]))
.and(arel_table[:longitude].eq(coord[:lon]))
end.reduce { |result, condition| result.or(condition) }
# Fetch all matching places in a single query
Place.where(conditions).find_each do |place|
cache_key = place_cache_key(place.name, place.latitude, place.longitude)
cache[cache_key] = place
end
cache
end
def place_cache_key(name, latitude, longitude)
"#{name}_#{latitude}_#{longitude}"
end
def find_or_create_place_for_import(place_data)
name = place_data['name']
latitude = place_data['latitude']&.to_f
@@ -41,12 +76,9 @@ class Users::ImportData::Places
Rails.logger.debug "Processing place for import: #{name} at (#{latitude}, #{longitude})"
# During import, we prioritize data integrity for the importing user
# First try exact match (name + coordinates)
existing_place = Place.where(
name: name,
latitude: latitude,
longitude: longitude
).first
# First try exact match (name + coordinates) from cache
cache_key = place_cache_key(name, latitude, longitude)
existing_place = @existing_places_cache[cache_key]
if existing_place
Rails.logger.debug "Found exact place match: #{name} at (#{latitude}, #{longitude}) -> existing place ID #{existing_place.id}"
@@ -71,6 +103,9 @@ class Users::ImportData::Places
place.define_singleton_method(:previously_new_record?) { true }
Rails.logger.debug "Created place during import: #{place.name} (ID: #{place.id})"
# Add to cache for subsequent lookups
@existing_places_cache[cache_key] = place
place
rescue ActiveRecord::RecordInvalid => e
Rails.logger.error "Failed to create place: #{place_data.inspect}, error: #{e.message}"


@@ -69,4 +69,9 @@ Rails.application.configure do
# Raise error when a before_action's only/except options reference missing actions.
config.action_controller.raise_on_missing_callback_actions = true
config.after_initialize do
Prosopite.rails_logger = true
Prosopite.raise = true
end
end
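Note: with Prosopite.raise = true, a detected N+1 raises instead of only being logged, so the per-example Prosopite.scan/Prosopite.finish hooks added to the spec helper below turn regressions into failing specs. Illustrative behaviour under this configuration (the exception class name comes from the prosopite gem; treat it as an assumption if the installed version differs):

    users = User.limit(2).to_a
    Prosopite.scan
    users.each { |user| user.imports.to_a }  # same SELECT issued with different user_id binds
    Prosopite.finish                         # raises Prosopite::NPlusOneQueriesError under raise = true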


@@ -0,0 +1,7 @@
# frozen_string_literal: true
class AddImportsCountToUsers < ActiveRecord::Migration[8.0]
def change
add_column :users, :imports_count, :integer, default: 0, null: false
end
end
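Note: the migration only adds the users.imports_count column; counters for users who already have imports are not backfilled anywhere in this commit. If a backfill is needed, Rails' reset_counters could do it, for example (hypothetical, not part of this change):

    # Hypothetical one-off backfill, not included in this commit:
    User.find_each { |user| User.reset_counters(user.id, :imports) }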

db/schema.rb (generated)

@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema[8.0].define(version: 2025_09_18_215512) do
ActiveRecord::Schema[8.0].define(version: 2025_10_01_190702) do
# These are extensions that must be enabled in order to support this database
enable_extension "pg_catalog.plpgsql"
enable_extension "postgis"
@@ -281,6 +281,7 @@ ActiveRecord::Schema[8.0].define(version: 2025_09_18_215512) do
t.integer "status", default: 0
t.datetime "active_until"
t.integer "points_count", default: 0, null: false
t.integer "imports_count"
t.index ["email"], name: "index_users_on_email", unique: true
t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
end


@@ -6,9 +6,20 @@ RSpec.describe Points::GpxSerializer do
describe '#call' do
subject(:serializer) { described_class.new(points, 'some_name').call }
let(:country) do
Country.create!(
name: 'Test Country',
iso_a2: 'TC',
iso_a3: 'TST',
geom: 'MULTIPOLYGON (((0 0, 1 0, 1 1, 0 1, 0 0)))'
)
end
let(:points) do
# Create points with country_id set to skip the set_country callback
# which would trigger N+1 queries for country lookups
(1..3).map do |i|
create(:point, timestamp: 1.day.ago + i.minutes)
create(:point, timestamp: 1.day.ago + i.minutes, country_id: country.id)
end
end


@@ -2,6 +2,7 @@
require 'simplecov'
require 'webmock/rspec'
require 'prosopite'
SimpleCov.start
@@ -51,51 +51,56 @@ RSpec.configure do |config|
# triggering implicit auto-inclusion in groups with matching metadata.
config.shared_context_metadata_behavior = :apply_to_host_groups
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
=begin
# This allows you to limit a spec run to individual examples or groups
# you care about by tagging them with `:focus` metadata. When nothing
# is tagged with `:focus`, all examples get run. RSpec also provides
# aliases for `it`, `describe`, and `context` that include `:focus`
# metadata: `fit`, `fdescribe` and `fcontext`, respectively.
config.filter_run_when_matching :focus
# Allows RSpec to persist some state between runs in order to support
# the `--only-failures` and `--next-failure` CLI options. We recommend
# you configure your source control system to ignore this file.
config.example_status_persistence_file_path = "spec/examples.txt"
# Limits the available syntax to the non-monkey patched syntax that is
# recommended. For more details, see:
# https://relishapp.com/rspec/rspec-core/docs/configuration/zero-monkey-patching-mode
config.disable_monkey_patching!
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = "doc"
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
# # This allows you to limit a spec run to individual examples or groups
# # you care about by tagging them with `:focus` metadata. When nothing
# # is tagged with `:focus`, all examples get run. RSpec also provides
# # aliases for `it`, `describe`, and `context` that include `:focus`
# # metadata: `fit`, `fdescribe` and `fcontext`, respectively.
# config.filter_run_when_matching :focus
#
# # Allows RSpec to persist some state between runs in order to support
# # the `--only-failures` and `--next-failure` CLI options. We recommend
# # you configure your source control system to ignore this file.
# config.example_status_persistence_file_path = "spec/examples.txt"
#
# # Limits the available syntax to the non-monkey patched syntax that is
# # recommended. For more details, see:
# # https://relishapp.com/rspec/rspec-core/docs/configuration/zero-monkey-patching-mode
# config.disable_monkey_patching!
#
# # Many RSpec users commonly either run the entire suite or an individual
# # file, and it's useful to allow more verbose output when running an
# # individual spec file.
# if config.files_to_run.one?
# # Use the documentation formatter for detailed output,
# # unless a formatter has already been configured
# # (e.g. via a command-line flag).
# config.default_formatter = "doc"
# end
#
# # Print the 10 slowest examples and example groups at the
# # end of the spec run, to help surface which specs are running
# # particularly slow.
# config.profile_examples = 10
#
# # Run specs in random order to surface order dependencies. If you find an
# # order dependency and want to debug it, you can fix the order by providing
# # the seed, which is printed after each run.
# # --seed 1234
# config.order = :random
#
# # Seed global randomization in this process using the `--seed` CLI option.
# # Setting this allows you to use `--seed` to deterministically reproduce
# # test failures related to randomization by passing the same `--seed` value
# # as the one that triggered the failure.
# Kernel.srand config.seed
config.before(:each) do
Prosopite.scan
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
=end
config.after(:each) do
Prosopite.finish
end
end