Mirror of https://github.com/Freika/dawarich.git (synced 2026-01-10 17:21:38 -05:00)
Merge remote-tracking branch 'origin' into dev
commit 04fbe4d564
4 changed files with 27 additions and 98 deletions

@@ -4,15 +4,6 @@ All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).

# [0.36.4] - Unreleased

## Fixed

- Fixed a bug preventing the app from starting if a composite index on the stats table already exists. #2034 #2051 #2046
- New compiled assets will override old ones on app start to prevent serving stale assets.
- Number of points in stats should no longer go negative when points are deleted. #2054
- Disable Family::Invitations::CleanupJob when no invitations are in the database. #2043

# [0.36.3] - 2025-12-14

## Added
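
The #2043 entry above describes an early-return guard that skips the cleanup job entirely when there are no invitations to clean up. A minimal, hypothetical sketch of such a guard — the job body, model name and scope are assumptions, not taken from the codebase:

# Hypothetical sketch only; the real Family::Invitations::CleanupJob body is not shown on this page.
module Family
  module Invitations
    class CleanupJob < ApplicationJob
      queue_as :default

      def perform
        # Skip the whole run when there is nothing to clean up (#2043).
        return unless Family::Invitation.exists?

        # ...actual cleanup of stale invitations would go here...
      end
    end
  end
end
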
File diff suppressed because one or more lines are too long

@@ -95,36 +95,13 @@ class Users::ImportData

          FileUtils.mkdir_p(File.dirname(extraction_path))

          # Extract with proper error handling and cleanup
          extract_entry_safely(entry, extraction_path)
        end
      end
    end

  def extract_entry_safely(entry, extraction_path)
    # Extract with error handling and cleanup on failure
    begin
      entry.get_input_stream do |input|
        File.open(extraction_path, 'wb') do |output|
          bytes_copied = IO.copy_stream(input, output)

          # Verify extracted size matches expected size
          if bytes_copied != entry.size
            raise "Size mismatch for #{entry.name}: expected #{entry.size} bytes, got #{bytes_copied} bytes"
          # Manual extraction to bypass size validation for large files
          entry.get_input_stream do |input|
            File.open(extraction_path, 'wb') do |output|
              IO.copy_stream(input, output)
            end
          end
        end
      end

      Rails.logger.debug "Successfully extracted #{entry.name} (#{entry.size} bytes)"
    rescue StandardError => e
      # Clean up partial file on error
      FileUtils.rm_f(extraction_path) if File.exist?(extraction_path)

      Rails.logger.error "Failed to extract #{entry.name}: #{e.message}"
      Rails.logger.error e.backtrace.join("\n")

      # Re-raise to stop the import process
      raise "Extraction failed for #{entry.name}: #{e.message}"
    end
  end
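
The comment "Manual extraction to bypass size validation for large files" in this hunk refers to streaming each archive entry to disk yourself with IO.copy_stream instead of relying on rubyzip's built-in extraction, which validates an entry's declared size. A stand-alone sketch of that approach, assuming a rubyzip archive; the method name and paths are illustrative, not the service's actual code, and path sanitisation (e.g. rejecting "../" entry names) is omitted for brevity:

require 'zip'
require 'fileutils'

# Minimal sketch: stream every entry to disk so extraction is not subject to
# rubyzip's declared-size validation.
def extract_archive(archive_path, target_dir)
  Zip::File.open(archive_path) do |zip_file|
    zip_file.each do |entry|
      next if entry.directory?

      extraction_path = File.join(target_dir, entry.name)
      FileUtils.mkdir_p(File.dirname(extraction_path))

      entry.get_input_stream do |input|
        File.open(extraction_path, 'wb') { |output| IO.copy_stream(input, output) }
      end
    end
  end
end
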
@@ -148,9 +125,7 @@ class Users::ImportData

    Rails.logger.info "Starting data import for user: #{user.email}"

    json_path = @import_directory.join('data.json')
    unless File.exist?(json_path)
      raise StandardError, 'Data file not found in archive: data.json'
    end
    raise StandardError, 'Data file not found in archive: data.json' unless File.exist?(json_path)

    initialize_stream_state

@@ -241,10 +216,10 @@ class Users::ImportData

    @places_batch << place_data

    if @places_batch.size >= STREAM_BATCH_SIZE
      import_places_batch(@places_batch)
      @places_batch.clear
    end
    return unless @places_batch.size >= STREAM_BATCH_SIZE

    import_places_batch(@places_batch)
    @places_batch.clear
  end

  def flush_places_batch
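
Both sides of this hunk implement the same buffered-batch pattern: accumulate place records and flush them once the buffer reaches STREAM_BATCH_SIZE. A stripped-down sketch of that pattern, where the Place model, the insert_all call and the batch size are assumptions rather than the service's actual implementation:

# Sketch of the buffered batch import; names and the insert_all call are illustrative.
STREAM_BATCH_SIZE = 1000

def add_place(place_data)
  @places_batch ||= []
  @places_batch << place_data

  # Flush only when the buffer is full; otherwise keep accumulating.
  return unless @places_batch.size >= STREAM_BATCH_SIZE

  flush_places_batch
end

def flush_places_batch
  return if @places_batch.nil? || @places_batch.empty?

  Place.insert_all(@places_batch) # one bulk INSERT per full batch
  @places_batch.clear
end
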
@@ -342,14 +317,16 @@ class Users::ImportData

  def import_imports(imports_data)
    Rails.logger.debug "Importing #{imports_data&.size || 0} imports"
    imports_created, files_restored = Users::ImportData::Imports.new(user, imports_data, @import_directory.join('files')).call
    imports_created, files_restored = Users::ImportData::Imports.new(user, imports_data,
                                                                     @import_directory.join('files')).call
    @import_stats[:imports_created] += imports_created.to_i
    @import_stats[:files_restored] += files_restored.to_i
  end

  def import_exports(exports_data)
    Rails.logger.debug "Importing #{exports_data&.size || 0} exports"
    exports_created, files_restored = Users::ImportData::Exports.new(user, exports_data, @import_directory.join('files')).call
    exports_created, files_restored = Users::ImportData::Exports.new(user, exports_data,
                                                                     @import_directory.join('files')).call
    @import_stats[:exports_created] += exports_created.to_i
    @import_stats[:files_restored] += files_restored.to_i
  end

@@ -418,11 +395,11 @@ class Users::ImportData

    expected_counts.each do |entity, expected_count|
      actual_count = @import_stats[:"#{entity}_created"] || 0

      if actual_count < expected_count
        discrepancy = "#{entity}: expected #{expected_count}, got #{actual_count} (#{expected_count - actual_count} missing)"
        discrepancies << discrepancy
        Rails.logger.warn "Import discrepancy - #{discrepancy}"
      end
      next unless actual_count < expected_count

      discrepancy = "#{entity}: expected #{expected_count}, got #{actual_count} (#{expected_count - actual_count} missing)"
      discrepancies << discrepancy
      Rails.logger.warn "Import discrepancy - #{discrepancy}"
    end

    if discrepancies.any?

@@ -3,55 +3,16 @@

class AddCompositeIndexToStats < ActiveRecord::Migration[8.0]
  disable_ddl_transaction!

  BATCH_SIZE = 1000

  def change
    total_duplicates = execute(<<-SQL.squish).first['count'].to_i
      SELECT COUNT(*) as count
      FROM stats s1
      WHERE EXISTS (
        SELECT 1 FROM stats s2
        WHERE s2.user_id = s1.user_id
        AND s2.year = s1.year
        AND s2.month = s1.month
        AND s2.id > s1.id
      )
    SQL

    if total_duplicates.positive?
      Rails.logger.info(
        "Found #{total_duplicates} duplicate stats records. Starting cleanup in batches of #{BATCH_SIZE}..."
      )
    end

    deleted_count = 0
    loop do
      batch_deleted = execute(<<-SQL.squish).cmd_tuples
        DELETE FROM stats s1
        WHERE EXISTS (
          SELECT 1 FROM stats s2
          WHERE s2.user_id = s1.user_id
          AND s2.year = s1.year
          AND s2.month = s1.month
          AND s2.id > s1.id
        )
        LIMIT #{BATCH_SIZE}
      SQL

      break if batch_deleted.zero?

      deleted_count += batch_deleted
      Rails.logger.info("Cleaned up #{deleted_count}/#{total_duplicates} duplicate stats records")
    end

    Rails.logger.info("Completed cleanup: removed #{deleted_count} duplicate stats records") if deleted_count.positive?

    # Add composite index for the most common stats lookup pattern:
    # Stat.find_or_initialize_by(year:, month:, user:)
    # This query is called on EVERY stats calculation
    #
    # Using algorithm: :concurrently to avoid locking the table during index creation
    # This is crucial for production deployments with existing data
    add_index :stats, %i[user_id year month],
              name: 'index_stats_on_user_id_year_month',
              unique: true,
              algorithm: :concurrently,
              if_not_exists: true

    BulkStatsCalculatingJob.perform_later
              algorithm: :concurrently
  end
end
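
Two details in this migration are worth spelling out. First, PostgreSQL's DELETE statement has no LIMIT clause, so a batched duplicate cleanup like the one above is normally written with an id subquery. Second, passing if_not_exists: true (together with algorithm: :concurrently and disable_ddl_transaction!) makes the index creation idempotent, which appears to be what the #2034 changelog entry refers to. The sketch below illustrates both techniques; it is an illustration under those assumptions, not a reconstruction of the final migration in the repository:

# Sketch only: batched duplicate removal via an id subquery, then an
# idempotent, concurrently-built unique index on (user_id, year, month).
class AddCompositeIndexToStatsSketch < ActiveRecord::Migration[8.0]
  disable_ddl_transaction!

  BATCH_SIZE = 1000

  def change
    loop do
      # Delete up to BATCH_SIZE duplicates per pass, keeping the highest id
      # per (user_id, year, month); PostgreSQL's DELETE has no LIMIT of its own.
      deleted = execute(<<-SQL.squish).cmd_tuples
        DELETE FROM stats
        WHERE id IN (
          SELECT s1.id
          FROM stats s1
          JOIN stats s2
            ON s2.user_id = s1.user_id
           AND s2.year = s1.year
           AND s2.month = s1.month
           AND s2.id > s1.id
          LIMIT #{BATCH_SIZE}
        )
      SQL

      break if deleted.zero?
    end

    add_index :stats, %i[user_id year month],
              name: 'index_stats_on_user_id_year_month',
              unique: true,
              algorithm: :concurrently,
              if_not_exists: true
  end
end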