Fix tests

Eugene Burmakin 2025-12-07 12:06:30 +01:00
parent 4fedac720b
commit d8d7f2413c
4 changed files with 100 additions and 39 deletions

View file

@@ -14,6 +14,7 @@ gem 'bootsnap', require: false
gem 'chartkick'
gem 'data_migrate'
gem 'devise'
+gem 'foreman'
gem 'geocoder', github: 'Freika/geocoder', branch: 'master'
gem 'gpx'
gem 'groupdate'
@@ -49,14 +50,13 @@ gem 'sentry-ruby'
gem 'sidekiq', '>= 8.0.5'
gem 'sidekiq-cron', '>= 2.3.1'
gem 'sidekiq-limit_fetch'
+gem 'with_advisory_lock'
gem 'sprockets-rails'
gem 'stackprof'
gem 'stimulus-rails'
gem 'tailwindcss-rails', '= 3.3.2'
gem 'turbo-rails', '>= 2.0.17'
gem 'tzinfo-data', platforms: %i[mingw mswin x64_mingw jruby]
-gem 'foreman'
-gem 'with_advisory_lock'
group :development, :test, :staging do
gem 'brakeman', require: false

View file

@@ -18,7 +18,7 @@ module Points
end
gz.close
-io.string # Returns compressed bytes
+io.string.force_encoding(Encoding::ASCII_8BIT) # Returns compressed bytes in binary encoding
end
end
end
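The force_encoding call above matters because the string backing a bare StringIO usually carries the process's default external encoding (typically UTF-8), so the gzip bytes written through Zlib::GzipWriter come back tagged as text even though they are binary. A minimal sketch of that behaviour, assuming a UTF-8 default external encoding:

require 'stringio'
require 'zlib'

io = StringIO.new
gz = Zlib::GzipWriter.new(io)
gz.puts('{"lon":13.4,"lat":52.5}') # one JSONL record
gz.close

data = io.string
puts data.encoding            # typically UTF-8, inherited from the StringIO buffer
binary = data.force_encoding(Encoding::ASCII_8BIT)
puts binary.encoding.name     # "ASCII-8BIT" (a.k.a. BINARY), which the updated spec asserts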

View file

@@ -22,7 +22,7 @@ RSpec.describe Points::RawData::ChunkCompressor do
it 'returns compressed gzip data' do
result = compressor.compress
expect(result).to be_a(String)
-expect(result.encoding).to eq(Encoding::ASCII_8BIT)
+expect(result.encoding.name).to eq('ASCII-8BIT')
end
it 'compresses points as JSONL format' do
@@ -59,8 +59,12 @@ RSpec.describe Points::RawData::ChunkCompressor do
end
it 'processes points in batches' do
-# Create many points to test batch processing
-many_points = create_list(:point, 2500, user: user, raw_data: { lon: 13.4, lat: 52.5 })
+# Create many points to test batch processing with unique timestamps
+many_points = []
+base_time = Time.new(2024, 6, 15).to_i
+2500.times do |i|
+many_points << create(:point, user: user, timestamp: base_time + i, raw_data: { lon: 13.4, lat: 52.5 })
+end
large_compressor = described_class.new(Point.where(id: many_points.map(&:id)))
compressed = large_compressor.compress
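The expectations above ('compresses points as JSONL format', 'processes points in batches') imply that ChunkCompressor streams the relation in batches and writes one JSON document per line into a gzip stream. A rough, purely illustrative sketch of that shape, not the implementation changed in this commit (batch size and serialized fields are assumptions):

require 'json'
require 'stringio'
require 'zlib'

# Illustrative only: walk an ActiveRecord scope in batches and emit
# one JSON document per line (JSONL) into an in-memory gzip buffer.
def compress_points_jsonl(points_scope, batch_size: 1000)
  io = StringIO.new
  gz = Zlib::GzipWriter.new(io)
  points_scope.find_in_batches(batch_size: batch_size) do |batch|
    batch.each { |point| gz.puts({ id: point.id, raw_data: point.raw_data }.to_json) }
  end
  gz.close
  io.string.force_encoding(Encoding::ASCII_8BIT)
end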

View file

@@ -12,21 +12,33 @@ RSpec.describe Points::RawData::Restorer do
end
describe '#restore_to_database' do
-let(:archive) { create(:points_raw_data_archive, user: user, year: 2024, month: 6) }
let!(:archived_points) do
-points = create_list(:point, 3, user: user, timestamp: Time.new(2024, 6, 15).to_i,
-raw_data: nil, raw_data_archived: true, raw_data_archive: archive)
+create_list(:point, 3, user: user, timestamp: Time.new(2024, 6, 15).to_i,
+raw_data: nil, raw_data_archived: true)
+end
-# Mock archive file with actual point data
-compressed_data = gzip_points_data(points.map do |p|
+let(:archive) do
+# Create archive with actual point data
+compressed_data = gzip_points_data(archived_points.map do |p|
{ id: p.id, raw_data: { lon: 13.4, lat: 52.5 } }
end)
-allow(archive.file.blob).to receive(:download).and_return(compressed_data)
-points
+arc = build(:points_raw_data_archive, user: user, year: 2024, month: 6)
+arc.file.attach(
+io: StringIO.new(compressed_data),
+filename: arc.filename,
+content_type: 'application/gzip'
+)
+arc.save!
+# Associate points with archive
+archived_points.each { |p| p.update!(raw_data_archive: arc) }
+arc
end
it 'restores raw_data to database' do
+archive # Ensure archive is created before restore
restorer.restore_to_database(user.id, 2024, 6)
archived_points.each(&:reload)
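The added archive reference is needed because archive is now a lazy let that builds the ActiveStorage attachment from archived_points, and nothing else forces it to run: previously the eager let!(:archived_points) pointed at the archive (raw_data_archive: archive) and created it as a side effect. A small standalone illustration of the lazy/eager difference in RSpec, unrelated to this codebase:

RSpec.describe 'lazy let vs eager let!' do
  let(:lazy_value)   { :built }  # evaluated only when first referenced
  let!(:eager_value) { :built }  # evaluated in a before hook for every example

  it 'runs the lazy block only when the name is referenced' do
    lazy_value # without this line the lazy block never runs in this example
    expect(lazy_value).to eq(:built)
  end
end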
@@ -36,6 +48,7 @@ RSpec.describe Points::RawData::Restorer do
end
it 'clears archive flags' do
+archive # Ensure archive is created before restore
restorer.restore_to_database(user.id, 2024, 6)
archived_points.each(&:reload)
@@ -52,20 +65,32 @@ RSpec.describe Points::RawData::Restorer do
end
context 'with multiple chunks' do
-let!(:archive2) { create(:points_raw_data_archive, user: user, year: 2024, month: 6, chunk_number: 2) }
let!(:more_points) do
-points = create_list(:point, 2, user: user, timestamp: Time.new(2024, 6, 20).to_i,
-raw_data: nil, raw_data_archived: true, raw_data_archive: archive2)
+create_list(:point, 2, user: user, timestamp: Time.new(2024, 6, 20).to_i,
+raw_data: nil, raw_data_archived: true)
+end
-compressed_data = gzip_points_data(points.map do |p|
+let!(:archive2) do
+compressed_data = gzip_points_data(more_points.map do |p|
{ id: p.id, raw_data: { lon: 14.0, lat: 53.0 } }
end)
-allow(archive2.file.blob).to receive(:download).and_return(compressed_data)
-points
+arc = build(:points_raw_data_archive, user: user, year: 2024, month: 6, chunk_number: 2)
+arc.file.attach(
+io: StringIO.new(compressed_data),
+filename: arc.filename,
+content_type: 'application/gzip'
+)
+arc.save!
+more_points.each { |p| p.update!(raw_data_archive: arc) }
+arc
end
it 'restores from all chunks' do
+archive # Ensure first archive is created
+archive2 # Ensure second archive is created
restorer.restore_to_database(user.id, 2024, 6)
(archived_points + more_points).each(&:reload)
@@ -76,20 +101,31 @@ RSpec.describe Points::RawData::Restorer do
end
describe '#restore_to_memory' do
-let(:archive) { create(:points_raw_data_archive, user: user, year: 2024, month: 6) }
let!(:archived_points) do
-points = create_list(:point, 2, user: user, timestamp: Time.new(2024, 6, 15).to_i,
-raw_data: nil, raw_data_archived: true, raw_data_archive: archive)
+create_list(:point, 2, user: user, timestamp: Time.new(2024, 6, 15).to_i,
+raw_data: nil, raw_data_archived: true)
+end
-compressed_data = gzip_points_data(points.map do |p|
+let(:archive) do
+compressed_data = gzip_points_data(archived_points.map do |p|
{ id: p.id, raw_data: { lon: 13.4, lat: 52.5 } }
end)
-allow(archive.file.blob).to receive(:download).and_return(compressed_data)
-points
+arc = build(:points_raw_data_archive, user: user, year: 2024, month: 6)
+arc.file.attach(
+io: StringIO.new(compressed_data),
+filename: arc.filename,
+content_type: 'application/gzip'
+)
+arc.save!
+archived_points.each { |p| p.update!(raw_data_archive: arc) }
+arc
end
it 'loads data into cache' do
+archive # Ensure archive is created before restore
restorer.restore_to_memory(user.id, 2024, 6)
archived_points.each do |point|
@@ -100,6 +136,7 @@ RSpec.describe Points::RawData::Restorer do
end
it 'does not modify database' do
+archive # Ensure archive is created before restore
restorer.restore_to_memory(user.id, 2024, 6)
archived_points.each(&:reload)
@@ -110,6 +147,7 @@ RSpec.describe Points::RawData::Restorer do
end
it 'sets cache expiration to 1 hour' do
+archive # Ensure archive is created before restore
restorer.restore_to_memory(user.id, 2024, 6)
cache_key = "raw_data:temp:#{user.id}:2024:6:#{archived_points.first.id}"
@@ -120,25 +158,44 @@ RSpec.describe Points::RawData::Restorer do
end
describe '#restore_all_for_user' do
-let!(:june_archive) { create(:points_raw_data_archive, user: user, year: 2024, month: 6) }
-let!(:july_archive) { create(:points_raw_data_archive, user: user, year: 2024, month: 7) }
let!(:june_points) do
-points = create_list(:point, 2, user: user, timestamp: Time.new(2024, 6, 15).to_i,
-raw_data: nil, raw_data_archived: true, raw_data_archive: june_archive)
-compressed_data = gzip_points_data(points.map { |p| { id: p.id, raw_data: { month: 'june' } } })
-allow(june_archive.file.blob).to receive(:download).and_return(compressed_data)
-points
+create_list(:point, 2, user: user, timestamp: Time.new(2024, 6, 15).to_i,
+raw_data: nil, raw_data_archived: true)
end
let!(:july_points) do
-points = create_list(:point, 2, user: user, timestamp: Time.new(2024, 7, 15).to_i,
-raw_data: nil, raw_data_archived: true, raw_data_archive: july_archive)
+create_list(:point, 2, user: user, timestamp: Time.new(2024, 7, 15).to_i,
+raw_data: nil, raw_data_archived: true)
+end
-compressed_data = gzip_points_data(points.map { |p| { id: p.id, raw_data: { month: 'july' } } })
-allow(july_archive.file.blob).to receive(:download).and_return(compressed_data)
-points
+let!(:june_archive) do
+compressed_data = gzip_points_data(june_points.map { |p| { id: p.id, raw_data: { month: 'june' } } })
+arc = build(:points_raw_data_archive, user: user, year: 2024, month: 6)
+arc.file.attach(
+io: StringIO.new(compressed_data),
+filename: arc.filename,
+content_type: 'application/gzip'
+)
+arc.save!
+june_points.each { |p| p.update!(raw_data_archive: arc) }
+arc
+end
+let!(:july_archive) do
+compressed_data = gzip_points_data(july_points.map { |p| { id: p.id, raw_data: { month: 'july' } } })
+arc = build(:points_raw_data_archive, user: user, year: 2024, month: 7)
+arc.file.attach(
+io: StringIO.new(compressed_data),
+filename: arc.filename,
+content_type: 'application/gzip'
+)
+arc.save!
+july_points.each { |p| p.update!(raw_data_archive: arc) }
+arc
end
it 'restores all months for user' do
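These specs also lean on a gzip_points_data helper that is not part of this diff. A plausible minimal version, assuming it mirrors the compressor's gzip-wrapped JSONL format (the project's real helper may differ):

require 'json'
require 'stringio'
require 'zlib'

# Hypothetical spec helper: gzips an array of hashes as JSON Lines.
def gzip_points_data(entries)
  io = StringIO.new
  gz = Zlib::GzipWriter.new(io)
  entries.each { |entry| gz.puts(entry.to_json) }
  gz.close
  io.string.force_encoding(Encoding::ASCII_8BIT)
end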