0.36.2 (#2007)

* fix: move foreman to global gems to fix startup crash (#1971)
* Update exporting code to stream points data to file in batches to reduce memory usage (#1980)
  * Update changelog
  * Update changelog
* Feature/maplibre frontend (#1953)
  * Add a plan to use MapLibre GL JS for the frontend map rendering, replacing Leaflet
  * Implement phase 1
  * Phases 1-3 + part of 4
  * Fix e2e tests
  * Phase 6
  * Implement fog of war
  * Phase 7
  * Next step: fix specs, phase 7 done
  * Use our own map tiles
  * Extract v2 map logic to separate manager classes
  * Update settings panel on v2 map
  * Update v2 e2e tests structure
  * Reimplement location search in maps v2
  * Update speed routes
  * Implement visits and places creation in v2
  * Fix last failing test
  * Implement visits merging
  * Fix a routes e2e test and simplify the routes layer styling.
  * Extract js to modules from maps_v2_controller.js
  * Implement area creation
  * Fix spec problem
  * Fix some e2e tests
  * Implement live mode in v2 map
  * Update icons and panel
  * Extract some styles
  * Remove unused file
  * Start adding dark theme to popups on MapLibre maps
  * Make popups respect dark theme
  * Move v2 maps to maplibre namespace
  * Update v2 references to maplibre
  * Put place, area and visit info into side panel
  * Update API to use safe settings config method
  * Fix specs
  * Fix method name to config in SafeSettings and update usages accordingly
  * Add missing public files
  * Add handling for real time points
  * Fix remembering enabled/disabled layers of the v2 map
  * Fix lots of e2e tests
  * Add settings to select map version
  * Use maps/v2 as main path for MapLibre maps
  * Update routing
  * Update live mode
  * Update maplibre controller
  * Update changelog
  * Remove some console.log statements
  ---------
  Co-authored-by: Robin Tuszik <mail@robin.gg>
* Remove esbuild scripts from package.json
* Remove sideEffects field from package.json
* Raw data archivation
* Add tests
* Fix tests
* Fix tests
* Update ExceptionReporter
* Add schedule to run raw data archival job monthly
* Change file structure for raw data archival feature
* Update changelog and version for raw data archival feature

---------
Co-authored-by: Robin Tuszik <mail@robin.gg>
94 lines
2.7 KiB
Ruby
# frozen_string_literal: true

require 'rails_helper'

RSpec.describe Points::RawData::ChunkCompressor do
  let(:user) { create(:user) }

  before do
    # Stub broadcasting to avoid ActionCable issues in tests
    allow(PointsChannel).to receive(:broadcast_to)
  end

  let(:points) do
    [
      create(:point, user: user, raw_data: { lon: 13.4, lat: 52.5 }),
      create(:point, user: user, raw_data: { lon: 13.5, lat: 52.6 }),
      create(:point, user: user, raw_data: { lon: 13.6, lat: 52.7 })
    ]
  end
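
  # The compressor is built from an ActiveRecord relation rather than a
  # loaded array, which is what the 'processes points in batches' example
  # below exercises.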
  let(:compressor) { described_class.new(Point.where(id: points.map(&:id))) }

  describe '#compress' do
    it 'returns compressed gzip data' do
      result = compressor.compress

      expect(result).to be_a(String)
      expect(result.encoding.name).to eq('ASCII-8BIT')
    end

    it 'compresses points as JSONL format' do
      compressed = compressor.compress

      # Decompress and verify format
      io = StringIO.new(compressed)
      gz = Zlib::GzipReader.new(io)
      lines = gz.readlines
      gz.close

      expect(lines.count).to eq(3)

      # Each line should be valid JSON
      lines.each_with_index do |line, index|
        data = JSON.parse(line)
        expect(data).to have_key('id')
        expect(data).to have_key('raw_data')
        expect(data['id']).to eq(points[index].id)
      end
    end

    it 'includes point ID and raw_data in each line' do
      compressed = compressor.compress

      io = StringIO.new(compressed)
      gz = Zlib::GzipReader.new(io)
      first_line = gz.readline
      gz.close

      data = JSON.parse(first_line)
      expect(data['id']).to eq(points.first.id)
      expect(data['raw_data']).to eq({ 'lon' => 13.4, 'lat' => 52.5 })
    end

    it 'processes points in batches' do
      # Create many points to test batch processing, with unique timestamps
      many_points = []
      base_time = Time.new(2024, 6, 15).to_i
      2500.times do |i|
        many_points << create(:point, user: user, timestamp: base_time + i, raw_data: { lon: 13.4, lat: 52.5 })
      end
      large_compressor = described_class.new(Point.where(id: many_points.map(&:id)))

      compressed = large_compressor.compress

      io = StringIO.new(compressed)
      gz = Zlib::GzipReader.new(io)
      line_count = 0
      gz.each_line { line_count += 1 }
      gz.close

      expect(line_count).to eq(2500)
    end

    it 'produces smaller compressed output than uncompressed' do
      compressed = compressor.compress

      # Decompress to get the original size
      io = StringIO.new(compressed)
      gz = Zlib::GzipReader.new(io)
      decompressed = gz.read
      gz.close

      # Compressed output should be smaller
      expect(compressed.bytesize).to be < decompressed.bytesize
    end
  end
end
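
For context, the following is a minimal sketch of the interface this spec exercises. It is an illustration only, not the implementation from the dawarich repository: the class name ChunkCompressorSketch, the BATCH_SIZE constant, and the internal use of Zlib::GzipWriter over a StringIO are assumptions. Only the outer contract comes from the spec above: the constructor takes an ActiveRecord relation, and #compress returns binary (ASCII-8BIT) gzip data containing one JSON object per line.

# frozen_string_literal: true

# Hypothetical sketch -- not the real Points::RawData::ChunkCompressor.
# Internals are assumptions inferred from the behaviour the spec asserts.
require 'json'
require 'stringio'
require 'zlib'

class ChunkCompressorSketch
  BATCH_SIZE = 1_000 # assumed; the spec only checks that 2500 points round-trip

  # points_relation: an ActiveRecord relation, e.g. Point.where(id: ids)
  def initialize(points_relation)
    @points_relation = points_relation
  end

  # Streams each point as one JSON line (JSONL) into a gzip writer and
  # returns the compressed bytes as a binary string.
  def compress
    buffer = StringIO.new(''.b) # binary buffer for the gzip bytes
    gzip = Zlib::GzipWriter.new(buffer)

    # ActiveRecord's find_each walks the relation in primary-key order,
    # loading records in batches so memory stays flat for large relations.
    @points_relation.find_each(batch_size: BATCH_SIZE) do |point|
      gzip.puts({ id: point.id, raw_data: point.raw_data }.to_json)
    end

    gzip.close
    buffer.string
  end
end

Reading the archive back mirrors what the spec does: wrap the bytes in a StringIO, open a Zlib::GzipReader on it, and JSON.parse each line.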