Mirror of https://github.com/Freika/dawarich.git (synced 2026-01-14 03:01:39 -05:00)
Minor changes

parent 462f5f361d
commit 4a9a87350c

15 changed files with 355 additions and 207 deletions
@@ -8,6 +8,10 @@ class Api::V1::Overland::BatchesController < ApiController
    Overland::PointsCreator.new(batch_params, current_api_user.id).call

    render json: { result: 'ok' }, status: :created
  rescue StandardError => e
    Sentry.capture_exception(e) if defined?(Sentry)

    render json: { error: 'Batch creation failed' }, status: :internal_server_error
  end

  private
@@ -7,7 +7,11 @@ class Api::V1::Owntracks::PointsController < ApiController
  def create
    OwnTracks::PointCreator.new(point_params, current_api_user.id).call

    render json: {}, status: :ok
    render json: [], status: :ok
  rescue StandardError => e
    Sentry.capture_exception(e) if defined?(Sentry)

    render json: { error: 'Point creation failed' }, status: :internal_server_error
  end

  private
@@ -30,14 +30,27 @@ module Api
          end
        end

        # Support optional pagination (backward compatible - returns all if no page param)
        if params[:page].present?
          per_page = [params[:per_page]&.to_i || 100, 500].min
          @places = @places.page(params[:page]).per(per_page)
        # Support pagination (defaults to page 1 with all results if no page param)
        page = params[:page].presence || 1
        per_page = [params[:per_page]&.to_i || 100, 500].min

        # Apply pagination only if page param is explicitly provided
        if params[:page].present?
          @places = @places.page(page).per(per_page)
        end

        # Always set pagination headers for consistency
        if @places.respond_to?(:current_page)
          # Paginated collection
          response.set_header('X-Current-Page', @places.current_page.to_s)
          response.set_header('X-Total-Pages', @places.total_pages.to_s)
          response.set_header('X-Total-Count', @places.total_count.to_s)
        else
          # Non-paginated collection - treat as single page with all results
          total = @places.count
          response.set_header('X-Current-Page', '1')
          response.set_header('X-Total-Pages', '1')
          response.set_header('X-Total-Count', total.to_s)
        end

        render json: @places.map { |place| serialize_place(place) }
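For context, a minimal sketch of how a client could consume the pagination headers added above. The /api/v1/places path and the api_key query parameter are assumptions drawn from the surrounding API controllers, not confirmed by this diff.

// Hypothetical paging client; the header names come from the controller above.
async function fetchAllPlaces(baseUrl, apiKey, perPage = 100) {
  const places = []
  let page = 1
  let totalPages = 1

  while (page <= totalPages) {
    const url = `${baseUrl}/api/v1/places?api_key=${apiKey}&page=${page}&per_page=${perPage}`
    const response = await fetch(url)
    if (!response.ok) throw new Error(`Places request failed: ${response.status}`)

    // X-Total-Pages is set for both paginated and non-paginated responses, so the loop terminates either way.
    totalPages = parseInt(response.headers.get('X-Total-Pages') || '1', 10)
    places.push(...await response.json())
    page += 1
  }

  return places
}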
@@ -53,9 +53,11 @@ class Api::V1::PointsController < ApiController
  def update
    point = current_api_user.points.find(params[:id])

    point.update(lonlat: "POINT(#{point_params[:longitude]} #{point_params[:latitude]})")

    render json: point_serializer.new(point).call
    if point.update(lonlat: "POINT(#{point_params[:longitude]} #{point_params[:latitude]})")
      render json: point_serializer.new(point.reload).call
    else
      render json: { error: point.errors.full_messages.join(', ') }, status: :unprocessable_entity
    end
  end

  def destroy
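One detail worth calling out from the update action above: the WKT string puts longitude before latitude. A tiny hypothetical helper, only to make the ordering explicit:

// Hypothetical helper mirroring the controller's lonlat construction.
// WKT points are written as POINT(longitude latitude), i.e. x before y.
const toWktPoint = ({ longitude, latitude }) => `POINT(${longitude} ${latitude})`

console.log(toWktPoint({ longitude: 13.405, latitude: 52.52 })) // "POINT(13.405 52.52)"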
@@ -80,14 +80,12 @@ class StatsController < ApplicationController
  end

  def build_stats
    # Select only needed columns - avoid loading large JSONB fields
    # daily_distance and h3_hex_ids are never needed on index page
    columns = [:id, :year, :month, :distance, :updated_at, :user_id]
    columns = %i[id year month distance updated_at user_id]
    columns << :toponyms if DawarichSettings.reverse_geocoding_enabled?

    current_user.stats
                .select(columns)
                .order(year: :desc, updated_at: :desc)
                .group_by(&:year)
  end
end
@@ -32,6 +32,7 @@ export class LayerManager {

    // Layer order matters - layers added first render below layers added later
    // Order: scratch (bottom) -> heatmap -> areas -> tracks -> routes (visual) -> visits -> places -> photos -> family -> points -> routes-hit (interaction) -> recent-point (top) -> fog (canvas overlay)
    // Note: routes-hit is above points visually but points dragging takes precedence via event ordering

    await this._addScratchLayer(pointsGeoJSON)
    this._addHeatmapLayer(pointsGeoJSON)
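As the comments above describe, MapLibre draws layers in the order they are added unless a beforeId is supplied. A minimal sketch of that behavior, assuming a loaded maplibregl.Map instance and placeholder layer/source definitions rather than the app's real ones:

// Layers render in add order: later layers draw on top of earlier ones.
map.addLayer({ id: 'heatmap', type: 'heatmap', source: 'points-source' })
map.addLayer({ id: 'points', type: 'circle', source: 'points-source' }) // draws above 'heatmap'

// A beforeId inserts the new layer BELOW the named layer, which is how this commit
// keeps the invisible routes-hit layer underneath 'points'.
map.addLayer({ id: 'routes-hit', type: 'line', source: 'routes-source', paint: { 'line-opacity': 0 } }, 'points')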
@@ -50,7 +51,7 @@ export class LayerManager {

    this._addFamilyLayer()
    this._addPointsLayer(pointsGeoJSON)
    this._addRoutesHitLayer() // Add hit target layer after points for better interactivity
    this._addRoutesHitLayer() // Add hit target layer after points, will be on top visually
    this._addRecentPointLayer()
    this._addFogLayer(pointsGeoJSON)
@@ -228,8 +229,8 @@ export class LayerManager {
  }

  _addRoutesHitLayer() {
    // Add invisible hit target layer for routes after points layer
    // This ensures route interactions work even when points are on top
    // Add invisible hit target layer for routes
    // Use beforeId to place it BELOW points layer so points remain draggable on top
    if (!this.map.getLayer('routes-hit') && this.map.getSource('routes-source')) {
      this.map.addLayer({
        id: 'routes-hit',
@@ -244,7 +245,7 @@ export class LayerManager {
          'line-width': 20, // Much wider for easier clicking/hovering
          'line-opacity': 0
        }
      })
      }, 'points') // Add before 'points' layer so points are on top for interaction
      // Match visibility with routes layer
      const routesLayer = this.layers.routesLayer
      if (routesLayer && !routesLayer.visible) {
@@ -1,8 +1,10 @@
import { BaseLayer } from './base_layer'
import { RouteSegmenter } from '../utils/route_segmenter'

/**
 * Routes layer showing travel paths
 * Connects points chronologically with solid color
 * Uses RouteSegmenter for route processing logic
 */
export class RoutesLayer extends BaseLayer {
  constructor(map, options = {}) {
@@ -161,6 +163,8 @@ export class RoutesLayer extends BaseLayer {

  /**
   * Calculate haversine distance between two points in kilometers
   * Delegates to RouteSegmenter utility
   * @deprecated Use RouteSegmenter.haversineDistance directly
   * @param {number} lat1 - First point latitude
   * @param {number} lon1 - First point longitude
   * @param {number} lat2 - Second point latitude
@@ -168,130 +172,17 @@ export class RoutesLayer extends BaseLayer {
   * @returns {number} Distance in kilometers
   */
  static haversineDistance(lat1, lon1, lat2, lon2) {
    const R = 6371 // Earth's radius in kilometers
    const dLat = (lat2 - lat1) * Math.PI / 180
    const dLon = (lon2 - lon1) * Math.PI / 180
    const a = Math.sin(dLat / 2) * Math.sin(dLat / 2) +
      Math.cos(lat1 * Math.PI / 180) * Math.cos(lat2 * Math.PI / 180) *
      Math.sin(dLon / 2) * Math.sin(dLon / 2)
    const c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a))
    return R * c
    return RouteSegmenter.haversineDistance(lat1, lon1, lat2, lon2)
  }

  /**
   * Convert points to route LineStrings with splitting
   * Matches V1's route splitting logic for consistency
   * Also handles International Date Line (IDL) crossings
   * Delegates to RouteSegmenter utility for processing
   * @param {Array} points - Points from API
   * @param {Object} options - Splitting options
   * @returns {Object} GeoJSON FeatureCollection
   */
  static pointsToRoutes(points, options = {}) {
    if (points.length < 2) {
      return { type: 'FeatureCollection', features: [] }
    }

    // Default thresholds (matching V1 defaults from polylines.js)
    // Note: V1 has a unit mismatch bug where it compares km to meters directly
    // We replicate this behavior for consistency with V1
    const distanceThresholdKm = options.distanceThresholdMeters || 500
    const timeThresholdMinutes = options.timeThresholdMinutes || 60

    // Sort by timestamp
    const sorted = points.slice().sort((a, b) => a.timestamp - b.timestamp)

    // Split into segments based on distance and time gaps (like V1)
    const segments = []
    let currentSegment = [sorted[0]]

    for (let i = 1; i < sorted.length; i++) {
      const prev = sorted[i - 1]
      const curr = sorted[i]

      // Calculate distance between consecutive points
      const distance = this.haversineDistance(
        prev.latitude, prev.longitude,
        curr.latitude, curr.longitude
      )

      // Calculate time difference in minutes
      const timeDiff = (curr.timestamp - prev.timestamp) / 60

      // Split if any threshold is exceeded
      if (distance > distanceThresholdKm || timeDiff > timeThresholdMinutes) {
        if (currentSegment.length > 1) {
          segments.push(currentSegment)
        }
        currentSegment = [curr]
      } else {
        currentSegment.push(curr)
      }
    }

    if (currentSegment.length > 1) {
      segments.push(currentSegment)
    }

    // Convert segments to LineStrings
    const features = segments.map(segment => {
      // Unwrap coordinates to handle International Date Line (IDL) crossings
      // This ensures routes draw the short way across IDL instead of wrapping around globe
      const coordinates = []
      let offset = 0 // Cumulative longitude offset for unwrapping

      for (let i = 0; i < segment.length; i++) {
        const point = segment[i]
        let lon = point.longitude + offset

        // Check for IDL crossing between consecutive points
        if (i > 0) {
          const prevLon = coordinates[i - 1][0]
          const lonDiff = lon - prevLon

          // If longitude jumps more than 180°, we crossed the IDL
          if (lonDiff > 180) {
            // Crossed from east to west (e.g., 170° to -170°)
            // Subtract 360° to make it continuous (e.g., 170° to -170° becomes 170° to -170°-360° = -530°)
            offset -= 360
            lon -= 360
          } else if (lonDiff < -180) {
            // Crossed from west to east (e.g., -170° to 170°)
            // Add 360° to make it continuous (e.g., -170° to 170° becomes -170° to 170°+360° = 530°)
            offset += 360
            lon += 360
          }
        }

        coordinates.push([lon, point.latitude])
      }

      // Calculate total distance for the segment
      let totalDistance = 0
      for (let i = 0; i < segment.length - 1; i++) {
        totalDistance += this.haversineDistance(
          segment[i].latitude, segment[i].longitude,
          segment[i + 1].latitude, segment[i + 1].longitude
        )
      }

      return {
        type: 'Feature',
        geometry: {
          type: 'LineString',
          coordinates
        },
        properties: {
          pointCount: segment.length,
          startTime: segment[0].timestamp,
          endTime: segment[segment.length - 1].timestamp,
          distance: totalDistance
        }
      }
    })

    return {
      type: 'FeatureCollection',
      features
    }
    return RouteSegmenter.pointsToRoutes(points, options)
  }
}
app/javascript/maps_maplibre/utils/route_segmenter.js (new file, 187 lines)
@@ -0,0 +1,187 @@
/**
 * RouteSegmenter - Utility for converting points into route segments
 * Handles route splitting based on time/distance thresholds and IDL crossings
 */
export class RouteSegmenter {
  /**
   * Calculate haversine distance between two points in kilometers
   * @param {number} lat1 - First point latitude
   * @param {number} lon1 - First point longitude
   * @param {number} lat2 - Second point latitude
   * @param {number} lon2 - Second point longitude
   * @returns {number} Distance in kilometers
   */
  static haversineDistance(lat1, lon1, lat2, lon2) {
    const R = 6371 // Earth's radius in kilometers
    const dLat = (lat2 - lat1) * Math.PI / 180
    const dLon = (lon2 - lon1) * Math.PI / 180
    const a = Math.sin(dLat / 2) * Math.sin(dLat / 2) +
      Math.cos(lat1 * Math.PI / 180) * Math.cos(lat2 * Math.PI / 180) *
      Math.sin(dLon / 2) * Math.sin(dLon / 2)
    const c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a))
    return R * c
  }

  /**
   * Unwrap coordinates to handle International Date Line (IDL) crossings
   * This ensures routes draw the short way across IDL instead of wrapping around globe
   * @param {Array} segment - Array of points with longitude and latitude properties
   * @returns {Array} Array of [lon, lat] coordinate pairs with IDL unwrapping applied
   */
  static unwrapCoordinates(segment) {
    const coordinates = []
    let offset = 0 // Cumulative longitude offset for unwrapping

    for (let i = 0; i < segment.length; i++) {
      const point = segment[i]
      let lon = point.longitude + offset

      // Check for IDL crossing between consecutive points
      if (i > 0) {
        const prevLon = coordinates[i - 1][0]
        const lonDiff = lon - prevLon

        // If longitude jumps more than 180°, we crossed the IDL
        if (lonDiff > 180) {
          // Crossed from east to west (e.g., 170° to -170°)
          // Subtract 360° to make it continuous
          offset -= 360
          lon -= 360
        } else if (lonDiff < -180) {
          // Crossed from west to east (e.g., -170° to 170°)
          // Add 360° to make it continuous
          offset += 360
          lon += 360
        }
      }

      coordinates.push([lon, point.latitude])
    }

    return coordinates
  }

  /**
   * Calculate total distance for a segment
   * @param {Array} segment - Array of points
   * @returns {number} Total distance in kilometers
   */
  static calculateSegmentDistance(segment) {
    let totalDistance = 0
    for (let i = 0; i < segment.length - 1; i++) {
      totalDistance += this.haversineDistance(
        segment[i].latitude, segment[i].longitude,
        segment[i + 1].latitude, segment[i + 1].longitude
      )
    }
    return totalDistance
  }

  /**
   * Split points into segments based on distance and time gaps
   * @param {Array} points - Sorted array of points
   * @param {Object} options - Splitting options
   * @param {number} options.distanceThresholdKm - Distance threshold in km
   * @param {number} options.timeThresholdMinutes - Time threshold in minutes
   * @returns {Array} Array of segments
   */
  static splitIntoSegments(points, options) {
    const { distanceThresholdKm, timeThresholdMinutes } = options

    const segments = []
    let currentSegment = [points[0]]

    for (let i = 1; i < points.length; i++) {
      const prev = points[i - 1]
      const curr = points[i]

      // Calculate distance between consecutive points
      const distance = this.haversineDistance(
        prev.latitude, prev.longitude,
        curr.latitude, curr.longitude
      )

      // Calculate time difference in minutes
      const timeDiff = (curr.timestamp - prev.timestamp) / 60

      // Split if any threshold is exceeded
      if (distance > distanceThresholdKm || timeDiff > timeThresholdMinutes) {
        if (currentSegment.length > 1) {
          segments.push(currentSegment)
        }
        currentSegment = [curr]
      } else {
        currentSegment.push(curr)
      }
    }

    if (currentSegment.length > 1) {
      segments.push(currentSegment)
    }

    return segments
  }

  /**
   * Convert a segment to a GeoJSON LineString feature
   * @param {Array} segment - Array of points
   * @returns {Object} GeoJSON Feature
   */
  static segmentToFeature(segment) {
    const coordinates = this.unwrapCoordinates(segment)
    const totalDistance = this.calculateSegmentDistance(segment)

    return {
      type: 'Feature',
      geometry: {
        type: 'LineString',
        coordinates
      },
      properties: {
        pointCount: segment.length,
        startTime: segment[0].timestamp,
        endTime: segment[segment.length - 1].timestamp,
        distance: totalDistance
      }
    }
  }

  /**
   * Convert points to route LineStrings with splitting
   * Matches V1's route splitting logic for consistency
   * Also handles International Date Line (IDL) crossings
   * @param {Array} points - Points from API
   * @param {Object} options - Splitting options
   * @param {number} options.distanceThresholdMeters - Distance threshold in meters (note: unit mismatch preserved for V1 compat)
   * @param {number} options.timeThresholdMinutes - Time threshold in minutes
   * @returns {Object} GeoJSON FeatureCollection
   */
  static pointsToRoutes(points, options = {}) {
    if (points.length < 2) {
      return { type: 'FeatureCollection', features: [] }
    }

    // Default thresholds (matching V1 defaults from polylines.js)
    // Note: V1 has a unit mismatch bug where it compares km to meters directly
    // We replicate this behavior for consistency with V1
    const distanceThresholdKm = options.distanceThresholdMeters || 500
    const timeThresholdMinutes = options.timeThresholdMinutes || 60

    // Sort by timestamp
    const sorted = points.slice().sort((a, b) => a.timestamp - b.timestamp)

    // Split into segments based on distance and time gaps
    const segments = this.splitIntoSegments(sorted, {
      distanceThresholdKm,
      timeThresholdMinutes
    })

    // Convert segments to LineStrings
    const features = segments.map(segment => this.segmentToFeature(segment))

    return {
      type: 'FeatureCollection',
      features
    }
  }
}
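A short usage sketch for the new utility, assuming points shaped the way the map code expects (latitude, longitude, timestamp in seconds), a loaded maplibregl.Map instance, and the 'routes-source' id referenced by LayerManager:

import { RouteSegmenter } from './route_segmenter'

const points = [
  { latitude: 52.52, longitude: 13.405, timestamp: 1700000000 },
  { latitude: 52.53, longitude: 13.410, timestamp: 1700000600 },
  { latitude: 48.85, longitude: 2.350, timestamp: 1700090000 } // large jump, so a new segment starts here
]

// Option names mirror the defaults documented above (V1-compatible unit quirk included).
const routes = RouteSegmenter.pointsToRoutes(points, {
  distanceThresholdMeters: 500,
  timeThresholdMinutes: 60
})

// Push the resulting FeatureCollection into the map's routes source.
map.getSource('routes-source').setData(routes)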
@@ -12,7 +12,6 @@ module Points
      gz = Zlib::GzipWriter.new(io)
      written_count = 0

      # Stream points to avoid memory issues with large months
      @points.select(:id, :raw_data).find_each(batch_size: 1000) do |point|
        # Write as JSONL (one JSON object per line)
        gz.puts({ id: point.id, raw_data: point.raw_data }.to_json)
@@ -22,7 +21,6 @@ module Points
      gz.close
      compressed_data = io.string.force_encoding(Encoding::ASCII_8BIT)

      # Return both compressed data and count for validation
      { data: compressed_data, count: written_count }
    end
  end
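The writer above produces gzip-compressed JSONL: one { id, raw_data } object per line. For illustration only, a hedged Node sketch of reading that layout back; the application itself restores archives through the Ruby services shown below, not through this code.

import { gunzipSync } from 'node:zlib'
import { readFileSync } from 'node:fs'

// Rebuild an id => raw_data map from a .jsonl.gz archive,
// mirroring what the Ruby verifier refers to as archived_data.
function readArchive(path) {
  const archivedData = new Map()
  for (const line of gunzipSync(readFileSync(path)).toString('utf8').split('\n')) {
    if (!line.trim()) continue
    const { id, raw_data } = JSON.parse(line)
    archivedData.set(id, raw_data)
  }
  return archivedData
}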
@@ -23,7 +23,7 @@ module Points
    def clear_specific_archive(archive_id)
      archive = Points::RawDataArchive.find(archive_id)

      unless archive.verified_at.present?
      if archive.verified_at.blank?
        Rails.logger.warn("Archive #{archive_id} not verified, skipping clear")
        return { cleared: 0, skipped: 0 }
      end
@@ -33,7 +33,7 @@ module Points

    def clear_month(user_id, year, month)
      archives = Points::RawDataArchive.for_month(user_id, year, month)
                                       .where.not(verified_at: nil)

      Rails.logger.info("Clearing #{archives.count} verified archives for #{year}-#{format('%02d', month)}...")
@@ -75,13 +75,11 @@ module Points
        @stats[:cleared] += cleared_count
        Rails.logger.info("✓ Cleared #{cleared_count} points for archive #{archive.id}")

        # Report successful clear operation
        Metrics::Archives::Operation.new(
          operation: 'clear',
          status: 'success'
        ).call

        # Report points removed (cleared from database)
        Metrics::Archives::PointsArchived.new(
          count: cleared_count,
          operation: 'removed'
@@ -90,7 +88,6 @@ module Points
        ExceptionReporter.call(e, "Failed to clear points for archive #{archive.id}")
        Rails.logger.error("✗ Failed to clear archive #{archive.id}: #{e.message}")

        # Report failed clear operation
        Metrics::Archives::Operation.new(
          operation: 'clear',
          status: 'failure'
@@ -18,19 +18,16 @@ module Points

      Rails.logger.info("✓ Restored #{total_points} points")

      # Report successful restore operation
      Metrics::Archives::Operation.new(
        operation: 'restore',
        status: 'success'
      ).call

      # Report points restored (removed from archived state)
      Metrics::Archives::PointsArchived.new(
        count: total_points,
        operation: 'removed'
      ).call
    rescue StandardError => e
      # Report failed restore operation
      Metrics::Archives::Operation.new(
        operation: 'restore',
        status: 'failure'
@@ -109,10 +106,8 @@ module Points
    end

    def download_and_decompress(archive)
      # Download via ActiveStorage
      compressed_content = archive.file.blob.download

      # Decompress
      io = StringIO.new(compressed_content)
      gz = Zlib::GzipReader.new(io)
      content = gz.read
@@ -25,7 +25,7 @@ module Points

    def verify_month(user_id, year, month)
      archives = Points::RawDataArchive.for_month(user_id, year, month)
                                       .where(verified_at: nil)

      Rails.logger.info("Verifying #{archives.count} archives for #{year}-#{format('%02d', month)}...")
@@ -49,13 +49,11 @@ module Points
        @stats[:verified] += 1
        Rails.logger.info("✓ Archive #{archive.id} verified successfully")

        # Report successful verification operation
        Metrics::Archives::Operation.new(
          operation: 'verify',
          status: 'success'
        ).call

        # Report verification duration
        report_verification_metric(start_time, 'success')
      else
        @stats[:failed] += 1
@@ -65,13 +63,11 @@ module Points
          "Archive verification failed for archive #{archive.id}"
        )

        # Report failed verification operation
        Metrics::Archives::Operation.new(
          operation: 'verify',
          status: 'failure'
        ).call

        # Report verification duration with check name
        check_name = extract_check_name_from_error(verification_result[:error])
        report_verification_metric(start_time, 'failure', check_name)
      end
@@ -80,43 +76,30 @@ module Points
      ExceptionReporter.call(e, "Failed to verify archive #{archive.id}")
      Rails.logger.error("✗ Archive #{archive.id} verification error: #{e.message}")

      # Report failed verification operation
      Metrics::Archives::Operation.new(
        operation: 'verify',
        status: 'failure'
      ).call

      # Report verification duration
      report_verification_metric(start_time, 'failure', 'exception')
    end

    def perform_verification(archive)
      # 1. Verify file exists and is attached
      unless archive.file.attached?
        return { success: false, error: 'File not attached' }
      end
      return { success: false, error: 'File not attached' } unless archive.file.attached?

      # 2. Verify file can be downloaded
      begin
        compressed_content = archive.file.blob.download
      rescue StandardError => e
        return { success: false, error: "File download failed: #{e.message}" }
      end

      # 3. Verify file size is reasonable
      if compressed_content.bytesize.zero?
        return { success: false, error: 'File is empty' }
      end
      return { success: false, error: 'File is empty' } if compressed_content.bytesize.zero?

      # 4. Verify MD5 checksum (if blob has checksum)
      if archive.file.blob.checksum.present?
        calculated_checksum = Digest::MD5.base64digest(compressed_content)
        if calculated_checksum != archive.file.blob.checksum
          return { success: false, error: 'MD5 checksum mismatch' }
        end
        return { success: false, error: 'MD5 checksum mismatch' } if calculated_checksum != archive.file.blob.checksum
      end

      # 5. Verify file can be decompressed and is valid JSONL, extract data
      begin
        archived_data = decompress_and_extract_data(compressed_content)
      rescue StandardError => e
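Step 4 above compares Digest::MD5.base64digest of the downloaded bytes with the Active Storage blob checksum, which is a base64-encoded MD5 digest. A hedged Node equivalent of that comparison, for illustration only:

import { createHash } from 'node:crypto'

// Same base64 MD5 digest the Ruby verifier computes over the file body.
function md5Base64(buffer) {
  return createHash('md5').update(buffer).digest('base64')
}

// blobChecksum stands in for archive.file.blob.checksum (placeholder value, not from the diff).
function checksumMatches(fileBuffer, blobChecksum) {
  return md5Base64(fileBuffer) === blobChecksum
}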
@@ -125,7 +108,6 @@ module Points

      point_ids = archived_data.keys

      # 6. Verify point count matches
      if point_ids.count != archive.point_count
        return {
          success: false,
@@ -133,13 +115,11 @@ module Points
        }
      end

      # 7. Verify point IDs checksum matches
      calculated_checksum = calculate_checksum(point_ids)
      if calculated_checksum != archive.point_ids_checksum
        return { success: false, error: 'Point IDs checksum mismatch' }
      end

      # 8. Check which points still exist in database (informational only)
      existing_count = Point.where(id: point_ids).count
      if existing_count != point_ids.count
        Rails.logger.info(
@@ -148,7 +128,6 @@ module Points
        )
      end

      # 9. Verify archived raw_data matches current database raw_data (only for existing points)
      if existing_count.positive?
        verification_result = verify_raw_data_matches(archived_data)
        return verification_result unless verification_result[:success]
@@ -178,18 +157,17 @@ module Points
    def verify_raw_data_matches(archived_data)
      # For small archives, verify all points. For large archives, sample up to 100 points.
      # Always verify all if 100 or fewer points for maximum accuracy
      if archived_data.size <= 100
        point_ids_to_check = archived_data.keys
      else
        point_ids_to_check = archived_data.keys.sample(100)
      end
      point_ids_to_check = if archived_data.size <= 100
                             archived_data.keys
                           else
                             archived_data.keys.sample(100)
                           end

      # Filter to only check points that still exist in the database
      existing_point_ids = Point.where(id: point_ids_to_check).pluck(:id)

      if existing_point_ids.empty?
        # No points remain to verify, but that's OK
        Rails.logger.info("No points remaining to verify raw_data matches")
        Rails.logger.info('No points remaining to verify raw_data matches')
        return { success: true }
      end
@@ -244,7 +222,7 @@ module Points
        'empty_file'
      when /MD5 checksum mismatch/i
        'md5_checksum_mismatch'
      when /Decompression\/parsing failed/i
      when %r{Decompression/parsing failed}i
        'decompression_failed'
      when /Point count mismatch/i
        'count_mismatch'
@@ -65,7 +65,6 @@ class Stats::CalculateMonth
  end

  def toponyms
    # Reuse already-loaded points instead of making a duplicate query
    CountriesAndCities.new(points).call
  end
@@ -275,3 +275,96 @@ export async function getRoutesSourceData(page) {
    };
  });
}

/**
 * Wait for settings panel to be visible
 * @param {Page} page - Playwright page object
 * @param {number} timeout - Timeout in milliseconds (default: 5000)
 */
export async function waitForSettingsPanel(page, timeout = 5000) {
  await page.waitForSelector('[data-maps--maplibre-target="settingsPanel"]', {
    state: 'visible',
    timeout
  });
}

/**
 * Wait for a specific tab to be active in settings panel
 * @param {Page} page - Playwright page object
 * @param {string} tabName - Tab name (e.g., 'layers', 'settings')
 * @param {number} timeout - Timeout in milliseconds (default: 5000)
 */
export async function waitForActiveTab(page, tabName, timeout = 5000) {
  await page.waitForFunction(
    (name) => {
      const tab = document.querySelector(`button[data-tab="${name}"]`);
      return tab?.getAttribute('aria-selected') === 'true';
    },
    tabName,
    { timeout }
  );
}

/**
 * Open settings panel and switch to a specific tab
 * @param {Page} page - Playwright page object
 * @param {string} tabName - Tab name (e.g., 'layers', 'settings')
 */
export async function openSettingsTab(page, tabName) {
  // Open settings panel
  const settingsButton = page.locator('[data-action="click->maps--maplibre#toggleSettings"]').first();
  await settingsButton.click();
  await waitForSettingsPanel(page);

  // Click the desired tab
  const tabButton = page.locator(`button[data-tab="${tabName}"]`);
  await tabButton.click();
  await waitForActiveTab(page, tabName);
}

/**
 * Wait for a layer to exist on the map
 * @param {Page} page - Playwright page object
 * @param {string} layerId - Layer ID to wait for
 * @param {number} timeout - Timeout in milliseconds (default: 10000)
 */
export async function waitForLayer(page, layerId, timeout = 10000) {
  await page.waitForFunction(
    (id) => {
      const element = document.querySelector('[data-controller*="maps--maplibre"]');
      if (!element) return false;
      const app = window.Stimulus || window.Application;
      if (!app) return false;
      const controller = app.getControllerForElementAndIdentifier(element, 'maps--maplibre');
      return controller?.map?.getLayer(id) !== undefined;
    },
    layerId,
    { timeout }
  );
}

/**
 * Wait for layer visibility to change
 * @param {Page} page - Playwright page object
 * @param {string} layerId - Layer ID
 * @param {boolean} expectedVisibility - Expected visibility state (true for visible, false for hidden)
 * @param {number} timeout - Timeout in milliseconds (default: 5000)
 */
export async function waitForLayerVisibility(page, layerId, expectedVisibility, timeout = 5000) {
  await page.waitForFunction(
    ({ id, visible }) => {
      const element = document.querySelector('[data-controller*="maps--maplibre"]');
      if (!element) return false;
      const app = window.Stimulus || window.Application;
      if (!app) return false;
      const controller = app.getControllerForElementAndIdentifier(element, 'maps--maplibre');
      if (!controller?.map) return false;

      const visibility = controller.map.getLayoutProperty(id, 'visibility');
      const isVisible = visibility === 'visible' || visibility === undefined;
      return isVisible === visible;
    },
    { id: layerId, visible: expectedVisibility },
    { timeout }
  );
}
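A brief usage sketch for the new helpers inside a Playwright spec. The helper signatures come from the code above; the import path, the /map route, and the assumption that the points layer starts visible are placeholders:

import { test, expect } from '@playwright/test'
// Adjust the import path to wherever these helpers live in the test suite.
import { openSettingsTab, waitForLayer, waitForLayerVisibility } from './helpers'

test('toggling the points checkbox hides the points layer', async ({ page }) => {
  await page.goto('/map')
  await waitForLayer(page, 'points')

  // Open the layers tab and flip the points checkbox (assumed to start checked).
  await openSettingsTab(page, 'layers')
  const pointsCheckbox = page.locator('[data-maps--maplibre-target="pointsToggle"]')
  await pointsCheckbox.click()

  // waitForLayerVisibility polls getLayoutProperty until the layer reports the expected state.
  await waitForLayerVisibility(page, 'points', false)
  expect(await pointsCheckbox.isChecked()).toBe(false)
})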
@@ -231,19 +231,13 @@ test.describe('Points Layer', () => {
        routesSource?._data?.features?.length > 0
    }, { timeout: 15000 })

    // Ensure points layer is visible
    await page.evaluate(() => {
      const element = document.querySelector('[data-controller*="maps--maplibre"]')
      const app = window.Stimulus || window.Application
      const controller = app.getControllerForElementAndIdentifier(element, 'maps--maplibre')
      const pointsLayer = controller?.layerManager?.layers?.pointsLayer
      if (pointsLayer) {
        const visibility = controller.map.getLayoutProperty('points', 'visibility')
        if (visibility === 'none') {
          pointsLayer.show()
        }
      }
    })
    // Ensure points layer is visible by clicking the checkbox
    const pointsCheckbox = page.locator('[data-maps--maplibre-target="pointsToggle"]')
    const isChecked = await pointsCheckbox.isChecked()
    if (!isChecked) {
      await pointsCheckbox.click()
      await page.waitForTimeout(500)
    }

    await page.waitForTimeout(2000)
@@ -363,19 +357,13 @@ test.describe('Points Layer', () => {
      return source?._data?.features?.length > 0
    }, { timeout: 15000 })

    // Ensure points layer is visible
    await page.evaluate(() => {
      const element = document.querySelector('[data-controller*="maps--maplibre"]')
      const app = window.Stimulus || window.Application
      const controller = app.getControllerForElementAndIdentifier(element, 'maps--maplibre')
      const pointsLayer = controller?.layerManager?.layers?.pointsLayer
      if (pointsLayer) {
        const visibility = controller.map.getLayoutProperty('points', 'visibility')
        if (visibility === 'none') {
          pointsLayer.show()
        }
      }
    })
    // Ensure points layer is visible by clicking the checkbox
    const pointsCheckbox = page.locator('[data-maps--maplibre-target="pointsToggle"]')
    const isChecked = await pointsCheckbox.isChecked()
    if (!isChecked) {
      await pointsCheckbox.click()
      await page.waitForTimeout(500)
    }

    await page.waitForTimeout(2000)