Rework GeoJSON import to support FeatureCollection as a root object and remove points counter for imports

parent 90bfd13f95
commit 3592f46492

13 changed files with 139 additions and 132 deletions
@@ -1 +1 @@
-0.13.1
+0.13.2
@@ -5,6 +5,13 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](http://keepachangelog.com/)
 and this project adheres to [Semantic Versioning](http://semver.org/).
 
+## [0.13.2] - 2024-09-06
+
+### Fixed
+
+- GeoJSON import now correctly imports files with FeatureCollection as a root object
+
 ## [0.13.1] - 2024-09-05
 
 ### Added
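For context, a minimal sketch of the newly supported input: a file whose root object is a FeatureCollection rather than a single Feature. The property names mirror the ones read by Geojson::Params further down in this diff; the concrete values are invented for illustration.

# Hypothetical FeatureCollection-rooted file, as a runnable Ruby snippet.
require 'json'

geojson = JSON.parse(<<~RAW)
  {
    "type": "FeatureCollection",
    "features": [
      {
        "type": "Feature",
        "geometry": { "type": "Point", "coordinates": [13.405, 52.52] },
        "properties": {
          "timestamp": 1725600000,
          "altitude": 34,
          "speed": 1.2,
          "battery_state": "unplugged",
          "battery_level": 0.75,
          "device_id": "phone-1",
          "wifi": "home-wifi",
          "horizontal_accuracy": 5,
          "vertical_accuracy": 3
        }
      }
    ]
  }
RAW

geojson['type']            # => "FeatureCollection"
geojson['features'].size   # => 1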
@@ -7,62 +7,6 @@ class ImportJob < ApplicationJob
     user = User.find(user_id)
     import = user.imports.find(import_id)
 
-    result = parser(import.source).new(import, user_id).call
+    import.process!
-
-    import.update(
-      raw_points: result[:raw_points], doubles: result[:doubles], processed: result[:processed]
-    )
-
-    create_import_finished_notification(import, user)
-
-    schedule_stats_creating(user_id)
-    schedule_visit_suggesting(user_id, import)
-  rescue StandardError => e
-    create_import_failed_notification(import, user, e)
-  end
-
-  private
-
-  def parser(source)
-    # Bad classes naming by the way, they are not parsers, they are point creators
-    case source
-    when 'google_semantic_history' then GoogleMaps::SemanticHistoryParser
-    when 'google_records' then GoogleMaps::RecordsParser
-    when 'google_phone_takeout' then GoogleMaps::PhoneTakeoutParser
-    when 'owntracks' then OwnTracks::ExportParser
-    when 'gpx' then Gpx::TrackParser
-    when 'immich_api' then Immich::ImportParser
-    when 'geojson' then Geojson::ImportParser
-    end
-  end
-
-  def schedule_stats_creating(user_id)
-    StatCreatingJob.perform_later(user_id)
-  end
-
-  def schedule_visit_suggesting(user_id, import)
-    points = import.points.order(:timestamp)
-    start_at = Time.zone.at(points.first.timestamp)
-    end_at = Time.zone.at(points.last.timestamp)
-
-    VisitSuggestingJob.perform_later(user_ids: [user_id], start_at:, end_at:)
-  end
-
-  def create_import_finished_notification(import, user)
-    Notifications::Create.new(
-      user:,
-      kind: :info,
-      title: 'Import finished',
-      content: "Import \"#{import.name}\" successfully finished."
-    ).call
-  end
-
-  def create_import_failed_notification(import, user, error)
-    Notifications::Create.new(
-      user:,
-      kind: :error,
-      title: 'Import failed',
-      content: "Import \"#{import.name}\" failed: #{error.message}, stacktrace: #{error.backtrace.join("\n")}"
-    ).call
-  end
   end
 end
@@ -12,4 +12,8 @@ class Import < ApplicationRecord
     google_semantic_history: 0, owntracks: 1, google_records: 2,
     google_phone_takeout: 3, gpx: 4, immich_api: 5, geojson: 6
   }
+
+  def process!
+    Imports::Create.new(user, self).call
+  end
 end
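A hedged usage sketch of the new hook, e.g. from a Rails console. The user association and the name, source and raw_data attributes are assumed from how Import is used elsewhere in this diff; the file contents are invented.

# Assumption: Import belongs to a user and stores the uploaded file's parsed
# JSON in raw_data, as implied by ImportJob and the parsers in this commit.
import = user.imports.create!(
  name: 'walk.geojson',
  source: :geojson,   # one of the enum keys listed in the hunk above
  raw_data: geojson   # e.g. the FeatureCollection sketched after the changelog hunk
)

import.process!       # delegates to Imports::Create (added below)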
@@ -11,36 +11,62 @@ class Geojson::Params
     case json['type']
     when 'Feature' then process_feature(json)
     when 'FeatureCollection' then process_feature_collection(json)
-    end
+    end.flatten
   end
 
   private
 
   def process_feature(json)
-    json['features'].map do |point|
-      next if point[:geometry].nil? || point.dig(:properties, :timestamp).nil?
-
-      build_point(point)
-    end.compact
+    case json[:geometry][:type]
+    when 'Point'
+      build_point(json)
+    when 'LineString'
+      build_line(json)
+    when 'MultiLineString'
+      build_multi_line(json)
+    end
   end
 
   def process_feature_collection(json)
     json['features'].map { |feature| process_feature(feature) }
   end
 
-  def build_point(point)
+  def build_point(feature)
     {
-      latitude: point[:geometry][:coordinates][1],
-      longitude: point[:geometry][:coordinates][0],
-      battery_status: point[:properties][:battery_state],
-      battery: battery_level(point[:properties][:battery_level]),
+      latitude: feature[:geometry][:coordinates][1],
+      longitude: feature[:geometry][:coordinates][0],
+      battery_status: feature[:properties][:battery_state],
+      battery: battery_level(feature[:properties][:battery_level]),
+      timestamp: timestamp(feature),
+      altitude: altitude(feature),
+      velocity: feature[:properties][:speed],
+      tracker_id: feature[:properties][:device_id],
+      ssid: feature[:properties][:wifi],
+      accuracy: feature[:properties][:horizontal_accuracy],
+      vertical_accuracy: feature[:properties][:vertical_accuracy],
+      raw_data: feature
+    }
+  end
+
+  def build_line(feature)
+    feature[:geometry][:coordinates].map do |point|
+      build_line_point(feature, point)
+    end
+  end
+
+  def build_multi_line(feature)
+    feature[:geometry][:coordinates].map do |line|
+      line.map do |point|
+        build_line_point(feature, point)
+      end
+    end
+  end
+
+  def build_line_point(feature, point)
+    {
+      latitude: point[1],
+      longitude: point[0],
       timestamp: timestamp(point),
-      altitude: altitude(point),
-      velocity: point[:properties][:speed],
-      tracker_id: point[:properties][:device_id],
-      ssid: point[:properties][:wifi],
-      accuracy: point[:properties][:horizontal_accuracy],
-      vertical_accuracy: point[:properties][:vertical_accuracy],
       raw_data: point
     }
   end
@@ -51,13 +77,14 @@ class Geojson::Params
     value.positive? ? value : nil
   end
 
-  def altitude(point)
-    point.dig(:properties, :altitude) || point.dig(:geometry, :coordinates, 2)
+  def altitude(feature)
+    feature.dig(:properties, :altitude) || feature.dig(:geometry, :coordinates, 2)
   end
 
-  def timestamp(point)
-    value = point.dig(:properties, :timestamp) || point.dig(:geometry, :coordinates, 3)
+  def timestamp(feature)
+    return Time.zone.at(feature[3]) if feature.is_a?(Array)
+
+    value = feature.dig(:properties, :timestamp) || feature.dig(:geometry, :coordinates, 3)
     Time.zone.at(value)
   end
 end
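A standalone sketch (not the project's class) of how the reworked flattening behaves. The method body mirrors the hunks above, trimmed to latitude, longitude and timestamp; Time.at stands in for Rails' Time.zone.at, and the sample coordinates are invented.

# Mirrors Geojson::Params#process_feature: dispatch on the geometry type.
def process_feature(feature)
  case feature[:geometry][:type]
  when 'Point'
    { latitude: feature[:geometry][:coordinates][1],
      longitude: feature[:geometry][:coordinates][0],
      timestamp: Time.at(feature[:properties][:timestamp]) }
  when 'LineString'
    # Each coordinate is [lon, lat, alt, ts]; the 4th element is the timestamp,
    # matching the new `feature.is_a?(Array)` branch of #timestamp above.
    feature[:geometry][:coordinates].map do |point|
      { latitude: point[1], longitude: point[0], timestamp: Time.at(point[3]) }
    end
  end
end

collection = {
  type: 'FeatureCollection',
  features: [
    { geometry: { type: 'Point', coordinates: [13.40, 52.50] },
      properties: { timestamp: 1_725_600_000 } },
    { geometry: { type: 'LineString',
                  coordinates: [[13.41, 52.51, 34.0, 1_725_600_060],
                                [13.42, 52.52, 35.0, 1_725_600_120]] },
      properties: {} }
  ]
}

# A FeatureCollection maps each feature, a LineString maps each coordinate,
# and the trailing .flatten collapses the nesting into one array of hashes.
points = collection[:features].map { |feature| process_feature(feature) }.flatten
points.size # => 3 (one from the Point, two from the LineString)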
@@ -11,8 +11,6 @@ class GoogleMaps::PhoneTakeoutParser
   def call
     points_data = parse_json
 
-    points = 0
-
     points_data.compact.each do |point_data|
       next if Point.exists?(
         timestamp: point_data[:timestamp],
@@ -34,14 +32,7 @@ class GoogleMaps::PhoneTakeoutParser
         tracker_id: 'google-maps-phone-timeline-export',
         user_id:
       )
-
-      points += 1
     end
-
-    doubles = points_data.size - points
-    processed = points + doubles
-
-    { raw_points: points_data.size, points:, doubles:, processed: }
   end
 
   private
@@ -58,7 +49,9 @@ class GoogleMaps::PhoneTakeoutParser
     if import.raw_data.is_a?(Array)
       raw_array = parse_raw_array(import.raw_data)
     else
-      semantic_segments = parse_semantic_segments(import.raw_data['semanticSegments']) if import.raw_data['semanticSegments']
+      if import.raw_data['semanticSegments']
+        semantic_segments = parse_semantic_segments(import.raw_data['semanticSegments'])
+      end
       raw_signals = parse_raw_signals(import.raw_data['rawSignals']) if import.raw_data['rawSignals']
     end
@@ -11,8 +11,6 @@ class GoogleMaps::SemanticHistoryParser
   def call
     points_data = parse_json
 
-    points = 0
-
     points_data.each do |point_data|
       next if Point.exists?(
         timestamp: point_data[:timestamp],
@@ -31,14 +29,7 @@ class GoogleMaps::SemanticHistoryParser
         import_id: import.id,
         user_id:
       )
-
-      points += 1
     end
-
-    doubles = points_data.size - points
-    processed = points + doubles
-
-    { raw_points: points_data.size, points:, doubles:, processed: }
   end
 
   private
@@ -13,32 +13,23 @@ class Gpx::TrackParser
     tracks = json['gpx']['trk']
     tracks_arr = tracks.is_a?(Array) ? tracks : [tracks]
 
-    tracks_arr
-      .map { parse_track(_1) }
-      .flatten
-      .reduce { |result, points| result.merge(points) { _2 + _3 } }
+    tracks_arr.map { parse_track(_1) }.flatten
   end
 
   private
 
   def parse_track(track)
     segments = track['trkseg']
-    segments_arr = segments.is_a?(Array) ? segments : [segments]
+    segments_array = segments.is_a?(Array) ? segments : [segments]
 
-    segments_arr.map do |segment|
-      trackpoints = segment['trkpt']
-
-      points = trackpoints.reduce(0) { _1 + create_point(_2) }
-      doubles = trackpoints.size - points
-      processed = points + doubles
-
-      { raw_points: trackpoints.size, points:, doubles:, processed: }
+    segments_array.map do |segment|
+      segment['trkpt'].each { create_point(_1) }
     end
   end
 
   def create_point(point)
-    return 0 if point['lat'].blank? || point['lon'].blank? || point['time'].blank?
-    return 0 if point_exists?(point)
+    return if point['lat'].blank? || point['lon'].blank? || point['time'].blank?
+    return if point_exists?(point)
 
     Point.create(
       latitude: point['lat'].to_d,
@@ -49,8 +40,6 @@ class Gpx::TrackParser
       raw_data: point,
      user_id:
     )
-
-    1
   end
 
   def point_exists?(point)
@@ -11,8 +11,6 @@ class Immich::ImportParser
 
   def call
     json.each { |point| create_point(point) }
-
-    { raw_points: 0, points: 0, doubles: 0, processed: 0 }
   end
 
   def create_point(point)
app/services/imports/create.rb (new file, 66 lines)
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+class Imports::Create
+  attr_reader :user, :import
+
+  def initialize(user, import)
+    @user = user
+    @import = import
+  end
+
+  def call
+    parser(import.source).new(import, user.id).call
+
+    create_import_finished_notification(import, user)
+
+    schedule_stats_creating(user.id)
+    schedule_visit_suggesting(user.id, import)
+  rescue StandardError => e
+    create_import_failed_notification(import, user, e)
+  end
+
+  private
+
+  def parser(source)
+    # Bad classes naming by the way, they are not parsers, they are point creators
+    case source
+    when 'google_semantic_history' then GoogleMaps::SemanticHistoryParser
+    when 'google_records' then GoogleMaps::RecordsParser
+    when 'google_phone_takeout' then GoogleMaps::PhoneTakeoutParser
+    when 'owntracks' then OwnTracks::ExportParser
+    when 'gpx' then Gpx::TrackParser
+    when 'immich_api' then Immich::ImportParser
+    when 'geojson' then Geojson::ImportParser
+    end
+  end
+
+  def schedule_stats_creating(user_id)
+    StatCreatingJob.perform_later(user_id)
+  end
+
+  def schedule_visit_suggesting(user_id, import)
+    points = import.points.order(:timestamp)
+    start_at = Time.zone.at(points.first.timestamp)
+    end_at = Time.zone.at(points.last.timestamp)
+
+    VisitSuggestingJob.perform_later(user_ids: [user_id], start_at:, end_at:)
+  end
+
+  def create_import_finished_notification(import, user)
+    Notifications::Create.new(
+      user:,
+      kind: :info,
+      title: 'Import finished',
+      content: "Import \"#{import.name}\" successfully finished."
+    ).call
+  end
+
+  def create_import_failed_notification(import, user, error)
+    Notifications::Create.new(
+      user:,
+      kind: :error,
+      title: 'Import failed',
+      content: "Import \"#{import.name}\" failed: #{error.message}, stacktrace: #{error.backtrace.join("\n")}"
+    ).call
+  end
+end
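Taken together, the hunks above move all orchestration out of the job. After this commit the call chain, using only names that appear in this diff, is:

ImportJob#perform(import_id, user_id)
  -> Import#process!
    -> Imports::Create.new(user, import).call
         parser(import.source).new(import, user.id).call
         create_import_finished_notification(import, user)
         schedule_stats_creating(user.id)
         schedule_visit_suggesting(user.id, import)

The per-parser { raw_points:, points:, doubles:, processed: } return values and the import.update(...) call that consumed them are dropped, which is the "remove points counter" half of the commit message; the parsers below are trimmed accordingly.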
@@ -12,8 +12,6 @@ class OwnTracks::ExportParser
   def call
     points_data = parse_json
 
-    points = 0
-
     points_data.each do |point_data|
       next if Point.exists?(
         timestamp: point_data[:timestamp],
@@ -28,14 +26,7 @@ class OwnTracks::ExportParser
       end
 
      point.save
-
-      points += 1
     end
-
-    doubles = points_data.size - points
-    processed = points + doubles
-
-    { raw_points: points_data.size, points:, doubles:, processed: }
   end
 
   private
@@ -15,7 +15,6 @@ RSpec.describe Gpx::TrackParser do
     context 'when file has a single segment' do
       it 'creates points' do
         expect { parser }.to change { Point.count }.by(301)
-        expect(parser).to eq({ doubles: 4, points: 301, processed: 305, raw_points: 305 })
       end
     end
 
@@ -24,7 +23,6 @@ RSpec.describe Gpx::TrackParser do
 
       it 'creates points' do
        expect { parser }.to change { Point.count }.by(558)
-        expect(parser).to eq({ doubles: 0, points: 558, processed: 558, raw_points: 558 })
      end
    end
  end
@@ -34,7 +32,6 @@ RSpec.describe Gpx::TrackParser do
 
      it 'creates points' do
        expect { parser }.to change { Point.count }.by(407)
-        expect(parser).to eq({ doubles: 0, points: 407, processed: 407, raw_points: 407 })
      end
    end
  end
@@ -113,7 +113,7 @@ paths:
       - Health
       responses:
         '200':
-          description: areas found
+          description: Healthy
   "/api/v1/overland/batches":
     post:
       summary: Creates a batch of points