Implement Google Maps records parser

Eugene Burmakin 2024-05-18 15:00:44 +02:00
parent 05df65a6f4
commit 494eb2c0ec
7 changed files with 82 additions and 9 deletions

@@ -1,13 +1,14 @@
# frozen_string_literal: true
class ImportsController < ApplicationController
  before_action :authenticate_user!
-  before_action :set_import, only: %i[ show destroy ]
+  before_action :set_import, only: %i[show destroy]
  def index
    @imports = current_user.imports
  end
-  def show
-  end
+  def show; end
  def new
    @import = Import.new
@@ -39,7 +40,7 @@ class ImportsController < ApplicationController
  def destroy
    @import.destroy!
-    redirect_to imports_url, notice: "Import was successfully destroyed.", status: :see_other
+    redirect_to imports_url, notice: 'Import was successfully destroyed.', status: :see_other
  end
  private

@@ -20,7 +20,8 @@ class ImportJob < ApplicationJob
  def parser(source)
    case source
-    when 'google' then GoogleMaps::TimelineParser
+    when 'google_semantic_history' then GoogleMaps::SemanticHistoryParser
+    when 'google_records' then GoogleMaps::RecordsParser
    when 'owntracks' then OwnTracks::ExportParser
    end
  end
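The perform method is outside this hunk; the following is only a hedged sketch of how the mapping above is presumably used inside the job (argument names are illustrative, not taken from the diff):

  # Illustrative sketch, not the actual ImportJob#perform from the repository.
  def perform(import_id)
    import = Import.find(import_id)
    # import.source returns the enum key ("google_records", "owntracks", ...),
    # which the case statement above maps to a parser class; each parser
    # exposes the same interface: .new(import).call
    parser(import.source).new(import).call
  end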

@@ -6,5 +6,5 @@ class Import < ApplicationRecord
  include ImportUploader::Attachment(:raw)
-  enum source: { google: 0, owntracks: 1 }
+  enum source: { google_semantic_history: 0, owntracks: 1, google_records: 2 }
end
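For context (not part of the commit), the renamed enum keys change the helpers Rails generates. A quick console sketch under the new mapping:

  # Hypothetical console session; values follow from the enum declaration above.
  Import.sources
  # => { "google_semantic_history" => 0, "owntracks" => 1, "google_records" => 2 }

  import = Import.new(source: :google_records)
  import.google_records?  # => true
  import.source           # => "google_records" (stored as integer 2)

  # Rows that stored 0 under the old :google key now read back as
  # "google_semantic_history", since the integer for 0 is unchanged.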

@@ -0,0 +1,49 @@
# frozen_string_literal: true

class GoogleMaps::RecordsParser
  attr_reader :import

  def initialize(import)
    @import = import
  end

  def call
    points_data = parse_json
    points = 0

    points_data.each do |point_data|
      next if Point.exists?(timestamp: point_data[:timestamp])

      Point.create(
        latitude: point_data[:latitude],
        longitude: point_data[:longitude],
        timestamp: point_data[:timestamp],
        raw_data: point_data[:raw_data],
        topic: 'Google Maps Timeline Export',
        tracker_id: 'google-maps-timeline-export',
        import_id: import.id
      )

      points += 1
    end

    doubles = points_data.size - points
    processed = points + doubles

    { raw_points: points_data.size, points:, doubles:, processed: }
  end

  private

  def parse_json
    import.raw_data['locations'].map do |record|
      {
        latitude: record['latitudeE7'].to_f / 10**7,
        longitude: record['longitudeE7'].to_f / 10**7,
        timestamp: DateTime.parse(record['timestamp']).to_i,
        raw_data: record
      }
    end.reject(&:blank?)
  end
end
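A hedged usage sketch (not from the commit) showing how the new parser could be exercised from the Rails console; the stats hash mirrors the keys returned by #call above, and it assumes the import's raw_data already holds the parsed JSON with its top-level "locations" array:

  # Hypothetical console usage of the class defined above.
  import = Import.google_records.last
  stats  = GoogleMaps::RecordsParser.new(import).call

  stats[:raw_points]  # total records found in the file
  stats[:points]      # points actually created
  stats[:doubles]     # records skipped because a Point with that timestamp already existed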

@@ -1,6 +1,6 @@
# frozen_string_literal: true

-class GoogleMaps::TimelineParser
+class GoogleMaps::SemanticHistoryParser
  attr_reader :import

  def initialize(import)

spec/fixtures/files/google/records.json (new file, 22 lines)

@@ -0,0 +1,22 @@
{
  "locations": [{
    "latitudeE7": 533690550,
    "longitudeE7": 836950010,
    "accuracy": 150,
    "source": "UNKNOWN",
    "timestamp": "2012-12-15T14:21:29.460Z"
  }, {
    "latitudeE7": 533563380,
    "longitudeE7": 837616500,
    "accuracy": 18000,
    "source": "UNKNOWN",
    "timestamp": "2013-01-04T10:22:43.225Z"
  }, {
    "latitudeE7": 533690589,
    "longitudeE7": 836951347,
    "accuracy": 22,
    "source": "WIFI",
    "deviceTag": 1184882232,
    "timestamp": "2013-03-01T05:17:39.849Z"
  }]
}
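For reference, latitudeE7/longitudeE7 are degrees scaled by 10^7. A quick Rails console sketch (not part of the commit) applying the same conversion parse_json uses to the first fixture record above; DateTime#to_i comes from ActiveSupport:

  # Hypothetical console check against the first fixture record.
  record = {
    'latitudeE7'  => 533690550,
    'longitudeE7' => 836950010,
    'timestamp'   => '2012-12-15T14:21:29.460Z'
  }

  record['latitudeE7'].to_f / 10**7         # => 53.369055
  record['longitudeE7'].to_f / 10**7        # => 83.695001
  DateTime.parse(record['timestamp']).to_i  # => 1355581289 (2012-12-15 14:21:29 UTC)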

@@ -71,7 +71,7 @@ paths:
      summary: Creates a batch of points
      value:
        locations:
-          - type: FeatureCollection
+          - type: Feature
            geometry:
              type: Point
              coordinates:
@@ -172,7 +172,7 @@ paths:
        lat: 52.502397
        lon: 13.356718
        tid: Swagger
-        tst: 1716033410
+        tst: 1716036830
servers:
  - url: http://{defaultHost}
    variables:
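As an aside (not part of the diff), tst in the OwnTracks example payload is a Unix epoch timestamp in seconds; the updated value can be checked in a console:

  Time.at(1716036830).utc  # => 2024-05-18 12:53:50 UTC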