Update Records.json importing script

This commit is contained in:
Eugene Burmakin 2024-06-10 19:08:27 +02:00
parent 0057bd7a4d
commit 6ed9a27040
8 changed files with 30 additions and 42 deletions

View file

@@ -5,6 +5,13 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
## [0.5.3] — 2024-06-10
### Fixed
- Reworked code responsible for importing "Records.json" file from Google Takeout. Now it is more reliable and faster, and should not throw as many errors as before.
---
## [0.5.2] — 2024-06-08
### Added

View file

@@ -1,5 +1,13 @@
# Dawarich
## Screenshots
![Map](screenshots/map.jpeg)
![Stats](screenshots/stats.jpeg)
![Import](screenshots/imports.jpeg)
[Discord](https://discord.gg/pHsBjpt5J8) | [![ko-fi](https://ko-fi.com/img/githubbutton_sm.svg)](https://ko-fi.com/H2H3IDYDD)
Dawarich is a self-hosted web application to replace Google Timeline (aka Google Location History). It allows you to import your location history from Google Maps Timeline and Owntracks, view it on a map and see some statistics, such as the number of countries and cities visited, and distance traveled.
@@ -75,14 +83,6 @@ Copy the contents of the `docker-compose.yml` file to your server and run `docker compose up -d`
| BACKGROUND_PROCESSING_CONCURRENCY (only for dawarich_sidekiq service) | Number of simultaneously processed background jobs, default is 10 |
## Screenshots
![Map](screenshots/map.jpeg)
![Stats](screenshots/stats.jpeg)
![Import](screenshots/imports.jpeg)
## Star History
As you could probably guess, I like statistics.

File diff suppressed because one or more lines are too long

View file

@@ -1,6 +1,5 @@
# frozen_string_literal: true
require 'redis_client'
class GoogleMaps::RecordsParser
attr_reader :import
@@ -11,6 +10,8 @@ class GoogleMaps::RecordsParser
def call(json)
data = parse_json(json)
return if Point.exists?(latitude: data[:latitude], longitude: data[:longitude], timestamp: data[:timestamp])
Point.create(
latitude: data[:latitude],
longitude: data[:longitude],
@@ -30,6 +31,8 @@ class GoogleMaps::RecordsParser
latitude: json['latitudeE7'].to_f / 10**7,
longitude: json['longitudeE7'].to_f / 10**7,
timestamp: DateTime.parse(json['timestamp']).to_i,
altitude: json['altitude'],
velocity: json['velocity'],
raw_data: json
}
end

View file

@@ -1,26 +0,0 @@
# frozen_string_literal: true
require 'oj'
# Streaming (SAX-style) JSON handler for Oj's sc_parse, used to break a huge
# Google Takeout "Records.json" into per-object background jobs without
# loading the entire file into memory.
#
# NOTE(review): per the @@ -1,26 +0,0 @@ hunk header, this commit deletes this
# file entirely — the rake task now reads the whole file with Oj.load instead.
class StreamHandler < Oj::ScHandler
attr_reader :import_id
# import_id — id of the Import record the enqueued jobs belong to.
def initialize(import_id)
@import_id = import_id
# Accumulates key/value pairs of the JSON object currently being parsed.
@buffer = {}
end
# Oj callback: a JSON object opened. The returned value is the container Oj
# threads through hash_set/hash_end — ignored here in favor of @buffer.
def hash_start
{}
end
# Oj callback: a JSON object closed — enqueue the buffered data as a job.
# NOTE(review): this fires for EVERY object, nested ones included, and uses
# the single shared @buffer rather than Oj's per-object container, so nested
# hashes are flattened/merged and partial payloads can be enqueued —
# presumably one reason this approach was abandoned; verify against the
# actual Records.json structure.
def hash_end
ImportGoogleTakeoutJob.perform_later(import_id, @buffer.to_json)
@buffer = {}
end
# Oj callback: store each parsed key/value pair. _buffer (the container
# returned by hash_start) is deliberately unused.
def hash_set(_buffer, key, value)
@buffer[key] = value
end
end

View file

@@ -1,3 +1,5 @@
# frozen_string_literal: true
Sidekiq.configure_server do |config|
  config.redis = { url: ENV['REDIS_URL'] }
end

View file

@@ -1,5 +1,5 @@
---
:concurrency: <%= ENV.fetch("BACKGROUND_PROCESSING_CONCURRENCY", 10) %> :concurrency: <%= ENV.fetch("BACKGROUND_PROCESSING_CONCURRENCY", 100) %>
:queues:
  - default
  - imports

View file

@@ -11,15 +11,17 @@ namespace :import do
raise 'User not found' unless user
import = user.imports.create(name: args[:file_path], source: :google_records)
import_id = import.id
handler = StreamHandler.new(import.id)
pp "Importing #{args[:file_path]} for #{user.email}, file size is #{File.size(args[:file_path])}... This might take a while, have patience!"
File.open(args[:file_path], 'r') do |content| content = File.read(args[:file_path]); nil
Oj.sc_parse(handler, content) data = Oj.load(content); nil
data['locations'].each do |json|
ImportGoogleTakeoutJob.perform_later(import_id, json.to_json)
end end
pp "Imported #{args[:file_path]} for #{user.email} successfully!" pp "Imported #{args[:file_path]} for #{user.email} successfully! Wait for the processing to finish. You can check the status of the import in the Sidekiq UI (http://<your-dawarich-url>/sidekiq)."
end
end