Mirror of https://github.com/Freika/dawarich.git, synced 2026-01-10 17:21:38 -05:00
Fix failing tests and move testing imports to files
Parent: 26c7a4cca3
Commit: fc8d0d8ddc
29 changed files with 260 additions and 194 deletions
@@ -10,13 +10,13 @@ and this project adheres to [Semantic Versioning](http://semver.org/).

- Migrate existing imports from `raw_data` to the new file storage.
- Stream import files for parsing instead of downloading them.
- Add randomized name to the import files before attaching them to the import record.
- Export files should also be stored in the S3-compatible storage and be available for download.

## Changed

- Import files are now being attached to the import record instead of being stored in the `raw_data` database column.
- Import files can now be stored in S3-compatible storage.
- Export files are now being attached to the export record instead of being stored in the file system.
- Export files can now be stored in S3-compatible storage.

# 0.25.3 - 2025-03-22
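The S3-compatible storage mentioned in the changelog entries above is provided through Active Storage. A minimal sketch of how such a backend could be pointed at (the `:s3` service name and the idea that `config/storage.yml` holds the bucket, credentials and custom endpoint are assumptions about deployment configuration, not part of this commit):

# Illustrative sketch only: point Active Storage at an assumed S3-compatible service.
# config/storage.yml would define the :s3 service with access_key_id, secret_access_key,
# region, bucket and (for non-AWS providers) endpoint.
Rails.application.configure do
  config.active_storage.service = :s3
end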
@@ -11,9 +11,13 @@ class ExportsController < ApplicationController
  def create
    export_name =
      "export_from_#{params[:start_at].to_date}_to_#{params[:end_at].to_date}.#{params[:file_format]}"
    export = current_user.exports.create(name: export_name, status: :created)

    ExportJob.perform_later(export.id, params[:start_at], params[:end_at], file_format: params[:file_format])
    export = current_user.exports.create(
      name: export_name,
      status: :created,
      format: params[:file_format],
      start_at: params[:start_at],
      end_at: params[:end_at]
    )

    redirect_to exports_url, notice: 'Export was successfully initiated. Please wait until it\'s finished.'
  rescue StandardError => e
@@ -23,11 +27,7 @@ class ExportsController < ApplicationController
  end

  def destroy
    ActiveRecord::Base.transaction do
      @export.destroy

      File.delete(Rails.root.join('public', 'exports', @export.name))
    end
    @export.destroy

    redirect_to exports_url, notice: 'Export was successfully destroyed.', status: :see_other
  end
@@ -39,6 +39,6 @@ class ExportsController < ApplicationController
  end

  def export_params
    params.require(:export).permit(:name, :url, :status)
    params.require(:export).permit(:name, :url, :status, :format)
  end
end
@@ -3,9 +3,9 @@
class ExportJob < ApplicationJob
  queue_as :exports

  def perform(export_id, start_at, end_at, file_format: :json)
  def perform(export_id)
    export = Export.find(export_id)

    Exports::Create.new(export:, start_at:, end_at:, file_format:).call
    Exports::Create.new(export:).call
  end
end
@@ -4,16 +4,16 @@ class Export < ApplicationRecord
  belongs_to :user

  enum :status, { created: 0, processing: 1, completed: 2, failed: 3 }
  enum :format, { json: 0, gpx: 1 }

  validates :name, presence: true

  before_destroy :delete_export_file
  has_one_attached :file

  private
  after_commit -> { ExportJob.perform_later(id) }, on: :create
  after_commit -> { file.purge }, on: :destroy

  def delete_export_file
    file_path = Rails.root.join('public', 'exports', "#{name}.json")

    File.delete(file_path) if File.exist?(file_path)
  def process!
    Exports::Create.new(export: self).call
  end
end
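Taken together, the model now owns the export lifecycle: creating a record enqueues `ExportJob`, which runs `Exports::Create` and attaches the generated file, and destroying the record purges the blob. A rough usage sketch (the values and the `current_user` receiver are illustrative, not code from this commit):

# Illustrative flow under the new callbacks:
export = current_user.exports.create(
  name: 'export_from_2025-01-01_to_2025-01-02.json',
  status: :created,
  format: :json,
  start_at: '2025-01-01',
  end_at: '2025-01-02'
)
# after_commit enqueues ExportJob, which calls Exports::Create and attaches the file
export.reload.file.attached? # => true once the job has run
export.destroy               # purges the attached blob as well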
@@ -1,26 +1,28 @@
# frozen_string_literal: true

class Exports::Create
  def initialize(export:, start_at:, end_at:, file_format: :json)
  def initialize(export:)
    @export = export
    @user = export.user
    @start_at = start_at.to_datetime
    @end_at = end_at.to_datetime
    @file_format = file_format
    @start_at = export.start_at
    @end_at = export.end_at
    @file_format = export.format
  end

  def call
    export.update!(status: :processing)
    ActiveRecord::Base.transaction do
      export.update!(status: :processing)

    points = time_framed_points
      points = time_framed_points

    data = points_data(points)
      data = points_data(points)

    create_export_file(data)
      attach_export_file(data)

    export.update!(status: :completed, url: "exports/#{export.name}")
      export.update!(status: :completed)

    create_export_finished_notification
      create_export_finished_notification
    end
  rescue StandardError => e
    create_failed_export_notification(e)
@@ -72,18 +74,18 @@ class Exports::Create
    Points::GpxSerializer.new(points, export.name).call
  end

  def create_export_file(data)
    dir_path = Rails.root.join('public/exports')

    FileUtils.mkdir_p(dir_path) unless Dir.exist?(dir_path)

    file_path = dir_path.join(export.name)

    Rails.logger.info("Creating export file at: #{file_path}")

    File.open(file_path, 'w') { |file| file.write(data) }
  def attach_export_file(data)
    export.file.attach(io: StringIO.new(data.to_s), filename: export.name, content_type:)
  rescue StandardError => e
    Rails.logger.error("Failed to create export file: #{e.message}")
    raise
  end

  def content_type
    case file_format.to_sym
    when :json then 'application/json'
    when :gpx then 'application/gpx+xml'
    else raise ArgumentError, "Unsupported file format: #{file_format}"
    end
  end
end
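The new `content_type` helper maps the export's `format` enum to a MIME type before the serialized data is attached. A small sketch of reading the attachment back through the Active Storage API (illustrative, not part of the commit):

# Illustrative: inspecting the blob created by attach_export_file
export.file.content_type # => 'application/json' or 'application/gpx+xml'
export.file.download     # => the serialized points as a String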
@@ -13,9 +13,7 @@ class GoogleMaps::SemanticHistoryParser
  end

  def call
    points_data = parse_json

    points_data.each_slice(BATCH_SIZE) do |batch|
    parsed_json.each_slice(BATCH_SIZE) do |batch|
      @current_index += batch.size
      process_batch(batch)
      broadcast_import_progress(import, @current_index)
@@ -62,14 +60,18 @@ class GoogleMaps::SemanticHistoryParser
    )
  end

  def parse_json
  def parsed_json
    data = nil

    import.file.download do |f|
      json = Oj.load(f)

      json['timelineObjects'].flat_map do |timeline_object|
      data = json['timelineObjects'].flat_map do |timeline_object|
        parse_timeline_object(timeline_object)
      end.compact
    end

    data
  end

  def parse_timeline_object(timeline_object)
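The renamed `parsed_json` reads the attached file through Active Storage; when `download` is given a block it yields the blob in chunks rather than returning one large string, which is what the changelog's "stream import files for parsing" refers to. A standalone sketch of the same pattern (illustrative):

# Illustrative sketch of streaming an attachment with Active Storage
buffer = +''
import.file.download { |chunk| buffer << chunk } # yields the blob chunk by chunk
json = Oj.load(buffer)
json['timelineObjects'].size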
@@ -41,7 +41,11 @@
        <td><%= export.status %></td>
        <td>
          <% if export.completed? %>
            <%= link_to 'Download', export.url, class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: export.name %>
            <% if export.url.present? %>
              <%= link_to 'Download', export.url, class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: export.name %>
            <% else %>
              <%= link_to 'Download', export.file.url, class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: export.name %>
            <% end %>
          <% end %>
          <%= link_to 'Delete', export, data: { confirm: "Are you sure?", turbo_confirm: "Are you sure?", turbo_method: :delete }, method: :delete, class: "px-4 py-2 bg-red-500 text-white rounded-md" %>
        </td>
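The download link now falls back to `export.file.url` when the legacy `url` column is blank; `url` on an attachment asks the storage service for a (typically signed) URL, which S3-compatible services can generate. An alternative that routes the download through the app instead of the storage service would be the Active Storage route helper (illustrative, not used in this commit):

# Illustrative alternative for the download link:
rails_blob_path(export.file, disposition: 'attachment')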
@@ -0,0 +1,9 @@
# frozen_string_literal: true

class AddFormatStartAtEndAtToExports < ActiveRecord::Migration[8.0]
  def change
    add_column :exports, :format, :integer, default: 0
    add_column :exports, :start_at, :datetime
    add_column :exports, :end_at, :datetime
  end
end
db/schema.rb (generated, 5 lines changed)
@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.

ActiveRecord::Schema[8.0].define(version: 2025_03_03_194043) do
ActiveRecord::Schema[8.0].define(version: 2025_03_24_180755) do
  # These are extensions that must be enabled in order to support this database
  enable_extension "pg_catalog.plpgsql"
  enable_extension "postgis"
@@ -74,6 +74,9 @@ ActiveRecord::Schema[8.0].define(version: 2025_03_03_194043) do
    t.bigint "user_id", null: false
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.integer "format", default: 0
    t.datetime "start_at"
    t.datetime "end_at"
    t.index ["status"], name: "index_exports_on_status"
    t.index ["user_id"], name: "index_exports_on_user_id"
  end
@@ -3,8 +3,8 @@
FactoryBot.define do
  factory :export do
    name { 'export' }
    url { 'exports/export.json' }
    status { 1 }
    status { :created }
    format { :json }
    user
  end
end
spec/fixtures/files/google/location-history/with_activitySegment_with_startLocation.json (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
{
  "timelineObjects": [
    {
      "activitySegment": {
        "startLocation": { "latitudeE7": 123422222, "longitudeE7": 123422222 },
        "duration": { "startTimestamp": "2025-03-24 20:07:24 +0100" }
      }
    }
  ]
}
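These fixtures encode coordinates in Google's E7 format: degrees multiplied by 10^7 and stored as integers, so 123422222 decodes to 12.3422222, which matches the POINT values expected in the specs further below. A minimal decoding sketch:

# E7 values are degrees scaled by 10**7
def e7_to_degrees(value)
  value / 10_000_000.0
end

e7_to_degrees(123_422_222) # => 12.3422222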
@@ -0,0 +1,10 @@
{
  "timelineObjects": [
    {
      "activitySegment": {
        "startLocation": { "latitudeE7": 123466666, "longitudeE7": 123466666 },
        "duration": { "startTimestampMs": "1742844302585" }
      }
    }
  ]
}
@@ -0,0 +1,10 @@
{
  "timelineObjects": [
    {
      "activitySegment": {
        "startLocation": { "latitudeE7": 123455555, "longitudeE7": 123455555 },
        "duration": { "startTimestamp": "1742844232" }
      }
    }
  ]
}
@@ -0,0 +1,10 @@
{
  "timelineObjects": [
    {
      "activitySegment": {
        "startLocation": { "latitudeE7": 123444444, "longitudeE7": 123444444 },
        "duration": { "startTimestamp": "1742844302585" }
      }
    }
  ]
}
@@ -0,0 +1,10 @@
{
  "timelineObjects": [
    {
      "activitySegment": {
        "startLocation": { "latitudeE7": 123433333, "longitudeE7": 123433333 },
        "duration": { "startTimestamp": "2025-03-24T20:20:23+01:00" }
      }
    }
  ]
}
spec/fixtures/files/google/location-history/with_activitySegment_without_startLocation.json (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
{
  "timelineObjects": [
    {
      "activitySegment": {
        "waypointPath": {
          "waypoints": [
            { "latE7": 123411111, "lngE7": 123411111 }
          ]
        },
        "duration": { "startTimestamp": "2025-03-24 20:07:24 +0100" }
      }
    }
  ]
}
@@ -0,0 +1,9 @@
{
  "timelineObjects": [
    {
      "activitySegment": {
        "duration": { "startTimestamp": "2025-03-24 20:07:24 +0100" }
      }
    }
  ]
}
spec/fixtures/files/google/location-history/with_placeVisit_with_location_with_coordinates.json (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
{
  "timelineObjects": [
    {
      "placeVisit": {
        "location": { "latitudeE7": 123477777, "longitudeE7": 123477777 },
        "duration": { "startTimestamp": "1742844232" }
      }
    }
  ]
}
@@ -0,0 +1,10 @@
{
  "timelineObjects": [
    {
      "placeVisit": {
        "location": { "latitudeE7": 123488888, "longitudeE7": 123488888 },
        "duration": { "startTimestamp": "2025-03-24T20:25:02+01:00" }
      }
    }
  ]
}
@@ -0,0 +1,10 @@
{
  "timelineObjects": [
    {
      "placeVisit": {
        "location": { "latitudeE7": 123511111, "longitudeE7": 123511111 },
        "duration": { "startTimestamp": "1742844302585" }
      }
    }
  ]
}
@@ -0,0 +1,10 @@
{
  "timelineObjects": [
    {
      "placeVisit": {
        "location": { "latitudeE7": 123499999, "longitudeE7": 123499999 },
        "duration": { "startTimestamp": "1742844302" }
      }
    }
  ]
}
@@ -0,0 +1,10 @@
{
  "timelineObjects": [
    {
      "placeVisit": {
        "location": { "latitudeE7": 123522222, "longitudeE7": 123522222 },
        "duration": { "startTimestampMs": "1742844302585" }
      }
    }
  ]
}
@@ -0,0 +1,10 @@
{
  "timelineObjects": [
    {
      "placeVisit": {
        "location": {},
        "duration": { "startTimestamp": "2025-03-24 20:25:02 +0100" }
      }
    }
  ]
}
@@ -0,0 +1,10 @@
{
  "timelineObjects": [
    {
      "placeVisit": {
        "otherCandidateLocations": [{ "latitudeE7": 123533333, "longitudeE7": 123533333 }],
        "duration": { "startTimestamp": "2025-03-24 20:25:02 +0100" }
      }
    }
  ]
}
@@ -8,8 +8,8 @@ RSpec.describe ExportJob, type: :job do
  let(:end_at) { Time.zone.now }

  it 'calls the Exports::Create service class' do
    expect(Exports::Create).to receive(:new).with(export:, start_at:, end_at:, file_format: :json).and_call_original
    expect(Exports::Create).to receive(:new).with(export:).and_call_original

    described_class.perform_now(export.id, start_at, end_at)
    described_class.perform_now(export.id)
  end
end
@@ -9,5 +9,6 @@ RSpec.describe Export, type: :model do

  describe 'enums' do
    it { is_expected.to define_enum_for(:status).with_values(created: 0, processing: 1, completed: 2, failed: 3) }
    it { is_expected.to define_enum_for(:format).with_values(json: 0, gpx: 1) }
  end
end
@@ -76,25 +76,9 @@ RSpec.describe '/exports', type: :request do
  end

  describe 'DELETE /destroy' do
    let!(:export) { create(:export, user:, url: 'exports/export.json', name: 'export.json') }
    let(:export_file) { Rails.root.join('public', 'exports', export.name) }
    let!(:export) { create(:export, user:, name: 'export.json') }

    before do
      sign_in user

      FileUtils.mkdir_p(File.dirname(export_file))
      File.write(export_file, '{"some": "data"}')
    end

    after { FileUtils.rm_f(export_file) }

    it 'removes the export file from disk' do
      expect(File.exist?(export_file)).to be true

      delete export_url(export)

      expect(File.exist?(export_file)).to be false
    end
    before { sign_in user }

    it 'destroys the requested export' do
      expect { delete export_url(export) }.to change(Export, :count).by(-1)
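Since the model now purges the attachment in its `after_commit` callback, this request spec no longer has to create and clean up files under `public/exports`. A sketch of how the same cleanup could be asserted through Active Storage (illustrative only; it assumes after_commit callbacks run in the test environment):

# Illustrative only, not part of the commit:
export.file.attach(io: StringIO.new('{}'), filename: 'export.json', content_type: 'application/json')

expect { delete export_url(export) }
  .to change(Export, :count).by(-1)
  .and change(ActiveStorage::Attachment, :count).by(-1)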
@@ -4,15 +4,17 @@ require 'rails_helper'

RSpec.describe Exports::Create do
  describe '#call' do
    subject(:create_export) { described_class.new(export:, start_at:, end_at:, file_format:).call }
    subject(:create_export) { described_class.new(export:).call }

    let(:file_format) { :json }
    let(:user) { create(:user) }
    let(:start_at) { DateTime.new(2021, 1, 1).to_s }
    let(:end_at) { DateTime.new(2021, 1, 2).to_s }
    let(:export_name) { "#{start_at.to_date}_#{end_at.to_date}.#{file_format}" }
    let(:export) { create(:export, user:, name: export_name, status: :created) }
    let(:export_content) { Points::GeojsonSerializer.new(points).call }
    let(:export) do
      create(:export, user:, name: export_name, status: :created, format: file_format, start_at:, end_at:)
    end
    let(:export_content) { Points::GeojsonSerializer.new(points).call }
    let(:reverse_geocoded_at) { Time.zone.local(2021, 1, 1) }
    let!(:points) do
      10.times.map do |i|
@@ -35,10 +37,10 @@ RSpec.describe Exports::Create do
      expect(File.read(file_path).strip).to eq(export_content)
    end

    it 'sets the export url' do
    it 'sets the export file' do
      create_export

      expect(export.reload.url).to eq("exports/#{export.name}")
      expect(export.reload.file.attached?).to be_truthy
    end

    it 'updates the export status to completed' do
@@ -53,7 +55,7 @@ RSpec.describe Exports::Create do

    context 'when an error occurs' do
      before do
        allow(File).to receive(:open).and_raise(StandardError)
        allow_any_instance_of(Points::GeojsonSerializer).to receive(:call).and_raise(StandardError)
      end

      it 'updates the export status to failed' do
@@ -7,36 +7,28 @@ RSpec.describe GoogleMaps::SemanticHistoryParser do
  subject(:parser) { described_class.new(import, user.id).call }

  let(:user) { create(:user) }
  let(:time) { Time.zone.now }
  let!(:import) { create(:import, user:) }
  let(:file_path) { Rails.root.join("spec/fixtures/files/google/location-history/#{file_name}.json") }

  before do
    import.file.attach(
      io: File.open(file_path),
      filename: 'semantic_history.json',
      content_type: 'application/json'
    )
  end

  context 'when activitySegment is present' do
    context 'when startLocation is blank' do
      let(:import) { create(:import, raw_data: { 'timelineObjects' => [activity_segment] }) }
      let(:activity_segment) do
        {
          'activitySegment' => {
            'waypointPath' => {
              'waypoints' => [
                { 'latE7' => 123_456_789, 'lngE7' => 123_456_789 }
              ]
            },
            'duration' => { 'startTimestamp' => time.to_s }
          }
        }
      end
      let(:file_name) { 'with_activitySegment_without_startLocation' }

      it 'creates a point' do
        expect { parser }.to change(Point, :count).by(1)
        expect(Point.last.lonlat.to_s).to eq('POINT (12.3411111 12.3411111)')
      end

      context 'when waypointPath is blank' do
        let(:activity_segment) do
          {
            'activitySegment' => {
              'duration' => { 'startTimestamp' => time.to_s }
            }
          }
        end
        let(:file_name) { 'with_activitySegment_without_startLocation_without_waypointPath' }

        it 'does not create a point' do
          expect { parser }.not_to change(Point, :count)
@@ -45,78 +37,47 @@ RSpec.describe GoogleMaps::SemanticHistoryParser do
    end

    context 'when startLocation is present' do
      let(:import) { create(:import, raw_data: { 'timelineObjects' => [activity_segment] }) }
      let(:activity_segment) do
        {
          'activitySegment' => {
            'startLocation' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
            'duration' => { 'startTimestamp' => time.to_s }
          }
        }
      end
      let(:file_name) { 'with_activitySegment_with_startLocation' }

      it 'creates a point' do
        expect { parser }.to change(Point, :count).by(1)
        expect(Point.last.lonlat.to_s).to eq('POINT (12.3422222 12.3422222)')
      end

      context 'with different timestamp formats' do
        context 'when timestamp is in ISO format' do
          let(:activity_segment) do
            {
              'activitySegment' => {
                'startLocation' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
                'duration' => { 'startTimestamp' => time.iso8601 }
              }
            }
          end
          let(:file_name) { 'with_activitySegment_with_startLocation_with_iso_timestamp' }

          it 'creates a point' do
            expect { parser }.to change(Point, :count).by(1)
            expect(Point.last.lonlat.to_s).to eq('POINT (12.3433333 12.3433333)')
          end
        end

        context 'when timestamp is in seconds format' do
          let(:activity_segment) do
            {
              'activitySegment' => {
                'startLocation' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
                'duration' => { 'startTimestamp' => time.to_i.to_s }
              }
            }
          end
          let(:file_name) { 'with_activitySegment_with_startLocation_timestamp_in_seconds_format' }

          it 'creates a point' do
            expect { parser }.to change(Point, :count).by(1)
            expect(Point.last.lonlat.to_s).to eq('POINT (12.3444444 12.3444444)')
          end
        end

        context 'when timestamp is in milliseconds format' do
          let(:activity_segment) do
            {
              'activitySegment' => {
                'startLocation' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
                'duration' => { 'startTimestamp' => (time.to_f * 1000).to_i.to_s }
              }
            }
          end
          let(:file_name) { 'with_activitySegment_with_startLocation_timestamp_in_milliseconds_format' }

          it 'creates a point' do
            expect { parser }.to change(Point, :count).by(1)
            expect(Point.last.lonlat.to_s).to eq('POINT (12.3455555 12.3455555)')
          end
        end

        context 'when timestampMs is used' do
          let(:activity_segment) do
            {
              'activitySegment' => {
                'startLocation' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
                'duration' => { 'timestampMs' => (time.to_f * 1000).to_i.to_s }
              }
            }
          end
          let(:file_name) { 'with_activitySegment_with_startLocation_timestampMs' }

          it 'creates a point' do
            expect { parser }.to change(Point, :count).by(1)
            expect(Point.last.lonlat.to_s).to eq('POINT (12.3466666 12.3466666)')
          end
        end
      end
@@ -125,110 +86,65 @@ RSpec.describe GoogleMaps::SemanticHistoryParser do

  context 'when placeVisit is present' do
    context 'when location with coordinates is present' do
      let(:import) { create(:import, raw_data: { 'timelineObjects' => [place_visit] }) }
      let(:place_visit) do
        {
          'placeVisit' => {
            'location' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
            'duration' => { 'startTimestamp' => time.to_s }
          }
        }
      end
      let(:file_name) { 'with_placeVisit_with_location_with_coordinates' }

      it 'creates a point' do
        expect { parser }.to change(Point, :count).by(1)
        expect(Point.last.lonlat.to_s).to eq('POINT (12.3477777 12.3477777)')
      end

      context 'with different timestamp formats' do
        context 'when timestamp is in ISO format' do
          let(:place_visit) do
            {
              'placeVisit' => {
                'location' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
                'duration' => { 'startTimestamp' => time.iso8601 }
              }
            }
          end
          let(:file_name) { 'with_placeVisit_with_location_with_coordinates_with_iso_timestamp' }

          it 'creates a point' do
            expect { parser }.to change(Point, :count).by(1)
            expect(Point.last.lonlat.to_s).to eq('POINT (12.3488888 12.3488888)')
          end
        end

        context 'when timestamp is in seconds format' do
          let(:place_visit) do
            {
              'placeVisit' => {
                'location' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
                'duration' => { 'startTimestamp' => time.to_i.to_s }
              }
            }
          end
          let(:file_name) { 'with_placeVisit_with_location_with_coordinates_with_seconds_timestamp' }

          it 'creates a point' do
            expect { parser }.to change(Point, :count).by(1)
            expect(Point.last.lonlat.to_s).to eq('POINT (12.3499999 12.3499999)')
          end
        end

        context 'when timestamp is in milliseconds format' do
          let(:place_visit) do
            {
              'placeVisit' => {
                'location' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
                'duration' => { 'startTimestamp' => (time.to_f * 1000).to_i.to_s }
              }
            }
          end
          let(:file_name) { 'with_placeVisit_with_location_with_coordinates_with_milliseconds_timestamp' }

          it 'creates a point' do
            expect { parser }.to change(Point, :count).by(1)
            expect(Point.last.lonlat.to_s).to eq('POINT (12.3511111 12.3511111)')
          end
        end

        context 'when timestampMs is used' do
          let(:place_visit) do
            {
              'placeVisit' => {
                'location' => { 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 },
                'duration' => { 'timestampMs' => (time.to_f * 1000).to_i.to_s }
              }
            }
          end
          let(:file_name) { 'with_placeVisit_with_location_with_coordinates_with_timestampMs' }

          it 'creates a point' do
            expect { parser }.to change(Point, :count).by(1)
            expect(Point.last.lonlat.to_s).to eq('POINT (12.3522222 12.3522222)')
          end
        end
      end
    end

    context 'when location with coordinates is blank' do
      let(:import) { create(:import, raw_data: { 'timelineObjects' => [place_visit] }) }
      let(:place_visit) do
        {
          'placeVisit' => {
            'location' => {},
            'duration' => { 'startTimestamp' => time.to_s }
          }
        }
      end
      let(:file_name) { 'with_placeVisit_without_location_with_coordinates' }

      it 'does not create a point' do
        expect { parser }.not_to change(Point, :count)
      end

      context 'when otherCandidateLocations is present' do
        let(:place_visit) do
          {
            'placeVisit' => {
              'otherCandidateLocations' => [{ 'latitudeE7' => 123_456_789, 'longitudeE7' => 123_456_789 }],
              'duration' => { 'startTimestamp' => time.to_s }
            }
          }
        end
        let(:file_name) { 'with_placeVisit_without_location_with_coordinates_with_otherCandidateLocations' }

        it 'creates a point' do
          expect { parser }.to change(Point, :count).by(1)
          expect(Point.last.lonlat.to_s).to eq('POINT (12.3533333 12.3533333)')
        end
      end
    end