Merge pull request #1131 from Freika/dev

0.25.10
Evgenii Burmakin 2025-05-08 11:54:48 +02:00 committed by GitHub
commit ebdac37a01
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
46 changed files with 945 additions and 443 deletions


@ -1 +1 @@
0.25.9
0.25.10


@ -36,6 +36,12 @@ jobs:
- name: Install dependencies
run: npm install
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: freikin/dawarich
- name: Login to Docker Hub
uses: docker/login-action@v3.1.0
with:
@ -67,6 +73,7 @@ jobs:
file: ./docker/Dockerfile.dev
push: true
tags: ${{ steps.docker_meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache

.gitignore (2 changes, vendored)

@ -68,5 +68,7 @@
/config/credentials/production.key
/config/credentials/production.yml.enc
/config/credentials/staging.key
/config/credentials/staging.yml.enc
Makefile


@ -4,6 +4,24 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
# 0.25.10 - 2025-05-02
## Added
- Vector maps are supported in non-self-hosted mode.
- Sidekiq UI credentials are now set via the `SIDEKIQ_USERNAME` and `SIDEKIQ_PASSWORD` environment variables. The defaults are `sidekiq` and `password`. In self-hosted mode, if they are not set, the Sidekiq UI is not protected by basic auth.
- The new import page now shows upload progress.
## Changed
- Datetime is now displayed with seconds on the Points page. #1088
- Imported files are now uploaded via direct uploads.
- The `/api/v1/points` endpoint now creates accepted points synchronously.
## Removed
- Sample points are no longer imported automatically for new users.
# 0.25.9 - 2025-04-29
## Fixed


@ -426,7 +426,7 @@ GEM
stimulus-rails (1.3.4)
railties (>= 6.0.0)
stringio (3.1.7)
strong_migrations (2.2.0)
strong_migrations (2.3.0)
activerecord (>= 7)
super_diff (0.15.0)
attr_extras (>= 6.2.4)


@ -1,3 +1,2 @@
web: bundle exec puma -C config/puma.rb
release: bundle exec rails db:migrate
worker: bundle exec sidekiq -C config/sidekiq.yml


@ -1,10 +1,21 @@
{
"name": "dawarich",
"description": "Dawarich",
"buildpacks": [
{ "url": "https://github.com/heroku/heroku-buildpack-nodejs.git" },
{ "url": "https://github.com/heroku/heroku-buildpack-ruby.git" }
],
"formation": {
"web": {
"quantity": 1
},
"worker": {
"quantity": 0
"quantity": 1
}
},
"scripts": {
"dokku": {
"predeploy": "bundle exec rails db:migrate"
}
}
}

File diff suppressed because one or more lines are too long


@ -24,9 +24,9 @@ class Api::V1::PointsController < ApiController
end
def create
Points::CreateJob.perform_later(batch_params, current_api_user.id)
points = Points::Create.new(current_api_user, batch_params).call
render json: { message: 'Points are being processed' }
render json: { data: points }
end
def update
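
For context on the synchronous behaviour above, a minimal client sketch follows. The host and the `DAWARICH_API_KEY` environment variable are assumptions for illustration; only the endpoint path, the `api_key` parameter, the payload shape and the `{ data: ... }` response come from this PR.

```ruby
# Sketch only: POST a single point to the now-synchronous endpoint.
require 'net/http'
require 'json'
require 'uri'

api_key = ENV.fetch('DAWARICH_API_KEY')                               # assumed env var
uri = URI("http://localhost:3000/api/v1/points?api_key=#{api_key}")   # assumed host

payload = {
  locations: [
    {
      geometry:   { type: 'Point', coordinates: [-122.40530871, 37.74430413] },
      properties: { timestamp: '2025-01-17T21:03:01Z' }
    }
  ]
}

response = Net::HTTP.post(uri, payload.to_json, 'Content-Type' => 'application/json')
# The response body now carries the created points instead of a
# "Points are being processed" message, e.g. { "data": [ { "id": ..., ... } ] }.
puts response.body
```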


@ -10,10 +10,10 @@ class Api::V1::SubscriptionsController < ApiController
render json: { message: 'Subscription updated successfully' }
rescue JWT::DecodeError => e
Sentry.capture_exception(e)
ExceptionReporter.call(e)
render json: { message: 'Failed to verify subscription update.' }, status: :unauthorized
rescue ArgumentError => e
Sentry.capture_exception(e)
ExceptionReporter.call(e)
render json: { message: 'Invalid subscription data received.' }, status: :unprocessable_entity
end
end


@ -25,6 +25,8 @@ class ExportsController < ApplicationController
rescue StandardError => e
export&.destroy
ExceptionReporter.call(e)
redirect_to exports_url, alert: "Export failed to initiate: #{e.message}", status: :unprocessable_entity
end


@ -31,26 +31,43 @@ class ImportsController < ApplicationController
end
def create
files = import_params[:files].reject(&:blank?)
files_params = params.dig(:import, :files)
raw_files = Array(files_params).reject(&:blank?)
files.each do |file|
import = current_user.imports.build(
name: file.original_filename,
source: params[:import][:source]
)
import.file.attach(io: file, filename: file.original_filename, content_type: file.content_type)
import.save!
if raw_files.empty?
redirect_to new_import_path, alert: 'No files were selected for upload', status: :unprocessable_entity
return
end
redirect_to imports_url, notice: "#{files.size} files are queued to be imported in background", status: :see_other
created_imports = []
raw_files.each do |item|
next if item.is_a?(ActionDispatch::Http::UploadedFile)
import = create_import_from_signed_id(item)
created_imports << import if import.present?
end
if created_imports.any?
redirect_to imports_url,
notice: "#{created_imports.size} files are queued to be imported in background",
status: :see_other
else
redirect_to new_import_path,
alert: 'No valid file references were found. Please upload files using the file selector.',
status: :unprocessable_entity
end
rescue StandardError => e
Import.where(user: current_user, name: files.map(&:original_filename)).destroy_all
if created_imports.present?
import_ids = created_imports.map(&:id).compact
Import.where(id: import_ids).destroy_all if import_ids.any?
end
flash.now[:error] = e.message
Rails.logger.error "Import error: #{e.message}"
Rails.logger.error e.backtrace.join("\n")
ExceptionReporter.call(e)
redirect_to new_import_path, notice: e.message, status: :unprocessable_entity
redirect_to new_import_path, alert: e.message, status: :unprocessable_entity
end
def destroy
@ -68,4 +85,21 @@ class ImportsController < ApplicationController
def import_params
params.require(:import).permit(:source, files: [])
end
def create_import_from_signed_id(signed_id)
Rails.logger.debug "Creating import from signed ID: #{signed_id[0..20]}..."
blob = ActiveStorage::Blob.find_signed(signed_id)
import = current_user.imports.build(
name: blob.filename.to_s,
source: params[:import][:source]
)
import.file.attach(blob)
import.save!
import
end
end
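
As a reference for the signed-ID flow the controller now expects, here is a console-level sketch. The file path is a placeholder; the calls mirror the ActiveStorage helpers used in the request specs later in this diff rather than prescribing an API.

```ruby
# Sketch: produce the kind of signed ID the direct-upload form submits,
# and resolve it the way create_import_from_signed_id does.
blob = ActiveStorage::Blob.create_and_upload!(
  io: File.open('path/to/track.gpx'),          # placeholder path
  filename: 'track.gpx',
  content_type: 'application/gpx+xml'
)

signed_id = blob.signed_id                      # what import[files][] now carries
found     = ActiveStorage::Blob.find_signed(signed_id)
# The controller then builds an Import named after the blob's filename
# and attaches the blob: import.file.attach(found)
```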


@ -126,6 +126,17 @@ module ApplicationHelper
)
end
def human_datetime_with_seconds(datetime)
return unless datetime
content_tag(
:span,
datetime.strftime('%e %b %Y, %H:%M:%S'),
class: 'tooltip',
data: { tip: datetime.iso8601 }
)
end
def speed_text_color(speed)
return 'text-default' if speed.to_i >= 0
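
For reference, a sketch of what the new helper renders (the exact markup follows the `content_tag` call above; the sample time is arbitrary):

```ruby
human_datetime_with_seconds(Time.utc(2025, 5, 2, 9, 54, 48))
# => roughly:
# <span class="tooltip" data-tip="2025-05-02T09:54:48Z"> 2 May 2025, 09:54:48</span>
```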


@ -0,0 +1,149 @@
import { Controller } from "@hotwired/stimulus"
import { DirectUpload } from "@rails/activestorage"
export default class extends Controller {
static targets = ["input", "progress", "progressBar", "submit", "form"]
static values = {
url: String
}
connect() {
this.inputTarget.addEventListener("change", this.upload.bind(this))
// Add form submission handler to disable the file input
if (this.hasFormTarget) {
this.formTarget.addEventListener("submit", this.onSubmit.bind(this))
}
}
onSubmit(event) {
if (this.isUploading) {
// If still uploading, prevent submission
event.preventDefault()
console.log("Form submission prevented during upload")
return
}
// Disable the file input to prevent it from being submitted with the form
// This ensures only our hidden inputs with signed IDs are submitted
this.inputTarget.disabled = true
// Check if we have any signed IDs
const signedIds = this.element.querySelectorAll('input[name="import[files][]"][type="hidden"]')
if (signedIds.length === 0) {
event.preventDefault()
console.log("No files uploaded yet")
alert("Please select and upload files first")
} else {
console.log(`Submitting form with ${signedIds.length} uploaded files`)
}
}
upload() {
const files = this.inputTarget.files
if (files.length === 0) return
console.log(`Uploading ${files.length} files`)
this.isUploading = true
// Disable submit button during upload
this.submitTarget.disabled = true
// Always remove any existing progress bar to ensure we create a fresh one
if (this.hasProgressTarget) {
this.progressTarget.remove()
}
// Create a wrapper div for better positioning and visibility
const progressWrapper = document.createElement("div")
progressWrapper.className = "mt-4 mb-6 border p-4 rounded-lg bg-gray-50"
// Add a label
const progressLabel = document.createElement("div")
progressLabel.className = "font-medium mb-2 text-gray-700"
progressLabel.textContent = "Upload Progress"
progressWrapper.appendChild(progressLabel)
// Create a new progress container
const progressContainer = document.createElement("div")
progressContainer.setAttribute("data-direct-upload-target", "progress")
progressContainer.className = "w-full bg-gray-200 rounded-full h-4"
// Create the progress bar fill element
const progressBarFill = document.createElement("div")
progressBarFill.setAttribute("data-direct-upload-target", "progressBar")
progressBarFill.className = "bg-blue-600 h-4 rounded-full transition-all duration-300"
progressBarFill.style.width = "0%"
// Add the fill element to the container
progressContainer.appendChild(progressBarFill)
progressWrapper.appendChild(progressContainer)
progressBarFill.dataset.percentageDisplay = "true"
// Add the progress wrapper AFTER the file input field but BEFORE the submit button
this.submitTarget.parentNode.insertBefore(progressWrapper, this.submitTarget)
console.log("Progress bar created and inserted before submit button")
let uploadCount = 0
const totalFiles = files.length
// Clear any existing hidden fields for files
this.element.querySelectorAll('input[name="import[files][]"][type="hidden"]').forEach(el => {
if (el !== this.inputTarget) {
el.remove()
}
});
Array.from(files).forEach(file => {
console.log(`Starting upload for ${file.name}`)
const upload = new DirectUpload(file, this.urlValue, this)
upload.create((error, blob) => {
uploadCount++
if (error) {
console.error("Error uploading file:", error)
} else {
console.log(`Successfully uploaded ${file.name} with ID: ${blob.signed_id}`)
// Create a hidden field with the correct name
const hiddenField = document.createElement("input")
hiddenField.setAttribute("type", "hidden")
hiddenField.setAttribute("name", "import[files][]")
hiddenField.setAttribute("value", blob.signed_id)
this.element.appendChild(hiddenField)
console.log("Added hidden field with signed ID:", blob.signed_id)
}
// Enable submit button when all uploads are complete
if (uploadCount === totalFiles) {
this.submitTarget.disabled = false
this.isUploading = false
console.log("All uploads completed")
console.log(`Ready to submit with ${this.element.querySelectorAll('input[name="import[files][]"][type="hidden"]').length} files`)
}
})
})
}
directUploadWillStoreFileWithXHR(request) {
request.upload.addEventListener("progress", event => {
if (!this.hasProgressBarTarget) {
console.warn("Progress bar target not found")
return
}
const progress = (event.loaded / event.total) * 100
const progressPercentage = `${progress.toFixed(1)}%`
console.log(`Upload progress: ${progressPercentage}`)
this.progressBarTarget.style.width = progressPercentage
// Update text percentage if exists
const percentageDisplay = this.element.querySelector('[data-percentage-display="true"]')
if (percentageDisplay) {
percentageDisplay.textContent = progressPercentage
}
})
}
}


@ -40,6 +40,7 @@ export default class extends BaseController {
console.log("Map controller connected");
this.apiKey = this.element.dataset.api_key;
this.selfHosted = this.element.dataset.self_hosted;
this.markers = JSON.parse(this.element.dataset.coordinates);
this.timezone = this.element.dataset.timezone;
this.userSettings = JSON.parse(this.element.dataset.user_settings);
@ -425,7 +426,7 @@ export default class extends BaseController {
baseMaps() {
let selectedLayerName = this.userSettings.preferred_map_layer || "OpenStreetMap";
let maps = createAllMapLayers(this.map, selectedLayerName);
let maps = createAllMapLayers(this.map, selectedLayerName, this.selfHosted);
// Add custom map if it exists in settings
if (this.userSettings.maps && this.userSettings.maps.url) {
@ -448,8 +449,28 @@ export default class extends BaseController {
maps[this.userSettings.maps.name] = customLayer;
} else {
// If no custom map is set, ensure a default layer is added
const defaultLayer = maps[selectedLayerName] || maps["OpenStreetMap"] || maps["Atlas"];
defaultLayer.addTo(this.map);
// First check if maps object has any entries
if (Object.keys(maps).length === 0) {
// Fallback to OSM if no maps are configured
maps["OpenStreetMap"] = L.tileLayer("https://tile.openstreetmap.org/{z}/{x}/{y}.png", {
maxZoom: 19,
attribution: "&copy; <a href='http://www.openstreetmap.org/copyright'>OpenStreetMap</a>"
});
}
// Now try to get the selected layer or fall back to alternatives
const defaultLayer = maps[selectedLayerName] || Object.values(maps)[0];
if (defaultLayer) {
defaultLayer.addTo(this.map);
} else {
console.error("Could not find any default map layer");
// Ultimate fallback - create and add OSM layer directly
L.tileLayer("https://tile.openstreetmap.org/{z}/{x}/{y}.png", {
maxZoom: 19,
attribution: "&copy; <a href='http://www.openstreetmap.org/copyright'>OpenStreetMap</a>"
}).addTo(this.map);
}
}
return maps;


@ -53,7 +53,7 @@ export default class extends BaseController {
// Add base map layer
const selectedLayerName = this.userSettings.preferred_map_layer || "OpenStreetMap";
const maps = this.baseMaps();
const defaultLayer = maps[selectedLayerName] || maps["OpenStreetMap"] || maps["Atlas"];
const defaultLayer = maps[selectedLayerName] || Object.values(maps)[0];
defaultLayer.addTo(this.map);
// Add scale control to bottom right


@ -1,20 +1,42 @@
// Import the maps configuration
// In non-self-hosted mode, we need to mount external maps_config.js to the container
import { mapsConfig } from './maps_config';
import { mapsConfig as vectorMapsConfig } from './vector_maps_config';
import { mapsConfig as rasterMapsConfig } from './raster_maps_config';
export function createMapLayer(map, selectedLayerName, layerKey) {
const config = mapsConfig[layerKey];
export function createMapLayer(map, selectedLayerName, layerKey, selfHosted) {
const config = selfHosted === "true" ? rasterMapsConfig[layerKey] : vectorMapsConfig[layerKey];
if (!config) {
console.warn(`No configuration found for layer: ${layerKey}`);
return null;
}
let layer = L.tileLayer(config.url, {
maxZoom: config.maxZoom,
attribution: config.attribution,
// Add any other config properties that might be needed
});
let layer;
if (selfHosted === "true") {
layer = L.tileLayer(config.url, {
maxZoom: config.maxZoom,
attribution: config.attribution,
crossOrigin: true,
// Add any other config properties that might be needed
});
} else {
// Use the global protomapsL object (loaded via script tag)
try {
if (typeof window.protomapsL === 'undefined') {
throw new Error('protomapsL is not defined');
}
layer = window.protomapsL.leafletLayer({
url: config.url,
flavor: config.flavor,
crossOrigin: true,
});
} catch (error) {
console.error('Error creating protomaps layer:', error);
throw new Error('Failed to create vector tile layer. protomapsL may not be available.');
}
}
if (selectedLayerName === layerKey) {
return layer.addTo(map);
@ -24,11 +46,11 @@ export function createMapLayer(map, selectedLayerName, layerKey) {
}
// Helper function to create all map layers
export function createAllMapLayers(map, selectedLayerName) {
export function createAllMapLayers(map, selectedLayerName, selfHosted) {
const layers = {};
const mapsConfig = selfHosted === "true" ? rasterMapsConfig : vectorMapsConfig;
Object.keys(mapsConfig).forEach(layerKey => {
layers[layerKey] = createMapLayer(map, selectedLayerName, layerKey);
layers[layerKey] = createMapLayer(map, selectedLayerName, layerKey, selfHosted);
});
return layers;


@ -0,0 +1,32 @@
export const mapsConfig = {
"Light": {
url: "https://tyles.dwri.xyz/20250420/{z}/{x}/{y}.mvt",
flavor: "light",
maxZoom: 16,
attribution: "<a href='https://github.com/protomaps/basemaps'>Protomaps</a>, &copy; <a href='https://openstreetmap.org'>OpenStreetMap</a>"
},
"Dark": {
url: "https://tyles.dwri.xyz/20250420/{z}/{x}/{y}.mvt",
flavor: "dark",
maxZoom: 16,
attribution: "<a href='https://github.com/protomaps/basemaps'>Protomaps</a>, &copy; <a href='https://openstreetmap.org'>OpenStreetMap</a>"
},
"White": {
url: "https://tyles.dwri.xyz/20250420/{z}/{x}/{y}.mvt",
flavor: "white",
maxZoom: 16,
attribution: "<a href='https://github.com/protomaps/basemaps'>Protomaps</a>, &copy; <a href='https://openstreetmap.org'>OpenStreetMap</a>"
},
"Grayscale": {
url: "https://tyles.dwri.xyz/20250420/{z}/{x}/{y}.mvt",
flavor: "grayscale",
maxZoom: 16,
attribution: "<a href='https://github.com/protomaps/basemaps'>Protomaps</a>, &copy; <a href='https://openstreetmap.org'>OpenStreetMap</a>"
},
"Black": {
url: "https://tyles.dwri.xyz/20250420/{z}/{x}/{y}.mvt",
flavor: "black",
maxZoom: 16,
attribution: "<a href='https://github.com/protomaps/basemaps'>Protomaps</a>, &copy; <a href='https://openstreetmap.org'>OpenStreetMap</a>"
},
};


@ -30,7 +30,6 @@ class Point < ApplicationRecord
after_create :async_reverse_geocode
after_create_commit :broadcast_coordinates
# after_commit -> { Import::UpdatePointsCountJob.perform_later(import_id) }, on: :destroy, if: -> { import_id.present? }
def self.without_raw_data
select(column_names - ['raw_data'])


@ -16,7 +16,6 @@ class User < ApplicationRecord
has_many :trips, dependent: :destroy
after_create :create_api_key
after_create :import_sample_points
after_commit :activate, on: :create, if: -> { DawarichSettings.self_hosted? }
before_save :sanitize_input
@ -134,23 +133,4 @@ class User < ApplicationRecord
settings['photoprism_url']&.gsub!(%r{/+\z}, '')
settings.try(:[], 'maps')&.try(:[], 'url')&.strip!
end
# rubocop:disable Metrics/MethodLength
def import_sample_points
return unless Rails.env.development? ||
Rails.env.production? ||
(Rails.env.test? && ENV['IMPORT_SAMPLE_POINTS'])
import = imports.create(
name: 'DELETE_ME_this_is_a_demo_import_DELETE_ME',
source: 'gpx'
)
import.file.attach(
Rack::Test::UploadedFile.new(
Rails.root.join('lib/assets/sample_points.gpx'), 'application/xml'
)
)
end
# rubocop:enable Metrics/MethodLength
end


@ -0,0 +1,9 @@
# frozen_string_literal: true
class ExceptionReporter
def self.call(exception)
return unless DawarichSettings.self_hosted?
Sentry.capture_exception(exception)
end
end


@ -0,0 +1,30 @@
# frozen_string_literal: true
class Points::Create
attr_reader :user, :params
def initialize(user, params)
@user = user
@params = params.to_h
end
def call
data = Points::Params.new(params, user.id).call
created_points = []
data.each_slice(1000) do |location_batch|
# rubocop:disable Rails/SkipsModelValidations
result = Point.upsert_all(
location_batch,
unique_by: %i[lonlat timestamp user_id],
returning: Arel.sql('id, timestamp, ST_X(lonlat::geometry) AS longitude, ST_Y(lonlat::geometry) AS latitude')
)
# rubocop:enable Rails/SkipsModelValidations
created_points.concat(result)
end
created_points
end
end
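
A short usage sketch of the service as the controller invokes it; the user record is assumed to exist, and the payload shape follows the request spec added in this PR.

```ruby
# Sketch: synchronous point creation via the new service object.
params = {
  locations: [
    {
      geometry:   { type: 'Point', coordinates: [1.0, 1.0] },
      properties: { timestamp: '2025-01-17T21:03:01Z' }
    }
  ]
}

points = Points::Create.new(user, params).call   # user: an existing User record
# Each returned row includes id, timestamp, and the longitude/latitude
# extracted from lonlat by the RETURNING clause of the upsert.
```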


@ -41,8 +41,7 @@ class Points::Params
end
def params_valid?(point)
point[:geometry].present? &&
point[:geometry][:coordinates].present? &&
point.dig(:geometry, :coordinates).present? &&
point.dig(:properties, :timestamp).present?
end


@ -27,6 +27,8 @@ class Visits::Suggest
title: 'Error suggesting visits',
content: "Error suggesting visits: #{e.message}\n#{e.backtrace.join("\n")}"
)
ExceptionReporter.call(e)
end
private


@ -4,21 +4,15 @@
<p class='py-2'>
<p>Docs: <%= link_to "API documentation", '/api-docs', class: 'underline hover:no-underline' %></p>
Usage example:
<h3 class='text-xl font-bold mt-4'>Usage examples</h3>
<div role="tablist" class="tabs tabs-boxed">
<input type="radio" name="my_tabs_2" role="tab" class="tab" aria-label="OwnTracks" checked />
<div role="tabpanel" class="tab-content bg-base-100 border-base-300 rounded-box p-6">
<p><code><%= api_v1_owntracks_points_url(api_key: current_user.api_key) %></code></p>
</div>
<h3 class='text-lg font-bold mt-4'>OwnTracks</h3>
<p><code><%= api_v1_owntracks_points_url(api_key: current_user.api_key) %></code></p>
<input type="radio" name="my_tabs_2" role="tab" class="tab" aria-label="Overland" />
<div role="tabpanel" class="tab-content bg-base-100 border-base-300 rounded-box p-6">
<p><code><%= api_v1_overland_batches_url(api_key: current_user.api_key) %></code></p>
</div>
</div>
<h3 class='text-lg font-bold mt-4'>Overland</h3>
<p><code><%= api_v1_overland_batches_url(api_key: current_user.api_key) %></code></p>
</p>
<p class='py-2'>
<%= link_to "Generate new API key", generate_api_key_path, data: { confirm: "Are you sure?", turbo_confirm: "Are you sure?", turbo_method: :post }, class: 'btn btn-primary' %>
<%= link_to "Generate new API key", generate_api_key_path, data: { confirm: "Are you sure? This will invalidate the current API key.", turbo_confirm: "Are you sure?", turbo_method: :post }, class: 'btn btn-primary' %>
</p>
</p>


@ -1,4 +1,8 @@
<%= form_with model: import, class: "contents" do |form| %>
<%= form_with model: import, class: "contents", data: {
controller: "direct-upload",
direct_upload_url_value: rails_direct_uploads_url,
direct_upload_target: "form"
} do |form| %>
<div class="form-control w-full">
<label class="label">
<span class="label-text">Select source</span>
@ -65,10 +69,18 @@
<div class="label">
<span class="label-text">Select one or multiple files</span>
</div>
<%= form.file_field :files, multiple: true, class: "file-input file-input-bordered w-full max-w-xs" %>
<%= form.file_field :files,
multiple: true,
direct_upload: true,
class: "file-input file-input-bordered w-full max-w-xs",
data: { direct_upload_target: "input" } %>
<div class="text-sm text-gray-500 mt-2">
Files will be uploaded directly to storage. Please be patient during upload.
</div>
</label>
<div class="inline">
<%= form.submit class: "rounded-lg py-3 px-5 bg-blue-600 text-white inline-block font-medium cursor-pointer" %>
<%= form.submit class: "rounded-lg py-3 px-5 bg-blue-600 text-white inline-block font-medium cursor-pointer",
data: { direct_upload_target: "submit" } %>
</div>
<% end %>


@ -13,6 +13,8 @@
<%= stylesheet_link_tag "tailwind", "inter-font", "data-turbo-track": "reload" %>
<%= stylesheet_link_tag "application", "data-turbo-track": "reload" %>
<%= javascript_importmap_tags %>
<%= javascript_include_tag "https://unpkg.com/protomaps-leaflet@5.0.0/dist/protomaps-leaflet.js" %>
<%= render 'application/favicon' %>
<%= Sentry.get_trace_propagation_meta.html_safe if Sentry.initialized? %>
</head>


@ -49,6 +49,7 @@
data-points-target="map"
data-distance_unit="<%= DISTANCE_UNIT %>"
data-api_key="<%= current_user.api_key %>"
data-self_hosted="<%= @self_hosted %>"
data-user_settings='<%= current_user.settings.to_json.html_safe %>'
data-coordinates="<%= @coordinates %>"
data-distance="<%= @distance %>"


@ -14,7 +14,7 @@
%>
</td>
<td class='<%= speed_text_color(point.velocity) %>'><%= point.velocity %></td>
<td><%= human_datetime(point.recorded_at) %></td>
<td><%= human_datetime_with_seconds(point.recorded_at) %></td>
<td><%= point.lat %>, <%= point.lon %></td>
<td></td>
</tr>


@ -5,7 +5,4 @@
<%= link_to 'Background Jobs', settings_background_jobs_path, role: 'tab', class: "tab #{active_tab?(settings_background_jobs_path)}" %>
<% end %>
<%= link_to 'Map', settings_maps_path, role: 'tab', class: "tab #{active_tab?(settings_maps_path)}" %>
<% if !DawarichSettings.self_hosted? %>
<%= link_to 'Subscriptions', "#{MANAGER_URL}/auth/dawarich?token=#{current_user.generate_subscription_token}", role: 'tab', class: "tab" %>
<% end %>
</div>


@ -120,6 +120,10 @@
<ul class="p-2 bg-base-100 rounded-t-none z-10">
<li><%= link_to 'Account', edit_user_registration_path %></li>
<li><%= link_to 'Settings', settings_path %></li>
<% if !DawarichSettings.self_hosted? %>
<li><%= link_to 'Subscription', "#{MANAGER_URL}/auth/dawarich?token=#{current_user.generate_subscription_token}" %></li>
<% end %>
<li><%= link_to 'Logout', destroy_user_session_path, method: :delete, data: { turbo_method: :delete } %></li>
</ul>
</details>


@ -20,3 +20,7 @@ test:
production:
<<: *default
database: <%= ENV['DATABASE_NAME'] || 'dawarich_production' %>
staging:
<<: *default
database: <%= ENV['DATABASE_NAME'] || 'dawarich_staging' %>


@ -15,16 +15,14 @@ settings = {
if PHOTON_API_HOST.present?
settings[:lookup] = :photon
settings[:photon] = { use_https: PHOTON_API_USE_HTTPS, host: PHOTON_API_HOST }
settings[:http_headers] = { 'X-Api-Key' => PHOTON_API_KEY } if defined?(PHOTON_API_KEY)
settings[:http_headers] = { 'X-Api-Key' => PHOTON_API_KEY } if PHOTON_API_KEY.present?
elsif GEOAPIFY_API_KEY.present?
settings[:lookup] = :geoapify
settings[:api_key] = GEOAPIFY_API_KEY
elsif NOMINATIM_API_HOST.present?
settings[:lookup] = :nominatim
settings[:nominatim] = { use_https: NOMINATIM_API_USE_HTTPS, host: NOMINATIM_API_HOST }
if NOMINATIM_API_KEY.present?
settings[:api_key] = NOMINATIM_API_KEY
end
settings[:api_key] = NOMINATIM_API_KEY if NOMINATIM_API_KEY.present?
end
Geocoder.configure(settings)


@ -7,15 +7,17 @@ Rails.application.routes.draw do
mount Rswag::Api::Engine => '/api-docs'
mount Rswag::Ui::Engine => '/api-docs'
Sidekiq::Web.use(Rack::Auth::Basic) do |username, password|
ActiveSupport::SecurityUtils.secure_compare(
::Digest::SHA256.hexdigest(username),
::Digest::SHA256.hexdigest(ENV['SIDEKIQ_USERNAME'])
) &
unless DawarichSettings.self_hosted?
Sidekiq::Web.use(Rack::Auth::Basic) do |username, password|
ActiveSupport::SecurityUtils.secure_compare(
::Digest::SHA256.hexdigest(password),
::Digest::SHA256.hexdigest(ENV['SIDEKIQ_PASSWORD'])
)
::Digest::SHA256.hexdigest(username),
::Digest::SHA256.hexdigest(ENV['SIDEKIQ_USERNAME'])
) &
ActiveSupport::SecurityUtils.secure_compare(
::Digest::SHA256.hexdigest(password),
::Digest::SHA256.hexdigest(ENV['SIDEKIQ_PASSWORD'])
)
end
end
authenticate :user, lambda { |u|


@ -7,6 +7,8 @@ ENV RAILS_LOG_TO_STDOUT=true
ENV RAILS_PORT=3000
ENV RAILS_ENV=development
ENV SELF_HOSTED=true
ENV SIDEKIQ_USERNAME=sidekiq
ENV SIDEKIQ_PASSWORD=password
# Install dependencies for application
RUN apk -U add --no-cache \


@ -5,7 +5,9 @@ namespace :points do
task migrate_to_lonlat: :environment do
puts 'Updating points to use lonlat...'
points = Point.where(longitude: nil, latitude: nil).select(:id, :longitude, :latitude, :raw_data, :user_id)
points =
Point.where(longitude: nil, latitude: nil)
.select(:id, :longitude, :latitude, :raw_data, :user_id, :timestamp)
points.find_each do |point|
Points::RawDataLonlatExtractor.new(point).call

package-lock.json (105 changes, generated)

@ -7,11 +7,14 @@
"dependencies": {
"@hotwired/turbo-rails": "^7.3.0",
"@rails/actiontext": "^8.0.0",
"daisyui": "^4.7.3",
"leaflet": "^1.9.4",
"postcss": "^8.4.49",
"trix": "^2.1.8"
},
"devDependencies": {
"daisyui": "^4.7.3"
"engines": {
"node": "18.17.1",
"npm": "9.6.7"
}
},
"node_modules/@hotwired/turbo": {
@ -59,7 +62,6 @@
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz",
"integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==",
"dev": true,
"engines": {
"node": ">= 6"
}
@ -68,7 +70,6 @@
"version": "0.8.0",
"resolved": "https://registry.npmjs.org/css-selector-tokenizer/-/css-selector-tokenizer-0.8.0.tgz",
"integrity": "sha512-Jd6Ig3/pe62/qe5SBPTN8h8LeUg/pT4lLgtavPf7updwwHpvFzxvOQBHYj2LZDMjUnBzgvIUSjRcf6oT5HzHFg==",
"dev": true,
"dependencies": {
"cssesc": "^3.0.0",
"fastparse": "^1.1.2"
@ -78,7 +79,6 @@
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz",
"integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==",
"dev": true,
"bin": {
"cssesc": "bin/cssesc"
},
@ -90,7 +90,6 @@
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/culori/-/culori-3.3.0.tgz",
"integrity": "sha512-pHJg+jbuFsCjz9iclQBqyL3B2HLCBF71BwVNujUYEvCeQMvV97R59MNK3R2+jgJ3a1fcZgI9B3vYgz8lzr/BFQ==",
"dev": true,
"engines": {
"node": "^12.20.0 || ^14.13.1 || >=16.0.0"
}
@ -99,7 +98,6 @@
"version": "4.7.3",
"resolved": "https://registry.npmjs.org/daisyui/-/daisyui-4.7.3.tgz",
"integrity": "sha512-R8jUpBMAUm4rSyxzGa9QqFdJTkzREtb1QahXdDoOfElGiF4VbSuu5bfqQoOro1kkSagPy+aTKu5WtSSXmH3u3g==",
"dev": true,
"dependencies": {
"css-selector-tokenizer": "^0.8",
"culori": "^3",
@ -117,8 +115,7 @@
"node_modules/fastparse": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/fastparse/-/fastparse-1.1.2.tgz",
"integrity": "sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ==",
"dev": true
"integrity": "sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ=="
},
"node_modules/leaflet": {
"version": "1.9.4",
@ -126,17 +123,15 @@
"integrity": "sha512-nxS1ynzJOmOlHp+iL3FyWqK89GtNL8U8rvlMOsQdTTssxZwCXh8N2NB3GDQOL+YR3XnWyZAxwQixURb+FA74PA=="
},
"node_modules/nanoid": {
"version": "3.3.7",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz",
"integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==",
"dev": true,
"version": "3.3.11",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz",
"integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"peer": true,
"bin": {
"nanoid": "bin/nanoid.cjs"
},
@ -145,16 +140,14 @@
}
},
"node_modules/picocolors": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz",
"integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==",
"dev": true
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="
},
"node_modules/postcss": {
"version": "8.4.35",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.35.tgz",
"integrity": "sha512-u5U8qYpBCpN13BsiEB0CbR1Hhh4Gc0zLFuedrHJKMctHCHAGrMdG0PRM/KErzAL3CU6/eckEtmHNB3x6e3c0vA==",
"dev": true,
"version": "8.5.3",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz",
"integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==",
"funding": [
{
"type": "opencollective",
@ -169,11 +162,10 @@
"url": "https://github.com/sponsors/ai"
}
],
"peer": true,
"dependencies": {
"nanoid": "^3.3.7",
"picocolors": "^1.0.0",
"source-map-js": "^1.0.2"
"nanoid": "^3.3.8",
"picocolors": "^1.1.1",
"source-map-js": "^1.2.1"
},
"engines": {
"node": "^10 || ^12 || >=14"
@ -183,7 +175,6 @@
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.1.tgz",
"integrity": "sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw==",
"dev": true,
"dependencies": {
"camelcase-css": "^2.0.1"
},
@ -199,11 +190,9 @@
}
},
"node_modules/source-map-js": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz",
"integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==",
"dev": true,
"peer": true,
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
"integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==",
"engines": {
"node": ">=0.10.0"
}
@ -258,14 +247,12 @@
"camelcase-css": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz",
"integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==",
"dev": true
"integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA=="
},
"css-selector-tokenizer": {
"version": "0.8.0",
"resolved": "https://registry.npmjs.org/css-selector-tokenizer/-/css-selector-tokenizer-0.8.0.tgz",
"integrity": "sha512-Jd6Ig3/pe62/qe5SBPTN8h8LeUg/pT4lLgtavPf7updwwHpvFzxvOQBHYj2LZDMjUnBzgvIUSjRcf6oT5HzHFg==",
"dev": true,
"requires": {
"cssesc": "^3.0.0",
"fastparse": "^1.1.2"
@ -274,20 +261,17 @@
"cssesc": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz",
"integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==",
"dev": true
"integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg=="
},
"culori": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/culori/-/culori-3.3.0.tgz",
"integrity": "sha512-pHJg+jbuFsCjz9iclQBqyL3B2HLCBF71BwVNujUYEvCeQMvV97R59MNK3R2+jgJ3a1fcZgI9B3vYgz8lzr/BFQ==",
"dev": true
"integrity": "sha512-pHJg+jbuFsCjz9iclQBqyL3B2HLCBF71BwVNujUYEvCeQMvV97R59MNK3R2+jgJ3a1fcZgI9B3vYgz8lzr/BFQ=="
},
"daisyui": {
"version": "4.7.3",
"resolved": "https://registry.npmjs.org/daisyui/-/daisyui-4.7.3.tgz",
"integrity": "sha512-R8jUpBMAUm4rSyxzGa9QqFdJTkzREtb1QahXdDoOfElGiF4VbSuu5bfqQoOro1kkSagPy+aTKu5WtSSXmH3u3g==",
"dev": true,
"requires": {
"css-selector-tokenizer": "^0.8",
"culori": "^3",
@ -298,8 +282,7 @@
"fastparse": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/fastparse/-/fastparse-1.1.2.tgz",
"integrity": "sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ==",
"dev": true
"integrity": "sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ=="
},
"leaflet": {
"version": "1.9.4",
@ -307,45 +290,37 @@
"integrity": "sha512-nxS1ynzJOmOlHp+iL3FyWqK89GtNL8U8rvlMOsQdTTssxZwCXh8N2NB3GDQOL+YR3XnWyZAxwQixURb+FA74PA=="
},
"nanoid": {
"version": "3.3.7",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz",
"integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==",
"dev": true,
"peer": true
"version": "3.3.11",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz",
"integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="
},
"picocolors": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz",
"integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==",
"dev": true
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="
},
"postcss": {
"version": "8.4.35",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.35.tgz",
"integrity": "sha512-u5U8qYpBCpN13BsiEB0CbR1Hhh4Gc0zLFuedrHJKMctHCHAGrMdG0PRM/KErzAL3CU6/eckEtmHNB3x6e3c0vA==",
"dev": true,
"peer": true,
"version": "8.5.3",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz",
"integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==",
"requires": {
"nanoid": "^3.3.7",
"picocolors": "^1.0.0",
"source-map-js": "^1.0.2"
"nanoid": "^3.3.8",
"picocolors": "^1.1.1",
"source-map-js": "^1.2.1"
}
},
"postcss-js": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.1.tgz",
"integrity": "sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw==",
"dev": true,
"requires": {
"camelcase-css": "^2.0.1"
}
},
"source-map-js": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz",
"integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==",
"dev": true,
"peer": true
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
"integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="
},
"spark-md5": {
"version": "3.0.2",


@ -2,11 +2,13 @@
"dependencies": {
"@hotwired/turbo-rails": "^7.3.0",
"@rails/actiontext": "^8.0.0",
"daisyui": "^4.7.3",
"leaflet": "^1.9.4",
"postcss": "^8.4.49",
"trix": "^2.1.8"
},
"devDependencies": {
"daisyui": "^4.7.3"
"engines": {
"node": "18.17.1",
"npm": "9.6.7"
}
}


@ -56,26 +56,6 @@ RSpec.describe User, type: :model do
end
end
end
describe '#import_sample_points' do
before do
ENV['IMPORT_SAMPLE_POINTS'] = 'true'
end
after do
ENV['IMPORT_SAMPLE_POINTS'] = nil
end
it 'creates a sample import and enqueues an import job' do
user = create(:user)
expect(user.imports.count).to eq(1)
expect(user.imports.first.name).to eq('DELETE_ME_this_is_a_demo_import_DELETE_ME')
expect(user.imports.first.source).to eq('gpx')
expect(Import::ProcessJob).to have_been_enqueued.with(user.imports.first.id)
end
end
end
describe 'methods' do


@ -9,6 +9,16 @@ RSpec.describe 'Api::V1::Points', type: :request do
create(:point, user:, timestamp: 1.day.ago + i.minutes)
end
end
let(:point_params) do
{
locations: [
{
geometry: { type: 'Point', coordinates: [1.0, 1.0] },
properties: { timestamp: '2025-01-17T21:03:01Z' }
}
]
}
end
describe 'GET /index' do
context 'when regular version of points is requested' do
@ -122,9 +132,16 @@ RSpec.describe 'Api::V1::Points', type: :request do
describe 'POST /create' do
it 'returns a successful response' do
post "/api/v1/points?api_key=#{user.api_key}", params: { point: { latitude: 1.0, longitude: 1.0 } }
post "/api/v1/points?api_key=#{user.api_key}", params: point_params
expect(response).to have_http_status(:success)
expect(response).to have_http_status(:ok)
json_response = JSON.parse(response.body)['data']
expect(json_response.size).to be_positive
expect(json_response.first['latitude']).to eq(1.0)
expect(json_response.first['longitude']).to eq(1.0)
expect(json_response.first['timestamp']).to be_an_instance_of(Integer)
end
context 'when user is inactive' do
@ -133,7 +150,7 @@ RSpec.describe 'Api::V1::Points', type: :request do
end
it 'returns an unauthorized response' do
post "/api/v1/points?api_key=#{user.api_key}", params: { point: { latitude: 1.0, longitude: 1.0 } }
post "/api/v1/points?api_key=#{user.api_key}", params: point_params
expect(response).to have_http_status(:unauthorized)
end


@ -42,16 +42,22 @@ RSpec.describe 'Imports', type: :request do
context 'when importing owntracks data' do
let(:file) { fixture_file_upload('owntracks/2024-03.rec', 'text/plain') }
let(:blob) { create_blob_for_file(file) }
let(:signed_id) { generate_signed_id_for_blob(blob) }
it 'queues import job' do
allow(ActiveStorage::Blob).to receive(:find_signed).with(signed_id).and_return(blob)
expect do
post imports_path, params: { import: { source: 'owntracks', files: [file] } }
post imports_path, params: { import: { source: 'owntracks', files: [signed_id] } }
end.to have_enqueued_job(Import::ProcessJob).on_queue('imports').at_least(1).times
end
it 'creates a new import' do
allow(ActiveStorage::Blob).to receive(:find_signed).with(signed_id).and_return(blob)
expect do
post imports_path, params: { import: { source: 'owntracks', files: [file] } }
post imports_path, params: { import: { source: 'owntracks', files: [signed_id] } }
end.to change(user.imports, :count).by(1)
expect(response).to redirect_to(imports_path)
@ -60,21 +66,58 @@ RSpec.describe 'Imports', type: :request do
context 'when importing gpx data' do
let(:file) { fixture_file_upload('gpx/gpx_track_single_segment.gpx', 'application/gpx+xml') }
let(:blob) { create_blob_for_file(file) }
let(:signed_id) { generate_signed_id_for_blob(blob) }
it 'queues import job' do
allow(ActiveStorage::Blob).to receive(:find_signed).with(signed_id).and_return(blob)
expect do
post imports_path, params: { import: { source: 'gpx', files: [file] } }
post imports_path, params: { import: { source: 'gpx', files: [signed_id] } }
end.to have_enqueued_job(Import::ProcessJob).on_queue('imports').at_least(1).times
end
it 'creates a new import' do
allow(ActiveStorage::Blob).to receive(:find_signed).with(signed_id).and_return(blob)
expect do
post imports_path, params: { import: { source: 'gpx', files: [file] } }
post imports_path, params: { import: { source: 'gpx', files: [signed_id] } }
end.to change(user.imports, :count).by(1)
expect(response).to redirect_to(imports_path)
end
end
context 'when an error occurs during import creation' do
let(:file1) { fixture_file_upload('owntracks/2024-03.rec', 'text/plain') }
let(:file2) { fixture_file_upload('gpx/gpx_track_single_segment.gpx', 'application/gpx+xml') }
let(:blob1) { create_blob_for_file(file1) }
let(:blob2) { create_blob_for_file(file2) }
let(:signed_id1) { generate_signed_id_for_blob(blob1) }
let(:signed_id2) { generate_signed_id_for_blob(blob2) }
it 'deletes any created imports' do
# The first blob should be found correctly
allow(ActiveStorage::Blob).to receive(:find_signed).with(signed_id1).and_return(blob1)
# The second blob find will raise an error
allow(ActiveStorage::Blob).to receive(:find_signed).with(signed_id2).and_raise(StandardError, 'Test error')
# Allow ExceptionReporter to be called without actually calling it
allow(ExceptionReporter).to receive(:call)
# The request should not ultimately create any imports
expect do
post imports_path, params: { import: { source: 'owntracks', files: [signed_id1, signed_id2] } }
end.not_to change(Import, :count)
# Check that we were redirected with an error message
expect(response).to have_http_status(422)
# Just check that we have an alert message, not its exact content
# since error handling might transform the message
expect(flash[:alert]).not_to be_nil
end
end
end
end
@ -138,4 +181,17 @@ RSpec.describe 'Imports', type: :request do
end
end
end
# Helper methods for creating ActiveStorage blobs and signed IDs in tests
def create_blob_for_file(file)
ActiveStorage::Blob.create_and_upload!(
io: file.open,
filename: file.original_filename,
content_type: file.content_type
)
end
def generate_signed_id_for_blob(blob)
blob.signed_id
end
end


@ -0,0 +1,238 @@
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe Points::Create do
describe '#call' do
let(:user) { create(:user) }
let(:timestamp) { Time.current }
let(:params_service) { instance_double(Points::Params) }
let(:point_params) do
{
locations: [
{ lat: 51.5074, lon: -0.1278, timestamp: timestamp.iso8601 },
{ lat: 40.7128, lon: -74.0060, timestamp: (timestamp + 1.hour).iso8601 }
]
}
end
let(:processed_data) do
[
{
lonlat: 'POINT(-0.1278 51.5074)',
timestamp: timestamp,
user_id: user.id,
created_at: anything,
updated_at: anything
},
{
lonlat: 'POINT(-74.006 40.7128)',
timestamp: timestamp + 1.hour,
user_id: user.id,
created_at: anything,
updated_at: anything
}
]
end
let(:upsert_result) do
[
Point.new(id: 1, lonlat: 'POINT(-0.1278 51.5074)', timestamp: timestamp),
Point.new(id: 2, lonlat: 'POINT(-74.006 40.7128)', timestamp: timestamp + 1.hour)
]
end
it 'processes the points and upserts them to the database' do
expect(Points::Params).to receive(:new).with(point_params, user.id).and_return(params_service)
expect(params_service).to receive(:call).and_return(processed_data)
expect(Point).to receive(:upsert_all)
.with(
processed_data,
unique_by: %i[lonlat timestamp user_id],
returning: Arel.sql('id, timestamp, ST_X(lonlat::geometry) AS longitude, ST_Y(lonlat::geometry) AS latitude')
)
.and_return(upsert_result)
result = described_class.new(user, point_params).call
expect(result).to eq(upsert_result)
end
context 'with large datasets' do
let(:many_locations) do
2001.times.map do |i|
{ lat: 51.5074 + (i * 0.001), lon: -0.1278 - (i * 0.001), timestamp: (timestamp + i.minutes).iso8601 }
end
end
let(:large_params) { { locations: many_locations } }
let(:large_processed_data) do
many_locations.map.with_index do |loc, i|
{
lonlat: "POINT(#{loc[:lon]} #{loc[:lat]})",
timestamp: timestamp + i.minutes,
user_id: user.id,
created_at: Time.current,
updated_at: Time.current
}
end
end
let(:first_batch_result) { 1000.times.map { |i| Point.new(id: i + 1, lonlat: anything, timestamp: anything) } }
let(:second_batch_result) do
1000.times.map do |i|
Point.new(id: i + 1001, lonlat: anything, timestamp: anything)
end
end
let(:third_batch_result) { [Point.new(id: 2001, lonlat: anything, timestamp: anything)] }
let(:combined_results) { first_batch_result + second_batch_result + third_batch_result }
before do
allow(Points::Params).to receive(:new).with(large_params, user.id).and_return(params_service)
allow(params_service).to receive(:call).and_return(large_processed_data)
allow(Point).to receive(:upsert_all).exactly(3).times.and_return(first_batch_result, second_batch_result,
third_batch_result)
end
it 'handles batching for large datasets' do
result = described_class.new(user, large_params).call
expect(result.size).to eq(2001)
expect(result).to eq(combined_results)
end
end
context 'with real data insertion' do
let(:actual_processed_data) do
[
{
lonlat: 'POINT(-0.1278 51.5074)',
timestamp: timestamp,
user_id: user.id,
created_at: Time.current,
updated_at: Time.current
},
{
lonlat: 'POINT(-74.006 40.7128)',
timestamp: timestamp + 1.hour,
user_id: user.id,
created_at: Time.current,
updated_at: Time.current
}
]
end
before do
allow_any_instance_of(Points::Params).to receive(:call).and_return(actual_processed_data)
end
it 'creates points in the database' do
expect do
described_class.new(user, point_params).call
end.to change(Point, :count).by(2)
points = Point.order(:timestamp).last(2)
expect(points[0].lonlat.x).to be_within(0.0001).of(-0.1278)
expect(points[0].lonlat.y).to be_within(0.0001).of(51.5074)
point_time = points[0].timestamp.is_a?(Integer) ? Time.zone.at(points[0].timestamp) : points[0].timestamp
expect(point_time).to be_within(1.second).of(timestamp)
expect(points[1].lonlat.x).to be_within(0.0001).of(-74.006)
expect(points[1].lonlat.y).to be_within(0.0001).of(40.7128)
point_time = points[1].timestamp.is_a?(Integer) ? Time.zone.at(points[1].timestamp) : points[1].timestamp
expect(point_time).to be_within(1.second).of(timestamp + 1.hour)
end
end
context 'with GeoJSON example data' do
let(:geojson_file) { file_fixture('points/geojson_example.json') }
let(:geojson_data) { JSON.parse(File.read(geojson_file)) }
let(:expected_processed_data) do
[
{
lonlat: 'POINT(-122.40530871 37.744304130000003)',
timestamp: Time.parse('2025-01-17T21:03:01Z'),
user_id: user.id,
created_at: Time.current,
updated_at: Time.current
},
{
lonlat: 'POINT(-122.40518926999999 37.744513759999997)',
timestamp: Time.parse('2025-01-17T21:03:02Z'),
user_id: user.id,
created_at: Time.current,
updated_at: Time.current
}
]
end
let(:all_processed_data) do
6.times.map do |i|
if i < 2
expected_processed_data[i]
else
{
lonlat: 'POINT(-122.0 37.0)',
timestamp: Time.parse('2025-01-17T21:03:03Z') + i.minutes,
user_id: user.id,
created_at: Time.current,
updated_at: Time.current
}
end
end
end
let(:expected_results) do
all_processed_data.map.with_index do |data, i|
expected_time = data[:timestamp].to_i
Point.new(
id: i + 1,
lonlat: data[:lonlat],
timestamp: expected_time
)
end
end
before do
allow(Points::Params).to receive(:new).with(geojson_data, user.id).and_return(params_service)
allow(params_service).to receive(:call).and_return(all_processed_data)
allow(Point).to receive(:upsert_all)
.with(
all_processed_data,
unique_by: %i[lonlat timestamp user_id],
returning: Arel.sql('id, timestamp, ST_X(lonlat::geometry) AS longitude, ST_Y(lonlat::geometry) AS latitude')
)
.and_return(expected_results)
end
it 'correctly processes real GeoJSON example data' do
result = described_class.new(user, geojson_data).call
expect(result.size).to eq(6)
expect(result).to eq(expected_results)
# Compare the x and y coordinates instead of the full point object
expect(result[0].lonlat.x).to be_within(0.0001).of(-122.40530871)
expect(result[0].lonlat.y).to be_within(0.0001).of(37.744304130000003)
# Convert timestamp back to Time for comparison
time_obj = Time.zone.at(result[0].timestamp)
expected_time = Time.parse('2025-01-17T21:03:01Z')
expect(time_obj).to be_within(1.second).of(expected_time)
expect(result[1].lonlat.x).to be_within(0.0001).of(-122.40518926999999)
expect(result[1].lonlat.y).to be_within(0.0001).of(37.744513759999997)
# Convert timestamp back to Time for comparison
time_obj = Time.zone.at(result[1].timestamp)
expected_time = Time.parse('2025-01-17T21:03:02Z')
expect(time_obj).to be_within(1.second).of(expected_time)
end
end
end
end


@ -100,76 +100,84 @@ describe 'Points API', type: :request do
parameter name: :locations, in: :body, schema: {
type: :object,
properties: {
type: { type: :string },
geometry: {
type: :object,
properties: {
type: {
type: :string,
example: 'Point',
description: 'the geometry type, always Point'
},
coordinates: {
type: :array,
items: {
type: :number,
example: [-122.40530871, 37.74430413],
description: 'the coordinates of the point, longitude and latitude'
locations: {
type: :array,
items: {
type: :object,
properties: {
type: { type: :string },
geometry: {
type: :object,
properties: {
type: {
type: :string,
example: 'Point',
description: 'the geometry type, always Point'
},
coordinates: {
type: :array,
items: {
type: :number,
example: [-122.40530871, 37.74430413],
description: 'the coordinates of the point, longitude and latitude'
}
}
}
},
properties: {
type: :object,
properties: {
timestamp: {
type: :string,
example: '2025-01-17T21:03:01Z',
description: 'the timestamp of the point'
},
horizontal_accuracy: {
type: :number,
example: 5,
description: 'the horizontal accuracy of the point in meters'
},
vertical_accuracy: {
type: :number,
example: -1,
description: 'the vertical accuracy of the point in meters'
},
altitude: {
type: :number,
example: 0,
description: 'the altitude of the point in meters'
},
speed: {
type: :number,
example: 92.088,
description: 'the speed of the point in meters per second'
},
speed_accuracy: {
type: :number,
example: 0,
description: 'the speed accuracy of the point in meters per second'
},
course_accuracy: {
type: :number,
example: 0,
description: 'the course accuracy of the point in degrees'
},
track_id: {
type: :string,
example: '799F32F5-89BB-45FB-A639-098B1B95B09F',
description: 'the track id of the point set by the device'
},
device_id: {
type: :string,
example: '8D5D4197-245B-4619-A88B-2049100ADE46',
description: 'the device id of the point set by the device'
}
}
}
}
},
required: %w[geometry properties]
}
},
properties: {
type: :object,
properties: {
timestamp: {
type: :string,
example: '2025-01-17T21:03:01Z',
description: 'the timestamp of the point'
},
horizontal_accuracy: {
type: :number,
example: 5,
description: 'the horizontal accuracy of the point in meters'
},
vertical_accuracy: {
type: :number,
example: -1,
description: 'the vertical accuracy of the point in meters'
},
altitude: {
type: :number,
example: 0,
description: 'the altitude of the point in meters'
},
speed: {
type: :number,
example: 92.088,
description: 'the speed of the point in meters per second'
},
speed_accuracy: {
type: :number,
example: 0,
description: 'the speed accuracy of the point in meters per second'
},
course_accuracy: {
type: :number,
example: 0,
description: 'the course accuracy of the point in degrees'
},
track_id: {
type: :string,
example: '799F32F5-89BB-45FB-A639-098B1B95B09F',
description: 'the track id of the point set by the device'
},
device_id: {
type: :string,
example: '8D5D4197-245B-4619-A88B-2049100ADE46',
description: 'the device id of the point set by the device'
}
}
},
required: %w[geometry properties]
}
}
}
@ -179,8 +187,7 @@ describe 'Points API', type: :request do
let(:file_path) { 'spec/fixtures/files/points/geojson_example.json' }
let(:file) { File.open(file_path) }
let(:json) { JSON.parse(file.read) }
let(:params) { json }
let(:locations) { params['locations'] }
let(:locations) { json }
let(:api_key) { create(:user).api_key }
run_test!
@ -190,8 +197,7 @@ describe 'Points API', type: :request do
let(:file_path) { 'spec/fixtures/files/points/geojson_example.json' }
let(:file) { File.open(file_path) }
let(:json) { JSON.parse(file.read) }
let(:params) { json }
let(:locations) { params['locations'] }
let(:locations) { json }
let(:api_key) { 'invalid_api_key' }
run_test!


@ -847,65 +847,72 @@ paths:
schema:
type: object
properties:
type:
type: string
geometry:
type: object
properties:
type:
type: string
example: Point
description: the geometry type, always Point
coordinates:
type: array
items:
type: number
example:
- -122.40530871
- 37.74430413
description: the coordinates of the point, longitude and latitude
properties:
type: object
properties:
timestamp:
type: string
example: '2025-01-17T21:03:01Z'
description: the timestamp of the point
horizontal_accuracy:
type: number
example: 5
description: the horizontal accuracy of the point in meters
vertical_accuracy:
type: number
example: -1
description: the vertical accuracy of the point in meters
altitude:
type: number
example: 0
description: the altitude of the point in meters
speed:
type: number
example: 92.088
description: the speed of the point in meters per second
speed_accuracy:
type: number
example: 0
description: the speed accuracy of the point in meters per second
course_accuracy:
type: number
example: 0
description: the course accuracy of the point in degrees
track_id:
type: string
example: 799F32F5-89BB-45FB-A639-098B1B95B09F
description: the track id of the point set by the device
device_id:
type: string
example: 8D5D4197-245B-4619-A88B-2049100ADE46
description: the device id of the point set by the device
required:
- geometry
- properties
locations:
type: array
items:
type: object
properties:
type:
type: string
geometry:
type: object
properties:
type:
type: string
example: Point
description: the geometry type, always Point
coordinates:
type: array
items:
type: number
example:
- -122.40530871
- 37.74430413
description: the coordinates of the point, longitude
and latitude
properties:
type: object
properties:
timestamp:
type: string
example: '2025-01-17T21:03:01Z'
description: the timestamp of the point
horizontal_accuracy:
type: number
example: 5
description: the horizontal accuracy of the point in meters
vertical_accuracy:
type: number
example: -1
description: the vertical accuracy of the point in meters
altitude:
type: number
example: 0
description: the altitude of the point in meters
speed:
type: number
example: 92.088
description: the speed of the point in meters per second
speed_accuracy:
type: number
example: 0
description: the speed accuracy of the point in meters
per second
course_accuracy:
type: number
example: 0
description: the course accuracy of the point in degrees
track_id:
type: string
example: 799F32F5-89BB-45FB-A639-098B1B95B09F
description: the track id of the point set by the device
device_id:
type: string
example: 8D5D4197-245B-4619-A88B-2049100ADE46
description: the device id of the point set by the device
required:
- geometry
- properties
examples:
'0':
summary: Creates a batch of points

yarn.lock (124 changes)

@ -1,124 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
"@hotwired/turbo-rails@^7.3.0":
version "7.3.0"
resolved "https://registry.npmjs.org/@hotwired/turbo-rails/-/turbo-rails-7.3.0.tgz"
integrity sha512-fvhO64vp/a2UVQ3jue9WTc2JisMv9XilIC7ViZmXAREVwiQ2S4UC7Go8f9A1j4Xu7DBI6SbFdqILk5ImqVoqyA==
dependencies:
"@hotwired/turbo" "^7.3.0"
"@rails/actioncable" "^7.0"
"@hotwired/turbo@^7.3.0":
version "7.3.0"
resolved "https://registry.npmjs.org/@hotwired/turbo/-/turbo-7.3.0.tgz"
integrity sha512-Dcu+NaSvHLT7EjrDrkEmH4qET2ZJZ5IcCWmNXxNQTBwlnE5tBZfN6WxZ842n5cHV52DH/AKNirbPBtcEXDLW4g==
"@rails/actioncable@^7.0":
version "7.1.3"
resolved "https://registry.npmjs.org/@rails/actioncable/-/actioncable-7.1.3.tgz"
integrity sha512-ojNvnoZtPN0pYvVFtlO7dyEN9Oml1B6IDM+whGKVak69MMYW99lC2NOWXWeE3bmwEydbP/nn6ERcpfjHVjYQjA==
"@rails/actiontext@^8.0.0":
version "8.0.0"
resolved "https://registry.npmjs.org/@rails/actiontext/-/actiontext-8.0.0.tgz"
integrity sha512-8pvXDEHqlVHptzfYDUXmBpstHsfHAVacYxO47cWDRjRmp1zdVXusLcom8UvqkRdTcAPXpte+LkjcfpD9S4DSSQ==
dependencies:
"@rails/activestorage" ">= 8.0.0-alpha"
"@rails/activestorage@>= 8.0.0-alpha":
version "8.0.0"
resolved "https://registry.npmjs.org/@rails/activestorage/-/activestorage-8.0.0.tgz"
integrity sha512-qoA7U1gMcWXhDnImwDIyRQDXkQKzThT2lu2Xpim8CnTOCEeAgkQ5Co2kzodpAI2grF1JSDvwXSPYNWwVAswndA==
dependencies:
spark-md5 "^3.0.1"
camelcase-css@^2.0.1:
version "2.0.1"
resolved "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz"
integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==
css-selector-tokenizer@^0.8:
version "0.8.0"
resolved "https://registry.npmjs.org/css-selector-tokenizer/-/css-selector-tokenizer-0.8.0.tgz"
integrity sha512-Jd6Ig3/pe62/qe5SBPTN8h8LeUg/pT4lLgtavPf7updwwHpvFzxvOQBHYj2LZDMjUnBzgvIUSjRcf6oT5HzHFg==
dependencies:
cssesc "^3.0.0"
fastparse "^1.1.2"
cssesc@^3.0.0:
version "3.0.0"
resolved "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz"
integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==
culori@^3:
version "3.3.0"
resolved "https://registry.npmjs.org/culori/-/culori-3.3.0.tgz"
integrity sha512-pHJg+jbuFsCjz9iclQBqyL3B2HLCBF71BwVNujUYEvCeQMvV97R59MNK3R2+jgJ3a1fcZgI9B3vYgz8lzr/BFQ==
daisyui@^4.7.3:
version "4.7.3"
resolved "https://registry.npmjs.org/daisyui/-/daisyui-4.7.3.tgz"
integrity sha512-R8jUpBMAUm4rSyxzGa9QqFdJTkzREtb1QahXdDoOfElGiF4VbSuu5bfqQoOro1kkSagPy+aTKu5WtSSXmH3u3g==
dependencies:
css-selector-tokenizer "^0.8"
culori "^3"
picocolors "^1"
postcss-js "^4"
fastparse@^1.1.2:
version "1.1.2"
resolved "https://registry.npmjs.org/fastparse/-/fastparse-1.1.2.tgz"
integrity sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ==
leaflet@^1.9.4:
version "1.9.4"
resolved "https://registry.npmjs.org/leaflet/-/leaflet-1.9.4.tgz"
integrity sha512-nxS1ynzJOmOlHp+iL3FyWqK89GtNL8U8rvlMOsQdTTssxZwCXh8N2NB3GDQOL+YR3XnWyZAxwQixURb+FA74PA==
nanoid@^3.3.7:
version "3.3.8"
resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz#b1be3030bee36aaff18bacb375e5cce521684baf"
integrity sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==
picocolors@^1:
version "1.0.0"
resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz"
integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==
picocolors@^1.1.1:
version "1.1.1"
resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz#3d321af3eab939b083c8f929a1d12cda81c26b6b"
integrity sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==
postcss-js@^4:
version "4.0.1"
resolved "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.1.tgz"
integrity sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw==
dependencies:
camelcase-css "^2.0.1"
postcss@^8.4.49:
version "8.4.49"
resolved "https://registry.npmjs.org/postcss/-/postcss-8.4.49.tgz#4ea479048ab059ab3ae61d082190fabfd994fe19"
integrity sha512-OCVPnIObs4N29kxTjzLfUryOkvZEq+pf8jTF0lg8E7uETuWHA+v7j3c/xJmiqpX450191LlmZfUKkXxkTry7nA==
dependencies:
nanoid "^3.3.7"
picocolors "^1.1.1"
source-map-js "^1.2.1"
source-map-js@^1.2.1:
version "1.2.1"
resolved "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz#1ce5650fddd87abc099eda37dcff024c2667ae46"
integrity sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==
spark-md5@^3.0.1:
version "3.0.2"
resolved "https://registry.npmjs.org/spark-md5/-/spark-md5-3.0.2.tgz"
integrity sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw==
trix@^2.1.8:
version "2.1.8"
resolved "https://registry.npmjs.org/trix/-/trix-2.1.8.tgz#b9383af8cd9c1a0a0818d6b4e0c9e771bf7fd564"
integrity sha512-y1h5mKQcjMsZDsUOqOgyIUfw+Z31u4Fe9JqXtKGUzIC7FM9cTpxZFFWxQggwXBo18ccIKYx1Fn9toVO5mCpn9g==