Mirror of https://github.com/Freika/dawarich.git (synced 2026-01-11 01:31:39 -05:00)

Merge pull request #1134 from Freika/fix/protomaps-leaflet
Fix/protomaps leaflet

Commit f205b3b288
11 changed files with 304 additions and 32 deletions
@@ -10,10 +10,12 @@ and this project adheres to [Semantic Versioning](http://semver.org/).

- Vector maps are supported in non-self-hosted mode.
- Credentials for the Sidekiq UI are now set via the environment variables `SIDEKIQ_USERNAME` and `SIDEKIQ_PASSWORD`. The default credentials are `sidekiq` and `password`.
- The new import page now shows the progress of the upload.

## Changed

- Datetime is now displayed with seconds on the Points page. #1088
- Imported files are now uploaded via direct uploads.

## Removed
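The Sidekiq credentials entry above only documents the environment variables; the route or middleware change itself is not part of this diff. As a rough illustration, basic-auth-protected Sidekiq UIs in Rails apps are conventionally wired along these lines (the mount path and the use of Rack::Auth::Basic are assumptions, not code from this pull request):

    # config/routes.rb -- illustrative sketch only, not taken from this PR
    require 'sidekiq/web'

    Sidekiq::Web.use Rack::Auth::Basic do |username, password|
      # Compare against the env vars named in the changelog, falling back to the documented defaults.
      ActiveSupport::SecurityUtils.secure_compare(username, ENV.fetch('SIDEKIQ_USERNAME', 'sidekiq')) &
        ActiveSupport::SecurityUtils.secure_compare(password, ENV.fetch('SIDEKIQ_PASSWORD', 'password'))
    end

    Rails.application.routes.draw do
      mount Sidekiq::Web => '/sidekiq'
    end

The single `&` keeps both comparisons running either way, so the check does not reveal which credential was wrong through timing.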
@@ -10,10 +10,10 @@ class Api::V1::SubscriptionsController < ApiController

render json: { message: 'Subscription updated successfully' }
rescue JWT::DecodeError => e
Sentry.capture_exception(e)
ExceptionReporter.call(e)
render json: { message: 'Failed to verify subscription update.' }, status: :unauthorized
rescue ArgumentError => e
Sentry.capture_exception(e)
ExceptionReporter.call(e)
render json: { message: 'Invalid subscription data received.' }, status: :unprocessable_entity
end
end
@@ -25,6 +25,8 @@ class ExportsController < ApplicationController

rescue StandardError => e
export&.destroy

ExceptionReporter.call(e)

redirect_to exports_url, alert: "Export failed to initiate: #{e.message}", status: :unprocessable_entity
end
@@ -31,26 +31,43 @@ class ImportsController < ApplicationController

end

def create
files = import_params[:files].reject(&:blank?)
files_params = params.dig(:import, :files)
raw_files = Array(files_params).reject(&:blank?)

files.each do |file|
import = current_user.imports.build(
name: file.original_filename,
source: params[:import][:source]
)

import.file.attach(io: file, filename: file.original_filename, content_type: file.content_type)

import.save!
if raw_files.empty?
redirect_to new_import_path, alert: 'No files were selected for upload', status: :unprocessable_entity
return
end

redirect_to imports_url, notice: "#{files.size} files are queued to be imported in background", status: :see_other
created_imports = []

raw_files.each do |item|
next if item.is_a?(ActionDispatch::Http::UploadedFile)

import = create_import_from_signed_id(item)
created_imports << import if import.present?
end

if created_imports.any?
redirect_to imports_url,
notice: "#{created_imports.size} files are queued to be imported in background",
status: :see_other
else
redirect_to new_import_path,
alert: 'No valid file references were found. Please upload files using the file selector.',
status: :unprocessable_entity
end
rescue StandardError => e
Import.where(user: current_user, name: files.map(&:original_filename)).destroy_all
if created_imports.present?
import_ids = created_imports.map(&:id).compact
Import.where(id: import_ids).destroy_all if import_ids.any?
end

flash.now[:error] = e.message
Rails.logger.error "Import error: #{e.message}"
Rails.logger.error e.backtrace.join("\n")
ExceptionReporter.call(e)

redirect_to new_import_path, notice: e.message, status: :unprocessable_entity
redirect_to new_import_path, alert: e.message, status: :unprocessable_entity
end

def destroy
@@ -68,4 +85,21 @@ class ImportsController < ApplicationController

def import_params
params.require(:import).permit(:source, files: [])
end

def create_import_from_signed_id(signed_id)
Rails.logger.debug "Creating import from signed ID: #{signed_id[0..20]}..."

blob = ActiveStorage::Blob.find_signed(signed_id)

import = current_user.imports.build(
name: blob.filename.to_s,
source: params[:import][:source]
)

import.file.attach(blob)

import.save!

import
end
end
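The `create_import_from_signed_id` helper above resolves the Active Storage signed blob IDs that the upload controller posts as hidden `import[files][]` fields. For readers unfamiliar with that mechanism, here is a minimal sketch of the round trip in a Rails console, using only standard Active Storage calls (the file path and filename are illustrative):

    # Sketch of the signed-ID round trip (illustrative values, not code from this PR)
    blob = ActiveStorage::Blob.create_and_upload!(
      io: File.open('/tmp/2024-03.rec'),
      filename: '2024-03.rec',
      content_type: 'text/plain'
    )

    signed_id = blob.signed_id                  # the value the Stimulus controller submits
    ActiveStorage::Blob.find_signed(signed_id)  # => blob; nil if the signature does not verify
    ActiveStorage::Blob.find_signed!(signed_id) # same, but raises on an invalid signature

Because `find_signed` verifies a signature generated by the app itself, a tampered ID simply fails to resolve instead of exposing arbitrary blobs.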
app/javascript/controllers/direct_upload_controller.js (new file, 149 lines)

@@ -0,0 +1,149 @@

import { Controller } from "@hotwired/stimulus"
import { DirectUpload } from "@rails/activestorage"

export default class extends Controller {
  static targets = ["input", "progress", "progressBar", "submit", "form"]
  static values = {
    url: String
  }

  connect() {
    this.inputTarget.addEventListener("change", this.upload.bind(this))

    // Add form submission handler to disable the file input
    if (this.hasFormTarget) {
      this.formTarget.addEventListener("submit", this.onSubmit.bind(this))
    }
  }

  onSubmit(event) {
    if (this.isUploading) {
      // If still uploading, prevent submission
      event.preventDefault()
      console.log("Form submission prevented during upload")
      return
    }

    // Disable the file input to prevent it from being submitted with the form
    // This ensures only our hidden inputs with signed IDs are submitted
    this.inputTarget.disabled = true

    // Check if we have any signed IDs
    const signedIds = this.element.querySelectorAll('input[name="import[files][]"][type="hidden"]')
    if (signedIds.length === 0) {
      event.preventDefault()
      console.log("No files uploaded yet")
      alert("Please select and upload files first")
    } else {
      console.log(`Submitting form with ${signedIds.length} uploaded files`)
    }
  }

  upload() {
    const files = this.inputTarget.files
    if (files.length === 0) return

    console.log(`Uploading ${files.length} files`)
    this.isUploading = true

    // Disable submit button during upload
    this.submitTarget.disabled = true

    // Always remove any existing progress bar to ensure we create a fresh one
    if (this.hasProgressTarget) {
      this.progressTarget.remove()
    }

    // Create a wrapper div for better positioning and visibility
    const progressWrapper = document.createElement("div")
    progressWrapper.className = "mt-4 mb-6 border p-4 rounded-lg bg-gray-50"

    // Add a label
    const progressLabel = document.createElement("div")
    progressLabel.className = "font-medium mb-2 text-gray-700"
    progressLabel.textContent = "Upload Progress"
    progressWrapper.appendChild(progressLabel)

    // Create a new progress container
    const progressContainer = document.createElement("div")
    progressContainer.setAttribute("data-direct-upload-target", "progress")
    progressContainer.className = "w-full bg-gray-200 rounded-full h-4"

    // Create the progress bar fill element
    const progressBarFill = document.createElement("div")
    progressBarFill.setAttribute("data-direct-upload-target", "progressBar")
    progressBarFill.className = "bg-blue-600 h-4 rounded-full transition-all duration-300"
    progressBarFill.style.width = "0%"

    // Add the fill element to the container
    progressContainer.appendChild(progressBarFill)
    progressWrapper.appendChild(progressContainer)
    progressBarFill.dataset.percentageDisplay = "true"

    // Add the progress wrapper AFTER the file input field but BEFORE the submit button
    this.submitTarget.parentNode.insertBefore(progressWrapper, this.submitTarget)

    console.log("Progress bar created and inserted before submit button")

    let uploadCount = 0
    const totalFiles = files.length

    // Clear any existing hidden fields for files
    this.element.querySelectorAll('input[name="import[files][]"][type="hidden"]').forEach(el => {
      if (el !== this.inputTarget) {
        el.remove()
      }
    });

    Array.from(files).forEach(file => {
      console.log(`Starting upload for ${file.name}`)
      const upload = new DirectUpload(file, this.urlValue, this)
      upload.create((error, blob) => {
        uploadCount++

        if (error) {
          console.error("Error uploading file:", error)
        } else {
          console.log(`Successfully uploaded ${file.name} with ID: ${blob.signed_id}`)

          // Create a hidden field with the correct name
          const hiddenField = document.createElement("input")
          hiddenField.setAttribute("type", "hidden")
          hiddenField.setAttribute("name", "import[files][]")
          hiddenField.setAttribute("value", blob.signed_id)
          this.element.appendChild(hiddenField)

          console.log("Added hidden field with signed ID:", blob.signed_id)
        }

        // Enable submit button when all uploads are complete
        if (uploadCount === totalFiles) {
          this.submitTarget.disabled = false
          this.isUploading = false
          console.log("All uploads completed")
          console.log(`Ready to submit with ${this.element.querySelectorAll('input[name="import[files][]"][type="hidden"]').length} files`)
        }
      })
    })
  }

  directUploadWillStoreFileWithXHR(request) {
    request.upload.addEventListener("progress", event => {
      if (!this.hasProgressBarTarget) {
        console.warn("Progress bar target not found")
        return
      }

      const progress = (event.loaded / event.total) * 100
      const progressPercentage = `${progress.toFixed(1)}%`
      console.log(`Upload progress: ${progressPercentage}`)
      this.progressBarTarget.style.width = progressPercentage

      // Update text percentage if exists
      const percentageDisplay = this.element.querySelector('[data-percentage-display="true"]')
      if (percentageDisplay) {
        percentageDisplay.textContent = progressPercentage
      }
    })
  }
}
@@ -12,7 +12,7 @@ export function createMapLayer(map, selectedLayerName, layerKey, selfHosted) {

}

let layer;
console.log("isSelfhosted: ", selfHosted)

if (selfHosted === "true") {
layer = L.tileLayer(config.url, {
maxZoom: config.maxZoom,
@@ -21,13 +21,21 @@ export function createMapLayer(map, selectedLayerName, layerKey, selfHosted) {

// Add any other config properties that might be needed
});
} else {
layer = protomapsL.leafletLayer(
{
// Use the global protomapsL object (loaded via script tag)
try {
if (typeof window.protomapsL === 'undefined') {
throw new Error('protomapsL is not defined');
}

layer = window.protomapsL.leafletLayer({
url: config.url,
flavor: config.flavor,
crossOrigin: true,
}
)
});
} catch (error) {
console.error('Error creating protomaps layer:', error);
throw new Error('Failed to create vector tile layer. protomapsL may not be available.');
}
}

if (selectedLayerName === layerKey) {
app/services/exception_reporter.rb (new file, 9 lines)

@@ -0,0 +1,9 @@

# frozen_string_literal: true

class ExceptionReporter
  def self.call(exception)
    return unless DawarichSettings.self_hosted?

    Sentry.capture_exception(exception)
  end
end
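The other hunks in this pull request route their rescue blocks through this new service instead of calling Sentry directly, so the decision about whether to report sits behind the `DawarichSettings.self_hosted?` guard in one place. A condensed call-site pattern, mirroring the controllers above (`some_action` and `some_path` are placeholders, not names from the codebase):

    def some_action
      # ... perform the work ...
    rescue StandardError => e
      ExceptionReporter.call(e)  # previously: Sentry.capture_exception(e)
      redirect_to some_path, alert: e.message, status: :unprocessable_entity
    end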
@@ -27,6 +27,8 @@ class Visits::Suggest

title: 'Error suggesting visits',
content: "Error suggesting visits: #{e.message}\n#{e.backtrace.join("\n")}"
)

ExceptionReporter.call(e)
end

private
@@ -1,4 +1,8 @@

<%= form_with model: import, class: "contents" do |form| %>
<%= form_with model: import, class: "contents", data: {
controller: "direct-upload",
direct_upload_url_value: rails_direct_uploads_url,
direct_upload_target: "form"
} do |form| %>
<div class="form-control w-full">
<label class="label">
<span class="label-text">Select source</span>
@@ -65,10 +69,18 @@

<div class="label">
<span class="label-text">Select one or multiple files</span>
</div>
<%= form.file_field :files, multiple: true, class: "file-input file-input-bordered w-full max-w-xs" %>
<%= form.file_field :files,
multiple: true,
direct_upload: true,
class: "file-input file-input-bordered w-full max-w-xs",
data: { direct_upload_target: "input" } %>
<div class="text-sm text-gray-500 mt-2">
Files will be uploaded directly to storage. Please be patient during upload.
</div>
</label>

<div class="inline">
<%= form.submit class: "rounded-lg py-3 px-5 bg-blue-600 text-white inline-block font-medium cursor-pointer" %>
<%= form.submit class: "rounded-lg py-3 px-5 bg-blue-600 text-white inline-block font-medium cursor-pointer",
data: { direct_upload_target: "submit" } %>
</div>
<% end %>
@@ -15,16 +15,14 @@ settings = {

if PHOTON_API_HOST.present?
settings[:lookup] = :photon
settings[:photon] = { use_https: PHOTON_API_USE_HTTPS, host: PHOTON_API_HOST }
settings[:http_headers] = { 'X-Api-Key' => PHOTON_API_KEY } if defined?(PHOTON_API_KEY)
settings[:http_headers] = { 'X-Api-Key' => PHOTON_API_KEY } if PHOTON_API_KEY.present?
elsif GEOAPIFY_API_KEY.present?
settings[:lookup] = :geoapify
settings[:api_key] = GEOAPIFY_API_KEY
elsif NOMINATIM_API_HOST.present?
settings[:lookup] = :nominatim
settings[:nominatim] = { use_https: NOMINATIM_API_USE_HTTPS, host: NOMINATIM_API_HOST }
if NOMINATIM_API_KEY.present?
settings[:api_key] = NOMINATIM_API_KEY
end
settings[:api_key] = NOMINATIM_API_KEY if NOMINATIM_API_KEY.present?
end

Geocoder.configure(settings)
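The Photon change in this hunk swaps the `defined?(PHOTON_API_KEY)` guard for `PHOTON_API_KEY.present?`. The difference is that `defined?` only asks whether the constant exists at all, while `present?` (from ActiveSupport) checks that it holds a non-blank value. A small sketch of the distinction, assuming the constant is assigned from an environment variable elsewhere in the initializer (that assignment is not shown in this diff):

    PHOTON_API_KEY = ENV['PHOTON_API_KEY']   # nil when the variable is unset

    defined?(PHOTON_API_KEY)  # => "constant"  -- truthy even though the value is nil
    PHOTON_API_KEY.present?   # => false for nil or "", true only when a key is actually set

So the old guard would add an `X-Api-Key` header even when no key was configured; the new one adds it only when a key is present.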
@@ -42,16 +42,22 @@ RSpec.describe 'Imports', type: :request do

context 'when importing owntracks data' do
let(:file) { fixture_file_upload('owntracks/2024-03.rec', 'text/plain') }
let(:blob) { create_blob_for_file(file) }
let(:signed_id) { generate_signed_id_for_blob(blob) }

it 'queues import job' do
allow(ActiveStorage::Blob).to receive(:find_signed).with(signed_id).and_return(blob)

expect do
post imports_path, params: { import: { source: 'owntracks', files: [file] } }
post imports_path, params: { import: { source: 'owntracks', files: [signed_id] } }
end.to have_enqueued_job(Import::ProcessJob).on_queue('imports').at_least(1).times
end

it 'creates a new import' do
allow(ActiveStorage::Blob).to receive(:find_signed).with(signed_id).and_return(blob)

expect do
post imports_path, params: { import: { source: 'owntracks', files: [file] } }
post imports_path, params: { import: { source: 'owntracks', files: [signed_id] } }
end.to change(user.imports, :count).by(1)

expect(response).to redirect_to(imports_path)
@@ -60,21 +66,58 @@ RSpec.describe 'Imports', type: :request do

context 'when importing gpx data' do
let(:file) { fixture_file_upload('gpx/gpx_track_single_segment.gpx', 'application/gpx+xml') }
let(:blob) { create_blob_for_file(file) }
let(:signed_id) { generate_signed_id_for_blob(blob) }

it 'queues import job' do
allow(ActiveStorage::Blob).to receive(:find_signed).with(signed_id).and_return(blob)

expect do
post imports_path, params: { import: { source: 'gpx', files: [file] } }
post imports_path, params: { import: { source: 'gpx', files: [signed_id] } }
end.to have_enqueued_job(Import::ProcessJob).on_queue('imports').at_least(1).times
end

it 'creates a new import' do
allow(ActiveStorage::Blob).to receive(:find_signed).with(signed_id).and_return(blob)

expect do
post imports_path, params: { import: { source: 'gpx', files: [file] } }
post imports_path, params: { import: { source: 'gpx', files: [signed_id] } }
end.to change(user.imports, :count).by(1)

expect(response).to redirect_to(imports_path)
end
end

context 'when an error occurs during import creation' do
let(:file1) { fixture_file_upload('owntracks/2024-03.rec', 'text/plain') }
let(:file2) { fixture_file_upload('gpx/gpx_track_single_segment.gpx', 'application/gpx+xml') }
let(:blob1) { create_blob_for_file(file1) }
let(:blob2) { create_blob_for_file(file2) }
let(:signed_id1) { generate_signed_id_for_blob(blob1) }
let(:signed_id2) { generate_signed_id_for_blob(blob2) }

it 'deletes any created imports' do
# The first blob should be found correctly
allow(ActiveStorage::Blob).to receive(:find_signed).with(signed_id1).and_return(blob1)

# The second blob find will raise an error
allow(ActiveStorage::Blob).to receive(:find_signed).with(signed_id2).and_raise(StandardError, 'Test error')

# Allow ExceptionReporter to be called without actually calling it
allow(ExceptionReporter).to receive(:call)

# The request should not ultimately create any imports
expect do
post imports_path, params: { import: { source: 'owntracks', files: [signed_id1, signed_id2] } }
end.not_to change(Import, :count)

# Check that we were redirected with an error message
expect(response).to have_http_status(422)
# Just check that we have an alert message, not its exact content
# since error handling might transform the message
expect(flash[:alert]).not_to be_nil
end
end
end
end
@@ -138,4 +181,17 @@ RSpec.describe 'Imports', type: :request do

end
end
end

# Helper methods for creating ActiveStorage blobs and signed IDs in tests
def create_blob_for_file(file)
ActiveStorage::Blob.create_and_upload!(
io: file.open,
filename: file.original_filename,
content_type: file.content_type
)
end

def generate_signed_id_for_blob(blob)
blob.signed_id
end
end