Mirror of https://github.com/Freika/dawarich.git (synced 2026-01-11 09:41:40 -05:00)
Commit 1b9d5d9279
41 changed files with 815 additions and 571 deletions
@@ -1 +1 @@
0.25.5
0.25.6
24 CHANGELOG.md
@@ -5,6 +5,22 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).

# 0.25.6 - 2025-04-23

## Added

- In the map settings (top left corner of the map), you can now select colors for your colored routes. #682

## Changed

- The import edit page now allows editing the import name.
- Importing data no longer creates a notification for the user.
- Updating stats no longer creates a notification for the user.

## Fixed

- Fixed a bug where an import could fail due to a partial file download. #1069 #1073 #1024 #1051

# 0.25.5 - 2025-04-18

This release introduces a new way to send transactional emails over SMTP, for example password resets and email confirmations.

@@ -22,19 +38,23 @@ This is an optional feature and is not required for the app to work.

## Removed

- Optional telemetry was removed from the app.
- Sidekiq Web UI is now protected by basic auth in non-self-hosted mode.
- Optional telemetry was removed from the app. The `ENABLE_TELEMETRY` env var can be safely removed from docker compose.

## Changed

- The `rake points:migrate_to_lonlat` task now also tries to extract latitude and longitude from the `raw_data` column before falling back to the `longitude` and `latitude` columns to fill the `lonlat` column.
- Docker entrypoints now use the `DATABASE_NAME` environment variable to check whether Postgres is available.
- Sidekiq web UI is now protected by basic auth. Use the `SIDEKIQ_USERNAME` and `SIDEKIQ_PASSWORD` environment variables to set the credentials (see the sketch below).
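The diff itself does not show how that protection is wired up. A minimal sketch of the usual pattern for putting Sidekiq::Web behind `Rack::Auth::Basic` with these variables (illustrative only; the mount path and exact wiring in Dawarich are assumptions, not taken from this commit):

```ruby
# config/routes.rb — hypothetical sketch, not code from this commit.
require 'sidekiq/web'

Rails.application.routes.draw do
  # Compare credentials against the env vars named in the changelog entry above.
  Sidekiq::Web.use Rack::Auth::Basic do |username, password|
    ActiveSupport::SecurityUtils.secure_compare(username, ENV.fetch('SIDEKIQ_USERNAME', '')) &
      ActiveSupport::SecurityUtils.secure_compare(password, ENV.fetch('SIDEKIQ_PASSWORD', ''))
  end

  mount Sidekiq::Web => '/sidekiq' # mount point is an assumption
end
```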
## Added

- You can now provide SMTP settings in ENV vars to send emails.
- You can now edit imports. #1044 #623

## Fixed

- Importing data from Immich now works correctly. #1019

# 0.25.4 - 2025-04-02
|||
22
Gemfile.lock
22
Gemfile.lock
|
|
@ -104,11 +104,11 @@ GEM
|
|||
brakeman (7.0.2)
|
||||
racc
|
||||
builder (3.3.0)
|
||||
byebug (11.1.3)
|
||||
byebug (12.0.0)
|
||||
chartkick (5.1.4)
|
||||
coderay (1.1.3)
|
||||
concurrent-ruby (1.3.5)
|
||||
connection_pool (2.5.0)
|
||||
connection_pool (2.5.1)
|
||||
crack (1.0.0)
|
||||
bigdecimal
|
||||
rexml
|
||||
|
|
@ -116,7 +116,7 @@ GEM
|
|||
cronex (0.15.0)
|
||||
tzinfo
|
||||
unicode (>= 0.4.4.5)
|
||||
csv (3.3.2)
|
||||
csv (3.3.4)
|
||||
data_migrate (11.2.0)
|
||||
activerecord (>= 6.1)
|
||||
railties (>= 6.1)
|
||||
|
|
@ -164,7 +164,7 @@ GEM
|
|||
groupdate (6.5.1)
|
||||
activesupport (>= 7)
|
||||
hashdiff (1.1.2)
|
||||
httparty (0.22.0)
|
||||
httparty (0.23.1)
|
||||
csv
|
||||
mini_mime (>= 1.0.0)
|
||||
multi_xml (>= 0.5.2)
|
||||
|
|
@ -263,12 +263,12 @@ GEM
|
|||
prism (1.4.0)
|
||||
prometheus_exporter (2.2.0)
|
||||
webrick
|
||||
pry (0.14.2)
|
||||
pry (0.15.2)
|
||||
coderay (~> 1.1)
|
||||
method_source (~> 1.0)
|
||||
pry-byebug (3.10.1)
|
||||
byebug (~> 11.0)
|
||||
pry (>= 0.13, < 0.15)
|
||||
pry-byebug (3.11.0)
|
||||
byebug (~> 12.0)
|
||||
pry (>= 0.13, < 0.16)
|
||||
pry-rails (0.3.11)
|
||||
pry (>= 0.13.0)
|
||||
psych (5.2.3)
|
||||
|
|
@ -281,7 +281,7 @@ GEM
|
|||
activesupport (>= 3.0.0)
|
||||
raabro (1.4.0)
|
||||
racc (1.8.1)
|
||||
rack (3.1.12)
|
||||
rack (3.1.13)
|
||||
rack-session (2.1.0)
|
||||
base64 (>= 0.1.0)
|
||||
rack (>= 3.0.0)
|
||||
|
|
@ -334,7 +334,7 @@ GEM
|
|||
responders (3.1.1)
|
||||
actionpack (>= 5.2)
|
||||
railties (>= 5.2)
|
||||
rexml (3.4.0)
|
||||
rexml (3.4.1)
|
||||
rgeo (3.0.1)
|
||||
rgeo-activerecord (8.0.0)
|
||||
activerecord (>= 7.0)
|
||||
|
|
@ -403,7 +403,7 @@ GEM
|
|||
logger
|
||||
rack (>= 2.2.4)
|
||||
redis-client (>= 0.22.2)
|
||||
sidekiq-cron (2.1.0)
|
||||
sidekiq-cron (2.2.0)
|
||||
cronex (>= 0.13.0)
|
||||
fugit (~> 1.8, >= 1.11.1)
|
||||
globalid (>= 1.0.1)
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long

@@ -30,7 +30,7 @@ class Api::V1::SettingsController < ApiController
      :time_threshold_minutes, :merge_threshold_minutes, :route_opacity,
      :preferred_map_layer, :points_rendering_mode, :live_map_enabled,
      :immich_url, :immich_api_key, :photoprism_url, :photoprism_api_key,
      :speed_colored_routes
      :speed_colored_routes, :speed_color_scale
    )
  end
end
19 app/controllers/api/v1/subscriptions_controller.rb (Normal file)

@@ -0,0 +1,19 @@
# frozen_string_literal: true

class Api::V1::SubscriptionsController < ApiController
  skip_before_action :authenticate_api_key, only: %i[callback]
  def callback
    decoded_token = Subscription::DecodeJwtToken.new(params[:token]).call

    user = User.find(decoded_token[:user_id])
    user.update!(status: decoded_token[:status], active_until: decoded_token[:active_until])

    render json: { message: 'Subscription updated successfully' }
  rescue JWT::DecodeError => e
    Sentry.capture_exception(e)
    render json: { message: 'Failed to verify subscription update.' }, status: :unauthorized
  rescue ArgumentError => e
    Sentry.capture_exception(e)
    render json: { message: 'Invalid subscription data received.' }, status: :unprocessable_entity
  end
end
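The callback above trusts a signed JWT rather than a logged-in session. A rough sketch of how such a token can be minted and then verified, assuming the same `JWT_SECRET_KEY` and HS256 algorithm used by `Subscription::DecodeJwtToken` later in this diff (illustrative only; the real issuer is the external subscription manager):

```ruby
# Illustrative sketch: the payload keys mirror what the controller reads.
require 'jwt'

payload = {
  user_id: 42,                           # hypothetical user id
  status: 'active',
  active_until: 1.year.from_now.iso8601  # ActiveSupport assumed (Rails console)
}
token = JWT.encode(payload, ENV['JWT_SECRET_KEY'], 'HS256')

# The API side then decodes and applies it, as in the controller above:
decoded = Subscription::DecodeJwtToken.new(token).call
# => { user_id: 42, status: 'active', active_until: "..." }
```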
@@ -1,34 +0,0 @@
# frozen_string_literal: true

class Settings::SubscriptionsController < ApplicationController
  before_action :authenticate_user!
  before_action :authenticate_non_self_hosted!

  def index; end

  def subscription_callback
    token = params[:token]

    begin
      decoded_token = JWT.decode(
        token,
        ENV['JWT_SECRET_KEY'],
        true,
        { algorithm: 'HS256' }
      ).first.symbolize_keys

      unless decoded_token[:user_id] == current_user.id
        redirect_to settings_subscriptions_path, alert: 'Invalid subscription update request.'
        return
      end

      current_user.update!(status: decoded_token[:status], active_until: decoded_token[:active_until])

      redirect_to settings_subscriptions_path, notice: 'Your subscription has been updated successfully!'
    rescue JWT::DecodeError
      redirect_to settings_subscriptions_path, alert: 'Failed to verify subscription update.'
    rescue ArgumentError
      redirect_to settings_subscriptions_path, alert: 'Invalid subscription data received.'
    end
  end
end
|
@ -8,7 +8,10 @@ import { createMarkersArray } from "../maps/markers";
|
|||
import {
|
||||
createPolylinesLayer,
|
||||
updatePolylinesOpacity,
|
||||
updatePolylinesColors
|
||||
updatePolylinesColors,
|
||||
colorFormatEncode,
|
||||
colorFormatDecode,
|
||||
colorStopsFallback
|
||||
} from "../maps/polylines";
|
||||
|
||||
import { fetchAndDrawAreas, handleAreaCreated } from "../maps/areas";
|
||||
|
|
@ -47,6 +50,7 @@ export default class extends BaseController {
|
|||
this.liveMapEnabled = this.userSettings.live_map_enabled || false;
|
||||
this.countryCodesMap = countryCodesMap();
|
||||
this.speedColoredPolylines = this.userSettings.speed_colored_routes || false;
|
||||
this.speedColorScale = this.userSettings.speed_color_scale || colorFormatEncode(speedColorScaleDefault);
|
||||
|
||||
this.center = this.markers[this.markers.length - 1] || [52.514568, 13.350111];
|
||||
|
||||
|
|
@ -444,7 +448,7 @@ export default class extends BaseController {
|
|||
maps[this.userSettings.maps.name] = customLayer;
|
||||
} else {
|
||||
// If no custom map is set, ensure a default layer is added
|
||||
const defaultLayer = maps[selectedLayerName] || maps["OpenStreetMap"];
|
||||
const defaultLayer = maps[selectedLayerName] || maps["OpenStreetMap"] || maps["Atlas"];
|
||||
defaultLayer.addTo(this.map);
|
||||
}
|
||||
|
||||
|
|
@ -699,7 +703,7 @@ export default class extends BaseController {
|
|||
|
||||
// Form HTML
|
||||
div.innerHTML = `
|
||||
<form id="settings-form" class="w-48">
|
||||
<form id="settings-form" class="w-48 h-144 overflow-y-auto">
|
||||
<label for="route-opacity">Route Opacity</label>
|
||||
<div class="join">
|
||||
<input type="number" class="input input-ghost join-item focus:input-ghost input-xs input-bordered w-full max-w-xs" id="route-opacity" name="route_opacity" min="0" max="1" step="0.1" value="${this.routeOpacity}">
|
||||
|
|
@ -768,7 +772,16 @@ export default class extends BaseController {
|
|||
<input type="checkbox" id="speed_colored_routes" name="speed_colored_routes" class='w-4' style="width: 20px;" ${this.speedColoredRoutesChecked()} />
|
||||
</label>
|
||||
|
||||
<button type="submit">Update</button>
|
||||
<label for="speed_color_scale">Speed color scale</label>
|
||||
<div class="join">
|
||||
<input type="text" class="join-item input input-ghost focus:input-ghost input-xs input-bordered w-full max-w-xs" id="speed_color_scale" name="speed_color_scale" min="5" max="100" step="1" value="${this.speedColorScale}">
|
||||
<label for="speed_color_scale_info" class="btn-xs join-item">?</label>
|
||||
</div>
|
||||
<button type="button" id="edit-gradient-btn" class="btn btn-xs mt-2">Edit Scale</button>
|
||||
|
||||
<hr>
|
||||
|
||||
<button type="submit" class="btn btn-xs mt-2">Update</button>
|
||||
</form>
|
||||
`;
|
||||
|
||||
|
|
@ -781,6 +794,12 @@ export default class extends BaseController {
|
|||
// Prevent map interactions when interacting with the form
|
||||
L.DomEvent.disableClickPropagation(div);
|
||||
|
||||
// Attach event listener to the "Edit Gradient" button:
|
||||
const editBtn = div.querySelector("#edit-gradient-btn");
|
||||
if (editBtn) {
|
||||
editBtn.addEventListener("click", this.showGradientEditor.bind(this));
|
||||
}
|
||||
|
||||
// Add event listener to the form submission
|
||||
div.querySelector('#settings-form').addEventListener(
|
||||
'submit', this.updateSettings.bind(this)
|
||||
|
|
@ -829,7 +848,8 @@ export default class extends BaseController {
|
|||
merge_threshold_minutes: event.target.merge_threshold_minutes.value,
|
||||
points_rendering_mode: event.target.points_rendering_mode.value,
|
||||
live_map_enabled: event.target.live_map_enabled.checked,
|
||||
speed_colored_routes: event.target.speed_colored_routes.checked
|
||||
speed_colored_routes: event.target.speed_colored_routes.checked,
|
||||
speed_color_scale: event.target.speed_color_scale.value
|
||||
},
|
||||
}),
|
||||
})
|
||||
|
|
@ -866,7 +886,18 @@ export default class extends BaseController {
|
|||
if (this.polylinesLayer) {
|
||||
updatePolylinesColors(
|
||||
this.polylinesLayer,
|
||||
newSettings.speed_colored_routes
|
||||
newSettings.speed_colored_routes,
|
||||
newSettings.speed_color_scale
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (newSettings.speed_color_scale !== this.userSettings.speed_color_scale) {
|
||||
if (this.polylinesLayer) {
|
||||
updatePolylinesColors(
|
||||
this.polylinesLayer,
|
||||
newSettings.speed_colored_routes,
|
||||
newSettings.speed_color_scale
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -1352,4 +1383,143 @@ export default class extends BaseController {
|
|||
|
||||
container.innerHTML = html;
|
||||
}
|
||||
|
||||
showGradientEditor() {
|
||||
const modal = document.createElement("div");
|
||||
modal.id = "gradient-editor-modal";
|
||||
Object.assign(modal.style, {
|
||||
position: "fixed",
|
||||
top: "0",
|
||||
left: "0",
|
||||
right: "0",
|
||||
bottom: "0",
|
||||
backgroundColor: "rgba(0, 0, 0, 0.5)",
|
||||
display: "flex",
|
||||
justifyContent: "center",
|
||||
alignItems: "center",
|
||||
zIndex: "100",
|
||||
});
|
||||
|
||||
const content = document.createElement("div");
|
||||
Object.assign(content.style, {
|
||||
backgroundColor: "#fff",
|
||||
padding: "20px",
|
||||
borderRadius: "5px",
|
||||
minWidth: "300px",
|
||||
maxHeight: "80vh",
|
||||
display: "flex",
|
||||
flexDirection: "column",
|
||||
});
|
||||
|
||||
const title = document.createElement("h2");
|
||||
title.textContent = "Edit Speed Color Scale";
|
||||
content.appendChild(title);
|
||||
|
||||
const gradientContainer = document.createElement("div");
|
||||
gradientContainer.id = "gradient-editor-container";
|
||||
Object.assign(gradientContainer.style, {
|
||||
marginTop: "15px",
|
||||
overflowY: "auto",
|
||||
flex: "1",
|
||||
border: "1px solid #ccc",
|
||||
padding: "5px",
|
||||
});
|
||||
|
||||
const createRow = (stop = { speed: 0, color: "#000000" }) => {
|
||||
const row = document.createElement("div");
|
||||
row.style.display = "flex";
|
||||
row.style.alignItems = "center";
|
||||
row.style.gap = "10px";
|
||||
row.style.marginBottom = "8px";
|
||||
|
||||
const speedInput = document.createElement("input");
|
||||
speedInput.type = "number";
|
||||
speedInput.value = stop.speed;
|
||||
speedInput.style.width = "70px";
|
||||
|
||||
const colorInput = document.createElement("input");
|
||||
colorInput.type = "color";
|
||||
colorInput.value = stop.color;
|
||||
colorInput.style.width = "70px";
|
||||
|
||||
const removeBtn = document.createElement("button");
|
||||
removeBtn.textContent = "x";
|
||||
removeBtn.style.color = "#cc3311";
|
||||
removeBtn.style.flexShrink = "0";
|
||||
removeBtn.addEventListener("click", () => {
|
||||
if (gradientContainer.childElementCount > 1) {
|
||||
gradientContainer.removeChild(row);
|
||||
} else {
|
||||
showFlashMessage('error', 'At least one gradient stop is required.');
|
||||
}
|
||||
});
|
||||
|
||||
row.appendChild(speedInput);
|
||||
row.appendChild(colorInput);
|
||||
row.appendChild(removeBtn);
|
||||
return row;
|
||||
};
|
||||
|
||||
let stops;
|
||||
try {
|
||||
stops = colorFormatDecode(this.speedColorScale);
|
||||
} catch (error) {
|
||||
stops = colorStopsFallback;
|
||||
}
|
||||
stops.forEach(stop => {
|
||||
const row = createRow(stop);
|
||||
gradientContainer.appendChild(row);
|
||||
});
|
||||
|
||||
content.appendChild(gradientContainer);
|
||||
|
||||
const addRowBtn = document.createElement("button");
|
||||
addRowBtn.textContent = "Add Row";
|
||||
addRowBtn.style.marginTop = "10px";
|
||||
addRowBtn.addEventListener("click", () => {
|
||||
const newRow = createRow({ speed: 0, color: "#000000" });
|
||||
gradientContainer.appendChild(newRow);
|
||||
});
|
||||
content.appendChild(addRowBtn);
|
||||
|
||||
const btnContainer = document.createElement("div");
|
||||
btnContainer.style.display = "flex";
|
||||
btnContainer.style.justifyContent = "flex-end";
|
||||
btnContainer.style.gap = "10px";
|
||||
btnContainer.style.marginTop = "15px";
|
||||
|
||||
const cancelBtn = document.createElement("button");
|
||||
cancelBtn.textContent = "Cancel";
|
||||
cancelBtn.addEventListener("click", () => {
|
||||
document.body.removeChild(modal);
|
||||
});
|
||||
|
||||
const saveBtn = document.createElement("button");
|
||||
saveBtn.textContent = "Save";
|
||||
saveBtn.addEventListener("click", () => {
|
||||
const newStops = [];
|
||||
gradientContainer.querySelectorAll("div").forEach(row => {
|
||||
const inputs = row.querySelectorAll("input");
|
||||
const speed = Number(inputs[0].value);
|
||||
const color = inputs[1].value;
|
||||
newStops.push({ speed, color });
|
||||
});
|
||||
|
||||
const newGradient = colorFormatEncode(newStops);
|
||||
|
||||
this.speedColorScale = newGradient;
|
||||
const speedColorScaleInput = document.getElementById("speed_color_scale");
|
||||
if (speedColorScaleInput) {
|
||||
speedColorScaleInput.value = newGradient;
|
||||
}
|
||||
|
||||
document.body.removeChild(modal);
|
||||
});
|
||||
|
||||
btnContainer.appendChild(cancelBtn);
|
||||
btnContainer.appendChild(saveBtn);
|
||||
content.appendChild(btnContainer);
|
||||
modal.appendChild(content);
|
||||
document.body.appendChild(modal);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -5,17 +5,7 @@
|
|||
|
||||
import BaseController from "./base_controller"
|
||||
import L from "leaflet"
|
||||
import {
|
||||
osmMapLayer,
|
||||
osmHotMapLayer,
|
||||
OPNVMapLayer,
|
||||
openTopoMapLayer,
|
||||
cyclOsmMapLayer,
|
||||
esriWorldStreetMapLayer,
|
||||
esriWorldTopoMapLayer,
|
||||
esriWorldImageryMapLayer,
|
||||
esriWorldGrayCanvasMapLayer
|
||||
} from "../maps/layers"
|
||||
import { createAllMapLayers } from "../maps/layers"
|
||||
import { createPopupContent } from "../maps/popups"
|
||||
import {
|
||||
fetchAndDisplayPhotos,
|
||||
|
|
@ -61,7 +51,10 @@ export default class extends BaseController {
|
|||
this.map = L.map(this.containerTarget).setView(center, zoom)
|
||||
|
||||
// Add base map layer
|
||||
osmMapLayer(this.map, "OpenStreetMap")
|
||||
const selectedLayerName = this.userSettings.preferred_map_layer || "OpenStreetMap";
|
||||
const maps = this.baseMaps();
|
||||
const defaultLayer = maps[selectedLayerName] || maps["OpenStreetMap"] || maps["Atlas"];
|
||||
defaultLayer.addTo(this.map);
|
||||
|
||||
// Add scale control to bottom right
|
||||
L.control.scale({
|
||||
|
|
@ -168,18 +161,30 @@ export default class extends BaseController {
|
|||
|
||||
baseMaps() {
|
||||
let selectedLayerName = this.userSettings.preferred_map_layer || "OpenStreetMap";
|
||||
let maps = createAllMapLayers(this.map, selectedLayerName);
|
||||
|
||||
return {
|
||||
OpenStreetMap: osmMapLayer(this.map, selectedLayerName),
|
||||
"OpenStreetMap.HOT": osmHotMapLayer(this.map, selectedLayerName),
|
||||
OPNV: OPNVMapLayer(this.map, selectedLayerName),
|
||||
openTopo: openTopoMapLayer(this.map, selectedLayerName),
|
||||
cyclOsm: cyclOsmMapLayer(this.map, selectedLayerName),
|
||||
esriWorldStreet: esriWorldStreetMapLayer(this.map, selectedLayerName),
|
||||
esriWorldTopo: esriWorldTopoMapLayer(this.map, selectedLayerName),
|
||||
esriWorldImagery: esriWorldImageryMapLayer(this.map, selectedLayerName),
|
||||
esriWorldGrayCanvas: esriWorldGrayCanvasMapLayer(this.map, selectedLayerName)
|
||||
};
|
||||
// Add custom map if it exists in settings
|
||||
if (this.userSettings.maps && this.userSettings.maps.url) {
|
||||
const customLayer = L.tileLayer(this.userSettings.maps.url, {
|
||||
maxZoom: 19,
|
||||
attribution: "© OpenStreetMap contributors"
|
||||
});
|
||||
|
||||
// If this is the preferred layer, add it to the map immediately
|
||||
if (selectedLayerName === this.userSettings.maps.name) {
|
||||
customLayer.addTo(this.map);
|
||||
// Remove any other base layers that might be active
|
||||
Object.values(maps).forEach(layer => {
|
||||
if (this.map.hasLayer(layer)) {
|
||||
this.map.removeLayer(layer);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
maps[this.userSettings.maps.name] = customLayer;
|
||||
}
|
||||
|
||||
return maps;
|
||||
}
|
||||
|
||||
addMarkers() {
|
||||
|
|
|
|||
|
|
@ -48,127 +48,3 @@ export function osmMapLayer(map, selectedLayerName) {
|
|||
return layer;
|
||||
}
|
||||
}
|
||||
|
||||
export function osmHotMapLayer(map, selectedLayerName) {
|
||||
let layerName = "OpenStreetMap.HOT";
|
||||
let layer = L.tileLayer("https://{s}.tile.openstreetmap.fr/hot/{z}/{x}/{y}.png", {
|
||||
maxZoom: 19,
|
||||
attribution: "© OpenStreetMap contributors, Tiles style by Humanitarian OpenStreetMap Team hosted by OpenStreetMap France",
|
||||
});
|
||||
|
||||
if (selectedLayerName === layerName) {
|
||||
return layer.addTo(map);
|
||||
} else {
|
||||
return layer;
|
||||
}
|
||||
}
|
||||
|
||||
export function OPNVMapLayer(map, selectedLayerName) {
|
||||
let layerName = 'OPNV';
|
||||
let layer = L.tileLayer('https://tileserver.memomaps.de/tilegen/{z}/{x}/{y}.png', {
|
||||
maxZoom: 18,
|
||||
attribution: 'Map <a href="https://memomaps.de/">memomaps.de</a> <a href="http://creativecommons.org/licenses/by-sa/2.0/">CC-BY-SA</a>, map data © <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors'
|
||||
});
|
||||
|
||||
if (selectedLayerName === layerName) {
|
||||
return layer.addTo(map);
|
||||
} else {
|
||||
return layer;
|
||||
}
|
||||
}
|
||||
|
||||
export function openTopoMapLayer(map, selectedLayerName) {
|
||||
let layerName = 'openTopo';
|
||||
let layer = L.tileLayer('https://{s}.tile.opentopomap.org/{z}/{x}/{y}.png', {
|
||||
maxZoom: 17,
|
||||
attribution: 'Map data: © <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors, <a href="http://viewfinderpanoramas.org">SRTM</a> | Map style: © <a href="https://opentopomap.org">OpenTopoMap</a> (<a href="https://creativecommons.org/licenses/by-sa/3.0/">CC-BY-SA</a>)'
|
||||
});
|
||||
|
||||
if (selectedLayerName === layerName) {
|
||||
return layer.addTo(map);
|
||||
} else {
|
||||
return layer;
|
||||
}
|
||||
}
|
||||
|
||||
export function cyclOsmMapLayer(map, selectedLayerName) {
|
||||
let layerName = 'cyclOsm';
|
||||
let layer = L.tileLayer('https://{s}.tile-cyclosm.openstreetmap.fr/cyclosm/{z}/{x}/{y}.png', {
|
||||
maxZoom: 20,
|
||||
attribution: '<a href="https://github.com/cyclosm/cyclosm-cartocss-style/releases" title="CyclOSM - Open Bicycle render">CyclOSM</a> | Map data: © <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors'
|
||||
});
|
||||
|
||||
if (selectedLayerName === layerName) {
|
||||
return layer.addTo(map);
|
||||
} else {
|
||||
return layer;
|
||||
}
|
||||
}
|
||||
|
||||
export function esriWorldStreetMapLayer(map, selectedLayerName) {
|
||||
let layerName = 'esriWorldStreet';
|
||||
let layer = L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Street_Map/MapServer/tile/{z}/{y}/{x}', {
|
||||
minZoom: 1,
|
||||
maxZoom: 19,
|
||||
bounds: [[-90, -180], [90, 180]],
|
||||
noWrap: true,
|
||||
attribution: 'Tiles © Esri — Source: Esri, DeLorme, NAVTEQ, USGS, Intermap, iPC, NRCAN, Esri Japan, METI, Esri China (Hong Kong), Esri (Thailand), TomTom, 2012'
|
||||
});
|
||||
|
||||
if (selectedLayerName === layerName) {
|
||||
return layer.addTo(map);
|
||||
} else {
|
||||
return layer;
|
||||
}
|
||||
}
|
||||
|
||||
export function esriWorldTopoMapLayer(map, selectedLayerName) {
|
||||
let layerName = 'esriWorldTopo';
|
||||
let layer = L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Topo_Map/MapServer/tile/{z}/{y}/{x}', {
|
||||
minZoom: 1,
|
||||
maxZoom: 19,
|
||||
bounds: [[-90, -180], [90, 180]],
|
||||
noWrap: true,
|
||||
attribution: 'Tiles © Esri — Esri, DeLorme, NAVTEQ, TomTom, Intermap, iPC, USGS, FAO, NPS, NRCAN, GeoBase, Kadaster NL, Ordnance Survey, Esri Japan, METI, Esri China (Hong Kong), and the GIS User Community'
|
||||
});
|
||||
|
||||
if (selectedLayerName === layerName) {
|
||||
return layer.addTo(map);
|
||||
} else {
|
||||
return layer;
|
||||
}
|
||||
}
|
||||
|
||||
export function esriWorldImageryMapLayer(map, selectedLayerName) {
|
||||
let layerName = 'esriWorldImagery';
|
||||
let layer = L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', {
|
||||
minZoom: 1,
|
||||
maxZoom: 19,
|
||||
bounds: [[-90, -180], [90, 180]],
|
||||
noWrap: true,
|
||||
attribution: 'Tiles © Esri — Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community'
|
||||
});
|
||||
|
||||
if (selectedLayerName === layerName) {
|
||||
return layer.addTo(map);
|
||||
} else {
|
||||
return layer;
|
||||
}
|
||||
}
|
||||
|
||||
export function esriWorldGrayCanvasMapLayer(map, selectedLayerName) {
|
||||
let layerName = 'esriWorldGrayCanvas';
|
||||
let layer = L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/Canvas/World_Light_Gray_Base/MapServer/tile/{z}/{y}/{x}', {
|
||||
minZoom: 1,
|
||||
maxZoom: 16,
|
||||
bounds: [[-90, -180], [90, 180]],
|
||||
noWrap: true,
|
||||
attribution: 'Tiles © Esri — Esri, DeLorme, NAVTEQ'
|
||||
});
|
||||
|
||||
if (selectedLayerName === layerName) {
|
||||
return layer.addTo(map);
|
||||
} else {
|
||||
return layer;
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -25,22 +25,44 @@ export function calculateSpeed(point1, point2) {
}

// Optimize getSpeedColor by pre-calculating color stops
const colorStops = [
export const colorStopsFallback = [
  { speed: 0, color: '#00ff00' },   // Stationary/very slow (green)
  { speed: 15, color: '#00ffff' },  // Walking/jogging (cyan)
  { speed: 30, color: '#ff00ff' },  // Cycling/slow driving (magenta)
  { speed: 50, color: '#ffff00' },  // Urban driving (yellow)
  { speed: 100, color: '#ff3300' }  // Highway driving (red)
].map(stop => ({
  ...stop,
  rgb: hexToRGB(stop.color)
}));
];

export function getSpeedColor(speedKmh, useSpeedColors) {
export function colorFormatEncode(arr) {
  return arr.map(item => `${item.speed}:${item.color}`).join('|');
}

export function colorFormatDecode(str) {
  return str.split('|').map(segment => {
    const [speed, color] = segment.split(':');
    return { speed: Number(speed), color };
  });
}

export function getSpeedColor(speedKmh, useSpeedColors, speedColorScale) {
  if (!useSpeedColors) {
    return '#0000ff';
  }

  let colorStops;

  try {
    colorStops = colorFormatDecode(speedColorScale).map(stop => ({
      ...stop,
      rgb: hexToRGB(stop.color)
    }));
  } catch (error) { // If the user has given invalid values
    colorStops = colorStopsFallback.map(stop => ({
      ...stop,
      rgb: hexToRGB(stop.color)
    }));
  }

  // Find the appropriate color segment
  for (let i = 1; i < colorStops.length; i++) {
    if (speedKmh <= colorStops[i].speed) {

@@ -388,7 +410,7 @@ export function createPolylinesLayer(markers, map, timezone, routeOpacity, userS
  for (let i = 0; i < polylineCoordinates.length - 1; i++) {
    const speed = calculateSpeed(polylineCoordinates[i], polylineCoordinates[i + 1]);
    const color = getSpeedColor(speed, userSettings.speed_colored_routes);
    const color = getSpeedColor(speed, userSettings.speed_colored_routes, userSettings.speed_color_scale);

    const segment = L.polyline(
      [

@@ -466,7 +488,7 @@ export function createPolylinesLayer(markers, map, timezone, routeOpacity, userS
  return layerGroup;
}

export function updatePolylinesColors(polylinesLayer, useSpeedColors) {
export function updatePolylinesColors(polylinesLayer, useSpeedColors, speedColorScale) {
  const defaultStyle = {
    color: '#0000ff',
    originalColor: '#0000ff'

@@ -496,7 +518,7 @@ export function updatePolylinesColors(polylinesLayer, useSpeedColors) {
    }

    const speed = segment.options.speed || 0;
    const newColor = getSpeedColor(speed, true);
    const newColor = getSpeedColor(speed, true, speedColorScale);

    // Reuse style object
    styleObj.color = newColor;
|||
|
|
@ -5,25 +5,12 @@ class Stats::CalculatingJob < ApplicationJob
|
|||
|
||||
def perform(user_id, year, month)
|
||||
Stats::CalculateMonth.new(user_id, year, month).call
|
||||
|
||||
create_stats_updated_notification(user_id, year, month)
|
||||
rescue StandardError => e
|
||||
create_stats_update_failed_notification(user_id, e)
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def create_stats_updated_notification(user_id, year, month)
|
||||
user = User.find(user_id)
|
||||
|
||||
Notifications::Create.new(
|
||||
user:,
|
||||
kind: :info,
|
||||
title: "Stats updated for #{Date::MONTHNAMES[month.to_i]} of #{year}",
|
||||
content: "Stats updated for #{Date::MONTHNAMES[month.to_i]} of #{year}"
|
||||
).call
|
||||
end
|
||||
|
||||
def create_stats_update_failed_notification(user_id, error)
|
||||
user = User.find(user_id)
|
||||
|
||||
|
|
|
@@ -30,7 +30,7 @@ class Point < ApplicationRecord

  after_create :async_reverse_geocode
  after_create_commit :broadcast_coordinates
  after_commit -> { Import::UpdatePointsCountJob.perform_later(import_id) }, on: :destroy, if: -> { import_id.present? }
  # after_commit -> { Import::UpdatePointsCountJob.perform_later(import_id) }, on: :destroy, if: -> { import_id.present? }

  def self.without_raw_data
    select(column_names - ['raw_data'])
|
|||
|
|
@ -1,29 +0,0 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Geojson::ImportParser
|
||||
include Imports::Broadcaster
|
||||
include PointValidation
|
||||
|
||||
attr_reader :import, :user_id
|
||||
|
||||
def initialize(import, user_id)
|
||||
@import = import
|
||||
@user_id = user_id
|
||||
end
|
||||
|
||||
def call
|
||||
import.file.download do |file|
|
||||
json = Oj.load(file)
|
||||
|
||||
data = Geojson::Params.new(json).call
|
||||
|
||||
data.each.with_index(1) do |point, index|
|
||||
next if point_exists?(point, user_id)
|
||||
|
||||
Point.create!(point.merge(user_id:, import_id: import.id))
|
||||
|
||||
broadcast_import_progress(import, index)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
28
app/services/geojson/importer.rb
Normal file
28
app/services/geojson/importer.rb
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class Geojson::Importer
|
||||
include Imports::Broadcaster
|
||||
include PointValidation
|
||||
|
||||
attr_reader :import, :user_id
|
||||
|
||||
def initialize(import, user_id)
|
||||
@import = import
|
||||
@user_id = user_id
|
||||
end
|
||||
|
||||
def call
|
||||
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
|
||||
json = Oj.load(file_content)
|
||||
|
||||
data = Geojson::Params.new(json).call
|
||||
|
||||
data.each.with_index(1) do |point, index|
|
||||
next if point_exists?(point, user_id)
|
||||
|
||||
Point.create!(point.merge(user_id:, import_id: import.id))
|
||||
|
||||
broadcast_import_progress(import, index)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class GoogleMaps::PhoneTakeoutParser
|
||||
class GoogleMaps::PhoneTakeoutImporter
|
||||
include Imports::Broadcaster
|
||||
|
||||
attr_reader :import, :user_id
|
||||
|
|
@ -42,21 +42,19 @@ class GoogleMaps::PhoneTakeoutParser
|
|||
def parse_json
|
||||
# location-history.json could contain an array of data points
|
||||
# or an object with semanticSegments, rawSignals and rawArray
|
||||
# I guess there are no easy ways with Google since these two are
|
||||
# 3rd and 4th formats of their location data exports
|
||||
semantic_segments = []
|
||||
raw_signals = []
|
||||
raw_array = []
|
||||
|
||||
import.file.download do |file|
|
||||
json = Oj.load(file)
|
||||
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
|
||||
|
||||
if json.is_a?(Array)
|
||||
raw_array = parse_raw_array(json)
|
||||
else
|
||||
semantic_segments = parse_semantic_segments(json['semanticSegments']) if json['semanticSegments']
|
||||
raw_signals = parse_raw_signals(json['rawSignals']) if json['rawSignals']
|
||||
end
|
||||
json = Oj.load(file_content)
|
||||
|
||||
if json.is_a?(Array)
|
||||
raw_array = parse_raw_array(json)
|
||||
else
|
||||
semantic_segments = parse_semantic_segments(json['semanticSegments']) if json['semanticSegments']
|
||||
raw_signals = parse_raw_signals(json['rawSignals']) if json['rawSignals']
|
||||
end
|
||||
|
||||
semantic_segments + raw_signals + raw_array
|
||||
|
|
@ -5,8 +5,6 @@
|
|||
|
||||
class GoogleMaps::RecordsStorageImporter
|
||||
BATCH_SIZE = 1000
|
||||
MAX_RETRIES = 3
|
||||
DOWNLOAD_TIMEOUT = 300 # 5 minutes timeout
|
||||
|
||||
def initialize(import, user_id)
|
||||
@import = import
|
||||
|
|
@ -25,54 +23,13 @@ class GoogleMaps::RecordsStorageImporter
|
|||
attr_reader :import, :user
|
||||
|
||||
def process_file_in_batches
|
||||
file = download_file
|
||||
verify_file_integrity(file)
|
||||
locations = parse_file(file)
|
||||
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
|
||||
locations = parse_file(file_content)
|
||||
process_locations_in_batches(locations) if locations.present?
|
||||
end
|
||||
|
||||
def download_file
|
||||
retries = 0
|
||||
|
||||
begin
|
||||
Timeout.timeout(DOWNLOAD_TIMEOUT) do
|
||||
import.file.download
|
||||
end
|
||||
rescue Timeout::Error => e
|
||||
retries += 1
|
||||
if retries <= MAX_RETRIES
|
||||
Rails.logger.warn("Download timeout, attempt #{retries} of #{MAX_RETRIES}")
|
||||
retry
|
||||
else
|
||||
Rails.logger.error("Download failed after #{MAX_RETRIES} attempts")
|
||||
raise
|
||||
end
|
||||
rescue StandardError => e
|
||||
Rails.logger.error("Download error: #{e.message}")
|
||||
raise
|
||||
end
|
||||
end
|
||||
|
||||
def verify_file_integrity(file)
|
||||
# Verify file size
|
||||
expected_size = import.file.blob.byte_size
|
||||
actual_size = file.size
|
||||
|
||||
if expected_size != actual_size
|
||||
raise "Incomplete download: expected #{expected_size} bytes, got #{actual_size} bytes"
|
||||
end
|
||||
|
||||
# Verify checksum
|
||||
expected_checksum = import.file.blob.checksum
|
||||
actual_checksum = Base64.strict_encode64(Digest::MD5.digest(file))
|
||||
|
||||
return unless expected_checksum != actual_checksum
|
||||
|
||||
raise "Checksum mismatch: expected #{expected_checksum}, got #{actual_checksum}"
|
||||
end
|
||||
|
||||
def parse_file(file)
|
||||
parsed_file = Oj.load(file, mode: :compat)
|
||||
def parse_file(file_content)
|
||||
parsed_file = Oj.load(file_content, mode: :compat)
|
||||
return nil unless parsed_file.is_a?(Hash) && parsed_file['locations']
|
||||
|
||||
parsed_file['locations']
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
class GoogleMaps::SemanticHistoryParser
|
||||
class GoogleMaps::SemanticHistoryImporter
|
||||
include Imports::Broadcaster
|
||||
|
||||
BATCH_SIZE = 1000
|
||||
|
|
@ -61,17 +61,12 @@ class GoogleMaps::SemanticHistoryParser
|
|||
end
|
||||
|
||||
def points_data
|
||||
data = nil
|
||||
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
|
||||
json = Oj.load(file_content)
|
||||
|
||||
import.file.download do |f|
|
||||
json = Oj.load(f)
|
||||
|
||||
data = json['timelineObjects'].flat_map do |timeline_object|
|
||||
parse_timeline_object(timeline_object)
|
||||
end.compact
|
||||
end
|
||||
|
||||
data
|
||||
json['timelineObjects'].flat_map do |timeline_object|
|
||||
parse_timeline_object(timeline_object)
|
||||
end.compact
|
||||
end
|
||||
|
||||
def parse_timeline_object(timeline_object)
|
||||
|
|
@ -13,17 +13,16 @@ class Gpx::TrackImporter
|
|||
end
|
||||
|
||||
def call
|
||||
import.file.download do |file|
|
||||
json = Hash.from_xml(file)
|
||||
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
|
||||
json = Hash.from_xml(file_content)
|
||||
|
||||
tracks = json['gpx']['trk']
|
||||
tracks_arr = tracks.is_a?(Array) ? tracks : [tracks]
|
||||
tracks = json['gpx']['trk']
|
||||
tracks_arr = tracks.is_a?(Array) ? tracks : [tracks]
|
||||
|
||||
points = tracks_arr.map { parse_track(_1) }.flatten.compact
|
||||
points_data = points.map { prepare_point(_1) }.compact
|
||||
points = tracks_arr.map { parse_track(_1) }.flatten.compact
|
||||
points_data = points.map { prepare_point(_1) }.compact
|
||||
|
||||
bulk_insert_points(points_data)
|
||||
end
|
||||
bulk_insert_points(points_data)
|
||||
end
|
||||
|
||||
private
|
||||
|
|
|
|||
@@ -11,8 +11,6 @@ class Imports::Create
  def call
    parser(import.source).new(import, user.id).call

    create_import_finished_notification(import, user)

    schedule_stats_creating(user.id)
    schedule_visit_suggesting(user.id, import)
    update_import_points_count(import)

@@ -25,13 +23,13 @@ class Imports::Create
  def parser(source)
    # Bad class naming, by the way: these are not parsers, they are point creators
    case source
    when 'google_semantic_history' then GoogleMaps::SemanticHistoryParser
    when 'google_phone_takeout' then GoogleMaps::PhoneTakeoutParser
    when 'google_semantic_history' then GoogleMaps::SemanticHistoryImporter
    when 'google_phone_takeout' then GoogleMaps::PhoneTakeoutImporter
    when 'google_records' then GoogleMaps::RecordsStorageImporter
    when 'owntracks' then OwnTracks::Importer
    when 'gpx' then Gpx::TrackImporter
    when 'geojson' then Geojson::ImportParser
    when 'immich_api', 'photoprism_api' then Photos::ImportParser
    when 'geojson' then Geojson::Importer
    when 'immich_api', 'photoprism_api' then Photos::Importer
    end
  end

@@ -53,21 +51,22 @@ class Imports::Create
    VisitSuggestingJob.perform_later(user_id:, start_at:, end_at:)
  end

  def create_import_finished_notification(import, user)
    Notifications::Create.new(
      user:,
      kind: :info,
      title: 'Import finished',
      content: "Import \"#{import.name}\" successfully finished."
    ).call
  end

  def create_import_failed_notification(import, user, error)
    message = import_failed_message(import, error)

    Notifications::Create.new(
      user:,
      kind: :error,
      title: 'Import failed',
      content: "Import \"#{import.name}\" failed: #{error.message}, stacktrace: #{error.backtrace.join("\n")}"
      content: message
    ).call
  end

  def import_failed_message(import, error)
    if DawarichSettings.self_hosted?
      "Import \"#{import.name}\" failed: #{error.message}, stacktrace: #{error.backtrace.join("\n")}"
    else
      "Import \"#{import.name}\" failed, please contact us at hi@dawarich.com"
    end
  end
end
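As a concrete example of the dispatch above, using only classes named in this diff, a GPX upload ends up being handled like this:

```ruby
# Worked example of the Imports::Create#parser mapping for source 'gpx'.
importer_class = Gpx::TrackImporter         # what parser('gpx') returns
importer_class.new(import, user.id).call    # same call shape as in #call above
```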
81 app/services/imports/secure_file_downloader.rb (Normal file)

@@ -0,0 +1,81 @@
# frozen_string_literal: true

class Imports::SecureFileDownloader
  DOWNLOAD_TIMEOUT = 300 # 5 minutes timeout
  MAX_RETRIES = 3

  def initialize(storage_attachment)
    @storage_attachment = storage_attachment
  end

  def download_with_verification
    retries = 0
    file_content = nil

    begin
      Timeout.timeout(DOWNLOAD_TIMEOUT) do
        # Download the file to a string
        tempfile = Tempfile.new("download_#{Time.now.to_i}", binmode: true)
        begin
          # Try to download block-by-block
          storage_attachment.download do |chunk|
            tempfile.write(chunk)
          end
          tempfile.rewind
          file_content = tempfile.read
        ensure
          tempfile.close
          tempfile.unlink
        end

        # If we didn't get any content but no error occurred, try a different approach
        if file_content.nil? || file_content.empty?
          Rails.logger.warn('No content received from block download, trying alternative method')
          # Some ActiveStorage attachments may work differently, try direct access if possible
          file_content = storage_attachment.blob.download
        end
      end
    rescue Timeout::Error => e
      retries += 1
      if retries <= MAX_RETRIES
        Rails.logger.warn("Download timeout, attempt #{retries} of #{MAX_RETRIES}")
        retry
      else
        Rails.logger.error("Download failed after #{MAX_RETRIES} attempts")
        raise
      end
    rescue StandardError => e
      Rails.logger.error("Download error: #{e.message}")
      raise
    end

    raise 'Download completed but no content was received' if file_content.nil? || file_content.empty?

    verify_file_integrity(file_content)
    file_content
  end

  private

  attr_reader :storage_attachment

  def verify_file_integrity(file_content)
    return if file_content.nil? || file_content.empty?

    # Verify file size
    expected_size = storage_attachment.blob.byte_size
    actual_size = file_content.bytesize

    if expected_size != actual_size
      raise "Incomplete download: expected #{expected_size} bytes, got #{actual_size} bytes"
    end

    # Verify checksum
    expected_checksum = storage_attachment.blob.checksum
    actual_checksum = Base64.strict_encode64(Digest::MD5.digest(file_content))

    return unless expected_checksum != actual_checksum

    raise "Checksum mismatch: expected #{expected_checksum}, got #{actual_checksum}"
  end
end
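For orientation, this is how the importers elsewhere in this commit consume the downloader — a minimal usage sketch built only from calls that appear in the diff:

```ruby
# `import` is an Import record with an ActiveStorage attachment named `file`,
# as used by the importer classes in this commit.
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification

# The downloader retries on Timeout::Error, verifies the byte size and MD5
# checksum against the blob metadata, and raises on any mismatch, so the
# caller can simply parse the returned string:
json = Oj.load(file_content)
```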
@ -11,20 +11,19 @@ class OwnTracks::Importer
|
|||
end
|
||||
|
||||
def call
|
||||
import.file.download do |file|
|
||||
parsed_data = OwnTracks::RecParser.new(file).call
|
||||
file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
|
||||
parsed_data = OwnTracks::RecParser.new(file_content).call
|
||||
|
||||
points_data = parsed_data.map do |point|
|
||||
OwnTracks::Params.new(point).call.merge(
|
||||
import_id: import.id,
|
||||
user_id: user_id,
|
||||
created_at: Time.current,
|
||||
updated_at: Time.current
|
||||
)
|
||||
end
|
||||
|
||||
bulk_insert_points(points_data)
|
||||
points_data = parsed_data.map do |point|
|
||||
OwnTracks::Params.new(point).call.merge(
|
||||
import_id: import.id,
|
||||
user_id: user_id,
|
||||
created_at: Time.current,
|
||||
updated_at: Time.current
|
||||
)
|
||||
end
|
||||
|
||||
bulk_insert_points(points_data)
|
||||
end
|
||||
|
||||
private
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,6 @@
# frozen_string_literal: true

class Photos::ImportParser
class Photos::Importer
  include Imports::Broadcaster
  include PointValidation
  attr_reader :import, :user_id

@@ -11,11 +11,10 @@ class Photos::ImportParser
  end

  def call
    import.file.download do |file|
      json = Oj.load(file)
    file_content = Imports::SecureFileDownloader.new(import.file).download_with_verification
    json = Oj.load(file_content)

      json.each.with_index(1) { |point, index| create_point(point, index) }
    end
    json.each.with_index(1) { |point, index| create_point(point, index) }
  end

  def create_point(point, index)
16 app/services/subscription/decode_jwt_token.rb (Normal file)

@@ -0,0 +1,16 @@
# frozen_string_literal: true

class Subscription::DecodeJwtToken
  def initialize(token)
    @token = token
  end

  def call
    JWT.decode(
      @token,
      ENV['JWT_SECRET_KEY'],
      true,
      { algorithm: 'HS256' }
    ).first.symbolize_keys
  end
end
@@ -1,8 +1,20 @@
<div class="mx-auto md:w-2/3 w-full">
  <h1 class="font-bold text-4xl">Editing import</h1>

  <%= render "form", import: @import %>
  <%= form_with model: @import, class: 'form-body mt-4' do |form| %>
    <div class="form-control">
      <%= form.label :name %>
      <%= form.text_field :name, class: 'input input-bordered' %>
    </div>

  <%= link_to "Show this import", @import, class: "ml-2 rounded-lg py-3 px-5 bg-gray-100 inline-block font-medium" %>
  <%= link_to "Back to imports", imports_path, class: "ml-2 rounded-lg py-3 px-5 bg-gray-100 inline-block font-medium" %>
    <div class="form-control">
      <%= form.label :source %>
      <%= form.select :source, options_for_select(Import.sources.keys.map { |source| [source.humanize, source] }, @import.source), {}, class: 'select select-bordered' %>
    </div>

    <div class='my-4'>
      <%= form.submit class: "rounded-lg py-3 px-5 bg-blue-600 text-white inline-block font-medium cursor-pointer" %>
      <%= link_to "Back to imports", imports_path, class: "rounded-lg py-3 px-5 bg-blue-600 text-white inline-block font-medium cursor-pointer" %>
    </div>
  <% end %>
</div>
@@ -141,3 +141,20 @@
  </div>
  <label class="modal-backdrop" for="speed_colored_routes_info">Close</label>
</div>

<input type="checkbox" id="speed_color_scale_info" class="modal-toggle" />
<div class="modal focus:z-99" role="dialog">
  <div class="modal-box">
    <h3 class="text-lg font-bold">Speed color scale</h3>
    <p class="py-4">
      Value in the format <code>speed_kmh:hex_color|...</code>.
    </p>
    <p class="py-4">
      Here you can set a custom color scale for speed-colored routes. It uses color stops at the specified km/h values and creates a gradient from them. The default value is <code>0:#00ff00|15:#00ffff|30:#ff00ff|50:#ffff00|100:#ff3300</code>.
    </p>
    <p class="py-4">
      You can also use the 'Edit Scale' button to edit it using a UI.
    </p>
  </div>
  <label class="modal-backdrop" for="speed_color_scale_info">Close</label>
</div>
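To make the format described in that modal concrete, here is a small sketch of how the `speed_kmh:hex_color|...` string maps to color stops. It is written in Ruby purely for illustration; the application itself handles this in JavaScript via the `colorFormatEncode`/`colorFormatDecode` functions earlier in this diff:

```ruby
# Illustrative only: decodes/encodes the speed color scale format shown above.
DEFAULT_SCALE = '0:#00ff00|15:#00ffff|30:#ff00ff|50:#ffff00|100:#ff3300'

def decode_color_scale(str)
  str.split('|').map do |segment|
    speed, color = segment.split(':')
    { speed: speed.to_i, color: color }
  end
end

def encode_color_scale(stops)
  stops.map { |stop| "#{stop[:speed]}:#{stop[:color]}" }.join('|')
end

stops = decode_color_scale(DEFAULT_SCALE)
# => [{ speed: 0, color: "#00ff00" }, { speed: 15, color: "#00ffff" }, ...]
encode_color_scale(stops) == DEFAULT_SCALE
# => true
```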
@@ -6,6 +6,6 @@
  <% end %>
  <%= link_to 'Map', settings_maps_path, role: 'tab', class: "tab #{active_tab?(settings_maps_path)}" %>
  <% if !DawarichSettings.self_hosted? %>
    <%= link_to 'Subscriptions', settings_subscriptions_path, role: 'tab', class: "tab #{active_tab?(settings_subscriptions_path)}" %>
    <%= link_to 'Subscriptions', "#{MANAGER_URL}/auth/dawarich?token=#{current_user.generate_subscription_token}", role: 'tab', class: "tab" %>
  <% end %>
</div>

@@ -24,7 +24,7 @@
    <% end %>
  </ul>
</div>
<%= link_to 'Dawarich', root_path, class: 'btn btn-ghost normal-case text-xl'%>
<%= link_to 'Dawarich<sup>α</sup>'.html_safe, root_path, class: 'btn btn-ghost normal-case text-xl'%>
<div class="badge mx-4 <%= 'badge-outline' if new_version_available? %>">
  <a href="https://github.com/Freika/dawarich/releases/latest" target="_blank" class="inline-flex items-center">
    <% if new_version_available? %>
@@ -37,11 +37,6 @@ Rails.application.routes.draw do
    resources :users, only: %i[index create destroy edit update]
    resources :maps, only: %i[index]
    patch 'maps', to: 'maps#update'
    resources :subscriptions, only: %i[index] do
      collection do
        get :subscription_callback
      end
    end
  end

  patch 'settings', to: 'settings#update'

@@ -131,6 +126,8 @@ Rails.application.routes.draw do
      namespace :maps do
        resources :tile_usage, only: [:create]
      end

      post 'subscriptions/callback', to: 'subscriptions#callback'
    end
  end
end
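Combined with the API controller earlier in the diff, this means the subscription manager calls the API directly instead of a browser-facing settings page. A rough sketch of such a call (illustrative only; the host is hypothetical, and the path and `token` param come from the request specs below):

```ruby
# Hypothetical client-side sketch of hitting the new callback route.
require 'net/http'
require 'uri'

token = '<JWT signed with JWT_SECRET_KEY, as sketched earlier>'
uri = URI.parse('https://dawarich.example.com/api/v1/subscriptions/callback')

response = Net::HTTP.post_form(uri, 'token' => token)
response.code # "200" on success, "401" on a bad signature, "422" on bad data
```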
|
|||
|
|
@ -25,10 +25,6 @@ RSpec.describe Import::ProcessJob, type: :job do
|
|||
perform
|
||||
end
|
||||
|
||||
it 'creates a notification' do
|
||||
expect { perform }.to change { Notification.count }.by(1)
|
||||
end
|
||||
|
||||
context 'when there is an error' do
|
||||
before do
|
||||
allow_any_instance_of(OwnTracks::Importer).to receive(:call).and_raise(StandardError)
|
||||
|
|
|
|||
|
|
@ -29,12 +29,5 @@ RSpec.describe Stats::CalculatingJob, type: :job do
|
|||
expect(Notification.last.kind).to eq('error')
|
||||
end
|
||||
end
|
||||
|
||||
context 'when Stats::CalculateMonth does not raise an error' do
|
||||
it 'creates an info notification' do
|
||||
expect { subject }.to change { Notification.count }.by(1)
|
||||
expect(Notification.last.kind).to eq('info')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -79,14 +79,4 @@ RSpec.describe Point, type: :model do
|
|||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe 'callbacks' do
|
||||
describe '#update_import_points_count' do
|
||||
let(:point) { create(:point, import_id: 1) }
|
||||
|
||||
it 'updates the import points count' do
|
||||
expect { point.destroy }.to have_enqueued_job(Import::UpdatePointsCountJob).with(1)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
130
spec/requests/api/v1/subscriptions_spec.rb
Normal file
130
spec/requests/api/v1/subscriptions_spec.rb
Normal file
|
|
@ -0,0 +1,130 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require 'rails_helper'
|
||||
|
||||
RSpec.describe 'Api::V1::Subscriptions', type: :request do
|
||||
let(:user) { create(:user, :inactive) }
|
||||
let(:jwt_secret) { ENV['JWT_SECRET_KEY'] }
|
||||
|
||||
before do
|
||||
stub_const('ENV', ENV.to_h.merge('JWT_SECRET_KEY' => 'test_secret'))
|
||||
stub_request(:any, 'https://api.github.com/repos/Freika/dawarich/tags')
|
||||
.to_return(status: 200, body: '[{"name": "1.0.0"}]', headers: {})
|
||||
end
|
||||
|
||||
context 'when Dawarich is not self-hosted' do
|
||||
before do
|
||||
allow(DawarichSettings).to receive(:self_hosted?).and_return(false)
|
||||
end
|
||||
|
||||
describe 'POST /api/v1/subscriptions/callback' do
|
||||
context 'when user is not authenticated' do
|
||||
it 'requires authentication' do
|
||||
# Make request without authentication
|
||||
post '/api/v1/subscriptions/callback', params: { token: 'invalid' }
|
||||
|
||||
# Either we get redirected (302) or get an unauthorized response (401) or unprocessable (422)
|
||||
# All indicate that authentication is required
|
||||
expect([401, 302, 422]).to include(response.status)
|
||||
end
|
||||
end
|
||||
|
||||
context 'when user is authenticated' do
|
||||
before { sign_in user }
|
||||
|
||||
context 'with valid token' do
|
||||
let(:token) do
|
||||
JWT.encode(
|
||||
{ user_id: user.id, status: 'active', active_until: 1.year.from_now },
|
||||
jwt_secret,
|
||||
'HS256'
|
||||
)
|
||||
end
|
||||
|
||||
it 'updates user status and returns success message' do
|
||||
decoded_data = { user_id: user.id, status: 'active', active_until: 1.year.from_now.to_s }
|
||||
mock_decoder = instance_double(Subscription::DecodeJwtToken, call: decoded_data)
|
||||
allow(Subscription::DecodeJwtToken).to receive(:new).with(token).and_return(mock_decoder)
|
||||
|
||||
post '/api/v1/subscriptions/callback', params: { token: token }
|
||||
|
||||
expect(user.reload.status).to eq('active')
|
||||
expect(user.active_until).to be_within(1.day).of(1.year.from_now)
|
||||
expect(response).to have_http_status(:ok)
|
||||
expect(JSON.parse(response.body)['message']).to eq('Subscription updated successfully')
|
||||
end
|
||||
end
|
||||
|
||||
context 'with token for different user' do
|
||||
let(:other_user) { create(:user) }
|
||||
let(:token) do
|
||||
JWT.encode(
|
||||
{ user_id: other_user.id, status: 'active', active_until: 1.year.from_now },
|
||||
jwt_secret,
|
||||
'HS256'
|
||||
)
|
||||
end
|
||||
|
||||
it 'updates provided user' do
|
||||
decoded_data = { user_id: other_user.id, status: 'active', active_until: 1.year.from_now.to_s }
|
||||
mock_decoder = instance_double(Subscription::DecodeJwtToken, call: decoded_data)
|
||||
allow(Subscription::DecodeJwtToken).to receive(:new).with(token).and_return(mock_decoder)
|
||||
|
||||
post '/api/v1/subscriptions/callback', params: { token: token }
|
||||
|
||||
expect(user.reload.status).not_to eq('active')
|
||||
expect(other_user.reload.status).to eq('active')
|
||||
expect(response).to have_http_status(:ok)
|
||||
expect(JSON.parse(response.body)['message']).to eq('Subscription updated successfully')
|
||||
end
|
||||
end
|
||||
|
||||
context 'with invalid token' do
|
||||
it 'returns unauthorized error with decode error message' do
|
||||
allow(Subscription::DecodeJwtToken).to receive(:new).with('invalid')
|
||||
.and_raise(JWT::DecodeError.new('Invalid token'))
|
||||
|
||||
post '/api/v1/subscriptions/callback', params: { token: 'invalid' }
|
||||
|
||||
expect(response).to have_http_status(:unauthorized)
|
||||
expect(JSON.parse(response.body)['message']).to eq('Failed to verify subscription update.')
|
||||
end
|
||||
end
|
||||
|
||||
context 'with malformed token data' do
|
||||
let(:token) do
|
||||
JWT.encode({ user_id: 'invalid', status: nil }, jwt_secret, 'HS256')
|
||||
end
|
||||
|
||||
it 'returns unprocessable_entity error with invalid data message' do
|
||||
allow(Subscription::DecodeJwtToken).to receive(:new).with(token)
|
||||
.and_raise(ArgumentError.new('Invalid token data'))
|
||||
|
||||
post '/api/v1/subscriptions/callback', params: { token: token }
|
||||
|
||||
expect(response).to have_http_status(:unprocessable_entity)
|
||||
expect(JSON.parse(response.body)['message']).to eq('Invalid subscription data received.')
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
context 'when Dawarich is self-hosted' do
|
||||
before do
|
||||
allow(DawarichSettings).to receive(:self_hosted?).and_return(true)
|
||||
sign_in user
|
||||
end
|
||||
|
||||
describe 'POST /api/v1/subscriptions/callback' do
|
||||
it 'is blocked for self-hosted instances' do
|
||||
# Make request in self-hosted environment
|
||||
post '/api/v1/subscriptions/callback', params: { token: 'invalid' }
|
||||
|
||||
# In a self-hosted environment, we either get redirected or receive an error
|
||||
# Either way, the access is blocked as expected
|
||||
expect([401, 302, 303, 422]).to include(response.status)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -1,141 +0,0 @@
# frozen_string_literal: true

require 'rails_helper'

RSpec.describe 'Settings::Subscriptions', type: :request do
  let(:user) { create(:user, :inactive) }
  let(:jwt_secret) { ENV['JWT_SECRET_KEY'] }

  before do
    stub_const('ENV', ENV.to_h.merge('JWT_SECRET_KEY' => 'test_secret'))
    stub_request(:any, 'https://api.github.com/repos/Freika/dawarich/tags')
      .to_return(status: 200, body: '[{"name": "1.0.0"}]', headers: {})
  end

  context 'when Dawarich is not self-hosted' do
    before do
      allow(DawarichSettings).to receive(:self_hosted?).and_return(false)
    end

    describe 'GET /settings/subscriptions' do
      context 'when user is not authenticated' do
        it 'redirects to login page' do
          get settings_subscriptions_path

          expect(response).to redirect_to(new_user_session_path)
        end
      end

      context 'when user is authenticated' do
        before { sign_in user }

        it 'returns successful response' do
          get settings_subscriptions_path

          expect(response).to be_successful
        end
      end
    end

    describe 'GET /settings/subscriptions/callback' do
      context 'when user is not authenticated' do
        it 'redirects to login page' do
          get subscription_callback_settings_subscriptions_path(token: 'invalid')

          expect(response).to redirect_to(new_user_session_path)
        end
      end

      context 'when user is authenticated' do
        before { sign_in user }

        context 'with valid token' do
          let(:token) do
            JWT.encode(
              { user_id: user.id, status: 'active', active_until: 1.year.from_now },
              jwt_secret,
              'HS256'
            )
          end

          it 'updates user status and redirects with success message' do
            get subscription_callback_settings_subscriptions_path(token: token)

            expect(user.reload.status).to eq('active')
            expect(user.active_until).to be_within(1.day).of(1.year.from_now)
            expect(response).to redirect_to(settings_subscriptions_path)
            expect(flash[:notice]).to eq('Your subscription has been updated successfully!')
          end
        end

        context 'with token for different user' do
          let(:other_user) { create(:user) }
          let(:token) do
            JWT.encode(
              { user_id: other_user.id, status: 'active' },
              jwt_secret,
              'HS256'
            )
          end

          it 'does not update status and redirects with error' do
            get subscription_callback_settings_subscriptions_path(token: token)

            expect(user.reload.status).not_to eq('active')
            expect(response).to redirect_to(settings_subscriptions_path)
            expect(flash[:alert]).to eq('Invalid subscription update request.')
          end
        end

        context 'with invalid token' do
          it 'redirects with decode error message' do
            get subscription_callback_settings_subscriptions_path(token: 'invalid')

            expect(response).to redirect_to(settings_subscriptions_path)
            expect(flash[:alert]).to eq('Failed to verify subscription update.')
          end
        end

        context 'with malformed token data' do
          let(:token) do
            JWT.encode({ user_id: 'invalid', status: nil }, jwt_secret, 'HS256')
          end

          it 'redirects with invalid data message' do
            get subscription_callback_settings_subscriptions_path(token: token)

            expect(response).to redirect_to(settings_subscriptions_path)
            expect(flash[:alert]).to eq('Invalid subscription update request.')
          end
        end
      end
    end
  end

  context 'when Dawarich is self-hosted' do
    before do
      allow(DawarichSettings).to receive(:self_hosted?).and_return(true)
      sign_in user
    end

    describe 'GET /settings/subscriptions' do
      context 'when user is not authenticated' do
        it 'redirects to root path' do
          get settings_subscriptions_path

          expect(response).to redirect_to(root_path)
        end
      end
    end

    describe 'GET /settings/subscriptions/callback' do
      context 'when user is not authenticated' do
        it 'redirects to root path' do
          get subscription_callback_settings_subscriptions_path(token: 'invalid')

          expect(response).to redirect_to(root_path)
        end
      end
    end
  end
end

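For reference, the valid-token cases in the deleted spec above build their tokens with the jwt gem. A minimal sketch of how such a callback token could be verified on the server side, assuming the same ENV['JWT_SECRET_KEY'] secret and HS256 algorithm used in the spec (the helper name decode_subscription_token is hypothetical, not part of the diff):

# Hypothetical helper: verifies a subscription callback token shaped like the one the spec encodes.
# Assumes the same secret (ENV['JWT_SECRET_KEY']) and HS256 algorithm as in the spec above.
def decode_subscription_token(token)
  payload, _header = JWT.decode(token, ENV['JWT_SECRET_KEY'], true, algorithm: 'HS256')

  payload.symbolize_keys # => { user_id: ..., status: 'active', active_until: ... }
rescue JWT::DecodeError
  nil # an undecodable token corresponds to the "Failed to verify subscription update." path in the spec
end
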
@ -2,7 +2,7 @@

require 'rails_helper'

RSpec.describe Geojson::ImportParser do
RSpec.describe Geojson::Importer do
describe '#call' do
subject(:service) { described_class.new(import, user.id).call }

@ -2,7 +2,7 @@

require 'rails_helper'

RSpec.describe GoogleMaps::PhoneTakeoutParser do
RSpec.describe GoogleMaps::PhoneTakeoutImporter do
describe '#call' do
subject(:parser) { described_class.new(import, user.id).call }

@ -145,23 +145,35 @@ RSpec.describe GoogleMaps::RecordsStorageImporter
context 'with download issues' do
it 'retries on timeout' do
call_count = 0
allow(import.file).to receive(:download) do
call_count += 1
call_count < 3 ? raise(Timeout::Error) : file_content
end
# Create a mock that will return a successful result
# The internal retries are implemented inside SecureFileDownloader,
# not in the RecordsStorageImporter
downloader = instance_double(Imports::SecureFileDownloader)

expect(Rails.logger).to receive(:warn).twice
# Create the downloader mock before it gets used
expect(Imports::SecureFileDownloader).to receive(:new).with(import.file).and_return(downloader)

# The SecureFileDownloader handles all the retries internally
# From the perspective of the importer, it just gets the file content
expect(downloader).to receive(:download_with_verification).once.and_return(file_content)

# Run the method
subject.call
expect(call_count).to eq(3)
end

it 'fails after max retries' do
allow(import.file).to receive(:download).and_raise(Timeout::Error)
# The retry mechanism is in SecureFileDownloader, not RecordsStorageImporter
# So we need to simulate that the method throws the error after internal retries
downloader = instance_double(Imports::SecureFileDownloader)

expect(Rails.logger).to receive(:warn).exactly(3).times
expect(Rails.logger).to receive(:error).with('Download failed after 3 attempts')
# Create the downloader mock before it gets used - expect only one call from the importer
expect(Imports::SecureFileDownloader).to receive(:new).with(import.file).and_return(downloader)

# This should be called once, and the internal retries should have been attempted
# After the max retries, it will still raise the Timeout::Error that bubbles up
expect(downloader).to receive(:download_with_verification).once.and_raise(Timeout::Error)

# We expect the error to bubble up to the caller
expect { subject.call }.to raise_error(Timeout::Error)
end
end

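The comments in the hunk above describe the intended split of responsibilities: RecordsStorageImporter no longer retries downloads itself and simply asks SecureFileDownloader for verified file content. A minimal sketch of what that delegation could look like, consistent with the spec expectations; the surrounding class body and the parse_and_store helper are assumptions, not code from the diff:

# Sketch only: the importer delegates downloading (and all retry/verification
# logic) to Imports::SecureFileDownloader, as the spec expectations suggest.
module GoogleMaps
  class RecordsStorageImporter
    def initialize(import, user_id)
      @import = import
      @user_id = user_id
    end

    def call
      file_content = Imports::SecureFileDownloader.new(@import.file).download_with_verification

      parse_and_store(file_content) # hypothetical helper, not shown in the diff
    end
  end
end
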
@ -2,7 +2,7 @@

require 'rails_helper'

RSpec.describe GoogleMaps::SemanticHistoryParser do
RSpec.describe GoogleMaps::SemanticHistoryImporter do
describe '#call' do
subject(:parser) { described_class.new(import, user.id).call }

@ -17,8 +17,8 @@ RSpec.describe Imports::Create
content_type: 'application/json')
end

it 'calls the GoogleMaps::SemanticHistoryParser' do
expect(GoogleMaps::SemanticHistoryParser).to \
it 'calls the GoogleMaps::SemanticHistoryImporter' do
expect(GoogleMaps::SemanticHistoryImporter).to \
receive(:new).with(import, user.id).and_return(double(call: true))
service.call
end

@ -31,15 +31,15 @@ RSpec.describe Imports::Create
context 'when source is google_phone_takeout' do
let(:import) { create(:import, source: 'google_phone_takeout') }

it 'calls the GoogleMaps::PhoneTakeoutParser' do
expect(GoogleMaps::PhoneTakeoutParser).to \
it 'calls the GoogleMaps::PhoneTakeoutImporter' do
expect(GoogleMaps::PhoneTakeoutImporter).to \
receive(:new).with(import, user.id).and_return(double(call: true))
service.call
end
end

context 'when source is owntracks' do
let(:import) { create(:import, source: 'owntracks') }
let(:import) { create(:import, source: 'owntracks', name: '2024-03.rec') }
let(:file_path) { Rails.root.join('spec/fixtures/files/owntracks/2024-03.rec') }
let(:file) { Rack::Test::UploadedFile.new(file_path, 'application/octet-stream') }

@ -54,12 +54,6 @@ RSpec.describe Imports::Create
end

context 'when import is successful' do
it 'creates a finished notification' do
service.call

expect(user.notifications.last.kind).to eq('info')
end

it 'schedules stats creating' do
Sidekiq::Testing.inline! do
expect { service.call }.to \

@ -79,10 +73,38 @@ RSpec.describe Imports::Create
allow(OwnTracks::Importer).to receive(:new).with(import, user.id).and_raise(StandardError)
end

it 'creates a failed notification' do
service.call
context 'when self-hosted' do
before do
allow(DawarichSettings).to receive(:self_hosted?).and_return(true)
end

expect(user.notifications.last.kind).to eq('error')
after do
allow(DawarichSettings).to receive(:self_hosted?).and_call_original
end

it 'creates a failed notification' do
service.call

expect(user.notifications.last.content).to \
include('Import "2024-03.rec" failed: StandardError, stacktrace: ')
end
end

context 'when not self-hosted' do
before do
allow(DawarichSettings).to receive(:self_hosted?).and_return(false)
end

after do
allow(DawarichSettings).to receive(:self_hosted?).and_call_original
end

it 'does not create a failed notification' do
service.call

expect(user.notifications.last.content).to \
include('Import "2024-03.rec" failed, please contact us at hi@dawarich.com')
end
end
end
end

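The hunk above only asserts the notification content, so the implementation itself is not visible in the diff. A rough sketch of how the failure branch in Imports::Create might build a different message depending on DawarichSettings.self_hosted?; the method name, notification attributes, and message construction are assumptions inferred from the expectations above:

# Sketch only: failure handling that would satisfy the expectations above.
# The notification message differs depending on whether the instance is self-hosted.
def handle_import_failure(user, import, error)
  content =
    if DawarichSettings.self_hosted?
      "Import \"#{import.name}\" failed: #{error.message}, stacktrace: #{error.backtrace&.join("\n")}"
    else
      "Import \"#{import.name}\" failed, please contact us at hi@dawarich.com"
    end

  # Notification attributes (kind/title/content) are assumed, not taken from the diff
  user.notifications.create!(kind: :error, title: 'Import failed', content: content)
end
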
@ -107,8 +129,8 @@ RSpec.describe Imports::Create
context 'when source is geojson' do
let(:import) { create(:import, source: 'geojson') }

it 'calls the Geojson::ImportParser' do
expect(Geojson::ImportParser).to \
it 'calls the Geojson::Importer' do
expect(Geojson::Importer).to \
receive(:new).with(import, user.id).and_return(double(call: true))
service.call
end

@ -117,8 +139,8 @@ RSpec.describe Imports::Create
context 'when source is immich_api' do
let(:import) { create(:import, source: 'immich_api') }

it 'calls the Photos::ImportParser' do
expect(Photos::ImportParser).to \
it 'calls the Photos::Importer' do
expect(Photos::Importer).to \
receive(:new).with(import, user.id).and_return(double(call: true))
service.call
end

@ -127,8 +149,8 @@ RSpec.describe Imports::Create
context 'when source is photoprism_api' do
let(:import) { create(:import, source: 'photoprism_api') }

it 'calls the Photos::ImportParser' do
expect(Photos::ImportParser).to \
it 'calls the Photos::Importer' do
expect(Photos::Importer).to \
receive(:new).with(import, user.id).and_return(double(call: true))
service.call
end

spec/services/imports/secure_file_downloader_spec.rb (new file, 109 lines added)

@ -0,0 +1,109 @@
# frozen_string_literal: true

require 'rails_helper'

RSpec.describe Imports::SecureFileDownloader do
  let(:file_content) { 'test content' }
  let(:file_size) { file_content.bytesize }
  let(:checksum) { Base64.strict_encode64(Digest::MD5.digest(file_content)) }
  let(:blob) { double('ActiveStorage::Blob', byte_size: file_size, checksum: checksum) }
  # Create a mock that mimics ActiveStorage::Attached::One
  let(:storage_attachment) { double('ActiveStorage::Attached::One', blob: blob) }

  subject { described_class.new(storage_attachment) }

  describe '#download_with_verification' do
    context 'when download is successful' do
      before do
        # Mock the download method to yield the file content
        allow(storage_attachment).to receive(:download) do |&block|
          block.call(file_content)
        end
      end

      it 'returns the file content' do
        expect(subject.download_with_verification).to eq(file_content)
      end
    end

    context 'when timeout occurs but succeeds on retry' do
      it 'retries the download internally and returns success after retries' do
        call_count = 0

        # Mock storage_attachment to fail twice then succeed
        allow(storage_attachment).to receive(:download) do |&block|
          call_count += 1
          raise Timeout::Error if call_count < 3

          block.call(file_content)
        end

        # Expect logging for each retry attempt
        expect(Rails.logger).to receive(:warn).with(/Download timeout, attempt 1 of/).ordered
        expect(Rails.logger).to receive(:warn).with(/Download timeout, attempt 2 of/).ordered

        # The method should eventually return the content
        result = subject.download_with_verification
        expect(result).to eq(file_content)
        expect(call_count).to eq(3) # Verify retry attempts
      end
    end

    context 'when all download attempts timeout' do
      it 'raises the error after max retries' do
        # Make download always raise Timeout::Error
        allow(storage_attachment).to receive(:download).and_raise(Timeout::Error)

        # Expect warnings for each retry
        described_class::MAX_RETRIES.times do |i|
          expect(Rails.logger).to receive(:warn).with(/Download timeout, attempt #{i + 1} of/).ordered
        end

        # Expect error log on final failure
        expect(Rails.logger).to receive(:error).with(/Download failed after/).ordered

        # It should raise the Timeout::Error
        expect { subject.download_with_verification }.to raise_error(Timeout::Error)
      end
    end

    context 'when file size does not match' do
      let(:blob) { double('ActiveStorage::Blob', byte_size: 100, checksum: checksum) }

      before do
        allow(storage_attachment).to receive(:download) do |&block|
          block.call(file_content)
        end
      end

      it 'raises an error' do
        expect { subject.download_with_verification }.to raise_error(/Incomplete download/)
      end
    end

    context 'when checksum does not match' do
      let(:blob) { double('ActiveStorage::Blob', byte_size: file_size, checksum: 'invalid_checksum') }

      before do
        allow(storage_attachment).to receive(:download) do |&block|
          block.call(file_content)
        end
      end

      it 'raises an error' do
        expect { subject.download_with_verification }.to raise_error(/Checksum mismatch/)
      end
    end

    context 'when download fails with a different error' do
      before do
        allow(storage_attachment).to receive(:download).and_raise(StandardError, 'Download failed')
      end

      it 'logs the error and re-raises it' do
        expect(Rails.logger).to receive(:error).with(/Download error: Download failed/)
        expect { subject.download_with_verification }.to raise_error(StandardError, 'Download failed')
      end
    end
  end
end
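Based purely on the behaviour the new spec above exercises, here is a minimal sketch of what Imports::SecureFileDownloader might look like. The constant value, log wording, and verification details are assumptions inferred from the spec's expectations, not the actual implementation in the PR:

# Sketch only, inferred from the spec above: retries timed-out downloads,
# verifies size and checksum against the blob, and logs failures before re-raising.
module Imports
  class SecureFileDownloader
    MAX_RETRIES = 3 # assumed value; the spec only requires the constant to exist

    def initialize(attachment)
      @attachment = attachment
    end

    def download_with_verification
      attempts = 0
      begin
        content = +''
        @attachment.download { |chunk| content << chunk }

        verify!(content)
        content
      rescue Timeout::Error
        attempts += 1
        Rails.logger.warn("Download timeout, attempt #{attempts} of #{MAX_RETRIES}")
        retry if attempts < MAX_RETRIES

        Rails.logger.error("Download failed after #{MAX_RETRIES} attempts")
        raise
      rescue StandardError => e
        Rails.logger.error("Download error: #{e.message}")
        raise
      end
    end

    private

    def verify!(content)
      blob = @attachment.blob
      raise "Incomplete download: expected #{blob.byte_size} bytes, got #{content.bytesize}" if content.bytesize != blob.byte_size

      actual = Base64.strict_encode64(Digest::MD5.digest(content))
      raise "Checksum mismatch: expected #{blob.checksum}, got #{actual}" if actual != blob.checksum
    end
  end
end
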
@ -2,7 +2,7 @@

require 'rails_helper'

RSpec.describe Photos::ImportParser do
RSpec.describe Photos::Importer do
describe '#call' do
subject(:service) { described_class.new(import, user.id).call }