mirror of
https://github.com/Freika/dawarich.git
synced 2026-01-10 01:01:39 -05:00
Clean up some code
This commit is contained in:
parent d10ca668a9
commit 32a00db9b9
12 changed files with 45 additions and 97 deletions
28 CHANGELOG.md
@@ -4,27 +4,25 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](http://keepachangelog.com/)
 and this project adheres to [Semantic Versioning](http://semver.org/).

 # [UNRELEASED]

 # [0.29.0] - 2025-06-30

 ## Added

-- [x] In the User Settings, you can now export your user data as a zip file. It will contain the following:
-  - [x] All your points
-  - [x] All your places
-  - [x] All your visits
-  - [x] All your areas
-  - [x] All your imports with files
-  - [x] All your exports with files
-  - [x] All your trips
-  - [x] All your notifications
-  - [x] All your stats
+- In the User Settings, you can now export your user data as a zip file. It will contain the following:
+  - All your points
+  - All your places
+  - All your visits
+  - All your areas
+  - All your imports with files
+  - All your exports with files
+  - All your trips
+  - All your notifications
+  - All your stats

-- [ ] In the User Settings, you can now import your user data from a zip file. It will import all the data from the zip file, listed above. It will also start stats recalculation.
-- [ ] User can select to override settings or not.
-- [ ] Check distance units if they are correct
-- [ ] Why import creates more points than the original?
+- In the User Settings, you can now import your user data from a zip file. It will import all the data from the zip file, listed above. It will also start stats recalculation.

 - Export file size is now displayed in the exports and imports lists.
 - A button to download an import file is now displayed in the imports list. It may not work properly for imports created before the 0.25.4 release.

 ## Changed
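For the export feature described in this changelog entry, here is a minimal sketch of inspecting the resulting archive with the rubyzip gem (the same library the export service below uses); the path is a placeholder for wherever you saved the downloaded file:

```ruby
require 'zip'

archive_path = 'dawarich_export.zip' # placeholder for the downloaded archive

# List every entry in the archive together with its uncompressed size.
Zip::File.open(archive_path) do |zip|
  zip.each do |entry|
    puts format('%-50s %12d bytes', entry.name, entry.size)
  end
end
```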
@@ -11,7 +11,7 @@ class ImportsController < ApplicationController
     @imports =
       current_user
         .imports
-        .select(:id, :name, :source, :created_at, :processed)
+        .select(:id, :name, :source, :created_at, :processed, :status)
         .order(created_at: :desc)
         .page(params[:page])
   end
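Note on the `:status` addition: the imports view (see the last hunk below) now renders `import.status`, and with ActiveRecord a column left out of an explicit `select` is simply not loaded, so reading it raises. A minimal sketch of that failure mode:

```ruby
# Without :status in the projection, the attribute is not loaded at all.
import = Import.select(:id, :name).first
import.status
# => raises ActiveModel::MissingAttributeError (missing attribute 'status')

# With the column included, the same call works.
import = Import.select(:id, :name, :status).first
import.status # => e.g. "completed"
```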
@@ -61,15 +61,8 @@ class Settings::UsersController < ApplicationController

     archive_file = params[:archive]

-    # Validate file type
-    unless archive_file.content_type == 'application/zip' ||
-           archive_file.content_type == 'application/x-zip-compressed' ||
-           File.extname(archive_file.original_filename).downcase == '.zip'
-      redirect_to edit_user_registration_path, alert: 'Please upload a valid ZIP file.'
-      return
-    end
+    validate_archive_file(archive_file)

-    # Create Import record for user data archive
     import = current_user.imports.build(
       name: archive_file.original_filename,
       source: :user_data_archive
@@ -95,4 +88,14 @@ class Settings::UsersController < ApplicationController
   def user_params
     params.require(:user).permit(:email, :password)
   end
+
+  def validate_archive_file(archive_file)
+    unless archive_file.content_type == 'application/zip' ||
+           archive_file.content_type == 'application/x-zip-compressed' ||
+           File.extname(archive_file.original_filename).downcase == '.zip'
+
+      redirect_to edit_user_registration_path, alert: 'Please upload a valid ZIP file.'
+      return
+    end
+  end
 end
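A minimal sketch of what the extracted `validate_archive_file` check accepts and rejects, using `Rack::Test::UploadedFile` to stand in for `params[:archive]`. The helper name `zip_archive?` and the file contents are made up for illustration, and this assumes a rack-test version that accepts an IO plus `original_filename:`:

```ruby
require 'rack/test'
require 'stringio'

# Mirrors the controller's content-type / extension check as a boolean.
def zip_archive?(archive_file)
  archive_file.content_type == 'application/zip' ||
    archive_file.content_type == 'application/x-zip-compressed' ||
    File.extname(archive_file.original_filename).downcase == '.zip'
end

build = lambda do |filename, content_type|
  Rack::Test::UploadedFile.new(StringIO.new('PK'), content_type, original_filename: filename)
end

zip_archive?(build.call('export.zip', 'application/zip'))          # => true  (content type matches)
zip_archive?(build.call('export.zip', 'application/octet-stream')) # => true  (.zip extension fallback)
zip_archive?(build.call('notes.txt', 'text/plain'))                # => false (controller redirects with an alert)
```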
@@ -9,37 +9,26 @@ class Users::ImportDataJob < ApplicationJob
     import = Import.find(import_id)
     user = import.user

-    # Download the archive file to a temporary location
     archive_path = download_import_archive(import)

-    # Validate that the archive file exists
     unless File.exist?(archive_path)
       raise StandardError, "Archive file not found: #{archive_path}"
     end

-    # Perform the import
     import_stats = Users::ImportData.new(user, archive_path).import

     Rails.logger.info "Import completed successfully for user #{user.email}: #{import_stats}"
   rescue StandardError => e
-    user_id = user&.id || import&.user_id || "unknown"
+    user_id = user&.id || import&.user_id || 'unknown'
     ExceptionReporter.call(e, "Import job failed for user #{user_id}")

-    # Create failure notification if user is available
-    if user
-      ::Notifications::Create.new(
-        user: user,
-        title: 'Data import failed',
-        content: "Your data import failed with error: #{e.message}. Please check the archive format and try again.",
-        kind: :error
-      ).call
-    end
+    create_import_failed_notification(user, e)

     raise e
   ensure
     # Clean up the uploaded archive file if it exists
     if archive_path && File.exist?(archive_path)
       File.delete(archive_path)

       Rails.logger.info "Cleaned up archive file: #{archive_path}"
     end
   end
@@ -61,4 +50,13 @@ class Users::ImportDataJob < ApplicationJob
     temp_path
   end
+
+  def create_import_failed_notification(user, error)
+    ::Notifications::Create.new(
+      user: user,
+      title: 'Data import failed',
+      content: "Your data import failed with error: #{error.message}. Please check the archive format and try again.",
+      kind: :error
+    ).call
+  end
 end
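A minimal usage sketch for the job, assuming `user` is a User record and `archive_file` is the uploaded ZIP handled by the controller above; the Import's `file` attachment comes from the model shown further down:

```ruby
# Build an Import for the uploaded archive and hand it to the background job.
import = user.imports.build(
  name: archive_file.original_filename,
  source: :user_data_archive
)
import.file.attach(archive_file) # Import has a `file` attachment (see the model diff)
import.save!

Users::ImportDataJob.perform_later(import.id)

# On failure the job reports the exception and, when the user could be resolved,
# creates a 'Data import failed' notification via create_import_failed_notification.
```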
@@ -53,7 +53,7 @@ class Import < ApplicationRecord
     file.attach(io: raw_file, filename: name, content_type: 'application/json')
   end

-  private
+  private

   def remove_attached_file
     file.purge_later
@@ -23,12 +23,7 @@ class ReverseGeocoding::Points::FetchData
     response = Geocoder.search([point.lat, point.lon]).first
     return if response.blank? || response.data['error'].present?

-    country_record = Country.find_or_create_by(name: response.country) do |country|
-      iso_a2, iso_a3 = extract_iso_codes(response)
-      country.iso_a2 = iso_a2
-      country.iso_a3 = iso_a3
-      country.geom = "MULTIPOLYGON (((0 0, 1 0, 1 1, 0 1, 0 0)))"
-    end if response.country
+    country_record = Country.find_by(name: response.country) if response.country

     point.update!(
       city: response.city,
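The switch from `find_or_create_by` to `find_by` changes the nil behaviour: a country missing from the countries table is no longer created on the fly, so `country_record` can be nil and the point is saved without a country association. A minimal sketch of the difference (the country name is made up):

```ruby
# Before: an unknown country was created on the fly with placeholder ISO codes.
Country.find_or_create_by(name: 'Atlantis') do |country|
  country.iso_a2 = 'XX'
  country.iso_a3 = 'XXX'
end
# => persists a new Country record when none exists yet

# After: lookup only; an unknown name simply returns nil.
Country.find_by(name: 'Atlantis') # => nil unless the record already exists
```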
@@ -37,19 +32,4 @@ class ReverseGeocoding::Points::FetchData
       reverse_geocoded_at: Time.current
     )
   end
-
-  def extract_iso_codes(response)
-    # First, try to get the ISO A2 code from the Geocoder response
-    iso_a2 = response.data.dig('properties', 'countrycode')&.upcase
-
-    if iso_a2.present?
-      # If we have a valid ISO A2 code, get the corresponding ISO A3 code
-      iso_a3 = Countries::IsoCodeMapper.iso_a3_from_a2(iso_a2)
-      return [iso_a2, iso_a3] if iso_a3.present?
-    end
-
-    # If no valid ISO code from Geocoder, try to match the country name
-    # This will return proper ISO codes if the country name is recognized
-    Countries::IsoCodeMapper.fallback_codes_from_country_name(response.country)
-  end
 end
@@ -238,7 +238,6 @@ class Users::ExportData

     # Stream JSON writing instead of building in memory
     File.open(json_file_path, 'w') do |file|
-      # Start JSON and add counts summary
       file.write('{"counts":')
       file.write(calculate_entity_counts.to_json)
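A minimal sketch of the streaming idea referenced by the comment above: write the JSON document piece by piece instead of building one large hash in memory first. The file path, counts, and the `"points"` key layout are illustrative, not the service's exact output format:

```ruby
File.open('export/data.json', 'w') do |file|
  file.write('{"counts":')
  file.write({ points: 1_000_000, places: 420 }.to_json)

  # Stream a large collection as its own top-level key, one batch at a time.
  file.write(',"points":[')
  first = true
  Point.find_each(batch_size: 10_000) do |point|
    file.write(',') unless first
    file.write(point.attributes.to_json)
    first = false
  end
  file.write(']}')
end
```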
@@ -278,29 +277,24 @@ class Users::ExportData
     zip_file_path = @export_directory.join('export.zip')
     create_zip_archive(@export_directory, zip_file_path)

-    # Attach the zip file to the Export record
     export_record.file.attach(
       io: File.open(zip_file_path),
       filename: export_record.name,
       content_type: 'application/zip'
     )

-    # Mark export as completed
     export_record.update!(status: :completed)

-    # Create notification
     create_success_notification

     export_record
   rescue StandardError => e
-    # Mark export as failed if an error occurs
     export_record.update!(status: :failed) if export_record

     ExceptionReporter.call(e, 'Export failed')

     raise e
   ensure
-    # Cleanup temporary files
     cleanup_temporary_files(@export_directory) if @export_directory&.exist?
   end
 end
@@ -337,23 +331,19 @@ class Users::ExportData
   end

   def create_zip_archive(export_directory, zip_file_path)
-    # Set global compression for better file size reduction
     original_compression = Zip.default_compression
     Zip.default_compression = Zip::Entry::DEFLATED

-    # Create zip archive with optimized compression
     Zip::File.open(zip_file_path, Zip::File::CREATE) do |zipfile|
       Dir.glob(export_directory.join('**', '*')).each do |file|
         next if File.directory?(file) || file == zip_file_path.to_s

         relative_path = file.sub(export_directory.to_s + '/', '')

-        # Add file to the zip archive
         zipfile.add(relative_path, file)
       end
     end
   ensure
-    # Restore original compression setting
     Zip.default_compression = original_compression if original_compression
   end
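Since `Zip.default_compression` is a process-wide rubyzip setting, `create_zip_archive` saves the previous value and restores it in `ensure`. A minimal sketch of the same save-and-restore pattern wrapped in a reusable helper (the helper and file names are made up, and the setting value mirrors what the service itself uses):

```ruby
require 'zip'

# Temporarily raise rubyzip's global compression setting for the duration of a block.
def with_deflate_compression
  original = Zip.default_compression
  Zip.default_compression = Zip::Entry::DEFLATED
  yield
ensure
  Zip.default_compression = original if original
end

with_deflate_compression do
  Zip::File.open('bundle.zip', Zip::File::CREATE) do |zipfile|
    zipfile.get_output_stream('hello.txt') { |io| io.write('hello') }
  end
end
```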
@@ -11,9 +11,7 @@ class Users::ExportData::Exports
   def call
     exports_with_files = user.exports.includes(:file_attachment).to_a

-    # Only use parallel processing if we have multiple exports
     if exports_with_files.size > 1
-      # Use fewer threads to avoid database connection issues
       results = Parallel.map(exports_with_files, in_threads: 2) do |export|
         process_export(export)
       end
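`Parallel.map` runs the block in separate threads, and each thread that touches the database needs its own connection, hence the small thread count. A minimal sketch of the pattern with an explicit connection checkout; `user` is assumed to be a User record and the per-record work is illustrative:

```ruby
require 'parallel'

exports = user.exports.includes(:file_attachment).to_a

results = Parallel.map(exports, in_threads: 2) do |export|
  # Check a connection out of the Active Record pool for this thread.
  ActiveRecord::Base.connection_pool.with_connection do
    { name: export.name, size: export.file.attached? ? export.file.byte_size : 0 }
  end
end
```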
@@ -11,9 +11,7 @@ class Users::ExportData::Imports
   def call
     imports_with_files = user.imports.includes(:file_attachment).to_a

-    # Only use parallel processing if we have multiple imports
     if imports_with_files.size > 1
-      # Use fewer threads to avoid database connection issues
       results = Parallel.map(imports_with_files, in_threads: 2) do |import|
         process_import(import)
       end
@@ -188,14 +188,6 @@ class Users::ImportData::Points
       country = countries_lookup[country_info['name']]
     end

-    # If still not found, create a new country record
-    if country.nil? && country_info['name'].present?
-      country = create_missing_country(country_info)
-      # Add to lookup cache for subsequent points
-      @countries_lookup[country_info['name']] = country
-      @countries_lookup[[country.name, country.iso_a2, country.iso_a3]] = country
-    end
-
     if country
       attributes['country_id'] = country.id
       Rails.logger.debug "Resolved country reference: #{country_info['name']} -> #{country.id}"
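For reference, a minimal sketch of how a lookup cache keyed both by plain name and by the `[name, iso_a2, iso_a3]` tuple (the two key shapes visible above) could be prebuilt so each point resolves its country without extra queries; this illustrates the caching idea rather than the service's exact code:

```ruby
countries_lookup = {}

Country.find_each do |country|
  countries_lookup[country.name] = country
  countries_lookup[[country.name, country.iso_a2, country.iso_a3]] = country
end

# Resolution tries the precise tuple first, then falls back to the bare name.
country_info = { 'name' => 'Germany', 'iso_a2' => 'DE', 'iso_a3' => 'DEU' } # illustrative
country =
  countries_lookup[[country_info['name'], country_info['iso_a2'], country_info['iso_a3']]] ||
  countries_lookup[country_info['name']]
```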
@@ -204,16 +196,7 @@ class Users::ImportData::Points
       end
     end

-    def create_missing_country(country_info)
-      Country.find_or_create_by(name: country_info['name']) do |new_country|
-        new_country.iso_a2 = country_info['iso_a2'] || Countries::IsoCodeMapper.fallback_codes_from_country_name(country_info['name'])[0]
-        new_country.iso_a3 = country_info['iso_a3'] || Countries::IsoCodeMapper.fallback_codes_from_country_name(country_info['name'])[1]
-        new_country.geom = "MULTIPOLYGON (((0 0, 1 0, 1 1, 0 1, 0 0)))" # Default geometry
-      end
-    rescue StandardError => e
-      Rails.logger.error "Failed to create missing country: #{e.message}"
-      nil
-    end
-
     def resolve_visit_reference(attributes, visit_reference)
       return unless visit_reference.is_a?(Hash)
@@ -24,7 +24,7 @@
       </div>
     </div>
     <div class="overflow-x-auto">
-      <table class="table">
+      <table class="table overflow-x-auto">
         <thead>
           <tr>
             <th>Name</th>
@@ -41,7 +41,7 @@
           <td><%= number_to_human_size(export.file.byte_size) || 'N/A' %></td>
           <td><%= human_datetime(export.created_at) %></td>
           <td><%= export.status %></td>
-          <td>
+          <td class="whitespace-nowrap">
             <% if export.completed? %>
               <% if export.file.present? %>
                 <%= link_to 'Download', rails_blob_path(export.file, disposition: 'attachment'), class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: export.name %>
@@ -36,7 +36,7 @@
       </div>
     </div>
     <div class="overflow-x-auto">
-      <table class="table">
+      <table class="table overflow-x-auto">
         <thead>
           <tr>
             <th>Name</th>
@@ -68,9 +68,9 @@
           <td data-points-count>
             <%= number_with_delimiter import.processed %>
           </td>
-          <td><%#= import.status %></td>
+          <td><%= import.status %></td>
           <td><%= human_datetime(import.created_at) %></td>
-          <td>
+          <td class="whitespace-nowrap">
             <% if import.file.present? %>
               <%= link_to 'Download', rails_blob_path(import.file, disposition: 'attachment'), class: "px-4 py-2 bg-blue-500 text-white rounded-md", download: import.name %>
             <% end %>