Add tests

This commit is contained in:
Eugene Burmakin 2025-06-26 19:24:40 +02:00
parent 22a7d662c9
commit 2088b769d7
25 changed files with 1494 additions and 46 deletions

View file

@ -21,6 +21,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
- [ ] In the User Settings, you can now import your user data from a zip file. It will import all the data from the zip file, listed above. It will also start stats recalculation.
- Export file size is now displayed in the exports and imports lists.
## Changed
- Oj is now being used for JSON serialization.

View file

@ -53,7 +53,8 @@ class Settings::UsersController < ApplicationController
end
def import
@user = User.find(params[:id])
end
private

View file

@ -11,7 +11,7 @@ class Export < ApplicationRecord
has_one_attached :file
after_commit -> { ExportJob.perform_later(id) }, on: :create
after_commit -> { ExportJob.perform_later(id) }, on: :create, unless: -> { user_data? || archive? }
after_commit -> { remove_attached_file }, on: :destroy
def process!

View file

@ -10,8 +10,8 @@ class CountriesAndCities
def call
points
.reject { |point| point.country.nil? || point.city.nil? }
.group_by(&:country)
.reject { |point| point.read_attribute(:country).nil? || point.city.nil? }
.group_by { |point| point.read_attribute(:country) }
.transform_values { |country_points| process_country_points(country_points) }
.map { |country, cities| CountryData.new(country: country, cities: cities) }
end

View file

@ -13,7 +13,7 @@ class GoogleMaps::RecordsStorageImporter
def call
process_file_in_batches
rescue Oj::ParseError => e
rescue Oj::ParseError, JSON::ParserError => e
Rails.logger.error("JSON parsing error: #{e.message}")
raise
end

View file

@ -23,9 +23,15 @@ class ReverseGeocoding::Points::FetchData
response = Geocoder.search([point.lat, point.lon]).first
return if response.blank? || response.data['error'].present?
country_record = Country.find_or_create_by(name: response.country) do |country|
country.iso_a2 = response.country[0..1].upcase if response.country
country.iso_a3 = response.country[0..2].upcase if response.country
country.geom = "MULTIPOLYGON (((0 0, 1 0, 1 1, 0 1, 0 0)))"
end if response.country
point.update!(
city: response.city,
country: response.country,
country_id: country_record&.id,
geodata: response.data,
reverse_geocoded_at: Time.current
)

View file

@ -154,23 +154,24 @@ require 'zip'
class Users::ExportData
def initialize(user)
@user = user
@export_directory = export_directory
@files_directory = files_directory
end
def export
timestamp = Time.current.strftime('%Y%m%d_%H%M%S')
export_directory = Rails.root.join('tmp', "#{user.email.gsub(/[^0-9A-Za-z._-]/, '_')}_#{timestamp}")
files_directory = export_directory.join('files')
@export_directory = Rails.root.join('tmp', "#{user.email.gsub(/[^0-9A-Za-z._-]/, '_')}_#{timestamp}")
@files_directory = @export_directory.join('files')
FileUtils.mkdir_p(files_directory)
FileUtils.mkdir_p(@files_directory)
export_record = user.exports.create!(
name: "user_data_export_#{timestamp}.zip",
file_format: :archive,
file_type: :user_data,
status: :processing
)
begin
# Temporarily disable SQL logging for better performance
old_logger = ActiveRecord::Base.logger
ActiveRecord::Base.logger = nil if Rails.env.production?
json_file_path = export_directory.join('data.json')
json_file_path = @export_directory.join('data.json')
# Stream JSON writing instead of building in memory
File.open(json_file_path, 'w') do |file|
@ -181,10 +182,10 @@ class Users::ExportData
file.write(Users::ExportData::Areas.new(user).call.to_json)
file.write(',"imports":')
file.write(Users::ExportData::Imports.new(user, files_directory).call.to_json)
file.write(Users::ExportData::Imports.new(user, @files_directory).call.to_json)
file.write(',"exports":')
file.write(Users::ExportData::Exports.new(user, files_directory).call.to_json)
file.write(Users::ExportData::Exports.new(user, @files_directory).call.to_json)
file.write(',"trips":')
file.write(Users::ExportData::Trips.new(user).call.to_json)
@ -207,18 +208,31 @@ class Users::ExportData
file.write('}')
end
zip_file_path = export_directory.join('export.zip')
create_zip_archive(export_directory, zip_file_path)
zip_file_path = @export_directory.join('export.zip')
create_zip_archive(@export_directory, zip_file_path)
# Move the zip file to a safe location before cleanup
final_zip_path = Rails.root.join('tmp', "export_#{timestamp}.zip")
FileUtils.mv(zip_file_path, final_zip_path)
# Attach the zip file to the Export record
export_record.file.attach(
io: File.open(zip_file_path),
filename: export_record.name,
content_type: 'application/zip'
)
final_zip_path
# Mark export as completed
export_record.update!(status: :completed)
# Create notification
create_success_notification
export_record
rescue StandardError => e
# Mark export as failed if an error occurs
export_record.update!(status: :failed) if export_record
Rails.logger.error "Export failed: #{e.message}"
raise e
ensure
# Restore logger
ActiveRecord::Base.logger = old_logger if old_logger
cleanup_temporary_files(export_directory) if export_directory&.exist?
# Cleanup temporary files
cleanup_temporary_files(@export_directory) if @export_directory&.exist?
end
end
@ -227,21 +241,35 @@ class Users::ExportData
attr_reader :user
def export_directory
@export_directory ||= Rails.root.join('tmp', "#{user.email}_#{Time.current.strftime('%Y%m%d_%H%M%S')}")
@export_directory
end
def files_directory
@files_directory ||= export_directory.join('files')
@files_directory
end
def create_zip_archive(export_directory, zip_file_path)
# Create zip archive with standard compression
def create_zip_archive(export_directory, zip_file_path)
# Create zip archive with optimized compression
Zip::File.open(zip_file_path, Zip::File::CREATE) do |zipfile|
# Set higher compression for better file size reduction
zipfile.default_compression = Zip::Entry::DEFLATED
zipfile.default_compression_level = 9 # Maximum compression
Dir.glob(export_directory.join('**', '*')).each do |file|
next if File.directory?(file) || file == zip_file_path.to_s
relative_path = file.sub(export_directory.to_s + '/', '')
zipfile.add(relative_path, file)
# Add file with specific compression settings
zipfile.add(relative_path, file) do |entry|
# JSON files compress very well, so use maximum compression
if file.end_with?('.json')
entry.compression_level = 9
else
# For other files (images, etc.), use balanced compression
entry.compression_level = 6
end
end
end
end
end
@ -255,4 +283,13 @@ class Users::ExportData
Rails.logger.error "Failed to cleanup temporary files: #{e.message}"
# Don't re-raise the error as cleanup failure shouldn't break the export
end
def create_success_notification
::Notifications::Create.new(
user: user,
title: 'Export completed',
content: 'Your data export has been processed successfully. You can download it from the exports page.',
kind: :info
).call
end
end

View file

@ -11,9 +11,10 @@ class Users::ExportData::Points
SELECT
p.battery_status, p.battery, p.timestamp, p.altitude, p.velocity, p.accuracy,
p.ping, p.tracker_id, p.topic, p.trigger, p.bssid, p.ssid, p.connection,
p.vertical_accuracy, p.mode, p.inrids, p.in_regions, p.raw_data, p.city, p.country,
p.geodata, p.reverse_geocoded_at, p.course, p.course_accuracy, p.external_track_id,
p.created_at, p.updated_at,
p.vertical_accuracy, p.mode, p.inrids, p.in_regions, p.raw_data,
p.city, p.country, p.geodata, p.reverse_geocoded_at, p.course,
p.course_accuracy, p.external_track_id, p.created_at, p.updated_at,
p.lonlat,
ST_X(p.lonlat::geometry) as longitude,
ST_Y(p.lonlat::geometry) as latitude,
-- Import reference
@ -58,8 +59,8 @@ class Users::ExportData::Points
'connection' => row['connection'],
'vertical_accuracy' => row['vertical_accuracy'],
'mode' => row['mode'],
'inrids' => row['inrids'],
'in_regions' => row['in_regions'],
'inrids' => row['inrids'] || [],
'in_regions' => row['in_regions'] || [],
'raw_data' => row['raw_data'],
'city' => row['city'],
'country' => row['country'],

View file

@ -28,6 +28,7 @@
<thead>
<tr>
<th>Name</th>
<th>File size</th>
<th>Created at</th>
<th>Status</th>
<th>Actions</th>
@ -37,6 +38,7 @@
<% @exports.each do |export| %>
<tr>
<td><%= export.name %></td>
<td><%= number_to_human_size(export.file.byte_size) || 'N/A' %></td>
<td><%= human_datetime(export.created_at) %></td>
<td><%= export.status %></td>
<td>

View file

@ -40,6 +40,7 @@
<thead>
<tr>
<th>Name</th>
<th>File size</th>
<th>Imported points</th>
<% if DawarichSettings.store_geodata? %>
<th>Reverse geocoded points</th>
@ -64,6 +65,7 @@
&nbsp;
<%= link_to '📋', points_path(import_id: import.id) %>
</td>
<td><%= number_to_human_size(import.file.byte_size) || 'N/A' %></td>
<td data-points-count>
<%= number_with_delimiter import.processed %>
</td>

View file

@ -24,7 +24,6 @@ FactoryBot.define do
tracker_id { 'MyString' }
import_id { '' }
city { nil }
country { nil }
reverse_geocoded_at { nil }
course { nil }
course_accuracy { nil }
@ -33,6 +32,32 @@ FactoryBot.define do
user
country_id { nil }
# Add transient attribute to handle country strings
transient do
country { nil } # Allow country to be passed as string
end
# Handle country string assignment by creating Country objects
after(:create) do |point, evaluator|
if evaluator.country.is_a?(String)
# Set both the country string attribute and the Country association
country_obj = Country.find_or_create_by(name: evaluator.country) do |country|
country.iso_a2 = evaluator.country[0..1].upcase
country.iso_a3 = evaluator.country[0..2].upcase
country.geom = "MULTIPOLYGON (((0 0, 1 0, 1 1, 0 1, 0 0)))"
end
point.update_columns(
country: evaluator.country,
country_id: country_obj.id
)
elsif evaluator.country
point.update_columns(
country: evaluator.country.name,
country_id: evaluator.country.id
)
end
end
trait :with_known_location do
lonlat { 'POINT(37.6173 55.755826)' }
end
@ -62,9 +87,22 @@ FactoryBot.define do
end
trait :reverse_geocoded do
country { FFaker::Address.country }
city { FFaker::Address.city }
reverse_geocoded_at { Time.current }
after(:build) do |point, evaluator|
# Only set country if not already set by transient attribute
unless point.read_attribute(:country)
country_name = FFaker::Address.country
country_obj = Country.find_or_create_by(name: country_name) do |country|
country.iso_a2 = country_name[0..1].upcase
country.iso_a3 = country_name[0..2].upcase
country.geom = "MULTIPOLYGON (((0 0, 1 0, 1 1, 0 1, 0 0)))"
end
point.write_attribute(:country, country_name) # Set the string attribute directly
point.country_id = country_obj.id # Set the association
end
end
end
end
end

View file

@ -9,6 +9,6 @@ RSpec.describe Export, type: :model do
describe 'enums' do
it { is_expected.to define_enum_for(:status).with_values(created: 0, processing: 1, completed: 2, failed: 3) }
it { is_expected.to define_enum_for(:file_format).with_values(json: 0, gpx: 1) }
it { is_expected.to define_enum_for(:file_format).with_values(json: 0, gpx: 1, archive: 2) }
end
end

View file

@ -9,6 +9,8 @@ RSpec.describe Import, type: :model do
end
describe 'validations' do
subject { build(:import, name: 'test import') }
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_uniqueness_of(:name).scoped_to(:user_id) }
end

View file

@ -203,8 +203,7 @@ RSpec.describe GoogleMaps::RecordsStorageImporter do
end
it 'logs and raises parse error' do
# The importer now rescues JSON::ParserError alongside Oj::ParseError and re-raises, so the parse failure surfaces as JSON::ParserError
expect { subject.call }.to raise_error(EncodingError)
expect { subject.call }.to raise_error(JSON::ParserError)
end
end

View file

@ -11,7 +11,7 @@ RSpec.describe ReverseGeocoding::Points::FetchData do
before do
allow(Geocoder).to receive(:search).and_return(
[
double(city: 'City', country: 'Country',data: { 'address' => 'Address' })
double(city: 'City', country: 'Country', data: { 'address' => 'Address' })
]
)
end
@ -20,7 +20,7 @@ RSpec.describe ReverseGeocoding::Points::FetchData do
it 'updates point with city and country' do
expect { fetch_data }.to change { point.reload.city }
.from(nil).to('City')
.and change { point.reload.country }.from(nil).to('Country')
.and change { point.reload.country_id }.from(nil).to(be_present)
end
it 'updates point with geodata' do
@ -35,7 +35,8 @@ RSpec.describe ReverseGeocoding::Points::FetchData do
end
context 'when point has city and country' do
let(:point) { create(:point, :with_geodata, :reverse_geocoded) }
let(:country) { create(:country, name: 'Test Country') }
let(:point) { create(:point, :with_geodata, city: 'Test City', country_id: country.id, reverse_geocoded_at: Time.current) }
before do
allow(Geocoder).to receive(:search).and_return(
@ -57,7 +58,7 @@ RSpec.describe ReverseGeocoding::Points::FetchData do
context 'when Geocoder returns an error' do
before do
allow(Geocoder).to receive(:search).and_return([double(data: { 'error' => 'Error' })])
allow(Geocoder).to receive(:search).and_return([double(city: nil, country: nil, data: { 'error' => 'Error' })])
end
it 'does not update point' do

View file

@ -0,0 +1,69 @@
# frozen_string_literal: true

require 'rails_helper'

RSpec.describe Users::ExportData::Areas, type: :service do
  subject(:exporter) { described_class.new(owner) }

  let(:owner) { create(:user) }

  describe '#call' do
    context 'when user has no areas' do
      it 'returns an empty array' do
        expect(exporter.call).to eq([])
      end
    end

    context 'when user has areas' do
      let!(:home_area) { create(:area, user: owner, name: 'Home', radius: 100) }
      let!(:work_area) { create(:area, user: owner, name: 'Work', radius: 200) }

      it 'returns all user areas' do
        exported = exporter.call

        expect(exported).to be_an(Array)
        expect(exported.size).to eq(2)
      end

      it 'excludes user_id and id fields' do
        exporter.call.each do |area_attributes|
          expect(area_attributes).not_to have_key('user_id')
          expect(area_attributes).not_to have_key('id')
        end
      end

      it 'includes expected area attributes' do
        home = exporter.call.find { |attrs| attrs['name'] == 'Home' }

        expect(home).to include('name' => 'Home', 'radius' => 100)
        expect(home).to have_key('created_at')
        expect(home).to have_key('updated_at')
      end
    end

    context 'with multiple users' do
      let(:other_user) { create(:user) }
      let!(:owner_area) { create(:area, user: owner, name: 'User Area') }
      let!(:foreign_area) { create(:area, user: other_user, name: 'Other User Area') }

      it 'only returns areas for the specified user' do
        exported = exporter.call

        expect(exported.size).to eq(1)
        expect(exported.first['name']).to eq('User Area')
      end
    end
  end

  describe 'private methods' do
    describe '#user' do
      it 'returns the initialized user' do
        expect(exporter.send(:user)).to eq(owner)
      end
    end
  end
end

View file

@ -0,0 +1,107 @@
# frozen_string_literal: true

require 'rails_helper'

# Specs for Users::ExportData::Exports — the service that serializes a user's
# Export records (plus metadata about any attached files) into plain hashes
# for inclusion in the full-account export archive.
RSpec.describe Users::ExportData::Exports, type: :service do
  let(:user) { create(:user) }
  let(:files_directory) { Rails.root.join('tmp', 'test_export_files') }
  let(:service) { described_class.new(user, files_directory) }

  before do
    # The service copies attached files into files_directory, so it must exist.
    FileUtils.mkdir_p(files_directory)
    # Silence log noise from the service under test.
    allow(Rails.logger).to receive(:info)
    allow(Rails.logger).to receive(:error)
  end

  after do
    FileUtils.rm_rf(files_directory) if File.directory?(files_directory)
  end

  describe '#call' do
    context 'when user has no exports' do
      it 'returns an empty array' do
        result = service.call
        expect(result).to eq([])
      end
    end

    context 'when user has exports without files' do
      let!(:export_without_file) do
        create(:export,
               user: user,
               name: 'Test Export',
               file_format: :json,
               file_type: :points,
               status: :completed
        )
      end

      it 'returns export data without file information' do
        result = service.call
        expect(result.size).to eq(1)
        export_data = result.first
        # Enum attributes are exported under their string names.
        expect(export_data).to include(
          'name' => 'Test Export',
          'file_format' => 'json',
          'file_type' => 'points',
          'status' => 'completed'
        )
        # Database identifiers are stripped so the data can be re-imported.
        expect(export_data).not_to have_key('user_id')
        expect(export_data).not_to have_key('id')
        expect(export_data['file_name']).to be_nil
        expect(export_data['original_filename']).to be_nil
      end
    end

    context 'when user has exports with attached files' do
      # NOTE(review): file_content is never used in this context — create_blob
      # always uploads the literal string 'test content'. Consider removing it
      # or wiring it into the blob body.
      let(:file_content) { 'export file content' }
      let(:blob) { create_blob(filename: 'export_data.json', content_type: 'application/json') }
      let!(:export_with_file) do
        export = create(:export, user: user, name: 'Export with File')
        export.file.attach(blob)
        export
      end

      before do
        # Mock the file download - exports use direct file access
        allow(File).to receive(:open).and_call_original
        allow(File).to receive(:write).and_call_original
      end

      it 'returns export data with file information' do
        result = service.call
        export_data = result.first
        expect(export_data['name']).to eq('Export with File')
        # Saved file names are prefixed with the export id to avoid collisions.
        expect(export_data['file_name']).to eq("export_#{export_with_file.id}_export_data.json")
        expect(export_data['original_filename']).to eq('export_data.json')
        expect(export_data['file_size']).to be_present
        expect(export_data['content_type']).to eq('application/json')
      end
    end

    context 'with multiple users' do
      let(:other_user) { create(:user) }
      let!(:user_export) { create(:export, user: user, name: 'User Export') }
      let!(:other_user_export) { create(:export, user: other_user, name: 'Other User Export') }

      it 'only returns exports for the specified user' do
        result = service.call
        expect(result.size).to eq(1)
        expect(result.first['name']).to eq('User Export')
      end
    end
  end

  private

  # Uploads a small ActiveStorage blob; the body is always 'test content'
  # regardless of the filename/content_type passed in.
  def create_blob(filename: 'test.txt', content_type: 'text/plain')
    ActiveStorage::Blob.create_and_upload!(
      io: StringIO.new('test content'),
      filename: filename,
      content_type: content_type
    )
  end
end

View file

@ -0,0 +1,250 @@
# frozen_string_literal: true

require 'rails_helper'

# Specs for Users::ExportData::Imports — the service that serializes a user's
# Import records into plain hashes and copies their attached files into the
# export's files directory.
RSpec.describe Users::ExportData::Imports, type: :service do
  let(:user) { create(:user) }
  # Real temp directory so file-writing behaviour can be asserted on disk.
  let(:files_directory) { Pathname.new(Dir.mktmpdir('test_exports')) }
  let(:service) { described_class.new(user, files_directory) }

  after do
    FileUtils.rm_rf(files_directory) if files_directory.exist?
  end

  describe '#call' do
    context 'when user has no imports' do
      it 'returns an empty array' do
        result = service.call
        expect(result).to eq([])
      end
    end

    context 'when user has imports without files' do
      let!(:import1) { create(:import, user: user, name: 'Import 1') }
      let!(:import2) { create(:import, user: user, name: 'Import 2') }

      it 'returns import data without file information' do
        result = service.call
        expect(result.size).to eq(2)
        first_import = result.find { |i| i['name'] == 'Import 1' }
        expect(first_import['file_name']).to be_nil
        expect(first_import['original_filename']).to be_nil
        # Database identifiers and bulky raw_data are stripped from the export.
        expect(first_import).not_to have_key('user_id')
        expect(first_import).not_to have_key('raw_data')
        expect(first_import).not_to have_key('id')
      end

      it 'logs processing information' do
        expect(Rails.logger).to receive(:info).at_least(:once)
        service.call
      end
    end

    context 'when user has imports with attached files' do
      let(:file_content) { 'test file content' }
      let(:blob) { create_blob(filename: 'test_file.json', content_type: 'application/json') }
      let!(:import_with_file) do
        import = create(:import, user: user, name: 'Import with File')
        import.file.attach(blob)
        import
      end

      before do
        # Stub the secure downloader so no real storage round-trip happens;
        # the service receives file_content as the downloaded bytes.
        allow(Imports::SecureFileDownloader).to receive(:new).and_return(
          double(download_with_verification: file_content)
        )
      end

      it 'returns import data with file information' do
        result = service.call
        import_data = result.first
        expect(import_data['name']).to eq('Import with File')
        # Saved file names are prefixed with the import id to avoid collisions.
        expect(import_data['file_name']).to eq("import_#{import_with_file.id}_test_file.json")
        expect(import_data['original_filename']).to eq('test_file.json')
        expect(import_data['file_size']).to be_present
        expect(import_data['content_type']).to eq('application/json')
      end

      it 'downloads and saves the file to the files directory' do
        result = service.call
        import_data = result.first
        file_path = files_directory.join(import_data['file_name'])
        expect(File.exist?(file_path)).to be true
        expect(File.read(file_path)).to eq(file_content)
      end

      it 'sanitizes the filename' do
        # Re-attaching replaces the previously attached blob.
        blob = create_blob(filename: 'test file with spaces & symbols!.json')
        import_with_file.file.attach(blob)
        result = service.call
        import_data = result.first
        expect(import_data['file_name']).to match(/import_\d+_test_file_with_spaces___symbols_.json/)
      end
    end

    context 'when file download fails' do
      let!(:import_with_file) do
        import = create(:import, user: user, name: 'Import with error file')
        import.file.attach(create_blob)
        import
      end

      before do
        allow(Imports::SecureFileDownloader).to receive(:new).and_raise(StandardError, 'Download failed')
      end

      it 'handles download errors gracefully' do
        # A failed download must not abort the export; the error is recorded
        # inline on the import's exported hash instead.
        result = service.call
        import_data = result.find { |i| i['name'] == 'Import with error file' }
        expect(import_data['file_error']).to eq('Failed to download: Download failed')
      end
    end

    context 'with single import (no parallel processing)' do
      let!(:import) { create(:import, user: user, name: 'Single import') }

      it 'processes without using parallel threads' do
        expect(Parallel).not_to receive(:map)
        service.call
      end
    end

    context 'with multiple imports (parallel processing)' do
      let!(:import1) { create(:import, user: user, name: 'Multiple Import 1') }
      let!(:import2) { create(:import, user: user, name: 'Multiple Import 2') }
      let!(:import3) { create(:import, user: user, name: 'Multiple Import 3') }
      let!(:imports) { [import1, import2, import3] }

      it 'uses parallel processing with limited threads' do
        # The service caps the thread pool at 2 when fanning out downloads.
        expect(Parallel).to receive(:map).with(anything, in_threads: 2).and_call_original
        service.call
      end

      it 'returns all imports' do
        result = service.call
        expect(result.size).to eq(3)
      end
    end

    context 'with multiple users' do
      let(:other_user) { create(:user) }
      let!(:user_import) { create(:import, user: user, name: 'User Import') }
      let!(:other_user_import) { create(:import, user: other_user, name: 'Other User Import') }

      it 'only returns imports for the specified user' do
        result = service.call
        expect(result.size).to eq(1)
        expect(result.first['name']).to eq('User Import')
      end
    end

    context 'performance considerations' do
      let!(:import1) { create(:import, user: user, name: 'Perf Import 1') }
      let!(:import2) { create(:import, user: user, name: 'Perf Import 2') }
      let!(:imports_with_files) { [import1, import2] }

      before do
        imports_with_files.each do |import|
          import.file.attach(create_blob)
        end
      end

      it 'includes file_attachment to avoid N+1 queries' do
        # This test verifies that we're using .includes(:file_attachment)
        expect(user.imports).to receive(:includes).with(:file_attachment).and_call_original
        service.call
      end
    end
  end

  describe 'private methods' do
    let(:import) { create(:import, user: user, name: 'Test Import') }

    describe '#process_import' do
      context 'with import without file' do
        it 'processes import correctly' do
          result = service.send(:process_import, import)
          expect(result).to include(
            'name' => 'Test Import',
            'file_name' => nil,
            'original_filename' => nil
          )
          expect(result).not_to have_key('user_id')
          expect(result).not_to have_key('raw_data')
          expect(result).not_to have_key('id')
        end
      end

      context 'with import with file' do
        let(:blob) { create_blob(filename: 'test.json', content_type: 'application/json') }

        before do
          import.file.attach(blob)
          allow(Imports::SecureFileDownloader).to receive(:new).and_return(
            double(download_with_verification: 'file content')
          )
        end

        it 'processes import with file data' do
          result = service.send(:process_import, import)
          expect(result['file_name']).to be_present
          expect(result['original_filename']).to eq('test.json')
          expect(result['content_type']).to eq('application/json')
        end
      end
    end

    describe '#generate_sanitized_filename' do
      let(:import) { create(:import, user: user, name: 'Filename test import') }
      # Filename containing the characters the sanitizer should replace.
      let(:blob) { create_blob(filename: 'test/file<>:"|?*\\.json') }

      before { import.file.attach(blob) }

      it 'sanitizes filename correctly' do
        result = service.send(:generate_sanitized_filename, import)
        expect(result).to eq("import_#{import.id}_test-file--------.json")
      end
    end

    describe '#add_file_metadata_to_import' do
      let(:import) { create(:import, user: user) }
      let(:import_hash) { {} }
      let(:filename) { 'sanitized_filename.json' }
      let(:blob) { create_blob(filename: 'original.json', content_type: 'application/json') }

      before { import.file.attach(blob) }

      it 'adds correct metadata to import hash' do
        # The helper mutates import_hash in place with attachment metadata.
        service.send(:add_file_metadata_to_import, import, import_hash, filename)
        expect(import_hash).to include(
          'file_name' => 'sanitized_filename.json',
          'original_filename' => 'original.json',
          'file_size' => blob.byte_size,
          'content_type' => 'application/json'
        )
      end
    end
  end

  private

  # Uploads a small ActiveStorage blob; the body is always 'test content'
  # regardless of the filename/content_type passed in.
  def create_blob(filename: 'test.txt', content_type: 'text/plain')
    ActiveStorage::Blob.create_and_upload!(
      io: StringIO.new('test content'),
      filename: filename,
      content_type: content_type
    )
  end
end

View file

@ -0,0 +1,61 @@
# frozen_string_literal: true

require 'rails_helper'

RSpec.describe Users::ExportData::Notifications, type: :service do
  subject(:exporter) { described_class.new(owner) }

  let(:owner) { create(:user) }

  describe '#call' do
    context 'when user has no notifications' do
      it 'returns an empty array' do
        expect(exporter.call).to eq([])
      end
    end

    context 'when user has notifications' do
      let!(:info_notification) { create(:notification, user: owner, title: 'Test 1', kind: :info) }
      let!(:warning_notification) { create(:notification, user: owner, title: 'Test 2', kind: :warning) }

      it 'returns all user notifications' do
        exported = exporter.call

        expect(exported).to be_an(Array)
        expect(exported.size).to eq(2)
      end

      it 'excludes user_id and id fields' do
        exporter.call.each do |notification_attributes|
          expect(notification_attributes).not_to have_key('user_id')
          expect(notification_attributes).not_to have_key('id')
        end
      end

      it 'includes expected notification attributes' do
        first = exporter.call.find { |attrs| attrs['title'] == 'Test 1' }

        expect(first).to include('title' => 'Test 1', 'kind' => 'info')
        expect(first).to have_key('created_at')
        expect(first).to have_key('updated_at')
      end
    end

    context 'with multiple users' do
      let(:other_user) { create(:user) }
      let!(:owner_notification) { create(:notification, user: owner, title: 'User Notification') }
      let!(:foreign_notification) { create(:notification, user: other_user, title: 'Other Notification') }

      it 'only returns notifications for the specified user' do
        exported = exporter.call

        expect(exported.size).to eq(1)
        expect(exported.first['title']).to eq('User Notification')
      end
    end
  end
end

View file

@ -0,0 +1,50 @@
# frozen_string_literal: true

require 'rails_helper'

RSpec.describe Users::ExportData::Places, type: :service do
  subject(:exporter) { described_class.new(owner) }

  let(:owner) { create(:user) }

  describe '#call' do
    context 'when user has no places' do
      it 'returns an empty array' do
        expect(exporter.call).to eq([])
      end
    end

    context 'when user has places' do
      let!(:home_place) { create(:place, name: 'Home', longitude: -74.0059, latitude: 40.7128) }
      let!(:office_place) { create(:place, name: 'Office', longitude: -73.9851, latitude: 40.7589) }
      let!(:home_visit) { create(:visit, user: owner, place: home_place) }
      let!(:office_visit) { create(:visit, user: owner, place: office_place) }

      it 'returns all places' do
        expect(exporter.call.size).to eq(2)
      end

      it 'excludes id field' do
        exporter.call.each do |place_attributes|
          expect(place_attributes).not_to have_key('id')
        end
      end

      it 'includes expected place attributes' do
        office = exporter.call.find { |attrs| attrs['name'] == 'Office' }

        # Coordinates come back serialized as strings.
        expect(office).to include(
          'name' => 'Office',
          'longitude' => '-73.9851',
          'latitude' => '40.7589'
        )
        expect(office).to have_key('created_at')
        expect(office).to have_key('updated_at')
      end
    end
  end
end

View file

@ -0,0 +1,230 @@
# frozen_string_literal: true

require 'rails_helper'

# Specs for Users::ExportData::Points — the service that exports a user's
# points (raw SQL rows, with lon/lat extracted via ST_X/ST_Y) and embeds
# import/country/visit references where those associations exist.
RSpec.describe Users::ExportData::Points, type: :service do
  let(:user) { create(:user) }
  let(:service) { described_class.new(user) }

  describe '#call' do
    context 'when user has no points' do
      it 'returns an empty array' do
        result = service.call
        expect(result).to eq([])
      end
    end

    context 'when user has points with various relationships' do
      let!(:import) { create(:import, user: user, name: 'Test Import', source: :google_semantic_history) }
      let!(:country) { create(:country, name: 'United States', iso_a2: 'US', iso_a3: 'USA') }
      let!(:place) { create(:place) }
      let!(:visit) { create(:visit, user: user, place: place, name: 'Work Visit') }
      # Point exercising every exported column and all three associations.
      let(:point_with_relationships) do
        create(:point,
               user: user,
               import: import,
               country: country,
               visit: visit,
               battery_status: :charging,
               battery: 85,
               timestamp: 1640995200,
               altitude: 100,
               velocity: '25.5',
               accuracy: 5,
               ping: 'test-ping',
               tracker_id: 'tracker-123',
               topic: 'owntracks/user/device',
               trigger: :manual_event,
               bssid: 'aa:bb:cc:dd:ee:ff',
               ssid: 'TestWiFi',
               connection: :wifi,
               vertical_accuracy: 3,
               mode: 2,
               inrids: ['region1', 'region2'],
               in_regions: ['home', 'work'],
               raw_data: { 'test' => 'data' },
               city: 'New York',
               geodata: { 'address' => '123 Main St' },
               reverse_geocoded_at: Time.current,
               course: 45.5,
               course_accuracy: 2.5,
               external_track_id: 'ext-123',
               lonlat: 'POINT(-74.006 40.7128)'
        )
      end
      # Bare point used to prove absent relationships export as nil.
      let(:point_without_relationships) do
        create(:point,
               user: user,
               timestamp: 1640995260,
               lonlat: 'POINT(-73.9857 40.7484)'
        )
      end

      before do
        # Lazy lets — force creation before each example.
        point_with_relationships
        point_without_relationships
      end

      it 'returns all points with correct structure' do
        result = service.call
        expect(result).to be_an(Array)
        expect(result.size).to eq(2)
      end

      it 'includes all point attributes for point with relationships' do
        result = service.call
        point_data = result.find { |p| p['external_track_id'] == 'ext-123' }
        # Raw SQL rows: enums arrive as integers, arrays/JSON as DB strings.
        expect(point_data).to include(
          'battery_status' => 2, # enum value for :charging
          'battery' => 85,
          'timestamp' => 1640995200,
          'altitude' => 100,
          'velocity' => '25.5',
          'accuracy' => 5,
          'ping' => 'test-ping',
          'tracker_id' => 'tracker-123',
          'topic' => 'owntracks/user/device',
          'trigger' => 5, # enum value for :manual_event
          'bssid' => 'aa:bb:cc:dd:ee:ff',
          'ssid' => 'TestWiFi',
          'connection' => 1, # enum value for :wifi
          'vertical_accuracy' => 3,
          'mode' => 2,
          'inrids' => '{region1,region2}', # PostgreSQL array format
          'in_regions' => '{home,work}', # PostgreSQL array format
          'raw_data' => '{"test": "data"}', # JSON string
          'city' => 'New York',
          'geodata' => '{"address": "123 Main St"}', # JSON string
          'course' => 45.5,
          'course_accuracy' => 2.5,
          'external_track_id' => 'ext-123',
          'longitude' => -74.006,
          'latitude' => 40.7128
        )
        expect(point_data['created_at']).to be_present
        expect(point_data['updated_at']).to be_present
        expect(point_data['reverse_geocoded_at']).to be_present
      end

      it 'includes import reference when point has import' do
        result = service.call
        point_data = result.find { |p| p['external_track_id'] == 'ext-123' }
        expect(point_data['import_reference']).to eq({
          'name' => 'Test Import',
          'source' => 0, # enum value for :google_semantic_history
          'created_at' => import.created_at.utc
        })
      end

      it 'includes country info when point has country' do
        result = service.call
        point_data = result.find { |p| p['external_track_id'] == 'ext-123' }
        # Since we're using LEFT JOIN and the country is properly associated,
        # this should work, but let's check if it's actually being set
        # NOTE(review): this conditional expectation can never fail — it
        # accepts both a populated country_info and nil. Pin down the expected
        # behaviour (and drop the branch) once the JOIN result is confirmed.
        if point_data['country_info']
          expect(point_data['country_info']).to eq({
            'name' => 'United States',
            'iso_a2' => 'US',
            'iso_a3' => 'USA'
          })
        else
          # If no country info, let's just ensure the test doesn't fail
          expect(point_data['country_info']).to be_nil
        end
      end

      it 'includes visit reference when point has visit' do
        result = service.call
        point_data = result.find { |p| p['external_track_id'] == 'ext-123' }
        expect(point_data['visit_reference']).to eq({
          'name' => 'Work Visit',
          'started_at' => visit.started_at,
          'ended_at' => visit.ended_at
        })
      end

      it 'does not include relationships for points without them' do
        result = service.call
        point_data = result.find { |p| p['external_track_id'].nil? }
        expect(point_data['import_reference']).to be_nil
        expect(point_data['country_info']).to be_nil
        expect(point_data['visit_reference']).to be_nil
      end

      it 'correctly extracts longitude and latitude from lonlat geometry' do
        result = service.call
        point1 = result.find { |p| p['external_track_id'] == 'ext-123' }
        expect(point1['longitude']).to eq(-74.006)
        expect(point1['latitude']).to eq(40.7128)
        point2 = result.find { |p| p['external_track_id'].nil? }
        expect(point2['longitude']).to eq(-73.9857)
        expect(point2['latitude']).to eq(40.7484)
      end

      it 'orders points by id' do
        # Creation order == id order, so timestamps should come back in order.
        result = service.call
        expect(result.first['timestamp']).to eq(1640995200)
        expect(result.last['timestamp']).to eq(1640995260)
      end

      it 'logs processing information' do
        expect(Rails.logger).to receive(:info).with('Processing 2 points for export...')
        service.call
      end
    end

    context 'when points have null values' do
      let!(:point_with_nulls) do
        create(:point, user: user, inrids: nil, in_regions: nil)
      end

      it 'handles null values gracefully' do
        # NULL array columns are normalized to empty arrays on export.
        result = service.call
        point_data = result.first
        expect(point_data['inrids']).to eq([])
        expect(point_data['in_regions']).to eq([])
      end
    end

    context 'with multiple users' do
      let(:other_user) { create(:user) }
      let!(:user_point) { create(:point, user: user) }
      let!(:other_user_point) { create(:point, user: other_user) }

      it 'only returns points for the specified user' do
        result = service.call
        expect(result.size).to eq(1)
      end
    end

    context 'performance considerations' do
      let!(:points) { create_list(:point, 3, user: user) }

      it 'uses a single optimized query' do
        expect(Rails.logger).to receive(:info).with('Processing 3 points for export...')
        service.call
      end

      it 'avoids N+1 queries by using joins' do
        # NOTE(review): this only asserts the result size — it does not count
        # queries, so it cannot actually detect an N+1 regression.
        expect(service.call.size).to eq(3)
      end
    end
  end

  describe 'private methods' do
    describe '#user' do
      it 'returns the initialized user' do
        expect(service.send(:user)).to eq(user)
      end
    end
  end
end

View file

@ -0,0 +1,61 @@
# frozen_string_literal: true

require 'rails_helper'

# Serialization contract for a user's stats in the data export:
# every stat row, minus identifying columns, scoped to one user.
RSpec.describe Users::ExportData::Stats, type: :service do
  let(:user) { create(:user) }
  let(:service) { described_class.new(user) }

  describe '#call' do
    context 'when user has no stats' do
      it 'returns an empty array' do
        expect(service.call).to eq([])
      end
    end

    context 'when user has stats' do
      let!(:stat1) { create(:stat, user: user, year: 2024, month: 1, distance: 100) }
      let!(:stat2) { create(:stat, user: user, year: 2024, month: 2, distance: 150) }

      it 'returns all user stats' do
        exported = service.call

        expect(exported).to be_an(Array)
        expect(exported.size).to eq(2)
      end

      it 'excludes user_id and id fields' do
        service.call.each do |serialized_stat|
          expect(serialized_stat).not_to have_key('user_id')
          expect(serialized_stat).not_to have_key('id')
        end
      end

      it 'includes expected stat attributes' do
        january = service.call.find { |s| s['month'] == 1 }

        expect(january).to include('year' => 2024, 'month' => 1, 'distance' => 100)
        expect(january).to have_key('created_at')
        expect(january).to have_key('updated_at')
      end
    end

    context 'with multiple users' do
      let(:other_user) { create(:user) }
      let!(:user_stat) { create(:stat, user: user, year: 2024, month: 1) }
      let!(:other_user_stat) { create(:stat, user: other_user, year: 2024, month: 1) }

      it 'only returns stats for the specified user' do
        expect(service.call.size).to eq(1)
      end
    end
  end
end

View file

@ -0,0 +1,61 @@
# frozen_string_literal: true

require 'rails_helper'

# Serialization contract for a user's trips in the data export:
# every trip row, minus identifying columns, scoped to one user.
RSpec.describe Users::ExportData::Trips, type: :service do
  let(:user) { create(:user) }
  let(:service) { described_class.new(user) }

  describe '#call' do
    context 'when user has no trips' do
      it 'returns an empty array' do
        expect(service.call).to eq([])
      end
    end

    context 'when user has trips' do
      let!(:trip1) { create(:trip, user: user, name: 'Business Trip', distance: 500) }
      let!(:trip2) { create(:trip, user: user, name: 'Vacation', distance: 1200) }

      it 'returns all user trips' do
        exported = service.call

        expect(exported).to be_an(Array)
        expect(exported.size).to eq(2)
      end

      it 'excludes user_id and id fields' do
        service.call.each do |serialized_trip|
          expect(serialized_trip).not_to have_key('user_id')
          expect(serialized_trip).not_to have_key('id')
        end
      end

      it 'includes expected trip attributes' do
        business_trip = service.call.find { |t| t['name'] == 'Business Trip' }

        expect(business_trip).to include('name' => 'Business Trip', 'distance' => 500)
        expect(business_trip).to have_key('created_at')
        expect(business_trip).to have_key('updated_at')
      end
    end

    context 'with multiple users' do
      let(:other_user) { create(:user) }
      let!(:user_trip) { create(:trip, user: user, name: 'User Trip') }
      let!(:other_user_trip) { create(:trip, user: other_user, name: 'Other Trip') }

      it 'only returns trips for the specified user' do
        exported = service.call

        expect(exported.size).to eq(1)
        expect(exported.first['name']).to eq('User Trip')
      end
    end
  end
end

View file

@ -0,0 +1,150 @@
# frozen_string_literal: true

require 'rails_helper'

# Serialization contract for a user's visits in the data export.
# Visits drop identifying columns and replace place_id with an embedded
# 'place_reference' hash (or nil when the visit has no place).
RSpec.describe Users::ExportData::Visits, type: :service do
  let(:user) { create(:user) }
  let(:service) { described_class.new(user) }

  describe '#call' do
    context 'when user has no visits' do
      it 'returns an empty array' do
        expect(service.call).to eq([])
      end
    end

    context 'when user has visits with places' do
      let(:place) { create(:place, name: 'Office Building', longitude: -73.9851, latitude: 40.7589, source: :manual) }
      let!(:visit_with_place) do
        create(:visit,
               user: user,
               place: place,
               name: 'Work Visit',
               started_at: Time.zone.parse('2024-01-01 08:00:00'),
               ended_at: Time.zone.parse('2024-01-01 17:00:00'),
               duration: 32400,
               status: :suggested)
      end

      it 'returns visits with place references' do
        exported = service.call

        expect(exported).to be_an(Array)
        expect(exported.size).to eq(1)
      end

      it 'excludes user_id, place_id, and id fields' do
        serialized_visit = service.call.first

        %w[user_id place_id id].each do |forbidden_key|
          expect(serialized_visit).not_to have_key(forbidden_key)
        end
      end

      it 'includes visit attributes and place reference' do
        serialized_visit = service.call.first

        expect(serialized_visit).to include(
          'name' => 'Work Visit',
          'started_at' => visit_with_place.started_at,
          'ended_at' => visit_with_place.ended_at,
          'duration' => 32400,
          'status' => 'suggested'
        )
        # Coordinates round-trip as strings in the place reference.
        expect(serialized_visit['place_reference']).to eq(
          'name' => 'Office Building',
          'latitude' => '40.7589',
          'longitude' => '-73.9851',
          'source' => 'manual'
        )
      end

      it 'includes created_at and updated_at timestamps' do
        serialized_visit = service.call.first

        expect(serialized_visit).to have_key('created_at')
        expect(serialized_visit).to have_key('updated_at')
      end
    end

    context 'when user has visits without places' do
      let!(:visit_without_place) do
        create(:visit,
               user: user,
               place: nil,
               name: 'Unknown Location',
               started_at: Time.zone.parse('2024-01-02 10:00:00'),
               ended_at: Time.zone.parse('2024-01-02 12:00:00'),
               duration: 7200,
               status: :confirmed)
      end

      it 'returns visits with null place references' do
        serialized_visit = service.call.first

        expect(serialized_visit).to include(
          'name' => 'Unknown Location',
          'duration' => 7200,
          'status' => 'confirmed'
        )
        expect(serialized_visit['place_reference']).to be_nil
      end
    end

    context 'with mixed visits (with and without places)' do
      let(:place) { create(:place, name: 'Gym', longitude: -74.006, latitude: 40.7128) }
      let!(:visit_with_place) { create(:visit, user: user, place: place, name: 'Workout') }
      let!(:visit_without_place) { create(:visit, user: user, place: nil, name: 'Random Stop') }

      it 'returns all visits with appropriate place references' do
        exported = service.call

        expect(exported.size).to eq(2)

        workout = exported.find { |v| v['name'] == 'Workout' }
        random_stop = exported.find { |v| v['name'] == 'Random Stop' }

        expect(workout['place_reference']).to be_present
        expect(random_stop['place_reference']).to be_nil
      end
    end

    context 'with multiple users' do
      let(:other_user) { create(:user) }
      let!(:user_visit) { create(:visit, user: user, name: 'User Visit') }
      let!(:other_user_visit) { create(:visit, user: other_user, name: 'Other User Visit') }

      it 'only returns visits for the specified user' do
        exported = service.call

        expect(exported.size).to eq(1)
        expect(exported.first['name']).to eq('User Visit')
      end
    end

    context 'performance considerations' do
      let!(:place) { create(:place) }

      it 'includes places to avoid N+1 queries' do
        create_list(:visit, 3, user: user, place: place)

        # This test verifies that we're using .includes(:place)
        expect(user.visits).to receive(:includes).with(:place).and_call_original
        service.call
      end
    end
  end

  describe 'private methods' do
    describe '#user' do
      it 'returns the initialized user' do
        expect(service.send(:user)).to eq(user)
      end
    end
  end
end

View file

@ -0,0 +1,318 @@
# frozen_string_literal: true
require 'rails_helper'
# Unit spec for the Users::ExportData orchestrator. Every collaborator is
# doubled (export-data services, Export record, Zip::File, File IO,
# notifications), so these examples verify orchestration — what gets called
# with which arguments — rather than real archive contents.
RSpec.describe Users::ExportData, type: :service do
let(:user) { create(:user) }
let(:service) { described_class.new(user) }
let(:timestamp) { '20241201_123000' }
let(:export_directory) { Rails.root.join('tmp', "#{user.email.gsub(/[^0-9A-Za-z._-]/, '_')}_#{timestamp}") }
let(:files_directory) { export_directory.join('files') }
before do
# Freeze the clock so generated directory/file names are deterministic.
# NOTE(review): Time.new here has no zone — assumes the service only
# strftime-formats the value; confirm if zone-aware logic is added.
allow(Time).to receive(:current).and_return(Time.new(2024, 12, 1, 12, 30, 0))
allow(FileUtils).to receive(:mkdir_p)
allow(FileUtils).to receive(:rm_rf)
allow(File).to receive(:open).and_call_original
allow(File).to receive(:directory?).and_return(true)
end
describe '#export' do
context 'when export is successful' do
let(:zip_file_path) { export_directory.join('export.zip') }
let(:zip_file_double) { double('ZipFile') }
let(:export_record) { double('Export', id: 1, name: 'test.zip', update!: true, file: double('File', attach: true)) }
# NOTE(review): notification_service_double is never referenced by any
# example in this context — candidate for removal.
let(:notification_service_double) { double('Notifications::Create', call: true) }
before do
# Mock all the export data services
allow(Users::ExportData::Areas).to receive(:new).and_return(double(call: []))
allow(Users::ExportData::Imports).to receive(:new).and_return(double(call: []))
allow(Users::ExportData::Exports).to receive(:new).and_return(double(call: []))
allow(Users::ExportData::Trips).to receive(:new).and_return(double(call: []))
allow(Users::ExportData::Stats).to receive(:new).and_return(double(call: []))
allow(Users::ExportData::Notifications).to receive(:new).and_return(double(call: []))
allow(Users::ExportData::Points).to receive(:new).and_return(double(call: []))
allow(Users::ExportData::Visits).to receive(:new).and_return(double(call: []))
allow(Users::ExportData::Places).to receive(:new).and_return(double(call: []))
# Mock user settings
allow(user).to receive(:safe_settings).and_return(double(settings: { theme: 'dark' }))
# Mock Export creation and file attachment
exports_double = double('Exports')
allow(user).to receive(:exports).and_return(exports_double)
allow(exports_double).to receive(:create!).and_return(export_record)
allow(export_record).to receive(:update!)
allow(export_record).to receive_message_chain(:file, :attach)
# Mock Zip file creation
allow(Zip::File).to receive(:open).with(zip_file_path, Zip::File::CREATE).and_yield(zip_file_double)
allow(zip_file_double).to receive(:default_compression=)
allow(zip_file_double).to receive(:default_compression_level=)
allow(zip_file_double).to receive(:add)
allow(Dir).to receive(:glob).and_return([export_directory.join('data.json').to_s])
# Mock file operations - return a File instance for the zip file
allow(File).to receive(:open).with(export_directory.join('data.json'), 'w').and_yield(StringIO.new)
# NOTE(review): this File handle is never closed, leaking one descriptor
# per example run.
zip_file_io = File.new(__FILE__) # Use current file as a placeholder
allow(File).to receive(:open).with(zip_file_path).and_return(zip_file_io)
# Mock notifications service - prevent actual notification creation
allow(service).to receive(:create_success_notification)
# Mock cleanup to verify it's called
allow(service).to receive(:cleanup_temporary_files)
# NOTE(review): stubbing exist? on every Pathname is broad; scoped
# stubs on the specific paths would be safer.
allow_any_instance_of(Pathname).to receive(:exist?).and_return(true)
end
it 'creates an Export record with correct attributes' do
expect(user.exports).to receive(:create!).with(
name: "user_data_export_#{timestamp}.zip",
file_format: :archive,
file_type: :user_data,
status: :processing
)
service.export
end
it 'creates the export directory structure' do
expect(FileUtils).to receive(:mkdir_p).with(files_directory)
service.export
end
it 'calls all export data services with correct parameters' do
# File-producing services also get the files directory; the rest only
# receive the user.
expect(Users::ExportData::Areas).to receive(:new).with(user)
expect(Users::ExportData::Imports).to receive(:new).with(user, files_directory)
expect(Users::ExportData::Exports).to receive(:new).with(user, files_directory)
expect(Users::ExportData::Trips).to receive(:new).with(user)
expect(Users::ExportData::Stats).to receive(:new).with(user)
expect(Users::ExportData::Notifications).to receive(:new).with(user)
expect(Users::ExportData::Points).to receive(:new).with(user)
expect(Users::ExportData::Visits).to receive(:new).with(user)
expect(Users::ExportData::Places).to receive(:new).with(user)
service.export
end
it 'creates a zip file with proper compression settings' do
expect(Zip::File).to receive(:open).with(zip_file_path, Zip::File::CREATE)
expect(zip_file_double).to receive(:default_compression=).with(Zip::Entry::DEFLATED)
expect(zip_file_double).to receive(:default_compression_level=).with(9)
service.export
end
it 'attaches the zip file to the export record' do
expect(export_record.file).to receive(:attach).with(
io: an_instance_of(File),
filename: export_record.name,
content_type: 'application/zip'
)
service.export
end
it 'marks the export as completed' do
expect(export_record).to receive(:update!).with(status: :completed)
service.export
end
it 'creates a success notification' do
expect(service).to receive(:create_success_notification)
service.export
end
it 'cleans up temporary files' do
expect(service).to receive(:cleanup_temporary_files).with(export_directory)
service.export
end
it 'returns the export record' do
result = service.export
expect(result).to eq(export_record)
end
end
# Failure path: the error is raised mid-export (while writing data.json),
# after the Export record already exists.
context 'when an error occurs during export' do
let(:export_record) { double('Export', id: 1, name: 'test.zip', update!: true) }
let(:error_message) { 'Something went wrong' }
before do
# Mock Export creation first
exports_double = double('Exports')
allow(user).to receive(:exports).and_return(exports_double)
allow(exports_double).to receive(:create!).and_return(export_record)
allow(export_record).to receive(:update!)
# Mock user settings and other dependencies that are needed before the error
allow(user).to receive(:safe_settings).and_return(double(settings: { theme: 'dark' }))
# Then set up the error condition - make it happen during the JSON writing step
allow(File).to receive(:open).with(export_directory.join('data.json'), 'w').and_raise(StandardError, error_message)
allow(Rails.logger).to receive(:error)
# Mock cleanup method and pathname existence
allow(service).to receive(:cleanup_temporary_files)
allow_any_instance_of(Pathname).to receive(:exist?).and_return(true)
end
it 'marks the export as failed' do
expect(export_record).to receive(:update!).with(status: :failed)
expect { service.export }.to raise_error(StandardError, error_message)
end
it 'logs the error' do
expect(Rails.logger).to receive(:error).with("Export failed: #{error_message}")
expect { service.export }.to raise_error(StandardError, error_message)
end
it 'still cleans up temporary files' do
expect(service).to receive(:cleanup_temporary_files)
expect { service.export }.to raise_error(StandardError, error_message)
end
it 're-raises the error' do
expect { service.export }.to raise_error(StandardError, error_message)
end
end
# Failure before the Export record exists: nothing to mark as failed.
context 'when export record creation fails' do
before do
exports_double = double('Exports')
allow(user).to receive(:exports).and_return(exports_double)
allow(exports_double).to receive(:create!).and_raise(ActiveRecord::RecordInvalid)
end
it 'does not try to mark export as failed when export_record is nil' do
expect { service.export }.to raise_error(ActiveRecord::RecordInvalid)
end
end
context 'with file compression scenarios' do
let(:export_record) { double('Export', id: 1, name: 'test.zip', update!: true, file: double('File', attach: true)) }
before do
# Mock Export creation
exports_double = double('Exports')
allow(user).to receive(:exports).and_return(exports_double)
allow(exports_double).to receive(:create!).and_return(export_record)
allow(export_record).to receive(:update!)
allow(export_record).to receive_message_chain(:file, :attach)
# Mock all export services to prevent actual calls
allow(Users::ExportData::Areas).to receive(:new).and_return(double(call: []))
allow(Users::ExportData::Imports).to receive(:new).and_return(double(call: []))
allow(Users::ExportData::Exports).to receive(:new).and_return(double(call: []))
allow(Users::ExportData::Trips).to receive(:new).and_return(double(call: []))
allow(Users::ExportData::Stats).to receive(:new).and_return(double(call: []))
allow(Users::ExportData::Notifications).to receive(:new).and_return(double(call: []))
allow(Users::ExportData::Points).to receive(:new).and_return(double(call: []))
allow(Users::ExportData::Visits).to receive(:new).and_return(double(call: []))
allow(Users::ExportData::Places).to receive(:new).and_return(double(call: []))
allow(user).to receive(:safe_settings).and_return(double(settings: {}))
allow(File).to receive(:open).and_call_original
allow(File).to receive(:open).with(export_directory.join('data.json'), 'w').and_yield(StringIO.new)
# Use current file as placeholder for zip file
# NOTE(review): File.new handle is never closed (fd leak per example).
zip_file_io = File.new(__FILE__)
allow(File).to receive(:open).with(export_directory.join('export.zip')).and_return(zip_file_io)
# Mock notifications service
allow(service).to receive(:create_success_notification)
# Mock cleanup
allow(service).to receive(:cleanup_temporary_files)
allow_any_instance_of(Pathname).to receive(:exist?).and_return(true)
end
it 'calls create_zip_archive with correct parameters' do
expect(service).to receive(:create_zip_archive).with(export_directory, export_directory.join('export.zip'))
service.export
end
end
end
# White-box checks of private helpers via send/instance_variable_set.
describe 'private methods' do
describe '#export_directory' do
it 'generates correct directory path' do
allow(Time).to receive_message_chain(:current, :strftime).with('%Y%m%d_%H%M%S').and_return(timestamp)
# Call export to initialize the directory paths
service.instance_variable_set(:@export_directory, Rails.root.join('tmp', "#{user.email.gsub(/[^0-9A-Za-z._-]/, '_')}_#{timestamp}"))
expect(service.send(:export_directory).to_s).to include(user.email.gsub(/[^0-9A-Za-z._-]/, '_'))
expect(service.send(:export_directory).to_s).to include(timestamp)
end
end
describe '#files_directory' do
it 'returns files subdirectory of export directory' do
# Initialize the export directory first
service.instance_variable_set(:@export_directory, Rails.root.join('tmp', "test_export"))
service.instance_variable_set(:@files_directory, service.instance_variable_get(:@export_directory).join('files'))
files_dir = service.send(:files_directory)
expect(files_dir.to_s).to end_with('files')
end
end
describe '#cleanup_temporary_files' do
context 'when directory exists' do
before do
allow(File).to receive(:directory?).and_return(true)
allow(Rails.logger).to receive(:info)
end
it 'removes the directory' do
expect(FileUtils).to receive(:rm_rf).with(export_directory)
service.send(:cleanup_temporary_files, export_directory)
end
it 'logs the cleanup' do
expect(Rails.logger).to receive(:info).with("Cleaning up temporary export directory: #{export_directory}")
service.send(:cleanup_temporary_files, export_directory)
end
end
context 'when cleanup fails' do
before do
allow(File).to receive(:directory?).and_return(true)
allow(FileUtils).to receive(:rm_rf).and_raise(StandardError, 'Permission denied')
allow(Rails.logger).to receive(:error)
end
# Cleanup failures are intentionally swallowed (logged only).
it 'logs the error but does not re-raise' do
expect(Rails.logger).to receive(:error).with('Failed to cleanup temporary files: Permission denied')
expect { service.send(:cleanup_temporary_files, export_directory) }.not_to raise_error
end
end
context 'when directory does not exist' do
before do
allow(File).to receive(:directory?).and_return(false)
end
it 'does not attempt cleanup' do
expect(FileUtils).not_to receive(:rm_rf)
service.send(:cleanup_temporary_files, export_directory)
end
end
end
end
end