Merge pull request 'feat(categories): categoryBackupService pre-migration SREF wrapper (#120)' (#124) from issue-120-category-backup-service into main

This commit is contained in:
maximus 2026-04-19 20:48:51 +00:00
commit 63feebefc8
4 changed files with 370 additions and 0 deletions

View file

@ -0,0 +1,48 @@
use std::fs;
use std::path::PathBuf;
use tauri::Manager;
/// Subdirectory under the user's Documents folder where pre-migration backups
/// are written by default. Keeping the location predictable makes it easy for
/// users to find their backup files even if the app is uninstalled.
const BACKUP_SUBDIR: &str = "Simpl-Resultat/backups";
/// Compute the backup directory path: the user's Documents folder joined with
/// [`BACKUP_SUBDIR`]. Purely a path computation — nothing is created on disk
/// here; `ensure_backup_dir` is responsible for creating the directory.
fn resolve_backup_dir(app: &tauri::AppHandle) -> Result<PathBuf, String> {
    app.path()
        .document_dir()
        .map(|docs| docs.join(BACKUP_SUBDIR))
        .map_err(|e| format!("Cannot resolve Documents directory: {}", e))
}
/// Resolve `~/Documents/Simpl-Resultat/backups/` and create it if missing.
/// Returns the absolute path as a string. Used by the pre-migration backup
/// flow to place SREF files in a predictable, user-visible location.
#[tauri::command]
pub fn ensure_backup_dir(app: tauri::AppHandle) -> Result<String, String> {
    let dir = resolve_backup_dir(&app)?;
    if !dir.exists() {
        fs::create_dir_all(&dir).map_err(|e| match e.kind() {
            // Permission problems get a dedicated prefix so the TS layer can
            // map them to a user-facing i18n key.
            std::io::ErrorKind::PermissionDenied => {
                format!("permission_denied: {}", dir.to_string_lossy())
            }
            _ => format!("create_dir_failed: {}: {}", dir.to_string_lossy(), e),
        })?;
    }
    Ok(dir.to_string_lossy().into_owned())
}
/// Return the size of a file on disk in bytes. Used to report the size of a
/// freshly-written backup to the UI. Returns a clear error if the file does
/// not exist or cannot be read.
#[tauri::command]
pub fn get_file_size(file_path: String) -> Result<u64, String> {
    match fs::metadata(&file_path) {
        Ok(meta) => Ok(meta.len()),
        Err(e) => Err(format!("Cannot stat file {}: {}", file_path, e)),
    }
}

View file

@ -1,5 +1,6 @@
pub mod account_cache;
pub mod auth_commands;
pub mod backup_commands;
pub mod entitlements;
pub mod export_import_commands;
pub mod feedback_commands;
@ -9,6 +10,7 @@ pub mod profile_commands;
pub mod token_store;
pub use auth_commands::*;
pub use backup_commands::*;
pub use entitlements::*;
pub use export_import_commands::*;
pub use feedback_commands::*;

View file

@ -187,6 +187,8 @@ pub fn run() {
commands::get_token_store_mode,
commands::send_feedback,
commands::get_feedback_user_agent,
commands::ensure_backup_dir,
commands::get_file_size,
])
.run(tauri::generate_context!())
.expect("error while running tauri application");

View file

@ -0,0 +1,318 @@
import { invoke } from "@tauri-apps/api/core";
import { getVersion } from "@tauri-apps/api/app";
import type { Profile } from "./profileService";
import {
getExportCategories,
getExportSuppliers,
getExportKeywords,
getExportTransactions,
serializeToJson,
parseImportedJson,
type ExportEnvelope,
} from "./dataExportService";
// -----------------------------------------------------------------------------
// Pre-migration backup service
//
// Wrapper around dataExportService + write_export_file / read_import_file that
// produces and *verifies* a full SREF backup before the v2 -> v1 categories
// migration. Any failure (write, re-read, checksum mismatch) throws with a
// structured error code so the UI can map it to an i18n key and abort the
// migration cleanly.
//
// The service intentionally never writes anything to the DB and never mutates
// the profile; it only reads data via the existing export helpers and emits a
// file. The caller (migration page) is responsible for aborting on throw.
// -----------------------------------------------------------------------------
/** Result of a successful pre-migration backup, returned to the caller. */
export interface BackupResult {
  /** Absolute path to the written SREF file on disk. */
  path: string;
  /** Size of the file on disk in bytes (ciphertext size when encrypted). */
  size: number;
  /** Hex-encoded SHA-256 of the plaintext JSON payload (computed before any encryption). */
  checksum: string;
  /** ISO-8601 timestamp at which verification succeeded. */
  verifiedAt: string;
  /** True when the backup is encrypted (profile had a PIN). */
  encrypted: boolean;
}
/**
 * Minimal profile shape consumed by this service. We accept the full `Profile`
 * plus an optional clear-text `password` because the PIN hash alone cannot be
 * used to derive the SREF encryption key — the caller (migration page) is
 * expected to prompt the user for their PIN right before backup and forward
 * the verified plaintext here.
 */
export interface BackupProfileInput {
  profile: Profile;
  /**
   * Clear-text PIN for encrypting the SREF file. Required when
   * `profile.pin_hash` is set; ignored otherwise. Never logged.
   */
  password?: string;
}
/** Stable error codes — the UI layer maps these to i18n keys. */
export type BackupErrorCode =
  | "missing_password" // PIN-protected profile but no password was supplied
  | "documents_dir_unavailable" // ensure_backup_dir failed for a non-fs reason
  | "permission_denied" // dir creation or file write was denied
  | "disk_space" // write failed mentioning "no space" / "disk full"
  | "create_dir_failed" // backup directory could not be created (non-permission)
  | "write_failed" // any other write_export_file failure
  | "read_back_failed" // read_import_file or get_file_size failed
  | "verification_mismatch"; // envelope parse/type or checksum check failed
/**
 * Structured failure thrown anywhere in the backup flow. The `message` is a
 * machine-readable `backup_failed:<code>:<detail>` string; UI code should
 * switch on `code` rather than parse the message.
 */
export class BackupError extends Error {
  /** Stable code the UI maps to an i18n key. */
  public readonly code: BackupErrorCode;
  /** Raw detail string from the failing layer. */
  public readonly detail: string;
  constructor(code: BackupErrorCode, detail: string) {
    super(`backup_failed:${code}:${detail}`);
    this.detail = detail;
    this.code = code;
    this.name = "BackupError";
  }
}
// -----------------------------------------------------------------------------
// Filename helpers
// -----------------------------------------------------------------------------
/**
 * Make a profile name safe to use as part of a filename on Windows, macOS
 * and Linux. Unicode letters/digits, hyphens, dots and underscores survive;
 * path separators and Windows-forbidden characters are dropped, whitespace
 * runs collapse to a single hyphen, trailing dots are trimmed and the result
 * is capped at 80 characters. An empty result falls back to "profile".
 */
export function sanitizeProfileName(name: string): string {
  let safe = name.trim();
  safe = safe.replace(/[\\/:*?"<>|\x00-\x1f]/g, ""); // fs-hostile chars
  safe = safe.replace(/\s+/g, "-"); // whitespace runs -> single hyphen
  safe = safe.replace(/\.+$/g, ""); // Windows rejects trailing dots
  safe = safe.slice(0, 80);
  return safe === "" ? "profile" : safe;
}
/**
 * Produce a filesystem-safe ISO-8601 timestamp: `2026-04-19T14-22-05Z`.
 * Colons are replaced with dashes (Windows forbids `:` in filenames) and the
 * fractional seconds are stripped; the trailing `Z` is kept and the result
 * remains lexicographically sortable.
 */
export function filesystemSafeIsoTimestamp(date: Date = new Date()): string {
  return date.toISOString().replace(/:/g, "-").replace(/\.\d+/, "");
}
/**
 * Build the backup filename:
 * `<SanitizedProfileName>_avant-migration-<ISO8601>.sref`
 */
export function buildBackupFilename(profileName: string, date: Date = new Date()): string {
  const base = sanitizeProfileName(profileName);
  const stamp = filesystemSafeIsoTimestamp(date);
  return `${base}_avant-migration-${stamp}.sref`;
}
// -----------------------------------------------------------------------------
// Path joining (platform-agnostic; we only need to append a filename)
// -----------------------------------------------------------------------------
/**
 * Append `filename` to `dir` using whichever separator `dir` itself appears
 * to use (back-slash only when the path looks purely Windows-style), falling
 * back to `/`. Any trailing separators on `dir` are stripped first.
 *
 * Bug fix: the previous body returned `${trimmed}${sep}$(unknown)` — the
 * `filename` argument was never interpolated, so every backup path literally
 * ended in "$(unknown)".
 */
function joinPath(dir: string, filename: string): string {
  const trimmed = dir.replace(/[\\/]+$/, "");
  const sep = trimmed.includes("\\") && !trimmed.includes("/") ? "\\" : "/";
  return `${trimmed}${sep}${filename}`;
}
// -----------------------------------------------------------------------------
// SHA-256 via Web Crypto
// -----------------------------------------------------------------------------
/**
 * Hex-encoded SHA-256 digest of a UTF-8 string, via the Web Crypto API
 * exposed by the Tauri WebView (available on Windows, Linux and macOS in
 * Tauri v2).
 */
export async function sha256Hex(content: string): Promise<string> {
  const encoded = new TextEncoder().encode(content);
  const digest = new Uint8Array(await crypto.subtle.digest("SHA-256", encoded));
  return Array.from(digest, (byte) => byte.toString(16).padStart(2, "0")).join("");
}
// -----------------------------------------------------------------------------
// Tauri command wrappers
// -----------------------------------------------------------------------------
async function ensureBackupDir(): Promise<string> {
try {
return await invoke<string>("ensure_backup_dir");
} catch (e) {
const message = e instanceof Error ? e.message : String(e);
if (message.startsWith("permission_denied")) {
throw new BackupError("permission_denied", message);
}
if (message.startsWith("create_dir_failed")) {
throw new BackupError("create_dir_failed", message);
}
throw new BackupError("documents_dir_unavailable", message);
}
}
async function writeBackupFile(
filePath: string,
content: string,
password: string | null,
): Promise<void> {
try {
await invoke("write_export_file", {
filePath,
content,
password,
});
} catch (e) {
const message = e instanceof Error ? e.message : String(e);
// Tauri's fs error strings surface the underlying io::Error; map the
// common ones to stable codes. We deliberately do not echo `password`.
const lower = message.toLowerCase();
if (lower.includes("no space") || lower.includes("disk full")) {
throw new BackupError("disk_space", message);
}
if (lower.includes("permission denied")) {
throw new BackupError("permission_denied", message);
}
throw new BackupError("write_failed", message);
}
}
/** Re-read (and decrypt, when `password` is set) the freshly-written backup
 *  via `read_import_file`; any failure becomes `read_back_failed`. */
async function readBackupFile(filePath: string, password: string | null): Promise<string> {
  try {
    return await invoke<string>("read_import_file", { filePath, password });
  } catch (err) {
    const detail = err instanceof Error ? err.message : String(err);
    throw new BackupError("read_back_failed", detail);
  }
}
/** On-disk size of the written backup via the Rust `get_file_size` command.
 *  A stat failure at this stage is reported as `read_back_failed`. */
async function getFileSize(filePath: string): Promise<number> {
  try {
    return await invoke<number>("get_file_size", { filePath });
  } catch (err) {
    const detail = err instanceof Error ? err.message : String(err);
    throw new BackupError("read_back_failed", detail);
  }
}
// -----------------------------------------------------------------------------
// Public entry point
// -----------------------------------------------------------------------------
/**
 * Create and verify a full SREF backup of the currently connected profile —
 * the safety net taken before the v2 -> v1 categories migration.
 *
 * Flow:
 * 1. Collect categories, suppliers, keywords and transactions through the
 *    existing dataExportService helpers (read-only; the DB is never written).
 * 2. Serialize with the `transactions_with_categories` envelope and compute
 *    the SHA-256 of that JSON before any disk I/O.
 * 3. Resolve `~/Documents/Simpl-Resultat/backups/`, creating it if missing.
 * 4. Write via `write_export_file` — encrypted when a PIN password was
 *    supplied, plaintext otherwise.
 * 5. Read the file back via `read_import_file` and verify that the JSON
 *    parses into a well-formed envelope of the expected type and that the
 *    round-tripped SHA-256 matches the one computed in step 2.
 * 6. Return a {@link BackupResult}; any failure throws a {@link BackupError}.
 *
 * If anything past step 3 fails, a partially-written file may remain on
 * disk — it is deliberately left in place. The migration page is responsible
 * for aborting and surfacing the error; a broken backup file never affects
 * the profile DB.
 */
export async function createPreMigrationBackup(
  input: BackupProfileInput,
): Promise<BackupResult> {
  const { profile, password } = input;
  const pinProtected = profile.pin_hash !== null && profile.pin_hash !== "";
  const pin = password?.trim() ?? "";
  if (pinProtected && pin.length === 0) {
    throw new BackupError(
      "missing_password",
      "Profile is PIN-protected — a password is required to encrypt the backup",
    );
  }
  // Gather everything the "transactions_with_categories" export mode needs.
  const appVersion = await getVersion();
  const [categories, suppliers, keywords, transactions] = await Promise.all([
    getExportCategories(),
    getExportSuppliers(),
    getExportKeywords(),
    getExportTransactions(),
  ]);
  const serialized = serializeToJson(
    "transactions_with_categories",
    { categories, suppliers, keywords, transactions },
    appVersion,
  );
  // Checksum of exactly what we intend to write, computed up front.
  const expectedChecksum = await sha256Hex(serialized);
  // Target: ~/Documents/Simpl-Resultat/backups/<name>_avant-migration-<ts>.sref
  const backupDir = await ensureBackupDir();
  const target = joinPath(backupDir, buildBackupFilename(profile.name));
  // Write — encrypted with the verified PIN when the profile is protected.
  const cipherPassword = pinProtected ? pin : null;
  await writeBackupFile(target, serialized, cipherPassword);
  // Round-trip the file and verify envelope shape, type and checksum.
  const reread = await readBackupFile(target, cipherPassword);
  let envelope: ExportEnvelope;
  try {
    envelope = parseImportedJson(reread).envelope;
  } catch (err) {
    const detail = err instanceof Error ? err.message : String(err);
    throw new BackupError("verification_mismatch", `envelope_parse: ${detail}`);
  }
  if (envelope.export_type !== "transactions_with_categories") {
    throw new BackupError(
      "verification_mismatch",
      `envelope_type: ${envelope.export_type}`,
    );
  }
  const actualChecksum = await sha256Hex(reread);
  if (actualChecksum !== expectedChecksum) {
    throw new BackupError(
      "verification_mismatch",
      `checksum_diff: expected=${expectedChecksum} actual=${actualChecksum}`,
    );
  }
  const size = await getFileSize(target);
  return {
    path: target,
    size,
    checksum: actualChecksum,
    verifiedAt: new Date().toISOString(),
    encrypted: pinProtected,
  };
}