From 49e0bd2c94ef6e59510acd969b4e0de6ab728b25 Mon Sep 17 00:00:00 2001 From: Le-King-Fu Date: Sun, 8 Feb 2026 03:38:46 +0000 Subject: [PATCH] feat: implement CSV import wizard with folder-based source detection Full import pipeline: Rust backend (6 Tauri commands for folder scanning, file reading, encoding detection, hashing, folder picker), TypeScript services (DB, import sources, transactions, auto-categorization, user preferences), utility parsers (French amounts, multi-format dates), 12 React components forming a 7-step wizard (source list, config, column mapping, preview, duplicate detection, import, report), and i18n support (FR/EN, ~60 keys each). Co-Authored-By: Claude Opus 4.6 --- src-tauri/Cargo.lock | 79 ++ src-tauri/Cargo.toml | 4 + src-tauri/capabilities/default.json | 3 +- src-tauri/src/commands/fs_commands.rs | 209 ++++++ src-tauri/src/commands/mod.rs | 3 + src-tauri/src/lib.rs | 10 + src/components/import/ColumnMappingEditor.tsx | 163 ++++ src/components/import/DuplicateCheckPanel.tsx | 149 ++++ src/components/import/FilePreviewTable.tsx | 102 +++ src/components/import/ImportConfirmation.tsx | 98 +++ src/components/import/ImportFolderConfig.tsx | 64 ++ src/components/import/ImportProgress.tsx | 55 ++ src/components/import/ImportReportPanel.tsx | 127 ++++ src/components/import/SourceCard.tsx | 55 ++ src/components/import/SourceConfigPanel.tsx | 230 ++++++ src/components/import/SourceList.tsx | 64 ++ src/components/import/WizardNavigation.tsx | 65 ++ src/hooks/useImportWizard.ts | 708 ++++++++++++++++++ src/i18n/locales/en.json | 98 ++- src/i18n/locales/fr.json | 98 ++- src/pages/ImportPage.tsx | 154 +++- src/services/categorizationService.ts | 76 ++ src/services/db.ts | 10 + src/services/importSourceService.ts | 98 +++ src/services/importedFileService.ts | 61 ++ src/services/transactionService.ts | 91 +++ src/services/userPreferenceService.ts | 32 + src/shared/types/index.ts | 83 ++ src/utils/amountParser.ts | 26 + src/utils/dateParser.ts | 47 ++ 30 
files changed, 3054 insertions(+), 8 deletions(-) create mode 100644 src-tauri/src/commands/fs_commands.rs create mode 100644 src-tauri/src/commands/mod.rs create mode 100644 src/components/import/ColumnMappingEditor.tsx create mode 100644 src/components/import/DuplicateCheckPanel.tsx create mode 100644 src/components/import/FilePreviewTable.tsx create mode 100644 src/components/import/ImportConfirmation.tsx create mode 100644 src/components/import/ImportFolderConfig.tsx create mode 100644 src/components/import/ImportProgress.tsx create mode 100644 src/components/import/ImportReportPanel.tsx create mode 100644 src/components/import/SourceCard.tsx create mode 100644 src/components/import/SourceConfigPanel.tsx create mode 100644 src/components/import/SourceList.tsx create mode 100644 src/components/import/WizardNavigation.tsx create mode 100644 src/hooks/useImportWizard.ts create mode 100644 src/services/categorizationService.ts create mode 100644 src/services/db.ts create mode 100644 src/services/importSourceService.ts create mode 100644 src/services/importedFileService.ts create mode 100644 src/services/transactionService.ts create mode 100644 src/services/userPreferenceService.ts create mode 100644 src/utils/amountParser.ts create mode 100644 src/utils/dateParser.ts diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock index 5079cd0..59a6be6 100644 --- a/src-tauri/Cargo.lock +++ b/src-tauri/Cargo.lock @@ -762,6 +762,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" dependencies = [ "bitflags 2.10.0", + "block2", + "libc", "objc2", ] @@ -870,6 +872,15 @@ version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ef6b89e5b37196644d8796de5268852ff179b44e96276cf4290264843743bb7" +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + [[package]] name = "endi" version = "1.1.1" @@ -3152,6 +3163,30 @@ dependencies = [ "web-sys", ] +[[package]] +name = "rfd" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a15ad77d9e70a92437d8f74c35d99b4e4691128df018833e99f90bcd36152672" +dependencies = [ + "block2", + "dispatch2", + "glib-sys", + "gobject-sys", + "gtk-sys", + "js-sys", + "log", + "objc2", + "objc2-app-kit", + "objc2-core-foundation", + "objc2-foundation", + "raw-window-handle", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "windows-sys 0.60.2", +] + [[package]] name = "rsa" version = "0.9.10" @@ -3528,12 +3563,16 @@ checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" name = "simpl-result" version = "0.1.0" dependencies = [ + "encoding_rs", "serde", "serde_json", + "sha2", "tauri", "tauri-build", + "tauri-plugin-dialog", "tauri-plugin-opener", "tauri-plugin-sql", + "walkdir", ] [[package]] @@ -4144,6 +4183,46 @@ dependencies = [ "walkdir", ] +[[package]] +name = "tauri-plugin-dialog" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9204b425d9be8d12aa60c2a83a289cf7d1caae40f57f336ed1155b3a5c0e359b" +dependencies = [ + "log", + "raw-window-handle", + "rfd", + "serde", + "serde_json", + "tauri", + "tauri-plugin", + "tauri-plugin-fs", + "thiserror 2.0.18", + "url", +] + +[[package]] +name = "tauri-plugin-fs" +version = "2.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed390cc669f937afeb8b28032ce837bac8ea023d975a2e207375ec05afaf1804" +dependencies = [ + "anyhow", + "dunce", + "glob", + "percent-encoding", + "schemars 0.8.22", + "serde", + "serde_json", + "serde_repr", + "tauri", + "tauri-plugin", + "tauri-utils", + "thiserror 2.0.18", + "toml 0.9.11+spec-1.1.0", + "url", +] + [[package]] name = "tauri-plugin-opener" version = 
"2.5.3" diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml index 299c164..ab65fad 100644 --- a/src-tauri/Cargo.toml +++ b/src-tauri/Cargo.toml @@ -21,6 +21,10 @@ tauri-build = { version = "2", features = [] } tauri = { version = "2", features = [] } tauri-plugin-opener = "2" tauri-plugin-sql = { version = "2", features = ["sqlite"] } +tauri-plugin-dialog = "2" serde = { version = "1", features = ["derive"] } serde_json = "1" +sha2 = "0.10" +encoding_rs = "0.8" +walkdir = "2" diff --git a/src-tauri/capabilities/default.json b/src-tauri/capabilities/default.json index 139854f..8aa143c 100644 --- a/src-tauri/capabilities/default.json +++ b/src-tauri/capabilities/default.json @@ -8,6 +8,7 @@ "opener:default", "sql:default", "sql:allow-execute", - "sql:allow-select" + "sql:allow-select", + "dialog:default" ] } diff --git a/src-tauri/src/commands/fs_commands.rs b/src-tauri/src/commands/fs_commands.rs new file mode 100644 index 0000000..8d7e989 --- /dev/null +++ b/src-tauri/src/commands/fs_commands.rs @@ -0,0 +1,209 @@ +use encoding_rs::{UTF_8, WINDOWS_1252, ISO_8859_15}; +use serde::Serialize; +use sha2::{Digest, Sha256}; +use std::fs; +use std::path::Path; +use tauri_plugin_dialog::DialogExt; +use walkdir::WalkDir; + +#[derive(Debug, Serialize, Clone)] +pub struct ScannedFile { + pub filename: String, + pub file_path: String, + pub size_bytes: u64, + pub modified_at: String, +} + +#[derive(Debug, Serialize, Clone)] +pub struct ScannedSource { + pub folder_name: String, + pub folder_path: String, + pub files: Vec, +} + +#[tauri::command] +pub fn scan_import_folder(folder_path: String) -> Result, String> { + let root = Path::new(&folder_path); + if !root.is_dir() { + return Err(format!("Folder does not exist: {}", folder_path)); + } + + let mut sources: Vec = Vec::new(); + + let entries = fs::read_dir(root).map_err(|e| format!("Cannot read folder: {}", e))?; + + for entry in entries { + let entry = entry.map_err(|e| format!("Error reading entry: {}", e))?; + let 
path = entry.path(); + + if !path.is_dir() { + continue; + } + + let folder_name = path + .file_name() + .unwrap_or_default() + .to_string_lossy() + .to_string(); + + // Skip hidden folders + if folder_name.starts_with('.') { + continue; + } + + let mut files: Vec = Vec::new(); + + for file_entry in WalkDir::new(&path).max_depth(1).into_iter().flatten() { + let file_path = file_entry.path(); + if !file_path.is_file() { + continue; + } + + let ext = file_path + .extension() + .unwrap_or_default() + .to_string_lossy() + .to_lowercase(); + + if ext != "csv" && ext != "txt" { + continue; + } + + let metadata = fs::metadata(file_path) + .map_err(|e| format!("Cannot read metadata: {}", e))?; + + let modified_at = metadata + .modified() + .map(|t| { + let duration = t + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default(); + duration.as_secs().to_string() + }) + .unwrap_or_default(); + + files.push(ScannedFile { + filename: file_path + .file_name() + .unwrap_or_default() + .to_string_lossy() + .to_string(), + file_path: file_path.to_string_lossy().to_string(), + size_bytes: metadata.len(), + modified_at, + }); + } + + files.sort_by(|a, b| a.filename.cmp(&b.filename)); + + sources.push(ScannedSource { + folder_name, + folder_path: path.to_string_lossy().to_string(), + files, + }); + } + + sources.sort_by(|a, b| a.folder_name.cmp(&b.folder_name)); + + Ok(sources) +} + +#[tauri::command] +pub fn read_file_content(file_path: String, encoding: String) -> Result { + let bytes = fs::read(&file_path).map_err(|e| format!("Cannot read file: {}", e))?; + + let content = decode_bytes(&bytes, &encoding)?; + Ok(content) +} + +#[tauri::command] +pub fn hash_file(file_path: String) -> Result { + let bytes = fs::read(&file_path).map_err(|e| format!("Cannot read file: {}", e))?; + let mut hasher = Sha256::new(); + hasher.update(&bytes); + let result = hasher.finalize(); + Ok(format!("{:x}", result)) +} + +#[tauri::command] +pub fn detect_encoding(file_path: String) -> Result { + 
let bytes = fs::read(&file_path).map_err(|e| format!("Cannot read file: {}", e))?; + + // Check BOM + if bytes.starts_with(&[0xEF, 0xBB, 0xBF]) { + return Ok("utf-8".to_string()); + } + if bytes.starts_with(&[0xFF, 0xFE]) { + return Ok("utf-16le".to_string()); + } + if bytes.starts_with(&[0xFE, 0xFF]) { + return Ok("utf-16be".to_string()); + } + + // Try UTF-8 first + if std::str::from_utf8(&bytes).is_ok() { + return Ok("utf-8".to_string()); + } + + // Default to windows-1252 for French bank CSVs + Ok("windows-1252".to_string()) +} + +#[tauri::command] +pub fn get_file_preview( + file_path: String, + encoding: String, + max_lines: usize, +) -> Result { + let bytes = fs::read(&file_path).map_err(|e| format!("Cannot read file: {}", e))?; + let content = decode_bytes(&bytes, &encoding)?; + + let lines: Vec<&str> = content.lines().take(max_lines).collect(); + Ok(lines.join("\n")) +} + +#[tauri::command] +pub async fn pick_folder(app: tauri::AppHandle) -> Result, String> { + let folder = app + .dialog() + .file() + .blocking_pick_folder(); + + Ok(folder.map(|f| f.to_string())) +} + +fn decode_bytes(bytes: &[u8], encoding: &str) -> Result { + // Strip BOM if present + let bytes = if bytes.starts_with(&[0xEF, 0xBB, 0xBF]) { + &bytes[3..] 
+ } else { + bytes + }; + + match encoding.to_lowercase().as_str() { + "utf-8" | "utf8" => { + String::from_utf8(bytes.to_vec()).map_err(|e| format!("UTF-8 decode error: {}", e)) + } + "windows-1252" | "cp1252" => { + let (cow, _, had_errors) = WINDOWS_1252.decode(bytes); + if had_errors { + Err("Windows-1252 decode error".to_string()) + } else { + Ok(cow.into_owned()) + } + } + "iso-8859-1" | "iso-8859-15" | "latin1" | "latin9" => { + let (cow, _, had_errors) = ISO_8859_15.decode(bytes); + if had_errors { + Err("ISO-8859-15 decode error".to_string()) + } else { + Ok(cow.into_owned()) + } + } + _ => { + // Fallback to UTF-8 + let (cow, _, _) = UTF_8.decode(bytes); + Ok(cow.into_owned()) + } + } +} diff --git a/src-tauri/src/commands/mod.rs b/src-tauri/src/commands/mod.rs new file mode 100644 index 0000000..7d7886c --- /dev/null +++ b/src-tauri/src/commands/mod.rs @@ -0,0 +1,3 @@ +pub mod fs_commands; + +pub use fs_commands::*; diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index 251f5f6..a65b9d71 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -1,3 +1,4 @@ +mod commands; mod database; use tauri_plugin_sql::{Migration, MigrationKind}; @@ -13,11 +14,20 @@ pub fn run() { tauri::Builder::default() .plugin(tauri_plugin_opener::init()) + .plugin(tauri_plugin_dialog::init()) .plugin( tauri_plugin_sql::Builder::default() .add_migrations("sqlite:simpl_resultat.db", migrations) .build(), ) + .invoke_handler(tauri::generate_handler![ + commands::scan_import_folder, + commands::read_file_content, + commands::hash_file, + commands::detect_encoding, + commands::get_file_preview, + commands::pick_folder, + ]) .run(tauri::generate_context!()) .expect("error while running tauri application"); } diff --git a/src/components/import/ColumnMappingEditor.tsx b/src/components/import/ColumnMappingEditor.tsx new file mode 100644 index 0000000..6bba264 --- /dev/null +++ b/src/components/import/ColumnMappingEditor.tsx @@ -0,0 +1,163 @@ +import { useTranslation } from 
"react-i18next"; +import type { ColumnMapping, AmountMode } from "../../shared/types"; + +interface ColumnMappingEditorProps { + headers: string[]; + mapping: ColumnMapping; + amountMode: AmountMode; + onMappingChange: (mapping: ColumnMapping) => void; + onAmountModeChange: (mode: AmountMode) => void; +} + +export default function ColumnMappingEditor({ + headers, + mapping, + amountMode, + onMappingChange, + onAmountModeChange, +}: ColumnMappingEditorProps) { + const { t } = useTranslation(); + + const columnOptions = headers.map((h, i) => ( + + )); + + const selectClass = + "w-full px-3 py-2 text-sm rounded-lg border border-[var(--border)] bg-[var(--card)] text-[var(--foreground)] focus:outline-none focus:ring-2 focus:ring-[var(--primary)]"; + + return ( +
+

+ {t("import.config.columnMapping")} +

+ +
+
+ + +
+ +
+ + +
+
+ +
+ +
+ + +
+
+ + {amountMode === "single" ? ( +
+ + +
+ ) : ( +
+
+ + +
+
+ + +
+
+ )} +
+ ); +} diff --git a/src/components/import/DuplicateCheckPanel.tsx b/src/components/import/DuplicateCheckPanel.tsx new file mode 100644 index 0000000..40114c3 --- /dev/null +++ b/src/components/import/DuplicateCheckPanel.tsx @@ -0,0 +1,149 @@ +import { useTranslation } from "react-i18next"; +import { AlertTriangle, CheckCircle, FileWarning } from "lucide-react"; +import type { DuplicateCheckResult } from "../../shared/types"; + +interface DuplicateCheckPanelProps { + result: DuplicateCheckResult; + onSkipDuplicates: () => void; + onIncludeAll: () => void; + skipDuplicates: boolean; +} + +export default function DuplicateCheckPanel({ + result, + onSkipDuplicates, + onIncludeAll, + skipDuplicates, +}: DuplicateCheckPanelProps) { + const { t } = useTranslation(); + + return ( +
+

+ {t("import.duplicates.title")} +

+ + {/* File-level duplicate */} + {result.fileAlreadyImported && ( +
+ +
+

+ {t("import.duplicates.fileAlreadyImported")} +

+

+ {t("import.duplicates.fileAlreadyImportedDesc")} +

+
+
+ )} + + {/* Row-level duplicates */} + {result.duplicateRows.length > 0 ? ( +
+
+ +
+

+ {t("import.duplicates.rowsFound", { + count: result.duplicateRows.length, + })} +

+

+ {t("import.duplicates.rowsFoundDesc")} +

+
+
+ + {/* Duplicate action */} +
+ + +
+ + {/* Duplicate table */} +
+ + + + + + + + + + + {result.duplicateRows.map((row) => ( + + + + + + + ))} + +
+ # + + {t("import.preview.date")} + + {t("import.preview.description")} + + {t("import.preview.amount")} +
+ {row.rowIndex + 1} + {row.date} + {row.description} + + {row.amount.toFixed(2)} +
+
+
+ ) : ( + !result.fileAlreadyImported && ( +
+ +

+ {t("import.duplicates.noneFound")} +

+
+ ) + )} + + {/* Summary */} +
+

+ {t("import.duplicates.summary", { + total: result.newRows.length + result.duplicateRows.length, + new: result.newRows.length, + duplicates: result.duplicateRows.length, + })} +

+
+
+ ); +} diff --git a/src/components/import/FilePreviewTable.tsx b/src/components/import/FilePreviewTable.tsx new file mode 100644 index 0000000..ed11dc1 --- /dev/null +++ b/src/components/import/FilePreviewTable.tsx @@ -0,0 +1,102 @@ +import { useTranslation } from "react-i18next"; +import { AlertCircle } from "lucide-react"; +import type { ParsedRow } from "../../shared/types"; + +interface FilePreviewTableProps { + rows: ParsedRow[]; +} + +export default function FilePreviewTable({ + rows, +}: FilePreviewTableProps) { + const { t } = useTranslation(); + + if (rows.length === 0) { + return ( +
+ {t("import.preview.noData")} +
+ ); + } + + const errorCount = rows.filter((r) => r.error).length; + + return ( +
+
+

+ {t("import.preview.title")} +

+
+ + {t("import.preview.rowCount", { count: rows.length })} + + {errorCount > 0 && ( + + + {t("import.preview.errorCount", { count: errorCount })} + + )} +
+
+ +
+ + + + + + + + + + + + {rows.map((row) => ( + + + + + + + + ))} + +
+ # + + {t("import.preview.date")} + + {t("import.preview.description")} + + {t("import.preview.amount")} + + {t("import.preview.raw")} +
+ {row.rowIndex + 1} + + {row.parsed?.date || ( + + {row.error || "—"} + + )} + + {row.parsed?.description || "—"} + + {row.parsed?.amount != null + ? row.parsed.amount.toFixed(2) + : "—"} + + {row.raw.join(" | ")} +
+
+
+ ); +} diff --git a/src/components/import/ImportConfirmation.tsx b/src/components/import/ImportConfirmation.tsx new file mode 100644 index 0000000..c3c32b8 --- /dev/null +++ b/src/components/import/ImportConfirmation.tsx @@ -0,0 +1,98 @@ +import { useTranslation } from "react-i18next"; +import { FileText, Settings, CheckCircle } from "lucide-react"; +import type { SourceConfig, ScannedFile, DuplicateCheckResult } from "../../shared/types"; + +interface ImportConfirmationProps { + sourceName: string; + config: SourceConfig; + selectedFiles: ScannedFile[]; + duplicateResult: DuplicateCheckResult; + skipDuplicates: boolean; +} + +export default function ImportConfirmation({ + sourceName, + config, + selectedFiles, + duplicateResult, + skipDuplicates, +}: ImportConfirmationProps) { + const { t } = useTranslation(); + + const rowsToImport = skipDuplicates + ? duplicateResult.newRows.length + : duplicateResult.newRows.length + duplicateResult.duplicateRows.length; + + return ( +
+

+ {t("import.confirm.title")} +

+ +
+ {/* Source */} +
+ +
+

{t("import.confirm.source")}

+

+ {sourceName} +

+
+
+ + {/* Files */} +
+ +
+

{t("import.confirm.files")}

+

+ {selectedFiles.map((f) => f.filename).join(", ")} +

+
+
+ + {/* Config summary */} +
+

+ {t("import.confirm.settings")} +

+
+
+ {t("import.config.delimiter")}:{" "} + {config.delimiter === ";" ? ";" : config.delimiter === "," ? "," : config.delimiter} +
+
+ {t("import.config.encoding")}:{" "} + {config.encoding} +
+
+ {t("import.config.dateFormat")}:{" "} + {config.dateFormat} +
+
+ {t("import.config.skipLines")}:{" "} + {config.skipLines} +
+
+
+ + {/* Rows to import */} +
+ +
+

+ {t("import.confirm.rowsToImport")} +

+

+ {t("import.confirm.rowsSummary", { + count: rowsToImport, + skipped: skipDuplicates ? duplicateResult.duplicateRows.length : 0, + })} +

+
+
+
+
+ ); +} diff --git a/src/components/import/ImportFolderConfig.tsx b/src/components/import/ImportFolderConfig.tsx new file mode 100644 index 0000000..7b83ad9 --- /dev/null +++ b/src/components/import/ImportFolderConfig.tsx @@ -0,0 +1,64 @@ +import { useTranslation } from "react-i18next"; +import { FolderOpen, RefreshCw } from "lucide-react"; + +interface ImportFolderConfigProps { + folderPath: string | null; + onBrowse: () => void; + onRefresh: () => void; + isLoading: boolean; +} + +export default function ImportFolderConfig({ + folderPath, + onBrowse, + onRefresh, + isLoading, +}: ImportFolderConfigProps) { + const { t } = useTranslation(); + + return ( +
+
+
+ +
+

+ {t("import.folder.label")} +

+ {folderPath ? ( +

+ {folderPath} +

+ ) : ( +

+ {t("import.folder.notConfigured")} +

+ )} +
+
+
+ {folderPath && ( + + )} + +
+
+
+ ); +} diff --git a/src/components/import/ImportProgress.tsx b/src/components/import/ImportProgress.tsx new file mode 100644 index 0000000..3bd8843 --- /dev/null +++ b/src/components/import/ImportProgress.tsx @@ -0,0 +1,55 @@ +import { useTranslation } from "react-i18next"; +import { Loader2 } from "lucide-react"; + +interface ImportProgressProps { + currentFile: string; + progress: number; + total: number; +} + +export default function ImportProgress({ + currentFile, + progress, + total, +}: ImportProgressProps) { + const { t } = useTranslation(); + + const percentage = total > 0 ? Math.round((progress / total) * 100) : 0; + + return ( +
+

+ {t("import.progress.title")} +

+ +
+ +

+ {t("import.progress.importing")} +

+

+ {currentFile} +

+ + {/* Progress bar */} +
+
+ + {progress} / {total} {t("import.progress.rows")} + + {percentage}% +
+
+
+
+
+
+
+ ); +} diff --git a/src/components/import/ImportReportPanel.tsx b/src/components/import/ImportReportPanel.tsx new file mode 100644 index 0000000..e17096c --- /dev/null +++ b/src/components/import/ImportReportPanel.tsx @@ -0,0 +1,127 @@ +import { useTranslation } from "react-i18next"; +import { + CheckCircle, + XCircle, + AlertTriangle, + Tag, + FileText, +} from "lucide-react"; +import type { ImportReport } from "../../shared/types"; + +interface ImportReportPanelProps { + report: ImportReport; + onDone: () => void; +} + +export default function ImportReportPanel({ + report, + onDone, +}: ImportReportPanelProps) { + const { t } = useTranslation(); + + const stats = [ + { + icon: FileText, + label: t("import.report.totalRows"), + value: report.totalRows, + color: "text-[var(--foreground)]", + }, + { + icon: CheckCircle, + label: t("import.report.imported"), + value: report.importedCount, + color: "text-emerald-500", + }, + { + icon: AlertTriangle, + label: t("import.report.skippedDuplicates"), + value: report.skippedDuplicates, + color: "text-amber-500", + }, + { + icon: XCircle, + label: t("import.report.errors"), + value: report.errorCount, + color: "text-red-500", + }, + { + icon: Tag, + label: t("import.report.categorized"), + value: report.categorizedCount, + color: "text-[var(--primary)]", + }, + { + icon: Tag, + label: t("import.report.uncategorized"), + value: report.uncategorizedCount, + color: "text-[var(--muted-foreground)]", + }, + ]; + + return ( +
+

+ {t("import.report.title")} +

+ + {/* Stats grid */} +
+ {stats.map((stat) => ( +
+
+ + + {stat.label} + +
+

{stat.value}

+
+ ))} +
+ + {/* Errors list */} + {report.errors.length > 0 && ( +
+

+ {t("import.report.errorDetails")} +

+
+ + + + + + + + + {report.errors.map((err, i) => ( + + + + + ))} + +
+ {t("import.report.row")} + + {t("import.report.errorMessage")} +
{err.rowIndex + 1}{err.message}
+
+
+ )} + + {/* Done button */} +
+ +
+
+ ); +} diff --git a/src/components/import/SourceCard.tsx b/src/components/import/SourceCard.tsx new file mode 100644 index 0000000..cd28b8a --- /dev/null +++ b/src/components/import/SourceCard.tsx @@ -0,0 +1,55 @@ +import { useTranslation } from "react-i18next"; +import { FolderOpen, FileText, CheckCircle } from "lucide-react"; +import type { ScannedSource } from "../../shared/types"; + +interface SourceCardProps { + source: ScannedSource; + isConfigured: boolean; + newFileCount: number; + onClick: () => void; +} + +export default function SourceCard({ + source, + isConfigured, + newFileCount, + onClick, +}: SourceCardProps) { + const { t } = useTranslation(); + + return ( + + ); +} diff --git a/src/components/import/SourceConfigPanel.tsx b/src/components/import/SourceConfigPanel.tsx new file mode 100644 index 0000000..1c88131 --- /dev/null +++ b/src/components/import/SourceConfigPanel.tsx @@ -0,0 +1,230 @@ +import { useTranslation } from "react-i18next"; +import type { + ScannedSource, + ScannedFile, + SourceConfig, + AmountMode, + ColumnMapping, +} from "../../shared/types"; +import ColumnMappingEditor from "./ColumnMappingEditor"; + +interface SourceConfigPanelProps { + source: ScannedSource; + config: SourceConfig; + selectedFiles: ScannedFile[]; + headers: string[]; + onConfigChange: (config: SourceConfig) => void; + onFileToggle: (file: ScannedFile) => void; + onSelectAllFiles: () => void; +} + +export default function SourceConfigPanel({ + source, + config, + selectedFiles, + headers, + onConfigChange, + onFileToggle, + onSelectAllFiles, +}: SourceConfigPanelProps) { + const { t } = useTranslation(); + + const selectClass = + "w-full px-3 py-2 text-sm rounded-lg border border-[var(--border)] bg-[var(--card)] text-[var(--foreground)] focus:outline-none focus:ring-2 focus:ring-[var(--primary)]"; + const inputClass = selectClass; + + const updateConfig = (partial: Partial) => { + onConfigChange({ ...config, ...partial }); + }; + + return ( +
+

+ {t("import.config.title")} — {source.folder_name} +

+ + {/* Source name */} +
+ + updateConfig({ name: e.target.value })} + className={inputClass} + /> +
+ + {/* Basic settings */} +
+
+ + +
+ +
+ + +
+ +
+ + +
+
+ + {/* Skip lines & header */} +
+
+ + + updateConfig({ skipLines: parseInt(e.target.value) || 0 }) + } + className={inputClass} + /> +
+
+ +
+
+ + {/* Sign convention */} +
+ +
+ + +
+
+ + {/* Column mapping */} + {headers.length > 0 && ( + + onConfigChange({ ...config, columnMapping: mapping }) + } + onAmountModeChange={(mode: AmountMode) => + onConfigChange({ ...config, amountMode: mode }) + } + /> + )} + + {/* File selection */} +
+
+

+ {t("import.config.selectFiles")} +

+ +
+
+ {source.files.map((file) => { + const isSelected = selectedFiles.some( + (f) => f.file_path === file.file_path + ); + return ( + + ); + })} +
+
+
+ ); +} diff --git a/src/components/import/SourceList.tsx b/src/components/import/SourceList.tsx new file mode 100644 index 0000000..2c98d8c --- /dev/null +++ b/src/components/import/SourceList.tsx @@ -0,0 +1,64 @@ +import { useTranslation } from "react-i18next"; +import { Inbox } from "lucide-react"; +import type { ScannedSource } from "../../shared/types"; +import SourceCard from "./SourceCard"; + +interface SourceListProps { + sources: ScannedSource[]; + configuredSourceNames: Set; + importedFileHashes: Map>; + onSelectSource: (source: ScannedSource) => void; +} + +export default function SourceList({ + sources, + configuredSourceNames, + importedFileHashes, + onSelectSource, +}: SourceListProps) { + const { t } = useTranslation(); + + if (sources.length === 0) { + return ( +
+ +

+ {t("import.sources.empty")} +

+
+ ); + } + + return ( +
+

+ {t("import.sources.title")} +

+
+ {sources.map((source) => { + const isConfigured = configuredSourceNames.has(source.folder_name); + // Count files not yet imported for this source + const sourceHashes = importedFileHashes.get(source.folder_name); + const newFileCount = sourceHashes + ? source.files.filter( + (f) => !sourceHashes.has(f.filename) + ).length + : source.files.length; + + return ( + onSelectSource(source)} + /> + ); + })} +
+
+ ); +} diff --git a/src/components/import/WizardNavigation.tsx b/src/components/import/WizardNavigation.tsx new file mode 100644 index 0000000..f2a37a3 --- /dev/null +++ b/src/components/import/WizardNavigation.tsx @@ -0,0 +1,65 @@ +import { useTranslation } from "react-i18next"; +import { ChevronLeft, ChevronRight, X } from "lucide-react"; + +interface WizardNavigationProps { + onBack?: () => void; + onNext?: () => void; + onCancel?: () => void; + nextLabel?: string; + backLabel?: string; + nextDisabled?: boolean; + showBack?: boolean; + showNext?: boolean; + showCancel?: boolean; +} + +export default function WizardNavigation({ + onBack, + onNext, + onCancel, + nextLabel, + backLabel, + nextDisabled = false, + showBack = true, + showNext = true, + showCancel = true, +}: WizardNavigationProps) { + const { t } = useTranslation(); + + return ( +
+
+ {showCancel && onCancel && ( + + )} +
+
+ {showBack && onBack && ( + + )} + {showNext && onNext && ( + + )} +
+
+ ); +} diff --git a/src/hooks/useImportWizard.ts b/src/hooks/useImportWizard.ts new file mode 100644 index 0000000..702711b --- /dev/null +++ b/src/hooks/useImportWizard.ts @@ -0,0 +1,708 @@ +import { useReducer, useCallback, useEffect } from "react"; +import { invoke } from "@tauri-apps/api/core"; +import Papa from "papaparse"; +import type { + ImportWizardStep, + ScannedSource, + ScannedFile, + SourceConfig, + ParsedRow, + DuplicateCheckResult, + ImportReport, + ImportSource, + ColumnMapping, +} from "../shared/types"; +import { + getImportFolder, + setImportFolder, +} from "../services/userPreferenceService"; +import { + getAllSources, + getSourceByName, + createSource, + updateSource, +} from "../services/importSourceService"; +import { + existsByHash, + createImportedFile, + updateFileStatus, + getFilesBySourceId, +} from "../services/importedFileService"; +import { + insertBatch, + findDuplicates, +} from "../services/transactionService"; +import { categorizeBatch } from "../services/categorizationService"; +import { parseDate } from "../utils/dateParser"; +import { parseFrenchAmount } from "../utils/amountParser"; + +interface WizardState { + step: ImportWizardStep; + importFolder: string | null; + scannedSources: ScannedSource[]; + selectedSource: ScannedSource | null; + selectedFiles: ScannedFile[]; + sourceConfig: SourceConfig; + existingSource: ImportSource | null; + parsedPreview: ParsedRow[]; + previewHeaders: string[]; + duplicateResult: DuplicateCheckResult | null; + skipDuplicates: boolean; + importReport: ImportReport | null; + importProgress: { current: number; total: number; file: string }; + isLoading: boolean; + error: string | null; + configuredSourceNames: Set; + importedFilesBySource: Map>; +} + +type WizardAction = + | { type: "SET_LOADING"; payload: boolean } + | { type: "SET_ERROR"; payload: string | null } + | { type: "SET_STEP"; payload: ImportWizardStep } + | { type: "SET_IMPORT_FOLDER"; payload: string | null } + | { type: 
"SET_SCANNED_SOURCES"; payload: ScannedSource[] } + | { type: "SET_SELECTED_SOURCE"; payload: ScannedSource } + | { type: "SET_SELECTED_FILES"; payload: ScannedFile[] } + | { type: "SET_SOURCE_CONFIG"; payload: SourceConfig } + | { type: "SET_EXISTING_SOURCE"; payload: ImportSource | null } + | { type: "SET_PARSED_PREVIEW"; payload: { rows: ParsedRow[]; headers: string[] } } + | { type: "SET_DUPLICATE_RESULT"; payload: DuplicateCheckResult } + | { type: "SET_SKIP_DUPLICATES"; payload: boolean } + | { type: "SET_IMPORT_REPORT"; payload: ImportReport } + | { type: "SET_IMPORT_PROGRESS"; payload: { current: number; total: number; file: string } } + | { type: "SET_CONFIGURED_SOURCES"; payload: { names: Set; files: Map> } } + | { type: "RESET" }; + +const defaultConfig: SourceConfig = { + name: "", + delimiter: ";", + encoding: "utf-8", + dateFormat: "DD/MM/YYYY", + skipLines: 0, + columnMapping: { date: 0, description: 1, amount: 2 }, + amountMode: "single", + signConvention: "negative_expense", + hasHeader: true, +}; + +const initialState: WizardState = { + step: "source-list", + importFolder: null, + scannedSources: [], + selectedSource: null, + selectedFiles: [], + sourceConfig: { ...defaultConfig }, + existingSource: null, + parsedPreview: [], + previewHeaders: [], + duplicateResult: null, + skipDuplicates: true, + importReport: null, + importProgress: { current: 0, total: 0, file: "" }, + isLoading: false, + error: null, + configuredSourceNames: new Set(), + importedFilesBySource: new Map(), +}; + +function reducer(state: WizardState, action: WizardAction): WizardState { + switch (action.type) { + case "SET_LOADING": + return { ...state, isLoading: action.payload }; + case "SET_ERROR": + return { ...state, error: action.payload, isLoading: false }; + case "SET_STEP": + return { ...state, step: action.payload }; + case "SET_IMPORT_FOLDER": + return { ...state, importFolder: action.payload }; + case "SET_SCANNED_SOURCES": + return { ...state, scannedSources: 
action.payload, isLoading: false }; + case "SET_SELECTED_SOURCE": + return { ...state, selectedSource: action.payload }; + case "SET_SELECTED_FILES": + return { ...state, selectedFiles: action.payload }; + case "SET_SOURCE_CONFIG": + return { ...state, sourceConfig: action.payload }; + case "SET_EXISTING_SOURCE": + return { ...state, existingSource: action.payload }; + case "SET_PARSED_PREVIEW": + return { + ...state, + parsedPreview: action.payload.rows, + previewHeaders: action.payload.headers, + isLoading: false, + }; + case "SET_DUPLICATE_RESULT": + return { ...state, duplicateResult: action.payload, isLoading: false }; + case "SET_SKIP_DUPLICATES": + return { ...state, skipDuplicates: action.payload }; + case "SET_IMPORT_REPORT": + return { ...state, importReport: action.payload, isLoading: false }; + case "SET_IMPORT_PROGRESS": + return { ...state, importProgress: action.payload }; + case "SET_CONFIGURED_SOURCES": + return { + ...state, + configuredSourceNames: action.payload.names, + importedFilesBySource: action.payload.files, + }; + case "RESET": + return { + ...initialState, + importFolder: state.importFolder, + scannedSources: state.scannedSources, + configuredSourceNames: state.configuredSourceNames, + importedFilesBySource: state.importedFilesBySource, + }; + default: + return state; + } +} + +export function useImportWizard() { + const [state, dispatch] = useReducer(reducer, initialState); + + // Load import folder on mount + useEffect(() => { + (async () => { + try { + const folder = await getImportFolder(); + dispatch({ type: "SET_IMPORT_FOLDER", payload: folder }); + if (folder) { + await scanFolderInternal(folder); + } + } catch { + // No folder configured yet + } + })(); + }, []); // eslint-disable-line react-hooks/exhaustive-deps + + const loadConfiguredSources = useCallback(async () => { + const sources = await getAllSources(); + const names = new Set(sources.map((s) => s.name)); + const files = new Map>(); + + for (const source of sources) { + 
const imported = await getFilesBySourceId(source.id); + files.set( + source.name, + new Set(imported.map((f) => f.filename)) + ); + } + + dispatch({ type: "SET_CONFIGURED_SOURCES", payload: { names, files } }); + }, []); + + const scanFolderInternal = useCallback( + async (folder: string) => { + dispatch({ type: "SET_LOADING", payload: true }); + try { + const sources = await invoke("scan_import_folder", { + folderPath: folder, + }); + dispatch({ type: "SET_SCANNED_SOURCES", payload: sources }); + await loadConfiguredSources(); + } catch (e) { + dispatch({ + type: "SET_ERROR", + payload: e instanceof Error ? e.message : String(e), + }); + } + }, + [loadConfiguredSources] + ); + + const browseFolder = useCallback(async () => { + try { + const folder = await invoke("pick_folder"); + if (folder) { + await setImportFolder(folder); + dispatch({ type: "SET_IMPORT_FOLDER", payload: folder }); + await scanFolderInternal(folder); + } + } catch (e) { + dispatch({ + type: "SET_ERROR", + payload: e instanceof Error ? 
e.message : String(e), + }); + } + }, [scanFolderInternal]); + + const refreshFolder = useCallback(async () => { + if (state.importFolder) { + await scanFolderInternal(state.importFolder); + } + }, [state.importFolder, scanFolderInternal]); + + const selectSource = useCallback( + async (source: ScannedSource) => { + dispatch({ type: "SET_SELECTED_SOURCE", payload: source }); + dispatch({ type: "SET_SELECTED_FILES", payload: source.files }); + + // Check if this source already has config in DB + const existing = await getSourceByName(source.folder_name); + dispatch({ type: "SET_EXISTING_SOURCE", payload: existing }); + + if (existing) { + // Restore config from DB + const mapping = JSON.parse(existing.column_mapping) as ColumnMapping; + const config: SourceConfig = { + name: existing.name, + delimiter: existing.delimiter, + encoding: existing.encoding, + dateFormat: existing.date_format, + skipLines: existing.skip_lines, + columnMapping: mapping, + amountMode: + mapping.debitAmount !== undefined ? 
"debit_credit" : "single", + signConvention: "negative_expense", + hasHeader: true, + }; + dispatch({ type: "SET_SOURCE_CONFIG", payload: config }); + } else { + // Auto-detect encoding for first file + let encoding = "utf-8"; + if (source.files.length > 0) { + try { + encoding = await invoke("detect_encoding", { + filePath: source.files[0].file_path, + }); + } catch { + // fallback to utf-8 + } + } + + dispatch({ + type: "SET_SOURCE_CONFIG", + payload: { + ...defaultConfig, + name: source.folder_name, + encoding, + }, + }); + } + + // Load preview headers from first file + if (source.files.length > 0) { + await loadHeaders(source.files[0].file_path, existing); + } + + dispatch({ type: "SET_STEP", payload: "source-config" }); + }, + [] // eslint-disable-line react-hooks/exhaustive-deps + ); + + const loadHeaders = async ( + filePath: string, + existing: ImportSource | null + ) => { + try { + const encoding = existing?.encoding || "utf-8"; + const preview = await invoke("get_file_preview", { + filePath, + encoding, + maxLines: 5, + }); + const delimiter = existing?.delimiter || ";"; + const parsed = Papa.parse(preview, { delimiter }); + if (parsed.data.length > 0) { + dispatch({ + type: "SET_PARSED_PREVIEW", + payload: { + rows: [], + headers: (parsed.data[0] as string[]).map((h) => h.trim()), + }, + }); + } + } catch { + // ignore preview errors + } + }; + + const updateConfig = useCallback((config: SourceConfig) => { + dispatch({ type: "SET_SOURCE_CONFIG", payload: config }); + }, []); + + const toggleFile = useCallback( + (file: ScannedFile) => { + const exists = state.selectedFiles.some( + (f) => f.file_path === file.file_path + ); + if (exists) { + dispatch({ + type: "SET_SELECTED_FILES", + payload: state.selectedFiles.filter( + (f) => f.file_path !== file.file_path + ), + }); + } else { + dispatch({ + type: "SET_SELECTED_FILES", + payload: [...state.selectedFiles, file], + }); + } + }, + [state.selectedFiles] + ); + + const selectAllFiles = useCallback(() => { 
+ if (state.selectedSource) { + dispatch({ + type: "SET_SELECTED_FILES", + payload: state.selectedSource.files, + }); + } + }, [state.selectedSource]); + + const parsePreview = useCallback(async () => { + if (state.selectedFiles.length === 0) return; + + dispatch({ type: "SET_LOADING", payload: true }); + dispatch({ type: "SET_ERROR", payload: null }); + + try { + const config = state.sourceConfig; + const allRows: ParsedRow[] = []; + let headers: string[] = []; + + for (const file of state.selectedFiles) { + const content = await invoke("read_file_content", { + filePath: file.file_path, + encoding: config.encoding, + }); + + const parsed = Papa.parse(content, { + delimiter: config.delimiter, + skipEmptyLines: true, + }); + + const data = parsed.data as string[][]; + const startIdx = config.skipLines + (config.hasHeader ? 1 : 0); + + if (config.hasHeader && data.length > config.skipLines) { + headers = data[config.skipLines].map((h) => h.trim()); + } + + for (let i = startIdx; i < data.length; i++) { + const raw = data[i]; + if (raw.length <= 1 && raw[0]?.trim() === "") continue; + + try { + const date = parseDate( + raw[config.columnMapping.date]?.trim() || "", + config.dateFormat + ); + const description = + raw[config.columnMapping.description]?.trim() || ""; + + let amount: number; + if (config.amountMode === "debit_credit") { + const debit = parseFrenchAmount( + raw[config.columnMapping.debitAmount ?? 0] || "" + ); + const credit = parseFrenchAmount( + raw[config.columnMapping.creditAmount ?? 0] || "" + ); + amount = isNaN(credit) ? -(isNaN(debit) ? 0 : debit) : credit; + } else { + amount = parseFrenchAmount( + raw[config.columnMapping.amount ?? 
0] || "" + ); + if (config.signConvention === "positive_expense" && !isNaN(amount)) { + amount = -amount; + } + } + + if (!date) { + allRows.push({ + rowIndex: allRows.length, + raw, + parsed: null, + error: "Invalid date", + }); + } else if (isNaN(amount)) { + allRows.push({ + rowIndex: allRows.length, + raw, + parsed: null, + error: "Invalid amount", + }); + } else { + allRows.push({ + rowIndex: allRows.length, + raw, + parsed: { date, description, amount }, + }); + } + } catch { + allRows.push({ + rowIndex: allRows.length, + raw, + parsed: null, + error: "Parse error", + }); + } + } + } + + dispatch({ + type: "SET_PARSED_PREVIEW", + payload: { rows: allRows, headers }, + }); + dispatch({ type: "SET_STEP", payload: "file-preview" }); + } catch (e) { + dispatch({ + type: "SET_ERROR", + payload: e instanceof Error ? e.message : String(e), + }); + } + }, [state.selectedFiles, state.sourceConfig]); + + const checkDuplicates = useCallback(async () => { + dispatch({ type: "SET_LOADING", payload: true }); + dispatch({ type: "SET_ERROR", payload: null }); + + try { + // Save/update source config in DB + const config = state.sourceConfig; + const mappingJson = JSON.stringify(config.columnMapping); + + let sourceId: number; + if (state.existingSource) { + sourceId = state.existingSource.id; + await updateSource(sourceId, { + name: config.name, + delimiter: config.delimiter, + encoding: config.encoding, + date_format: config.dateFormat, + column_mapping: mappingJson, + skip_lines: config.skipLines, + }); + } else { + sourceId = await createSource({ + name: config.name, + delimiter: config.delimiter, + encoding: config.encoding, + date_format: config.dateFormat, + column_mapping: mappingJson, + skip_lines: config.skipLines, + }); + } + + // Check file-level duplicates + let fileAlreadyImported = false; + let existingFileId: number | undefined; + + if (state.selectedFiles.length > 0) { + const hash = await invoke("hash_file", { + filePath: state.selectedFiles[0].file_path, + 
}); + const existing = await existsByHash(sourceId, hash); + if (existing) { + fileAlreadyImported = true; + existingFileId = existing.id; + } + } + + // Check row-level duplicates + const validRows = state.parsedPreview.filter((r) => r.parsed); + const duplicateMatches = await findDuplicates( + validRows.map((r) => ({ + date: r.parsed!.date, + description: r.parsed!.description, + amount: r.parsed!.amount, + })) + ); + + const duplicateIndices = new Set(duplicateMatches.map((d) => d.rowIndex)); + const newRows = validRows.filter( + (_, i) => !duplicateIndices.has(i) + ); + const duplicateRows = duplicateMatches.map((d) => ({ + rowIndex: d.rowIndex, + date: d.date, + description: d.description, + amount: d.amount, + existingTransactionId: d.existingTransactionId, + })); + + dispatch({ + type: "SET_DUPLICATE_RESULT", + payload: { + fileAlreadyImported, + existingFileId, + duplicateRows, + newRows, + }, + }); + dispatch({ type: "SET_STEP", payload: "duplicate-check" }); + } catch (e) { + dispatch({ + type: "SET_ERROR", + payload: e instanceof Error ? e.message : String(e), + }); + } + }, [state.sourceConfig, state.existingSource, state.selectedFiles, state.parsedPreview]); + + const executeImport = useCallback(async () => { + if (!state.duplicateResult) return; + + dispatch({ type: "SET_STEP", payload: "importing" }); + dispatch({ type: "SET_ERROR", payload: null }); + + try { + const config = state.sourceConfig; + + // Get or create source ID + const dbSource = await getSourceByName(config.name); + if (!dbSource) throw new Error("Source not found in database"); + const sourceId = dbSource.id; + + // Determine rows to import + const rowsToImport = state.skipDuplicates + ? 
state.duplicateResult.newRows + : [ + ...state.duplicateResult.newRows, + ...state.parsedPreview.filter( + (r) => + r.parsed && + state.duplicateResult!.duplicateRows.some( + (d) => d.rowIndex === r.rowIndex + ) + ), + ]; + + const validRows = rowsToImport.filter((r) => r.parsed); + const totalRows = validRows.length; + + dispatch({ + type: "SET_IMPORT_PROGRESS", + payload: { current: 0, total: totalRows, file: state.selectedFiles[0]?.filename || "" }, + }); + + // Create imported file record + let fileHash = ""; + if (state.selectedFiles.length > 0) { + fileHash = await invoke("hash_file", { + filePath: state.selectedFiles[0].file_path, + }); + } + + const fileId = await createImportedFile({ + source_id: sourceId, + filename: state.selectedFiles.map((f) => f.filename).join(", "), + file_hash: fileHash, + row_count: totalRows, + status: "completed", + }); + + // Auto-categorize + const descriptions = validRows.map((r) => r.parsed!.description); + const categorizations = await categorizeBatch(descriptions); + + let categorizedCount = 0; + let uncategorizedCount = 0; + const errors: Array<{ rowIndex: number; message: string }> = []; + + // Build transaction records + const transactions = validRows.map((row, i) => { + const cat = categorizations[i]; + if (cat.category_id) { + categorizedCount++; + } else { + uncategorizedCount++; + } + return { + date: row.parsed!.date, + description: row.parsed!.description, + amount: row.parsed!.amount, + source_id: sourceId, + file_id: fileId, + original_description: row.raw.join(config.delimiter), + category_id: cat.category_id, + supplier_id: cat.supplier_id, + }; + }); + + // Insert in batches + let importedCount = 0; + try { + importedCount = await insertBatch(transactions); + + dispatch({ + type: "SET_IMPORT_PROGRESS", + payload: { current: importedCount, total: totalRows, file: "done" }, + }); + } catch (e) { + await updateFileStatus(fileId, "error", 0, String(e)); + errors.push({ + rowIndex: 0, + message: e instanceof Error 
? e.message : String(e), + }); + } + + // Count errors from parsing + const parseErrors = state.parsedPreview.filter((r) => r.error); + for (const err of parseErrors) { + errors.push({ rowIndex: err.rowIndex, message: err.error || "Parse error" }); + } + + const report: ImportReport = { + totalRows: state.parsedPreview.length, + importedCount, + skippedDuplicates: state.skipDuplicates + ? state.duplicateResult.duplicateRows.length + : 0, + errorCount: errors.length, + categorizedCount, + uncategorizedCount, + errors, + }; + + dispatch({ type: "SET_IMPORT_REPORT", payload: report }); + dispatch({ type: "SET_STEP", payload: "report" }); + + // Refresh configured sources + await loadConfiguredSources(); + } catch (e) { + dispatch({ + type: "SET_ERROR", + payload: e instanceof Error ? e.message : String(e), + }); + dispatch({ type: "SET_STEP", payload: "confirm" }); + } + }, [ + state.duplicateResult, + state.sourceConfig, + state.skipDuplicates, + state.parsedPreview, + state.selectedFiles, + loadConfiguredSources, + ]); + + const goToStep = useCallback((step: ImportWizardStep) => { + dispatch({ type: "SET_STEP", payload: step }); + }, []); + + const reset = useCallback(() => { + dispatch({ type: "RESET" }); + }, []); + + return { + state, + browseFolder, + refreshFolder, + selectSource, + updateConfig, + toggleFile, + selectAllFiles, + parsePreview, + checkDuplicates, + executeImport, + goToStep, + reset, + setSkipDuplicates: (v: boolean) => + dispatch({ type: "SET_SKIP_DUPLICATES", payload: v }), + }; +} diff --git a/src/i18n/locales/en.json b/src/i18n/locales/en.json index c54c0be..57cc701 100644 --- a/src/i18n/locales/en.json +++ b/src/i18n/locales/en.json @@ -24,7 +24,103 @@ "source": "Source", "file": "File", "status": "Status", - "date": "Date" + "date": "Date", + "folder": { + "label": "Import folder", + "notConfigured": "No folder configured", + "browse": "Browse", + "refresh": "Refresh" + }, + "sources": { + "title": "Import Sources", + "empty": "No sources 
found. Create subfolders in your import folder with CSV files.", + "new": "new", + "fileCount_one": "{{count}} file", + "fileCount_other": "{{count}} files", + "fileCount": "{{count}} file(s)" + }, + "config": { + "title": "Source Configuration", + "sourceName": "Source name", + "delimiter": "Delimiter", + "semicolon": "Semicolon", + "comma": "Comma", + "tab": "Tab", + "encoding": "Encoding", + "dateFormat": "Date format", + "skipLines": "Lines to skip", + "hasHeader": "First row contains headers", + "signConvention": "Sign convention", + "negativeExpense": "Negative expenses", + "positiveExpense": "Positive expenses", + "columnMapping": "Column mapping", + "dateColumn": "Date column", + "descriptionColumn": "Description column", + "amountColumn": "Amount column", + "amountMode": "Amount mode", + "singleAmount": "Single amount", + "debitCredit": "Separate debit / credit", + "debitColumn": "Debit column", + "creditColumn": "Credit column", + "selectFiles": "Files to import", + "selectAll": "Select all" + }, + "preview": { + "title": "Data Preview", + "noData": "No data to display", + "rowCount": "{{count}} row(s)", + "errorCount": "{{count}} error(s)", + "date": "Date", + "description": "Description", + "amount": "Amount", + "raw": "Raw data", + "moreRows": "... 
and {{count}} more row(s)" + }, + "duplicates": { + "title": "Duplicate Detection", + "fileAlreadyImported": "This file has already been imported", + "fileAlreadyImportedDesc": "A file with the same content already exists in the database.", + "rowsFound": "{{count}} duplicate(s) found", + "rowsFoundDesc": "These rows match existing transactions.", + "noneFound": "No duplicates found", + "skip": "Skip duplicates", + "includeAll": "Import all", + "summary": "Total: {{total}} rows — {{new}} new — {{duplicates}} duplicate(s)" + }, + "confirm": { + "title": "Import Confirmation", + "source": "Source", + "files": "Files", + "settings": "Settings", + "rowsToImport": "Rows to import", + "rowsSummary": "{{count}} row(s) to import, {{skipped}} duplicate(s) skipped" + }, + "progress": { + "title": "Import in Progress", + "importing": "Importing...", + "rows": "rows" + }, + "report": { + "title": "Import Report", + "totalRows": "Total rows", + "imported": "Imported", + "skippedDuplicates": "Skipped duplicates", + "errors": "Errors", + "categorized": "Categorized", + "uncategorized": "Uncategorized", + "errorDetails": "Error details", + "row": "Row", + "errorMessage": "Error message", + "done": "Done" + }, + "wizard": { + "back": "Back", + "next": "Next", + "preview": "Preview", + "checkDuplicates": "Check duplicates", + "confirm": "Confirm", + "import": "Import" + } }, "transactions": { "title": "Transactions", diff --git a/src/i18n/locales/fr.json b/src/i18n/locales/fr.json index 639af69..570af8f 100644 --- a/src/i18n/locales/fr.json +++ b/src/i18n/locales/fr.json @@ -24,7 +24,103 @@ "source": "Source", "file": "Fichier", "status": "Statut", - "date": "Date" + "date": "Date", + "folder": { + "label": "Dossier d'import", + "notConfigured": "Aucun dossier configuré", + "browse": "Parcourir", + "refresh": "Actualiser" + }, + "sources": { + "title": "Sources d'import", + "empty": "Aucune source trouvée. 
Créez des sous-dossiers dans votre dossier d'import avec des fichiers CSV.", + "new": "nouveau", + "fileCount_one": "{{count}} fichier", + "fileCount_other": "{{count}} fichiers", + "fileCount": "{{count}} fichier(s)" + }, + "config": { + "title": "Configuration de la source", + "sourceName": "Nom de la source", + "delimiter": "Délimiteur", + "semicolon": "Point-virgule", + "comma": "Virgule", + "tab": "Tabulation", + "encoding": "Encodage", + "dateFormat": "Format de date", + "skipLines": "Lignes à ignorer", + "hasHeader": "La première ligne contient les en-têtes", + "signConvention": "Convention de signe", + "negativeExpense": "Dépenses négatives", + "positiveExpense": "Dépenses positives", + "columnMapping": "Mapping des colonnes", + "dateColumn": "Colonne date", + "descriptionColumn": "Colonne description", + "amountColumn": "Colonne montant", + "amountMode": "Mode montant", + "singleAmount": "Montant unique", + "debitCredit": "Débit / Crédit séparés", + "debitColumn": "Colonne débit", + "creditColumn": "Colonne crédit", + "selectFiles": "Fichiers à importer", + "selectAll": "Tout sélectionner" + }, + "preview": { + "title": "Aperçu des données", + "noData": "Aucune donnée à afficher", + "rowCount": "{{count}} ligne(s)", + "errorCount": "{{count}} erreur(s)", + "date": "Date", + "description": "Description", + "amount": "Montant", + "raw": "Données brutes", + "moreRows": "... 
et {{count}} ligne(s) supplémentaire(s)" + }, + "duplicates": { + "title": "Détection des doublons", + "fileAlreadyImported": "Ce fichier a déjà été importé", + "fileAlreadyImportedDesc": "Un fichier avec le même contenu existe déjà dans la base de données.", + "rowsFound": "{{count}} doublon(s) détecté(s)", + "rowsFoundDesc": "Ces lignes correspondent à des transactions déjà existantes.", + "noneFound": "Aucun doublon détecté", + "skip": "Ignorer les doublons", + "includeAll": "Tout importer", + "summary": "Total : {{total}} lignes — {{new}} nouvelles — {{duplicates}} doublon(s)" + }, + "confirm": { + "title": "Confirmation de l'import", + "source": "Source", + "files": "Fichiers", + "settings": "Paramètres", + "rowsToImport": "Lignes à importer", + "rowsSummary": "{{count}} ligne(s) à importer, {{skipped}} doublon(s) ignoré(s)" + }, + "progress": { + "title": "Import en cours", + "importing": "Import en cours...", + "rows": "lignes" + }, + "report": { + "title": "Rapport d'import", + "totalRows": "Total lignes", + "imported": "Importées", + "skippedDuplicates": "Doublons ignorés", + "errors": "Erreurs", + "categorized": "Catégorisées", + "uncategorized": "Non catégorisées", + "errorDetails": "Détail des erreurs", + "row": "Ligne", + "errorMessage": "Message d'erreur", + "done": "Terminé" + }, + "wizard": { + "back": "Retour", + "next": "Suivant", + "preview": "Aperçu", + "checkDuplicates": "Vérifier les doublons", + "confirm": "Confirmer", + "import": "Importer" + } }, "transactions": { "title": "Transactions", diff --git a/src/pages/ImportPage.tsx b/src/pages/ImportPage.tsx index d05bf68..4c6f5ad 100644 --- a/src/pages/ImportPage.tsx +++ b/src/pages/ImportPage.tsx @@ -1,16 +1,160 @@ import { useTranslation } from "react-i18next"; -import { Upload } from "lucide-react"; +import { useImportWizard } from "../hooks/useImportWizard"; +import ImportFolderConfig from "../components/import/ImportFolderConfig"; +import SourceList from "../components/import/SourceList"; 
+import SourceConfigPanel from "../components/import/SourceConfigPanel"; +import FilePreviewTable from "../components/import/FilePreviewTable"; +import DuplicateCheckPanel from "../components/import/DuplicateCheckPanel"; +import ImportConfirmation from "../components/import/ImportConfirmation"; +import ImportProgress from "../components/import/ImportProgress"; +import ImportReportPanel from "../components/import/ImportReportPanel"; +import WizardNavigation from "../components/import/WizardNavigation"; +import { AlertCircle } from "lucide-react"; export default function ImportPage() { const { t } = useTranslation(); + const { + state, + browseFolder, + refreshFolder, + selectSource, + updateConfig, + toggleFile, + selectAllFiles, + parsePreview, + checkDuplicates, + executeImport, + goToStep, + reset, + setSkipDuplicates, + } = useImportWizard(); return (

{t("import.title")}

-
- -

{t("import.dropzone")}

-
+ + {/* Error banner */} + {state.error && ( +
+ +

+ {state.error} +

+
+ )} + + {/* Folder config - always visible */} + + + {/* Wizard steps */} + {state.step === "source-list" && ( + + )} + + {state.step === "source-config" && state.selectedSource && ( +
+ + goToStep("source-list")} + onNext={parsePreview} + onCancel={reset} + nextLabel={t("import.wizard.preview")} + nextDisabled={ + state.selectedFiles.length === 0 || !state.sourceConfig.name + } + /> +
+ )} + + {state.step === "file-preview" && ( +
+ + {state.parsedPreview.length > 20 && ( +

+ {t("import.preview.moreRows", { + count: state.parsedPreview.length - 20, + })} +

+ )} + goToStep("source-config")} + onNext={checkDuplicates} + onCancel={reset} + nextLabel={t("import.wizard.checkDuplicates")} + nextDisabled={ + state.parsedPreview.filter((r) => r.parsed).length === 0 + } + /> +
+ )} + + {state.step === "duplicate-check" && state.duplicateResult && ( +
+ setSkipDuplicates(true)} + onIncludeAll={() => setSkipDuplicates(false)} + /> + goToStep("file-preview")} + onNext={() => goToStep("confirm")} + onCancel={reset} + nextLabel={t("import.wizard.confirm")} + /> +
+ )} + + {state.step === "confirm" && state.duplicateResult && ( +
+ + goToStep("duplicate-check")} + onNext={executeImport} + onCancel={reset} + nextLabel={t("import.wizard.import")} + showCancel={false} + /> +
+ )} + + {state.step === "importing" && ( + + )} + + {state.step === "report" && state.importReport && ( + + )}
); } diff --git a/src/services/categorizationService.ts b/src/services/categorizationService.ts new file mode 100644 index 0000000..e4c7023 --- /dev/null +++ b/src/services/categorizationService.ts @@ -0,0 +1,76 @@ +import { getDb } from "./db"; +import type { Keyword } from "../shared/types"; + +/** + * Normalize a description for keyword matching: + * - lowercase + * - strip accents via NFD decomposition + * - collapse whitespace + */ +function normalizeDescription(desc: string): string { + return desc + .normalize("NFD") + .replace(/[\u0300-\u036f]/g, "") + .toLowerCase() + .replace(/\s+/g, " ") + .trim(); +} + +interface CategorizationResult { + category_id: number | null; + supplier_id: number | null; +} + +/** + * Auto-categorize a single transaction description. + * Returns matching category_id and supplier_id, or nulls if no match. + */ +export async function categorizeDescription( + description: string +): Promise { + const db = await getDb(); + const keywords = await db.select( + "SELECT * FROM keywords WHERE is_active = 1 ORDER BY priority DESC" + ); + + const normalized = normalizeDescription(description); + + for (const kw of keywords) { + const normalizedKeyword = normalizeDescription(kw.keyword); + if (normalized.includes(normalizedKeyword)) { + return { + category_id: kw.category_id, + supplier_id: kw.supplier_id ?? null, + }; + } + } + + return { category_id: null, supplier_id: null }; +} + +/** + * Auto-categorize a batch of transactions (by their descriptions). + * Returns an array of results in the same order. 
+ */ +export async function categorizeBatch( + descriptions: string[] +): Promise { + const db = await getDb(); + const keywords = await db.select( + "SELECT * FROM keywords WHERE is_active = 1 ORDER BY priority DESC" + ); + + return descriptions.map((desc) => { + const normalized = normalizeDescription(desc); + for (const kw of keywords) { + const normalizedKeyword = normalizeDescription(kw.keyword); + if (normalized.includes(normalizedKeyword)) { + return { + category_id: kw.category_id, + supplier_id: kw.supplier_id ?? null, + }; + } + } + return { category_id: null, supplier_id: null }; + }); +} diff --git a/src/services/db.ts b/src/services/db.ts new file mode 100644 index 0000000..9a36d8a --- /dev/null +++ b/src/services/db.ts @@ -0,0 +1,10 @@ +import Database from "@tauri-apps/plugin-sql"; + +let dbInstance: Database | null = null; + +export async function getDb(): Promise { + if (!dbInstance) { + dbInstance = await Database.load("sqlite:simpl_resultat.db"); + } + return dbInstance; +} diff --git a/src/services/importSourceService.ts b/src/services/importSourceService.ts new file mode 100644 index 0000000..9f68460 --- /dev/null +++ b/src/services/importSourceService.ts @@ -0,0 +1,98 @@ +import { getDb } from "./db"; +import type { ImportSource } from "../shared/types"; + +export async function getAllSources(): Promise { + const db = await getDb(); + return db.select("SELECT * FROM import_sources ORDER BY name"); +} + +export async function getSourceByName( + name: string +): Promise { + const db = await getDb(); + const rows = await db.select( + "SELECT * FROM import_sources WHERE name = $1", + [name] + ); + return rows.length > 0 ? rows[0] : null; +} + +export async function getSourceById( + id: number +): Promise { + const db = await getDb(); + const rows = await db.select( + "SELECT * FROM import_sources WHERE id = $1", + [id] + ); + return rows.length > 0 ? 
rows[0] : null; +} + +export async function createSource( + source: Omit +): Promise { + const db = await getDb(); + const result = await db.execute( + `INSERT INTO import_sources (name, description, date_format, delimiter, encoding, column_mapping, skip_lines) + VALUES ($1, $2, $3, $4, $5, $6, $7)`, + [ + source.name, + source.description || null, + source.date_format, + source.delimiter, + source.encoding, + source.column_mapping, + source.skip_lines, + ] + ); + return result.lastInsertId as number; +} + +export async function updateSource( + id: number, + source: Partial> +): Promise { + const db = await getDb(); + const fields: string[] = []; + const values: unknown[] = []; + let paramIndex = 1; + + if (source.name !== undefined) { + fields.push(`name = $${paramIndex++}`); + values.push(source.name); + } + if (source.description !== undefined) { + fields.push(`description = $${paramIndex++}`); + values.push(source.description); + } + if (source.date_format !== undefined) { + fields.push(`date_format = $${paramIndex++}`); + values.push(source.date_format); + } + if (source.delimiter !== undefined) { + fields.push(`delimiter = $${paramIndex++}`); + values.push(source.delimiter); + } + if (source.encoding !== undefined) { + fields.push(`encoding = $${paramIndex++}`); + values.push(source.encoding); + } + if (source.column_mapping !== undefined) { + fields.push(`column_mapping = $${paramIndex++}`); + values.push(source.column_mapping); + } + if (source.skip_lines !== undefined) { + fields.push(`skip_lines = $${paramIndex++}`); + values.push(source.skip_lines); + } + + if (fields.length === 0) return; + + fields.push(`updated_at = CURRENT_TIMESTAMP`); + values.push(id); + + await db.execute( + `UPDATE import_sources SET ${fields.join(", ")} WHERE id = $${paramIndex}`, + values + ); +} diff --git a/src/services/importedFileService.ts b/src/services/importedFileService.ts new file mode 100644 index 0000000..2a5b94d --- /dev/null +++ 
b/src/services/importedFileService.ts @@ -0,0 +1,61 @@ +import { getDb } from "./db"; +import type { ImportedFile } from "../shared/types"; + +export async function getFilesBySourceId( + sourceId: number +): Promise { + const db = await getDb(); + return db.select( + "SELECT * FROM imported_files WHERE source_id = $1 ORDER BY import_date DESC", + [sourceId] + ); +} + +export async function existsByHash( + sourceId: number, + fileHash: string +): Promise { + const db = await getDb(); + const rows = await db.select( + "SELECT * FROM imported_files WHERE source_id = $1 AND file_hash = $2", + [sourceId, fileHash] + ); + return rows.length > 0 ? rows[0] : null; +} + +export async function createImportedFile(file: { + source_id: number; + filename: string; + file_hash: string; + row_count: number; + status: string; + notes?: string; +}): Promise { + const db = await getDb(); + const result = await db.execute( + `INSERT INTO imported_files (source_id, filename, file_hash, row_count, status, notes) + VALUES ($1, $2, $3, $4, $5, $6)`, + [ + file.source_id, + file.filename, + file.file_hash, + file.row_count, + file.status, + file.notes || null, + ] + ); + return result.lastInsertId as number; +} + +export async function updateFileStatus( + id: number, + status: string, + rowCount?: number, + notes?: string +): Promise { + const db = await getDb(); + await db.execute( + `UPDATE imported_files SET status = $1, row_count = COALESCE($2, row_count), notes = COALESCE($3, notes) WHERE id = $4`, + [status, rowCount ?? null, notes ?? 
/**
 * Insert parsed transactions in batches of 500, each batch wrapped in its
 * own SQL transaction.
 *
 * @param transactions fully resolved rows (source/file ids and optional
 *        categorization already attached).
 * @returns the number of rows actually inserted.
 * @throws rethrows the first DB error after rolling back the current batch.
 *         NOTE(review): batches committed before the failure stay committed,
 *         so a mid-import failure leaves a partial import behind — confirm
 *         callers handle this (useImportWizard records the file as "error").
 */
export async function insertBatch(
  transactions: Array<{
    date: string;
    description: string;
    amount: number;
    source_id: number;
    file_id: number;
    original_description: string;
    category_id?: number | null;
    supplier_id?: number | null;
  }>
): Promise<number> {
  const db = await getDb();
  let insertedCount = 0;

  // Process in batches of 500 so one large import does not hold a single
  // transaction open for the whole run.
  const batchSize = 500;
  for (let i = 0; i < transactions.length; i += batchSize) {
    const batch = transactions.slice(i, i + batchSize);

    await db.execute("BEGIN TRANSACTION", []);
    try {
      for (const tx of batch) {
        await db.execute(
          `INSERT INTO transactions (date, description, amount, source_id, file_id, original_description, category_id, supplier_id)
           VALUES ($1, $2, $3, $4, $5, $6, $7, $8)`,
          [
            tx.date,
            tx.description,
            tx.amount,
            tx.source_id,
            tx.file_id,
            tx.original_description,
            tx.category_id ?? null,
            tx.supplier_id ?? null,
          ]
        );
        insertedCount++;
      }
      await db.execute("COMMIT", []);
    } catch (e) {
      // Undo only the current batch, then surface the error to the caller.
      await db.execute("ROLLBACK", []);
      throw e;
    }
  }

  return insertedCount;
}

/**
 * Find candidate rows that already exist in the transactions table, matched
 * on exact (date, description, amount).
 *
 * @returns one entry per duplicate; rowIndex is the index into the *input*
 *          `rows` array, not any other row numbering — callers must map it
 *          back to their own index space.
 */
export async function findDuplicates(
  rows: Array<{ date: string; description: string; amount: number }>
): Promise<
  Array<{
    rowIndex: number;
    existingTransactionId: number;
    date: string;
    description: string;
    amount: number;
  }>
> {
  const db = await getDb();
  const duplicates: Array<{
    rowIndex: number;
    existingTransactionId: number;
    date: string;
    description: string;
    amount: number;
  }> = [];

  // One SELECT per candidate row (N+1 pattern) — acceptable for
  // preview-sized imports; would need batching for very large files.
  for (let i = 0; i < rows.length; i++) {
    const row = rows[i];
    const existing = await db.select(
      `SELECT id FROM transactions WHERE date = $1 AND description = $2 AND amount = $3 LIMIT 1`,
      [row.date, row.description, row.amount]
    );
    if (existing.length > 0) {
      duplicates.push({
        rowIndex: i,
        existingTransactionId: existing[0].id,
        date: row.date,
        description: row.description,
        amount: row.amount,
      });
    }
  }

  return duplicates;
}
rows[0].value : null; +} + +export async function setPreference( + key: string, + value: string +): Promise { + const db = await getDb(); + await db.execute( + `INSERT INTO user_preferences (key, value, updated_at) + VALUES ($1, $2, CURRENT_TIMESTAMP) + ON CONFLICT(key) DO UPDATE SET value = $2, updated_at = CURRENT_TIMESTAMP`, + [key, value] + ); +} + +export async function getImportFolder(): Promise { + return getPreference("import_folder"); +} + +export async function setImportFolder(path: string): Promise { + return setPreference("import_folder", path); +} diff --git a/src/shared/types/index.ts b/src/shared/types/index.ts index bc6332a..583be23 100644 --- a/src/shared/types/index.ts +++ b/src/shared/types/index.ts @@ -127,3 +127,86 @@ export interface NavItem { icon: string; labelKey: string; } + +// --- Import Wizard Types --- + +export interface ScannedFile { + filename: string; + file_path: string; + size_bytes: number; + modified_at: string; +} + +export interface ScannedSource { + folder_name: string; + folder_path: string; + files: ScannedFile[]; +} + +export interface ColumnMapping { + date: number; + description: number; + amount?: number; + debitAmount?: number; + creditAmount?: number; +} + +export type AmountMode = "single" | "debit_credit"; +export type SignConvention = "negative_expense" | "positive_expense"; + +export interface SourceConfig { + name: string; + delimiter: string; + encoding: string; + dateFormat: string; + skipLines: number; + columnMapping: ColumnMapping; + amountMode: AmountMode; + signConvention: SignConvention; + hasHeader: boolean; +} + +export interface ParsedRow { + rowIndex: number; + raw: string[]; + parsed: { + date: string; + description: string; + amount: number; + } | null; + error?: string; +} + +export interface DuplicateRow { + rowIndex: number; + date: string; + description: string; + amount: number; + existingTransactionId: number; +} + +export interface DuplicateCheckResult { + fileAlreadyImported: boolean; + 
existingFileId?: number; + duplicateRows: DuplicateRow[]; + newRows: ParsedRow[]; +} + +export interface ImportReport { + totalRows: number; + importedCount: number; + skippedDuplicates: number; + errorCount: number; + categorizedCount: number; + uncategorizedCount: number; + errors: Array<{ rowIndex: number; message: string }>; +} + +export type ImportWizardStep = + | "source-list" + | "source-config" + | "file-preview" + | "duplicate-check" + | "confirm" + | "importing" + | "report"; diff --git a/src/utils/amountParser.ts b/src/utils/amountParser.ts new file mode 100644 index 0000000..bdd79c4 --- /dev/null +++ b/src/utils/amountParser.ts @@ -0,0 +1,26 @@ +/** + * Parse a French-formatted amount string to a number. + * Handles formats like: 1.234,56 / 1234,56 / -1 234.56 / 1 234,56 + */ +export function parseFrenchAmount(raw: string): number { + if (!raw || typeof raw !== "string") return NaN; + + let cleaned = raw.trim(); + + // Remove currency symbols and whitespace + cleaned = cleaned.replace(/[€$£\s\u00A0]/g, ""); + + // Detect if comma is decimal separator (French style) + // Pattern: digits followed by comma followed by exactly 1-2 digits at end + const frenchPattern = /,\d{1,2}$/; + if (frenchPattern.test(cleaned)) { + // French format: remove dots (thousand sep), replace comma with dot (decimal) + cleaned = cleaned.replace(/\./g, "").replace(",", "."); + } else { + // English format or no decimal: remove commas (thousand sep) + cleaned = cleaned.replace(/,/g, ""); + } + + const result = parseFloat(cleaned); + return isNaN(result) ? NaN : result; +} diff --git a/src/utils/dateParser.ts b/src/utils/dateParser.ts new file mode 100644 index 0000000..079d828 --- /dev/null +++ b/src/utils/dateParser.ts @@ -0,0 +1,47 @@ +/** + * Parse a date string with a given format and return ISO YYYY-MM-DD. 
+ * Supported formats: DD/MM/YYYY, MM/DD/YYYY, YYYY-MM-DD, DD-MM-YYYY, DD.MM.YYYY + */ +export function parseDate(raw: string, format: string): string { + if (!raw || typeof raw !== "string") return ""; + + const cleaned = raw.trim(); + let day: string, month: string, year: string; + + // Extract parts based on separator + const parts = cleaned.split(/[/\-\.]/); + if (parts.length !== 3) return ""; + + switch (format) { + case "DD/MM/YYYY": + case "DD-MM-YYYY": + case "DD.MM.YYYY": + [day, month, year] = parts; + break; + case "MM/DD/YYYY": + case "MM-DD-YYYY": + [month, day, year] = parts; + break; + case "YYYY-MM-DD": + case "YYYY/MM/DD": + [year, month, day] = parts; + break; + default: + // Default to DD/MM/YYYY (French) + [day, month, year] = parts; + break; + } + + // Validate + const y = parseInt(year, 10); + const m = parseInt(month, 10); + const d = parseInt(day, 10); + + if (isNaN(y) || isNaN(m) || isNaN(d)) return ""; + if (m < 1 || m > 12 || d < 1 || d > 31) return ""; + + // Handle 2-digit years + const fullYear = y < 100 ? (y > 50 ? 1900 + y : 2000 + y) : y; + + return `${fullYear.toString().padStart(4, "0")}-${m.toString().padStart(2, "0")}-${d.toString().padStart(2, "0")}`; +}