- Add automatic re-hashing of legacy SHA-256 PINs to Argon2id on successful verification, returning new hash to frontend for persistence - Use constant-time comparison (subtle::ConstantTimeEq) for both Argon2id and legacy SHA-256 hash verification - Add unit tests for hash_pin, verify_pin (Argon2id and legacy paths), re-hashing flow, error cases, and hex encoding roundtrip - Update frontend to handle VerifyPinResult struct and save rehashed PIN hash via profile update Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
374 lines
13 KiB
Rust
374 lines
13 KiB
Rust
use argon2::{Algorithm, Argon2, Params, Version};
|
|
use rand::RngCore;
|
|
use serde::{Deserialize, Serialize};
|
|
use sha2::{Digest, Sha256, Sha384};
|
|
use std::fs;
|
|
use subtle::ConstantTimeEq;
|
|
use tauri::Manager;
|
|
|
|
use crate::database;
|
|
|
|
/// A single user profile, persisted in `profiles.json`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Profile {
    /// Unique profile identifier (the built-in profile uses "default").
    pub id: String,
    /// Display name shown in the UI.
    pub name: String,
    /// Accent color as a hex string (e.g. "#4A90A4").
    pub color: String,
    /// Stored PIN hash, or `None` when the profile has no PIN.
    /// Current format: "argon2id:salt_hex:hash_hex"; legacy format: "salt_hex:hash_hex" (SHA-256).
    pub pin_hash: Option<String>,
    /// SQLite database filename for this profile inside the app data dir.
    pub db_filename: String,
    /// Creation timestamp as a unix-seconds string (produced by `chrono_now`).
    pub created_at: String,
}
|
|
|
|
/// Root structure of `profiles.json`: all profiles plus the active selection.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProfilesConfig {
    /// Id of the currently active profile; expected to match one entry in `profiles`.
    pub active_profile_id: String,
    /// All known profiles, including the default one.
    pub profiles: Vec<Profile>,
}
|
|
|
|
fn get_profiles_path(app: &tauri::AppHandle) -> Result<std::path::PathBuf, String> {
|
|
let app_dir = app
|
|
.path()
|
|
.app_data_dir()
|
|
.map_err(|e| format!("Cannot get app data dir: {}", e))?;
|
|
Ok(app_dir.join("profiles.json"))
|
|
}
|
|
|
|
fn make_default_config() -> ProfilesConfig {
|
|
let now = chrono_now();
|
|
let default_id = "default".to_string();
|
|
ProfilesConfig {
|
|
active_profile_id: default_id.clone(),
|
|
profiles: vec![Profile {
|
|
id: default_id,
|
|
name: "Default".to_string(),
|
|
color: "#4A90A4".to_string(),
|
|
pin_hash: None,
|
|
db_filename: "simpl_resultat.db".to_string(),
|
|
created_at: now,
|
|
}],
|
|
}
|
|
}
|
|
|
|
/// Current time as a unix-epoch seconds string (avoids pulling in the chrono
/// crate). The frontend is responsible for formatting it for display.
fn chrono_now() -> String {
    // A clock set before the epoch yields 0 rather than an error.
    let secs = match std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH) {
        Ok(elapsed) => elapsed.as_secs(),
        Err(_) => 0,
    };
    secs.to_string()
}
|
|
|
|
#[tauri::command]
|
|
pub fn load_profiles(app: tauri::AppHandle) -> Result<ProfilesConfig, String> {
|
|
let path = get_profiles_path(&app)?;
|
|
|
|
if !path.exists() {
|
|
let config = make_default_config();
|
|
let json =
|
|
serde_json::to_string_pretty(&config).map_err(|e| format!("JSON error: {}", e))?;
|
|
|
|
// Ensure parent dir exists
|
|
if let Some(parent) = path.parent() {
|
|
fs::create_dir_all(parent)
|
|
.map_err(|e| format!("Cannot create app data dir: {}", e))?;
|
|
}
|
|
|
|
fs::write(&path, json).map_err(|e| format!("Cannot write profiles.json: {}", e))?;
|
|
return Ok(config);
|
|
}
|
|
|
|
let content =
|
|
fs::read_to_string(&path).map_err(|e| format!("Cannot read profiles.json: {}", e))?;
|
|
let config: ProfilesConfig =
|
|
serde_json::from_str(&content).map_err(|e| format!("Invalid profiles.json: {}", e))?;
|
|
Ok(config)
|
|
}
|
|
|
|
#[tauri::command]
|
|
pub fn save_profiles(app: tauri::AppHandle, config: ProfilesConfig) -> Result<(), String> {
|
|
let path = get_profiles_path(&app)?;
|
|
let json =
|
|
serde_json::to_string_pretty(&config).map_err(|e| format!("JSON error: {}", e))?;
|
|
fs::write(&path, json).map_err(|e| format!("Cannot write profiles.json: {}", e))
|
|
}
|
|
|
|
#[tauri::command]
|
|
pub fn delete_profile_db(app: tauri::AppHandle, db_filename: String) -> Result<(), String> {
|
|
if db_filename == "simpl_resultat.db" {
|
|
return Err("Cannot delete the default profile database".to_string());
|
|
}
|
|
|
|
let app_dir = app
|
|
.path()
|
|
.app_data_dir()
|
|
.map_err(|e| format!("Cannot get app data dir: {}", e))?;
|
|
let db_path = app_dir.join(&db_filename);
|
|
|
|
if db_path.exists() {
|
|
fs::remove_file(&db_path)
|
|
.map_err(|e| format!("Cannot delete database file: {}", e))?;
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
#[tauri::command]
|
|
pub fn get_new_profile_init_sql() -> Result<Vec<String>, String> {
|
|
Ok(vec![
|
|
database::CONSOLIDATED_SCHEMA.to_string(),
|
|
database::SEED_CATEGORIES.to_string(),
|
|
])
|
|
}
|
|
|
|
// Argon2id parameters for PIN hashing (same as export_import_commands.rs).
// These must stay stable: changing them silently invalidates stored hashes
// (the parameters are not encoded in the stored "argon2id:salt:hash" string).
const ARGON2_M_COST: u32 = 65536; // 64 MiB memory cost
const ARGON2_T_COST: u32 = 3; // iterations (time cost)
const ARGON2_P_COST: u32 = 1; // parallelism (lanes)
const ARGON2_OUTPUT_LEN: usize = 32; // derived hash length in bytes
const ARGON2_SALT_LEN: usize = 16; // random salt length in bytes
|
|
|
|
fn argon2_hash(pin: &str, salt: &[u8]) -> Result<Vec<u8>, String> {
|
|
let params = Params::new(ARGON2_M_COST, ARGON2_T_COST, ARGON2_P_COST, Some(ARGON2_OUTPUT_LEN))
|
|
.map_err(|e| format!("Argon2 params error: {}", e))?;
|
|
let argon2 = Argon2::new(Algorithm::Argon2id, Version::V0x13, params);
|
|
let mut hash = vec![0u8; ARGON2_OUTPUT_LEN];
|
|
argon2
|
|
.hash_password_into(pin.as_bytes(), salt, &mut hash)
|
|
.map_err(|e| format!("Argon2 hash error: {}", e))?;
|
|
Ok(hash)
|
|
}
|
|
|
|
#[tauri::command]
|
|
pub fn hash_pin(pin: String) -> Result<String, String> {
|
|
let mut salt = [0u8; ARGON2_SALT_LEN];
|
|
rand::rngs::OsRng.fill_bytes(&mut salt);
|
|
let salt_hex = hex_encode(&salt);
|
|
|
|
let hash = argon2_hash(&pin, &salt)?;
|
|
let hash_hex = hex_encode(&hash);
|
|
|
|
// Store as "argon2id:salt:hash" to distinguish from legacy SHA-256 "salt:hash"
|
|
Ok(format!("argon2id:{}:{}", salt_hex, hash_hex))
|
|
}
|
|
|
|
/// Outcome of a PIN verification attempt, returned to the frontend.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VerifyPinResult {
    /// True when the supplied PIN matched the stored hash.
    pub valid: bool,
    /// New Argon2id hash when a legacy SHA-256 PIN was successfully verified and re-hashed.
    /// The frontend should persist this so future verifications use Argon2id.
    pub rehashed: Option<String>,
}
|
|
|
|
/// Verify `pin` against `stored_hash`, supporting both the current Argon2id
/// format ("argon2id:salt_hex:hash_hex") and the legacy SHA-256 format
/// ("salt_hex:hash_hex").
///
/// On a successful legacy verification the PIN is re-hashed with Argon2id and
/// the new hash is returned in `rehashed` for the caller to persist. Both
/// comparisons use constant-time equality via `subtle::ConstantTimeEq`.
///
/// Returns `Err` only for malformed stored hashes or hashing failures; a
/// wrong PIN yields `Ok` with `valid == false`.
#[tauri::command]
pub fn verify_pin(pin: String, stored_hash: String) -> Result<VerifyPinResult, String> {
    // Argon2id format: "argon2id:salt_hex:hash_hex"
    if let Some(rest) = stored_hash.strip_prefix("argon2id:") {
        let parts: Vec<&str> = rest.split(':').collect();
        if parts.len() != 2 {
            return Err("Invalid Argon2id hash format".to_string());
        }
        let salt = hex_decode(parts[0])?;
        let expected_hash = hex_decode(parts[1])?;

        // Re-derive the digest with the stored salt, then compare in constant time.
        let computed = argon2_hash(&pin, &salt)?;

        let valid = computed.ct_eq(&expected_hash).into();
        // Already Argon2id — no upgrade needed, so `rehashed` stays None.
        return Ok(VerifyPinResult { valid, rehashed: None });
    }

    // Legacy SHA-256 format: "salt_hex:hash_hex"
    let parts: Vec<&str> = stored_hash.split(':').collect();
    if parts.len() != 2 {
        return Err("Invalid stored hash format".to_string());
    }
    let salt_hex = parts[0];
    let expected_hash = hex_decode(parts[1])?;

    // The legacy scheme hashed the hex-encoded salt *string* (not raw bytes)
    // concatenated with the PIN — mirrored here exactly.
    let mut hasher = Sha256::new();
    hasher.update(salt_hex.as_bytes());
    hasher.update(pin.as_bytes());
    let result = hasher.finalize();

    // Constant-time comparison even on the legacy path.
    let valid: bool = result.as_slice().ct_eq(&expected_hash).into();

    if valid {
        // Re-hash with Argon2id so this legacy PIN is upgraded
        let new_hash = hash_pin(pin)?;
        Ok(VerifyPinResult { valid: true, rehashed: Some(new_hash) })
    } else {
        Ok(VerifyPinResult { valid: false, rehashed: None })
    }
}
|
|
|
|
/// Lowercase hex encoding of a byte slice.
fn hex_encode(bytes: &[u8]) -> String {
    // Preallocate: every input byte becomes exactly two hex characters.
    let mut out = String::with_capacity(bytes.len() * 2);
    for b in bytes {
        out.push_str(&format!("{:02x}", b));
    }
    out
}
|
|
|
|
/// Decode a hex string (upper- or lowercase digits accepted) into bytes.
///
/// Returns an error — never panics — on odd length or any non-hex character.
/// The previous implementation sliced the `&str` by byte index (`&hex[i..i + 2]`),
/// which panics when a multi-byte UTF-8 character straddles the slice boundary;
/// decoding raw bytes avoids that panic path entirely.
fn hex_decode(hex: &str) -> Result<Vec<u8>, String> {
    if hex.len() % 2 != 0 {
        return Err("Invalid hex string length".to_string());
    }

    // Decode a single ASCII hex digit to its 4-bit value.
    fn nibble(b: u8) -> Result<u8, String> {
        match b {
            b'0'..=b'9' => Ok(b - b'0'),
            b'a'..=b'f' => Ok(b - b'a' + 10),
            b'A'..=b'F' => Ok(b - b'A' + 10),
            _ => Err(format!("Invalid hex character: 0x{:02x}", b)),
        }
    }

    // Length is even, so chunks_exact(2) covers every byte with no remainder.
    hex.as_bytes()
        .chunks_exact(2)
        .map(|pair| Ok((nibble(pair[0])? << 4) | nibble(pair[1])?))
        .collect()
}
|
|
|
|
/// Repair migration checksums for a profile database.
/// Updates stored checksums to match current migration SQL, avoiding re-application
/// of destructive migrations (e.g., migration 2 which DELETEs categories/keywords).
///
/// Returns `Ok(true)` when at least one checksum was rewritten; `Ok(false)` when
/// the database file or its `_sqlx_migrations` table does not exist, or when all
/// checksums already matched.
#[tauri::command]
pub fn repair_migrations(app: tauri::AppHandle, db_filename: String) -> Result<bool, String> {
    let app_dir = app
        .path()
        .app_data_dir()
        .map_err(|e| format!("Cannot get app data dir: {}", e))?;
    let db_path = app_dir.join(&db_filename);

    // A missing database is not an error — there is simply nothing to repair.
    if !db_path.exists() {
        return Ok(false);
    }

    let conn = rusqlite::Connection::open(&db_path)
        .map_err(|e| format!("Cannot open database: {}", e))?;

    // Without the `_sqlx_migrations` bookkeeping table, no migrations were ever
    // applied, so there is nothing to fix. Query errors are treated as "absent".
    let table_exists: bool = conn
        .query_row(
            "SELECT COUNT(*) > 0 FROM sqlite_master WHERE type='table' AND name='_sqlx_migrations'",
            [],
            |row| row.get(0),
        )
        .unwrap_or(false);

    if !table_exists {
        return Ok(false);
    }

    // Current migration SQL — must match the vec in lib.rs
    let migrations: &[(i64, &str)] = &[
        (1, database::SCHEMA),
        (2, database::SEED_CATEGORIES),
    ];

    let mut repaired = false;
    for (version, sql) in migrations {
        // Expected checksum: SHA-384 digest of the migration SQL text,
        // matching the checksum scheme used in `_sqlx_migrations`.
        let expected_checksum = Sha384::digest(sql.as_bytes()).to_vec();

        // Check if this migration exists with a different checksum
        let needs_repair: bool = conn
            .query_row(
                "SELECT COUNT(*) > 0 FROM _sqlx_migrations WHERE version = ?1 AND checksum != ?2",
                rusqlite::params![version, expected_checksum],
                |row| row.get(0),
            )
            .unwrap_or(false);

        if needs_repair {
            // Overwrite only the checksum; the migration itself is NOT re-run.
            conn.execute(
                "UPDATE _sqlx_migrations SET checksum = ?1 WHERE version = ?2",
                rusqlite::params![expected_checksum, version],
            )
            .map_err(|e| format!("Cannot repair migration {}: {}", version, e))?;
            repaired = true;
        }
    }

    Ok(repaired)
}
|
|
|
|
#[cfg(test)]
mod tests {
    use super::*;

    // hash_pin must emit the self-describing "argon2id:salt:hash" format with
    // hex-encoded salt and digest of the configured lengths.
    #[test]
    fn test_hash_pin_produces_argon2id_format() {
        let hash = hash_pin("1234".to_string()).unwrap();
        assert!(hash.starts_with("argon2id:"), "Hash should start with 'argon2id:' prefix");
        let parts: Vec<&str> = hash.split(':').collect();
        assert_eq!(parts.len(), 3, "Hash should have 3 parts: prefix:salt:hash");
        assert_eq!(parts[1].len(), ARGON2_SALT_LEN * 2, "Salt should be {} hex chars", ARGON2_SALT_LEN * 2);
        assert_eq!(parts[2].len(), ARGON2_OUTPUT_LEN * 2, "Hash should be {} hex chars", ARGON2_OUTPUT_LEN * 2);
    }

    // A fresh random salt per call means two hashes of the same PIN differ.
    #[test]
    fn test_hash_pin_different_salts() {
        let h1 = hash_pin("1234".to_string()).unwrap();
        let h2 = hash_pin("1234".to_string()).unwrap();
        assert_ne!(h1, h2, "Two hashes of the same PIN should use different salts");
    }

    // Argon2id round-trip: correct PIN verifies and triggers no re-hash.
    #[test]
    fn test_verify_argon2id_pin_correct() {
        let hash = hash_pin("5678".to_string()).unwrap();
        let result = verify_pin("5678".to_string(), hash).unwrap();
        assert!(result.valid, "Correct PIN should verify");
        assert!(result.rehashed.is_none(), "Argon2id PIN should not be rehashed");
    }

    // Wrong PIN against an Argon2id hash: invalid, and still no re-hash.
    #[test]
    fn test_verify_argon2id_pin_wrong() {
        let hash = hash_pin("5678".to_string()).unwrap();
        let result = verify_pin("0000".to_string(), hash).unwrap();
        assert!(!result.valid, "Wrong PIN should not verify");
        assert!(result.rehashed.is_none());
    }

    // Legacy upgrade path: a correct SHA-256 PIN verifies, is re-hashed to
    // Argon2id, and the returned hash verifies on its own afterward.
    #[test]
    fn test_verify_legacy_sha256_correct_and_rehash() {
        // Create a legacy SHA-256 hash: "salt_hex:sha256(salt_hex + pin)"
        let salt_hex = "abcdef0123456789";
        let mut hasher = Sha256::new();
        hasher.update(salt_hex.as_bytes());
        hasher.update(b"4321");
        let hash_bytes = hasher.finalize();
        let hash_hex = hex_encode(&hash_bytes);
        let stored = format!("{}:{}", salt_hex, hash_hex);

        let result = verify_pin("4321".to_string(), stored).unwrap();
        assert!(result.valid, "Correct legacy PIN should verify");
        assert!(result.rehashed.is_some(), "Legacy PIN should be rehashed to Argon2id");

        // Verify the rehashed value is a valid Argon2id hash
        let new_hash = result.rehashed.unwrap();
        assert!(new_hash.starts_with("argon2id:"));

        // Verify the rehashed value works for future verification
        let result2 = verify_pin("4321".to_string(), new_hash).unwrap();
        assert!(result2.valid, "Rehashed PIN should verify");
        assert!(result2.rehashed.is_none(), "Already Argon2id, no rehash needed");
    }

    // A wrong PIN on the legacy path must not verify and must not produce
    // an upgraded hash.
    #[test]
    fn test_verify_legacy_sha256_wrong() {
        let salt_hex = "abcdef0123456789";
        let mut hasher = Sha256::new();
        hasher.update(salt_hex.as_bytes());
        hasher.update(b"4321");
        let hash_bytes = hasher.finalize();
        let hash_hex = hex_encode(&hash_bytes);
        let stored = format!("{}:{}", salt_hex, hash_hex);

        let result = verify_pin("9999".to_string(), stored).unwrap();
        assert!(!result.valid, "Wrong legacy PIN should not verify");
        assert!(result.rehashed.is_none(), "Failed verification should not rehash");
    }

    // Malformed stored hashes are reported as Err, not as a failed match.
    #[test]
    fn test_verify_invalid_format() {
        let result = verify_pin("1234".to_string(), "invalid".to_string());
        assert!(result.is_err(), "Single-part hash should fail");

        let result = verify_pin("1234".to_string(), "argon2id:bad".to_string());
        assert!(result.is_err(), "Argon2id with wrong part count should fail");
    }

    // hex_encode / hex_decode must be exact inverses for arbitrary bytes.
    #[test]
    fn test_hex_roundtrip() {
        let original = vec![0u8, 127, 255, 1, 16];
        let encoded = hex_encode(&original);
        let decoded = hex_decode(&encoded).unwrap();
        assert_eq!(original, decoded);
    }
}
|