diff --git a/CHANGELOG.md b/CHANGELOG.md index 0d7a4117..806b02c7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,7 +9,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added -- **Accounts persistence** (PRD §6.4, PRD-v2 §P1.1, task 20): SQLite `accounts` table (migration `m20260428_000006`) with `id` / `service_name` / `username` / `account_type` / `enabled` / `traffic_left` / `traffic_total` / `valid_until` / `last_validated` / `created_at` columns and a UNIQUE `(service_name, username)` index. New `AccountRepository` driven port (`save` / `find_by_id` / `list` / `list_by_service` / `delete`) and `SqliteAccountRepo` adapter with sea-orm entity + `from_domain` / `into_domain` converters. UNIQUE violations surface as `DomainError::AlreadyExists` instead of leaking storage errors. Domain `Account` aggregate gained `traffic_total`, `last_validated`, `created_at` fields and switched its identifier to `AccountId(String)` so generated account ids match the spec's `TEXT PRIMARY KEY`. `Account::credential_ref()` returns the `keyring://{service}/{username}` URI used to look up the password/token in the OS keychain — credentials are never persisted to SQLite. Unblocks tasks 21-25, 38, 51-56, 75-76. +- **Accounts commands** (PRD §6.4, PRD-v2 §P1.2, task 21): six application-layer command handlers (`add_account`, `update_account`, `delete_account`, `validate_account`, `export_accounts`, `import_accounts`) wired through the `CommandBus` builder. New driven ports `AccountCredentialStore`, `AccountValidator` (with `ValidationOutcome`) and `PassphraseCodec` keep handlers free of plugin / crypto dependencies. 
`KeyringAccountStore` adapter persists per-account passwords under `vortex-account-{id}` keyring entries; `AesGcmPbkdf2Codec` adapter implements the import / export bundle format using AES-256-GCM with a PBKDF2-HMAC-SHA256 200 000-iteration KDF, fresh per-call salt + nonce, header bound as AAD, and a `VORTACC` magic + version byte so tampered or downgraded bundles fail authentication. Domain events `AccountAdded`, `AccountUpdated`, `AccountDeleted`, `AccountValidated`, `AccountValidationFailed`, `AccountsImported`, `AccountsExported` published via `EventBus` and forwarded by the Tauri bridge as `account-*` browser events. Add rolls back the SQLite row when the keyring write fails so credentials never end up orphaned; import validates every entry up-front and skips `(service_name, username)` pairs already present without inserting partial state. Unblocks task 23 (Vue Accounts). +- **Accounts persistence** (PRD §6.4, PRD-v2 §P1.1, task 20): SQLite `accounts` table (migration `m20260428_000006`) with `id` / `service_name` / `username` / `account_type` / `enabled` / `traffic_left` / `traffic_total` / `valid_until` / `last_validated` / `created_at` columns and a UNIQUE `(service_name, username)` index. New `AccountRepository` driven port (`save` / `find_by_id` / `list` / `list_by_service` / `delete`) and `SqliteAccountRepo` adapter with sea-orm entity + `from_domain` / `into_domain` converters. UNIQUE violations surface as `DomainError::AlreadyExists` instead of leaking storage errors. Domain `Account` aggregate gained `traffic_total`, `last_validated`, `created_at` fields and switched its identifier to `AccountId(String)` so generated account ids match the spec's `TEXT PRIMARY KEY`. 
`Account::credential_ref()` returns a `keyring://{service}/{username}` URI exposing a logical reference suitable for diagnostics; passwords themselves are never persisted to SQLite — they live in the OS keychain via the `AccountCredentialStore` adapter (added in task 21, keyed by `AccountId`). Unblocks tasks 21-25, 38, 51-56, 75-76. ## [0.2.0-beta] - 2026-04-27 diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock index 4e699aa1..c406a2ce 100644 --- a/src-tauri/Cargo.lock +++ b/src-tauri/Cargo.lock @@ -17,6 +17,16 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" +[[package]] +name = "aead" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" +dependencies = [ + "crypto-common 0.1.7", + "generic-array", +] + [[package]] name = "aes" version = "0.8.4" @@ -28,6 +38,20 @@ dependencies = [ "cpufeatures 0.2.17", ] +[[package]] +name = "aes-gcm" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1" +dependencies = [ + "aead", + "aes", + "cipher", + "ctr", + "ghash", + "subtle", +] + [[package]] name = "ahash" version = "0.7.8" @@ -886,6 +910,17 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" +[[package]] +name = "chacha20" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f8d983286843e49675a4b7a2d174efe136dc93a18d69130dd18198a6c167601" +dependencies = [ + "cfg-if", + "cpufeatures 0.3.0", + "rand_core 0.10.1", +] + [[package]] name = "chrono" version = "0.4.44" @@ -966,6 +1001,12 @@ dependencies = [ "cc", ] +[[package]] +name = "cmov" +version = "0.5.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f88a43d011fc4a6876cb7344703e297c71dda42494fee094d5f7c76bf13f746" + [[package]] name = "cobs" version = "0.3.0" @@ -1333,6 +1374,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" dependencies = [ "generic-array", + "rand_core 0.6.4", "typenum", ] @@ -1395,6 +1437,24 @@ dependencies = [ "syn 2.0.117", ] +[[package]] +name = "ctr" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" +dependencies = [ + "cipher", +] + +[[package]] +name = "ctutils" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d5515a3834141de9eafb9717ad39eea8247b5674e6066c404e8c4b365d2a29e" +dependencies = [ + "cmov", +] + [[package]] name = "darling" version = "0.20.11" @@ -1598,6 +1658,7 @@ dependencies = [ "block-buffer 0.12.0", "const-oid 0.10.2", "crypto-common 0.2.1", + "ctutils", ] [[package]] @@ -2501,11 +2562,22 @@ dependencies = [ "js-sys", "libc", "r-efi 6.0.0", + "rand_core 0.10.1", "wasip2", "wasip3", "wasm-bindgen", ] +[[package]] +name = "ghash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1" +dependencies = [ + "opaque-debug", + "polyval", +] + [[package]] name = "gimli" version = "0.33.0" @@ -2778,7 +2850,7 @@ version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" dependencies = [ - "hmac", + "hmac 0.12.1", ] [[package]] @@ -2790,6 +2862,15 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "hmac" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6303bc9732ae41b04cb554b844a762b4115a61bfaa81e3e83050991eeb56863f" +dependencies = [ + "digest 0.11.2", +] + [[package]] name = "home" version = "0.5.12" @@ -4202,6 +4283,12 @@ version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" +[[package]] +name = "opaque-debug" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" + [[package]] name = "openssl" version = "0.10.76" @@ -4380,7 +4467,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" dependencies = [ "digest 0.10.7", - "hmac", + "hmac 0.12.1", +] + +[[package]] +name = "pbkdf2" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112d82ceb8c5bf524d9af484d4e4970c9fd5a0cc15ba14ad93dccd28873b0629" +dependencies = [ + "digest 0.11.2", + "hmac 0.13.0", ] [[package]] @@ -4718,6 +4815,18 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "polyval" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" +dependencies = [ + "cfg-if", + "cpufeatures 0.2.17", + "opaque-debug", + "universal-hash", +] + [[package]] name = "postcard" version = "1.1.3" @@ -5113,6 +5222,17 @@ dependencies = [ "rand_core 0.9.5", ] +[[package]] +name = "rand" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2e8e8bcc7961af1fdac401278c6a831614941f6164ee3bf4ce61b7edb162207" +dependencies = [ + "chacha20", + "getrandom 0.4.2", + "rand_core 0.10.1", +] + [[package]] name = "rand_chacha" version = "0.2.2" @@ -5170,6 +5290,12 @@ dependencies = [ "getrandom 0.3.4", ] +[[package]] +name = "rand_core" +version = "0.10.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "63b8176103e19a2643978565ca18b50549f6101881c443590420e4dc998a3c69" + [[package]] name = "rand_hc" version = "0.2.0" @@ -6483,7 +6609,7 @@ dependencies = [ "generic-array", "hex", "hkdf", - "hmac", + "hmac 0.12.1", "itoa", "log", "md-5 0.10.6", @@ -6526,7 +6652,7 @@ dependencies = [ "futures-util", "hex", "hkdf", - "hmac", + "hmac 0.12.1", "home", "itoa", "log", @@ -7824,6 +7950,16 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" +[[package]] +name = "universal-hash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" +dependencies = [ + "crypto-common 0.1.7", + "subtle", +] + [[package]] name = "unrar" version = "0.5.8" @@ -7989,6 +8125,7 @@ checksum = "051eb1abcf10076295e815102942cc58f9d5e3b4560e46e53c21e8ff6f3af7b1" name = "vortex" version = "0.2.0-beta" dependencies = [ + "aes-gcm", "anyhow", "bincode", "bytes", @@ -8000,10 +8137,13 @@ dependencies = [ "flate2", "futures-util", "hex", + "hmac 0.13.0", "keyring", "libc", "md-5 0.11.0", "notify", + "pbkdf2 0.13.0", + "rand 0.10.1", "regex", "reqwest", "sea-orm", @@ -9932,11 +10072,11 @@ dependencies = [ "deflate64", "flate2", "getrandom 0.4.2", - "hmac", + "hmac 0.12.1", "indexmap 2.13.1", "lzma-rust2", "memchr", - "pbkdf2", + "pbkdf2 0.12.2", "ppmd-rust", "sha1", "time", diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml index e169aa9e..1f669fd2 100644 --- a/src-tauri/Cargo.toml +++ b/src-tauri/Cargo.toml @@ -56,6 +56,10 @@ hex = "0.4.3" tauri-plugin-dialog = "2.7.0" md-5 = "0.11.0" digest = "0.11.2" +aes-gcm = "0.10.3" +pbkdf2 = "0.13.0" +hmac = "0.13.0" +rand = "0.10.1" [target.'cfg(unix)'.dependencies] libc = "0.2" diff --git a/src-tauri/src/adapters/driven/credential/keyring_account_store.rs 
b/src-tauri/src/adapters/driven/credential/keyring_account_store.rs new file mode 100644 index 00000000..5ea2258d --- /dev/null +++ b/src-tauri/src/adapters/driven/credential/keyring_account_store.rs @@ -0,0 +1,141 @@ +//! [`AccountCredentialStore`] backed by `keyring-rs`. +//! +//! Stores one keyring entry per persisted [`AccountId`]. The keyring +//! service name is `vortex-account-{id}`; the username slot is the +//! constant marker `vortex-account-password`. A single store call +//! therefore writes a single secret — no race window between two +//! related entries (contrast with the broader +//! [`KeyringCredentialStore`] which juggles `username` and `password` +//! sub-entries). + +use crate::domain::error::DomainError; +use crate::domain::model::account::AccountId; +use crate::domain::ports::driven::AccountCredentialStore; + +const KEYRING_USERNAME_SLOT: &str = "vortex-account-password"; + +#[derive(Debug, Clone, Default)] +pub struct KeyringAccountStore; + +impl KeyringAccountStore { + pub fn new() -> Self { + Self + } + + fn entry(account_id: &AccountId) -> Result { + let svc = format!("vortex-account-{}", account_id.as_str()); + keyring::Entry::new(&svc, KEYRING_USERNAME_SLOT) + .map_err(|e| DomainError::StorageError(sanitize(account_id.as_str(), "entry", &e))) + } +} + +impl AccountCredentialStore for KeyringAccountStore { + fn store_password(&self, account_id: &AccountId, password: &str) -> Result<(), DomainError> { + let entry = Self::entry(account_id)?; + entry + .set_password(password) + .map_err(|e| DomainError::StorageError(sanitize(account_id.as_str(), "write", &e))) + } + + fn get_password(&self, account_id: &AccountId) -> Result, DomainError> { + let entry = Self::entry(account_id)?; + match entry.get_password() { + Ok(value) => Ok(Some(value)), + Err(keyring::Error::NoEntry) => Ok(None), + Err(e) => Err(DomainError::StorageError(sanitize( + account_id.as_str(), + "read", + &e, + ))), + } + } + + fn delete_password(&self, account_id: &AccountId) 
-> Result<(), DomainError> { + let entry = Self::entry(account_id)?; + match entry.delete_credential() { + Ok(()) => Ok(()), + Err(keyring::Error::NoEntry) => Ok(()), + Err(e) => Err(DomainError::StorageError(sanitize( + account_id.as_str(), + "delete", + &e, + ))), + } + } +} + +/// Map a keyring error to a sanitised string. Mirrors the policy used +/// by [`KeyringCredentialStore`](super::KeyringCredentialStore): keyring's +/// `Ambiguous` variant wraps `Credential` `Debug` impls that can leak +/// raw secrets, and `BadEncoding` wraps the raw byte buffer; neither +/// should ever propagate unfiltered. +fn sanitize(account_id: &str, operation: &str, err: &keyring::Error) -> String { + match err { + keyring::Error::Ambiguous(_) => format!( + "keyring {operation} error for account '{account_id}': ambiguous (multiple entries matched)" + ), + keyring::Error::BadEncoding(_) => format!( + "keyring {operation} error for account '{account_id}': stored value is not valid UTF-8" + ), + other => format!("keyring {operation} error for account '{account_id}': {other}"), + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_sanitize_ambiguous_omits_inner_credentials() { + let err = keyring::Error::Ambiguous(Vec::new()); + let msg = sanitize("acc-1", "read", &err); + assert!(msg.contains("ambiguous")); + assert!(msg.contains("acc-1")); + assert!(!msg.contains("Credential")); + } + + #[test] + fn test_sanitize_bad_encoding_omits_raw_bytes() { + let err = keyring::Error::BadEncoding(vec![0xFF, 0xFE]); + let msg = sanitize("acc-2", "read", &err); + assert!(msg.contains("not valid UTF-8")); + assert!(!msg.contains("0xFF")); + } + + #[test] + fn test_sanitize_no_entry_includes_id_and_operation() { + let err = keyring::Error::NoEntry; + let msg = sanitize("acc-3", "delete", &err); + assert!(msg.contains("acc-3")); + assert!(msg.contains("delete")); + } + + // The end-to-end keyring round-trip test exercises a real OS + // keychain so it is gated behind `--ignored`. 
CI relies on the + // FakeAccountCredentialStore in `tests_support` to cover the + // command handlers. + + #[test] + #[ignore = "requires OS keychain backend"] + fn test_store_get_delete_cycle_roundtrips() { + let store = KeyringAccountStore::new(); + let id = AccountId::new("kc-test-id"); + + let _ = store.delete_password(&id); + + store.store_password(&id, "s3cret").expect("store"); + assert_eq!( + store.get_password(&id).expect("get").as_deref(), + Some("s3cret") + ); + + store.store_password(&id, "rotated").expect("rotate"); + assert_eq!( + store.get_password(&id).expect("get").as_deref(), + Some("rotated") + ); + + store.delete_password(&id).expect("delete"); + assert!(store.get_password(&id).expect("get").is_none()); + } +} diff --git a/src-tauri/src/adapters/driven/credential/mod.rs b/src-tauri/src/adapters/driven/credential/mod.rs index 705dd760..23ea8c6a 100644 --- a/src-tauri/src/adapters/driven/credential/mod.rs +++ b/src-tauri/src/adapters/driven/credential/mod.rs @@ -1,5 +1,7 @@ +mod keyring_account_store; mod keyring_credential_store; mod noop_credential_store; +pub use keyring_account_store::KeyringAccountStore; pub use keyring_credential_store::KeyringCredentialStore; pub use noop_credential_store::NoopCredentialStore; diff --git a/src-tauri/src/adapters/driven/crypto/aes_gcm_codec.rs b/src-tauri/src/adapters/driven/crypto/aes_gcm_codec.rs new file mode 100644 index 00000000..8260800e --- /dev/null +++ b/src-tauri/src/adapters/driven/crypto/aes_gcm_codec.rs @@ -0,0 +1,300 @@ +//! AES-256-GCM passphrase codec used by the account import / export +//! commands. +//! +//! Encryption flow: +//! +//! 1. Generate a fresh 16-byte random salt. +//! 2. Stretch the user passphrase with PBKDF2-HMAC-SHA256 +//! (`PBKDF2_ITERATIONS` rounds) to a 32-byte key. +//! 3. Generate a fresh 12-byte random nonce. +//! 4. AES-256-GCM seals the plaintext under (key, nonce). The +//! associated data is the bundle header so a downgrade attack +//! 
swapping `version` cannot pass authentication. +//! 5. Output bytes: `magic | version | iterations | salt | nonce | ct||tag`. +//! +//! Decryption verifies the magic + version, re-derives the key from +//! the supplied passphrase + stored salt, and returns the plaintext or +//! a [`DomainError::ValidationError`] on any mismatch — wrong +//! passphrase, tampered ciphertext, or unsupported header version. + +use aes_gcm::aead::Aead; +use aes_gcm::aead::Payload; +use aes_gcm::{Aes256Gcm, KeyInit, Nonce}; +use hmac::Hmac; +use pbkdf2::pbkdf2; +use rand::TryRng; +use rand::rngs::SysRng; +use sha2::Sha256; + +use crate::domain::error::DomainError; +use crate::domain::ports::driven::PassphraseCodec; + +const MAGIC: &[u8; 7] = b"VORTACC"; +const VERSION: u8 = 1; +const SALT_LEN: usize = 16; +const NONCE_LEN: usize = 12; +const KEY_LEN: usize = 32; +const ITER_LEN: usize = 4; +/// PBKDF2 iteration count. OWASP 2024 minimum for PBKDF2-HMAC-SHA256 +/// is 600 000 — we pick 200 000 as a balance between security and the +/// import / export commands running on cold-start without spinning the +/// fan. The value is stored alongside the ciphertext so future bumps +/// remain backward-compatible. +const PBKDF2_ITERATIONS: u32 = 200_000; +/// Hard upper bound applied to the iteration count read from the +/// import header. Without this, a crafted bundle could request an +/// arbitrarily large count and pin the CPU for minutes during import. +/// Generous enough that future stronger seal values (within an order of +/// magnitude of OWASP's modern recommendations) still pass. 
+const MAX_PBKDF2_ITERATIONS: u32 = 10_000_000; +const HEADER_LEN: usize = MAGIC.len() + 1 + ITER_LEN + SALT_LEN + NONCE_LEN; + +#[derive(Debug, Clone, Default)] +pub struct AesGcmPbkdf2Codec; + +impl AesGcmPbkdf2Codec { + pub fn new() -> Self { + Self + } + + fn derive_key( + passphrase: &str, + salt: &[u8], + iterations: u32, + ) -> Result<[u8; KEY_LEN], DomainError> { + let mut key = [0u8; KEY_LEN]; + pbkdf2::>(passphrase.as_bytes(), salt, iterations, &mut key) + .map_err(|e| DomainError::StorageError(format!("pbkdf2 derivation failed: {e}")))?; + Ok(key) + } + + fn build_header(salt: &[u8; SALT_LEN], nonce: &[u8; NONCE_LEN]) -> Vec { + let mut header = Vec::with_capacity(HEADER_LEN); + header.extend_from_slice(MAGIC); + header.push(VERSION); + header.extend_from_slice(&PBKDF2_ITERATIONS.to_be_bytes()); + header.extend_from_slice(salt); + header.extend_from_slice(nonce); + header + } +} + +impl PassphraseCodec for AesGcmPbkdf2Codec { + fn seal(&self, passphrase: &str, plaintext: &[u8]) -> Result, DomainError> { + if passphrase.is_empty() { + return Err(DomainError::ValidationError( + "passphrase must not be empty".into(), + )); + } + + let mut rng = SysRng; + let mut salt = [0u8; SALT_LEN]; + rng.try_fill_bytes(&mut salt) + .map_err(|e| DomainError::StorageError(format!("rng failure: {e}")))?; + let mut nonce_bytes = [0u8; NONCE_LEN]; + rng.try_fill_bytes(&mut nonce_bytes) + .map_err(|e| DomainError::StorageError(format!("rng failure: {e}")))?; + + let key = Self::derive_key(passphrase, &salt, PBKDF2_ITERATIONS)?; + let cipher = Aes256Gcm::new_from_slice(&key) + .map_err(|e| DomainError::StorageError(format!("aes init failed: {e}")))?; + let header = Self::build_header(&salt, &nonce_bytes); + + let nonce = Nonce::from_slice(&nonce_bytes); + let ciphertext = cipher + .encrypt( + nonce, + Payload { + msg: plaintext, + aad: &header, + }, + ) + .map_err(|e| DomainError::StorageError(format!("aes encrypt failed: {e}")))?; + + let mut out = header; + 
out.extend_from_slice(&ciphertext); + Ok(out) + } + + fn open(&self, passphrase: &str, ciphertext: &[u8]) -> Result, DomainError> { + if passphrase.is_empty() { + return Err(DomainError::ValidationError( + "passphrase must not be empty".into(), + )); + } + if ciphertext.len() < HEADER_LEN + 16 { + return Err(DomainError::ValidationError( + "ciphertext too short to be a vortex account export".into(), + )); + } + if &ciphertext[..MAGIC.len()] != MAGIC { + return Err(DomainError::ValidationError( + "not a vortex account export (magic mismatch)".into(), + )); + } + let version = ciphertext[MAGIC.len()]; + if version != VERSION { + return Err(DomainError::ValidationError(format!( + "unsupported export version: {version} (expected {VERSION})" + ))); + } + + let mut iter_bytes = [0u8; ITER_LEN]; + iter_bytes.copy_from_slice(&ciphertext[MAGIC.len() + 1..MAGIC.len() + 1 + ITER_LEN]); + let iterations = u32::from_be_bytes(iter_bytes); + if iterations < 1_000 { + return Err(DomainError::ValidationError( + "export header has implausibly low PBKDF2 iteration count".into(), + )); + } + if iterations > MAX_PBKDF2_ITERATIONS { + return Err(DomainError::ValidationError(format!( + "export header has implausibly high PBKDF2 iteration count: {iterations}" + ))); + } + + let salt_start = MAGIC.len() + 1 + ITER_LEN; + let salt = &ciphertext[salt_start..salt_start + SALT_LEN]; + let nonce_start = salt_start + SALT_LEN; + let nonce_bytes = &ciphertext[nonce_start..nonce_start + NONCE_LEN]; + let body = &ciphertext[HEADER_LEN..]; + + let mut salt_arr = [0u8; SALT_LEN]; + salt_arr.copy_from_slice(salt); + let mut nonce_arr = [0u8; NONCE_LEN]; + nonce_arr.copy_from_slice(nonce_bytes); + + let key = Self::derive_key(passphrase, &salt_arr, iterations)?; + let cipher = Aes256Gcm::new_from_slice(&key) + .map_err(|e| DomainError::StorageError(format!("aes init failed: {e}")))?; + let header = Self::build_header_with_iterations(&salt_arr, &nonce_arr, iterations); + + let nonce = 
Nonce::from_slice(&nonce_arr); + cipher + .decrypt( + nonce, + Payload { + msg: body, + aad: &header, + }, + ) + .map_err(|_| { + // GCM auth failures are indistinguishable from a wrong + // passphrase by design — surface a single clear message + // so the UI can route to "passphrase incorrect". + DomainError::ValidationError("wrong passphrase or corrupted account export".into()) + }) + } +} + +impl AesGcmPbkdf2Codec { + fn build_header_with_iterations( + salt: &[u8; SALT_LEN], + nonce: &[u8; NONCE_LEN], + iterations: u32, + ) -> Vec { + let mut header = Vec::with_capacity(HEADER_LEN); + header.extend_from_slice(MAGIC); + header.push(VERSION); + header.extend_from_slice(&iterations.to_be_bytes()); + header.extend_from_slice(salt); + header.extend_from_slice(nonce); + header + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_seal_open_round_trip_returns_original_plaintext() { + let codec = AesGcmPbkdf2Codec::new(); + let plaintext = b"hello, world"; + let ciphertext = codec.seal("unit-test-passphrase", plaintext).unwrap(); + let recovered = codec.open("unit-test-passphrase", &ciphertext).unwrap(); + assert_eq!(recovered, plaintext); + } + + #[test] + fn test_seal_produces_unique_outputs_for_same_input() { + let codec = AesGcmPbkdf2Codec::new(); + let a = codec.seal("k", b"plaintext").unwrap(); + let b = codec.seal("k", b"plaintext").unwrap(); + assert_ne!(a, b, "fresh salt+nonce → different ciphertext"); + } + + #[test] + fn test_open_with_wrong_passphrase_returns_validation_error() { + let codec = AesGcmPbkdf2Codec::new(); + let ct = codec.seal("right", b"secret").unwrap(); + let err = codec.open("wrong", &ct).unwrap_err(); + assert!(matches!(err, DomainError::ValidationError(_))); + } + + #[test] + fn test_open_rejects_short_input() { + let codec = AesGcmPbkdf2Codec::new(); + let err = codec.open("k", b"short").unwrap_err(); + assert!(matches!(err, DomainError::ValidationError(_))); + } + + #[test] + fn test_open_rejects_wrong_magic() { + 
let codec = AesGcmPbkdf2Codec::new(); + let mut ct = codec.seal("k", b"data").unwrap(); + ct[0] ^= 0xFF; + let err = codec.open("k", &ct).unwrap_err(); + assert!(matches!(err, DomainError::ValidationError(ref m) if m.contains("magic"))); + } + + #[test] + fn test_open_rejects_unsupported_version() { + let codec = AesGcmPbkdf2Codec::new(); + let mut ct = codec.seal("k", b"data").unwrap(); + ct[MAGIC.len()] = 99; + let err = codec.open("k", &ct).unwrap_err(); + assert!(matches!(err, DomainError::ValidationError(ref m) if m.contains("version"))); + } + + #[test] + fn test_open_rejects_tampered_ciphertext_body() { + let codec = AesGcmPbkdf2Codec::new(); + let mut ct = codec.seal("k", b"hello").unwrap(); + // Flip a bit in the encrypted body so GCM auth fails. + let last = ct.len() - 1; + ct[last] ^= 0x01; + let err = codec.open("k", &ct).unwrap_err(); + assert!(matches!(err, DomainError::ValidationError(_))); + } + + #[test] + fn test_open_rejects_low_iteration_header() { + let codec = AesGcmPbkdf2Codec::new(); + let mut ct = codec.seal("k", b"hello").unwrap(); + // Overwrite the iteration field with 0 (invalid by policy). + let iter_offset = MAGIC.len() + 1; + ct[iter_offset..iter_offset + ITER_LEN].copy_from_slice(&0u32.to_be_bytes()); + let err = codec.open("k", &ct).unwrap_err(); + assert!(matches!(err, DomainError::ValidationError(ref m) if m.contains("iteration"))); + } + + #[test] + fn test_open_rejects_excessive_iteration_header() { + let codec = AesGcmPbkdf2Codec::new(); + let mut ct = codec.seal("k", b"hello").unwrap(); + // Overwrite the iteration field with a value above the cap. 
+ let iter_offset = MAGIC.len() + 1; + ct[iter_offset..iter_offset + ITER_LEN] + .copy_from_slice(&(MAX_PBKDF2_ITERATIONS + 1).to_be_bytes()); + let err = codec.open("k", &ct).unwrap_err(); + assert!(matches!(err, DomainError::ValidationError(ref m) if m.contains("high"))); + } + + #[test] + fn test_seal_rejects_empty_passphrase() { + let codec = AesGcmPbkdf2Codec::new(); + let err = codec.seal("", b"data").unwrap_err(); + assert!(matches!(err, DomainError::ValidationError(_))); + } +} diff --git a/src-tauri/src/adapters/driven/crypto/mod.rs b/src-tauri/src/adapters/driven/crypto/mod.rs new file mode 100644 index 00000000..c6f025ca --- /dev/null +++ b/src-tauri/src/adapters/driven/crypto/mod.rs @@ -0,0 +1,9 @@ +//! Cryptographic adapters. +//! +//! Implements domain-level cryptographic ports (passphrase-keyed +//! authenticated encryption today, more as the import / export and +//! plugin-signing surfaces grow). + +mod aes_gcm_codec; + +pub use aes_gcm_codec::AesGcmPbkdf2Codec; diff --git a/src-tauri/src/adapters/driven/event/tauri_bridge.rs b/src-tauri/src/adapters/driven/event/tauri_bridge.rs index 4b5a71fb..b943f913 100644 --- a/src-tauri/src/adapters/driven/event/tauri_bridge.rs +++ b/src-tauri/src/adapters/driven/event/tauri_bridge.rs @@ -61,6 +61,13 @@ fn event_name(event: &DomainEvent) -> &'static str { DomainEvent::DownloadPrioritySet { .. } => "download-priority-set", DomainEvent::QueueReordered { .. } => "queue-reordered", DomainEvent::DownloadDirectoryChanged { .. } => "download-directory-changed", + DomainEvent::AccountAdded { .. } => "account-added", + DomainEvent::AccountUpdated { .. } => "account-updated", + DomainEvent::AccountDeleted { .. } => "account-deleted", + DomainEvent::AccountValidated { .. } => "account-validated", + DomainEvent::AccountValidationFailed { .. } => "account-validation-failed", + DomainEvent::AccountsImported { .. } => "accounts-imported", + DomainEvent::AccountsExported { .. 
} => "accounts-exported", } } @@ -164,6 +171,31 @@ fn event_payload(event: &DomainEvent) -> serde_json::Value { } => { json!({ "id": id.0, "newDestinationPath": new_destination_path }) } + DomainEvent::AccountAdded { id, service_name } => { + json!({ "id": id.as_str(), "serviceName": service_name }) + } + DomainEvent::AccountUpdated { id } => json!({ "id": id.as_str() }), + DomainEvent::AccountDeleted { id } => json!({ "id": id.as_str() }), + DomainEvent::AccountValidated { + id, + latency_ms, + traffic_left, + traffic_total, + valid_until, + } => { + json!({ + "id": id.as_str(), + "latencyMs": latency_ms, + "trafficLeft": traffic_left, + "trafficTotal": traffic_total, + "validUntil": valid_until, + }) + } + DomainEvent::AccountValidationFailed { id, error } => { + json!({ "id": id.as_str(), "error": error }) + } + DomainEvent::AccountsImported { count } => json!({ "count": count }), + DomainEvent::AccountsExported { count } => json!({ "count": count }), } } diff --git a/src-tauri/src/adapters/driven/logging/download_log_bridge.rs b/src-tauri/src/adapters/driven/logging/download_log_bridge.rs index 73939836..665bcc34 100644 --- a/src-tauri/src/adapters/driven/logging/download_log_bridge.rs +++ b/src-tauri/src/adapters/driven/logging/download_log_bridge.rs @@ -130,7 +130,14 @@ fn record_download_event(store: &DownloadLogStore, event: &DomainEvent) { | DomainEvent::PluginUnloaded { .. } | DomainEvent::PackageCreated { .. } | DomainEvent::ClipboardUrlDetected { .. } - | DomainEvent::SettingsUpdated => {} + | DomainEvent::SettingsUpdated + | DomainEvent::AccountAdded { .. } + | DomainEvent::AccountUpdated { .. } + | DomainEvent::AccountDeleted { .. } + | DomainEvent::AccountValidated { .. } + | DomainEvent::AccountValidationFailed { .. } + | DomainEvent::AccountsImported { .. } + | DomainEvent::AccountsExported { .. 
} => {} } } diff --git a/src-tauri/src/adapters/driven/mod.rs b/src-tauri/src/adapters/driven/mod.rs index 0bb4ee85..236eaaea 100644 --- a/src-tauri/src/adapters/driven/mod.rs +++ b/src-tauri/src/adapters/driven/mod.rs @@ -3,6 +3,7 @@ pub mod clipboard; pub mod config; pub mod credential; +pub mod crypto; pub mod event; pub mod extractor; pub mod filesystem; diff --git a/src-tauri/src/application/command_bus.rs b/src-tauri/src/application/command_bus.rs index 2c9a96d8..02a7708c 100644 --- a/src-tauri/src/application/command_bus.rs +++ b/src-tauri/src/application/command_bus.rs @@ -6,9 +6,10 @@ use std::sync::Arc; use crate::domain::ports::driven::{ - ArchiveExtractor, ChecksumComputer, ClipboardObserver, ConfigStore, CredentialStore, - DownloadEngine, DownloadRepository, EventBus, FileOpener, FileStorage, HistoryRepository, - HttpClient, PluginConfigStore, PluginLoader, PluginStoreClient, UrlOpener, + AccountCredentialStore, AccountRepository, AccountValidator, ArchiveExtractor, + ChecksumComputer, ClipboardObserver, ConfigStore, CredentialStore, DownloadEngine, + DownloadRepository, EventBus, FileOpener, FileStorage, HistoryRepository, HttpClient, + PassphraseCodec, PluginConfigStore, PluginLoader, PluginStoreClient, UrlOpener, }; /// Central dispatcher for CQRS commands. @@ -32,6 +33,10 @@ pub struct CommandBus { file_opener: Option>, url_opener: Option>, plugin_config_store: Option>, + account_repo: Option>, + account_credential_store: Option>, + account_validator: Option>, + passphrase_codec: Option>, /// Serializes queue-position allocation across handlers. 
Without this, /// two concurrent move-to-top/move-to-bottom/start-download calls can /// observe the same min/max and write colliding `queue_position` @@ -72,10 +77,58 @@ impl CommandBus { file_opener: None, url_opener: None, plugin_config_store: None, + account_repo: None, + account_credential_store: None, + account_validator: None, + passphrase_codec: None, queue_position_lock: tokio::sync::Mutex::new(()), } } + /// Builder-style setter for the account repository. Optional so + /// existing fixtures that never invoke account commands don't have + /// to provide a mock. + pub fn with_account_repo(mut self, repo: Arc) -> Self { + self.account_repo = Some(repo); + self + } + + /// Builder-style setter for the per-account keyring wrapper. + pub fn with_account_credential_store(mut self, store: Arc) -> Self { + self.account_credential_store = Some(store); + self + } + + /// Builder-style setter for the account-validation port (delegates + /// to the matching hoster / debrid plugin). + pub fn with_account_validator(mut self, validator: Arc) -> Self { + self.account_validator = Some(validator); + self + } + + /// Builder-style setter for the passphrase codec used by the + /// import / export commands. + pub fn with_passphrase_codec(mut self, codec: Arc) -> Self { + self.passphrase_codec = Some(codec); + self + } + + pub fn account_repo(&self) -> Option<&dyn AccountRepository> { + self.account_repo.as_deref() + } + + pub fn account_credential_store(&self) -> Option<&dyn AccountCredentialStore> { + self.account_credential_store.as_deref() + } + + pub fn account_validator(&self) -> Option<&dyn AccountValidator> { + self.account_validator.as_deref() + } + + pub fn passphrase_codec(&self) -> Option<&dyn PassphraseCodec> { + self.passphrase_codec.as_deref() + } + /// Builder-style setter for the plugin configuration persistence port. /// Optional so existing test fixtures don't have to construct one when /// they don't exercise the plugin-config commands. 
diff --git a/src-tauri/src/application/commands/add_account.rs b/src-tauri/src/application/commands/add_account.rs new file mode 100644 index 00000000..3633bce5 --- /dev/null +++ b/src-tauri/src/application/commands/add_account.rs @@ -0,0 +1,247 @@ +//! Handler for [`AddAccountCommand`](super::AddAccountCommand). +//! +//! Generates a fresh [`AccountId`] (UUID v4), persists the metadata +//! through [`AccountRepository`], stores the password through +//! [`AccountCredentialStore`], and emits +//! [`DomainEvent::AccountAdded`] on success. +//! +//! Inputs are trimmed and validated before any I/O so a bad payload +//! never reaches the keyring or SQLite. + +use uuid::Uuid; + +use crate::application::command_bus::CommandBus; +use crate::application::error::AppError; +use crate::domain::event::DomainEvent; +use crate::domain::model::account::{Account, AccountId}; + +impl CommandBus { + pub async fn handle_add_account( + &self, + cmd: super::AddAccountCommand, + ) -> Result { + let service_name = trim_required(&cmd.service_name, "service_name")?; + let username = trim_required(&cmd.username, "username")?; + if cmd.password.is_empty() { + return Err(AppError::Validation("password must not be empty".into())); + } + + let repo = self + .account_repo() + .ok_or_else(|| AppError::Validation("account repository not configured".into()))?; + let store = self.account_credential_store().ok_or_else(|| { + AppError::Validation("account credential store not configured".into()) + })?; + + let id = AccountId::new(Uuid::new_v4().to_string()); + let account = Account::new( + id.clone(), + service_name, + username, + cmd.account_type, + cmd.created_at_ms, + ); + + // Persist the metadata first; the keyring write only matters + // when the row exists. If the keyring step fails we roll back + // by deleting the row so we never end up with a metadata-only + // account whose password is missing. 
+ // + // The `AccountCredentialStore` contract does not promise "no + // side effects on `Err`" — a backend that partially writes the + // secret before failing would leak a stale credential in the + // OS keyring even though account creation reports failure. We + // best-effort `delete_password` after the row rollback to keep + // retries deterministic. + repo.save(&account)?; + if let Err(e) = store.store_password(&id, &cmd.password) { + if let Err(rollback_err) = repo.delete(&id) { + tracing::warn!( + account_id = %id.as_str(), + keyring_error = %e, + rollback_error = %rollback_err, + "keyring write failed and account row rollback also failed; metadata is orphaned" + ); + } + if let Err(cleanup_err) = store.delete_password(&id) { + tracing::warn!( + account_id = %id.as_str(), + keyring_error = %e, + cleanup_error = %cleanup_err, + "keyring write failed and the orphan-secret cleanup also failed; stale credential may linger" + ); + } + return Err(e.into()); + } + + self.event_bus().publish(DomainEvent::AccountAdded { + id: id.clone(), + service_name: account.service_name().to_string(), + }); + + Ok(id) + } +} + +fn trim_required(value: &str, field: &str) -> Result { + let trimmed = value.trim(); + if trimmed.is_empty() { + return Err(AppError::Validation(format!("{field} must not be empty"))); + } + Ok(trimmed.to_string()) +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use super::super::AddAccountCommand; + use crate::application::commands::tests_support::{ + CapturingEventBus, FakeAccountCredentialStore, InMemoryAccountRepo, build_account_bus, + }; + use crate::application::error::AppError; + use crate::domain::error::DomainError; + use crate::domain::event::DomainEvent; + use crate::domain::model::account::AccountType; + use crate::domain::ports::driven::{AccountCredentialStore, AccountRepository}; + + fn add_command(service: &str, user: &str, password: &str) -> AddAccountCommand { + AddAccountCommand { + service_name: service.into(), + username: 
user.into(), + password: password.into(), + account_type: AccountType::Premium, + created_at_ms: 1_700_000_000_000, + } + } + + #[tokio::test] + async fn test_add_account_persists_account_and_password() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds.clone(), events.clone(), None, None); + + let id = bus + .handle_add_account(add_command("real-debrid", "alice", "s3cret")) + .await + .expect("add ok"); + + let stored = repo.find_by_id(&id).unwrap().expect("present"); + assert_eq!(stored.service_name(), "real-debrid"); + assert_eq!(stored.username(), "alice"); + assert_eq!(stored.account_type(), AccountType::Premium); + assert_eq!(stored.created_at(), 1_700_000_000_000); + + assert_eq!( + creds.get_password(&id).unwrap().as_deref(), + Some("s3cret"), + "password must land in the keyring under the new account id" + ); + + let events = events.snapshot(); + assert_eq!(events.len(), 1); + match &events[0] { + DomainEvent::AccountAdded { + id: ev_id, + service_name, + } => { + assert_eq!(ev_id, &id); + assert_eq!(service_name, "real-debrid"); + } + other => panic!("unexpected event: {other:?}"), + } + } + + #[tokio::test] + async fn test_add_account_blank_service_returns_validation() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds.clone(), events, None, None); + + let err = bus + .handle_add_account(add_command(" ", "alice", "pw")) + .await + .expect_err("blank service rejected"); + assert!(matches!(err, AppError::Validation(_))); + assert!(repo.list().unwrap().is_empty()); + } + + #[tokio::test] + async fn test_add_account_empty_password_rejected() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = 
Arc::new(FakeAccountCredentialStore::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds.clone(), events, None, None); + + let err = bus + .handle_add_account(add_command("real-debrid", "alice", "")) + .await + .expect_err("empty password rejected"); + assert!(matches!(err, AppError::Validation(_))); + assert!(repo.list().unwrap().is_empty()); + } + + #[tokio::test] + async fn test_add_account_duplicate_returns_already_exists_and_no_keyring_leak() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds.clone(), events, None, None); + + bus.handle_add_account(add_command("real-debrid", "alice", "pw1")) + .await + .expect("first ok"); + + let err = bus + .handle_add_account(add_command("real-debrid", "alice", "pw2")) + .await + .expect_err("duplicate must fail"); + + assert!( + matches!(err, AppError::Domain(DomainError::AlreadyExists(_))), + "unexpected error: {err:?}" + ); + assert_eq!(creds.entry_count(), 1, "second password must not be stored"); + } + + #[tokio::test] + async fn test_add_account_rolls_back_when_keyring_fails() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new().failing_on_write()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds.clone(), events.clone(), None, None); + + let err = bus + .handle_add_account(add_command("real-debrid", "alice", "pw")) + .await + .expect_err("keyring failure surfaces"); + assert!(matches!( + err, + AppError::Domain(DomainError::StorageError(_)) + )); + + assert!( + repo.list().unwrap().is_empty(), + "row must be rolled back when keyring write fails" + ); + assert!(events.snapshot().is_empty(), "no event on failure"); + } + + #[tokio::test] + async fn 
test_add_account_emits_no_event_when_repo_missing() { + let creds = Arc::new(FakeAccountCredentialStore::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = + crate::application::commands::tests_support::bus_without_account_ports(events.clone()); + + let err = bus + .handle_add_account(add_command("real-debrid", "alice", "pw")) + .await + .expect_err("repo missing"); + assert!(matches!(err, AppError::Validation(_))); + assert!(events.snapshot().is_empty()); + let _unused = creds; // keep clippy happy without a binding mismatch + } +} diff --git a/src-tauri/src/application/commands/delete_account.rs b/src-tauri/src/application/commands/delete_account.rs new file mode 100644 index 00000000..7230bea4 --- /dev/null +++ b/src-tauri/src/application/commands/delete_account.rs @@ -0,0 +1,105 @@ +//! Handler for [`DeleteAccountCommand`](super::DeleteAccountCommand). +//! +//! Idempotent: succeeds without errors when neither the SQLite row nor +//! the keyring entry exists. Always emits +//! [`DomainEvent::AccountDeleted`] so the queue manager and read-model +//! caches can drop any state keyed by the id. +//! +//! The SQLite row is the canonical source of truth for "account +//! exists". We delete it first; from the user's perspective the account +//! is gone the moment that succeeds. The keyring secret is best-effort +//! cleanup — if the OS keyring rejects the delete (locked keychain, +//! permission denied) we log the orphan and still emit the deletion +//! event so the rest of the system drops its state. 
+ +use crate::application::command_bus::CommandBus; +use crate::application::error::AppError; +use crate::domain::event::DomainEvent; + +impl CommandBus { + pub async fn handle_delete_account( + &self, + cmd: super::DeleteAccountCommand, + ) -> Result<(), AppError> { + let repo = self + .account_repo() + .ok_or_else(|| AppError::Validation("account repository not configured".into()))?; + let store = self.account_credential_store().ok_or_else(|| { + AppError::Validation("account credential store not configured".into()) + })?; + + repo.delete(&cmd.id)?; + if let Err(e) = store.delete_password(&cmd.id) { + tracing::warn!( + account_id = %cmd.id.as_str(), + error = %e, + "failed to delete keyring password for deleted account; orphan secret may remain" + ); + } + + self.event_bus() + .publish(DomainEvent::AccountDeleted { id: cmd.id }); + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use super::super::{AddAccountCommand, DeleteAccountCommand}; + use crate::application::commands::tests_support::{ + CapturingEventBus, FakeAccountCredentialStore, InMemoryAccountRepo, build_account_bus, + }; + use crate::domain::event::DomainEvent; + use crate::domain::model::account::{AccountId, AccountType}; + use crate::domain::ports::driven::{AccountCredentialStore, AccountRepository}; + + fn add_command() -> AddAccountCommand { + AddAccountCommand { + service_name: "real-debrid".into(), + username: "alice".into(), + password: "pw".into(), + account_type: AccountType::Premium, + created_at_ms: 1_700_000_000_000, + } + } + + #[tokio::test] + async fn test_delete_account_removes_repo_entry_and_keyring_password() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds.clone(), events.clone(), None, None); + let id = bus.handle_add_account(add_command()).await.unwrap(); + + 
bus.handle_delete_account(DeleteAccountCommand { id: id.clone() }) + .await + .expect("delete ok"); + + assert!(repo.find_by_id(&id).unwrap().is_none()); + assert!(creds.get_password(&id).unwrap().is_none()); + assert!( + events + .snapshot() + .iter() + .any(|e| matches!(e, DomainEvent::AccountDeleted { id: x } if x == &id)) + ); + } + + #[tokio::test] + async fn test_delete_account_unknown_id_is_idempotent() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo, creds, events.clone(), None, None); + + bus.handle_delete_account(DeleteAccountCommand { + id: AccountId::new("ghost"), + }) + .await + .expect("idempotent delete"); + + assert_eq!(events.snapshot().len(), 1, "still emits AccountDeleted"); + } +} diff --git a/src-tauri/src/application/commands/export_accounts.rs b/src-tauri/src/application/commands/export_accounts.rs new file mode 100644 index 00000000..87361a57 --- /dev/null +++ b/src-tauri/src/application/commands/export_accounts.rs @@ -0,0 +1,328 @@ +//! Handler for [`ExportAccountsCommand`](super::ExportAccountsCommand). +//! +//! Serializes every persisted account (metadata + plaintext password +//! pulled from the keyring) into a JSON bundle, encrypts it via the +//! configured [`PassphraseCodec`], and writes the resulting opaque +//! blob to disk. +//! +//! The on-disk format is a single binary file. Plaintext passwords +//! never touch the filesystem outside the encrypted bundle. + +use serde::{Deserialize, Serialize}; + +use super::ExportAccountsOutcome; +use crate::application::command_bus::CommandBus; +use crate::application::error::AppError; +use crate::domain::event::DomainEvent; +use crate::domain::model::account::{Account, AccountType}; + +/// Bundle format version. Incremented when the on-disk layout changes +/// in a backward-incompatible way (e.g. extra mandatory fields). 
+pub(crate) const EXPORT_VERSION: u32 = 1; + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +pub(crate) struct ExportEnvelope { + pub(crate) version: u32, + pub(crate) accounts: Vec, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +pub(crate) struct ExportEntry { + pub(crate) service_name: String, + pub(crate) username: String, + pub(crate) password: String, + pub(crate) account_type: String, + pub(crate) enabled: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) traffic_left: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) traffic_total: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) valid_until: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) last_validated: Option, + /// Original `created_at` of the source account so a round-trip + /// preserves chronology. Optional for backward compatibility with + /// bundles produced by earlier versions of this code. + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) created_at: Option, +} + +impl ExportEntry { + pub(crate) fn from_account(account: &Account, password: String) -> Self { + Self { + service_name: account.service_name().to_string(), + username: account.username().to_string(), + password, + account_type: account.account_type().to_string(), + enabled: account.is_enabled(), + traffic_left: account.traffic_left(), + traffic_total: account.traffic_total(), + valid_until: account.valid_until(), + last_validated: account.last_validated(), + created_at: Some(account.created_at()), + } + } + + pub(crate) fn parse_account_type(&self) -> Result { + self.account_type + .parse::() + .map_err(AppError::Domain) + } +} + +impl CommandBus { + pub async fn handle_export_accounts( + &self, + cmd: super::ExportAccountsCommand, + ) -> Result { + if cmd.passphrase.is_empty() { + return Err(AppError::Validation("passphrase must not be empty".into())); + } + let repo = self + .account_repo() + 
.ok_or_else(|| AppError::Validation("account repository not configured".into()))?; + let store = self.account_credential_store().ok_or_else(|| { + AppError::Validation("account credential store not configured".into()) + })?; + let codec = self + .passphrase_codec() + .ok_or_else(|| AppError::Validation("passphrase codec not configured".into()))?; + + let accounts = repo.list()?; + let mut entries = Vec::with_capacity(accounts.len()); + for account in &accounts { + let password = store.get_password(account.id())?.ok_or_else(|| { + AppError::Storage(format!( + "no stored password for account {}", + account.id().as_str() + )) + })?; + entries.push(ExportEntry::from_account(account, password)); + } + + let envelope = ExportEnvelope { + version: EXPORT_VERSION, + accounts: entries, + }; + let plaintext = serde_json::to_vec(&envelope) + .map_err(|e| AppError::Storage(format!("serialise export: {e}")))?; + let ciphertext = codec.seal(&cmd.passphrase, &plaintext)?; + + let path = cmd.path.clone(); + let bytes = ciphertext; + // Write the bundle to a sibling temp file, fsync the data, and + // `rename` it into place so a mid-flight write/truncate failure + // can never corrupt an existing valid bundle. The temp file + // lives next to the destination so the rename stays on the + // same filesystem. + // + // - Without `sync_all` the bytes might still be in the page + // cache when the rename returns, so a system crash right + // after the call could leave the destination pointing at a + // truncated file. + // - `std::fs::rename` is an atomic replace on POSIX and on + // Windows ≥ Rust 1.81 (which uses `MOVEFILE_REPLACE_EXISTING`), + // so a single call covers the "destination already holds a + // previous bundle" case without an unsafe two-step + // remove-then-rename fallback that could leave the user + // with no bundle at all if the second step fails. + // - On any failure inside the closure we delete the temp file + // so `*.vortexacc-tmp` never leaks on disk. 
+ tokio::task::spawn_blocking(move || -> std::io::Result<()> { + use std::io::Write; + let tmp_path = path.with_extension("vortexacc-tmp"); + let outcome = (|| -> std::io::Result<()> { + { + let mut f = std::fs::File::create(&tmp_path)?; + f.write_all(&bytes)?; + f.sync_all()?; + } + std::fs::rename(&tmp_path, &path) + })(); + if outcome.is_err() { + let _ = std::fs::remove_file(&tmp_path); + } + outcome + }) + .await + .map_err(|e| AppError::Storage(format!("export write task failed: {e}")))? + .map_err(|e| AppError::Storage(format!("export write failed: {e}")))?; + + let count = accounts.len() as u32; + self.event_bus() + .publish(DomainEvent::AccountsExported { count }); + + Ok(ExportAccountsOutcome { + path: cmd.path, + count, + }) + } +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use tempfile::TempDir; + + use super::super::{AddAccountCommand, ExportAccountsCommand}; + use super::ExportEnvelope; + use crate::application::commands::tests_support::{ + CapturingEventBus, FakeAccountCredentialStore, FakePassphraseCodec, InMemoryAccountRepo, + build_account_bus, + }; + use crate::application::error::AppError; + use crate::domain::event::DomainEvent; + use crate::domain::model::account::AccountType; + use crate::domain::ports::driven::{AccountCredentialStore, PassphraseCodec}; + + fn add_command(service: &str, user: &str, pw: &str) -> AddAccountCommand { + AddAccountCommand { + service_name: service.into(), + username: user.into(), + password: pw.into(), + account_type: AccountType::Premium, + created_at_ms: 1_700_000_000_000, + } + } + + #[tokio::test] + async fn test_export_accounts_writes_encrypted_bundle_with_all_entries() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let codec: Arc = Arc::new(FakePassphraseCodec); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus( + repo.clone(), + creds, + events.clone(), + None, + Some(codec.clone()), + ); + + 
bus.handle_add_account(add_command("real-debrid", "alice", "rd-pw")) + .await + .unwrap(); + bus.handle_add_account(add_command("alldebrid", "bob", "ad-pw")) + .await + .unwrap(); + + let dir = TempDir::new().unwrap(); + let path = dir.path().join("accounts.vortex.bin"); + + let outcome = bus + .handle_export_accounts(ExportAccountsCommand { + path: path.clone(), + passphrase: "secret-pass".into(), + }) + .await + .expect("export ok"); + assert_eq!(outcome.count, 2); + assert_eq!(outcome.path, path); + + let bytes = std::fs::read(&path).expect("file present"); + let decrypted = codec.open("secret-pass", &bytes).expect("decode"); + let envelope: ExportEnvelope = serde_json::from_slice(&decrypted).unwrap(); + assert_eq!(envelope.accounts.len(), 2); + let services: Vec<&str> = envelope + .accounts + .iter() + .map(|e| e.service_name.as_str()) + .collect(); + assert!(services.contains(&"real-debrid")); + assert!(services.contains(&"alldebrid")); + + // Wrong passphrase must fail at the codec layer — the bundle is + // not readable without the original key. 
+ let wrong = codec.open("not-the-pass", &bytes); + assert!(wrong.is_err()); + + assert!( + events + .snapshot() + .iter() + .any(|e| matches!(e, DomainEvent::AccountsExported { count: 2 })) + ); + } + + #[tokio::test] + async fn test_export_accounts_empty_passphrase_rejected() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let codec: Arc = Arc::new(FakePassphraseCodec); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo, creds, events, None, Some(codec)); + + let err = bus + .handle_export_accounts(ExportAccountsCommand { + path: std::env::temp_dir().join("vortex-export.bin"), + passphrase: "".into(), + }) + .await + .expect_err("empty pass"); + assert!(matches!(err, AppError::Validation(_))); + } + + #[tokio::test] + async fn test_export_accounts_overwrites_existing_destination() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let codec: Arc = Arc::new(FakePassphraseCodec); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo, creds, events, None, Some(codec)); + bus.handle_add_account(add_command("real-debrid", "alice", "pw")) + .await + .unwrap(); + + let dir = TempDir::new().unwrap(); + let bundle = dir.path().join("accounts.bin"); + std::fs::write(&bundle, b"stale-bundle-content").unwrap(); + let original_size = std::fs::metadata(&bundle).unwrap().len(); + + bus.handle_export_accounts(ExportAccountsCommand { + path: bundle.clone(), + passphrase: "k".into(), + }) + .await + .expect("export must overwrite the existing file, not fail"); + + let new_size = std::fs::metadata(&bundle).unwrap().len(); + assert_ne!( + new_size, original_size, + "destination must hold the freshly written bundle" + ); + assert!( + !dir.path().join("accounts.vortexacc-tmp").exists(), + "temp file must not leak after a successful export" + ); + } + + #[tokio::test] + async fn 
test_export_accounts_missing_password_aborts_with_storage_error() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let codec: Arc = Arc::new(FakePassphraseCodec); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds.clone(), events, None, Some(codec)); + + // Persist one account, then evict its keyring password. + let id = bus + .handle_add_account(add_command("real-debrid", "alice", "pw")) + .await + .unwrap(); + creds.delete_password(&id).unwrap(); + + let dir = TempDir::new().unwrap(); + let err = bus + .handle_export_accounts(ExportAccountsCommand { + path: dir.path().join("accounts.bin"), + passphrase: "any".into(), + }) + .await + .expect_err("password missing"); + assert!(matches!(err, AppError::Storage(_))); + // The bundle file must NOT exist when export fails mid-flight. + assert!(!dir.path().join("accounts.bin").exists()); + } +} diff --git a/src-tauri/src/application/commands/import_accounts.rs b/src-tauri/src/application/commands/import_accounts.rs new file mode 100644 index 00000000..2cd5250c --- /dev/null +++ b/src-tauri/src/application/commands/import_accounts.rs @@ -0,0 +1,636 @@ +//! Handler for [`ImportAccountsCommand`](super::ImportAccountsCommand). +//! +//! Reads the bundle previously written by +//! [`ExportAccountsCommand`](super::ExportAccountsCommand), decrypts +//! it with the user-supplied passphrase, validates every entry, +//! and persists each one — both the SQLite row and the keyring +//! password — in a single best-effort batch. +//! +//! A wrong passphrase or any payload-level corruption aborts before +//! any row or keyring entry is written, so the keyring never ends up +//! holding orphaned credentials. 
+ +use std::collections::HashSet; + +use uuid::Uuid; + +use super::ImportAccountsOutcome; +use super::export_accounts::{EXPORT_VERSION, ExportEnvelope}; +use crate::application::command_bus::CommandBus; +use crate::application::error::AppError; +use crate::domain::event::DomainEvent; +use crate::domain::model::account::{Account, AccountId}; +use crate::domain::ports::driven::{AccountCredentialStore, AccountRepository}; + +impl CommandBus { + pub async fn handle_import_accounts( + &self, + cmd: super::ImportAccountsCommand, + ) -> Result { + if cmd.passphrase.is_empty() { + return Err(AppError::Validation("passphrase must not be empty".into())); + } + let repo = self + .account_repo() + .ok_or_else(|| AppError::Validation("account repository not configured".into()))?; + let store = self.account_credential_store().ok_or_else(|| { + AppError::Validation("account credential store not configured".into()) + })?; + let codec = self + .passphrase_codec() + .ok_or_else(|| AppError::Validation("passphrase codec not configured".into()))?; + + let path = cmd.path.clone(); + let bytes = tokio::task::spawn_blocking(move || std::fs::read(&path)) + .await + .map_err(|e| AppError::Storage(format!("import read task failed: {e}")))? + .map_err(|e| AppError::Storage(format!("import read failed: {e}")))?; + + let plaintext = codec.open(&cmd.passphrase, &bytes)?; + let envelope: ExportEnvelope = serde_json::from_slice(&plaintext) + .map_err(|e| AppError::Validation(format!("export bundle is not valid JSON: {e}")))?; + if envelope.version != EXPORT_VERSION { + return Err(AppError::Validation(format!( + "unsupported export version: {} (expected {})", + envelope.version, EXPORT_VERSION + ))); + } + + // Validate every entry up-front so a malformed row aborts + // before any side effects. Trim service / username so duplicate + // detection matches the same normalisation used when accounts + // are added through `add_account`. 
+ let mut prepared = Vec::with_capacity(envelope.accounts.len()); + for entry in &envelope.accounts { + let service_name = entry.service_name.trim().to_string(); + let username = entry.username.trim().to_string(); + if service_name.is_empty() { + return Err(AppError::Validation( + "import bundle has an account with empty service_name".into(), + )); + } + if username.is_empty() { + return Err(AppError::Validation( + "import bundle has an account with empty username".into(), + )); + } + if entry.password.is_empty() { + return Err(AppError::Validation( + "import bundle has an account with empty password".into(), + )); + } + let kind = entry.parse_account_type()?; + prepared.push((service_name, username, entry, kind)); + } + + // Seed the dedup set with every `(service, username)` pair + // already in the repo so the first import iteration doesn't + // touch them, and grow the set as we insert each new entry so + // duplicates **inside the bundle itself** are also skipped. + let mut seen: HashSet<(String, String)> = repo + .list()? + .into_iter() + .map(|a| (a.service_name().to_string(), a.username().to_string())) + .collect(); + // Track every entry we successfully persist so a later failure + // can roll the whole batch back. The reviewer specifically + // flagged that returning mid-loop after a `repo.save` or + // keyring failure left earlier accounts persisted, which made + // retries non-deterministic (later attempts saw the partial + // entries as duplicates). + let mut imported_ids: Vec = Vec::new(); + let mut skipped = 0u32; + + for (service_name, username, entry, kind) in prepared { + if !seen.insert((service_name.clone(), username.clone())) { + skipped += 1; + continue; + } + + let new_id = AccountId::new(Uuid::new_v4().to_string()); + // Preserve the original `created_at` when the bundle + // carries one so an export → import round-trip keeps the + // chronology. 
Bundles produced by earlier versions omit + // the field; fall back to `cmd.now_ms` in that case. + let created_at = entry.created_at.unwrap_or(cmd.now_ms); + let mut account = + Account::new(new_id.clone(), service_name, username, kind, created_at); + if !entry.enabled { + account.disable(); + } + if let Some(t) = entry.traffic_left { + account.set_traffic_left(t); + } + if let Some(t) = entry.traffic_total { + account.set_traffic_total(t); + } + if let Some(v) = entry.valid_until { + account.set_valid_until(v); + } + if let Some(v) = entry.last_validated { + account.set_last_validated(v); + } + + if let Err(e) = repo.save(&account) { + rollback_imports(repo, store, &imported_ids); + return Err(e.into()); + } + // Track the id BEFORE attempting the keyring write so a + // backend that partially writes the secret before failing + // is still cleaned up by `rollback_imports`. The trait + // contract for `store_password` does not promise "no side + // effects on `Err`", so we treat any failed write as + // potentially having left a stale entry behind. + imported_ids.push(new_id.clone()); + if let Err(e) = store.store_password(&new_id, &entry.password) { + rollback_imports(repo, store, &imported_ids); + return Err(e.into()); + } + } + + let imported = imported_ids.len() as u32; + self.event_bus() + .publish(DomainEvent::AccountsImported { count: imported }); + + Ok(ImportAccountsOutcome { + path: cmd.path, + imported, + skipped_duplicates: skipped, + }) + } +} + +/// Best-effort rollback of every account already imported in the +/// current batch. Failures are logged but never propagated — the +/// caller is already in an error path and we don't want a logging +/// failure to mask the real cause. 
+fn rollback_imports( + repo: &dyn AccountRepository, + store: &dyn AccountCredentialStore, + ids: &[AccountId], +) { + for id in ids { + if let Err(e) = repo.delete(id) { + tracing::warn!( + account_id = %id.as_str(), + error = %e, + "failed to roll back imported account row after later import failure" + ); + } + if let Err(e) = store.delete_password(id) { + tracing::warn!( + account_id = %id.as_str(), + error = %e, + "failed to roll back imported keyring entry after later import failure" + ); + } + } +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use tempfile::TempDir; + + use super::super::{AddAccountCommand, ExportAccountsCommand, ImportAccountsCommand}; + use crate::application::commands::tests_support::{ + CapturingEventBus, FakeAccountCredentialStore, FakePassphraseCodec, InMemoryAccountRepo, + build_account_bus, + }; + use crate::application::error::AppError; + use crate::domain::event::DomainEvent; + use crate::domain::model::account::AccountType; + use crate::domain::ports::driven::{ + AccountCredentialStore, AccountRepository, PassphraseCodec, + }; + + fn add_command(service: &str, user: &str, pw: &str) -> AddAccountCommand { + AddAccountCommand { + service_name: service.into(), + username: user.into(), + password: pw.into(), + account_type: AccountType::Premium, + created_at_ms: 1_700_000_000_000, + } + } + + #[tokio::test] + async fn test_export_then_import_roundtrip_restores_every_account() { + let dir = TempDir::new().unwrap(); + let bundle = dir.path().join("export.bin"); + + // ── Source bus produces the bundle ── + let src_repo = Arc::new(InMemoryAccountRepo::new()); + let src_creds = Arc::new(FakeAccountCredentialStore::new()); + let codec: Arc = Arc::new(FakePassphraseCodec); + let src_events = Arc::new(CapturingEventBus::new()); + let src = build_account_bus( + src_repo.clone(), + src_creds.clone(), + src_events, + None, + Some(codec.clone()), + ); + src.handle_add_account(add_command("real-debrid", "alice", "rd-pw")) + .await + 
.unwrap(); + src.handle_add_account(add_command("alldebrid", "bob", "ad-pw")) + .await + .unwrap(); + src.handle_export_accounts(ExportAccountsCommand { + path: bundle.clone(), + passphrase: "unit-test-passphrase".into(), + }) + .await + .unwrap(); + + // ── Target bus imports the bundle ── + let dst_repo = Arc::new(InMemoryAccountRepo::new()); + let dst_creds = Arc::new(FakeAccountCredentialStore::new()); + let dst_events = Arc::new(CapturingEventBus::new()); + let dst = build_account_bus( + dst_repo.clone(), + dst_creds.clone(), + dst_events.clone(), + None, + Some(codec), + ); + + let outcome = dst + .handle_import_accounts(ImportAccountsCommand { + path: bundle, + passphrase: "unit-test-passphrase".into(), + now_ms: 2_000_000_000_000, + }) + .await + .expect("import ok"); + assert_eq!(outcome.imported, 2); + assert_eq!(outcome.skipped_duplicates, 0); + + let imported = dst_repo.list().unwrap(); + assert_eq!(imported.len(), 2); + let mut services: Vec<&str> = imported.iter().map(|a| a.service_name()).collect(); + services.sort(); + assert_eq!(services, vec!["alldebrid", "real-debrid"]); + + // Each imported account has its password landed in the keyring. + for acc in &imported { + let pw = dst_creds.get_password(acc.id()).unwrap(); + assert!(pw.is_some(), "password missing for {}", acc.id().as_str()); + } + + // The export bundle carries the source `created_at`, so the + // importer must preserve it instead of stamping `cmd.now_ms` + // (`2_000_000_000_000`) on every restored row. 
+ for acc in &imported { + assert_eq!( + acc.created_at(), + 1_700_000_000_000, + "created_at must round-trip; source was 1.7e12, now_ms was 2e12" + ); + } + + assert!( + dst_events + .snapshot() + .iter() + .any(|e| matches!(e, DomainEvent::AccountsImported { count: 2 })) + ); + } + + #[tokio::test] + async fn test_import_accounts_wrong_passphrase_fails_without_partial_insert() { + let dir = TempDir::new().unwrap(); + let bundle = dir.path().join("export.bin"); + + // Build a bundle with the correct passphrase. + let src_repo = Arc::new(InMemoryAccountRepo::new()); + let src_creds = Arc::new(FakeAccountCredentialStore::new()); + let codec: Arc = Arc::new(FakePassphraseCodec); + let events = Arc::new(CapturingEventBus::new()); + let src = build_account_bus( + src_repo, + src_creds, + events.clone(), + None, + Some(codec.clone()), + ); + src.handle_add_account(add_command("real-debrid", "alice", "pw")) + .await + .unwrap(); + src.handle_export_accounts(ExportAccountsCommand { + path: bundle.clone(), + passphrase: "right-pass".into(), + }) + .await + .unwrap(); + + // Try to import with the wrong passphrase. 
+ let dst_repo = Arc::new(InMemoryAccountRepo::new()); + let dst_creds = Arc::new(FakeAccountCredentialStore::new()); + let dst_events = Arc::new(CapturingEventBus::new()); + let dst = build_account_bus( + dst_repo.clone(), + dst_creds.clone(), + dst_events, + None, + Some(codec), + ); + + let err = dst + .handle_import_accounts(ImportAccountsCommand { + path: bundle, + passphrase: "wrong-pass".into(), + now_ms: 0, + }) + .await + .expect_err("wrong passphrase"); + assert!( + matches!(err, AppError::Domain(_) | AppError::Validation(_)), + "expected crypto-style error, got {err:?}" + ); + + assert!( + dst_repo.list().unwrap().is_empty(), + "no row inserted on wrong passphrase" + ); + assert_eq!(dst_creds.entry_count(), 0, "no keyring write either"); + } + + #[tokio::test] + async fn test_import_accounts_skips_already_present_pairs() { + let dir = TempDir::new().unwrap(); + let bundle = dir.path().join("export.bin"); + + let src_repo = Arc::new(InMemoryAccountRepo::new()); + let src_creds = Arc::new(FakeAccountCredentialStore::new()); + let codec: Arc = Arc::new(FakePassphraseCodec); + let events = Arc::new(CapturingEventBus::new()); + let src = build_account_bus( + src_repo, + src_creds, + events.clone(), + None, + Some(codec.clone()), + ); + src.handle_add_account(add_command("real-debrid", "alice", "pw")) + .await + .unwrap(); + src.handle_export_accounts(ExportAccountsCommand { + path: bundle.clone(), + passphrase: "k".into(), + }) + .await + .unwrap(); + + let dst_repo = Arc::new(InMemoryAccountRepo::new()); + let dst_creds = Arc::new(FakeAccountCredentialStore::new()); + let dst_events = Arc::new(CapturingEventBus::new()); + let dst = build_account_bus( + dst_repo.clone(), + dst_creds.clone(), + dst_events, + None, + Some(codec), + ); + // Pre-existing identical pair. 
+ dst.handle_add_account(add_command("real-debrid", "alice", "different")) + .await + .unwrap(); + + let outcome = dst + .handle_import_accounts(ImportAccountsCommand { + path: bundle, + passphrase: "k".into(), + now_ms: 0, + }) + .await + .expect("import ok"); + assert_eq!(outcome.imported, 0); + assert_eq!(outcome.skipped_duplicates, 1); + assert_eq!(dst_repo.list().unwrap().len(), 1); + } + + #[tokio::test] + async fn test_import_accounts_skips_in_bundle_duplicates() { + use crate::application::commands::export_accounts::{ + EXPORT_VERSION, ExportEntry, ExportEnvelope, + }; + use crate::domain::ports::driven::PassphraseCodec; + + // Hand-craft a bundle that contains two entries with the same + // (service, username) pair so the dedup logic can be exercised + // without going through `add_account`, which would refuse the + // second entry up-front. + let envelope = ExportEnvelope { + version: EXPORT_VERSION, + accounts: vec![ + ExportEntry { + service_name: "real-debrid".into(), + username: "alice".into(), + password: "pw1".into(), + account_type: "premium".into(), + enabled: true, + traffic_left: None, + traffic_total: None, + valid_until: None, + last_validated: None, + created_at: None, + }, + // Duplicate of the first entry — must be skipped. 
+ ExportEntry { + service_name: " real-debrid ".into(), + username: " alice ".into(), + password: "pw2".into(), + account_type: "premium".into(), + enabled: true, + traffic_left: None, + traffic_total: None, + valid_until: None, + last_validated: None, + created_at: None, + }, + ExportEntry { + service_name: "alldebrid".into(), + username: "bob".into(), + password: "pw3".into(), + account_type: "premium".into(), + enabled: true, + traffic_left: None, + traffic_total: None, + valid_until: None, + last_validated: None, + created_at: None, + }, + ], + }; + let plaintext = serde_json::to_vec(&envelope).unwrap(); + let codec = FakePassphraseCodec; + let ciphertext = codec.seal("k", &plaintext).unwrap(); + + let dir = TempDir::new().unwrap(); + let bundle = dir.path().join("dup.bin"); + std::fs::write(&bundle, &ciphertext).unwrap(); + + let dst_repo = Arc::new(InMemoryAccountRepo::new()); + let dst_creds = Arc::new(FakeAccountCredentialStore::new()); + let dst_events = Arc::new(CapturingEventBus::new()); + let dst = build_account_bus( + dst_repo.clone(), + dst_creds, + dst_events, + None, + Some(Arc::new(FakePassphraseCodec) as Arc), + ); + + let outcome = dst + .handle_import_accounts(ImportAccountsCommand { + path: bundle, + passphrase: "k".into(), + now_ms: 0, + }) + .await + .expect("import ok"); + + assert_eq!(outcome.imported, 2, "first occurrence + alldebrid land"); + assert_eq!( + outcome.skipped_duplicates, 1, + "the second real-debrid/alice entry must be skipped" + ); + assert_eq!(dst_repo.list().unwrap().len(), 2); + } + + #[tokio::test] + async fn test_import_accounts_rolls_back_all_entries_on_partial_failure() { + use crate::application::commands::export_accounts::{ + EXPORT_VERSION, ExportEntry, ExportEnvelope, + }; + use crate::domain::ports::driven::PassphraseCodec; + + // Build a bundle with three distinct (service, username) pairs. 
+ let envelope = ExportEnvelope { + version: EXPORT_VERSION, + accounts: vec![ + ExportEntry { + service_name: "real-debrid".into(), + username: "alice".into(), + password: "pw1".into(), + account_type: "premium".into(), + enabled: true, + traffic_left: None, + traffic_total: None, + valid_until: None, + last_validated: None, + created_at: None, + }, + ExportEntry { + service_name: "alldebrid".into(), + username: "bob".into(), + password: "pw2".into(), + account_type: "premium".into(), + enabled: true, + traffic_left: None, + traffic_total: None, + valid_until: None, + last_validated: None, + created_at: None, + }, + ExportEntry { + service_name: "uploaded".into(), + username: "carol".into(), + password: "pw3".into(), + account_type: "premium".into(), + enabled: true, + traffic_left: None, + traffic_total: None, + valid_until: None, + last_validated: None, + created_at: None, + }, + ], + }; + let plaintext = serde_json::to_vec(&envelope).unwrap(); + let codec = FakePassphraseCodec; + let bytes = codec.seal("k", &plaintext).unwrap(); + + let dir = TempDir::new().unwrap(); + let bundle = dir.path().join("partial.bin"); + std::fs::write(&bundle, &bytes).unwrap(); + + // Keyring fails after the first successful write so entry 2's + // `store_password` call returns an error mid-loop. 
+ let dst_repo = Arc::new(InMemoryAccountRepo::new()); + let dst_creds = Arc::new(FakeAccountCredentialStore::new().failing_after(1)); + let dst_events = Arc::new(CapturingEventBus::new()); + let dst = build_account_bus( + dst_repo.clone(), + dst_creds.clone(), + dst_events.clone(), + None, + Some(Arc::new(FakePassphraseCodec) as Arc), + ); + + let err = dst + .handle_import_accounts(ImportAccountsCommand { + path: bundle, + passphrase: "k".into(), + now_ms: 0, + }) + .await + .expect_err("partial keyring failure must surface"); + assert!( + matches!(err, AppError::Domain(_)), + "expected storage error, got {err:?}" + ); + + assert!( + dst_repo.list().unwrap().is_empty(), + "all imported rows must be rolled back when one entry fails" + ); + assert_eq!( + dst_creds.entry_count(), + 0, + "all imported keyring entries must be rolled back too" + ); + assert!( + !dst_events + .snapshot() + .iter() + .any(|e| matches!(e, DomainEvent::AccountsImported { .. })), + "no AccountsImported event when the import fails atomically" + ); + } + + #[tokio::test] + async fn test_import_accounts_corrupted_payload_returns_validation() { + let dir = TempDir::new().unwrap(); + let bundle = dir.path().join("garbage.bin"); + // Hand-craft a "valid" envelope under FakePassphraseCodec format + // but with non-JSON plaintext so the JSON parse step fails. 
+ let codec = FakePassphraseCodec; + let bytes = codec.seal("k", b"this is not json").unwrap(); + std::fs::write(&bundle, &bytes).unwrap(); + + let dst_repo = Arc::new(InMemoryAccountRepo::new()); + let dst_creds = Arc::new(FakeAccountCredentialStore::new()); + let dst_events = Arc::new(CapturingEventBus::new()); + let dst = build_account_bus( + dst_repo, + dst_creds, + dst_events, + None, + Some(Arc::new(FakePassphraseCodec) as Arc), + ); + + let err = dst + .handle_import_accounts(ImportAccountsCommand { + path: bundle, + passphrase: "k".into(), + now_ms: 0, + }) + .await + .expect_err("corrupted"); + assert!(matches!(err, AppError::Validation(_))); + } +} diff --git a/src-tauri/src/application/commands/mod.rs b/src-tauri/src/application/commands/mod.rs index 4e96e5e6..a793b451 100644 --- a/src-tauri/src/application/commands/mod.rs +++ b/src-tauri/src/application/commands/mod.rs @@ -3,12 +3,19 @@ //! Each command represents an intent to mutate application state. //! Handler implementations live in submodules and add methods to `CommandBus`. 
+#[cfg(test)] +mod tests_support; + +mod add_account; mod cancel_download; mod change_directory; mod clear_downloads_by_state; +mod delete_account; mod delete_history; +mod export_accounts; mod export_history; mod extract_archive; +mod import_accounts; mod install_plugin; mod move_queue; mod open_download_file; @@ -31,12 +38,15 @@ pub mod store_refresh; mod toggle_clipboard; mod toggle_plugin; mod uninstall_plugin; +mod update_account; mod update_config; mod update_plugin_config; +mod validate_account; mod verify_checksum; use std::path::PathBuf; +use crate::domain::model::account::{AccountId, AccountType}; use crate::domain::model::config::ConfigPatch; use crate::domain::model::download::DownloadId; use crate::domain::ports::driving::Command; @@ -334,6 +344,130 @@ impl Command for ChangeDirectoryBulkCommand {} pub use change_directory::{ChangeDirectoryBulkOutcome, ChangeDirectoryFailure}; +// ── Accounts ───────────────────────────────────────────────────────── + +/// Create a new persisted account and store its password in the +/// account-keyring under the freshly generated [`AccountId`]. +/// +/// `created_at_ms` is supplied by the caller (Unix epoch milliseconds) +/// so handlers stay deterministic in tests. The driving adapter passes +/// `now()` from the host clock. +#[derive(Debug, Clone)] +pub struct AddAccountCommand { + pub service_name: String, + pub username: String, + pub password: String, + pub account_type: AccountType, + pub created_at_ms: u64, +} +impl Command for AddAccountCommand {} + +/// Partial-mutation payload for [`UpdateAccountCommand`]. All fields are +/// optional; absent values keep the persisted account unchanged. +/// +/// Setting `password = Some(_)` rotates the password in the keyring +/// (the SQLite row never sees the secret). Setting `username = Some(_)` +/// also rotates the keyring entry to keep it keyed by the new username. 
+#[derive(Debug, Clone, Default)] +pub struct AccountPatch { + pub username: Option, + pub password: Option, + pub account_type: Option, + pub enabled: Option, +} + +#[derive(Debug, Clone)] +pub struct UpdateAccountCommand { + pub id: AccountId, + pub patch: AccountPatch, +} +impl Command for UpdateAccountCommand {} + +/// Delete the account row and its keyring entry. Idempotent — succeeds +/// even if neither exists. +#[derive(Debug, Clone)] +pub struct DeleteAccountCommand { + pub id: AccountId, +} +impl Command for DeleteAccountCommand {} + +/// Probe the upstream service for `id`'s credentials. +/// +/// `now_ms` is supplied by the caller so the handler can deterministically +/// stamp `last_validated` on the account row. +#[derive(Debug, Clone)] +pub struct ValidateAccountCommand { + pub id: AccountId, + pub now_ms: u64, +} +impl Command for ValidateAccountCommand {} + +/// Caller-friendly view of a [`ValidationOutcome`]( +/// crate::domain::ports::driven::ValidationOutcome). Same shape — kept +/// in the application layer so IPC adapters don't have to import the +/// domain port path. +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct ValidationOutcomeDto { + pub valid: bool, + pub latency_ms: Option, + pub traffic_left: Option, + pub traffic_total: Option, + pub valid_until: Option, + pub error_message: Option, +} + +impl From for ValidationOutcomeDto { + fn from(o: crate::domain::ports::driven::ValidationOutcome) -> Self { + Self { + valid: o.valid, + latency_ms: o.latency_ms, + traffic_left: o.traffic_left, + traffic_total: o.traffic_total, + valid_until: o.valid_until, + error_message: o.error_message, + } + } +} + +/// Encrypt every persisted account into a single bundle and write it +/// to `path`. The passphrase is fed to a PBKDF2 KDF; the resulting +/// blob is opaque and unreadable without the same passphrase. 
+#[derive(Debug, Clone)] +pub struct ExportAccountsCommand { + pub path: PathBuf, + pub passphrase: String, +} +impl Command for ExportAccountsCommand {} + +/// Decrypt a bundle previously produced by [`ExportAccountsCommand`] +/// and persist every account it contains. Wrong passphrase or any +/// integrity-check failure aborts the import without inserting a +/// single row. +#[derive(Debug, Clone)] +pub struct ImportAccountsCommand { + pub path: PathBuf, + pub passphrase: String, + pub now_ms: u64, +} +impl Command for ImportAccountsCommand {} + +/// Outcome of a successful [`ExportAccountsCommand`]. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExportAccountsOutcome { + pub path: PathBuf, + pub count: u32, +} + +/// Outcome of a successful [`ImportAccountsCommand`]. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ImportAccountsOutcome { + pub path: PathBuf, + pub imported: u32, + /// Entries skipped because a row with the same + /// `(service_name, username)` pair was already persisted. + pub skipped_duplicates: u32, +} + /// Register an already-downloaded local file as a Completed download. /// /// Used after `download_to_file` produces a merged file via yt-dlp. diff --git a/src-tauri/src/application/commands/tests_support.rs b/src-tauri/src/application/commands/tests_support.rs new file mode 100644 index 00000000..38d96f8c --- /dev/null +++ b/src-tauri/src/application/commands/tests_support.rs @@ -0,0 +1,552 @@ +//! Shared test fixtures for the account-command handler tests. +//! +//! Gated behind `#[cfg(test)]` — never linked into release binaries. 
+ +#![cfg(test)] + +use std::collections::HashMap; +use std::path::Path; +use std::sync::{Arc, Mutex}; + +use crate::application::command_bus::CommandBus; +use crate::application::test_support::NoopHistoryRepo; +use crate::domain::error::DomainError; +use crate::domain::event::DomainEvent; +use crate::domain::model::account::{Account, AccountId}; +use crate::domain::model::archive::{ArchiveEntry, ArchiveFormat, ExtractSummary}; +use crate::domain::model::config::{AppConfig, ConfigPatch}; +use crate::domain::model::credential::Credential; +use crate::domain::model::download::{Download, DownloadId, DownloadState}; +use crate::domain::model::http::HttpResponse; +use crate::domain::model::meta::DownloadMeta; +use crate::domain::model::plugin::{PluginInfo, PluginManifest}; +use crate::domain::ports::driven::{ + AccountCredentialStore, AccountRepository, AccountValidator, ArchiveExtractor, + ClipboardObserver, ConfigStore, CredentialStore, DownloadEngine, DownloadRepository, EventBus, + FileStorage, HttpClient, PassphraseCodec, PluginLoader, ValidationOutcome, +}; + +// ── In-memory account repository ───────────────────────────────────── + +pub(crate) struct InMemoryAccountRepo { + store: Mutex>, +} + +impl InMemoryAccountRepo { + pub(crate) fn new() -> Self { + Self { + store: Mutex::new(HashMap::new()), + } + } + + pub(crate) fn snapshot(&self) -> Vec { + let mut accounts: Vec = self.store.lock().unwrap().values().cloned().collect(); + accounts.sort_by(|a, b| { + a.created_at() + .cmp(&b.created_at()) + .then_with(|| a.id().as_str().cmp(b.id().as_str())) + }); + accounts + } +} + +impl AccountRepository for InMemoryAccountRepo { + fn find_by_id(&self, id: &AccountId) -> Result, DomainError> { + Ok(self.store.lock().unwrap().get(id).cloned()) + } + + fn save(&self, account: &Account) -> Result<(), DomainError> { + let mut guard = self.store.lock().unwrap(); + for (id, existing) in guard.iter() { + if id != account.id() + && existing.service_name() == 
account.service_name() + && existing.username() == account.username() + { + return Err(DomainError::AlreadyExists(format!( + "{}::{}", + account.service_name(), + account.username() + ))); + } + } + let stored = match guard.get(account.id()) { + Some(existing) => Account::reconstruct( + account.id().clone(), + account.service_name().to_string(), + account.username().to_string(), + account.account_type(), + account.is_enabled(), + account.traffic_left(), + account.traffic_total(), + account.valid_until(), + account.last_validated(), + existing.created_at(), + ), + None => account.clone(), + }; + guard.insert(account.id().clone(), stored); + Ok(()) + } + + fn list(&self) -> Result, DomainError> { + Ok(self.snapshot()) + } + + fn list_by_service(&self, service_name: &str) -> Result, DomainError> { + Ok(self + .snapshot() + .into_iter() + .filter(|a| a.service_name() == service_name) + .collect()) + } + + fn delete(&self, id: &AccountId) -> Result<(), DomainError> { + self.store.lock().unwrap().remove(id); + Ok(()) + } +} + +// ── Fake account credential store ──────────────────────────────────── + +pub(crate) struct FakeAccountCredentialStore { + entries: Mutex>, + fail_on_write: bool, + /// When `Some(n)`, the first `n` writes succeed and every write + /// after that fails — used by the partial-import rollback test. + fail_after: Option, + write_count: Mutex, + /// Records every `store_password` call regardless of outcome so + /// tests can assert that a rollback / restore code path actually + /// re-issued the previous secret. 
+ write_attempts: Mutex>, +} + +impl FakeAccountCredentialStore { + pub(crate) fn new() -> Self { + Self { + entries: Mutex::new(HashMap::new()), + fail_on_write: false, + fail_after: None, + write_count: Mutex::new(0), + write_attempts: Mutex::new(Vec::new()), + } + } + + pub(crate) fn write_attempts(&self) -> Vec<(AccountId, String)> { + self.write_attempts.lock().unwrap().clone() + } + + pub(crate) fn failing_on_write(mut self) -> Self { + self.fail_on_write = true; + self + } + + pub(crate) fn failing_after(mut self, n: usize) -> Self { + self.fail_after = Some(n); + self + } + + pub(crate) fn entry_count(&self) -> usize { + self.entries.lock().unwrap().len() + } + + pub(crate) fn snapshot(&self) -> Vec<(AccountId, String)> { + let mut entries: Vec<(AccountId, String)> = self + .entries + .lock() + .unwrap() + .iter() + .map(|(k, v)| (k.clone(), v.clone())) + .collect(); + entries.sort_by(|a, b| a.0.as_str().cmp(b.0.as_str())); + entries + } +} + +impl AccountCredentialStore for FakeAccountCredentialStore { + fn store_password(&self, account_id: &AccountId, password: &str) -> Result<(), DomainError> { + // Record the attempt before honouring fail-modes so callers + // that try to re-store after a failure are still observable. 
+ self.write_attempts + .lock() + .unwrap() + .push((account_id.clone(), password.to_string())); + if self.fail_on_write { + return Err(DomainError::StorageError( + "fake keyring write failure".into(), + )); + } + if let Some(limit) = self.fail_after { + let mut count = self.write_count.lock().unwrap(); + if *count >= limit { + return Err(DomainError::StorageError( + "fake keyring write failure (past fail_after limit)".into(), + )); + } + *count += 1; + } + self.entries + .lock() + .unwrap() + .insert(account_id.clone(), password.to_string()); + Ok(()) + } + + fn get_password(&self, account_id: &AccountId) -> Result, DomainError> { + Ok(self.entries.lock().unwrap().get(account_id).cloned()) + } + + fn delete_password(&self, account_id: &AccountId) -> Result<(), DomainError> { + self.entries.lock().unwrap().remove(account_id); + Ok(()) + } +} + +// ── Fake account validator ─────────────────────────────────────────── + +pub(crate) struct FakeAccountValidator { + behavior: Mutex>, +} + +#[derive(Clone)] +pub(crate) enum ValidatorBehavior { + Ok(ValidationOutcome), + Reject(String), + Missing, + Storage(String), +} + +impl FakeAccountValidator { + pub(crate) fn new() -> Self { + Self { + behavior: Mutex::new(HashMap::new()), + } + } + + pub(crate) fn set(&self, service_name: &str, behavior: ValidatorBehavior) { + self.behavior + .lock() + .unwrap() + .insert(service_name.to_string(), behavior); + } +} + +impl AccountValidator for FakeAccountValidator { + fn validate( + &self, + service_name: &str, + _username: &str, + _password: &str, + ) -> Result { + let behavior = self + .behavior + .lock() + .unwrap() + .get(service_name) + .cloned() + .unwrap_or(ValidatorBehavior::Missing); + match behavior { + ValidatorBehavior::Ok(outcome) => Ok(outcome), + ValidatorBehavior::Reject(msg) => Ok(ValidationOutcome::rejected(msg)), + ValidatorBehavior::Missing => Err(DomainError::NotFound(format!( + "no plugin for service {service_name}" + ))), + ValidatorBehavior::Storage(msg) => 
Err(DomainError::StorageError(msg)), + } + } +} + +// ── Fake passphrase codec (XOR + length-prefixed passphrase tag) ───── + +/// Toy codec used in handler tests so the import / export flow can be +/// exercised without depending on the AES adapter. The format is: +/// +/// - 1 byte: ciphertext version (`0x01`) +/// - 1 byte: passphrase length `n` +/// - n bytes: passphrase echoed back (lets `open` reject the wrong key) +/// - rest: plaintext bytes (no XOR — the test fixture only needs to be +/// reversible and to fail on the wrong passphrase, not actually be +/// confidential). +pub(crate) struct FakePassphraseCodec; + +impl PassphraseCodec for FakePassphraseCodec { + fn seal(&self, passphrase: &str, plaintext: &[u8]) -> Result, DomainError> { + let pass_bytes = passphrase.as_bytes(); + if pass_bytes.len() > u8::MAX as usize { + return Err(DomainError::ValidationError("passphrase too long".into())); + } + let mut out = Vec::with_capacity(2 + pass_bytes.len() + plaintext.len()); + out.push(0x01); + out.push(pass_bytes.len() as u8); + out.extend_from_slice(pass_bytes); + out.extend_from_slice(plaintext); + Ok(out) + } + + fn open(&self, passphrase: &str, ciphertext: &[u8]) -> Result, DomainError> { + if ciphertext.len() < 2 { + return Err(DomainError::ValidationError("ciphertext truncated".into())); + } + if ciphertext[0] != 0x01 { + return Err(DomainError::ValidationError( + "unsupported ciphertext version".into(), + )); + } + let pass_len = ciphertext[1] as usize; + if ciphertext.len() < 2 + pass_len { + return Err(DomainError::ValidationError("ciphertext truncated".into())); + } + let stored = &ciphertext[2..2 + pass_len]; + if stored != passphrase.as_bytes() { + return Err(DomainError::ValidationError("wrong passphrase".into())); + } + Ok(ciphertext[2 + pass_len..].to_vec()) + } +} + +// ── Capturing event bus ────────────────────────────────────────────── + +pub(crate) struct CapturingEventBus { + events: Mutex>, +} + +impl CapturingEventBus { + pub(crate) fn 
new() -> Self { + Self { + events: Mutex::new(Vec::new()), + } + } + + pub(crate) fn snapshot(&self) -> Vec { + self.events.lock().unwrap().clone() + } +} + +impl EventBus for CapturingEventBus { + fn publish(&self, event: DomainEvent) { + self.events.lock().unwrap().push(event); + } + + fn subscribe(&self, _handler: Box) {} +} + +// ── Stubs for the unrelated ports the bus still requires ───────────── + +struct StubDownloadRepo; +impl DownloadRepository for StubDownloadRepo { + fn find_by_id(&self, _id: DownloadId) -> Result, DomainError> { + Ok(None) + } + fn save(&self, _d: &Download) -> Result<(), DomainError> { + Ok(()) + } + fn delete(&self, _id: DownloadId) -> Result<(), DomainError> { + Ok(()) + } + fn find_by_state(&self, _s: DownloadState) -> Result, DomainError> { + Ok(vec![]) + } +} + +struct StubDownloadEngine; +impl DownloadEngine for StubDownloadEngine { + fn start(&self, _download: &Download) -> Result<(), DomainError> { + Ok(()) + } + fn pause(&self, _id: DownloadId) -> Result<(), DomainError> { + Ok(()) + } + fn resume(&self, _id: DownloadId) -> Result<(), DomainError> { + Ok(()) + } + fn cancel(&self, _id: DownloadId) -> Result<(), DomainError> { + Ok(()) + } +} + +struct StubFileStorage; +impl FileStorage for StubFileStorage { + fn create_file(&self, _path: &Path, _size: u64) -> Result<(), DomainError> { + Ok(()) + } + fn write_segment(&self, _path: &Path, _offset: u64, _data: &[u8]) -> Result<(), DomainError> { + Ok(()) + } + fn read_meta(&self, _path: &Path) -> Result, DomainError> { + Ok(None) + } + fn write_meta(&self, _path: &Path, _meta: &DownloadMeta) -> Result<(), DomainError> { + Ok(()) + } + fn delete_meta(&self, _path: &Path) -> Result<(), DomainError> { + Ok(()) + } +} + +struct StubHttpClient; +impl HttpClient for StubHttpClient { + fn head(&self, _url: &str) -> Result { + Ok(HttpResponse { + status_code: 200, + headers: Default::default(), + body: vec![], + }) + } + fn get_range(&self, _url: &str, _start: u64, _end: u64) -> Result, 
DomainError> { + Ok(vec![]) + } + fn supports_range(&self, _url: &str) -> Result { + Ok(false) + } +} + +struct StubPluginLoader; +impl PluginLoader for StubPluginLoader { + fn load(&self, _manifest: &PluginManifest) -> Result<(), DomainError> { + Ok(()) + } + fn unload(&self, _name: &str) -> Result<(), DomainError> { + Ok(()) + } + fn resolve_url(&self, _url: &str) -> Result, DomainError> { + Ok(None) + } + fn extract_links(&self, _url: &str) -> Result { + Err(DomainError::NotFound("not mocked".into())) + } + fn get_media_variants(&self, _url: &str) -> Result { + Err(DomainError::NotFound("not mocked".into())) + } + fn list_loaded(&self) -> Result, DomainError> { + Ok(vec![]) + } + fn set_enabled(&self, _name: &str, _enabled: bool) -> Result<(), DomainError> { + Ok(()) + } +} + +struct StubConfigStore; +impl ConfigStore for StubConfigStore { + fn get_config(&self) -> Result { + Ok(AppConfig::default()) + } + fn update_config(&self, _patch: ConfigPatch) -> Result { + Ok(AppConfig::default()) + } +} + +struct StubCredentialStore; +impl CredentialStore for StubCredentialStore { + fn get(&self, _service: &str) -> Result, DomainError> { + Ok(None) + } + fn store(&self, _service: &str, _credential: &Credential) -> Result<(), DomainError> { + Ok(()) + } + fn delete(&self, _service: &str) -> Result<(), DomainError> { + Ok(()) + } +} + +struct StubClipboardObserver; +impl ClipboardObserver for StubClipboardObserver { + fn start(&self) -> Result<(), DomainError> { + Ok(()) + } + fn stop(&self) -> Result<(), DomainError> { + Ok(()) + } + fn get_urls(&self) -> Result, DomainError> { + Ok(vec![]) + } +} + +struct StubArchiveExtractor; +impl ArchiveExtractor for StubArchiveExtractor { + fn detect_format(&self, _file_path: &Path) -> Result, DomainError> { + Ok(None) + } + fn can_extract(&self, _file_path: &Path) -> Result { + Ok(false) + } + fn extract( + &self, + _file_path: &Path, + _dest_dir: &Path, + _password: Option<&str>, + ) -> Result { + Ok(ExtractSummary { + 
extracted_files: 0, + extracted_bytes: 0, + duration_ms: 0, + warnings: vec![], + }) + } + fn list_contents( + &self, + _file_path: &Path, + _password: Option<&str>, + ) -> Result, DomainError> { + Ok(vec![]) + } + fn detect_segments( + &self, + _file_path: &Path, + ) -> Result>, DomainError> { + Ok(None) + } +} + +/// Build a [`CommandBus`] wired with the supplied account ports plus +/// stubs for everything else. +pub(crate) fn build_account_bus( + account_repo: Arc, + credential_store: Arc, + event_bus: Arc, + validator: Option>, + codec: Option>, +) -> CommandBus { + let mut bus = CommandBus::new( + Arc::new(StubDownloadRepo), + Arc::new(StubDownloadEngine), + event_bus, + Arc::new(StubFileStorage), + Arc::new(StubHttpClient), + Arc::new(StubPluginLoader), + Arc::new(StubConfigStore), + Arc::new(StubCredentialStore), + Arc::new(StubClipboardObserver), + Arc::new(StubArchiveExtractor), + Arc::new(NoopHistoryRepo), + None, + ) + .with_account_repo(account_repo) + .with_account_credential_store(credential_store); + + if let Some(v) = validator { + bus = bus.with_account_validator(v); + } + if let Some(c) = codec { + bus = bus.with_passphrase_codec(c); + } + bus +} + +/// Build a bus with no account ports — used to assert handlers refuse +/// to run when their dependencies are missing. +pub(crate) fn bus_without_account_ports(event_bus: Arc) -> CommandBus { + CommandBus::new( + Arc::new(StubDownloadRepo), + Arc::new(StubDownloadEngine), + event_bus, + Arc::new(StubFileStorage), + Arc::new(StubHttpClient), + Arc::new(StubPluginLoader), + Arc::new(StubConfigStore), + Arc::new(StubCredentialStore), + Arc::new(StubClipboardObserver), + Arc::new(StubArchiveExtractor), + Arc::new(NoopHistoryRepo), + None, + ) +} diff --git a/src-tauri/src/application/commands/update_account.rs b/src-tauri/src/application/commands/update_account.rs new file mode 100644 index 00000000..e31ba1c8 --- /dev/null +++ b/src-tauri/src/application/commands/update_account.rs @@ -0,0 +1,384 @@ +//! 
Handler for [`UpdateAccountCommand`](super::UpdateAccountCommand). +//! +//! Applies a partial mutation to an existing account. Password rotation +//! is performed against the keyring; other fields update the SQLite row. +//! Each `None` in the [`AccountPatch`](super::AccountPatch) leaves the +//! corresponding column untouched. +//! +//! When the patch contains a non-empty string for `username` or +//! `password`, both are validated before any mutation lands so a bad +//! input never produces a partially-updated account. + +use crate::application::command_bus::CommandBus; +use crate::application::error::AppError; +use crate::domain::event::DomainEvent; +use crate::domain::model::account::Account; + +impl CommandBus { + pub async fn handle_update_account( + &self, + cmd: super::UpdateAccountCommand, + ) -> Result<(), AppError> { + let repo = self + .account_repo() + .ok_or_else(|| AppError::Validation("account repository not configured".into()))?; + let store = self.account_credential_store().ok_or_else(|| { + AppError::Validation("account credential store not configured".into()) + })?; + + let account = repo + .find_by_id(&cmd.id)? 
+ .ok_or_else(|| AppError::NotFound(format!("account {} not found", cmd.id.as_str())))?; + + let username = match cmd.patch.username { + Some(value) => { + let trimmed = value.trim(); + if trimmed.is_empty() { + return Err(AppError::Validation("username must not be empty".into())); + } + trimmed.to_string() + } + None => account.username().to_string(), + }; + let account_type = cmd.patch.account_type.unwrap_or(account.account_type()); + let enabled = cmd.patch.enabled.unwrap_or(account.is_enabled()); + + if let Some(ref pw) = cmd.patch.password + && pw.is_empty() + { + return Err(AppError::Validation("password must not be empty".into())); + } + + let next = Account::reconstruct( + account.id().clone(), + account.service_name().to_string(), + username, + account_type, + enabled, + account.traffic_left(), + account.traffic_total(), + account.valid_until(), + account.last_validated(), + account.created_at(), + ); + // Capture the previous password BEFORE persisting the new row + // so a keyring-rotation failure can restore it. The + // `AccountCredentialStore` contract does not promise "no side + // effects on `Err`" — a backend that partially writes the new + // secret before failing would leave the keyring out of sync + // with the row we just restored. + let previous_password = if cmd.patch.password.is_some() { + store.get_password(&cmd.id)? + } else { + None + }; + + repo.save(&next)?; + + // Apply password rotation after the row is persisted. If the + // keyring write fails we roll the row back to the original so + // callers never observe a row that says "password rotated" while + // the keyring still holds the previous secret. 
+ if let Some(pw) = cmd.patch.password + && let Err(e) = store.store_password(&cmd.id, &pw) + { + if let Err(rollback_err) = repo.save(&account) { + tracing::warn!( + account_id = %cmd.id.as_str(), + keyring_error = %e, + rollback_error = %rollback_err, + "keyring rotation failed and row rollback also failed; row metadata diverges from keyring" + ); + } + // Restore the previous password (or wipe the entry if the + // account had none) so a partially-completed write doesn't + // leave a half-rotated credential in the keyring. + let restore_result = match previous_password { + Some(prev) => store.store_password(&cmd.id, &prev), + None => store.delete_password(&cmd.id), + }; + if let Err(restore_err) = restore_result { + tracing::warn!( + account_id = %cmd.id.as_str(), + keyring_error = %e, + restore_error = %restore_err, + "keyring rotation failed and the password-restore step also failed; keyring may hold a partially rotated secret" + ); + } + return Err(e.into()); + } + + self.event_bus() + .publish(DomainEvent::AccountUpdated { id: cmd.id }); + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use super::super::{AccountPatch, AddAccountCommand, UpdateAccountCommand}; + use crate::application::commands::tests_support::{ + CapturingEventBus, FakeAccountCredentialStore, InMemoryAccountRepo, build_account_bus, + }; + use crate::application::error::AppError; + use crate::domain::error::DomainError; + use crate::domain::event::DomainEvent; + use crate::domain::model::account::{AccountId, AccountType}; + use crate::domain::ports::driven::{AccountCredentialStore, AccountRepository}; + + fn add_command(service: &str, user: &str, pw: &str) -> AddAccountCommand { + AddAccountCommand { + service_name: service.into(), + username: user.into(), + password: pw.into(), + account_type: AccountType::Premium, + created_at_ms: 1_700_000_000_000, + } + } + + #[tokio::test] + async fn test_update_account_partial_patch_changes_only_listed_fields() { + let repo = 
Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds.clone(), events.clone(), None, None); + let id = bus + .handle_add_account(add_command("real-debrid", "alice", "old-pw")) + .await + .unwrap(); + + bus.handle_update_account(UpdateAccountCommand { + id: id.clone(), + patch: AccountPatch { + enabled: Some(false), + ..AccountPatch::default() + }, + }) + .await + .expect("update ok"); + + let after = repo.find_by_id(&id).unwrap().unwrap(); + assert!(!after.is_enabled()); + assert_eq!(after.username(), "alice", "untouched field stays as-is"); + assert_eq!(after.account_type(), AccountType::Premium); + assert_eq!(creds.get_password(&id).unwrap().as_deref(), Some("old-pw")); + } + + #[tokio::test] + async fn test_update_account_password_rotation_writes_new_keyring_value() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds.clone(), events, None, None); + let id = bus + .handle_add_account(add_command("real-debrid", "alice", "old-pw")) + .await + .unwrap(); + + bus.handle_update_account(UpdateAccountCommand { + id: id.clone(), + patch: AccountPatch { + password: Some("new-pw".into()), + ..AccountPatch::default() + }, + }) + .await + .unwrap(); + + assert_eq!(creds.get_password(&id).unwrap().as_deref(), Some("new-pw")); + } + + #[tokio::test] + async fn test_update_account_unknown_id_returns_not_found() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo, creds, events, None, None); + + let err = bus + .handle_update_account(UpdateAccountCommand { + id: AccountId::new("missing"), + patch: AccountPatch::default(), + }) + .await 
+ .expect_err("missing id"); + assert!(matches!(err, AppError::NotFound(_))); + } + + #[tokio::test] + async fn test_update_account_restores_previous_password_when_rotation_fails() { + let repo = Arc::new(InMemoryAccountRepo::new()); + // First write (add_account) succeeds; subsequent writes fail — + // covers both the rotation attempt and the rollback restore + // attempt the handler makes after the rotation fails. + let creds = Arc::new(FakeAccountCredentialStore::new().failing_after(1)); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds.clone(), events, None, None); + + let id = bus + .handle_add_account(add_command("real-debrid", "alice", "old-pw")) + .await + .unwrap(); + + let err = bus + .handle_update_account(UpdateAccountCommand { + id: id.clone(), + patch: AccountPatch { + password: Some("new-pw".into()), + enabled: Some(false), + ..AccountPatch::default() + }, + }) + .await + .expect_err("rotation failure surfaces"); + assert!(matches!( + err, + AppError::Domain(DomainError::StorageError(_)) + )); + + // Row metadata must be back to the original because the + // rotation failed. + let after = repo.find_by_id(&id).unwrap().unwrap(); + assert!( + after.is_enabled(), + "row must be rolled back to enabled=true after failed rotation" + ); + + // The handler must have attempted to restore the previous + // password — the third write attempt carries the original + // secret. 
+ let attempts = creds.write_attempts(); + assert_eq!(attempts.len(), 3, "add + rotation + restore"); + assert_eq!(attempts[0].1, "old-pw", "initial add"); + assert_eq!(attempts[1].1, "new-pw", "failed rotation"); + assert_eq!( + attempts[2].1, "old-pw", + "restore must replay the original password" + ); + } + + #[tokio::test] + async fn test_update_account_blank_username_rejected() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds, events, None, None); + let id = bus + .handle_add_account(add_command("real-debrid", "alice", "pw")) + .await + .unwrap(); + + let err = bus + .handle_update_account(UpdateAccountCommand { + id: id.clone(), + patch: AccountPatch { + username: Some(" ".into()), + ..AccountPatch::default() + }, + }) + .await + .expect_err("blank rejected"); + assert!(matches!(err, AppError::Validation(_))); + let unchanged = repo.find_by_id(&id).unwrap().unwrap(); + assert_eq!(unchanged.username(), "alice"); + } + + #[tokio::test] + async fn test_update_account_empty_password_rejected_without_keyring_write() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds.clone(), events, None, None); + let id = bus + .handle_add_account(add_command("real-debrid", "alice", "pw")) + .await + .unwrap(); + + let err = bus + .handle_update_account(UpdateAccountCommand { + id: id.clone(), + patch: AccountPatch { + password: Some("".into()), + ..AccountPatch::default() + }, + }) + .await + .expect_err("empty pw rejected"); + assert!(matches!(err, AppError::Validation(_))); + assert_eq!(creds.get_password(&id).unwrap().as_deref(), Some("pw")); + } + + #[tokio::test] + async fn test_update_account_emits_event_and_keeps_created_at() { + let repo = 
Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds, events.clone(), None, None); + let id = bus + .handle_add_account(add_command("real-debrid", "alice", "pw")) + .await + .unwrap(); + events.snapshot(); // discard creation event from comparison + + bus.handle_update_account(UpdateAccountCommand { + id: id.clone(), + patch: AccountPatch { + account_type: Some(AccountType::Debrid), + ..AccountPatch::default() + }, + }) + .await + .unwrap(); + + let snapshot = events.snapshot(); + assert!( + snapshot + .iter() + .any(|e| matches!(e, DomainEvent::AccountUpdated { id: ev } if ev == &id)), + "AccountUpdated event missing" + ); + let after = repo.find_by_id(&id).unwrap().unwrap(); + assert_eq!(after.created_at(), 1_700_000_000_000); + assert_eq!(after.account_type(), AccountType::Debrid); + } + + #[tokio::test] + async fn test_update_account_propagates_repo_error() { + // No fake "failing repo" exists yet — simulate a save failure + // by triggering the unique-constraint check in `save`. + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds.clone(), events, None, None); + + // Two accounts on the same service: id1 = (real-debrid, alice), + // id2 = (real-debrid, bob). + let id1 = bus + .handle_add_account(add_command("real-debrid", "alice", "pw1")) + .await + .unwrap(); + let _id2 = bus + .handle_add_account(add_command("real-debrid", "bob", "pw2")) + .await + .unwrap(); + + // Renaming id1 to "bob" must collide with id2 and surface as + // an `AlreadyExists` domain error from the repo. 
+ let err = bus + .handle_update_account(UpdateAccountCommand { + id: id1, + patch: AccountPatch { + username: Some("bob".into()), + ..AccountPatch::default() + }, + }) + .await + .expect_err("collision"); + assert!(matches!( + err, + AppError::Domain(DomainError::AlreadyExists(_)) + )); +} +} diff --git a/src-tauri/src/application/commands/validate_account.rs b/src-tauri/src/application/commands/validate_account.rs new file mode 100644 index 00000000..c771c53f --- /dev/null +++ b/src-tauri/src/application/commands/validate_account.rs @@ -0,0 +1,344 @@ +//! Handler for [`ValidateAccountCommand`](super::ValidateAccountCommand). +//! +//! Looks up the account, reads its password from the keyring, hands +//! both off to [`AccountValidator`], and applies the resulting +//! [`ValidationOutcome`] to the persisted row. The handler returns a +//! detailed [`ValidationOutcomeDto`] so the caller can drive both +//! `account_validate` (boolean OK/fail) and `account_test_connection` +//! (full latency + traffic readout) without re-reading the row. + +use super::ValidationOutcomeDto; +use crate::application::command_bus::CommandBus; +use crate::application::error::AppError; +use crate::domain::error::DomainError; +use crate::domain::event::DomainEvent; +use crate::domain::model::account::Account; + +impl CommandBus { + pub async fn handle_validate_account( + &self, + cmd: super::ValidateAccountCommand, + ) -> Result<ValidationOutcomeDto, AppError> { + let repo = self + .account_repo() + .ok_or_else(|| AppError::Validation("account repository not configured".into()))?; + let store = self.account_credential_store().ok_or_else(|| { + AppError::Validation("account credential store not configured".into()) + })?; + let validator = self + .account_validator() + .ok_or_else(|| AppError::Validation("account validator not configured".into()))?; + + let account = repo + .find_by_id(&cmd.id)?
+ .ok_or_else(|| AppError::NotFound(format!("account {} not found", cmd.id.as_str())))?; + + let password = store.get_password(&cmd.id)?.ok_or_else(|| { + AppError::NotFound(format!( + "no stored password for account {}", + cmd.id.as_str() + )) + })?; + + let outcome = + match validator.validate(account.service_name(), account.username(), &password) { + Ok(o) => o, + Err(DomainError::NotFound(msg)) => { + self.event_bus() + .publish(DomainEvent::AccountValidationFailed { + id: cmd.id.clone(), + error: msg.clone(), + }); + return Err(AppError::NotFound(msg)); + } + Err(other) => { + // Network failures, keyring read errors, and any + // other domain error coming back from the validator + // are surfaced as `AccountValidationFailed` so the + // UI can react identically whether the upstream + // service rejected the credentials or was simply + // unreachable. + self.event_bus() + .publish(DomainEvent::AccountValidationFailed { + id: cmd.id.clone(), + error: other.to_string(), + }); + return Err(other.into()); + } + }; + + let mut next = clone_account(&account); + next.set_last_validated(cmd.now_ms); + if outcome.valid { + if let Some(t) = outcome.traffic_left { + next.set_traffic_left(t); + } + if let Some(t) = outcome.traffic_total { + next.set_traffic_total(t); + } + if let Some(v) = outcome.valid_until { + next.set_valid_until(v); + } + } + repo.save(&next)?; + + if outcome.valid { + self.event_bus().publish(DomainEvent::AccountValidated { + id: cmd.id, + latency_ms: outcome.latency_ms, + traffic_left: outcome.traffic_left, + traffic_total: outcome.traffic_total, + valid_until: outcome.valid_until, + }); + } else { + self.event_bus() + .publish(DomainEvent::AccountValidationFailed { + id: cmd.id, + error: outcome + .error_message + .clone() + .unwrap_or_else(|| "validation rejected".into()), + }); + } + + Ok(outcome.into()) + } +} + +fn clone_account(account: &Account) -> Account { + Account::reconstruct( + account.id().clone(), + 
account.service_name().to_string(), + account.username().to_string(), + account.account_type(), + account.is_enabled(), + account.traffic_left(), + account.traffic_total(), + account.valid_until(), + account.last_validated(), + account.created_at(), + ) +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use super::super::{AddAccountCommand, ValidateAccountCommand}; + use crate::application::commands::tests_support::{ + CapturingEventBus, FakeAccountCredentialStore, FakeAccountValidator, InMemoryAccountRepo, + ValidatorBehavior, build_account_bus, + }; + use crate::application::error::AppError; + use crate::domain::event::DomainEvent; + use crate::domain::model::account::{AccountId, AccountType}; + use crate::domain::ports::driven::{ + AccountCredentialStore, AccountRepository, ValidationOutcome, + }; + + fn add_command(service: &str) -> AddAccountCommand { + AddAccountCommand { + service_name: service.into(), + username: "alice".into(), + password: "pw".into(), + account_type: AccountType::Premium, + created_at_ms: 1_700_000_000_000, + } + } + + #[tokio::test] + async fn test_validate_account_unknown_service_returns_not_found() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let validator = Arc::new(FakeAccountValidator::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo, creds, events.clone(), Some(validator), None); + let id = bus + .handle_add_account(add_command("mystery")) + .await + .unwrap(); + + let err = bus + .handle_validate_account(ValidateAccountCommand { + id: id.clone(), + now_ms: 2_000_000_000_000, + }) + .await + .expect_err("missing plugin"); + assert!(matches!(err, AppError::NotFound(ref m) if m.contains("mystery"))); + assert!( + events + .snapshot() + .iter() + .any(|e| matches!( + e, + DomainEvent::AccountValidationFailed { id: ev, error } if ev == &id && error.contains("mystery") + )) + ); + } + + #[tokio::test] + async fn 
test_validate_account_success_updates_metadata_and_emits_event() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let validator = Arc::new(FakeAccountValidator::new()); + validator.set( + "real-debrid", + ValidatorBehavior::Ok(ValidationOutcome { + valid: true, + latency_ms: Some(120), + traffic_left: Some(50_000), + traffic_total: Some(100_000), + valid_until: Some(2_500_000_000_000), + error_message: None, + }), + ); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds, events.clone(), Some(validator), None); + let id = bus + .handle_add_account(add_command("real-debrid")) + .await + .unwrap(); + + let outcome = bus + .handle_validate_account(ValidateAccountCommand { + id: id.clone(), + now_ms: 1_900_000_000_000, + }) + .await + .expect("validate ok"); + + assert!(outcome.valid); + assert_eq!(outcome.latency_ms, Some(120)); + assert_eq!(outcome.traffic_left, Some(50_000)); + + let after = repo.find_by_id(&id).unwrap().unwrap(); + assert_eq!(after.last_validated(), Some(1_900_000_000_000)); + assert_eq!(after.traffic_left(), Some(50_000)); + assert_eq!(after.traffic_total(), Some(100_000)); + assert_eq!(after.valid_until(), Some(2_500_000_000_000)); + + assert!( + events + .snapshot() + .iter() + .any(|e| matches!(e, DomainEvent::AccountValidated { id: ev, traffic_left: Some(50_000), .. 
} if ev == &id)) + ); + } + + #[tokio::test] + async fn test_validate_account_rejected_records_last_validated_but_not_traffic() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let validator = Arc::new(FakeAccountValidator::new()); + validator.set( + "real-debrid", + ValidatorBehavior::Reject("wrong password".into()), + ); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds, events.clone(), Some(validator), None); + let id = bus + .handle_add_account(add_command("real-debrid")) + .await + .unwrap(); + + let outcome = bus + .handle_validate_account(ValidateAccountCommand { + id: id.clone(), + now_ms: 1_900_000_000_000, + }) + .await + .expect("call returns Ok with valid=false"); + assert!(!outcome.valid); + assert_eq!(outcome.error_message.as_deref(), Some("wrong password")); + + let after = repo.find_by_id(&id).unwrap().unwrap(); + assert_eq!(after.last_validated(), Some(1_900_000_000_000)); + assert!(after.traffic_left().is_none(), "no traffic on reject"); + + assert!( + events + .snapshot() + .iter() + .any(|e| matches!(e, DomainEvent::AccountValidationFailed { id: ev, error } if ev == &id && error == "wrong password")) + ); + } + + #[tokio::test] + async fn test_validate_account_storage_error_emits_validation_failed_event() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let validator = Arc::new(FakeAccountValidator::new()); + validator.set( + "real-debrid", + ValidatorBehavior::Storage("upstream timeout".into()), + ); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo, creds, events.clone(), Some(validator), None); + let id = bus + .handle_add_account(add_command("real-debrid")) + .await + .unwrap(); + + let err = bus + .handle_validate_account(ValidateAccountCommand { + id: id.clone(), + now_ms: 1_900_000_000_000, + }) + .await + 
.expect_err("storage error surfaces"); + assert!(matches!(err, AppError::Domain(_))); + assert!( + events.snapshot().iter().any(|e| matches!( + e, + DomainEvent::AccountValidationFailed { id: ev, error } if ev == &id && error.contains("upstream timeout") + )), + "AccountValidationFailed must fire on validator storage errors too" + ); + } + + #[tokio::test] + async fn test_validate_account_unknown_id_returns_not_found() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let validator = Arc::new(FakeAccountValidator::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo, creds, events, Some(validator), None); + + let err = bus + .handle_validate_account(ValidateAccountCommand { + id: AccountId::new("ghost"), + now_ms: 0, + }) + .await + .expect_err("ghost id"); + assert!(matches!(err, AppError::NotFound(_))); + } + + #[tokio::test] + async fn test_validate_account_missing_keyring_password_errors() { + let repo = Arc::new(InMemoryAccountRepo::new()); + let creds = Arc::new(FakeAccountCredentialStore::new()); + let validator = Arc::new(FakeAccountValidator::new()); + let events = Arc::new(CapturingEventBus::new()); + let bus = build_account_bus(repo.clone(), creds.clone(), events, Some(validator), None); + + let id = bus + .handle_add_account(add_command("real-debrid")) + .await + .unwrap(); + // Simulate a keyring eviction: delete the password under id. 
+ creds + .delete_password(&id) + .expect("infallible in test fixture"); + + let err = bus + .handle_validate_account(ValidateAccountCommand { + id: id.clone(), + now_ms: 0, + }) + .await + .expect_err("missing pw"); + assert!(matches!(err, AppError::NotFound(_))); + } +} diff --git a/src-tauri/src/domain/event.rs b/src-tauri/src/domain/event.rs index e6459be5..8d986035 100644 --- a/src-tauri/src/domain/event.rs +++ b/src-tauri/src/domain/event.rs @@ -1,3 +1,4 @@ +use crate::domain::model::account::AccountId; use crate::domain::model::download::DownloadId; use crate::domain::model::views::HistoryEntry; @@ -234,6 +235,45 @@ pub enum DomainEvent { // Settings SettingsUpdated, + + // Accounts + AccountAdded { + id: AccountId, + service_name: String, + }, + AccountUpdated { + id: AccountId, + }, + AccountDeleted { + id: AccountId, + }, + /// Emitted by `validate_account` when the upstream service confirms + /// the credentials. Carries the freshly observed metadata so the + /// frontend can refresh traffic counters / expiry without round- + /// tripping back through `list_accounts`. + AccountValidated { + id: AccountId, + latency_ms: Option<u64>, + traffic_left: Option<u64>, + traffic_total: Option<u64>, + valid_until: Option<i64>, + }, + /// Emitted by `validate_account` when the credentials are rejected + /// or the upstream service is unreachable. + AccountValidationFailed { + id: AccountId, + error: String, + }, + /// Emitted after `import_accounts` decrypts the bundle and + /// successfully persists every entry it contained. + AccountsImported { + count: u32, + }, + /// Emitted after `export_accounts` writes the encrypted bundle to + /// disk. + AccountsExported { + count: u32, + }, } #[cfg(test)] diff --git a/src-tauri/src/domain/ports/driven/account_credential_store.rs b/src-tauri/src/domain/ports/driven/account_credential_store.rs new file mode 100644 index 00000000..eef07696 --- /dev/null +++ b/src-tauri/src/domain/ports/driven/account_credential_store.rs @@ -0,0 +1,28 @@ +//!
Per-account credential storage. +//! +//! Keys credentials by [`AccountId`] so each persisted `Account` row +//! has exactly one matching keyring entry, even when the same +//! `(service_name, username)` pair appears under multiple ids +//! (e.g. legacy migrations or duplicate-detection tests). +//! +//! The lower-level [`CredentialStore`](super::CredentialStore) port is +//! keyed by service name and re-used by other call sites; this port +//! exists so the account-management commands never need to construct +//! ad-hoc keyring service strings. + +use crate::domain::error::DomainError; +use crate::domain::model::account::AccountId; + +pub trait AccountCredentialStore: Send + Sync { + /// Persist `password` under `account_id`. Overwrites any existing + /// value for the same id. + fn store_password(&self, account_id: &AccountId, password: &str) -> Result<(), DomainError>; + + /// Retrieve the password previously stored under `account_id`, or + /// `None` when nothing has been saved. + fn get_password(&self, account_id: &AccountId) -> Result<Option<String>, DomainError>; + + /// Delete the password stored under `account_id`. No-op when no + /// entry exists. + fn delete_password(&self, account_id: &AccountId) -> Result<(), DomainError>; +} diff --git a/src-tauri/src/domain/ports/driven/account_validator.rs b/src-tauri/src/domain/ports/driven/account_validator.rs new file mode 100644 index 00000000..aeec1e25 --- /dev/null +++ b/src-tauri/src/domain/ports/driven/account_validator.rs @@ -0,0 +1,87 @@ +//! Validates account credentials against a remote service. +//! +//! Implementations delegate to the hoster / debrid plugin matching the +//! account's `service_name`. When no plugin is registered for the +//! service, the implementation MUST return +//! [`DomainError::NotFound`] with a message that names the service so +//! the calling handler can surface a clear "no plugin for service X" +//! error to the user.
+ +use crate::domain::error::DomainError; + +/// Result of an account validation attempt. +/// +/// Carries the data that the "test connection" UI surface needs — even +/// when the credentials are rejected, an `error_message` lets the panel +/// explain *why* (wrong password, expired, rate-limited, ...). +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct ValidationOutcome { + pub valid: bool, + pub latency_ms: Option<u64>, + pub traffic_left: Option<u64>, + pub traffic_total: Option<u64>, + pub valid_until: Option<i64>, + pub error_message: Option<String>, +} + +impl ValidationOutcome { + pub fn ok() -> Self { + Self { + valid: true, + ..Self::default() + } + } + + pub fn rejected(error_message: impl Into<String>) -> Self { + Self { + valid: false, + error_message: Some(error_message.into()), + ..Self::default() + } + } +} + +/// Validates an account's credentials by attempting to connect to the +/// remote service it represents. +pub trait AccountValidator: Send + Sync { + /// Probe the remote service named `service_name` with the given + /// credentials and return the resulting [`ValidationOutcome`]. + /// + /// Returns [`DomainError::NotFound`] when no plugin is registered + /// for `service_name`.
+ fn validate( + &self, + service_name: &str, + username: &str, + password: &str, + ) -> Result<ValidationOutcome, DomainError>; +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_validation_outcome_ok_marks_valid_with_no_error() { + let out = ValidationOutcome::ok(); + assert!(out.valid); + assert!(out.error_message.is_none()); + assert!(out.latency_ms.is_none()); + assert!(out.traffic_left.is_none()); + } + + #[test] + fn test_validation_outcome_rejected_records_message_and_invalid_flag() { + let out = ValidationOutcome::rejected("wrong password"); + assert!(!out.valid); + assert_eq!(out.error_message.as_deref(), Some("wrong password")); + } + + #[test] + fn test_validation_outcome_default_is_invalid_and_empty() { + let out = ValidationOutcome::default(); + assert!(!out.valid); + assert!(out.latency_ms.is_none()); + assert!(out.error_message.is_none()); + } +} diff --git a/src-tauri/src/domain/ports/driven/mod.rs b/src-tauri/src/domain/ports/driven/mod.rs index 376405e4..eeaab0d2 100644 --- a/src-tauri/src/domain/ports/driven/mod.rs +++ b/src-tauri/src/domain/ports/driven/mod.rs @@ -1,7 +1,9 @@ #[cfg(test)] mod tests; +pub mod account_credential_store; pub mod account_repository; +pub mod account_validator; pub mod archive_extractor; pub mod checksum_computer; pub mod clipboard_observer; @@ -16,6 +18,7 @@ pub mod file_opener; pub mod file_storage; pub mod history_repository; pub mod http_client; +pub mod passphrase_codec; pub mod plugin_config_store; pub mod plugin_loader; pub mod plugin_read_repository; @@ -23,7 +26,9 @@ pub mod plugin_store_client; pub mod stats_repository; pub mod url_opener; +pub use account_credential_store::AccountCredentialStore; pub use account_repository::AccountRepository; +pub use account_validator::{AccountValidator, ValidationOutcome}; pub use archive_extractor::ArchiveExtractor; pub use checksum_computer::ChecksumComputer; pub use clipboard_observer::ClipboardObserver; @@ -38,6 +43,7 @@ pub use file_opener::FileOpener; pub use
file_storage::FileStorage; pub use history_repository::HistoryRepository; pub use http_client::HttpClient; +pub use passphrase_codec::PassphraseCodec; pub use plugin_config_store::PluginConfigStore; pub use plugin_loader::PluginLoader; pub use plugin_read_repository::PluginReadRepository; diff --git a/src-tauri/src/domain/ports/driven/passphrase_codec.rs b/src-tauri/src/domain/ports/driven/passphrase_codec.rs new file mode 100644 index 00000000..20cc85a2 --- /dev/null +++ b/src-tauri/src/domain/ports/driven/passphrase_codec.rs @@ -0,0 +1,32 @@ +//! Authenticated encryption keyed by a user-provided passphrase. +//! +//! The account import / export commands serialize the bundle to bytes, +//! hand it to [`PassphraseCodec::seal`] for encryption, and write the +//! resulting blob to disk. Decryption reverses the flow and refuses +//! tampered ciphertext, so any failure returned by [`open`]( +//! PassphraseCodec::open) means the file cannot be trusted. +//! +//! Implementations MUST: +//! +//! - derive the encryption key from the passphrase via a memory-hard or +//! iteration-stretched KDF (PBKDF2-HMAC-SHA256 with ≥ 200 000 rounds +//! for the bundled adapter); +//! - generate a fresh random salt and nonce on every call to +//! [`seal`](PassphraseCodec::seal); +//! - return [`DomainError::ValidationError`] on a wrong passphrase or +//! any cryptographic check failure (authentication tag mismatch, +//! truncated input, unsupported version), rather than panicking. + +use crate::domain::error::DomainError; + +pub trait PassphraseCodec: Send + Sync { + /// Encrypt and authenticate `plaintext` under `passphrase`. The + /// returned blob bundles the algorithm version, salt, nonce, and + /// ciphertext + auth tag — callers treat it as opaque bytes. + fn seal(&self, passphrase: &str, plaintext: &[u8]) -> Result<Vec<u8>, DomainError>; + + /// Decrypt and authenticate `ciphertext` produced by + /// [`seal`](PassphraseCodec::seal).
Wrong passphrase or any + /// integrity-check failure yields [`DomainError::ValidationError`]. + fn open(&self, passphrase: &str, ciphertext: &[u8]) -> Result<Vec<u8>, DomainError>; +} diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index 3f2c3298..f112e118 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -15,8 +15,10 @@ use domain::ports::driven::{ // Public API — concrete types for app wiring (main.rs, Tauri setup, integration tests) pub use adapters::driven::clipboard::TauriClipboardObserver; pub use adapters::driven::config::TomlConfigStore; +pub use adapters::driven::credential::KeyringAccountStore; pub use adapters::driven::credential::KeyringCredentialStore; pub use adapters::driven::credential::NoopCredentialStore; +pub use adapters::driven::crypto::AesGcmPbkdf2Codec; pub use adapters::driven::event::TokioEventBus; pub use adapters::driven::event::spawn_history_recorder_bridge; pub use adapters::driven::event::spawn_stats_recorder_bridge;