From 4b7b7c01e6eeeec34f7735e6d50bbc22092f356d Mon Sep 17 00:00:00 2001 From: Mathieu Piton <27002047+mpiton@users.noreply.github.com> Date: Tue, 28 Apr 2026 10:23:20 +0200 Subject: [PATCH 1/3] feat(account): SQLite Accounts repository (task 20) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add `accounts` table migration with UNIQUE(service_name, username), sea-orm entity + converters, `AccountRepository` driven port and `SqliteAccountRepo` adapter. Refactor domain `Account` to a `String`-backed `AccountId` matching the spec's TEXT PRIMARY KEY and add the `traffic_total` / `last_validated` / `created_at` fields required by PRD-v2 §8 P1. Credentials stay out of SQLite -- exposed via `Account::credential_ref()` returning `keyring://{service}/{username}`. Unblocks tasks 21-25, 38, 51-56, 75-76 (Accounts commands/queries/UI, Debrid/premium hoster plugins). --- CHANGELOG.md | 4 + .../adapters/driven/sqlite/account_repo.rs | 380 ++++++++++++++++++ .../src/adapters/driven/sqlite/connection.rs | 97 +++++ .../driven/sqlite/entities/account.rs | 64 +++ .../adapters/driven/sqlite/entities/mod.rs | 1 + .../m20260428_000006_create_accounts.rs | 76 ++++ .../adapters/driven/sqlite/migrations/mod.rs | 2 + src-tauri/src/adapters/driven/sqlite/mod.rs | 1 + src-tauri/src/domain/model/account.rs | 232 ++++++++++- src-tauri/src/domain/model/mod.rs | 2 +- .../domain/ports/driven/account_repository.rs | 31 ++ src-tauri/src/domain/ports/driven/mod.rs | 2 + src-tauri/src/domain/ports/driven/tests.rs | 135 +++++++ src-tauri/src/lib.rs | 1 + 14 files changed, 1012 insertions(+), 16 deletions(-) create mode 100644 src-tauri/src/adapters/driven/sqlite/account_repo.rs create mode 100644 src-tauri/src/adapters/driven/sqlite/entities/account.rs create mode 100644 src-tauri/src/adapters/driven/sqlite/migrations/m20260428_000006_create_accounts.rs create mode 100644 src-tauri/src/domain/ports/driven/account_repository.rs diff --git a/CHANGELOG.md 
b/CHANGELOG.md index a438c36e..0d7a4117 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +### Added + +- **Accounts persistence** (PRD §6.4, PRD-v2 §P1.1, task 20): SQLite `accounts` table (migration `m20260428_000006`) with `id` / `service_name` / `username` / `account_type` / `enabled` / `traffic_left` / `traffic_total` / `valid_until` / `last_validated` / `created_at` columns and a UNIQUE `(service_name, username)` index. New `AccountRepository` driven port (`save` / `find_by_id` / `list` / `list_by_service` / `delete`) and `SqliteAccountRepo` adapter with sea-orm entity + `from_domain` / `into_domain` converters. UNIQUE violations surface as `DomainError::AlreadyExists` instead of leaking storage errors. Domain `Account` aggregate gained `traffic_total`, `last_validated`, `created_at` fields and switched its identifier to `AccountId(String)` so generated account ids match the spec's `TEXT PRIMARY KEY`. `Account::credential_ref()` returns the `keyring://{service}/{username}` URI used to look up the password/token in the OS keychain — credentials are never persisted to SQLite. Unblocks tasks 21-25, 38, 51-56, 75-76. + ## [0.2.0-beta] - 2026-04-27 First public beta of Vortex, completing **Phase 0** of the v2 roadmap (PRD-v2 §P0). Every placeholder view in the v0.1 Tauri scaffold ships as a real, wired-to-backend feature, the queue/scheduler now respects the persisted `max_concurrent_downloads` value, completed downloads project into the `history` and `statistics` read models for KPI dashboards and re-download flows, and the integrity pipeline can verify SHA-256 / MD5 checksums end-to-end. The plugin store gains dynamic per-plugin configuration UIs and a "report broken" action; the system tray pulses while transfers are active; desktop notifications surface filename + size on completion and the failure reason on errors. 
Targeted at testers — REST API, browser extension and headless CLI are deferred to v0.3+. diff --git a/src-tauri/src/adapters/driven/sqlite/account_repo.rs b/src-tauri/src/adapters/driven/sqlite/account_repo.rs new file mode 100644 index 00000000..f0dbcfa2 --- /dev/null +++ b/src-tauri/src/adapters/driven/sqlite/account_repo.rs @@ -0,0 +1,380 @@ +//! SQLite implementation of `AccountRepository` (CQRS write side). + +use sea_orm::{ + ColumnTrait, DatabaseConnection, EntityTrait, QueryFilter, QueryOrder, RuntimeErr, + sea_query::OnConflict, +}; + +use crate::domain::error::DomainError; +use crate::domain::model::account::{Account, AccountId}; +use crate::domain::ports::driven::account_repository::AccountRepository; + +use super::entities::account; +use super::util::{block_on, map_db_err}; + +pub struct SqliteAccountRepo { + db: DatabaseConnection, +} + +impl SqliteAccountRepo { + pub fn new(db: DatabaseConnection) -> Self { + Self { db } + } +} + +impl AccountRepository for SqliteAccountRepo { + fn find_by_id(&self, id: &AccountId) -> Result, DomainError> { + let id_value = id.as_str().to_string(); + block_on(async { + let model = account::Entity::find_by_id(id_value) + .one(&self.db) + .await + .map_err(map_db_err)?; + match model { + Some(m) => Ok(Some(m.into_domain()?)), + None => Ok(None), + } + }) + } + + fn save(&self, account: &Account) -> Result<(), DomainError> { + let active = account::ActiveModel::from_domain(account); + + block_on(async { + // Upsert by primary key. The (service_name, username) UNIQUE + // index lets the DB itself enforce the constraint and surface + // it as a uniqueness violation we translate below. 
+ let result = account::Entity::insert(active) + .on_conflict( + OnConflict::column(account::Column::Id) + .update_columns([ + account::Column::ServiceName, + account::Column::Username, + account::Column::AccountType, + account::Column::Enabled, + account::Column::TrafficLeft, + account::Column::TrafficTotal, + account::Column::ValidUntil, + account::Column::LastValidated, + account::Column::CreatedAt, + ]) + .to_owned(), + ) + .exec(&self.db) + .await; + + match result { + Ok(_) => Ok(()), + Err(sea_orm::DbErr::Exec(RuntimeErr::SqlxError(e))) + if is_unique_violation(&e.to_string()) => + { + Err(DomainError::AlreadyExists(format!( + "account ({}, {}) already exists", + account.service_name(), + account.username() + ))) + } + Err(sea_orm::DbErr::Query(RuntimeErr::SqlxError(e))) + if is_unique_violation(&e.to_string()) => + { + Err(DomainError::AlreadyExists(format!( + "account ({}, {}) already exists", + account.service_name(), + account.username() + ))) + } + Err(e) if is_unique_violation(&e.to_string()) => { + Err(DomainError::AlreadyExists(format!( + "account ({}, {}) already exists", + account.service_name(), + account.username() + ))) + } + Err(e) => Err(map_db_err(e)), + } + }) + } + + fn list(&self) -> Result, DomainError> { + block_on(async { + let models = account::Entity::find() + .order_by_asc(account::Column::CreatedAt) + .order_by_asc(account::Column::Id) + .all(&self.db) + .await + .map_err(map_db_err)?; + models.into_iter().map(|m| m.into_domain()).collect() + }) + } + + fn list_by_service(&self, service_name: &str) -> Result, DomainError> { + let svc = service_name.to_string(); + block_on(async { + let models = account::Entity::find() + .filter(account::Column::ServiceName.eq(svc)) + .order_by_asc(account::Column::CreatedAt) + .order_by_asc(account::Column::Id) + .all(&self.db) + .await + .map_err(map_db_err)?; + models.into_iter().map(|m| m.into_domain()).collect() + }) + } + + fn delete(&self, id: &AccountId) -> Result<(), DomainError> { + let 
id_value = id.as_str().to_string(); + block_on(async { + account::Entity::delete_by_id(id_value) + .exec(&self.db) + .await + .map_err(map_db_err)?; + Ok(()) + }) + } +} + +/// SQLite reports UNIQUE failures with one of these markers depending on +/// the driver path (sea-orm vs raw sqlx). Match either form so the +/// adapter doesn't depend on a specific error variant layout. +fn is_unique_violation(msg: &str) -> bool { + let lower = msg.to_lowercase(); + lower.contains("unique constraint failed") + || lower.contains("constraint failed: unique") + || (lower.contains("error code 2067") && lower.contains("unique")) + || lower.contains("(2067)") +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::adapters::driven::sqlite::connection::setup_test_db; + use crate::domain::model::account::{Account, AccountId, AccountType}; + + fn make_account(id: &str, service: &str, user: &str) -> Account { + Account::new( + AccountId::new(id), + service.to_string(), + user.to_string(), + AccountType::Debrid, + 1_700_000_000_000, + ) + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + async fn test_save_and_find_account_round_trip_preserves_all_fields() { + let db = setup_test_db().await.expect("test db"); + let repo = SqliteAccountRepo::new(db); + + let mut account = make_account("acc-1", "real-debrid", "alice@example.com"); + account.set_traffic_left(123_456); + account.set_traffic_total(1_000_000); + account.set_valid_until(1_800_000_000_000); + account.set_last_validated(1_700_001_000_000); + account.disable(); + + repo.save(&account).expect("save"); + + let found = repo + .find_by_id(&AccountId::new("acc-1")) + .expect("find_by_id") + .expect("account should exist"); + + assert_eq!(found.id().as_str(), "acc-1"); + assert_eq!(found.service_name(), "real-debrid"); + assert_eq!(found.username(), "alice@example.com"); + assert_eq!(found.account_type(), AccountType::Debrid); + assert!(!found.is_enabled()); + assert_eq!(found.traffic_left(), Some(123_456)); + 
assert_eq!(found.traffic_total(), Some(1_000_000)); + assert_eq!(found.valid_until(), Some(1_800_000_000_000)); + assert_eq!(found.last_validated(), Some(1_700_001_000_000)); + assert_eq!(found.created_at(), 1_700_000_000_000); + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + async fn test_save_upsert_updates_existing_account() { + let db = setup_test_db().await.expect("test db"); + let repo = SqliteAccountRepo::new(db); + + let mut account = make_account("acc-1", "real-debrid", "alice"); + repo.save(&account).expect("first save"); + + account.disable(); + account.set_traffic_left(999); + repo.save(&account).expect("upsert"); + + let found = repo + .find_by_id(&AccountId::new("acc-1")) + .expect("find_by_id") + .expect("present"); + assert!(!found.is_enabled()); + assert_eq!(found.traffic_left(), Some(999)); + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + async fn test_find_by_id_not_found_returns_none() { + let db = setup_test_db().await.expect("test db"); + let repo = SqliteAccountRepo::new(db); + let result = repo + .find_by_id(&AccountId::new("missing")) + .expect("find_by_id"); + assert!(result.is_none()); + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + async fn test_list_returns_all_accounts_ordered_by_created_at() { + let db = setup_test_db().await.expect("test db"); + let repo = SqliteAccountRepo::new(db); + + let mut a = make_account("a", "real-debrid", "u1"); + let mut b = make_account("b", "alldebrid", "u2"); + // Force a deterministic created_at order: a < b + a = Account::reconstruct( + AccountId::new("a"), + a.service_name().to_string(), + a.username().to_string(), + a.account_type(), + a.is_enabled(), + a.traffic_left(), + a.traffic_total(), + a.valid_until(), + a.last_validated(), + 10, + ); + b = Account::reconstruct( + AccountId::new("b"), + b.service_name().to_string(), + b.username().to_string(), + b.account_type(), + b.is_enabled(), + b.traffic_left(), + b.traffic_total(), + 
b.valid_until(), + b.last_validated(), + 20, + ); + + repo.save(&b).expect("save b first"); + repo.save(&a).expect("save a second"); + + let all = repo.list().expect("list"); + assert_eq!(all.len(), 2); + assert_eq!(all[0].id().as_str(), "a"); + assert_eq!(all[1].id().as_str(), "b"); + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + async fn test_list_by_service_filters_correctly() { + let db = setup_test_db().await.expect("test db"); + let repo = SqliteAccountRepo::new(db); + + repo.save(&make_account("rd-1", "real-debrid", "alice")) + .expect("save rd-1"); + repo.save(&make_account("rd-2", "real-debrid", "bob")) + .expect("save rd-2"); + repo.save(&make_account("ad-1", "alldebrid", "carol")) + .expect("save ad-1"); + + let rd = repo.list_by_service("real-debrid").expect("filter rd"); + assert_eq!(rd.len(), 2); + for acc in &rd { + assert_eq!(acc.service_name(), "real-debrid"); + } + + let ad = repo.list_by_service("alldebrid").expect("filter ad"); + assert_eq!(ad.len(), 1); + assert_eq!(ad[0].id().as_str(), "ad-1"); + + let none = repo.list_by_service("unknown").expect("filter unknown"); + assert!(none.is_empty()); + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + async fn test_save_unique_violation_returns_already_exists() { + let db = setup_test_db().await.expect("test db"); + let repo = SqliteAccountRepo::new(db); + + repo.save(&make_account("first", "real-debrid", "alice")) + .expect("save first"); + + let dup = make_account("second", "real-debrid", "alice"); + let err = repo.save(&dup).expect_err("duplicate save must fail"); + assert!( + matches!(err, DomainError::AlreadyExists(_)), + "expected AlreadyExists, got {err:?}" + ); + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + async fn test_delete_removes_account() { + let db = setup_test_db().await.expect("test db"); + let repo = SqliteAccountRepo::new(db); + + repo.save(&make_account("acc-1", "real-debrid", "alice")) + .expect("save"); + + 
repo.delete(&AccountId::new("acc-1")).expect("delete"); + + let found = repo + .find_by_id(&AccountId::new("acc-1")) + .expect("find_by_id"); + assert!(found.is_none()); + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + async fn test_delete_missing_account_is_noop() { + let db = setup_test_db().await.expect("test db"); + let repo = SqliteAccountRepo::new(db); + repo.delete(&AccountId::new("ghost")).expect("delete"); + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + async fn test_account_type_round_trip_through_db_for_each_variant() { + let db = setup_test_db().await.expect("test db"); + let repo = SqliteAccountRepo::new(db); + + let kinds = [ + ("free-id", "free-host", AccountType::Free), + ("prem-id", "prem-host", AccountType::Premium), + ("deb-id", "deb-host", AccountType::Debrid), + ]; + for (id, svc, t) in kinds { + let acc = Account::new(AccountId::new(id), svc.to_string(), "u".to_string(), t, 0); + repo.save(&acc).expect("save"); + let found = repo + .find_by_id(&AccountId::new(id)) + .expect("find") + .expect("present"); + assert_eq!(found.account_type(), t); + } + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + async fn test_optional_fields_persist_as_null_when_unset() { + let db = setup_test_db().await.expect("test db"); + let repo = SqliteAccountRepo::new(db); + + let acc = make_account("acc-null", "real-debrid", "u"); + repo.save(&acc).expect("save"); + + let found = repo + .find_by_id(&AccountId::new("acc-null")) + .expect("find") + .expect("present"); + assert!(found.traffic_left().is_none()); + assert!(found.traffic_total().is_none()); + assert!(found.valid_until().is_none()); + assert!(found.last_validated().is_none()); + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + async fn test_unique_violation_helper_recognises_sqlite_messages() { + // The helper has to match a couple of slightly different wordings + // depending on driver path; lock those in as regression guards. 
+ assert!(is_unique_violation("UNIQUE constraint failed: accounts.id")); + assert!(is_unique_violation( + "(code: 2067) UNIQUE constraint failed: accounts.service_name, accounts.username" + )); + assert!(!is_unique_violation("disk I/O error")); + } +} diff --git a/src-tauri/src/adapters/driven/sqlite/connection.rs b/src-tauri/src/adapters/driven/sqlite/connection.rs index f8d4a398..d8b31552 100644 --- a/src-tauri/src/adapters/driven/sqlite/connection.rs +++ b/src-tauri/src/adapters/driven/sqlite/connection.rs @@ -86,6 +86,103 @@ mod tests { assert!(names.contains(&"history".to_string())); assert!(names.contains(&"plugins".to_string())); assert!(names.contains(&"statistics".to_string())); + assert!(names.contains(&"accounts".to_string())); + } + + #[tokio::test] + async fn test_accounts_migration_applies_cleanly_on_existing_db() { + // Stand up a DB at the schema state immediately before the + // accounts migration (5 migrations applied), seed prior tables, + // then run the remaining migrations and verify the new table + // exists and existing data is preserved. + let sqlite_opts = sea_orm::sqlx::sqlite::SqliteConnectOptions::from_str("sqlite::memory:") + .unwrap() + .pragma("foreign_keys", "ON"); + let pool = sea_orm::sqlx::sqlite::SqlitePoolOptions::new() + .max_connections(1) + .connect_with(sqlite_opts) + .await + .unwrap(); + let db = sea_orm::SqlxSqliteConnector::from_sqlx_sqlite_pool(pool); + + Migrator::up(&db, Some(5)) + .await + .expect("first 5 migrations"); + + let pre = db + .query_all(Statement::from_string( + sea_orm::DatabaseBackend::Sqlite, + "SELECT name FROM sqlite_master WHERE type='table' AND name='accounts'".to_string(), + )) + .await + .unwrap(); + assert!( + pre.is_empty(), + "accounts table must not exist before its migration" + ); + + // Seed a download row that must survive the migration. 
+ db.execute(Statement::from_string( + sea_orm::DatabaseBackend::Sqlite, + "INSERT INTO downloads (id, url, file_name, state, priority, queue_position, downloaded_bytes, speed_bytes_per_sec, retry_count, max_retries, segments_count, source_hostname, protocol, resume_supported, destination_path, created_at, updated_at) VALUES (1, 'https://example.com/f.zip', 'f.zip', 'Queued', 5, 0, 0, 0, 0, 5, 1, 'example.com', 'https', 0, '/tmp', 1, 1)" + .to_string(), + )) + .await + .expect("seed download"); + + Migrator::up(&db, None).await.expect("remaining migrations"); + + let post = db + .query_all(Statement::from_string( + sea_orm::DatabaseBackend::Sqlite, + "SELECT name FROM sqlite_master WHERE type='table' AND name='accounts'".to_string(), + )) + .await + .unwrap(); + assert_eq!(post.len(), 1, "accounts table created by migration"); + + let downloads = db + .query_all(Statement::from_string( + sea_orm::DatabaseBackend::Sqlite, + "SELECT id FROM downloads".to_string(), + )) + .await + .unwrap(); + assert_eq!(downloads.len(), 1, "existing data preserved"); + } + + #[tokio::test] + async fn test_accounts_table_enforces_unique_service_username() { + let db = setup_test_db().await.unwrap(); + let now: i64 = 1_700_000_000_000; + db.execute(Statement::from_string( + sea_orm::DatabaseBackend::Sqlite, + format!( + "INSERT INTO accounts (id, service_name, username, account_type, enabled, created_at) VALUES ('a1', 'real-debrid', 'alice', 'debrid', 1, {now})" + ), + )) + .await + .expect("insert first"); + + let dup = db + .execute(Statement::from_string( + sea_orm::DatabaseBackend::Sqlite, + format!( + "INSERT INTO accounts (id, service_name, username, account_type, enabled, created_at) VALUES ('a2', 'real-debrid', 'alice', 'debrid', 1, {now})" + ), + )) + .await; + assert!(dup.is_err(), "UNIQUE(service_name, username) must reject"); + + let other = db + .execute(Statement::from_string( + sea_orm::DatabaseBackend::Sqlite, + format!( + "INSERT INTO accounts (id, service_name, 
username, account_type, enabled, created_at) VALUES ('a2', 'alldebrid', 'alice', 'debrid', 1, {now})" + ), + )) + .await; + assert!(other.is_ok(), "different service must be allowed"); } #[tokio::test] diff --git a/src-tauri/src/adapters/driven/sqlite/entities/account.rs b/src-tauri/src/adapters/driven/sqlite/entities/account.rs new file mode 100644 index 00000000..38001f46 --- /dev/null +++ b/src-tauri/src/adapters/driven/sqlite/entities/account.rs @@ -0,0 +1,64 @@ +use sea_orm::entity::prelude::*; + +use crate::domain::error::DomainError; +use crate::domain::model::account::{Account, AccountId, AccountType}; + +use crate::adapters::driven::sqlite::util::safe_u64; + +#[derive(Clone, Debug, PartialEq, DeriveEntityModel)] +#[sea_orm(table_name = "accounts")] +pub struct Model { + #[sea_orm(primary_key, auto_increment = false)] + pub id: String, + pub service_name: String, + pub username: String, + pub account_type: String, + pub enabled: i32, + pub traffic_left: Option, + pub traffic_total: Option, + pub valid_until: Option, + pub last_validated: Option, + pub created_at: i64, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation {} + +impl ActiveModelBehavior for ActiveModel {} + +impl Model { + pub fn into_domain(self) -> Result { + let account_type: AccountType = self.account_type.parse()?; + Ok(Account::reconstruct( + AccountId::new(self.id), + self.service_name, + self.username, + account_type, + self.enabled != 0, + self.traffic_left.map(safe_u64), + self.traffic_total.map(safe_u64), + self.valid_until.map(safe_u64), + self.last_validated.map(safe_u64), + safe_u64(self.created_at), + )) + } +} + +impl ActiveModel { + pub fn from_domain(account: &Account) -> Self { + use sea_orm::ActiveValue::Set; + + Self { + id: Set(account.id().as_str().to_string()), + service_name: Set(account.service_name().to_string()), + username: Set(account.username().to_string()), + account_type: Set(account.account_type().to_string()), + enabled: Set(if 
account.is_enabled() { 1 } else { 0 }), + traffic_left: Set(account.traffic_left().map(|b| b as i64)), + traffic_total: Set(account.traffic_total().map(|b| b as i64)), + valid_until: Set(account.valid_until().map(|t| t as i64)), + last_validated: Set(account.last_validated().map(|t| t as i64)), + created_at: Set(account.created_at() as i64), + } + } +} diff --git a/src-tauri/src/adapters/driven/sqlite/entities/mod.rs b/src-tauri/src/adapters/driven/sqlite/entities/mod.rs index f25d1e21..052212f9 100644 --- a/src-tauri/src/adapters/driven/sqlite/entities/mod.rs +++ b/src-tauri/src/adapters/driven/sqlite/entities/mod.rs @@ -1,3 +1,4 @@ +pub mod account; pub mod download; pub mod download_segment; pub mod history; diff --git a/src-tauri/src/adapters/driven/sqlite/migrations/m20260428_000006_create_accounts.rs b/src-tauri/src/adapters/driven/sqlite/migrations/m20260428_000006_create_accounts.rs new file mode 100644 index 00000000..99eb72ee --- /dev/null +++ b/src-tauri/src/adapters/driven/sqlite/migrations/m20260428_000006_create_accounts.rs @@ -0,0 +1,76 @@ +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .create_table( + Table::create() + .table(Accounts::Table) + .if_not_exists() + .col(ColumnDef::new(Accounts::Id).text().not_null().primary_key()) + .col(ColumnDef::new(Accounts::ServiceName).text().not_null()) + .col(ColumnDef::new(Accounts::Username).text().not_null()) + .col(ColumnDef::new(Accounts::AccountType).text().not_null()) + .col( + ColumnDef::new(Accounts::Enabled) + .integer() + .not_null() + .default(1), + ) + .col(ColumnDef::new(Accounts::TrafficLeft).big_integer().null()) + .col(ColumnDef::new(Accounts::TrafficTotal).big_integer().null()) + .col(ColumnDef::new(Accounts::ValidUntil).big_integer().null()) + 
.col(ColumnDef::new(Accounts::LastValidated).big_integer().null()) + .col(ColumnDef::new(Accounts::CreatedAt).big_integer().not_null()) + .to_owned(), + ) + .await?; + + manager + .create_index( + Index::create() + .name("idx_accounts_service_username") + .table(Accounts::Table) + .col(Accounts::ServiceName) + .col(Accounts::Username) + .unique() + .to_owned(), + ) + .await?; + + manager + .create_index( + Index::create() + .name("idx_accounts_service") + .table(Accounts::Table) + .col(Accounts::ServiceName) + .to_owned(), + ) + .await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .drop_table(Table::drop().table(Accounts::Table).if_exists().to_owned()) + .await + } +} + +#[derive(DeriveIden)] +enum Accounts { + Table, + Id, + ServiceName, + Username, + AccountType, + Enabled, + TrafficLeft, + TrafficTotal, + ValidUntil, + LastValidated, + CreatedAt, +} diff --git a/src-tauri/src/adapters/driven/sqlite/migrations/mod.rs b/src-tauri/src/adapters/driven/sqlite/migrations/mod.rs index 45902933..d43145d9 100644 --- a/src-tauri/src/adapters/driven/sqlite/migrations/mod.rs +++ b/src-tauri/src/adapters/driven/sqlite/migrations/mod.rs @@ -5,6 +5,7 @@ mod m20260415_000002_add_download_error_message; mod m20260424_000003_add_checksum_columns; mod m20260425_000004_add_queue_position; mod m20260425_000005_create_plugin_configs; +mod m20260428_000006_create_accounts; pub struct Migrator; @@ -17,6 +18,7 @@ impl MigratorTrait for Migrator { Box::new(m20260424_000003_add_checksum_columns::Migration), Box::new(m20260425_000004_add_queue_position::Migration), Box::new(m20260425_000005_create_plugin_configs::Migration), + Box::new(m20260428_000006_create_accounts::Migration), ] } } diff --git a/src-tauri/src/adapters/driven/sqlite/mod.rs b/src-tauri/src/adapters/driven/sqlite/mod.rs index 88cfa29b..f45c18a8 100644 --- a/src-tauri/src/adapters/driven/sqlite/mod.rs +++ b/src-tauri/src/adapters/driven/sqlite/mod.rs @@ -1,3 +1,4 @@ +pub mod 
account_repo; pub mod connection; pub mod download_read_repo; pub mod download_repo; diff --git a/src-tauri/src/domain/model/account.rs b/src-tauri/src/domain/model/account.rs index f1635669..b0299fa8 100644 --- a/src-tauri/src/domain/model/account.rs +++ b/src-tauri/src/domain/model/account.rs @@ -1,3 +1,27 @@ +use std::fmt; +use std::str::FromStr; + +use crate::domain::error::DomainError; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct AccountId(pub String); + +impl AccountId { + pub fn new(value: impl Into) -> Self { + Self(value.into()) + } + + pub fn as_str(&self) -> &str { + &self.0 + } +} + +impl fmt::Display for AccountId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(&self.0) + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum AccountType { Free, @@ -5,19 +29,54 @@ pub enum AccountType { Debrid, } -#[derive(Debug, Clone, PartialEq)] +impl fmt::Display for AccountType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let s = match self { + AccountType::Free => "free", + AccountType::Premium => "premium", + AccountType::Debrid => "debrid", + }; + f.write_str(s) + } +} + +impl FromStr for AccountType { + type Err = DomainError; + + fn from_str(s: &str) -> Result { + match s { + "free" => Ok(AccountType::Free), + "premium" => Ok(AccountType::Premium), + "debrid" => Ok(AccountType::Debrid), + other => Err(DomainError::ValidationError(format!( + "invalid account type: {other}" + ))), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] pub struct Account { - id: u64, + id: AccountId, service_name: String, username: String, account_type: AccountType, enabled: bool, traffic_left: Option, + traffic_total: Option, valid_until: Option, + last_validated: Option, + created_at: u64, } impl Account { - pub fn new(id: u64, service_name: String, username: String, account_type: AccountType) -> Self { + pub fn new( + id: AccountId, + service_name: String, + username: String, + account_type: AccountType, + 
created_at: u64, + ) -> Self { Self { id, service_name, @@ -25,7 +84,37 @@ impl Account { account_type, enabled: true, traffic_left: None, + traffic_total: None, valid_until: None, + last_validated: None, + created_at, + } + } + + #[allow(clippy::too_many_arguments)] + pub fn reconstruct( + id: AccountId, + service_name: String, + username: String, + account_type: AccountType, + enabled: bool, + traffic_left: Option, + traffic_total: Option, + valid_until: Option, + last_validated: Option, + created_at: u64, + ) -> Self { + Self { + id, + service_name, + username, + account_type, + enabled, + traffic_left, + traffic_total, + valid_until, + last_validated, + created_at, } } @@ -52,10 +141,18 @@ impl Account { self.traffic_left = Some(bytes); } + pub fn set_traffic_total(&mut self, bytes: u64) { + self.traffic_total = Some(bytes); + } + pub fn set_valid_until(&mut self, timestamp: u64) { self.valid_until = Some(timestamp); } + pub fn set_last_validated(&mut self, timestamp: u64) { + self.last_validated = Some(timestamp); + } + pub fn is_expired(&self, now: u64) -> bool { match self.valid_until { Some(expiry) => now > expiry, @@ -63,8 +160,14 @@ impl Account { } } - pub fn id(&self) -> u64 { - self.id + /// Reference used to look up the credential in the system keyring. + /// Format: `keyring://{service_name}/{username}`. 
+ pub fn credential_ref(&self) -> String { + format!("keyring://{}/{}", self.service_name, self.username) + } + + pub fn id(&self) -> &AccountId { + &self.id } pub fn service_name(&self) -> &str { @@ -83,9 +186,21 @@ impl Account { self.traffic_left } + pub fn traffic_total(&self) -> Option { + self.traffic_total + } + pub fn valid_until(&self) -> Option { self.valid_until } + + pub fn last_validated(&self) -> Option { + self.last_validated + } + + pub fn created_at(&self) -> u64 { + self.created_at + } } #[cfg(test)] @@ -94,27 +209,31 @@ mod tests { fn make_account() -> Account { Account::new( - 1, + AccountId::new("acc-1"), "ExampleHost".to_string(), "user@example.com".to_string(), AccountType::Free, + 1_700_000_000_000, ) } #[test] - fn test_account_new() { + fn test_account_new_initialises_defaults() { let acc = make_account(); - assert_eq!(acc.id(), 1); + assert_eq!(acc.id().as_str(), "acc-1"); assert_eq!(acc.service_name(), "ExampleHost"); assert_eq!(acc.username(), "user@example.com"); assert_eq!(acc.account_type(), AccountType::Free); assert!(acc.is_enabled()); assert!(acc.traffic_left().is_none()); + assert!(acc.traffic_total().is_none()); assert!(acc.valid_until().is_none()); + assert!(acc.last_validated().is_none()); + assert_eq!(acc.created_at(), 1_700_000_000_000); } #[test] - fn test_account_enable_disable() { + fn test_account_enable_disable_toggles_flag() { let mut acc = make_account(); assert!(acc.is_enabled()); acc.disable(); @@ -124,17 +243,35 @@ mod tests { } #[test] - fn test_account_is_premium() { - let free = Account::new(1, "H".to_string(), "u".to_string(), AccountType::Free); - let premium = Account::new(2, "H".to_string(), "u".to_string(), AccountType::Premium); - let debrid = Account::new(3, "H".to_string(), "u".to_string(), AccountType::Debrid); + fn test_account_is_premium_distinguishes_types() { + let free = Account::new( + AccountId::new("a"), + "H".to_string(), + "u".to_string(), + AccountType::Free, + 0, + ); + let premium = 
Account::new( + AccountId::new("b"), + "H".to_string(), + "u".to_string(), + AccountType::Premium, + 0, + ); + let debrid = Account::new( + AccountId::new("c"), + "H".to_string(), + "u".to_string(), + AccountType::Debrid, + 0, + ); assert!(!free.is_premium()); assert!(premium.is_premium()); assert!(debrid.is_premium()); } #[test] - fn test_account_expiry() { + fn test_account_expiry_is_inclusive_of_valid_until() { let mut acc = make_account(); assert!(!acc.is_expired(1000)); acc.set_valid_until(500); @@ -144,10 +281,75 @@ mod tests { } #[test] - fn test_account_traffic() { + fn test_account_traffic_setters_store_values() { let mut acc = make_account(); assert!(acc.traffic_left().is_none()); + assert!(acc.traffic_total().is_none()); acc.set_traffic_left(1_000_000); + acc.set_traffic_total(5_000_000); assert_eq!(acc.traffic_left(), Some(1_000_000)); + assert_eq!(acc.traffic_total(), Some(5_000_000)); + } + + #[test] + fn test_account_last_validated_setter_stores_timestamp() { + let mut acc = make_account(); + assert!(acc.last_validated().is_none()); + acc.set_last_validated(1_700_000_500_000); + assert_eq!(acc.last_validated(), Some(1_700_000_500_000)); + } + + #[test] + fn test_account_credential_ref_uses_keyring_scheme() { + let acc = make_account(); + assert_eq!( + acc.credential_ref(), + "keyring://ExampleHost/user@example.com" + ); + } + + #[test] + fn test_account_type_round_trip_via_string() { + for t in [AccountType::Free, AccountType::Premium, AccountType::Debrid] { + let s = t.to_string(); + let parsed: AccountType = s.parse().expect("round-trip parse"); + assert_eq!(parsed, t); + } + } + + #[test] + fn test_account_type_from_str_rejects_unknown() { + let result: Result = "unknown".parse(); + assert!(matches!(result, Err(DomainError::ValidationError(_)))); + } + + #[test] + fn test_account_id_display_returns_inner_value() { + let id = AccountId::new("xyz-42"); + assert_eq!(id.to_string(), "xyz-42"); + assert_eq!(id.as_str(), "xyz-42"); + } + + #[test] + fn 
test_account_reconstruct_preserves_all_fields() { + let acc = Account::reconstruct( + AccountId::new("k"), + "Host".to_string(), + "u".to_string(), + AccountType::Premium, + false, + Some(123), + Some(456), + Some(789), + Some(101), + 42, + ); + assert_eq!(acc.id().as_str(), "k"); + assert!(!acc.is_enabled()); + assert_eq!(acc.traffic_left(), Some(123)); + assert_eq!(acc.traffic_total(), Some(456)); + assert_eq!(acc.valid_until(), Some(789)); + assert_eq!(acc.last_validated(), Some(101)); + assert_eq!(acc.created_at(), 42); } } diff --git a/src-tauri/src/domain/model/mod.rs b/src-tauri/src/domain/model/mod.rs index 29715870..65485b27 100644 --- a/src-tauri/src/domain/model/mod.rs +++ b/src-tauri/src/domain/model/mod.rs @@ -15,7 +15,7 @@ pub mod queue; pub mod segment; pub mod views; -pub use account::{Account, AccountType}; +pub use account::{Account, AccountId, AccountType}; pub use archive::{ArchiveEntry, ArchiveFormat, ExtractSummary, ExtractionConfig}; pub use captcha::{CaptchaChallenge, CaptchaType}; pub use checksum::ChecksumAlgorithm; diff --git a/src-tauri/src/domain/ports/driven/account_repository.rs b/src-tauri/src/domain/ports/driven/account_repository.rs new file mode 100644 index 00000000..346665a9 --- /dev/null +++ b/src-tauri/src/domain/ports/driven/account_repository.rs @@ -0,0 +1,31 @@ +//! Write repository for the `Account` aggregate (CQRS write side). +//! +//! Persists account metadata only. Credentials (passwords / tokens) live +//! in the OS keyring and are looked up via `Account::credential_ref()` — +//! never through this port. + +use crate::domain::error::DomainError; +use crate::domain::model::account::{Account, AccountId}; + +/// Persists and retrieves `Account` aggregates. +pub trait AccountRepository: Send + Sync { + /// Find an account by its unique identifier. + fn find_by_id(&self, id: &AccountId) -> Result<Option<Account>, DomainError>; + + /// Persist an account (insert or update).
+ /// + /// Returns `DomainError::AlreadyExists` when the `(service_name, username)` + /// pair already maps to a different `id` (UNIQUE constraint). + fn save(&self, account: &Account) -> Result<(), DomainError>; + + /// List every persisted account, ordered by `created_at` ascending. + fn list(&self) -> Result<Vec<Account>, DomainError>; + + /// List accounts for a single service (e.g. `"real-debrid"`), + /// ordered by `created_at` ascending. + fn list_by_service(&self, service_name: &str) -> Result<Vec<Account>, DomainError>; + + /// Delete an account by its identifier. No-op when the account does + /// not exist. + fn delete(&self, id: &AccountId) -> Result<(), DomainError>; +} diff --git a/src-tauri/src/domain/ports/driven/mod.rs b/src-tauri/src/domain/ports/driven/mod.rs index b8750b72..376405e4 100644 --- a/src-tauri/src/domain/ports/driven/mod.rs +++ b/src-tauri/src/domain/ports/driven/mod.rs @@ -1,6 +1,7 @@ #[cfg(test)] mod tests; +pub mod account_repository; pub mod archive_extractor; pub mod checksum_computer; pub mod clipboard_observer; @@ -22,6 +23,7 @@ pub mod plugin_store_client; pub mod stats_repository; pub mod url_opener; +pub use account_repository::AccountRepository; pub use archive_extractor::ArchiveExtractor; pub use checksum_computer::ChecksumComputer; pub use clipboard_observer::ClipboardObserver; diff --git a/src-tauri/src/domain/ports/driven/tests.rs b/src-tauri/src/domain/ports/driven/tests.rs index aef5cc92..cc59a5e3 100644 --- a/src-tauri/src/domain/ports/driven/tests.rs +++ b/src-tauri/src/domain/ports/driven/tests.rs @@ -9,6 +9,7 @@ use std::sync::Mutex; use crate::domain::error::DomainError; use crate::domain::event::DomainEvent; +use crate::domain::model::account::{Account, AccountId, AccountType}; use crate::domain::model::config::{AppConfig, ConfigPatch}; use crate::domain::model::credential::Credential; use crate::domain::model::download::{Download, DownloadId, DownloadState}; @@ -632,6 +633,139 @@ impl FileOpener for RecordingFileOpener { } } +// ──
InMemoryAccountRepository ──────────────────────────────────── + +struct InMemoryAccountRepository { + store: Mutex<HashMap<AccountId, Account>>, +} + +impl InMemoryAccountRepository { + fn new() -> Self { + Self { + store: Mutex::new(HashMap::new()), + } + } +} + +impl AccountRepository for InMemoryAccountRepository { + fn find_by_id(&self, id: &AccountId) -> Result<Option<Account>, DomainError> { + Ok(self.store.lock().unwrap().get(id).cloned()) + } + + fn save(&self, account: &Account) -> Result<(), DomainError> { + let mut guard = self.store.lock().unwrap(); + // Detect (service, username) collision against another id + for (id, existing) in guard.iter() { + if id != account.id() + && existing.service_name() == account.service_name() + && existing.username() == account.username() + { + return Err(DomainError::AlreadyExists(format!( + "{}::{}", + account.service_name(), + account.username() + ))); + } + } + guard.insert(account.id().clone(), account.clone()); + Ok(()) + } + + fn list(&self) -> Result<Vec<Account>, DomainError> { + let mut accounts: Vec<Account> = self.store.lock().unwrap().values().cloned().collect(); + accounts.sort_by_key(|a| a.created_at()); + Ok(accounts) + } + + fn list_by_service(&self, service_name: &str) -> Result<Vec<Account>, DomainError> { + let mut accounts: Vec<Account> = self + .store + .lock() + .unwrap() + .values() + .filter(|a| a.service_name() == service_name) + .cloned() + .collect(); + accounts.sort_by_key(|a| a.created_at()); + Ok(accounts) + } + + fn delete(&self, id: &AccountId) -> Result<(), DomainError> { + self.store.lock().unwrap().remove(id); + Ok(()) + } +} + +#[test] +fn in_memory_account_repository_round_trip_preserves_fields() { + let repo = InMemoryAccountRepository::new(); + let mut account = Account::new( + AccountId::new("acc-rt"), + "real-debrid".to_string(), + "alice".to_string(), + AccountType::Debrid, + 1_700_000_000_000, + ); + account.set_traffic_left(10); + account.set_traffic_total(20); + account.set_valid_until(30); + account.set_last_validated(40); + account.disable(); +
repo.save(&account).expect("save"); + let found = repo + .find_by_id(&AccountId::new("acc-rt")) + .expect("find") + .expect("present"); + assert_eq!(found, account); +} + +#[test] +fn in_memory_account_repository_unique_constraint_rejects_duplicate_service_username() { + let repo = InMemoryAccountRepository::new(); + let a = Account::new( + AccountId::new("acc-a"), + "real-debrid".to_string(), + "bob".to_string(), + AccountType::Debrid, + 0, + ); + let b = Account::new( + AccountId::new("acc-b"), + "real-debrid".to_string(), + "bob".to_string(), + AccountType::Debrid, + 0, + ); + repo.save(&a).expect("first save"); + let err = repo.save(&b).expect_err("conflicting save must fail"); + assert!(matches!(err, DomainError::AlreadyExists(_))); +} + +#[test] +fn in_memory_account_repository_list_by_service_filters_correctly() { + let repo = InMemoryAccountRepository::new(); + let rd = Account::new( + AccountId::new("rd-1"), + "real-debrid".to_string(), + "alice".to_string(), + AccountType::Debrid, + 1, + ); + let ad = Account::new( + AccountId::new("ad-1"), + "alldebrid".to_string(), + "alice".to_string(), + AccountType::Debrid, + 2, + ); + repo.save(&rd).expect("save rd"); + repo.save(&ad).expect("save ad"); + let only_rd = repo.list_by_service("real-debrid").expect("filter"); + assert_eq!(only_rd.len(), 1); + assert_eq!(only_rd[0].id().as_str(), "rd-1"); +} + // ── Send + Sync compile-time assertions ───────────────────────── fn assert_send_sync() {} @@ -652,6 +786,7 @@ fn all_driven_port_mocks_are_send_sync() { assert_send_sync::(); assert_send_sync::(); assert_send_sync::(); + assert_send_sync::(); } #[test] diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index bd6f5d84..3f2c3298 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -38,6 +38,7 @@ pub use adapters::driven::plugin::{ ExtismPluginLoader, GithubStoreClient, PluginRegistry, PluginWatcher, }; pub use adapters::driven::scheduler::{HISTORY_PURGE_STATE_FILE, HistoryPurgeWorker, 
SystemClock}; +pub use adapters::driven::sqlite::account_repo::SqliteAccountRepo; pub use adapters::driven::sqlite::connection; pub use adapters::driven::sqlite::download_read_repo::SqliteDownloadReadRepo; pub use adapters::driven::sqlite::download_repo::SqliteDownloadRepo; From cfaf38f30dbd084244a11bc761822f3de28bf435 Mon Sep 17 00:00:00 2001 From: Mathieu Piton <27002047+mpiton@users.noreply.github.com> Date: Tue, 28 Apr 2026 11:14:17 +0200 Subject: [PATCH 2/3] fix(account): address PR review comments - Replace unchecked `as i64` casts with `i64::try_from(...)` in `ActiveModel::from_domain` so u64 values above i64::MAX surface as `ValidationError` instead of silently flipping to negatives. - Percent-encode service_name and username in `Account::credential_ref()` so reserved characters (`/`, `@`, unicode...) cannot produce ambiguous keyring refs that point at the wrong stored credential. - Drop `CreatedAt` from the upsert `update_columns` list so the original insertion timestamp stays stable across subsequent saves, preventing list-order drift. - Tighten the connection-level UNIQUE test to assert the error message actually mentions \"unique\" rather than accepting any failure. - Sort `InMemoryAccountRepository::list*` deterministically by (created_at, id) to match the SQLite adapter and avoid HashMap iteration order leaking through ties. 
--- .../adapters/driven/sqlite/account_repo.rs | 45 +++++++++++- .../src/adapters/driven/sqlite/connection.rs | 10 ++- .../driven/sqlite/entities/account.rs | 39 ++++++++--- src-tauri/src/domain/model/account.rs | 70 ++++++++++++++++++- src-tauri/src/domain/ports/driven/tests.rs | 15 +++- 5 files changed, 159 insertions(+), 20 deletions(-) diff --git a/src-tauri/src/adapters/driven/sqlite/account_repo.rs b/src-tauri/src/adapters/driven/sqlite/account_repo.rs index f0dbcfa2..189bd43f 100644 --- a/src-tauri/src/adapters/driven/sqlite/account_repo.rs +++ b/src-tauri/src/adapters/driven/sqlite/account_repo.rs @@ -38,12 +38,14 @@ impl AccountRepository for SqliteAccountRepo { } fn save(&self, account: &Account) -> Result<(), DomainError> { - let active = account::ActiveModel::from_domain(account); + let active = account::ActiveModel::from_domain(account)?; block_on(async { // Upsert by primary key. The (service_name, username) UNIQUE // index lets the DB itself enforce the constraint and surface - // it as a uniqueness violation we translate below. + // it as a uniqueness violation we translate below. `created_at` + // is intentionally omitted so the original insertion timestamp + // stays stable across subsequent saves. let result = account::Entity::insert(active) .on_conflict( OnConflict::column(account::Column::Id) @@ -56,7 +58,6 @@ impl AccountRepository for SqliteAccountRepo { account::Column::TrafficTotal, account::Column::ValidUntil, account::Column::LastValidated, - account::Column::CreatedAt, ]) .to_owned(), ) @@ -211,6 +212,44 @@ mod tests { assert_eq!(found.traffic_left(), Some(999)); } + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + async fn test_save_upsert_preserves_original_created_at() { + let db = setup_test_db().await.expect("test db"); + let repo = SqliteAccountRepo::new(db); + + // First save with original timestamp. 
+ let original = Account::new( + AccountId::new("acc-stable"), + "real-debrid".to_string(), + "alice".to_string(), + AccountType::Debrid, + 1_700_000_000_000, + ); + repo.save(&original).expect("first save"); + + // Re-save the same id with a different created_at. It must NOT + // overwrite the stored value, otherwise list ordering becomes + // unstable across writes. + let updated = Account::new( + AccountId::new("acc-stable"), + "real-debrid".to_string(), + "alice".to_string(), + AccountType::Debrid, + 9_999_999_999_999, + ); + repo.save(&updated).expect("upsert"); + + let found = repo + .find_by_id(&AccountId::new("acc-stable")) + .expect("find") + .expect("present"); + assert_eq!( + found.created_at(), + 1_700_000_000_000, + "upsert must not rewrite created_at" + ); + } + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn test_find_by_id_not_found_returns_none() { let db = setup_test_db().await.expect("test db"); diff --git a/src-tauri/src/adapters/driven/sqlite/connection.rs b/src-tauri/src/adapters/driven/sqlite/connection.rs index d8b31552..97076939 100644 --- a/src-tauri/src/adapters/driven/sqlite/connection.rs +++ b/src-tauri/src/adapters/driven/sqlite/connection.rs @@ -164,15 +164,19 @@ mod tests { .await .expect("insert first"); - let dup = db + let dup_err = db .execute(Statement::from_string( sea_orm::DatabaseBackend::Sqlite, format!( "INSERT INTO accounts (id, service_name, username, account_type, enabled, created_at) VALUES ('a2', 'real-debrid', 'alice', 'debrid', 1, {now})" ), )) - .await; - assert!(dup.is_err(), "UNIQUE(service_name, username) must reject"); + .await + .expect_err("UNIQUE(service_name, username) must reject"); + assert!( + dup_err.to_string().to_ascii_lowercase().contains("unique"), + "expected UNIQUE constraint error, got: {dup_err}" + ); let other = db .execute(Statement::from_string( diff --git a/src-tauri/src/adapters/driven/sqlite/entities/account.rs b/src-tauri/src/adapters/driven/sqlite/entities/account.rs 
index 38001f46..b05e75ec 100644 --- a/src-tauri/src/adapters/driven/sqlite/entities/account.rs +++ b/src-tauri/src/adapters/driven/sqlite/entities/account.rs @@ -45,20 +45,41 @@ impl Model { } impl ActiveModel { - pub fn from_domain(account: &Account) -> Self { + pub fn from_domain(account: &Account) -> Result<Self, DomainError> { use sea_orm::ActiveValue::Set; - Self { - id: Set(account.id().as_str().to_string()), + let id_str = account.id().as_str().to_string(); + + let traffic_left = checked_to_i64_opt(account.traffic_left(), "traffic_left", &id_str)?; + let traffic_total = checked_to_i64_opt(account.traffic_total(), "traffic_total", &id_str)?; + let valid_until = checked_to_i64_opt(account.valid_until(), "valid_until", &id_str)?; + let last_validated = + checked_to_i64_opt(account.last_validated(), "last_validated", &id_str)?; + let created_at = i64::try_from(account.created_at()).map_err(|_| { + DomainError::ValidationError(format!("account {id_str}: created_at exceeds i64::MAX")) + })?; + + Ok(Self { + id: Set(id_str), service_name: Set(account.service_name().to_string()), username: Set(account.username().to_string()), account_type: Set(account.account_type().to_string()), enabled: Set(if account.is_enabled() { 1 } else { 0 }), - traffic_left: Set(account.traffic_left().map(|b| b as i64)), - traffic_total: Set(account.traffic_total().map(|b| b as i64)), - valid_until: Set(account.valid_until().map(|t| t as i64)), - last_validated: Set(account.last_validated().map(|t| t as i64)), - created_at: Set(account.created_at() as i64), - } + traffic_left: Set(traffic_left), + traffic_total: Set(traffic_total), + valid_until: Set(valid_until), + last_validated: Set(last_validated), + created_at: Set(created_at), + }) } } + +fn checked_to_i64_opt( + value: Option<u64>, + field: &str, + account_id: &str, +) -> Result<Option<i64>, DomainError> { + value.map(i64::try_from).transpose().map_err(|_| { + DomainError::ValidationError(format!("account {account_id}: {field} exceeds i64::MAX")) + }) +} diff --git
a/src-tauri/src/domain/model/account.rs b/src-tauri/src/domain/model/account.rs index b0299fa8..e34a16ba 100644 --- a/src-tauri/src/domain/model/account.rs +++ b/src-tauri/src/domain/model/account.rs @@ -161,9 +161,15 @@ impl Account { } /// Reference used to look up the credential in the system keyring. - /// Format: `keyring://{service_name}/{username}`. + /// Format: `keyring://{service_name}/{username}`. Both segments are + /// percent-encoded so reserved characters (`/`, `?`, `#`, `@`...) cannot + /// produce ambiguous refs that point at the wrong stored credential. pub fn credential_ref(&self) -> String { - format!("keyring://{}/{}", self.service_name, self.username) + format!( + "keyring://{}/{}", + percent_encode_segment(&self.service_name), + percent_encode_segment(&self.username) + ) } pub fn id(&self) -> &AccountId { @@ -203,6 +209,24 @@ impl Account { } } +/// Percent-encode a string so it can be safely embedded as a path segment in +/// `keyring://...` refs. Only RFC 3986 unreserved characters survive +/// untouched; everything else is rendered as `%XX` per UTF-8 byte. +fn percent_encode_segment(s: &str) -> String { + use std::fmt::Write; + + let mut out = String::with_capacity(s.len()); + for byte in s.bytes() { + let unreserved = byte.is_ascii_alphanumeric() || matches!(byte, b'-' | b'.' | b'_' | b'~'); + if unreserved { + out.push(byte as char); + } else { + let _ = write!(out, "%{byte:02X}"); + } + } + out +} + #[cfg(test)] mod tests { use super::*; @@ -302,10 +326,50 @@ mod tests { #[test] fn test_account_credential_ref_uses_keyring_scheme() { let acc = make_account(); + // `@` in `user@example.com` is reserved → percent-encoded as %40. + assert_eq!( + acc.credential_ref(), + "keyring://ExampleHost/user%40example.com" + ); + } + + #[test] + fn test_account_credential_ref_percent_encodes_reserved_chars() { + // A `/` in the service or username could otherwise collide with the + // path separator and point two distinct accounts at the same ref. 
+ let acc = Account::new( + AccountId::new("acc-collision"), + "real-debrid/eu".to_string(), + "alice/admin".to_string(), + AccountType::Debrid, + 0, + ); assert_eq!( acc.credential_ref(), - "keyring://ExampleHost/user@example.com" + "keyring://real-debrid%2Feu/alice%2Fadmin" + ); + + let other = Account::new( + AccountId::new("acc-other"), + "real-debrid".to_string(), + "eu/alice/admin".to_string(), + AccountType::Debrid, + 0, + ); + assert_ne!(acc.credential_ref(), other.credential_ref()); + } + + #[test] + fn test_account_credential_ref_handles_unicode_username() { + let acc = Account::new( + AccountId::new("acc-utf8"), + "host".to_string(), + "café".to_string(), + AccountType::Free, + 0, ); + // `é` is `0xC3 0xA9` in UTF-8. + assert_eq!(acc.credential_ref(), "keyring://host/caf%C3%A9"); } #[test] diff --git a/src-tauri/src/domain/ports/driven/tests.rs b/src-tauri/src/domain/ports/driven/tests.rs index cc59a5e3..6436e714 100644 --- a/src-tauri/src/domain/ports/driven/tests.rs +++ b/src-tauri/src/domain/ports/driven/tests.rs @@ -673,7 +673,14 @@ impl AccountRepository for InMemoryAccountRepository { fn list(&self) -> Result, DomainError> { let mut accounts: Vec = self.store.lock().unwrap().values().cloned().collect(); - accounts.sort_by_key(|a| a.created_at()); + // Secondary sort by id breaks ties from `HashMap` iteration order + // when multiple accounts share a `created_at`, mirroring the SQLite + // adapter which orders by (created_at, id). 
+ accounts.sort_by(|a, b| { + a.created_at() + .cmp(&b.created_at()) + .then_with(|| a.id().as_str().cmp(b.id().as_str())) + }); Ok(accounts) } @@ -686,7 +693,11 @@ impl AccountRepository for InMemoryAccountRepository { .filter(|a| a.service_name() == service_name) .cloned() .collect(); - accounts.sort_by_key(|a| a.created_at()); + accounts.sort_by(|a, b| { + a.created_at() + .cmp(&b.created_at()) + .then_with(|| a.id().as_str().cmp(b.id().as_str())) + }); Ok(accounts) } From 6734ee68175ba8b326664c4d1d1e59ad5544289f Mon Sep 17 00:00:00 2001 From: Mathieu Piton <27002047+mpiton@users.noreply.github.com> Date: Tue, 28 Apr 2026 12:04:13 +0200 Subject: [PATCH 3/3] fix(account): preserve created_at in InMemoryAccountRepository Re-saving the same id in the in-memory mock previously overwrote `created_at`, which diverged from the SQLite adapter's insert-only semantics. The mock now mirrors production: same-id saves keep the existing timestamp and only update mutable fields. --- src-tauri/src/domain/ports/driven/tests.rs | 53 +++++++++++++++++++++- 1 file changed, 52 insertions(+), 1 deletion(-) diff --git a/src-tauri/src/domain/ports/driven/tests.rs b/src-tauri/src/domain/ports/driven/tests.rs index 6436e714..b9f57509 100644 --- a/src-tauri/src/domain/ports/driven/tests.rs +++ b/src-tauri/src/domain/ports/driven/tests.rs @@ -667,7 +667,27 @@ impl AccountRepository for InMemoryAccountRepository { ))); } } - guard.insert(account.id().clone(), account.clone()); + + // Mirror the SQLite adapter: `created_at` is insert-only. On re-save + // of the same id, keep the existing timestamp so the mock cannot + // diverge from production on list ordering or round-trip behavior. 
+ let stored = match guard.get(account.id()) { + Some(existing) => Account::reconstruct( + account.id().clone(), + account.service_name().to_string(), + account.username().to_string(), + account.account_type(), + account.is_enabled(), + account.traffic_left(), + account.traffic_total(), + account.valid_until(), + account.last_validated(), + existing.created_at(), + ), + None => account.clone(), + }; + + guard.insert(account.id().clone(), stored); Ok(()) } @@ -731,6 +751,37 @@ fn in_memory_account_repository_round_trip_preserves_fields() { assert_eq!(found, account); } +#[test] +fn in_memory_account_repository_save_preserves_original_created_at() { + // Same divergence guard as the SQLite adapter: re-saving a known id + // must not rewrite created_at, otherwise list ordering becomes + // unstable across writes. + let repo = InMemoryAccountRepository::new(); + let original = Account::new( + AccountId::new("acc-stable"), + "real-debrid".to_string(), + "alice".to_string(), + AccountType::Debrid, + 1_700_000_000_000, + ); + repo.save(&original).expect("first save"); + + let updated = Account::new( + AccountId::new("acc-stable"), + "real-debrid".to_string(), + "alice".to_string(), + AccountType::Debrid, + 9_999_999_999_999, + ); + repo.save(&updated).expect("upsert"); + + let found = repo + .find_by_id(&AccountId::new("acc-stable")) + .expect("find") + .expect("present"); + assert_eq!(found.created_at(), 1_700_000_000_000); +} + #[test] fn in_memory_account_repository_unique_constraint_rejects_duplicate_service_username() { let repo = InMemoryAccountRepository::new();