Initial commit
This commit is contained in:
4
.gitignore
vendored
Normal file
4
.gitignore
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
target/
.env
*.db
*.db-journal
|
||||
20
Cargo.toml
Normal file
20
Cargo.toml
Normal file
@@ -0,0 +1,20 @@
|
||||
[package]
name = "shanty-db"
version = "0.1.0"
edition = "2024"
license = "MIT"
description = "Shared database schema and access layer for Shanty"
repository = "ssh://connor@git.rcjohnstone.com:2222/Shanty/db.git"

[dependencies]
sea-orm = { version = "1", features = ["sqlx-sqlite", "runtime-tokio-native-tls", "macros", "with-chrono"] }
sea-orm-migration = { version = "1", features = ["sqlx-sqlite", "runtime-tokio-native-tls"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
chrono = { version = "0.4", features = ["serde"] }
thiserror = "2"
tracing = "0.1"
tokio = { version = "1", features = ["full"] }

[dev-dependencies]
tokio = { version = "1", features = ["full", "test-util"] }
|
||||
21
readme.md
Normal file
21
readme.md
Normal file
@@ -0,0 +1,21 @@
|
||||
# shanty-db

Shared database schema and access layer for [Shanty](ssh://connor@git.rcjohnstone.com:2222/Shanty/shanty.git).

Uses Sea-ORM with SQLite. Provides migrations, typed entities, connection pooling,
and query functions used by all other Shanty crates.

## Usage

```rust
use shanty_db::Database;

let db = Database::new("sqlite:///path/to/shanty.db?mode=rwc").await?;
// use db.conn() for queries
```

## Running Tests

```sh
cargo test
```
|
||||
56
src/entities/album.rs
Normal file
56
src/entities/album.rs
Normal file
@@ -0,0 +1,56 @@
|
||||
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// A row in the `albums` table.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "albums")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub name: String,
    /// Album-artist string as tagged; kept separately from the `artist_id` link.
    pub album_artist: String,
    #[sea_orm(nullable)]
    pub year: Option<i32>,
    #[sea_orm(nullable)]
    pub genre: Option<String>,
    #[sea_orm(nullable)]
    pub cover_art_path: Option<String>,
    #[sea_orm(nullable)]
    pub musicbrainz_id: Option<String>,
    /// FK to `artists.id`; nulled out when the referenced artist is deleted
    /// (see the `SetNull` relation below).
    #[sea_orm(nullable)]
    pub artist_id: Option<i32>,
}

/// Relations: optional parent artist, plus child tracks and wanted items.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::artist::Entity",
        from = "Column::ArtistId",
        to = "super::artist::Column::Id",
        on_delete = "SetNull"
    )]
    Artist,
    #[sea_orm(has_many = "super::track::Entity")]
    Tracks,
    #[sea_orm(has_many = "super::wanted_item::Entity")]
    WantedItems,
}

impl Related<super::artist::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Artist.def()
    }
}

impl Related<super::track::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Tracks.def()
    }
}

impl Related<super::wanted_item::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::WantedItems.def()
    }
}

// No custom hooks; default ActiveModel behavior is sufficient.
impl ActiveModelBehavior for ActiveModel {}
|
||||
47
src/entities/artist.rs
Normal file
47
src/entities/artist.rs
Normal file
@@ -0,0 +1,47 @@
|
||||
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// A row in the `artists` table.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "artists")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub name: String,
    #[sea_orm(nullable)]
    pub musicbrainz_id: Option<String>,
    pub added_at: chrono::NaiveDateTime,
    /// JSON-serialized Vec of top song info
    pub top_songs: String,
    /// JSON-serialized Vec of similar artist info
    pub similar_artists: String,
}

/// Relations: an artist owns albums, tracks, and wanted items.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_many = "super::album::Entity")]
    Albums,
    #[sea_orm(has_many = "super::track::Entity")]
    Tracks,
    #[sea_orm(has_many = "super::wanted_item::Entity")]
    WantedItems,
}

impl Related<super::album::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Albums.def()
    }
}

impl Related<super::track::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Tracks.def()
    }
}

impl Related<super::wanted_item::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::WantedItems.def()
    }
}

// No custom hooks; default ActiveModel behavior is sufficient.
impl ActiveModelBehavior for ActiveModel {}
|
||||
55
src/entities/download_queue.rs
Normal file
55
src/entities/download_queue.rs
Normal file
@@ -0,0 +1,55 @@
|
||||
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// Lifecycle state of a download job, stored as TEXT in the database.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum)]
#[sea_orm(rs_type = "String", db_type = "Text")]
pub enum DownloadStatus {
    #[sea_orm(string_value = "pending")]
    Pending,
    #[sea_orm(string_value = "downloading")]
    Downloading,
    #[sea_orm(string_value = "completed")]
    Completed,
    #[sea_orm(string_value = "failed")]
    Failed,
    #[sea_orm(string_value = "cancelled")]
    Cancelled,
}

/// A row in the `download_queue` table: one queued/running/finished download.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "download_queue")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    /// FK to `wanted_items.id`; the queue row is removed when that wanted item
    /// is deleted (see the `Cascade` relation below).
    #[sea_orm(nullable)]
    pub wanted_item_id: Option<i32>,
    /// Search query handed to the download backend.
    pub query: String,
    #[sea_orm(nullable)]
    pub source_url: Option<String>,
    /// Name of the backend that performs the download.
    pub source_backend: String,
    pub status: DownloadStatus,
    /// Populated when `status` is a failure state; otherwise NULL.
    #[sea_orm(nullable)]
    pub error_message: Option<String>,
    pub retry_count: i32,
    pub created_at: chrono::NaiveDateTime,
    pub updated_at: chrono::NaiveDateTime,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::wanted_item::Entity",
        from = "Column::WantedItemId",
        to = "super::wanted_item::Column::Id",
        on_delete = "Cascade"
    )]
    WantedItem,
}

impl Related<super::wanted_item::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::WantedItem.def()
    }
}

// No custom hooks; default ActiveModel behavior is sufficient.
impl ActiveModelBehavior for ActiveModel {}
|
||||
13
src/entities/mod.rs
Normal file
13
src/entities/mod.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
//! Sea-ORM entity definitions for every table in the Shanty database.

pub mod album;
pub mod artist;
pub mod download_queue;
pub mod search_cache;
pub mod track;
pub mod wanted_item;

// Re-export each table's `Entity` under a plural, table-like alias so callers
// can write e.g. `entities::Tracks::find()`.
pub use album::Entity as Albums;
pub use artist::Entity as Artists;
pub use download_queue::Entity as DownloadQueue;
pub use search_cache::Entity as SearchCache;
pub use track::Entity as Tracks;
pub use wanted_item::Entity as WantedItems;
|
||||
20
src/entities/search_cache.rs
Normal file
20
src/entities/search_cache.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// A row in the `search_cache` table: one cached provider search result.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "search_cache")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    /// Unique cache key derived from the search query.
    #[sea_orm(unique)]
    pub query_key: String,
    /// Which search provider produced `result_json`.
    pub provider: String,
    /// JSON-serialized search results.
    pub result_json: String,
    pub created_at: chrono::NaiveDateTime,
    /// After this timestamp the entry is considered stale.
    pub expires_at: chrono::NaiveDateTime,
}

// The cache table stands alone; it references no other tables.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

// No custom hooks; default ActiveModel behavior is sufficient.
impl ActiveModelBehavior for ActiveModel {}
|
||||
86
src/entities/track.rs
Normal file
86
src/entities/track.rs
Normal file
@@ -0,0 +1,86 @@
|
||||
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// A row in the `tracks` table: one audio file on disk plus its tag metadata.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "tracks")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    /// Absolute path of the audio file; unique per track.
    #[sea_orm(unique)]
    pub file_path: String,
    #[sea_orm(nullable)]
    pub title: Option<String>,
    #[sea_orm(nullable)]
    pub artist: Option<String>,
    #[sea_orm(nullable)]
    pub album: Option<String>,
    #[sea_orm(nullable)]
    pub album_artist: Option<String>,
    #[sea_orm(nullable)]
    pub track_number: Option<i32>,
    #[sea_orm(nullable)]
    pub disc_number: Option<i32>,
    /// Duration stored as a SQL DOUBLE — presumably seconds; confirm with the
    /// scanner that writes it.
    #[sea_orm(nullable, column_type = "Double")]
    pub duration: Option<f64>,
    #[sea_orm(nullable)]
    pub genre: Option<String>,
    #[sea_orm(nullable)]
    pub year: Option<i32>,
    #[sea_orm(nullable)]
    pub codec: Option<String>,
    #[sea_orm(nullable)]
    pub bitrate: Option<i32>,
    pub file_size: i64,
    #[sea_orm(nullable)]
    pub fingerprint: Option<String>,
    #[sea_orm(nullable)]
    pub musicbrainz_id: Option<String>,
    /// FK to `artists.id`; nulled when the artist row is deleted.
    #[sea_orm(nullable)]
    pub artist_id: Option<i32>,
    /// FK to `albums.id`; nulled when the album row is deleted.
    #[sea_orm(nullable)]
    pub album_id: Option<i32>,
    /// Filesystem modification time recorded at scan, used to detect changes.
    #[sea_orm(nullable)]
    pub file_mtime: Option<chrono::NaiveDateTime>,
    pub added_at: chrono::NaiveDateTime,
    pub updated_at: chrono::NaiveDateTime,
}

/// Relations: optional parent artist/album, plus child wanted items.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::artist::Entity",
        from = "Column::ArtistId",
        to = "super::artist::Column::Id",
        on_delete = "SetNull"
    )]
    Artist,
    #[sea_orm(
        belongs_to = "super::album::Entity",
        from = "Column::AlbumId",
        to = "super::album::Column::Id",
        on_delete = "SetNull"
    )]
    Album,
    #[sea_orm(has_many = "super::wanted_item::Entity")]
    WantedItems,
}

impl Related<super::artist::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Artist.def()
    }
}

impl Related<super::album::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Album.def()
    }
}

impl Related<super::wanted_item::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::WantedItems.def()
    }
}

// No custom hooks; default ActiveModel behavior is sufficient.
impl ActiveModelBehavior for ActiveModel {}
|
||||
94
src/entities/wanted_item.rs
Normal file
94
src/entities/wanted_item.rs
Normal file
@@ -0,0 +1,94 @@
|
||||
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// Acquisition state of a wanted item, stored as TEXT in the database.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum)]
#[sea_orm(rs_type = "String", db_type = "Text")]
pub enum WantedStatus {
    #[sea_orm(string_value = "wanted")]
    Wanted,
    #[sea_orm(string_value = "available")]
    Available,
    #[sea_orm(string_value = "downloaded")]
    Downloaded,
}

/// What kind of library object a wanted item refers to, stored as TEXT.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, EnumIter, DeriveActiveEnum)]
#[sea_orm(rs_type = "String", db_type = "Text")]
pub enum ItemType {
    #[sea_orm(string_value = "artist")]
    Artist,
    #[sea_orm(string_value = "album")]
    Album,
    #[sea_orm(string_value = "track")]
    Track,
}

/// A row in the `wanted_items` table. Exactly one of the three FK columns is
/// expected to be set, matching `item_type` — TODO confirm callers enforce this.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "wanted_items")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub item_type: ItemType,
    #[sea_orm(nullable)]
    pub artist_id: Option<i32>,
    #[sea_orm(nullable)]
    pub album_id: Option<i32>,
    #[sea_orm(nullable)]
    pub track_id: Option<i32>,
    pub status: WantedStatus,
    pub added_at: chrono::NaiveDateTime,
    pub updated_at: chrono::NaiveDateTime,
}

/// Relations: optional parents (artist/album/track, all cascading on delete)
/// and child download-queue rows.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::artist::Entity",
        from = "Column::ArtistId",
        to = "super::artist::Column::Id",
        on_delete = "Cascade"
    )]
    Artist,
    #[sea_orm(
        belongs_to = "super::album::Entity",
        from = "Column::AlbumId",
        to = "super::album::Column::Id",
        on_delete = "Cascade"
    )]
    Album,
    #[sea_orm(
        belongs_to = "super::track::Entity",
        from = "Column::TrackId",
        to = "super::track::Column::Id",
        on_delete = "Cascade"
    )]
    Track,
    #[sea_orm(has_many = "super::download_queue::Entity")]
    Downloads,
}

impl Related<super::artist::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Artist.def()
    }
}

impl Related<super::album::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Album.def()
    }
}

impl Related<super::track::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Track.def()
    }
}

impl Related<super::download_queue::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Downloads.def()
    }
}

// No custom hooks; default ActiveModel behavior is sufficient.
impl ActiveModelBehavior for ActiveModel {}
|
||||
18
src/error.rs
Normal file
18
src/error.rs
Normal file
@@ -0,0 +1,18 @@
|
||||
use sea_orm::DbErr;

/// Unified error type for the shanty-db crate.
#[derive(Debug, thiserror::Error)]
pub enum DbError {
    /// Any error bubbled up from Sea-ORM / the underlying driver.
    #[error("database error: {0}")]
    SeaOrm(#[from] DbErr),

    /// A schema migration failed to apply.
    #[error("migration error: {0}")]
    Migration(String),

    /// The requested row does not exist.
    #[error("not found: {0}")]
    NotFound(String),

    /// JSON (de)serialization of a stored payload failed.
    #[error("serialization error: {0}")]
    Serialization(#[from] serde_json::Error),
}

/// Convenience alias used throughout the crate's query layer.
pub type DbResult<T> = Result<T, DbError>;
|
||||
69
src/lib.rs
Normal file
69
src/lib.rs
Normal file
@@ -0,0 +1,69 @@
|
||||
//! Shared database schema and access layer for Shanty.
//!
//! This crate owns the database schema, migrations, and provides a typed access
//! layer used by all other Shanty crates. It uses Sea-ORM with SQLite.

pub mod entities;
pub mod error;
pub mod migration;
pub mod queries;

use sea_orm::{ConnectOptions, Database as SeaDatabase, DatabaseConnection};
use sea_orm_migration::MigratorTrait;
use tracing::info;

pub use entities::*;
pub use error::{DbError, DbResult};

/// Main database handle. Wraps a Sea-ORM `DatabaseConnection` and provides
/// initialization with automatic migration.
pub struct Database {
    conn: DatabaseConnection,
}

impl Database {
    /// Connect to the database at the given URL, creating it if necessary,
    /// and run all pending migrations.
    ///
    /// For SQLite, use a URL like `sqlite:///path/to/shanty.db?mode=rwc`
    /// or `sqlite::memory:` for testing.
    pub async fn new(database_url: &str) -> DbResult<Self> {
        let mut opts = ConnectOptions::new(database_url.to_string());
        opts.max_connections(10)
            .min_connections(1)
            .sqlx_logging(false);

        let conn = SeaDatabase::connect(opts)
            .await
            .map_err(DbError::SeaOrm)?;

        // Enable WAL mode for better concurrent read performance.
        // Skipped for in-memory databases, where WAL is not applicable.
        if database_url.starts_with("sqlite:") && !database_url.contains(":memory:") {
            use sea_orm::ConnectionTrait;
            // journal_mode=WAL is persisted in the database file, so one
            // execution is enough.
            conn.execute_unprepared("PRAGMA journal_mode=WAL")
                .await
                .map_err(DbError::SeaOrm)?;
            // NOTE(review): PRAGMA foreign_keys is per-connection in SQLite,
            // and the pool above holds up to 10 connections — this statement
            // only affects whichever pooled connection runs it. Consider
            // enabling foreign_keys via the connection options so every
            // pooled connection enforces it; verify before relying on FK
            // enforcement.
            conn.execute_unprepared("PRAGMA foreign_keys=ON")
                .await
                .map_err(DbError::SeaOrm)?;
        }

        // Run pending migrations (no-op when the schema is already current).
        migration::Migrator::up(&conn, None)
            .await
            .map_err(|e| DbError::Migration(e.to_string()))?;
        info!("database migrations complete");

        Ok(Self { conn })
    }

    /// Get a reference to the underlying database connection.
    pub fn conn(&self) -> &DatabaseConnection {
        &self.conn
    }

    /// Consume self and return the underlying connection.
    pub fn into_conn(self) -> DatabaseConnection {
        self.conn
    }
}
|
||||
85
src/migration/m20260317_000001_create_artists.rs
Normal file
85
src/migration/m20260317_000001_create_artists.rs
Normal file
@@ -0,0 +1,85 @@
|
||||
use sea_orm_migration::prelude::*;

/// Creates the `artists` table plus a name index and a unique index on the
/// MusicBrainz id.
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(Artists::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(Artists::Id)
                            .integer()
                            .not_null()
                            .auto_increment()
                            .primary_key(),
                    )
                    .col(ColumnDef::new(Artists::Name).text().not_null())
                    .col(ColumnDef::new(Artists::MusicbrainzId).text())
                    .col(
                        ColumnDef::new(Artists::AddedAt)
                            .date_time()
                            .not_null()
                            .default(Expr::current_timestamp()),
                    )
                    // JSON payload columns default to an empty JSON array.
                    .col(
                        ColumnDef::new(Artists::TopSongs)
                            .text()
                            .not_null()
                            .default("[]"),
                    )
                    .col(
                        ColumnDef::new(Artists::SimilarArtists)
                            .text()
                            .not_null()
                            .default("[]"),
                    )
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .name("idx_artists_name")
                    .table(Artists::Table)
                    .col(Artists::Name)
                    .to_owned(),
            )
            .await?;

        // Unique: at most one artist row per MusicBrainz id.
        manager
            .create_index(
                Index::create()
                    .name("idx_artists_musicbrainz_id")
                    .table(Artists::Table)
                    .col(Artists::MusicbrainzId)
                    .unique()
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(Artists::Table).to_owned())
            .await
    }
}

/// Column identifiers, shared with later migrations that add foreign keys.
#[derive(DeriveIden)]
pub(crate) enum Artists {
    Table,
    Id,
    Name,
    MusicbrainzId,
    AddedAt,
    TopSongs,
    SimilarArtists,
}
|
||||
93
src/migration/m20260317_000002_create_albums.rs
Normal file
93
src/migration/m20260317_000002_create_albums.rs
Normal file
@@ -0,0 +1,93 @@
|
||||
use sea_orm_migration::prelude::*;

use super::m20260317_000001_create_artists::Artists;

/// Creates the `albums` table (FK to `artists`) plus name/artist indexes and
/// a unique index on the MusicBrainz id.
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(Albums::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(Albums::Id)
                            .integer()
                            .not_null()
                            .auto_increment()
                            .primary_key(),
                    )
                    .col(ColumnDef::new(Albums::Name).text().not_null())
                    .col(ColumnDef::new(Albums::AlbumArtist).text().not_null())
                    .col(ColumnDef::new(Albums::Year).integer())
                    .col(ColumnDef::new(Albums::Genre).text())
                    .col(ColumnDef::new(Albums::CoverArtPath).text())
                    .col(ColumnDef::new(Albums::MusicbrainzId).text())
                    .col(ColumnDef::new(Albums::ArtistId).integer())
                    // Keep orphaned albums when an artist is removed.
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_albums_artist_id")
                            .from(Albums::Table, Albums::ArtistId)
                            .to(Artists::Table, Artists::Id)
                            .on_delete(ForeignKeyAction::SetNull),
                    )
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .name("idx_albums_name")
                    .table(Albums::Table)
                    .col(Albums::Name)
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .name("idx_albums_artist_id")
                    .table(Albums::Table)
                    .col(Albums::ArtistId)
                    .to_owned(),
            )
            .await?;

        // Unique: at most one album row per MusicBrainz id.
        manager
            .create_index(
                Index::create()
                    .name("idx_albums_musicbrainz_id")
                    .table(Albums::Table)
                    .col(Albums::MusicbrainzId)
                    .unique()
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(Albums::Table).to_owned())
            .await
    }
}

/// Column identifiers, shared with later migrations that add foreign keys.
#[derive(DeriveIden)]
pub(crate) enum Albums {
    Table,
    Id,
    Name,
    AlbumArtist,
    Year,
    Genre,
    CoverArtPath,
    MusicbrainzId,
    ArtistId,
}
|
||||
146
src/migration/m20260317_000003_create_tracks.rs
Normal file
146
src/migration/m20260317_000003_create_tracks.rs
Normal file
@@ -0,0 +1,146 @@
|
||||
use sea_orm_migration::prelude::*;

use super::m20260317_000001_create_artists::Artists;
use super::m20260317_000002_create_albums::Albums;

/// Creates the `tracks` table (FKs to `artists` and `albums`) plus lookup
/// indexes on artist/album ids, title, and artist name.
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(Tracks::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(Tracks::Id)
                            .integer()
                            .not_null()
                            .auto_increment()
                            .primary_key(),
                    )
                    // One row per file on disk.
                    .col(ColumnDef::new(Tracks::FilePath).text().not_null().unique_key())
                    .col(ColumnDef::new(Tracks::Title).text())
                    .col(ColumnDef::new(Tracks::Artist).text())
                    .col(ColumnDef::new(Tracks::Album).text())
                    .col(ColumnDef::new(Tracks::AlbumArtist).text())
                    .col(ColumnDef::new(Tracks::TrackNumber).integer())
                    .col(ColumnDef::new(Tracks::DiscNumber).integer())
                    .col(ColumnDef::new(Tracks::Duration).double())
                    .col(ColumnDef::new(Tracks::Genre).text())
                    .col(ColumnDef::new(Tracks::Year).integer())
                    .col(ColumnDef::new(Tracks::Codec).text())
                    .col(ColumnDef::new(Tracks::Bitrate).integer())
                    .col(ColumnDef::new(Tracks::FileSize).big_integer().not_null())
                    .col(ColumnDef::new(Tracks::Fingerprint).text())
                    .col(ColumnDef::new(Tracks::MusicbrainzId).text())
                    .col(ColumnDef::new(Tracks::ArtistId).integer())
                    .col(ColumnDef::new(Tracks::AlbumId).integer())
                    .col(ColumnDef::new(Tracks::FileMtime).date_time())
                    .col(
                        ColumnDef::new(Tracks::AddedAt)
                            .date_time()
                            .not_null()
                            .default(Expr::current_timestamp()),
                    )
                    .col(
                        ColumnDef::new(Tracks::UpdatedAt)
                            .date_time()
                            .not_null()
                            .default(Expr::current_timestamp()),
                    )
                    // Keep orphaned tracks when an artist/album is removed.
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_tracks_artist_id")
                            .from(Tracks::Table, Tracks::ArtistId)
                            .to(Artists::Table, Artists::Id)
                            .on_delete(ForeignKeyAction::SetNull),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_tracks_album_id")
                            .from(Tracks::Table, Tracks::AlbumId)
                            .to(Albums::Table, Albums::Id)
                            .on_delete(ForeignKeyAction::SetNull),
                    )
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .name("idx_tracks_artist_id")
                    .table(Tracks::Table)
                    .col(Tracks::ArtistId)
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .name("idx_tracks_album_id")
                    .table(Tracks::Table)
                    .col(Tracks::AlbumId)
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .name("idx_tracks_title")
                    .table(Tracks::Table)
                    .col(Tracks::Title)
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .name("idx_tracks_artist")
                    .table(Tracks::Table)
                    .col(Tracks::Artist)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(Tracks::Table).to_owned())
            .await
    }
}

/// Column identifiers, shared with later migrations that add foreign keys.
#[derive(DeriveIden)]
pub(crate) enum Tracks {
    Table,
    Id,
    FilePath,
    Title,
    Artist,
    Album,
    AlbumArtist,
    TrackNumber,
    DiscNumber,
    Duration,
    Genre,
    Year,
    Codec,
    Bitrate,
    FileSize,
    Fingerprint,
    MusicbrainzId,
    ArtistId,
    AlbumId,
    FileMtime,
    AddedAt,
    UpdatedAt,
}
|
||||
103
src/migration/m20260317_000004_create_wanted_items.rs
Normal file
103
src/migration/m20260317_000004_create_wanted_items.rs
Normal file
@@ -0,0 +1,103 @@
|
||||
use sea_orm_migration::prelude::*;

use super::m20260317_000001_create_artists::Artists;
use super::m20260317_000002_create_albums::Albums;
use super::m20260317_000003_create_tracks::Tracks;

/// Creates the `wanted_items` table (cascading FKs to artists, albums, and
/// tracks) plus an index on status for worker queries.
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(WantedItems::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(WantedItems::Id)
                            .integer()
                            .not_null()
                            .auto_increment()
                            .primary_key(),
                    )
                    .col(ColumnDef::new(WantedItems::ItemType).text().not_null())
                    .col(ColumnDef::new(WantedItems::ArtistId).integer())
                    .col(ColumnDef::new(WantedItems::AlbumId).integer())
                    .col(ColumnDef::new(WantedItems::TrackId).integer())
                    .col(
                        ColumnDef::new(WantedItems::Status)
                            .text()
                            .not_null()
                            // Matches WantedStatus::Wanted's string value.
                            .default("wanted"),
                    )
                    .col(
                        ColumnDef::new(WantedItems::AddedAt)
                            .date_time()
                            .not_null()
                            .default(Expr::current_timestamp()),
                    )
                    .col(
                        ColumnDef::new(WantedItems::UpdatedAt)
                            .date_time()
                            .not_null()
                            .default(Expr::current_timestamp()),
                    )
                    // Wanted items are deleted along with their referenced row.
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_wanted_artist_id")
                            .from(WantedItems::Table, WantedItems::ArtistId)
                            .to(Artists::Table, Artists::Id)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_wanted_album_id")
                            .from(WantedItems::Table, WantedItems::AlbumId)
                            .to(Albums::Table, Albums::Id)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_wanted_track_id")
                            .from(WantedItems::Table, WantedItems::TrackId)
                            .to(Tracks::Table, Tracks::Id)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .name("idx_wanted_items_status")
                    .table(WantedItems::Table)
                    .col(WantedItems::Status)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(WantedItems::Table).to_owned())
            .await
    }
}

/// Column identifiers, shared with later migrations that add foreign keys.
#[derive(DeriveIden)]
pub(crate) enum WantedItems {
    Table,
    Id,
    ItemType,
    ArtistId,
    AlbumId,
    TrackId,
    Status,
    AddedAt,
    UpdatedAt,
}
|
||||
101
src/migration/m20260317_000005_create_download_queue.rs
Normal file
101
src/migration/m20260317_000005_create_download_queue.rs
Normal file
@@ -0,0 +1,101 @@
|
||||
use sea_orm_migration::prelude::*;

use super::m20260317_000004_create_wanted_items::WantedItems;

/// Creates the `download_queue` table (cascading FK to `wanted_items`) plus
/// an index on status for worker queries.
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(DownloadQueue::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(DownloadQueue::Id)
                            .integer()
                            .not_null()
                            .auto_increment()
                            .primary_key(),
                    )
                    .col(ColumnDef::new(DownloadQueue::WantedItemId).integer())
                    .col(ColumnDef::new(DownloadQueue::Query).text().not_null())
                    .col(ColumnDef::new(DownloadQueue::SourceUrl).text())
                    .col(
                        ColumnDef::new(DownloadQueue::SourceBackend)
                            .text()
                            .not_null()
                            .default("ytdlp"),
                    )
                    .col(
                        ColumnDef::new(DownloadQueue::Status)
                            .text()
                            .not_null()
                            // Matches DownloadStatus::Pending's string value.
                            .default("pending"),
                    )
                    .col(ColumnDef::new(DownloadQueue::ErrorMessage).text())
                    .col(
                        ColumnDef::new(DownloadQueue::RetryCount)
                            .integer()
                            .not_null()
                            .default(0),
                    )
                    .col(
                        ColumnDef::new(DownloadQueue::CreatedAt)
                            .date_time()
                            .not_null()
                            .default(Expr::current_timestamp()),
                    )
                    .col(
                        ColumnDef::new(DownloadQueue::UpdatedAt)
                            .date_time()
                            .not_null()
                            .default(Expr::current_timestamp()),
                    )
                    // Queue rows are deleted along with their wanted item.
                    .foreign_key(
                        ForeignKey::create()
                            .name("fk_download_wanted_item_id")
                            .from(DownloadQueue::Table, DownloadQueue::WantedItemId)
                            .to(WantedItems::Table, WantedItems::Id)
                            .on_delete(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .name("idx_download_queue_status")
                    .table(DownloadQueue::Table)
                    .col(DownloadQueue::Status)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(DownloadQueue::Table).to_owned())
            .await
    }
}

/// Column identifiers for the `download_queue` table.
#[derive(DeriveIden)]
pub(crate) enum DownloadQueue {
    Table,
    Id,
    WantedItemId,
    Query,
    SourceUrl,
    SourceBackend,
    Status,
    ErrorMessage,
    RetryCount,
    CreatedAt,
    UpdatedAt,
}
|
||||
73
src/migration/m20260317_000006_create_search_cache.rs
Normal file
73
src/migration/m20260317_000006_create_search_cache.rs
Normal file
@@ -0,0 +1,73 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Create `search_cache`: a per-query-key JSON blob store with a TTL
    /// column (`expires_at`), plus an index on `expires_at` so purge scans
    /// don't need a full table walk.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(SearchCache::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(SearchCache::Id)
                            .integer()
                            .not_null()
                            .auto_increment()
                            .primary_key(),
                    )
                    // One row per unique query key; upserts handled in queries/cache.rs.
                    .col(
                        ColumnDef::new(SearchCache::QueryKey)
                            .text()
                            .not_null()
                            .unique_key(),
                    )
                    .col(ColumnDef::new(SearchCache::Provider).text().not_null())
                    .col(ColumnDef::new(SearchCache::ResultJson).text().not_null())
                    .col(
                        ColumnDef::new(SearchCache::CreatedAt)
                            .date_time()
                            .not_null()
                            .default(Expr::current_timestamp()),
                    )
                    // Deliberately no default: callers must always supply an expiry.
                    .col(
                        ColumnDef::new(SearchCache::ExpiresAt)
                            .date_time()
                            .not_null(),
                    )
                    .to_owned(),
            )
            .await?;

        manager
            .create_index(
                Index::create()
                    .name("idx_search_cache_expires")
                    .table(SearchCache::Table)
                    .col(SearchCache::ExpiresAt)
                    .to_owned(),
            )
            .await?;

        Ok(())
    }

    /// Revert by dropping the table; the index is dropped with it.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(SearchCache::Table).to_owned())
            .await
    }
}
|
||||
|
||||
/// Identifier enum for the `search_cache` table and its columns.
#[derive(DeriveIden)]
enum SearchCache {
    Table,
    Id,
    /// Unique cache key; one row per key.
    QueryKey,
    Provider,
    ResultJson,
    CreatedAt,
    /// TTL boundary; rows past this timestamp are treated as absent.
    ExpiresAt,
}
|
||||
24
src/migration/mod.rs
Normal file
24
src/migration/mod.rs
Normal file
@@ -0,0 +1,24 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
mod m20260317_000001_create_artists;
|
||||
mod m20260317_000002_create_albums;
|
||||
mod m20260317_000003_create_tracks;
|
||||
mod m20260317_000004_create_wanted_items;
|
||||
mod m20260317_000005_create_download_queue;
|
||||
mod m20260317_000006_create_search_cache;
|
||||
|
||||
/// Entry point that aggregates every schema migration in this crate.
pub struct Migrator;
|
||||
#[async_trait::async_trait]
impl MigratorTrait for Migrator {
    /// The ordered migration list. Order matters: `download_queue` declares a
    /// foreign key onto `wanted_items`, so the earlier tables must exist first.
    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
        vec![
            Box::new(m20260317_000001_create_artists::Migration),
            Box::new(m20260317_000002_create_albums::Migration),
            Box::new(m20260317_000003_create_tracks::Migration),
            Box::new(m20260317_000004_create_wanted_items::Migration),
            Box::new(m20260317_000005_create_download_queue::Migration),
            Box::new(m20260317_000006_create_search_cache::Migration),
        ]
    }
}
|
||||
82
src/queries/albums.rs
Normal file
82
src/queries/albums.rs
Normal file
@@ -0,0 +1,82 @@
|
||||
use sea_orm::*;
|
||||
|
||||
use crate::entities::album::{self, ActiveModel, Entity as Albums, Model as Album};
|
||||
use crate::error::{DbError, DbResult};
|
||||
|
||||
pub async fn upsert(
|
||||
db: &DatabaseConnection,
|
||||
name: &str,
|
||||
album_artist: &str,
|
||||
musicbrainz_id: Option<&str>,
|
||||
artist_id: Option<i32>,
|
||||
) -> DbResult<Album> {
|
||||
if let Some(mbid) = musicbrainz_id {
|
||||
if let Some(existing) = Albums::find()
|
||||
.filter(album::Column::MusicbrainzId.eq(mbid))
|
||||
.one(db)
|
||||
.await?
|
||||
{
|
||||
return Ok(existing);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(existing) = find_by_name_and_artist(db, name, album_artist).await? {
|
||||
return Ok(existing);
|
||||
}
|
||||
|
||||
let active = ActiveModel {
|
||||
name: Set(name.to_string()),
|
||||
album_artist: Set(album_artist.to_string()),
|
||||
musicbrainz_id: Set(musicbrainz_id.map(String::from)),
|
||||
artist_id: Set(artist_id),
|
||||
..Default::default()
|
||||
};
|
||||
Ok(active.insert(db).await?)
|
||||
}
|
||||
|
||||
pub async fn get_by_id(db: &DatabaseConnection, id: i32) -> DbResult<Album> {
|
||||
Albums::find_by_id(id)
|
||||
.one(db)
|
||||
.await?
|
||||
.ok_or_else(|| DbError::NotFound(format!("album id={id}")))
|
||||
}
|
||||
|
||||
pub async fn find_by_name_and_artist(
|
||||
db: &DatabaseConnection,
|
||||
name: &str,
|
||||
album_artist: &str,
|
||||
) -> DbResult<Option<Album>> {
|
||||
Ok(Albums::find()
|
||||
.filter(album::Column::Name.eq(name))
|
||||
.filter(album::Column::AlbumArtist.eq(album_artist))
|
||||
.one(db)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn list(db: &DatabaseConnection, limit: u64, offset: u64) -> DbResult<Vec<Album>> {
|
||||
Ok(Albums::find()
|
||||
.order_by_asc(album::Column::Name)
|
||||
.limit(limit)
|
||||
.offset(offset)
|
||||
.all(db)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn get_by_artist(db: &DatabaseConnection, artist_id: i32) -> DbResult<Vec<Album>> {
|
||||
Ok(Albums::find()
|
||||
.filter(album::Column::ArtistId.eq(artist_id))
|
||||
.order_by_asc(album::Column::Year)
|
||||
.all(db)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn update(db: &DatabaseConnection, id: i32, model: ActiveModel) -> DbResult<Album> {
|
||||
let mut active = model;
|
||||
active.id = Set(id);
|
||||
Ok(active.update(db).await?)
|
||||
}
|
||||
|
||||
pub async fn delete(db: &DatabaseConnection, id: i32) -> DbResult<()> {
|
||||
Albums::delete_by_id(id).exec(db).await?;
|
||||
Ok(())
|
||||
}
|
||||
87
src/queries/artists.rs
Normal file
87
src/queries/artists.rs
Normal file
@@ -0,0 +1,87 @@
|
||||
use chrono::Utc;
|
||||
use sea_orm::*;
|
||||
|
||||
use crate::entities::artist::{self, ActiveModel, Entity as Artists, Model as Artist};
|
||||
use crate::error::{DbError, DbResult};
|
||||
|
||||
pub async fn upsert(db: &DatabaseConnection, name: &str, musicbrainz_id: Option<&str>) -> DbResult<Artist> {
|
||||
// Try to find by musicbrainz_id first, then by name
|
||||
if let Some(mbid) = musicbrainz_id {
|
||||
if let Some(existing) = Artists::find()
|
||||
.filter(artist::Column::MusicbrainzId.eq(mbid))
|
||||
.one(db)
|
||||
.await?
|
||||
{
|
||||
return Ok(existing);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(existing) = find_by_name(db, name).await? {
|
||||
// Update musicbrainz_id if we have one now and didn't before
|
||||
if musicbrainz_id.is_some() && existing.musicbrainz_id.is_none() {
|
||||
let mut active: ActiveModel = existing.into();
|
||||
active.musicbrainz_id = Set(musicbrainz_id.map(String::from));
|
||||
return Ok(active.update(db).await?);
|
||||
}
|
||||
return Ok(existing);
|
||||
}
|
||||
|
||||
let now = Utc::now().naive_utc();
|
||||
let active = ActiveModel {
|
||||
name: Set(name.to_string()),
|
||||
musicbrainz_id: Set(musicbrainz_id.map(String::from)),
|
||||
added_at: Set(now),
|
||||
top_songs: Set("[]".to_string()),
|
||||
similar_artists: Set("[]".to_string()),
|
||||
..Default::default()
|
||||
};
|
||||
Ok(active.insert(db).await?)
|
||||
}
|
||||
|
||||
pub async fn get_by_id(db: &DatabaseConnection, id: i32) -> DbResult<Artist> {
|
||||
Artists::find_by_id(id)
|
||||
.one(db)
|
||||
.await?
|
||||
.ok_or_else(|| DbError::NotFound(format!("artist id={id}")))
|
||||
}
|
||||
|
||||
pub async fn find_by_name(db: &DatabaseConnection, name: &str) -> DbResult<Option<Artist>> {
|
||||
Ok(Artists::find()
|
||||
.filter(artist::Column::Name.eq(name))
|
||||
.one(db)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn list(db: &DatabaseConnection, limit: u64, offset: u64) -> DbResult<Vec<Artist>> {
|
||||
Ok(Artists::find()
|
||||
.order_by_asc(artist::Column::Name)
|
||||
.limit(limit)
|
||||
.offset(offset)
|
||||
.all(db)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn update(db: &DatabaseConnection, id: i32, model: ActiveModel) -> DbResult<Artist> {
|
||||
let mut active = model;
|
||||
active.id = Set(id);
|
||||
Ok(active.update(db).await?)
|
||||
}
|
||||
|
||||
pub async fn update_top_songs(db: &DatabaseConnection, id: i32, top_songs_json: &str) -> DbResult<Artist> {
|
||||
let existing = get_by_id(db, id).await?;
|
||||
let mut active: ActiveModel = existing.into();
|
||||
active.top_songs = Set(top_songs_json.to_string());
|
||||
Ok(active.update(db).await?)
|
||||
}
|
||||
|
||||
pub async fn update_similar_artists(db: &DatabaseConnection, id: i32, similar_json: &str) -> DbResult<Artist> {
|
||||
let existing = get_by_id(db, id).await?;
|
||||
let mut active: ActiveModel = existing.into();
|
||||
active.similar_artists = Set(similar_json.to_string());
|
||||
Ok(active.update(db).await?)
|
||||
}
|
||||
|
||||
pub async fn delete(db: &DatabaseConnection, id: i32) -> DbResult<()> {
|
||||
Artists::delete_by_id(id).exec(db).await?;
|
||||
Ok(())
|
||||
}
|
||||
65
src/queries/cache.rs
Normal file
65
src/queries/cache.rs
Normal file
@@ -0,0 +1,65 @@
|
||||
use chrono::{Duration, Utc};
|
||||
use sea_orm::*;
|
||||
|
||||
use crate::entities::search_cache::{self, ActiveModel, Entity as SearchCache};
|
||||
use crate::error::DbResult;
|
||||
|
||||
/// Get a cached result if it exists and hasn't expired.
|
||||
pub async fn get(db: &DatabaseConnection, query_key: &str) -> DbResult<Option<String>> {
|
||||
let now = Utc::now().naive_utc();
|
||||
let result = SearchCache::find()
|
||||
.filter(search_cache::Column::QueryKey.eq(query_key))
|
||||
.filter(search_cache::Column::ExpiresAt.gt(now))
|
||||
.one(db)
|
||||
.await?;
|
||||
Ok(result.map(|entry| entry.result_json))
|
||||
}
|
||||
|
||||
/// Insert or update a cache entry with a TTL in seconds.
|
||||
pub async fn set(
|
||||
db: &DatabaseConnection,
|
||||
query_key: &str,
|
||||
provider: &str,
|
||||
result_json: &str,
|
||||
ttl_seconds: i64,
|
||||
) -> DbResult<()> {
|
||||
let now = Utc::now().naive_utc();
|
||||
let expires_at = now + Duration::seconds(ttl_seconds);
|
||||
|
||||
// Try to update existing entry
|
||||
let existing = SearchCache::find()
|
||||
.filter(search_cache::Column::QueryKey.eq(query_key))
|
||||
.one(db)
|
||||
.await?;
|
||||
|
||||
if let Some(entry) = existing {
|
||||
let mut active: ActiveModel = entry.into();
|
||||
active.provider = Set(provider.to_string());
|
||||
active.result_json = Set(result_json.to_string());
|
||||
active.created_at = Set(now);
|
||||
active.expires_at = Set(expires_at);
|
||||
active.update(db).await?;
|
||||
} else {
|
||||
let active = ActiveModel {
|
||||
query_key: Set(query_key.to_string()),
|
||||
provider: Set(provider.to_string()),
|
||||
result_json: Set(result_json.to_string()),
|
||||
created_at: Set(now),
|
||||
expires_at: Set(expires_at),
|
||||
..Default::default()
|
||||
};
|
||||
active.insert(db).await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Delete all expired cache entries. Returns the number of entries purged.
|
||||
pub async fn purge_expired(db: &DatabaseConnection) -> DbResult<u64> {
|
||||
let now = Utc::now().naive_utc();
|
||||
let result = SearchCache::delete_many()
|
||||
.filter(search_cache::Column::ExpiresAt.lte(now))
|
||||
.exec(db)
|
||||
.await?;
|
||||
Ok(result.rows_affected)
|
||||
}
|
||||
83
src/queries/downloads.rs
Normal file
83
src/queries/downloads.rs
Normal file
@@ -0,0 +1,83 @@
|
||||
use chrono::Utc;
|
||||
use sea_orm::*;
|
||||
|
||||
use crate::entities::download_queue::{
|
||||
self, ActiveModel, DownloadStatus, Entity as DownloadQueue, Model as DownloadQueueItem,
|
||||
};
|
||||
use crate::error::{DbError, DbResult};
|
||||
|
||||
pub async fn enqueue(
|
||||
db: &DatabaseConnection,
|
||||
query: &str,
|
||||
wanted_item_id: Option<i32>,
|
||||
source_backend: &str,
|
||||
) -> DbResult<DownloadQueueItem> {
|
||||
let now = Utc::now().naive_utc();
|
||||
let active = ActiveModel {
|
||||
wanted_item_id: Set(wanted_item_id),
|
||||
query: Set(query.to_string()),
|
||||
source_url: Set(None),
|
||||
source_backend: Set(source_backend.to_string()),
|
||||
status: Set(DownloadStatus::Pending),
|
||||
error_message: Set(None),
|
||||
retry_count: Set(0),
|
||||
created_at: Set(now),
|
||||
updated_at: Set(now),
|
||||
..Default::default()
|
||||
};
|
||||
Ok(active.insert(db).await?)
|
||||
}
|
||||
|
||||
pub async fn get_next_pending(db: &DatabaseConnection) -> DbResult<Option<DownloadQueueItem>> {
|
||||
Ok(DownloadQueue::find()
|
||||
.filter(download_queue::Column::Status.eq(DownloadStatus::Pending))
|
||||
.order_by_asc(download_queue::Column::CreatedAt)
|
||||
.one(db)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn update_status(
|
||||
db: &DatabaseConnection,
|
||||
id: i32,
|
||||
status: DownloadStatus,
|
||||
error: Option<&str>,
|
||||
) -> DbResult<()> {
|
||||
let item = DownloadQueue::find_by_id(id)
|
||||
.one(db)
|
||||
.await?
|
||||
.ok_or_else(|| DbError::NotFound(format!("download_queue id={id}")))?;
|
||||
let mut active: ActiveModel = item.into();
|
||||
active.status = Set(status);
|
||||
active.error_message = Set(error.map(String::from));
|
||||
active.updated_at = Set(Utc::now().naive_utc());
|
||||
active.update(db).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn list(
|
||||
db: &DatabaseConnection,
|
||||
status_filter: Option<DownloadStatus>,
|
||||
) -> DbResult<Vec<DownloadQueueItem>> {
|
||||
let mut query = DownloadQueue::find();
|
||||
if let Some(status) = status_filter {
|
||||
query = query.filter(download_queue::Column::Status.eq(status));
|
||||
}
|
||||
Ok(query
|
||||
.order_by_desc(download_queue::Column::CreatedAt)
|
||||
.all(db)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn retry_failed(db: &DatabaseConnection, id: i32) -> DbResult<()> {
|
||||
let item = DownloadQueue::find_by_id(id)
|
||||
.one(db)
|
||||
.await?
|
||||
.ok_or_else(|| DbError::NotFound(format!("download_queue id={id}")))?;
|
||||
let mut active: ActiveModel = item.into();
|
||||
active.status = Set(DownloadStatus::Pending);
|
||||
active.error_message = Set(None);
|
||||
active.retry_count = Set(active.retry_count.unwrap() + 1);
|
||||
active.updated_at = Set(Utc::now().naive_utc());
|
||||
active.update(db).await?;
|
||||
Ok(())
|
||||
}
|
||||
6
src/queries/mod.rs
Normal file
6
src/queries/mod.rs
Normal file
@@ -0,0 +1,6 @@
|
||||
// One query module per table/concern. Every function here takes a
// `&DatabaseConnection` and returns `crate::error::DbResult`.
pub mod albums;
pub mod artists;
pub mod cache;
pub mod downloads;
pub mod tracks;
pub mod wanted;
|
||||
106
src/queries/tracks.rs
Normal file
106
src/queries/tracks.rs
Normal file
@@ -0,0 +1,106 @@
|
||||
use chrono::Utc;
|
||||
use sea_orm::*;
|
||||
|
||||
use crate::entities::track::{self, ActiveModel, Entity as Tracks, Model as Track};
|
||||
use crate::error::{DbError, DbResult};
|
||||
|
||||
/// Insert a track, or update the existing row that shares its `file_path`.
///
/// The incoming ActiveModel must have `file_path` explicitly `Set`; it is the
/// natural key used for the existence check. On update, `added_at` is forced
/// to `NotSet` so the original insertion time is preserved, and `updated_at`
/// is stamped with the current time. On insert, `added_at`/`updated_at` are
/// defaulted to now only when the caller left them unset.
///
/// NOTE(review): a missing `file_path` is reported via `DbError::NotFound`,
/// which is really a validation failure — consider a dedicated variant.
pub async fn upsert(db: &DatabaseConnection, new: ActiveModel) -> DbResult<Track> {
    // Extract file_path for lookup - it must be set for upsert
    let file_path = match &new.file_path {
        ActiveValue::Set(p) => p.clone(),
        _ => return Err(DbError::NotFound("file_path is required for upsert".into())),
    };

    if let Some(existing) = get_by_path(db, &file_path).await? {
        let mut active = new;
        active.id = Set(existing.id);
        active.added_at = NotSet; // preserve original added_at
        active.updated_at = Set(Utc::now().naive_utc());
        return Ok(active.update(db).await?);
    }

    let now = Utc::now().naive_utc();
    let mut active = new;
    if matches!(active.added_at, NotSet) {
        active.added_at = Set(now);
    }
    if matches!(active.updated_at, NotSet) {
        active.updated_at = Set(now);
    }
    Ok(active.insert(db).await?)
}
|
||||
|
||||
pub async fn get_by_id(db: &DatabaseConnection, id: i32) -> DbResult<Track> {
|
||||
Tracks::find_by_id(id)
|
||||
.one(db)
|
||||
.await?
|
||||
.ok_or_else(|| DbError::NotFound(format!("track id={id}")))
|
||||
}
|
||||
|
||||
pub async fn get_by_path(db: &DatabaseConnection, file_path: &str) -> DbResult<Option<Track>> {
|
||||
Ok(Tracks::find()
|
||||
.filter(track::Column::FilePath.eq(file_path))
|
||||
.one(db)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn list(db: &DatabaseConnection, limit: u64, offset: u64) -> DbResult<Vec<Track>> {
|
||||
Ok(Tracks::find()
|
||||
.order_by_asc(track::Column::Artist)
|
||||
.order_by_asc(track::Column::Album)
|
||||
.order_by_asc(track::Column::TrackNumber)
|
||||
.limit(limit)
|
||||
.offset(offset)
|
||||
.all(db)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn search(db: &DatabaseConnection, query: &str) -> DbResult<Vec<Track>> {
|
||||
let pattern = format!("%{query}%");
|
||||
Ok(Tracks::find()
|
||||
.filter(
|
||||
Condition::any()
|
||||
.add(track::Column::Title.like(&pattern))
|
||||
.add(track::Column::Artist.like(&pattern))
|
||||
.add(track::Column::Album.like(&pattern)),
|
||||
)
|
||||
.all(db)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn get_by_album(db: &DatabaseConnection, album_id: i32) -> DbResult<Vec<Track>> {
|
||||
Ok(Tracks::find()
|
||||
.filter(track::Column::AlbumId.eq(album_id))
|
||||
.order_by_asc(track::Column::DiscNumber)
|
||||
.order_by_asc(track::Column::TrackNumber)
|
||||
.all(db)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn get_by_artist(db: &DatabaseConnection, artist_id: i32) -> DbResult<Vec<Track>> {
|
||||
Ok(Tracks::find()
|
||||
.filter(track::Column::ArtistId.eq(artist_id))
|
||||
.order_by_asc(track::Column::Album)
|
||||
.order_by_asc(track::Column::TrackNumber)
|
||||
.all(db)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn get_untagged(db: &DatabaseConnection) -> DbResult<Vec<Track>> {
|
||||
Ok(Tracks::find()
|
||||
.filter(track::Column::MusicbrainzId.is_null())
|
||||
.all(db)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn update_metadata(db: &DatabaseConnection, id: i32, model: ActiveModel) -> DbResult<Track> {
|
||||
let mut active = model;
|
||||
active.id = Set(id);
|
||||
active.updated_at = Set(Utc::now().naive_utc());
|
||||
Ok(active.update(db).await?)
|
||||
}
|
||||
|
||||
pub async fn delete(db: &DatabaseConnection, id: i32) -> DbResult<()> {
|
||||
Tracks::delete_by_id(id).exec(db).await?;
|
||||
Ok(())
|
||||
}
|
||||
63
src/queries/wanted.rs
Normal file
63
src/queries/wanted.rs
Normal file
@@ -0,0 +1,63 @@
|
||||
use chrono::Utc;
|
||||
use sea_orm::*;
|
||||
|
||||
use crate::entities::wanted_item::{
|
||||
self, ActiveModel, Entity as WantedItems, ItemType, Model as WantedItem, WantedStatus,
|
||||
};
|
||||
use crate::error::{DbError, DbResult};
|
||||
|
||||
pub async fn add(
|
||||
db: &DatabaseConnection,
|
||||
item_type: ItemType,
|
||||
artist_id: Option<i32>,
|
||||
album_id: Option<i32>,
|
||||
track_id: Option<i32>,
|
||||
) -> DbResult<WantedItem> {
|
||||
let now = Utc::now().naive_utc();
|
||||
let active = ActiveModel {
|
||||
item_type: Set(item_type),
|
||||
artist_id: Set(artist_id),
|
||||
album_id: Set(album_id),
|
||||
track_id: Set(track_id),
|
||||
status: Set(WantedStatus::Wanted),
|
||||
added_at: Set(now),
|
||||
updated_at: Set(now),
|
||||
..Default::default()
|
||||
};
|
||||
Ok(active.insert(db).await?)
|
||||
}
|
||||
|
||||
pub async fn list(
|
||||
db: &DatabaseConnection,
|
||||
status_filter: Option<WantedStatus>,
|
||||
) -> DbResult<Vec<WantedItem>> {
|
||||
let mut query = WantedItems::find();
|
||||
if let Some(status) = status_filter {
|
||||
query = query.filter(wanted_item::Column::Status.eq(status));
|
||||
}
|
||||
Ok(query.all(db).await?)
|
||||
}
|
||||
|
||||
pub async fn get_by_id(db: &DatabaseConnection, id: i32) -> DbResult<WantedItem> {
|
||||
WantedItems::find_by_id(id)
|
||||
.one(db)
|
||||
.await?
|
||||
.ok_or_else(|| DbError::NotFound(format!("wanted_item id={id}")))
|
||||
}
|
||||
|
||||
pub async fn update_status(
|
||||
db: &DatabaseConnection,
|
||||
id: i32,
|
||||
status: WantedStatus,
|
||||
) -> DbResult<WantedItem> {
|
||||
let existing = get_by_id(db, id).await?;
|
||||
let mut active: ActiveModel = existing.into();
|
||||
active.status = Set(status);
|
||||
active.updated_at = Set(Utc::now().naive_utc());
|
||||
Ok(active.update(db).await?)
|
||||
}
|
||||
|
||||
pub async fn remove(db: &DatabaseConnection, id: i32) -> DbResult<()> {
|
||||
WantedItems::delete_by_id(id).exec(db).await?;
|
||||
Ok(())
|
||||
}
|
||||
277
tests/integration.rs
Normal file
277
tests/integration.rs
Normal file
@@ -0,0 +1,277 @@
|
||||
use chrono::Utc;
|
||||
use sea_orm::*;
|
||||
use shanty_db::entities::download_queue::DownloadStatus;
|
||||
use shanty_db::entities::wanted_item::{ItemType, WantedStatus};
|
||||
use shanty_db::{Database, queries};
|
||||
|
||||
async fn test_db() -> Database {
|
||||
Database::new("sqlite::memory:").await.expect("failed to create test database")
|
||||
}
|
||||
|
||||
// Smoke test: constructing the Database against in-memory SQLite must
// succeed; `test_db` panics (failing the test) otherwise.
#[tokio::test]
async fn test_database_creation_and_migrations() {
    let _db = test_db().await;
}
|
||||
|
||||
// Full create/read/upsert/update/delete cycle for the artists queries,
// including the MusicBrainz-id backfill path of `upsert`.
#[tokio::test]
async fn test_artist_crud() {
    let db = test_db().await;
    let conn = db.conn();

    // Create
    let artist = queries::artists::upsert(conn, "Pink Floyd", None)
        .await
        .unwrap();
    assert_eq!(artist.name, "Pink Floyd");
    assert!(artist.musicbrainz_id.is_none());

    // Read
    let found = queries::artists::get_by_id(conn, artist.id).await.unwrap();
    assert_eq!(found.name, "Pink Floyd");

    // Find by name
    let found = queries::artists::find_by_name(conn, "Pink Floyd")
        .await
        .unwrap();
    assert!(found.is_some());

    // Upsert (same name, should return existing)
    let same = queries::artists::upsert(conn, "Pink Floyd", None)
        .await
        .unwrap();
    assert_eq!(same.id, artist.id);

    // Upsert with musicbrainz_id updates existing (backfill, same row id)
    let updated = queries::artists::upsert(conn, "Pink Floyd", Some("mb-123"))
        .await
        .unwrap();
    assert_eq!(updated.id, artist.id);
    assert_eq!(updated.musicbrainz_id.as_deref(), Some("mb-123"));

    // List
    let all = queries::artists::list(conn, 100, 0).await.unwrap();
    assert_eq!(all.len(), 1);

    // Update top songs (stored as a raw JSON string)
    let updated = queries::artists::update_top_songs(conn, artist.id, r#"["Time","Money"]"#)
        .await
        .unwrap();
    assert_eq!(updated.top_songs, r#"["Time","Money"]"#);

    // Delete
    queries::artists::delete(conn, artist.id).await.unwrap();
    let all = queries::artists::list(conn, 100, 0).await.unwrap();
    assert!(all.is_empty());
}
|
||||
|
||||
// Album upsert is keyed on (name, album_artist): a second identical upsert
// must return the same row, and the FK to the artist must round-trip.
#[tokio::test]
async fn test_album_crud() {
    let db = test_db().await;
    let conn = db.conn();

    let artist = queries::artists::upsert(conn, "Pink Floyd", None)
        .await
        .unwrap();

    // Create album
    let album = queries::albums::upsert(
        conn,
        "The Dark Side of the Moon",
        "Pink Floyd",
        None,
        Some(artist.id),
    )
    .await
    .unwrap();
    assert_eq!(album.name, "The Dark Side of the Moon");
    assert_eq!(album.artist_id, Some(artist.id));

    // Upsert same album returns existing
    let same = queries::albums::upsert(
        conn,
        "The Dark Side of the Moon",
        "Pink Floyd",
        None,
        Some(artist.id),
    )
    .await
    .unwrap();
    assert_eq!(same.id, album.id);

    // Get by artist
    let albums = queries::albums::get_by_artist(conn, artist.id)
        .await
        .unwrap();
    assert_eq!(albums.len(), 1);
}
|
||||
|
||||
// Track upsert is keyed on file_path: re-upserting the same path must update
// the row in place (same id, new bitrate), and LIKE search must match on
// title, artist, or album.
#[tokio::test]
async fn test_track_upsert_and_search() {
    let db = test_db().await;
    let conn = db.conn();

    let now = Utc::now().naive_utc();

    // Insert a track with partial metadata
    let active = shanty_db::entities::track::ActiveModel {
        file_path: Set("/music/time.flac".to_string()),
        title: Set(Some("Time".to_string())),
        artist: Set(Some("Pink Floyd".to_string())),
        album: Set(Some("The Dark Side of the Moon".to_string())),
        file_size: Set(42_000_000),
        added_at: Set(now),
        updated_at: Set(now),
        ..Default::default()
    };
    let track = queries::tracks::upsert(conn, active).await.unwrap();
    assert_eq!(track.title.as_deref(), Some("Time"));
    assert!(track.artist_id.is_none()); // no FK yet

    // Upsert same file_path should update, not duplicate
    let active2 = shanty_db::entities::track::ActiveModel {
        file_path: Set("/music/time.flac".to_string()),
        title: Set(Some("Time".to_string())),
        artist: Set(Some("Pink Floyd".to_string())),
        album: Set(Some("The Dark Side of the Moon".to_string())),
        file_size: Set(42_000_000),
        bitrate: Set(Some(1411)),
        ..Default::default()
    };
    let updated = queries::tracks::upsert(conn, active2).await.unwrap();
    assert_eq!(updated.id, track.id);
    assert_eq!(updated.bitrate, Some(1411));

    // Search by title
    let results = queries::tracks::search(conn, "Time").await.unwrap();
    assert_eq!(results.len(), 1);

    // Search by artist
    let results = queries::tracks::search(conn, "Pink Floyd").await.unwrap();
    assert_eq!(results.len(), 1);

    // Untagged (no musicbrainz_id)
    let untagged = queries::tracks::get_untagged(conn).await.unwrap();
    assert_eq!(untagged.len(), 1);
}
|
||||
|
||||
// Wanted items: add starts in Wanted status, the status filter on list works
// in both directions, update_status transitions, and remove empties the table.
#[tokio::test]
async fn test_wanted_items_lifecycle() {
    let db = test_db().await;
    let conn = db.conn();

    let artist = queries::artists::upsert(conn, "Radiohead", None)
        .await
        .unwrap();

    // Add wanted item
    let item = queries::wanted::add(conn, ItemType::Artist, Some(artist.id), None, None)
        .await
        .unwrap();
    assert_eq!(item.status, WantedStatus::Wanted);
    assert_eq!(item.item_type, ItemType::Artist);

    // List with filter
    let wanted = queries::wanted::list(conn, Some(WantedStatus::Wanted))
        .await
        .unwrap();
    assert_eq!(wanted.len(), 1);

    let downloaded = queries::wanted::list(conn, Some(WantedStatus::Downloaded))
        .await
        .unwrap();
    assert!(downloaded.is_empty());

    // Update status
    let updated = queries::wanted::update_status(conn, item.id, WantedStatus::Downloaded)
        .await
        .unwrap();
    assert_eq!(updated.status, WantedStatus::Downloaded);

    // Remove
    queries::wanted::remove(conn, item.id).await.unwrap();
    let all = queries::wanted::list(conn, None).await.unwrap();
    assert!(all.is_empty());
}
|
||||
|
||||
// Download queue: enqueue -> pending, pick up, fail with message, list by
// status, then retry_failed must return it to Pending with retry_count bumped.
#[tokio::test]
async fn test_download_queue_lifecycle() {
    let db = test_db().await;
    let conn = db.conn();

    // Enqueue
    let item = queries::downloads::enqueue(conn, "Pink Floyd Time", None, "ytdlp")
        .await
        .unwrap();
    assert_eq!(item.status, DownloadStatus::Pending);
    assert_eq!(item.query, "Pink Floyd Time");

    // Get next pending
    let next = queries::downloads::get_next_pending(conn).await.unwrap();
    assert!(next.is_some());
    assert_eq!(next.unwrap().id, item.id);

    // Update to downloading
    queries::downloads::update_status(conn, item.id, DownloadStatus::Downloading, None)
        .await
        .unwrap();

    // No more pending
    let next = queries::downloads::get_next_pending(conn).await.unwrap();
    assert!(next.is_none());

    // Fail it
    queries::downloads::update_status(
        conn,
        item.id,
        DownloadStatus::Failed,
        Some("network error"),
    )
    .await
    .unwrap();

    // List failed
    let failed = queries::downloads::list(conn, Some(DownloadStatus::Failed))
        .await
        .unwrap();
    assert_eq!(failed.len(), 1);
    assert_eq!(failed[0].error_message.as_deref(), Some("network error"));

    // Retry: back to Pending, error cleared, counter incremented
    queries::downloads::retry_failed(conn, item.id).await.unwrap();
    let pending = queries::downloads::list(conn, Some(DownloadStatus::Pending))
        .await
        .unwrap();
    assert_eq!(pending.len(), 1);
    assert_eq!(pending[0].retry_count, 1);
}
|
||||
|
||||
// Search cache: a live entry round-trips, a missing key is None, a
// zero-second TTL entry is immediately invisible, and purge_expired
// reports exactly the expired rows it removed.
#[tokio::test]
async fn test_search_cache_ttl() {
    let db = test_db().await;
    let conn = db.conn();

    // Set cache with long TTL
    queries::cache::set(conn, "test_query", "musicbrainz", r#"{"results":[]}"#, 3600)
        .await
        .unwrap();

    // Get should return the value
    let result = queries::cache::get(conn, "test_query").await.unwrap();
    assert_eq!(result.as_deref(), Some(r#"{"results":[]}"#));

    // Non-existent key
    let result = queries::cache::get(conn, "nonexistent").await.unwrap();
    assert!(result.is_none());

    // Set cache with 0-second TTL (already expired)
    queries::cache::set(conn, "expired_query", "musicbrainz", r#"{"old":true}"#, 0)
        .await
        .unwrap();

    // Should not return expired entry (get filters expires_at > now)
    let result = queries::cache::get(conn, "expired_query").await.unwrap();
    assert!(result.is_none());

    // Purge expired — only the 0-TTL row qualifies
    let purged = queries::cache::purge_expired(conn).await.unwrap();
    assert_eq!(purged, 1);
}
|
||||
Reference in New Issue
Block a user