Compare commits

..

6 Commits

Author SHA1 Message Date
Connor Johnstone 3593698854 test fixes 2026-03-26 18:06:21 -04:00
Connor Johnstone a57125b9cd clippy 2026-03-26 17:52:03 -04:00
Connor Johnstone 86b6901638 format 2026-03-26 17:47:57 -04:00
Connor Johnstone de2847d41c Unified the track logic. Seems to work much better 2026-03-26 17:38:16 -04:00
Connor Johnstone 17fd738774 I **think** I've at least 99% fixed the top songs mismatch 2026-03-25 21:50:09 -04:00
Connor Johnstone 827944170a format 2026-03-24 12:57:12 -04:00
4 changed files with 467 additions and 128 deletions
+2 -2
View File
@@ -9,6 +9,6 @@ pub mod matching;
pub use error::{WatchError, WatchResult}; pub use error::{WatchError, WatchResult};
pub use library::{ pub use library::{
AddSummary, LibrarySummary, WatchListEntry, add_album, add_artist, add_track, library_summary, AddSummary, DiscRecording, LibrarySummary, WatchListEntry, add_album, add_artist, add_track,
list_items, remove_item, library_summary, list_items, remove_item, resolve_from_discography,
}; };
+362 -79
View File
@@ -3,6 +3,7 @@ use std::fmt;
use sea_orm::DatabaseConnection; use sea_orm::DatabaseConnection;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use shanty_db::entities::artist;
use shanty_db::entities::wanted_item::{ItemType, WantedStatus}; use shanty_db::entities::wanted_item::{ItemType, WantedStatus};
use shanty_db::queries; use shanty_db::queries;
use shanty_tag::provider::MetadataProvider; use shanty_tag::provider::MetadataProvider;
@@ -10,6 +11,68 @@ use shanty_tag::provider::MetadataProvider;
use crate::error::{WatchError, WatchResult}; use crate::error::{WatchError, WatchResult};
use crate::matching; use crate::matching;
/// A recording from an artist's discography, with release group context.
/// Used in the `artist_known_recordings` cache to resolve top song titles
/// to the correct recording MBID (preferring album over EP/single, older over newer).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DiscRecording {
    /// MusicBrainz recording MBID.
    pub mbid: String,
    /// Recording title as cached (compared via `normalize_title` + Jaro-Winkler).
    pub title: String,
    /// Release-group primary type (e.g. "Album", "EP"); empty string when unknown.
    pub rg_type: String,
    /// Release-group first release date, when known (e.g. "1973"); `None` sorts
    /// before any `Some` date under `Option::cmp`.
    pub rg_date: Option<String>,
}
/// Normalize a title for comparison: lowercase the whole string, then map
/// curly/smart apostrophes and quotation marks to their ASCII equivalents.
fn normalize_title(s: &str) -> String {
    s.to_lowercase()
        .chars()
        .map(|ch| match ch {
            '\u{2018}' | '\u{2019}' => '\'',
            '\u{201C}' | '\u{201D}' => '"',
            other => other,
        })
        .collect()
}
/// Resolve a song title to the best matching recording from an artist's discography.
///
/// Uses fuzzy title matching (Jaro-Winkler > 0.85). When multiple recordings match,
/// prefers: higher similarity score, then Album > EP > Single, then older release date.
/// Returns `None` if no match exceeds the threshold.
pub fn resolve_from_discography<'a>(
    title: &str,
    recordings: &'a [DiscRecording],
) -> Option<&'a DiscRecording> {
    let title_norm = normalize_title(title);
    let mut matches: Vec<(f64, &DiscRecording)> = recordings
        .iter()
        .map(|r| {
            (
                strsim::jaro_winkler(&title_norm, &normalize_title(&r.title)),
                r,
            )
        })
        .filter(|(score, _)| *score > 0.85)
        .collect();
    if matches.is_empty() {
        return None;
    }
    // Best match first. `total_cmp` gives a total order on f64 (IEEE totalOrder),
    // so ties fall through to the type/date tie-breakers instead of an arbitrary
    // `unwrap_or(Equal)` — and NaN (impossible for Jaro-Winkler, but cheap to rule
    // out) can no longer collapse the whole comparison.
    matches.sort_by(|(score_a, a), (score_b, b)| {
        score_b
            .total_cmp(score_a)
            .then_with(|| rg_type_priority(&a.rg_type).cmp(&rg_type_priority(&b.rg_type)))
            // Option::cmp puts None before Some — undated releases win ties;
            // presumably acceptable, TODO confirm.
            .then_with(|| a.rg_date.cmp(&b.rg_date))
    });
    matches.first().map(|(_, r)| *r)
}
/// Ranking used to break ties between recordings with equal title similarity:
/// albums (0) beat EPs (1), which beat everything else (2). Lower is better.
fn rg_type_priority(rg_type: &str) -> u8 {
    if rg_type == "Album" {
        0
    } else if rg_type == "EP" {
        1
    } else {
        2
    }
}
/// A display-friendly watchlist entry with resolved names. /// A display-friendly watchlist entry with resolved names.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WatchListEntry { pub struct WatchListEntry {
@@ -93,70 +156,53 @@ pub async fn add_artist(
} }
}; };
tracing::info!(name = %resolved_name, mbid = %artist_mbid, "fetching discography (release groups)"); tracing::info!(name = %resolved_name, mbid = %artist_mbid, "loading discography");
let all_release_groups = provider // Use the unified discography source — same MBIDs the detail page displays.
.get_artist_release_groups(&artist_mbid) // This reads from artist_rg_tracks caches (populated by enrich_artist),
.await // ensuring wanted_items always have MBIDs that match the detail page.
.map_err(|e| WatchError::Other(format!("failed to fetch release groups: {e}")))?; let disc = load_or_build_discography(conn, &artist_mbid, provider).await;
// Only include release groups where this artist is the primary credit, if disc.is_empty() {
// filtered by allowed secondary types (same as the artist detail page) tracing::warn!(
let release_groups: Vec<_> = all_release_groups name = %resolved_name,
.into_iter() mbid = %artist_mbid,
.filter(|rg| !rg.featured) "no discography data available — visit the artist page first to populate caches"
.filter(|rg| { );
if rg.secondary_types.is_empty() { return Ok(AddSummary::default());
}
// Deduplicate by MBID and expand into wanted items
let mut summary = AddSummary::default();
let mut seen_mbids: std::collections::HashSet<String> = std::collections::HashSet::new();
// Filter by allowed secondary types
let filtered: Vec<_> = disc
.iter()
.filter(|r| {
if r.rg_type.is_empty() || r.rg_type == "Album" {
true true
} else { } else {
rg.secondary_types allowed_secondary_types.iter().any(|st| st == &r.rg_type)
.iter()
.all(|st| allowed_secondary_types.contains(st))
} }
}) })
.collect(); .collect();
tracing::info!(count = release_groups.len(), "found release groups"); tracing::info!(
total = disc.len(),
filtered = filtered.len(),
"discography loaded"
);
let mut summary = AddSummary::default(); for rec in &filtered {
let mut seen_mbids: std::collections::HashSet<String> = std::collections::HashSet::new(); if !seen_mbids.insert(rec.mbid.clone()) {
continue; // Already processed this recording
for rg in &release_groups {
// Resolve a concrete release MBID from the release group
let release_mbid = if let Some(ref rid) = rg.first_release_mbid {
rid.clone()
} else {
// Browse releases for this release group to find a concrete release
match provider.resolve_release_from_group(&rg.mbid).await {
Ok(rid) => rid,
Err(e) => {
tracing::warn!(rg = %rg.title, rg_id = %rg.mbid, error = %e, "failed to resolve release from group");
summary.errors += 1;
continue;
}
}
};
tracing::info!(title = %rg.title, release_mbid = %release_mbid, "fetching tracks for release group");
let tracks = match provider.get_release_tracks(&release_mbid).await {
Ok(t) => t,
Err(e) => {
tracing::warn!(rg = %rg.title, error = %e, "failed to fetch tracks");
summary.errors += 1;
continue;
}
};
for track in &tracks {
if !seen_mbids.insert(track.recording_mbid.clone()) {
continue; // Already processed this recording in another release group
} }
match add_track_inner( match add_track_inner(
conn, conn,
&resolved_name, &resolved_name,
&track.title, &rec.title,
Some(&track.recording_mbid), Some(&rec.mbid),
Some(&artist_mbid), Some(&artist_mbid),
user_id, user_id,
) )
@@ -165,12 +211,11 @@ pub async fn add_artist(
Ok(true) => summary.tracks_added += 1, Ok(true) => summary.tracks_added += 1,
Ok(false) => summary.tracks_already_owned += 1, Ok(false) => summary.tracks_already_owned += 1,
Err(e) => { Err(e) => {
tracing::warn!(track = %track.title, error = %e, "failed to add track"); tracing::warn!(track = %rec.title, error = %e, "failed to add track");
summary.errors += 1; summary.errors += 1;
} }
} }
} }
}
tracing::info!(%summary, "artist watch complete"); tracing::info!(%summary, "artist watch complete");
Ok(summary) Ok(summary)
@@ -253,19 +298,129 @@ pub async fn add_track(
provider: &impl MetadataProvider, provider: &impl MetadataProvider,
user_id: Option<i32>, user_id: Option<i32>,
) -> WatchResult<WatchListEntry> { ) -> WatchResult<WatchListEntry> {
let (resolved_title, resolved_artist, resolved_mbid, resolved_artist_mbid) = // Fast path: if we have artist name + title, try to resolve the MBID directly from
// the cached discography (no MB API calls needed). This makes top song watches instant.
if let (Some(a), Some(t)) = (
artist_name.filter(|s| !s.is_empty()),
title.filter(|s| !s.is_empty()),
) && let Ok(Some(existing_artist)) = queries::artists::find_by_name(conn, a).await
&& let Some(ref artist_mbid) = existing_artist.musicbrainz_id
&& let Some(disc) = load_and_resolve_discography(conn, artist_mbid, t).await
{
tracing::info!(
title = t,
mbid = %disc.mbid,
rg_type = %disc.rg_type,
"resolved MBID from cached discography (fast path)"
);
let artist = queries::artists::upsert(conn, a, Some(artist_mbid)).await?;
return finish_add_track(conn, t, a, Some(disc.mbid.clone()), artist, user_id).await;
}
// Slow path: resolve via MB API
let (resolved_title, resolved_artist, _resolved_mbid, resolved_artist_mbid) =
resolve_track_info(artist_name, title, musicbrainz_id, provider).await?; resolve_track_info(artist_name, title, musicbrainz_id, provider).await?;
let artist = let artist =
queries::artists::upsert(conn, &resolved_artist, resolved_artist_mbid.as_deref()).await?; queries::artists::upsert(conn, &resolved_artist, resolved_artist_mbid.as_deref()).await?;
let is_owned = matching::track_is_owned(conn, &resolved_artist, &resolved_title).await?;
// Try to resolve an MBID from the artist's discography using fuzzy title matching.
// This ensures the wanted item's MBID matches a recording on the displayed release group.
let disc_mbid = if let Some(ref artist_mbid) = artist.musicbrainz_id {
let recordings = load_or_build_discography(conn, artist_mbid, provider).await;
resolve_from_discography(&resolved_title, &recordings).map(|d| d.mbid.clone())
} else {
None
};
if disc_mbid.is_none() {
tracing::warn!(
title = %resolved_title,
artist = %resolved_artist,
"no discography match found — track will not appear on artist page"
);
}
finish_add_track(
conn,
&resolved_title,
&resolved_artist,
disc_mbid,
artist,
user_id,
)
.await
}
/// Shared tail of `add_track`: dedup check, create wanted_item, detect ownership.
async fn finish_add_track(
conn: &DatabaseConnection,
title: &str,
_artist_name: &str,
recording_mbid: Option<String>,
artist: artist::Model,
user_id: Option<i32>,
) -> WatchResult<WatchListEntry> {
// Dedup: skip if a wanted_item with this MBID already exists
if let Some(ref mbid) = recording_mbid
&& let Ok(Some(existing)) = queries::wanted::find_by_mbid(conn, mbid).await
{
let artist_name_resolved = if let Some(aid) = existing.artist_id {
queries::artists::get_by_id(conn, aid)
.await
.map(|a| a.name)
.ok()
} else {
Some(artist.name.clone())
};
return Ok(WatchListEntry {
id: existing.id,
item_type: existing.item_type,
name: existing.name,
artist_name: artist_name_resolved,
status: existing.status,
added_at: existing.added_at,
});
}
// Dedup: skip if a wanted_item with same name + artist already exists
let all_wanted = queries::wanted::list(conn, None, None).await?;
let title_lower = title.to_lowercase();
if let Some(existing) = all_wanted
.iter()
.find(|w| w.artist_id == Some(artist.id) && w.name.to_lowercase() == title_lower)
{
// Update stale MBID if the new one differs (e.g., resolved from discography)
if let Some(ref new_mbid) = recording_mbid
&& existing.musicbrainz_id.as_deref() != Some(new_mbid)
{
let _ = queries::wanted::update_mbid(conn, existing.id, new_mbid).await;
tracing::info!(
id = existing.id,
old_mbid = ?existing.musicbrainz_id,
new_mbid = %new_mbid,
title = title,
"updated stale MBID on existing wanted item"
);
}
return Ok(WatchListEntry {
id: existing.id,
item_type: existing.item_type,
name: existing.name.clone(),
artist_name: Some(artist.name),
status: existing.status,
added_at: existing.added_at,
});
}
let is_owned = matching::track_is_owned(conn, recording_mbid.as_deref()).await?;
let item = queries::wanted::add( let item = queries::wanted::add(
conn, conn,
queries::wanted::AddWantedItem { queries::wanted::AddWantedItem {
item_type: ItemType::Track, item_type: ItemType::Track,
name: &resolved_title, name: title,
musicbrainz_id: resolved_mbid.as_deref(), musicbrainz_id: recording_mbid.as_deref(),
artist_id: Some(artist.id), artist_id: Some(artist.id),
album_id: None, album_id: None,
track_id: None, track_id: None,
@@ -284,13 +439,89 @@ pub async fn add_track(
Ok(WatchListEntry { Ok(WatchListEntry {
id: item.id, id: item.id,
item_type: ItemType::Track, item_type: ItemType::Track,
name: resolved_title, name: title.to_string(),
artist_name: Some(artist.name), artist_name: Some(artist.name),
status, status,
added_at: item.added_at, added_at: item.added_at,
}) })
} }
/// Load the discography cache for `artist_mbid` and resolve `title` against it.
/// Returns `None` when the cache entry is missing, fails to deserialize, or no
/// recording matches the title.
async fn load_and_resolve_discography(
    conn: &DatabaseConnection,
    artist_mbid: &str,
    title: &str,
) -> Option<DiscRecording> {
    let cache_key = format!("artist_known_recordings:{artist_mbid}");
    let json = match queries::cache::get(conn, &cache_key).await {
        Ok(Some(json)) => json,
        // Cache miss or DB error — nothing to resolve against.
        _ => return None,
    };
    let recordings = serde_json::from_str::<Vec<DiscRecording>>(&json).ok()?;
    resolve_from_discography(title, &recordings).cloned()
}
/// Load the `artist_known_recordings` discography cache, or rebuild it from the
/// detail page's cached per-release-group tracks (`artist_rg_tracks:*`).
///
/// The release-group *list* comes from `provider.get_artist_release_groups`
/// (NOTE(review): this is a provider call — it may hit the MB API unless the
/// provider caches it; confirm). Per-RG *track lists*, however, are read only
/// from the `artist_rg_tracks` caches populated by enrich_artist(), so any MBIDs
/// returned here match what the artist detail page displays. Release groups
/// without a cache entry are skipped. Returns an empty Vec when nothing is
/// available; a non-empty result is written back to the cache for 7 days.
async fn load_or_build_discography(
    conn: &DatabaseConnection,
    artist_mbid: &str,
    provider: &impl MetadataProvider,
) -> Vec<DiscRecording> {
    let cache_key = format!("artist_known_recordings:{artist_mbid}");
    // Fast path: the pre-built known_recordings cache.
    if let Ok(Some(json)) = queries::cache::get(conn, &cache_key).await {
        if let Ok(recordings) = serde_json::from_str::<Vec<DiscRecording>>(&json) {
            return recordings;
        }
        // Entry exists but no longer deserializes as Vec<DiscRecording> — rebuild it.
        tracing::debug!(artist_mbid, "rebuilding discography cache (old format)");
    }
    // Rebuild from the detail page's cached release-group tracks.
    let mut recordings = Vec::new();
    if let Ok(release_groups) = provider.get_artist_release_groups(artist_mbid).await {
        for rg in &release_groups {
            // Same filter the detail page applies: skip featured credits and
            // anything with secondary types (live, compilation, ...).
            if rg.featured || !rg.secondary_types.is_empty() {
                continue;
            }
            let rg_type = rg.primary_type.clone().unwrap_or_default();
            let rg_date = rg.first_release_date.clone();
            let rg_cache_key = format!("artist_rg_tracks:{}", rg.mbid);
            // Only cached track lists are used — no per-RG provider fetch here,
            // so MBIDs cannot diverge from what enrich_artist() stored.
            if let Ok(Some(json)) = queries::cache::get(conn, &rg_cache_key).await
                && let Ok(cached) = serde_json::from_str::<serde_json::Value>(&json)
                && let Some(tracks) = cached.get("tracks").and_then(|t| t.as_array())
            {
                for t in tracks {
                    // Entries missing either field are silently dropped.
                    if let (Some(mbid), Some(title)) = (
                        t.get("recording_mbid").and_then(|v| v.as_str()),
                        t.get("title").and_then(|v| v.as_str()),
                    ) {
                        recordings.push(DiscRecording {
                            mbid: mbid.to_string(),
                            title: title.to_string(),
                            rg_type: rg_type.clone(),
                            rg_date: rg_date.clone(),
                        });
                    }
                }
            }
            // No cache for this RG = skip it (enrich_artist hasn't fetched it yet)
        }
    }
    // Persist the rebuilt list (7-day TTL); errors are best-effort-ignored.
    if !recordings.is_empty()
        && let Ok(json) = serde_json::to_string(&recordings)
    {
        let _ = queries::cache::set(conn, &cache_key, "computed", &json, 7 * 86400).await;
    }
    recordings
}
/// Internal: add a single track wanted item. Returns Ok(true) if added as Wanted, /// Internal: add a single track wanted item. Returns Ok(true) if added as Wanted,
/// Ok(false) if already owned. /// Ok(false) if already owned.
async fn add_track_inner( async fn add_track_inner(
@@ -302,18 +533,27 @@ async fn add_track_inner(
user_id: Option<i32>, user_id: Option<i32>,
) -> WatchResult<bool> { ) -> WatchResult<bool> {
// Skip if a wanted_item with this recording MBID already exists // Skip if a wanted_item with this recording MBID already exists
if let Some(mbid) = recording_mbid { if let Some(mbid) = recording_mbid
if queries::wanted::find_by_mbid(conn, mbid) && queries::wanted::find_by_mbid(conn, mbid).await?.is_some()
.await?
.is_some()
{ {
tracing::debug!(title = title, mbid = mbid, "already in watchlist, skipping"); tracing::debug!(title = title, mbid = mbid, "already in watchlist, skipping");
return Ok(false); return Ok(false);
} }
}
let artist = queries::artists::upsert(conn, artist_name, artist_mbid).await?; let artist = queries::artists::upsert(conn, artist_name, artist_mbid).await?;
let is_owned = matching::track_is_owned(conn, artist_name, title).await?;
// Also check by name + artist_id to catch race conditions (rapid double-clicks)
let all_wanted = queries::wanted::list(conn, None, None).await?;
let title_lower = title.to_lowercase();
if all_wanted
.iter()
.any(|w| w.artist_id == Some(artist.id) && w.name.to_lowercase() == title_lower)
{
tracing::debug!(title = title, "already in watchlist by name, skipping");
return Ok(false);
}
let is_owned = matching::track_is_owned(conn, recording_mbid).await?;
let item = queries::wanted::add( let item = queries::wanted::add(
conn, conn,
@@ -410,28 +650,22 @@ async fn resolve_album_info(
/// Resolve track info from MBID if needed. /// Resolve track info from MBID if needed.
/// Returns (title, artist_name, recording_mbid, artist_mbid). /// Returns (title, artist_name, recording_mbid, artist_mbid).
/// When an MBID is provided, validates it via MB lookup. If the MBID is stale/invalid,
/// falls back to searching by artist+title to find the correct recording MBID.
async fn resolve_track_info( async fn resolve_track_info(
artist_name: Option<&str>, artist_name: Option<&str>,
title: Option<&str>, title: Option<&str>,
mbid: Option<&str>, mbid: Option<&str>,
provider: &impl MetadataProvider, provider: &impl MetadataProvider,
) -> WatchResult<(String, String, Option<String>, Option<String>)> { ) -> WatchResult<(String, String, Option<String>, Option<String>)> {
if let (Some(t), Some(a)) = ( let has_name = title.filter(|s| !s.is_empty()).is_some()
title.filter(|s| !s.is_empty()), && artist_name.filter(|s| !s.is_empty()).is_some();
artist_name.filter(|s| !s.is_empty()),
) {
return Ok((t.to_string(), a.to_string(), mbid.map(String::from), None));
}
let mbid = // If we have an MBID, validate it via lookup
mbid.ok_or_else(|| WatchError::Other("either artist+title or --mbid is required".into()))?; if let Some(mbid) = mbid.filter(|s| !s.is_empty()) {
match provider.get_recording(mbid).await {
let details = provider Ok(details) => {
.get_recording(mbid) return Ok((
.await
.map_err(|e| WatchError::Other(format!("MusicBrainz lookup failed: {e}")))?;
Ok((
title title
.filter(|s| !s.is_empty()) .filter(|s| !s.is_empty())
.map(String::from) .map(String::from)
@@ -442,6 +676,55 @@ async fn resolve_track_info(
.unwrap_or(details.artist), .unwrap_or(details.artist),
Some(mbid.to_string()), Some(mbid.to_string()),
details.artist_mbid, details.artist_mbid,
));
}
Err(e) => {
// MBID is stale/invalid — fall back to search if we have name info
if has_name {
tracing::warn!(mbid = mbid, error = %e, "MBID validation failed, falling back to search");
} else {
return Err(WatchError::Other(format!(
"MusicBrainz lookup failed for {mbid}: {e}"
)));
}
}
}
}
// Search by artist + title
if let (Some(t), Some(a)) = (
title.filter(|s| !s.is_empty()),
artist_name.filter(|s| !s.is_empty()),
) {
// Try to find the correct recording MBID via search
let results = provider
.search_recording(a, t)
.await
.map_err(|e| WatchError::Other(format!("MusicBrainz search failed: {e}")))?;
// Prefer a result credited to this artist (by artist name match)
// to avoid picking up compilation/VA versions of the same song
let a_lower = a.to_lowercase();
let best = results
.iter()
.find(|r| r.artist.to_lowercase() == a_lower)
.or(results.first());
if let Some(best) = best {
return Ok((
t.to_string(),
a.to_string(),
Some(best.mbid.clone()),
best.artist_mbid.clone(),
));
}
// No search results — return without MBID
return Ok((t.to_string(), a.to_string(), None, None));
}
Err(WatchError::Other(
"either artist+title or a valid MBID is required".into(),
)) ))
} }
+7 -22
View File
@@ -87,34 +87,19 @@ pub async fn album_is_owned(
Ok(false) Ok(false)
} }
/// Check if a specific track is "owned" — a track with matching artist + title exists. /// Check if a specific track is "owned" — a track with the same recording MBID exists.
/// Uses MBID-only matching to avoid false positives from fuzzy name matching
/// (e.g., "Kill Me" incorrectly matching "How I Get Myself Killed").
pub async fn track_is_owned( pub async fn track_is_owned(
conn: &DatabaseConnection, conn: &DatabaseConnection,
artist_name: &str, recording_mbid: Option<&str>,
title: &str,
) -> WatchResult<bool> { ) -> WatchResult<bool> {
let norm_artist = normalize(artist_name); if let Some(mbid) = recording_mbid {
let norm_title = normalize(title); let all_tracks = queries::tracks::get_by_mbid(conn, mbid).await?;
if !all_tracks.is_empty() {
// Fuzzy: search tracks matching the title
let tracks = queries::tracks::search(conn, title).await?;
for track in &tracks {
let title_match = track
.title
.as_deref()
.map(|t| strsim::jaro_winkler(&norm_title, &normalize(t)) > 0.85)
.unwrap_or(false);
let artist_match = track
.artist
.as_deref()
.or(track.album_artist.as_deref())
.map(|a| strsim::jaro_winkler(&norm_artist, &normalize(a)) > 0.85)
.unwrap_or(false);
if title_match && artist_match {
return Ok(true); return Ok(true);
} }
} }
Ok(false) Ok(false)
} }
+77 -6
View File
@@ -14,7 +14,7 @@ async fn test_db() -> Database {
} }
/// Insert a fake track into the DB to simulate an indexed library. /// Insert a fake track into the DB to simulate an indexed library.
async fn insert_track(db: &Database, artist: &str, title: &str, album: &str) { async fn insert_track(db: &Database, artist: &str, title: &str, album: &str, mbid: Option<&str>) {
let now = Utc::now().naive_utc(); let now = Utc::now().naive_utc();
let artist_rec = queries::artists::upsert(db.conn(), artist, None) let artist_rec = queries::artists::upsert(db.conn(), artist, None)
.await .await
@@ -28,6 +28,7 @@ async fn insert_track(db: &Database, artist: &str, title: &str, album: &str) {
artist: Set(Some(artist.to_string())), artist: Set(Some(artist.to_string())),
album: Set(Some(album.to_string())), album: Set(Some(album.to_string())),
album_artist: Set(Some(artist.to_string())), album_artist: Set(Some(artist.to_string())),
musicbrainz_id: Set(mbid.map(String::from)),
file_size: Set(1_000_000), file_size: Set(1_000_000),
artist_id: Set(Some(artist_rec.id)), artist_id: Set(Some(artist_rec.id)),
album_id: Set(Some(album_rec.id)), album_id: Set(Some(album_rec.id)),
@@ -38,6 +39,26 @@ async fn insert_track(db: &Database, artist: &str, title: &str, album: &str) {
queries::tracks::upsert(db.conn(), active).await.unwrap(); queries::tracks::upsert(db.conn(), active).await.unwrap();
} }
/// Populate the artist_rg_tracks cache so add_artist/add_track can resolve recordings.
///
/// Writes a fixed two-track release group under the hard-coded key
/// `artist_rg_tracks:rg-123` (the `_artist_mbid` parameter is intentionally
/// unused — the mock provider's release groups are keyed by "rg-123").
async fn populate_discography_cache(db: &Database, _artist_mbid: &str) {
    let rg_tracks = serde_json::json!({
        "release_mbid": "release-123",
        "tracks": [
            {"recording_mbid": "rec-1", "title": "Track One"},
            {"recording_mbid": "rec-2", "title": "Track Two"},
        ]
    });
    queries::cache::set(
        db.conn(),
        // Plain literal: the key has no interpolation, so `format!` was a
        // useless allocation (clippy::useless_format).
        "artist_rg_tracks:rg-123",
        "musicbrainz",
        &rg_tracks.to_string(),
        86400,
    )
    .await
    .unwrap();
}
/// Mock provider that returns a tracklist for known releases. /// Mock provider that returns a tracklist for known releases.
struct MockProvider; struct MockProvider;
@@ -60,7 +81,19 @@ impl MetadataProvider for MockProvider {
score: 100, score: 100,
}]) }])
} }
async fn get_recording(&self, _mbid: &str) -> DataResult<RecordingDetails> { async fn get_recording(&self, mbid: &str) -> DataResult<RecordingDetails> {
// Return details for known test MBIDs
if mbid == "rec-time" {
return Ok(RecordingDetails {
mbid: "rec-time".into(),
title: "Time".into(),
artist: "Pink Floyd".into(),
artist_mbid: None,
genres: vec![],
releases: vec![],
duration_ms: None,
});
}
Err(shanty_data::DataError::Other("not found".into())) Err(shanty_data::DataError::Other("not found".into()))
} }
async fn search_artist( async fn search_artist(
@@ -160,13 +193,31 @@ async fn test_add_track_auto_owned() {
let db = test_db().await; let db = test_db().await;
let provider = MockProvider; let provider = MockProvider;
insert_track(&db, "Pink Floyd", "Time", "DSOTM").await; // Create artist with MBID so the fast path can resolve
queries::artists::upsert(db.conn(), "Pink Floyd", Some("pf-mbid"))
.await
.unwrap();
// Populate discography cache so fast path finds "Time" → "rec-time"
let known = serde_json::json!([
{"mbid": "rec-time", "title": "Time", "rg_type": "Album", "rg_date": "1973"}
]);
queries::cache::set(
db.conn(),
"artist_known_recordings:pf-mbid",
"computed",
&known.to_string(),
86400,
)
.await
.unwrap();
insert_track(&db, "Pink Floyd", "Time", "DSOTM", Some("rec-time")).await;
let entry = add_track( let entry = add_track(
db.conn(), db.conn(),
Some("Pink Floyd"), Some("Pink Floyd"),
Some("Time"), Some("Time"),
None, Some("rec-time"),
&provider, &provider,
None, None,
) )
@@ -204,6 +255,9 @@ async fn test_add_artist_expands_to_tracks() {
let db = test_db().await; let db = test_db().await;
let provider = MockProvider; let provider = MockProvider;
// Pre-populate the discography cache (normally done by enrich_artist)
populate_discography_cache(&db, "artist-456").await;
let summary = add_artist(db.conn(), Some("Test Artist"), None, &provider, &[], None) let summary = add_artist(db.conn(), Some("Test Artist"), None, &provider, &[], None)
.await .await
.unwrap(); .unwrap();
@@ -278,7 +332,24 @@ async fn test_library_summary() {
let db = test_db().await; let db = test_db().await;
let provider = MockProvider; let provider = MockProvider;
insert_track(&db, "Pink Floyd", "Time", "DSOTM").await; // Create artist with MBID and populate discography cache
queries::artists::upsert(db.conn(), "Pink Floyd", Some("pf-mbid"))
.await
.unwrap();
let known = serde_json::json!([
{"mbid": "rec-time", "title": "Time", "rg_type": "Album", "rg_date": "1973"}
]);
queries::cache::set(
db.conn(),
"artist_known_recordings:pf-mbid",
"computed",
&known.to_string(),
86400,
)
.await
.unwrap();
insert_track(&db, "Pink Floyd", "Time", "DSOTM", Some("rec-time")).await;
add_track( add_track(
db.conn(), db.conn(),
@@ -294,7 +365,7 @@ async fn test_library_summary() {
db.conn(), db.conn(),
Some("Pink Floyd"), Some("Pink Floyd"),
Some("Time"), Some("Time"),
None, Some("rec-time"),
&provider, &provider,
None, None,
) )