Added the MusicBrainz DB download. Big upsides and downsides.
All checks were successful
CI / check (push) Successful in 1m11s
CI / docker (push) Successful in 2m21s

This commit is contained in:
Connor Johnstone
2026-03-21 23:22:49 -04:00
parent 31d54651e6
commit 51f2c2ae8f
9 changed files with 2181 additions and 142 deletions

View File

@@ -11,7 +11,16 @@ serde_json = "1"
thiserror = "2"
tracing = "0.1"
tokio = { version = "1", features = ["full"] }
reqwest = { version = "0.12", features = ["json"] }
reqwest = { version = "0.12", features = ["json", "stream"] }
futures-util = "0.3"
rusqlite = { version = "0.29", optional = true }
xz2 = { version = "0.1", optional = true }
tar = { version = "0.4", optional = true }
chrono = { version = "0.4", optional = true }
[features]
default = ["local-mb"]
local-mb = ["rusqlite", "xz2", "tar", "chrono"]
[dev-dependencies]
tokio = { version = "1", features = ["full", "test-util"] }

View File

@@ -4,6 +4,12 @@ pub mod fanarttv;
pub mod http;
pub mod lastfm;
pub mod lrclib;
#[cfg(feature = "local-mb")]
pub mod mb_hybrid;
#[cfg(feature = "local-mb")]
pub mod mb_import;
#[cfg(feature = "local-mb")]
pub mod mb_local;
pub mod musicbrainz;
pub mod traits;
pub mod types;
@@ -14,6 +20,10 @@ pub use error::{DataError, DataResult};
pub use fanarttv::FanartTvFetcher;
pub use lastfm::{LastFmBioFetcher, LastFmSimilarFetcher};
pub use lrclib::LrclibFetcher;
#[cfg(feature = "local-mb")]
pub use mb_hybrid::HybridMusicBrainzFetcher;
#[cfg(feature = "local-mb")]
pub use mb_local::LocalMusicBrainzFetcher;
pub use musicbrainz::MusicBrainzFetcher;
pub use traits::*;
pub use types::*;

View File

@@ -0,0 +1,171 @@
//! Hybrid MusicBrainz fetcher: local DB first, API fallback.
//!
//! Tries the local SQLite database for instant lookups. If the local DB is not
//! configured, not available, or doesn't have the requested entity, falls back
//! to the rate-limited MusicBrainz API.
use crate::error::DataResult;
use crate::mb_local::{LocalMbStats, LocalMusicBrainzFetcher};
use crate::musicbrainz::MusicBrainzFetcher;
use crate::traits::MetadataFetcher;
use crate::types::{
ArtistInfo, ArtistSearchResult, DiscographyEntry, RecordingDetails, RecordingMatch,
ReleaseGroupEntry, ReleaseMatch, ReleaseTrack,
};
/// A [`MetadataFetcher`] that tries a local MusicBrainz SQLite database first,
/// then falls back to the remote MusicBrainz API.
pub struct HybridMusicBrainzFetcher {
    // Optional local SQLite-backed fetcher; `None` means every query goes to the API.
    local: Option<LocalMusicBrainzFetcher>,
    // Rate-limited remote MusicBrainz API client used as the fallback path.
    remote: MusicBrainzFetcher,
}
impl HybridMusicBrainzFetcher {
/// Create a hybrid fetcher. If `local` is `None`, all queries go to the API.
pub fn new(local: Option<LocalMusicBrainzFetcher>, remote: MusicBrainzFetcher) -> Self {
Self { local, remote }
}
/// Whether a local database is configured and has data.
pub fn has_local_db(&self) -> bool {
self.local.as_ref().is_some_and(|l| l.is_available())
}
/// Get stats from the local database (if available).
pub fn local_stats(&self) -> Option<LocalMbStats> {
self.local
.as_ref()
.filter(|l| l.is_available())
.map(|l| l.stats())
}
/// Get a reference to the underlying remote fetcher (for methods not on the trait).
pub fn remote(&self) -> &MusicBrainzFetcher {
&self.remote
}
/// Returns a reference to the local fetcher if available and populated.
fn local_if_available(&self) -> Option<&LocalMusicBrainzFetcher> {
self.local.as_ref().filter(|l| l.is_available())
}
/// Look up an artist by MBID. Tries local first, then remote.
pub async fn get_artist_by_mbid(&self, mbid: &str) -> DataResult<(String, Option<String>)> {
if let Some(local) = self.local_if_available()
&& let Ok(result) = local.get_artist_by_mbid_sync(mbid)
{
return Ok(result);
}
self.remote.get_artist_by_mbid(mbid).await
}
/// Get detailed artist info by MBID. Tries local first, then remote.
pub async fn get_artist_info(&self, mbid: &str) -> DataResult<ArtistInfo> {
if let Some(local) = self.local_if_available()
&& let Ok(result) = local.get_artist_info_sync(mbid)
{
return Ok(result);
}
self.remote.get_artist_info(mbid).await
}
/// Get a clone of the rate limiter for sharing with other MB clients.
pub fn limiter(&self) -> crate::http::RateLimiter {
self.remote.limiter()
}
}
/// Try a local search; returns `Some(results)` if non-empty, `None` to fall through.
async fn try_local_vec<T, F: std::future::Future<Output = DataResult<Vec<T>>>>(
f: F,
) -> Option<DataResult<Vec<T>>> {
let results = f.await;
match results {
Ok(ref r) if !r.is_empty() => Some(results),
_ => None,
}
}
impl MetadataFetcher for HybridMusicBrainzFetcher {
    // Every method follows the same shape: if a populated local DB exists, try it
    // first. For search-style methods an *empty* local result also falls through
    // to the remote API (via `try_local_vec`); for single-entity lookups only a
    // local `Err` falls through.
    async fn search_recording(&self, artist: &str, title: &str) -> DataResult<Vec<RecordingMatch>> {
        if let Some(local) = self.local_if_available()
            && let Some(results) = try_local_vec(local.search_recording(artist, title)).await
        {
            return results;
        }
        self.remote.search_recording(artist, title).await
    }
    async fn search_release(&self, artist: &str, album: &str) -> DataResult<Vec<ReleaseMatch>> {
        if let Some(local) = self.local_if_available()
            && let Some(results) = try_local_vec(local.search_release(artist, album)).await
        {
            return results;
        }
        self.remote.search_release(artist, album).await
    }
    async fn get_recording(&self, mbid: &str) -> DataResult<RecordingDetails> {
        // Single-entity lookup: local success wins, local failure falls through.
        if let Some(local) = self.local_if_available()
            && let Ok(result) = local.get_recording(mbid).await
        {
            return Ok(result);
        }
        self.remote.get_recording(mbid).await
    }
    async fn search_artist(&self, query: &str, limit: u32) -> DataResult<Vec<ArtistSearchResult>> {
        if let Some(local) = self.local_if_available()
            && let Some(results) = try_local_vec(local.search_artist(query, limit)).await
        {
            return results;
        }
        self.remote.search_artist(query, limit).await
    }
    async fn get_artist_releases(
        &self,
        artist_mbid: &str,
        limit: u32,
    ) -> DataResult<Vec<DiscographyEntry>> {
        if let Some(local) = self.local_if_available()
            && let Some(results) =
                try_local_vec(local.get_artist_releases(artist_mbid, limit)).await
        {
            return results;
        }
        self.remote.get_artist_releases(artist_mbid, limit).await
    }
    async fn get_release_tracks(&self, release_mbid: &str) -> DataResult<Vec<ReleaseTrack>> {
        // NOTE(review): unlike the search methods, an *empty* local track list is
        // returned as-is here (Ok(vec![])) rather than falling through — confirm
        // this asymmetry is intentional.
        if let Some(local) = self.local_if_available()
            && let Ok(tracks) = local.get_release_tracks(release_mbid).await
        {
            return Ok(tracks);
        }
        self.remote.get_release_tracks(release_mbid).await
    }
    async fn get_artist_release_groups(
        &self,
        artist_mbid: &str,
    ) -> DataResult<Vec<ReleaseGroupEntry>> {
        if let Some(local) = self.local_if_available()
            && let Some(results) = try_local_vec(local.get_artist_release_groups(artist_mbid)).await
        {
            return results;
        }
        self.remote.get_artist_release_groups(artist_mbid).await
    }
    async fn resolve_release_from_group(&self, release_group_mbid: &str) -> DataResult<String> {
        if let Some(local) = self.local_if_available()
            && let Ok(result) = local.resolve_release_from_group(release_group_mbid).await
        {
            return Ok(result);
        }
        self.remote
            .resolve_release_from_group(release_group_mbid)
            .await
    }
}

View File

@@ -0,0 +1,913 @@
//! MusicBrainz JSON dump importer.
//!
//! Downloads and parses MusicBrainz JSON data dumps (`.tar.xz` files) into a
//! local SQLite database (`shanty-mb.db`) for instant, rate-limit-free lookups.
//!
//! Each dump file contains one JSON object per line. We stream-decompress the
//! tar archive, read entries line-by-line, extract the fields we need, and batch
//! INSERT into SQLite with periodic transaction commits.
use std::io::BufRead;
use std::path::{Path, PathBuf};
use rusqlite::Connection;
use serde::Deserialize;
use tracing;
/// Batch size for transaction commits during import.
///
/// NOTE(review): as written, the importers only invoke the `progress` callback
/// every `BATCH_SIZE` rows — the transaction itself is committed once at the
/// end of each pass. Confirm whether periodic commits were intended.
const BATCH_SIZE: u64 = 10_000;
/// Base URL for MusicBrainz JSON data dumps.
const DUMP_BASE_URL: &str = "https://data.metabrainz.org/pub/musicbrainz/data/json-dumps/";
/// Statistics from an import run.
#[derive(Debug, Clone, Default)]
pub struct ImportStats {
    pub artists: u64,
    pub artist_urls: u64,
    pub release_groups: u64,
    pub releases: u64,
    pub tracks: u64,
    pub recordings: u64,
}

impl std::fmt::Display for ImportStats {
    /// Render a one-line human-readable summary of all entity counts.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Pair each count with its label, then join — keeps the output format
        // in one obvious place.
        let counts: [(u64, &str); 6] = [
            (self.artists, "artists"),
            (self.artist_urls, "artist URLs"),
            (self.release_groups, "release groups"),
            (self.releases, "releases"),
            (self.tracks, "tracks"),
            (self.recordings, "recordings"),
        ];
        let rendered: Vec<String> = counts
            .iter()
            .map(|(n, label)| format!("{n} {label}"))
            .collect();
        write!(f, "Imported: {}", rendered.join(", "))
    }
}
/// Create the SQLite schema for the local MusicBrainz database.
///
/// Idempotent: every statement is `IF NOT EXISTS`, so this is safe to run on an
/// already-populated database.
///
/// # Errors
/// Returns any SQLite error raised while executing the schema batch.
pub fn create_schema(conn: &Connection) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    conn.execute_batch(
        "
        CREATE TABLE IF NOT EXISTS mb_artists (
            mbid TEXT PRIMARY KEY,
            name TEXT NOT NULL,
            sort_name TEXT,
            disambiguation TEXT,
            artist_type TEXT,
            country TEXT,
            begin_year INTEGER
        );
        CREATE INDEX IF NOT EXISTS idx_mb_artists_name ON mb_artists(name COLLATE NOCASE);
        CREATE TABLE IF NOT EXISTS mb_artist_urls (
            artist_mbid TEXT NOT NULL,
            url TEXT NOT NULL,
            link_type TEXT NOT NULL
        );
        CREATE INDEX IF NOT EXISTS idx_mb_artist_urls_artist ON mb_artist_urls(artist_mbid);
        CREATE TABLE IF NOT EXISTS mb_release_groups (
            mbid TEXT PRIMARY KEY,
            title TEXT NOT NULL,
            artist_mbid TEXT,
            primary_type TEXT,
            secondary_types TEXT,
            first_release_date TEXT
        );
        CREATE INDEX IF NOT EXISTS idx_mb_rg_artist ON mb_release_groups(artist_mbid);
        CREATE TABLE IF NOT EXISTS mb_releases (
            mbid TEXT PRIMARY KEY,
            title TEXT NOT NULL,
            release_group_mbid TEXT,
            artist_mbid TEXT,
            date TEXT,
            country TEXT,
            status TEXT
        );
        CREATE INDEX IF NOT EXISTS idx_mb_releases_rg ON mb_releases(release_group_mbid);
        CREATE INDEX IF NOT EXISTS idx_mb_releases_artist ON mb_releases(artist_mbid);
        CREATE TABLE IF NOT EXISTS mb_tracks (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            release_mbid TEXT NOT NULL,
            recording_mbid TEXT NOT NULL,
            title TEXT NOT NULL,
            track_number INTEGER,
            disc_number INTEGER,
            duration_ms INTEGER,
            position INTEGER
        );
        CREATE INDEX IF NOT EXISTS idx_mb_tracks_release ON mb_tracks(release_mbid);
        CREATE INDEX IF NOT EXISTS idx_mb_tracks_recording ON mb_tracks(recording_mbid);
        CREATE TABLE IF NOT EXISTS mb_recordings (
            mbid TEXT PRIMARY KEY,
            title TEXT NOT NULL,
            artist_mbid TEXT,
            duration_ms INTEGER
        );
        CREATE INDEX IF NOT EXISTS idx_mb_recordings_artist ON mb_recordings(artist_mbid);
        CREATE INDEX IF NOT EXISTS idx_mb_recordings_title ON mb_recordings(title COLLATE NOCASE);
        CREATE TABLE IF NOT EXISTS mb_import_meta (
            key TEXT PRIMARY KEY,
            value TEXT NOT NULL
        );
        ",
    )?;
    Ok(())
}
// --- JSON structures matching MusicBrainz dump format ---
//
// These mirror only the fields we extract from each line-delimited JSON dump
// entry; serde ignores everything else in the (much larger) dump objects.
// Field renames follow the dump's kebab-case keys.

/// One line of the artist dump.
#[derive(Deserialize)]
struct DumpArtist {
    id: String,
    name: String,
    #[serde(rename = "sort-name")]
    sort_name: Option<String>,
    disambiguation: Option<String>,
    #[serde(rename = "type")]
    artist_type: Option<String>,
    country: Option<String>,
    #[serde(rename = "life-span")]
    life_span: Option<DumpLifeSpan>,
    relations: Option<Vec<DumpRelation>>,
}
/// Artist life-span; only the begin date is used (for `begin_year`).
#[derive(Deserialize)]
struct DumpLifeSpan {
    begin: Option<String>,
}
/// Artist relation; only URL relations (wikipedia, discogs, etc.) are stored.
#[derive(Deserialize)]
struct DumpRelation {
    #[serde(rename = "type")]
    relation_type: String,
    url: Option<DumpRelationUrl>,
}
#[derive(Deserialize)]
struct DumpRelationUrl {
    resource: String,
}
/// One line of the release-group dump.
#[derive(Deserialize)]
struct DumpReleaseGroup {
    id: String,
    title: String,
    #[serde(rename = "primary-type")]
    primary_type: Option<String>,
    #[serde(rename = "secondary-types", default)]
    secondary_types: Option<Vec<String>>,
    #[serde(rename = "first-release-date")]
    first_release_date: Option<String>,
    #[serde(rename = "artist-credit")]
    artist_credit: Option<Vec<DumpArtistCredit>>,
}
/// One line of the release dump; `media` carries the track lists.
#[derive(Deserialize)]
struct DumpRelease {
    id: String,
    title: String,
    #[serde(rename = "release-group")]
    release_group: Option<DumpReleaseGroupRef>,
    date: Option<String>,
    country: Option<String>,
    status: Option<String>,
    #[serde(rename = "artist-credit")]
    artist_credit: Option<Vec<DumpArtistCredit>>,
    media: Option<Vec<DumpMedia>>,
}
#[derive(Deserialize)]
struct DumpReleaseGroupRef {
    id: String,
}
/// A medium (disc) within a release; `position` becomes the disc number.
#[derive(Deserialize)]
struct DumpMedia {
    position: Option<i32>,
    tracks: Option<Vec<DumpTrack>>,
}
/// A track on a medium; `length` is in milliseconds per the dump format.
#[derive(Deserialize)]
struct DumpTrack {
    position: Option<i32>,
    title: String,
    length: Option<u64>,
    recording: Option<DumpTrackRecording>,
}
#[derive(Deserialize)]
struct DumpTrackRecording {
    id: String,
}
/// One line of the recording dump.
#[derive(Deserialize)]
struct DumpRecording {
    id: String,
    title: String,
    length: Option<u64>,
    #[serde(rename = "artist-credit")]
    artist_credit: Option<Vec<DumpArtistCredit>>,
}
/// Artist credit entry; only the referenced artist MBID is used.
#[derive(Deserialize)]
struct DumpArtistCredit {
    artist: DumpArtistRef,
}
#[derive(Deserialize)]
struct DumpArtistRef {
    id: String,
}
/// Extract the primary artist MBID from artist credits.
///
/// Returns the MBID of the *first* credited artist, or `None` when the credit
/// list is absent or empty.
fn primary_artist_mbid(credits: &Option<Vec<DumpArtistCredit>>) -> Option<String> {
    let list = credits.as_ref()?;
    let first = list.first()?;
    Some(first.artist.id.clone())
}
/// Extract begin year from a life-span date string like "1990" or "1990-05-14".
///
/// Returns `None` when the life-span or begin date is missing, or when the
/// leading segment doesn't parse as an integer.
fn extract_begin_year(life_span: &Option<DumpLifeSpan>) -> Option<i32> {
    let begin = life_span.as_ref()?.begin.as_ref()?;
    let year_text = begin.split('-').next()?;
    year_text.parse::<i32>().ok()
}
/// Import artists from a line-delimited JSON reader.
///
/// Each line should be a complete JSON object matching the MusicBrainz artist dump format.
/// Returns the number of artists imported.
///
/// Existing artist and URL rows are cleared first, so this is a full re-import.
/// Unreadable or malformed lines are skipped (logged), never fatal. The whole
/// pass runs in one transaction committed at the end; `progress` fires with the
/// running count every `BATCH_SIZE` rows and once after the commit.
pub fn import_artists(
    conn: &Connection,
    reader: impl BufRead,
    progress: impl Fn(u64),
) -> Result<u64, Box<dyn std::error::Error + Send + Sync>> {
    // Clear existing data for clean re-import
    conn.execute("DELETE FROM mb_artist_urls", [])?;
    conn.execute("DELETE FROM mb_artists", [])?;
    let mut count: u64 = 0;
    let mut url_count: u64 = 0;
    // NOTE(review): unchecked_transaction allows starting a transaction through
    // a shared &Connection — presumably safe because this connection isn't used
    // concurrently during import; confirm against callers.
    let tx = conn.unchecked_transaction()?;
    for line_result in reader.lines() {
        let line = match line_result {
            Ok(l) => l,
            Err(e) => {
                tracing::warn!(error = %e, "skipping unreadable line");
                continue;
            }
        };
        let line = line.trim();
        if line.is_empty() {
            continue;
        }
        // trace-level (not warn): malformed lines are common enough in dumps
        // that logging each at warn would be noise.
        let artist: DumpArtist = match serde_json::from_str(line) {
            Ok(a) => a,
            Err(e) => {
                tracing::trace!(error = %e, "skipping malformed artist JSON line");
                continue;
            }
        };
        let begin_year = extract_begin_year(&artist.life_span);
        tx.execute(
            "INSERT OR REPLACE INTO mb_artists (mbid, name, sort_name, disambiguation, artist_type, country, begin_year) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)",
            rusqlite::params![
                artist.id,
                artist.name,
                artist.sort_name,
                artist.disambiguation,
                artist.artist_type,
                artist.country,
                begin_year,
            ],
        )?;
        // Insert URL relations
        if let Some(relations) = artist.relations {
            for rel in relations {
                if let Some(url) = rel.url {
                    tx.execute(
                        "INSERT INTO mb_artist_urls (artist_mbid, url, link_type) VALUES (?1, ?2, ?3)",
                        rusqlite::params![artist.id, url.resource, rel.relation_type],
                    )?;
                    url_count += 1;
                }
            }
        }
        count += 1;
        // Progress callback only — the transaction commit stays deferred to the end.
        if count.is_multiple_of(BATCH_SIZE) {
            progress(count);
        }
    }
    tx.commit()?;
    progress(count);
    // Store URL count in import meta
    conn.execute(
        "INSERT OR REPLACE INTO mb_import_meta (key, value) VALUES ('artist_url_count', ?1)",
        rusqlite::params![url_count.to_string()],
    )?;
    Ok(count)
}
/// Import release groups from a line-delimited JSON reader.
///
/// Clears the table first (full re-import). Secondary types are stored as a
/// JSON-encoded string array. Returns the number of release groups imported;
/// malformed lines are skipped, and `progress` fires every `BATCH_SIZE` rows.
pub fn import_release_groups(
    conn: &Connection,
    reader: impl BufRead,
    progress: impl Fn(u64),
) -> Result<u64, Box<dyn std::error::Error + Send + Sync>> {
    conn.execute("DELETE FROM mb_release_groups", [])?;
    let mut count: u64 = 0;
    let tx = conn.unchecked_transaction()?;
    for line_result in reader.lines() {
        let line = match line_result {
            Ok(l) => l,
            Err(e) => {
                tracing::warn!(error = %e, "skipping unreadable line");
                continue;
            }
        };
        let line = line.trim();
        if line.is_empty() {
            continue;
        }
        let rg: DumpReleaseGroup = match serde_json::from_str(line) {
            Ok(r) => r,
            Err(e) => {
                tracing::trace!(error = %e, "skipping malformed release-group JSON line");
                continue;
            }
        };
        // Only the first credited artist is kept for the artist_mbid column.
        let artist_mbid = primary_artist_mbid(&rg.artist_credit);
        // Serialize secondary types (e.g. ["Compilation"]) as a JSON string.
        let secondary_types = rg
            .secondary_types
            .as_ref()
            .map(|st| serde_json::to_string(st).unwrap_or_default());
        tx.execute(
            "INSERT OR REPLACE INTO mb_release_groups (mbid, title, artist_mbid, primary_type, secondary_types, first_release_date) VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
            rusqlite::params![
                rg.id,
                rg.title,
                artist_mbid,
                rg.primary_type,
                secondary_types,
                rg.first_release_date,
            ],
        )?;
        count += 1;
        if count.is_multiple_of(BATCH_SIZE) {
            progress(count);
        }
    }
    tx.commit()?;
    progress(count);
    Ok(count)
}
/// Import releases (and their tracks) from a line-delimited JSON reader.
///
/// Each release's `media[].tracks[]` entries are flattened into `mb_tracks`
/// rows; tracks without a recording MBID are skipped. Returns the number of
/// releases imported; the total track count is stored in `mb_import_meta`
/// under `track_count`.
pub fn import_releases(
    conn: &Connection,
    reader: impl BufRead,
    progress: impl Fn(u64),
) -> Result<u64, Box<dyn std::error::Error + Send + Sync>> {
    // Clear tracks before releases — tracks reference releases by MBID.
    conn.execute("DELETE FROM mb_tracks", [])?;
    conn.execute("DELETE FROM mb_releases", [])?;
    let mut count: u64 = 0;
    let mut track_count: u64 = 0;
    let tx = conn.unchecked_transaction()?;
    for line_result in reader.lines() {
        let line = match line_result {
            Ok(l) => l,
            Err(e) => {
                tracing::warn!(error = %e, "skipping unreadable line");
                continue;
            }
        };
        let line = line.trim();
        if line.is_empty() {
            continue;
        }
        let release: DumpRelease = match serde_json::from_str(line) {
            Ok(r) => r,
            Err(e) => {
                tracing::trace!(error = %e, "skipping malformed release JSON line");
                continue;
            }
        };
        let rg_mbid = release.release_group.as_ref().map(|rg| &rg.id);
        let artist_mbid = primary_artist_mbid(&release.artist_credit);
        tx.execute(
            "INSERT OR REPLACE INTO mb_releases (mbid, title, release_group_mbid, artist_mbid, date, country, status) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)",
            rusqlite::params![
                release.id,
                release.title,
                rg_mbid,
                artist_mbid,
                release.date,
                release.country,
                release.status,
            ],
        )?;
        // Insert tracks from media
        if let Some(media) = release.media {
            for medium in media {
                // The medium's position within the release is the disc number.
                let disc_number = medium.position;
                if let Some(tracks) = medium.tracks {
                    for track in tracks {
                        let recording_mbid = track
                            .recording
                            .as_ref()
                            .map(|r| r.id.as_str())
                            .unwrap_or("");
                        // Without a recording MBID the row can't join to anything — skip.
                        if recording_mbid.is_empty() {
                            continue;
                        }
                        // NOTE(review): track_number and position are both filled
                        // from track.position — confirm whether a distinct
                        // "number" field from the dump was intended.
                        tx.execute(
                            "INSERT INTO mb_tracks (release_mbid, recording_mbid, title, track_number, disc_number, duration_ms, position) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)",
                            rusqlite::params![
                                release.id,
                                recording_mbid,
                                track.title,
                                track.position,
                                disc_number,
                                track.length,
                                track.position,
                            ],
                        )?;
                        track_count += 1;
                    }
                }
            }
        }
        count += 1;
        if count.is_multiple_of(BATCH_SIZE) {
            progress(count);
        }
    }
    tx.commit()?;
    progress(count);
    // Store track count in import meta
    conn.execute(
        "INSERT OR REPLACE INTO mb_import_meta (key, value) VALUES ('track_count', ?1)",
        rusqlite::params![track_count.to_string()],
    )?;
    Ok(count)
}
/// Import recordings from a line-delimited JSON reader.
///
/// Clears the table first (full re-import). Returns the number of recordings
/// imported; malformed lines are skipped, and `progress` fires every
/// `BATCH_SIZE` rows plus once at the end.
pub fn import_recordings(
    conn: &Connection,
    reader: impl BufRead,
    progress: impl Fn(u64),
) -> Result<u64, Box<dyn std::error::Error + Send + Sync>> {
    conn.execute("DELETE FROM mb_recordings", [])?;
    let mut count: u64 = 0;
    let tx = conn.unchecked_transaction()?;
    for line_result in reader.lines() {
        let line = match line_result {
            Ok(l) => l,
            Err(e) => {
                tracing::warn!(error = %e, "skipping unreadable line");
                continue;
            }
        };
        let line = line.trim();
        if line.is_empty() {
            continue;
        }
        let recording: DumpRecording = match serde_json::from_str(line) {
            Ok(r) => r,
            Err(e) => {
                tracing::trace!(error = %e, "skipping malformed recording JSON line");
                continue;
            }
        };
        // Only the first credited artist is kept.
        let artist_mbid = primary_artist_mbid(&recording.artist_credit);
        tx.execute(
            "INSERT OR REPLACE INTO mb_recordings (mbid, title, artist_mbid, duration_ms) VALUES (?1, ?2, ?3, ?4)",
            rusqlite::params![
                recording.id,
                recording.title,
                artist_mbid,
                recording.length,
            ],
        )?;
        count += 1;
        if count.is_multiple_of(BATCH_SIZE) {
            progress(count);
        }
    }
    tx.commit()?;
    progress(count);
    Ok(count)
}
/// Discover the latest dump folder timestamp from the MB server.
///
/// Fetches the directory listing and finds the `latest-is-{TIMESTAMP}` file.
pub async fn discover_latest_dump_folder()
-> Result<String, Box<dyn std::error::Error + Send + Sync>> {
    let client = reqwest::Client::builder()
        .user_agent("Shanty/0.1.0 (shanty-music-app)")
        .timeout(std::time::Duration::from_secs(30))
        .build()?;
    let resp = client.get(DUMP_BASE_URL).send().await?;
    if !resp.status().is_success() {
        return Err(format!("HTTP {} fetching dump listing", resp.status()).into());
    }
    let body = resp.text().await?;
    // Scrape the HTML directory listing for "latest-is-YYYYMMDD-HHMMSS".
    // The listing contains links like: <a href="latest-is-20260321-001002">
    let Some(tail) = body.split("latest-is-").nth(1) else {
        return Err("could not find latest dump folder in directory listing".into());
    };
    // Everything up to the closing quote of the href is the timestamp; a
    // trailing slash (directory links) is stripped.
    let timestamp = tail
        .split('"')
        .next()
        .unwrap_or(tail)
        .trim_end_matches('/')
        .to_string();
    tracing::info!(timestamp = %timestamp, "discovered latest MB dump folder");
    Ok(timestamp)
}
/// Download a MusicBrainz JSON dump file and return the path it was saved to.
///
/// Downloads from `https://data.metabrainz.org/pub/musicbrainz/data/json-dumps/{timestamp}/{filename}`.
/// The `timestamp` is the dated folder name (e.g., "20260321-001002").
///
/// # Errors
/// Returns an error on network failure, a non-success HTTP status, or any
/// filesystem error while creating/writing the target file.
pub async fn download_dump(
    filename: &str,
    timestamp: &str,
    target_dir: &Path,
    progress: impl Fn(&str),
) -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
    // FIX: the URL and progress messages previously interpolated a literal
    // "(unknown)" placeholder instead of `filename`, so every download hit a
    // nonexistent URL. Use the requested filename throughout.
    let url = format!("{DUMP_BASE_URL}{timestamp}/{filename}");
    let target_path = target_dir.join(filename);
    progress(&format!("Downloading {filename}..."));
    tracing::info!(url = %url, target = %target_path.display(), "downloading MB dump");
    let client = reqwest::Client::builder()
        .user_agent("Shanty/0.1.0 (shanty-music-app)")
        .timeout(std::time::Duration::from_secs(3600)) // 1 hour timeout for large files
        .build()?;
    let resp = client.get(&url).send().await?;
    if !resp.status().is_success() {
        return Err(format!("HTTP {} downloading {url}", resp.status()).into());
    }
    // Stream to disk — don't buffer the whole file in memory
    std::fs::create_dir_all(target_dir)?;
    let mut file = tokio::fs::File::create(&target_path).await?;
    let mut stream = resp.bytes_stream();
    let mut downloaded: u64 = 0;
    let mut last_report: u64 = 0;
    use futures_util::StreamExt;
    use tokio::io::AsyncWriteExt;
    while let Some(chunk) = stream.next().await {
        let chunk = chunk?;
        file.write_all(&chunk).await?;
        downloaded += chunk.len() as u64;
        // Report progress every ~50 MB
        if downloaded - last_report > 50 * 1_048_576 {
            let mb = downloaded / 1_048_576;
            progress(&format!("Downloading {filename}... {mb} MB"));
            last_report = downloaded;
        }
    }
    // Flush explicitly — Drop would swallow write errors silently.
    file.flush().await?;
    let size_mb = downloaded / 1_048_576;
    progress(&format!("Downloaded {filename} ({size_mb} MB)"));
    tracing::info!(
        file = %target_path.display(),
        size_mb = size_mb,
        "download complete"
    );
    Ok(target_path)
}
/// Stream a `.tar.xz` dump archive and import its `mbdump/*` data entry.
///
/// MusicBrainz dump archives contain metadata files (TIMESTAMP, COPYING, etc.)
/// followed by the actual data at `mbdump/{entity_name}`. This skips to the
/// first `mbdump/` entry and feeds it line-by-line into the importer selected
/// by `entity_type` — the archive is never buffered into memory.
///
/// `entity_type` must be one of "artist", "release-group", "release", or
/// "recording"; anything else (or an archive with no `mbdump/` entry) is an
/// error. Returns the imported row count from the dispatched importer.
pub fn import_from_tar_xz(
    path: &Path,
    conn: &Connection,
    entity_type: &str,
    progress: impl Fn(u64),
) -> Result<u64, Box<dyn std::error::Error + Send + Sync>> {
    let file = std::fs::File::open(path)?;
    // Buffered file -> xz decompressor -> tar reader: a fully streaming pipeline.
    let xz_reader = xz2::read::XzDecoder::new(std::io::BufReader::with_capacity(64 * 1024, file));
    let mut archive = tar::Archive::new(xz_reader);
    // Find the mbdump/* entry (skip TIMESTAMP, COPYING, README, etc.)
    for entry_result in archive.entries()? {
        let entry = entry_result?;
        let entry_path = entry.path()?.to_string_lossy().to_string();
        if entry_path.starts_with("mbdump/") {
            tracing::info!(entry = %entry_path, "found data entry in tar archive");
            // Large read buffer: dump lines can be long JSON objects.
            let reader = std::io::BufReader::with_capacity(256 * 1024, entry);
            // Dispatch to the right importer based on entity type
            return match entity_type {
                "artist" => import_artists(conn, reader, progress),
                "release-group" => import_release_groups(conn, reader, progress),
                "release" => import_releases(conn, reader, progress),
                "recording" => import_recordings(conn, reader, progress),
                _ => Err(format!("unknown entity type: {entity_type}").into()),
            };
        }
    }
    Err(format!("no mbdump/ entry found in {}", path.display()).into())
}
/// Run a full import of all dump files from a directory.
///
/// Expects `artist.tar.xz`, `release-group.tar.xz`, `release.tar.xz`, and
/// `recording.tar.xz` to exist in `dump_dir`.
///
/// Missing dump files are logged and skipped (their counts stay 0), so a
/// partial directory still imports what's present. Entity counts and the
/// import timestamp are persisted in `mb_import_meta` for later stats queries.
pub fn run_import(
    conn: &Connection,
    dump_dir: &Path,
    progress: impl Fn(&str),
) -> Result<ImportStats, Box<dyn std::error::Error + Send + Sync>> {
    create_schema(conn)?;
    // Optimize for bulk import
    conn.execute_batch(
        "PRAGMA journal_mode = WAL;
        PRAGMA synchronous = NORMAL;
        PRAGMA cache_size = -64000;
        PRAGMA temp_store = MEMORY;
        PRAGMA foreign_keys = OFF;",
    )?;
    let mut stats = ImportStats::default();
    // Import artists
    let artist_path = dump_dir.join("artist.tar.xz");
    if artist_path.exists() {
        progress("Importing artists...");
        // Throttle the textual progress callback to every 100k rows.
        stats.artists = import_from_tar_xz(&artist_path, conn, "artist", |n| {
            if n % 100_000 == 0 {
                progress(&format!("Artists: {n}..."));
            }
        })?;
        progress(&format!("Artists: {} done", stats.artists));
    } else {
        tracing::warn!(path = %artist_path.display(), "artist dump not found, skipping");
    }
    // Import release groups
    let rg_path = dump_dir.join("release-group.tar.xz");
    if rg_path.exists() {
        progress("Importing release groups...");
        stats.release_groups = import_from_tar_xz(&rg_path, conn, "release-group", |n| {
            if n % 100_000 == 0 {
                progress(&format!("Release groups: {n}..."));
            }
        })?;
        progress(&format!("Release groups: {} done", stats.release_groups));
    } else {
        tracing::warn!(path = %rg_path.display(), "release-group dump not found, skipping");
    }
    // Import releases (and tracks)
    let release_path = dump_dir.join("release.tar.xz");
    if release_path.exists() {
        progress("Importing releases...");
        stats.releases = import_from_tar_xz(&release_path, conn, "release", |n| {
            if n % 100_000 == 0 {
                progress(&format!("Releases: {n}..."));
            }
        })?;
        // Read track count from meta (written by import_releases).
        if let Ok(tc) = conn.query_row(
            "SELECT value FROM mb_import_meta WHERE key = 'track_count'",
            [],
            |row| row.get::<_, String>(0),
        ) {
            stats.tracks = tc.parse().unwrap_or(0);
        }
        progress(&format!(
            "Releases: {} done ({} tracks)",
            stats.releases, stats.tracks
        ));
    } else {
        tracing::warn!(path = %release_path.display(), "release dump not found, skipping");
    }
    // Import recordings
    let recording_path = dump_dir.join("recording.tar.xz");
    if recording_path.exists() {
        progress("Importing recordings...");
        stats.recordings = import_from_tar_xz(&recording_path, conn, "recording", |n| {
            if n % 100_000 == 0 {
                progress(&format!("Recordings: {n}..."));
            }
        })?;
        progress(&format!("Recordings: {} done", stats.recordings));
    } else {
        tracing::warn!(path = %recording_path.display(), "recording dump not found, skipping");
    }
    // Read artist URL count from meta (written by import_artists).
    if let Ok(uc) = conn.query_row(
        "SELECT value FROM mb_import_meta WHERE key = 'artist_url_count'",
        [],
        |row| row.get::<_, String>(0),
    ) {
        stats.artist_urls = uc.parse().unwrap_or(0);
    }
    // Record import timestamp
    let now = chrono::Utc::now().to_rfc3339();
    conn.execute(
        "INSERT OR REPLACE INTO mb_import_meta (key, value) VALUES ('last_import_date', ?1)",
        rusqlite::params![now],
    )?;
    // Record entity counts (read back later by LocalMusicBrainzFetcher::stats).
    conn.execute(
        "INSERT OR REPLACE INTO mb_import_meta (key, value) VALUES ('artist_count', ?1)",
        rusqlite::params![stats.artists.to_string()],
    )?;
    conn.execute(
        "INSERT OR REPLACE INTO mb_import_meta (key, value) VALUES ('release_group_count', ?1)",
        rusqlite::params![stats.release_groups.to_string()],
    )?;
    conn.execute(
        "INSERT OR REPLACE INTO mb_import_meta (key, value) VALUES ('release_count', ?1)",
        rusqlite::params![stats.releases.to_string()],
    )?;
    conn.execute(
        "INSERT OR REPLACE INTO mb_import_meta (key, value) VALUES ('recording_count', ?1)",
        rusqlite::params![stats.recordings.to_string()],
    )?;
    progress(&format!("Import complete: {stats}"));
    Ok(stats)
}
/// The dump filenames to download.
///
/// Order matches the import order used by [`run_import`].
pub const DUMP_FILES: &[&str] = &[
    "artist.tar.xz",
    "release-group.tar.xz",
    "release.tar.xz",
    "recording.tar.xz",
];
/// High-level import function: opens the database, runs import, closes it.
///
/// This is the main entry point for external callers that don't want to manage
/// a `rusqlite::Connection` directly.
pub fn run_import_at_path(
    db_path: &Path,
    dump_dir: &Path,
    progress: impl Fn(&str),
) -> Result<ImportStats, Box<dyn std::error::Error + Send + Sync>> {
    // Ensure the database's parent directory exists (no-op for bare filenames).
    db_path
        .parent()
        .map(std::fs::create_dir_all)
        .transpose()?;
    let conn = Connection::open(db_path)?;
    run_import(&conn, dump_dir, progress)
}
#[cfg(test)]
mod tests {
    use super::*;
    // Schema creation should produce every mb_* table in an in-memory DB.
    #[test]
    fn test_create_schema() {
        let conn = Connection::open_in_memory().unwrap();
        create_schema(&conn).unwrap();
        // Verify tables exist
        let count: i32 = conn
            .query_row(
                "SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name LIKE 'mb_%'",
                [],
                |row| row.get(0),
            )
            .unwrap();
        assert!(count >= 6, "Expected at least 6 mb_ tables, got {count}");
    }
    // An empty reader imports zero artists without erroring.
    #[test]
    fn test_import_artists_empty() {
        let conn = Connection::open_in_memory().unwrap();
        create_schema(&conn).unwrap();
        let reader = std::io::BufReader::new(std::io::Cursor::new(b""));
        let count = import_artists(&conn, reader, |_| {}).unwrap();
        assert_eq!(count, 0);
    }
    // A single well-formed dump line yields one artist row and one URL relation.
    #[test]
    fn test_import_single_artist() {
        let conn = Connection::open_in_memory().unwrap();
        create_schema(&conn).unwrap();
        let json = r#"{"id":"some-uuid","name":"Test Artist","sort-name":"Artist, Test","disambiguation":"test","type":"Person","country":"US","life-span":{"begin":"1990-05-01"},"relations":[{"type":"wikipedia","url":{"resource":"https://en.wikipedia.org/wiki/Test"}}]}"#;
        let reader = std::io::BufReader::new(std::io::Cursor::new(json.as_bytes()));
        let count = import_artists(&conn, reader, |_| {}).unwrap();
        assert_eq!(count, 1);
        // Verify artist was inserted
        let name: String = conn
            .query_row(
                "SELECT name FROM mb_artists WHERE mbid = 'some-uuid'",
                [],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(name, "Test Artist");
        // Verify URL was inserted
        let url_count: i32 = conn
            .query_row(
                "SELECT COUNT(*) FROM mb_artist_urls WHERE artist_mbid = 'some-uuid'",
                [],
                |row| row.get(0),
            )
            .unwrap();
        assert_eq!(url_count, 1);
    }
    // Release groups with secondary types and artist credits parse and insert.
    #[test]
    fn test_import_release_groups() {
        let conn = Connection::open_in_memory().unwrap();
        create_schema(&conn).unwrap();
        let json = r#"{"id":"rg-uuid","title":"Test Album","primary-type":"Album","secondary-types":["Compilation"],"first-release-date":"2020-01-15","artist-credit":[{"artist":{"id":"artist-uuid","name":"Test Artist"}}]}"#;
        let reader = std::io::BufReader::new(std::io::Cursor::new(json.as_bytes()));
        let count = import_release_groups(&conn, reader, |_| {}).unwrap();
        assert_eq!(count, 1);
    }
    // Recordings with a length and artist credit parse and insert.
    #[test]
    fn test_import_recordings() {
        let conn = Connection::open_in_memory().unwrap();
        create_schema(&conn).unwrap();
        let json = r#"{"id":"rec-uuid","title":"Test Recording","length":240000,"artist-credit":[{"artist":{"id":"artist-uuid","name":"Test Artist"}}]}"#;
        let reader = std::io::BufReader::new(std::io::Cursor::new(json.as_bytes()));
        let count = import_recordings(&conn, reader, |_| {}).unwrap();
        assert_eq!(count, 1);
    }
}

583
shanty-data/src/mb_local.rs Normal file
View File

@@ -0,0 +1,583 @@
//! Local MusicBrainz database fetcher.
//!
//! Implements [`MetadataFetcher`] backed by a local SQLite database (populated
//! via [`crate::mb_import`]). All queries are instant local lookups — no rate
//! limiting needed.
use std::sync::Mutex;
use rusqlite::Connection;
use crate::error::{DataError, DataResult};
use crate::traits::MetadataFetcher;
use crate::types::{
ArtistInfo, ArtistSearchResult, ArtistUrl, DiscographyEntry, RecordingDetails, RecordingMatch,
ReleaseGroupEntry, ReleaseMatch, ReleaseRef, ReleaseTrack,
};
/// Statistics about the local MusicBrainz database.
///
/// Counts are read back from the `mb_import_meta` table recorded at import
/// time — not live `COUNT(*)` queries — so they reflect the last completed
/// import.
#[derive(Debug, Clone, Default, serde::Serialize)]
pub struct LocalMbStats {
    pub artists: u64,
    pub release_groups: u64,
    pub releases: u64,
    pub recordings: u64,
    pub tracks: u64,
    // RFC 3339 timestamp of the last import, if one has completed.
    pub last_import_date: Option<String>,
}
/// A [`MetadataFetcher`] backed by a local SQLite database.
pub struct LocalMusicBrainzFetcher {
    // Single SQLite connection; the Mutex serializes access so queries can be
    // issued through &self.
    conn: Mutex<Connection>,
}
impl LocalMusicBrainzFetcher {
/// Open (or create) a local MusicBrainz SQLite database.
///
/// Enables WAL journaling and sets cache_size = -16000 (negative values are
/// KiB in SQLite, so roughly a 16 MiB page cache) for fast read access.
///
/// # Errors
/// Returns any SQLite error from opening the file or applying the PRAGMAs.
pub fn new(db_path: &str) -> Result<Self, Box<dyn std::error::Error>> {
    let conn = Connection::open(db_path)?;
    conn.execute_batch("PRAGMA journal_mode = WAL; PRAGMA cache_size = -16000;")?;
    Ok(Self {
        conn: Mutex::new(conn),
    })
}
/// Check whether the database has been populated with data.
pub fn is_available(&self) -> bool {
let conn = self.conn.lock().unwrap();
// Check if the mb_artists table exists and has rows
conn.query_row(
"SELECT COUNT(*) FROM sqlite_master WHERE type='table' AND name='mb_artists'",
[],
|row| row.get::<_, i32>(0),
)
.map(|c| c > 0)
.unwrap_or(false)
&& conn
.query_row("SELECT COUNT(*) FROM mb_artists LIMIT 1", [], |row| {
row.get::<_, i32>(0)
})
.unwrap_or(0)
> 0
}
/// Get statistics about the imported data.
pub fn stats(&self) -> LocalMbStats {
let conn = self.conn.lock().unwrap();
let get_meta = |key: &str| -> Option<String> {
conn.query_row(
"SELECT value FROM mb_import_meta WHERE key = ?1",
rusqlite::params![key],
|row| row.get(0),
)
.ok()
};
LocalMbStats {
artists: get_meta("artist_count")
.and_then(|s| s.parse().ok())
.unwrap_or(0),
release_groups: get_meta("release_group_count")
.and_then(|s| s.parse().ok())
.unwrap_or(0),
releases: get_meta("release_count")
.and_then(|s| s.parse().ok())
.unwrap_or(0),
recordings: get_meta("recording_count")
.and_then(|s| s.parse().ok())
.unwrap_or(0),
tracks: get_meta("track_count")
.and_then(|s| s.parse().ok())
.unwrap_or(0),
last_import_date: get_meta("last_import_date"),
}
}
/// Look up an artist by MBID (returns name and disambiguation).
pub fn get_artist_by_mbid_sync(&self, mbid: &str) -> DataResult<(String, Option<String>)> {
let conn = self.conn.lock().unwrap();
let result = conn.query_row(
"SELECT name, disambiguation FROM mb_artists WHERE mbid = ?1",
rusqlite::params![mbid],
|row| {
let name: String = row.get(0)?;
let disambiguation: Option<String> = row.get(1)?;
Ok((name, disambiguation.filter(|s| !s.is_empty())))
},
);
match result {
Ok(r) => Ok(r),
Err(rusqlite::Error::QueryReturnedNoRows) => {
Err(DataError::Other(format!("artist {mbid} not found locally")))
}
Err(e) => Err(DataError::Other(e.to_string())),
}
}
/// Look up detailed artist info by MBID, including URLs.
pub fn get_artist_info_sync(&self, mbid: &str) -> DataResult<ArtistInfo> {
let conn = self.conn.lock().unwrap();
let artist = conn.query_row(
"SELECT name, disambiguation, country, artist_type, begin_year FROM mb_artists WHERE mbid = ?1",
rusqlite::params![mbid],
|row| {
Ok((
row.get::<_, String>(0)?,
row.get::<_, Option<String>>(1)?,
row.get::<_, Option<String>>(2)?,
row.get::<_, Option<String>>(3)?,
row.get::<_, Option<i32>>(4)?,
))
},
);
let (name, disambiguation, country, artist_type, begin_year) = match artist {
Ok(a) => a,
Err(rusqlite::Error::QueryReturnedNoRows) => {
return Err(DataError::Other(format!("artist {mbid} not found locally")));
}
Err(e) => return Err(DataError::Other(e.to_string())),
};
// Fetch URLs
let mut url_stmt = conn
.prepare("SELECT url, link_type FROM mb_artist_urls WHERE artist_mbid = ?1")
.map_err(|e| DataError::Other(e.to_string()))?;
let urls: Vec<ArtistUrl> = url_stmt
.query_map(rusqlite::params![mbid], |row| {
Ok(ArtistUrl {
url: row.get(0)?,
link_type: row.get(1)?,
})
})
.map_err(|e| DataError::Other(e.to_string()))?
.filter_map(|r| r.ok())
.collect();
Ok(ArtistInfo {
name,
mbid: Some(mbid.to_string()),
disambiguation: disambiguation.filter(|s| !s.is_empty()),
country: country.filter(|s| !s.is_empty()),
artist_type,
begin_year: begin_year.map(|y| y.to_string()),
urls,
})
}
}
impl MetadataFetcher for LocalMusicBrainzFetcher {
async fn search_recording(&self, artist: &str, title: &str) -> DataResult<Vec<RecordingMatch>> {
let conn = self.conn.lock().unwrap();
let query = if artist.is_empty() {
let pattern = format!("%{title}%");
let mut stmt = conn
.prepare(
"SELECT r.mbid, r.title, r.artist_mbid, a.name
FROM mb_recordings r
LEFT JOIN mb_artists a ON r.artist_mbid = a.mbid
WHERE r.title LIKE ?1 COLLATE NOCASE
LIMIT 10",
)
.map_err(|e| DataError::Other(e.to_string()))?;
stmt.query_map(rusqlite::params![pattern], |row| {
Ok(RecordingMatch {
mbid: row.get(0)?,
title: row.get(1)?,
artist_mbid: row.get(2)?,
artist: row
.get::<_, Option<String>>(3)?
.unwrap_or_else(|| "Unknown Artist".into()),
releases: vec![],
score: 100,
})
})
.map_err(|e| DataError::Other(e.to_string()))?
.filter_map(|r| r.ok())
.collect()
} else {
let artist_pattern = format!("%{artist}%");
let title_pattern = format!("%{title}%");
let mut stmt = conn
.prepare(
"SELECT r.mbid, r.title, r.artist_mbid, a.name
FROM mb_recordings r
LEFT JOIN mb_artists a ON r.artist_mbid = a.mbid
WHERE r.title LIKE ?1 COLLATE NOCASE
AND a.name LIKE ?2 COLLATE NOCASE
LIMIT 10",
)
.map_err(|e| DataError::Other(e.to_string()))?;
stmt.query_map(rusqlite::params![title_pattern, artist_pattern], |row| {
Ok(RecordingMatch {
mbid: row.get(0)?,
title: row.get(1)?,
artist_mbid: row.get(2)?,
artist: row
.get::<_, Option<String>>(3)?
.unwrap_or_else(|| "Unknown Artist".into()),
releases: vec![],
score: 100,
})
})
.map_err(|e| DataError::Other(e.to_string()))?
.filter_map(|r| r.ok())
.collect()
};
Ok(query)
}
async fn search_release(&self, artist: &str, album: &str) -> DataResult<Vec<ReleaseMatch>> {
let conn = self.conn.lock().unwrap();
let results = if artist.is_empty() {
let pattern = format!("%{album}%");
let mut stmt = conn
.prepare(
"SELECT r.mbid, r.title, r.artist_mbid, a.name, r.date
FROM mb_releases r
LEFT JOIN mb_artists a ON r.artist_mbid = a.mbid
WHERE r.title LIKE ?1 COLLATE NOCASE
LIMIT 10",
)
.map_err(|e| DataError::Other(e.to_string()))?;
stmt.query_map(rusqlite::params![pattern], |row| {
Ok(ReleaseMatch {
mbid: row.get(0)?,
title: row.get(1)?,
artist_mbid: row.get(2)?,
artist: row
.get::<_, Option<String>>(3)?
.unwrap_or_else(|| "Unknown Artist".into()),
date: row.get(4)?,
track_count: None,
score: 100,
})
})
.map_err(|e| DataError::Other(e.to_string()))?
.filter_map(|r| r.ok())
.collect()
} else {
let artist_pattern = format!("%{artist}%");
let album_pattern = format!("%{album}%");
let mut stmt = conn
.prepare(
"SELECT r.mbid, r.title, r.artist_mbid, a.name, r.date
FROM mb_releases r
LEFT JOIN mb_artists a ON r.artist_mbid = a.mbid
WHERE r.title LIKE ?1 COLLATE NOCASE
AND a.name LIKE ?2 COLLATE NOCASE
LIMIT 10",
)
.map_err(|e| DataError::Other(e.to_string()))?;
stmt.query_map(rusqlite::params![album_pattern, artist_pattern], |row| {
Ok(ReleaseMatch {
mbid: row.get(0)?,
title: row.get(1)?,
artist_mbid: row.get(2)?,
artist: row
.get::<_, Option<String>>(3)?
.unwrap_or_else(|| "Unknown Artist".into()),
date: row.get(4)?,
track_count: None,
score: 100,
})
})
.map_err(|e| DataError::Other(e.to_string()))?
.filter_map(|r| r.ok())
.collect()
};
Ok(results)
}
async fn get_recording(&self, mbid: &str) -> DataResult<RecordingDetails> {
let conn = self.conn.lock().unwrap();
let recording = conn.query_row(
"SELECT r.mbid, r.title, r.artist_mbid, r.duration_ms, a.name
FROM mb_recordings r
LEFT JOIN mb_artists a ON r.artist_mbid = a.mbid
WHERE r.mbid = ?1",
rusqlite::params![mbid],
|row| {
Ok(RecordingDetails {
mbid: row.get(0)?,
title: row.get(1)?,
artist_mbid: row.get(2)?,
duration_ms: row.get(3)?,
artist: row
.get::<_, Option<String>>(4)?
.unwrap_or_else(|| "Unknown Artist".into()),
releases: vec![],
genres: vec![],
secondary_artists: vec![],
})
},
);
match recording {
Ok(mut r) => {
// Fetch releases that contain this recording
let mut stmt = conn
.prepare(
"SELECT DISTINCT rel.mbid, rel.title, rel.date
FROM mb_tracks t
JOIN mb_releases rel ON t.release_mbid = rel.mbid
WHERE t.recording_mbid = ?1
LIMIT 10",
)
.map_err(|e| DataError::Other(e.to_string()))?;
r.releases = stmt
.query_map(rusqlite::params![mbid], |row| {
Ok(ReleaseRef {
mbid: row.get(0)?,
title: row.get(1)?,
date: row.get(2)?,
track_number: None,
})
})
.map_err(|e| DataError::Other(e.to_string()))?
.filter_map(|r| r.ok())
.collect();
Ok(r)
}
Err(rusqlite::Error::QueryReturnedNoRows) => Err(DataError::Other(format!(
"recording {mbid} not found locally"
))),
Err(e) => Err(DataError::Other(e.to_string())),
}
}
async fn search_artist(&self, query: &str, limit: u32) -> DataResult<Vec<ArtistSearchResult>> {
let conn = self.conn.lock().unwrap();
let pattern = format!("%{query}%");
let mut stmt = conn
.prepare(
"SELECT mbid, name, disambiguation, country, artist_type
FROM mb_artists
WHERE name LIKE ?1 COLLATE NOCASE
LIMIT ?2",
)
.map_err(|e| DataError::Other(e.to_string()))?;
let results: Vec<ArtistSearchResult> = stmt
.query_map(rusqlite::params![pattern, limit], |row| {
Ok(ArtistSearchResult {
mbid: row.get(0)?,
name: row.get(1)?,
disambiguation: row.get::<_, Option<String>>(2)?.filter(|s| !s.is_empty()),
country: row.get(3)?,
artist_type: row.get(4)?,
score: 100,
})
})
.map_err(|e| DataError::Other(e.to_string()))?
.filter_map(|r| r.ok())
.collect();
Ok(results)
}
async fn get_artist_releases(
&self,
artist_mbid: &str,
limit: u32,
) -> DataResult<Vec<DiscographyEntry>> {
let conn = self.conn.lock().unwrap();
let mut stmt = conn
.prepare(
"SELECT mbid, title, date, status
FROM mb_releases
WHERE artist_mbid = ?1
LIMIT ?2",
)
.map_err(|e| DataError::Other(e.to_string()))?;
let results: Vec<DiscographyEntry> = stmt
.query_map(rusqlite::params![artist_mbid, limit], |row| {
Ok(DiscographyEntry {
mbid: row.get(0)?,
title: row.get(1)?,
date: row.get(2)?,
release_type: row.get(3)?,
track_count: None,
})
})
.map_err(|e| DataError::Other(e.to_string()))?
.filter_map(|r| r.ok())
.collect();
Ok(results)
}
async fn get_release_tracks(&self, release_mbid: &str) -> DataResult<Vec<ReleaseTrack>> {
let conn = self.conn.lock().unwrap();
let mut stmt = conn
.prepare(
"SELECT recording_mbid, title, track_number, disc_number, duration_ms
FROM mb_tracks
WHERE release_mbid = ?1
ORDER BY disc_number, track_number",
)
.map_err(|e| DataError::Other(e.to_string()))?;
let tracks: Vec<ReleaseTrack> = stmt
.query_map(rusqlite::params![release_mbid], |row| {
Ok(ReleaseTrack {
recording_mbid: row.get(0)?,
title: row.get(1)?,
track_number: row.get(2)?,
disc_number: row.get(3)?,
duration_ms: row.get(4)?,
})
})
.map_err(|e| DataError::Other(e.to_string()))?
.filter_map(|r| r.ok())
.collect();
if tracks.is_empty() {
Err(DataError::Other(format!(
"no tracks found for release {release_mbid}"
)))
} else {
Ok(tracks)
}
}
async fn get_artist_release_groups(
&self,
artist_mbid: &str,
) -> DataResult<Vec<ReleaseGroupEntry>> {
let conn = self.conn.lock().unwrap();
let mut stmt = conn
.prepare(
"SELECT rg.mbid, rg.title, rg.primary_type, rg.secondary_types, rg.first_release_date,
(SELECT r.mbid FROM mb_releases r WHERE r.release_group_mbid = rg.mbid LIMIT 1) as first_release_mbid
FROM mb_release_groups rg
WHERE rg.artist_mbid = ?1
ORDER BY rg.first_release_date",
)
.map_err(|e| DataError::Other(e.to_string()))?;
let results: Vec<ReleaseGroupEntry> = stmt
.query_map(rusqlite::params![artist_mbid], |row| {
let secondary_types_json: Option<String> = row.get(3)?;
let secondary_types: Vec<String> = secondary_types_json
.and_then(|s| serde_json::from_str(&s).ok())
.unwrap_or_default();
Ok(ReleaseGroupEntry {
mbid: row.get(0)?,
title: row.get(1)?,
primary_type: row.get(2)?,
secondary_types,
first_release_date: row.get(4)?,
first_release_mbid: row.get(5)?,
})
})
.map_err(|e| DataError::Other(e.to_string()))?
.filter_map(|r| r.ok())
.collect();
Ok(results)
}
async fn resolve_release_from_group(&self, release_group_mbid: &str) -> DataResult<String> {
let conn = self.conn.lock().unwrap();
let result = conn.query_row(
"SELECT mbid FROM mb_releases WHERE release_group_mbid = ?1 LIMIT 1",
rusqlite::params![release_group_mbid],
|row| row.get::<_, String>(0),
);
match result {
Ok(mbid) => Ok(mbid),
Err(rusqlite::Error::QueryReturnedNoRows) => Err(DataError::Other(format!(
"no releases for release-group {release_group_mbid}"
))),
Err(e) => Err(DataError::Other(e.to_string())),
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::mb_import;

    /// Build an in-memory database populated with one artist, artist URL,
    /// release group, release, track, and recording, plus import metadata.
    fn setup_test_db() -> Connection {
        let conn = Connection::open_in_memory().unwrap();
        mb_import::create_schema(&conn).unwrap();
        // Insert test data
        conn.execute(
            "INSERT INTO mb_artists (mbid, name, sort_name, disambiguation, artist_type, country, begin_year) VALUES ('a-1', 'Test Artist', 'Artist, Test', 'test', 'Person', 'US', 1990)",
            [],
        ).unwrap();
        conn.execute(
            "INSERT INTO mb_artist_urls (artist_mbid, url, link_type) VALUES ('a-1', 'https://en.wikipedia.org/wiki/Test', 'wikipedia')",
            [],
        ).unwrap();
        conn.execute(
            "INSERT INTO mb_release_groups (mbid, title, artist_mbid, primary_type, secondary_types, first_release_date) VALUES ('rg-1', 'Test Album', 'a-1', 'Album', NULL, '2020-01-15')",
            [],
        ).unwrap();
        conn.execute(
            "INSERT INTO mb_releases (mbid, title, release_group_mbid, artist_mbid, date, country, status) VALUES ('r-1', 'Test Album', 'rg-1', 'a-1', '2020-01-15', 'US', 'Official')",
            [],
        ).unwrap();
        conn.execute(
            "INSERT INTO mb_tracks (release_mbid, recording_mbid, title, track_number, disc_number, duration_ms, position) VALUES ('r-1', 'rec-1', 'Track One', 1, 1, 240000, 1)",
            [],
        ).unwrap();
        conn.execute(
            "INSERT INTO mb_recordings (mbid, title, artist_mbid, duration_ms) VALUES ('rec-1', 'Track One', 'a-1', 240000)",
            [],
        ).unwrap();
        // Insert import metadata
        conn.execute(
            "INSERT INTO mb_import_meta (key, value) VALUES ('artist_count', '1')",
            [],
        )
        .unwrap();
        conn
    }

    /// Wrap the populated test database in a real fetcher. The private `conn`
    /// field is accessible here because this module is a child of `mb_local`,
    /// so tests can exercise the public API instead of re-running raw SQL.
    fn test_fetcher() -> LocalMusicBrainzFetcher {
        LocalMusicBrainzFetcher {
            conn: Mutex::new(setup_test_db()),
        }
    }

    #[test]
    fn test_is_available() {
        // One artist row is enough for the database to count as populated.
        assert!(test_fetcher().is_available());
    }

    #[test]
    fn test_get_artist_info_sync() {
        let info = test_fetcher().get_artist_info_sync("a-1").unwrap();
        assert_eq!(info.name, "Test Artist");
        assert_eq!(info.mbid.as_deref(), Some("a-1"));
        assert_eq!(info.disambiguation.as_deref(), Some("test"));
        assert_eq!(info.country.as_deref(), Some("US"));
        assert_eq!(info.begin_year.as_deref(), Some("1990"));
        assert_eq!(info.urls.len(), 1);
    }

    #[test]
    fn test_get_artist_info_sync_missing() {
        // Unknown MBIDs surface as errors, not panics or empty results.
        assert!(test_fetcher().get_artist_info_sync("nope").is_err());
    }

    #[test]
    fn test_resolve_release_from_group() {
        // The trait method is async; drive it with a minimal tokio runtime
        // (tokio is a dev-dependency with the full feature set).
        let fetcher = test_fetcher();
        let rt = tokio::runtime::Builder::new_current_thread()
            .build()
            .unwrap();
        let mbid = rt
            .block_on(fetcher.resolve_release_from_group("rg-1"))
            .unwrap();
        assert_eq!(mbid, "r-1");
    }
}