Add a background task that downloads and re-imports the MusicBrainz database weekly. Trade-off: fresh local metadata at the cost of large periodic dump downloads.
This commit is contained in:
120
src/mb_update.rs
Normal file
120
src/mb_update.rs
Normal file
@@ -0,0 +1,120 @@
|
||||
//! Background task that periodically re-imports the MusicBrainz database.
|
||||
|
||||
use std::time::Duration;
|
||||
|
||||
use actix_web::web;
|
||||
|
||||
use crate::state::AppState;
|
||||
|
||||
/// Spawn the weekly MB database update loop.
|
||||
///
|
||||
/// Only runs if a local MB database exists (meaning the user has done an initial import).
|
||||
/// Downloads fresh dumps and re-imports weekly.
|
||||
pub fn spawn(state: web::Data<AppState>) {
|
||||
tokio::spawn(async move {
|
||||
// Wait 1 hour after startup before first check
|
||||
tokio::time::sleep(Duration::from_secs(3600)).await;
|
||||
|
||||
loop {
|
||||
// Check if local DB exists and auto-update is desired
|
||||
let has_local = state.mb_client.has_local_db();
|
||||
if !has_local {
|
||||
// No local DB — sleep a day and check again
|
||||
tokio::time::sleep(Duration::from_secs(86400)).await;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check how old the import is
|
||||
let needs_update = state
|
||||
.mb_client
|
||||
.local_stats()
|
||||
.and_then(|s| s.last_import_date)
|
||||
.map(|date| {
|
||||
// Parse the date and check if it's older than 7 days
|
||||
chrono::NaiveDate::parse_from_str(&date, "%Y-%m-%d")
|
||||
.map(|d| {
|
||||
let age = chrono::Utc::now().naive_utc().date() - d;
|
||||
age.num_days() >= 7
|
||||
})
|
||||
.unwrap_or(true) // If we can't parse the date, update
|
||||
})
|
||||
.unwrap_or(false); // No stats = no local DB = skip
|
||||
|
||||
if !needs_update {
|
||||
// Check again in 24 hours
|
||||
tokio::time::sleep(Duration::from_secs(86400)).await;
|
||||
continue;
|
||||
}
|
||||
|
||||
tracing::info!("starting weekly MusicBrainz database update");
|
||||
|
||||
let data_dir = shanty_config::data_dir().join("mb-dumps");
|
||||
let db_path = state
|
||||
.config
|
||||
.read()
|
||||
.await
|
||||
.musicbrainz
|
||||
.local_db_path
|
||||
.clone()
|
||||
.unwrap_or_else(|| shanty_config::data_dir().join("shanty-mb.db"));
|
||||
|
||||
// Download fresh dumps
|
||||
if let Err(e) = std::fs::create_dir_all(&data_dir) {
|
||||
tracing::error!(error = %e, "failed to create dump dir for MB update");
|
||||
tokio::time::sleep(Duration::from_secs(86400)).await;
|
||||
continue;
|
||||
}
|
||||
|
||||
let timestamp = match shanty_data::mb_import::discover_latest_dump_folder().await {
|
||||
Ok(t) => t,
|
||||
Err(e) => {
|
||||
tracing::error!(error = %e, "failed to discover latest MB dump");
|
||||
tokio::time::sleep(Duration::from_secs(86400)).await;
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
let mut download_failed = false;
|
||||
for filename in shanty_data::mb_import::DUMP_FILES {
|
||||
if let Err(e) =
|
||||
shanty_data::mb_import::download_dump(filename, ×tamp, &data_dir, |msg| {
|
||||
tracing::info!("{msg}");
|
||||
})
|
||||
.await
|
||||
{
|
||||
tracing::error!(file = filename, error = %e, "MB dump download failed");
|
||||
download_failed = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if download_failed {
|
||||
tokio::time::sleep(Duration::from_secs(86400)).await;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Run import in blocking task
|
||||
let result = tokio::task::spawn_blocking(move || {
|
||||
shanty_data::mb_import::run_import_at_path(&db_path, &data_dir, |msg| {
|
||||
tracing::info!("{msg}");
|
||||
})
|
||||
})
|
||||
.await;
|
||||
|
||||
match result {
|
||||
Ok(Ok(stats)) => {
|
||||
tracing::info!(%stats, "weekly MusicBrainz update complete");
|
||||
}
|
||||
Ok(Err(e)) => {
|
||||
tracing::error!(error = %e, "weekly MusicBrainz import failed");
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::error!(error = %e, "weekly MusicBrainz import task panicked");
|
||||
}
|
||||
}
|
||||
|
||||
// Sleep 7 days before next check
|
||||
tokio::time::sleep(Duration::from_secs(7 * 86400)).await;
|
||||
}
|
||||
});
|
||||
}
|
||||
Reference in New Issue
Block a user