Added the MusicBrainz DB download. Big upsides and downsides
@@ -24,6 +24,8 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(web::resource("/monitor/status").route(web::get().to(get_monitor_status)))
         .service(web::resource("/scheduler/skip-pipeline").route(web::post().to(skip_pipeline)))
         .service(web::resource("/scheduler/skip-monitor").route(web::post().to(skip_monitor)))
+        .service(web::resource("/mb-status").route(web::get().to(get_mb_status)))
+        .service(web::resource("/mb-import").route(web::post().to(trigger_mb_import)))
         .service(
             web::resource("/config")
                 .route(web::get().to(get_config))
@@ -327,3 +329,106 @@ async fn skip_monitor(
     sched.next_monitor = None;
     Ok(HttpResponse::Ok().json(serde_json::json!({"status": "skipped"})))
 }
+
+async fn get_mb_status(
+    state: web::Data<AppState>,
+    session: Session,
+) -> Result<HttpResponse, ApiError> {
+    auth::require_auth(&session)?;
+    let has_local = state.mb_client.has_local_db();
+    let stats = state.mb_client.local_stats();
+    Ok(HttpResponse::Ok().json(serde_json::json!({
+        "has_local_db": has_local,
+        "stats": stats,
+    })))
+}
+
+async fn trigger_mb_import(
+    state: web::Data<AppState>,
+    session: Session,
+) -> Result<HttpResponse, ApiError> {
+    auth::require_admin(&session)?;
+    let task_id = state.tasks.register("mb_import");
+    let tid = task_id.clone();
+    let config = state.config.read().await.clone();
+
+    tokio::spawn(async move {
+        state
+            .tasks
+            .update_progress(&tid, 0, 0, "Starting MusicBrainz import...");
+
+        let data_dir = shanty_config::data_dir().join("mb-dumps");
+        let db_path = config
+            .musicbrainz
+            .local_db_path
+            .clone()
+            .unwrap_or_else(|| shanty_config::data_dir().join("shanty-mb.db"));
+
+        // Download dumps
+        state
+            .tasks
+            .update_progress(&tid, 0, 4, "Downloading dumps...");
+        if let Err(e) = std::fs::create_dir_all(&data_dir) {
+            state
+                .tasks
+                .fail(&tid, format!("Failed to create data dir: {e}"));
+            return;
+        }
+
+        let timestamp = match shanty_data::mb_import::discover_latest_dump_folder().await {
+            Ok(t) => t,
+            Err(e) => {
+                state
+                    .tasks
+                    .fail(&tid, format!("Failed to discover latest dump: {e}"));
+                return;
+            }
+        };
+
+        for (i, filename) in shanty_data::mb_import::DUMP_FILES.iter().enumerate() {
+            state.tasks.update_progress(
+                &tid,
+                i as u64,
+                4 + 4, // 4 downloads + 4 imports
+                &format!("Downloading {filename}..."),
+            );
+            if let Err(e) =
+                shanty_data::mb_import::download_dump(filename, &timestamp, &data_dir, |_| {}).await
+            {
+                state
+                    .tasks
+                    .fail(&tid, format!("Failed to download {filename}: {e}"));
+                return;
+            }
+        }
+
+        // Run import
+        state
+            .tasks
+            .update_progress(&tid, 4, 8, "Importing into database...");
+
+        let tid_clone = tid.clone();
+        let state_clone = state.clone();
+        // Run import in blocking task since rusqlite is sync
+        let result = tokio::task::spawn_blocking(move || {
+            shanty_data::mb_import::run_import_at_path(&db_path, &data_dir, |msg| {
+                state_clone.tasks.update_progress(&tid_clone, 4, 8, msg);
+            })
+        })
+        .await;
+
+        match result {
+            Ok(Ok(stats)) => {
+                state.tasks.complete(&tid, format!("{stats}"));
+            }
+            Ok(Err(e)) => {
+                state.tasks.fail(&tid, format!("Import failed: {e}"));
+            }
+            Err(e) => {
+                state.tasks.fail(&tid, format!("Import task panicked: {e}"));
+            }
+        }
+    });
+
+    Ok(HttpResponse::Accepted().json(serde_json::json!({ "task_id": task_id })))
+}
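
For reference, a rough client-side sketch of exercising the two new endpoints. This is an illustration only, not part of the commit: it assumes the routes are mounted at the server root on localhost:8080, that a valid session cookie is already available (the handlers call auth::require_auth and auth::require_admin), and that reqwest, tokio, and serde_json are on hand.

use serde_json::Value;

// Hypothetical client sketch: the base URL, port, and cookie value are placeholders;
// the real scope prefix and auth flow are not shown in this diff.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let client = reqwest::Client::new();

    // POST /mb-import starts the background import and returns 202 Accepted with a task id.
    let started: Value = client
        .post("http://localhost:8080/mb-import")
        .header("Cookie", "session=<placeholder>")
        .send()
        .await?
        .json()
        .await?;
    println!("import task: {}", started["task_id"]);

    // GET /mb-status reports whether a local MusicBrainz DB exists and its stats.
    let status: Value = client
        .get("http://localhost:8080/mb-status")
        .header("Cookie", "session=<placeholder>")
        .send()
        .await?
        .json()
        .await?;
    println!("mb status: {status}");
    Ok(())
}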