Hopefully a fix for the enrich pipeline sometimes not running: pipeline completion is now detected by any worker that finishes an item with no downstream tasks (via the new `trigger_pipeline_completion` helper), instead of only inside `process_organize`/`process_enrich`, which could miss the completion when the last item finished elsewhere.

This commit is contained in:
Connor Johnstone
2026-03-25 13:37:29 -04:00
parent 7827841de6
commit b59bf4cc5d

View File

@@ -153,6 +153,7 @@ fn spawn_worker(state: web::Data<AppState>, task_type: WorkTaskType, concurrency
tracing::error!(id = item_id, error = %e, "failed to mark work item complete"); tracing::error!(id = item_id, error = %e, "failed to mark work item complete");
} }
// Enqueue downstream items // Enqueue downstream items
let has_downstream = !downstream.is_empty();
for (task_type, payload) in downstream { for (task_type, payload) in downstream {
if let Err(e) = queries::work_queue::enqueue( if let Err(e) = queries::work_queue::enqueue(
state.db.conn(), state.db.conn(),
@@ -169,6 +170,21 @@ fn spawn_worker(state: web::Data<AppState>, task_type: WorkTaskType, concurrency
} }
state.workers.notify(task_type); state.workers.notify(task_type);
} }
// Check pipeline completion when this item had no downstream
// (terminal step or empty result). Any worker can trigger this.
if !has_downstream
&& item_task_type != WorkTaskType::Enrich
&& let Some(ref pid) = pipeline_id
&& let Ok(true) = queries::work_queue::pipeline_is_complete(
state.db.conn(),
pid,
None, // item already marked complete
)
.await
{
trigger_pipeline_completion(&state, pid).await;
}
} }
Err(error) => { Err(error) => {
tracing::warn!( tracing::warn!(
@@ -464,13 +480,13 @@ async fn process_organize(
// Promote this track's wanted item from Downloaded to Owned // Promote this track's wanted item from Downloaded to Owned
let _ = queries::wanted::promote_downloaded_to_owned(conn).await; let _ = queries::wanted::promote_downloaded_to_owned(conn).await;
// Check if pipeline is complete — run cleanup then create enrich work items Ok(vec![])
// Exclude this item from the check since it's still "running" in the DB }
let mut downstream = Vec::new();
if let Some(ref pipeline_id) = item.pipeline_id /// Called when all non-Enrich items in a pipeline are complete.
&& let Ok(true) = /// Runs cleanup and creates Enrich work items for each watched artist.
queries::work_queue::pipeline_is_complete(conn, pipeline_id, Some(item.id)).await async fn trigger_pipeline_completion(state: &web::Data<AppState>, pipeline_id: &str) {
{ let conn = state.db.conn();
tracing::info!(pipeline_id = %pipeline_id, "pipeline complete, running cleanup"); tracing::info!(pipeline_id = %pipeline_id, "pipeline complete, running cleanup");
// Cleanup: remove orphaned tracks, empty albums, unused artists // Cleanup: remove orphaned tracks, empty albums, unused artists
@@ -500,11 +516,18 @@ async fn process_organize(
for artist_id in &artist_ids { for artist_id in &artist_ids {
let payload = serde_json::json!({"artist_id": artist_id}); let payload = serde_json::json!({"artist_id": artist_id});
downstream.push((WorkTaskType::Enrich, payload.to_string())); if let Err(e) = queries::work_queue::enqueue(
conn,
WorkTaskType::Enrich,
&payload.to_string(),
Some(pipeline_id),
)
.await
{
tracing::error!(error = %e, "failed to enqueue enrich work item");
} }
} }
state.workers.notify(WorkTaskType::Enrich);
Ok(downstream)
} }
async fn process_enrich( async fn process_enrich(
@@ -522,14 +545,5 @@ async fn process_enrich(
.await .await
.map_err(|e| e.to_string())?; .map_err(|e| e.to_string())?;
// If this pipeline is fully done, clear its completed items
if let Some(ref pipeline_id) = item.pipeline_id
&& let Ok(true) =
queries::work_queue::pipeline_is_complete(state.db.conn(), pipeline_id, Some(item.id))
.await
{
let _ = queries::work_queue::clear_pipeline(state.db.conn(), pipeline_id).await;
}
Ok(vec![]) Ok(vec![])
} }