// Files
// web/src/routes/artists.rs
// 2026-03-24 11:38:07 -04:00
//
// 875 lines
// 29 KiB
// Rust

use actix_session::Session;
use actix_web::{HttpResponse, web};
use serde::{Deserialize, Serialize};
use shanty_data::{ArtistBioFetcher, ArtistImageFetcher, MetadataFetcher};
use shanty_db::entities::wanted_item::WantedStatus;
use shanty_db::queries;
use shanty_search::SearchProvider;
use crate::auth;
use crate::error::ApiError;
use crate::state::AppState;
/// Query-string parameters shared by paginated list endpoints.
#[derive(Deserialize)]
pub struct PaginationParams {
    /// Maximum number of rows to return (defaults to 50 via `default_limit`).
    #[serde(default = "default_limit")]
    limit: u64,
    /// Number of rows to skip before the first returned row (defaults to 0).
    #[serde(default)]
    offset: u64,
}

/// Default page size used when the client omits `limit`.
fn default_limit() -> u64 {
    50
}
/// JSON body for POST /artists — at least one of `name` or `mbid` must be set
/// (the handler rejects requests where both are `None`).
#[derive(Deserialize)]
pub struct AddArtistRequest {
    /// Artist name, used for lookup when no MBID is supplied.
    name: Option<String>,
    /// MusicBrainz artist ID, when already known by the client.
    mbid: Option<String>,
}
/// One row of the GET /artists response.
#[derive(Serialize)]
struct ArtistListItem {
    id: i32,
    name: String,
    musicbrainz_id: Option<String>,
    monitored: bool,
    // Totals come either from the cached `artist_totals:{id}` entry written by
    // `enrich_artist`, or from a fallback count of the artist's wanted items.
    total_watched: usize,
    total_owned: usize,
    total_items: usize,
}
/// Cached tracklist for a release group, stored as JSON under the
/// `artist_rg_tracks:{release_group_id}` cache key.
#[derive(Serialize, Deserialize, Clone)]
struct CachedAlbumTracks {
    /// The concrete release MBID the tracklist was read from.
    release_mbid: String,
    tracks: Vec<CachedTrack>,
}

/// Minimal per-track data kept in the cache for wanted-item matching.
#[derive(Serialize, Deserialize, Clone)]
struct CachedTrack {
    recording_mbid: String,
    title: String,
}
/// Per-album entry in the enriched artist payload (GET /artists/{id}/full).
#[derive(Serialize)]
struct FullAlbumInfo {
    /// Release MBID when resolved; falls back to the release-group MBID for
    /// quick-mode / error / featured entries.
    mbid: String,
    title: String,
    release_type: Option<String>,
    /// First release date string as reported by MusicBrainz.
    date: Option<String>,
    /// 0 when tracks were not fetched (quick mode, fetch error, featured).
    track_count: u32,
    /// Matching local album ID, when a local album with the same title exists.
    local_album_id: Option<i32>,
    watched_tracks: u32,
    owned_tracks: u32,
    downloaded_tracks: u32,
    total_local_tracks: u32,
    /// One of "owned", "partial", "wanted", "unwatched", or "featured".
    status: String,
}
/// Register all /artists routes on the service config.
///
/// Registration order matters here: the literal `/artists/{id}/full` and
/// `/artists/{id}/monitor` resources are registered before the generic
/// `/artists/{id}` resource so that `full`/`monitor` path segments are not
/// captured by the `{id}` pattern (actix matches resources in registration
/// order).
pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::resource("/artists")
            .route(web::get().to(list_artists))
            .route(web::post().to(add_artist)),
    )
    .service(web::resource("/artists/{id}/full").route(web::get().to(get_artist_full)))
    .service(
        web::resource("/artists/{id}/monitor")
            .route(web::post().to(set_monitored))
            .route(web::delete().to(unset_monitored)),
    )
    .service(
        web::resource("/artists/{id}")
            .route(web::get().to(get_artist))
            .route(web::delete().to(delete_artist)),
    );
}
/// GET /artists — list artists with aggregate watched/owned totals.
///
/// Totals prefer the cached artist-level figures written by `enrich_artist`
/// under `artist_totals:{id}` (a JSON array `[available, watched, owned]`);
/// when no cache entry exists, fall back to counting the artist's wanted
/// items directly.
async fn list_artists(
    state: web::Data<AppState>,
    session: Session,
    query: web::Query<PaginationParams>,
) -> Result<HttpResponse, ApiError> {
    auth::require_auth(&session)?;
    let artists = queries::artists::list(state.db.conn(), query.limit, query.offset).await?;
    let wanted = queries::wanted::list(state.db.conn(), None, None).await?;
    let mut items: Vec<ArtistListItem> = Vec::with_capacity(artists.len());
    for a in &artists {
        // Check if we have cached artist-level totals from a prior detail page load
        let cache_key = format!("artist_totals:{}", a.id);
        let cached_totals: Option<(u32, u32, u32)> =
            if let Ok(Some(json)) = queries::cache::get(state.db.conn(), &cache_key).await {
                serde_json::from_str(&json).ok()
            } else {
                None
            };
        let (total_watched, total_owned, total_items) =
            if let Some((avail, watched, owned)) = cached_totals {
                // Cache stores (available, watched, owned); the list item wants
                // (watched, owned, available).
                (watched as usize, owned as usize, avail as usize)
            } else {
                // Fall back to wanted item counts. Count lazily here instead of
                // materializing a per-artist Vec — on a cache hit no filtering
                // work is done at all.
                let total_items = wanted
                    .iter()
                    .filter(|w| w.artist_id == Some(a.id))
                    .count();
                let total_owned = wanted
                    .iter()
                    .filter(|w| w.artist_id == Some(a.id) && w.status == WantedStatus::Owned)
                    .count();
                // With no track-level data, every wanted item counts as watched.
                (total_items, total_owned, total_items)
            };
        items.push(ArtistListItem {
            id: a.id,
            name: a.name.clone(),
            musicbrainz_id: a.musicbrainz_id.clone(),
            monitored: a.monitored,
            total_watched,
            total_owned,
            total_items,
        });
    }
    Ok(HttpResponse::Ok().json(items))
}
/// GET /artists/{id} — fetch a local artist (numeric ID only) plus its albums.
///
/// Non-numeric path segments (MBIDs) are rejected; clients should use
/// `/artists/{id}/full` for MBID lookups.
async fn get_artist(
    state: web::Data<AppState>,
    session: Session,
    path: web::Path<String>,
) -> Result<HttpResponse, ApiError> {
    auth::require_auth(&session)?;
    let id_or_mbid = path.into_inner();
    // Guard clause: anything that doesn't parse as a local ID is an MBID.
    let Ok(id) = id_or_mbid.parse::<i32>() else {
        return Err(ApiError::BadRequest(
            "use /artists/{id}/full for MBID lookups".into(),
        ));
    };
    let artist = queries::artists::get_by_id(state.db.conn(), id).await?;
    let albums = queries::albums::get_by_artist(state.db.conn(), id).await?;
    Ok(HttpResponse::Ok().json(serde_json::json!({
        "artist": artist,
        "albums": albums,
    })))
}
/// Fetch (or retrieve from cache) the tracklist for a release group.
/// Cache key: `artist_rg_tracks:{release_group_id}`
///
/// * `first_release_id` — when known, skips the release-group → release
///   resolution step (and its MusicBrainz call).
/// * `ttl_seconds` — cache lifetime applied when a tracklist is (re)written.
/// * `extend_ttl` — when true, a cache hit is re-written with `ttl_seconds`,
///   upgrading a short-lived browse cache to a longer-lived one.
async fn get_cached_album_tracks(
    state: &AppState,
    rg_id: &str,
    first_release_id: Option<&str>,
    ttl_seconds: i64,
    extend_ttl: bool,
) -> Result<CachedAlbumTracks, ApiError> {
    let cache_key = format!("artist_rg_tracks:{rg_id}");
    // Check cache first
    if let Some(json) = queries::cache::get(state.db.conn(), &cache_key)
        .await
        .map_err(|e| ApiError::Internal(e.to_string()))?
        && let Ok(cached) = serde_json::from_str::<CachedAlbumTracks>(&json)
    {
        // Extend TTL if artist is now watched (upgrades 7-day browse cache to permanent)
        if extend_ttl {
            // Best-effort refresh: a failed TTL update is not fatal.
            let _ = queries::cache::set(
                state.db.conn(),
                &cache_key,
                "musicbrainz",
                &json,
                ttl_seconds,
            )
            .await;
        }
        return Ok(cached);
    }
    // Not cached — resolve release MBID and fetch tracks
    let release_mbid = if let Some(rid) = first_release_id {
        rid.to_string()
    } else {
        // Check DB cache for previously resolved release MBID
        let resolve_cache_key = format!("release_for_rg:{rg_id}");
        if let Ok(Some(cached_rid)) = queries::cache::get(state.db.conn(), &resolve_cache_key).await
        {
            cached_rid
        } else {
            // Browse releases for this release group (through shared rate limiter)
            let rid = state
                .mb_client
                .resolve_release_from_group(rg_id)
                .await
                .map_err(|e| ApiError::Internal(format!("MB error for group {rg_id}: {e}")))?;
            // Cache the resolved release MBID for 365 days — it never changes
            let _ = queries::cache::set(
                state.db.conn(),
                &resolve_cache_key,
                "musicbrainz",
                &rid,
                365 * 86400,
            )
            .await;
            rid
        }
    };
    let mb_tracks = state
        .mb_client
        .get_release_tracks(&release_mbid)
        .await
        .map_err(|e| ApiError::Internal(format!("MB error for release {release_mbid}: {e}")))?;
    // Reduce the MusicBrainz payload to the minimal fields needed for matching.
    let cached = CachedAlbumTracks {
        release_mbid: release_mbid.clone(),
        tracks: mb_tracks
            .into_iter()
            .map(|t| CachedTrack {
                recording_mbid: t.recording_mbid,
                title: t.title,
            })
            .collect(),
    };
    // Cache with caller-specified TTL
    let json = serde_json::to_string(&cached).map_err(|e| ApiError::Internal(e.to_string()))?;
    let _ = queries::cache::set(
        state.db.conn(),
        &cache_key,
        "musicbrainz",
        &json,
        ttl_seconds,
    )
    .await;
    Ok(cached)
}
/// Query parameters for GET /artists/{id}/full.
#[derive(Deserialize)]
pub struct ArtistFullParams {
    /// When true, skip per-album track fetches for artists with no wanted
    /// items (fast listing without counts).
    #[serde(default)]
    quick: bool,
}
/// GET /artists/{id}/full — return the fully enriched artist payload.
///
/// Accepts either a local numeric ID or a MusicBrainz MBID in the path;
/// `?quick=true` requests the fast path (see [`enrich_artist`]).
async fn get_artist_full(
    state: web::Data<AppState>,
    session: Session,
    path: web::Path<String>,
    query: web::Query<ArtistFullParams>,
) -> Result<HttpResponse, ApiError> {
    auth::require_auth(&session)?;
    let enriched = enrich_artist(&state, &path.into_inner(), query.quick).await?;
    Ok(HttpResponse::Ok().json(enriched))
}
/// Enrich an artist's data: fetch release groups, track lists, compute totals.
/// Can be called from HTTP handlers or background tasks.
///
/// * `id_or_mbid` — local numeric artist ID or a MusicBrainz MBID.
/// * `quick_mode` — when true and the artist has no wanted items, per-album
///   MusicBrainz track fetches are skipped and albums are returned with zero
///   counts and status "unwatched".
///
/// Returns a JSON object containing the artist, filtered albums with
/// per-album watch/own counts, featured appearances, deduplicated
/// artist-level totals, and photo/bio/banner enrichment.
pub async fn enrich_artist(
    state: &AppState,
    id_or_mbid: &str,
    quick_mode: bool,
) -> Result<serde_json::Value, ApiError> {
    // Resolve artist: local ID or MBID
    // Track whether we already fetched artist info during resolution to avoid a duplicate API call
    let (artist, id, mbid, prefetched_info) = if let Ok(local_id) = id_or_mbid.parse() {
        let artist = queries::artists::get_by_id(state.db.conn(), local_id).await?;
        let mbid = match &artist.musicbrainz_id {
            Some(m) => m.clone(),
            None => {
                // No stored MBID — fall back to a name search and take the top hit.
                let results = state
                    .search
                    .search_artist(&artist.name, 1)
                    .await
                    .map_err(|e| ApiError::Internal(e.to_string()))?;
                results.into_iter().next().map(|a| a.id).ok_or_else(|| {
                    ApiError::NotFound(format!("no MBID for artist '{}'", artist.name))
                })?
            }
        };
        (artist, Some(local_id), mbid, None)
    } else {
        let mbid = id_or_mbid.to_string();
        // Direct MBID lookup — first check local DB, then MusicBrainz
        let local = {
            // Check if any local artist has this MBID
            // NOTE(review): only the first 1000 artists are scanned — confirm
            // this bound is acceptable for large libraries.
            let all = queries::artists::list(state.db.conn(), 1000, 0).await?;
            all.into_iter()
                .find(|a| a.musicbrainz_id.as_deref() == Some(&mbid))
        };
        if let Some(a) = local {
            let local_id = a.id;
            (a, Some(local_id), mbid, None)
        } else {
            // Look up artist info from MusicBrainz by MBID — don't create a local record
            // This fetches url-rels too, so we reuse it below instead of calling get_artist_info() again
            let info =
                state.mb_client.get_artist_info(&mbid).await.map_err(|e| {
                    ApiError::NotFound(format!("artist MBID {mbid} not found: {e}"))
                })?;
            // Create a synthetic artist object for display only (not saved to DB)
            let synthetic = shanty_db::entities::artist::Model {
                id: 0, // id 0 marks a non-persisted artist; `id` stays None below
                name: info.name.clone(),
                musicbrainz_id: Some(mbid.clone()),
                added_at: chrono::Utc::now().naive_utc(),
                top_songs: "[]".to_string(),
                similar_artists: "[]".to_string(),
                monitored: false,
                last_checked_at: None,
            };
            (synthetic, None, mbid, Some(info))
        }
    };
    // Fetch detailed artist info (country, type, URLs) — reuse if already fetched during resolution
    let artist_info = if let Some(info) = prefetched_info {
        tracing::debug!(
            mbid = %mbid,
            urls = info.urls.len(),
            country = ?info.country,
            "reusing prefetched artist info"
        );
        Some(info)
    } else {
        match state.mb_client.get_artist_info(&mbid).await {
            Ok(info) => {
                tracing::debug!(
                    mbid = %mbid,
                    urls = info.urls.len(),
                    country = ?info.country,
                    "fetched artist info"
                );
                Some(info)
            }
            Err(e) => {
                // Non-fatal: enrichment continues without info (photo/bio skip).
                tracing::warn!(mbid = %mbid, error = %e, "failed to fetch artist info");
                None
            }
        }
    };
    // Fetch artist photo + bio + banner (cached, provider-aware)
    // Copy provider settings out of the config lock, then release it before
    // the long-running fetches below.
    let config = state.config.read().await;
    let image_source = config.metadata.artist_image_source.clone();
    let bio_source = config.metadata.artist_bio_source.clone();
    let lastfm_api_key = config.metadata.lastfm_api_key.clone();
    let fanart_api_key = config.metadata.fanart_api_key.clone();
    drop(config);
    let (artist_photo, artist_bio, artist_banner) = fetch_artist_enrichment(
        state,
        &mbid,
        &artist_info,
        &image_source,
        &bio_source,
        lastfm_api_key.as_deref(),
        fanart_api_key.as_deref(),
    )
    .await;
    tracing::debug!(mbid = %mbid, has_photo = artist_photo.is_some(), has_bio = artist_bio.is_some(), has_banner = artist_banner.is_some(), "artist enrichment data");
    // Fetch release groups and split into primary vs featured
    let all_release_groups = state
        .search
        .get_release_groups(&mbid)
        .await
        .map_err(|e| ApiError::Internal(e.to_string()))?;
    let allowed = state.config.read().await.allowed_secondary_types.clone();
    let (primary_rgs, featured_rgs): (Vec<_>, Vec<_>) =
        all_release_groups.into_iter().partition(|rg| !rg.featured);
    let release_groups: Vec<_> = primary_rgs
        .into_iter()
        .filter(|rg| {
            if rg.secondary_types.is_empty() {
                true // Pure studio releases always included
            } else {
                // Include if ALL of the release group's secondary types are in the allowed list
                rg.secondary_types.iter().all(|st| allowed.contains(st))
            }
        })
        .collect();
    // Featured release groups — just pass through with type filtering
    let featured_albums: Vec<FullAlbumInfo> = featured_rgs
        .iter()
        .filter(|rg| {
            if rg.secondary_types.is_empty() {
                true
            } else {
                rg.secondary_types.iter().all(|st| allowed.contains(st))
            }
        })
        .map(|rg| FullAlbumInfo {
            mbid: rg.first_release_id.clone().unwrap_or_else(|| rg.id.clone()),
            title: rg.title.clone(),
            release_type: rg.primary_type.clone(),
            date: rg.first_release_date.clone(),
            track_count: 0,
            local_album_id: None,
            watched_tracks: 0,
            owned_tracks: 0,
            downloaded_tracks: 0,
            total_local_tracks: 0,
            status: "featured".to_string(),
        })
        .collect();
    // Get all wanted items for this artist
    let all_wanted = queries::wanted::list(state.db.conn(), None, None).await?;
    let artist_wanted: Vec<_> = all_wanted
        .iter()
        .filter(|w| id.is_some() && w.artist_id == id)
        .collect();
    // Build a set of wanted item recording MBIDs and their statuses for fast lookup (MBID only)
    let wanted_by_mbid: std::collections::HashMap<&str, &WantedStatus> = artist_wanted
        .iter()
        .filter_map(|w| w.musicbrainz_id.as_deref().map(|mbid| (mbid, &w.status)))
        .collect();
    // Get local albums
    let local_albums = if let Some(local_id) = id {
        queries::albums::get_by_artist(state.db.conn(), local_id).await?
    } else {
        vec![]
    };
    // Quick mode: if no wanted items and ?quick=true, skip per-album MB fetches
    let skip_track_fetch = quick_mode && artist_wanted.is_empty();
    // Build full album info — fetch tracklists (from cache or MB) for each release group
    // Deduplicate at the artist level:
    // - available: unique recording MBIDs across all releases
    // - watched/owned: unique wanted item MBIDs (so the same wanted item matching
    //   multiple recordings across releases only counts once)
    let mut seen_available: std::collections::HashSet<String> = std::collections::HashSet::new();
    let mut seen_watched: std::collections::HashSet<String> = std::collections::HashSet::new();
    let mut seen_owned: std::collections::HashSet<String> = std::collections::HashSet::new();
    let mut albums: Vec<FullAlbumInfo> = Vec::new();
    for rg in &release_groups {
        if skip_track_fetch {
            // Fast path: just list the release groups without track counts
            albums.push(FullAlbumInfo {
                mbid: rg.first_release_id.clone().unwrap_or_else(|| rg.id.clone()),
                title: rg.title.clone(),
                release_type: rg.primary_type.clone(),
                date: rg.first_release_date.clone(),
                track_count: 0,
                local_album_id: None,
                watched_tracks: 0,
                owned_tracks: 0,
                downloaded_tracks: 0,
                total_local_tracks: 0,
                status: "unwatched".to_string(),
            });
            continue;
        }
        // If artist has any watched items, cache permanently (10 years);
        // otherwise cache for 7 days (just browsing)
        let is_watched = !artist_wanted.is_empty();
        let cache_ttl = if is_watched {
            10 * 365 * 86400
        } else {
            7 * 86400
        };
        let cached = match get_cached_album_tracks(
            state,
            &rg.id,
            rg.first_release_id.as_deref(),
            cache_ttl,
            is_watched,
        )
        .await
        {
            Ok(c) => c,
            Err(e) => {
                tracing::warn!(rg_id = %rg.id, title = %rg.title, error = %e, "failed to fetch tracks");
                // Still show the album, just without track data
                albums.push(FullAlbumInfo {
                    mbid: rg.first_release_id.clone().unwrap_or_else(|| rg.id.clone()),
                    title: rg.title.clone(),
                    release_type: rg.primary_type.clone(),
                    date: rg.first_release_date.clone(),
                    track_count: 0,
                    local_album_id: None,
                    watched_tracks: 0,
                    owned_tracks: 0,
                    downloaded_tracks: 0,
                    total_local_tracks: 0,
                    status: "unwatched".to_string(),
                });
                continue;
            }
        };
        let track_count = cached.tracks.len() as u32;
        // Match each track against wanted items by recording MBID or title
        let mut watched: u32 = 0;
        let mut owned: u32 = 0;
        let mut downloaded: u32 = 0;
        for track in &cached.tracks {
            let rec_id = &track.recording_mbid;
            // Add to artist-level unique available set
            seen_available.insert(rec_id.clone());
            // Match by recording MBID only
            if let Some(s) = wanted_by_mbid.get(rec_id.as_str()) {
                watched += 1;
                seen_watched.insert(rec_id.clone());
                match s {
                    WantedStatus::Owned => {
                        owned += 1;
                        seen_owned.insert(rec_id.clone());
                    }
                    WantedStatus::Downloaded => {
                        downloaded += 1;
                    }
                    _ => {}
                }
            }
        }
        // Local album match (case-insensitive title comparison).
        let local = local_albums
            .iter()
            .find(|a| a.name.to_lowercase() == rg.title.to_lowercase());
        let local_album_id = local.map(|a| a.id);
        let local_tracks = if let Some(aid) = local_album_id {
            queries::tracks::get_by_album(state.db.conn(), aid)
                .await
                .unwrap_or_default()
                .len() as u32
        } else {
            0
        };
        // Status precedence: fully owned > partially owned/downloaded > wanted > unwatched.
        let status = if owned > 0 && owned >= track_count && track_count > 0 {
            "owned"
        } else if owned > 0 || downloaded > 0 {
            "partial"
        } else if watched > 0 {
            "wanted"
        } else {
            "unwatched"
        };
        albums.push(FullAlbumInfo {
            mbid: cached.release_mbid.clone(),
            title: rg.title.clone(),
            release_type: rg.primary_type.clone(),
            date: rg.first_release_date.clone(),
            track_count,
            local_album_id,
            watched_tracks: watched,
            owned_tracks: owned,
            downloaded_tracks: downloaded,
            total_local_tracks: local_tracks,
            status: status.to_string(),
        });
    }
    // Sort: owned first, then partial, then wanted, then unwatched; within each by date
    albums.sort_by(|a, b| {
        let order = |s: &str| match s {
            "owned" => 0,
            "partial" => 1,
            "wanted" => 2,
            _ => 3,
        };
        order(&a.status)
            .cmp(&order(&b.status))
            .then_with(|| a.date.cmp(&b.date))
    });
    // Deduplicated artist-level totals
    let total_available_tracks = seen_available.len() as u32;
    let total_artist_watched = seen_watched.len() as u32;
    let total_artist_owned = seen_owned.len() as u32;
    let artist_status = if total_artist_owned > 0
        && total_artist_owned >= total_available_tracks
        && total_available_tracks > 0
    {
        "owned"
    } else if total_artist_watched > 0 {
        "partial"
    } else {
        "unwatched"
    };
    // Cache artist-level totals for the library listing page
    // (read back by `list_artists` as a JSON array [available, watched, owned]).
    if !skip_track_fetch && let Some(local_id) = id {
        let cache_key = format!("artist_totals:{local_id}");
        let totals = serde_json::json!([
            total_available_tracks,
            total_artist_watched,
            total_artist_owned
        ]);
        let _ = queries::cache::set(
            state.db.conn(),
            &cache_key,
            "computed",
            &totals.to_string(),
            if artist_wanted.is_empty() {
                7 * 86400
            } else {
                10 * 365 * 86400
            },
        )
        .await;
    }
    Ok(serde_json::json!({
        "artist": artist,
        "albums": albums,
        "featured_albums": featured_albums,
        "artist_status": artist_status,
        "total_available_tracks": total_available_tracks,
        "total_watched_tracks": total_artist_watched,
        "total_owned_tracks": total_artist_owned,
        "enriched": !skip_track_fetch,
        "monitored": artist.monitored,
        "artist_info": artist_info,
        "artist_photo": artist_photo,
        "artist_bio": artist_bio,
        "artist_banner": artist_banner,
    }))
}
/// Enrich all watched artists in the background, updating their cached totals.
///
/// Returns the number of artists enriched successfully; individual failures
/// are logged and skipped.
pub async fn enrich_all_watched_artists(state: &AppState) -> Result<u32, ApiError> {
    let all_wanted = queries::wanted::list(state.db.conn(), None, None).await?;
    // Unique artist IDs with any wanted items, iterated in ascending order.
    let artist_ids: std::collections::BTreeSet<i32> =
        all_wanted.iter().filter_map(|w| w.artist_id).collect();
    let mut count = 0u32;
    for artist_id in artist_ids {
        match enrich_artist(state, &artist_id.to_string(), false).await {
            Ok(_) => count += 1,
            Err(e) => tracing::warn!(artist_id = artist_id, error = %e, "failed to enrich artist"),
        }
    }
    Ok(count)
}
/// Fetch artist photo, bio, and banner using configured providers, with per-source caching.
///
/// Returns `(photo_url, bio_text, banner_url)`; each element is `None` when
/// the provider is unconfigured, fails, or has no data. Cache keys embed the
/// source name so switching providers does not serve stale data. Only
/// successful lookups are cached (misses are retried on the next call).
async fn fetch_artist_enrichment(
    state: &AppState,
    mbid: &str,
    artist_info: &Option<shanty_data::ArtistInfo>,
    image_source: &str,
    bio_source: &str,
    lastfm_api_key: Option<&str>,
    fanart_api_key: Option<&str>,
) -> (Option<String>, Option<String>, Option<String>) {
    // Without base artist info (URLs etc.) no provider can do a lookup.
    let Some(info) = artist_info.as_ref() else {
        tracing::debug!(mbid = mbid, "no artist info for enrichment");
        return (None, None, None);
    };
    // Build fanart.tv fetcher once if needed (used for both image and banner)
    let fanart_fetcher = if image_source == "fanarttv" {
        fanart_api_key.and_then(|key| shanty_data::FanartTvFetcher::new(key.to_string()).ok())
    } else {
        None
    };
    // Fetch image (cached per source — only cache hits, not misses)
    let image_cache_key = format!("artist_image:{image_source}:{mbid}");
    let photo_url = if let Ok(Some(cached)) =
        queries::cache::get(state.db.conn(), &image_cache_key).await
        && !cached.is_empty()
    {
        Some(cached)
    } else {
        let url = match image_source {
            "wikipedia" => state
                .wiki_fetcher
                .get_artist_image(info)
                .await
                .unwrap_or(None),
            "fanarttv" => match &fanart_fetcher {
                Some(f) => f.get_artist_image(info).await.unwrap_or(None),
                None => {
                    tracing::warn!("fanart.tv selected but SHANTY_FANART_API_KEY not set");
                    None
                }
            },
            _ => None,
        };
        // Cache only successful lookups, for 30 days.
        if let Some(ref val) = url {
            let _ = queries::cache::set(
                state.db.conn(),
                &image_cache_key,
                image_source,
                val,
                30 * 86400,
            )
            .await;
        }
        url
    };
    // Fetch banner (cached per source — only for providers that support banners)
    let banner_cache_key = format!("artist_banner:{image_source}:{mbid}");
    let banner = if let Ok(Some(cached)) =
        queries::cache::get(state.db.conn(), &banner_cache_key).await
        && !cached.is_empty()
    {
        Some(cached)
    } else {
        // Only fanart.tv provides banners; other sources yield None.
        let url = match image_source {
            "fanarttv" => match &fanart_fetcher {
                Some(f) => f.get_artist_banner(info).await.unwrap_or(None),
                None => None,
            },
            _ => None,
        };
        if let Some(ref val) = url {
            let _ = queries::cache::set(
                state.db.conn(),
                &banner_cache_key,
                image_source,
                val,
                30 * 86400,
            )
            .await;
        }
        url
    };
    // Fetch bio (cached per source — only cache hits, not misses)
    let bio_cache_key = format!("artist_bio:{bio_source}:{mbid}");
    let bio = if let Ok(Some(cached)) = queries::cache::get(state.db.conn(), &bio_cache_key).await
        && !cached.is_empty()
    {
        Some(cached)
    } else {
        let text = match bio_source {
            "wikipedia" => state
                .wiki_fetcher
                .get_artist_bio(info)
                .await
                .unwrap_or(None),
            "lastfm" => {
                if let Some(key) = lastfm_api_key {
                    match shanty_data::LastFmBioFetcher::new(key.to_string()) {
                        Ok(fetcher) => fetcher.get_artist_bio(info).await.unwrap_or(None),
                        Err(e) => {
                            tracing::warn!(error = %e, "failed to create Last.fm fetcher");
                            None
                        }
                    }
                } else {
                    tracing::warn!("Last.fm bio source selected but SHANTY_LASTFM_API_KEY not set");
                    None
                }
            }
            _ => None,
        };
        if let Some(ref val) = text {
            let _ =
                queries::cache::set(state.db.conn(), &bio_cache_key, bio_source, val, 30 * 86400)
                    .await;
        }
        text
    };
    (photo_url, bio, banner)
}
async fn add_artist(
state: web::Data<AppState>,
session: Session,
body: web::Json<AddArtistRequest>,
) -> Result<HttpResponse, ApiError> {
let (user_id, _, _) = auth::require_auth(&session)?;
if body.name.is_none() && body.mbid.is_none() {
return Err(ApiError::BadRequest("provide name or mbid".into()));
}
let summary = shanty_watch::add_artist(
state.db.conn(),
body.name.as_deref(),
body.mbid.as_deref(),
&state.mb_client,
Some(user_id),
)
.await?;
// Enrich the newly watched artist in the background so library totals are populated
if let Some(ref mbid) = body.mbid {
let state = state.clone();
let mbid = mbid.clone();
tokio::spawn(async move {
let _ = enrich_artist(&state, &mbid, false).await;
});
}
Ok(HttpResponse::Ok().json(serde_json::json!({
"tracks_added": summary.tracks_added,
"tracks_already_owned": summary.tracks_already_owned,
"errors": summary.errors,
})))
}
/// DELETE /artists/{id} — remove a local artist record (admin only).
async fn delete_artist(
    state: web::Data<AppState>,
    session: Session,
    path: web::Path<i32>,
) -> Result<HttpResponse, ApiError> {
    auth::require_admin(&session)?;
    queries::artists::delete(state.db.conn(), path.into_inner()).await?;
    Ok(HttpResponse::NoContent().finish())
}
/// Shared implementation for POST/DELETE /artists/{id}/monitor.
///
/// Requires an authenticated session, flips the artist's monitored flag to
/// `monitored`, and returns the updated `{id, name, monitored}` JSON.
async fn update_monitored_flag(
    state: &AppState,
    session: &Session,
    id: i32,
    monitored: bool,
) -> Result<HttpResponse, ApiError> {
    auth::require_auth(session)?;
    let artist = queries::artists::set_monitored(state.db.conn(), id, monitored).await?;
    Ok(HttpResponse::Ok().json(serde_json::json!({
        "id": artist.id,
        "name": artist.name,
        "monitored": artist.monitored,
    })))
}

/// POST /artists/{id}/monitor — enable monitoring for an artist.
async fn set_monitored(
    state: web::Data<AppState>,
    session: Session,
    path: web::Path<i32>,
) -> Result<HttpResponse, ApiError> {
    update_monitored_flag(&state, &session, path.into_inner(), true).await
}

/// DELETE /artists/{id}/monitor — disable monitoring for an artist.
async fn unset_monitored(
    state: web::Data<AppState>,
    session: Session,
    path: web::Path<i32>,
) -> Result<HttpResponse, ApiError> {
    update_monitored_flag(&state, &session, path.into_inner(), false).await
}