Added the playlist generator
All checks were successful
CI / check (push) Successful in 1m12s
CI / docker (push) Successful in 2m1s

This commit is contained in:
Connor Johnstone
2026-03-20 18:09:47 -04:00
parent 4008b4d838
commit 6f73bb87ce
19 changed files with 1526 additions and 21 deletions

View File

@@ -2,3 +2,12 @@
//!
//! Generates playlists based on the indexed music library using strategies like
//! similar artists, genre matching, smart rules, and weighted random selection.
pub mod ordering;
pub mod scoring;
pub mod selection;
pub mod strategies;
pub mod types;
pub use strategies::{PlaylistError, genre_based, random, similar_artists, smart, to_m3u};
pub use types::{Candidate, PlaylistRequest, PlaylistResult, PlaylistTrack, SmartRules};

View File

@@ -0,0 +1,57 @@
use std::collections::BTreeMap;
use rand::prelude::*;
use crate::types::Candidate;
/// Reorder tracks so that artists are evenly spread out.
///
/// Greedy strategy: repeatedly take a track from the artist with the most
/// tracks still unplaced, skipping the artist that was just played whenever
/// any alternative exists. Ported faithfully from drift's interleave_artists().
pub fn interleave_artists(tracks: Vec<Candidate>) -> Vec<Candidate> {
    let mut rng = rand::rng();

    // Bucket tracks per artist; BTreeMap keeps artist iteration deterministic.
    let mut remaining: BTreeMap<String, Vec<Candidate>> = BTreeMap::new();
    for t in tracks {
        remaining.entry(t.artist.clone()).or_default().push(t);
    }
    // Randomize the order within each artist's bucket.
    remaining.values_mut().for_each(|bucket| bucket.shuffle(&mut rng));

    let mut ordered = Vec::new();
    let mut previous: Option<String> = None;
    while !remaining.is_empty() {
        // Artists sorted by descending remaining-track count; the stable sort
        // over BTreeMap key order breaks ties alphabetically.
        let mut names: Vec<String> = remaining.keys().cloned().collect();
        names.sort_by_key(|name| std::cmp::Reverse(remaining[name].len()));
        // Prefer any artist other than the one just played; if the only
        // option is a repeat, take it anyway.
        let chosen = names
            .iter()
            .find(|name| previous.as_deref() != Some(name.as_str()))
            .unwrap_or(&names[0])
            .clone();
        let bucket = remaining.get_mut(&chosen).unwrap();
        let next = bucket.pop().unwrap();
        if bucket.is_empty() {
            remaining.remove(&chosen);
        }
        previous = Some(chosen);
        ordered.push(next);
    }
    ordered
}
/// Full random shuffle of the track list.
pub fn shuffle(tracks: Vec<Candidate>) -> Vec<Candidate> {
    let mut shuffled = tracks;
    shuffled.shuffle(&mut rand::rng());
    shuffled
}

View File

@@ -0,0 +1,151 @@
use std::collections::HashMap;
use shanty_data::PopularTrack;
use shanty_db::entities::track::Model as Track;
use crate::types::ScoredTrack;
/// Popularity exponent curve (0-10 scale).
/// 0 = no preference, 10 = heavy popular bias.
/// Applied as `(playcount / max_playcount).powf(exp)`: exponents < 1 flatten
/// the curve (obscure tracks score closer to the hits), exponents > 1
/// sharpen it (hits dominate).
/// NOTE(review): the jump from 0.67 (bias 4) to 1.30 (bias 5) skips the
/// neutral exponent 1.0 — confirm this matches drift's original table.
const POPULARITY_EXPONENTS: [f64; 11] = [
    0.0, 0.06, 0.17, 0.33, 0.67, 1.30, 1.50, 1.70, 1.94, 2.22, 2.50,
];
/// Score all tracks for the given artists, returning scored tracks for ranking.
///
/// `artists` is a list of (mbid_or_name, display_name, similarity_score) tuples.
/// `tracks_by_artist` maps artist identifier -> their local tracks.
/// `top_tracks_by_artist` maps artist identifier -> their Last.fm top tracks.
/// `popularity_bias` (0-10, clamped) selects an exponent from
/// [`POPULARITY_EXPONENTS`].
///
/// Scoring steps:
/// 1. Per track: popularity (playcount relative to the artist's most-played
///    track, curved by the bias exponent) times similarity (exp-scaled match
///    score). Tracks without a playcount match are dropped when popularity
///    data exists for the artist; otherwise all tracks get uniform popularity.
/// 2. Per artist: tracks sorted by descending score and capped (cap shrinks
///    as the bias grows).
/// 3. Per artist: scores normalized so each artist's total weight equals its
///    similarity score.
pub fn score_tracks(
    artists: &[(String, String, f64)],
    tracks_by_artist: &HashMap<String, Vec<Track>>,
    top_tracks_by_artist: &HashMap<String, Vec<PopularTrack>>,
    popularity_bias: u8,
) -> Vec<ScoredTrack> {
    let bias = popularity_bias.min(10) as usize;
    let mut scored = Vec::new();
    for (artist_key, name, match_score) in artists {
        // Skip artists with no local tracks.
        let local_tracks = match tracks_by_artist.get(artist_key) {
            Some(t) if !t.is_empty() => t,
            _ => continue,
        };
        let top_tracks = top_tracks_by_artist
            .get(artist_key)
            .cloned()
            .unwrap_or_default();
        // Playcount lookup keyed by lowercase track name.
        let playcount_by_name: HashMap<String, u64> = top_tracks
            .iter()
            .map(|t| (t.name.to_lowercase(), t.playcount))
            .collect();
        let max_playcount = playcount_by_name
            .values()
            .copied()
            .max()
            .unwrap_or(1)
            .max(1); // guard against division by zero below
        // Similarity is constant per artist — hoisted out of the track loop
        // (was recomputed for every track).
        let similarity = match_score.exp() / std::f64::consts::E;
        for track in local_tracks {
            let title_lower = track.title.as_ref().map(|t| t.to_lowercase());
            let playcount = title_lower
                .as_ref()
                .and_then(|t| playcount_by_name.get(t).copied())
                .or_else(|| {
                    // NOTE(review): this looks up the track MBID in a map keyed
                    // by lowercase *names*, so it only matches if the provider
                    // returned the MBID as the name — confirm this fallback is
                    // intended.
                    track
                        .musicbrainz_id
                        .as_ref()
                        .and_then(|id| playcount_by_name.get(id).copied())
                });
            // If we have popularity data, require a match; otherwise assign a
            // uniform popularity of 1.0.
            let (popularity, score) = if !playcount_by_name.is_empty() {
                let Some(playcount) = playcount else {
                    continue;
                };
                let popularity = if playcount > 0 {
                    (playcount as f64 / max_playcount as f64).powf(POPULARITY_EXPONENTS[bias])
                } else {
                    0.0
                };
                (popularity, similarity * popularity)
            } else {
                // No top-tracks data — similarity-only scoring.
                (1.0, similarity)
            };
            scored.push(ScoredTrack {
                track_id: track.id,
                file_path: track.file_path.clone(),
                title: track.title.clone(),
                artist: name.clone(),
                // Was `track.artist_id.map(|_| key.clone()).or_else(|| Some(key.clone()))`,
                // which always produced Some(key) — simplified.
                artist_mbid: Some(artist_key.clone()),
                album: track.album.clone(),
                duration: track.duration,
                score,
                popularity,
                similarity,
            });
        }
    }
    // Step 1: Cap tracks per artist based on popularity bias.
    let mut by_artist: HashMap<String, Vec<ScoredTrack>> = HashMap::new();
    for t in scored {
        let key = t.artist_mbid.clone().unwrap_or_else(|| t.artist.clone());
        by_artist.entry(key).or_default().push(t);
    }
    let cap = if popularity_bias == 0 {
        None
    } else {
        // Piecewise-linear cap: ~77 tracks at bias 1 down to 10 at bias 10.
        let b = popularity_bias as f64;
        let c = if b <= 5.0 {
            90.0 - 12.8 * b
        } else {
            26.0 - 3.2 * (b - 5.0)
        };
        Some((c.round() as usize).max(1))
    };
    for group in by_artist.values_mut() {
        group.sort_by(|a, b| {
            b.score
                .partial_cmp(&a.score)
                .unwrap_or(std::cmp::Ordering::Equal)
        });
        if let Some(cap) = cap {
            group.truncate(cap);
        }
    }
    // Step 2: Normalize so each artist's total weight = their similarity.
    let similarity_map: HashMap<&str, f64> = artists
        .iter()
        .map(|(key, _, sim)| (key.as_str(), *sim))
        .collect();
    for (key, group) in &mut by_artist {
        let total: f64 = group.iter().map(|t| t.score).sum();
        if total > 0.0 {
            let sim = similarity_map.get(key.as_str()).copied().unwrap_or(1.0);
            for t in group.iter_mut() {
                t.score *= sim / total;
            }
        }
    }
    by_artist.into_values().flatten().collect()
}

View File

@@ -0,0 +1,95 @@
use std::collections::{HashMap, HashSet};
use rand::distr::weighted::WeightedIndex;
use rand::prelude::*;
use crate::types::Candidate;
/// Weighted random sampling with per-artist caps and seed enforcement.
///
/// Draws `n` tracks from `candidates` by weighted random sampling (weight =
/// score, floored at 0.001 so zero-score tracks remain drawable). A per-artist
/// cap — looser when few distinct artists are available — keeps any single
/// artist from dominating, and seed artists (matched by name) are guaranteed
/// at least ~10% of the playlist when their tracks are present.
/// Ported faithfully from drift's generate_playlist().
pub fn generate_playlist(
    candidates: &[Candidate],
    n: usize,
    seed_names: &HashSet<String>,
) -> Vec<Candidate> {
    if candidates.is_empty() {
        return Vec::new();
    }
    let mut rng = rand::rng();
    let mut pool: Vec<&Candidate> = candidates.iter().collect();
    let mut result: Vec<Candidate> = Vec::new();
    let mut artist_counts: HashMap<String, usize> = HashMap::new();
    // Seed artists get at least 10% of requested slots (minimum one).
    let seed_min = (n / 10).max(1);
    let distinct_artists = pool
        .iter()
        .map(|c| c.artist.as_str())
        .collect::<HashSet<_>>()
        .len();
    // Fewer distinct artists -> looser per-artist cap.
    let divisor = match distinct_artists {
        1 => 1,
        2 => 2,
        3 | 4 => 3,
        5 => 4,
        _ => 5,
    };
    let artist_cap = n.div_ceil(divisor).max(1);
    while result.len() < n && !pool.is_empty() {
        let seed_count: usize = seed_names
            .iter()
            .map(|name| artist_counts.get(name).copied().unwrap_or(0))
            .sum();
        let remaining = n - result.len();
        let seed_deficit = seed_min.saturating_sub(seed_count);
        // Once the remaining slots are all needed to satisfy the seed quota,
        // restrict the draw to seed-artist tracks.
        let force_seed = seed_deficit > 0 && remaining <= seed_deficit;
        let eligible: Vec<usize> = pool
            .iter()
            .enumerate()
            .filter(|(_, c)| {
                if force_seed {
                    seed_names.contains(&c.artist)
                } else {
                    artist_counts.get(&c.artist).copied().unwrap_or(0) < artist_cap
                }
            })
            .map(|(i, _)| i)
            .collect();
        // Fall back to the whole pool only when the filter excluded everything.
        // Built lazily — previously a full index vector was allocated on every
        // iteration whether needed or not.
        let indices: Vec<usize> = if eligible.is_empty() {
            (0..pool.len()).collect()
        } else {
            eligible
        };
        let weights: Vec<f64> = indices.iter().map(|&i| pool[i].score.max(0.001)).collect();
        let dist = match WeightedIndex::new(&weights) {
            Ok(d) => d,
            // All weights invalid (e.g. NaN scores) — stop sampling.
            Err(_) => break,
        };
        let picked = indices[dist.sample(&mut rng)];
        let track = pool.remove(picked);
        *artist_counts.entry(track.artist.clone()).or_insert(0) += 1;
        // Candidate derives Clone — no need to copy it field by field.
        result.push(track.clone());
    }
    result
}

View File

@@ -0,0 +1,490 @@
use std::collections::{HashMap, HashSet};
use sea_orm::DatabaseConnection;
use shanty_data::{PopularTrack, SimilarArtist, SimilarArtistFetcher};
use shanty_db::queries;
use crate::ordering;
use crate::scoring;
use crate::selection;
use crate::types::*;
/// Cache TTL: 7 days in seconds.
const CACHE_TTL: i64 = 7 * 24 * 3600;
/// Generate a playlist based on similar artists (the primary strategy).
///
/// Flow:
/// 1. For each seed artist: resolve MBID from DB
/// 2. Fetch similar artists (check cache first, else call Last.fm, cache result)
/// 3. Merge multi-seed: accumulate scores, normalize by seed count
/// 4. Filter: only keep artists that have tracks in the local library
/// 5. Score all tracks
/// 6. Select via weighted sampling
/// 7. Order (interleave or shuffle)
///
/// # Errors
/// Returns [`PlaylistError::InvalidInput`] when `seed_artists` is empty and
/// propagates DB errors from seed resolution. Similar-artist and top-track
/// fetch failures are swallowed (treated as empty) so one bad seed cannot
/// sink the whole playlist.
pub async fn similar_artists(
    conn: &DatabaseConnection,
    fetcher: &impl SimilarArtistFetcher,
    seed_artists: Vec<String>,
    count: usize,
    popularity_bias: u8,
    ordering: &str,
) -> Result<PlaylistResult, PlaylistError> {
    if seed_artists.is_empty() {
        return Err(PlaylistError::InvalidInput(
            "at least one seed artist is required".into(),
        ));
    }
    let num_seeds = seed_artists.len() as f64;
    // Merge similar artists from all seeds: key -> (name, total_score)
    let mut merged: HashMap<String, (String, f64)> = HashMap::new();
    // Track resolved seed names for enforcement (use DB names, not raw input)
    let mut resolved_seed_names: HashSet<String> = HashSet::new();
    for seed in &seed_artists {
        // Resolve the seed artist: try name lookup in DB
        let (artist_name, artist_mbid) = resolve_artist(conn, seed).await?;
        resolved_seed_names.insert(artist_name.clone());
        // Insert the seed itself with score 1.0 (a seed supplied twice sums
        // to 2.0 before normalization)
        let key = artist_mbid
            .clone()
            .unwrap_or_else(|| artist_name.to_lowercase());
        let entry = merged
            .entry(key)
            .or_insert_with(|| (artist_name.clone(), 0.0));
        entry.1 += 1.0;
        // Fetch similar artists (cached or fresh); failures degrade to empty
        let similar = fetch_cached_similar(conn, fetcher, &artist_name, artist_mbid.as_deref())
            .await
            .unwrap_or_default();
        for sa in similar {
            // Same keying rule as seeds: MBID when known, else lowercased name
            let sa_key = sa.mbid.clone().unwrap_or_else(|| sa.name.to_lowercase());
            let entry = merged
                .entry(sa_key)
                .or_insert_with(|| (sa.name.clone(), 0.0));
            entry.1 += sa.match_score;
        }
    }
    // Normalize scores by seed count so multi-seed runs stay comparable
    let artists: Vec<(String, String, f64)> = merged
        .into_iter()
        .map(|(key, (name, total))| (key, name, total / num_seeds))
        .collect();
    // Build track and top-track maps for scoring
    let mut tracks_by_artist: HashMap<String, Vec<shanty_db::entities::track::Model>> =
        HashMap::new();
    let mut top_tracks_by_artist: HashMap<String, Vec<PopularTrack>> = HashMap::new();
    for (key, name, _) in &artists {
        // Get local tracks for this artist; artists without local tracks are dropped
        let local = get_artist_tracks(conn, key, name).await;
        if local.is_empty() {
            continue;
        }
        tracks_by_artist.insert(key.clone(), local);
        // Get top tracks (cached or fresh); missing data means uniform scoring
        let top = fetch_cached_top_tracks(conn, fetcher, name, Some(key.as_str()))
            .await
            .unwrap_or_default();
        if !top.is_empty() {
            top_tracks_by_artist.insert(key.clone(), top);
        }
    }
    // Score
    let scored = scoring::score_tracks(
        &artists,
        &tracks_by_artist,
        &top_tracks_by_artist,
        popularity_bias,
    );
    // Convert to candidates
    let candidates: Vec<Candidate> = scored
        .into_iter()
        .map(|t| Candidate {
            score: t.score,
            artist: t.artist,
            artist_mbid: t.artist_mbid,
            track_id: t.track_id,
            file_path: t.file_path,
            title: t.title,
            album: t.album,
            duration: t.duration,
        })
        .collect();
    // Select (use resolved DB names for seed enforcement, not raw input)
    let selected = selection::generate_playlist(&candidates, count, &resolved_seed_names);
    // Order
    let ordered = apply_ordering(selected, ordering);
    Ok(PlaylistResult {
        tracks: candidates_to_tracks(ordered),
        strategy: "similar".to_string(),
        // NOTE(review): HashSet iteration order makes this list
        // nondeterministic across runs — confirm callers don't rely on order
        resolved_seeds: resolved_seed_names.into_iter().collect(),
    })
}
/// Generate a genre-based playlist.
///
/// Collects the union of tracks across all requested genres, deduplicates,
/// then selects uniformly at random and applies the requested ordering.
pub async fn genre_based(
    conn: &DatabaseConnection,
    genres: Vec<String>,
    count: usize,
    ordering: &str,
) -> Result<PlaylistResult, PlaylistError> {
    if genres.is_empty() {
        return Err(PlaylistError::InvalidInput(
            "at least one genre is required".into(),
        ));
    }
    // Gather tracks for every requested genre.
    let mut pool = Vec::new();
    for genre in &genres {
        pool.extend(
            queries::tracks::get_by_genre(conn, genre)
                .await
                .map_err(|e| PlaylistError::Db(e.to_string()))?,
        );
    }
    // Drop duplicates (a track may match several of the requested genres).
    let mut seen_ids = HashSet::new();
    pool.retain(|t| seen_ids.insert(t.id));
    // Every candidate gets the same weight — selection is uniform.
    let candidates: Vec<Candidate> = pool
        .into_iter()
        .map(|t| Candidate {
            score: 1.0,
            artist: t.artist.clone().unwrap_or_default(),
            artist_mbid: t.musicbrainz_id.clone(),
            track_id: t.id,
            file_path: t.file_path.clone(),
            title: t.title.clone(),
            album: t.album.clone(),
            duration: t.duration,
        })
        .collect();
    // No seed artists to enforce for genre playlists.
    let selected = selection::generate_playlist(&candidates, count, &HashSet::new());
    Ok(PlaylistResult {
        tracks: candidates_to_tracks(apply_ordering(selected, ordering)),
        strategy: "genre".to_string(),
        resolved_seeds: vec![],
    })
}
/// Generate a random playlist.
///
/// Over-fetches 2x the requested count so the artist-dedup pass (when
/// `no_repeat_artist` is set) still has enough tracks to choose from.
pub async fn random(
    conn: &DatabaseConnection,
    count: usize,
    no_repeat_artist: bool,
) -> Result<PlaylistResult, PlaylistError> {
    let pool = queries::tracks::get_random(conn, count as u64 * 2)
        .await
        .map_err(|e| PlaylistError::Db(e.to_string()))?;
    let mut picked = Vec::new();
    let mut used_artists: HashSet<String> = HashSet::new();
    for track in pool {
        if picked.len() >= count {
            break;
        }
        let artist = track.artist.clone().unwrap_or_default();
        // Tracks with an unknown (empty) artist are never deduplicated.
        if no_repeat_artist && !artist.is_empty() && !used_artists.insert(artist.clone()) {
            continue;
        }
        picked.push(PlaylistTrack {
            track_id: track.id,
            file_path: track.file_path.clone(),
            title: track.title.clone(),
            artist: track.artist.clone(),
            album: track.album.clone(),
            score: 0.0,
            duration: track.duration,
        });
    }
    Ok(PlaylistResult {
        tracks: picked,
        strategy: "random".to_string(),
        resolved_seeds: vec![],
    })
}
/// Generate a smart playlist based on rules.
///
/// Filter semantics as implemented:
/// - Genre and artist filters are additive (their result sets are concatenated).
/// - The recently-added filter intersects with prior results when any exist,
///   otherwise it becomes the starting set.
/// - The year-range filter always intersects; tracks with no year are dropped.
/// - With no filters at all, random tracks are fetched (2x over-fetch).
///
/// NOTE(review): the mixed union/intersection behavior between filters looks
/// deliberate but is worth confirming against the intended rule semantics.
pub async fn smart(
    conn: &DatabaseConnection,
    rules: SmartRules,
    count: usize,
) -> Result<PlaylistResult, PlaylistError> {
    let mut all_tracks: Vec<shanty_db::entities::track::Model> = Vec::new();
    // Genre filter (union across genres)
    if !rules.genres.is_empty() {
        for genre in &rules.genres {
            let tracks = queries::tracks::get_by_genre(conn, genre)
                .await
                .map_err(|e| PlaylistError::Db(e.to_string()))?;
            all_tracks.extend(tracks);
        }
    }
    // Artist filter (union across artists, and with the genre results)
    if !rules.artists.is_empty() {
        for artist_name in &rules.artists {
            let tracks = queries::tracks::get_by_artist_name(conn, artist_name)
                .await
                .map_err(|e| PlaylistError::Db(e.to_string()))?;
            all_tracks.extend(tracks);
        }
    }
    // Recently added filter (result capped at 10000 rows by the query)
    if let Some(days) = rules.added_within_days {
        let tracks = queries::tracks::get_recent(conn, days, 10000)
            .await
            .map_err(|e| PlaylistError::Db(e.to_string()))?;
        if all_tracks.is_empty() {
            // No earlier filters: recent tracks become the working set
            all_tracks = tracks;
        } else {
            // Earlier filters present: keep only tracks that are also recent
            let recent_ids: HashSet<i32> = tracks.iter().map(|t| t.id).collect();
            all_tracks.retain(|t| recent_ids.contains(&t.id));
        }
    }
    // Year range filter (inclusive; tracks without a year are excluded)
    if let Some((min_year, max_year)) = rules.year_range {
        all_tracks.retain(|t| {
            t.year
                .map(|y| y >= min_year && y <= max_year)
                .unwrap_or(false)
        });
    }
    // If no filters were specified, get random tracks
    if rules.genres.is_empty()
        && rules.artists.is_empty()
        && rules.added_within_days.is_none()
        && rules.year_range.is_none()
    {
        all_tracks = queries::tracks::get_random(conn, count as u64 * 2)
            .await
            .map_err(|e| PlaylistError::Db(e.to_string()))?;
    }
    // Deduplicate (genre/artist unions can produce the same track twice)
    let mut seen = HashSet::new();
    all_tracks.retain(|t| seen.insert(t.id));
    // Convert to candidates with uniform weight (no popularity scoring here)
    let candidates: Vec<Candidate> = all_tracks
        .into_iter()
        .map(|t| Candidate {
            score: 1.0,
            artist: t.artist.clone().unwrap_or_default(),
            artist_mbid: t.musicbrainz_id.clone(),
            track_id: t.id,
            file_path: t.file_path.clone(),
            title: t.title.clone(),
            album: t.album.clone(),
            duration: t.duration,
        })
        .collect();
    // No seed artists to enforce for smart playlists
    let seed_names = HashSet::new();
    let selected = selection::generate_playlist(&candidates, count, &seed_names);
    // Smart playlists always interleave artists (no ordering parameter)
    let ordered = ordering::interleave_artists(selected);
    Ok(PlaylistResult {
        tracks: candidates_to_tracks(ordered),
        strategy: "smart".to_string(),
        resolved_seeds: vec![],
    })
}
/// Generate an M3U playlist string from tracks.
///
/// Emits the `#EXTM3U` header, then one `#EXTINF` line plus a file path per
/// track. Missing artist/title fall back to "Unknown"; missing duration to 0.
pub fn to_m3u(tracks: &[PlaylistTrack]) -> String {
    tracks.iter().fold(String::from("#EXTM3U\n"), |mut m3u, t| {
        let secs = t.duration.unwrap_or(0.0) as i64;
        m3u.push_str(&format!(
            "#EXTINF:{},{} - {}\n{}\n",
            secs,
            t.artist.as_deref().unwrap_or("Unknown"),
            t.title.as_deref().unwrap_or("Unknown"),
            t.file_path
        ));
        m3u
    })
}
/// Apply the chosen ordering mode to selected candidates.
///
/// Modes: "random" (full shuffle), "score" (descending score), anything else
/// — including "interleave", the default — spreads artists evenly.
fn apply_ordering(candidates: Vec<Candidate>, mode: &str) -> Vec<Candidate> {
    if mode == "random" {
        return ordering::shuffle(candidates);
    }
    if mode == "score" {
        let mut sorted = candidates;
        // NaN-safe descending sort: incomparable scores compare equal.
        sorted.sort_by(|lhs, rhs| {
            rhs.score
                .partial_cmp(&lhs.score)
                .unwrap_or(std::cmp::Ordering::Equal)
        });
        return sorted;
    }
    ordering::interleave_artists(candidates)
}
// --- Helper functions ---
/// Resolve an artist name or MBID to (name, optional_mbid).
///
/// Resolution order: MBID track lookup (when the query is UUID-shaped), then
/// a name lookup in the artists table, then the raw query as a last resort.
async fn resolve_artist(
    conn: &DatabaseConnection,
    query: &str,
) -> Result<(String, Option<String>), PlaylistError> {
    // UUID-shaped queries (36 chars containing dashes) are tried as MBIDs first.
    let looks_like_mbid = query.len() == 36 && query.contains('-');
    if looks_like_mbid {
        let tracks = queries::tracks::get_by_artist_mbid(conn, query)
            .await
            .map_err(|e| PlaylistError::Db(e.to_string()))?;
        if let Some(track) = tracks.first() {
            let name = track.artist.clone().unwrap_or_else(|| query.to_string());
            return Ok((name, Some(query.to_string())));
        }
    }
    // Not an MBID (or no tracks carry it): try the artists table by name.
    match queries::artists::find_by_name(conn, query).await {
        Ok(Some(artist)) => Ok((artist.name, artist.musicbrainz_id)),
        // Unknown everywhere — fall back to the raw query as a display name.
        _ => Ok((query.to_string(), None)),
    }
}
/// Get local tracks for an artist by key (MBID) or name.
async fn get_artist_tracks(
    conn: &DatabaseConnection,
    key: &str,
    name: &str,
) -> Vec<shanty_db::entities::track::Model> {
    // Prefer an MBID match; a non-empty hit wins outright.
    match queries::tracks::get_by_artist_mbid(conn, key).await {
        Ok(tracks) if !tracks.is_empty() => tracks,
        // MBID miss (or query error): fall back to a plain name lookup.
        _ => queries::tracks::get_by_artist_name(conn, name)
            .await
            .unwrap_or_default(),
    }
}
/// Fetch similar artists from cache or Last.fm.
///
/// The cache key is `lastfm_similar:{mbid}` when an MBID is known, otherwise
/// the lowercased artist name. Fresh results are cached for [`CACHE_TTL`]
/// seconds; cache write failures are ignored (best-effort).
async fn fetch_cached_similar(
    conn: &DatabaseConnection,
    fetcher: &impl SimilarArtistFetcher,
    artist_name: &str,
    mbid: Option<&str>,
) -> Result<Vec<SimilarArtist>, PlaylistError> {
    // Lazy lowercase: only allocate when no MBID is available — the previous
    // `mbid.unwrap_or(&artist_name.to_lowercase())` allocated unconditionally.
    let key_part = mbid.map_or_else(|| artist_name.to_lowercase(), |m| m.to_string());
    let cache_key = format!("lastfm_similar:{key_part}");
    // Serve from cache when present and still deserializable
    if let Ok(Some(json)) = queries::cache::get(conn, &cache_key).await
        && let Ok(cached) = serde_json::from_str::<Vec<SimilarArtist>>(&json)
    {
        tracing::debug!(artist = artist_name, "using cached similar artists");
        return Ok(cached);
    }
    // Cache miss — ask the provider
    let similar = fetcher
        .get_similar_artists(artist_name, mbid)
        .await
        .map_err(|e| PlaylistError::FetchError(e.to_string()))?;
    // Cache the result; a failed write only costs a refetch later
    if let Ok(json) = serde_json::to_string(&similar) {
        let _ = queries::cache::set(conn, &cache_key, "lastfm", &json, CACHE_TTL).await;
    }
    Ok(similar)
}
/// Fetch top tracks from cache or Last.fm.
///
/// The cache key is `lastfm_toptracks:{mbid}` when an MBID is known, otherwise
/// the lowercased artist name. Fresh results are cached for [`CACHE_TTL`]
/// seconds; cache write failures are ignored (best-effort).
async fn fetch_cached_top_tracks(
    conn: &DatabaseConnection,
    fetcher: &impl SimilarArtistFetcher,
    artist_name: &str,
    mbid: Option<&str>,
) -> Result<Vec<PopularTrack>, PlaylistError> {
    // Lazy lowercase: only allocate when no MBID is available — the previous
    // `mbid.unwrap_or(&artist_name.to_lowercase())` allocated unconditionally.
    let key_part = mbid.map_or_else(|| artist_name.to_lowercase(), |m| m.to_string());
    let cache_key = format!("lastfm_toptracks:{key_part}");
    // Serve from cache when present and still deserializable
    if let Ok(Some(json)) = queries::cache::get(conn, &cache_key).await
        && let Ok(cached) = serde_json::from_str::<Vec<PopularTrack>>(&json)
    {
        tracing::debug!(artist = artist_name, "using cached top tracks");
        return Ok(cached);
    }
    // Cache miss — ask the provider
    let tracks = fetcher
        .get_top_tracks(artist_name, mbid)
        .await
        .map_err(|e| PlaylistError::FetchError(e.to_string()))?;
    // Cache the result; a failed write only costs a refetch later
    if let Ok(json) = serde_json::to_string(&tracks) {
        let _ = queries::cache::set(conn, &cache_key, "lastfm", &json, CACHE_TTL).await;
    }
    Ok(tracks)
}
/// Convert candidates to playlist tracks.
fn candidates_to_tracks(candidates: Vec<Candidate>) -> Vec<PlaylistTrack> {
    let mut tracks = Vec::with_capacity(candidates.len());
    for c in candidates {
        tracks.push(PlaylistTrack {
            track_id: c.track_id,
            file_path: c.file_path,
            title: c.title,
            artist: Some(c.artist),
            album: c.album,
            score: c.score,
            duration: c.duration,
        });
    }
    tracks
}
/// Playlist generation error.
#[derive(Debug, thiserror::Error)]
pub enum PlaylistError {
    /// A request parameter was missing or malformed (e.g. no seed artists).
    #[error("invalid input: {0}")]
    InvalidInput(String),
    /// A database query failed; carries the driver's error message.
    #[error("database error: {0}")]
    Db(String),
    /// A remote metadata fetch (similar artists / top tracks) failed.
    #[error("fetch error: {0}")]
    FetchError(String),
}

View File

@@ -0,0 +1,93 @@
use serde::{Deserialize, Serialize};
/// Request to generate a playlist.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PlaylistRequest {
    /// Strategy name: "similar", "genre", "random", or "smart".
    pub strategy: String,
    /// Seed artist names or MBIDs (used by the "similar" strategy).
    #[serde(default)]
    pub seed_artists: Vec<String>,
    /// Genre names (used by the "genre" strategy).
    #[serde(default)]
    pub genres: Vec<String>,
    /// Number of tracks to generate (default 50).
    #[serde(default = "default_count")]
    pub count: usize,
    /// Popularity bias 0-10: 0 = no preference, 10 = heavy popular bias
    /// (default 5).
    #[serde(default = "default_popularity_bias")]
    pub popularity_bias: u8,
    /// Ordering mode: "score" (by score), "interleave" (spread artists), "random" (full shuffle).
    #[serde(default = "default_ordering")]
    pub ordering: String,
    /// Rules for the "smart" strategy.
    #[serde(default)]
    pub rules: Option<SmartRules>,
}
// Serde default helpers (serde's `default` attribute requires a function path).
fn default_count() -> usize {
    50
}
fn default_popularity_bias() -> u8 {
    5
}
fn default_ordering() -> String {
    "interleave".to_string()
}
/// Result of generating a playlist.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PlaylistResult {
    /// Ordered tracks of the generated playlist.
    pub tracks: Vec<PlaylistTrack>,
    /// Strategy actually used ("similar", "genre", "random", "smart").
    pub strategy: String,
    /// Resolved seed artist names (for display — may differ from input query).
    #[serde(default)]
    pub resolved_seeds: Vec<String>,
}
/// A track in a generated playlist.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PlaylistTrack {
    /// Local database track id.
    pub track_id: i32,
    /// Path to the audio file on disk.
    pub file_path: String,
    /// Track title, when known.
    pub title: Option<String>,
    /// Artist name, when known.
    pub artist: Option<String>,
    /// Album name, when known.
    pub album: Option<String>,
    /// Selection score (0.0 for strategies that do not score, e.g. random).
    pub score: f64,
    /// Track duration — presumably seconds (M3U export truncates it to whole
    /// seconds); TODO confirm unit against the indexer.
    pub duration: Option<f64>,
}
/// A weighted candidate for playlist selection (internal).
#[derive(Debug, Clone)]
pub struct Candidate {
    /// Sampling weight used by weighted random selection.
    pub score: f64,
    /// Artist display name (empty string when unknown).
    pub artist: String,
    /// Artist identifier (MBID or lowercased name), when known.
    pub artist_mbid: Option<String>,
    /// Local database track id.
    pub track_id: i32,
    /// Path to the audio file on disk.
    pub file_path: String,
    /// Track title, when known.
    pub title: Option<String>,
    /// Album name, when known.
    pub album: Option<String>,
    /// Track duration — presumably seconds; TODO confirm against the indexer.
    pub duration: Option<f64>,
}
/// A scored track before candidate conversion (internal).
#[derive(Debug, Clone)]
pub struct ScoredTrack {
    /// Local database track id.
    pub track_id: i32,
    /// Path to the audio file on disk.
    pub file_path: String,
    /// Track title, when known.
    pub title: Option<String>,
    /// Artist display name.
    pub artist: String,
    /// Artist identifier (MBID or lowercased name), when known.
    pub artist_mbid: Option<String>,
    /// Album name, when known.
    pub album: Option<String>,
    /// Track duration — presumably seconds; TODO confirm against the indexer.
    pub duration: Option<f64>,
    /// Combined score: similarity × popularity (later normalized per artist).
    pub score: f64,
    /// Popularity factor in [0, 1], curved by the popularity-bias exponent.
    pub popularity: f64,
    /// Similarity factor derived from the artist match score.
    pub similarity: f64,
}
/// Rules for the "smart" playlist strategy.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct SmartRules {
    /// Genres to include (additive across genres).
    #[serde(default)]
    pub genres: Vec<String>,
    /// Only keep tracks added within this many days.
    pub added_within_days: Option<u32>,
    /// Inclusive (min_year, max_year) range; tracks with no year are excluded.
    pub year_range: Option<(i32, i32)>,
    /// Artists to include (additive across artists).
    #[serde(default)]
    pub artists: Vec<String>,
}