Files
Main/src/main.rs
T
Connor Johnstone 314400bde5
CI / check (push) Failing after 22s
CI / docker (push) Has been skipped
redux of the worker queue
2026-03-23 18:37:46 -04:00

314 lines
10 KiB
Rust

use actix_cors::Cors;
use actix_session::{SessionMiddleware, storage::CookieSessionStore};
use actix_web::{App, HttpServer, cookie::Key, web};
use clap::{Parser, Subcommand};
use tracing_actix_web::TracingLogger;
use tracing_subscriber::EnvFilter;
use shanty_config::AppConfig;
use shanty_data::WikipediaFetcher;
use shanty_data::{HybridMusicBrainzFetcher, LocalMusicBrainzFetcher, MusicBrainzFetcher};
use shanty_db::Database;
use shanty_search::MusicBrainzSearch;
use shanty_web::routes;
use shanty_web::state::AppState;
use shanty_web::tasks::TaskManager;
use shanty_web::workers::WorkerManager;
// Top-level CLI definition, parsed via clap's derive API.
// NOTE: the `///` doc comments on fields double as `--help` text at runtime,
// so they are intentionally left untouched here.
#[derive(Parser)]
#[command(name = "shanty", about = "Shanty — self-hosted music management")]
struct Cli {
    /// Path to config file.
    #[arg(long, env = "SHANTY_CONFIG")]
    config: Option<String>,
    /// Override the port.
    #[arg(long)]
    port: Option<u16>,
    /// Increase verbosity (-v info, -vv debug, -vvv trace).
    #[arg(short, long, action = clap::ArgAction::Count)]
    verbose: u8,
    // Optional subcommand; `None` means "run the web server" (the default mode).
    #[command(subcommand)]
    command: Option<Commands>,
}
// Subcommands. Currently only `mb-import`; when no subcommand is given the
// binary starts the web server instead.
// NOTE: the `///` doc comments become `--help` text at runtime and are left
// untouched.
#[derive(Subcommand)]
enum Commands {
    /// Import MusicBrainz JSON data dumps into local SQLite database.
    MbImport {
        /// Download fresh dump files from metabrainz.org before importing.
        #[arg(long)]
        download: bool,
        /// Directory containing (or to download) dump files.
        /// Defaults to the application data directory.
        #[arg(long)]
        data_dir: Option<String>,
    },
}
#[actix_web::main]
async fn main() -> anyhow::Result<()> {
// Load .env file if present (before anything reads env vars)
dotenvy::dotenv().ok();
let cli = Cli::parse();
// Load config early so we can use log_level from it
let mut config = AppConfig::load(cli.config.as_deref());
// CLI -v flags override config log_level
let filter = match cli.verbose {
0 => {
// Use config log_level
let level = config.log_level.to_lowercase();
match level.as_str() {
"error" => "error".to_string(),
"warn" => "warn".to_string(),
"debug" => "debug,shanty=debug,shanty_web=debug".to_string(),
"trace" => "trace,shanty=trace,shanty_web=trace".to_string(),
_ => "info,shanty=info,shanty_web=info".to_string(),
}
}
1 => "info,shanty=debug,shanty_web=debug".to_string(),
_ => "debug,shanty=trace,shanty_web=trace".to_string(),
};
tracing_subscriber::fmt()
.with_env_filter(
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(&filter)),
)
.init();
if let Some(port) = cli.port {
config.web.port = port;
}
// Handle subcommands
if let Some(Commands::MbImport { download, data_dir }) = cli.command {
return run_mb_import(&config, download, data_dir.as_deref()).await;
}
tracing::info!(url = %config.database_url, "connecting to database");
let db = Database::new(&config.database_url).await?;
let mb_remote = MusicBrainzFetcher::new()?;
let search = MusicBrainzSearch::with_limiter(mb_remote.limiter())?;
// Set up local MB database if configured
let local_mb = create_local_mb_fetcher(&config);
let mb_client = HybridMusicBrainzFetcher::new(local_mb, mb_remote);
if mb_client.has_local_db()
&& let Some(stats) = mb_client.local_stats()
{
tracing::info!(
artists = stats.artists,
release_groups = stats.release_groups,
releases = stats.releases,
recordings = stats.recordings,
last_import = ?stats.last_import_date,
"local MusicBrainz database loaded"
);
} else if !mb_client.has_local_db() {
tracing::info!("no local MusicBrainz database — using API only");
}
let wiki_fetcher = WikipediaFetcher::new()?;
let bind = format!("{}:{}", config.web.bind, config.web.port);
tracing::info!(bind = %bind, "starting server");
let config_path = cli.config.clone();
let state = web::Data::new(AppState {
db,
mb_client,
search,
wiki_fetcher,
config: std::sync::Arc::new(tokio::sync::RwLock::new(config)),
config_path,
tasks: TaskManager::new(),
workers: WorkerManager::new(),
firefox_login: tokio::sync::Mutex::new(None),
});
// Start work queue workers and unified scheduler
WorkerManager::spawn_all(state.clone());
shanty_web::scheduler::spawn(state.clone());
shanty_web::mb_update::spawn(state.clone());
// Resolve static files directory
let static_dir = std::env::current_exe()
.ok()
.and_then(|exe| exe.parent().map(|p| p.to_owned()))
.map(|p| p.join("static"))
.unwrap_or_else(|| std::path::PathBuf::from("static"));
let static_dir = if static_dir.is_dir() {
static_dir
} else {
// Check next to shanty-web crate root (for development)
let dev_path =
std::path::PathBuf::from(concat!(env!("CARGO_MANIFEST_DIR"), "/shanty-web/static"));
if dev_path.is_dir() {
dev_path
} else {
tracing::warn!("static directory not found — frontend will not be served");
static_dir
}
};
tracing::info!(path = %static_dir.display(), "serving static files");
// Generate a random session key (sessions won't survive restarts, which is fine)
let session_key = Key::generate();
let server = HttpServer::new(move || {
let cors = Cors::permissive();
let static_dir = static_dir.clone();
App::new()
.wrap(cors)
.wrap(
SessionMiddleware::builder(CookieSessionStore::default(), session_key.clone())
.cookie_secure(false)
.build(),
)
.wrap(TracingLogger::default())
.app_data(state.clone())
.configure(routes::configure)
.service(
actix_files::Files::new("/", static_dir.clone())
.index_file("index.html")
.prefer_utf8(true)
.guard(actix_web::guard::fn_guard(|ctx| {
!ctx.head().uri.path().starts_with("/rest")
})),
)
.default_service(web::to({
let index_path = static_dir.join("index.html");
move |req: actix_web::HttpRequest| {
let index_path = index_path.clone();
async move {
if req.path().starts_with("/rest") {
return Ok(actix_web::HttpResponse::NotFound()
.content_type("application/json")
.body(r#"{"subsonic-response":{"status":"failed","version":"1.16.1","error":{"code":0,"message":"Unknown endpoint"}}}"#));
}
actix_files::NamedFile::open_async(index_path)
.await
.map(|f| f.into_response(&req))
}
}
}))
})
.bind(&bind)?
.run();
// Graceful shutdown on Ctrl+C / SIGTERM
let handle = server.handle();
tokio::spawn(async move {
tokio::signal::ctrl_c().await.ok();
tracing::info!("shutdown signal received, stopping server");
handle.stop(true).await;
});
server.await?;
tracing::info!("server stopped");
Ok(())
}
/// Create a LocalMusicBrainzFetcher from config if available.
///
/// Resolution order: an explicitly configured `musicbrainz.local_db_path`
/// wins; otherwise `data_dir/shanty-mb.db` is used, but only when that file
/// already exists on disk. Returns `None` when no usable database is found.
fn create_local_mb_fetcher(config: &AppConfig) -> Option<LocalMusicBrainzFetcher> {
    let db_path = if let Some(configured) = config.musicbrainz.local_db_path.as_ref() {
        configured.to_string_lossy().to_string()
    } else {
        // Default location: data_dir/shanty-mb.db (only if it exists)
        let candidate = shanty_config::data_dir().join("shanty-mb.db");
        if !candidate.exists() {
            return None;
        }
        candidate.to_string_lossy().to_string()
    };

    let fetcher = match LocalMusicBrainzFetcher::new(&db_path) {
        Ok(f) => f,
        Err(e) => {
            tracing::warn!(path = %db_path, error = %e, "failed to open local MB database");
            return None;
        }
    };

    // An empty database is treated the same as no database at all.
    if !fetcher.is_available() {
        tracing::debug!(path = %db_path, "local MB database exists but has no data");
        return None;
    }
    tracing::info!(path = %db_path, "opened local MusicBrainz database");
    Some(fetcher)
}
/// Run the `mb-import` subcommand.
///
/// Optionally downloads the MusicBrainz JSON dumps first, then imports them
/// into the local SQLite database and prints a summary to stdout.
async fn run_mb_import(
    config: &AppConfig,
    download: bool,
    data_dir_override: Option<&str>,
) -> anyhow::Result<()> {
    // Dump directory: CLI override, or <app data dir>/mb-dumps by default.
    let data_dir = match data_dir_override {
        Some(dir) => std::path::PathBuf::from(dir),
        None => shanty_config::data_dir().join("mb-dumps"),
    };
    // Target database: config setting, or <app data dir>/shanty-mb.db.
    let db_path = match config.musicbrainz.local_db_path.clone() {
        Some(path) => path,
        None => shanty_config::data_dir().join("shanty-mb.db"),
    };
    tracing::info!(
        dump_dir = %data_dir.display(),
        db_path = %db_path.display(),
        download = download,
        "starting MusicBrainz import"
    );

    // Download dumps if requested
    if download {
        std::fs::create_dir_all(&data_dir)?;
        let timestamp = shanty_data::mb_import::discover_latest_dump_folder()
            .await
            .map_err(|e| anyhow::anyhow!("failed to discover latest dump: {e}"))?;
        tracing::info!(timestamp = %timestamp, "using dump folder");
        for filename in shanty_data::mb_import::DUMP_FILES {
            shanty_data::mb_import::download_dump(filename, &timestamp, &data_dir, |msg| {
                tracing::info!("{msg}");
            })
            .await
            .map_err(|e| anyhow::anyhow!("{e}"))?;
        }
    }

    // Without --download the dump directory must already be populated.
    if !data_dir.exists() {
        anyhow::bail!(
            "dump directory {} does not exist. Use --download to fetch dumps, or provide --data-dir pointing to existing dump files.",
            data_dir.display()
        );
    }

    // Run import (handles opening the database internally)
    let stats = shanty_data::mb_import::run_import_at_path(&db_path, &data_dir, |msg| {
        tracing::info!("{msg}");
    })
    .map_err(|e| anyhow::anyhow!("{e}"))?;
    tracing::info!(%stats, db_path = %db_path.display(), "MusicBrainz import complete");
    println!("{stats}");
    println!("Database: {}", db_path.display());
    Ok(())
}