Initial commit

Authored by Connor Johnstone on 2026-03-17 15:31:29 -04:00.
commit d5641493b9
11 changed files with 1458 additions and 0 deletions

292
src/main.rs Normal file
View File

@@ -0,0 +1,292 @@
use std::path::PathBuf;
use clap::{Parser, Subcommand};
use tracing_subscriber::EnvFilter;
use shanty_db::Database;
use shanty_db::entities::download_queue::DownloadStatus;
use shanty_db::queries;
use shanty_dl::{
AudioFormat, BackendConfig, DownloadBackend, DownloadTarget, SearchSource, YtDlpBackend,
download_single, run_queue,
};
// Top-level CLI definition for the `shanty-dl` binary, parsed via clap's derive API.
// NOTE(review): comments added here deliberately use `//`, not `///` — clap turns a
// `///` doc comment on a field into its --help text, which would change CLI output.
#[derive(Parser)]
#[command(name = "shanty-dl", about = "Download music files for Shanty")]
struct Cli {
    // Required subcommand (Download or Queue); dispatched on in main().
    #[command(subcommand)]
    command: Commands,
    /// Database URL. Defaults to sqlite://<XDG_DATA_HOME>/shanty/shanty.db?mode=rwc
    #[arg(long, global = true, env = "SHANTY_DATABASE_URL")]
    database: Option<String>,
    /// Increase verbosity (-v info, -vv debug, -vvv trace).
    #[arg(short, long, global = true, action = clap::ArgAction::Count)]
    verbose: u8,
}
// The two top-level subcommands. `Download` is a one-shot fetch with no database;
// `Queue` wraps the persistent download-queue operations.
// NOTE(review): the existing `///` strings are clap help text — behavior-bearing,
// so they are left byte-identical; new commentary uses `//`.
#[derive(Subcommand)]
enum Commands {
    /// Download a single song by query or URL.
    Download {
        /// Search query or direct URL.
        query_or_url: String,
        /// Output audio format.
        #[arg(long, default_value = "opus")]
        format: String,
        /// Output directory for downloaded files.
        #[arg(long)]
        output: Option<PathBuf>,
        /// Path to cookies.txt file for YouTube authentication.
        #[arg(long, env = "SHANTY_COOKIES")]
        cookies: Option<PathBuf>,
        /// Search source (ytmusic or youtube).
        #[arg(long, default_value = "ytmusic")]
        search_source: String,
        /// Requests per hour limit.
        // The default of 450 is also used as a sentinel in make_backend() to decide
        // whether the user overrode the limit — keep the two values in sync.
        #[arg(long, default_value = "450")]
        rate_limit: u32,
        /// Preview what would be downloaded without doing it.
        #[arg(long)]
        dry_run: bool,
    },
    /// Manage the download queue.
    Queue {
        #[command(subcommand)]
        action: QueueAction,
    },
}
// Subcommands under `queue`. `Process` mirrors the flags of the top-level
// `Download` command so both paths build the same backend/config.
// NOTE(review): `///` strings are clap help text and are kept byte-identical.
#[derive(Subcommand)]
enum QueueAction {
    /// Process all pending items in the download queue.
    Process {
        /// Output audio format.
        #[arg(long, default_value = "opus")]
        format: String,
        /// Output directory for downloaded files.
        #[arg(long)]
        output: Option<PathBuf>,
        /// Path to cookies.txt for YouTube authentication.
        #[arg(long, env = "SHANTY_COOKIES")]
        cookies: Option<PathBuf>,
        /// Search source (ytmusic or youtube).
        #[arg(long, default_value = "ytmusic")]
        search_source: String,
        /// Requests per hour limit.
        // Same sentinel relationship with make_backend() as Commands::Download.
        #[arg(long, default_value = "450")]
        rate_limit: u32,
        /// Preview without downloading.
        #[arg(long)]
        dry_run: bool,
    },
    /// Add an item to the download queue.
    Add {
        /// Search query for the song to download.
        query: String,
    },
    /// List items in the download queue.
    List {
        /// Filter by status (pending, downloading, completed, failed, cancelled, all).
        #[arg(long, default_value = "all")]
        status: String,
    },
    /// Retry all failed downloads.
    Retry,
}
/// Build the fallback SQLite URL: `sqlite://<data_dir>/shanty/shanty.db?mode=rwc`.
///
/// The data directory comes from the platform convention (XDG on Linux), with
/// the current directory as a last resort. Directory creation is best-effort:
/// if it fails, SQLite itself will report the error when the DB is opened.
fn default_database_url() -> String {
    let base = dirs::data_dir().unwrap_or_else(|| PathBuf::from("."));
    let data_dir = base.join("shanty");
    // Ignore failure here; a missing directory surfaces as an open error later.
    let _ = std::fs::create_dir_all(&data_dir);
    let db_path = data_dir.join("shanty.db");
    // `mode=rwc` asks SQLite to create the file if it does not exist.
    format!("sqlite://{}?mode=rwc", db_path.display())
}
/// Resolve the default download destination: `<data_dir>/shanty/downloads`.
///
/// Falls back to the current directory when the platform data dir is unknown.
/// Creation is best-effort; downstream file writes will surface any failure.
fn default_output_dir() -> PathBuf {
    let mut dir = dirs::data_dir().unwrap_or_else(|| PathBuf::from("."));
    dir.push("shanty");
    dir.push("downloads");
    // Best-effort mkdir -p; errors are deliberately ignored here.
    let _ = std::fs::create_dir_all(&dir);
    dir
}
/// Construct the yt-dlp backend from CLI inputs.
///
/// Parses `search_source` (errors become `anyhow` errors) and, when cookies are
/// supplied while `rate_limit` is still the CLI default of 450, bumps the limit
/// to the authenticated ceiling of 1800 requests/hour. An explicitly overridden
/// rate limit is always respected as-is.
fn make_backend(
    cookies: &Option<PathBuf>,
    search_source: &str,
    rate_limit: u32,
) -> anyhow::Result<SourceBackend> {
    let source = search_source
        .parse::<SearchSource>()
        .map_err(|msg: String| anyhow::anyhow!(msg))?;
    // 450 doubles as a "user didn't override" sentinel (it is the clap default).
    let mut effective_rate = rate_limit;
    if cookies.is_some() && rate_limit == 450 {
        tracing::info!("cookies provided — using authenticated rate limit (1800/hr)");
        effective_rate = 1800;
    }
    Ok(YtDlpBackend::new(effective_rate, source, cookies.clone()))
}

// Local alias kept private to this helper's signature readability.
type SourceBackend = YtDlpBackend;
fn make_backend_config(
format: &str,
output: &Option<PathBuf>,
cookies: &Option<PathBuf>,
) -> anyhow::Result<BackendConfig> {
let fmt: AudioFormat = format
.parse()
.map_err(|e: String| anyhow::anyhow!(e))?;
Ok(BackendConfig {
output_dir: output.clone().unwrap_or_else(default_output_dir),
format: fmt,
cookies_path: cookies.clone(),
})
}
/// Binary entry point: parse CLI args, configure tracing, then dispatch to the
/// one-shot download path or one of the queue subcommands.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let cli = Cli::parse();
    // Set up tracing: the -v count picks a default filter, but an explicit
    // env filter (e.g. RUST_LOG) always takes precedence below.
    let filter = match cli.verbose {
        0 => "warn",
        1 => "info,shanty_dl=info",
        2 => "info,shanty_dl=debug",
        _ => "debug,shanty_dl=trace",
    };
    tracing_subscriber::fmt()
        .with_env_filter(
            EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(filter)),
        )
        .init();
    match cli.command {
        Commands::Download {
            query_or_url,
            format,
            output,
            cookies,
            search_source,
            rate_limit,
            dry_run,
        } => {
            // One-shot download path — no database involved.
            let backend = make_backend(&cookies, &search_source, rate_limit)?;
            // Fail fast if the backend's external tooling is unavailable.
            backend.check_available().await?;
            let config = make_backend_config(&format, &output, &cookies)?;
            // Determine if it's a URL or a search query
            let target = if query_or_url.starts_with("http://")
                || query_or_url.starts_with("https://")
            {
                DownloadTarget::Url(query_or_url)
            } else {
                DownloadTarget::Query(query_or_url)
            };
            download_single(&backend, target, &config, dry_run).await?;
        }
        Commands::Queue { action } => {
            // Every queue subcommand needs the database; open it once up front.
            let database_url = cli.database.unwrap_or_else(default_database_url);
            let db = Database::new(&database_url).await?;
            match action {
                QueueAction::Process {
                    format,
                    output,
                    cookies,
                    search_source,
                    rate_limit,
                    dry_run,
                } => {
                    // Same backend/config construction as the Download path.
                    let backend = make_backend(&cookies, &search_source, rate_limit)?;
                    backend.check_available().await?;
                    let config = make_backend_config(&format, &output, &cookies)?;
                    if dry_run {
                        println!("DRY RUN — no files will be downloaded");
                    }
                    let stats = run_queue(db.conn(), &backend, &config, dry_run).await?;
                    println!("\nQueue processing complete: {stats}");
                }
                QueueAction::Add { query } => {
                    // NOTE(review): backend is hard-coded to "ytdlp" here — confirm
                    // this should not follow a CLI flag if more backends are added.
                    let item = queries::downloads::enqueue(db.conn(), &query, None, "ytdlp").await?;
                    println!("Added to queue: id={}, query=\"{}\"", item.id, item.query);
                }
                QueueAction::List { status } => {
                    // Map the (case-insensitive) status string to the DB enum;
                    // "all" means no filter. Unknown values abort with an error.
                    let filter = match status.to_lowercase().as_str() {
                        "all" => None,
                        "pending" => Some(DownloadStatus::Pending),
                        "downloading" => Some(DownloadStatus::Downloading),
                        "completed" => Some(DownloadStatus::Completed),
                        "failed" => Some(DownloadStatus::Failed),
                        "cancelled" => Some(DownloadStatus::Cancelled),
                        _ => anyhow::bail!("unknown status: {status}"),
                    };
                    let items = queries::downloads::list(db.conn(), filter).await?;
                    if items.is_empty() {
                        println!("Queue is empty.");
                    } else {
                        // Fixed-width columns; header and rows share one format.
                        println!(
                            "{:<5} {:<12} {:<6} {:<40} {}",
                            "ID", "STATUS", "RETRY", "QUERY", "ERROR"
                        );
                        for item in &items {
                            println!(
                                "{:<5} {:<12} {:<6} {:<40} {}",
                                item.id,
                                format!("{:?}", item.status),
                                item.retry_count,
                                truncate(&item.query, 40),
                                item.error_message.as_deref().unwrap_or(""),
                            );
                        }
                        println!("\n{} items total", items.len());
                    }
                }
                QueueAction::Retry => {
                    // Re-queue every failed item individually.
                    let failed =
                        queries::downloads::list(db.conn(), Some(DownloadStatus::Failed)).await?;
                    if failed.is_empty() {
                        println!("No failed downloads to retry.");
                    } else {
                        for item in &failed {
                            queries::downloads::retry_failed(db.conn(), item.id).await?;
                        }
                        println!("Requeued {} failed downloads.", failed.len());
                    }
                }
            }
        }
    }
    Ok(())
}
/// Truncate `s` for table display to at most `max` characters.
///
/// Returns `s` unchanged when it already fits; otherwise returns its first
/// `max - 1` characters (matching the original column-width behavior).
///
/// Fixes over the previous version:
/// - counts and slices by `char`, not bytes, so multi-byte UTF-8 queries
///   (e.g. accented song titles) can no longer panic on a non-boundary slice;
/// - `max == 0` no longer underflows `max - 1` (saturating subtraction);
/// - drops the redundant `format!("{}", …)` wrapper.
fn truncate(s: &str, max: usize) -> String {
    // `s.len()` is a byte count; use the char count so non-ASCII text that
    // fits within `max` characters is returned whole.
    if s.chars().count() <= max {
        s.to_string()
    } else {
        s.chars().take(max.saturating_sub(1)).collect()
    }
}