Compare commits
3 Commits
a57df38eb1
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c12dba886e | ||
|
|
0e5195e64c | ||
|
|
2592651c9a |
372
scripts/cookie_manager.py
Normal file
372
scripts/cookie_manager.py
Normal file
@@ -0,0 +1,372 @@
|
||||
#!/usr/bin/env python3
|
||||
"""YouTube cookie manager for Shanty.
|
||||
|
||||
Manages a persistent Firefox profile for YouTube authentication.
|
||||
Handles interactive login (via Xvfb + x11vnc + noVNC), headless cookie
|
||||
refresh, and Netscape-format cookie export for yt-dlp.
|
||||
|
||||
Dependencies (runtime):
|
||||
- firefox-esr
|
||||
- xvfb, x11vnc, novnc, websockify (for interactive login only)
|
||||
- sqlite3 (Python stdlib)
|
||||
|
||||
Usage:
|
||||
cookie_manager.py login-start <profile_dir> <vnc_port>
|
||||
cookie_manager.py login-stop
|
||||
cookie_manager.py refresh <profile_dir> <cookies_output>
|
||||
cookie_manager.py export <profile_dir> <cookies_output>
|
||||
cookie_manager.py status <profile_dir>
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import signal
|
||||
import sqlite3
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def find_firefox() -> str:
    """Locate the Firefox binary on PATH.

    Debian ships the browser as 'firefox-esr'; Arch and most other
    distros call it 'firefox'. The first candidate found wins.

    Returns:
        The binary name usable with subprocess.

    Raises:
        FileNotFoundError: if neither candidate is on PATH.
    """
    candidates = ("firefox-esr", "firefox")
    found = next((c for c in candidates if shutil.which(c)), None)
    if found is None:
        raise FileNotFoundError("firefox not found on PATH")
    return found
|
||||
|
||||
# File that tracks PIDs of login session processes, written by
# cmd_login_start and consumed/removed by cmd_login_stop.
# NOTE(review): a fixed, predictable path in /tmp is writable by any local
# user — confirm this is acceptable on multi-user hosts.
PID_FILE = "/tmp/shanty-login-pids.json"
|
||||
|
||||
|
||||
def export_cookies(profile_dir: str, output_path: str) -> dict:
    """Read cookies.sqlite from a Firefox profile and write Netscape format.

    Only YouTube/Google cookies are exported (that is all yt-dlp needs).

    Args:
        profile_dir: Firefox profile directory containing cookies.sqlite.
        output_path: destination file for the Netscape-format cookie jar.

    Returns:
        {"status": "ok", "cookies_count": N} on success, or
        {"status": "error", "error": "..."} on any failure.
    """
    cookies_db = os.path.join(profile_dir, "cookies.sqlite")
    if not os.path.exists(cookies_db):
        return {"status": "error", "error": "cookies.sqlite not found"}

    # Copy the database to avoid locking issues (Firefox may hold it open).
    # Use a unique temp file instead of a fixed name so concurrent
    # invocations don't clobber each other's copy.
    fd, tmp_db = tempfile.mkstemp(prefix="shanty_cookies_", suffix=".sqlite")
    os.close(fd)
    try:
        try:
            shutil.copy2(cookies_db, tmp_db)
        except Exception as e:
            return {"status": "error", "error": f"failed to copy cookies db: {e}"}

        try:
            conn = sqlite3.connect(tmp_db)
            try:
                rows = conn.execute(
                    "SELECT host, path, isSecure, expiry, name, value "
                    "FROM moz_cookies "
                    "WHERE host LIKE '%youtube%' OR host LIKE '%google%'"
                ).fetchall()
            finally:
                # Close even when the query raises (the original leaked the
                # connection on that path).
                conn.close()
        except Exception as e:
            return {"status": "error", "error": f"sqlite error: {e}"}
    finally:
        # Always remove the temporary copy, success or failure.
        try:
            os.unlink(tmp_db)
        except OSError:
            pass

    if not rows:
        return {"status": "error", "error": "no YouTube/Google cookies found"}

    os.makedirs(os.path.dirname(output_path) or ".", exist_ok=True)
    with open(output_path, "w") as f:
        f.write("# Netscape HTTP Cookie File\n")
        f.write("# Exported by Shanty cookie_manager.py\n")
        for host, path, secure, expiry, name, value in rows:
            # Netscape format: a leading dot on the host means the cookie
            # applies to subdomains (the "include subdomains" flag).
            flag = "TRUE" if host.startswith(".") else "FALSE"
            secure_str = "TRUE" if secure else "FALSE"
            f.write(f"{host}\t{flag}\t{path}\t{secure_str}\t{expiry}\t{name}\t{value}\n")

    return {"status": "ok", "cookies_count": len(rows)}
|
||||
|
||||
|
||||
def cmd_export(args):
    """Export cookies from profile without launching Firefox.

    Delegates to export_cookies and prints its JSON result to stdout.
    """
    json.dump(export_cookies(args.profile_dir, args.cookies_output), sys.stdout)
|
||||
|
||||
|
||||
def cmd_status(args):
    """Check if profile has valid YouTube cookies.

    Prints a JSON object to stdout: {"authenticated": false, "reason": ...}
    when no usable cookies exist, otherwise {"authenticated": true,
    "cookie_count": N, "cookie_age_hours": H}.
    """
    cookies_db = os.path.join(args.profile_dir, "cookies.sqlite")
    if not os.path.exists(cookies_db):
        json.dump({"authenticated": False, "reason": "no profile"}, sys.stdout)
        return

    tmp_db = None
    try:
        # Copy the db to dodge Firefox's lock; use a unique temp name so
        # concurrent status checks don't race on a shared fixed path.
        fd, tmp_db = tempfile.mkstemp(prefix="shanty_cookies_status_", suffix=".sqlite")
        os.close(fd)
        shutil.copy2(cookies_db, tmp_db)
        conn = sqlite3.connect(tmp_db)
        try:
            row = conn.execute(
                "SELECT MAX(lastAccessed) FROM moz_cookies "
                "WHERE host LIKE '%youtube%' OR host LIKE '%google%'"
            ).fetchone()
            count = conn.execute(
                "SELECT COUNT(*) FROM moz_cookies "
                "WHERE host LIKE '%youtube%' OR host LIKE '%google%'"
            ).fetchone()[0]
        finally:
            # Close even if a query raises (original leaked the connection
            # on the error path).
            conn.close()
    except Exception as e:
        json.dump({"authenticated": False, "reason": str(e)}, sys.stdout)
        return
    finally:
        # Remove the temp copy on every path (original leaked it on errors).
        if tmp_db is not None:
            try:
                os.unlink(tmp_db)
            except OSError:
                pass

    if not row[0] or count == 0:
        json.dump({"authenticated": False, "reason": "no cookies"}, sys.stdout)
        return

    # lastAccessed is in microseconds since epoch
    last_accessed = row[0] / 1_000_000
    age_hours = (time.time() - last_accessed) / 3600

    json.dump({
        "authenticated": True,
        "cookie_count": count,
        "cookie_age_hours": round(age_hours, 1),
    }, sys.stdout)
|
||||
|
||||
|
||||
def cmd_refresh(args):
    """Launch headless Firefox to refresh cookies, then export.

    Opens YouTube in a short-lived headless session so Firefox can rotate
    its session cookies on disk, then hands off to export_cookies. Prints
    a JSON status object to stdout.
    """
    profile = args.profile_dir
    out_path = args.cookies_output

    if not os.path.isdir(profile):
        json.dump({"status": "error", "error": "profile directory not found"}, sys.stdout)
        return

    # Launch Firefox headless, visit YouTube, quit
    try:
        browser = subprocess.Popen(
            [
                find_firefox(), "--headless",
                "--profile", profile,
                "https://www.youtube.com",
            ],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.PIPE,
        )
        # Give the page time to load so refreshed cookies land on disk.
        time.sleep(10)
        browser.terminate()
        try:
            browser.wait(timeout=10)
        except subprocess.TimeoutExpired:
            # SIGTERM was ignored; force it down.
            browser.kill()
            browser.wait()
    except FileNotFoundError:
        json.dump({"status": "error", "error": "firefox not found on PATH"}, sys.stdout)
        return
    except Exception as e:
        json.dump({"status": "error", "error": f"firefox error: {e}"}, sys.stdout)
        return

    # Export cookies via the shared exporter.
    json.dump(export_cookies(profile, out_path), sys.stdout)
|
||||
|
||||
|
||||
def cmd_login_start(args):
    """Start Xvfb + x11vnc + noVNC + Firefox for interactive login.

    Spawns the four processes in dependency order (Xvfb -> x11vnc ->
    websockify -> Firefox), records their PIDs in PID_FILE so login-stop
    can tear the session down later, and prints a JSON object with the
    noVNC URL to stdout. On any startup failure, every process already
    started is terminated before reporting the error.
    """
    profile_dir = args.profile_dir
    vnc_port = int(args.vnc_port)

    # Pick an unused display number
    # NOTE(review): a random display number can collide with one already in
    # use; the Xvfb liveness check below turns a collision into a clean error.
    import random
    display_num = random.randint(50, 199)
    display = f":{display_num}"

    os.makedirs(profile_dir, exist_ok=True)

    # name -> PID of each process we start; saved to PID_FILE on success.
    pids = {}

    def _terminate_started():
        """Best-effort SIGTERM of everything started so far (shared by both error paths)."""
        for pid in pids.values():
            try:
                os.kill(pid, signal.SIGTERM)
            except ProcessLookupError:
                pass

    # Build a minimal env — strips Wayland vars and forces DISPLAY to Xvfb.
    # This prevents x11vnc from detecting Wayland and Firefox from using
    # the user's real X/Wayland session.
    clean_env = {
        "DISPLAY": display,
        "HOME": os.environ.get("HOME", "/tmp"),
        "PATH": os.environ.get("PATH", "/usr/bin:/bin"),
        "XDG_RUNTIME_DIR": os.environ.get("XDG_RUNTIME_DIR", f"/run/user/{os.getuid()}"),
    }

    try:
        # Start Xvfb (virtual framebuffer)
        xvfb = subprocess.Popen(
            ["Xvfb", display, "-screen", "0", "1280x720x24"],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.PIPE,
        )
        pids["xvfb"] = xvfb.pid
        time.sleep(1)

        # Verify Xvfb started
        if xvfb.poll() is not None:
            stderr = xvfb.stderr.read().decode() if xvfb.stderr else ""
            raise RuntimeError(f"Xvfb failed to start on {display}: {stderr}")

        # Start x11vnc (with clean env to avoid Wayland detection)
        vnc_display_port = 5900 + display_num
        x11vnc = subprocess.Popen(
            [
                "x11vnc", "-display", display,
                "-rfbport", str(vnc_display_port),
                "-nopw", "-forever", "-shared",
            ],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            env=clean_env,
        )
        pids["x11vnc"] = x11vnc.pid
        time.sleep(1)

        # Start websockify (noVNC proxy)
        # noVNC web files location varies by distro
        novnc_path = "/usr/share/novnc"
        if not os.path.isdir(novnc_path):
            novnc_path = "/usr/share/webapps/novnc"  # Arch
        websockify = subprocess.Popen(
            [
                "websockify", "--web", novnc_path,
                str(vnc_port), f"localhost:{vnc_display_port}",
            ],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        pids["websockify"] = websockify.pid
        time.sleep(1)

        # Start Firefox on the virtual display
        firefox_bin = find_firefox()
        firefox = subprocess.Popen(
            [
                firefox_bin,
                "--profile", profile_dir,
                "https://accounts.google.com/ServiceLogin?continue=https://music.youtube.com",
            ],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            env=clean_env,
        )
        pids["firefox"] = firefox.pid

    except FileNotFoundError as e:
        # Missing binary (Xvfb/x11vnc/websockify/firefox): clean up and report.
        _terminate_started()
        json.dump({"status": "error", "error": f"missing dependency: {e}"}, sys.stdout)
        return
    except Exception as e:
        _terminate_started()
        json.dump({"status": "error", "error": str(e)}, sys.stdout)
        return

    # Save PIDs for cleanup
    with open(PID_FILE, "w") as f:
        json.dump(pids, f)

    vnc_url = f"http://localhost:{vnc_port}/vnc.html?autoconnect=true"
    json.dump({
        "status": "running",
        "vnc_url": vnc_url,
        "pids": pids,
    }, sys.stdout)
|
||||
|
||||
|
||||
def cmd_login_stop(args):
    """Stop all login session processes and export cookies.

    Reads PID_FILE written by login-start, terminates the session
    processes, and (when --profile-dir/--cookies-output were given)
    exports the freshly written cookies. Prints a JSON status to stdout.
    """
    if not os.path.exists(PID_FILE):
        json.dump({"status": "error", "error": "no active login session"}, sys.stdout)
        return

    with open(PID_FILE) as f:
        session = json.load(f)

    # Terminate in reverse start order so Firefox shuts down before its
    # display stack disappears underneath it.
    for proc_name in ("firefox", "websockify", "x11vnc", "xvfb"):
        target = session.get(proc_name)
        if target:
            try:
                os.kill(target, signal.SIGTERM)
            except ProcessLookupError:
                pass

    # Give Firefox a moment to flush cookies.sqlite to disk.
    time.sleep(2)

    # SIGKILL any stragglers that ignored SIGTERM.
    for target in session.values():
        try:
            os.kill(target, signal.SIGKILL)
        except ProcessLookupError:
            pass

    os.unlink(PID_FILE)

    # Export cookies when both paths were supplied on the command line.
    if args.profile_dir and args.cookies_output:
        result = export_cookies(args.profile_dir, args.cookies_output)
        result["status"] = "stopped"
        json.dump(result, sys.stdout)
    else:
        json.dump({"status": "stopped"}, sys.stdout)
|
||||
|
||||
|
||||
def main():
    """Parse the command line and dispatch to the matching handler."""
    parser = argparse.ArgumentParser(description="Shanty YouTube cookie manager")
    sub = parser.add_subparsers(dest="command", required=True)

    # login-start: positional profile dir + noVNC port
    login_start = sub.add_parser("login-start")
    login_start.add_argument("profile_dir")
    login_start.add_argument("vnc_port")

    # login-stop: optional export targets
    login_stop = sub.add_parser("login-stop")
    login_stop.add_argument("--profile-dir", dest="profile_dir", default=None)
    login_stop.add_argument("--cookies-output", dest="cookies_output", default=None)

    # refresh / export share an identical signature
    for cmd_name in ("refresh", "export"):
        cmd = sub.add_parser(cmd_name)
        cmd.add_argument("profile_dir")
        cmd.add_argument("cookies_output")

    # status: profile dir only
    status = sub.add_parser("status")
    status.add_argument("profile_dir")

    args = parser.parse_args()

    # Dispatch table: sub-command name -> handler function.
    {
        "login-start": cmd_login_start,
        "login-stop": cmd_login_stop,
        "refresh": cmd_refresh,
        "export": cmd_export,
        "status": cmd_status,
    }[args.command](args)


if __name__ == "__main__":
    main()
|
||||
@@ -80,7 +80,9 @@ impl std::str::FromStr for AudioFormat {
|
||||
"mp3" => Ok(AudioFormat::Mp3),
|
||||
"flac" => Ok(AudioFormat::Flac),
|
||||
"best" => Ok(AudioFormat::Best),
|
||||
_ => Err(format!("unsupported format: {s} (expected opus, mp3, flac, or best)")),
|
||||
_ => Err(format!(
|
||||
"unsupported format: {s} (expected opus, mp3, flac, or best)"
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -98,7 +100,10 @@ pub trait DownloadBackend: Send + Sync {
|
||||
fn check_available(&self) -> impl std::future::Future<Output = DlResult<()>> + Send;
|
||||
|
||||
/// Search for tracks matching a query.
|
||||
fn search(&self, query: &str) -> impl std::future::Future<Output = DlResult<Vec<SearchResult>>> + Send;
|
||||
fn search(
|
||||
&self,
|
||||
query: &str,
|
||||
) -> impl std::future::Future<Output = DlResult<Vec<SearchResult>>> + Send;
|
||||
|
||||
/// Download a target to the configured output directory.
|
||||
fn download(
|
||||
|
||||
@@ -35,9 +35,7 @@ impl DlError {
|
||||
pub fn is_transient(&self) -> bool {
|
||||
matches!(
|
||||
self,
|
||||
DlError::RateLimited(_)
|
||||
| DlError::Io(_)
|
||||
| DlError::BackendError(_)
|
||||
DlError::RateLimited(_) | DlError::Io(_) | DlError::BackendError(_)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,12 @@ pub mod queue;
|
||||
pub mod rate_limit;
|
||||
pub mod ytdlp;
|
||||
|
||||
pub use backend::{AudioFormat, BackendConfig, DownloadBackend, DownloadResult, DownloadTarget, SearchResult};
|
||||
pub use backend::{
|
||||
AudioFormat, BackendConfig, DownloadBackend, DownloadResult, DownloadTarget, SearchResult,
|
||||
};
|
||||
pub use error::{DlError, DlResult};
|
||||
pub use queue::{DlStats, ProgressFn, SyncStats, download_single, run_queue, run_queue_with_progress, sync_wanted_to_queue};
|
||||
pub use queue::{
|
||||
DlStats, ProgressFn, SyncStats, download_single, run_queue, run_queue_with_progress,
|
||||
sync_wanted_to_queue,
|
||||
};
|
||||
pub use ytdlp::{SearchSource, YtDlpBackend};
|
||||
|
||||
24
src/main.rs
24
src/main.rs
@@ -156,9 +156,7 @@ fn make_backend_config(
|
||||
output: &Option<PathBuf>,
|
||||
cookies: &Option<PathBuf>,
|
||||
) -> anyhow::Result<BackendConfig> {
|
||||
let fmt: AudioFormat = format
|
||||
.parse()
|
||||
.map_err(|e: String| anyhow::anyhow!(e))?;
|
||||
let fmt: AudioFormat = format.parse().map_err(|e: String| anyhow::anyhow!(e))?;
|
||||
Ok(BackendConfig {
|
||||
output_dir: output.clone().unwrap_or_else(default_output_dir),
|
||||
format: fmt,
|
||||
@@ -199,13 +197,12 @@ async fn main() -> anyhow::Result<()> {
|
||||
let config = make_backend_config(&format, &output, &cookies)?;
|
||||
|
||||
// Determine if it's a URL or a search query
|
||||
let target = if query_or_url.starts_with("http://")
|
||||
|| query_or_url.starts_with("https://")
|
||||
{
|
||||
DownloadTarget::Url(query_or_url)
|
||||
} else {
|
||||
DownloadTarget::Query(query_or_url)
|
||||
};
|
||||
let target =
|
||||
if query_or_url.starts_with("http://") || query_or_url.starts_with("https://") {
|
||||
DownloadTarget::Url(query_or_url)
|
||||
} else {
|
||||
DownloadTarget::Query(query_or_url)
|
||||
};
|
||||
|
||||
download_single(&backend, target, &config, dry_run).await?;
|
||||
}
|
||||
@@ -235,7 +232,8 @@ async fn main() -> anyhow::Result<()> {
|
||||
println!("\nQueue processing complete: {stats}");
|
||||
}
|
||||
QueueAction::Add { query } => {
|
||||
let item = queries::downloads::enqueue(db.conn(), &query, None, "ytdlp").await?;
|
||||
let item =
|
||||
queries::downloads::enqueue(db.conn(), &query, None, "ytdlp").await?;
|
||||
println!("Added to queue: id={}, query=\"{}\"", item.id, item.query);
|
||||
}
|
||||
QueueAction::List { status } => {
|
||||
@@ -254,8 +252,8 @@ async fn main() -> anyhow::Result<()> {
|
||||
println!("Queue is empty.");
|
||||
} else {
|
||||
println!(
|
||||
"{:<5} {:<12} {:<6} {:<40} {}",
|
||||
"ID", "STATUS", "RETRY", "QUERY", "ERROR"
|
||||
"{:<5} {:<12} {:<6} {:<40} ERROR",
|
||||
"ID", "STATUS", "RETRY", "QUERY"
|
||||
);
|
||||
for item in &items {
|
||||
println!(
|
||||
|
||||
94
src/queue.rs
94
src/queue.rs
@@ -81,7 +81,11 @@ pub async fn run_queue_with_progress(
|
||||
stats.downloads_attempted += 1;
|
||||
|
||||
if let Some(ref cb) = on_progress {
|
||||
cb(stats.downloads_attempted, total, &format!("Downloading: {}", item.query));
|
||||
cb(
|
||||
stats.downloads_attempted,
|
||||
total,
|
||||
&format!("Downloading: {}", item.query),
|
||||
);
|
||||
}
|
||||
|
||||
tracing::info!(
|
||||
@@ -106,8 +110,7 @@ pub async fn run_queue_with_progress(
|
||||
}
|
||||
|
||||
// Mark as downloading
|
||||
queries::downloads::update_status(conn, item.id, DownloadStatus::Downloading, None)
|
||||
.await?;
|
||||
queries::downloads::update_status(conn, item.id, DownloadStatus::Downloading, None).await?;
|
||||
|
||||
// Determine download target
|
||||
let target = if let Some(ref url) = item.source_url {
|
||||
@@ -126,53 +129,45 @@ pub async fn run_queue_with_progress(
|
||||
"download completed"
|
||||
);
|
||||
|
||||
queries::downloads::update_status(
|
||||
conn,
|
||||
item.id,
|
||||
DownloadStatus::Completed,
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
queries::downloads::update_status(conn, item.id, DownloadStatus::Completed, None)
|
||||
.await?;
|
||||
|
||||
// Update wanted item status and create track record with MBID
|
||||
if let Some(wanted_id) = item.wanted_item_id {
|
||||
if let Ok(wanted) = queries::wanted::get_by_id(conn, wanted_id).await {
|
||||
// Create a track record with the MBID so the tagger
|
||||
// can skip searching and go straight to the right recording
|
||||
let now = chrono::Utc::now().naive_utc();
|
||||
let file_path = result.file_path.to_string_lossy().to_string();
|
||||
let file_size = std::fs::metadata(&result.file_path)
|
||||
.map(|m| m.len() as i64)
|
||||
.unwrap_or(0);
|
||||
if let Some(wanted_id) = item.wanted_item_id
|
||||
&& let Ok(wanted) = queries::wanted::get_by_id(conn, wanted_id).await
|
||||
{
|
||||
// Create a track record with the MBID so the tagger
|
||||
// can skip searching and go straight to the right recording
|
||||
let now = chrono::Utc::now().naive_utc();
|
||||
let file_path = result.file_path.to_string_lossy().to_string();
|
||||
let file_size = std::fs::metadata(&result.file_path)
|
||||
.map(|m| m.len() as i64)
|
||||
.unwrap_or(0);
|
||||
|
||||
let track_active = shanty_db::entities::track::ActiveModel {
|
||||
file_path: Set(file_path),
|
||||
title: Set(Some(result.title.clone())),
|
||||
artist: Set(result.artist.clone()),
|
||||
file_size: Set(file_size),
|
||||
musicbrainz_id: Set(wanted.musicbrainz_id.clone()),
|
||||
artist_id: Set(wanted.artist_id),
|
||||
added_at: Set(now),
|
||||
updated_at: Set(now),
|
||||
..Default::default()
|
||||
};
|
||||
if let Err(e) = queries::tracks::upsert(conn, track_active).await {
|
||||
tracing::warn!(error = %e, "failed to create track record after download");
|
||||
}
|
||||
let track_active = shanty_db::entities::track::ActiveModel {
|
||||
file_path: Set(file_path),
|
||||
title: Set(Some(result.title.clone())),
|
||||
artist: Set(result.artist.clone()),
|
||||
file_size: Set(file_size),
|
||||
musicbrainz_id: Set(wanted.musicbrainz_id.clone()),
|
||||
artist_id: Set(wanted.artist_id),
|
||||
added_at: Set(now),
|
||||
updated_at: Set(now),
|
||||
..Default::default()
|
||||
};
|
||||
if let Err(e) = queries::tracks::upsert(conn, track_active).await {
|
||||
tracing::warn!(error = %e, "failed to create track record after download");
|
||||
}
|
||||
|
||||
if let Err(e) = queries::wanted::update_status(
|
||||
conn,
|
||||
wanted_id,
|
||||
WantedStatus::Downloaded,
|
||||
)
|
||||
.await
|
||||
{
|
||||
tracing::warn!(
|
||||
wanted_id = wanted_id,
|
||||
error = %e,
|
||||
"failed to update wanted item status"
|
||||
);
|
||||
}
|
||||
if let Err(e) =
|
||||
queries::wanted::update_status(conn, wanted_id, WantedStatus::Downloaded)
|
||||
.await
|
||||
{
|
||||
tracing::warn!(
|
||||
wanted_id = wanted_id,
|
||||
error = %e,
|
||||
"failed to update wanted item status"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -244,11 +239,8 @@ impl fmt::Display for SyncStats {
|
||||
/// Sync wanted items to the download queue.
|
||||
/// Finds all Track-type Wanted items and enqueues them for download,
|
||||
/// skipping any that already have a queue entry.
|
||||
pub async fn sync_wanted_to_queue(
|
||||
conn: &DatabaseConnection,
|
||||
dry_run: bool,
|
||||
) -> DlResult<SyncStats> {
|
||||
let wanted = queries::wanted::list(conn, Some(WantedStatus::Wanted)).await?;
|
||||
pub async fn sync_wanted_to_queue(conn: &DatabaseConnection, dry_run: bool) -> DlResult<SyncStats> {
|
||||
let wanted = queries::wanted::list(conn, Some(WantedStatus::Wanted), None).await?;
|
||||
let mut stats = SyncStats::default();
|
||||
|
||||
for item in &wanted {
|
||||
|
||||
@@ -42,8 +42,7 @@ impl RateLimiter {
|
||||
state.remaining -= 1;
|
||||
|
||||
// Warn when approaching the limit
|
||||
let pct_remaining =
|
||||
(state.remaining as f64 / self.max_per_hour as f64) * 100.0;
|
||||
let pct_remaining = (state.remaining as f64 / self.max_per_hour as f64) * 100.0;
|
||||
if pct_remaining < 10.0 && pct_remaining > 0.0 {
|
||||
tracing::warn!(
|
||||
remaining = state.remaining,
|
||||
|
||||
39
src/ytdlp.rs
39
src/ytdlp.rs
@@ -4,7 +4,9 @@ use std::process::Stdio;
|
||||
use serde::Deserialize;
|
||||
use tokio::process::Command;
|
||||
|
||||
use crate::backend::{BackendConfig, DownloadBackend, DownloadResult, DownloadTarget, SearchResult};
|
||||
use crate::backend::{
|
||||
BackendConfig, DownloadBackend, DownloadResult, DownloadTarget, SearchResult,
|
||||
};
|
||||
use crate::error::{DlError, DlResult};
|
||||
use crate::rate_limit::RateLimiter;
|
||||
|
||||
@@ -22,7 +24,9 @@ impl std::str::FromStr for SearchSource {
|
||||
match s.to_lowercase().as_str() {
|
||||
"ytmusic" | "youtube_music" | "youtubemusic" => Ok(SearchSource::YouTubeMusic),
|
||||
"youtube" | "yt" => Ok(SearchSource::YouTube),
|
||||
_ => Err(format!("unknown search source: {s} (expected ytmusic or youtube)")),
|
||||
_ => Err(format!(
|
||||
"unknown search source: {s} (expected ytmusic or youtube)"
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -63,9 +67,10 @@ impl YtDlpBackend {
|
||||
self.rate_limiter.acquire().await;
|
||||
|
||||
let mut cmd = Command::new("yt-dlp");
|
||||
cmd.args(args)
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped());
|
||||
cmd.args(args).stdout(Stdio::piped()).stderr(Stdio::piped());
|
||||
|
||||
// Allow yt-dlp to fetch updated JS challenge solver scripts
|
||||
cmd.args(["--remote-components", "ejs:github"]);
|
||||
|
||||
// Add cookies if configured
|
||||
if let Some(ref cookies) = self.cookies_path {
|
||||
@@ -120,7 +125,9 @@ impl YtDlpBackend {
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr).to_string();
|
||||
tracing::warn!(stderr = %stderr, "ytmusic search failed");
|
||||
return Err(DlError::BackendError(format!("ytmusic search failed: {stderr}")));
|
||||
return Err(DlError::BackendError(format!(
|
||||
"ytmusic search failed: {stderr}"
|
||||
)));
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
@@ -180,17 +187,17 @@ impl YtDlpBackend {
|
||||
fn find_ytmusic_script(&self) -> DlResult<PathBuf> {
|
||||
// Check next to the current executable
|
||||
if let Ok(exe) = std::env::current_exe() {
|
||||
let beside_exe = exe.parent().unwrap_or(std::path::Path::new(".")).join("ytmusic_search.py");
|
||||
let beside_exe = exe
|
||||
.parent()
|
||||
.unwrap_or(std::path::Path::new("."))
|
||||
.join("ytmusic_search.py");
|
||||
if beside_exe.exists() {
|
||||
return Ok(beside_exe);
|
||||
}
|
||||
}
|
||||
|
||||
// Check common install locations
|
||||
for dir in &[
|
||||
"/usr/share/shanty",
|
||||
"/usr/local/share/shanty",
|
||||
] {
|
||||
for dir in &["/usr/share/shanty", "/usr/local/share/shanty"] {
|
||||
let path = PathBuf::from(dir).join("ytmusic_search.py");
|
||||
if path.exists() {
|
||||
return Ok(path);
|
||||
@@ -303,10 +310,7 @@ impl DownloadBackend for YtDlpBackend {
|
||||
|
||||
// Add cookies from backend config or backend's own cookies
|
||||
let cookies_str;
|
||||
let cookies_path = config
|
||||
.cookies_path
|
||||
.as_ref()
|
||||
.or(self.cookies_path.as_ref());
|
||||
let cookies_path = config.cookies_path.as_ref().or(self.cookies_path.as_ref());
|
||||
if let Some(c) = cookies_path {
|
||||
cookies_str = c.to_string_lossy().to_string();
|
||||
args.push("--cookies");
|
||||
@@ -318,9 +322,8 @@ impl DownloadBackend for YtDlpBackend {
|
||||
let output = self.run_ytdlp(&args).await?;
|
||||
|
||||
// Parse the JSON output to get the actual file path
|
||||
let info: YtDlpDownloadInfo = serde_json::from_str(output.trim()).map_err(|e| {
|
||||
DlError::BackendError(format!("failed to parse yt-dlp output: {e}"))
|
||||
})?;
|
||||
let info: YtDlpDownloadInfo = serde_json::from_str(output.trim())
|
||||
.map_err(|e| DlError::BackendError(format!("failed to parse yt-dlp output: {e}")))?;
|
||||
|
||||
// --print-json reports the pre-extraction filename (e.g. .webm),
|
||||
// but --extract-audio produces a file with the target format extension.
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
use shanty_db::entities::download_queue::DownloadStatus;
|
||||
use shanty_db::{Database, queries};
|
||||
use shanty_dl::backend::{AudioFormat, BackendConfig, DownloadBackend, DownloadResult, DownloadTarget, SearchResult};
|
||||
use shanty_dl::backend::{
|
||||
AudioFormat, BackendConfig, DownloadBackend, DownloadResult, DownloadTarget, SearchResult,
|
||||
};
|
||||
use shanty_dl::error::DlResult;
|
||||
use shanty_dl::queue::{run_queue, download_single};
|
||||
use shanty_dl::queue::{download_single, run_queue};
|
||||
use tempfile::TempDir;
|
||||
|
||||
/// Mock backend for testing without yt-dlp.
|
||||
@@ -84,7 +86,9 @@ async fn test_queue_process_success() {
|
||||
.unwrap();
|
||||
|
||||
// Process queue
|
||||
let stats = run_queue(db.conn(), &backend, &config, false).await.unwrap();
|
||||
let stats = run_queue(db.conn(), &backend, &config, false)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(stats.downloads_attempted, 1);
|
||||
assert_eq!(stats.downloads_completed, 1);
|
||||
assert_eq!(stats.downloads_failed, 0);
|
||||
@@ -112,7 +116,9 @@ async fn test_queue_process_failure() {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let stats = run_queue(db.conn(), &backend, &config, false).await.unwrap();
|
||||
let stats = run_queue(db.conn(), &backend, &config, false)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(stats.downloads_attempted, 1);
|
||||
assert_eq!(stats.downloads_failed, 1);
|
||||
|
||||
@@ -218,9 +224,20 @@ async fn test_wanted_item_status_updated_on_download() {
|
||||
};
|
||||
|
||||
// Create a wanted item
|
||||
let wanted = queries::wanted::add(db.conn(), ItemType::Track, "Wanted Song", None, None, None, None)
|
||||
.await
|
||||
.unwrap();
|
||||
let wanted = queries::wanted::add(
|
||||
db.conn(),
|
||||
queries::wanted::AddWantedItem {
|
||||
item_type: ItemType::Track,
|
||||
name: "Wanted Song",
|
||||
musicbrainz_id: None,
|
||||
artist_id: None,
|
||||
album_id: None,
|
||||
track_id: None,
|
||||
user_id: None,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(wanted.status, WantedStatus::Wanted);
|
||||
|
||||
// Enqueue download linked to the wanted item
|
||||
@@ -229,7 +246,9 @@ async fn test_wanted_item_status_updated_on_download() {
|
||||
.unwrap();
|
||||
|
||||
// Process queue
|
||||
run_queue(db.conn(), &backend, &config, false).await.unwrap();
|
||||
run_queue(db.conn(), &backend, &config, false)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Wanted item should now be Downloaded
|
||||
let updated = queries::wanted::get_by_id(db.conn(), wanted.id)
|
||||
|
||||
Reference in New Issue
Block a user