Compare commits
6 Commits
fed3a070fc
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c12dba886e | ||
|
|
0e5195e64c | ||
|
|
2592651c9a | ||
|
|
a57df38eb1 | ||
|
|
2c5fe5728b | ||
|
|
ea1e3c6ac5 |
372
scripts/cookie_manager.py
Normal file
372
scripts/cookie_manager.py
Normal file
@@ -0,0 +1,372 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""YouTube cookie manager for Shanty.
|
||||||
|
|
||||||
|
Manages a persistent Firefox profile for YouTube authentication.
|
||||||
|
Handles interactive login (via Xvfb + x11vnc + noVNC), headless cookie
|
||||||
|
refresh, and Netscape-format cookie export for yt-dlp.
|
||||||
|
|
||||||
|
Dependencies (runtime):
|
||||||
|
- firefox-esr
|
||||||
|
- xvfb, x11vnc, novnc, websockify (for interactive login only)
|
||||||
|
- sqlite3 (Python stdlib)
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
cookie_manager.py login-start <profile_dir> <vnc_port>
|
||||||
|
cookie_manager.py login-stop
|
||||||
|
cookie_manager.py refresh <profile_dir> <cookies_output>
|
||||||
|
cookie_manager.py export <profile_dir> <cookies_output>
|
||||||
|
cookie_manager.py status <profile_dir>
|
||||||
|
"""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import signal
|
||||||
|
import sqlite3
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
import time
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
def find_firefox() -> str:
    """Find the Firefox binary — 'firefox-esr' (Debian) or 'firefox' (Arch/other)."""
    candidates = ("firefox-esr", "firefox")
    binary = next((name for name in candidates if shutil.which(name)), None)
    if binary is None:
        raise FileNotFoundError("firefox not found on PATH")
    return binary
|
||||||
|
|
||||||
|
# File that tracks PIDs of login session processes
|
||||||
|
PID_FILE = "/tmp/shanty-login-pids.json"
|
||||||
|
|
||||||
|
|
||||||
|
def export_cookies(profile_dir: str, output_path: str) -> dict:
    """Read cookies.sqlite from a Firefox profile and write Netscape format.

    Copies the database first so a running Firefox (which holds a lock on
    cookies.sqlite) cannot block the read.  Only YouTube/Google cookies are
    exported, tab-separated, in the format yt-dlp's --cookies option expects.

    Args:
        profile_dir: Firefox profile directory containing cookies.sqlite.
        output_path: Destination path for the Netscape-format cookie file.

    Returns:
        {"status": "ok", "cookies_count": N} on success, or
        {"status": "error", "error": message} on any failure.
    """
    cookies_db = os.path.join(profile_dir, "cookies.sqlite")
    if not os.path.exists(cookies_db):
        return {"status": "error", "error": "cookies.sqlite not found"}

    # Copy the database to avoid locking issues with a live Firefox.
    # NOTE(review): a copy taken mid-write can miss cookies still in the
    # WAL journal (cookies.sqlite-wal) — confirm this is acceptable here.
    tmp_db = os.path.join(tempfile.gettempdir(), "shanty_cookies_tmp.sqlite")
    try:
        shutil.copy2(cookies_db, tmp_db)
    except Exception as e:
        return {"status": "error", "error": f"failed to copy cookies db: {e}"}

    conn = None
    try:
        conn = sqlite3.connect(tmp_db)
        cursor = conn.execute(
            "SELECT host, path, isSecure, expiry, name, value "
            "FROM moz_cookies "
            "WHERE host LIKE '%youtube%' OR host LIKE '%google%'"
        )
        rows = cursor.fetchall()
    except Exception as e:
        return {"status": "error", "error": f"sqlite error: {e}"}
    finally:
        # Close the connection even when the query fails (the previous
        # version leaked it on error), then remove the temporary copy.
        if conn is not None:
            conn.close()
        try:
            os.unlink(tmp_db)
        except OSError:
            pass

    if not rows:
        return {"status": "error", "error": "no YouTube/Google cookies found"}

    os.makedirs(os.path.dirname(output_path) or ".", exist_ok=True)
    with open(output_path, "w", encoding="utf-8") as f:
        f.write("# Netscape HTTP Cookie File\n")
        f.write("# Exported by Shanty cookie_manager.py\n")
        for host, path, secure, expiry, name, value in rows:
            # A leading dot marks a domain-wide cookie in Netscape format.
            flag = "TRUE" if host.startswith(".") else "FALSE"
            secure_str = "TRUE" if secure else "FALSE"
            f.write(f"{host}\t{flag}\t{path}\t{secure_str}\t{expiry}\t{name}\t{value}\n")

    return {"status": "ok", "cookies_count": len(rows)}
|
||||||
|
|
||||||
|
|
||||||
|
def cmd_export(args):
    """Export cookies from profile without launching Firefox."""
    json.dump(export_cookies(args.profile_dir, args.cookies_output), sys.stdout)
|
||||||
|
|
||||||
|
|
||||||
|
def cmd_status(args):
    """Check if profile has valid YouTube cookies.

    Writes a JSON object to stdout: {"authenticated": false, "reason": ...}
    when the profile or cookies are missing/unreadable, otherwise
    {"authenticated": true, "cookie_count": N, "cookie_age_hours": H}.
    """
    cookies_db = os.path.join(args.profile_dir, "cookies.sqlite")
    if not os.path.exists(cookies_db):
        json.dump({"authenticated": False, "reason": "no profile"}, sys.stdout)
        return

    conn = None
    tmp_db = os.path.join(tempfile.gettempdir(), "shanty_cookies_status.sqlite")
    try:
        # Work on a copy so a running Firefox's lock cannot block the read.
        shutil.copy2(cookies_db, tmp_db)
        conn = sqlite3.connect(tmp_db)
        row = conn.execute(
            "SELECT MAX(lastAccessed) FROM moz_cookies "
            "WHERE host LIKE '%youtube%' OR host LIKE '%google%'"
        ).fetchone()
        count = conn.execute(
            "SELECT COUNT(*) FROM moz_cookies "
            "WHERE host LIKE '%youtube%' OR host LIKE '%google%'"
        ).fetchone()[0]
    except Exception as e:
        json.dump({"authenticated": False, "reason": str(e)}, sys.stdout)
        return
    finally:
        # The previous version leaked the connection and the temp copy on
        # error — always clean both up, whichever path we exit on.
        if conn is not None:
            conn.close()
        try:
            os.unlink(tmp_db)
        except OSError:
            pass

    if not row[0] or count == 0:
        json.dump({"authenticated": False, "reason": "no cookies"}, sys.stdout)
        return

    # lastAccessed is stored in microseconds since the epoch.
    last_accessed = row[0] / 1_000_000
    age_hours = (time.time() - last_accessed) / 3600

    json.dump({
        "authenticated": True,
        "cookie_count": count,
        "cookie_age_hours": round(age_hours, 1),
    }, sys.stdout)
|
||||||
|
|
||||||
|
|
||||||
|
def cmd_refresh(args):
    """Launch headless Firefox to refresh cookies, then export."""
    profile_dir = args.profile_dir
    cookies_output = args.cookies_output

    if not os.path.isdir(profile_dir):
        json.dump({"status": "error", "error": "profile directory not found"}, sys.stdout)
        return

    # Visit YouTube in headless Firefox so the session cookies get
    # refreshed on disk, then shut the browser down cleanly.
    try:
        firefox = find_firefox()
        browser = subprocess.Popen(
            [
                firefox, "--headless",
                "--profile", profile_dir,
                "https://www.youtube.com",
            ],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.PIPE,
        )
        # Give the page time to load and the cookie jar time to update.
        time.sleep(10)
        browser.terminate()
        try:
            browser.wait(timeout=10)
        except subprocess.TimeoutExpired:
            browser.kill()
            browser.wait()
    except FileNotFoundError:
        json.dump({"status": "error", "error": "firefox not found on PATH"}, sys.stdout)
        return
    except Exception as e:
        json.dump({"status": "error", "error": f"firefox error: {e}"}, sys.stdout)
        return

    # Export whatever is now in the profile's cookie database.
    json.dump(export_cookies(profile_dir, cookies_output), sys.stdout)
|
||||||
|
|
||||||
|
|
||||||
|
def cmd_login_start(args):
    """Start Xvfb + x11vnc + noVNC + Firefox for interactive login.

    Spawns the four processes, records their PIDs in PID_FILE so that
    `login-stop` can tear the session down, and prints a JSON status with
    the noVNC URL to open in a browser.
    """
    profile_dir = args.profile_dir
    vnc_port = int(args.vnc_port)

    # Pick a display number unlikely to clash with real sessions.
    # NOTE(review): a random pick can still collide with another X server;
    # the Xvfb startup check below catches that (Xvfb exits immediately).
    import random
    display_num = random.randint(50, 199)
    display = f":{display_num}"

    os.makedirs(profile_dir, exist_ok=True)

    pids = {}

    def _kill_started():
        """Best-effort SIGTERM to every process started so far.

        Shared by both error paths below (previously duplicated inline).
        """
        for pid in pids.values():
            try:
                os.kill(pid, signal.SIGTERM)
            except ProcessLookupError:
                pass

    # Build a minimal env — strips Wayland vars and forces DISPLAY to Xvfb.
    # This prevents x11vnc from detecting Wayland and Firefox from using
    # the user's real X/Wayland session.
    clean_env = {
        "DISPLAY": display,
        "HOME": os.environ.get("HOME", "/tmp"),
        "PATH": os.environ.get("PATH", "/usr/bin:/bin"),
        "XDG_RUNTIME_DIR": os.environ.get("XDG_RUNTIME_DIR", f"/run/user/{os.getuid()}"),
    }

    try:
        # Start Xvfb (virtual framebuffer)
        xvfb = subprocess.Popen(
            ["Xvfb", display, "-screen", "0", "1280x720x24"],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.PIPE,
        )
        pids["xvfb"] = xvfb.pid
        time.sleep(1)

        # Verify Xvfb started (it exits at once if the display is taken)
        if xvfb.poll() is not None:
            stderr = xvfb.stderr.read().decode() if xvfb.stderr else ""
            raise RuntimeError(f"Xvfb failed to start on {display}: {stderr}")

        # Start x11vnc (with clean env to avoid Wayland detection)
        vnc_display_port = 5900 + display_num
        x11vnc = subprocess.Popen(
            [
                "x11vnc", "-display", display,
                "-rfbport", str(vnc_display_port),
                "-nopw", "-forever", "-shared",
            ],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            env=clean_env,
        )
        pids["x11vnc"] = x11vnc.pid
        time.sleep(1)

        # Start websockify (noVNC proxy); web asset path varies by distro
        novnc_path = "/usr/share/novnc"
        if not os.path.isdir(novnc_path):
            novnc_path = "/usr/share/webapps/novnc"  # Arch
        websockify = subprocess.Popen(
            [
                "websockify", "--web", novnc_path,
                str(vnc_port), f"localhost:{vnc_display_port}",
            ],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        pids["websockify"] = websockify.pid
        time.sleep(1)

        # Start Firefox on the virtual display, pointed at Google login
        firefox_bin = find_firefox()
        firefox = subprocess.Popen(
            [
                firefox_bin,
                "--profile", profile_dir,
                "https://accounts.google.com/ServiceLogin?continue=https://music.youtube.com",
            ],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            env=clean_env,
        )
        pids["firefox"] = firefox.pid

    except FileNotFoundError as e:
        # A required binary is missing; tear down whatever we started.
        _kill_started()
        json.dump({"status": "error", "error": f"missing dependency: {e}"}, sys.stdout)
        return
    except Exception as e:
        _kill_started()
        json.dump({"status": "error", "error": str(e)}, sys.stdout)
        return

    # Save PIDs so login-stop can clean up this session
    with open(PID_FILE, "w") as f:
        json.dump(pids, f)

    vnc_url = f"http://localhost:{vnc_port}/vnc.html?autoconnect=true"
    json.dump({
        "status": "running",
        "vnc_url": vnc_url,
        "pids": pids,
    }, sys.stdout)
|
||||||
|
|
||||||
|
|
||||||
|
def cmd_login_stop(args):
    """Stop all login session processes and export cookies."""
    if not os.path.exists(PID_FILE):
        json.dump({"status": "error", "error": "no active login session"}, sys.stdout)
        return

    with open(PID_FILE) as f:
        pids = json.load(f)

    # Terminate in reverse startup order: browser first, X server last.
    for name in ("firefox", "websockify", "x11vnc", "xvfb"):
        pid = pids.get(name)
        if not pid:
            continue
        try:
            os.kill(pid, signal.SIGTERM)
        except ProcessLookupError:
            pass

    # Give Firefox a moment to flush cookies to disk before we export.
    time.sleep(2)

    # Anything that ignored SIGTERM gets SIGKILL.
    for pid in pids.values():
        try:
            os.kill(pid, signal.SIGKILL)
        except ProcessLookupError:
            pass

    os.unlink(PID_FILE)

    # Export cookies if profile dir and output were provided.
    if args.profile_dir and args.cookies_output:
        result = export_cookies(args.profile_dir, args.cookies_output)
        result["status"] = "stopped"
        json.dump(result, sys.stdout)
    else:
        json.dump({"status": "stopped"}, sys.stdout)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Parse CLI arguments and dispatch to the matching subcommand handler."""
    parser = argparse.ArgumentParser(description="Shanty YouTube cookie manager")
    sub = parser.add_subparsers(dest="command", required=True)

    # login-start: positional profile dir and noVNC port
    login_start = sub.add_parser("login-start")
    login_start.add_argument("profile_dir")
    login_start.add_argument("vnc_port")

    # login-stop: both arguments optional (export is skipped when absent)
    login_stop = sub.add_parser("login-stop")
    login_stop.add_argument("--profile-dir", dest="profile_dir", default=None)
    login_stop.add_argument("--cookies-output", dest="cookies_output", default=None)

    # refresh and export share the same positional signature
    for name in ("refresh", "export"):
        cmd = sub.add_parser(name)
        cmd.add_argument("profile_dir")
        cmd.add_argument("cookies_output")

    # status: profile dir only
    sub.add_parser("status").add_argument("profile_dir")

    args = parser.parse_args()

    # Dispatch table: subcommand name -> handler function.
    handlers = {
        "login-start": cmd_login_start,
        "login-stop": cmd_login_stop,
        "refresh": cmd_refresh,
        "export": cmd_export,
        "status": cmd_status,
    }
    handlers[args.command](args)


if __name__ == "__main__":
    main()
|
||||||
@@ -80,7 +80,9 @@ impl std::str::FromStr for AudioFormat {
|
|||||||
"mp3" => Ok(AudioFormat::Mp3),
|
"mp3" => Ok(AudioFormat::Mp3),
|
||||||
"flac" => Ok(AudioFormat::Flac),
|
"flac" => Ok(AudioFormat::Flac),
|
||||||
"best" => Ok(AudioFormat::Best),
|
"best" => Ok(AudioFormat::Best),
|
||||||
_ => Err(format!("unsupported format: {s} (expected opus, mp3, flac, or best)")),
|
_ => Err(format!(
|
||||||
|
"unsupported format: {s} (expected opus, mp3, flac, or best)"
|
||||||
|
)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -98,7 +100,10 @@ pub trait DownloadBackend: Send + Sync {
|
|||||||
fn check_available(&self) -> impl std::future::Future<Output = DlResult<()>> + Send;
|
fn check_available(&self) -> impl std::future::Future<Output = DlResult<()>> + Send;
|
||||||
|
|
||||||
/// Search for tracks matching a query.
|
/// Search for tracks matching a query.
|
||||||
fn search(&self, query: &str) -> impl std::future::Future<Output = DlResult<Vec<SearchResult>>> + Send;
|
fn search(
|
||||||
|
&self,
|
||||||
|
query: &str,
|
||||||
|
) -> impl std::future::Future<Output = DlResult<Vec<SearchResult>>> + Send;
|
||||||
|
|
||||||
/// Download a target to the configured output directory.
|
/// Download a target to the configured output directory.
|
||||||
fn download(
|
fn download(
|
||||||
|
|||||||
@@ -35,9 +35,7 @@ impl DlError {
|
|||||||
pub fn is_transient(&self) -> bool {
|
pub fn is_transient(&self) -> bool {
|
||||||
matches!(
|
matches!(
|
||||||
self,
|
self,
|
||||||
DlError::RateLimited(_)
|
DlError::RateLimited(_) | DlError::Io(_) | DlError::BackendError(_)
|
||||||
| DlError::Io(_)
|
|
||||||
| DlError::BackendError(_)
|
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,7 +9,12 @@ pub mod queue;
|
|||||||
pub mod rate_limit;
|
pub mod rate_limit;
|
||||||
pub mod ytdlp;
|
pub mod ytdlp;
|
||||||
|
|
||||||
pub use backend::{AudioFormat, BackendConfig, DownloadBackend, DownloadResult, DownloadTarget, SearchResult};
|
pub use backend::{
|
||||||
|
AudioFormat, BackendConfig, DownloadBackend, DownloadResult, DownloadTarget, SearchResult,
|
||||||
|
};
|
||||||
pub use error::{DlError, DlResult};
|
pub use error::{DlError, DlResult};
|
||||||
pub use queue::{DlStats, download_single, run_queue};
|
pub use queue::{
|
||||||
|
DlStats, ProgressFn, SyncStats, download_single, run_queue, run_queue_with_progress,
|
||||||
|
sync_wanted_to_queue,
|
||||||
|
};
|
||||||
pub use ytdlp::{SearchSource, YtDlpBackend};
|
pub use ytdlp::{SearchSource, YtDlpBackend};
|
||||||
|
|||||||
37
src/main.rs
37
src/main.rs
@@ -105,6 +105,12 @@ enum QueueAction {
|
|||||||
},
|
},
|
||||||
/// Retry all failed downloads.
|
/// Retry all failed downloads.
|
||||||
Retry,
|
Retry,
|
||||||
|
/// Sync wanted items from the watchlist to the download queue.
|
||||||
|
Sync {
|
||||||
|
/// Preview what would be enqueued without doing it.
|
||||||
|
#[arg(long)]
|
||||||
|
dry_run: bool,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
fn default_database_url() -> String {
|
fn default_database_url() -> String {
|
||||||
@@ -150,9 +156,7 @@ fn make_backend_config(
|
|||||||
output: &Option<PathBuf>,
|
output: &Option<PathBuf>,
|
||||||
cookies: &Option<PathBuf>,
|
cookies: &Option<PathBuf>,
|
||||||
) -> anyhow::Result<BackendConfig> {
|
) -> anyhow::Result<BackendConfig> {
|
||||||
let fmt: AudioFormat = format
|
let fmt: AudioFormat = format.parse().map_err(|e: String| anyhow::anyhow!(e))?;
|
||||||
.parse()
|
|
||||||
.map_err(|e: String| anyhow::anyhow!(e))?;
|
|
||||||
Ok(BackendConfig {
|
Ok(BackendConfig {
|
||||||
output_dir: output.clone().unwrap_or_else(default_output_dir),
|
output_dir: output.clone().unwrap_or_else(default_output_dir),
|
||||||
format: fmt,
|
format: fmt,
|
||||||
@@ -193,13 +197,12 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
let config = make_backend_config(&format, &output, &cookies)?;
|
let config = make_backend_config(&format, &output, &cookies)?;
|
||||||
|
|
||||||
// Determine if it's a URL or a search query
|
// Determine if it's a URL or a search query
|
||||||
let target = if query_or_url.starts_with("http://")
|
let target =
|
||||||
|| query_or_url.starts_with("https://")
|
if query_or_url.starts_with("http://") || query_or_url.starts_with("https://") {
|
||||||
{
|
DownloadTarget::Url(query_or_url)
|
||||||
DownloadTarget::Url(query_or_url)
|
} else {
|
||||||
} else {
|
DownloadTarget::Query(query_or_url)
|
||||||
DownloadTarget::Query(query_or_url)
|
};
|
||||||
};
|
|
||||||
|
|
||||||
download_single(&backend, target, &config, dry_run).await?;
|
download_single(&backend, target, &config, dry_run).await?;
|
||||||
}
|
}
|
||||||
@@ -229,7 +232,8 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
println!("\nQueue processing complete: {stats}");
|
println!("\nQueue processing complete: {stats}");
|
||||||
}
|
}
|
||||||
QueueAction::Add { query } => {
|
QueueAction::Add { query } => {
|
||||||
let item = queries::downloads::enqueue(db.conn(), &query, None, "ytdlp").await?;
|
let item =
|
||||||
|
queries::downloads::enqueue(db.conn(), &query, None, "ytdlp").await?;
|
||||||
println!("Added to queue: id={}, query=\"{}\"", item.id, item.query);
|
println!("Added to queue: id={}, query=\"{}\"", item.id, item.query);
|
||||||
}
|
}
|
||||||
QueueAction::List { status } => {
|
QueueAction::List { status } => {
|
||||||
@@ -248,8 +252,8 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
println!("Queue is empty.");
|
println!("Queue is empty.");
|
||||||
} else {
|
} else {
|
||||||
println!(
|
println!(
|
||||||
"{:<5} {:<12} {:<6} {:<40} {}",
|
"{:<5} {:<12} {:<6} {:<40} ERROR",
|
||||||
"ID", "STATUS", "RETRY", "QUERY", "ERROR"
|
"ID", "STATUS", "RETRY", "QUERY"
|
||||||
);
|
);
|
||||||
for item in &items {
|
for item in &items {
|
||||||
println!(
|
println!(
|
||||||
@@ -276,6 +280,13 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
println!("Requeued {} failed downloads.", failed.len());
|
println!("Requeued {} failed downloads.", failed.len());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
QueueAction::Sync { dry_run } => {
|
||||||
|
if dry_run {
|
||||||
|
println!("DRY RUN — no items will be enqueued");
|
||||||
|
}
|
||||||
|
let stats = shanty_dl::sync_wanted_to_queue(db.conn(), dry_run).await?;
|
||||||
|
println!("\nSync complete: {stats}");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
154
src/queue.rs
154
src/queue.rs
@@ -1,7 +1,7 @@
|
|||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::time::Duration;
|
use std::time::Duration;
|
||||||
|
|
||||||
use sea_orm::DatabaseConnection;
|
use sea_orm::{ActiveValue::Set, DatabaseConnection};
|
||||||
|
|
||||||
use shanty_db::entities::download_queue::DownloadStatus;
|
use shanty_db::entities::download_queue::DownloadStatus;
|
||||||
use shanty_db::entities::wanted_item::WantedStatus;
|
use shanty_db::entities::wanted_item::WantedStatus;
|
||||||
@@ -39,15 +39,39 @@ const RETRY_DELAYS: [Duration; 3] = [
|
|||||||
Duration::from_secs(600),
|
Duration::from_secs(600),
|
||||||
];
|
];
|
||||||
|
|
||||||
|
/// Progress callback: (current, total, message).
|
||||||
|
pub type ProgressFn = Box<dyn Fn(u64, u64, &str) + Send + Sync>;
|
||||||
|
|
||||||
/// Process all pending items in the download queue.
|
/// Process all pending items in the download queue.
|
||||||
pub async fn run_queue(
|
pub async fn run_queue(
|
||||||
conn: &DatabaseConnection,
|
conn: &DatabaseConnection,
|
||||||
backend: &impl DownloadBackend,
|
backend: &impl DownloadBackend,
|
||||||
config: &BackendConfig,
|
config: &BackendConfig,
|
||||||
dry_run: bool,
|
dry_run: bool,
|
||||||
|
) -> DlResult<DlStats> {
|
||||||
|
run_queue_with_progress(conn, backend, config, dry_run, None).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Process all pending items in the download queue, with optional progress reporting.
|
||||||
|
pub async fn run_queue_with_progress(
|
||||||
|
conn: &DatabaseConnection,
|
||||||
|
backend: &impl DownloadBackend,
|
||||||
|
config: &BackendConfig,
|
||||||
|
dry_run: bool,
|
||||||
|
on_progress: Option<ProgressFn>,
|
||||||
) -> DlResult<DlStats> {
|
) -> DlResult<DlStats> {
|
||||||
let mut stats = DlStats::default();
|
let mut stats = DlStats::default();
|
||||||
|
|
||||||
|
// Count total for progress reporting
|
||||||
|
let total = queries::downloads::list(conn, Some(DownloadStatus::Pending))
|
||||||
|
.await
|
||||||
|
.map(|v| v.len() as u64)
|
||||||
|
.unwrap_or(0);
|
||||||
|
|
||||||
|
if let Some(ref cb) = on_progress {
|
||||||
|
cb(0, total, "Starting downloads...");
|
||||||
|
}
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
let item = match queries::downloads::get_next_pending(conn).await? {
|
let item = match queries::downloads::get_next_pending(conn).await? {
|
||||||
Some(item) => item,
|
Some(item) => item,
|
||||||
@@ -56,6 +80,14 @@ pub async fn run_queue(
|
|||||||
|
|
||||||
stats.downloads_attempted += 1;
|
stats.downloads_attempted += 1;
|
||||||
|
|
||||||
|
if let Some(ref cb) = on_progress {
|
||||||
|
cb(
|
||||||
|
stats.downloads_attempted,
|
||||||
|
total,
|
||||||
|
&format!("Downloading: {}", item.query),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
tracing::info!(
|
tracing::info!(
|
||||||
id = item.id,
|
id = item.id,
|
||||||
query = %item.query,
|
query = %item.query,
|
||||||
@@ -78,8 +110,7 @@ pub async fn run_queue(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Mark as downloading
|
// Mark as downloading
|
||||||
queries::downloads::update_status(conn, item.id, DownloadStatus::Downloading, None)
|
queries::downloads::update_status(conn, item.id, DownloadStatus::Downloading, None).await?;
|
||||||
.await?;
|
|
||||||
|
|
||||||
// Determine download target
|
// Determine download target
|
||||||
let target = if let Some(ref url) = item.source_url {
|
let target = if let Some(ref url) = item.source_url {
|
||||||
@@ -98,22 +129,39 @@ pub async fn run_queue(
|
|||||||
"download completed"
|
"download completed"
|
||||||
);
|
);
|
||||||
|
|
||||||
queries::downloads::update_status(
|
queries::downloads::update_status(conn, item.id, DownloadStatus::Completed, None)
|
||||||
conn,
|
.await?;
|
||||||
item.id,
|
|
||||||
DownloadStatus::Completed,
|
|
||||||
None,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
// Update wanted item status if linked
|
// Update wanted item status and create track record with MBID
|
||||||
if let Some(wanted_id) = item.wanted_item_id {
|
if let Some(wanted_id) = item.wanted_item_id
|
||||||
if let Err(e) = queries::wanted::update_status(
|
&& let Ok(wanted) = queries::wanted::get_by_id(conn, wanted_id).await
|
||||||
conn,
|
{
|
||||||
wanted_id,
|
// Create a track record with the MBID so the tagger
|
||||||
WantedStatus::Downloaded,
|
// can skip searching and go straight to the right recording
|
||||||
)
|
let now = chrono::Utc::now().naive_utc();
|
||||||
.await
|
let file_path = result.file_path.to_string_lossy().to_string();
|
||||||
|
let file_size = std::fs::metadata(&result.file_path)
|
||||||
|
.map(|m| m.len() as i64)
|
||||||
|
.unwrap_or(0);
|
||||||
|
|
||||||
|
let track_active = shanty_db::entities::track::ActiveModel {
|
||||||
|
file_path: Set(file_path),
|
||||||
|
title: Set(Some(result.title.clone())),
|
||||||
|
artist: Set(result.artist.clone()),
|
||||||
|
file_size: Set(file_size),
|
||||||
|
musicbrainz_id: Set(wanted.musicbrainz_id.clone()),
|
||||||
|
artist_id: Set(wanted.artist_id),
|
||||||
|
added_at: Set(now),
|
||||||
|
updated_at: Set(now),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
if let Err(e) = queries::tracks::upsert(conn, track_active).await {
|
||||||
|
tracing::warn!(error = %e, "failed to create track record after download");
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Err(e) =
|
||||||
|
queries::wanted::update_status(conn, wanted_id, WantedStatus::Downloaded)
|
||||||
|
.await
|
||||||
{
|
{
|
||||||
tracing::warn!(
|
tracing::warn!(
|
||||||
wanted_id = wanted_id,
|
wanted_id = wanted_id,
|
||||||
@@ -170,6 +218,76 @@ pub async fn run_queue(
|
|||||||
Ok(stats)
|
Ok(stats)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Sync stats from a queue sync operation.
|
||||||
|
#[derive(Debug, Default)]
|
||||||
|
pub struct SyncStats {
|
||||||
|
pub found: u64,
|
||||||
|
pub enqueued: u64,
|
||||||
|
pub skipped: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for SyncStats {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"found: {}, enqueued: {}, skipped (already queued): {}",
|
||||||
|
self.found, self.enqueued, self.skipped,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Sync wanted items to the download queue.
|
||||||
|
/// Finds all Track-type Wanted items and enqueues them for download,
|
||||||
|
/// skipping any that already have a queue entry.
|
||||||
|
pub async fn sync_wanted_to_queue(conn: &DatabaseConnection, dry_run: bool) -> DlResult<SyncStats> {
|
||||||
|
let wanted = queries::wanted::list(conn, Some(WantedStatus::Wanted), None).await?;
|
||||||
|
let mut stats = SyncStats::default();
|
||||||
|
|
||||||
|
for item in &wanted {
|
||||||
|
stats.found += 1;
|
||||||
|
|
||||||
|
// Build search query from the name + artist
|
||||||
|
let artist_name = if let Some(id) = item.artist_id {
|
||||||
|
queries::artists::get_by_id(conn, id)
|
||||||
|
.await
|
||||||
|
.map(|a| a.name)
|
||||||
|
.ok()
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
let query = match artist_name {
|
||||||
|
Some(ref artist) if !item.name.is_empty() => format!("{} {}", artist, item.name),
|
||||||
|
_ if !item.name.is_empty() => item.name.clone(),
|
||||||
|
Some(ref artist) => artist.clone(),
|
||||||
|
None => {
|
||||||
|
tracing::warn!(id = item.id, "cannot build query — no name or artist");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check if already queued
|
||||||
|
if let Some(_existing) = queries::downloads::find_by_wanted_item_id(conn, item.id).await? {
|
||||||
|
tracing::debug!(id = item.id, name = %item.name, "already in queue, skipping");
|
||||||
|
stats.skipped += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if dry_run {
|
||||||
|
println!("Would enqueue: {query}");
|
||||||
|
stats.enqueued += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
queries::downloads::enqueue(conn, &query, Some(item.id), "ytdlp").await?;
|
||||||
|
tracing::info!(id = item.id, query = %query, "enqueued for download");
|
||||||
|
stats.enqueued += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
tracing::info!(%stats, "sync complete");
|
||||||
|
Ok(stats)
|
||||||
|
}
|
||||||
|
|
||||||
/// Download a single item directly (not from queue).
|
/// Download a single item directly (not from queue).
|
||||||
pub async fn download_single(
|
pub async fn download_single(
|
||||||
backend: &impl DownloadBackend,
|
backend: &impl DownloadBackend,
|
||||||
|
|||||||
@@ -42,8 +42,7 @@ impl RateLimiter {
|
|||||||
state.remaining -= 1;
|
state.remaining -= 1;
|
||||||
|
|
||||||
// Warn when approaching the limit
|
// Warn when approaching the limit
|
||||||
let pct_remaining =
|
let pct_remaining = (state.remaining as f64 / self.max_per_hour as f64) * 100.0;
|
||||||
(state.remaining as f64 / self.max_per_hour as f64) * 100.0;
|
|
||||||
if pct_remaining < 10.0 && pct_remaining > 0.0 {
|
if pct_remaining < 10.0 && pct_remaining > 0.0 {
|
||||||
tracing::warn!(
|
tracing::warn!(
|
||||||
remaining = state.remaining,
|
remaining = state.remaining,
|
||||||
|
|||||||
45
src/ytdlp.rs
45
src/ytdlp.rs
@@ -4,7 +4,9 @@ use std::process::Stdio;
|
|||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
use tokio::process::Command;
|
use tokio::process::Command;
|
||||||
|
|
||||||
use crate::backend::{BackendConfig, DownloadBackend, DownloadResult, DownloadTarget, SearchResult};
|
use crate::backend::{
|
||||||
|
BackendConfig, DownloadBackend, DownloadResult, DownloadTarget, SearchResult,
|
||||||
|
};
|
||||||
use crate::error::{DlError, DlResult};
|
use crate::error::{DlError, DlResult};
|
||||||
use crate::rate_limit::RateLimiter;
|
use crate::rate_limit::RateLimiter;
|
||||||
|
|
||||||
@@ -22,7 +24,9 @@ impl std::str::FromStr for SearchSource {
|
|||||||
match s.to_lowercase().as_str() {
|
match s.to_lowercase().as_str() {
|
||||||
"ytmusic" | "youtube_music" | "youtubemusic" => Ok(SearchSource::YouTubeMusic),
|
"ytmusic" | "youtube_music" | "youtubemusic" => Ok(SearchSource::YouTubeMusic),
|
||||||
"youtube" | "yt" => Ok(SearchSource::YouTube),
|
"youtube" | "yt" => Ok(SearchSource::YouTube),
|
||||||
_ => Err(format!("unknown search source: {s} (expected ytmusic or youtube)")),
|
_ => Err(format!(
|
||||||
|
"unknown search source: {s} (expected ytmusic or youtube)"
|
||||||
|
)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -63,9 +67,10 @@ impl YtDlpBackend {
|
|||||||
self.rate_limiter.acquire().await;
|
self.rate_limiter.acquire().await;
|
||||||
|
|
||||||
let mut cmd = Command::new("yt-dlp");
|
let mut cmd = Command::new("yt-dlp");
|
||||||
cmd.args(args)
|
cmd.args(args).stdout(Stdio::piped()).stderr(Stdio::piped());
|
||||||
.stdout(Stdio::piped())
|
|
||||||
.stderr(Stdio::piped());
|
// Allow yt-dlp to fetch updated JS challenge solver scripts
|
||||||
|
cmd.args(["--remote-components", "ejs:github"]);
|
||||||
|
|
||||||
// Add cookies if configured
|
// Add cookies if configured
|
||||||
if let Some(ref cookies) = self.cookies_path {
|
if let Some(ref cookies) = self.cookies_path {
|
||||||
@@ -102,7 +107,7 @@ impl YtDlpBackend {
|
|||||||
tracing::debug!(query = query, "searching YouTube Music via ytmusicapi");
|
tracing::debug!(query = query, "searching YouTube Music via ytmusicapi");
|
||||||
|
|
||||||
let output = Command::new("python3")
|
let output = Command::new("python3")
|
||||||
.args([script.to_str().unwrap(), "search", query])
|
.args([&*script.to_string_lossy(), "search", query])
|
||||||
.stdout(Stdio::piped())
|
.stdout(Stdio::piped())
|
||||||
.stderr(Stdio::piped())
|
.stderr(Stdio::piped())
|
||||||
.output()
|
.output()
|
||||||
@@ -120,7 +125,9 @@ impl YtDlpBackend {
|
|||||||
if !output.status.success() {
|
if !output.status.success() {
|
||||||
let stderr = String::from_utf8_lossy(&output.stderr).to_string();
|
let stderr = String::from_utf8_lossy(&output.stderr).to_string();
|
||||||
tracing::warn!(stderr = %stderr, "ytmusic search failed");
|
tracing::warn!(stderr = %stderr, "ytmusic search failed");
|
||||||
return Err(DlError::BackendError(format!("ytmusic search failed: {stderr}")));
|
return Err(DlError::BackendError(format!(
|
||||||
|
"ytmusic search failed: {stderr}"
|
||||||
|
)));
|
||||||
}
|
}
|
||||||
|
|
||||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||||
@@ -180,17 +187,17 @@ impl YtDlpBackend {
|
|||||||
fn find_ytmusic_script(&self) -> DlResult<PathBuf> {
|
fn find_ytmusic_script(&self) -> DlResult<PathBuf> {
|
||||||
// Check next to the current executable
|
// Check next to the current executable
|
||||||
if let Ok(exe) = std::env::current_exe() {
|
if let Ok(exe) = std::env::current_exe() {
|
||||||
let beside_exe = exe.parent().unwrap_or(std::path::Path::new(".")).join("ytmusic_search.py");
|
let beside_exe = exe
|
||||||
|
.parent()
|
||||||
|
.unwrap_or(std::path::Path::new("."))
|
||||||
|
.join("ytmusic_search.py");
|
||||||
if beside_exe.exists() {
|
if beside_exe.exists() {
|
||||||
return Ok(beside_exe);
|
return Ok(beside_exe);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check common install locations
|
// Check common install locations
|
||||||
for dir in &[
|
for dir in &["/usr/share/shanty", "/usr/local/share/shanty"] {
|
||||||
"/usr/share/shanty",
|
|
||||||
"/usr/local/share/shanty",
|
|
||||||
] {
|
|
||||||
let path = PathBuf::from(dir).join("ytmusic_search.py");
|
let path = PathBuf::from(dir).join("ytmusic_search.py");
|
||||||
if path.exists() {
|
if path.exists() {
|
||||||
return Ok(path);
|
return Ok(path);
|
||||||
@@ -280,10 +287,10 @@ impl DownloadBackend for YtDlpBackend {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// Build output template
|
// Build output template — include artist if available for filename-based metadata fallback
|
||||||
let output_template = config
|
let output_template = config
|
||||||
.output_dir
|
.output_dir
|
||||||
.join("%(title)s.%(ext)s")
|
.join("%(artist,uploader,channel)s - %(title)s.%(ext)s")
|
||||||
.to_string_lossy()
|
.to_string_lossy()
|
||||||
.to_string();
|
.to_string();
|
||||||
|
|
||||||
@@ -303,10 +310,7 @@ impl DownloadBackend for YtDlpBackend {
|
|||||||
|
|
||||||
// Add cookies from backend config or backend's own cookies
|
// Add cookies from backend config or backend's own cookies
|
||||||
let cookies_str;
|
let cookies_str;
|
||||||
let cookies_path = config
|
let cookies_path = config.cookies_path.as_ref().or(self.cookies_path.as_ref());
|
||||||
.cookies_path
|
|
||||||
.as_ref()
|
|
||||||
.or(self.cookies_path.as_ref());
|
|
||||||
if let Some(c) = cookies_path {
|
if let Some(c) = cookies_path {
|
||||||
cookies_str = c.to_string_lossy().to_string();
|
cookies_str = c.to_string_lossy().to_string();
|
||||||
args.push("--cookies");
|
args.push("--cookies");
|
||||||
@@ -318,9 +322,8 @@ impl DownloadBackend for YtDlpBackend {
|
|||||||
let output = self.run_ytdlp(&args).await?;
|
let output = self.run_ytdlp(&args).await?;
|
||||||
|
|
||||||
// Parse the JSON output to get the actual file path
|
// Parse the JSON output to get the actual file path
|
||||||
let info: YtDlpDownloadInfo = serde_json::from_str(output.trim()).map_err(|e| {
|
let info: YtDlpDownloadInfo = serde_json::from_str(output.trim())
|
||||||
DlError::BackendError(format!("failed to parse yt-dlp output: {e}"))
|
.map_err(|e| DlError::BackendError(format!("failed to parse yt-dlp output: {e}")))?;
|
||||||
})?;
|
|
||||||
|
|
||||||
// --print-json reports the pre-extraction filename (e.g. .webm),
|
// --print-json reports the pre-extraction filename (e.g. .webm),
|
||||||
// but --extract-audio produces a file with the target format extension.
|
// but --extract-audio produces a file with the target format extension.
|
||||||
|
|||||||
@@ -1,8 +1,10 @@
|
|||||||
use shanty_db::entities::download_queue::DownloadStatus;
|
use shanty_db::entities::download_queue::DownloadStatus;
|
||||||
use shanty_db::{Database, queries};
|
use shanty_db::{Database, queries};
|
||||||
use shanty_dl::backend::{AudioFormat, BackendConfig, DownloadBackend, DownloadResult, DownloadTarget, SearchResult};
|
use shanty_dl::backend::{
|
||||||
|
AudioFormat, BackendConfig, DownloadBackend, DownloadResult, DownloadTarget, SearchResult,
|
||||||
|
};
|
||||||
use shanty_dl::error::DlResult;
|
use shanty_dl::error::DlResult;
|
||||||
use shanty_dl::queue::{run_queue, download_single};
|
use shanty_dl::queue::{download_single, run_queue};
|
||||||
use tempfile::TempDir;
|
use tempfile::TempDir;
|
||||||
|
|
||||||
/// Mock backend for testing without yt-dlp.
|
/// Mock backend for testing without yt-dlp.
|
||||||
@@ -84,7 +86,9 @@ async fn test_queue_process_success() {
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
// Process queue
|
// Process queue
|
||||||
let stats = run_queue(db.conn(), &backend, &config, false).await.unwrap();
|
let stats = run_queue(db.conn(), &backend, &config, false)
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
assert_eq!(stats.downloads_attempted, 1);
|
assert_eq!(stats.downloads_attempted, 1);
|
||||||
assert_eq!(stats.downloads_completed, 1);
|
assert_eq!(stats.downloads_completed, 1);
|
||||||
assert_eq!(stats.downloads_failed, 0);
|
assert_eq!(stats.downloads_failed, 0);
|
||||||
@@ -112,7 +116,9 @@ async fn test_queue_process_failure() {
|
|||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
let stats = run_queue(db.conn(), &backend, &config, false).await.unwrap();
|
let stats = run_queue(db.conn(), &backend, &config, false)
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
assert_eq!(stats.downloads_attempted, 1);
|
assert_eq!(stats.downloads_attempted, 1);
|
||||||
assert_eq!(stats.downloads_failed, 1);
|
assert_eq!(stats.downloads_failed, 1);
|
||||||
|
|
||||||
@@ -218,9 +224,20 @@ async fn test_wanted_item_status_updated_on_download() {
|
|||||||
};
|
};
|
||||||
|
|
||||||
// Create a wanted item
|
// Create a wanted item
|
||||||
let wanted = queries::wanted::add(db.conn(), ItemType::Track, None, None, None)
|
let wanted = queries::wanted::add(
|
||||||
.await
|
db.conn(),
|
||||||
.unwrap();
|
queries::wanted::AddWantedItem {
|
||||||
|
item_type: ItemType::Track,
|
||||||
|
name: "Wanted Song",
|
||||||
|
musicbrainz_id: None,
|
||||||
|
artist_id: None,
|
||||||
|
album_id: None,
|
||||||
|
track_id: None,
|
||||||
|
user_id: None,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
assert_eq!(wanted.status, WantedStatus::Wanted);
|
assert_eq!(wanted.status, WantedStatus::Wanted);
|
||||||
|
|
||||||
// Enqueue download linked to the wanted item
|
// Enqueue download linked to the wanted item
|
||||||
@@ -229,7 +246,9 @@ async fn test_wanted_item_status_updated_on_download() {
|
|||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
// Process queue
|
// Process queue
|
||||||
run_queue(db.conn(), &backend, &config, false).await.unwrap();
|
run_queue(db.conn(), &backend, &config, false)
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
// Wanted item should now be Downloaded
|
// Wanted item should now be Downloaded
|
||||||
let updated = queries::wanted::get_by_id(db.conn(), wanted.id)
|
let updated = queries::wanted::get_by_id(db.conn(), wanted.id)
|
||||||
|
|||||||
Reference in New Issue
Block a user