redux of the worker queue

This commit is contained in:
Connor Johnstone
2026-03-23 18:37:45 -04:00
parent 3494de1133
commit 9d1366f266
2 changed files with 38 additions and 0 deletions
+37
View File
@@ -118,6 +118,43 @@ async fn process_file(
    Ok(true)
}
/// Index a single file by path. Returns `Ok(Some(track_id))` if indexed,
/// `Ok(None)` if skipped (unchanged mtime), or an error.
pub async fn index_file(
    conn: &DatabaseConnection,
    file_path: &std::path::Path,
    dry_run: bool,
) -> IndexResult<Option<i32>> {
    let metadata = std::fs::metadata(file_path)?;

    // Derive a naive-UTC modification time for the file. If the filesystem
    // timestamp is unavailable, precedes the Unix epoch, or falls outside
    // chrono's representable range, fall back to the current time.
    let modified_at = metadata.modified().ok().and_then(|sys_time| {
        let secs = sys_time
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();
        chrono::DateTime::from_timestamp(secs as i64, 0)
    });
    let mtime = match modified_at {
        Some(dt) => dt.naive_utc(),
        None => chrono::Utc::now().naive_utc(),
    };

    let scanned = ScannedFile {
        path: file_path.to_owned(),
        file_size: metadata.len() as i64,
        mtime,
    };

    // `process_file` reports whether the file was actually (re)indexed.
    if process_file(conn, &scanned, dry_run).await? {
        // Look up the track we just created to return its ID.
        let path_str = file_path.to_string_lossy().to_string();
        let track = shanty_db::queries::tracks::get_by_path(conn, &path_str).await?;
        Ok(track.map(|t| t.id))
    } else {
        Ok(None)
    }
}
/// Run the full indexing pipeline: scan directory, extract metadata, upsert to DB.
pub async fn index_directory(
    conn: &DatabaseConnection,
+1
View File
@@ -10,6 +10,7 @@ pub mod metadata;
pub mod scanner;
pub use error::{IndexError, IndexResult};
pub use indexer::index_file;
use std::fmt;
use std::path::PathBuf;