// yt - A fully featured command line YouTube client
//
// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
// SPDX-License-Identifier: GPL-3.0-or-later
//
// This file is part of Yt.
//
// You should have received a copy of the License along with this program.
// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
use std::{collections::HashMap, sync::Arc, time::Duration};
use crate::{
app::App,
download::download_options::download_opts,
storage::video_database::{
downloader::{get_next_uncached_video, set_video_cache_path},
extractor_hash::ExtractorHash,
getters::get_video_yt_dlp_opts,
Video, YtDlpOptions,
},
};
use anyhow::{bail, Context, Result};
use futures::{future::BoxFuture, FutureExt};
use log::{debug, info, warn};
use tokio::{fs, task::JoinHandle, time};
mod download_options;
/// Handle to a single in-flight video download.
///
/// Pairs the spawned download task with the extractor hash of the video it is
/// caching, so the task can later be compared against the database's next
/// video and aborted/replaced if they differ.
#[derive(Debug)]
pub struct CurrentDownload {
    /// The spawned tokio task running `Downloader::actually_cache_video`.
    task_handle: JoinHandle<Result<()>>,
    /// Identifies which video the task above is downloading.
    extractor_hash: ExtractorHash,
}
impl CurrentDownload {
    /// Spawn a background task that caches `video` and bundle the task handle
    /// together with the video's extractor hash, so the download can later be
    /// identified or aborted.
    fn new_from_video(app: Arc<App>, video: Video) -> Self {
        let hash = video.extractor_hash.clone();

        // The task owns `app` and `video`; its `Result` is surfaced when the
        // handle is awaited.
        let handle = tokio::spawn(async move {
            Downloader::actually_cache_video(&app, &video)
                .await
                .with_context(|| format!("Failed to cache video: '{}'", video.title))
        });

        Self {
            task_handle: handle,
            extractor_hash: hash,
        }
    }
}
/// Keeps the video cache filled by downloading uncached videos, one at a time.
pub struct Downloader {
    /// The download currently in progress, if any.
    current_download: Option<CurrentDownload>,
    /// Memoized approximate video sizes, keyed by extractor hash, so the size
    /// of each video only has to be fetched once.
    video_size_cache: HashMap<ExtractorHash, u64>,
}
impl Downloader {
pub fn new() -> Self {
Self {
current_download: None,
video_size_cache: HashMap::new(),
}
}
/// The entry point to the Downloader.
/// This Downloader will periodically check if the database has changed, and then also
/// change which videos it downloads.
/// This will run, until the database doesn't contain any watchable videos
pub async fn consume(&mut self, app: Arc<App>, max_cache_size: u64) -> Result<()> {
while let Some(next_video) = get_next_uncached_video(&app).await? {
if Self::get_current_cache_allocation(&app).await?
+ self.get_approx_video_size(&app, &next_video).await?
>= max_cache_size
{
warn!(
"Can't download video: '{}' as it's too large for the cache.",
next_video.title
);
// Wait and hope, that a large video is deleted from the cache.
time::sleep(Duration::from_secs(10)).await;
continue;
}
if let Some(_) = &self.current_download {
let current_download = self.current_download.take().expect("Is Some");
if current_download.task_handle.is_finished() {
current_download.task_handle.await??;
continue;
}
if next_video.extractor_hash != current_download.extractor_hash {
info!(
"Noticed, that the next video is not the video being downloaded, replacing it ('{}' vs. '{}')!",
next_video.extractor_hash.into_short_hash(&app).await?, current_download.extractor_hash.into_short_hash(&app).await?
);
// Replace the currently downloading video
current_download.task_handle.abort();
let new_current_download =
CurrentDownload::new_from_video(Arc::clone(&app), next_video);
self.current_download = Some(new_current_download);
} else {
debug!(
"Currently downloading '{}'",
current_download
.extractor_hash
.into_short_hash(&app)
.await?
);
// Reset the taken value
self.current_download = Some(current_download);
}
} else {
info!(
"No video is being downloaded right now, setting it to '{}'",
next_video.title
);
let new_current_download =
CurrentDownload::new_from_video(Arc::clone(&app), next_video);
self.current_download = Some(new_current_download);
}
time::sleep(Duration::new(1, 0)).await;
}
info!("Finished downloading!");
Ok(())
}
async fn get_current_cache_allocation(app: &App) -> Result<u64> {
fn dir_size(mut dir: fs::ReadDir) -> BoxFuture<'static, Result<u64>> {
async move {
let mut acc = 0;
while let Some(entry) = dir.next_entry().await? {
let size = match entry.metadata().await? {
data if data.is_dir() => {
let path = entry.path();
let read_dir = fs::read_dir(path).await?;
dir_size(read_dir).await?
}
data => data.len(),
};
acc += size;
}
Ok(acc)
}
.boxed()
}
let val = dir_size(fs::read_dir(&app.config.paths.download_dir).await?).await;
if let Ok(val) = val.as_ref() {
info!("Cache dir has a size of '{}'", val);
}
val
}
async fn get_approx_video_size(&mut self, app: &App, video: &Video) -> Result<u64> {
if let Some(value) = self.video_size_cache.get(&video.extractor_hash) {
Ok(*value)
} else {
// the subtitle file size should be negligible
let add_opts = YtDlpOptions {
subtitle_langs: "".to_owned(),
};
let opts = &download_opts(&app, add_opts);
let result = yt_dlp::extract_info(&opts, &video.url, false, true)
.await
.with_context(|| {
format!("Failed to extract video information: '{}'", video.title)
})?;
let size = if let Some(val) = result.filesize {
val
} else if let Some(val) = result.filesize_approx {
val
} else {
bail!("Failed to find a filesize for video: '{}'", video.title);
};
assert_eq!(
self.video_size_cache
.insert(video.extractor_hash.clone(), size),
None
);
Ok(size)
}
}
async fn actually_cache_video(app: &App, video: &Video) -> Result<()> {
debug!("Download started: {}", &video.title);
let addional_opts = get_video_yt_dlp_opts(&app, &video.extractor_hash).await?;
let result = yt_dlp::download(&[video.url.clone()], &download_opts(&app, addional_opts))
.await
.with_context(|| format!("Failed to download video: '{}'", video.title))?;
assert_eq!(result.len(), 1);
let result = &result[0];
set_video_cache_path(app, &video.extractor_hash, Some(&result)).await?;
info!(
"Video '{}' was downlaoded to path: {}",
video.title,
result.display()
);
Ok(())
}
}