author    Benedikt Peetz <benedikt.peetz@b-peetz.de>  2024-08-22 14:22:13 +0200
committer Benedikt Peetz <benedikt.peetz@b-peetz.de>  2024-08-22 14:22:13 +0200
commit    6bfc7ee06dc1a598014dd5bec659b14a3aa87bbd (patch)
tree      f12b4892214fd9cd0fbbd206abd6929179f75d2b /src/download
parent    test(benches/update): Init (diff)
feat(download): Support limiting the downloader by maximal cache size
Diffstat (limited to 'src/download')
-rw-r--r--  src/download/download_options.rs |  2
-rw-r--r--  src/download/mod.rs              | 96
2 files changed, 87 insertions, 11 deletions
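
Editor's note: the mechanism this commit adds is an admission check in Downloader::consume. Before starting the next download, the current size of the download directory plus the next video's estimated size is compared against max_cache_size, and the downloader waits if the sum would exceed the limit. The call site that supplies max_cache_size is not part of this diff; a minimal usage sketch under that assumption (the 3 GiB figure is arbitrary):

    // Hypothetical call site, not shown in this commit; only Downloader::new() and the
    // consume(app, max_cache_size) signature come from the diff below.
    async fn run_downloader(app: Arc<App>) -> Result<()> {
        let mut downloader = Downloader::new();
        // Keep at most 3 GiB of downloaded videos on disk (arbitrary example value).
        let max_cache_size: u64 = 3 * 1024 * 1024 * 1024;
        downloader.consume(app, max_cache_size).await
    }
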
diff --git a/src/download/download_options.rs b/src/download/download_options.rs
index 17cf66c..04c1600 100644
--- a/src/download/download_options.rs
+++ b/src/download/download_options.rs
@@ -50,7 +50,7 @@ pub fn download_opts(additional_opts: YtDlpOptions) -> serde_json::Map<String, s
       "writeautomaticsub": true,
 
       "outtmpl": {
-        "default": constants::download_dir().join("%(channel)s/%(title)s.%(ext)s"),
+        "default": constants::download_dir(false).expect("We're not creating this dir, thus this function can't error").join("%(channel)s/%(title)s.%(ext)s"),
         "chapter": "%(title)s - %(section_number)03d %(section_title)s [%(id)s].%(ext)s"
       },
       "compat_opts": {},
diff --git a/src/download/mod.rs b/src/download/mod.rs
index 3785876..c3d79b7 100644
--- a/src/download/mod.rs
+++ b/src/download/mod.rs
@@ -8,22 +8,24 @@
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
-use std::{sync::Arc, time::Duration};
+use std::{collections::HashMap, sync::Arc, time::Duration};
 
 use crate::{
     app::App,
+    constants::download_dir,
     download::download_options::download_opts,
     storage::video_database::{
         downloader::{get_next_uncached_video, set_video_cache_path},
         extractor_hash::ExtractorHash,
         getters::get_video_yt_dlp_opts,
-        Video,
+        Video, YtDlpOptions,
     },
 };
 
-use anyhow::{Context, Result};
-use log::{debug, info};
-use tokio::{task::JoinHandle, time};
+use anyhow::{bail, Context, Result};
+use futures::{future::BoxFuture, FutureExt};
+use log::{debug, info, warn};
+use tokio::{fs, task::JoinHandle, time};
 
 mod download_options;
 
@@ -53,12 +55,14 @@ impl CurrentDownload {
 
 pub struct Downloader {
     current_download: Option<CurrentDownload>,
+    video_size_cache: HashMap<ExtractorHash, u64>,
 }
 
 impl Downloader {
     pub fn new() -> Self {
         Self {
             current_download: None,
+            video_size_cache: HashMap::new(),
         }
     }
 
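
Editor's note: the new video_size_cache field memoizes one size estimate per ExtractorHash, so the relatively expensive yt_dlp metadata call in get_approx_video_size (added further down) runs at most once per video even though consume re-checks the size on every loop iteration. The shape of that memoization, as a standalone illustration rather than the commit's own code:

    use std::collections::HashMap;

    // Illustrative only; get_approx_video_size below does the same thing with an explicit
    // get()/insert() pair keyed by the video's ExtractorHash.
    fn cached_or_compute(cache: &mut HashMap<String, u64>, key: &str, compute: impl FnOnce() -> u64) -> u64 {
        match cache.get(key) {
            Some(size) => *size,
            None => {
                let size = compute();
                cache.insert(key.to_owned(), size);
                size
            }
        }
    }
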
@@ -67,7 +71,20 @@ impl Downloader {
     /// change which videos it downloads.
     /// This will run, until the database doesn't contain any watchable videos
     pub async fn consume(&mut self, app: Arc<App>, max_cache_size: u64) -> Result<()> {
-        while let Some(next_video) = get_next_uncached_video(app).await? {
+        while let Some(next_video) = get_next_uncached_video(&app).await? {
+            if Self::get_current_cache_allocation().await?
+                + self.get_approx_video_size(&next_video).await?
+                >= max_cache_size
+            {
+                warn!(
+                    "Can't download video: '{}' as it's too large for the cache.",
+                    next_video.title
+                );
+                // Wait and hope that a large video is deleted from the cache.
+                time::sleep(Duration::from_secs(10)).await;
+                continue;
+            }
+
             if let Some(_) = &self.current_download {
                 let current_download = self.current_download.take().expect("Is Some");
 
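
Editor's note: restated as a pure predicate, the admission check above downloads only if the current allocation plus the estimated size stays below max_cache_size. For example, with a 3 GiB limit, a cache already holding 2.5 GiB and a video estimated at 700 MiB, the sum reaches about 3.2 GiB, so the downloader logs the warning, sleeps ten seconds, and retries in the hope that something else has freed space. A sketch (not part of the commit, which performs the comparison inline):

    // Illustrative helper; the commit does this comparison inline in consume().
    fn fits_in_cache(current_allocation: u64, approx_video_size: u64, max_cache_size: u64) -> bool {
        current_allocation.saturating_add(approx_video_size) < max_cache_size
    }
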
@@ -99,7 +116,6 @@ impl Downloader {
                     );
                     // Reset the taken value
                     self.current_download = Some(current_download);
-                    time::sleep(Duration::new(1, 0)).await;
                 }
             } else {
                 info!(
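
Editor's note: the next hunk adds get_current_cache_allocation, whose inner dir_size walks the download directory recursively to sum up file sizes. It returns a BoxFuture because a plain async fn cannot call itself recursively: the compiler would need the future type to contain itself, so the recursive call only compiles once the future is boxed (type-erased) via FutureExt::boxed. A minimal self-contained version of that pattern, separate from this codebase:

    use futures::future::{BoxFuture, FutureExt};
    use std::path::PathBuf;
    use tokio::fs;

    // Recursively sum file sizes below `path`. The BoxFuture return type is what makes
    // the recursive dir_size(entry.path()).await call inside the async block compile.
    fn dir_size(path: PathBuf) -> BoxFuture<'static, std::io::Result<u64>> {
        async move {
            let mut total = 0;
            let mut entries = fs::read_dir(&path).await?;
            while let Some(entry) = entries.next_entry().await? {
                let meta = entry.metadata().await?;
                total += if meta.is_dir() {
                    dir_size(entry.path()).await?
                } else {
                    meta.len()
                };
            }
            Ok(total)
        }
        .boxed()
    }
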
@@ -111,15 +127,75 @@ impl Downloader {
                 self.current_download = Some(new_current_download);
             }
 
-            // if get_allocated_cache().await? < CONCURRENT {
-            //     .cache_video(next_video).await?;
-            // }
+            time::sleep(Duration::new(1, 0)).await;
         }
 
         info!("Finished downloading!");
         Ok(())
     }
 
+    async fn get_current_cache_allocation() -> Result<u64> {
+        fn dir_size(mut dir: fs::ReadDir) -> BoxFuture<'static, Result<u64>> {
+            async move {
+                let mut acc = 0;
+                while let Some(entry) = dir.next_entry().await? {
+                    let size = match entry.metadata().await? {
+                        data if data.is_dir() => {
+                            let path = entry.path();
+                            let read_dir = fs::read_dir(path).await?;
+
+                            dir_size(read_dir).await?
+                        }
+                        data => data.len(),
+                    };
+                    acc += size;
+                }
+                Ok(acc)
+            }
+            .boxed()
+        }
+
+        let val = dir_size(fs::read_dir(download_dir(true)?).await?).await;
+        if let Ok(val) = val.as_ref() {
+            info!("Cache dir has a size of '{}'", val);
+        }
+        val
+    }
+
+    async fn get_approx_video_size(&mut self, video: &Video) -> Result<u64> {
+        if let Some(value) = self.video_size_cache.get(&video.extractor_hash) {
+            Ok(*value)
+        } else {
+            // the subtitle file size should be negligible
+            let add_opts = YtDlpOptions {
+                subtitle_langs: "".to_owned(),
+            };
+            let opts = &download_opts(add_opts);
+
+            let result = yt_dlp::extract_info(&opts, &video.url, false, true)
+                .await
+                .with_context(|| {
+                    format!("Failed to extract video information: '{}'", video.title)
+                })?;
+
+            let size = if let Some(val) = result.filesize {
+                val
+            } else if let Some(val) = result.filesize_approx {
+                val
+            } else {
+                bail!("Failed to find a filesize for video: '{}'", video.title);
+            };
+
+            assert_eq!(
+                self.video_size_cache
+                    .insert(video.extractor_hash.clone(), size),
+                None
+            );
+
+            Ok(size)
+        }
+    }
+
     async fn actually_cache_video(app: &App, video: &Video) -> Result<()> {
         debug!("Download started: {}", &video.title);