about summary refs log tree commit diff stats
path: root/src/update/mod.rs
diff options
context:
space:
mode:
authorBenedikt Peetz <benedikt.peetz@b-peetz.de>2024-10-14 14:56:29 +0200
committerBenedikt Peetz <benedikt.peetz@b-peetz.de>2024-10-14 14:56:29 +0200
commit6c9286857ef8b314962b67f4a16a66e8c35531bc (patch)
tree9ced4485ec38b39f82cba258c06321a21c40000a /src/update/mod.rs
parentbuild(Cargo.toml): Add further lints (diff)
downloadyt-6c9286857ef8b314962b67f4a16a66e8c35531bc.tar.gz
yt-6c9286857ef8b314962b67f4a16a66e8c35531bc.zip
refactor(treewide): Combine the separate crates in one workspace
Diffstat (limited to 'src/update/mod.rs')
-rw-r--r--src/update/mod.rs257
1 file changed, 0 insertions, 257 deletions
diff --git a/src/update/mod.rs b/src/update/mod.rs
deleted file mode 100644
index 6abb8c4..0000000
--- a/src/update/mod.rs
+++ /dev/null
@@ -1,257 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-use std::{collections::HashMap, process::Stdio, str::FromStr};
-
-use anyhow::{Context, Ok, Result};
-use chrono::{DateTime, Utc};
-use log::{error, info, warn};
-use tokio::{
-    io::{AsyncBufReadExt, BufReader},
-    process::Command,
-};
-use url::Url;
-use yt_dlp::{unsmuggle_url, wrapper::info_json::InfoJson};
-
-use crate::{
-    app::App,
-    storage::{
-        subscriptions::{get_subscriptions, Subscription},
-        video_database::{
-            extractor_hash::ExtractorHash, getters::get_all_hashes, setters::add_video, Video,
-            VideoStatus,
-        },
-    },
-    videos::display::format_video::FormatVideo,
-};
-
-pub async fn update(
-    app: &App,
-    max_backlog: u32,
-    subs_to_update: Vec<String>,
-    verbosity: u8,
-) -> Result<()> {
-    let subscriptions = get_subscriptions(app).await?;
-    let mut back_subs: HashMap<Url, Subscription> = HashMap::new();
-    let logging = verbosity > 0;
-    let log_level = match verbosity {
-        // 0 => 50,   // logging.CRITICAL
-        0 => 40,   // logging.ERROR
-        1 => 30,   // logging.WARNING
-        2 => 20,   // logging.INFO
-        3.. => 10, // logging.DEBUG
-    };
-    info!("Passing log_level {} to the update script", log_level);
-
-    let mut urls: Vec<String> = vec![];
-    for (name, sub) in subscriptions.0 {
-        if subs_to_update.contains(&name) || subs_to_update.is_empty() {
-            urls.push(sub.url.to_string());
-            back_subs.insert(sub.url.clone(), sub);
-        } else {
-            info!(
-                "Not updating subscription '{}' as it was not specified",
-                name
-            );
-        }
-    }
-
-    // We can get away with not having to re-fetch the hashes every time, as the returned video
-    // should not contain duplicates.
-    let hashes = get_all_hashes(app).await?;
-
-    let mut child = Command::new("raw_update.py")
-        .arg(max_backlog.to_string())
-        .arg(urls.len().to_string())
-        .arg(log_level.to_string())
-        .args(&urls)
-        .args(hashes.iter().map(|haz| haz.to_string()).collect::<Vec<_>>())
-        .stdout(Stdio::piped())
-        .stderr(if logging {
-            Stdio::inherit()
-        } else {
-            Stdio::null()
-        })
-        .stdin(Stdio::null())
-        .spawn()
-        .context("Failed to call python3 update_raw")?;
-
-    let mut out = BufReader::new(
-        child
-            .stdout
-            .take()
-            .expect("Should be able to take child stdout"),
-    )
-    .lines();
-
-    while let Some(line) = out.next_line().await? {
-        // use tokio::{fs::File, io::AsyncWriteExt};
-        // let mut output = File::create("output.json").await?;
-        // output.write(line.as_bytes()).await?;
-        // output.flush().await?;
-        // output.sync_all().await?;
-        // drop(output);
-
-        let output_json: HashMap<Url, InfoJson> =
-            serde_json::from_str(&line).expect("This should be valid json");
-
-        for (url, value) in output_json {
-            let sub = back_subs.get(&url).expect("This was stored before");
-            process_subscription(app, sub, value, &hashes)
-                .await
-                .with_context(|| format!("Failed to process subscription: '{}'", sub.name))?
-        }
-    }
-
-    let out = child.wait().await?;
-    if !out.success() {
-        error!(
-            "The update_raw.py invokation failed (exit code: {}).",
-            out.code()
-                .map(|f| f.to_string())
-                .unwrap_or("<No exit code>".to_owned())
-        )
-    }
-
-    Ok(())
-}
-
-pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Result<Video> {
-    macro_rules! unwrap_option {
-        ($option:expr) => {
-            match $option {
-                Some(x) => x,
-                None => anyhow::bail!(concat!(
-                    "Expected a value, but '",
-                    stringify!($option),
-                    "' is None!"
-                )),
-            }
-        };
-    }
-
-    let publish_date = if let Some(date) = &entry.upload_date {
-        let year: u32 = date
-            .chars()
-            .take(4)
-            .collect::<String>()
-            .parse()
-            .expect("Should work.");
-        let month: u32 = date
-            .chars()
-            .skip(4)
-            .take(2)
-            .collect::<String>()
-            .parse()
-            .expect("Should work");
-        let day: u32 = date
-            .chars()
-            .skip(6)
-            .take(2)
-            .collect::<String>()
-            .parse()
-            .expect("Should work");
-
-        let date_string = format!("{year:04}-{month:02}-{day:02}T00:00:00Z");
-        Some(
-            DateTime::<Utc>::from_str(&date_string)
-                .expect("This should always work")
-                .timestamp(),
-        )
-    } else {
-        warn!(
-            "The video '{}' lacks it's upload date!",
-            unwrap_option!(&entry.title)
-        );
-        None
-    };
-
-    let thumbnail_url = match (&entry.thumbnails, &entry.thumbnail) {
-        (None, None) => None,
-        (None, Some(thumbnail)) => Some(thumbnail.to_owned()),
-
-        // TODO: The algorithm is not exactly the best <2024-05-28>
-        (Some(thumbnails), None) => Some(
-            thumbnails
-                .first()
-                .expect("At least one should exist")
-                .url
-                .clone(),
-        ),
-        (Some(_), Some(thumnail)) => Some(thumnail.to_owned()),
-    };
-
-    let url = {
-        let smug_url: url::Url = unwrap_option!(entry.webpage_url.clone());
-        unsmuggle_url(smug_url)?
-    };
-
-    let extractor_hash = blake3::hash(unwrap_option!(entry.id).as_bytes());
-
-    let subscription_name = if let Some(sub) = sub {
-        Some(sub.name.clone())
-    } else {
-        if let Some(uploader) = entry.uploader {
-            if entry.webpage_url_domain == Some("youtube.com".to_owned()) {
-                Some(format!("{} - Videos", uploader))
-            } else {
-                Some(uploader.clone())
-            }
-        } else {
-            None
-        }
-    };
-
-    let video = Video {
-        cache_path: None,
-        description: entry.description.clone(),
-        duration: entry.duration,
-        extractor_hash: ExtractorHash::from_hash(extractor_hash),
-        last_status_change: Utc::now().timestamp(),
-        parent_subscription_name: subscription_name,
-        priority: 0,
-        publish_date,
-        status: VideoStatus::Pick,
-        status_change: false,
-        thumbnail_url,
-        title: unwrap_option!(entry.title.clone()),
-        url,
-    };
-    Ok(video)
-}
-
-async fn process_subscription(
-    app: &App,
-    sub: &Subscription,
-    entry: InfoJson,
-    hashes: &[blake3::Hash],
-) -> Result<()> {
-    let video =
-        video_entry_to_video(entry, Some(sub)).context("Failed to parse search entry as Video")?;
-
-    if hashes.contains(&video.extractor_hash.hash()) {
-        // We already stored the video information
-        unreachable!("The python update script should have never provided us a duplicated video");
-    } else {
-        add_video(app, video.clone())
-            .await
-            .with_context(|| format!("Failed to add video to database: '{}'", video.title))?;
-        println!(
-            "{}",
-            (&video
-                .to_formatted_video(app)
-                .await
-                .with_context(|| format!("Failed to format video: '{}'", video.title))?
-                .colorize())
-                .to_line_display()
-        );
-        Ok(())
-    }
-}