author    | Benedikt Peetz <benedikt.peetz@b-peetz.de> | 2024-10-07 19:43:13 +0200
committer | Benedikt Peetz <benedikt.peetz@b-peetz.de> | 2024-10-07 19:43:13 +0200
commit    | 1f0445952332d96acadecb936d9eaa7169d52082 (patch)
tree      | e725d99a19c4b92c6a6850e35c9f9894a6e503eb /src/update
parent    | feat(cli): Also add a `dowa` command (diff)
feat(cli): Add an `add` command
This command allows adding URLs directly. Without it, the process would be: `yt subs add <URL>` -> `yt update` -> `yt subs remove <URL>`
Diffstat (limited to '')

-rw-r--r-- | src/update/mod.rs | 66
1 file changed, 43 insertions(+), 23 deletions(-)
```diff
diff --git a/src/update/mod.rs b/src/update/mod.rs
index ce0a7e5..ce3a7f9 100644
--- a/src/update/mod.rs
+++ b/src/update/mod.rs
@@ -120,12 +120,7 @@ pub async fn update(
     Ok(())
 }
 
-async fn process_subscription(
-    app: &App,
-    sub: &Subscription,
-    entry: InfoJson,
-    hashes: &[blake3::Hash],
-) -> Result<()> {
+pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Result<Video> {
     macro_rules! unwrap_option {
         ($option:expr) => {
             match $option {
@@ -197,26 +192,51 @@ async fn process_subscription(
     let extractor_hash = blake3::hash(unwrap_option!(entry.id).as_bytes());
 
-    if hashes.contains(&extractor_hash) {
+    let subscription_name = if let Some(sub) = sub {
+        Some(sub.name.clone())
+    } else {
+        if let Some(uploader) = entry.uploader {
+            if entry.webpage_url_domain == Some("youtube.com".to_owned()) {
+                Some(format!("{} - Videos", uploader))
+            } else {
+                Some(uploader.clone())
+            }
+        } else {
+            None
+        }
+    };
+
+    let video = Video {
+        cache_path: None,
+        description: entry.description.clone(),
+        duration: entry.duration,
+        extractor_hash: ExtractorHash::from_hash(extractor_hash),
+        last_status_change: Utc::now().timestamp(),
+        parent_subscription_name: subscription_name,
+        priority: 0,
+        publish_date,
+        status: VideoStatus::Pick,
+        status_change: false,
+        thumbnail_url,
+        title: unwrap_option!(entry.title.clone()),
+        url,
+    };
+    Ok(video)
+}
+
+async fn process_subscription(
+    app: &App,
+    sub: &Subscription,
+    entry: InfoJson,
+    hashes: &[blake3::Hash],
+) -> Result<()> {
+    let video =
+        video_entry_to_video(entry, Some(sub)).context("Failed to parse search entry as Video")?;
+
+    if hashes.contains(&video.extractor_hash.hash()) {
         // We already stored the video information
         unreachable!("The python update script should have never provided us a duplicated video");
     } else {
-        let video = Video {
-            cache_path: None,
-            description: entry.description.clone(),
-            duration: entry.duration,
-            extractor_hash: ExtractorHash::from_hash(extractor_hash),
-            last_status_change: Utc::now().timestamp(),
-            parent_subscription_name: Some(sub.name.clone()),
-            priority: 0,
-            publish_date,
-            status: VideoStatus::Pick,
-            status_change: false,
-            thumbnail_url,
-            title: unwrap_option!(entry.title.clone()),
-            url,
-        };
-
         println!("{}", video.to_color_display(app).await?);
         add_video(app, video).await?;
         Ok(())
     }
```
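The point of extracting `video_entry_to_video` out of `process_subscription` is that the new `add` command can build a `Video` for a URL that has no backing `Subscription` by passing `None`, in which case the code falls back to the uploader name (or "<uploader> - Videos" for YouTube). The `add` handler itself is not part of this diff; the sketch below only illustrates how such a caller could use the new function. `fetch_info_json` is a hypothetical placeholder for however the crate resolves a URL into an `InfoJson` (e.g. via yt-dlp), and the `anyhow`-style `Result`/`Context` imports are assumed from the `.context(...)` call in the diff.

```rust
use anyhow::{Context, Result};

// Hypothetical `add` command handler (not part of this commit), sketched on
// top of the newly extracted `video_entry_to_video`.
pub async fn add(app: &App, url: &str) -> Result<()> {
    // Placeholder: resolve the URL to a yt-dlp style `InfoJson`; the crate
    // presumably already does something similar for `yt update`.
    let entry: InfoJson = fetch_info_json(url)
        .await
        .with_context(|| format!("Failed to fetch metadata for '{url}'"))?;

    // `None` is the interesting part: with no `Subscription`, the new code
    // derives `parent_subscription_name` from the uploader instead.
    let video = video_entry_to_video(entry, None)
        .context("Failed to parse the fetched metadata as a video")?;

    // Same tail as `process_subscription`: print the video and store it.
    println!("{}", video.to_color_display(app).await?);
    add_video(app, video).await?;

    Ok(())
}
```

Usage would then be a single `yt add <URL>`, replacing the `yt subs add` -> `yt update` -> `yt subs remove` dance described in the commit message.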