From 2c7980b773cad586af5db8ff0755f1d74d94f7d1 Mon Sep 17 00:00:00 2001 From: Benedikt Peetz Date: Mon, 14 Oct 2024 18:05:33 +0200 Subject: refactor(treewide): Conform to the clippy and rust lints --- Cargo.lock | 1 + Cargo.toml | 1 + crates/bytes/src/error.rs | 5 +- crates/bytes/src/lib.rs | 11 +- crates/libmpv2/libmpv2-sys/src/lib.rs | 9 +- crates/libmpv2/src/mpv.rs | 4 +- crates/libmpv2/src/mpv/protocol.rs | 2 +- crates/libmpv2/src/mpv/render.rs | 4 +- crates/yt_dlp/Cargo.toml | 1 + crates/yt_dlp/src/duration.rs | 7 ++ crates/yt_dlp/src/lib.rs | 67 ++++++---- crates/yt_dlp/src/logging.rs | 16 ++- crates/yt_dlp/src/main.rs | 96 -------------- crates/yt_dlp/src/tests.rs | 85 +++++++++++++ crates/yt_dlp/src/wrapper/info_json.rs | 25 ++-- yt/Cargo.toml | 2 +- yt/src/app.rs | 1 + yt/src/cache/mod.rs | 2 +- yt/src/cli.rs | 3 +- yt/src/comments/comment.rs | 3 +- yt/src/comments/display.rs | 14 +-- yt/src/comments/mod.rs | 46 ++++--- yt/src/config/default.rs | 42 +++---- yt/src/config/definitions.rs | 12 +- yt/src/config/file_system.rs | 7 +- yt/src/config/mod.rs | 19 ++- yt/src/download/download_options.rs | 10 +- yt/src/download/mod.rs | 28 +++-- yt/src/main.rs | 32 +++-- yt/src/select/cmds.rs | 24 ++-- yt/src/select/mod.rs | 27 ++-- yt/src/select/selection_file/duration.rs | 12 +- yt/src/select/selection_file/mod.rs | 5 +- yt/src/status/mod.rs | 6 +- yt/src/storage/subscriptions.rs | 21 ++-- yt/src/storage/video_database/downloader.rs | 10 +- yt/src/storage/video_database/extractor_hash.rs | 19 +-- yt/src/storage/video_database/getters.rs | 47 ++++--- yt/src/storage/video_database/mod.rs | 15 ++- yt/src/storage/video_database/setters.rs | 11 +- yt/src/subscribe/mod.rs | 41 +++--- yt/src/update/mod.rs | 72 ++++++----- yt/src/videos/display/format_video.rs | 1 + yt/src/videos/display/mod.rs | 8 +- yt/src/videos/mod.rs | 8 +- yt/src/watch/events/handlers/mod.rs | 161 ++++++++++++++++++++++++ yt/src/watch/events/mod.rs | 145 ++++++--------------- 
yt/src/watch/events/playlist_handler.rs | 21 ++-- yt/src/watch/mod.rs | 22 +++- 49 files changed, 727 insertions(+), 504 deletions(-) delete mode 100644 crates/yt_dlp/src/main.rs create mode 100644 crates/yt_dlp/src/tests.rs create mode 100644 yt/src/watch/events/handlers/mod.rs diff --git a/Cargo.lock b/Cargo.lock index 180bf13..646be94 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2457,6 +2457,7 @@ dependencies = [ "pyo3", "serde", "serde_json", + "tokio", "url", ] diff --git a/Cargo.toml b/Cargo.toml index 1fa9668..888e79d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -36,6 +36,7 @@ log = "0.4.22" serde = { version = "1.0.210", features = ["derive"] } serde_json = "1.0.128" url = { version = "2.5.2", features = ["serde"] } +tokio = { version = "1.40.0", features = [ "rt-multi-thread", "macros", "process", "time", "io-std", ] } [profile.profiling] inherits = "release" diff --git a/crates/bytes/src/error.rs b/crates/bytes/src/error.rs index 7643109..c9783d8 100644 --- a/crates/bytes/src/error.rs +++ b/crates/bytes/src/error.rs @@ -11,6 +11,7 @@ use std::{fmt::Display, num::ParseIntError}; #[derive(Debug)] +#[allow(clippy::module_name_repetitions)] pub enum BytesError { BytesParseIntError(ParseIntError), NotYetSupported(String), @@ -20,10 +21,10 @@ impl Display for BytesError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { BytesError::BytesParseIntError(e) => { - f.write_fmt(format_args!("Failed to parse a number as integer: '{}'", e)) + f.write_fmt(format_args!("Failed to parse a number as integer: '{e}'")) }, BytesError::NotYetSupported(other) => { - f.write_fmt(format_args!("Your extension '{}' is not yet supported. Only KB,MB,GB or KiB,MiB,GiB are supported", other)) + f.write_fmt(format_args!("Your extension '{other}' is not yet supported. 
Only KB,MB,GB or KiB,MiB,GiB are supported")) } } } diff --git a/crates/bytes/src/lib.rs b/crates/bytes/src/lib.rs index 78d3c4e..6e3f73c 100644 --- a/crates/bytes/src/lib.rs +++ b/crates/bytes/src/lib.rs @@ -8,6 +8,12 @@ // You should have received a copy of the License along with this program. // If not, see . +#![allow( + clippy::cast_possible_truncation, + clippy::cast_precision_loss, + clippy::cast_sign_loss, + clippy::cast_possible_wrap +)] use std::{fmt::Display, str::FromStr}; use error::BytesError; @@ -28,13 +34,15 @@ const TB: u64 = 1000 * GB; pub mod error; pub mod serde; -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Debug)] pub struct Bytes(u64); impl Bytes { + #[must_use] pub fn as_u64(self) -> u64 { self.0 } + #[must_use] pub fn new(v: u64) -> Self { Self(v) } @@ -140,6 +148,7 @@ impl Display for Bytes { /// assert_eq!(precision_f64(1.2300_f32 as f64, 2) as f32, 1.2f32); ///# } /// ``` +#[must_use] pub fn precision_f64(x: f64, decimals: u32) -> f64 { if x == 0. || decimals == 0 { 0. diff --git a/crates/libmpv2/libmpv2-sys/src/lib.rs b/crates/libmpv2/libmpv2-sys/src/lib.rs index 36a8199..0d14dbb 100644 --- a/crates/libmpv2/libmpv2-sys/src/lib.rs +++ b/crates/libmpv2/libmpv2-sys/src/lib.rs @@ -1,7 +1,3 @@ -#![allow(non_upper_case_globals)] -#![allow(non_camel_case_types)] -#![allow(non_snake_case)] - // yt - A fully featured command line YouTube client // // Copyright (C) 2024 Benedikt Peetz @@ -11,6 +7,11 @@ // // You should have received a copy of the License along with this program. // If not, see . 
+// +#![allow(non_upper_case_globals)] +#![allow(non_camel_case_types)] +#![allow(non_snake_case)] +#![allow(clippy::doc_lazy_continuation)] include!(concat!(env!("OUT_DIR"), "/bindings.rs")); diff --git a/crates/libmpv2/src/mpv.rs b/crates/libmpv2/src/mpv.rs index 9d554a6..bf4581e 100644 --- a/crates/libmpv2/src/mpv.rs +++ b/crates/libmpv2/src/mpv.rs @@ -51,6 +51,7 @@ fn mpv_err(ret: T, err: ctype::c_int) -> Result { } /// This trait describes which types are allowed to be passed to getter mpv APIs. +#[allow(clippy::missing_safety_doc)] pub unsafe trait GetData: Sized { #[doc(hidden)] fn get_from_c_void Result>(mut fun: F) -> Result { @@ -62,6 +63,7 @@ pub unsafe trait GetData: Sized { } /// This trait describes which types are allowed to be passed to setter mpv APIs. +#[allow(clippy::missing_safety_doc)] pub unsafe trait SetData: Sized { #[doc(hidden)] fn call_as_c_void Result>( @@ -207,7 +209,7 @@ pub mod mpv_node { } } - pub fn child(self: Self, node: libmpv2_sys::mpv_node) -> Self { + pub fn child(self, node: libmpv2_sys::mpv_node) -> Self { Self { parent: self.parent, node, diff --git a/crates/libmpv2/src/mpv/protocol.rs b/crates/libmpv2/src/mpv/protocol.rs index 4ae4f16..31a5933 100644 --- a/crates/libmpv2/src/mpv/protocol.rs +++ b/crates/libmpv2/src/mpv/protocol.rs @@ -155,7 +155,7 @@ where { let data = Box::from_raw(cookie as *mut ProtocolData); - panic::catch_unwind(|| ((*data).close_fn)(Box::from_raw((*data).cookie))); + panic::catch_unwind(|| (data.close_fn)(Box::from_raw(data.cookie))); } struct ProtocolData { diff --git a/crates/libmpv2/src/mpv/render.rs b/crates/libmpv2/src/mpv/render.rs index 91db34e..c3f2dc9 100644 --- a/crates/libmpv2/src/mpv/render.rs +++ b/crates/libmpv2/src/mpv/render.rs @@ -213,8 +213,8 @@ impl RenderContext { params: impl IntoIterator>, ) -> Result { let params: Vec<_> = params.into_iter().collect(); - let mut raw_params: Vec = Vec::new(); - raw_params.reserve(params.len() + 1); + let mut raw_params: Vec = + 
Vec::with_capacity(params.len() + 1); let mut raw_ptrs: HashMap<*const c_void, DeleterFn> = HashMap::new(); for p in params { diff --git a/crates/yt_dlp/Cargo.toml b/crates/yt_dlp/Cargo.toml index 0f1d248..03196e9 100644 --- a/crates/yt_dlp/Cargo.toml +++ b/crates/yt_dlp/Cargo.toml @@ -30,6 +30,7 @@ serde_json.workspace = true url.workspace = true [dev-dependencies] +tokio.workspace = true [lints] workspace = true diff --git a/crates/yt_dlp/src/duration.rs b/crates/yt_dlp/src/duration.rs index cd7454b..f91892d 100644 --- a/crates/yt_dlp/src/duration.rs +++ b/crates/yt_dlp/src/duration.rs @@ -9,6 +9,8 @@ // If not, see . // TODO: This file should be de-duplicated with the same file in the 'yt' crate <2024-06-25> + +#[derive(Debug, Clone, Copy)] pub struct Duration { time: u32, } @@ -26,6 +28,11 @@ impl From<&str> for Duration { impl From> for Duration { fn from(value: Option) -> Self { Self { + #[allow( + clippy::cast_possible_truncation, + clippy::cast_precision_loss, + clippy::cast_sign_loss + )] time: value.unwrap_or(0.0).ceil() as u32, } } diff --git a/crates/yt_dlp/src/lib.rs b/crates/yt_dlp/src/lib.rs index f958895..4e35cb0 100644 --- a/crates/yt_dlp/src/lib.rs +++ b/crates/yt_dlp/src/lib.rs @@ -8,6 +8,10 @@ // You should have received a copy of the License along with this program. // If not, see . +// The pyo3 `pyfunction` proc-macros call unsafe functions internally, which trigger this lint. +#![allow(unsafe_op_in_unsafe_fn)] +#![allow(clippy::missing_errors_doc)] + use std::env; use std::{fs::File, io::Write}; @@ -31,14 +35,20 @@ pub mod duration; pub mod logging; pub mod wrapper; +#[cfg(test)] +mod tests; + /// Synchronisation helper, to ensure that we don't setup the logger multiple times static SYNC_OBJ: Once = Once::new(); /// Add a logger to the yt-dlp options. /// If you have an logger set (i.e. for rust), than this will log to rust +/// +/// # Panics +/// This should never panic. 
pub fn add_logger_and_sig_handler<'a>( opts: Bound<'a, PyDict>, - py: Python, + py: Python<'_>, ) -> PyResult> { setup_logging(py, "yt_dlp")?; @@ -52,10 +62,9 @@ pub fn add_logger_and_sig_handler<'a>( // This allows the user to actually stop the application with Ctrl+C. // This is here because it can only be run in the main thread and this was here already. py.run_bound( - r#" + "\ import signal -signal.signal(signal.SIGINT, signal.SIG_DFL) - "#, +signal.signal(signal.SIGINT, signal.SIG_DFL)", None, None, ) @@ -82,14 +91,22 @@ signal.signal(signal.SIGINT, signal.SIG_DFL) } #[pyfunction] -pub fn progress_hook(py: Python, input: Bound<'_, PyDict>) -> PyResult<()> { +#[allow(clippy::too_many_lines)] +#[allow(clippy::missing_panics_doc)] +#[allow(clippy::items_after_statements)] +#[allow( + clippy::cast_possible_truncation, + clippy::cast_sign_loss, + clippy::cast_precision_loss +)] +pub fn progress_hook(py: Python<'_>, input: &Bound<'_, PyDict>) -> PyResult<()> { // Only add the handler, if the log-level is higher than Debug (this avoids covering debug // messages). if log_enabled!(Level::Debug) { return Ok(()); } - let input: serde_json::Map = serde_json::from_str(&json_dumps( + let input: Map = serde_json::from_str(&json_dumps( py, input .downcast::() @@ -164,8 +181,9 @@ pub fn progress_hook(py: Python, input: Bound<'_, PyDict>) -> PyResult<()> { } fn format_speed(speed: f64) -> String { + #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)] let bytes = Bytes::new(speed.floor() as u64); - format!("{}/s", bytes) + format!("{bytes}/s") } let get_title = |add_extension: bool| -> String { @@ -187,7 +205,7 @@ pub fn progress_hook(py: Python, input: Bound<'_, PyDict>) -> PyResult<()> { default_get! 
{ as_str, "", "info_dict", "title"}.to_owned() } } - other => panic!("The extension '{}' is not yet implemented", other), + other => panic!("The extension '{other}' is not yet implemented"), } }; @@ -242,18 +260,18 @@ pub fn progress_hook(py: Python, input: Bound<'_, PyDict>) -> PyResult<()> { ); } "finished" => { - println!("Finished downloading: '{}'", c!("34;1", get_title(false))) + println!("Finished downloading: '{}'", c!("34;1", get_title(false))); } "error" => { panic!("Error whilst downloading: {}", get_title(true)) } - other => panic!("{} is not a valid state!", other), + other => panic!("{other} is not a valid state!"), }; Ok(()) } -pub fn add_hooks<'a>(opts: Bound<'a, PyDict>, py: Python) -> PyResult> { +pub fn add_hooks<'a>(opts: Bound<'a, PyDict>, py: Python<'_>) -> PyResult> { if let Some(hooks) = opts.get_item("progress_hooks")? { let hooks = hooks.downcast::()?; hooks.append(wrap_pyfunction_bound!(progress_hook, py)?)?; @@ -280,10 +298,12 @@ pub fn add_hooks<'a>(opts: Bound<'a, PyDict>, py: Python) -> PyResult, url: &Url, @@ -311,8 +331,8 @@ pub async fn extract_info( if let Ok(confirm) = env::var("YT_STORE_INFO_JSON") { if confirm == "yes" { - let mut file = File::create("output.info.json").unwrap(); - write!(file, "{}", result_str).unwrap(); + let mut file = File::create("output.info.json")?; + write!(file, "{result_str}").unwrap(); } } @@ -321,7 +341,9 @@ pub async fn extract_info( }) } -pub fn unsmuggle_url(smug_url: Url) -> PyResult { +/// # Panics +/// Only if python fails to return a valid URL. +pub fn unsmuggle_url(smug_url: &Url) -> PyResult { Python::with_gil(|py| { let utils = get_yt_dlp_utils(py)?; let url = utils @@ -341,6 +363,9 @@ pub fn unsmuggle_url(smug_url: Url) -> PyResult { /// Download a given list of URLs. /// Returns the paths they were downloaded to. +/// +/// # Panics +/// Only if `yt_dlp` changes their `info_json` schema. 
pub async fn download( urls: &[Url], download_options: &Map, @@ -357,7 +382,7 @@ pub async fn download( } else { info_json.requested_downloads.expect("This must exist")[0] .filename - .to_owned() + .clone() }; out_paths.push(result_string); @@ -378,7 +403,7 @@ fn json_map_to_py_dict<'a>( Ok(python_dict) } -fn json_dumps(py: Python, input: Bound) -> PyResult { +fn json_dumps(py: Python<'_>, input: Bound<'_, PyAny>) -> PyResult { // json.dumps(yt_dlp.sanitize_info(input)) let yt_dlp = get_yt_dlp(py, PyDict::new_bound(py))?; @@ -394,13 +419,13 @@ fn json_dumps(py: Python, input: Bound) -> PyResult { Ok(output_str) } -fn json_loads_str(py: Python, input: T) -> PyResult> { +fn json_loads_str(py: Python<'_>, input: T) -> PyResult> { let string = serde_json::to_string(&input).expect("Correct json must be pased"); json_loads(py, string) } -fn json_loads(py: Python, input: String) -> PyResult> { +fn json_loads(py: Python<'_>, input: String) -> PyResult> { // json.loads(input) let json = PyModule::import_bound(py, "json")?; diff --git a/crates/yt_dlp/src/logging.rs b/crates/yt_dlp/src/logging.rs index 4039da4..385255d 100644 --- a/crates/yt_dlp/src/logging.rs +++ b/crates/yt_dlp/src/logging.rs @@ -12,6 +12,9 @@ // It is licensed under the Apache 2.0 License, copyright up to 2024 by Dylan Storey // It was modified by Benedikt Peetz 2024 +// The pyo3 `pyfunction` proc-macros call unsafe functions internally, which trigger this lint. +#![allow(unsafe_op_in_unsafe_fn)] + use log::{logger, Level, MetadataBuilder, Record}; use pyo3::{ prelude::{PyAnyMethods, PyListMethods, PyModuleMethods}, @@ -19,6 +22,7 @@ use pyo3::{ }; /// Consume a Python `logging.LogRecord` and emit a Rust `Log` instead. 
+#[allow(clippy::needless_pass_by_value)] #[pyfunction] fn host_log(record: Bound<'_, PyAny>, rust_target: &str) -> PyResult<()> { let level = record.getattr("levelno")?; @@ -37,7 +41,7 @@ fn host_log(record: Bound<'_, PyAny>, rust_target: &str) -> PyResult<()> { } else { // Libraries (ex: tracing_subscriber::filter::Directive) expect rust-style targets like foo::bar, // and may not deal well with "." as a module separator: - let logger_name = logger_name.replace(".", "::"); + let logger_name = logger_name.replace('.', "::"); Some(format!("{rust_target}::{logger_name}")) }; @@ -84,10 +88,11 @@ fn host_log(record: Bound<'_, PyAny>, rust_target: &str) -> PyResult<()> { Ok(()) } -/// Registers the host_log function in rust as the event handler for Python's logging logger +/// Registers the `host_log` function in rust as the event handler for Python's logging logger /// This function needs to be called from within a pyo3 context as early as possible to ensure logging messages /// arrive to the rust consumer. 
-pub fn setup_logging(py: Python, target: &str) -> PyResult<()> { +#[allow(clippy::module_name_repetitions)] +pub fn setup_logging(py: Python<'_>, target: &str) -> PyResult<()> { let logging = py.import_bound("logging")?; logging.setattr("host_log", wrap_pyfunction!(host_log, &logging)?)?; @@ -100,15 +105,14 @@ class HostHandler(Handler): super().__init__(level=level) def emit(self, record): - host_log(record,"{}") + host_log(record,"{target}") oldBasicConfig = basicConfig def basicConfig(*pargs, **kwargs): if "handlers" not in kwargs: kwargs["handlers"] = [HostHandler()] return oldBasicConfig(*pargs, **kwargs) -"#, - target +"# ) .as_str(), Some(&logging.dict()), diff --git a/crates/yt_dlp/src/main.rs b/crates/yt_dlp/src/main.rs deleted file mode 100644 index c40ddc3..0000000 --- a/crates/yt_dlp/src/main.rs +++ /dev/null @@ -1,96 +0,0 @@ -// yt - A fully featured command line YouTube client -// -// Copyright (C) 2024 Benedikt Peetz -// SPDX-License-Identifier: GPL-3.0-or-later -// -// This file is part of Yt. -// -// You should have received a copy of the License along with this program. -// If not, see . 
- -use std::{env::args, fs}; - -use yt_dlp::wrapper::info_json::InfoJson; - -#[cfg(test)] -mod test { - use url::Url; - use yt_dlp::wrapper::yt_dlp_options::{ExtractFlat, YtDlpOptions}; - - const YT_OPTS: YtDlpOptions = YtDlpOptions { - playliststart: 1, - playlistend: 10, - noplaylist: false, - extract_flat: ExtractFlat::InPlaylist, - }; - - #[test] - fn test_extract_info_video() { - let info = yt_dlp::extract_info( - YT_OPTS, - &Url::parse("https://www.youtube.com/watch?v=dbjPnXaacAU").expect("Is valid."), - false, - false, - false, - ) - .map_err(|err| format!("Encountered error: '{}'", err)) - .unwrap(); - - println!("{:#?}", info); - } - - #[test] - fn test_extract_info_url() { - let err = yt_dlp::extract_info( - YT_OPTS, - &Url::parse("https://google.com").expect("Is valid."), - false, - false, - false, - ) - .map_err(|err| format!("Encountered error: '{}'", err)) - .unwrap(); - - println!("{:#?}", err); - } - - #[test] - fn test_extract_info_playlist() { - let err = yt_dlp::extract_info( - YT_OPTS, - &Url::parse("https://www.youtube.com/@TheGarriFrischer/videos").expect("Is valid."), - false, - false, - true, - ) - .map_err(|err| format!("Encountered error: '{}'", err)) - .unwrap(); - - println!("{:#?}", err); - } - #[test] - fn test_extract_info_playlist_full() { - let err = yt_dlp::extract_info( - YT_OPTS, - &Url::parse("https://www.youtube.com/@NixOS-Foundation/videos").expect("Is valid."), - false, - false, - true, - ) - .map_err(|err| format!("Encountered error: '{}'", err)) - .unwrap(); - - println!("{:#?}", err); - } -} - -fn main() { - let input_file: &str = &args().take(2).collect::>()[1]; - - let input = fs::read_to_string(input_file).unwrap(); - - let output: InfoJson = - serde_json::from_str(&input).expect("Python should be able to produce correct json"); - - println!("{:#?}", output); -} diff --git a/crates/yt_dlp/src/tests.rs b/crates/yt_dlp/src/tests.rs new file mode 100644 index 0000000..08e392f --- /dev/null +++ b/crates/yt_dlp/src/tests.rs 
@@ -0,0 +1,85 @@ +// yt - A fully featured command line YouTube client +// +// Copyright (C) 2024 Benedikt Peetz +// SPDX-License-Identifier: GPL-3.0-or-later +// +// This file is part of Yt. +// +// You should have received a copy of the License along with this program. +// If not, see . + +use std::sync::LazyLock; + +use serde_json::{json, Value}; +use url::Url; + +static YT_OPTS: LazyLock> = LazyLock::new(|| { + match json!({ + "playliststart": 1, + "playlistend": 10, + "noplaylist": false, + "extract_flat": false, + }) { + Value::Object(obj) => obj, + _ => unreachable!("This json is hardcoded"), + } +}); + +#[tokio::test] +async fn test_extract_info_video() { + let info = crate::extract_info( + &YT_OPTS, + &Url::parse("https://www.youtube.com/watch?v=dbjPnXaacAU").expect("Is valid."), + false, + false, + ) + .await + .map_err(|err| format!("Encountered error: '{}'", err)) + .unwrap(); + + println!("{:#?}", info); +} + +#[tokio::test] +async fn test_extract_info_url() { + let err = crate::extract_info( + &YT_OPTS, + &Url::parse("https://google.com").expect("Is valid."), + false, + false, + ) + .await + .map_err(|err| format!("Encountered error: '{}'", err)) + .unwrap(); + + println!("{:#?}", err); +} + +#[tokio::test] +async fn test_extract_info_playlist() { + let err = crate::extract_info( + &YT_OPTS, + &Url::parse("https://www.youtube.com/@TheGarriFrischer/videos").expect("Is valid."), + false, + true, + ) + .await + .map_err(|err| format!("Encountered error: '{}'", err)) + .unwrap(); + + println!("{:#?}", err); +} +#[tokio::test] +async fn test_extract_info_playlist_full() { + let err = crate::extract_info( + &YT_OPTS, + &Url::parse("https://www.youtube.com/@NixOS-Foundation/videos").expect("Is valid."), + false, + true, + ) + .await + .map_err(|err| format!("Encountered error: '{}'", err)) + .unwrap(); + + println!("{:#?}", err); +} diff --git a/crates/yt_dlp/src/wrapper/info_json.rs b/crates/yt_dlp/src/wrapper/info_json.rs index 50a026d..f113fe5 100644 --- 
a/crates/yt_dlp/src/wrapper/info_json.rs +++ b/crates/yt_dlp/src/wrapper/info_json.rs @@ -8,6 +8,9 @@ // You should have received a copy of the License along with this program. // If not, see . +// `yt_dlp` named them like this. +#![allow(clippy::pub_underscore_fields)] + use std::{collections::HashMap, path::PathBuf}; use pyo3::{types::PyDict, Bound, PyResult, Python}; @@ -146,6 +149,7 @@ pub struct InfoJson { #[derive(Debug, Deserialize, Serialize, PartialEq)] #[serde(deny_unknown_fields)] +#[allow(missing_copy_implementations)] pub struct FilesToMove {} #[derive(Debug, Deserialize, Serialize, PartialEq)] @@ -197,8 +201,7 @@ pub struct Subtitle { pub url: Url, } -#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq)] -#[serde(deny_unknown_fields)] +#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)] pub enum SubtitleExt { #[serde(alias = "vtt")] Vtt, @@ -266,7 +269,7 @@ where Ok(None) } -#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq)] +#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)] #[serde(deny_unknown_fields)] pub enum SponsorblockChapterType { #[serde(alias = "skip")] @@ -278,7 +281,7 @@ pub enum SponsorblockChapterType { #[serde(alias = "poi")] Poi, } -#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq)] +#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)] #[serde(deny_unknown_fields)] pub enum SponsorblockChapterCategory { #[serde(alias = "filler")] @@ -314,13 +317,14 @@ pub enum SponsorblockChapterCategory { #[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)] #[serde(deny_unknown_fields)] +#[allow(missing_copy_implementations)] pub struct HeatMapEntry { pub start_time: f64, pub end_time: f64, pub value: f64, } -#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq)] +#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)] 
#[serde(deny_unknown_fields)] pub enum Extractor { #[serde(alias = "generic")] @@ -337,7 +341,7 @@ pub enum Extractor { YouTubeTab, } -#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq)] +#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)] #[serde(deny_unknown_fields)] pub enum ExtractorKey { #[serde(alias = "Generic")] @@ -354,7 +358,7 @@ pub enum ExtractorKey { YouTubeTab, } -#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq)] +#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)] #[serde(deny_unknown_fields)] pub enum InfoType { #[serde(alias = "playlist")] @@ -385,6 +389,7 @@ pub enum Parent { } impl Parent { + #[must_use] pub fn id(&self) -> Option<&str> { if let Self::Id(id) = self { Some(id) @@ -421,6 +426,7 @@ impl From for Id { #[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)] #[serde(deny_unknown_fields)] +#[allow(clippy::struct_excessive_bools)] pub struct Comment { pub id: Id, pub text: String, @@ -522,6 +528,7 @@ pub struct Format { #[derive(Debug, Deserialize, Serialize, Eq, PartialEq, PartialOrd, Ord)] #[serde(deny_unknown_fields)] +#[allow(missing_copy_implementations)] pub struct DownloaderOptions { http_chunk_size: u64, } @@ -554,8 +561,8 @@ pub struct Fragment { } impl InfoJson { - pub fn to_py_dict(self, py: Python) -> PyResult> { - let output: Bound = json_loads_str(py, self)?; + pub fn to_py_dict(self, py: Python<'_>) -> PyResult> { + let output: Bound<'_, PyDict> = json_loads_str(py, self)?; Ok(output) } } diff --git a/yt/Cargo.toml b/yt/Cargo.toml index 4cc712c..e35e4be 100644 --- a/yt/Cargo.toml +++ b/yt/Cargo.toml @@ -35,7 +35,6 @@ regex = "1.11.0" sqlx = { version = "0.8.2", features = ["runtime-tokio", "sqlite"] } stderrlog = "0.6.0" tempfile = "3.13.0" -tokio = { version = "1.40.0", features = [ "rt-multi-thread", "macros", "process", "time", "io-std", ] } toml = "0.8.19" trinitry = { version = 
"0.2.2" } xdg = "2.5.2" @@ -44,6 +43,7 @@ libmpv2.workspace = true log.workspace = true serde.workspace = true serde_json.workspace = true +tokio.workspace = true url.workspace = true yt_dlp.workspace = true diff --git a/yt/src/app.rs b/yt/src/app.rs index b7d136e..1f82214 100644 --- a/yt/src/app.rs +++ b/yt/src/app.rs @@ -13,6 +13,7 @@ use sqlx::{query, sqlite::SqliteConnectOptions, SqlitePool}; use crate::config::Config; +#[derive(Debug)] pub struct App { pub database: SqlitePool, pub config: Config, diff --git a/yt/src/cache/mod.rs b/yt/src/cache/mod.rs index a3e08c9..6ceef25 100644 --- a/yt/src/cache/mod.rs +++ b/yt/src/cache/mod.rs @@ -47,7 +47,7 @@ pub async fn invalidate(app: &App, hard: bool) -> Result<()> { info!("Got videos to invalidate: '{}'", all_cached_things.len()); for video in all_cached_things { - invalidate_video(app, &video, hard).await? + invalidate_video(app, &video, hard).await?; } Ok(()) diff --git a/yt/src/cli.rs b/yt/src/cli.rs index d19586e..2208caa 100644 --- a/yt/src/cli.rs +++ b/yt/src/cli.rs @@ -23,6 +23,7 @@ use crate::{ #[derive(Parser, Debug)] #[clap(author, version, about, long_about = None)] +#[allow(clippy::module_name_repetitions)] /// An command line interface to select, download and watch videos pub struct CliArgs { #[command(subcommand)] @@ -127,7 +128,7 @@ pub enum Command { fn byte_parser(input: &str) -> Result { Ok(input .parse::() - .with_context(|| format!("Failed to parse '{}' as bytes!", input))? + .with_context(|| format!("Failed to parse '{input}' as bytes!"))? 
.as_u64()) } diff --git a/yt/src/comments/comment.rs b/yt/src/comments/comment.rs index 752c510..c998cdb 100644 --- a/yt/src/comments/comment.rs +++ b/yt/src/comments/comment.rs @@ -11,6 +11,7 @@ use yt_dlp::wrapper::info_json::Comment; #[derive(Debug, Clone)] +#[allow(clippy::module_name_repetitions)] pub struct CommentExt { pub value: Comment, pub replies: Vec, @@ -43,7 +44,7 @@ impl Comments { } impl CommentExt { pub fn push_reply(&mut self, value: CommentExt) { - self.replies.push(value) + self.replies.push(value); } pub fn get_mut_reply(&mut self, key: &str) -> Option<&mut CommentExt> { self.replies diff --git a/yt/src/comments/display.rs b/yt/src/comments/display.rs index 7000063..4d678bc 100644 --- a/yt/src/comments/display.rs +++ b/yt/src/comments/display.rs @@ -23,8 +23,6 @@ impl Comments { } fn render_help(&self, color: bool) -> Result { - let mut f = String::new(); - macro_rules! c { ($color_str:expr, $write:ident, $color:expr) => { if $color { @@ -87,29 +85,31 @@ impl Comments { f.write_str(":\n")?; f.write_str(ident)?; - f.write_str(&value.text.replace('\n', &format!("\n{}", ident)))?; + f.write_str(&value.text.replace('\n', &format!("\n{ident}")))?; f.write_str("\n")?; - if !comment.replies.is_empty() { + if comment.replies.is_empty() { + f.write_str("\n")?; + } else { let mut children = comment.replies.clone(); children.sort_by(|a, b| a.value.timestamp.cmp(&b.value.timestamp)); for child in children { format(&child, f, ident_count + 4, color)?; } - } else { - f.write_str("\n")?; } Ok(()) } + let mut f = String::new(); + if !&self.vec.is_empty() { let mut children = self.vec.clone(); children.sort_by(|a, b| b.value.like_count.cmp(&a.value.like_count)); for child in children { - format(&child, &mut f, 0, color)? 
+ format(&child, &mut f, 0, color)?; } } Ok(f) diff --git a/yt/src/comments/mod.rs b/yt/src/comments/mod.rs index 5fbc3fb..fd9f9da 100644 --- a/yt/src/comments/mod.rs +++ b/yt/src/comments/mod.rs @@ -25,12 +25,14 @@ use crate::{ getters::{get_currently_playing_video, get_video_info_json}, Video, }, + unreachable::Unreachable, }; mod comment; mod display; -pub async fn get_comments(app: &App) -> Result { +#[allow(clippy::too_many_lines)] +pub async fn get(app: &App) -> Result { let currently_playing_video: Video = if let Some(video) = get_currently_playing_video(app).await? { video @@ -40,28 +42,38 @@ pub async fn get_comments(app: &App) -> Result { let mut info_json: InfoJson = get_video_info_json(¤tly_playing_video) .await? - .expect("A currently *playing* must be cached. And thus the info.json should be available"); - - let base_comments = mem::take(&mut info_json.comments).expect("A video should have comments"); + .unreachable( + "A currently *playing* must be cached. And thus the info.json should be available", + ); + + let base_comments = mem::take(&mut info_json.comments).with_context(|| { + format!( + "The video ('{}') does not have comments!", + info_json + .title + .as_ref() + .unwrap_or(&("".to_owned())) + ) + })?; drop(info_json); let mut comments = Comments::new(); - base_comments.into_iter().for_each(|c| { + for c in base_comments { if let Parent::Id(id) = &c.parent { comments.insert(&(id.clone()), CommentExt::from(c)); } else { comments.push(CommentExt::from(c)); } - }); + } comments.vec.iter_mut().for_each(|comment| { let replies = mem::take(&mut comment.replies); let mut output_replies: Vec = vec![]; - let re = Regex::new(r"\u{200b}?(@[^\t\s]+)\u{200b}?").unwrap(); + let re = Regex::new(r"\u{200b}?(@[^\t\s]+)\u{200b}?").unreachable("This is hardcoded"); for reply in replies { if let Some(replyee_match) = re.captures(&reply.value.text){ - let full_match = replyee_match.get(0).expect("This always exists"); + let full_match = 
replyee_match.get(0).unreachable("This will always exist"); let text = reply. value. text[0..full_match.start()] @@ -72,7 +84,7 @@ pub async fn get_comments(app: &App) -> Result { .text[full_match.end()..]; let text: &str = text.trim().trim_matches('\u{200b}'); - let replyee = replyee_match.get(1).expect("This should exist").as_str(); + let replyee = replyee_match.get(1).unreachable("This should also exist").as_str(); if let Some(parent) = output_replies @@ -87,7 +99,7 @@ pub async fn get_comments(app: &App) -> Result { parent.replies.push(CommentExt::from(Comment { text: text.to_owned(), ..reply.value - })) + })); } else if let Some(parent) = output_replies .iter_mut() // .rev() @@ -99,7 +111,7 @@ pub async fn get_comments(app: &App) -> Result { parent.replies.push(CommentExt::from(Comment { text: text.to_owned(), ..reply.value - })) + })); } else if let Some(parent) = output_replies .iter_mut() // .rev() @@ -110,7 +122,7 @@ pub async fn get_comments(app: &App) -> Result { parent.replies.push(CommentExt::from(Comment { text: text.to_owned(), ..reply.value - })) + })); } else if let Some(parent) = output_replies.iter_mut() // .rev() .filter(|com| com.value.author == replyee) @@ -119,7 +131,7 @@ pub async fn get_comments(app: &App) -> Result { parent.replies.push(CommentExt::from(Comment { text: text.to_owned(), ..reply.value - })) + })); } else { eprintln!( "Failed to find a parent for ('{}') both directly and via replies! 
The reply text was:\n'{}'\n", @@ -139,7 +151,7 @@ pub async fn get_comments(app: &App) -> Result { } pub async fn comments(app: &App) -> Result<()> { - let comments = get_comments(app).await?; + let comments = get(app).await?; let mut less = Command::new("less") .args(["--raw-control-chars"]) @@ -152,7 +164,7 @@ pub async fn comments(app: &App) -> Result<()> { .args(["--uniform-spacing", "--split-only", "--width=90"]) .stdin(Stdio::piped()) .stderr(Stdio::inherit()) - .stdout(less.stdin.take().expect("Should be open")) + .stdout(less.stdin.take().unreachable("Should be open")) .spawn() .context("Failed to run fmt")?; @@ -160,7 +172,7 @@ pub async fn comments(app: &App) -> Result<()> { std::thread::spawn(move || { stdin .write_all(comments.render(true).as_bytes()) - .expect("Should be able to write to stdin of fmt"); + .unreachable("Should be able to write to the stdin of less"); }); let _ = less.wait().context("Failed to await less")?; @@ -173,6 +185,6 @@ mod test { #[test] fn test_string_replacement() { let s = "A \n\nB\n\nC".to_owned(); - assert_eq!("A \n \n B\n \n C", s.replace('\n', "\n ")) + assert_eq!("A \n \n B\n \n C", s.replace('\n', "\n ")); } } diff --git a/yt/src/config/default.rs b/yt/src/config/default.rs index 59063f5..3b7a3f5 100644 --- a/yt/src/config/default.rs +++ b/yt/src/config/default.rs @@ -16,56 +16,56 @@ fn get_runtime_path(name: &'static str) -> Result { let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX)?; xdg_dirs .place_runtime_file(name) - .with_context(|| format!("Failed to place runtime file: '{}'", name)) + .with_context(|| format!("Failed to place runtime file: '{name}'")) } fn get_data_path(name: &'static str) -> Result { let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX)?; xdg_dirs .place_data_file(name) - .with_context(|| format!("Failed to place data file: '{}'", name)) + .with_context(|| format!("Failed to place data file: '{name}'")) } fn get_config_path(name: &'static str) -> Result { let xdg_dirs = 
xdg::BaseDirectories::with_prefix(PREFIX)?; xdg_dirs .place_config_file(name) - .with_context(|| format!("Failed to place config file: '{}'", name)) + .with_context(|| format!("Failed to place config file: '{name}'")) } pub(super) fn create_path(path: PathBuf) -> Result { if !path.exists() { if let Some(parent) = path.parent() { std::fs::create_dir_all(parent) - .with_context(|| format!("Failed to create the '{}' directory", path.display()))? + .with_context(|| format!("Failed to create the '{}' directory", path.display()))?; } } Ok(path) } -pub const PREFIX: &str = "yt"; +pub(crate) const PREFIX: &str = "yt"; -pub mod select { - pub fn playback_speed() -> f64 { +pub(crate) mod select { + pub(crate) fn playback_speed() -> f64 { 2.7 } - pub fn subtitle_langs() -> &'static str { + pub(crate) fn subtitle_langs() -> &'static str { "" } } -pub mod watch { - pub fn local_comments_length() -> usize { +pub(crate) mod watch { + pub(crate) fn local_comments_length() -> usize { 1000 } } -pub mod update { - pub fn max_backlog() -> u32 { +pub(crate) mod update { + pub(crate) fn max_backlog() -> u32 { 20 } } -pub mod paths { +pub(crate) mod paths { use std::{env::temp_dir, path::PathBuf}; use anyhow::Result; @@ -73,30 +73,30 @@ pub mod paths { use super::{create_path, get_config_path, get_data_path, get_runtime_path, PREFIX}; // We download to the temp dir to avoid taxing the disk - pub fn download_dir() -> Result { + pub(crate) fn download_dir() -> Result { let temp_dir = temp_dir(); create_path(temp_dir.join(PREFIX)) } - pub fn mpv_config_path() -> Result { + pub(crate) fn mpv_config_path() -> Result { get_config_path("mpv.conf") } - pub fn mpv_input_path() -> Result { + pub(crate) fn mpv_input_path() -> Result { get_config_path("mpv.input.conf") } - pub fn database_path() -> Result { + pub(crate) fn database_path() -> Result { get_data_path("videos.sqlite") } - pub fn config_path() -> Result { + pub(crate) fn config_path() -> Result { get_config_path("config.toml") } - pub fn 
last_selection_path() -> Result { + pub(crate) fn last_selection_path() -> Result { get_runtime_path("selected.yts") } } -pub mod download { - pub fn max_cache_size() -> &'static str { +pub(crate) mod download { + pub(crate) fn max_cache_size() -> &'static str { "3 GiB" } } diff --git a/yt/src/config/definitions.rs b/yt/src/config/definitions.rs index d37e6da..7d5feee 100644 --- a/yt/src/config/definitions.rs +++ b/yt/src/config/definitions.rs @@ -14,7 +14,7 @@ use serde::Deserialize; #[derive(Debug, Deserialize, PartialEq)] #[serde(deny_unknown_fields)] -pub struct ConfigFile { +pub(crate) struct ConfigFile { pub select: Option, pub watch: Option, pub paths: Option, @@ -24,33 +24,33 @@ pub struct ConfigFile { #[derive(Debug, Deserialize, PartialEq, Clone, Copy)] #[serde(deny_unknown_fields)] -pub struct UpdateConfig { +pub(crate) struct UpdateConfig { pub max_backlog: Option, } #[derive(Debug, Deserialize, PartialEq, Clone)] #[serde(deny_unknown_fields)] -pub struct DownloadConfig { +pub(crate) struct DownloadConfig { /// This will then be converted to an u64 pub max_cache_size: Option, } #[derive(Debug, Deserialize, PartialEq, Clone)] #[serde(deny_unknown_fields)] -pub struct SelectConfig { +pub(crate) struct SelectConfig { pub playback_speed: Option, pub subtitle_langs: Option, } #[derive(Debug, Deserialize, PartialEq, Clone, Copy)] #[serde(deny_unknown_fields)] -pub struct WatchConfig { +pub(crate) struct WatchConfig { pub local_comments_length: Option, } #[derive(Debug, Deserialize, PartialEq, Clone)] #[serde(deny_unknown_fields)] -pub struct PathsConfig { +pub(crate) struct PathsConfig { pub download_dir: Option, pub mpv_config_path: Option, pub mpv_input_path: Option, diff --git a/yt/src/config/file_system.rs b/yt/src/config/file_system.rs index 5751583..11bcd12 100644 --- a/yt/src/config/file_system.rs +++ b/yt/src/config/file_system.rs @@ -71,12 +71,11 @@ impl Config { db_path: Option, config_path: Option, ) -> Result { - let config_file_path = config_path 
- .map(Ok) - .unwrap_or_else(|| -> Result<_> { paths::config_path() })?; + let config_file_path = + config_path.map_or_else(|| -> Result<_> { paths::config_path() }, Ok)?; let config: super::definitions::ConfigFile = - toml::from_str(&read_to_string(config_file_path).unwrap_or("".to_owned())) + toml::from_str(&read_to_string(config_file_path).unwrap_or(String::new())) .context("Failed to parse the config file as toml")?; Ok(Self { diff --git a/yt/src/config/mod.rs b/yt/src/config/mod.rs index ea40055..1aaf065 100644 --- a/yt/src/config/mod.rs +++ b/yt/src/config/mod.rs @@ -8,6 +8,8 @@ // You should have received a copy of the License along with this program. // If not, see . +#![allow(clippy::module_name_repetitions)] + use std::path::PathBuf; use bytes::Bytes; @@ -17,7 +19,7 @@ mod default; mod definitions; pub mod file_system; -#[derive(Serialize)] +#[derive(Serialize, Debug)] pub struct Config { pub select: SelectConfig, pub watch: WatchConfig, @@ -25,24 +27,29 @@ pub struct Config { pub download: DownloadConfig, pub update: UpdateConfig, } -#[derive(Serialize)] +#[derive(Serialize, Debug)] +// This structure could get non-copy fields in the future. +// The same thing applies to all the other structures here. 
+#[allow(missing_copy_implementations)] pub struct UpdateConfig { pub max_backlog: u32, } -#[derive(Serialize)] +#[derive(Serialize, Debug)] +#[allow(missing_copy_implementations)] pub struct DownloadConfig { pub max_cache_size: Bytes, } -#[derive(Serialize)] +#[derive(Serialize, Debug)] pub struct SelectConfig { pub playback_speed: f64, pub subtitle_langs: String, } -#[derive(Serialize)] +#[derive(Serialize, Debug)] +#[allow(missing_copy_implementations)] pub struct WatchConfig { pub local_comments_length: usize, } -#[derive(Serialize)] +#[derive(Serialize, Debug)] pub struct PathsConfig { pub download_dir: PathBuf, pub mpv_config_path: PathBuf, diff --git a/yt/src/download/download_options.rs b/yt/src/download/download_options.rs index e93170a..1dc0bf2 100644 --- a/yt/src/download/download_options.rs +++ b/yt/src/download/download_options.rs @@ -22,10 +22,8 @@ use crate::{app::App, storage::video_database::YtDlpOptions}; // "logger": _ytdl_logger // } -pub fn download_opts( - app: &App, - additional_opts: YtDlpOptions, -) -> serde_json::Map { +#[must_use] +pub fn download_opts(app: &App, additional_opts: &YtDlpOptions) -> serde_json::Map { match json!({ "extract_flat": false, "extractor_args": { @@ -103,10 +101,10 @@ pub fn download_opts( } ] }) { - serde_json::Value::Object(mut obj) => { + Value::Object(mut obj) => { obj.insert( "subtitleslangs".to_owned(), - serde_json::Value::Array( + Value::Array( additional_opts .subtitle_langs .split(',') diff --git a/yt/src/download/mod.rs b/yt/src/download/mod.rs index 56910f9..a056f80 100644 --- a/yt/src/download/mod.rs +++ b/yt/src/download/mod.rs @@ -19,6 +19,7 @@ use crate::{ getters::get_video_yt_dlp_opts, Video, YtDlpOptions, }, + unreachable::Unreachable, }; use anyhow::{bail, Context, Result}; @@ -27,9 +28,11 @@ use futures::{future::BoxFuture, FutureExt}; use log::{debug, error, info, warn}; use tokio::{fs, task::JoinHandle, time}; +#[allow(clippy::module_name_repetitions)] pub mod download_options; 
#[derive(Debug)] +#[allow(clippy::module_name_repetitions)] pub struct CurrentDownload { task_handle: JoinHandle>, extractor_hash: ExtractorHash, @@ -37,7 +40,7 @@ pub struct CurrentDownload { impl CurrentDownload { fn new_from_video(app: Arc, video: Video) -> Self { - let extractor_hash = video.extractor_hash.clone(); + let extractor_hash = video.extractor_hash; let task_handle = tokio::spawn(async move { Downloader::actually_cache_video(&app, &video) @@ -64,6 +67,7 @@ enum CacheSizeCheck { ExceedsMaxCacheSize, } +#[derive(Debug)] pub struct Downloader { current_download: Option, video_size_cache: HashMap, @@ -78,6 +82,7 @@ impl Default for Downloader { } impl Downloader { + #[must_use] pub fn new() -> Self { Self { current_download: None, @@ -167,14 +172,17 @@ impl Downloader { }; if self.current_download.is_some() { - let current_download = self.current_download.take().expect("Is Some"); + let current_download = self.current_download.take().unreachable("It is `Some`."); if current_download.task_handle.is_finished() { current_download.task_handle.await??; continue; } - if next_video.extractor_hash != current_download.extractor_hash { + if next_video.extractor_hash == current_download.extractor_hash { + // Reset the taken value + self.current_download = Some(current_download); + } else { info!( "Noticed, that the next video is not the video being downloaded, replacing it ('{}' vs. '{}')!", next_video.extractor_hash.into_short_hash(&app).await?, current_download.extractor_hash.into_short_hash(&app).await? 
@@ -187,9 +195,6 @@ impl Downloader { CurrentDownload::new_from_video(Arc::clone(&app), next_video); self.current_download = Some(new_current_download); - } else { - // Reset the taken value - self.current_download = Some(current_download); } } else { info!( @@ -238,9 +243,9 @@ impl Downloader { } else { // the subtitle file size should be negligible let add_opts = YtDlpOptions { - subtitle_langs: "".to_owned(), + subtitle_langs: String::new(), }; - let opts = &download_opts(app, add_opts); + let opts = &download_opts(app, &add_opts); let result = yt_dlp::extract_info(opts, &video.url, false, true) .await @@ -253,9 +258,11 @@ impl Downloader { } else if let Some(val) = result.filesize_approx { val } else if result.duration.is_some() && result.tbr.is_some() { + #[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)] let duration = result.duration.expect("Is some").ceil() as u64; // TODO: yt_dlp gets this from the format + #[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)] let tbr = result.tbr.expect("Is Some").ceil() as u64; duration * tbr * (1000 / 8) @@ -269,8 +276,7 @@ impl Downloader { }; assert_eq!( - self.video_size_cache - .insert(video.extractor_hash.clone(), size), + self.video_size_cache.insert(video.extractor_hash, size), None ); @@ -283,7 +289,7 @@ impl Downloader { let addional_opts = get_video_yt_dlp_opts(app, &video.extractor_hash).await?; - let result = yt_dlp::download(&[video.url.clone()], &download_opts(app, addional_opts)) + let result = yt_dlp::download(&[video.url.clone()], &download_opts(app, &addional_opts)) .await .with_context(|| format!("Failed to download video: '{}'", video.title))?; diff --git a/yt/src/main.rs b/yt/src/main.rs index e2c517c..d4cd2b6 100644 --- a/yt/src/main.rs +++ b/yt/src/main.rs @@ -8,6 +8,10 @@ // You should have received a copy of the License along with this program. // If not, see . +// `yt` is not a library. 
Besides, the `anyhow::Result` type is really useless, if you're not going +// to print it anyways. +#![allow(clippy::missing_errors_doc)] + use std::{collections::HashMap, fs, sync::Arc}; use anyhow::{bail, Context, Result}; @@ -29,7 +33,7 @@ use url::Url; use videos::display::format_video::FormatVideo; use yt_dlp::wrapper::info_json::InfoJson; -use crate::{cli::Command, storage::subscriptions::get_subscriptions}; +use crate::{cli::Command, storage::subscriptions}; pub mod app; pub mod cli; @@ -49,6 +53,8 @@ pub mod videos; pub mod watch; #[tokio::main] +// This is _the_ main function after all. It is not really good, but it sort of works. +#[allow(clippy::too_many_lines)] async fn main() -> Result<()> { let args = cli::CliArgs::parse(); @@ -146,7 +152,7 @@ async fn main() -> Result<()> { max_backlog, subscriptions, } => { - let all_subs = get_subscriptions(&app).await?; + let all_subs = subscriptions::get(&app).await?; for sub in &subscriptions { if !all_subs.0.contains_key(sub) { @@ -173,14 +179,14 @@ async fn main() -> Result<()> { .context("Failed to remove a subscription")?; } SubscriptionCommand::List {} => { - let all_subs = get_subscriptions(&app).await?; + let all_subs = subscriptions::get(&app).await?; for (key, val) in all_subs.0 { println!("{}: '{}'", key, val.url); } } SubscriptionCommand::Export {} => { - let all_subs = get_subscriptions(&app).await?; + let all_subs = subscriptions::get(&app).await?; for val in all_subs.0.values() { println!("{}", val.url); } @@ -189,9 +195,9 @@ async fn main() -> Result<()> { if let Some(file) = file { let f = File::open(file).await?; - subscribe::import(&app, BufReader::new(f), force).await? + subscribe::import(&app, BufReader::new(f), force).await?; } else { - subscribe::import(&app, BufReader::new(stdin()), force).await? 
+ subscribe::import(&app, BufReader::new(stdin()), force).await?; }; } }, @@ -202,7 +208,7 @@ async fn main() -> Result<()> { Command::Config {} => status::config(&app)?, Command::Database { command } => match command { - CacheCommand::Invalidate { hard } => cache::invalidate(&app, hard).await?, + CacheCommand::Invalidate { hard } => invalidate(&app, hard).await?, CacheCommand::Maintain { all } => cache::maintain(&app, all).await?, }, @@ -211,15 +217,19 @@ async fn main() -> Result<()> { let string = fs::read_to_string(&path) .with_context(|| format!("Failed to read '{}' to string!", path.display()))?; - let _: InfoJson = - serde_json::from_str(&string).context("Failed to deserialize value")?; + drop( + serde_json::from_str::(&string) + .context("Failed to deserialize value")?, + ); } CheckCommand::UpdateInfoJson { path } => { let string = fs::read_to_string(&path) .with_context(|| format!("Failed to read '{}' to string!", path.display()))?; - let _: HashMap = - serde_json::from_str(&string).context("Failed to deserialize value")?; + drop( + serde_json::from_str::>(&string) + .context("Failed to deserialize value")?, + ); } }, Command::Comments {} => { diff --git a/yt/src/select/cmds.rs b/yt/src/select/cmds.rs index 6e71607..f1488bc 100644 --- a/yt/src/select/cmds.rs +++ b/yt/src/select/cmds.rs @@ -43,16 +43,6 @@ pub async fn handle_select_cmd( } SelectCommand::Add { urls } => { for url in urls { - let opts = download_opts( - &app, - video_database::YtDlpOptions { - subtitle_langs: "".to_owned(), - }, - ); - let entry = yt_dlp::extract_info(&opts, &url, false, true) - .await - .with_context(|| format!("Failed to fetch entry for url: '{}'", url))?; - async fn add_entry( app: &App, entry: yt_dlp::wrapper::info_json::InfoJson, @@ -67,9 +57,19 @@ pub async fn handle_select_cmd( Ok(()) } + let opts = download_opts( + app, + &video_database::YtDlpOptions { + subtitle_langs: String::new(), + }, + ); + let entry = yt_dlp::extract_info(&opts, &url, false, true) + .await + 
.with_context(|| format!("Failed to fetch entry for url: '{url}'"))?; + match entry._type { Some(InfoType::Video) => { - add_entry(&app, entry).await?; + add_entry(app, entry).await?; } Some(InfoType::Playlist) => { if let Some(mut entries) = entry.entries { @@ -79,7 +79,7 @@ pub async fn handle_select_cmd( let futures: Vec<_> = entries .into_iter() - .map(|entry| add_entry(&app, entry)) + .map(|entry| add_entry(app, entry)) .collect(); join_all(futures).await.into_iter().collect::>()?; diff --git a/yt/src/select/mod.rs b/yt/src/select/mod.rs index ca7a203..ddc8a5e 100644 --- a/yt/src/select/mod.rs +++ b/yt/src/select/mod.rs @@ -11,8 +11,8 @@ use std::{ env::{self}, fs, - io::{BufRead, Write}, - io::{BufReader, BufWriter}, + io::{BufRead, BufReader, BufWriter, Write}, + string::String, }; use crate::{ @@ -20,6 +20,7 @@ use crate::{ cli::CliArgs, constants::HELP_STR, storage::video_database::{getters::get_videos, VideoStatus}, + unreachable::Unreachable, videos::display::format_video::FormatVideo, }; @@ -64,7 +65,7 @@ pub async fn select(app: &App, done: bool, use_last_selection: bool) -> Result<( // Warmup the cache for the display rendering of the videos. // Otherwise the futures would all try to warm it up at the same time. 
if let Some(vid) = matching_videos.first() { - let _ = vid.to_formatted_video(app).await?; + drop(vid.to_formatted_video(app).await?); } let mut edit_file = BufWriter::new(&temp_file); @@ -82,7 +83,7 @@ pub async fn select(app: &App, done: bool, use_last_selection: bool) -> Result<( edit_file .write_all(formatted_line.as_bytes()) - .expect("This write should not fail"); + .context("Failed to write to `edit_file`")?; Ok(()) })?; @@ -125,24 +126,26 @@ pub async fn select(app: &App, done: bool, use_last_selection: bool) -> Result<( let arg_line = ["yt", "select"] .into_iter() - .chain(line.iter().map(|val| val.as_str())); + .chain(line.iter().map(String::as_str)); let args = CliArgs::parse_from(arg_line); - let cmd = if let crate::cli::Command::Select { cmd } = - args.command.expect("This will be some") - { - cmd - } else { + let crate::cli::Command::Select { cmd } = args + .command + .unreachable("This will be some, as we constructed it above.") + else { unreachable!("This is checked in the `filter_line` function") }; handle_select_cmd( app, - cmd.expect("This value should always be some here"), + cmd.unreachable( + "This value should always be some \ + here, as it would otherwise thrown an error above.", + ), Some(line_number), ) - .await? + .await?; } } diff --git a/yt/src/select/selection_file/duration.rs b/yt/src/select/selection_file/duration.rs index a38981c..3957f0f 100644 --- a/yt/src/select/selection_file/duration.rs +++ b/yt/src/select/selection_file/duration.rs @@ -12,6 +12,8 @@ use std::str::FromStr; use anyhow::{Context, Result}; +use crate::unreachable::Unreachable; + #[derive(Copy, Clone, Debug)] pub struct Duration { time: u32, @@ -23,11 +25,9 @@ impl FromStr for Duration { fn from_str(s: &str) -> Result { fn parse_num(str: &str, suffix: char) -> Result { str.strip_suffix(suffix) - .with_context(|| { - format!("Failed to strip suffix '{}' of number: '{}'", suffix, str) - })? 
+ .with_context(|| format!("Failed to strip suffix '{suffix}' of number: '{str}'"))? .parse::() - .with_context(|| format!("Failed to parse '{}'", suffix)) + .with_context(|| format!("Failed to parse '{suffix}'")) } if s == "[No Duration]" { @@ -66,7 +66,9 @@ impl FromStr for Duration { impl From> for Duration { fn from(value: Option) -> Self { Self { - time: value.unwrap_or(0.0).ceil() as u32, + #[allow(clippy::cast_possible_truncation)] + time: u32::try_from(value.unwrap_or(0.0).ceil() as i128) + .unreachable("This should not exceed `u32::MAX`"), } } } diff --git a/yt/src/select/selection_file/mod.rs b/yt/src/select/selection_file/mod.rs index 45809fa..5e5643d 100644 --- a/yt/src/select/selection_file/mod.rs +++ b/yt/src/select/selection_file/mod.rs @@ -20,10 +20,7 @@ pub fn process_line(line: &str) -> Result>> { if line.starts_with('#') || line.trim().is_empty() { Ok(None) } else { - // pick 2195db "CouchRecherche? Gunnar und Han von STRG_F sind #mitfunkzuhause" "2020-04-01" "STRG_F - Live" "[1h 5m]" "https://www.youtube.com/watch?v=C8UXOaoMrXY" - - let tri = - Trinitry::new(line).with_context(|| format!("Failed to parse line '{}'", line))?; + let tri = Trinitry::new(line).with_context(|| format!("Failed to parse line '{line}'"))?; let mut vec = Vec::with_capacity(tri.arguments().len() + 1); vec.push(tri.command().to_owned()); diff --git a/yt/src/status/mod.rs b/yt/src/status/mod.rs index 7ffe8d7..474cab7 100644 --- a/yt/src/status/mod.rs +++ b/yt/src/status/mod.rs @@ -15,7 +15,7 @@ use crate::{ app::App, download::Downloader, storage::{ - subscriptions::get_subscriptions, + subscriptions::get, video_database::{getters::get_videos, VideoStatus}, }, }; @@ -72,7 +72,7 @@ pub async fn show(app: &App) -> Result<()> { let drop_videos_changing = get!(@changing all_videos, Drop); let dropped_videos_changing = get!(@changing all_videos, Dropped); - let subscriptions = get_subscriptions(app).await?; + let subscriptions = get(app).await?; let subscriptions_len = 
subscriptions.0.len(); let cache_usage_raw = Downloader::get_current_cache_allocation(app) @@ -101,7 +101,7 @@ Dropped Videos: {dropped_videos_len} ({dropped_videos_changing} changing) pub fn config(app: &App) -> Result<()> { let config_str = toml::to_string(&app.config)?; - print!("{}", config_str); + print!("{config_str}"); Ok(()) } diff --git a/yt/src/storage/subscriptions.rs b/yt/src/storage/subscriptions.rs index 22edd08..8e089f0 100644 --- a/yt/src/storage/subscriptions.rs +++ b/yt/src/storage/subscriptions.rs @@ -19,7 +19,7 @@ use sqlx::query; use url::Url; use yt_dlp::wrapper::info_json::InfoType; -use crate::app::App; +use crate::{app::App, unreachable::Unreachable}; #[derive(Clone, Debug)] pub struct Subscription { @@ -31,6 +31,7 @@ pub struct Subscription { } impl Subscription { + #[must_use] pub fn new(name: String, url: Url) -> Self { Self { name, url } } @@ -38,14 +39,13 @@ impl Subscription { /// Check whether an URL could be used as a subscription URL pub async fn check_url(url: &Url) -> Result { - let yt_opts = match json!( { + let Value::Object(yt_opts) = json!( { "playliststart": 1, "playlistend": 10, "noplaylist": false, "extract_flat": "in_playlist", - }) { - Value::Object(map) => map, - _ => unreachable!("This is hardcoded"), + }) else { + unreachable!("This is hardcoded"); }; let info = yt_dlp::extract_info(&yt_opts, url, false, false).await?; @@ -55,10 +55,11 @@ pub async fn check_url(url: &Url) -> Result { Ok(info._type == Some(InfoType::Playlist)) } -#[derive(Default)] +#[derive(Default, Debug)] pub struct Subscriptions(pub(crate) HashMap); -pub async fn remove_all_subscriptions(app: &App) -> Result<()> { +/// Remove all subscriptions +pub async fn remove_all(app: &App) -> Result<()> { query!( " DELETE FROM subscriptions; @@ -71,7 +72,7 @@ pub async fn remove_all_subscriptions(app: &App) -> Result<()> { } /// Get a list of subscriptions -pub async fn get_subscriptions(app: &App) -> Result { +pub async fn get(app: &App) -> Result { let 
raw_subs = query!( " SELECT * @@ -88,7 +89,7 @@ pub async fn get_subscriptions(app: &App) -> Result { sub.name.clone(), Subscription::new( sub.name, - Url::parse(&sub.url).expect("This should be valid"), + Url::parse(&sub.url).unreachable("It was an URL, when we inserted it."), ), ) }) @@ -117,6 +118,8 @@ pub async fn add_subscription(app: &App, sub: &Subscription) -> Result<()> { Ok(()) } +/// # Panics +/// Only if assertions fail pub async fn remove_subscription(app: &App, sub: &Subscription) -> Result<()> { let output = query!( " diff --git a/yt/src/storage/video_database/downloader.rs b/yt/src/storage/video_database/downloader.rs index ccd4ca9..bfb7aa3 100644 --- a/yt/src/storage/video_database/downloader.rs +++ b/yt/src/storage/video_database/downloader.rs @@ -15,12 +15,15 @@ use log::debug; use sqlx::query; use url::Url; -use crate::{app::App, storage::video_database::VideoStatus}; +use crate::{app::App, storage::video_database::VideoStatus, unreachable::Unreachable}; use super::{ExtractorHash, Video}; /// Returns to next video which should be downloaded. This respects the priority assigned by select. /// It does not return videos, which are already cached. +/// +/// # Panics +/// Only if assertions fail. 
pub async fn get_next_uncached_video(app: &App) -> Result> { let status = VideoStatus::Watch.as_db_integer(); @@ -46,7 +49,7 @@ pub async fn get_next_uncached_video(app: &App) -> Result> { let thumbnail_url = base .thumbnail_url .as_ref() - .map(|url| Url::parse(url).expect("Parsing this as url should always work")); + .map(|url| Url::parse(url).unreachable("Parsing this as url should always work")); let status_change = if base.status_change == 1 { true @@ -149,5 +152,6 @@ pub async fn get_allocated_cache(app: &App) -> Result { .fetch_one(&app.database) .await?; - Ok(count.count as u32) + Ok(u32::try_from(count.count) + .unreachable("The value should be strictly positive (and below `u32::MAX`)")) } diff --git a/yt/src/storage/video_database/extractor_hash.rs b/yt/src/storage/video_database/extractor_hash.rs index c956919..3aa3cd2 100644 --- a/yt/src/storage/video_database/extractor_hash.rs +++ b/yt/src/storage/video_database/extractor_hash.rs @@ -8,18 +8,18 @@ // You should have received a copy of the License along with this program. // If not, see .
-use std::{collections::HashMap, fmt::Display, str::FromStr}; +use std::{collections::HashSet, fmt::Display, str::FromStr}; use anyhow::{bail, Context, Result}; use blake3::Hash; use log::debug; use tokio::sync::OnceCell; -use crate::{app::App, storage::video_database::getters::get_all_hashes}; +use crate::{app::App, storage::video_database::getters::get_all_hashes, unreachable::Unreachable}; static EXTRACTOR_HASH_LENGTH: OnceCell = OnceCell::const_new(); -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)] pub struct ExtractorHash { hash: Hash, } @@ -40,6 +40,7 @@ impl Display for ShortHash { } #[derive(Debug, Clone)] +#[allow(clippy::module_name_repetitions)] pub struct LazyExtractorHash { value: ShortHash, } @@ -67,6 +68,7 @@ impl LazyExtractorHash { } impl ExtractorHash { + #[must_use] pub fn from_hash(hash: Hash) -> Self { Self { hash } } @@ -76,6 +78,7 @@ impl ExtractorHash { }) } + #[must_use] pub fn hash(&self) -> &Hash { &self.hash } @@ -88,9 +91,9 @@ impl ExtractorHash { .get_needed_char_len(app) .await .context("Failed to calculate needed char length")?; - EXTRACTOR_HASH_LENGTH - .set(needed_chars) - .expect("This should work at this stage"); + EXTRACTOR_HASH_LENGTH.set(needed_chars).unreachable( + "This should work at this stage, as we checked above that it is empty.", + ); needed_chars }; @@ -139,9 +142,9 @@ impl ExtractorHash { .map(|vec| vec.iter().take(i).collect::()) .collect(); - let mut uniqnes_hashmap: HashMap = HashMap::new(); + let mut uniqnes_hashmap: HashSet = HashSet::new(); for ch in i_chars { - if let Some(()) = uniqnes_hashmap.insert(ch, ()) { + if !uniqnes_hashmap.insert(ch) { // The key was already in the hash map, thus we have a duplicated char and need // at least one char more continue 'outer; diff --git a/yt/src/storage/video_database/getters.rs b/yt/src/storage/video_database/getters.rs index 29dd014..d8f9a3f 100644 --- a/yt/src/storage/video_database/getters.rs +++ 
b/yt/src/storage/video_database/getters.rs @@ -26,6 +26,7 @@ use crate::{ subscriptions::Subscription, video_database::{extractor_hash::ExtractorHash, Video}, }, + unreachable::Unreachable, }; use super::{MpvOptions, VideoOptions, VideoStatus, YtDlpOptions}; @@ -69,12 +70,15 @@ macro_rules! video_from_record { /// Get the lines to display at the selection file /// [`changing` = true]: Means that we include *only* videos, that have the `status_changing` flag set /// [`changing` = None]: Means that we include *both* videos, that have the `status_changing` flag set and not set +/// +/// # Panics +/// Only, if assertions fail. pub async fn get_videos( app: &App, allowed_states: &[VideoStatus], changing: Option, ) -> Result> { - let mut qb: QueryBuilder = QueryBuilder::new( + let mut qb: QueryBuilder<'_, Sqlite> = QueryBuilder::new( "\ SELECT * FROM videos @@ -132,7 +136,7 @@ pub async fn get_videos( extractor_hash: ExtractorHash::from_hash( base.get::("extractor_hash") .parse() - .expect("The db hash should be a valid blake3 hash"), + .unreachable("The db hash should always be a valid blake3 hash"), ), last_status_change: base.get("last_status_change"), parent_subscription_name: base @@ -143,9 +147,17 @@ pub async fn get_videos( thumbnail_url: base .get::, &str>("thumbnail_url") .as_ref() - .map(|url| Url::parse(url).expect("Parsing this as url should always work")), - title: base.get::("title").to_owned(), - url: Url::parse(base.get("url")).expect("Parsing this as url should always work"), + .map(|url| { + Url::parse(url).unreachable( + "Parsing this as url should always work. \ + As it was an URL when we put it in.", + ) + }), + title: base.get::("title").clone(), + url: Url::parse(base.get("url")).unreachable( + "Parsing this as url should always work. 
\ + As it was an URL when we put it in.", + ), priority: base.get("priority"), status_change: { let val = base.get::("status_change"); @@ -195,6 +207,8 @@ pub async fn get_video_by_hash(app: &App, hash: &ExtractorHash) -> Result