-rw-r--r--  Cargo.lock | 1
-rw-r--r--  Cargo.toml | 1
-rw-r--r--  crates/bytes/src/error.rs | 5
-rw-r--r--  crates/bytes/src/lib.rs | 11
-rw-r--r--  crates/libmpv2/libmpv2-sys/src/lib.rs | 9
-rw-r--r--  crates/libmpv2/src/mpv.rs | 4
-rw-r--r--  crates/libmpv2/src/mpv/protocol.rs | 2
-rw-r--r--  crates/libmpv2/src/mpv/render.rs | 4
-rw-r--r--  crates/yt_dlp/Cargo.toml | 1
-rw-r--r--  crates/yt_dlp/src/duration.rs | 7
-rw-r--r--  crates/yt_dlp/src/lib.rs | 67
-rw-r--r--  crates/yt_dlp/src/logging.rs | 16
-rw-r--r--  crates/yt_dlp/src/main.rs | 96
-rw-r--r--  crates/yt_dlp/src/tests.rs | 85
-rw-r--r--  crates/yt_dlp/src/wrapper/info_json.rs | 25
-rw-r--r--  yt/Cargo.toml | 2
-rw-r--r--  yt/src/app.rs | 1
-rw-r--r--  yt/src/cache/mod.rs | 2
-rw-r--r--  yt/src/cli.rs | 3
-rw-r--r--  yt/src/comments/comment.rs | 3
-rw-r--r--  yt/src/comments/display.rs | 14
-rw-r--r--  yt/src/comments/mod.rs | 46
-rw-r--r--  yt/src/config/default.rs | 42
-rw-r--r--  yt/src/config/definitions.rs | 12
-rw-r--r--  yt/src/config/file_system.rs | 7
-rw-r--r--  yt/src/config/mod.rs | 19
-rw-r--r--  yt/src/download/download_options.rs | 10
-rw-r--r--  yt/src/download/mod.rs | 28
-rw-r--r--  yt/src/main.rs | 32
-rw-r--r--  yt/src/select/cmds.rs | 24
-rw-r--r--  yt/src/select/mod.rs | 27
-rw-r--r--  yt/src/select/selection_file/duration.rs | 12
-rw-r--r--  yt/src/select/selection_file/mod.rs | 5
-rw-r--r--  yt/src/status/mod.rs | 6
-rw-r--r--  yt/src/storage/subscriptions.rs | 21
-rw-r--r--  yt/src/storage/video_database/downloader.rs | 10
-rw-r--r--  yt/src/storage/video_database/extractor_hash.rs | 19
-rw-r--r--  yt/src/storage/video_database/getters.rs | 47
-rw-r--r--  yt/src/storage/video_database/mod.rs | 15
-rw-r--r--  yt/src/storage/video_database/setters.rs | 11
-rw-r--r--  yt/src/subscribe/mod.rs | 41
-rw-r--r--  yt/src/update/mod.rs | 72
-rw-r--r--  yt/src/videos/display/format_video.rs | 1
-rw-r--r--  yt/src/videos/display/mod.rs | 8
-rw-r--r--  yt/src/videos/mod.rs | 8
-rw-r--r--  yt/src/watch/events/handlers/mod.rs | 161
-rw-r--r--  yt/src/watch/events/mod.rs | 145
-rw-r--r--  yt/src/watch/events/playlist_handler.rs | 21
-rw-r--r--  yt/src/watch/mod.rs | 22
49 files changed, 727 insertions(+), 504 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index 180bf13..646be94 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2457,6 +2457,7 @@ dependencies = [
  "pyo3",
  "serde",
  "serde_json",
+ "tokio",
  "url",
 ]
 
diff --git a/Cargo.toml b/Cargo.toml
index 1fa9668..888e79d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -36,6 +36,7 @@ log = "0.4.22"
 serde = { version = "1.0.210", features = ["derive"] }
 serde_json = "1.0.128"
 url = { version = "2.5.2", features = ["serde"] }
+tokio = { version = "1.40.0", features = [ "rt-multi-thread", "macros", "process", "time", "io-std", ] }
 
 [profile.profiling]
 inherits = "release"
diff --git a/crates/bytes/src/error.rs b/crates/bytes/src/error.rs
index 7643109..c9783d8 100644
--- a/crates/bytes/src/error.rs
+++ b/crates/bytes/src/error.rs
@@ -11,6 +11,7 @@
 use std::{fmt::Display, num::ParseIntError};
 
 #[derive(Debug)]
+#[allow(clippy::module_name_repetitions)]
 pub enum BytesError {
     BytesParseIntError(ParseIntError),
     NotYetSupported(String),
@@ -20,10 +21,10 @@ impl Display for BytesError {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             BytesError::BytesParseIntError(e) => {
-                f.write_fmt(format_args!("Failed to parse a number as integer: '{}'", e))
+                f.write_fmt(format_args!("Failed to parse a number as integer: '{e}'"))
             },
             BytesError::NotYetSupported(other) => {
-                f.write_fmt(format_args!("Your extension '{}' is not yet supported. Only KB,MB,GB or KiB,MiB,GiB are supported", other))
+                f.write_fmt(format_args!("Your extension '{other}' is not yet supported. Only KB,MB,GB or KiB,MiB,GiB are supported"))
             }
         }
     }
diff --git a/crates/bytes/src/lib.rs b/crates/bytes/src/lib.rs
index 78d3c4e..6e3f73c 100644
--- a/crates/bytes/src/lib.rs
+++ b/crates/bytes/src/lib.rs
@@ -8,6 +8,12 @@
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
+#![allow(
+    clippy::cast_possible_truncation,
+    clippy::cast_precision_loss,
+    clippy::cast_sign_loss,
+    clippy::cast_possible_wrap
+)]
 use std::{fmt::Display, str::FromStr};
 
 use error::BytesError;
@@ -28,13 +34,15 @@ const TB: u64 = 1000 * GB;
 pub mod error;
 pub mod serde;
 
-#[derive(Clone, Copy)]
+#[derive(Clone, Copy, Debug)]
 pub struct Bytes(u64);
 
 impl Bytes {
+    #[must_use]
     pub fn as_u64(self) -> u64 {
         self.0
     }
+    #[must_use]
     pub fn new(v: u64) -> Self {
         Self(v)
     }
@@ -140,6 +148,7 @@ impl Display for Bytes {
 ///   assert_eq!(precision_f64(1.2300_f32 as f64, 2) as f32, 1.2f32);
 ///# }
 /// ```
+#[must_use]
 pub fn precision_f64(x: f64, decimals: u32) -> f64 {
     if x == 0. || decimals == 0 {
         0.
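
A minimal usage sketch for the `Bytes` helper touched above; it is what `byte_parser` in yt/src/cli.rs and `format_speed` in crates/yt_dlp/src/lib.rs lean on. The exact `Display` output and the `FromStr` error type are assumptions, not taken from this commit:

    use bytes::Bytes;

    fn demo() {
        // Parse a human-readable size, as the "3 GiB" max_cache_size default is parsed.
        let max_cache: Bytes = "3 GiB".parse().expect("KB/MB/GB and KiB/MiB/GiB are supported");
        // Raw byte count, e.g. for comparing against a summed cache size.
        let limit: u64 = max_cache.as_u64();
        // Construct from a raw count and render it back for progress output.
        let speed = Bytes::new(1_250_000);
        println!("limit: {limit} bytes, speed: {speed}/s");
    }
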
diff --git a/crates/libmpv2/libmpv2-sys/src/lib.rs b/crates/libmpv2/libmpv2-sys/src/lib.rs
index 36a8199..0d14dbb 100644
--- a/crates/libmpv2/libmpv2-sys/src/lib.rs
+++ b/crates/libmpv2/libmpv2-sys/src/lib.rs
@@ -1,7 +1,3 @@
-#![allow(non_upper_case_globals)]
-#![allow(non_camel_case_types)]
-#![allow(non_snake_case)]
-
 // yt - A fully featured command line YouTube client
 //
 // Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
@@ -11,6 +7,11 @@
 //
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+//
+#![allow(non_upper_case_globals)]
+#![allow(non_camel_case_types)]
+#![allow(non_snake_case)]
+#![allow(clippy::doc_lazy_continuation)]
 
 include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
 
diff --git a/crates/libmpv2/src/mpv.rs b/crates/libmpv2/src/mpv.rs
index 9d554a6..bf4581e 100644
--- a/crates/libmpv2/src/mpv.rs
+++ b/crates/libmpv2/src/mpv.rs
@@ -51,6 +51,7 @@ fn mpv_err<T>(ret: T, err: ctype::c_int) -> Result<T> {
 }
 
 /// This trait describes which types are allowed to be passed to getter mpv APIs.
+#[allow(clippy::missing_safety_doc)]
 pub unsafe trait GetData: Sized {
     #[doc(hidden)]
     fn get_from_c_void<T, F: FnMut(*mut ctype::c_void) -> Result<T>>(mut fun: F) -> Result<Self> {
@@ -62,6 +63,7 @@ pub unsafe trait GetData: Sized {
 }
 
 /// This trait describes which types are allowed to be passed to setter mpv APIs.
+#[allow(clippy::missing_safety_doc)]
 pub unsafe trait SetData: Sized {
     #[doc(hidden)]
     fn call_as_c_void<T, F: FnMut(*mut ctype::c_void) -> Result<T>>(
@@ -207,7 +209,7 @@ pub mod mpv_node {
                 }
             }
 
-            pub fn child(self: Self, node: libmpv2_sys::mpv_node) -> Self {
+            pub fn child(self, node: libmpv2_sys::mpv_node) -> Self {
                 Self {
                     parent: self.parent,
                     node,
diff --git a/crates/libmpv2/src/mpv/protocol.rs b/crates/libmpv2/src/mpv/protocol.rs
index 4ae4f16..31a5933 100644
--- a/crates/libmpv2/src/mpv/protocol.rs
+++ b/crates/libmpv2/src/mpv/protocol.rs
@@ -155,7 +155,7 @@ where
 {
     let data = Box::from_raw(cookie as *mut ProtocolData<T, U>);
 
-    panic::catch_unwind(|| ((*data).close_fn)(Box::from_raw((*data).cookie)));
+    panic::catch_unwind(|| (data.close_fn)(Box::from_raw(data.cookie)));
 }
 
 struct ProtocolData<T, U> {
diff --git a/crates/libmpv2/src/mpv/render.rs b/crates/libmpv2/src/mpv/render.rs
index 91db34e..c3f2dc9 100644
--- a/crates/libmpv2/src/mpv/render.rs
+++ b/crates/libmpv2/src/mpv/render.rs
@@ -213,8 +213,8 @@ impl RenderContext {
         params: impl IntoIterator<Item = RenderParam<C>>,
     ) -> Result<Self> {
         let params: Vec<_> = params.into_iter().collect();
-        let mut raw_params: Vec<libmpv2_sys::mpv_render_param> = Vec::new();
-        raw_params.reserve(params.len() + 1);
+        let mut raw_params: Vec<libmpv2_sys::mpv_render_param> =
+            Vec::with_capacity(params.len() + 1);
         let mut raw_ptrs: HashMap<*const c_void, DeleterFn> = HashMap::new();
 
         for p in params {
diff --git a/crates/yt_dlp/Cargo.toml b/crates/yt_dlp/Cargo.toml
index 0f1d248..03196e9 100644
--- a/crates/yt_dlp/Cargo.toml
+++ b/crates/yt_dlp/Cargo.toml
@@ -30,6 +30,7 @@ serde_json.workspace = true
 url.workspace = true
 
 [dev-dependencies]
+tokio.workspace = true
 
 [lints]
 workspace = true
diff --git a/crates/yt_dlp/src/duration.rs b/crates/yt_dlp/src/duration.rs
index cd7454b..f91892d 100644
--- a/crates/yt_dlp/src/duration.rs
+++ b/crates/yt_dlp/src/duration.rs
@@ -9,6 +9,8 @@
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
 // TODO: This file should be de-duplicated with the same file in the 'yt' crate <2024-06-25>
+
+#[derive(Debug, Clone, Copy)]
 pub struct Duration {
     time: u32,
 }
@@ -26,6 +28,11 @@ impl From<&str> for Duration {
 impl From<Option<f64>> for Duration {
     fn from(value: Option<f64>) -> Self {
         Self {
+            #[allow(
+                clippy::cast_possible_truncation,
+                clippy::cast_precision_loss,
+                clippy::cast_sign_loss
+            )]
             time: value.unwrap_or(0.0).ceil() as u32,
         }
     }
diff --git a/crates/yt_dlp/src/lib.rs b/crates/yt_dlp/src/lib.rs
index f958895..4e35cb0 100644
--- a/crates/yt_dlp/src/lib.rs
+++ b/crates/yt_dlp/src/lib.rs
@@ -8,6 +8,10 @@
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
+// The pyo3 `pyfunction` proc-macros call unsafe functions internally, which trigger this lint.
+#![allow(unsafe_op_in_unsafe_fn)]
+#![allow(clippy::missing_errors_doc)]
+
 use std::env;
 use std::{fs::File, io::Write};
 
@@ -31,14 +35,20 @@ pub mod duration;
 pub mod logging;
 pub mod wrapper;
 
+#[cfg(test)]
+mod tests;
+
 /// Synchronisation helper, to ensure that we don't setup the logger multiple times
 static SYNC_OBJ: Once = Once::new();
 
 /// Add a logger to the yt-dlp options.
 /// If you have a logger set (i.e. for rust), then this will log to rust
+///
+/// # Panics
+/// This should never panic.
 pub fn add_logger_and_sig_handler<'a>(
     opts: Bound<'a, PyDict>,
-    py: Python,
+    py: Python<'_>,
 ) -> PyResult<Bound<'a, PyDict>> {
     setup_logging(py, "yt_dlp")?;
 
@@ -52,10 +62,9 @@ pub fn add_logger_and_sig_handler<'a>(
         // This allows the user to actually stop the application with Ctrl+C.
         // This is here because it can only be run in the main thread and this was here already.
         py.run_bound(
-            r#"
+            "\
 import signal
-signal.signal(signal.SIGINT, signal.SIG_DFL)
-        "#,
+signal.signal(signal.SIGINT, signal.SIG_DFL)",
             None,
             None,
         )
@@ -82,14 +91,22 @@ signal.signal(signal.SIGINT, signal.SIG_DFL)
 }
 
 #[pyfunction]
-pub fn progress_hook(py: Python, input: Bound<'_, PyDict>) -> PyResult<()> {
+#[allow(clippy::too_many_lines)]
+#[allow(clippy::missing_panics_doc)]
+#[allow(clippy::items_after_statements)]
+#[allow(
+    clippy::cast_possible_truncation,
+    clippy::cast_sign_loss,
+    clippy::cast_precision_loss
+)]
+pub fn progress_hook(py: Python<'_>, input: &Bound<'_, PyDict>) -> PyResult<()> {
     // Only add the handler, if the log-level is higher than Debug (this avoids covering debug
     // messages).
     if log_enabled!(Level::Debug) {
         return Ok(());
     }
 
-    let input: serde_json::Map<String, Value> = serde_json::from_str(&json_dumps(
+    let input: Map<String, Value> = serde_json::from_str(&json_dumps(
         py,
         input
             .downcast::<PyAny>()
@@ -164,8 +181,9 @@ pub fn progress_hook(py: Python, input: Bound<'_, PyDict>) -> PyResult<()> {
     }
 
     fn format_speed(speed: f64) -> String {
+        #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)]
         let bytes = Bytes::new(speed.floor() as u64);
-        format!("{}/s", bytes)
+        format!("{bytes}/s")
     }
 
     let get_title = |add_extension: bool| -> String {
@@ -187,7 +205,7 @@ pub fn progress_hook(py: Python, input: Bound<'_, PyDict>) -> PyResult<()> {
                     default_get! { as_str, "<No title>", "info_dict", "title"}.to_owned()
                 }
             }
-            other => panic!("The extension '{}' is not yet implemented", other),
+            other => panic!("The extension '{other}' is not yet implemented"),
         }
     };
 
@@ -242,18 +260,18 @@ pub fn progress_hook(py: Python, input: Bound<'_, PyDict>) -> PyResult<()> {
             );
         }
         "finished" => {
-            println!("Finished downloading: '{}'", c!("34;1", get_title(false)))
+            println!("Finished downloading: '{}'", c!("34;1", get_title(false)));
         }
         "error" => {
             panic!("Error whilst downloading: {}", get_title(true))
         }
-        other => panic!("{} is not a valid state!", other),
+        other => panic!("{other} is not a valid state!"),
     };
 
     Ok(())
 }
 
-pub fn add_hooks<'a>(opts: Bound<'a, PyDict>, py: Python) -> PyResult<Bound<'a, PyDict>> {
+pub fn add_hooks<'a>(opts: Bound<'a, PyDict>, py: Python<'_>) -> PyResult<Bound<'a, PyDict>> {
     if let Some(hooks) = opts.get_item("progress_hooks")? {
         let hooks = hooks.downcast::<PyList>()?;
         hooks.append(wrap_pyfunction_bound!(progress_hook, py)?)?;
@@ -280,10 +298,12 @@ pub fn add_hooks<'a>(opts: Bound<'a, PyDict>, py: Python) -> PyResult<Bound<'a,
 /// @param download     Whether to download videos
 /// @param process      Whether to resolve all unresolved references (URLs, playlist items).
 ///                     Must be True for download to work
-/// @param ie_key       Use only the extractor with this key
+/// @param `ie_key`       Use only the extractor with this key
 ///
-/// @param extra_info   Dictionary containing the extra values to add to the info (For internal use only)
-/// @force_generic_extractor  Force using the generic extractor (Deprecated; use ie_key='Generic')
+/// @param `extra_info`   Dictionary containing the extra values to add to the info (For internal use only)
+/// @`force_generic_extractor`  Force using the generic extractor (Deprecated; use `ie_key`='Generic')
+#[allow(clippy::unused_async)]
+#[allow(clippy::missing_panics_doc)]
 pub async fn extract_info(
     yt_dlp_opts: &Map<String, Value>,
     url: &Url,
@@ -311,8 +331,8 @@ pub async fn extract_info(
 
         if let Ok(confirm) = env::var("YT_STORE_INFO_JSON") {
             if confirm == "yes" {
-                let mut file = File::create("output.info.json").unwrap();
-                write!(file, "{}", result_str).unwrap();
+                let mut file = File::create("output.info.json")?;
+                write!(file, "{result_str}").unwrap();
             }
         }
 
@@ -321,7 +341,9 @@ pub async fn extract_info(
     })
 }
 
-pub fn unsmuggle_url(smug_url: Url) -> PyResult<Url> {
+/// # Panics
+/// Only if python fails to return a valid URL.
+pub fn unsmuggle_url(smug_url: &Url) -> PyResult<Url> {
     Python::with_gil(|py| {
         let utils = get_yt_dlp_utils(py)?;
         let url = utils
@@ -341,6 +363,9 @@ pub fn unsmuggle_url(smug_url: Url) -> PyResult<Url> {
 
 /// Download a given list of URLs.
 /// Returns the paths they were downloaded to.
+///
+/// # Panics
+/// Only if `yt_dlp` changes their `info_json` schema.
 pub async fn download(
     urls: &[Url],
     download_options: &Map<String, Value>,
@@ -357,7 +382,7 @@ pub async fn download(
         } else {
             info_json.requested_downloads.expect("This must exist")[0]
                 .filename
-                .to_owned()
+                .clone()
         };
 
         out_paths.push(result_string);
@@ -378,7 +403,7 @@ fn json_map_to_py_dict<'a>(
     Ok(python_dict)
 }
 
-fn json_dumps(py: Python, input: Bound<PyAny>) -> PyResult<String> {
+fn json_dumps(py: Python<'_>, input: Bound<'_, PyAny>) -> PyResult<String> {
     //     json.dumps(yt_dlp.sanitize_info(input))
 
     let yt_dlp = get_yt_dlp(py, PyDict::new_bound(py))?;
@@ -394,13 +419,13 @@ fn json_dumps(py: Python, input: Bound<PyAny>) -> PyResult<String> {
     Ok(output_str)
 }
 
-fn json_loads_str<T: Serialize>(py: Python, input: T) -> PyResult<Bound<PyDict>> {
+fn json_loads_str<T: Serialize>(py: Python<'_>, input: T) -> PyResult<Bound<'_, PyDict>> {
     let string = serde_json::to_string(&input).expect("Correct json must be passed");
 
     json_loads(py, string)
 }
 
-fn json_loads(py: Python, input: String) -> PyResult<Bound<PyDict>> {
+fn json_loads(py: Python<'_>, input: String) -> PyResult<Bound<'_, PyDict>> {
     //     json.loads(input)
 
     let json = PyModule::import_bound(py, "json")?;
diff --git a/crates/yt_dlp/src/logging.rs b/crates/yt_dlp/src/logging.rs
index 4039da4..385255d 100644
--- a/crates/yt_dlp/src/logging.rs
+++ b/crates/yt_dlp/src/logging.rs
@@ -12,6 +12,9 @@
 // It is licensed under the Apache 2.0 License, copyright up to 2024 by Dylan Storey
 // It was modified by Benedikt Peetz 2024
 
+// The pyo3 `pyfunction` proc-macros call unsafe functions internally, which trigger this lint.
+#![allow(unsafe_op_in_unsafe_fn)]
+
 use log::{logger, Level, MetadataBuilder, Record};
 use pyo3::{
     prelude::{PyAnyMethods, PyListMethods, PyModuleMethods},
@@ -19,6 +22,7 @@ use pyo3::{
 };
 
 /// Consume a Python `logging.LogRecord` and emit a Rust `Log` instead.
+#[allow(clippy::needless_pass_by_value)]
 #[pyfunction]
 fn host_log(record: Bound<'_, PyAny>, rust_target: &str) -> PyResult<()> {
     let level = record.getattr("levelno")?;
@@ -37,7 +41,7 @@ fn host_log(record: Bound<'_, PyAny>, rust_target: &str) -> PyResult<()> {
     } else {
         // Libraries (ex: tracing_subscriber::filter::Directive) expect rust-style targets like foo::bar,
         // and may not deal well with "." as a module separator:
-        let logger_name = logger_name.replace(".", "::");
+        let logger_name = logger_name.replace('.', "::");
         Some(format!("{rust_target}::{logger_name}"))
     };
 
@@ -84,10 +88,11 @@ fn host_log(record: Bound<'_, PyAny>, rust_target: &str) -> PyResult<()> {
     Ok(())
 }
 
-/// Registers the host_log function in rust as the event handler for Python's logging logger
+/// Registers the `host_log` function in rust as the event handler for Python's logging logger
 /// This function needs to be called from within a pyo3 context as early as possible to ensure logging messages
 /// arrive to the rust consumer.
-pub fn setup_logging(py: Python, target: &str) -> PyResult<()> {
+#[allow(clippy::module_name_repetitions)]
+pub fn setup_logging(py: Python<'_>, target: &str) -> PyResult<()> {
     let logging = py.import_bound("logging")?;
 
     logging.setattr("host_log", wrap_pyfunction!(host_log, &logging)?)?;
@@ -100,15 +105,14 @@ class HostHandler(Handler):
         super().__init__(level=level)
 
     def emit(self, record):
-        host_log(record,"{}")
+        host_log(record,"{target}")
 
 oldBasicConfig = basicConfig
 def basicConfig(*pargs, **kwargs):
     if "handlers" not in kwargs:
         kwargs["handlers"] = [HostHandler()]
     return oldBasicConfig(*pargs, **kwargs)
-"#,
-            target
+"#
         )
         .as_str(),
         Some(&logging.dict()),
diff --git a/crates/yt_dlp/src/main.rs b/crates/yt_dlp/src/main.rs
deleted file mode 100644
index c40ddc3..0000000
--- a/crates/yt_dlp/src/main.rs
+++ /dev/null
@@ -1,96 +0,0 @@
-// yt - A fully featured command line YouTube client
-//
-// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
-// SPDX-License-Identifier: GPL-3.0-or-later
-//
-// This file is part of Yt.
-//
-// You should have received a copy of the License along with this program.
-// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
-
-use std::{env::args, fs};
-
-use yt_dlp::wrapper::info_json::InfoJson;
-
-#[cfg(test)]
-mod test {
-    use url::Url;
-    use yt_dlp::wrapper::yt_dlp_options::{ExtractFlat, YtDlpOptions};
-
-    const YT_OPTS: YtDlpOptions = YtDlpOptions {
-        playliststart: 1,
-        playlistend: 10,
-        noplaylist: false,
-        extract_flat: ExtractFlat::InPlaylist,
-    };
-
-    #[test]
-    fn test_extract_info_video() {
-        let info = yt_dlp::extract_info(
-            YT_OPTS,
-            &Url::parse("https://www.youtube.com/watch?v=dbjPnXaacAU").expect("Is valid."),
-            false,
-            false,
-            false,
-        )
-        .map_err(|err| format!("Encountered error: '{}'", err))
-        .unwrap();
-
-        println!("{:#?}", info);
-    }
-
-    #[test]
-    fn test_extract_info_url() {
-        let err = yt_dlp::extract_info(
-            YT_OPTS,
-            &Url::parse("https://google.com").expect("Is valid."),
-            false,
-            false,
-            false,
-        )
-        .map_err(|err| format!("Encountered error: '{}'", err))
-        .unwrap();
-
-        println!("{:#?}", err);
-    }
-
-    #[test]
-    fn test_extract_info_playlist() {
-        let err = yt_dlp::extract_info(
-            YT_OPTS,
-            &Url::parse("https://www.youtube.com/@TheGarriFrischer/videos").expect("Is valid."),
-            false,
-            false,
-            true,
-        )
-        .map_err(|err| format!("Encountered error: '{}'", err))
-        .unwrap();
-
-        println!("{:#?}", err);
-    }
-    #[test]
-    fn test_extract_info_playlist_full() {
-        let err = yt_dlp::extract_info(
-            YT_OPTS,
-            &Url::parse("https://www.youtube.com/@NixOS-Foundation/videos").expect("Is valid."),
-            false,
-            false,
-            true,
-        )
-        .map_err(|err| format!("Encountered error: '{}'", err))
-        .unwrap();
-
-        println!("{:#?}", err);
-    }
-}
-
-fn main() {
-    let input_file: &str = &args().take(2).collect::<Vec<String>>()[1];
-
-    let input = fs::read_to_string(input_file).unwrap();
-
-    let output: InfoJson =
-        serde_json::from_str(&input).expect("Python should be able to produce correct json");
-
-    println!("{:#?}", output);
-}
diff --git a/crates/yt_dlp/src/tests.rs b/crates/yt_dlp/src/tests.rs
new file mode 100644
index 0000000..08e392f
--- /dev/null
+++ b/crates/yt_dlp/src/tests.rs
@@ -0,0 +1,85 @@
+// yt - A fully featured command line YouTube client
+//
+// Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
+// SPDX-License-Identifier: GPL-3.0-or-later
+//
+// This file is part of Yt.
+//
+// You should have received a copy of the License along with this program.
+// If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
+
+use std::sync::LazyLock;
+
+use serde_json::{json, Value};
+use url::Url;
+
+static YT_OPTS: LazyLock<serde_json::Map<String, Value>> = LazyLock::new(|| {
+    match json!({
+        "playliststart": 1,
+        "playlistend": 10,
+        "noplaylist": false,
+        "extract_flat": false,
+    }) {
+        Value::Object(obj) => obj,
+        _ => unreachable!("This json is hardcoded"),
+    }
+});
+
+#[tokio::test]
+async fn test_extract_info_video() {
+    let info = crate::extract_info(
+        &YT_OPTS,
+        &Url::parse("https://www.youtube.com/watch?v=dbjPnXaacAU").expect("Is valid."),
+        false,
+        false,
+    )
+    .await
+    .map_err(|err| format!("Encountered error: '{}'", err))
+    .unwrap();
+
+    println!("{:#?}", info);
+}
+
+#[tokio::test]
+async fn test_extract_info_url() {
+    let err = crate::extract_info(
+        &YT_OPTS,
+        &Url::parse("https://google.com").expect("Is valid."),
+        false,
+        false,
+    )
+    .await
+    .map_err(|err| format!("Encountered error: '{}'", err))
+    .unwrap();
+
+    println!("{:#?}", err);
+}
+
+#[tokio::test]
+async fn test_extract_info_playlist() {
+    let err = crate::extract_info(
+        &YT_OPTS,
+        &Url::parse("https://www.youtube.com/@TheGarriFrischer/videos").expect("Is valid."),
+        false,
+        true,
+    )
+    .await
+    .map_err(|err| format!("Encountered error: '{}'", err))
+    .unwrap();
+
+    println!("{:#?}", err);
+}
+#[tokio::test]
+async fn test_extract_info_playlist_full() {
+    let err = crate::extract_info(
+        &YT_OPTS,
+        &Url::parse("https://www.youtube.com/@NixOS-Foundation/videos").expect("Is valid."),
+        false,
+        true,
+    )
+    .await
+    .map_err(|err| format!("Encountered error: '{}'", err))
+    .unwrap();
+
+    println!("{:#?}", err);
+}
diff --git a/crates/yt_dlp/src/wrapper/info_json.rs b/crates/yt_dlp/src/wrapper/info_json.rs
index 50a026d..f113fe5 100644
--- a/crates/yt_dlp/src/wrapper/info_json.rs
+++ b/crates/yt_dlp/src/wrapper/info_json.rs
@@ -8,6 +8,9 @@
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
+// `yt_dlp` named them like this.
+#![allow(clippy::pub_underscore_fields)]
+
 use std::{collections::HashMap, path::PathBuf};
 
 use pyo3::{types::PyDict, Bound, PyResult, Python};
@@ -146,6 +149,7 @@ pub struct InfoJson {
 
 #[derive(Debug, Deserialize, Serialize, PartialEq)]
 #[serde(deny_unknown_fields)]
+#[allow(missing_copy_implementations)]
 pub struct FilesToMove {}
 
 #[derive(Debug, Deserialize, Serialize, PartialEq)]
@@ -197,8 +201,7 @@ pub struct Subtitle {
     pub url: Url,
 }
 
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq)]
-#[serde(deny_unknown_fields)]
+#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
 pub enum SubtitleExt {
     #[serde(alias = "vtt")]
     Vtt,
@@ -266,7 +269,7 @@ where
     Ok(None)
 }
 
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq)]
+#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
 #[serde(deny_unknown_fields)]
 pub enum SponsorblockChapterType {
     #[serde(alias = "skip")]
@@ -278,7 +281,7 @@ pub enum SponsorblockChapterType {
     #[serde(alias = "poi")]
     Poi,
 }
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq)]
+#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
 #[serde(deny_unknown_fields)]
 pub enum SponsorblockChapterCategory {
     #[serde(alias = "filler")]
@@ -314,13 +317,14 @@ pub enum SponsorblockChapterCategory {
 
 #[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd)]
 #[serde(deny_unknown_fields)]
+#[allow(missing_copy_implementations)]
 pub struct HeatMapEntry {
     pub start_time: f64,
     pub end_time: f64,
     pub value: f64,
 }
 
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq)]
+#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
 #[serde(deny_unknown_fields)]
 pub enum Extractor {
     #[serde(alias = "generic")]
@@ -337,7 +341,7 @@ pub enum Extractor {
     YouTubeTab,
 }
 
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq)]
+#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
 #[serde(deny_unknown_fields)]
 pub enum ExtractorKey {
     #[serde(alias = "Generic")]
@@ -354,7 +358,7 @@ pub enum ExtractorKey {
     YouTubeTab,
 }
 
-#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq)]
+#[derive(Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Clone, Copy)]
 #[serde(deny_unknown_fields)]
 pub enum InfoType {
     #[serde(alias = "playlist")]
@@ -385,6 +389,7 @@ pub enum Parent {
 }
 
 impl Parent {
+    #[must_use]
     pub fn id(&self) -> Option<&str> {
         if let Self::Id(id) = self {
             Some(id)
@@ -421,6 +426,7 @@ impl From<String> for Id {
 
 #[derive(Debug, Deserialize, Serialize, Clone, Eq, PartialEq, PartialOrd, Ord)]
 #[serde(deny_unknown_fields)]
+#[allow(clippy::struct_excessive_bools)]
 pub struct Comment {
     pub id: Id,
     pub text: String,
@@ -522,6 +528,7 @@ pub struct Format {
 
 #[derive(Debug, Deserialize, Serialize, Eq, PartialEq, PartialOrd, Ord)]
 #[serde(deny_unknown_fields)]
+#[allow(missing_copy_implementations)]
 pub struct DownloaderOptions {
     http_chunk_size: u64,
 }
@@ -554,8 +561,8 @@ pub struct Fragment {
 }
 
 impl InfoJson {
-    pub fn to_py_dict(self, py: Python) -> PyResult<Bound<PyDict>> {
-        let output: Bound<PyDict> = json_loads_str(py, self)?;
+    pub fn to_py_dict(self, py: Python<'_>) -> PyResult<Bound<'_, PyDict>> {
+        let output: Bound<'_, PyDict> = json_loads_str(py, self)?;
         Ok(output)
     }
 }
diff --git a/yt/Cargo.toml b/yt/Cargo.toml
index 4cc712c..e35e4be 100644
--- a/yt/Cargo.toml
+++ b/yt/Cargo.toml
@@ -35,7 +35,6 @@ regex = "1.11.0"
 sqlx = { version = "0.8.2", features = ["runtime-tokio", "sqlite"] }
 stderrlog = "0.6.0"
 tempfile = "3.13.0"
-tokio = { version = "1.40.0", features = [ "rt-multi-thread", "macros", "process", "time", "io-std", ] }
 toml = "0.8.19"
 trinitry = { version = "0.2.2" }
 xdg = "2.5.2"
@@ -44,6 +43,7 @@ libmpv2.workspace = true
 log.workspace = true
 serde.workspace = true
 serde_json.workspace = true
+tokio.workspace = true
 url.workspace = true
 yt_dlp.workspace = true
 
diff --git a/yt/src/app.rs b/yt/src/app.rs
index b7d136e..1f82214 100644
--- a/yt/src/app.rs
+++ b/yt/src/app.rs
@@ -13,6 +13,7 @@ use sqlx::{query, sqlite::SqliteConnectOptions, SqlitePool};
 
 use crate::config::Config;
 
+#[derive(Debug)]
 pub struct App {
     pub database: SqlitePool,
     pub config: Config,
diff --git a/yt/src/cache/mod.rs b/yt/src/cache/mod.rs
index a3e08c9..6ceef25 100644
--- a/yt/src/cache/mod.rs
+++ b/yt/src/cache/mod.rs
@@ -47,7 +47,7 @@ pub async fn invalidate(app: &App, hard: bool) -> Result<()> {
     info!("Got videos to invalidate: '{}'", all_cached_things.len());
 
     for video in all_cached_things {
-        invalidate_video(app, &video, hard).await?
+        invalidate_video(app, &video, hard).await?;
     }
 
     Ok(())
diff --git a/yt/src/cli.rs b/yt/src/cli.rs
index d19586e..2208caa 100644
--- a/yt/src/cli.rs
+++ b/yt/src/cli.rs
@@ -23,6 +23,7 @@ use crate::{
 
 #[derive(Parser, Debug)]
 #[clap(author, version, about, long_about = None)]
+#[allow(clippy::module_name_repetitions)]
 /// A command line interface to select, download and watch videos
 pub struct CliArgs {
     #[command(subcommand)]
@@ -127,7 +128,7 @@ pub enum Command {
 fn byte_parser(input: &str) -> Result<u64, anyhow::Error> {
     Ok(input
         .parse::<Bytes>()
-        .with_context(|| format!("Failed to parse '{}' as bytes!", input))?
+        .with_context(|| format!("Failed to parse '{input}' as bytes!"))?
         .as_u64())
 }
 
diff --git a/yt/src/comments/comment.rs b/yt/src/comments/comment.rs
index 752c510..c998cdb 100644
--- a/yt/src/comments/comment.rs
+++ b/yt/src/comments/comment.rs
@@ -11,6 +11,7 @@
 use yt_dlp::wrapper::info_json::Comment;
 
 #[derive(Debug, Clone)]
+#[allow(clippy::module_name_repetitions)]
 pub struct CommentExt {
     pub value: Comment,
     pub replies: Vec<CommentExt>,
@@ -43,7 +44,7 @@ impl Comments {
 }
 impl CommentExt {
     pub fn push_reply(&mut self, value: CommentExt) {
-        self.replies.push(value)
+        self.replies.push(value);
     }
     pub fn get_mut_reply(&mut self, key: &str) -> Option<&mut CommentExt> {
         self.replies
diff --git a/yt/src/comments/display.rs b/yt/src/comments/display.rs
index 7000063..4d678bc 100644
--- a/yt/src/comments/display.rs
+++ b/yt/src/comments/display.rs
@@ -23,8 +23,6 @@ impl Comments {
     }
 
     fn render_help(&self, color: bool) -> Result<String, std::fmt::Error> {
-        let mut f = String::new();
-
         macro_rules! c {
             ($color_str:expr, $write:ident, $color:expr) => {
                 if $color {
@@ -87,29 +85,31 @@ impl Comments {
             f.write_str(":\n")?;
             f.write_str(ident)?;
 
-            f.write_str(&value.text.replace('\n', &format!("\n{}", ident)))?;
+            f.write_str(&value.text.replace('\n', &format!("\n{ident}")))?;
             f.write_str("\n")?;
 
-            if !comment.replies.is_empty() {
+            if comment.replies.is_empty() {
+                f.write_str("\n")?;
+            } else {
                 let mut children = comment.replies.clone();
                 children.sort_by(|a, b| a.value.timestamp.cmp(&b.value.timestamp));
 
                 for child in children {
                     format(&child, f, ident_count + 4, color)?;
                 }
-            } else {
-                f.write_str("\n")?;
             }
 
             Ok(())
         }
 
+        let mut f = String::new();
+
         if !&self.vec.is_empty() {
             let mut children = self.vec.clone();
             children.sort_by(|a, b| b.value.like_count.cmp(&a.value.like_count));
 
             for child in children {
-                format(&child, &mut f, 0, color)?
+                format(&child, &mut f, 0, color)?;
             }
         }
         Ok(f)
diff --git a/yt/src/comments/mod.rs b/yt/src/comments/mod.rs
index 5fbc3fb..fd9f9da 100644
--- a/yt/src/comments/mod.rs
+++ b/yt/src/comments/mod.rs
@@ -25,12 +25,14 @@ use crate::{
         getters::{get_currently_playing_video, get_video_info_json},
         Video,
     },
+    unreachable::Unreachable,
 };
 
 mod comment;
 mod display;
 
-pub async fn get_comments(app: &App) -> Result<Comments> {
+#[allow(clippy::too_many_lines)]
+pub async fn get(app: &App) -> Result<Comments> {
     let currently_playing_video: Video =
         if let Some(video) = get_currently_playing_video(app).await? {
             video
@@ -40,28 +42,38 @@ pub async fn get_comments(app: &App) -> Result<Comments> {
 
     let mut info_json: InfoJson = get_video_info_json(&currently_playing_video)
         .await?
-        .expect("A currently *playing* must be cached. And thus the info.json should be available");
-
-    let base_comments = mem::take(&mut info_json.comments).expect("A video should have comments");
+        .unreachable(
+            "A currently *playing* video must be cached, and thus the info.json should be available",
+        );
+
+    let base_comments = mem::take(&mut info_json.comments).with_context(|| {
+        format!(
+            "The video ('{}') does not have comments!",
+            info_json
+                .title
+                .as_ref()
+                .unwrap_or(&("<No Title>".to_owned()))
+        )
+    })?;
     drop(info_json);
 
     let mut comments = Comments::new();
-    base_comments.into_iter().for_each(|c| {
+    for c in base_comments {
         if let Parent::Id(id) = &c.parent {
             comments.insert(&(id.clone()), CommentExt::from(c));
         } else {
             comments.push(CommentExt::from(c));
         }
-    });
+    }
 
     comments.vec.iter_mut().for_each(|comment| {
        let replies = mem::take(&mut comment.replies);
        let mut output_replies: Vec<CommentExt>  = vec![];
 
-       let re = Regex::new(r"\u{200b}?(@[^\t\s]+)\u{200b}?").unwrap();
+       let re = Regex::new(r"\u{200b}?(@[^\t\s]+)\u{200b}?").unreachable("This is hardcoded");
        for reply in replies {
            if let Some(replyee_match) =  re.captures(&reply.value.text){
-               let full_match = replyee_match.get(0).expect("This always exists");
+               let full_match = replyee_match.get(0).unreachable("This will always exist");
                let text = reply.
                    value.
                    text[0..full_match.start()]
@@ -72,7 +84,7 @@ pub async fn get_comments(app: &App) -> Result<Comments> {
                    .text[full_match.end()..];
                let text: &str = text.trim().trim_matches('\u{200b}');
 
-               let replyee = replyee_match.get(1).expect("This should exist").as_str();
+               let replyee = replyee_match.get(1).unreachable("This should also exist").as_str();
 
 
                if let Some(parent) = output_replies
@@ -87,7 +99,7 @@ pub async fn get_comments(app: &App) -> Result<Comments> {
                    parent.replies.push(CommentExt::from(Comment {
                        text: text.to_owned(),
                        ..reply.value
-                   }))
+                   }));
                } else if let Some(parent) = output_replies
                    .iter_mut()
                    // .rev()
@@ -99,7 +111,7 @@ pub async fn get_comments(app: &App) -> Result<Comments> {
                    parent.replies.push(CommentExt::from(Comment {
                        text: text.to_owned(),
                        ..reply.value
-                   }))
+                   }));
                } else if let Some(parent) = output_replies
                    .iter_mut()
                    // .rev()
@@ -110,7 +122,7 @@ pub async fn get_comments(app: &App) -> Result<Comments> {
                    parent.replies.push(CommentExt::from(Comment {
                        text: text.to_owned(),
                        ..reply.value
-                   }))
+                   }));
                } else if let Some(parent) = output_replies.iter_mut()
                    // .rev()
                    .filter(|com| com.value.author == replyee)
@@ -119,7 +131,7 @@ pub async fn get_comments(app: &App) -> Result<Comments> {
                    parent.replies.push(CommentExt::from(Comment {
                        text: text.to_owned(),
                        ..reply.value
-                   }))
+                   }));
                } else {
                    eprintln!(
                    "Failed to find a parent for ('{}') both directly and via replies! The reply text was:\n'{}'\n",
@@ -139,7 +151,7 @@ pub async fn get_comments(app: &App) -> Result<Comments> {
 }
 
 pub async fn comments(app: &App) -> Result<()> {
-    let comments = get_comments(app).await?;
+    let comments = get(app).await?;
 
     let mut less = Command::new("less")
         .args(["--raw-control-chars"])
@@ -152,7 +164,7 @@ pub async fn comments(app: &App) -> Result<()> {
         .args(["--uniform-spacing", "--split-only", "--width=90"])
         .stdin(Stdio::piped())
         .stderr(Stdio::inherit())
-        .stdout(less.stdin.take().expect("Should be open"))
+        .stdout(less.stdin.take().unreachable("Should be open"))
         .spawn()
         .context("Failed to run fmt")?;
 
@@ -160,7 +172,7 @@ pub async fn comments(app: &App) -> Result<()> {
     std::thread::spawn(move || {
         stdin
             .write_all(comments.render(true).as_bytes())
-            .expect("Should be able to write to stdin of fmt");
+            .unreachable("Should be able to write to the stdin of less");
     });
 
     let _ = less.wait().context("Failed to await less")?;
@@ -173,6 +185,6 @@ mod test {
     #[test]
     fn test_string_replacement() {
         let s = "A \n\nB\n\nC".to_owned();
-        assert_eq!("A \n  \n  B\n  \n  C", s.replace('\n', "\n  "))
+        assert_eq!("A \n  \n  B\n  \n  C", s.replace('\n', "\n  "));
     }
 }
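
The re-threading loop above hinges on that hard-coded regex: a flat reply usually starts with the replyee's @handle, optionally wrapped in U+200B zero-width spaces, and the capture group recovers that handle so the reply can be attached to the right parent. A small illustrative sketch (the handle and reply text are made up):

    use regex::Regex;

    fn demo() {
        let re = Regex::new(r"\u{200b}?(@[^\t\s]+)\u{200b}?").unwrap();
        let reply = "@SomeUser thanks, that clears it up!";
        let caps = re.captures(reply).expect("reply starts with a handle");
        assert_eq!(&caps[1], "@SomeUser");
        // comments/mod.rs then strips the full match from the reply text and
        // pushes the remainder as a child of the comment authored by that handle.
    }
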
diff --git a/yt/src/config/default.rs b/yt/src/config/default.rs
index 59063f5..3b7a3f5 100644
--- a/yt/src/config/default.rs
+++ b/yt/src/config/default.rs
@@ -16,56 +16,56 @@ fn get_runtime_path(name: &'static str) -> Result<PathBuf> {
     let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX)?;
     xdg_dirs
         .place_runtime_file(name)
-        .with_context(|| format!("Failed to place runtime file: '{}'", name))
+        .with_context(|| format!("Failed to place runtime file: '{name}'"))
 }
 fn get_data_path(name: &'static str) -> Result<PathBuf> {
     let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX)?;
     xdg_dirs
         .place_data_file(name)
-        .with_context(|| format!("Failed to place data file: '{}'", name))
+        .with_context(|| format!("Failed to place data file: '{name}'"))
 }
 fn get_config_path(name: &'static str) -> Result<PathBuf> {
     let xdg_dirs = xdg::BaseDirectories::with_prefix(PREFIX)?;
     xdg_dirs
         .place_config_file(name)
-        .with_context(|| format!("Failed to place config file: '{}'", name))
+        .with_context(|| format!("Failed to place config file: '{name}'"))
 }
 
 pub(super) fn create_path(path: PathBuf) -> Result<PathBuf> {
     if !path.exists() {
         if let Some(parent) = path.parent() {
             std::fs::create_dir_all(parent)
-                .with_context(|| format!("Failed to create the '{}' directory", path.display()))?
+                .with_context(|| format!("Failed to create the '{}' directory", path.display()))?;
         }
     }
 
     Ok(path)
 }
 
-pub const PREFIX: &str = "yt";
+pub(crate) const PREFIX: &str = "yt";
 
-pub mod select {
-    pub fn playback_speed() -> f64 {
+pub(crate) mod select {
+    pub(crate) fn playback_speed() -> f64 {
         2.7
     }
-    pub fn subtitle_langs() -> &'static str {
+    pub(crate) fn subtitle_langs() -> &'static str {
         ""
     }
 }
 
-pub mod watch {
-    pub fn local_comments_length() -> usize {
+pub(crate) mod watch {
+    pub(crate) fn local_comments_length() -> usize {
         1000
     }
 }
 
-pub mod update {
-    pub fn max_backlog() -> u32 {
+pub(crate) mod update {
+    pub(crate) fn max_backlog() -> u32 {
         20
     }
 }
 
-pub mod paths {
+pub(crate) mod paths {
     use std::{env::temp_dir, path::PathBuf};
 
     use anyhow::Result;
@@ -73,30 +73,30 @@ pub mod paths {
     use super::{create_path, get_config_path, get_data_path, get_runtime_path, PREFIX};
 
     // We download to the temp dir to avoid taxing the disk
-    pub fn download_dir() -> Result<PathBuf> {
+    pub(crate) fn download_dir() -> Result<PathBuf> {
         let temp_dir = temp_dir();
 
         create_path(temp_dir.join(PREFIX))
     }
-    pub fn mpv_config_path() -> Result<PathBuf> {
+    pub(crate) fn mpv_config_path() -> Result<PathBuf> {
         get_config_path("mpv.conf")
     }
-    pub fn mpv_input_path() -> Result<PathBuf> {
+    pub(crate) fn mpv_input_path() -> Result<PathBuf> {
         get_config_path("mpv.input.conf")
     }
-    pub fn database_path() -> Result<PathBuf> {
+    pub(crate) fn database_path() -> Result<PathBuf> {
         get_data_path("videos.sqlite")
     }
-    pub fn config_path() -> Result<PathBuf> {
+    pub(crate) fn config_path() -> Result<PathBuf> {
         get_config_path("config.toml")
     }
-    pub fn last_selection_path() -> Result<PathBuf> {
+    pub(crate) fn last_selection_path() -> Result<PathBuf> {
         get_runtime_path("selected.yts")
     }
 }
 
-pub mod download {
-    pub fn max_cache_size() -> &'static str {
+pub(crate) mod download {
+    pub(crate) fn max_cache_size() -> &'static str {
         "3 GiB"
     }
 }
diff --git a/yt/src/config/definitions.rs b/yt/src/config/definitions.rs
index d37e6da..7d5feee 100644
--- a/yt/src/config/definitions.rs
+++ b/yt/src/config/definitions.rs
@@ -14,7 +14,7 @@ use serde::Deserialize;
 
 #[derive(Debug, Deserialize, PartialEq)]
 #[serde(deny_unknown_fields)]
-pub struct ConfigFile {
+pub(crate) struct ConfigFile {
     pub select: Option<SelectConfig>,
     pub watch: Option<WatchConfig>,
     pub paths: Option<PathsConfig>,
@@ -24,33 +24,33 @@ pub struct ConfigFile {
 
 #[derive(Debug, Deserialize, PartialEq, Clone, Copy)]
 #[serde(deny_unknown_fields)]
-pub struct UpdateConfig {
+pub(crate) struct UpdateConfig {
     pub max_backlog: Option<u32>,
 }
 
 #[derive(Debug, Deserialize, PartialEq, Clone)]
 #[serde(deny_unknown_fields)]
-pub struct DownloadConfig {
+pub(crate) struct DownloadConfig {
     /// This will then be converted to a u64
     pub max_cache_size: Option<String>,
 }
 
 #[derive(Debug, Deserialize, PartialEq, Clone)]
 #[serde(deny_unknown_fields)]
-pub struct SelectConfig {
+pub(crate) struct SelectConfig {
     pub playback_speed: Option<f64>,
     pub subtitle_langs: Option<String>,
 }
 
 #[derive(Debug, Deserialize, PartialEq, Clone, Copy)]
 #[serde(deny_unknown_fields)]
-pub struct WatchConfig {
+pub(crate) struct WatchConfig {
     pub local_comments_length: Option<usize>,
 }
 
 #[derive(Debug, Deserialize, PartialEq, Clone)]
 #[serde(deny_unknown_fields)]
-pub struct PathsConfig {
+pub(crate) struct PathsConfig {
     pub download_dir: Option<PathBuf>,
     pub mpv_config_path: Option<PathBuf>,
     pub mpv_input_path: Option<PathBuf>,
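
For reference, a config file exercising these (now crate-private) definitions could look like the sketch below. Every table and key is optional thanks to the `Option` fields, and the values shown are the defaults from yt/src/config/default.rs; the path value is illustrative (the file itself normally lives at the XDG config location, e.g. ~/.config/yt/config.toml):

    [select]
    playback_speed = 2.7
    subtitle_langs = ""

    [watch]
    local_comments_length = 1000

    [update]
    max_backlog = 20

    [download]
    max_cache_size = "3 GiB"   # parsed via the bytes crate

    [paths]
    download_dir = "/tmp/yt"   # illustrative; the default is <temp dir>/yt
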
diff --git a/yt/src/config/file_system.rs b/yt/src/config/file_system.rs
index 5751583..11bcd12 100644
--- a/yt/src/config/file_system.rs
+++ b/yt/src/config/file_system.rs
@@ -71,12 +71,11 @@ impl Config {
         db_path: Option<PathBuf>,
         config_path: Option<PathBuf>,
     ) -> Result<Self> {
-        let config_file_path = config_path
-            .map(Ok)
-            .unwrap_or_else(|| -> Result<_> { paths::config_path() })?;
+        let config_file_path =
+            config_path.map_or_else(|| -> Result<_> { paths::config_path() }, Ok)?;
 
         let config: super::definitions::ConfigFile =
-            toml::from_str(&read_to_string(config_file_path).unwrap_or("".to_owned()))
+            toml::from_str(&read_to_string(config_file_path).unwrap_or(String::new()))
                 .context("Failed to parse the config file as toml")?;
 
         Ok(Self {
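
The `map_or_else` call above keeps the old behaviour: an explicitly passed config path wins, otherwise the XDG default is computed lazily. Spelled out as a plain match (a sketch with a generic fallback, not code from the commit):

    fn resolve<T>(
        explicit: Option<T>,
        fallback: impl FnOnce() -> anyhow::Result<T>,
    ) -> anyhow::Result<T> {
        // `explicit.map_or_else(fallback, Ok)` is morally this match:
        match explicit {
            Some(value) => Ok(value), // caller-supplied value wins
            None => fallback(),       // only computed when actually needed
        }
    }
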
diff --git a/yt/src/config/mod.rs b/yt/src/config/mod.rs
index ea40055..1aaf065 100644
--- a/yt/src/config/mod.rs
+++ b/yt/src/config/mod.rs
@@ -8,6 +8,8 @@
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
+#![allow(clippy::module_name_repetitions)]
+
 use std::path::PathBuf;
 
 use bytes::Bytes;
@@ -17,7 +19,7 @@ mod default;
 mod definitions;
 pub mod file_system;
 
-#[derive(Serialize)]
+#[derive(Serialize, Debug)]
 pub struct Config {
     pub select: SelectConfig,
     pub watch: WatchConfig,
@@ -25,24 +27,29 @@ pub struct Config {
     pub download: DownloadConfig,
     pub update: UpdateConfig,
 }
-#[derive(Serialize)]
+#[derive(Serialize, Debug)]
+// This structure could get non-copy fields in the future.
+// The same thing applies to all the other structures here.
+#[allow(missing_copy_implementations)]
 pub struct UpdateConfig {
     pub max_backlog: u32,
 }
-#[derive(Serialize)]
+#[derive(Serialize, Debug)]
+#[allow(missing_copy_implementations)]
 pub struct DownloadConfig {
     pub max_cache_size: Bytes,
 }
-#[derive(Serialize)]
+#[derive(Serialize, Debug)]
 pub struct SelectConfig {
     pub playback_speed: f64,
     pub subtitle_langs: String,
 }
-#[derive(Serialize)]
+#[derive(Serialize, Debug)]
+#[allow(missing_copy_implementations)]
 pub struct WatchConfig {
     pub local_comments_length: usize,
 }
-#[derive(Serialize)]
+#[derive(Serialize, Debug)]
 pub struct PathsConfig {
     pub download_dir: PathBuf,
     pub mpv_config_path: PathBuf,
diff --git a/yt/src/download/download_options.rs b/yt/src/download/download_options.rs
index e93170a..1dc0bf2 100644
--- a/yt/src/download/download_options.rs
+++ b/yt/src/download/download_options.rs
@@ -22,10 +22,8 @@ use crate::{app::App, storage::video_database::YtDlpOptions};
 //     "logger": _ytdl_logger
 // }
 
-pub fn download_opts(
-    app: &App,
-    additional_opts: YtDlpOptions,
-) -> serde_json::Map<String, serde_json::Value> {
+#[must_use]
+pub fn download_opts(app: &App, additional_opts: &YtDlpOptions) -> serde_json::Map<String, Value> {
     match json!({
       "extract_flat": false,
       "extractor_args": {
@@ -103,10 +101,10 @@ pub fn download_opts(
         }
       ]
     }) {
-        serde_json::Value::Object(mut obj) => {
+        Value::Object(mut obj) => {
             obj.insert(
                 "subtitleslangs".to_owned(),
-                serde_json::Value::Array(
+                Value::Array(
                     additional_opts
                         .subtitle_langs
                         .split(',')
diff --git a/yt/src/download/mod.rs b/yt/src/download/mod.rs
index 56910f9..a056f80 100644
--- a/yt/src/download/mod.rs
+++ b/yt/src/download/mod.rs
@@ -19,6 +19,7 @@ use crate::{
         getters::get_video_yt_dlp_opts,
         Video, YtDlpOptions,
     },
+    unreachable::Unreachable,
 };
 
 use anyhow::{bail, Context, Result};
@@ -27,9 +28,11 @@ use futures::{future::BoxFuture, FutureExt};
 use log::{debug, error, info, warn};
 use tokio::{fs, task::JoinHandle, time};
 
+#[allow(clippy::module_name_repetitions)]
 pub mod download_options;
 
 #[derive(Debug)]
+#[allow(clippy::module_name_repetitions)]
 pub struct CurrentDownload {
     task_handle: JoinHandle<Result<()>>,
     extractor_hash: ExtractorHash,
@@ -37,7 +40,7 @@ pub struct CurrentDownload {
 
 impl CurrentDownload {
     fn new_from_video(app: Arc<App>, video: Video) -> Self {
-        let extractor_hash = video.extractor_hash.clone();
+        let extractor_hash = video.extractor_hash;
 
         let task_handle = tokio::spawn(async move {
             Downloader::actually_cache_video(&app, &video)
@@ -64,6 +67,7 @@ enum CacheSizeCheck {
     ExceedsMaxCacheSize,
 }
 
+#[derive(Debug)]
 pub struct Downloader {
     current_download: Option<CurrentDownload>,
     video_size_cache: HashMap<ExtractorHash, u64>,
@@ -78,6 +82,7 @@ impl Default for Downloader {
 }
 
 impl Downloader {
+    #[must_use]
     pub fn new() -> Self {
         Self {
             current_download: None,
@@ -167,14 +172,17 @@ impl Downloader {
             };
 
             if self.current_download.is_some() {
-                let current_download = self.current_download.take().expect("Is Some");
+                let current_download = self.current_download.take().unreachable("It is `Some`.");
 
                 if current_download.task_handle.is_finished() {
                     current_download.task_handle.await??;
                     continue;
                 }
 
-                if next_video.extractor_hash != current_download.extractor_hash {
+                if next_video.extractor_hash == current_download.extractor_hash {
+                    // Reset the taken value
+                    self.current_download = Some(current_download);
+                } else {
                     info!(
                     "Noticed, that the next video is not the video being downloaded, replacing it ('{}' vs. '{}')!",
                         next_video.extractor_hash.into_short_hash(&app).await?, current_download.extractor_hash.into_short_hash(&app).await?
@@ -187,9 +195,6 @@ impl Downloader {
                         CurrentDownload::new_from_video(Arc::clone(&app), next_video);
 
                     self.current_download = Some(new_current_download);
-                } else {
-                    // Reset the taken value
-                    self.current_download = Some(current_download);
                 }
             } else {
                 info!(
@@ -238,9 +243,9 @@ impl Downloader {
         } else {
             // the subtitle file size should be negligible
             let add_opts = YtDlpOptions {
-                subtitle_langs: "".to_owned(),
+                subtitle_langs: String::new(),
             };
-            let opts = &download_opts(app, add_opts);
+            let opts = &download_opts(app, &add_opts);
 
             let result = yt_dlp::extract_info(opts, &video.url, false, true)
                 .await
@@ -253,9 +258,11 @@ impl Downloader {
             } else if let Some(val) = result.filesize_approx {
                 val
             } else if result.duration.is_some() && result.tbr.is_some() {
+                #[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)]
                 let duration = result.duration.expect("Is some").ceil() as u64;
 
                 // TODO: yt_dlp gets this from the format
+                #[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)]
                 let tbr = result.tbr.expect("Is Some").ceil() as u64;
 
                 duration * tbr * (1000 / 8)
@@ -269,8 +276,7 @@ impl Downloader {
             };
 
             assert_eq!(
-                self.video_size_cache
-                    .insert(video.extractor_hash.clone(), size),
+                self.video_size_cache.insert(video.extractor_hash, size),
                 None
             );
 
@@ -283,7 +289,7 @@ impl Downloader {
 
         let addional_opts = get_video_yt_dlp_opts(app, &video.extractor_hash).await?;
 
-        let result = yt_dlp::download(&[video.url.clone()], &download_opts(app, addional_opts))
+        let result = yt_dlp::download(&[video.url.clone()], &download_opts(app, &addional_opts))
             .await
             .with_context(|| format!("Failed to download video: '{}'", video.title))?;
 
diff --git a/yt/src/main.rs b/yt/src/main.rs
index e2c517c..d4cd2b6 100644
--- a/yt/src/main.rs
+++ b/yt/src/main.rs
@@ -8,6 +8,10 @@
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
+// `yt` is not a library. Besides, the `anyhow::Result` type is really useless if you're not going
+// to print it anyway.
+#![allow(clippy::missing_errors_doc)]
+
 use std::{collections::HashMap, fs, sync::Arc};
 
 use anyhow::{bail, Context, Result};
@@ -29,7 +33,7 @@ use url::Url;
 use videos::display::format_video::FormatVideo;
 use yt_dlp::wrapper::info_json::InfoJson;
 
-use crate::{cli::Command, storage::subscriptions::get_subscriptions};
+use crate::{cli::Command, storage::subscriptions};
 
 pub mod app;
 pub mod cli;
@@ -49,6 +53,8 @@ pub mod videos;
 pub mod watch;
 
 #[tokio::main]
+// This is _the_ main function after all. It is not really good, but it sort of works.
+#[allow(clippy::too_many_lines)]
 async fn main() -> Result<()> {
     let args = cli::CliArgs::parse();
 
@@ -146,7 +152,7 @@ async fn main() -> Result<()> {
             max_backlog,
             subscriptions,
         } => {
-            let all_subs = get_subscriptions(&app).await?;
+            let all_subs = subscriptions::get(&app).await?;
 
             for sub in &subscriptions {
                 if !all_subs.0.contains_key(sub) {
@@ -173,14 +179,14 @@ async fn main() -> Result<()> {
                     .context("Failed to remove a subscription")?;
             }
             SubscriptionCommand::List {} => {
-                let all_subs = get_subscriptions(&app).await?;
+                let all_subs = subscriptions::get(&app).await?;
 
                 for (key, val) in all_subs.0 {
                     println!("{}: '{}'", key, val.url);
                 }
             }
             SubscriptionCommand::Export {} => {
-                let all_subs = get_subscriptions(&app).await?;
+                let all_subs = subscriptions::get(&app).await?;
                 for val in all_subs.0.values() {
                     println!("{}", val.url);
                 }
@@ -189,9 +195,9 @@ async fn main() -> Result<()> {
                 if let Some(file) = file {
                     let f = File::open(file).await?;
 
-                    subscribe::import(&app, BufReader::new(f), force).await?
+                    subscribe::import(&app, BufReader::new(f), force).await?;
                 } else {
-                    subscribe::import(&app, BufReader::new(stdin()), force).await?
+                    subscribe::import(&app, BufReader::new(stdin()), force).await?;
                 };
             }
         },
@@ -202,7 +208,7 @@ async fn main() -> Result<()> {
         Command::Config {} => status::config(&app)?,
 
         Command::Database { command } => match command {
-            CacheCommand::Invalidate { hard } => cache::invalidate(&app, hard).await?,
+            CacheCommand::Invalidate { hard } => invalidate(&app, hard).await?,
             CacheCommand::Maintain { all } => cache::maintain(&app, all).await?,
         },
 
@@ -211,15 +217,19 @@ async fn main() -> Result<()> {
                 let string = fs::read_to_string(&path)
                     .with_context(|| format!("Failed to read '{}' to string!", path.display()))?;
 
-                let _: InfoJson =
-                    serde_json::from_str(&string).context("Failed to deserialize value")?;
+                drop(
+                    serde_json::from_str::<InfoJson>(&string)
+                        .context("Failed to deserialize value")?,
+                );
             }
             CheckCommand::UpdateInfoJson { path } => {
                 let string = fs::read_to_string(&path)
                     .with_context(|| format!("Failed to read '{}' to string!", path.display()))?;
 
-                let _: HashMap<Url, InfoJson> =
-                    serde_json::from_str(&string).context("Failed to deserialize value")?;
+                drop(
+                    serde_json::from_str::<HashMap<Url, InfoJson>>(&string)
+                        .context("Failed to deserialize value")?,
+                );
             }
         },
         Command::Comments {} => {
diff --git a/yt/src/select/cmds.rs b/yt/src/select/cmds.rs
index 6e71607..f1488bc 100644
--- a/yt/src/select/cmds.rs
+++ b/yt/src/select/cmds.rs
@@ -43,16 +43,6 @@ pub async fn handle_select_cmd(
         }
         SelectCommand::Add { urls } => {
             for url in urls {
-                let opts = download_opts(
-                    &app,
-                    video_database::YtDlpOptions {
-                        subtitle_langs: "".to_owned(),
-                    },
-                );
-                let entry = yt_dlp::extract_info(&opts, &url, false, true)
-                    .await
-                    .with_context(|| format!("Failed to fetch entry for url: '{}'", url))?;
-
                 async fn add_entry(
                     app: &App,
                     entry: yt_dlp::wrapper::info_json::InfoJson,
@@ -67,9 +57,19 @@ pub async fn handle_select_cmd(
                     Ok(())
                 }
 
+                let opts = download_opts(
+                    app,
+                    &video_database::YtDlpOptions {
+                        subtitle_langs: String::new(),
+                    },
+                );
+                let entry = yt_dlp::extract_info(&opts, &url, false, true)
+                    .await
+                    .with_context(|| format!("Failed to fetch entry for url: '{url}'"))?;
+
                 match entry._type {
                     Some(InfoType::Video) => {
-                        add_entry(&app, entry).await?;
+                        add_entry(app, entry).await?;
                     }
                     Some(InfoType::Playlist) => {
                         if let Some(mut entries) = entry.entries {
@@ -79,7 +79,7 @@ pub async fn handle_select_cmd(
 
                                 let futures: Vec<_> = entries
                                     .into_iter()
-                                    .map(|entry| add_entry(&app, entry))
+                                    .map(|entry| add_entry(app, entry))
                                     .collect();
 
                                 join_all(futures).await.into_iter().collect::<Result<_>>()?;
diff --git a/yt/src/select/mod.rs b/yt/src/select/mod.rs
index ca7a203..ddc8a5e 100644
--- a/yt/src/select/mod.rs
+++ b/yt/src/select/mod.rs
@@ -11,8 +11,8 @@
 use std::{
     env::{self},
     fs,
-    io::{BufRead, Write},
-    io::{BufReader, BufWriter},
+    io::{BufRead, BufReader, BufWriter, Write},
+    string::String,
 };
 
 use crate::{
@@ -20,6 +20,7 @@ use crate::{
     cli::CliArgs,
     constants::HELP_STR,
     storage::video_database::{getters::get_videos, VideoStatus},
+    unreachable::Unreachable,
     videos::display::format_video::FormatVideo,
 };
 
@@ -64,7 +65,7 @@ pub async fn select(app: &App, done: bool, use_last_selection: bool) -> Result<(
         // Warmup the cache for the display rendering of the videos.
         // Otherwise the futures would all try to warm it up at the same time.
         if let Some(vid) = matching_videos.first() {
-            let _ = vid.to_formatted_video(app).await?;
+            drop(vid.to_formatted_video(app).await?);
         }
 
         let mut edit_file = BufWriter::new(&temp_file);
@@ -82,7 +83,7 @@ pub async fn select(app: &App, done: bool, use_last_selection: bool) -> Result<(
 
             edit_file
                 .write_all(formatted_line.as_bytes())
-                .expect("This write should not fail");
+                .context("Failed to write to `edit_file`")?;
 
             Ok(())
         })?;
@@ -125,24 +126,26 @@ pub async fn select(app: &App, done: bool, use_last_selection: bool) -> Result<(
 
             let arg_line = ["yt", "select"]
                 .into_iter()
-                .chain(line.iter().map(|val| val.as_str()));
+                .chain(line.iter().map(String::as_str));
 
             let args = CliArgs::parse_from(arg_line);
 
-            let cmd = if let crate::cli::Command::Select { cmd } =
-                args.command.expect("This will be some")
-            {
-                cmd
-            } else {
+            let crate::cli::Command::Select { cmd } = args
+                .command
+                .unreachable("This will be some, as we constructed it above.")
+            else {
                 unreachable!("This is checked in the `filter_line` function")
             };
 
             handle_select_cmd(
                 app,
-                cmd.expect("This value should always be some here"),
+                cmd.unreachable(
+                    "This value should always be some \
+                    here, as it would otherwise have thrown an error above.",
+                ),
                 Some(line_number),
             )
-            .await?
+            .await?;
         }
     }
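Note: several hunks in this commit swap `.expect(...)` for `.unreachable(...)` from a new `crate::unreachable` module whose source is not part of this section, so its exact shape is an assumption. A minimal sketch of such an extension trait for `Option` and `Result` could look like this:

    use std::fmt::Debug;

    // Hypothetical sketch only; the real `yt/src/unreachable.rs` is not shown in this diff.
    pub trait Unreachable<T> {
        /// Like `expect`, but documents that the failure case is considered impossible.
        fn unreachable(self, msg: &str) -> T;
    }

    impl<T> Unreachable<T> for Option<T> {
        fn unreachable(self, msg: &str) -> T {
            self.unwrap_or_else(|| unreachable!("{msg}"))
        }
    }

    impl<T, E: Debug> Unreachable<T> for Result<T, E> {
        fn unreachable(self, msg: &str) -> T {
            self.unwrap_or_else(|err| unreachable!("{msg}: {err:?}"))
        }
    }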
 
diff --git a/yt/src/select/selection_file/duration.rs b/yt/src/select/selection_file/duration.rs
index a38981c..3957f0f 100644
--- a/yt/src/select/selection_file/duration.rs
+++ b/yt/src/select/selection_file/duration.rs
@@ -12,6 +12,8 @@ use std::str::FromStr;
 
 use anyhow::{Context, Result};
 
+use crate::unreachable::Unreachable;
+
 #[derive(Copy, Clone, Debug)]
 pub struct Duration {
     time: u32,
@@ -23,11 +25,9 @@ impl FromStr for Duration {
     fn from_str(s: &str) -> Result<Self, Self::Err> {
         fn parse_num(str: &str, suffix: char) -> Result<u32> {
             str.strip_suffix(suffix)
-                .with_context(|| {
-                    format!("Failed to strip suffix '{}' of number: '{}'", suffix, str)
-                })?
+                .with_context(|| format!("Failed to strip suffix '{suffix}' of number: '{str}'"))?
                 .parse::<u32>()
-                .with_context(|| format!("Failed to parse '{}'", suffix))
+                .with_context(|| format!("Failed to parse '{str}' as a number"))
         }
 
         if s == "[No Duration]" {
@@ -66,7 +66,9 @@ impl FromStr for Duration {
 impl From<Option<f64>> for Duration {
     fn from(value: Option<f64>) -> Self {
         Self {
-            time: value.unwrap_or(0.0).ceil() as u32,
+            #[allow(clippy::cast_possible_truncation)]
+            time: u32::try_from(value.unwrap_or(0.0).ceil() as i128)
+                .unreachable("This should not exceed `u32::MAX`"),
         }
     }
 }
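Note: the `From<Option<f64>>` impl above no longer truncates with a bare `as u32`: it ceils the float, widens through `i128` (where the float-to-int cast saturates instead of wrapping), and only then does a checked `u32::try_from`. The same idea as a standalone helper:

    #[allow(clippy::cast_possible_truncation)]
    fn ceil_to_u32(value: Option<f64>) -> Option<u32> {
        // `ceil()` removes the fractional part, the `as i128` cast saturates rather
        // than wrapping, and `try_from` rejects negative or oversized values.
        u32::try_from(value.unwrap_or(0.0).ceil() as i128).ok()
    }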
diff --git a/yt/src/select/selection_file/mod.rs b/yt/src/select/selection_file/mod.rs
index 45809fa..5e5643d 100644
--- a/yt/src/select/selection_file/mod.rs
+++ b/yt/src/select/selection_file/mod.rs
@@ -20,10 +20,7 @@ pub fn process_line(line: &str) -> Result<Option<Vec<String>>> {
     if line.starts_with('#') || line.trim().is_empty() {
         Ok(None)
     } else {
-        // pick 2195db "CouchRecherche? Gunnar und Han von STRG_F sind #mitfunkzuhause" "2020-04-01" "STRG_F - Live" "[1h 5m]" "https://www.youtube.com/watch?v=C8UXOaoMrXY"
-
-        let tri =
-            Trinitry::new(line).with_context(|| format!("Failed to parse line '{}'", line))?;
+        let tri = Trinitry::new(line).with_context(|| format!("Failed to parse line '{line}'"))?;
 
         let mut vec = Vec::with_capacity(tri.arguments().len() + 1);
         vec.push(tri.command().to_owned());
diff --git a/yt/src/status/mod.rs b/yt/src/status/mod.rs
index 7ffe8d7..474cab7 100644
--- a/yt/src/status/mod.rs
+++ b/yt/src/status/mod.rs
@@ -15,7 +15,7 @@ use crate::{
     app::App,
     download::Downloader,
     storage::{
-        subscriptions::get_subscriptions,
+        subscriptions::get,
         video_database::{getters::get_videos, VideoStatus},
     },
 };
@@ -72,7 +72,7 @@ pub async fn show(app: &App) -> Result<()> {
     let drop_videos_changing = get!(@changing all_videos, Drop);
     let dropped_videos_changing = get!(@changing all_videos, Dropped);
 
-    let subscriptions = get_subscriptions(app).await?;
+    let subscriptions = get(app).await?;
     let subscriptions_len = subscriptions.0.len();
 
     let cache_usage_raw = Downloader::get_current_cache_allocation(app)
@@ -101,7 +101,7 @@ Dropped  Videos: {dropped_videos_len} ({dropped_videos_changing} changing)
 pub fn config(app: &App) -> Result<()> {
     let config_str = toml::to_string(&app.config)?;
 
-    print!("{}", config_str);
+    print!("{config_str}");
 
     Ok(())
 }
diff --git a/yt/src/storage/subscriptions.rs b/yt/src/storage/subscriptions.rs
index 22edd08..8e089f0 100644
--- a/yt/src/storage/subscriptions.rs
+++ b/yt/src/storage/subscriptions.rs
@@ -19,7 +19,7 @@ use sqlx::query;
 use url::Url;
 use yt_dlp::wrapper::info_json::InfoType;
 
-use crate::app::App;
+use crate::{app::App, unreachable::Unreachable};
 
 #[derive(Clone, Debug)]
 pub struct Subscription {
@@ -31,6 +31,7 @@ pub struct Subscription {
 }
 
 impl Subscription {
+    #[must_use]
     pub fn new(name: String, url: Url) -> Self {
         Self { name, url }
     }
@@ -38,14 +39,13 @@ impl Subscription {
 
 /// Check whether an URL could be used as a subscription URL
 pub async fn check_url(url: &Url) -> Result<bool> {
-    let yt_opts = match json!( {
+    let Value::Object(yt_opts) = json!( {
         "playliststart": 1,
         "playlistend": 10,
         "noplaylist": false,
         "extract_flat": "in_playlist",
-    }) {
-        Value::Object(map) => map,
-        _ => unreachable!("This is hardcoded"),
+    }) else {
+        unreachable!("This is hardcoded");
     };
 
     let info = yt_dlp::extract_info(&yt_opts, url, false, false).await?;
@@ -55,10 +55,11 @@ pub async fn check_url(url: &Url) -> Result<bool> {
     Ok(info._type == Some(InfoType::Playlist))
 }
 
-#[derive(Default)]
+#[derive(Default, Debug)]
 pub struct Subscriptions(pub(crate) HashMap<String, Subscription>);
 
-pub async fn remove_all_subscriptions(app: &App) -> Result<()> {
+/// Remove all subscriptions
+pub async fn remove_all(app: &App) -> Result<()> {
     query!(
         "
         DELETE FROM subscriptions;
@@ -71,7 +72,7 @@ pub async fn remove_all_subscriptions(app: &App) -> Result<()> {
 }
 
 /// Get a list of subscriptions
-pub async fn get_subscriptions(app: &App) -> Result<Subscriptions> {
+pub async fn get(app: &App) -> Result<Subscriptions> {
     let raw_subs = query!(
         "
         SELECT *
@@ -88,7 +89,7 @@ pub async fn get_subscriptions(app: &App) -> Result<Subscriptions> {
                 sub.name.clone(),
                 Subscription::new(
                     sub.name,
-                    Url::parse(&sub.url).expect("This should be valid"),
+                    Url::parse(&sub.url).unreachable("It was a URL when we inserted it."),
                 ),
             )
         })
@@ -117,6 +118,8 @@ pub async fn add_subscription(app: &App, sub: &Subscription) -> Result<()> {
     Ok(())
 }
 
+/// # Panics
+/// Only if assertions fail
 pub async fn remove_subscription(app: &App, sub: &Subscription) -> Result<()> {
     let output = query!(
         "
diff --git a/yt/src/storage/video_database/downloader.rs b/yt/src/storage/video_database/downloader.rs
index ccd4ca9..bfb7aa3 100644
--- a/yt/src/storage/video_database/downloader.rs
+++ b/yt/src/storage/video_database/downloader.rs
@@ -15,12 +15,15 @@ use log::debug;
 use sqlx::query;
 use url::Url;
 
-use crate::{app::App, storage::video_database::VideoStatus};
+use crate::{app::App, storage::video_database::VideoStatus, unreachable::Unreachable};
 
 use super::{ExtractorHash, Video};
 
 /// Returns to next video which should be downloaded. This respects the priority assigned by select.
 /// It does not return videos, which are already cached.
+///
+/// # Panics
+/// Only if assertions fail.
 pub async fn get_next_uncached_video(app: &App) -> Result<Option<Video>> {
     let status = VideoStatus::Watch.as_db_integer();
 
@@ -46,7 +49,7 @@ pub async fn get_next_uncached_video(app: &App) -> Result<Option<Video>> {
         let thumbnail_url = base
             .thumbnail_url
             .as_ref()
-            .map(|url| Url::parse(url).expect("Parsing this as url should always work"));
+            .map(|url| Url::parse(url).unreachable("Parsing this as a URL should always work"));
 
         let status_change = if base.status_change == 1 {
             true
@@ -149,5 +152,6 @@ pub async fn get_allocated_cache(app: &App) -> Result<u32> {
     .fetch_one(&app.database)
     .await?;
 
-    Ok(count.count as u32)
+    Ok(u32::try_from(count.count)
+        .unreachable("The value should be non-negative (and below `u32::MAX`)"))
 }
diff --git a/yt/src/storage/video_database/extractor_hash.rs b/yt/src/storage/video_database/extractor_hash.rs
index c956919..3aa3cd2 100644
--- a/yt/src/storage/video_database/extractor_hash.rs
+++ b/yt/src/storage/video_database/extractor_hash.rs
@@ -8,18 +8,18 @@
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
-use std::{collections::HashMap, fmt::Display, str::FromStr};
+use std::{collections::HashSet, fmt::Display, str::FromStr};
 
 use anyhow::{bail, Context, Result};
 use blake3::Hash;
 use log::debug;
 use tokio::sync::OnceCell;
 
-use crate::{app::App, storage::video_database::getters::get_all_hashes};
+use crate::{app::App, storage::video_database::getters::get_all_hashes, unreachable::Unreachable};
 
 static EXTRACTOR_HASH_LENGTH: OnceCell<usize> = OnceCell::const_new();
 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
 pub struct ExtractorHash {
     hash: Hash,
 }
@@ -40,6 +40,7 @@ impl Display for ShortHash {
 }
 
 #[derive(Debug, Clone)]
+#[allow(clippy::module_name_repetitions)]
 pub struct LazyExtractorHash {
     value: ShortHash,
 }
@@ -67,6 +68,7 @@ impl LazyExtractorHash {
 }
 
 impl ExtractorHash {
+    #[must_use]
     pub fn from_hash(hash: Hash) -> Self {
         Self { hash }
     }
@@ -76,6 +78,7 @@ impl ExtractorHash {
         })
     }
 
+    #[must_use]
     pub fn hash(&self) -> &Hash {
         &self.hash
     }
@@ -88,9 +91,9 @@ impl ExtractorHash {
                 .get_needed_char_len(app)
                 .await
                 .context("Failed to calculate needed char length")?;
-            EXTRACTOR_HASH_LENGTH
-                .set(needed_chars)
-                .expect("This should work at this stage");
+            EXTRACTOR_HASH_LENGTH.set(needed_chars).unreachable(
+                "This should work at this stage, as we checked above that it is empty.",
+            );
 
             needed_chars
         };
@@ -139,9 +142,9 @@ impl ExtractorHash {
                 .map(|vec| vec.iter().take(i).collect::<String>())
                 .collect();
 
-            let mut uniqnes_hashmap: HashMap<String, ()> = HashMap::new();
+            let mut uniqnes_hashmap: HashSet<String> = HashSet::new();
             for ch in i_chars {
-                if let Some(()) = uniqnes_hashmap.insert(ch, ()) {
+                if !uniqnes_hashmap.insert(ch) {
                     // The key was already in the hash map, thus we have a duplicated char and need
                     // at least one char more
                     continue 'outer;
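Note: the uniqueness check above drops the `HashMap<String, ()>` workaround for a `HashSet<String>`: `HashSet::insert` already returns `false` when the value was present, which is exactly the duplicate-prefix signal the loop is looking for. A standalone version of that check:

    use std::collections::HashSet;

    /// Returns true if every hash is still unique when shortened to `len` characters.
    fn prefixes_are_unique(hashes: &[String], len: usize) -> bool {
        let mut seen = HashSet::new();
        for hash in hashes {
            let prefix: String = hash.chars().take(len).collect();
            // `insert` returns false if the prefix was already in the set.
            if !seen.insert(prefix) {
                return false;
            }
        }
        true
    }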
diff --git a/yt/src/storage/video_database/getters.rs b/yt/src/storage/video_database/getters.rs
index 29dd014..d8f9a3f 100644
--- a/yt/src/storage/video_database/getters.rs
+++ b/yt/src/storage/video_database/getters.rs
@@ -26,6 +26,7 @@ use crate::{
         subscriptions::Subscription,
         video_database::{extractor_hash::ExtractorHash, Video},
     },
+    unreachable::Unreachable,
 };
 
 use super::{MpvOptions, VideoOptions, VideoStatus, YtDlpOptions};
@@ -69,12 +70,15 @@ macro_rules! video_from_record {
 /// Get the lines to display at the selection file
 /// [`changing` = true]: Means that we include *only* videos, that have the `status_changing` flag set
 /// [`changing` = None]: Means that we include *both* videos, that have the `status_changing` flag set and not set
+///
+/// # Panics
+/// Only if assertions fail.
 pub async fn get_videos(
     app: &App,
     allowed_states: &[VideoStatus],
     changing: Option<bool>,
 ) -> Result<Vec<Video>> {
-    let mut qb: QueryBuilder<Sqlite> = QueryBuilder::new(
+    let mut qb: QueryBuilder<'_, Sqlite> = QueryBuilder::new(
         "\
     SELECT *
     FROM videos
@@ -132,7 +136,7 @@ pub async fn get_videos(
                 extractor_hash: ExtractorHash::from_hash(
                     base.get::<String, &str>("extractor_hash")
                         .parse()
-                        .expect("The db hash should be a valid blake3 hash"),
+                        .unreachable("The db hash should always be a valid blake3 hash"),
                 ),
                 last_status_change: base.get("last_status_change"),
                 parent_subscription_name: base
@@ -143,9 +147,17 @@ pub async fn get_videos(
                 thumbnail_url: base
                     .get::<Option<String>, &str>("thumbnail_url")
                     .as_ref()
-                    .map(|url| Url::parse(url).expect("Parsing this as url should always work")),
-                title: base.get::<String, &str>("title").to_owned(),
-                url: Url::parse(base.get("url")).expect("Parsing this as url should always work"),
+                    .map(|url| {
+                        Url::parse(url).unreachable(
+                            "Parsing this as a URL should always work, \
+                             as it was a URL when we put it in.",
+                        )
+                    }),
+                title: base.get::<String, &str>("title").clone(),
+                url: Url::parse(base.get("url")).unreachable(
+                    "Parsing this as a URL should always work, \
+                    as it was a URL when we put it in.",
+                ),
                 priority: base.get("priority"),
                 status_change: {
                     let val = base.get::<i64, &str>("status_change");
@@ -195,6 +207,8 @@ pub async fn get_video_by_hash(app: &App, hash: &ExtractorHash) -> Result<Video>
     video_from_record! {raw_video}
 }
 
+/// # Panics
+/// Only if assertions fail.
 pub async fn get_currently_playing_video(app: &App) -> Result<Option<Video>> {
     let mut videos: Vec<Video> = get_changing_videos(app, VideoStatus::Cached).await?;
 
@@ -248,8 +262,9 @@ pub async fn get_all_hashes(app: &App) -> Result<Vec<Hash>> {
     Ok(hashes_hex
         .iter()
         .map(|hash| {
-            Hash::from_hex(&hash.extractor_hash)
-                .expect("These values started as blake3 hashes, they should stay blake3 hashes")
+            Hash::from_hex(&hash.extractor_hash).unreachable(
+                "These values started as blake3 hashes; they should stay blake3 hashes",
+            )
         })
         .collect())
 }
@@ -269,8 +284,9 @@ pub async fn get_video_hashes(app: &App, subs: &Subscription) -> Result<Vec<Hash
     Ok(hashes_hex
         .iter()
         .map(|hash| {
-            Hash::from_hex(&hash.extractor_hash)
-                .expect("These values started as blake3 hashes, they should stay blake3 hashes")
+            Hash::from_hex(&hash.extractor_hash).unreachable(
+                "These values started as blake3 hashes; they should stay blake3 hashes",
+            )
         })
         .collect())
 }
@@ -289,10 +305,7 @@ pub async fn get_video_yt_dlp_opts(app: &App, hash: &ExtractorHash) -> Result<Yt
     .fetch_one(&app.database)
     .await
     .with_context(|| {
-        format!(
-            "Failed to fetch the `yt_dlp_video_opts` for video: {}",
-            hash
-        )
+        format!("Failed to fetch the `yt_dlp_video_opts` for video with hash: '{hash}'",)
     })?;
 
     Ok(YtDlpOptions {
@@ -312,7 +325,9 @@ pub async fn get_video_mpv_opts(app: &App, hash: &ExtractorHash) -> Result<MpvOp
     )
     .fetch_one(&app.database)
     .await
-    .with_context(|| format!("Failed to fetch the `mpv_video_opts` for video: {}", hash))?;
+    .with_context(|| {
+        format!("Failed to fetch the `mpv_video_opts` for video with hash: '{hash}'")
+    })?;
 
     Ok(MpvOptions {
         playback_speed: mpv_options.playback_speed,
@@ -332,7 +347,7 @@ pub async fn get_video_opts(app: &App, hash: &ExtractorHash) -> Result<VideoOpti
     )
     .fetch_one(&app.database)
     .await
-    .with_context(|| format!("Failed to fetch the `video_opts` for video: {}", hash))?;
+    .with_context(|| format!("Failed to fetch the `video_opts` for video with hash: '{hash}'"))?;
 
     let mpv = MpvOptions {
         playback_speed: opts.playback_speed,
@@ -341,5 +356,5 @@ pub async fn get_video_opts(app: &App, hash: &ExtractorHash) -> Result<VideoOpti
         subtitle_langs: opts.subtitle_langs,
     };
 
-    Ok(VideoOptions { mpv, yt_dlp })
+    Ok(VideoOptions { yt_dlp, mpv })
 }
diff --git a/yt/src/storage/video_database/mod.rs b/yt/src/storage/video_database/mod.rs
index 1765f79..aff57b9 100644
--- a/yt/src/storage/video_database/mod.rs
+++ b/yt/src/storage/video_database/mod.rs
@@ -52,10 +52,11 @@ impl VideoOptions {
 
     /// This will write out the options that are different from the defaults.
     /// Beware, that this does not set the priority.
+    #[must_use]
     pub fn to_cli_flags(self, app: &App) -> String {
         let mut f = String::new();
 
-        if self.mpv.playback_speed != app.config.select.playback_speed {
+        if (self.mpv.playback_speed - app.config.select.playback_speed).abs() > f64::EPSILON {
             write!(f, " --speed '{}'", self.mpv.playback_speed).expect("Works");
         }
         if self.yt_dlp.subtitle_langs != app.config.select.subtitle_langs {
@@ -66,7 +67,7 @@ impl VideoOptions {
     }
 }
 
-#[derive(Debug)]
+#[derive(Debug, Clone, Copy)]
 /// Additionally settings passed to mpv on watch
 pub struct MpvOptions {
     /// The playback speed. (1 is 100%, 2.7 is 270%, and so on)
@@ -118,21 +119,21 @@ impl VideoStatus {
         VideoStatus::Dropped,
     ];
 
+    #[must_use]
     pub fn as_command(&self) -> &str {
         // NOTE: Keep the serialize able variants synced with the main `select` function <2024-06-14>
         // Also try to ensure, that the strings have the same length
         match self {
             VideoStatus::Pick => "pick   ",
 
-            VideoStatus::Watch => "watch  ",
-            VideoStatus::Cached => "watch  ",
+            VideoStatus::Watch | VideoStatus::Cached => "watch  ",
             VideoStatus::Watched => "watched",
 
-            VideoStatus::Drop => "drop   ",
-            VideoStatus::Dropped => "drop   ",
+            VideoStatus::Drop | VideoStatus::Dropped => "drop   ",
         }
     }
 
+    #[must_use]
     pub fn as_db_integer(&self) -> i64 {
         // These numbers should not change their mapping!
         // Oh, and keep them in sync with the SQLite check constraint.
@@ -147,6 +148,7 @@ impl VideoStatus {
             VideoStatus::Dropped => 5,
         }
     }
+    #[must_use]
     pub fn from_db_integer(num: i64) -> Self {
         match num {
             0 => Self::Pick,
@@ -164,6 +166,7 @@ impl VideoStatus {
         }
     }
 
+    #[must_use]
     pub fn as_str(&self) -> &'static str {
         match self {
             VideoStatus::Pick => "Pick",
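Note: `to_cli_flags` above replaces a direct `!=` on two `f64` playback speeds with an epsilon comparison, presumably to satisfy clippy's `float_cmp` lint. The helper this boils down to:

    /// True when `a` and `b` differ by more than `f64::EPSILON`.
    /// An absolute epsilon is fine for playback speeds near 1.0, but it is not
    /// a general-purpose float equality test.
    fn differs(a: f64, b: f64) -> bool {
        (a - b).abs() > f64::EPSILON
    }

    // Example: only emit `--speed` when the user actually changed it.
    // if differs(opts.playback_speed, defaults.playback_speed) { ... }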
diff --git a/yt/src/storage/video_database/setters.rs b/yt/src/storage/video_database/setters.rs
index c160138..4531fd1 100644
--- a/yt/src/storage/video_database/setters.rs
+++ b/yt/src/storage/video_database/setters.rs
@@ -111,7 +111,10 @@ pub async fn set_video_status(
 }
 
 /// Mark a video as watched.
-/// This will both set the status to `Watched` and the cache_path to Null.
+/// This will both set the status to `Watched` and the `cache_path` to Null.
+///
+/// # Panics
+/// Only if assertions fail.
 pub async fn set_video_watched(app: &App, video: &Video) -> Result<()> {
     let video_hash = video.extractor_hash.hash().to_string();
     let new_status = VideoStatus::Watched.as_db_integer();
@@ -143,7 +146,7 @@ pub async fn set_video_watched(app: &App, video: &Video) -> Result<()> {
 
     if let Some(path) = &video.cache_path {
         if let Ok(true) = path.try_exists() {
-            fs::remove_file(path).await?
+            fs::remove_file(path).await?;
         }
     }
 
@@ -168,7 +171,7 @@ pub async fn set_state_change(
     video_extractor_hash: &ExtractorHash,
     changing: bool,
 ) -> Result<()> {
-    let state_change = if changing { 1 } else { 0 };
+    let state_change = u32::from(changing);
     let video_extractor_hash = video_extractor_hash.hash().to_string();
 
     query!(
@@ -217,7 +220,7 @@ pub async fn add_video(app: &App, video: Video) -> Result<()> {
     let thumbnail_url = video.thumbnail_url.map(|val| val.to_string());
 
     let status = video.status.as_db_integer();
-    let status_change = if video.status_change { 1 } else { 0 };
+    let status_change = u32::from(video.status_change);
     let url = video.url.to_string();
     let extractor_hash = video.extractor_hash.hash().to_string();
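Note: both `if changing { 1 } else { 0 }` spots above become `u32::from(...)`; the standard `From<bool>` impls for the integer types guarantee exactly the 0/1 mapping the SQLite columns expect. In isolation:

    fn as_db_flag(changing: bool) -> u32 {
        // `From<bool> for u32` maps false to 0 and true to 1.
        u32::from(changing)
    }

    // as_db_flag(true) == 1, as_db_flag(false) == 0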
 
diff --git a/yt/src/subscribe/mod.rs b/yt/src/subscribe/mod.rs
index 74d88b4..32a7760 100644
--- a/yt/src/subscribe/mod.rs
+++ b/yt/src/subscribe/mod.rs
@@ -21,13 +21,14 @@ use yt_dlp::wrapper::info_json::InfoType;
 use crate::{
     app::App,
     storage::subscriptions::{
-        add_subscription, check_url, get_subscriptions, remove_all_subscriptions,
+        add_subscription, check_url, get, remove_all,
         remove_subscription, Subscription,
     },
+    unreachable::Unreachable,
 };
 
 pub async fn unsubscribe(app: &App, name: String) -> Result<()> {
-    let present_subscriptions = get_subscriptions(app).await?;
+    let present_subscriptions = get(app).await?;
 
     if let Some(subscription) = present_subscriptions.0.get(&name) {
         remove_subscription(app, subscription).await?;
@@ -44,22 +45,22 @@ pub async fn import<W: AsyncBufRead + AsyncBufReadExt + Unpin>(
     force: bool,
 ) -> Result<()> {
     if force {
-        remove_all_subscriptions(app).await?;
+        remove_all(app).await?;
     }
 
     let mut lines = reader.lines();
     while let Some(line) = lines.next_line().await? {
         let url =
-            Url::from_str(&line).with_context(|| format!("Failed to parse '{}' as url", line))?;
+            Url::from_str(&line).with_context(|| format!("Failed to parse '{line}' as url"))?;
         match subscribe(app, None, url)
             .await
-            .with_context(|| format!("Failed to subscribe to: '{}'", line))
+            .with_context(|| format!("Failed to subscribe to: '{line}'"))
         {
-            Ok(_) => (),
+            Ok(()) => (),
             Err(err) => eprintln!(
                 "Error while subscribing to '{}': '{}'",
                 line,
-                err.source().expect("Should have a source")
+                err.source().unreachable("Should have a source")
             ),
         }
     }
@@ -79,14 +80,15 @@ pub async fn subscribe(app: &App, name: Option<String>, url: Url) -> Result<()>
         warn!("Your YouTube URL does not seem like it actually tracks a channel's playlist (videos, streams, shorts). Adding subscriptions for each of them...");
 
         let url = Url::parse(&(url.as_str().to_owned() + "/"))
-            .expect("This was an url, it should stay one");
+            .unreachable("This was a URL; it should stay one");
 
         if let Some(name) = name {
             let out: Result<()> = async move {
                 actual_subscribe(
                     app,
                     Some(name.clone() + " {Videos}"),
-                    url.join("videos/").expect("Works"),
+                    url.join("videos/")
+                        .unreachable("The url should allow being joined onto"),
                 )
                 .await
                 .with_context(|| {
@@ -96,7 +98,7 @@ pub async fn subscribe(app: &App, name: Option<String>, url: Url) -> Result<()>
                 actual_subscribe(
                     app,
                     Some(name.clone() + " {Streams}"),
-                    url.join("streams/").expect("Works"),
+                    url.join("streams/").unreachable("See above."),
                 )
                 .await
                 .with_context(|| {
@@ -106,7 +108,7 @@ pub async fn subscribe(app: &App, name: Option<String>, url: Url) -> Result<()>
                 actual_subscribe(
                     app,
                     Some(name.clone() + " {Shorts}"),
-                    url.join("shorts/").expect("Works"),
+                    url.join("shorts/").unreachable("See above."),
                 )
                 .await
                 .with_context(|| format!("Failed to subscribe to '{}'", name + " {Shorts}"))?;
@@ -116,17 +118,17 @@ pub async fn subscribe(app: &App, name: Option<String>, url: Url) -> Result<()>
             .boxed()
             .await;
 
-            out?
+            out?;
         } else {
-            actual_subscribe(app, None, url.join("videos/").expect("Works"))
+            actual_subscribe(app, None, url.join("videos/").unreachable("See above."))
                 .await
                 .with_context(|| format!("Failed to subscribe to the '{}' variant", "{Videos}"))?;
 
-            actual_subscribe(app, None, url.join("streams/").expect("Works"))
+            actual_subscribe(app, None, url.join("streams/").unreachable("See above."))
                 .await
                 .with_context(|| format!("Failed to subscribe to the '{}' variant", "{Streams}"))?;
 
-            actual_subscribe(app, None, url.join("shorts/").expect("Works"))
+            actual_subscribe(app, None, url.join("shorts/").unreachable("See above."))
                 .await
                 .with_context(|| format!("Failed to subscribe to the '{}' variant", "{Shorts}"))?;
         }
@@ -145,14 +147,13 @@ async fn actual_subscribe(app: &App, name: Option<String>, url: Url) -> Result<(
     let name = if let Some(name) = name {
         name
     } else {
-        let yt_opts = match json!( {
+        let Value::Object(yt_opts) = json!( {
             "playliststart": 1,
             "playlistend": 10,
             "noplaylist": false,
             "extract_flat": "in_playlist",
-        }) {
-            Value::Object(map) => map,
-            _ => unreachable!("This is hardcoded"),
+        }) else {
+            unreachable!("This is hardcoded")
         };
 
         let info = yt_dlp::extract_info(&yt_opts, &url, false, false).await?;
@@ -164,7 +165,7 @@ async fn actual_subscribe(app: &App, name: Option<String>, url: Url) -> Result<(
         }
     };
 
-    let present_subscriptions = get_subscriptions(app).await?;
+    let present_subscriptions = get(app).await?;
 
     if let Some(subs) = present_subscriptions.0.get(&name) {
         bail!(
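Note: the subscribe path above leans on `Url::join`, which resolves relative to the last path segment: without the trailing `/` that the code appends first, `join("videos/")` would replace the channel segment instead of nesting under it. A small sketch of that behaviour, with a made-up channel URL:

    use url::Url;

    fn channel_tabs(channel: &Url) -> Result<(Url, Url), url::ParseError> {
        // Ensure a trailing slash; otherwise `join` drops the last path segment.
        let base = Url::parse(&(channel.as_str().to_owned() + "/"))?;
        Ok((base.join("videos/")?, base.join("streams/")?))
    }

    // "https://www.youtube.com/@example" joins to ".../@example/videos/" and
    // ".../@example/streams/" once the trailing slash is in place.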
diff --git a/yt/src/update/mod.rs b/yt/src/update/mod.rs
index 6abb8c4..e3ab54e 100644
--- a/yt/src/update/mod.rs
+++ b/yt/src/update/mod.rs
@@ -8,7 +8,7 @@
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
-use std::{collections::HashMap, process::Stdio, str::FromStr};
+use std::{collections::HashMap, process::Stdio, str::FromStr, string::ToString};
 
 use anyhow::{Context, Ok, Result};
 use chrono::{DateTime, Utc};
@@ -23,12 +23,13 @@ use yt_dlp::{unsmuggle_url, wrapper::info_json::InfoJson};
 use crate::{
     app::App,
     storage::{
-        subscriptions::{get_subscriptions, Subscription},
+        subscriptions::{get, Subscription},
         video_database::{
             extractor_hash::ExtractorHash, getters::get_all_hashes, setters::add_video, Video,
             VideoStatus,
         },
     },
+    unreachable::Unreachable,
     videos::display::format_video::FormatVideo,
 };
 
@@ -38,7 +39,7 @@ pub async fn update(
     subs_to_update: Vec<String>,
     verbosity: u8,
 ) -> Result<()> {
-    let subscriptions = get_subscriptions(app).await?;
+    let subscriptions = get(app).await?;
     let mut back_subs: HashMap<Url, Subscription> = HashMap::new();
     let logging = verbosity > 0;
     let log_level = match verbosity {
@@ -72,7 +73,7 @@ pub async fn update(
         .arg(urls.len().to_string())
         .arg(log_level.to_string())
         .args(&urls)
-        .args(hashes.iter().map(|haz| haz.to_string()).collect::<Vec<_>>())
+        .args(hashes.iter().map(ToString::to_string).collect::<Vec<_>>())
         .stdout(Stdio::piped())
         .stderr(if logging {
             Stdio::inherit()
@@ -87,7 +88,7 @@ pub async fn update(
         child
             .stdout
             .take()
-            .expect("Should be able to take child stdout"),
+            .unreachable("Should be able to take child stdout"),
     )
     .lines();
 
@@ -99,14 +100,14 @@ pub async fn update(
         // output.sync_all().await?;
         // drop(output);
 
-        let output_json: HashMap<Url, InfoJson> =
-            serde_json::from_str(&line).expect("This should be valid json");
+        let output_json: HashMap<Url, InfoJson> = serde_json::from_str(&line)
+            .unreachable("The json is generated by our own script. It should be valid");
 
         for (url, value) in output_json {
-            let sub = back_subs.get(&url).expect("This was stored before");
+            let sub = back_subs.get(&url).unreachable("This was stored before");
             process_subscription(app, sub, value, &hashes)
                 .await
-                .with_context(|| format!("Failed to process subscription: '{}'", sub.name))?
+                .with_context(|| format!("Failed to process subscription: '{}'", sub.name))?;
         }
     }
 
@@ -115,14 +116,14 @@ pub async fn update(
         error!(
             "The update_raw.py invocation failed (exit code: {}).",
             out.code()
-                .map(|f| f.to_string())
-                .unwrap_or("<No exit code>".to_owned())
-        )
+                .map_or("<No exit code>".to_owned(), |f| f.to_string())
+        );
     }
 
     Ok(())
 }
 
+#[allow(clippy::too_many_lines)]
 pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Result<Video> {
     macro_rules! unwrap_option {
         ($option:expr) => {
@@ -136,6 +137,17 @@ pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Resu
             }
         };
     }
+    fn fmt_context(date: &str, extended: Option<&str>) -> String {
+        let f = format!(
+            "Failed to parse the `upload_date` of the entry ('{date}'). \
+                    Expected `YYYY-MM-DD`, has the format changed?"
+        );
+        if let Some(date_string) = extended {
+            format!("{f}\nThe parsed '{date_string}' can't be turned into a valid UTC date.")
+        } else {
+            f
+        }
+    }
 
     let publish_date = if let Some(date) = &entry.upload_date {
         let year: u32 = date
@@ -143,26 +155,26 @@ pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Resu
             .take(4)
             .collect::<String>()
             .parse()
-            .expect("Should work.");
+            .with_context(|| fmt_context(date, None))?;
         let month: u32 = date
             .chars()
             .skip(4)
             .take(2)
             .collect::<String>()
             .parse()
-            .expect("Should work");
+            .with_context(|| fmt_context(date, None))?;
         let day: u32 = date
             .chars()
             .skip(6)
             .take(2)
             .collect::<String>()
             .parse()
-            .expect("Should work");
+            .with_context(|| fmt_context(date, None))?;
 
         let date_string = format!("{year:04}-{month:02}-{day:02}T00:00:00Z");
         Some(
             DateTime::<Utc>::from_str(&date_string)
-                .expect("This should always work")
+                .with_context(|| fmt_context(date, Some(&date_string)))?
                 .timestamp(),
         )
     } else {
@@ -178,35 +190,27 @@ pub fn video_entry_to_video(entry: InfoJson, sub: Option<&Subscription>) -> Resu
         (None, Some(thumbnail)) => Some(thumbnail.to_owned()),
 
         // TODO: The algorithm is not exactly the best <2024-05-28>
-        (Some(thumbnails), None) => Some(
-            thumbnails
-                .first()
-                .expect("At least one should exist")
-                .url
-                .clone(),
-        ),
+        (Some(thumbnails), None) => thumbnails.first().map(|thumbnail| thumbnail.url.clone()),
         (Some(_), Some(thumnail)) => Some(thumnail.to_owned()),
     };
 
     let url = {
-        let smug_url: url::Url = unwrap_option!(entry.webpage_url.clone());
-        unsmuggle_url(smug_url)?
+        let smug_url: Url = unwrap_option!(entry.webpage_url.clone());
+        unsmuggle_url(&smug_url)?
     };
 
     let extractor_hash = blake3::hash(unwrap_option!(entry.id).as_bytes());
 
     let subscription_name = if let Some(sub) = sub {
         Some(sub.name.clone())
-    } else {
-        if let Some(uploader) = entry.uploader {
-            if entry.webpage_url_domain == Some("youtube.com".to_owned()) {
-                Some(format!("{} - Videos", uploader))
-            } else {
-                Some(uploader.clone())
-            }
+    } else if let Some(uploader) = entry.uploader {
+        if entry.webpage_url_domain == Some("youtube.com".to_owned()) {
+            Some(format!("{uploader} - Videos"))
         } else {
-            None
+            Some(uploader.clone())
         }
+    } else {
+        None
     };
 
     let video = Video {
@@ -236,7 +240,7 @@ async fn process_subscription(
     let video =
         video_entry_to_video(entry, Some(sub)).context("Failed to parse search entry as Video")?;
 
-    if hashes.contains(&video.extractor_hash.hash()) {
+    if hashes.contains(video.extractor_hash.hash()) {
         // We already stored the video information
         unreachable!("The python update script should have never provided us a duplicated video");
     } else {
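Note: the `upload_date` handling above still slices the `YYYYMMDD` string by hand, now with proper error context instead of `expect`. As an aside, chrono can parse that format directly; this is an alternative sketch, not what the commit does, and it assumes a chrono version that provides `NaiveDateTime::and_utc`:

    use anyhow::{Context, Result};
    use chrono::NaiveDate;

    /// Parse yt-dlp's `upload_date` ("YYYYMMDD") into a unix timestamp at midnight UTC.
    fn upload_date_to_timestamp(date: &str) -> Result<i64> {
        let day = NaiveDate::parse_from_str(date, "%Y%m%d")
            .with_context(|| format!("Failed to parse upload_date '{date}' as YYYYMMDD"))?;
        let midnight = day
            .and_hms_opt(0, 0, 0)
            .context("Midnight is always a valid time")?;
        Ok(midnight.and_utc().timestamp())
    }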
diff --git a/yt/src/videos/display/format_video.rs b/yt/src/videos/display/format_video.rs
index 50646a1..18c2e15 100644
--- a/yt/src/videos/display/format_video.rs
+++ b/yt/src/videos/display/format_video.rs
@@ -28,6 +28,7 @@ pub trait FormatVideo {
     fn url(&self) -> Self::Output;
     fn video_options(&self) -> Self::Output;
 
+    #[allow(clippy::type_complexity)]
     fn to_parts(
         &self,
     ) -> (
diff --git a/yt/src/videos/display/mod.rs b/yt/src/videos/display/mod.rs
index d919dd2..8464d72 100644
--- a/yt/src/videos/display/mod.rs
+++ b/yt/src/videos/display/mod.rs
@@ -36,9 +36,11 @@ macro_rules! get {
 }
 
 /// This is identical to a [`FormattedVideo`], but has colorized fields.
+#[derive(Debug)]
 pub struct ColorizedFormattedVideo(FormattedVideo);
 
 impl FormattedVideo {
+    #[must_use]
     pub fn colorize(self) -> ColorizedFormattedVideo {
         let Self {
             cache_path,
@@ -96,7 +98,7 @@ pub struct FormattedVideo {
     thumbnail_url: String,
     title: String,
     url: String,
-    /// This string contains the video options (speed, subtitle_languages, etc.).
+    /// This string contains the video options (speed, `subtitle_languages`, etc.).
     /// It already starts with an extra whitespace, when these are not empty.
     video_options: String,
 }
@@ -171,8 +173,8 @@ impl Video {
                     format!("Failed to get video options for video: '{}'", self.title)
                 })?
                 .to_cli_flags(app);
-            let opts_white = if !opts.is_empty() { " " } else { "" };
-            format!("{}{}", opts_white, opts)
+            let opts_white = if opts.is_empty() { "" } else { " " };
+            format!("{opts_white}{opts}")
         };
 
         Ok(FormattedVideo {
diff --git a/yt/src/videos/mod.rs b/yt/src/videos/mod.rs
index 59baa8c..9704f73 100644
--- a/yt/src/videos/mod.rs
+++ b/yt/src/videos/mod.rs
@@ -47,19 +47,19 @@ pub async fn query(app: &App, limit: Option<usize>, search_query: Option<String>
     if let Some(query) = search_query {
         let mut matcher = Matcher::new(nucleo_matcher::Config::DEFAULT.match_paths());
 
-        let matches = Pattern::parse(
+        let pattern_matches = Pattern::parse(
             &query.replace(' ', "\\ "),
             CaseMatching::Ignore,
             Normalization::Smart,
         )
         .match_list(all_video_strings, &mut matcher);
 
-        matches
+        pattern_matches
             .iter()
             .rev()
-            .for_each(|(val, key)| println!("{} ({})", val, key));
+            .for_each(|(val, key)| println!("{val} ({key})"));
     } else {
-        println!("{}", all_video_strings.join("\n"))
+        println!("{}", all_video_strings.join("\n"));
     }
 
     Ok(())
diff --git a/yt/src/watch/events/handlers/mod.rs b/yt/src/watch/events/handlers/mod.rs
new file mode 100644
index 0000000..eca016a
--- /dev/null
+++ b/yt/src/watch/events/handlers/mod.rs
@@ -0,0 +1,161 @@
+use std::{env::current_exe, mem};
+
+use crate::{app::App, comments, storage::video_database::setters::set_state_change};
+
+use super::MpvEventHandler;
+
+use anyhow::{bail, Context, Result};
+use libmpv2::{
+    events::{EndFileEvent, PlaylistEntryId},
+    Mpv,
+};
+use log::info;
+use tokio::process::Command;
+
+impl MpvEventHandler {
+    // EndFile {{{
+    pub async fn handle_end_file_eof(
+        &mut self,
+        app: &App,
+        mpv: &Mpv,
+        end_file_event: EndFileEvent,
+    ) -> Result<()> {
+        info!("Mpv reached eof of current video. Marking it inactive.");
+
+        self.mark_video_inactive(app, mpv, end_file_event.playlist_entry_id)
+            .await?;
+
+        Ok(())
+    }
+    pub async fn handle_end_file_stop(
+        &mut self,
+        app: &App,
+        mpv: &Mpv,
+        end_file_event: EndFileEvent,
+    ) -> Result<()> {
+        // This reason is incredibly ambiguous. It _both_ means actually pausing a
+        // video and going to the next one in the playlist.
+        // Oh, and it's also called when a video is removed from the playlist (at
+        // least via "playlist-remove current").
+        info!("Paused video (or went to next playlist entry); Marking it inactive");
+
+        self.mark_video_inactive(app, mpv, end_file_event.playlist_entry_id)
+            .await?;
+
+        Ok(())
+    }
+    pub async fn handle_end_file_quit(
+        &mut self,
+        app: &App,
+        mpv: &Mpv,
+        _end_file_event: EndFileEvent,
+    ) -> Result<()> {
+        info!("Mpv quit. Exiting playback");
+
+        // draining the playlist is okay, as mpv is done playing
+        let mut handler = mem::take(&mut self.playlist_handler);
+        let videos = handler.playlist_ids(mpv)?;
+        for hash in videos.values() {
+            self.mark_video_watched(app, hash).await?;
+            set_state_change(app, hash, false).await?;
+        }
+
+        Ok(())
+    }
+    // }}}
+
+    // StartFile {{{
+    pub async fn handle_start_file(
+        &mut self,
+        app: &App,
+        mpv: &Mpv,
+        entry_id: PlaylistEntryId,
+    ) -> Result<()> {
+        self.possibly_add_new_videos(app, mpv, false).await?;
+
+        // We don't need to check, whether other videos are still active, as they should
+        // have been marked inactive in the `Stop` handler.
+        self.mark_video_active(app, mpv, entry_id).await?;
+        let hash = self.get_cvideo_hash(mpv, 0)?;
+        self.apply_options(app, mpv, &hash).await?;
+
+        Ok(())
+    }
+    // }}}
+
+    // ClientMessage {{{
+    pub async fn handle_client_message_yt_comments_external(app: &App) -> Result<()> {
+        let binary =
+            current_exe().context("Failed to determine the current executable to re-execute")?;
+
+        let status = Command::new("riverctl")
+            .args(["focus-output", "next"])
+            .status()
+            .await?;
+        if !status.success() {
+            bail!("focusing the next output failed!");
+        }
+
+        let status = Command::new("alacritty")
+            .args([
+                "--title",
+                "floating please",
+                "--command",
+                binary
+                    .to_str()
+                    .context("Failed to turn the executable path to a utf8-string")?,
+                "--db-path",
+                app.config
+                    .paths
+                    .database_path
+                    .to_str()
+                    .context("Failed to parse the database_path as a utf8-string")?,
+                "comments",
+            ])
+            .status()
+            .await?;
+        if !status.success() {
+            bail!("Failed to start `yt comments`");
+        }
+
+        let status = Command::new("riverctl")
+            .args(["focus-output", "next"])
+            .status()
+            .await?;
+
+        if !status.success() {
+            bail!("focusing the next output failed!");
+        }
+
+        Ok(())
+    }
+    pub async fn handle_client_message_yt_comments_local(app: &App, mpv: &Mpv) -> Result<()> {
+        let comments: String = comments::get(app)
+            .await?
+            .render(false)
+            .replace(['"', '\''], "")
+            .chars()
+            .take(app.config.watch.local_comments_length)
+            .collect();
+
+        Self::message(mpv, &comments, "6000")?;
+        Ok(())
+    }
+
+    /// # Panics
+    /// Only if internal assertions fail.
+    pub fn handle_client_message_yt_mark_watch_later(&mut self, mpv: &Mpv) -> Result<()> {
+        mpv.execute("write-watch-later-config", &[])?;
+
+        let hash = self.remove_cvideo_from_playlist(mpv)?;
+        assert!(
+            self.watch_later_block_list.insert(hash),
+            "A video should not be blocked *and* in the playlist"
+        );
+
+        Self::message(mpv, "Marked the video to be watched later", "3000")?;
+
+        Ok(())
+    }
+    // }}}
+}
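Note: `handle_end_file_quit` above drains the playlist by `mem::take`-ing the handler: the `PlaylistHandler` is moved out and a `Default` one is left behind, which avoids holding a borrow of `self.playlist_handler` while the loop also needs `&mut self`. The mechanics in isolation, with simplified hypothetical types:

    use std::mem;

    #[derive(Default, Debug)]
    struct Playlist {
        entries: Vec<String>,
    }

    struct Handler {
        playlist: Playlist,
    }

    impl Handler {
        fn drain(&mut self) {
            // Move the playlist out, leaving `Playlist::default()` in its place,
            // so `self` stays usable while we iterate over the old entries.
            let old = mem::take(&mut self.playlist);
            for entry in old.entries {
                self.mark_done(&entry);
            }
        }

        fn mark_done(&mut self, entry: &str) {
            println!("done: {entry}");
        }
    }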
diff --git a/yt/src/watch/events/mod.rs b/yt/src/watch/events/mod.rs
index 41a7772..413fb4b 100644
--- a/yt/src/watch/events/mod.rs
+++ b/yt/src/watch/events/mod.rs
@@ -8,43 +8,48 @@
 // You should have received a copy of the License along with this program.
 // If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
 
-use std::{collections::HashMap, env::current_exe, mem, time::Duration};
+use std::{
+    collections::{HashMap, HashSet},
+    time::Duration,
+};
 
-use anyhow::{bail, Result};
+use anyhow::{Context, Result};
 use libmpv2::{
     events::{Event, PlaylistEntryId},
     EndFileReason, Mpv,
 };
 use log::{debug, info, warn};
-use tokio::{process::Command, time};
+use tokio::time;
 
 use crate::{
     app::App,
-    comments::get_comments,
     storage::video_database::{
         extractor_hash::ExtractorHash,
         getters::{get_video_by_hash, get_video_mpv_opts, get_videos},
         setters::{set_state_change, set_video_watched},
         VideoStatus,
     },
+    unreachable::Unreachable,
 };
 
 use playlist_handler::PlaylistHandler;
 
+mod handlers;
 mod playlist_handler;
 
 #[derive(Debug)]
 pub struct MpvEventHandler {
-    watch_later_block_list: HashMap<ExtractorHash, ()>,
+    watch_later_block_list: HashSet<ExtractorHash>,
     playlist_handler: PlaylistHandler,
 }
 
 impl MpvEventHandler {
+    #[must_use]
     pub fn from_playlist(playlist_cache: HashMap<String, ExtractorHash>) -> Self {
         let playlist_handler = PlaylistHandler::from_cache(playlist_cache);
         Self {
             playlist_handler,
-            watch_later_block_list: HashMap::new(),
+            watch_later_block_list: HashSet::new(),
         }
     }
 
@@ -71,10 +76,7 @@ impl MpvEventHandler {
             .into_iter()
             .filter(|val| !current_playlist.values().any(|a| a == &val.extractor_hash))
             .filter(|val| {
-                if self
-                    .watch_later_block_list
-                    .contains_key(&val.extractor_hash)
-                {
+                if self.watch_later_block_list.contains(&val.extractor_hash) {
                     blocked_videos += 1;
                     false
                 } else {
@@ -94,9 +96,14 @@ impl MpvEventHandler {
         for play_thing in play_things {
             debug!("Adding '{}' to playlist.", play_thing.title);
 
-            let orig_cache_path = play_thing.cache_path.expect("Is cached and thus some");
-            let cache_path = orig_cache_path.to_str().expect("Should be vaild utf8");
-            let fmt_cache_path = format!("\"{}\"", cache_path);
+            let orig_cache_path = play_thing.cache_path.unreachable("Is cached and thus some");
+            let cache_path = orig_cache_path.to_str().with_context(|| {
+                format!(
+                    "Failed to parse video cache_path as valid UTF-8: '{}'",
+                    orig_cache_path.display()
+                )
+            })?;
+            let fmt_cache_path = format!("\"{cache_path}\"");
 
             let args = &[&fmt_cache_path, "append-play"];
 
@@ -108,11 +115,7 @@ impl MpvEventHandler {
         if force_message || num > 0 {
             Self::message(
                 mpv,
-                format!(
-                    "Added {} videos ({} are marked as watch later)",
-                    num, blocked_videos
-                )
-                .as_str(),
+                format!("Added {num} videos ({blocked_videos} are marked as watch later)").as_str(),
                 "3000",
             )?;
         }
@@ -120,12 +123,12 @@ impl MpvEventHandler {
     }
 
     fn message(mpv: &Mpv, message: &str, time: &str) -> Result<()> {
-        mpv.execute("show-text", &[format!("\"{}\"", message).as_str(), time])?;
+        mpv.execute("show-text", &[format!("\"{message}\"").as_str(), time])?;
         Ok(())
     }
 
     /// Get the hash of the currently playing video.
-    /// You can specify an offset, which is added to the playlist_position to get, for example, the
+    /// You can specify an offset, which is added to the `playlist_position` to get, for example, the
     /// previous video (-1) or the next video (+1).
     /// Beware that setting an offset can cause an property error if it's out of bound.
     fn get_cvideo_hash(&mut self, mpv: &Mpv, offset: i64) -> Result<ExtractorHash> {
@@ -135,12 +138,12 @@ impl MpvEventHandler {
                 if raw == -1 {
                     unreachable!( "This should only be called when a current video exists. Current state: '{:#?}'", self);
                 } else {
-                    (raw + offset) as usize
+                    usize::try_from(raw + offset).with_context(|| format!("Failed to convert the playlist position '{raw} + {offset}' to a usize (is it negative?)"))?
                 }
             };
 
             let raw =
-                mpv.get_property::<i64>(format!("playlist/{}/id", playlist_position).as_str())?;
+                mpv.get_property::<i64>(format!("playlist/{playlist_position}/id").as_str())?;
             PlaylistEntryId::new(raw)
         };
 
@@ -224,40 +227,22 @@ impl MpvEventHandler {
     }
 
     /// This will return [`true`], if the event handling should be stopped
-    pub async fn handle_mpv_event<'a>(
+    pub async fn handle_mpv_event(
         &mut self,
         app: &App,
         mpv: &Mpv,
-        event: Event<'a>,
+        event: Event<'_>,
     ) -> Result<bool> {
         match event {
             Event::EndFile(r) => match r.reason {
                 EndFileReason::Eof => {
-                    info!("Mpv reached eof of current video. Marking it inactive.");
-
-                    self.mark_video_inactive(app, mpv, r.playlist_entry_id)
-                        .await?;
+                    self.handle_end_file_eof(app, mpv, r).await?;
                 }
                 EndFileReason::Stop => {
-                    // This reason is incredibly ambiguous. It _both_ means actually pausing a
-                    // video and going to the next one in the playlist.
-                    // Oh, and it's also called, when a video is removed from the playlist (at
-                    // least via "playlist-remove current")
-                    info!("Paused video (or went to next playlist entry); Marking it inactive");
-
-                    self.mark_video_inactive(app, mpv, r.playlist_entry_id)
-                        .await?;
+                    self.handle_end_file_stop(app, mpv, r).await?;
                 }
                 EndFileReason::Quit => {
-                    info!("Mpv quit. Exiting playback");
-
-                    // draining the playlist is okay, as mpv is done playing
-                    let mut handler = mem::take(&mut self.playlist_handler);
-                    let videos = handler.playlist_ids(mpv)?;
-                    for hash in videos.values() {
-                        self.mark_video_watched(app, hash).await?;
-                        set_state_change(app, hash, false).await?;
-                    }
+                    self.handle_end_file_quit(app, mpv, r).await?;
                     return Ok(true);
                 }
                 EndFileReason::Error => {
@@ -268,84 +253,24 @@ impl MpvEventHandler {
                 }
             },
             Event::StartFile(entry_id) => {
-                self.possibly_add_new_videos(app, mpv, false).await?;
-
-                // We don't need to check, whether other videos are still active, as they should
-                // have been marked inactive in the `Stop` handler.
-                self.mark_video_active(app, mpv, entry_id).await?;
-                let hash = self.get_cvideo_hash(mpv, 0)?;
-                self.apply_options(app, mpv, &hash).await?;
+                self.handle_start_file(app, mpv, entry_id).await?;
             }
             Event::ClientMessage(a) => {
                 debug!("Got Client Message event: '{}'", a.join(" "));
 
                 match a.as_slice() {
                     &["yt-comments-external"] => {
-                        let binary = current_exe().expect("A current exe should exist");
-
-                        let status = Command::new("riverctl")
-                            .args(["focus-output", "next"])
-                            .status()
-                            .await?;
-                        if !status.success() {
-                            bail!("focusing the next output failed!");
-                        }
-
-                        let status = Command::new("alacritty")
-                            .args([
-                                "--title",
-                                "floating please",
-                                "--command",
-                                binary.to_str().expect("Should be valid unicode"),
-                                "--db-path",
-                                app.config
-                                    .paths
-                                    .database_path
-                                    .to_str()
-                                    .expect("This should be convertible?"),
-                                "comments",
-                            ])
-                            .status()
-                            .await?;
-                        if !status.success() {
-                            bail!("Falied to start `yt comments`");
-                        }
-
-                        let status = Command::new("riverctl")
-                            .args(["focus-output", "next"])
-                            .status()
-                            .await?;
-                        if !status.success() {
-                            bail!("focusing the next output failed!");
-                        }
+                        Self::handle_client_message_yt_comments_external(app).await?;
                     }
                     &["yt-comments-local"] => {
-                        let comments: String = get_comments(app)
-                            .await?
-                            .render(false)
-                            .replace("\"", "")
-                            .replace("'", "")
-                            .chars()
-                            .take(app.config.watch.local_comments_length)
-                            .collect();
-
-                        Self::message(mpv, &comments, "6000")?;
+                        Self::handle_client_message_yt_comments_local(app, mpv).await?;
                     }
                     &["yt-description"] => {
                         // let description = description(app).await?;
                         Self::message(mpv, "<YT Description>", "6000")?;
                     }
                     &["yt-mark-watch-later"] => {
-                        mpv.execute("write-watch-later-config", &[])?;
-
-                        let hash = self.remove_cvideo_from_playlist(mpv)?;
-                        assert_eq!(
-                            self.watch_later_block_list.insert(hash, ()),
-                            None,
-                            "A video should not be blocked *and* in the playlist"
-                        );
-
-                        Self::message(mpv, "Marked the video to be watched later", "3000")?;
+                        self.handle_client_message_yt_mark_watch_later(mpv)?;
                     }
                     &["yt-mark-done-and-go-next"] => {
                         let cvideo_hash = self.remove_cvideo_from_playlist(mpv)?;
@@ -357,7 +282,7 @@ impl MpvEventHandler {
                         self.possibly_add_new_videos(app, mpv, true).await?;
                     }
                     other => {
-                        debug!("Unknown message: {}", other.join(" "))
+                        debug!("Unknown message: {}", other.join(" "));
                     }
                 }
             }
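The helper methods introduced above (`handle_start_file`, `handle_client_message_yt_comments_external`, `handle_client_message_yt_comments_local`, `handle_client_message_yt_mark_watch_later`) are defined outside the hunks shown here. As a hedged illustration only, the local-comments helper can be reconstructed from the inline block removed above; the exact signature is an assumption inferred from the new call site, not code from the crate:

    // Sketch only: the body mirrors the removed inline block; the signature and
    // the surrounding imports are assumptions, inferred from the call
    // `Self::handle_client_message_yt_comments_local(app, mpv).await?`.
    async fn handle_client_message_yt_comments_local(app: &App, mpv: &Mpv) -> Result<()> {
        // Render the locally cached comments, strip quote characters that would
        // garble the OSD message, and truncate to the configured length.
        let comments: String = get_comments(app)
            .await?
            .render(false)
            .replace("\"", "")
            .replace("'", "")
            .chars()
            .take(app.config.watch.local_comments_length)
            .collect();

        // Display the rendered comments on mpv's OSD for six seconds.
        Self::message(mpv, &comments, "6000")?;
        Ok(())
    }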
diff --git a/yt/src/watch/events/playlist_handler.rs b/yt/src/watch/events/playlist_handler.rs
index 0933856..232232d 100644
--- a/yt/src/watch/events/playlist_handler.rs
+++ b/yt/src/watch/events/playlist_handler.rs
@@ -16,26 +16,26 @@ use libmpv2::{events::PlaylistEntryId, mpv_node::MpvNode, Mpv};
 use crate::storage::video_database::extractor_hash::ExtractorHash;
 
 #[derive(Debug, Default)]
-pub struct PlaylistHandler {
+pub(crate) struct PlaylistHandler {
     /// A map of the original file paths to the videos extractor hashes.
     /// Used to get the extractor hash from a video returned by mpv
     playlist_cache: HashMap<String, ExtractorHash>,
 
-    /// A map of the playlist_entry_id field to their corresponding extractor hashes.
+    /// A map of `playlist_entry_id` fields to their corresponding extractor hashes.
     playlist_ids: HashMap<PlaylistEntryId, ExtractorHash>,
 }
 impl PlaylistHandler {
-    pub fn from_cache(cache: HashMap<String, ExtractorHash>) -> Self {
+    pub(crate) fn from_cache(cache: HashMap<String, ExtractorHash>) -> Self {
         Self {
             playlist_cache: cache,
             playlist_ids: HashMap::new(),
         }
     }
 
-    pub fn reserve(&mut self, len: usize) {
-        self.playlist_cache.reserve(len)
+    pub(crate) fn reserve(&mut self, len: usize) {
+        self.playlist_cache.reserve(len);
     }
-    pub fn add(&mut self, cache_path: String, extractor_hash: ExtractorHash) {
+    pub(crate) fn add(&mut self, cache_path: String, extractor_hash: ExtractorHash) {
         assert_eq!(
             self.playlist_cache.insert(cache_path, extractor_hash),
             None,
@@ -43,7 +43,10 @@ impl PlaylistHandler {
         );
     }
 
-    pub fn playlist_ids(&mut self, mpv: &Mpv) -> Result<&HashMap<PlaylistEntryId, ExtractorHash>> {
+    pub(crate) fn playlist_ids(
+        &mut self,
+        mpv: &Mpv,
+    ) -> Result<&HashMap<PlaylistEntryId, ExtractorHash>> {
         let mpv_playlist: Vec<(String, PlaylistEntryId)> = match mpv.get_property("playlist")? {
             MpvNode::ArrayIter(array) => array
                 .map(|val| match val {
@@ -59,10 +62,10 @@ impl PlaylistHandler {
 
                         map.for_each(|(key, value)| match key.as_str() {
                             "filename" => {
-                                entry.filename = Some(value.str().expect("work").to_owned())
+                                entry.filename = Some(value.str().expect("work").to_owned());
                             }
                             "id" => {
-                                entry.id = Some(PlaylistEntryId::new(value.i64().expect("Works")))
+                                entry.id = Some(PlaylistEntryId::new(value.i64().expect("Works")));
                             }
                             _ => (),
                         });
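The remaining changes in this file tighten visibility to `pub(crate)` and add the trailing semicolons clippy asks for; the API itself is unchanged. For orientation, a minimal usage sketch of that API as shown in these hunks, where the `Mpv` handle and the `ExtractorHash` value are assumed inputs (placeholders, not crate internals):

    use std::collections::HashMap;

    // Sketch only: `mpv` and `hash` are assumed to be provided by the caller.
    fn known_playlist_entries(mpv: &Mpv, hash: ExtractorHash) -> Result<usize> {
        let mut handler = PlaylistHandler::from_cache(HashMap::new());
        handler.reserve(1);
        // Remember which extractor hash belongs to this on-disk cache path, so
        // playlist entries reported by mpv can be resolved back to videos.
        handler.add("/path/to/cached/video.mkv".to_owned(), hash);

        // Query mpv's current playlist and return how many entries could be
        // mapped from their PlaylistEntryId to an ExtractorHash.
        let ids = handler.playlist_ids(mpv)?;
        Ok(ids.len())
    }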
diff --git a/yt/src/watch/mod.rs b/yt/src/watch/mod.rs
index 3bcf1fc..5c76c12 100644
--- a/yt/src/watch/mod.rs
+++ b/yt/src/watch/mod.rs
@@ -10,7 +10,7 @@
 
 use std::collections::HashMap;
 
-use anyhow::Result;
+use anyhow::{Context, Result};
 use events::MpvEventHandler;
 use libmpv2::{events::EventContext, Mpv};
 use log::{debug, info, warn};
@@ -19,6 +19,7 @@ use crate::{
     app::App,
     cache::maintain,
     storage::video_database::{extractor_hash::ExtractorHash, getters::get_videos, VideoStatus},
+    unreachable::Unreachable,
 };
 
 pub mod events;
@@ -47,7 +48,9 @@ pub async fn watch(app: &App) -> Result<()> {
         info!("Found mpv.conf at '{}'!", config_path.display());
         mpv.execute(
             "load-config-file",
-            &[config_path.to_str().expect("This should be utf8-able")],
+            &[config_path
+                .to_str()
+                .context("Failed to parse the config path as utf8 string")?],
         )?;
     } else {
         warn!(
@@ -61,7 +64,9 @@ pub async fn watch(app: &App) -> Result<()> {
         info!("Found mpv.input.conf at '{}'!", input_path.display());
         mpv.execute(
             "load-input-conf",
-            &[input_path.to_str().expect("This should be utf8-able")],
+            &[input_path
+                .to_str()
+                .context("Failed to parse the input path as utf8 string")?],
         )?;
     } else {
         warn!(
@@ -85,9 +90,14 @@ pub async fn watch(app: &App) -> Result<()> {
     for play_thing in play_things {
         debug!("Adding '{}' to playlist.", play_thing.title);
 
-        let orig_cache_path = play_thing.cache_path.expect("Is cached and thus some");
-        let cache_path = orig_cache_path.to_str().expect("Should be vaild utf8");
-        let fmt_cache_path = format!("\"{}\"", cache_path);
+        let orig_cache_path = play_thing.cache_path.unreachable("A cached video always has a cache path");
+        let cache_path = orig_cache_path.to_str().with_context(|| {
+            format!(
+                "Failed to parse the cache_path of a video as utf8: '{}'",
+                orig_cache_path.display()
+            )
+        })?;
+        let fmt_cache_path = format!("\"{cache_path}\"");
 
         let args = &[&fmt_cache_path, "append-play"];