path: root/sys/nixpkgs/pkgs/lf-make-map/src
Diffstat (limited to 'sys/nixpkgs/pkgs/lf-make-map/src')
-rw-r--r--  sys/nixpkgs/pkgs/lf-make-map/src/cli.rs                 28
-rw-r--r--  sys/nixpkgs/pkgs/lf-make-map/src/generator/mod.rs       45
-rw-r--r--  sys/nixpkgs/pkgs/lf-make-map/src/generator/mod.rs.old  165
-rw-r--r--  sys/nixpkgs/pkgs/lf-make-map/src/main.rs               116
-rw-r--r--  sys/nixpkgs/pkgs/lf-make-map/src/mapping/error.rs        7
-rw-r--r--  sys/nixpkgs/pkgs/lf-make-map/src/mapping/map_tree.rs   103
-rw-r--r--  sys/nixpkgs/pkgs/lf-make-map/src/mapping/mod.rs        122
7 files changed, 586 insertions, 0 deletions
diff --git a/sys/nixpkgs/pkgs/lf-make-map/src/cli.rs b/sys/nixpkgs/pkgs/lf-make-map/src/cli.rs
new file mode 100644
index 00000000..7650b39b
--- /dev/null
+++ b/sys/nixpkgs/pkgs/lf-make-map/src/cli.rs
@@ -0,0 +1,28 @@
+use std::path::PathBuf;
+
+use clap::{ArgAction, Parser};
+
+/// An automatic lf cd mapping generator
+#[derive(Parser, Debug)]
+#[clap(author, version, about, long_about = None)]
+#[command(next_line_help = true)]
+pub struct Args {
+    /// The directory to treat as home
+    #[arg(long, short = 'n', env = "HOME")]
+    pub home_name: PathBuf,
+
+    /// The directories to generate mappings for
+    pub relevant_directories: Vec<PathBuf>,
+
+    /// The number of directories to generate mappings for, starting from each `relevant_directory`
+    #[arg(long, short, default_value = "2")]
+    pub depth: usize,
+
+    /// Increase message verbosity
+    #[arg(long="verbose", short = 'v', action = ArgAction::Count)]
+    pub verbosity: u8,
+
+    /// Silence all output
+    #[arg(long, short = 'q')]
+    pub quiet: bool,
+}
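
As a rough illustration of how these arguments parse (a sketch, assuming `Args` and `clap::Parser` are in scope; the binary name and paths are made up, and `home_name` can also be filled from `$HOME` via the `env` attribute):

    // Hypothetical invocation, driven through clap's `parse_from`:
    let args = Args::parse_from([
        "lf-make-map", "-n", "/home/dt", "--depth", "2", "-vv",
        "/home/dt/repos", "/home/dt/documents",
    ]);
    assert_eq!(args.depth, 2);
    assert_eq!(args.verbosity, 2);
    assert_eq!(args.relevant_directories.len(), 2);
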
diff --git a/sys/nixpkgs/pkgs/lf-make-map/src/generator/mod.rs b/sys/nixpkgs/pkgs/lf-make-map/src/generator/mod.rs
new file mode 100644
index 00000000..48df027f
--- /dev/null
+++ b/sys/nixpkgs/pkgs/lf-make-map/src/generator/mod.rs
@@ -0,0 +1,45 @@
+use std::path::PathBuf;
+
+use anyhow::{Context, Result};
+use log::{debug, info, trace};
+use walkdir::{DirEntry, WalkDir};
+
+use crate::mapping::{map_tree::MappingTree, Mapping};
+
+pub struct MappingsGenerator {
+    mappings: MappingTree,
+    paths_to_process: Vec<PathBuf>,
+}
+
+fn is_dir(entry: &DirEntry) -> bool {
+    entry.file_type().is_dir()
+}
+
+impl MappingsGenerator {
+    pub async fn new(
+        directories_to_scan: Vec<PathBuf>,
+        max_depth: usize,
+        home_path: PathBuf,
+    ) -> Result<Self> {
+        let mut mappings = MappingTree::new();
+
+        for dir in directories_to_scan {
+            for dir2 in WalkDir::new(&dir)
+                .max_depth(max_depth)
+                .into_iter()
+                .filter_entry(|e| is_dir(e))
+            {
+                let directory =
+                    dir2.with_context(|| format!("Failed to read dir ('{}')", &dir.display()))?;
+
+                trace!("Processed '{}'..", directory.path().display());
+
+                let mapping = Mapping::new(&home_path, directory.path().to_path_buf());
+                mappings
+                    .insert(&mapping.key.clone(), mapping)
+                    .context("Failed to insert a key")?;
+            }
+        }
+        todo!()
+    }
+}
diff --git a/sys/nixpkgs/pkgs/lf-make-map/src/generator/mod.rs.old b/sys/nixpkgs/pkgs/lf-make-map/src/generator/mod.rs.old
new file mode 100644
index 00000000..406b1996
--- /dev/null
+++ b/sys/nixpkgs/pkgs/lf-make-map/src/generator/mod.rs.old
@@ -0,0 +1,165 @@
+use std::{
+    collections::HashMap,
+    path::{Path, PathBuf},
+};
+
+use anyhow::{bail, Context, Result};
+use futures::{Stream, StreamExt, TryStreamExt};
+use log::info;
+use tokio::{
+    fs::{self, DirEntry},
+    io,
+    sync::{
+        mpsc::{self, Receiver, Sender},
+        oneshot,
+    },
+    task::JoinHandle,
+};
+use tokio_stream::wrappers::{ReadDirStream, ReceiverStream};
+
+pub struct MappingGenerator {
+    current_mappings: HashMap<String, PathBuf>,
+    paths_to_process: Vec<PathBuf>,
+}
+
+pub struct MappingGeneratorHelper {
+    tx: Sender<(PathBuf, oneshot::Sender<PathBuf>)>,
+    handle: JoinHandle<()>,
+    done: Vec<PathBuf>,
+}
+
+impl MappingGeneratorHelper {
+    pub fn new() -> Self {
+        let (tx, mut rx) = mpsc::channel::<(PathBuf, oneshot::Sender<PathBuf>)>(100);
+
+        let handle = tokio::spawn(async move {
+            while let Some((dir, _done_tx)) = rx.recv().await {
+                info!("processing '{}'..", dir.display());
+                // Depth limits are placeholders here: 0 / usize::MAX effectively disable them.
+                let _ = get_dir(dir, 0, usize::MAX).await;
+            }
+        });
+
+        Self {
+            tx,
+            handle,
+            done: Vec::new(),
+        }
+    }
+
+    pub async fn process(&self, dir: PathBuf) -> Result<()> {
+        // The worker gets a oneshot sender it could use to report the finished path;
+        // the receiving side is not wired up in this old version.
+        let (done_tx, _done_rx) = oneshot::channel();
+        self.tx.send((dir, done_tx)).await?;
+        Ok(())
+    }
+}
+
+impl MappingGenerator {
+    pub async fn new(directories_to_scan: Vec<PathBuf>, depth: usize) -> Result<Self> {
+        let cleaned_directories: Vec<PathBuf> = directories_to_scan
+            .into_iter()
+            .map(check_dir)
+            .collect::<Result<_>>()?;
+
+        let helper = MappingGeneratorHelper::new();
+
+        for dir in &cleaned_directories {
+            helper.process(dir.clone()).await?;
+        }
+
+        info!(
+            "Will process:\n  {}",
+            cleaned_directories
+                .iter()
+                .map(|pat| pat.display().to_string())
+                .collect::<Vec<_>>()
+                .join("\n  ")
+        );
+        Ok(Self {
+            current_mappings: HashMap::new(),
+            paths_to_process: cleaned_directories,
+        })
+    }
+}
+
+fn check_dir(dir: PathBuf) -> Result<PathBuf> {
+    match dir.metadata() {
+        Ok(_) => Ok(dir),
+        Err(e) => bail!(
+            "'{}' is not a valid path; Error was: '{}'",
+            dir.display(),
+            e
+        ),
+    }
+}
+
+pub async fn get_dir(dir: PathBuf, current_depth: usize, max_depth: usize) -> Result<Vec<PathBuf>> {
+    let (tx, rx) = mpsc::channel(100);
+
+    let handle =
+        tokio::spawn(async move { get_dir_recursive(dir, current_depth, max_depth, tx).await });
+
+    let out = ReceiverStream::new(rx).collect::<Vec<PathBuf>>().await;
+    handle.await??;
+    Ok(out)
+}
+
+async fn get_dir_recursive(
+    dir: PathBuf,
+    current_depth: usize,
+    max_depth: usize,
+    tx: Sender<PathBuf>,
+) -> Result<()> {
+    if dir.is_dir() && current_depth != max_depth {
+        tx.send(dir).await?;
+
+        match fs::read_dir(&dir).await {
+            Ok(mut directories) => {
+                let mut handles: Vec<JoinHandle<Result<(), anyhow::Error>>> = vec![];
+                while let Some(entry) = directories
+                    .next_entry()
+                    .await
+                    .with_context(|| format!("Failed to read directory: '{}'", dir.display()))?
+                {
+                    let tx_new = tx.clone();
+                    // Clone the parent path so the spawned task does not move `dir` out of the loop.
+                    let parent = dir.clone();
+                    handles.push(tokio::spawn(async move {
+                        get_dir_recursive(entry.path(), current_depth + 1, max_depth, tx_new)
+                            .await
+                            .with_context(|| {
+                                format!(
+                                    "Failed to get child directories to '{}'",
+                                    parent.display()
+                                )
+                            })?;
+
+                        Ok(())
+                    }));
+                }
+
+                let out: Vec<_> = tokio_stream::iter(handles)
+                    .then(|handle| async move { handle.await })
+                    .collect()
+                    .await;
+
+                // Each element is the JoinHandle's Result wrapping the task's own
+                // Result, hence the double `?`.
+                for i in out {
+                    i??
+                }
+
+                Ok(())
+            }
+
+            Err(e) => {
+                bail!(
+                    "Unable to read directory {}, skipping; error: {}",
+                    dir.display(),
+                    e
+                );
+            }
+        }
+    } else {
+        return Ok(());
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use std::path::PathBuf;
+
+    use super::get_dir;
+
+    // Depth arguments and paths here are placeholders; `~` is not expanded by the
+    // code, so this test documents intent more than actual behaviour.
+    #[tokio::test]
+    async fn test_get_dir() {
+        let dirs = get_dir(PathBuf::from("~/repos"), 0, 2).await.unwrap();
+        let expected_dirs = vec![PathBuf::from("~/repos/rust")];
+        assert_eq!(dirs, expected_dirs);
+    }
+}
diff --git a/sys/nixpkgs/pkgs/lf-make-map/src/main.rs b/sys/nixpkgs/pkgs/lf-make-map/src/main.rs
new file mode 100644
index 00000000..dbfe5ec7
--- /dev/null
+++ b/sys/nixpkgs/pkgs/lf-make-map/src/main.rs
@@ -0,0 +1,116 @@
+use anyhow::Context;
+use clap::Parser;
+use cli::Args;
+use generator::MappingsGenerator;
+
+mod cli;
+mod generator;
+mod mapping;
+
+#[tokio::main]
+async fn main() -> anyhow::Result<()> {
+    let args = Args::parse();
+
+    stderrlog::new()
+        .module(module_path!())
+        .quiet(args.quiet)
+        .show_module_names(false)
+        .color(stderrlog::ColorChoice::Auto)
+        .verbosity(args.verbosity as usize)
+        .timestamp(stderrlog::Timestamp::Off)
+        .init()?;
+
+    // gen_lf_mappings(args.home_name, 0, args.relevant_directories);
+    let map = MappingsGenerator::new(args.relevant_directories, args.depth, args.home_name)
+        .await
+        .context("Failed to initialize mappings generator")?;
+
+    Ok(())
+}
+
+// fn gen_lf_mappings(home_name: PathBuf, char_num: usize, rel_dirs: Vec<PathBuf>) {
+//     let mut mappings_vec = vec![];
+//     let mut index_counter = 0;
+//     rel_dirs.iter().for_each(|rel_dir| {
+//         mappings_vec.push(vec![Mapping::new(
+//             &gen_hot_key(rel_dir, rel_dir, char_num),
+//             rel_dir,
+//             rel_dir,
+//             None,
+//         )]);
+//         get_dir(rel_dir.to_owned()).iter().for_each(|path| {
+//             mappings_vec[index_counter].push(Mapping::new(
+//                 &gen_hot_key(
+//                     path,
+//                     path.parent().expect("All paths here should have parents"),
+//                     char_num,
+//                 ),
+//                 path,
+//                 &path
+//                     .parent()
+//                     .expect("All paths here should have parents")
+//                     .to_owned(),
+//                 None,
+//             ));
+//         });
+//         index_counter += 1;
+//     });
+//     print_mappings(&mappings_vec, home_name);
+//     mappings_vec
+//         .into_iter()
+//         .for_each(|rel_dir_mapping: Vec<Mapping>| {
+//             let mut hash_map = sort_mapping_by_hot_key(rel_dir_mapping.clone());
+//             //dbg!(hash_map);
+//             hash_map.insert("gsi".to_owned(), vec![rel_dir_mapping[0].clone()]);
+//         });
+// }
+//
+// fn sort_mapping_by_hot_key(mut mappings: Vec<Mapping>) -> HashMap<String, Vec<Mapping>> {
+//     mappings.sort_by_key(|mapping| mapping.hot_key.clone());
+//
+//     let mut filtered_mappings: HashMap<String, Vec<Mapping>> = HashMap::new();
+//     mappings.iter().for_each(|mapping| {
+//         filtered_mappings.insert(mapping.hot_key.clone(), vec![]);
+//     });
+//     //dbg!(&mappings);
+//
+//     let mut index_counter = 1;
+//     mappings.iter().for_each(|mapping| {
+//         if mappings.len() > index_counter {
+//             let next_mapping = &mappings[index_counter];
+//             let vec = filtered_mappings
+//                 .get_mut(&mapping.hot_key)
+//                 .expect("This existst as it has been initialized");
+//
+//             if &next_mapping.hot_key == &mapping.hot_key {
+//                 vec.push(mapping.clone());
+//                 vec.push(next_mapping.clone());
+//             } else {
+//                 vec.push(mapping.clone());
+//             }
+//
+//             let new_vec = vec.to_owned();
+//             filtered_mappings.insert(mapping.hot_key.to_owned(), new_vec);
+//         }
+//
+//         index_counter += 1;
+//     });
+//     filtered_mappings
+// }
+//
+// fn print_mappings(mappings: &Vec<Vec<Mapping>>, home_name: PathBuf) {
+//     for mapping in mappings {
+//         mapping.iter().for_each(|map| {
+//             println!(
+//                 "{} = \"cd {}\";",
+//                 map.hot_key,
+//                 map.path
+//                     .display()
+//                     .to_string()
+//                     .replace(home_name.to_str().expect("This should be UTF-8"), "~")
+//             );
+//         });
+//
+//         println!("# -------------");
+//     }
+// }
diff --git a/sys/nixpkgs/pkgs/lf-make-map/src/mapping/error.rs b/sys/nixpkgs/pkgs/lf-make-map/src/mapping/error.rs
new file mode 100644
index 00000000..2a59ed64
--- /dev/null
+++ b/sys/nixpkgs/pkgs/lf-make-map/src/mapping/error.rs
@@ -0,0 +1,7 @@
+use thiserror::Error;
+
+#[derive(Error, Debug)]
+pub enum Error {
+    #[error("The node at key '{0}' already exists!")]
+    NodeExists(String),
+}
diff --git a/sys/nixpkgs/pkgs/lf-make-map/src/mapping/map_tree.rs b/sys/nixpkgs/pkgs/lf-make-map/src/mapping/map_tree.rs
new file mode 100644
index 00000000..44165ed1
--- /dev/null
+++ b/sys/nixpkgs/pkgs/lf-make-map/src/mapping/map_tree.rs
@@ -0,0 +1,103 @@
+use std::collections::HashMap;
+
+use super::{error, Mapping};
+
+/// A prefix tree
+pub struct MappingTree {
+    root: Node,
+}
+
+#[derive(Clone)]
+pub struct Node {
+    children: HashMap<char, Node>,
+    value: Option<Mapping>,
+
+    /// The key needed to get to this node
+    location: String,
+}
+
+impl MappingTree {
+    pub fn new() -> Self {
+        Self {
+            root: Node::new(String::new(), None),
+        }
+    }
+
+    /// Returns the node at the key, otherwise None
+    pub fn get(&self, key: &str) -> Option<&Node> {
+        let mut current_node = &self.root;
+        for ch in key.chars() {
+            current_node = current_node.children.get(&ch)?
+        }
+
+        Some(current_node)
+    }
+    /// Returns the node at the key, otherwise None. The node can be changed
+    pub fn get_mut(&mut self, key: &str) -> Option<&mut Node> {
+        let mut current_node = &mut self.root;
+        for ch in key.chars() {
+            current_node = current_node.children.get_mut(&ch)?
+        }
+
+        Some(current_node)
+    }
+
+    /// Returns the node at the key, otherwise the last node that matched.
+    pub fn try_get(&self, key: &str) -> &Node {
+        let mut current_node = &self.root;
+        for ch in key.chars() {
+            if let Some(node) = current_node.children.get(&ch) {
+                current_node = node;
+            } else {
+                return current_node;
+            }
+        }
+
+        current_node
+    }
+
+    pub fn insert(&mut self, key: &str, mapping: Mapping) -> Result<(), error::Error> {
+        let node = self.try_get(key).clone();
+        if node.location.as_str() != key {
+            let needed_nodes_key = key
+                .strip_prefix(node.location.as_str())
+                .expect("`location` is always a prefix of `key`");
+            let needed_nodes_length = needed_nodes_key.chars().count();
+
+            let mut current_node = self
+                .get_mut(&node.location)
+                .expect("This should always exist");
+            let mut current_location = node.location.clone();
+
+            for (i, ch) in needed_nodes_key.chars().enumerate() {
+                current_location.push(ch);
+
+                // Only the final node of the new chain carries the mapping.
+                let next_node = if i == needed_nodes_length - 1 {
+                    Node::new(current_location.clone(), Some(mapping.clone()))
+                } else {
+                    Node::new(current_location.clone(), None)
+                };
+
+                current_node.children.insert(ch, next_node);
+                current_node = current_node
+                    .children
+                    .get_mut(&ch)
+                    .expect("Was just inserted");
+            }
+        } else {
+            return Err(error::Error::NodeExists(key.to_owned()));
+        }
+
+        Ok(())
+    }
+}
+
+impl Node {
+    pub fn new(location: String, mapping: Option<Mapping>) -> Self {
+        Self {
+            children: HashMap::new(),
+            location,
+            value: mapping,
+        }
+    }
+}
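
A minimal usage sketch of the tree (paths hypothetical; assumes it runs inside this crate where `crate::mapping` is visible):

    use std::path::{Path, PathBuf};
    use crate::mapping::{map_tree::MappingTree, Mapping};

    fn sketch() -> Result<(), crate::mapping::error::Error> {
        let mut tree = MappingTree::new();

        // "repos/rust" under the home prefix becomes the key "rr".
        let mapping = Mapping::new(Path::new("/home/dt"), PathBuf::from("/home/dt/repos/rust"));
        tree.insert(&mapping.key.clone(), mapping)?;

        // `get` walks the exact key; `try_get` would stop at the deepest existing node.
        assert!(tree.get("rr").is_some());
        assert!(tree.get("rx").is_none());
        Ok(())
    }
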
diff --git a/sys/nixpkgs/pkgs/lf-make-map/src/mapping/mod.rs b/sys/nixpkgs/pkgs/lf-make-map/src/mapping/mod.rs
new file mode 100644
index 00000000..7de1ca5d
--- /dev/null
+++ b/sys/nixpkgs/pkgs/lf-make-map/src/mapping/mod.rs
@@ -0,0 +1,122 @@
+use std::path::{Path, PathBuf};
+
+use log::debug;
+
+pub mod error;
+pub mod map_tree;
+
+#[derive(Debug, Clone)]
+pub struct Mapping {
+    pub raw_path: PathBuf,
+
+    pub keys: usize,
+
+    pub key: String,
+}
+impl Mapping {
+    pub fn new(home_path: &Path, initial_path: PathBuf) -> Mapping {
+        let raw_path = initial_path
+            .strip_prefix(home_path)
+            .expect("Must always be under the `home_path`");
+
+        let key = Self::path_to_key(raw_path.to_str().expect("Should be a valid &str"));
+
+        Self {
+            raw_path: raw_path.to_owned(),
+            keys: key.len(),
+            key,
+        }
+    }
+
+    fn path_to_key(path: &str) -> String {
+        let key: String = path
+            .split('/')
+            .map(|part| part.chars().nth(0).expect("Must have a first element"))
+            .collect();
+        debug!("'{}' -> '{}'", path, key);
+        key
+    }
+}
+
+pub fn gen_hot_key(path: &Path, base_path: &Path, amount_of_chars: usize) -> String {
+    let path_filename_as_str = path
+        .file_name()
+        .expect("All paths here should have a file name")
+        .to_str()
+        .expect("The OSstr should be convertible");
+
+    let mut return_val = String::from("g");
+    if path != base_path {
+        return_val.push(
+            base_path
+                .file_name()
+                .expect("All paths here should have a file name")
+                .to_str()
+                .expect("The OSstr should be convertible")
+                .chars()
+                .nth(0)
+                .expect("All names should have a first char"),
+        );
+    }
+    if path_filename_as_str.contains("_") {
+        path_filename_as_str.split("_").for_each(|a| {
+            return_val.push(
+                a.chars()
+                    .nth(0)
+                    .expect("All names should have a first char"),
+            )
+        });
+    } else if path == base_path {
+        return_val.push(
+            path_filename_as_str
+                .chars()
+                .nth(0)
+                .expect("All names should have a first char"),
+        );
+    } else {
+        for a in 0..amount_of_chars {
+            return_val.push(if let Some(b) = path_filename_as_str.chars().nth(a) {
+                b
+            } else {
+                path_filename_as_str
+                    .chars()
+                    .nth(0)
+                    .expect("All names should have a first char")
+            });
+        }
+    }
+    if path == base_path {
+        return_val.push('.');
+    }
+    return_val
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    #[test]
+    fn gen_hot_key_test() {
+        let gen1 = gen_hot_key(
+            Path::new("/home/dt/repos/java_script"),
+            Path::new("/home/dt/repos"),
+            1,
+        );
+        assert_eq!(gen1, "grjs".to_owned());
+    }
+    #[test]
+    fn gen_hot_key_test_for_same_names() {
+        let gen1 = gen_hot_key(Path::new("/home/dt/repos/"), Path::new("/home/dt/repos"), 1);
+        assert_eq!(gen1, "gr.".to_owned());
+    }
+    #[test]
+    fn gen_hot_key_test_for_non_underscore_name() {
+        let gen1 = gen_hot_key(
+            Path::new("/home/dt/repos/rust"),
+            Path::new("/home/dt/repos"),
+            1,
+        );
+        assert_eq!(gen1, "grr".to_owned());
+    }
+}
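
A short, hedged sketch of the key derivation in `Mapping::new` itself, in the same style as the tests above (the home path is hypothetical):

    #[test]
    fn mapping_key_from_path() {
        let mapping = Mapping::new(
            Path::new("/home/dt"),
            PathBuf::from("/home/dt/repos/rust"),
        );
        // One character per path component below the home prefix: "repos/rust" -> "rr".
        assert_eq!(mapping.key, "rr");
        assert_eq!(mapping.keys, 2);
    }
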