From 3f5500dc95d3d1c29200844a8056cab15e2ec89d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 15:36:51 +0100 Subject: [PATCH 01/32] feat: init --- .envrc | 1 + .gitignore | 21 ++++++ flake.lock | 115 +++++++++++++++++++++++++++++++ flake.nix | 58 ++++++++++++++++ prompt_plan.md | 153 ++++++++++++++++++++++++++++++++++++++++ spec.md | 184 +++++++++++++++++++++++++++++++++++++++++++++++++ 6 files changed, 532 insertions(+) create mode 100644 .envrc create mode 100644 .gitignore create mode 100644 flake.lock create mode 100644 flake.nix create mode 100644 prompt_plan.md create mode 100644 spec.md diff --git a/.envrc b/.envrc new file mode 100644 index 0000000..3550a30 --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +use flake diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..61a76d0 --- /dev/null +++ b/.gitignore @@ -0,0 +1,21 @@ +### direnv ### +.direnv + +### Rust ### +# Generated by Cargo +# will have compiled files and executables +debug/ +target/ + +# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries +# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html +Cargo.lock + +# These are backup files generated by rustfmt +**/*.rs.bk + +# MSVC Windows builds of rustc generate these, which store debugging information +*.pdb + +# Added by cargo +/target diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000..3460792 --- /dev/null +++ b/flake.lock @@ -0,0 +1,115 @@ +{ + "nodes": { + "crane": { + "locked": { + "lastModified": 1736101677, + "narHash": "sha256-iKOPq86AOWCohuzxwFy/MtC8PcSVGnrxBOvxpjpzrAY=", + "owner": "ipetkov", + "repo": "crane", + "rev": "61ba163d85e5adeddc7b3a69bb174034965965b2", + "type": "github" + }, + "original": { + "owner": "ipetkov", + "repo": "crane", + "type": "github" + } + }, + "fenix": { + "inputs": { + "nixpkgs": [ + "nixpkgs" + ], + "rust-analyzer-src": "rust-analyzer-src" + }, + "locked": { + "lastModified": 1736318091, + "narHash": "sha256-RkRHXZaMgOMGgkW2YmEqxxDDYRiGFbfr1JuaI0VrCKo=", + "owner": "nix-community", + "repo": "fenix", + "rev": "9e13860d50cbfd42e79101a516e1939c7723f093", + "type": "github" + }, + "original": { + "owner": "nix-community", + "repo": "fenix", + "type": "github" + } + }, + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1736344531, + "narHash": "sha256-8YVQ9ZbSfuUk2bUf2KRj60NRraLPKPS0Q4QFTbc+c2c=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "bffc22eb12172e6db3c5dde9e3e5628f8e3e7912", + "type": "github" + }, + "original": { + "id": "nixpkgs", + "ref": "nixos-unstable", + "type": "indirect" + } + }, + "root": { + "inputs": { + "crane": "crane", + "fenix": "fenix", + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs" + } + }, + "rust-analyzer-src": { + "flake": false, + "locked": { + "lastModified": 1736266405, + "narHash": "sha256-V2FDSb8YjuquZduBRNp5niWYlWurja2yGN6Xzh5GPYk=", + "owner": "rust-lang", + "repo": "rust-analyzer", + "rev": "91fc0a239af4e56b84b1d3974ac0f34dcc99b895", + "type": "github" + }, + "original": { + "owner": "rust-lang", + "ref": "nightly", + "repo": "rust-analyzer", + "type": "github" + } + }, + 
"systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000..a54d6b0 --- /dev/null +++ b/flake.nix @@ -0,0 +1,58 @@ +{ + inputs = { + crane.url = "github:ipetkov/crane"; + fenix.url = "github:nix-community/fenix"; + fenix.inputs.nixpkgs.follows = "nixpkgs"; + flake-utils.url = "github:numtide/flake-utils"; + nixpkgs.url = "nixpkgs/nixos-unstable"; + }; + + outputs = { self, crane, flake-utils, nixpkgs, ... }@inputs: + flake-utils.lib.eachDefaultSystem (system: + let + inherit (pkgs) lib; + pkgs = import nixpkgs { inherit system; }; + fenix = inputs.fenix.packages.${system}; + craneLib = crane.lib.${system}.overrideToolchain toolchain.toolchain; + mkSrc = extraPaths: with lib.fileset; let + root = ./.; + rustFiles = fromSource (craneLib.cleanCargoSource root); + fileset = union rustFiles (unions extraPaths); + in + toSource { inherit root fileset; }; + + ## Customize here ## + toolchain = fenix.stable; # or fenix.complete; + stdenv = pkgs.stdenvAdapters.useMoldLinker pkgs.stdenv; + in + { + packages.default = craneLib.buildPackage { + inherit stdenv; + src = mkSrc [ ]; + strictDeps = true; + buildInputs = [ + # Add additional build inputs here + ] ++ lib.optionals pkgs.stdenv.isDarwin [ + # Additional darwin specific inputs can be set here + pkgs.libiconv + ]; + # Additional environment variables can be set directly + # MY_CUSTOM_VAR = "some value"; + }; + + devShells.default = pkgs.mkShell.override { inherit stdenv; } + { + nativeBuildInputs = with pkgs; [ + # Add additional build inputs here + ] ++ (with toolchain; [ + cargo + clippy + rustfmt + rustc + fenix.rust-analyzer + ]); + RUST_SRC_PATH = "${toolchain.rust-src}/lib/rustlib/src/rust/library"; + }; + } + ); +} diff --git a/prompt_plan.md b/prompt_plan.md new file mode 100644 index 0000000..088e300 --- /dev/null +++ b/prompt_plan.md @@ -0,0 +1,153 @@ +# Phase 1: Core Tag Handling +## Prompt 1: Tag Parsing/Validation Module +```text +Create a Rust module ```tag_engine``` with: +1. Tag validation function ```validate_tag(tag: &str) -> Result<(), TagError>``` + - Checks for prohibited characters (NUL, :, /, space) + - Validates tag is non-empty + - Returns custom error variants +2. Tag parsing function ```parse_tags(filename: &str) -> Result<(String, Vec), ParseError>``` + - Splits filename into base name and tags using " -- " delimiter + - Handles multiple delimiters as errors + - Preserves file extension +3. Unit tests covering: + - Valid/invalid tags + - Filename parsing edge cases + - Error propagation +``` + +## Prompt 2: Tag Serialization +```text +Extend ```tag_engine``` with: +1. ```serialize_tags(base: &str, tags: &[String], ext: &str) -> String``` + - Combines base name, sorted tags, and extension + - Uses " -- " delimiter only when tags exist + - Maintains alphabetical tag order +2. Unit tests for: + - Roundtrip parsing/serialization + - Empty tags handling + - Extension preservation + - Case sensitivity checks +``` + +# Phase 2: Basic Commands +## Prompt 3: List Command Implementation +```text +Implement ```list``` command: +1. CLI argument parsing using clap +2. 
Core logic: + - Process each file through ```tag_engine::parse_tags``` + - Collect unique tags across all files + - Output sorted tags line-by-line +3. Unit tests for: + - Multi-file tag aggregation + - Empty input handling + - Error propagation from tag engine +``` + +## Prompt 4: Add Command Foundation +```text +Implement core of ```add``` command: +1. Tag merging logic: + - Combine existing and new tags + - Remove duplicates while preserving order + - Case-sensitive comparison +2. Function signature: + ```fn add_tags(current: Vec, new: Vec) -> Vec``` +3. Unit tests for: + - Duplicate prevention + - Order preservation + - Case sensitivity +``` + +## Prompt 5: Complete Add Command +```text +Wire up ```add``` command: +1. File processing loop: + - Parse existing tags + - Merge with new tags + - Serialize new filename + - Atomic rename operation +2. Error handling: + - Clean error messages + - Early exit on first error +3. Integration tests: + - Actual file renaming + - Permission handling + - Cross-platform path handling +``` + +# Phase 3: Advanced Operations +## Prompt 6: Remove Command Core +```text +Implement tag removal logic: +1. Function ```filter_tags(current: Vec, remove: &[String]) -> Vec``` + - Remove all instances of specified tags + - Maintain original order of remaining tags +2. Unit tests for: + - Multi-tag removal + - Non-existent tag handling + - Empty result handling +``` + +## Prompt 7: Tree Command Structure +```text +Implement tree directory builder: +1. Function ```create_tag_combinations(tags: &[String], depth: usize) -> Vec>``` + - Generate all unique permutations up to specified depth + - Maintain alphabetical order in paths +2. Unit tests for: + - Depth limiting + - Combination validity + - Order preservation +``` + +## Prompt 8: Symlink Management +```text +Create symlink helper module: +1. Function ```create_symlink_tree(paths: Vec, target_dir: &Path) -> Result<()>``` + - Creates all required directories + - Atomic symlink creation + - Cross-platform compatibility stubs +2. Error handling: + - Clean path in error messages + - Permission checks +3. Unit tests for: + - Directory structure validation + - Symlink safety checks +``` + +# Phase 4: Error Handling +## Prompt 9: Error Type Unification +```text +Create unified error type: +1. Enum ```FileTagsError``` with variants for all error cases +2. Implement ```From``` traits for IO/parsing errors +3. Consistent error formatting: + - Machine-readable when needed + - User-friendly messages +4. Unit tests for error propagation +``` + +# Phase 5: Final Integration +## Prompt 10: CLI Wiring +```text +Integrate all components: +1. Complete command implementations +2. Add proper error handling +3. Configure --help output +4. Final integration tests: + - All commands together + - Cross-command file operations + - Stress tests with large filesets +``` + +# Phase 6: Optimization +## Prompt 11: Performance Pass +```text +Optimize hot paths: +1. Zero-copy parsing where possible +2. Preallocate buffers +3. Lazy evaluation of expensive operations +4. Benchmarks for critical operations +5. Unit test preservation diff --git a/spec.md b/spec.md new file mode 100644 index 0000000..05dac27 --- /dev/null +++ b/spec.md @@ -0,0 +1,184 @@ +# filetags-rs Technical Specification + +## Overview +filetags-rs is a command-line tool written in Rust for managing tags in file and directory names. Tags are embedded directly in filenames using a specific delimiter pattern. 
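+
+For example (an illustrative name; the exact delimiter and ordering rules are defined below), a PDF carrying the tags `tag1` and `tag2` is stored as:
+
+```
+document -- tag1 tag2.pdf
+```
+
+The base name comes first, then the " -- " delimiter, the space-separated tags in alphabetical order, and finally the original extension.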
+ +## Core Constants +- Tag delimiter: " -- " (space, double dash, space) +- Tag separator: " " (single space) +- Default tree depth: 3 + +## Tag Rules +- Tags must be valid Unicode strings +- Prohibited characters: NUL, ":", "/" +- Tags cannot contain the tag separator (space) +- Tags are stored in alphabetical order +- Duplicate tags are preserved but not added again +- Tags are case-sensitive + +## Commands + +### 1. List +**Usage:** `filetags list ` + +**Behavior:** +- Lists all unique tags found in the specified files +- Output is just the tags themselves, one per line +- No structural formatting (like JSON) is applied +- Empty files (no tags) produce no output + +**Example:** +```bash +$ filetags list "document -- tag1 tag2.pdf" "notes -- tag2 tag3.txt" +tag1 +tag2 +tag3 +``` + +### 2. Add +**Usage:** `filetags add --tag= [--tag=...] ` + +**Behavior:** +- Adds specified tags to files +- Merges with existing tags +- Sorts all tags alphabetically +- Ignores duplicate tags +- Preserves the original file extension + +**Example:** +```bash +$ filetags add --tag=work --tag=draft document.pdf +# Result: document -- draft work.pdf +``` + +### 3. Remove +**Usage:** `filetags remove --tag= [--tag=...] ` + +**Behavior:** +- Removes specified tags from files +- Preserves remaining tags in alphabetical order +- Silently ignores requests to remove non-existent tags +- Preserves the original file extension + +**Example:** +```bash +$ filetags remove --tag=draft "document -- draft work.pdf" +# Result: document -- work.pdf +``` + +### 4. Tree +**Usage:** `filetags tree --dir= [--depth=] ` + +**Parameters:** +- `--dir`: Target directory for creating the tree structure +- `--depth`: Maximum depth of the tree (default: 3) + +**Behavior:** +- Creates a directory tree based on file tags +- Creates symlinks at leaf nodes pointing to the original file +- For files with no tags, creates only a root-level symlink +- Creates all possible tag combinations up to specified depth +- Maintains alphabetical order in path components + +**Example:** +For file "document -- tag1 tag2.pdf": +``` +target_dir/ +├── document.pdf -> /path/to/document -- tag1 tag2.pdf +├── tag1/ +│ ├── document.pdf -> /path/to/document -- tag1 tag2.pdf +│ └── tag2/ +│ └── document.pdf -> /path/to/document -- tag1 tag2.pdf +└── tag2/ + ├── document.pdf -> /path/to/document -- tag1 tag2.pdf + └── tag1/ + └── document.pdf -> /path/to/document -- tag1 tag2.pdf +``` + +## Error Handling + +### Validation Errors +- Invalid characters in tags +- Empty tags +- Missing required arguments +- Invalid depth value for tree command +- Non-existent target directory + +### Runtime Errors +- File permission issues +- Symlink creation failures +- File reading/writing failures +- Directory creation failures + +All errors should: +- Print clear error messages to stderr +- Use appropriate error codes +- Include the specific file/tag that caused the error +- Exit with non-zero status + +## Testing Strategy + +### Unit Tests +1. Tag validation + - Character restrictions + - Length restrictions + - Delimiter handling + - Separator handling + +2. Tag operations + - Adding tags + - Removing tags + - Sorting tags + - Duplicate handling + - Case sensitivity + +3. Filename parsing + - Splitting into base name and tags + - Extension handling + - Edge cases (no tags, multiple delimiters) + +### Integration Tests +1. Command execution + - All commands with various combinations of arguments + - Error cases + - Edge cases + +2. 
File system operations + - File creation/modification + - Directory operations + - Symlink handling + - Permission handling + +3. Tree structure creation + - Various depths + - Different tag combinations + - Edge cases (no tags, max depth) + +### Property-Based Tests +- Filename roundtrip (parse → modify → serialize) +- Tag ordering invariants +- Unicode handling +- Path manipulation safety + +## Performance Considerations +- Efficient string manipulation for filename parsing +- Minimal allocations in hot paths +- Proper error type design to avoid allocations +- Efficient directory traversal for tree command +- Proper buffer sizes for file operations + +## Security Considerations +- Proper handling of symbolic links +- Path traversal prevention +- Proper permission checking +- Safe handling of Unicode filenames +- Proper escaping of special characters + +## Future Considerations +- Configurable delimiters +- Tag hierarchies +- Tag aliases +- Case-insensitive option +- Bulk operations +- Regular expression support +- Tag statistics and metadata From 05e5d9410a3a803c770eb2be5e02f03901d915ff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 15:36:51 +0100 Subject: [PATCH 02/32] feat: init --- .envrc | 1 + .gitignore | 21 ++++++++++ flake.lock | 115 +++++++++++++++++++++++++++++++++++++++++++++++++++++ flake.nix | 58 +++++++++++++++++++++++++++ 4 files changed, 195 insertions(+) create mode 100644 .envrc create mode 100644 .gitignore create mode 100644 flake.lock create mode 100644 flake.nix diff --git a/.envrc b/.envrc new file mode 100644 index 0000000..3550a30 --- /dev/null +++ b/.envrc @@ -0,0 +1 @@ +use flake diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..61a76d0 --- /dev/null +++ b/.gitignore @@ -0,0 +1,21 @@ +### direnv ### +.direnv + +### Rust ### +# Generated by Cargo +# will have compiled files and executables +debug/ +target/ + +# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries +# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html +Cargo.lock + +# These are backup files generated by rustfmt +**/*.rs.bk + +# MSVC Windows builds of rustc generate these, which store debugging information +*.pdb + +# Added by cargo +/target diff --git a/flake.lock b/flake.lock new file mode 100644 index 0000000..3460792 --- /dev/null +++ b/flake.lock @@ -0,0 +1,115 @@ +{ + "nodes": { + "crane": { + "locked": { + "lastModified": 1736101677, + "narHash": "sha256-iKOPq86AOWCohuzxwFy/MtC8PcSVGnrxBOvxpjpzrAY=", + "owner": "ipetkov", + "repo": "crane", + "rev": "61ba163d85e5adeddc7b3a69bb174034965965b2", + "type": "github" + }, + "original": { + "owner": "ipetkov", + "repo": "crane", + "type": "github" + } + }, + "fenix": { + "inputs": { + "nixpkgs": [ + "nixpkgs" + ], + "rust-analyzer-src": "rust-analyzer-src" + }, + "locked": { + "lastModified": 1736318091, + "narHash": "sha256-RkRHXZaMgOMGgkW2YmEqxxDDYRiGFbfr1JuaI0VrCKo=", + "owner": "nix-community", + "repo": "fenix", + "rev": "9e13860d50cbfd42e79101a516e1939c7723f093", + "type": "github" + }, + "original": { + "owner": "nix-community", + "repo": "fenix", + "type": "github" + } + }, + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": 
"flake-utils", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1736344531, + "narHash": "sha256-8YVQ9ZbSfuUk2bUf2KRj60NRraLPKPS0Q4QFTbc+c2c=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "bffc22eb12172e6db3c5dde9e3e5628f8e3e7912", + "type": "github" + }, + "original": { + "id": "nixpkgs", + "ref": "nixos-unstable", + "type": "indirect" + } + }, + "root": { + "inputs": { + "crane": "crane", + "fenix": "fenix", + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs" + } + }, + "rust-analyzer-src": { + "flake": false, + "locked": { + "lastModified": 1736266405, + "narHash": "sha256-V2FDSb8YjuquZduBRNp5niWYlWurja2yGN6Xzh5GPYk=", + "owner": "rust-lang", + "repo": "rust-analyzer", + "rev": "91fc0a239af4e56b84b1d3974ac0f34dcc99b895", + "type": "github" + }, + "original": { + "owner": "rust-lang", + "ref": "nightly", + "repo": "rust-analyzer", + "type": "github" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 0000000..a54d6b0 --- /dev/null +++ b/flake.nix @@ -0,0 +1,58 @@ +{ + inputs = { + crane.url = "github:ipetkov/crane"; + fenix.url = "github:nix-community/fenix"; + fenix.inputs.nixpkgs.follows = "nixpkgs"; + flake-utils.url = "github:numtide/flake-utils"; + nixpkgs.url = "nixpkgs/nixos-unstable"; + }; + + outputs = { self, crane, flake-utils, nixpkgs, ... }@inputs: + flake-utils.lib.eachDefaultSystem (system: + let + inherit (pkgs) lib; + pkgs = import nixpkgs { inherit system; }; + fenix = inputs.fenix.packages.${system}; + craneLib = crane.lib.${system}.overrideToolchain toolchain.toolchain; + mkSrc = extraPaths: with lib.fileset; let + root = ./.; + rustFiles = fromSource (craneLib.cleanCargoSource root); + fileset = union rustFiles (unions extraPaths); + in + toSource { inherit root fileset; }; + + ## Customize here ## + toolchain = fenix.stable; # or fenix.complete; + stdenv = pkgs.stdenvAdapters.useMoldLinker pkgs.stdenv; + in + { + packages.default = craneLib.buildPackage { + inherit stdenv; + src = mkSrc [ ]; + strictDeps = true; + buildInputs = [ + # Add additional build inputs here + ] ++ lib.optionals pkgs.stdenv.isDarwin [ + # Additional darwin specific inputs can be set here + pkgs.libiconv + ]; + # Additional environment variables can be set directly + # MY_CUSTOM_VAR = "some value"; + }; + + devShells.default = pkgs.mkShell.override { inherit stdenv; } + { + nativeBuildInputs = with pkgs; [ + # Add additional build inputs here + ] ++ (with toolchain; [ + cargo + clippy + rustfmt + rustc + fenix.rust-analyzer + ]); + RUST_SRC_PATH = "${toolchain.rust-src}/lib/rustlib/src/rust/library"; + }; + } + ); +} From 3e3f6995180a80bfe318e71618f248b525933f5f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 15:40:39 +0100 Subject: [PATCH 03/32] feat: cargo init --- .gitignore | 9 +++++++++ Cargo.toml | 6 ++++++ src/main.rs | 3 +++ 3 files changed, 18 insertions(+) create mode 100644 Cargo.toml create mode 100644 src/main.rs diff --git a/.gitignore b/.gitignore index 61a76d0..7fe1a0f 100644 --- a/.gitignore +++ b/.gitignore @@ -19,3 +19,12 @@ Cargo.lock # Added by cargo /target + +.aider* + + +# Added by cargo +# +# already 
existing elements were commented out + +#/target diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..0e88bad --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,6 @@ +[package] +name = "filetags_rs" +version = "0.1.0" +edition = "2021" + +[dependencies] diff --git a/src/main.rs b/src/main.rs new file mode 100644 index 0000000..e7a11a9 --- /dev/null +++ b/src/main.rs @@ -0,0 +1,3 @@ +fn main() { + println!("Hello, world!"); +} From 0dc401f6427aef729b658ae41703f024449810f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 15:40:39 +0100 Subject: [PATCH 04/32] feat: cargo init --- .gitignore | 9 +++++++++ Cargo.toml | 6 ++++++ src/main.rs | 3 +++ 3 files changed, 18 insertions(+) create mode 100644 Cargo.toml create mode 100644 src/main.rs diff --git a/.gitignore b/.gitignore index 61a76d0..7fe1a0f 100644 --- a/.gitignore +++ b/.gitignore @@ -19,3 +19,12 @@ Cargo.lock # Added by cargo /target + +.aider* + + +# Added by cargo +# +# already existing elements were commented out + +#/target diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..0e88bad --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,6 @@ +[package] +name = "filetags_rs" +version = "0.1.0" +edition = "2021" + +[dependencies] diff --git a/src/main.rs b/src/main.rs new file mode 100644 index 0000000..e7a11a9 --- /dev/null +++ b/src/main.rs @@ -0,0 +1,3 @@ +fn main() { + println!("Hello, world!"); +} From d13528cd3139bef7a37d2b14c3856ac08bcb9ffb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 15:41:32 +0100 Subject: [PATCH 05/32] feat: Add tag engine module with tag validation and parsing --- Cargo.toml | 1 + src/main.rs | 2 + src/tag_engine.rs | 115 ++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 118 insertions(+) create mode 100644 src/tag_engine.rs diff --git a/Cargo.toml b/Cargo.toml index 0e88bad..cd1b656 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,3 +4,4 @@ version = "0.1.0" edition = "2021" [dependencies] +thiserror = "1.0" diff --git a/src/main.rs b/src/main.rs index e7a11a9..2ee2541 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,3 +1,5 @@ +mod tag_engine; + fn main() { println!("Hello, world!"); } diff --git a/src/tag_engine.rs b/src/tag_engine.rs new file mode 100644 index 0000000..36fb955 --- /dev/null +++ b/src/tag_engine.rs @@ -0,0 +1,115 @@ +use std::path::Path; +use thiserror::Error; + +#[derive(Error, Debug, PartialEq)] +pub enum TagError { + #[error("tag cannot be empty")] + Empty, + #[error("tag contains invalid character: {0}")] + InvalidChar(char), +} + +#[derive(Error, Debug, PartialEq)] +pub enum ParseError { + #[error("multiple tag delimiters found")] + MultipleDelimiters, + #[error("invalid tag: {0}")] + InvalidTag(#[from] TagError), +} + +pub fn validate_tag(tag: &str) -> Result<(), TagError> { + if tag.is_empty() { + return Err(TagError::Empty); + } + + // Check for prohibited characters + for c in tag.chars() { + if c == '\0' || c == ':' || c == '/' || c == ' ' { + return Err(TagError::InvalidChar(c)); + } + } + + Ok(()) +} + +pub fn parse_tags(filename: &str) -> Result<(String, Vec), ParseError> { + const TAG_DELIMITER: &str = " -- "; + + let parts: Vec<&str> = filename.split(TAG_DELIMITER).collect(); + + if parts.len() > 2 { + return Err(ParseError::MultipleDelimiters); + } + + let base_name = parts[0].to_string(); + + let tags = if parts.len() == 2 { + let tag_part = parts[1]; + let tags: Vec = tag_part + .split_whitespace() + .map(str::to_string) + .collect(); + + // Validate each tag + 
for tag in &tags { + validate_tag(tag)?; + } + + tags + } else { + Vec::new() + }; + + Ok((base_name, tags)) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_validate_tag_valid() { + assert!(validate_tag("valid-tag").is_ok()); + assert!(validate_tag("tag123").is_ok()); + assert!(validate_tag("_tag_").is_ok()); + } + + #[test] + fn test_validate_tag_invalid() { + assert_eq!(validate_tag(""), Err(TagError::Empty)); + assert_eq!(validate_tag("bad tag"), Err(TagError::InvalidChar(' '))); + assert_eq!(validate_tag("bad:tag"), Err(TagError::InvalidChar(':'))); + assert_eq!(validate_tag("bad/tag"), Err(TagError::InvalidChar('/'))); + assert_eq!(validate_tag("bad\0tag"), Err(TagError::InvalidChar('\0'))); + } + + #[test] + fn test_parse_tags_no_tags() { + let (base, tags) = parse_tags("file.txt").unwrap(); + assert_eq!(base, "file.txt"); + assert!(tags.is_empty()); + } + + #[test] + fn test_parse_tags_with_tags() { + let (base, tags) = parse_tags("file.txt -- tag1 tag2").unwrap(); + assert_eq!(base, "file.txt"); + assert_eq!(tags, vec!["tag1", "tag2"]); + } + + #[test] + fn test_parse_tags_multiple_delimiters() { + assert_eq!( + parse_tags("file.txt -- tag1 -- tag2"), + Err(ParseError::MultipleDelimiters) + ); + } + + #[test] + fn test_parse_tags_invalid_tag() { + assert_eq!( + parse_tags("file.txt -- invalid:tag"), + Err(ParseError::InvalidTag(TagError::InvalidChar(':'))) + ); + } +} From 06a25589b9aceef42ea6e88659c0ddea6db191d1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 15:41:32 +0100 Subject: [PATCH 06/32] feat: Add tag engine module with tag validation and parsing --- Cargo.toml | 1 + src/main.rs | 2 + src/tag_engine.rs | 115 ++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 118 insertions(+) create mode 100644 src/tag_engine.rs diff --git a/Cargo.toml b/Cargo.toml index 0e88bad..cd1b656 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,3 +4,4 @@ version = "0.1.0" edition = "2021" [dependencies] +thiserror = "1.0" diff --git a/src/main.rs b/src/main.rs index e7a11a9..2ee2541 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,3 +1,5 @@ +mod tag_engine; + fn main() { println!("Hello, world!"); } diff --git a/src/tag_engine.rs b/src/tag_engine.rs new file mode 100644 index 0000000..36fb955 --- /dev/null +++ b/src/tag_engine.rs @@ -0,0 +1,115 @@ +use std::path::Path; +use thiserror::Error; + +#[derive(Error, Debug, PartialEq)] +pub enum TagError { + #[error("tag cannot be empty")] + Empty, + #[error("tag contains invalid character: {0}")] + InvalidChar(char), +} + +#[derive(Error, Debug, PartialEq)] +pub enum ParseError { + #[error("multiple tag delimiters found")] + MultipleDelimiters, + #[error("invalid tag: {0}")] + InvalidTag(#[from] TagError), +} + +pub fn validate_tag(tag: &str) -> Result<(), TagError> { + if tag.is_empty() { + return Err(TagError::Empty); + } + + // Check for prohibited characters + for c in tag.chars() { + if c == '\0' || c == ':' || c == '/' || c == ' ' { + return Err(TagError::InvalidChar(c)); + } + } + + Ok(()) +} + +pub fn parse_tags(filename: &str) -> Result<(String, Vec), ParseError> { + const TAG_DELIMITER: &str = " -- "; + + let parts: Vec<&str> = filename.split(TAG_DELIMITER).collect(); + + if parts.len() > 2 { + return Err(ParseError::MultipleDelimiters); + } + + let base_name = parts[0].to_string(); + + let tags = if parts.len() == 2 { + let tag_part = parts[1]; + let tags: Vec = tag_part + .split_whitespace() + .map(str::to_string) + .collect(); + + // Validate each tag + for tag in &tags 
{ + validate_tag(tag)?; + } + + tags + } else { + Vec::new() + }; + + Ok((base_name, tags)) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_validate_tag_valid() { + assert!(validate_tag("valid-tag").is_ok()); + assert!(validate_tag("tag123").is_ok()); + assert!(validate_tag("_tag_").is_ok()); + } + + #[test] + fn test_validate_tag_invalid() { + assert_eq!(validate_tag(""), Err(TagError::Empty)); + assert_eq!(validate_tag("bad tag"), Err(TagError::InvalidChar(' '))); + assert_eq!(validate_tag("bad:tag"), Err(TagError::InvalidChar(':'))); + assert_eq!(validate_tag("bad/tag"), Err(TagError::InvalidChar('/'))); + assert_eq!(validate_tag("bad\0tag"), Err(TagError::InvalidChar('\0'))); + } + + #[test] + fn test_parse_tags_no_tags() { + let (base, tags) = parse_tags("file.txt").unwrap(); + assert_eq!(base, "file.txt"); + assert!(tags.is_empty()); + } + + #[test] + fn test_parse_tags_with_tags() { + let (base, tags) = parse_tags("file.txt -- tag1 tag2").unwrap(); + assert_eq!(base, "file.txt"); + assert_eq!(tags, vec!["tag1", "tag2"]); + } + + #[test] + fn test_parse_tags_multiple_delimiters() { + assert_eq!( + parse_tags("file.txt -- tag1 -- tag2"), + Err(ParseError::MultipleDelimiters) + ); + } + + #[test] + fn test_parse_tags_invalid_tag() { + assert_eq!( + parse_tags("file.txt -- invalid:tag"), + Err(ParseError::InvalidTag(TagError::InvalidChar(':'))) + ); + } +} From 0fa29839d021a3a953040655880be9ef15d1cc5f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 15:42:42 +0100 Subject: [PATCH 07/32] feat: Add serialize_tags function with comprehensive tests --- src/tag_engine.rs | 72 ++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 62 insertions(+), 10 deletions(-) diff --git a/src/tag_engine.rs b/src/tag_engine.rs index 36fb955..c00b135 100644 --- a/src/tag_engine.rs +++ b/src/tag_engine.rs @@ -1,4 +1,3 @@ -use std::path::Path; use thiserror::Error; #[derive(Error, Debug, PartialEq)] @@ -32,16 +31,21 @@ pub fn validate_tag(tag: &str) -> Result<(), TagError> { Ok(()) } -pub fn parse_tags(filename: &str) -> Result<(String, Vec), ParseError> { - const TAG_DELIMITER: &str = " -- "; - +pub const TAG_DELIMITER: &str = " -- "; + +pub fn parse_tags(filename: &str) -> Result<(String, Vec, String), ParseError> { let parts: Vec<&str> = filename.split(TAG_DELIMITER).collect(); if parts.len() > 2 { return Err(ParseError::MultipleDelimiters); } - let base_name = parts[0].to_string(); + // Split the first part into base and extension + let base_parts: Vec<&str> = parts[0].rsplitn(2, '.').collect(); + let (base_name, extension) = match base_parts.len() { + 2 => (base_parts[1].to_string(), format!(".{}", base_parts[0])), + _ => (parts[0].to_string(), String::new()), + }; let tags = if parts.len() == 2 { let tag_part = parts[1]; @@ -60,13 +64,59 @@ pub fn parse_tags(filename: &str) -> Result<(String, Vec), ParseError> { Vec::new() }; - Ok((base_name, tags)) + Ok((base_name, tags, extension)) +} + +pub fn serialize_tags(base: &str, tags: &[String], extension: &str) -> String { + let mut sorted_tags = tags.to_vec(); + sorted_tags.sort(); + + if sorted_tags.is_empty() { + format!("{}{}", base, extension) + } else { + format!("{}{}{}{}", base, TAG_DELIMITER, sorted_tags.join(" "), extension) + } } #[cfg(test)] mod tests { use super::*; + #[test] + fn test_serialize_tags_no_tags() { + let result = serialize_tags("file", &[], ".txt"); + assert_eq!(result, "file.txt"); + } + + #[test] + fn test_serialize_tags_with_tags() { + let tags = 
vec!["tag2".to_string(), "tag1".to_string()]; + let result = serialize_tags("file", &tags, ".txt"); + assert_eq!(result, "file -- tag1 tag2.txt"); + } + + #[test] + fn test_roundtrip_parsing_serialization() { + let original = "document.doc -- work draft pdf"; + let (base, tags, ext) = parse_tags(original).unwrap(); + let result = serialize_tags(&base, &tags, &ext); + assert_eq!(result, "document -- draft pdf work.doc"); + } + + #[test] + fn test_case_sensitivity() { + let tags = vec!["ZIP".to_string(), "archive".to_string()]; + let result = serialize_tags("backup", &tags, ".tar"); + assert_eq!(result, "backup -- ZIP archive.tar"); + } + + #[test] + fn test_empty_extension() { + let tags = vec!["note".to_string()]; + let result = serialize_tags("README", &tags, ""); + assert_eq!(result, "README -- note"); + } + #[test] fn test_validate_tag_valid() { assert!(validate_tag("valid-tag").is_ok()); @@ -85,16 +135,18 @@ mod tests { #[test] fn test_parse_tags_no_tags() { - let (base, tags) = parse_tags("file.txt").unwrap(); - assert_eq!(base, "file.txt"); + let (base, tags, ext) = parse_tags("file.txt").unwrap(); + assert_eq!(base, "file"); assert!(tags.is_empty()); + assert_eq!(ext, ".txt"); } #[test] fn test_parse_tags_with_tags() { - let (base, tags) = parse_tags("file.txt -- tag1 tag2").unwrap(); - assert_eq!(base, "file.txt"); + let (base, tags, ext) = parse_tags("file.txt -- tag1 tag2").unwrap(); + assert_eq!(base, "file"); assert_eq!(tags, vec!["tag1", "tag2"]); + assert_eq!(ext, ".txt"); } #[test] From bce4b3eb861e9d9df8b359b509e6cb7c6ce2f9fc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 15:42:42 +0100 Subject: [PATCH 08/32] feat: Add serialize_tags function with comprehensive tests --- src/tag_engine.rs | 72 ++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 62 insertions(+), 10 deletions(-) diff --git a/src/tag_engine.rs b/src/tag_engine.rs index 36fb955..c00b135 100644 --- a/src/tag_engine.rs +++ b/src/tag_engine.rs @@ -1,4 +1,3 @@ -use std::path::Path; use thiserror::Error; #[derive(Error, Debug, PartialEq)] @@ -32,16 +31,21 @@ pub fn validate_tag(tag: &str) -> Result<(), TagError> { Ok(()) } -pub fn parse_tags(filename: &str) -> Result<(String, Vec), ParseError> { - const TAG_DELIMITER: &str = " -- "; - +pub const TAG_DELIMITER: &str = " -- "; + +pub fn parse_tags(filename: &str) -> Result<(String, Vec, String), ParseError> { let parts: Vec<&str> = filename.split(TAG_DELIMITER).collect(); if parts.len() > 2 { return Err(ParseError::MultipleDelimiters); } - let base_name = parts[0].to_string(); + // Split the first part into base and extension + let base_parts: Vec<&str> = parts[0].rsplitn(2, '.').collect(); + let (base_name, extension) = match base_parts.len() { + 2 => (base_parts[1].to_string(), format!(".{}", base_parts[0])), + _ => (parts[0].to_string(), String::new()), + }; let tags = if parts.len() == 2 { let tag_part = parts[1]; @@ -60,13 +64,59 @@ pub fn parse_tags(filename: &str) -> Result<(String, Vec), ParseError> { Vec::new() }; - Ok((base_name, tags)) + Ok((base_name, tags, extension)) +} + +pub fn serialize_tags(base: &str, tags: &[String], extension: &str) -> String { + let mut sorted_tags = tags.to_vec(); + sorted_tags.sort(); + + if sorted_tags.is_empty() { + format!("{}{}", base, extension) + } else { + format!("{}{}{}{}", base, TAG_DELIMITER, sorted_tags.join(" "), extension) + } } #[cfg(test)] mod tests { use super::*; + #[test] + fn test_serialize_tags_no_tags() { + let result = serialize_tags("file", &[], 
".txt"); + assert_eq!(result, "file.txt"); + } + + #[test] + fn test_serialize_tags_with_tags() { + let tags = vec!["tag2".to_string(), "tag1".to_string()]; + let result = serialize_tags("file", &tags, ".txt"); + assert_eq!(result, "file -- tag1 tag2.txt"); + } + + #[test] + fn test_roundtrip_parsing_serialization() { + let original = "document.doc -- work draft pdf"; + let (base, tags, ext) = parse_tags(original).unwrap(); + let result = serialize_tags(&base, &tags, &ext); + assert_eq!(result, "document -- draft pdf work.doc"); + } + + #[test] + fn test_case_sensitivity() { + let tags = vec!["ZIP".to_string(), "archive".to_string()]; + let result = serialize_tags("backup", &tags, ".tar"); + assert_eq!(result, "backup -- ZIP archive.tar"); + } + + #[test] + fn test_empty_extension() { + let tags = vec!["note".to_string()]; + let result = serialize_tags("README", &tags, ""); + assert_eq!(result, "README -- note"); + } + #[test] fn test_validate_tag_valid() { assert!(validate_tag("valid-tag").is_ok()); @@ -85,16 +135,18 @@ mod tests { #[test] fn test_parse_tags_no_tags() { - let (base, tags) = parse_tags("file.txt").unwrap(); - assert_eq!(base, "file.txt"); + let (base, tags, ext) = parse_tags("file.txt").unwrap(); + assert_eq!(base, "file"); assert!(tags.is_empty()); + assert_eq!(ext, ".txt"); } #[test] fn test_parse_tags_with_tags() { - let (base, tags) = parse_tags("file.txt -- tag1 tag2").unwrap(); - assert_eq!(base, "file.txt"); + let (base, tags, ext) = parse_tags("file.txt -- tag1 tag2").unwrap(); + assert_eq!(base, "file"); assert_eq!(tags, vec!["tag1", "tag2"]); + assert_eq!(ext, ".txt"); } #[test] From 6f0410ff98c5cd3beba1bd9b37a6e38e553a632c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 15:51:12 +0100 Subject: [PATCH 09/32] feat: Implement list command with tag aggregation and CLI parsing --- Cargo.toml | 1 + src/main.rs | 75 +++++++++++++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 74 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index cd1b656..5255f6c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,3 +5,4 @@ edition = "2021" [dependencies] thiserror = "1.0" +clap = { version = "4.4", features = ["derive"] } diff --git a/src/main.rs b/src/main.rs index 2ee2541..a44e53c 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,5 +1,76 @@ mod tag_engine; -fn main() { - println!("Hello, world!"); +use std::collections::BTreeSet; +use std::error::Error; +use clap::Parser; + +#[derive(Parser)] +#[command(author, version, about, long_about = None)] +struct Cli { + /// Files to process + #[arg(required = true)] + files: Vec, +} + +fn list_tags(files: &[String]) -> Result, Box> { + let mut unique_tags = BTreeSet::new(); + + for file in files { + if let Ok((_, tags, _)) = tag_engine::parse_tags(file) { + unique_tags.extend(tags); + } + } + + Ok(unique_tags.into_iter().collect()) +} + +fn main() -> Result<(), Box> { + let cli = Cli::parse(); + + let tags = list_tags(&cli.files)?; + + for tag in tags { + println!("{}", tag); + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_list_tags_empty() { + let files = Vec::new(); + let tags = list_tags(&files).unwrap(); + assert!(tags.is_empty()); + } + + #[test] + fn test_list_tags_single_file() { + let files = vec!["document.txt -- tag1 tag2".to_string()]; + let tags = list_tags(&files).unwrap(); + assert_eq!(tags, vec!["tag1", "tag2"]); + } + + #[test] + fn test_list_tags_multiple_files() { + let files = vec![ + "doc1.txt -- tag1 tag2".to_string(), + 
"doc2.txt -- tag2 tag3".to_string(), + ]; + let tags = list_tags(&files).unwrap(); + assert_eq!(tags, vec!["tag1", "tag2", "tag3"]); + } + + #[test] + fn test_list_tags_with_invalid() { + let files = vec![ + "valid.txt -- good tag1".to_string(), + "invalid.txt -- bad:tag".to_string(), + ]; + let tags = list_tags(&files).unwrap(); + assert_eq!(tags, vec!["good", "tag1"]); + } } From 4dd9edee6146b192aa2a8748df39b7f6380a7860 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 15:51:12 +0100 Subject: [PATCH 10/32] feat: Implement list command with tag aggregation and CLI parsing --- Cargo.toml | 1 + src/main.rs | 75 +++++++++++++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 74 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index cd1b656..5255f6c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,3 +5,4 @@ edition = "2021" [dependencies] thiserror = "1.0" +clap = { version = "4.4", features = ["derive"] } diff --git a/src/main.rs b/src/main.rs index 2ee2541..a44e53c 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,5 +1,76 @@ mod tag_engine; -fn main() { - println!("Hello, world!"); +use std::collections::BTreeSet; +use std::error::Error; +use clap::Parser; + +#[derive(Parser)] +#[command(author, version, about, long_about = None)] +struct Cli { + /// Files to process + #[arg(required = true)] + files: Vec, +} + +fn list_tags(files: &[String]) -> Result, Box> { + let mut unique_tags = BTreeSet::new(); + + for file in files { + if let Ok((_, tags, _)) = tag_engine::parse_tags(file) { + unique_tags.extend(tags); + } + } + + Ok(unique_tags.into_iter().collect()) +} + +fn main() -> Result<(), Box> { + let cli = Cli::parse(); + + let tags = list_tags(&cli.files)?; + + for tag in tags { + println!("{}", tag); + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_list_tags_empty() { + let files = Vec::new(); + let tags = list_tags(&files).unwrap(); + assert!(tags.is_empty()); + } + + #[test] + fn test_list_tags_single_file() { + let files = vec!["document.txt -- tag1 tag2".to_string()]; + let tags = list_tags(&files).unwrap(); + assert_eq!(tags, vec!["tag1", "tag2"]); + } + + #[test] + fn test_list_tags_multiple_files() { + let files = vec![ + "doc1.txt -- tag1 tag2".to_string(), + "doc2.txt -- tag2 tag3".to_string(), + ]; + let tags = list_tags(&files).unwrap(); + assert_eq!(tags, vec!["tag1", "tag2", "tag3"]); + } + + #[test] + fn test_list_tags_with_invalid() { + let files = vec![ + "valid.txt -- good tag1".to_string(), + "invalid.txt -- bad:tag".to_string(), + ]; + let tags = list_tags(&files).unwrap(); + assert_eq!(tags, vec!["good", "tag1"]); + } } From 9bea7c905341106e75c618a04a1c6bb214b86769 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 15:52:34 +0100 Subject: [PATCH 11/32] feat: Implement add_tags function with tag merging and deduplication logic --- src/tag_engine.rs | 52 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/src/tag_engine.rs b/src/tag_engine.rs index c00b135..327fa09 100644 --- a/src/tag_engine.rs +++ b/src/tag_engine.rs @@ -67,6 +67,18 @@ pub fn parse_tags(filename: &str) -> Result<(String, Vec, String), Parse Ok((base_name, tags, extension)) } +pub fn add_tags(current: Vec, new: Vec) -> Vec { + let mut result = current; + + for tag in new { + if !result.contains(&tag) { + result.push(tag); + } + } + + result +} + pub fn serialize_tags(base: &str, tags: &[String], extension: &str) -> String { let mut 
sorted_tags = tags.to_vec(); sorted_tags.sort(); @@ -117,6 +129,46 @@ mod tests { assert_eq!(result, "README -- note"); } + #[test] + fn test_add_tags_no_duplicates() { + let current = vec!["tag1".to_string(), "tag2".to_string()]; + let new = vec!["tag3".to_string(), "tag4".to_string()]; + let result = add_tags(current, new); + assert_eq!(result, vec!["tag1", "tag2", "tag3", "tag4"]); + } + + #[test] + fn test_add_tags_with_duplicates() { + let current = vec!["tag1".to_string(), "tag2".to_string()]; + let new = vec!["tag2".to_string(), "tag3".to_string()]; + let result = add_tags(current, new); + assert_eq!(result, vec!["tag1", "tag2", "tag3"]); + } + + #[test] + fn test_add_tags_case_sensitive() { + let current = vec!["Tag1".to_string(), "tag2".to_string()]; + let new = vec!["tag1".to_string(), "Tag2".to_string()]; + let result = add_tags(current, new); + assert_eq!(result, vec!["Tag1", "tag2", "tag1", "Tag2"]); + } + + #[test] + fn test_add_tags_empty_current() { + let current = Vec::new(); + let new = vec!["tag1".to_string(), "tag2".to_string()]; + let result = add_tags(current, new); + assert_eq!(result, vec!["tag1", "tag2"]); + } + + #[test] + fn test_add_tags_empty_new() { + let current = vec!["tag1".to_string(), "tag2".to_string()]; + let new = Vec::new(); + let result = add_tags(current, new); + assert_eq!(result, vec!["tag1", "tag2"]); + } + #[test] fn test_validate_tag_valid() { assert!(validate_tag("valid-tag").is_ok()); From 78180a516fe459dcdf5499b5bf4635989ca0ef8f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 15:52:34 +0100 Subject: [PATCH 12/32] feat: Implement add_tags function with tag merging and deduplication logic --- src/tag_engine.rs | 52 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/src/tag_engine.rs b/src/tag_engine.rs index c00b135..327fa09 100644 --- a/src/tag_engine.rs +++ b/src/tag_engine.rs @@ -67,6 +67,18 @@ pub fn parse_tags(filename: &str) -> Result<(String, Vec, String), Parse Ok((base_name, tags, extension)) } +pub fn add_tags(current: Vec, new: Vec) -> Vec { + let mut result = current; + + for tag in new { + if !result.contains(&tag) { + result.push(tag); + } + } + + result +} + pub fn serialize_tags(base: &str, tags: &[String], extension: &str) -> String { let mut sorted_tags = tags.to_vec(); sorted_tags.sort(); @@ -117,6 +129,46 @@ mod tests { assert_eq!(result, "README -- note"); } + #[test] + fn test_add_tags_no_duplicates() { + let current = vec!["tag1".to_string(), "tag2".to_string()]; + let new = vec!["tag3".to_string(), "tag4".to_string()]; + let result = add_tags(current, new); + assert_eq!(result, vec!["tag1", "tag2", "tag3", "tag4"]); + } + + #[test] + fn test_add_tags_with_duplicates() { + let current = vec!["tag1".to_string(), "tag2".to_string()]; + let new = vec!["tag2".to_string(), "tag3".to_string()]; + let result = add_tags(current, new); + assert_eq!(result, vec!["tag1", "tag2", "tag3"]); + } + + #[test] + fn test_add_tags_case_sensitive() { + let current = vec!["Tag1".to_string(), "tag2".to_string()]; + let new = vec!["tag1".to_string(), "Tag2".to_string()]; + let result = add_tags(current, new); + assert_eq!(result, vec!["Tag1", "tag2", "tag1", "Tag2"]); + } + + #[test] + fn test_add_tags_empty_current() { + let current = Vec::new(); + let new = vec!["tag1".to_string(), "tag2".to_string()]; + let result = add_tags(current, new); + assert_eq!(result, vec!["tag1", "tag2"]); + } + + #[test] + fn test_add_tags_empty_new() { + let current = 
vec!["tag1".to_string(), "tag2".to_string()]; + let new = Vec::new(); + let result = add_tags(current, new); + assert_eq!(result, vec!["tag1", "tag2"]); + } + #[test] fn test_validate_tag_valid() { assert!(validate_tag("valid-tag").is_ok()); From 12f7da02de1a34020476419edcfe6423231ea3d0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 15:53:33 +0100 Subject: [PATCH 13/32] feat: Implement add command with file tag management and error handling --- Cargo.toml | 4 + src/main.rs | 185 +++++++++++++++++++++++++++++++++++++++++----- src/tag_engine.rs | 56 ++++++++++---- 3 files changed, 213 insertions(+), 32 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 5255f6c..d132488 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,3 +6,7 @@ edition = "2021" [dependencies] thiserror = "1.0" clap = { version = "4.4", features = ["derive"] } +fs-err = "2.11" + +[dev-dependencies] +tempfile = "3.8" diff --git a/src/main.rs b/src/main.rs index a44e53c..c290fc9 100644 --- a/src/main.rs +++ b/src/main.rs @@ -2,35 +2,112 @@ mod tag_engine; use std::collections::BTreeSet; use std::error::Error; -use clap::Parser; +use clap::{Parser, Subcommand}; +use fs_err as fs; +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum CommandError { + #[error("Failed to rename {from} to {to}: {source}")] + Rename { + from: String, + to: String, + source: std::io::Error, + }, + #[error("Failed to parse tags in {file}: {source}")] + Parse { + file: String, + source: tag_engine::ParseError, + }, +} #[derive(Parser)] #[command(author, version, about, long_about = None)] struct Cli { - /// Files to process - #[arg(required = true)] - files: Vec, + #[command(subcommand)] + command: Commands, } -fn list_tags(files: &[String]) -> Result, Box> { +#[derive(Subcommand)] +enum Commands { + /// List all unique tags + List { + /// Files to process + files: Vec, + }, + /// Add tags to files + Add { + /// Tags to add + #[arg(required = true)] + tags: Vec, + /// Files to process + #[arg(required = true)] + files: Vec, + }, +} + +fn list_tags(files: &[String]) -> Result, CommandError> { let mut unique_tags = BTreeSet::new(); for file in files { - if let Ok((_, tags, _)) = tag_engine::parse_tags(file) { - unique_tags.extend(tags); + match tag_engine::parse_tags(file) { + Ok((_, tags, _)) => unique_tags.extend(tags), + Err(e) => return Err(CommandError::Parse { + file: file.to_string(), + source: e, + }), } } Ok(unique_tags.into_iter().collect()) } +fn add_tags_to_file(file: &str, new_tags: &[String]) -> Result<(), CommandError> { + let (base, current_tags, ext) = tag_engine::parse_tags(file).map_err(|e| CommandError::Parse { + file: file.to_string(), + source: e, + })?; + + let merged_tags = tag_engine::add_tags(current_tags, new_tags.to_vec()); + // Preserve the original directory + let parent = std::path::Path::new(file).parent() + .map(|p| p.to_string_lossy().to_string()) + .unwrap_or_default(); + + let new_filename = tag_engine::serialize_tags(&base, &merged_tags, &ext); + let new_path = if parent.is_empty() { + new_filename + } else { + format!("{}/{}", parent, new_filename) + }; + + // Only rename if the name would actually change + if file != new_path { + fs::rename(file, &new_path).map_err(|e| CommandError::Rename { + from: file.to_string(), + to: new_path, + source: e, + })?; + } + + Ok(()) +} + fn main() -> Result<(), Box> { let cli = Cli::parse(); - let tags = list_tags(&cli.files)?; - - for tag in tags { - println!("{}", tag); + match cli.command { + Commands::List { files } => { + let tags 
= list_tags(&files)?; + for tag in tags { + println!("{}", tag); + } + } + Commands::Add { tags, files } => { + for file in files { + add_tags_to_file(&file, &tags)?; + } + } } Ok(()) @@ -39,6 +116,18 @@ fn main() -> Result<(), Box> { #[cfg(test)] mod tests { use super::*; + use tempfile::TempDir; + + fn create_test_file(dir: &TempDir, name: &str) -> Result> { + let path = dir.path().join(name); + if let Some(parent) = path.parent() { + fs::create_dir_all(parent)?; + } + fs::write(&path, "")?; + Ok(path.to_str() + .ok_or("Invalid path")? + .to_string()) + } #[test] fn test_list_tags_empty() { @@ -65,12 +154,72 @@ mod tests { } #[test] - fn test_list_tags_with_invalid() { - let files = vec![ - "valid.txt -- good tag1".to_string(), - "invalid.txt -- bad:tag".to_string(), - ]; - let tags = list_tags(&files).unwrap(); - assert_eq!(tags, vec!["good", "tag1"]); + fn test_add_tags_to_file() -> Result<(), Box> { + let tmp_dir = TempDir::new()?; + let file = create_test_file(&tmp_dir, "test.txt")?; + + // Verify file was created + let initial_path = tmp_dir.path().join("test.txt"); + assert!(initial_path.exists(), "Initial test file was not created"); + + add_tags_to_file(&file, &vec!["tag1".to_string(), "tag2".to_string()])?; + + // Verify original is gone and new exists + assert!(!initial_path.exists(), "Original file still exists after rename"); + let new_path = tmp_dir.path().join("test -- tag1 tag2.txt"); + assert!(new_path.exists(), "Tagged file was not created"); + Ok(()) + } + + #[test] + fn test_add_tags_to_existing_tags() -> Result<(), Box> { + let tmp_dir = TempDir::new()?; + let file = create_test_file(&tmp_dir, "test -- existing.txt")?; + + // Verify file was created + let initial_path = tmp_dir.path().join("test -- existing.txt"); + assert!(initial_path.exists(), "Initial test file was not created"); + + add_tags_to_file(&file, &vec!["new".to_string()])?; + + // Verify original is gone and new exists + assert!(!initial_path.exists(), "Original file still exists after rename"); + let new_name = tmp_dir.path().join("test -- existing new.txt"); + assert!(new_name.exists(), "Tagged file was not created"); + Ok(()) + } + + #[test] + fn test_add_duplicate_tags() -> Result<(), Box> { + let tmp_dir = TempDir::new()?; + let file = create_test_file(&tmp_dir, "test -- tag1.txt")?; + + // Verify file was created + let initial_path = tmp_dir.path().join("test -- tag1.txt"); + assert!(initial_path.exists(), "Initial test file was not created"); + + add_tags_to_file(&file, &vec!["tag1".to_string()])?; + + // Original should still exist since no change was needed + assert!(initial_path.exists(), "Original file should still exist"); + Ok(()) + } + + #[test] + fn test_add_tags_nested_path() -> Result<(), Box> { + let tmp_dir = TempDir::new()?; + let file = create_test_file(&tmp_dir, "nested/path/test.txt")?; + + // Verify file was created + let initial_path = tmp_dir.path().join("nested/path/test.txt"); + assert!(initial_path.exists(), "Initial test file was not created"); + + add_tags_to_file(&file, &vec!["tag1".to_string()])?; + + // Verify original is gone and new exists in same directory + assert!(!initial_path.exists(), "Original file still exists after rename"); + let new_path = tmp_dir.path().join("nested/path/test -- tag1.txt"); + assert!(new_path.exists(), "Tagged file was not created in original directory"); + Ok(()) } } diff --git a/src/tag_engine.rs b/src/tag_engine.rs index 327fa09..cb956a2 100644 --- a/src/tag_engine.rs +++ b/src/tag_engine.rs @@ -34,7 +34,13 @@ pub fn validate_tag(tag: 
&str) -> Result<(), TagError> { pub const TAG_DELIMITER: &str = " -- "; pub fn parse_tags(filename: &str) -> Result<(String, Vec, String), ParseError> { - let parts: Vec<&str> = filename.split(TAG_DELIMITER).collect(); + // Get the file name without the path + let file_name = std::path::Path::new(filename) + .file_name() + .map(|s| s.to_string_lossy().into_owned()) + .unwrap_or_else(|| filename.to_string()); + + let parts: Vec<&str> = file_name.split(TAG_DELIMITER).collect(); if parts.len() > 2 { return Err(ParseError::MultipleDelimiters); @@ -42,24 +48,30 @@ pub fn parse_tags(filename: &str) -> Result<(String, Vec, String), Parse // Split the first part into base and extension let base_parts: Vec<&str> = parts[0].rsplitn(2, '.').collect(); - let (base_name, extension) = match base_parts.len() { - 2 => (base_parts[1].to_string(), format!(".{}", base_parts[0])), - _ => (parts[0].to_string(), String::new()), + let mut extension = match base_parts.len() { + 2 => format!(".{}", base_parts[0]), + _ => String::new(), }; + let base_name = base_parts.last().unwrap_or(&parts[0]).to_string(); let tags = if parts.len() == 2 { - let tag_part = parts[1]; - let tags: Vec = tag_part - .split_whitespace() - .map(str::to_string) - .collect(); - - // Validate each tag - for tag in &tags { - validate_tag(tag)?; + let mut tag_part = parts[1].to_string(); + + // Check if the last tag contains an extension + if let Some(last_part) = tag_part.split_whitespace().last() { + if let Some(dot_pos) = last_part.rfind('.') { + extension = last_part[dot_pos..].to_string(); + tag_part.truncate(tag_part.len() - extension.len()); + } } - tags + let mut unique_tags = std::collections::HashSet::new(); + for tag in tag_part.split_whitespace() { + validate_tag(tag)?; + unique_tags.insert(tag.to_string()); + } + + unique_tags.into_iter().collect() } else { Vec::new() }; @@ -216,4 +228,20 @@ mod tests { Err(ParseError::InvalidTag(TagError::InvalidChar(':'))) ); } + + #[test] + fn test_parse_tags_with_path() { + let (base, tags, ext) = parse_tags("/tmp/path/to/file.txt -- tag1 tag2").unwrap(); + assert_eq!(base, "file"); + assert_eq!(tags, vec!["tag1", "tag2"]); + assert_eq!(ext, ".txt"); + } + + #[test] + fn test_parse_tags_with_duplicate_tags() { + let (base, tags, ext) = parse_tags("/tmp/.tmpRRop05/test -- tag1 tag1.txt").unwrap(); + assert_eq!(base, "test"); + assert_eq!(tags, vec!["tag1"]); + assert_eq!(ext, ".txt"); + } } From 9031ec9af17fce84c5710c631a95ba784053aaf6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 15:53:33 +0100 Subject: [PATCH 14/32] feat: Implement add command with file tag management and error handling --- Cargo.toml | 4 + src/main.rs | 185 +++++++++++++++++++++++++++++++++++++++++----- src/tag_engine.rs | 56 ++++++++++---- 3 files changed, 213 insertions(+), 32 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 5255f6c..d132488 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,3 +6,7 @@ edition = "2021" [dependencies] thiserror = "1.0" clap = { version = "4.4", features = ["derive"] } +fs-err = "2.11" + +[dev-dependencies] +tempfile = "3.8" diff --git a/src/main.rs b/src/main.rs index a44e53c..c290fc9 100644 --- a/src/main.rs +++ b/src/main.rs @@ -2,35 +2,112 @@ mod tag_engine; use std::collections::BTreeSet; use std::error::Error; -use clap::Parser; +use clap::{Parser, Subcommand}; +use fs_err as fs; +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum CommandError { + #[error("Failed to rename {from} to {to}: {source}")] + Rename { + from: String, + to: 
String, + source: std::io::Error, + }, + #[error("Failed to parse tags in {file}: {source}")] + Parse { + file: String, + source: tag_engine::ParseError, + }, +} #[derive(Parser)] #[command(author, version, about, long_about = None)] struct Cli { - /// Files to process - #[arg(required = true)] - files: Vec, + #[command(subcommand)] + command: Commands, } -fn list_tags(files: &[String]) -> Result, Box> { +#[derive(Subcommand)] +enum Commands { + /// List all unique tags + List { + /// Files to process + files: Vec, + }, + /// Add tags to files + Add { + /// Tags to add + #[arg(required = true)] + tags: Vec, + /// Files to process + #[arg(required = true)] + files: Vec, + }, +} + +fn list_tags(files: &[String]) -> Result, CommandError> { let mut unique_tags = BTreeSet::new(); for file in files { - if let Ok((_, tags, _)) = tag_engine::parse_tags(file) { - unique_tags.extend(tags); + match tag_engine::parse_tags(file) { + Ok((_, tags, _)) => unique_tags.extend(tags), + Err(e) => return Err(CommandError::Parse { + file: file.to_string(), + source: e, + }), } } Ok(unique_tags.into_iter().collect()) } +fn add_tags_to_file(file: &str, new_tags: &[String]) -> Result<(), CommandError> { + let (base, current_tags, ext) = tag_engine::parse_tags(file).map_err(|e| CommandError::Parse { + file: file.to_string(), + source: e, + })?; + + let merged_tags = tag_engine::add_tags(current_tags, new_tags.to_vec()); + // Preserve the original directory + let parent = std::path::Path::new(file).parent() + .map(|p| p.to_string_lossy().to_string()) + .unwrap_or_default(); + + let new_filename = tag_engine::serialize_tags(&base, &merged_tags, &ext); + let new_path = if parent.is_empty() { + new_filename + } else { + format!("{}/{}", parent, new_filename) + }; + + // Only rename if the name would actually change + if file != new_path { + fs::rename(file, &new_path).map_err(|e| CommandError::Rename { + from: file.to_string(), + to: new_path, + source: e, + })?; + } + + Ok(()) +} + fn main() -> Result<(), Box> { let cli = Cli::parse(); - let tags = list_tags(&cli.files)?; - - for tag in tags { - println!("{}", tag); + match cli.command { + Commands::List { files } => { + let tags = list_tags(&files)?; + for tag in tags { + println!("{}", tag); + } + } + Commands::Add { tags, files } => { + for file in files { + add_tags_to_file(&file, &tags)?; + } + } } Ok(()) @@ -39,6 +116,18 @@ fn main() -> Result<(), Box> { #[cfg(test)] mod tests { use super::*; + use tempfile::TempDir; + + fn create_test_file(dir: &TempDir, name: &str) -> Result> { + let path = dir.path().join(name); + if let Some(parent) = path.parent() { + fs::create_dir_all(parent)?; + } + fs::write(&path, "")?; + Ok(path.to_str() + .ok_or("Invalid path")? 
+ .to_string()) + } #[test] fn test_list_tags_empty() { @@ -65,12 +154,72 @@ mod tests { } #[test] - fn test_list_tags_with_invalid() { - let files = vec![ - "valid.txt -- good tag1".to_string(), - "invalid.txt -- bad:tag".to_string(), - ]; - let tags = list_tags(&files).unwrap(); - assert_eq!(tags, vec!["good", "tag1"]); + fn test_add_tags_to_file() -> Result<(), Box> { + let tmp_dir = TempDir::new()?; + let file = create_test_file(&tmp_dir, "test.txt")?; + + // Verify file was created + let initial_path = tmp_dir.path().join("test.txt"); + assert!(initial_path.exists(), "Initial test file was not created"); + + add_tags_to_file(&file, &vec!["tag1".to_string(), "tag2".to_string()])?; + + // Verify original is gone and new exists + assert!(!initial_path.exists(), "Original file still exists after rename"); + let new_path = tmp_dir.path().join("test -- tag1 tag2.txt"); + assert!(new_path.exists(), "Tagged file was not created"); + Ok(()) + } + + #[test] + fn test_add_tags_to_existing_tags() -> Result<(), Box> { + let tmp_dir = TempDir::new()?; + let file = create_test_file(&tmp_dir, "test -- existing.txt")?; + + // Verify file was created + let initial_path = tmp_dir.path().join("test -- existing.txt"); + assert!(initial_path.exists(), "Initial test file was not created"); + + add_tags_to_file(&file, &vec!["new".to_string()])?; + + // Verify original is gone and new exists + assert!(!initial_path.exists(), "Original file still exists after rename"); + let new_name = tmp_dir.path().join("test -- existing new.txt"); + assert!(new_name.exists(), "Tagged file was not created"); + Ok(()) + } + + #[test] + fn test_add_duplicate_tags() -> Result<(), Box> { + let tmp_dir = TempDir::new()?; + let file = create_test_file(&tmp_dir, "test -- tag1.txt")?; + + // Verify file was created + let initial_path = tmp_dir.path().join("test -- tag1.txt"); + assert!(initial_path.exists(), "Initial test file was not created"); + + add_tags_to_file(&file, &vec!["tag1".to_string()])?; + + // Original should still exist since no change was needed + assert!(initial_path.exists(), "Original file should still exist"); + Ok(()) + } + + #[test] + fn test_add_tags_nested_path() -> Result<(), Box> { + let tmp_dir = TempDir::new()?; + let file = create_test_file(&tmp_dir, "nested/path/test.txt")?; + + // Verify file was created + let initial_path = tmp_dir.path().join("nested/path/test.txt"); + assert!(initial_path.exists(), "Initial test file was not created"); + + add_tags_to_file(&file, &vec!["tag1".to_string()])?; + + // Verify original is gone and new exists in same directory + assert!(!initial_path.exists(), "Original file still exists after rename"); + let new_path = tmp_dir.path().join("nested/path/test -- tag1.txt"); + assert!(new_path.exists(), "Tagged file was not created in original directory"); + Ok(()) } } diff --git a/src/tag_engine.rs b/src/tag_engine.rs index 327fa09..cb956a2 100644 --- a/src/tag_engine.rs +++ b/src/tag_engine.rs @@ -34,7 +34,13 @@ pub fn validate_tag(tag: &str) -> Result<(), TagError> { pub const TAG_DELIMITER: &str = " -- "; pub fn parse_tags(filename: &str) -> Result<(String, Vec, String), ParseError> { - let parts: Vec<&str> = filename.split(TAG_DELIMITER).collect(); + // Get the file name without the path + let file_name = std::path::Path::new(filename) + .file_name() + .map(|s| s.to_string_lossy().into_owned()) + .unwrap_or_else(|| filename.to_string()); + + let parts: Vec<&str> = file_name.split(TAG_DELIMITER).collect(); if parts.len() > 2 { return Err(ParseError::MultipleDelimiters); 
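A minimal usage sketch of what these parse_tags hunks implement (illustrative only, not part of the patch series; the path and tag below are invented, and the assertions mirror the unit tests added later in this patch):

    let (base, tags, ext) = parse_tags("/tmp/inbox/report -- draft draft.txt").unwrap();
    assert_eq!(base, "report");      // directory components are stripped via Path::file_name
    assert_eq!(tags, vec!["draft"]); // duplicate tags collapse to a single entry
    assert_eq!(ext, ".txt");         // the extension is recovered from the trailing tag
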
@@ -42,24 +48,30 @@ pub fn parse_tags(filename: &str) -> Result<(String, Vec, String), Parse // Split the first part into base and extension let base_parts: Vec<&str> = parts[0].rsplitn(2, '.').collect(); - let (base_name, extension) = match base_parts.len() { - 2 => (base_parts[1].to_string(), format!(".{}", base_parts[0])), - _ => (parts[0].to_string(), String::new()), + let mut extension = match base_parts.len() { + 2 => format!(".{}", base_parts[0]), + _ => String::new(), }; + let base_name = base_parts.last().unwrap_or(&parts[0]).to_string(); let tags = if parts.len() == 2 { - let tag_part = parts[1]; - let tags: Vec = tag_part - .split_whitespace() - .map(str::to_string) - .collect(); - - // Validate each tag - for tag in &tags { - validate_tag(tag)?; + let mut tag_part = parts[1].to_string(); + + // Check if the last tag contains an extension + if let Some(last_part) = tag_part.split_whitespace().last() { + if let Some(dot_pos) = last_part.rfind('.') { + extension = last_part[dot_pos..].to_string(); + tag_part.truncate(tag_part.len() - extension.len()); + } } - tags + let mut unique_tags = std::collections::HashSet::new(); + for tag in tag_part.split_whitespace() { + validate_tag(tag)?; + unique_tags.insert(tag.to_string()); + } + + unique_tags.into_iter().collect() } else { Vec::new() }; @@ -216,4 +228,20 @@ mod tests { Err(ParseError::InvalidTag(TagError::InvalidChar(':'))) ); } + + #[test] + fn test_parse_tags_with_path() { + let (base, tags, ext) = parse_tags("/tmp/path/to/file.txt -- tag1 tag2").unwrap(); + assert_eq!(base, "file"); + assert_eq!(tags, vec!["tag1", "tag2"]); + assert_eq!(ext, ".txt"); + } + + #[test] + fn test_parse_tags_with_duplicate_tags() { + let (base, tags, ext) = parse_tags("/tmp/.tmpRRop05/test -- tag1 tag1.txt").unwrap(); + assert_eq!(base, "test"); + assert_eq!(tags, vec!["tag1"]); + assert_eq!(ext, ".txt"); + } } From d499152616f303f7490dce88d5bf8ba93acf7bf7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 16:29:15 +0100 Subject: [PATCH 15/32] feat: Update Add command to use --tag flag and sort tags alphabetically --- src/main.rs | 3 +-- src/tag_engine.rs | 27 ++++++++++++++++++++------- 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/src/main.rs b/src/main.rs index c290fc9..c895f46 100644 --- a/src/main.rs +++ b/src/main.rs @@ -38,10 +38,9 @@ enum Commands { /// Add tags to files Add { /// Tags to add - #[arg(required = true)] + #[arg(long = "tag", required = true)] tags: Vec, /// Files to process - #[arg(required = true)] files: Vec, }, } diff --git a/src/tag_engine.rs b/src/tag_engine.rs index cb956a2..7942ec8 100644 --- a/src/tag_engine.rs +++ b/src/tag_engine.rs @@ -65,13 +65,26 @@ pub fn parse_tags(filename: &str) -> Result<(String, Vec, String), Parse } } - let mut unique_tags = std::collections::HashSet::new(); - for tag in tag_part.split_whitespace() { - validate_tag(tag)?; - unique_tags.insert(tag.to_string()); - } - - unique_tags.into_iter().collect() + // First parse all tags + let parsed_tags: Vec = tag_part + .split_whitespace() + .map(|tag| { + validate_tag(tag)?; + Ok(tag.to_string()) + }) + .collect::>()?; + + // Then filter duplicates using a HashSet + let unique_tags: Vec = parsed_tags + .into_iter() + .collect::>() + .into_iter() + .collect(); + + // Finally sort + let mut tags = unique_tags; + tags.sort(); + tags } else { Vec::new() }; From 64d3761de9915164292679dc64cd2fed3bad8c2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 16:29:15 +0100 
Subject: [PATCH 16/32] feat: Update Add command to use --tag flag and sort tags alphabetically --- src/main.rs | 3 +-- src/tag_engine.rs | 27 ++++++++++++++++++++------- 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/src/main.rs b/src/main.rs index c290fc9..c895f46 100644 --- a/src/main.rs +++ b/src/main.rs @@ -38,10 +38,9 @@ enum Commands { /// Add tags to files Add { /// Tags to add - #[arg(required = true)] + #[arg(long = "tag", required = true)] tags: Vec, /// Files to process - #[arg(required = true)] files: Vec, }, } diff --git a/src/tag_engine.rs b/src/tag_engine.rs index cb956a2..7942ec8 100644 --- a/src/tag_engine.rs +++ b/src/tag_engine.rs @@ -65,13 +65,26 @@ pub fn parse_tags(filename: &str) -> Result<(String, Vec, String), Parse } } - let mut unique_tags = std::collections::HashSet::new(); - for tag in tag_part.split_whitespace() { - validate_tag(tag)?; - unique_tags.insert(tag.to_string()); - } - - unique_tags.into_iter().collect() + // First parse all tags + let parsed_tags: Vec = tag_part + .split_whitespace() + .map(|tag| { + validate_tag(tag)?; + Ok(tag.to_string()) + }) + .collect::>()?; + + // Then filter duplicates using a HashSet + let unique_tags: Vec = parsed_tags + .into_iter() + .collect::>() + .into_iter() + .collect(); + + // Finally sort + let mut tags = unique_tags; + tags.sort(); + tags } else { Vec::new() }; From 4a9575976e3b70109465dbd7166c6ab5d1516bdc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 16:32:42 +0100 Subject: [PATCH 17/32] feat: Implement filter_tags function with comprehensive unit tests --- src/tag_engine.rs | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/src/tag_engine.rs b/src/tag_engine.rs index 7942ec8..8b03a4d 100644 --- a/src/tag_engine.rs +++ b/src/tag_engine.rs @@ -92,6 +92,12 @@ pub fn parse_tags(filename: &str) -> Result<(String, Vec, String), Parse Ok((base_name, tags, extension)) } +pub fn filter_tags(current: Vec, remove: &[String]) -> Vec { + current.into_iter() + .filter(|tag| !remove.contains(tag)) + .collect() +} + pub fn add_tags(current: Vec, new: Vec) -> Vec { let mut result = current; @@ -257,4 +263,28 @@ mod tests { assert_eq!(tags, vec!["tag1"]); assert_eq!(ext, ".txt"); } + + #[test] + fn test_filter_tags_multi_remove() { + let current = vec!["tag1".to_string(), "tag2".to_string(), "tag3".to_string()]; + let remove = vec!["tag1".to_string(), "tag3".to_string()]; + let result = filter_tags(current, &remove); + assert_eq!(result, vec!["tag2"]); + } + + #[test] + fn test_filter_tags_non_existent() { + let current = vec!["tag1".to_string(), "tag2".to_string()]; + let remove = vec!["tag3".to_string()]; + let result = filter_tags(current, &remove); + assert_eq!(result, vec!["tag1", "tag2"]); + } + + #[test] + fn test_filter_tags_empty_result() { + let current = vec!["tag1".to_string(), "tag2".to_string()]; + let remove = vec!["tag1".to_string(), "tag2".to_string()]; + let result = filter_tags(current, &remove); + assert!(result.is_empty()); + } } From cd27308cfa285d532e52b1d04be0d67da8637237 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 16:32:42 +0100 Subject: [PATCH 18/32] feat: Implement filter_tags function with comprehensive unit tests --- src/tag_engine.rs | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/src/tag_engine.rs b/src/tag_engine.rs index 7942ec8..8b03a4d 100644 --- a/src/tag_engine.rs +++ b/src/tag_engine.rs @@ -92,6 +92,12 @@ pub fn 
parse_tags(filename: &str) -> Result<(String, Vec, String), Parse Ok((base_name, tags, extension)) } +pub fn filter_tags(current: Vec, remove: &[String]) -> Vec { + current.into_iter() + .filter(|tag| !remove.contains(tag)) + .collect() +} + pub fn add_tags(current: Vec, new: Vec) -> Vec { let mut result = current; @@ -257,4 +263,28 @@ mod tests { assert_eq!(tags, vec!["tag1"]); assert_eq!(ext, ".txt"); } + + #[test] + fn test_filter_tags_multi_remove() { + let current = vec!["tag1".to_string(), "tag2".to_string(), "tag3".to_string()]; + let remove = vec!["tag1".to_string(), "tag3".to_string()]; + let result = filter_tags(current, &remove); + assert_eq!(result, vec!["tag2"]); + } + + #[test] + fn test_filter_tags_non_existent() { + let current = vec!["tag1".to_string(), "tag2".to_string()]; + let remove = vec!["tag3".to_string()]; + let result = filter_tags(current, &remove); + assert_eq!(result, vec!["tag1", "tag2"]); + } + + #[test] + fn test_filter_tags_empty_result() { + let current = vec!["tag1".to_string(), "tag2".to_string()]; + let remove = vec!["tag1".to_string(), "tag2".to_string()]; + let result = filter_tags(current, &remove); + assert!(result.is_empty()); + } } From f12671c70741db283891c85479567751e357642b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 16:33:14 +0100 Subject: [PATCH 19/32] feat: Implement tag combination generation with depth and order constraints --- src/tag_engine.rs | 76 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 76 insertions(+) diff --git a/src/tag_engine.rs b/src/tag_engine.rs index 8b03a4d..b3aee70 100644 --- a/src/tag_engine.rs +++ b/src/tag_engine.rs @@ -92,6 +92,42 @@ pub fn parse_tags(filename: &str) -> Result<(String, Vec, String), Parse Ok((base_name, tags, extension)) } +pub fn create_tag_combinations(tags: &[String], depth: usize) -> Vec> { + let mut result = Vec::new(); + + // Handle empty tags or depth 0 + if tags.is_empty() || depth == 0 { + return result; + } + + // Add individual tags first + for tag in tags { + result.push(vec![tag.clone()]); + } + + // Generate combinations up to specified depth + for len in 2..=depth.min(tags.len()) { + let mut temp = Vec::new(); + + // Start with existing combinations of length-1 + for combo in result.iter().filter(|c| c.len() == len - 1) { + // Try to add each remaining tag that comes after the last tag in combo + if let Some(last) = combo.last() { + for tag in tags { + if tag > last && !combo.contains(tag) { + let mut new_combo = combo.clone(); + new_combo.push(tag.clone()); + temp.push(new_combo); + } + } + } + } + result.extend(temp); + } + + result +} + pub fn filter_tags(current: Vec, remove: &[String]) -> Vec { current.into_iter() .filter(|tag| !remove.contains(tag)) @@ -287,4 +323,44 @@ mod tests { let result = filter_tags(current, &remove); assert!(result.is_empty()); } + + #[test] + fn test_create_tag_combinations_empty() { + let tags: Vec = vec![]; + let result = create_tag_combinations(&tags, 2); + assert!(result.is_empty()); + } + + #[test] + fn test_create_tag_combinations_depth_limit() { + let tags = vec!["tag1".to_string(), "tag2".to_string(), "tag3".to_string()]; + let result = create_tag_combinations(&tags, 2); + + // Should contain individual tags and pairs, but no triples + assert!(result.iter().all(|combo| combo.len() <= 2)); + assert_eq!(result.len(), 6); // 3 individual + 3 pairs + } + + #[test] + fn test_create_tag_combinations_order() { + let tags = vec!["tag2".to_string(), "tag1".to_string(), "tag3".to_string()]; + let 
result = create_tag_combinations(&tags, 2); + + // Check that all combinations maintain alphabetical order + for combo in result { + if combo.len() > 1 { + assert!(combo.windows(2).all(|w| w[0] < w[1])); + } + } + } + + #[test] + fn test_create_tag_combinations_uniqueness() { + let tags = vec!["tag1".to_string(), "tag2".to_string(), "tag3".to_string()]; + let result = create_tag_combinations(&tags, 3); + + // Convert to set to check for duplicates + let result_set: std::collections::HashSet<_> = result.into_iter().collect(); + assert_eq!(result_set.len(), 7); // 3 individual + 3 pairs + 1 triple + } } From 2809b6e8187f6271c1b1873b455d31763a2ed157 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 16:33:14 +0100 Subject: [PATCH 20/32] feat: Implement tag combination generation with depth and order constraints --- src/tag_engine.rs | 76 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 76 insertions(+) diff --git a/src/tag_engine.rs b/src/tag_engine.rs index 8b03a4d..b3aee70 100644 --- a/src/tag_engine.rs +++ b/src/tag_engine.rs @@ -92,6 +92,42 @@ pub fn parse_tags(filename: &str) -> Result<(String, Vec, String), Parse Ok((base_name, tags, extension)) } +pub fn create_tag_combinations(tags: &[String], depth: usize) -> Vec> { + let mut result = Vec::new(); + + // Handle empty tags or depth 0 + if tags.is_empty() || depth == 0 { + return result; + } + + // Add individual tags first + for tag in tags { + result.push(vec![tag.clone()]); + } + + // Generate combinations up to specified depth + for len in 2..=depth.min(tags.len()) { + let mut temp = Vec::new(); + + // Start with existing combinations of length-1 + for combo in result.iter().filter(|c| c.len() == len - 1) { + // Try to add each remaining tag that comes after the last tag in combo + if let Some(last) = combo.last() { + for tag in tags { + if tag > last && !combo.contains(tag) { + let mut new_combo = combo.clone(); + new_combo.push(tag.clone()); + temp.push(new_combo); + } + } + } + } + result.extend(temp); + } + + result +} + pub fn filter_tags(current: Vec, remove: &[String]) -> Vec { current.into_iter() .filter(|tag| !remove.contains(tag)) @@ -287,4 +323,44 @@ mod tests { let result = filter_tags(current, &remove); assert!(result.is_empty()); } + + #[test] + fn test_create_tag_combinations_empty() { + let tags: Vec = vec![]; + let result = create_tag_combinations(&tags, 2); + assert!(result.is_empty()); + } + + #[test] + fn test_create_tag_combinations_depth_limit() { + let tags = vec!["tag1".to_string(), "tag2".to_string(), "tag3".to_string()]; + let result = create_tag_combinations(&tags, 2); + + // Should contain individual tags and pairs, but no triples + assert!(result.iter().all(|combo| combo.len() <= 2)); + assert_eq!(result.len(), 6); // 3 individual + 3 pairs + } + + #[test] + fn test_create_tag_combinations_order() { + let tags = vec!["tag2".to_string(), "tag1".to_string(), "tag3".to_string()]; + let result = create_tag_combinations(&tags, 2); + + // Check that all combinations maintain alphabetical order + for combo in result { + if combo.len() > 1 { + assert!(combo.windows(2).all(|w| w[0] < w[1])); + } + } + } + + #[test] + fn test_create_tag_combinations_uniqueness() { + let tags = vec!["tag1".to_string(), "tag2".to_string(), "tag3".to_string()]; + let result = create_tag_combinations(&tags, 3); + + // Convert to set to check for duplicates + let result_set: std::collections::HashSet<_> = result.into_iter().collect(); + assert_eq!(result_set.len(), 7); // 3 individual + 
3 pairs + 1 triple + } } From 840ffc111c727e1be332cf31a8e8c3c747f42bb5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 16:34:16 +0100 Subject: [PATCH 21/32] feat: Add cross-platform symlink helper module with robust error handling --- src/main.rs | 1 + src/symlink.rs | 134 +++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 135 insertions(+) create mode 100644 src/symlink.rs diff --git a/src/main.rs b/src/main.rs index c895f46..229d353 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,4 +1,5 @@ mod tag_engine; +mod symlink; use std::collections::BTreeSet; use std::error::Error; diff --git a/src/symlink.rs b/src/symlink.rs new file mode 100644 index 0000000..6816c15 --- /dev/null +++ b/src/symlink.rs @@ -0,0 +1,134 @@ +use std::path::{Path, PathBuf}; +use thiserror::Error; +use fs_err as fs; + +#[derive(Error, Debug)] +pub enum SymlinkError { + #[error("Failed to create directory {path}: {source}")] + CreateDir { + path: String, + source: std::io::Error, + }, + #[error("Failed to create symlink from {from} to {to}: {source}")] + CreateLink { + from: String, + to: String, + source: std::io::Error, + }, + #[error("Invalid path: {0}")] + InvalidPath(String), +} + +pub fn create_symlink_tree(paths: Vec, target_dir: &Path) -> Result<(), SymlinkError> { + for path in paths { + // Ensure path is absolute and clean + let abs_path = fs::canonicalize(&path).map_err(|_| { + SymlinkError::InvalidPath(path.to_string_lossy().into_owned()) + })?; + + // Get the file name for the symlink + let file_name = abs_path.file_name().ok_or_else(|| { + SymlinkError::InvalidPath(abs_path.to_string_lossy().into_owned()) + })?; + + // Create target directory if it doesn't exist + fs::create_dir_all(target_dir).map_err(|e| SymlinkError::CreateDir { + path: target_dir.to_string_lossy().into_owned(), + source: e, + })?; + + // Create the symlink + let link_path = target_dir.join(file_name); + #[cfg(unix)] + std::os::unix::fs::symlink(&abs_path, &link_path).map_err(|e| SymlinkError::CreateLink { + from: abs_path.to_string_lossy().into_owned(), + to: link_path.to_string_lossy().into_owned(), + source: e, + })?; + + #[cfg(windows)] + std::os::windows::fs::symlink_file(&abs_path, &link_path).map_err(|e| SymlinkError::CreateLink { + from: abs_path.to_string_lossy().into_owned(), + to: link_path.to_string_lossy().into_owned(), + source: e, + })?; + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::TempDir; + + #[test] + fn test_create_symlink_tree_basic() -> Result<(), Box> { + let source_dir = TempDir::new()?; + let target_dir = TempDir::new()?; + + // Create a test file + let test_file = source_dir.path().join("test.txt"); + fs::write(&test_file, "test content")?; + + // Create symlink tree + create_symlink_tree( + vec![test_file.clone()], + target_dir.path() + )?; + + // Verify symlink exists and points to correct file + let symlink = target_dir.path().join("test.txt"); + assert!(symlink.exists()); + assert!(symlink.is_symlink()); + + #[cfg(unix)] + { + use std::os::unix::fs::MetadataExt; + assert_eq!( + fs::metadata(&test_file)?.ino(), + fs::metadata(&symlink)?.ino() + ); + } + + Ok(()) + } + + #[test] + fn test_create_symlink_tree_nested() -> Result<(), Box> { + let source_dir = TempDir::new()?; + let target_dir = TempDir::new()?; + + // Create nested test file + let nested_dir = source_dir.path().join("nested"); + fs::create_dir_all(&nested_dir)?; + let test_file = nested_dir.join("test.txt"); + fs::write(&test_file, "test content")?; + + // 
Create symlink tree + create_symlink_tree( + vec![test_file], + &target_dir.path().join("nested") + )?; + + // Verify directory and symlink were created + let symlink = target_dir.path().join("nested/test.txt"); + assert!(symlink.exists()); + assert!(symlink.is_symlink()); + + Ok(()) + } + + #[test] + fn test_create_symlink_tree_invalid_path() { + let target_dir = TempDir::new().unwrap(); + + // Try to create symlink with non-existent source + let result = create_symlink_tree( + vec![PathBuf::from("/nonexistent/path")], + target_dir.path() + ); + + assert!(matches!(result, Err(SymlinkError::InvalidPath(_)))); + } +} From aa18a35f8d35064450920da67a90c70cc536c45a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 16:34:16 +0100 Subject: [PATCH 22/32] feat: Add cross-platform symlink helper module with robust error handling --- src/main.rs | 1 + src/symlink.rs | 134 +++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 135 insertions(+) create mode 100644 src/symlink.rs diff --git a/src/main.rs b/src/main.rs index c895f46..229d353 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,4 +1,5 @@ mod tag_engine; +mod symlink; use std::collections::BTreeSet; use std::error::Error; diff --git a/src/symlink.rs b/src/symlink.rs new file mode 100644 index 0000000..6816c15 --- /dev/null +++ b/src/symlink.rs @@ -0,0 +1,134 @@ +use std::path::{Path, PathBuf}; +use thiserror::Error; +use fs_err as fs; + +#[derive(Error, Debug)] +pub enum SymlinkError { + #[error("Failed to create directory {path}: {source}")] + CreateDir { + path: String, + source: std::io::Error, + }, + #[error("Failed to create symlink from {from} to {to}: {source}")] + CreateLink { + from: String, + to: String, + source: std::io::Error, + }, + #[error("Invalid path: {0}")] + InvalidPath(String), +} + +pub fn create_symlink_tree(paths: Vec, target_dir: &Path) -> Result<(), SymlinkError> { + for path in paths { + // Ensure path is absolute and clean + let abs_path = fs::canonicalize(&path).map_err(|_| { + SymlinkError::InvalidPath(path.to_string_lossy().into_owned()) + })?; + + // Get the file name for the symlink + let file_name = abs_path.file_name().ok_or_else(|| { + SymlinkError::InvalidPath(abs_path.to_string_lossy().into_owned()) + })?; + + // Create target directory if it doesn't exist + fs::create_dir_all(target_dir).map_err(|e| SymlinkError::CreateDir { + path: target_dir.to_string_lossy().into_owned(), + source: e, + })?; + + // Create the symlink + let link_path = target_dir.join(file_name); + #[cfg(unix)] + std::os::unix::fs::symlink(&abs_path, &link_path).map_err(|e| SymlinkError::CreateLink { + from: abs_path.to_string_lossy().into_owned(), + to: link_path.to_string_lossy().into_owned(), + source: e, + })?; + + #[cfg(windows)] + std::os::windows::fs::symlink_file(&abs_path, &link_path).map_err(|e| SymlinkError::CreateLink { + from: abs_path.to_string_lossy().into_owned(), + to: link_path.to_string_lossy().into_owned(), + source: e, + })?; + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::TempDir; + + #[test] + fn test_create_symlink_tree_basic() -> Result<(), Box> { + let source_dir = TempDir::new()?; + let target_dir = TempDir::new()?; + + // Create a test file + let test_file = source_dir.path().join("test.txt"); + fs::write(&test_file, "test content")?; + + // Create symlink tree + create_symlink_tree( + vec![test_file.clone()], + target_dir.path() + )?; + + // Verify symlink exists and points to correct file + let symlink = 
target_dir.path().join("test.txt"); + assert!(symlink.exists()); + assert!(symlink.is_symlink()); + + #[cfg(unix)] + { + use std::os::unix::fs::MetadataExt; + assert_eq!( + fs::metadata(&test_file)?.ino(), + fs::metadata(&symlink)?.ino() + ); + } + + Ok(()) + } + + #[test] + fn test_create_symlink_tree_nested() -> Result<(), Box> { + let source_dir = TempDir::new()?; + let target_dir = TempDir::new()?; + + // Create nested test file + let nested_dir = source_dir.path().join("nested"); + fs::create_dir_all(&nested_dir)?; + let test_file = nested_dir.join("test.txt"); + fs::write(&test_file, "test content")?; + + // Create symlink tree + create_symlink_tree( + vec![test_file], + &target_dir.path().join("nested") + )?; + + // Verify directory and symlink were created + let symlink = target_dir.path().join("nested/test.txt"); + assert!(symlink.exists()); + assert!(symlink.is_symlink()); + + Ok(()) + } + + #[test] + fn test_create_symlink_tree_invalid_path() { + let target_dir = TempDir::new().unwrap(); + + // Try to create symlink with non-existent source + let result = create_symlink_tree( + vec![PathBuf::from("/nonexistent/path")], + target_dir.path() + ); + + assert!(matches!(result, Err(SymlinkError::InvalidPath(_)))); + } +} From ad1a9265bafa099be8357ebbbd1481d5d1111fbd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 16:35:16 +0100 Subject: [PATCH 23/32] feat: Create unified error handling with FileTagsError --- src/error.rs | 94 +++++++++++++++++++++++++++++++++++++++++++++++ src/main.rs | 39 +++++++------------- src/symlink.rs | 43 +++++++--------------- src/tag_engine.rs | 18 +-------- 4 files changed, 121 insertions(+), 73 deletions(-) create mode 100644 src/error.rs diff --git a/src/error.rs b/src/error.rs new file mode 100644 index 0000000..2a30b64 --- /dev/null +++ b/src/error.rs @@ -0,0 +1,94 @@ +use thiserror::Error; +use std::path::PathBuf; + +#[derive(Error, Debug)] +pub enum FileTagsError { + #[error("Failed to create directory {path}: {source}")] + CreateDir { + path: PathBuf, + source: std::io::Error, + }, + + #[error("Failed to create symlink from {from} to {to}: {source}")] + CreateLink { + from: PathBuf, + to: PathBuf, + source: std::io::Error, + }, + + #[error("Invalid path: {0}")] + InvalidPath(PathBuf), + + #[error("Failed to rename {from} to {to}: {source}")] + Rename { + from: PathBuf, + to: PathBuf, + source: std::io::Error, + }, + + #[error("Failed to parse tags in {file}: {source}")] + Parse { + file: PathBuf, + source: ParseError, + }, + + #[error("Tag error: {0}")] + Tag(#[from] TagError), +} + +#[derive(Error, Debug, PartialEq)] +pub enum TagError { + #[error("tag cannot be empty")] + Empty, + #[error("tag contains invalid character: {0}")] + InvalidChar(char), +} + +#[derive(Error, Debug, PartialEq)] +pub enum ParseError { + #[error("multiple tag delimiters found")] + MultipleDelimiters, + #[error("invalid tag: {0}")] + InvalidTag(#[from] TagError), +} + +impl From for FileTagsError { + fn from(err: ParseError) -> Self { + FileTagsError::Parse { + file: PathBuf::from(""), + source: err, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::error::Error; + + #[test] + fn test_error_conversion() { + let tag_err = TagError::Empty; + let parse_err = ParseError::InvalidTag(tag_err); + let file_err = FileTagsError::from(parse_err); + + assert!(matches!(file_err, FileTagsError::Parse { .. 
})); + } + + #[test] + fn test_error_display() { + let err = FileTagsError::InvalidPath(PathBuf::from("/bad/path")); + assert_eq!(err.to_string(), "Invalid path: /bad/path"); + } + + #[test] + fn test_error_source() { + let io_err = std::io::Error::from(std::io::ErrorKind::NotFound); + let err = FileTagsError::CreateDir { + path: PathBuf::from("/test"), + source: io_err, + }; + + assert!(err.source().is_some()); + } +} diff --git a/src/main.rs b/src/main.rs index 229d353..fcb5adf 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,26 +1,13 @@ mod tag_engine; mod symlink; +mod error; use std::collections::BTreeSet; +use std::path::PathBuf; use std::error::Error; use clap::{Parser, Subcommand}; use fs_err as fs; -use thiserror::Error; - -#[derive(Error, Debug)] -pub enum CommandError { - #[error("Failed to rename {from} to {to}: {source}")] - Rename { - from: String, - to: String, - source: std::io::Error, - }, - #[error("Failed to parse tags in {file}: {source}")] - Parse { - file: String, - source: tag_engine::ParseError, - }, -} +use crate::error::{FileTagsError, ParseError}; #[derive(Parser)] #[command(author, version, about, long_about = None)] @@ -46,14 +33,14 @@ enum Commands { }, } -fn list_tags(files: &[String]) -> Result, CommandError> { +fn list_tags(files: &[String]) -> Result, FileTagsError> { let mut unique_tags = BTreeSet::new(); for file in files { match tag_engine::parse_tags(file) { Ok((_, tags, _)) => unique_tags.extend(tags), - Err(e) => return Err(CommandError::Parse { - file: file.to_string(), + Err(e) => return Err(FileTagsError::Parse { + file: PathBuf::from(file), source: e, }), } @@ -62,9 +49,9 @@ fn list_tags(files: &[String]) -> Result, CommandError> { Ok(unique_tags.into_iter().collect()) } -fn add_tags_to_file(file: &str, new_tags: &[String]) -> Result<(), CommandError> { - let (base, current_tags, ext) = tag_engine::parse_tags(file).map_err(|e| CommandError::Parse { - file: file.to_string(), +fn add_tags_to_file(file: &str, new_tags: &[String]) -> Result<(), FileTagsError> { + let (base, current_tags, ext) = tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { + file: PathBuf::from(file), source: e, })?; @@ -83,9 +70,9 @@ fn add_tags_to_file(file: &str, new_tags: &[String]) -> Result<(), CommandError> // Only rename if the name would actually change if file != new_path { - fs::rename(file, &new_path).map_err(|e| CommandError::Rename { - from: file.to_string(), - to: new_path, + fs::rename(file, &new_path).map_err(|e| FileTagsError::Rename { + from: PathBuf::from(file), + to: PathBuf::from(&new_path), source: e, })?; } @@ -93,7 +80,7 @@ fn add_tags_to_file(file: &str, new_tags: &[String]) -> Result<(), CommandError> Ok(()) } -fn main() -> Result<(), Box> { +fn main() -> Result<(), FileTagsError> { let cli = Cli::parse(); match cli.command { diff --git a/src/symlink.rs b/src/symlink.rs index 6816c15..1ab47fa 100644 --- a/src/symlink.rs +++ b/src/symlink.rs @@ -1,55 +1,38 @@ use std::path::{Path, PathBuf}; -use thiserror::Error; use fs_err as fs; +use crate::error::FileTagsError; -#[derive(Error, Debug)] -pub enum SymlinkError { - #[error("Failed to create directory {path}: {source}")] - CreateDir { - path: String, - source: std::io::Error, - }, - #[error("Failed to create symlink from {from} to {to}: {source}")] - CreateLink { - from: String, - to: String, - source: std::io::Error, - }, - #[error("Invalid path: {0}")] - InvalidPath(String), -} - -pub fn create_symlink_tree(paths: Vec, target_dir: &Path) -> Result<(), SymlinkError> { +pub fn 
create_symlink_tree(paths: Vec, target_dir: &Path) -> Result<(), FileTagsError> { for path in paths { // Ensure path is absolute and clean let abs_path = fs::canonicalize(&path).map_err(|_| { - SymlinkError::InvalidPath(path.to_string_lossy().into_owned()) + FileTagsError::InvalidPath(path.clone()) })?; // Get the file name for the symlink let file_name = abs_path.file_name().ok_or_else(|| { - SymlinkError::InvalidPath(abs_path.to_string_lossy().into_owned()) + FileTagsError::InvalidPath(abs_path.clone()) })?; // Create target directory if it doesn't exist - fs::create_dir_all(target_dir).map_err(|e| SymlinkError::CreateDir { - path: target_dir.to_string_lossy().into_owned(), + fs::create_dir_all(target_dir).map_err(|e| FileTagsError::CreateDir { + path: target_dir.to_path_buf(), source: e, })?; // Create the symlink let link_path = target_dir.join(file_name); #[cfg(unix)] - std::os::unix::fs::symlink(&abs_path, &link_path).map_err(|e| SymlinkError::CreateLink { - from: abs_path.to_string_lossy().into_owned(), - to: link_path.to_string_lossy().into_owned(), + std::os::unix::fs::symlink(&abs_path, &link_path).map_err(|e| FileTagsError::CreateLink { + from: abs_path, + to: link_path, source: e, })?; #[cfg(windows)] - std::os::windows::fs::symlink_file(&abs_path, &link_path).map_err(|e| SymlinkError::CreateLink { - from: abs_path.to_string_lossy().into_owned(), - to: link_path.to_string_lossy().into_owned(), + std::os::windows::fs::symlink_file(&abs_path, &link_path).map_err(|e| FileTagsError::CreateLink { + from: abs_path, + to: link_path, source: e, })?; } @@ -129,6 +112,6 @@ mod tests { target_dir.path() ); - assert!(matches!(result, Err(SymlinkError::InvalidPath(_)))); + assert!(matches!(result, Err(FileTagsError::InvalidPath(_)))); } } diff --git a/src/tag_engine.rs b/src/tag_engine.rs index b3aee70..89503ff 100644 --- a/src/tag_engine.rs +++ b/src/tag_engine.rs @@ -1,20 +1,4 @@ -use thiserror::Error; - -#[derive(Error, Debug, PartialEq)] -pub enum TagError { - #[error("tag cannot be empty")] - Empty, - #[error("tag contains invalid character: {0}")] - InvalidChar(char), -} - -#[derive(Error, Debug, PartialEq)] -pub enum ParseError { - #[error("multiple tag delimiters found")] - MultipleDelimiters, - #[error("invalid tag: {0}")] - InvalidTag(#[from] TagError), -} +use crate::error::{ParseError, TagError}; pub fn validate_tag(tag: &str) -> Result<(), TagError> { if tag.is_empty() { From 4517fa34f82d8624922cc691c0594188ba34d751 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 16:35:16 +0100 Subject: [PATCH 24/32] feat: Create unified error handling with FileTagsError --- src/error.rs | 94 +++++++++++++++++++++++++++++++++++++++++++++++ src/main.rs | 39 +++++++------------- src/symlink.rs | 43 +++++++--------------- src/tag_engine.rs | 18 +-------- 4 files changed, 121 insertions(+), 73 deletions(-) create mode 100644 src/error.rs diff --git a/src/error.rs b/src/error.rs new file mode 100644 index 0000000..2a30b64 --- /dev/null +++ b/src/error.rs @@ -0,0 +1,94 @@ +use thiserror::Error; +use std::path::PathBuf; + +#[derive(Error, Debug)] +pub enum FileTagsError { + #[error("Failed to create directory {path}: {source}")] + CreateDir { + path: PathBuf, + source: std::io::Error, + }, + + #[error("Failed to create symlink from {from} to {to}: {source}")] + CreateLink { + from: PathBuf, + to: PathBuf, + source: std::io::Error, + }, + + #[error("Invalid path: {0}")] + InvalidPath(PathBuf), + + #[error("Failed to rename {from} to {to}: {source}")] + Rename { + 
from: PathBuf, + to: PathBuf, + source: std::io::Error, + }, + + #[error("Failed to parse tags in {file}: {source}")] + Parse { + file: PathBuf, + source: ParseError, + }, + + #[error("Tag error: {0}")] + Tag(#[from] TagError), +} + +#[derive(Error, Debug, PartialEq)] +pub enum TagError { + #[error("tag cannot be empty")] + Empty, + #[error("tag contains invalid character: {0}")] + InvalidChar(char), +} + +#[derive(Error, Debug, PartialEq)] +pub enum ParseError { + #[error("multiple tag delimiters found")] + MultipleDelimiters, + #[error("invalid tag: {0}")] + InvalidTag(#[from] TagError), +} + +impl From for FileTagsError { + fn from(err: ParseError) -> Self { + FileTagsError::Parse { + file: PathBuf::from(""), + source: err, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::error::Error; + + #[test] + fn test_error_conversion() { + let tag_err = TagError::Empty; + let parse_err = ParseError::InvalidTag(tag_err); + let file_err = FileTagsError::from(parse_err); + + assert!(matches!(file_err, FileTagsError::Parse { .. })); + } + + #[test] + fn test_error_display() { + let err = FileTagsError::InvalidPath(PathBuf::from("/bad/path")); + assert_eq!(err.to_string(), "Invalid path: /bad/path"); + } + + #[test] + fn test_error_source() { + let io_err = std::io::Error::from(std::io::ErrorKind::NotFound); + let err = FileTagsError::CreateDir { + path: PathBuf::from("/test"), + source: io_err, + }; + + assert!(err.source().is_some()); + } +} diff --git a/src/main.rs b/src/main.rs index 229d353..fcb5adf 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,26 +1,13 @@ mod tag_engine; mod symlink; +mod error; use std::collections::BTreeSet; +use std::path::PathBuf; use std::error::Error; use clap::{Parser, Subcommand}; use fs_err as fs; -use thiserror::Error; - -#[derive(Error, Debug)] -pub enum CommandError { - #[error("Failed to rename {from} to {to}: {source}")] - Rename { - from: String, - to: String, - source: std::io::Error, - }, - #[error("Failed to parse tags in {file}: {source}")] - Parse { - file: String, - source: tag_engine::ParseError, - }, -} +use crate::error::{FileTagsError, ParseError}; #[derive(Parser)] #[command(author, version, about, long_about = None)] @@ -46,14 +33,14 @@ enum Commands { }, } -fn list_tags(files: &[String]) -> Result, CommandError> { +fn list_tags(files: &[String]) -> Result, FileTagsError> { let mut unique_tags = BTreeSet::new(); for file in files { match tag_engine::parse_tags(file) { Ok((_, tags, _)) => unique_tags.extend(tags), - Err(e) => return Err(CommandError::Parse { - file: file.to_string(), + Err(e) => return Err(FileTagsError::Parse { + file: PathBuf::from(file), source: e, }), } @@ -62,9 +49,9 @@ fn list_tags(files: &[String]) -> Result, CommandError> { Ok(unique_tags.into_iter().collect()) } -fn add_tags_to_file(file: &str, new_tags: &[String]) -> Result<(), CommandError> { - let (base, current_tags, ext) = tag_engine::parse_tags(file).map_err(|e| CommandError::Parse { - file: file.to_string(), +fn add_tags_to_file(file: &str, new_tags: &[String]) -> Result<(), FileTagsError> { + let (base, current_tags, ext) = tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { + file: PathBuf::from(file), source: e, })?; @@ -83,9 +70,9 @@ fn add_tags_to_file(file: &str, new_tags: &[String]) -> Result<(), CommandError> // Only rename if the name would actually change if file != new_path { - fs::rename(file, &new_path).map_err(|e| CommandError::Rename { - from: file.to_string(), - to: new_path, + fs::rename(file, &new_path).map_err(|e| 
FileTagsError::Rename { + from: PathBuf::from(file), + to: PathBuf::from(&new_path), source: e, })?; } @@ -93,7 +80,7 @@ fn add_tags_to_file(file: &str, new_tags: &[String]) -> Result<(), CommandError> Ok(()) } -fn main() -> Result<(), Box> { +fn main() -> Result<(), FileTagsError> { let cli = Cli::parse(); match cli.command { diff --git a/src/symlink.rs b/src/symlink.rs index 6816c15..1ab47fa 100644 --- a/src/symlink.rs +++ b/src/symlink.rs @@ -1,55 +1,38 @@ use std::path::{Path, PathBuf}; -use thiserror::Error; use fs_err as fs; +use crate::error::FileTagsError; -#[derive(Error, Debug)] -pub enum SymlinkError { - #[error("Failed to create directory {path}: {source}")] - CreateDir { - path: String, - source: std::io::Error, - }, - #[error("Failed to create symlink from {from} to {to}: {source}")] - CreateLink { - from: String, - to: String, - source: std::io::Error, - }, - #[error("Invalid path: {0}")] - InvalidPath(String), -} - -pub fn create_symlink_tree(paths: Vec, target_dir: &Path) -> Result<(), SymlinkError> { +pub fn create_symlink_tree(paths: Vec, target_dir: &Path) -> Result<(), FileTagsError> { for path in paths { // Ensure path is absolute and clean let abs_path = fs::canonicalize(&path).map_err(|_| { - SymlinkError::InvalidPath(path.to_string_lossy().into_owned()) + FileTagsError::InvalidPath(path.clone()) })?; // Get the file name for the symlink let file_name = abs_path.file_name().ok_or_else(|| { - SymlinkError::InvalidPath(abs_path.to_string_lossy().into_owned()) + FileTagsError::InvalidPath(abs_path.clone()) })?; // Create target directory if it doesn't exist - fs::create_dir_all(target_dir).map_err(|e| SymlinkError::CreateDir { - path: target_dir.to_string_lossy().into_owned(), + fs::create_dir_all(target_dir).map_err(|e| FileTagsError::CreateDir { + path: target_dir.to_path_buf(), source: e, })?; // Create the symlink let link_path = target_dir.join(file_name); #[cfg(unix)] - std::os::unix::fs::symlink(&abs_path, &link_path).map_err(|e| SymlinkError::CreateLink { - from: abs_path.to_string_lossy().into_owned(), - to: link_path.to_string_lossy().into_owned(), + std::os::unix::fs::symlink(&abs_path, &link_path).map_err(|e| FileTagsError::CreateLink { + from: abs_path, + to: link_path, source: e, })?; #[cfg(windows)] - std::os::windows::fs::symlink_file(&abs_path, &link_path).map_err(|e| SymlinkError::CreateLink { - from: abs_path.to_string_lossy().into_owned(), - to: link_path.to_string_lossy().into_owned(), + std::os::windows::fs::symlink_file(&abs_path, &link_path).map_err(|e| FileTagsError::CreateLink { + from: abs_path, + to: link_path, source: e, })?; } @@ -129,6 +112,6 @@ mod tests { target_dir.path() ); - assert!(matches!(result, Err(SymlinkError::InvalidPath(_)))); + assert!(matches!(result, Err(FileTagsError::InvalidPath(_)))); } } diff --git a/src/tag_engine.rs b/src/tag_engine.rs index b3aee70..89503ff 100644 --- a/src/tag_engine.rs +++ b/src/tag_engine.rs @@ -1,20 +1,4 @@ -use thiserror::Error; - -#[derive(Error, Debug, PartialEq)] -pub enum TagError { - #[error("tag cannot be empty")] - Empty, - #[error("tag contains invalid character: {0}")] - InvalidChar(char), -} - -#[derive(Error, Debug, PartialEq)] -pub enum ParseError { - #[error("multiple tag delimiters found")] - MultipleDelimiters, - #[error("invalid tag: {0}")] - InvalidTag(#[from] TagError), -} +use crate::error::{ParseError, TagError}; pub fn validate_tag(tag: &str) -> Result<(), TagError> { if tag.is_empty() { From 43f0752de78cb464c4b0e366c961e6b13a494008 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 16:37:10 +0100 Subject: [PATCH 25/32] feat: Integrate all components with new commands and comprehensive tests --- src/main.rs | 164 ++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 160 insertions(+), 4 deletions(-) diff --git a/src/main.rs b/src/main.rs index fcb5adf..deb2912 100644 --- a/src/main.rs +++ b/src/main.rs @@ -4,10 +4,9 @@ mod error; use std::collections::BTreeSet; use std::path::PathBuf; -use std::error::Error; use clap::{Parser, Subcommand}; use fs_err as fs; -use crate::error::{FileTagsError, ParseError}; +use crate::error::FileTagsError; #[derive(Parser)] #[command(author, version, about, long_about = None)] @@ -18,17 +17,40 @@ struct Cli { #[derive(Subcommand)] enum Commands { - /// List all unique tags + /// List all unique tags found in files List { /// Files to process + #[arg(required = true, help = "One or more files to process")] files: Vec, }, /// Add tags to files Add { /// Tags to add - #[arg(long = "tag", required = true)] + #[arg(long = "tag", required = true, help = "Tags to add to files")] tags: Vec, /// Files to process + #[arg(required = true, help = "One or more files to add tags to")] + files: Vec, + }, + /// Remove tags from files + Remove { + /// Tags to remove + #[arg(long = "tag", required = true, help = "Tags to remove from files")] + tags: Vec, + /// Files to process + #[arg(required = true, help = "One or more files to remove tags from")] + files: Vec, + }, + /// Create a tag-based directory tree with symlinks + Tree { + /// Target directory for the tree + #[arg(long, required = true, help = "Target directory for creating the tree")] + dir: String, + /// Maximum depth of the tree + #[arg(long, default_value = "3", help = "Maximum depth of the directory tree")] + depth: usize, + /// Files to process + #[arg(required = true, help = "One or more files to create tree from")] files: Vec, }, } @@ -95,6 +117,75 @@ fn main() -> Result<(), FileTagsError> { add_tags_to_file(&file, &tags)?; } } + Commands::Remove { tags, files } => { + for file in files { + remove_tags_from_file(&file, &tags)?; + } + } + Commands::Tree { dir, depth, files } => { + create_tag_tree(&files, &dir, depth)?; + } + } + + Ok(()) +} + +fn remove_tags_from_file(file: &str, remove_tags: &[String]) -> Result<(), FileTagsError> { + let (base, current_tags, ext) = tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { + file: PathBuf::from(file), + source: e, + })?; + + let filtered_tags = tag_engine::filter_tags(current_tags, remove_tags); + let new_filename = tag_engine::serialize_tags(&base, &filtered_tags, &ext); + + // Preserve the original directory + let parent = std::path::Path::new(file).parent() + .map(|p| p.to_string_lossy().to_string()) + .unwrap_or_default(); + + let new_path = if parent.is_empty() { + new_filename + } else { + format!("{}/{}", parent, new_filename) + }; + + // Only rename if tags actually changed + if file != new_path { + fs::rename(file, &new_path).map_err(|e| FileTagsError::Rename { + from: PathBuf::from(file), + to: PathBuf::from(&new_path), + source: e, + })?; + } + + Ok(()) +} + +fn create_tag_tree(files: &[String], target_dir: &str, depth: usize) -> Result<(), FileTagsError> { + let target = PathBuf::from(target_dir); + + for file in files { + let (_base, tags, _ext) = tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { + file: PathBuf::from(file), + source: e, + })?; + + // Create root symlink + let paths = vec![PathBuf::from(file)]; + 
symlink::create_symlink_tree(paths, &target)?; + + // Generate all tag combinations and create directory structure + let combinations = tag_engine::create_tag_combinations(&tags, depth); + for combo in combinations { + let mut dir_path = target.clone(); + for tag in &combo { + dir_path.push(tag); + } + + let paths = vec![PathBuf::from(file)]; + symlink::create_symlink_tree(paths, &dir_path)?; + } } Ok(()) @@ -209,4 +300,69 @@ mod tests { assert!(new_path.exists(), "Tagged file was not created in original directory"); Ok(()) } + + #[test] + fn test_remove_tags() -> Result<(), Box> { + let tmp_dir = TempDir::new()?; + let file = create_test_file(&tmp_dir, "test -- tag1 tag2 tag3.txt")?; + + remove_tags_from_file(&file, &vec!["tag2".to_string()])?; + + let new_path = tmp_dir.path().join("test -- tag1 tag3.txt"); + assert!(new_path.exists(), "File with removed tag not found"); + Ok(()) + } + + #[test] + fn test_create_tag_tree() -> Result<(), Box> { + let tmp_dir = TempDir::new()?; + let source = create_test_file(&tmp_dir, "test -- tag1 tag2.txt")?; + let tree_dir = tmp_dir.path().join("tree"); + + create_tag_tree(&[source], tree_dir.to_str().unwrap(), 2)?; + + // Check root symlink + assert!(tree_dir.join("test -- tag1 tag2.txt").exists()); + + // Check tag directories + assert!(tree_dir.join("tag1").join("test -- tag1 tag2.txt").exists()); + assert!(tree_dir.join("tag2").join("test -- tag1 tag2.txt").exists()); + assert!(tree_dir.join("tag1").join("tag2").join("test -- tag1 tag2.txt").exists()); + + Ok(()) + } + + #[test] + fn test_integration_all_commands() -> Result<(), Box> { + let tmp_dir = TempDir::new()?; + let file1 = create_test_file(&tmp_dir, "doc1.txt")?; + let file2 = create_test_file(&tmp_dir, "doc2.txt")?; + + // Add tags + add_tags_to_file(&file1, &vec!["work".to_string(), "draft".to_string()])?; + add_tags_to_file(&file2, &vec!["work".to_string(), "final".to_string()])?; + + // List tags + let tags = list_tags(&[ + tmp_dir.path().join("doc1 -- draft work.txt").to_str().unwrap().to_string(), + tmp_dir.path().join("doc2 -- final work.txt").to_str().unwrap().to_string(), + ])?; + assert_eq!(tags, vec!["draft", "final", "work"]); + + // Remove a tag + let file_to_remove = tmp_dir.path().join("doc1 -- draft work.txt").to_str().unwrap().to_string(); + remove_tags_from_file(&file_to_remove, &vec!["draft".to_string()])?; + + // Create tree + let tree_dir = tmp_dir.path().join("tree"); + create_tag_tree( + &[tmp_dir.path().join("doc1 -- work.txt").to_str().unwrap().to_string()], + tree_dir.to_str().unwrap(), + 2 + )?; + + assert!(tree_dir.join("work").join("doc1 -- work.txt").exists()); + + Ok(()) + } } From f23beef347f6074de9af16f5b0f16e41264a3d4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 16:37:10 +0100 Subject: [PATCH 26/32] feat: Integrate all components with new commands and comprehensive tests --- src/main.rs | 164 ++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 160 insertions(+), 4 deletions(-) diff --git a/src/main.rs b/src/main.rs index fcb5adf..deb2912 100644 --- a/src/main.rs +++ b/src/main.rs @@ -4,10 +4,9 @@ mod error; use std::collections::BTreeSet; use std::path::PathBuf; -use std::error::Error; use clap::{Parser, Subcommand}; use fs_err as fs; -use crate::error::{FileTagsError, ParseError}; +use crate::error::FileTagsError; #[derive(Parser)] #[command(author, version, about, long_about = None)] @@ -18,17 +17,40 @@ struct Cli { #[derive(Subcommand)] enum Commands { - /// List all unique tags + /// List all 
unique tags found in files List { /// Files to process + #[arg(required = true, help = "One or more files to process")] files: Vec, }, /// Add tags to files Add { /// Tags to add - #[arg(long = "tag", required = true)] + #[arg(long = "tag", required = true, help = "Tags to add to files")] tags: Vec, /// Files to process + #[arg(required = true, help = "One or more files to add tags to")] + files: Vec, + }, + /// Remove tags from files + Remove { + /// Tags to remove + #[arg(long = "tag", required = true, help = "Tags to remove from files")] + tags: Vec, + /// Files to process + #[arg(required = true, help = "One or more files to remove tags from")] + files: Vec, + }, + /// Create a tag-based directory tree with symlinks + Tree { + /// Target directory for the tree + #[arg(long, required = true, help = "Target directory for creating the tree")] + dir: String, + /// Maximum depth of the tree + #[arg(long, default_value = "3", help = "Maximum depth of the directory tree")] + depth: usize, + /// Files to process + #[arg(required = true, help = "One or more files to create tree from")] files: Vec, }, } @@ -95,6 +117,75 @@ fn main() -> Result<(), FileTagsError> { add_tags_to_file(&file, &tags)?; } } + Commands::Remove { tags, files } => { + for file in files { + remove_tags_from_file(&file, &tags)?; + } + } + Commands::Tree { dir, depth, files } => { + create_tag_tree(&files, &dir, depth)?; + } + } + + Ok(()) +} + +fn remove_tags_from_file(file: &str, remove_tags: &[String]) -> Result<(), FileTagsError> { + let (base, current_tags, ext) = tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { + file: PathBuf::from(file), + source: e, + })?; + + let filtered_tags = tag_engine::filter_tags(current_tags, remove_tags); + let new_filename = tag_engine::serialize_tags(&base, &filtered_tags, &ext); + + // Preserve the original directory + let parent = std::path::Path::new(file).parent() + .map(|p| p.to_string_lossy().to_string()) + .unwrap_or_default(); + + let new_path = if parent.is_empty() { + new_filename + } else { + format!("{}/{}", parent, new_filename) + }; + + // Only rename if tags actually changed + if file != new_path { + fs::rename(file, &new_path).map_err(|e| FileTagsError::Rename { + from: PathBuf::from(file), + to: PathBuf::from(&new_path), + source: e, + })?; + } + + Ok(()) +} + +fn create_tag_tree(files: &[String], target_dir: &str, depth: usize) -> Result<(), FileTagsError> { + let target = PathBuf::from(target_dir); + + for file in files { + let (_base, tags, _ext) = tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { + file: PathBuf::from(file), + source: e, + })?; + + // Create root symlink + let paths = vec![PathBuf::from(file)]; + symlink::create_symlink_tree(paths, &target)?; + + // Generate all tag combinations and create directory structure + let combinations = tag_engine::create_tag_combinations(&tags, depth); + for combo in combinations { + let mut dir_path = target.clone(); + for tag in &combo { + dir_path.push(tag); + } + + let paths = vec![PathBuf::from(file)]; + symlink::create_symlink_tree(paths, &dir_path)?; + } } Ok(()) @@ -209,4 +300,69 @@ mod tests { assert!(new_path.exists(), "Tagged file was not created in original directory"); Ok(()) } + + #[test] + fn test_remove_tags() -> Result<(), Box> { + let tmp_dir = TempDir::new()?; + let file = create_test_file(&tmp_dir, "test -- tag1 tag2 tag3.txt")?; + + remove_tags_from_file(&file, &vec!["tag2".to_string()])?; + + let new_path = tmp_dir.path().join("test -- tag1 tag3.txt"); + 
assert!(new_path.exists(), "File with removed tag not found"); + Ok(()) + } + + #[test] + fn test_create_tag_tree() -> Result<(), Box> { + let tmp_dir = TempDir::new()?; + let source = create_test_file(&tmp_dir, "test -- tag1 tag2.txt")?; + let tree_dir = tmp_dir.path().join("tree"); + + create_tag_tree(&[source], tree_dir.to_str().unwrap(), 2)?; + + // Check root symlink + assert!(tree_dir.join("test -- tag1 tag2.txt").exists()); + + // Check tag directories + assert!(tree_dir.join("tag1").join("test -- tag1 tag2.txt").exists()); + assert!(tree_dir.join("tag2").join("test -- tag1 tag2.txt").exists()); + assert!(tree_dir.join("tag1").join("tag2").join("test -- tag1 tag2.txt").exists()); + + Ok(()) + } + + #[test] + fn test_integration_all_commands() -> Result<(), Box> { + let tmp_dir = TempDir::new()?; + let file1 = create_test_file(&tmp_dir, "doc1.txt")?; + let file2 = create_test_file(&tmp_dir, "doc2.txt")?; + + // Add tags + add_tags_to_file(&file1, &vec!["work".to_string(), "draft".to_string()])?; + add_tags_to_file(&file2, &vec!["work".to_string(), "final".to_string()])?; + + // List tags + let tags = list_tags(&[ + tmp_dir.path().join("doc1 -- draft work.txt").to_str().unwrap().to_string(), + tmp_dir.path().join("doc2 -- final work.txt").to_str().unwrap().to_string(), + ])?; + assert_eq!(tags, vec!["draft", "final", "work"]); + + // Remove a tag + let file_to_remove = tmp_dir.path().join("doc1 -- draft work.txt").to_str().unwrap().to_string(); + remove_tags_from_file(&file_to_remove, &vec!["draft".to_string()])?; + + // Create tree + let tree_dir = tmp_dir.path().join("tree"); + create_tag_tree( + &[tmp_dir.path().join("doc1 -- work.txt").to_str().unwrap().to_string()], + tree_dir.to_str().unwrap(), + 2 + )?; + + assert!(tree_dir.join("work").join("doc1 -- work.txt").exists()); + + Ok(()) + } } From 81200d6c053b9be64b613d4d6261abb73486e8bc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 16:56:12 +0100 Subject: [PATCH 27/32] feat: Add shell completions using clap_complete_command --- Cargo.toml | 1 + src/main.rs | 189 +++++++++++++++++++++++++++++++++------------------- 2 files changed, 123 insertions(+), 67 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index d132488..920d17b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,6 +6,7 @@ edition = "2021" [dependencies] thiserror = "1.0" clap = { version = "4.4", features = ["derive"] } +clap_complete_command = "0.5.1" fs-err = "2.11" [dev-dependencies] diff --git a/src/main.rs b/src/main.rs index deb2912..687f573 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,12 +1,13 @@ -mod tag_engine; -mod symlink; +use clap::CommandFactory; mod error; +mod symlink; +mod tag_engine; -use std::collections::BTreeSet; -use std::path::PathBuf; +use crate::error::FileTagsError; use clap::{Parser, Subcommand}; use fs_err as fs; -use crate::error::FileTagsError; +use std::collections::BTreeSet; +use std::path::PathBuf; #[derive(Parser)] #[command(author, version, about, long_about = None)] @@ -16,7 +17,13 @@ struct Cli { } #[derive(Subcommand)] +#[command(subcommand_required = true)] enum Commands { + /// Generate shell completions + Completion { + #[arg(value_enum)] + shell: clap_complete_command::Shell, + }, /// List all unique tags found in files List { /// Files to process @@ -47,7 +54,11 @@ enum Commands { #[arg(long, required = true, help = "Target directory for creating the tree")] dir: String, /// Maximum depth of the tree - #[arg(long, default_value = "3", help = "Maximum depth of the directory tree")] + 
#[arg( + long, + default_value = "3", + help = "Maximum depth of the directory tree" + )] depth: usize, /// Files to process #[arg(required = true, help = "One or more files to create tree from")] @@ -61,10 +72,12 @@ fn list_tags(files: &[String]) -> Result, FileTagsError> { for file in files { match tag_engine::parse_tags(file) { Ok((_, tags, _)) => unique_tags.extend(tags), - Err(e) => return Err(FileTagsError::Parse { - file: PathBuf::from(file), - source: e, - }), + Err(e) => { + return Err(FileTagsError::Parse { + file: PathBuf::from(file), + source: e, + }) + } } } @@ -72,17 +85,19 @@ fn list_tags(files: &[String]) -> Result, FileTagsError> { } fn add_tags_to_file(file: &str, new_tags: &[String]) -> Result<(), FileTagsError> { - let (base, current_tags, ext) = tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { - file: PathBuf::from(file), - source: e, - })?; + let (base, current_tags, ext) = + tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { + file: PathBuf::from(file), + source: e, + })?; let merged_tags = tag_engine::add_tags(current_tags, new_tags.to_vec()); // Preserve the original directory - let parent = std::path::Path::new(file).parent() + let parent = std::path::Path::new(file) + .parent() .map(|p| p.to_string_lossy().to_string()) .unwrap_or_default(); - + let new_filename = tag_engine::serialize_tags(&base, &merged_tags, &ext); let new_path = if parent.is_empty() { new_filename @@ -104,8 +119,11 @@ fn add_tags_to_file(file: &str, new_tags: &[String]) -> Result<(), FileTagsError fn main() -> Result<(), FileTagsError> { let cli = Cli::parse(); - + match cli.command { + Commands::Completion { shell } => { + shell.generate(&mut Cli::command(), &mut std::io::stdout()); + } Commands::List { files } => { let tags = list_tags(&files)?; for tag in tags { @@ -131,19 +149,21 @@ fn main() -> Result<(), FileTagsError> { } fn remove_tags_from_file(file: &str, remove_tags: &[String]) -> Result<(), FileTagsError> { - let (base, current_tags, ext) = tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { - file: PathBuf::from(file), - source: e, - })?; + let (base, current_tags, ext) = + tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { + file: PathBuf::from(file), + source: e, + })?; let filtered_tags = tag_engine::filter_tags(current_tags, remove_tags); let new_filename = tag_engine::serialize_tags(&base, &filtered_tags, &ext); - + // Preserve the original directory - let parent = std::path::Path::new(file).parent() + let parent = std::path::Path::new(file) + .parent() .map(|p| p.to_string_lossy().to_string()) .unwrap_or_default(); - + let new_path = if parent.is_empty() { new_filename } else { @@ -164,12 +184,13 @@ fn remove_tags_from_file(file: &str, remove_tags: &[String]) -> Result<(), FileT fn create_tag_tree(files: &[String], target_dir: &str, depth: usize) -> Result<(), FileTagsError> { let target = PathBuf::from(target_dir); - + for file in files { - let (_base, tags, _ext) = tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { - file: PathBuf::from(file), - source: e, - })?; + let (_base, tags, _ext) = + tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { + file: PathBuf::from(file), + source: e, + })?; // Create root symlink let paths = vec![PathBuf::from(file)]; @@ -182,7 +203,7 @@ fn create_tag_tree(files: &[String], target_dir: &str, depth: usize) -> Result<( for tag in &combo { dir_path.push(tag); } - + let paths = vec![PathBuf::from(file)]; symlink::create_symlink_tree(paths, &dir_path)?; } @@ -202,9 
+223,7 @@ mod tests { fs::create_dir_all(parent)?; } fs::write(&path, "")?; - Ok(path.to_str() - .ok_or("Invalid path")? - .to_string()) + Ok(path.to_str().ok_or("Invalid path")?.to_string()) } #[test] @@ -235,15 +254,18 @@ mod tests { fn test_add_tags_to_file() -> Result<(), Box> { let tmp_dir = TempDir::new()?; let file = create_test_file(&tmp_dir, "test.txt")?; - + // Verify file was created let initial_path = tmp_dir.path().join("test.txt"); assert!(initial_path.exists(), "Initial test file was not created"); - + add_tags_to_file(&file, &vec!["tag1".to_string(), "tag2".to_string()])?; - + // Verify original is gone and new exists - assert!(!initial_path.exists(), "Original file still exists after rename"); + assert!( + !initial_path.exists(), + "Original file still exists after rename" + ); let new_path = tmp_dir.path().join("test -- tag1 tag2.txt"); assert!(new_path.exists(), "Tagged file was not created"); Ok(()) @@ -253,15 +275,18 @@ mod tests { fn test_add_tags_to_existing_tags() -> Result<(), Box> { let tmp_dir = TempDir::new()?; let file = create_test_file(&tmp_dir, "test -- existing.txt")?; - + // Verify file was created let initial_path = tmp_dir.path().join("test -- existing.txt"); assert!(initial_path.exists(), "Initial test file was not created"); - + add_tags_to_file(&file, &vec!["new".to_string()])?; - + // Verify original is gone and new exists - assert!(!initial_path.exists(), "Original file still exists after rename"); + assert!( + !initial_path.exists(), + "Original file still exists after rename" + ); let new_name = tmp_dir.path().join("test -- existing new.txt"); assert!(new_name.exists(), "Tagged file was not created"); Ok(()) @@ -271,13 +296,13 @@ mod tests { fn test_add_duplicate_tags() -> Result<(), Box> { let tmp_dir = TempDir::new()?; let file = create_test_file(&tmp_dir, "test -- tag1.txt")?; - + // Verify file was created let initial_path = tmp_dir.path().join("test -- tag1.txt"); assert!(initial_path.exists(), "Initial test file was not created"); - + add_tags_to_file(&file, &vec!["tag1".to_string()])?; - + // Original should still exist since no change was needed assert!(initial_path.exists(), "Original file should still exist"); Ok(()) @@ -287,17 +312,23 @@ mod tests { fn test_add_tags_nested_path() -> Result<(), Box> { let tmp_dir = TempDir::new()?; let file = create_test_file(&tmp_dir, "nested/path/test.txt")?; - + // Verify file was created let initial_path = tmp_dir.path().join("nested/path/test.txt"); assert!(initial_path.exists(), "Initial test file was not created"); - + add_tags_to_file(&file, &vec!["tag1".to_string()])?; - + // Verify original is gone and new exists in same directory - assert!(!initial_path.exists(), "Original file still exists after rename"); + assert!( + !initial_path.exists(), + "Original file still exists after rename" + ); let new_path = tmp_dir.path().join("nested/path/test -- tag1.txt"); - assert!(new_path.exists(), "Tagged file was not created in original directory"); + assert!( + new_path.exists(), + "Tagged file was not created in original directory" + ); Ok(()) } @@ -305,9 +336,9 @@ mod tests { fn test_remove_tags() -> Result<(), Box> { let tmp_dir = TempDir::new()?; let file = create_test_file(&tmp_dir, "test -- tag1 tag2 tag3.txt")?; - + remove_tags_from_file(&file, &vec!["tag2".to_string()])?; - + let new_path = tmp_dir.path().join("test -- tag1 tag3.txt"); assert!(new_path.exists(), "File with removed tag not found"); Ok(()) @@ -318,17 +349,21 @@ mod tests { let tmp_dir = TempDir::new()?; let source = 
create_test_file(&tmp_dir, "test -- tag1 tag2.txt")?; let tree_dir = tmp_dir.path().join("tree"); - + create_tag_tree(&[source], tree_dir.to_str().unwrap(), 2)?; - + // Check root symlink assert!(tree_dir.join("test -- tag1 tag2.txt").exists()); - + // Check tag directories assert!(tree_dir.join("tag1").join("test -- tag1 tag2.txt").exists()); assert!(tree_dir.join("tag2").join("test -- tag1 tag2.txt").exists()); - assert!(tree_dir.join("tag1").join("tag2").join("test -- tag1 tag2.txt").exists()); - + assert!(tree_dir + .join("tag1") + .join("tag2") + .join("test -- tag1 tag2.txt") + .exists()); + Ok(()) } @@ -337,32 +372,52 @@ mod tests { let tmp_dir = TempDir::new()?; let file1 = create_test_file(&tmp_dir, "doc1.txt")?; let file2 = create_test_file(&tmp_dir, "doc2.txt")?; - + // Add tags add_tags_to_file(&file1, &vec!["work".to_string(), "draft".to_string()])?; add_tags_to_file(&file2, &vec!["work".to_string(), "final".to_string()])?; - + // List tags let tags = list_tags(&[ - tmp_dir.path().join("doc1 -- draft work.txt").to_str().unwrap().to_string(), - tmp_dir.path().join("doc2 -- final work.txt").to_str().unwrap().to_string(), + tmp_dir + .path() + .join("doc1 -- draft work.txt") + .to_str() + .unwrap() + .to_string(), + tmp_dir + .path() + .join("doc2 -- final work.txt") + .to_str() + .unwrap() + .to_string(), ])?; assert_eq!(tags, vec!["draft", "final", "work"]); - + // Remove a tag - let file_to_remove = tmp_dir.path().join("doc1 -- draft work.txt").to_str().unwrap().to_string(); + let file_to_remove = tmp_dir + .path() + .join("doc1 -- draft work.txt") + .to_str() + .unwrap() + .to_string(); remove_tags_from_file(&file_to_remove, &vec!["draft".to_string()])?; - + // Create tree let tree_dir = tmp_dir.path().join("tree"); create_tag_tree( - &[tmp_dir.path().join("doc1 -- work.txt").to_str().unwrap().to_string()], + &[tmp_dir + .path() + .join("doc1 -- work.txt") + .to_str() + .unwrap() + .to_string()], tree_dir.to_str().unwrap(), - 2 + 2, )?; - + assert!(tree_dir.join("work").join("doc1 -- work.txt").exists()); - + Ok(()) } } From 263b033035a4dd32fa9c2b08989f34ef982b5440 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 16:56:12 +0100 Subject: [PATCH 28/32] feat: Add shell completions using clap_complete_command --- Cargo.toml | 1 + src/main.rs | 189 +++++++++++++++++++++++++++++++++------------------- 2 files changed, 123 insertions(+), 67 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index d132488..920d17b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,6 +6,7 @@ edition = "2021" [dependencies] thiserror = "1.0" clap = { version = "4.4", features = ["derive"] } +clap_complete_command = "0.5.1" fs-err = "2.11" [dev-dependencies] diff --git a/src/main.rs b/src/main.rs index deb2912..687f573 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,12 +1,13 @@ -mod tag_engine; -mod symlink; +use clap::CommandFactory; mod error; +mod symlink; +mod tag_engine; -use std::collections::BTreeSet; -use std::path::PathBuf; +use crate::error::FileTagsError; use clap::{Parser, Subcommand}; use fs_err as fs; -use crate::error::FileTagsError; +use std::collections::BTreeSet; +use std::path::PathBuf; #[derive(Parser)] #[command(author, version, about, long_about = None)] @@ -16,7 +17,13 @@ struct Cli { } #[derive(Subcommand)] +#[command(subcommand_required = true)] enum Commands { + /// Generate shell completions + Completion { + #[arg(value_enum)] + shell: clap_complete_command::Shell, + }, /// List all unique tags found in files List { /// Files to process @@ 
-47,7 +54,11 @@ enum Commands { #[arg(long, required = true, help = "Target directory for creating the tree")] dir: String, /// Maximum depth of the tree - #[arg(long, default_value = "3", help = "Maximum depth of the directory tree")] + #[arg( + long, + default_value = "3", + help = "Maximum depth of the directory tree" + )] depth: usize, /// Files to process #[arg(required = true, help = "One or more files to create tree from")] @@ -61,10 +72,12 @@ fn list_tags(files: &[String]) -> Result, FileTagsError> { for file in files { match tag_engine::parse_tags(file) { Ok((_, tags, _)) => unique_tags.extend(tags), - Err(e) => return Err(FileTagsError::Parse { - file: PathBuf::from(file), - source: e, - }), + Err(e) => { + return Err(FileTagsError::Parse { + file: PathBuf::from(file), + source: e, + }) + } } } @@ -72,17 +85,19 @@ fn list_tags(files: &[String]) -> Result, FileTagsError> { } fn add_tags_to_file(file: &str, new_tags: &[String]) -> Result<(), FileTagsError> { - let (base, current_tags, ext) = tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { - file: PathBuf::from(file), - source: e, - })?; + let (base, current_tags, ext) = + tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { + file: PathBuf::from(file), + source: e, + })?; let merged_tags = tag_engine::add_tags(current_tags, new_tags.to_vec()); // Preserve the original directory - let parent = std::path::Path::new(file).parent() + let parent = std::path::Path::new(file) + .parent() .map(|p| p.to_string_lossy().to_string()) .unwrap_or_default(); - + let new_filename = tag_engine::serialize_tags(&base, &merged_tags, &ext); let new_path = if parent.is_empty() { new_filename @@ -104,8 +119,11 @@ fn add_tags_to_file(file: &str, new_tags: &[String]) -> Result<(), FileTagsError fn main() -> Result<(), FileTagsError> { let cli = Cli::parse(); - + match cli.command { + Commands::Completion { shell } => { + shell.generate(&mut Cli::command(), &mut std::io::stdout()); + } Commands::List { files } => { let tags = list_tags(&files)?; for tag in tags { @@ -131,19 +149,21 @@ fn main() -> Result<(), FileTagsError> { } fn remove_tags_from_file(file: &str, remove_tags: &[String]) -> Result<(), FileTagsError> { - let (base, current_tags, ext) = tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { - file: PathBuf::from(file), - source: e, - })?; + let (base, current_tags, ext) = + tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { + file: PathBuf::from(file), + source: e, + })?; let filtered_tags = tag_engine::filter_tags(current_tags, remove_tags); let new_filename = tag_engine::serialize_tags(&base, &filtered_tags, &ext); - + // Preserve the original directory - let parent = std::path::Path::new(file).parent() + let parent = std::path::Path::new(file) + .parent() .map(|p| p.to_string_lossy().to_string()) .unwrap_or_default(); - + let new_path = if parent.is_empty() { new_filename } else { @@ -164,12 +184,13 @@ fn remove_tags_from_file(file: &str, remove_tags: &[String]) -> Result<(), FileT fn create_tag_tree(files: &[String], target_dir: &str, depth: usize) -> Result<(), FileTagsError> { let target = PathBuf::from(target_dir); - + for file in files { - let (_base, tags, _ext) = tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { - file: PathBuf::from(file), - source: e, - })?; + let (_base, tags, _ext) = + tag_engine::parse_tags(file).map_err(|e| FileTagsError::Parse { + file: PathBuf::from(file), + source: e, + })?; // Create root symlink let paths = vec![PathBuf::from(file)]; @@ -182,7 
+203,7 @@ fn create_tag_tree(files: &[String], target_dir: &str, depth: usize) -> Result<( for tag in &combo { dir_path.push(tag); } - + let paths = vec![PathBuf::from(file)]; symlink::create_symlink_tree(paths, &dir_path)?; } @@ -202,9 +223,7 @@ mod tests { fs::create_dir_all(parent)?; } fs::write(&path, "")?; - Ok(path.to_str() - .ok_or("Invalid path")? - .to_string()) + Ok(path.to_str().ok_or("Invalid path")?.to_string()) } #[test] @@ -235,15 +254,18 @@ mod tests { fn test_add_tags_to_file() -> Result<(), Box> { let tmp_dir = TempDir::new()?; let file = create_test_file(&tmp_dir, "test.txt")?; - + // Verify file was created let initial_path = tmp_dir.path().join("test.txt"); assert!(initial_path.exists(), "Initial test file was not created"); - + add_tags_to_file(&file, &vec!["tag1".to_string(), "tag2".to_string()])?; - + // Verify original is gone and new exists - assert!(!initial_path.exists(), "Original file still exists after rename"); + assert!( + !initial_path.exists(), + "Original file still exists after rename" + ); let new_path = tmp_dir.path().join("test -- tag1 tag2.txt"); assert!(new_path.exists(), "Tagged file was not created"); Ok(()) @@ -253,15 +275,18 @@ mod tests { fn test_add_tags_to_existing_tags() -> Result<(), Box> { let tmp_dir = TempDir::new()?; let file = create_test_file(&tmp_dir, "test -- existing.txt")?; - + // Verify file was created let initial_path = tmp_dir.path().join("test -- existing.txt"); assert!(initial_path.exists(), "Initial test file was not created"); - + add_tags_to_file(&file, &vec!["new".to_string()])?; - + // Verify original is gone and new exists - assert!(!initial_path.exists(), "Original file still exists after rename"); + assert!( + !initial_path.exists(), + "Original file still exists after rename" + ); let new_name = tmp_dir.path().join("test -- existing new.txt"); assert!(new_name.exists(), "Tagged file was not created"); Ok(()) @@ -271,13 +296,13 @@ mod tests { fn test_add_duplicate_tags() -> Result<(), Box> { let tmp_dir = TempDir::new()?; let file = create_test_file(&tmp_dir, "test -- tag1.txt")?; - + // Verify file was created let initial_path = tmp_dir.path().join("test -- tag1.txt"); assert!(initial_path.exists(), "Initial test file was not created"); - + add_tags_to_file(&file, &vec!["tag1".to_string()])?; - + // Original should still exist since no change was needed assert!(initial_path.exists(), "Original file should still exist"); Ok(()) @@ -287,17 +312,23 @@ mod tests { fn test_add_tags_nested_path() -> Result<(), Box> { let tmp_dir = TempDir::new()?; let file = create_test_file(&tmp_dir, "nested/path/test.txt")?; - + // Verify file was created let initial_path = tmp_dir.path().join("nested/path/test.txt"); assert!(initial_path.exists(), "Initial test file was not created"); - + add_tags_to_file(&file, &vec!["tag1".to_string()])?; - + // Verify original is gone and new exists in same directory - assert!(!initial_path.exists(), "Original file still exists after rename"); + assert!( + !initial_path.exists(), + "Original file still exists after rename" + ); let new_path = tmp_dir.path().join("nested/path/test -- tag1.txt"); - assert!(new_path.exists(), "Tagged file was not created in original directory"); + assert!( + new_path.exists(), + "Tagged file was not created in original directory" + ); Ok(()) } @@ -305,9 +336,9 @@ mod tests { fn test_remove_tags() -> Result<(), Box> { let tmp_dir = TempDir::new()?; let file = create_test_file(&tmp_dir, "test -- tag1 tag2 tag3.txt")?; - + remove_tags_from_file(&file, 
&vec!["tag2".to_string()])?; - + let new_path = tmp_dir.path().join("test -- tag1 tag3.txt"); assert!(new_path.exists(), "File with removed tag not found"); Ok(()) @@ -318,17 +349,21 @@ mod tests { let tmp_dir = TempDir::new()?; let source = create_test_file(&tmp_dir, "test -- tag1 tag2.txt")?; let tree_dir = tmp_dir.path().join("tree"); - + create_tag_tree(&[source], tree_dir.to_str().unwrap(), 2)?; - + // Check root symlink assert!(tree_dir.join("test -- tag1 tag2.txt").exists()); - + // Check tag directories assert!(tree_dir.join("tag1").join("test -- tag1 tag2.txt").exists()); assert!(tree_dir.join("tag2").join("test -- tag1 tag2.txt").exists()); - assert!(tree_dir.join("tag1").join("tag2").join("test -- tag1 tag2.txt").exists()); - + assert!(tree_dir + .join("tag1") + .join("tag2") + .join("test -- tag1 tag2.txt") + .exists()); + Ok(()) } @@ -337,32 +372,52 @@ mod tests { let tmp_dir = TempDir::new()?; let file1 = create_test_file(&tmp_dir, "doc1.txt")?; let file2 = create_test_file(&tmp_dir, "doc2.txt")?; - + // Add tags add_tags_to_file(&file1, &vec!["work".to_string(), "draft".to_string()])?; add_tags_to_file(&file2, &vec!["work".to_string(), "final".to_string()])?; - + // List tags let tags = list_tags(&[ - tmp_dir.path().join("doc1 -- draft work.txt").to_str().unwrap().to_string(), - tmp_dir.path().join("doc2 -- final work.txt").to_str().unwrap().to_string(), + tmp_dir + .path() + .join("doc1 -- draft work.txt") + .to_str() + .unwrap() + .to_string(), + tmp_dir + .path() + .join("doc2 -- final work.txt") + .to_str() + .unwrap() + .to_string(), ])?; assert_eq!(tags, vec!["draft", "final", "work"]); - + // Remove a tag - let file_to_remove = tmp_dir.path().join("doc1 -- draft work.txt").to_str().unwrap().to_string(); + let file_to_remove = tmp_dir + .path() + .join("doc1 -- draft work.txt") + .to_str() + .unwrap() + .to_string(); remove_tags_from_file(&file_to_remove, &vec!["draft".to_string()])?; - + // Create tree let tree_dir = tmp_dir.path().join("tree"); create_tag_tree( - &[tmp_dir.path().join("doc1 -- work.txt").to_str().unwrap().to_string()], + &[tmp_dir + .path() + .join("doc1 -- work.txt") + .to_str() + .unwrap() + .to_string()], tree_dir.to_str().unwrap(), - 2 + 2, )?; - + assert!(tree_dir.join("work").join("doc1 -- work.txt").exists()); - + Ok(()) } } From dc82f7bc9616f8ec3c675b3d7132db46c5e812be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 17:13:05 +0100 Subject: [PATCH 29/32] faet: add nix package --- Cargo.lock | 421 ++++++++++++++++++++++++++++++++++++++++++++++++++++ Cargo.toml | 2 +- flake.nix | 11 +- src/main.rs | 5 +- 4 files changed, 435 insertions(+), 4 deletions(-) create mode 100644 Cargo.lock diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..5cb5b1c --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,421 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "anstream" +version = "0.6.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" + +[[package]] +name = "anstyle-parse" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" +dependencies = [ + "anstyle", + "once_cell", + "windows-sys", +] + +[[package]] +name = "autocfg" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" + +[[package]] +name = "bitflags" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "clap" +version = "4.5.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92b7b18d71fad5313a1e320fa9897994228ce274b60faa4d694fe0ea89cd9e6d" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a35db2071778a7344791a4fb4f95308b5673d219dee3ae348b86642574ecc90c" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_complete" +version = "4.5.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e3040c8291884ddf39445dc033c70abc2bc44a42f0a3a00571a0f483a83f0cd" +dependencies = [ + "clap", +] + +[[package]] +name = "clap_complete_command" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "183495371ea78d4c9ff638bfc6497d46fed2396e4f9c50aebc1278a4a9919a3d" +dependencies = [ + "clap", + "clap_complete", + "clap_complete_fig", + "clap_complete_nushell", +] + +[[package]] +name = "clap_complete_fig" +version = "4.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d494102c8ff3951810c72baf96910b980fb065ca5d3101243e6a8dc19747c86b" +dependencies = [ + "clap", + "clap_complete", +] + +[[package]] +name = "clap_complete_nushell" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d02bc8b1a18ee47c4d2eec3fb5ac034dc68ebea6125b1509e9ccdffcddce66e" +dependencies = [ + "clap", + "clap_complete", +] + +[[package]] +name = "clap_derive" +version = "4.5.28" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf4ced95c6f4a675af3da73304b9ac4ed991640c36374e4b46795c49e17cf1ed" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "clap_lex" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" + +[[package]] +name = "colorchoice" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" + +[[package]] +name = "errno" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "filetags" +version = "0.1.0" +dependencies = [ + "clap", + "clap_complete_command", + "fs-err", + "tempfile", + "thiserror", +] + +[[package]] +name = "fs-err" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88a41f105fe1d5b6b34b2055e3dc59bb79b46b48b2040b9e6c7b4b5de097aa41" +dependencies = [ + "autocfg", +] + +[[package]] +name = "getrandom" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" +dependencies = [ + "cfg-if", + "libc", + "wasi", + "windows-targets", +] + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + +[[package]] +name = "libc" +version = "0.2.170" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "875b3680cb2f8f71bdcf9a30f38d48282f5d3c95cbf9b3fa57269bb5d5c06828" + +[[package]] +name = "linux-raw-sys" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + +[[package]] +name = "once_cell" +version = "1.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" + +[[package]] +name = "proc-macro2" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rustix" +version = "0.38.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "syn" +version = "2.0.98" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tempfile" +version = "3.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22e5a0acb1f3f55f65cc4a866c361b2fb2a0ff6366785ae6fbb5f85df07ba230" +dependencies = [ + "cfg-if", + "fastrand", + "getrandom", + "once_cell", + "rustix", + "windows-sys", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "unicode-ident" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00e2473a93778eb0bad35909dff6a10d28e63f792f16ed15e404fca9d5eeedbe" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "wasi" +version = "0.13.3+wasi-0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" +dependencies = [ + "wit-bindgen-rt", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "wit-bindgen-rt" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" +dependencies = [ + "bitflags", +] diff --git a/Cargo.toml b/Cargo.toml index 920d17b..e44349d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "filetags_rs" +name = "filetags" version = "0.1.0" edition = "2021" diff --git a/flake.nix b/flake.nix index a54d6b0..0db4ae8 100644 --- a/flake.nix +++ b/flake.nix @@ -13,7 +13,7 @@ inherit (pkgs) lib; pkgs = import nixpkgs { inherit system; }; fenix = inputs.fenix.packages.${system}; - craneLib = crane.lib.${system}.overrideToolchain toolchain.toolchain; + craneLib = (crane.mkLib pkgs).overrideToolchain toolchain.toolchain; mkSrc = extraPaths: with lib.fileset; let root = ./.; rustFiles = fromSource (craneLib.cleanCargoSource root); @@ -38,6 +38,15 @@ ]; # Additional environment variables can be set directly # MY_CUSTOM_VAR = "some value"; + + nativeBuildInputs = with pkgs; [ installShellFiles ]; + meta.mainProgram = "filetags"; + postInstall = '' + installShellCompletion --cmd timers \ + --bash <($out/bin/filetags completions bash) \ + --fish <($out/bin/filetags completions fish) \ + --zsh <($out/bin/filetags completions zsh) \ + ''; }; devShells.default = pkgs.mkShell.override { inherit stdenv; } diff --git a/src/main.rs b/src/main.rs index 687f573..a213b20 100644 --- a/src/main.rs +++ b/src/main.rs @@ -20,7 +20,7 @@ struct Cli { #[command(subcommand_required = true)] enum Commands { /// Generate shell completions - Completion { + Completions { #[arg(value_enum)] shell: clap_complete_command::Shell, }, @@ -121,7 +121,7 @@ fn main() -> Result<(), FileTagsError> { let cli = Cli::parse(); match cli.command { - Commands::Completion { shell } => { + Commands::Completions { shell } => { shell.generate(&mut Cli::command(), &mut std::io::stdout()); } Commands::List { files } => { @@ -215,6 +215,7 @@ fn create_tag_tree(files: &[String], target_dir: &str, depth: usize) -> Result<( #[cfg(test)] mod tests { use super::*; + use std::error::Error; use tempfile::TempDir; fn create_test_file(dir: &TempDir, name: &str) -> Result> { From 5addea67e769b99da7fcd31058e9a542f2e93cec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 17:13:05 +0100 Subject: [PATCH 30/32] faet: add nix package --- Cargo.lock | 421 ++++++++++++++++++++++++++++++++++++++++++++++++++++ Cargo.toml | 2 +- flake.nix | 11 +- src/main.rs | 5 +- 4 files changed, 435 insertions(+), 4 deletions(-) create mode 100644 Cargo.lock diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..5cb5b1c --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,421 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "anstream" +version = "0.6.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" + +[[package]] +name = "anstyle-parse" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" +dependencies = [ + "anstyle", + "once_cell", + "windows-sys", +] + +[[package]] +name = "autocfg" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" + +[[package]] +name = "bitflags" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "clap" +version = "4.5.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92b7b18d71fad5313a1e320fa9897994228ce274b60faa4d694fe0ea89cd9e6d" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a35db2071778a7344791a4fb4f95308b5673d219dee3ae348b86642574ecc90c" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_complete" +version = "4.5.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e3040c8291884ddf39445dc033c70abc2bc44a42f0a3a00571a0f483a83f0cd" +dependencies = [ + "clap", +] + +[[package]] +name = "clap_complete_command" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "183495371ea78d4c9ff638bfc6497d46fed2396e4f9c50aebc1278a4a9919a3d" +dependencies = [ + "clap", + "clap_complete", + "clap_complete_fig", + "clap_complete_nushell", +] + +[[package]] +name = "clap_complete_fig" +version = "4.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d494102c8ff3951810c72baf96910b980fb065ca5d3101243e6a8dc19747c86b" +dependencies = [ + "clap", + "clap_complete", +] + +[[package]] +name = "clap_complete_nushell" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d02bc8b1a18ee47c4d2eec3fb5ac034dc68ebea6125b1509e9ccdffcddce66e" +dependencies = [ + "clap", + "clap_complete", +] + +[[package]] +name = "clap_derive" +version = "4.5.28" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf4ced95c6f4a675af3da73304b9ac4ed991640c36374e4b46795c49e17cf1ed" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "clap_lex" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" + +[[package]] +name = "colorchoice" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" + +[[package]] +name = "errno" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "filetags" +version = "0.1.0" +dependencies = [ + "clap", + "clap_complete_command", + "fs-err", + "tempfile", + "thiserror", +] + +[[package]] +name = "fs-err" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88a41f105fe1d5b6b34b2055e3dc59bb79b46b48b2040b9e6c7b4b5de097aa41" +dependencies = [ + "autocfg", +] + +[[package]] +name = "getrandom" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" +dependencies = [ + "cfg-if", + "libc", + "wasi", + "windows-targets", +] + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + +[[package]] +name = "libc" +version = "0.2.170" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "875b3680cb2f8f71bdcf9a30f38d48282f5d3c95cbf9b3fa57269bb5d5c06828" + +[[package]] +name = "linux-raw-sys" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + +[[package]] +name = "once_cell" +version = "1.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" + +[[package]] +name = "proc-macro2" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rustix" +version = "0.38.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "syn" +version = "2.0.98" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tempfile" +version = "3.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22e5a0acb1f3f55f65cc4a866c361b2fb2a0ff6366785ae6fbb5f85df07ba230" +dependencies = [ + "cfg-if", + "fastrand", + "getrandom", + "once_cell", + "rustix", + "windows-sys", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "unicode-ident" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00e2473a93778eb0bad35909dff6a10d28e63f792f16ed15e404fca9d5eeedbe" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "wasi" +version = "0.13.3+wasi-0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" +dependencies = [ + "wit-bindgen-rt", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "wit-bindgen-rt" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" +dependencies = [ + "bitflags", +] diff --git a/Cargo.toml b/Cargo.toml index 920d17b..e44349d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "filetags_rs" +name = "filetags" version = "0.1.0" edition = "2021" diff --git a/flake.nix b/flake.nix index a54d6b0..0db4ae8 100644 --- a/flake.nix +++ b/flake.nix @@ -13,7 +13,7 @@ inherit (pkgs) lib; pkgs = import nixpkgs { inherit system; }; fenix = inputs.fenix.packages.${system}; - craneLib = crane.lib.${system}.overrideToolchain toolchain.toolchain; + craneLib = (crane.mkLib pkgs).overrideToolchain toolchain.toolchain; mkSrc = extraPaths: with lib.fileset; let root = ./.; rustFiles = fromSource (craneLib.cleanCargoSource root); @@ -38,6 +38,15 @@ ]; # Additional environment variables can be set directly # MY_CUSTOM_VAR = "some value"; + + nativeBuildInputs = with pkgs; [ installShellFiles ]; + meta.mainProgram = "filetags"; + postInstall = '' + installShellCompletion --cmd timers \ + --bash <($out/bin/filetags completions bash) \ + --fish <($out/bin/filetags completions fish) \ + --zsh <($out/bin/filetags completions zsh) \ + ''; }; devShells.default = pkgs.mkShell.override { inherit stdenv; } diff --git a/src/main.rs b/src/main.rs index 687f573..a213b20 100644 --- a/src/main.rs +++ b/src/main.rs @@ -20,7 +20,7 @@ struct Cli { #[command(subcommand_required = true)] enum Commands { /// Generate shell completions - Completion { + Completions { #[arg(value_enum)] shell: clap_complete_command::Shell, }, @@ -121,7 +121,7 @@ fn main() -> Result<(), FileTagsError> { let cli = Cli::parse(); match cli.command { - Commands::Completion { shell } => { + Commands::Completions { shell } => { shell.generate(&mut Cli::command(), &mut std::io::stdout()); } Commands::List { files } => { @@ -215,6 +215,7 @@ fn create_tag_tree(files: &[String], target_dir: &str, depth: usize) -> Result<( #[cfg(test)] mod tests { use super::*; + use std::error::Error; use tempfile::TempDir; fn create_test_file(dir: &TempDir, name: &str) -> Result> { From 6aa46041df77cc2e5c923c92150ec9d7e02760b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 17:15:04 +0100 Subject: [PATCH 31/32] docs: Add comprehensive README with usage, installation, and examples --- README.md | 98 +++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 98 insertions(+) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 0000000..5082c30 --- /dev/null +++ b/README.md @@ -0,0 +1,98 @@ +# filetags-rs + +A command-line tool for managing tags in file and directory names. 
Tags are embedded directly in filenames using a specific delimiter pattern. + +## Installation +This is a nix flake :) + +## Usage + +Tags in filenames use the following format: +``` +filename -- tag1 tag2 tag3.ext +``` + +Where: +- Tag delimiter is ` -- ` (space, double dash, space) +- Tags are separated by spaces +- Tags are stored in alphabetical order +- Tags cannot contain spaces, NUL, ":", or "/" + +### Commands + +#### List Tags +List all unique tags found in the specified files: + +```bash +filetags list "document -- tag1 tag2.pdf" "notes -- tag2 tag3.txt" +# Output: +# tag1 +# tag2 +# tag3 +``` + +#### Add Tags +Add one or more tags to files: + +```bash +filetags add --tag=work --tag=draft document.pdf +# Result: document -- draft work.pdf +``` + +#### Remove Tags +Remove specified tags from files: + +```bash +filetags remove --tag=draft "document -- draft work.pdf" +# Result: document -- work.pdf +``` + +#### Create Tag Tree +Create a directory tree with symlinks based on file tags: + +```bash +filetags tree --dir=./tagged --depth=2 "document -- tag1 tag2.pdf" +``` + +This creates a directory structure like: +``` +tagged/ +├── document.pdf -> /path/to/document -- tag1 tag2.pdf +├── tag1/ +│ ├── document.pdf -> /path/to/document -- tag1 tag2.pdf +│ └── tag2/ +│ └── document.pdf -> /path/to/document -- tag1 tag2.pdf +└── tag2/ + ├── document.pdf -> /path/to/document -- tag1 tag2.pdf + └── tag1/ + └── document.pdf -> /path/to/document -- tag1 tag2.pdf +``` + +#### Shell Completions + +Generate shell completions for your preferred shell: + +```bash +# For fish shell +filetags completion fish > ~/.config/fish/completions/filetags.fish + +# For bash +filetags completion bash > ~/.local/share/bash-completion/completions/filetags + +# For zsh +filetags completion zsh > ~/.zsh/completions/_filetags +``` + +## Error Handling + +The tool provides clear error messages for: +- Invalid characters in tags +- Empty tags +- Missing arguments +- File permission issues +- Symlink creation failures +- Directory creation failures + +## License + +GPL-3.0-or-later From 3b4ee0af06cba2153e326ac5e890529530ba8232 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Moritz=20B=C3=B6hme?= Date: Sun, 23 Feb 2025 17:15:04 +0100 Subject: [PATCH 32/32] docs: Add comprehensive README with usage, installation, and examples --- README.md | 98 +++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 98 insertions(+) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 0000000..5082c30 --- /dev/null +++ b/README.md @@ -0,0 +1,98 @@ +# filetags-rs + +A command-line tool for managing tags in file and directory names. Tags are embedded directly in filenames using a specific delimiter pattern. 
+ +## Installation +This is a nix flake :) + +## Usage + +Tags in filenames use the following format: +``` +filename -- tag1 tag2 tag3.ext +``` + +Where: +- Tag delimiter is ` -- ` (space, double dash, space) +- Tags are separated by spaces +- Tags are stored in alphabetical order +- Tags cannot contain spaces, NUL, ":", or "/" + +### Commands + +#### List Tags +List all unique tags found in the specified files: + +```bash +filetags list "document -- tag1 tag2.pdf" "notes -- tag2 tag3.txt" +# Output: +# tag1 +# tag2 +# tag3 +``` + +#### Add Tags +Add one or more tags to files: + +```bash +filetags add --tag=work --tag=draft document.pdf +# Result: document -- draft work.pdf +``` + +#### Remove Tags +Remove specified tags from files: + +```bash +filetags remove --tag=draft "document -- draft work.pdf" +# Result: document -- work.pdf +``` + +#### Create Tag Tree +Create a directory tree with symlinks based on file tags: + +```bash +filetags tree --dir=./tagged --depth=2 "document -- tag1 tag2.pdf" +``` + +This creates a directory structure like: +``` +tagged/ +├── document.pdf -> /path/to/document -- tag1 tag2.pdf +├── tag1/ +│ ├── document.pdf -> /path/to/document -- tag1 tag2.pdf +│ └── tag2/ +│ └── document.pdf -> /path/to/document -- tag1 tag2.pdf +└── tag2/ + ├── document.pdf -> /path/to/document -- tag1 tag2.pdf + └── tag1/ + └── document.pdf -> /path/to/document -- tag1 tag2.pdf +``` + +#### Shell Completions + +Generate shell completions for your preferred shell: + +```bash +# For fish shell +filetags completion fish > ~/.config/fish/completions/filetags.fish + +# For bash +filetags completion bash > ~/.local/share/bash-completion/completions/filetags + +# For zsh +filetags completion zsh > ~/.zsh/completions/_filetags +``` + +## Error Handling + +The tool provides clear error messages for: +- Invalid characters in tags +- Empty tags +- Missing arguments +- File permission issues +- Symlink creation failures +- Directory creation failures + +## License + +GPL-3.0-or-later