feat: Add serialize_tags function with comprehensive tests
This commit is contained in:
parent
06a25589b9
commit
bce4b3eb86
1 changed file with 62 additions and 10 deletions
|
|
@ -1,4 +1,3 @@
|
|||
use std::path::Path;
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Error, Debug, PartialEq)]
|
||||
|
|
@ -32,16 +31,21 @@ pub fn validate_tag(tag: &str) -> Result<(), TagError> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
pub fn parse_tags(filename: &str) -> Result<(String, Vec<String>), ParseError> {
|
||||
const TAG_DELIMITER: &str = " -- ";
|
||||
|
||||
/// Separator between the base filename and its space-joined tag list.
pub const TAG_DELIMITER: &str = " -- ";
|
||||
|
||||
pub fn parse_tags(filename: &str) -> Result<(String, Vec<String>, String), ParseError> {
|
||||
let parts: Vec<&str> = filename.split(TAG_DELIMITER).collect();
|
||||
|
||||
if parts.len() > 2 {
|
||||
return Err(ParseError::MultipleDelimiters);
|
||||
}
|
||||
|
||||
let base_name = parts[0].to_string();
|
||||
// Split the first part into base and extension
|
||||
let base_parts: Vec<&str> = parts[0].rsplitn(2, '.').collect();
|
||||
let (base_name, extension) = match base_parts.len() {
|
||||
2 => (base_parts[1].to_string(), format!(".{}", base_parts[0])),
|
||||
_ => (parts[0].to_string(), String::new()),
|
||||
};
|
||||
|
||||
let tags = if parts.len() == 2 {
|
||||
let tag_part = parts[1];
|
||||
|
|
@ -60,13 +64,59 @@ pub fn parse_tags(filename: &str) -> Result<(String, Vec<String>), ParseError> {
|
|||
Vec::new()
|
||||
};
|
||||
|
||||
Ok((base_name, tags))
|
||||
Ok((base_name, tags, extension))
|
||||
}
|
||||
|
||||
pub fn serialize_tags(base: &str, tags: &[String], extension: &str) -> String {
|
||||
let mut sorted_tags = tags.to_vec();
|
||||
sorted_tags.sort();
|
||||
|
||||
if sorted_tags.is_empty() {
|
||||
format!("{}{}", base, extension)
|
||||
} else {
|
||||
format!("{}{}{}{}", base, TAG_DELIMITER, sorted_tags.join(" "), extension)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
fn test_serialize_tags_no_tags() {
    // An empty tag slice yields just base + extension, with no delimiter.
    assert_eq!(serialize_tags("file", &[], ".txt"), "file.txt");
}
|
||||
|
||||
#[test]
fn test_serialize_tags_with_tags() {
    // Tags are supplied out of order; serialization must sort them.
    let unsorted = ["tag2".to_string(), "tag1".to_string()];
    assert_eq!(serialize_tags("file", &unsorted, ".txt"), "file -- tag1 tag2.txt");
}
|
||||
|
||||
#[test]
fn test_roundtrip_parsing_serialization() {
    // Parsing then re-serializing canonicalizes the tag order.
    let (base, tags, ext) = parse_tags("document.doc -- work draft pdf").unwrap();
    let rebuilt = serialize_tags(&base, &tags, &ext);
    assert_eq!(rebuilt, "document -- draft pdf work.doc");
}
|
||||
|
||||
#[test]
fn test_case_sensitivity() {
    // Byte-wise ordering places uppercase before lowercase, so "ZIP" sorts first.
    let tags = ["ZIP".to_string(), "archive".to_string()];
    assert_eq!(serialize_tags("backup", &tags, ".tar"), "backup -- ZIP archive.tar");
}
|
||||
|
||||
#[test]
fn test_empty_extension() {
    // Extensionless files (e.g. README) must serialize without a trailing dot.
    assert_eq!(
        serialize_tags("README", &["note".to_string()], ""),
        "README -- note"
    );
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_tag_valid() {
|
||||
assert!(validate_tag("valid-tag").is_ok());
|
||||
|
|
@ -85,16 +135,18 @@ mod tests {
|
|||
|
||||
#[test]
fn test_parse_tags_no_tags() {
    // A plain filename splits into base and extension with an empty tag list.
    let parsed = parse_tags("file.txt").unwrap();
    assert_eq!(parsed.0, "file");
    assert!(parsed.1.is_empty());
    assert_eq!(parsed.2, ".txt");
}
|
||||
|
||||
#[test]
fn test_parse_tags_with_tags() {
    // Base, tags, and extension are all recovered from a tagged filename.
    let (base, tags, ext) = parse_tags("file.txt -- tag1 tag2").unwrap();
    assert_eq!((base.as_str(), ext.as_str()), ("file", ".txt"));
    assert_eq!(tags, ["tag1", "tag2"]);
}
|
||||
|
||||
#[test]
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue