diff --git a/Cargo.lock b/Cargo.lock index 27da6090..5d72ad13 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1003,6 +1003,7 @@ dependencies = [ "rstest", "rstest_reuse", "serde", + "serde_json", "tempfile", "which", ] diff --git a/Cargo.toml b/Cargo.toml index d1371158..086033b9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -30,6 +30,7 @@ fastrand = "2.0.0" glob = "0.3.0" ouroboros = "0.18.3" serde = { version = "1.0.116", features = ["derive"] } +serde_json = "1.0" [target.'cfg(unix)'.dependencies] nix = { version = "0.30.1", default-features = false, features = [ diff --git a/contrib/completions/_zoxide b/contrib/completions/_zoxide index 97e654f3..307a1787 100644 --- a/contrib/completions/_zoxide +++ b/contrib/completions/_zoxide @@ -96,7 +96,7 @@ esac ;; (import) _arguments "${_arguments_options[@]}" : \ -'--from=[Application to import from]:FROM:(autojump z)' \ +'--from=[Application to import from]:FROM:(autojump z jump)' \ '--merge[Merge into existing database]' \ '-h[Print help]' \ '--help[Print help]' \ diff --git a/contrib/completions/zoxide.bash b/contrib/completions/zoxide.bash index 82b174e3..cb1be319 100644 --- a/contrib/completions/zoxide.bash +++ b/contrib/completions/zoxide.bash @@ -166,7 +166,7 @@ _zoxide() { fi case "${prev}" in --from) - COMPREPLY=($(compgen -W "autojump z" -- "${cur}")) + COMPREPLY=($(compgen -W "autojump z jump" -- "${cur}")) return 0 ;; *) diff --git a/contrib/completions/zoxide.fish b/contrib/completions/zoxide.fish index 3a0bfe7a..a55ab7fb 100644 --- a/contrib/completions/zoxide.fish +++ b/contrib/completions/zoxide.fish @@ -50,7 +50,8 @@ complete -c zoxide -n "__fish_zoxide_using_subcommand edit; and __fish_seen_subc complete -c zoxide -n "__fish_zoxide_using_subcommand edit; and __fish_seen_subcommand_from reload" -s h -l help -d 'Print help' complete -c zoxide -n "__fish_zoxide_using_subcommand edit; and __fish_seen_subcommand_from reload" -s V -l version -d 'Print version' complete -c zoxide -n 
"__fish_zoxide_using_subcommand import" -l from -d 'Application to import from' -r -f -a "autojump\t'' -z\t''" +z\t'' +jump\t''" complete -c zoxide -n "__fish_zoxide_using_subcommand import" -l merge -d 'Merge into existing database' complete -c zoxide -n "__fish_zoxide_using_subcommand import" -s h -l help -d 'Print help' complete -c zoxide -n "__fish_zoxide_using_subcommand import" -s V -l version -d 'Print version' diff --git a/contrib/completions/zoxide.nu b/contrib/completions/zoxide.nu index 642908e6..1925d97c 100644 --- a/contrib/completions/zoxide.nu +++ b/contrib/completions/zoxide.nu @@ -44,7 +44,7 @@ module completions { ] def "nu-complete zoxide import from" [] { - [ "autojump" "z" ] + [ "autojump" "z" "jump" ] } # Import entries from another application diff --git a/contrib/completions/zoxide.ts b/contrib/completions/zoxide.ts index 1e0d4045..3f22c5b7 100644 --- a/contrib/completions/zoxide.ts +++ b/contrib/completions/zoxide.ts @@ -124,6 +124,7 @@ const completion: Fig.Spec = { suggestions: [ "autojump", "z", + "jump", ], }, }, diff --git a/src/cmd/cmd.rs b/src/cmd/cmd.rs index 7359786c..48b3aaff 100644 --- a/src/cmd/cmd.rs +++ b/src/cmd/cmd.rs @@ -112,6 +112,7 @@ pub enum ImportFrom { Autojump, #[clap(alias = "fasd")] Z, + Jump, } /// Generate shell configuration diff --git a/src/cmd/import.rs b/src/cmd/import.rs index ac0777a9..d408704f 100644 --- a/src/cmd/import.rs +++ b/src/cmd/import.rs @@ -19,6 +19,7 @@ impl Run for Import { match self.from { ImportFrom::Autojump => import_autojump(&mut db, &buffer), ImportFrom::Z => import_z(&mut db, &buffer), + ImportFrom::Jump => import_jump(&mut db, &buffer), } .context("import error")?; @@ -78,6 +79,93 @@ fn sigmoid(x: f64) -> f64 { 1.0 / (1.0 + (-x).exp()) } +/// Parse a simple ISO 8601 UTC timestamp (YYYY-MM-DDTHH:MM:SSZ +/// or YYYY-MM-DDTHH:MM:SS.ssssss±hh:mm) to Unix epoch seconds. 
+/// Returns None if the format is invalid
+/// Note: this is only accurate to second-precision and ignores
+/// timezone offsets
+fn parse_iso8601_utc(timestamp: &str) -> Option<u64> {
+    // Expected format: 2023-01-01T12:00:00Z or 2024-11-07T11:01:57.327507-08:00
+    let is_valid = (timestamp.len() == 20 && timestamp.ends_with('Z'))
+        || timestamp.len() >= 21;
+    if !is_valid {
+        return None;
+    }
+
+    let parts: Vec<&str> = timestamp[..19].split(&['-', 'T', ':'][..]).collect();
+    if parts.len() != 6 && parts.len() != 7 {
+        return None;
+    }
+
+    let year = parts[0].parse::<u64>().ok()?;
+    let month = parts[1].parse::<u32>().ok()?;
+    let day = parts[2].parse::<u32>().ok()?;
+    let hour = parts[3].parse::<u32>().ok()?;
+    let minute = parts[4].parse::<u32>().ok()?;
+    let second = parts[5].parse::<u32>().ok()?;
+
+    // Basic validation
+    if !(1..=12).contains(&month)
+        || !(1..=31).contains(&day)
+        || hour > 23
+        || minute > 59
+        || second > 59
+    {
+        return None;
+    }
+
+    // Simple calculation (ignoring leap years and timezone complexities for now)
+    // This is a basic implementation that works for most practical cases
+    let mut days_since_epoch = (year - 1970) * 365 + (year - 1970) / 4; // basic leap years
+    // rough month lengths (non-leap year)
+    let month_days = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];
+    for m in 1..month {
+        days_since_epoch += month_days[m as usize] as u64;
+    }
+    days_since_epoch += (day - 1) as u64;
+
+    let seconds_since_epoch = days_since_epoch * 24 * 60 * 60
+        + (hour as u64) * 60 * 60
+        + (minute as u64) * 60
+        + (second as u64);
+
+    Some(seconds_since_epoch)
+}
+
+fn import_jump(db: &mut Database, buffer: &str) -> Result<()> {
+    #[derive(serde::Deserialize)]
+    struct Entry {
+        #[serde(rename = "Path")]
+        path: String,
+        #[serde(rename = "Score")]
+        score: Score,
+    }
+
+    #[derive(serde::Deserialize)]
+    struct Score {
+        #[serde(rename = "Weight")]
+        weight: i64,
+        #[serde(rename = "Age")]
+        age: String,
+    }
+
+    let entries: Vec<Entry> =
+        serde_json::from_str(buffer).context("invalid 
Jump JSON format")?; + + for entry in entries { + let Some(last_accessed) = parse_iso8601_utc(&entry.score.age) else { + eprintln!("Warning: Skipping entry with invalid timestamp: {}", entry.score.age); + continue; + }; + db.add_unchecked(&entry.path, entry.score.weight as f64, last_accessed); + } + + if db.dirty() { + db.dedup(); + } + Ok(()) +} + #[cfg(test)] mod tests { use super::*; @@ -163,4 +251,79 @@ mod tests { assert_eq!(dir1.last_accessed, dir2.last_accessed); } } + + #[test] + fn parse_iso8601_timestamp() { + // Test basic ISO 8601 UTC timestamp parsing + // These are the actual values our parser produces (approximate calculation) + assert_eq!(parse_iso8601_utc("2023-01-01T12:00:00Z"), Some(1672574400)); // 12:00 UTC + assert_eq!(parse_iso8601_utc("2023-01-02T14:20:00Z"), Some(1672669200)); // 14:20 UTC + assert_eq!(parse_iso8601_utc("2023-01-03T09:15:00Z"), Some(1672737300)); // 09:15 UTC + + // test stripping parts we ignore + assert_eq!(parse_iso8601_utc("2024-11-07T11:01:57.327507-08:00"), Some(1730890917)); + assert_eq!(parse_iso8601_utc("2024-11-07T11:28:33.949702-08:00"), Some(1730892513)); + assert_eq!(parse_iso8601_utc("2026-02-17T11:36:17.7596-08:00"), Some(1771328177)); + // nanosecond precision (real Jump timestamps) + assert!(parse_iso8601_utc("2025-01-10T20:51:04.217017979-08:00").is_some()); + + // Test invalid formats + assert_eq!(parse_iso8601_utc("invalid"), None); + assert_eq!(parse_iso8601_utc("2023-01-01T12:00:00"), None); // Missing Z + assert_eq!(parse_iso8601_utc("2023-01-01 12:00:00Z"), None); // Wrong separator + } + + #[test] + fn from_jump() { + let data_dir = tempfile::tempdir().unwrap(); + let mut db = Database::open_dir(data_dir.path()).unwrap(); + for (path, rank, last_accessed) in [ + ("/quux/quuz", 1.0, 100), + ("/corge/grault/garply", 6.0, 600), + ("/waldo/fred/plugh", 3.0, 300), + ("/xyzzy/thud", 8.0, 800), + ("/foo/bar", 9.0, 900), + ] { + db.add_unchecked(path, rank, last_accessed); + } + + // Define timestamps as 
variables to ensure consistency + let baz_time = "2023-01-01T12:00:00Z"; + let foobar_time = "2023-01-02T12:00:00Z"; + let quux_time = "2026-02-17T11:36:17.7596-08:00"; + + let buffer = format!( + r#"[ + {{"Path":"/baz","Score":{{"Weight":7,"Age":"{}"}}}}, + {{"Path":"/foo/bar","Score":{{"Weight":2,"Age":"{}"}}}}, + {{"Path":"/quux/quuz","Score":{{"Weight":5,"Age":"{}"}}}} + ]"#, + baz_time, foobar_time, quux_time + ); + import_jump(&mut db, &buffer).unwrap(); + + db.sort_by_path(); + println!("got: {:?}", &db.dirs()); + + // Parse the same timestamps for expected results + let baz_timestamp = parse_iso8601_utc(baz_time).unwrap(); + let foobar_timestamp = parse_iso8601_utc(foobar_time).unwrap(); + let quux_timestamp = parse_iso8601_utc(quux_time).unwrap(); + + let exp = [ + Dir { path: "/baz".into(), rank: 7.0, last_accessed: baz_timestamp }, + Dir { path: "/corge/grault/garply".into(), rank: 6.0, last_accessed: 600u64 }, + Dir { path: "/foo/bar".into(), rank: 11.0, last_accessed: foobar_timestamp }, + Dir { path: "/quux/quuz".into(), rank: 6.0, last_accessed: quux_timestamp }, + Dir { path: "/waldo/fred/plugh".into(), rank: 3.0, last_accessed: 300u64 }, + Dir { path: "/xyzzy/thud".into(), rank: 8.0, last_accessed: 800u64 }, + ]; + println!("exp: {exp:?}"); + + for (dir1, dir2) in db.dirs().iter().zip(exp) { + assert_eq!(dir1.path, dir2.path); + assert!((dir1.rank - dir2.rank).abs() < 0.01); + assert_eq!(dir1.last_accessed, dir2.last_accessed); + } + } }