Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ fastrand = "2.0.0"
glob = "0.3.0"
ouroboros = "0.18.3"
serde = { version = "1.0.116", features = ["derive"] }
serde_json = "1.0"

[target.'cfg(unix)'.dependencies]
nix = { version = "0.30.1", default-features = false, features = [
Expand Down
2 changes: 1 addition & 1 deletion contrib/completions/_zoxide

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion contrib/completions/zoxide.bash

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion contrib/completions/zoxide.fish

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion contrib/completions/zoxide.nu

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions contrib/completions/zoxide.ts

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions src/cmd/cmd.rs
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,7 @@ pub enum ImportFrom {
Autojump,
#[clap(alias = "fasd")]
Z,
Jump,
}

/// Generate shell configuration
Expand Down
163 changes: 163 additions & 0 deletions src/cmd/import.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ impl Run for Import {
match self.from {
ImportFrom::Autojump => import_autojump(&mut db, &buffer),
ImportFrom::Z => import_z(&mut db, &buffer),
ImportFrom::Jump => import_jump(&mut db, &buffer),
}
.context("import error")?;

Expand Down Expand Up @@ -78,6 +79,93 @@ fn sigmoid(x: f64) -> f64 {
1.0 / (1.0 + (-x).exp())
}

/// Parse a simple ISO 8601 UTC timestamp (YYYY-MM-DDTHH:MM:SSZ
/// or YYYY-MM-DDTHH:MM:SS.ssssss±hh:mm) to Unix epoch seconds.
/// Returns None if the format is invalid or the year is before 1970.
///
/// Limitations (accepted for import scoring purposes): only
/// second precision, timezone offsets are ignored, and leap years
/// are approximated — dates after February in a leap year can be
/// off by one day.
fn parse_iso8601_utc(timestamp: &str) -> Option<u64> {
    // Expected format: 2023-01-01T12:00:00Z or 2024-11-07T11:01:57.327507-08:00
    let is_valid = (timestamp.len() == 20 && timestamp.ends_with('Z'))
        || timestamp.len() >= 21;
    if !is_valid {
        return None;
    }

    // Use checked slicing: a direct `timestamp[..19]` would panic if byte 19
    // is not a char boundary (possible for non-ASCII input).
    let date_time = timestamp.get(..19)?;
    let parts: Vec<&str> = date_time.split(&['-', 'T', ':'][..]).collect();
    if parts.len() != 6 && parts.len() != 7 {
        return None;
    }

    let year = parts[0].parse::<u64>().ok()?;
    let month = parts[1].parse::<u32>().ok()?;
    let day = parts[2].parse::<u32>().ok()?;
    let hour = parts[3].parse::<u32>().ok()?;
    let minute = parts[4].parse::<u32>().ok()?;
    let second = parts[5].parse::<u32>().ok()?;

    // Basic validation. Years before 1970 are rejected explicitly: the
    // `year - 1970` arithmetic below would underflow on u64 (panic in debug
    // builds, a huge wrapped timestamp in release builds).
    if year < 1970
        || !(1..=12).contains(&month)
        || !(1..=31).contains(&day)
        || hour > 23
        || minute > 59
        || second > 59
    {
        return None;
    }

    // Simple calculation (ignoring leap years and timezone complexities for now)
    // This is a basic implementation that works for most practical cases
    let mut days_since_epoch = (year - 1970) * 365 + (year - 1970) / 4; // basic leap years
    // rough month lengths (non-leap year)
    let month_days = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];
    for m in 1..month {
        days_since_epoch += month_days[m as usize] as u64;
    }
    days_since_epoch += (day - 1) as u64;

    let seconds_since_epoch = days_since_epoch * 24 * 60 * 60
        + (hour as u64) * 60 * 60
        + (minute as u64) * 60
        + (second as u64);

    Some(seconds_since_epoch)
}

fn import_jump(db: &mut Database, buffer: &str) -> Result<()> {
#[derive(serde::Deserialize)]
struct Entry {
#[serde(rename = "Path")]
path: String,
#[serde(rename = "Score")]
score: Score,
}

#[derive(serde::Deserialize)]
struct Score {
#[serde(rename = "Weight")]
weight: i64,
#[serde(rename = "Age")]
age: String,
}

let entries: Vec<Entry> =
serde_json::from_str(buffer).context("invalid Jump JSON format")?;

Comment on lines +152 to +154
Copy link

Copilot AI Mar 10, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

PR description mentions a "custom JSON parser" for Jump, but the implementation uses serde_json::from_str. If the intent is to avoid bringing in serde_json, this doesn't meet that goal; otherwise, consider updating the PR description to reflect the actual approach (custom timestamp parsing + serde_json deserialization).

Copilot uses AI. Check for mistakes.
for entry in entries {
let Some(last_accessed) = parse_iso8601_utc(&entry.score.age) else {
eprintln!("Warning: Skipping entry with invalid timestamp: {}", entry.score.age);
continue;
};
db.add_unchecked(&entry.path, entry.score.weight as f64, last_accessed);
}

if db.dirty() {
db.dedup();
}
Ok(())
}

#[cfg(test)]
mod tests {
use super::*;
Expand Down Expand Up @@ -163,4 +251,79 @@ mod tests {
assert_eq!(dir1.last_accessed, dir2.last_accessed);
}
}

#[test]
fn parse_iso8601_timestamp() {
    // Basic UTC timestamps. Expected values reflect this parser's
    // approximate epoch math (it is not a full RFC 3339 implementation).
    let utc_cases = [
        ("2023-01-01T12:00:00Z", 1672574400u64), // 12:00 UTC
        ("2023-01-02T14:20:00Z", 1672669200),    // 14:20 UTC
        ("2023-01-03T09:15:00Z", 1672737300),    // 09:15 UTC
    ];
    for (input, expected) in utc_cases {
        assert_eq!(parse_iso8601_utc(input), Some(expected));
    }

    // Fractional seconds and timezone offsets are stripped/ignored.
    assert_eq!(parse_iso8601_utc("2024-11-07T11:01:57.327507-08:00"), Some(1730890917));
    assert_eq!(parse_iso8601_utc("2024-11-07T11:28:33.949702-08:00"), Some(1730892513));
    assert_eq!(parse_iso8601_utc("2026-02-17T11:36:17.7596-08:00"), Some(1771328177));
    // Nanosecond precision (real Jump timestamps).
    assert!(parse_iso8601_utc("2025-01-10T20:51:04.217017979-08:00").is_some());

    // Invalid formats are rejected.
    assert_eq!(parse_iso8601_utc("invalid"), None);
    assert_eq!(parse_iso8601_utc("2023-01-01T12:00:00"), None); // Missing Z
    assert_eq!(parse_iso8601_utc("2023-01-01 12:00:00Z"), None); // Wrong separator
}

#[test]
fn from_jump() {
    let data_dir = tempfile::tempdir().unwrap();
    let mut db = Database::open_dir(data_dir.path()).unwrap();

    // Seed the database so both merge (existing paths) and insert
    // (new paths) behavior is exercised.
    let seed = [
        ("/quux/quuz", 1.0, 100),
        ("/corge/grault/garply", 6.0, 600),
        ("/waldo/fred/plugh", 3.0, 300),
        ("/xyzzy/thud", 8.0, 800),
        ("/foo/bar", 9.0, 900),
    ];
    for (path, rank, last_accessed) in seed {
        db.add_unchecked(path, rank, last_accessed);
    }

    // Timestamps held in variables so the expectations below stay in
    // sync with the imported payload.
    let baz_time = "2023-01-01T12:00:00Z";
    let foobar_time = "2023-01-02T12:00:00Z";
    let quux_time = "2026-02-17T11:36:17.7596-08:00";

    let buffer = format!(
        r#"[
{{"Path":"/baz","Score":{{"Weight":7,"Age":"{}"}}}},
{{"Path":"/foo/bar","Score":{{"Weight":2,"Age":"{}"}}}},
{{"Path":"/quux/quuz","Score":{{"Weight":5,"Age":"{}"}}}}
]"#,
        baz_time, foobar_time, quux_time
    );
    import_jump(&mut db, &buffer).unwrap();

    db.sort_by_path();
    println!("got: {:?}", &db.dirs());

    // Derive the expected last_accessed values with the same parser the
    // importer uses.
    let baz_timestamp = parse_iso8601_utc(baz_time).unwrap();
    let foobar_timestamp = parse_iso8601_utc(foobar_time).unwrap();
    let quux_timestamp = parse_iso8601_utc(quux_time).unwrap();

    let exp = [
        Dir { path: "/baz".into(), rank: 7.0, last_accessed: baz_timestamp },
        Dir { path: "/corge/grault/garply".into(), rank: 6.0, last_accessed: 600u64 },
        Dir { path: "/foo/bar".into(), rank: 11.0, last_accessed: foobar_timestamp },
        Dir { path: "/quux/quuz".into(), rank: 6.0, last_accessed: quux_timestamp },
        Dir { path: "/waldo/fred/plugh".into(), rank: 3.0, last_accessed: 300u64 },
        Dir { path: "/xyzzy/thud".into(), rank: 8.0, last_accessed: 800u64 },
    ];
    println!("exp: {exp:?}");

    for (got, want) in db.dirs().iter().zip(exp) {
        assert_eq!(got.path, want.path);
        assert!((got.rank - want.rank).abs() < 0.01);
        assert_eq!(got.last_accessed, want.last_accessed);
    }
}
}
Loading