Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
292 changes: 112 additions & 180 deletions Cargo.lock

Large diffs are not rendered by default.

5 changes: 3 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ tokio = "^1.0"

[workspace.package]
version = "0.4.0"
edition = "2021"
edition = "2024"
license = "EUPL-1.2"

[package]
Expand All @@ -30,7 +30,8 @@ log.workspace = true
tokio = { workspace = true, features = ["macros", "rt-multi-thread"] }

libnpins = { path = "libnpins" }
futures-util = { version = "0.3.31", default-features = false }
# Pin clap because the latest patch has a regression in the help output
clap = { version = "=4.5.36", features = [ "derive", "env" ] }
futures-util = { version = "0.3.31", default-features = false }
crossterm = { version = "0.29", default-features = false }
env_logger = { version = "^0.11.0", features = ["color", "auto-color", "regex"], default-features = false }
7 changes: 5 additions & 2 deletions libnpins/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,18 +4,21 @@ version.workspace = true
edition.workspace = true
license.workspace = true

[lints.clippy]
wildcard_imports = "warn"

[dependencies]
serde = { version = "^1.0", features = [ "derive" ] }
serde_json.workspace = true
url.workspace = true
url = { workspace = true, features = ["serde"] }
anyhow.workspace = true
tokio = { workspace = true, features = ["process"] }
log.workspace = true
reqwest = { version = "0.13.1", features = [ "rustls" ], default-features = false }
async-trait = "0.1"
lenient_semver_parser = { version = "0.4.2", default-features = false }
lenient_version = { version = "0.4.2" }
nix-compat = { git = "https://git.snix.dev/snix/snix", version = "0.1.0", features = ["serde"] }
nix-compat = { git = "https://git.snix.dev/snix/snix", rev = "4918571f95d436d2e3da4665e8c1e9b77d9546e8", default-features = false, features = ["serde"] }

[dev-dependencies]
env_logger = { version = "^0.11.0", features = ["color", "auto-color", "regex"], default-features = false }
8 changes: 5 additions & 3 deletions libnpins/src/channel.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,10 @@
//!
//! This should be preferred over pinning the equivalent `nixpkgs` git branch.

use crate::*;
use nix_compat::nixhash::NixHash;
use serde::{Deserialize, Serialize};

use crate::{Updatable, build_client, diff, nix};

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)]
pub struct Pin {
Expand Down Expand Up @@ -49,7 +51,7 @@ impl Updatable for Pin {
type Version = ChannelVersion;
type Hashes = ChannelHash;

async fn update(&self, _old: Option<&ChannelVersion>) -> Result<ChannelVersion> {
async fn update(&self, _old: Option<&ChannelVersion>) -> anyhow::Result<ChannelVersion> {
/* We want to get from something like https://channels.nixos.org/nixos-21.11
* to https://releases.nixos.org/nixos/21.11/nixos-21.11.335807.df4f1f7cc3f/nixexprs.tar.xz
*/
Expand All @@ -66,7 +68,7 @@ impl Updatable for Pin {
Ok(ChannelVersion { url })
}

async fn fetch(&self, version: &ChannelVersion) -> Result<Self::Hashes> {
async fn fetch(&self, version: &ChannelVersion) -> anyhow::Result<Self::Hashes> {
/* Prefetch a URL that looks like
* https://releases.nixos.org/nixos/21.11/nixos-21.11.335807.df4f1f7cc3f
*/
Expand Down
11 changes: 5 additions & 6 deletions libnpins/src/container.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
//! Pin an OCI container

use crate::nix::nix_prefetch_docker;
use crate::*;
use anyhow::Result;
use serde::{Deserialize, Serialize};

use crate::{Updatable, diff, nix::nix_prefetch_docker};

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)]
pub struct Pin {
pub image_name: String,
Expand Down Expand Up @@ -47,15 +46,15 @@ impl Updatable for Pin {
type Version = ContainerVersion;
type Hashes = ContainerHash;

async fn update(&self, _old: Option<&ContainerVersion>) -> Result<ContainerVersion> {
async fn update(&self, _old: Option<&ContainerVersion>) -> anyhow::Result<ContainerVersion> {
Ok(ContainerVersion {
image_digest: nix_prefetch_docker(&self.image_name, &self.image_tag, None)
.await?
.image_digest,
})
}

async fn fetch(&self, version: &ContainerVersion) -> Result<ContainerHash> {
async fn fetch(&self, version: &ContainerVersion) -> anyhow::Result<ContainerHash> {
Ok(ContainerHash {
hash: nix_prefetch_docker(
&self.image_name,
Expand All @@ -72,7 +71,7 @@ impl Updatable for Pin {
mod test {
use super::*;

const DEAD_TEST_CONTAINER: &'static str = "docker.io/dperson/torproxy";
const DEAD_TEST_CONTAINER: &str = "docker.io/dperson/torproxy";

#[tokio::test]
async fn update_and_fetch_container() {
Expand Down
3 changes: 2 additions & 1 deletion libnpins/src/flake.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
//! Convert+Import Nix flake lock files

use crate::*;
use anyhow::{Context, Result};
use git::fetch_default_branch;
use serde::{Deserialize, Serialize};
use url::Url;

use crate::{Pin, git, tarball};

/// Pin entry from a nix flake's lock file
///
/// Flake locks have a two-part structure: the input's specification, and the
Expand Down
26 changes: 12 additions & 14 deletions libnpins/src/git.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,15 @@
//! instance. This should be preferred over the generic Git API if possible. See [`Repository`]
//! for more on this.

use crate::*;
use anyhow::{Context, Result};
use lenient_version::Version;
use nix_compat::nixhash::NixHash;
use serde::{Deserialize, Serialize};
use tokio::process::Command;
use url::Url;

use crate::{GenericVersion, Updatable, check_git_url, diff, get_and_deserialize, nix};

fn get_github_url() -> String {
std::env::var("NPINS_GITHUB_HOST").unwrap_or_else(|_| String::from("https://github.com"))
}
Expand All @@ -38,7 +40,7 @@ pub struct GitRevision {

impl GitRevision {
pub fn new(revision: String) -> Result<Self> {
if !revision.chars().all(|c| c.is_digit(16)) || revision.len() != 40 {
if !revision.chars().all(|c| c.is_ascii_hexdigit()) || revision.len() != 40 {
anyhow::bail!("'{revision}' is not a valid git revision (sha1 hash)");
}
Ok(Self { revision })
Expand Down Expand Up @@ -166,9 +168,8 @@ impl Repository {
];

for (path, func) in distinct_api_endpoints {
match probe(url.clone(), path).await {
Ok(_) => return func(url),
_ => {},
if probe(url.clone(), path).await.is_ok() {
return func(url);
}
}
None
Expand Down Expand Up @@ -578,11 +579,9 @@ impl Updatable for GitReleasePin {
match old_version {
Ok(old_version) => {
anyhow::ensure!(
latest >= old_version,
"Failed to ensure version monotonicity, latest found version is {} but current is {}",
latest,
old_version,
);
latest >= old_version,
"Failed to ensure version monotonicity, latest found version is {latest} but current is {old_version}"
);
},
Err(_) => {
log::warn!(
Expand Down Expand Up @@ -737,8 +736,7 @@ pub async fn fetch_default_branch(repo: &Url) -> Result<String> {

let info = remotes
.iter()
.filter(|info| info.revision.starts_with("ref: refs/heads/") && info.ref_ == "HEAD")
.next()
.find(|info| info.revision.starts_with("ref: refs/heads/") && info.ref_ == "HEAD")
.with_context(|| format!("Failed to resolve HEAD to a ref for {}", repo))?;

info.revision
Expand Down Expand Up @@ -1190,7 +1188,7 @@ mod test {
let version = pin.update(None).await?;
assert_eq!(
version,
git::GitRevision {
GitRevision {
revision: "e7145078163692697b843915a665d4f41139a65c".into(),
}
);
Expand Down Expand Up @@ -1285,7 +1283,7 @@ mod test {
let version = pin.update(None).await?;
assert_eq!(
version,
git::GitRevision {
GitRevision {
revision: "bca2071b6923d45d9aabac27b3ea1e40f5fa3006".into(),
}
);
Expand Down
26 changes: 10 additions & 16 deletions libnpins/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
//! Currently, it pretty much exposes the internals of the CLI 1:1, but in the future
//! this is supposed to evolve into a more standalone library.

use anyhow::Result;
use diff::{Diff, OptionExt};
use nix_compat::nixhash::NixHash;
use reqwest::IntoUrl;
Expand All @@ -21,7 +20,7 @@ pub mod pypi;
pub mod tarball;
pub mod versions;

pub const DEFAULT_NIX: &'static str = include_str!("default.nix");
pub const DEFAULT_NIX: &str = include_str!("default.nix");

/// Helper method to build you a client.
// TODO make injectable via a configuration mechanism
Expand Down Expand Up @@ -129,10 +128,10 @@ pub trait Updatable:
/// Fetch the latest applicable commit data
///
/// The old version may be passed to help guarantee monotonicity of the versions.
async fn update(&self, old: Option<&Self::Version>) -> Result<Self::Version>;
async fn update(&self, old: Option<&Self::Version>) -> anyhow::Result<Self::Version>;

/// Fetch hashes for a given version
async fn fetch(&self, version: &Self::Version) -> Result<Self::Hashes>;
async fn fetch(&self, version: &Self::Version) -> anyhow::Result<Self::Hashes>;
}

/// Create the `Pin` type
Expand Down Expand Up @@ -178,7 +177,7 @@ macro_rules! mkPin {
})*

/* If an error is returned, `self` remains unchanged */
pub async fn update(&mut self) -> Result<Vec<diff::DiffEntry>> {
pub async fn update(&mut self) -> ::anyhow::Result<Vec<diff::DiffEntry>> {
Ok(match self {
$(Self::$name { input, version, .. } => {
/* Use very explicit syntax to force the correct types and get good compile errors */
Expand All @@ -191,7 +190,7 @@ macro_rules! mkPin {
/* If an error is returned, `self` remains unchanged. This returns a double result: the outer one
* indicates that `update` should be called first, the inner is from the actual operation.
*/
pub async fn fetch(&mut self) -> Result<Vec<diff::DiffEntry>> {
pub async fn fetch(&mut self) -> ::anyhow::Result<Vec<diff::DiffEntry>> {
Ok(match self {
$(Self::$name { input, version, hashes, .. } => {
let version = version.as_ref()
Expand Down Expand Up @@ -225,14 +224,14 @@ macro_rules! mkPin {
/// Unfreeze a pin
pub fn unfreeze(&mut self) {
match self {
$(Self::$name { ref mut frozen, .. } => frozen.unfreeze()),*
$(Self::$name { frozen, .. } => frozen.unfreeze()),*
}
}

/// Freeze a pin
pub fn freeze(&mut self) {
match self {
$(Self::$name { ref mut frozen, .. } => frozen.freeze()),*
$(Self::$name { frozen, .. } => frozen.freeze()),*
}
}

Expand Down Expand Up @@ -314,7 +313,7 @@ impl NixPins {
}

/// Custom manual deserialize wrapper that checks the version
pub fn from_json_versioned(value: serde_json::Value) -> Result<Self> {
pub fn from_json_versioned(value: serde_json::Value) -> anyhow::Result<Self> {
versions::from_value_versioned(value)
}

Expand Down Expand Up @@ -350,7 +349,7 @@ impl diff::Diff for GenericHash {
}

/// The Frozen field in a Pin
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
#[derive(Debug, Default, Serialize, Deserialize, Clone, PartialEq, Eq)]
pub struct Frozen(pub bool);

impl Frozen {
Expand All @@ -377,12 +376,6 @@ impl diff::Diff for Frozen {
}
}

impl std::default::Default for Frozen {
fn default() -> Self {
Frozen(false)
}
}

/// A URL and its hash
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
pub struct GenericUrlHashes {
Expand All @@ -404,6 +397,7 @@ mod tests {
use super::*;

#[test]
#[rustfmt::skip]
fn test_frozen() {
assert!(!Frozen::default().is_frozen());
assert!(!{
Expand Down
6 changes: 3 additions & 3 deletions libnpins/src/niv.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
//! Convert+Import Niv files

use crate::*;
use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::convert::TryFrom;

use crate::{Pin, git};

/// Pin entry from Niv's sources.json
///
/// We only take the minimum information required to get things working. This does not include
Expand All @@ -21,7 +21,7 @@ pub struct NivPin {
impl TryFrom<NivPin> for Pin {
type Error = anyhow::Error;

fn try_from(niv: NivPin) -> Result<Self> {
fn try_from(niv: NivPin) -> anyhow::Result<Self> {
Ok(match niv.owner {
None => {
git::GitPin::new(git::Repository::git(niv.repo.parse()?), niv.branch, false).into()
Expand Down
7 changes: 4 additions & 3 deletions libnpins/src/nix.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
use crate::{check_git_url, check_url, DEFAULT_NIX};
use anyhow::{Context, Result};
use nix_compat::nixhash::{HashAlgo, NixHash};
use std::path::Path;

use crate::{DEFAULT_NIX, check_git_url, check_url};

#[allow(unused)]
pub struct PrefetchInfo {
store_path: String,
Expand Down Expand Up @@ -187,8 +188,8 @@ pub async fn nix_prefetch_docker(
"nix-prefetch-git output: {}",
String::from_utf8_lossy(&output.stdout)
);
Ok(serde_json::from_slice(&output.stdout)
.context("Failed to deserialize nix-pfetch-git JSON response.")?)
serde_json::from_slice(&output.stdout)
.context("Failed to deserialize nix-prefetch-git JSON response.")
}
pub async fn nix_eval_pin(lockfile_path: &Path, pin: &str) -> Result<std::path::PathBuf> {
let lockfile_path = lockfile_path.canonicalize()?;
Expand Down
11 changes: 7 additions & 4 deletions libnpins/src/pypi.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
//! Pin a PyPi package

use crate::*;
use anyhow::{Context, Result};
use lenient_version::Version;
use nix_compat::nixhash;
use nix_compat::nixhash::{self, NixHash};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

use crate::{GenericUrlHashes, GenericVersion, Updatable, diff, get_and_deserialize};

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)]
pub struct Pin {
pub name: String,
Expand Down Expand Up @@ -95,7 +96,9 @@ impl Updatable for Pin {
);
},
_ => {
log::warn!("This repository does not appear to be following SemVer, so no guarantees on monotonicity can be made.");
log::warn!(
"This repository does not appear to be following SemVer, so no guarantees on monotonicity can be made."
);
},
}
}
Expand Down Expand Up @@ -136,7 +139,7 @@ impl Updatable for Pin {
.with_context(|| "failed to convert to NixHash")?;

Ok(GenericUrlHashes {
hash: hash.into(),
hash,
url: latest_source.url.parse()?,
})
}
Expand Down
Loading