67 changes: 54 additions & 13 deletions .github/workflows/ci.yaml
@@ -132,7 +132,7 @@ jobs:
- name: Run `cargo-deny`
uses: EmbarkStudios/[email protected]


check-schema:
name: Check schema
runs-on: ubuntu-latest
@@ -168,15 +168,15 @@ jobs:
run: |
if ! [[ -z $(git status -s) ]]; then
echo "::error title=Workspace is not clean::Please run 'sh ./misc/update.sh' and commit the changes"

(
echo '## Diff after running `sh ./misc/update.sh`:'
echo
echo '```diff'
git diff
echo '```'
) >> $GITHUB_STEP_SUMMARY

exit 1
fi

@@ -216,14 +216,52 @@ jobs:
cargo clippy --workspace --tests --bins --lib -- -D warnings


compile-test-artifacts:
name: Compile test artifacts
runs-on: ubuntu-latest

permissions:
contents: read

steps:
- name: Checkout
uses: actions/[email protected]

- name: Install toolchain
run: |
rustup toolchain install stable
rustup default stable

- name: Install nextest
uses: taiki-e/install-action@nextest

- name: Setup sccache
uses: mozilla-actions/[email protected]

- name: Build and archive tests
run: cargo nextest archive --workspace --archive-file nextest-archive.tar.zst
env:
SQLX_OFFLINE: '1'

- name: Upload archive to workflow
uses: actions/[email protected]
with:
name: nextest-archive
path: nextest-archive.tar.zst


test:
name: Run test suite with Rust stable
needs: [rustfmt, opa-lint]
needs: [rustfmt, opa-lint, compile-test-artifacts]
runs-on: ubuntu-latest

permissions:
contents: read

strategy:
matrix:
partition: [1, 2]

services:
postgres:
image: docker.io/library/postgres:15.3
@@ -243,10 +281,9 @@ jobs:
- name: Checkout the code
uses: actions/[email protected]

- name: Install toolchain
run: |
rustup toolchain install stable
rustup default stable
- run: mkdir -p ~/.cargo/bin
- name: Install nextest
uses: taiki-e/install-action@nextest

- name: Install Node
uses: actions/[email protected]
@@ -270,16 +307,20 @@ jobs:
working-directory: ./policies
run: make

- name: Setup sccache
uses: mozilla-actions/[email protected]
- name: Download archive
uses: actions/download-artifact@v4
with:
name: nextest-archive

- name: Test
id: test
env:
DATABASE_URL: postgresql://postgres:postgres@localhost/postgres
SQLX_OFFLINE: '1'
run: |
cargo test --workspace
~/.cargo/bin/cargo-nextest nextest run --archive-file nextest-archive.tar.zst \
--partition count:${{ matrix.partition }}/2 \
--retries 1
# --retries is a workaround for sqlx not playing nice with nextest
# Waiting for https://github.com/launchbadge/sqlx/pull/3334

syn2mas:
name: Check syn2mas
16 changes: 9 additions & 7 deletions Cargo.toml
@@ -319,14 +319,16 @@ version = "0.6.2"

# A few profile opt-level tweaks to make the test suite run faster
[profile.dev.package]
num-bigint-dig.opt-level = 3
sqlx-macros.opt-level = 3
cranelift-codegen.opt-level = 3
regalloc2.opt-level = 3
argon2.opt-level = 3
pbkdf2.opt-level = 3
bcrypt.opt-level = 3
sha2.opt-level = 3
digest.opt-level = 3
block-buffer.opt-level = 3
cranelift-codegen.opt-level = 3
digest.opt-level = 3
hmac.opt-level = 3
generic-array.opt-level = 3
num-bigint-dig.opt-level = 3
pbkdf2.opt-level = 3
rayon.opt-level = 3
regalloc2.opt-level = 3
sha2.opt-level = 3
sqlx-macros.opt-level = 3
29 changes: 0 additions & 29 deletions crates/data-model/src/tokens.rs
@@ -225,16 +225,6 @@ impl TokenType {
}

/// Generate a token for the given type
///
/// ```rust
/// extern crate rand;
///
/// use rand::thread_rng;
/// use mas_data_model::TokenType::{AccessToken, RefreshToken};
///
/// AccessToken.generate(&mut thread_rng());
/// RefreshToken.generate(&mut thread_rng());
/// ```
pub fn generate(self, rng: &mut (impl RngCore + ?Sized)) -> String {
let random_part: String = rng
.sample_iter(&Alphanumeric)
@@ -250,25 +240,6 @@ impl TokenType {

/// Check the format of a token and determine its type
///
/// ```rust
/// use mas_data_model::TokenType;
///
/// assert_eq!(
/// TokenType::check("mat_kkLSacJDpek22jKWw4AcXG68b7U3W6_0Lg9yb"),
/// Ok(TokenType::AccessToken)
/// );
///
/// assert_eq!(
/// TokenType::check("mar_PkpplxPkfjsqvtdfUlYR1Afg2TpaHF_GaTQd2"),
/// Ok(TokenType::RefreshToken)
/// );
///
/// assert_eq!(
/// TokenType::check("syt_PkpplxPkfjsqvtdfUlYR1Afg2TpaHF_GaTQd2"),
/// Ok(TokenType::CompatAccessToken)
/// );
/// ```
///
/// # Errors
///
/// Returns an error if the token is not valid
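The doctest examples removed above exercise real API, so the same checks could survive as an ordinary test that nextest can archive and partition with everything else. A minimal sketch, assuming the `TokenType::generate`/`TokenType::check` signatures shown in the removed doc comments; placing it as an integration test is illustrative, not part of this PR:

```rust
// Sketch only: re-homes the removed doctest examples as a regular test.
// Assumes the `TokenType` API shown in the removed doc comments above.
use mas_data_model::TokenType;
use rand::thread_rng;

#[test]
fn generated_tokens_round_trip_through_check() {
    // `generate` prefixes the random part with a type-specific tag,
    // so `check` should recover the original token type.
    let access = TokenType::AccessToken.generate(&mut thread_rng());
    assert_eq!(TokenType::check(&access), Ok(TokenType::AccessToken));

    let refresh = TokenType::RefreshToken.generate(&mut thread_rng());
    assert_eq!(TokenType::check(&refresh), Ok(TokenType::RefreshToken));
}
```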
3 changes: 2 additions & 1 deletion crates/handlers/src/health.rs
@@ -26,10 +26,11 @@ mod tests {
use hyper::{Request, StatusCode};

use super::*;
use crate::test_utils::{RequestBuilderExt, ResponseExt, TestState};
use crate::test_utils::{setup, RequestBuilderExt, ResponseExt, TestState};

#[sqlx::test(migrator = "mas_storage_pg::MIGRATOR")]
async fn test_get_health(pool: PgPool) {
setup();
let state = TestState::from_pool(pool).await.unwrap();
let request = Request::get("/health").empty();

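The new `setup()` call comes from `crate::test_utils`, whose body is not part of this diff. A helper like this typically installs a tracing subscriber exactly once so test logs get captured; the following is a hypothetical sketch only and may differ from the real helper:

```rust
// Hypothetical sketch of a `setup()` test helper; the actual implementation
// in `crate::test_utils` is not shown in this diff and may differ.
use std::sync::Once;

static INIT: Once = Once::new();

pub(crate) fn setup() {
    INIT.call_once(|| {
        // Install a tracing subscriber once, writing to the test-captured
        // output so logs are only shown for failing tests.
        tracing_subscriber::fmt().with_test_writer().init();
    });
}
```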
13 changes: 1 addition & 12 deletions crates/handlers/src/passwords.rs
@@ -42,18 +42,6 @@ impl PasswordManager {
/// complexity score between 0 and 4. The first item in
/// the iterator will be the default hashing scheme.
///
/// # Example
///
/// ```rust
/// pub use mas_handlers::passwords::{PasswordManager, Hasher};
///
/// PasswordManager::new(3, [
/// (3, Hasher::argon2id(Some(b"a-secret-pepper".to_vec()))),
/// (2, Hasher::argon2id(None)),
/// (1, Hasher::bcrypt(Some(10), None)),
/// ]).unwrap();
/// ```
///
/// # Errors
///
/// Returns an error if the iterator was empty
@@ -432,6 +420,7 @@ mod tests {
}

#[test]
#[ignore = "this is particularly slow (20s+ seconds)"]
fn hashing_pbkdf2() {
let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42);
let password = b"hunter2";
18 changes: 0 additions & 18 deletions crates/keystore/src/lib.rs
@@ -611,24 +611,6 @@ pub struct Keystore {

impl Keystore {
/// Create a keystore out of a JSON Web Key Set
///
/// ```rust
/// use mas_keystore::{Keystore, PrivateKey, JsonWebKey, JsonWebKeySet};
/// let rsa = PrivateKey::load_pem(include_str!("../tests/keys/rsa.pkcs1.pem")).unwrap();
/// let rsa = JsonWebKey::new(rsa);
///
/// let ec_p256 = PrivateKey::load_pem(include_str!("../tests/keys/ec-p256.sec1.pem")).unwrap();
/// let ec_p256 = JsonWebKey::new(ec_p256);
///
/// let ec_p384 = PrivateKey::load_pem(include_str!("../tests/keys/ec-p384.sec1.pem")).unwrap();
/// let ec_p384 = JsonWebKey::new(ec_p384);
///
/// let ec_k256 = PrivateKey::load_pem(include_str!("../tests/keys/ec-k256.sec1.pem")).unwrap();
/// let ec_k256 = JsonWebKey::new(ec_k256);
///
/// let jwks = JsonWebKeySet::new(vec![rsa, ec_p256, ec_p384, ec_k256]);
/// let keystore = Keystore::new(jwks);
/// ```
#[must_use]
pub fn new(keys: JsonWebKeySet<PrivateKey>) -> Self {
let keys = Arc::new(keys);
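As with the token doctests, the removed `Keystore::new` example could be kept as an in-crate unit test so it still compiles as part of the main test archive. A sketch reusing the same key paths and calls as the removed example, trimmed to two keys; the test module placement is an assumption:

```rust
// Sketch: the removed doctest reworked as a unit test inside the crate,
// using the same test keys and API calls as the removed example above.
#[cfg(test)]
mod keystore_construction_tests {
    use crate::{JsonWebKey, JsonWebKeySet, Keystore, PrivateKey};

    #[test]
    fn keystore_from_pem_keys() {
        let rsa = PrivateKey::load_pem(include_str!("../tests/keys/rsa.pkcs1.pem")).unwrap();
        let ec_p256 = PrivateKey::load_pem(include_str!("../tests/keys/ec-p256.sec1.pem")).unwrap();

        let jwks = JsonWebKeySet::new(vec![JsonWebKey::new(rsa), JsonWebKey::new(ec_p256)]);
        let _keystore = Keystore::new(jwks);
    }
}
```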
26 changes: 0 additions & 26 deletions crates/oauth2-types/src/oidc.rs
@@ -925,32 +925,6 @@ impl ProviderMetadata {
///
/// To access other fields, use this type's `Deref` implementation.
///
/// # Example
///
/// ```no_run
/// use oauth2_types::{
/// oidc::VerifiedProviderMetadata,
/// requests::GrantType,
/// };
/// use url::Url;
/// # use oauth2_types::oidc::{ProviderMetadata, ProviderMetadataVerificationError};
/// # let metadata = ProviderMetadata::default();
/// # let issuer = "http://localhost/";
/// let verified_metadata = metadata.validate(&issuer)?;
///
/// // The endpoint is required during validation so this is not an `Option`.
/// let _: &Url = verified_metadata.authorization_endpoint();
///
/// // The field has a default value so this is not an `Option`.
/// let _: &[GrantType] = verified_metadata.grant_types_supported();
///
/// // Other fields can be accessed via `Deref`.
/// if let Some(registration_endpoint) = &verified_metadata.registration_endpoint {
/// println!("Registration is supported at {registration_endpoint}");
/// }
/// # Ok::<(), ProviderMetadataVerificationError>(())
/// ```
///
/// [OpenID Connect Discovery Spec 1.0]: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata
#[derive(Debug, Clone)]
pub struct VerifiedProviderMetadata {
27 changes: 0 additions & 27 deletions crates/oauth2-types/src/registration/mod.rs
@@ -769,33 +769,6 @@ impl ClientMetadata {
///
/// To access other fields, use this type's `Deref` implementation.
///
/// # Example
///
/// ```no_run
/// use oauth2_types::{
/// oidc::ApplicationType,
/// registration::VerifiedClientMetadata,
/// requests::GrantType,
/// };
/// use url::Url;
/// # use oauth2_types::registration::{ClientMetadata, ClientMetadataVerificationError};
/// # let metadata = ClientMetadata::default();
/// # let issuer = Url::parse("http://localhost").unwrap();
/// let verified_metadata = metadata.validate()?;
///
/// // The redirect URIs are required during validation so this is not an `Option`.
/// let _: &[Url] = verified_metadata.redirect_uris();
///
/// // The field has a default value so this is not an `Option`.
/// let _: ApplicationType = verified_metadata.application_type();
///
/// // Other fields can be accessed via `Deref`.
/// if let Some(jwks_uri) = &verified_metadata.jwks_uri {
/// println!("Client's JWK Set is available at {jwks_uri}");
/// }
/// # Ok::<(), ClientMetadataVerificationError>(())
/// ```
///
/// [OpenID Connect Dynamic Client Registration Spec 1.0]: https://openid.net/specs/openid-connect-registration-1_0.html#ClientMetadata
#[derive(Serialize, Debug, PartialEq, Eq, Clone)]
#[serde(into = "ClientMetadataSerdeHelper")]
14 changes: 0 additions & 14 deletions crates/storage/src/clock.rs
@@ -48,20 +48,6 @@ impl Clock for SystemClock {

/// A fake clock, which uses a fixed timestamp, and can be advanced with the
/// [`MockClock::advance`] method.
///
/// ```rust
/// use mas_storage::clock::{Clock, MockClock};
/// use chrono::Duration;
///
/// let clock = MockClock::default();
/// let t1 = clock.now();
/// let t2 = clock.now();
/// assert_eq!(t1, t2);
///
/// clock.advance(Duration::microseconds(10 * 1000 * 1000));
/// let t3 = clock.now();
/// assert_eq!(t2 + Duration::microseconds(10 * 1000 * 1000), t3);
/// ```
pub struct MockClock {
timestamp: AtomicI64,
}
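The `MockClock` behaviour that the removed doc example demonstrated (frozen until advanced) can likewise be pinned down by a plain test. A minimal sketch reusing the same calls and the same 10-second advance; the test location is illustrative:

```rust
// Sketch: the removed MockClock doc example as a test, using the same
// `Clock`/`MockClock` API shown in the removed lines above.
use chrono::Duration;
use mas_storage::clock::{Clock, MockClock};

#[test]
fn mock_clock_is_frozen_until_advanced() {
    let clock = MockClock::default();
    let t1 = clock.now();
    assert_eq!(t1, clock.now()); // repeated reads return the same instant

    clock.advance(Duration::microseconds(10 * 1000 * 1000));
    assert_eq!(t1 + Duration::microseconds(10 * 1000 * 1000), clock.now());
}
```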
21 changes: 2 additions & 19 deletions crates/storage/src/lib.rs
@@ -32,20 +32,7 @@
//!
//! The repository trait definition should look like this:
//!
//! ```rust
//! # use async_trait::async_trait;
//! # use ulid::Ulid;
//! # use rand_core::RngCore;
//! # use mas_storage::Clock;
//! #
//! # // A fake data structure, usually defined in mas-data-model
//! # struct FakeData {
//! # id: Ulid,
//! # }
//! #
//! # // A fake empty macro, to replace `mas_storage::repository_impl`
//! # macro_rules! repository_impl { ($($tok:tt)*) => {} }
//!
//! ```ignore
//! #[async_trait]
//! pub trait FakeDataRepository: Send + Sync {
//! /// The error type returned by the repository
@@ -108,11 +95,7 @@
//! Then update the [`RepositoryAccess`] trait to make the new repository
//! available:
//!
//! ```rust
//! # trait FakeDataRepository {
//! # type Error;
//! # }
//!
//! ```ignore
//! /// Access the various repositories the backend implements.
//! pub trait RepositoryAccess: Send {
//! /// The backend-specific error type used by each repository.