From 1c6c6ff8faf2deee8daf9b9c37f2165df41928b5 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 27 Jun 2025 14:56:21 +0200 Subject: [PATCH 01/10] Record the decoded ID token claims on upstream auth sessions --- .../data-model/src/upstream_oauth2/session.rs | 32 ++++++++++++ .../admin/v1/upstream_oauth_links/delete.rs | 2 +- .../handlers/src/upstream_oauth2/callback.rs | 10 ++++ crates/handlers/src/upstream_oauth2/link.rs | 6 ++- ...f701b37c9e3c2f4a332f418b9fb2625a0fe3f.json | 19 ------- ...73e7ed0770c81071cf1f17516d3a45881ae9.json} | 20 +++++--- ...b06a842cabc787279ba7d690f69b59ad3eb50.json | 20 ++++++++ ...212102_upstream_oauth2_id_token_claims.sql | 8 +++ crates/storage-pg/src/upstream_oauth2/mod.rs | 2 +- .../storage-pg/src/upstream_oauth2/session.rs | 49 +++++++++++++------ crates/storage/src/upstream_oauth2/session.rs | 6 +++ 11 files changed, 129 insertions(+), 45 deletions(-) delete mode 100644 crates/storage-pg/.sqlx/query-5f5245ace61b896f92be78ab4fef701b37c9e3c2f4a332f418b9fb2625a0fe3f.json rename crates/storage-pg/.sqlx/{query-37a124678323380357fa9d1375fd125fb35476ac3008e5adbd04a761d5edcd42.json => query-e62d043f86e7232e6e9433631f8273e7ed0770c81071cf1f17516d3a45881ae9.json} (77%) create mode 100644 crates/storage-pg/.sqlx/query-fd8f3e7ff02d4d1f465aad32edcb06a842cabc787279ba7d690f69b59ad3eb50.json create mode 100644 crates/storage-pg/migrations/20250602212102_upstream_oauth2_id_token_claims.sql diff --git a/crates/data-model/src/upstream_oauth2/session.rs b/crates/data-model/src/upstream_oauth2/session.rs index e1d0695fc..e7dad7132 100644 --- a/crates/data-model/src/upstream_oauth2/session.rs +++ b/crates/data-model/src/upstream_oauth2/session.rs @@ -19,6 +19,7 @@ pub enum UpstreamOAuthAuthorizationSessionState { completed_at: DateTime, link_id: Ulid, id_token: Option, + id_token_claims: Option, extra_callback_parameters: Option, userinfo: Option, }, @@ -27,6 +28,7 @@ pub enum UpstreamOAuthAuthorizationSessionState { consumed_at: DateTime, link_id: Ulid, id_token: Option, + id_token_claims: Option, extra_callback_parameters: Option, userinfo: Option, }, @@ -35,6 +37,7 @@ pub enum UpstreamOAuthAuthorizationSessionState { consumed_at: Option>, unlinked_at: DateTime, id_token: Option, + id_token_claims: Option, }, } @@ -52,6 +55,7 @@ impl UpstreamOAuthAuthorizationSessionState { completed_at: DateTime, link: &UpstreamOAuthLink, id_token: Option, + id_token_claims: Option, extra_callback_parameters: Option, userinfo: Option, ) -> Result { @@ -60,6 +64,7 @@ impl UpstreamOAuthAuthorizationSessionState { completed_at, link_id: link.id, id_token, + id_token_claims, extra_callback_parameters, userinfo, }), @@ -83,6 +88,7 @@ impl UpstreamOAuthAuthorizationSessionState { completed_at, link_id, id_token, + id_token_claims, extra_callback_parameters, userinfo, } => Ok(Self::Consumed { @@ -90,6 +96,7 @@ impl UpstreamOAuthAuthorizationSessionState { link_id, consumed_at, id_token, + id_token_claims, extra_callback_parameters, userinfo, }), @@ -146,6 +153,29 @@ impl UpstreamOAuthAuthorizationSessionState { } } + /// Get the ID token claims for the upstream OAuth 2.0 authorization + /// session. + /// + /// Returns `None` if the upstream OAuth 2.0 authorization session state is + /// not [`Pending`]. + /// + /// [`Pending`]: UpstreamOAuthAuthorizationSessionState::Pending + #[must_use] + pub fn id_token_claims(&self) -> Option<&serde_json::Value> { + match self { + Self::Pending => None, + Self::Completed { + id_token_claims, .. + } + | Self::Consumed { + id_token_claims, .. 
+ } + | Self::Unlinked { + id_token_claims, .. + } => id_token_claims.as_ref(), + } + } + /// Get the extra query parameters that were sent to the upstream provider. /// /// Returns `None` if the upstream OAuth 2.0 authorization session state is @@ -277,6 +307,7 @@ impl UpstreamOAuthAuthorizationSession { completed_at: DateTime, link: &UpstreamOAuthLink, id_token: Option, + id_token_claims: Option, extra_callback_parameters: Option, userinfo: Option, ) -> Result { @@ -284,6 +315,7 @@ impl UpstreamOAuthAuthorizationSession { completed_at, link, id_token, + id_token_claims, extra_callback_parameters, userinfo, )?; diff --git a/crates/handlers/src/admin/v1/upstream_oauth_links/delete.rs b/crates/handlers/src/admin/v1/upstream_oauth_links/delete.rs index 3c7905a4c..3e87109b5 100644 --- a/crates/handlers/src/admin/v1/upstream_oauth_links/delete.rs +++ b/crates/handlers/src/admin/v1/upstream_oauth_links/delete.rs @@ -126,7 +126,7 @@ mod tests { let session = repo .upstream_oauth_session() - .complete_with_link(&state.clock, session, &link, None, None, None) + .complete_with_link(&state.clock, session, &link, None, None, None, None) .await .unwrap(); diff --git a/crates/handlers/src/upstream_oauth2/callback.rs b/crates/handlers/src/upstream_oauth2/callback.rs index e6b207490..e368c9b72 100644 --- a/crates/handlers/src/upstream_oauth2/callback.rs +++ b/crates/handlers/src/upstream_oauth2/callback.rs @@ -312,6 +312,7 @@ pub(crate) async fn handler( .await?; let mut jwks = None; + let mut id_token_claims = None; let mut context = AttributeMappingContext::new(); if let Some(id_token) = token_response.id_token.as_ref() { @@ -337,6 +338,14 @@ pub(crate) async fn handler( let (_headers, mut claims) = id_token.into_parts(); + // Save a copy of the claims for later; the claims extract methods + // remove them from the map, and we want to store the original claims. + // We anyway need this to be a serde_json::Value + id_token_claims = Some( + serde_json::to_value(&claims) + .expect("serializing a HashMap into a Value should never fail"), + ); + // Access token hash must match. 
mas_jose::claims::AT_HASH .extract_optional_with_options( @@ -472,6 +481,7 @@ pub(crate) async fn handler( session, &link, token_response.id_token, + id_token_claims, params.extra_callback_parameters, userinfo, ) diff --git a/crates/handlers/src/upstream_oauth2/link.rs b/crates/handlers/src/upstream_oauth2/link.rs index 610fdf3e0..feb9e9074 100644 --- a/crates/handlers/src/upstream_oauth2/link.rs +++ b/crates/handlers/src/upstream_oauth2/link.rs @@ -934,7 +934,7 @@ mod tests { ..UpstreamOAuthProviderClaimsImports::default() }; - let id_token = serde_json::json!({ + let id_token_claims = serde_json::json!({ "preferred_username": "john", "email": "john@example.com", "email_verified": true, @@ -953,7 +953,8 @@ mod tests { .signing_key_for_alg(&JsonWebSignatureAlg::Rs256) .unwrap(); let header = JsonWebSignatureHeader::new(JsonWebSignatureAlg::Rs256); - let id_token = Jwt::sign_with_rng(&mut rng, header, id_token, &signer).unwrap(); + let id_token = + Jwt::sign_with_rng(&mut rng, header, id_token_claims.clone(), &signer).unwrap(); // Provision a provider and a link let mut repo = state.repository().await.unwrap(); @@ -1022,6 +1023,7 @@ mod tests { session, &link, Some(id_token.into_string()), + Some(id_token_claims), None, None, ) diff --git a/crates/storage-pg/.sqlx/query-5f5245ace61b896f92be78ab4fef701b37c9e3c2f4a332f418b9fb2625a0fe3f.json b/crates/storage-pg/.sqlx/query-5f5245ace61b896f92be78ab4fef701b37c9e3c2f4a332f418b9fb2625a0fe3f.json deleted file mode 100644 index c33da04d8..000000000 --- a/crates/storage-pg/.sqlx/query-5f5245ace61b896f92be78ab4fef701b37c9e3c2f4a332f418b9fb2625a0fe3f.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE upstream_oauth_authorization_sessions\n SET upstream_oauth_link_id = $1,\n completed_at = $2,\n id_token = $3,\n extra_callback_parameters = $4,\n userinfo = $5\n WHERE upstream_oauth_authorization_session_id = $6\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Uuid", - "Timestamptz", - "Text", - "Jsonb", - "Jsonb", - "Uuid" - ] - }, - "nullable": [] - }, - "hash": "5f5245ace61b896f92be78ab4fef701b37c9e3c2f4a332f418b9fb2625a0fe3f" -} diff --git a/crates/storage-pg/.sqlx/query-37a124678323380357fa9d1375fd125fb35476ac3008e5adbd04a761d5edcd42.json b/crates/storage-pg/.sqlx/query-e62d043f86e7232e6e9433631f8273e7ed0770c81071cf1f17516d3a45881ae9.json similarity index 77% rename from crates/storage-pg/.sqlx/query-37a124678323380357fa9d1375fd125fb35476ac3008e5adbd04a761d5edcd42.json rename to crates/storage-pg/.sqlx/query-e62d043f86e7232e6e9433631f8273e7ed0770c81071cf1f17516d3a45881ae9.json index 0e28ac022..c3c2e2507 100644 --- a/crates/storage-pg/.sqlx/query-37a124678323380357fa9d1375fd125fb35476ac3008e5adbd04a761d5edcd42.json +++ b/crates/storage-pg/.sqlx/query-e62d043f86e7232e6e9433631f8273e7ed0770c81071cf1f17516d3a45881ae9.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n upstream_oauth_authorization_session_id,\n upstream_oauth_provider_id,\n upstream_oauth_link_id,\n state,\n code_challenge_verifier,\n nonce,\n id_token,\n extra_callback_parameters,\n userinfo,\n created_at,\n completed_at,\n consumed_at,\n unlinked_at\n FROM upstream_oauth_authorization_sessions\n WHERE upstream_oauth_authorization_session_id = $1\n ", + "query": "\n SELECT\n upstream_oauth_authorization_session_id,\n upstream_oauth_provider_id,\n upstream_oauth_link_id,\n state,\n code_challenge_verifier,\n nonce,\n id_token,\n id_token_claims,\n extra_callback_parameters,\n userinfo,\n created_at,\n 
completed_at,\n consumed_at,\n unlinked_at\n FROM upstream_oauth_authorization_sessions\n WHERE upstream_oauth_authorization_session_id = $1\n ", "describe": { "columns": [ { @@ -40,31 +40,36 @@ }, { "ordinal": 7, - "name": "extra_callback_parameters", + "name": "id_token_claims", "type_info": "Jsonb" }, { "ordinal": 8, - "name": "userinfo", + "name": "extra_callback_parameters", "type_info": "Jsonb" }, { "ordinal": 9, + "name": "userinfo", + "type_info": "Jsonb" + }, + { + "ordinal": 10, "name": "created_at", "type_info": "Timestamptz" }, { - "ordinal": 10, + "ordinal": 11, "name": "completed_at", "type_info": "Timestamptz" }, { - "ordinal": 11, + "ordinal": 12, "name": "consumed_at", "type_info": "Timestamptz" }, { - "ordinal": 12, + "ordinal": 13, "name": "unlinked_at", "type_info": "Timestamptz" } @@ -84,11 +89,12 @@ true, true, true, + true, false, true, true, true ] }, - "hash": "37a124678323380357fa9d1375fd125fb35476ac3008e5adbd04a761d5edcd42" + "hash": "e62d043f86e7232e6e9433631f8273e7ed0770c81071cf1f17516d3a45881ae9" } diff --git a/crates/storage-pg/.sqlx/query-fd8f3e7ff02d4d1f465aad32edcb06a842cabc787279ba7d690f69b59ad3eb50.json b/crates/storage-pg/.sqlx/query-fd8f3e7ff02d4d1f465aad32edcb06a842cabc787279ba7d690f69b59ad3eb50.json new file mode 100644 index 000000000..072e6f57b --- /dev/null +++ b/crates/storage-pg/.sqlx/query-fd8f3e7ff02d4d1f465aad32edcb06a842cabc787279ba7d690f69b59ad3eb50.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE upstream_oauth_authorization_sessions\n SET upstream_oauth_link_id = $1\n , completed_at = $2\n , id_token = $3\n , id_token_claims = $4\n , extra_callback_parameters = $5\n , userinfo = $6\n WHERE upstream_oauth_authorization_session_id = $7\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Uuid", + "Timestamptz", + "Text", + "Jsonb", + "Jsonb", + "Jsonb", + "Uuid" + ] + }, + "nullable": [] + }, + "hash": "fd8f3e7ff02d4d1f465aad32edcb06a842cabc787279ba7d690f69b59ad3eb50" +} diff --git a/crates/storage-pg/migrations/20250602212102_upstream_oauth2_id_token_claims.sql b/crates/storage-pg/migrations/20250602212102_upstream_oauth2_id_token_claims.sql new file mode 100644 index 000000000..6cb78a4c2 --- /dev/null +++ b/crates/storage-pg/migrations/20250602212102_upstream_oauth2_id_token_claims.sql @@ -0,0 +1,8 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. 
+ +-- This is the decoded claims from the ID token stored as JSONB +ALTER TABLE upstream_oauth_authorization_sessions + ADD COLUMN id_token_claims JSONB; diff --git a/crates/storage-pg/src/upstream_oauth2/mod.rs b/crates/storage-pg/src/upstream_oauth2/mod.rs index 23f73481c..b77e0631d 100644 --- a/crates/storage-pg/src/upstream_oauth2/mod.rs +++ b/crates/storage-pg/src/upstream_oauth2/mod.rs @@ -152,7 +152,7 @@ mod tests { let session = repo .upstream_oauth_session() - .complete_with_link(&clock, session, &link, None, None, None) + .complete_with_link(&clock, session, &link, None, None, None, None) .await .unwrap(); // Reload the session diff --git a/crates/storage-pg/src/upstream_oauth2/session.rs b/crates/storage-pg/src/upstream_oauth2/session.rs index e1dc6deb1..7946d63f0 100644 --- a/crates/storage-pg/src/upstream_oauth2/session.rs +++ b/crates/storage-pg/src/upstream_oauth2/session.rs @@ -40,6 +40,7 @@ struct SessionLookup { code_challenge_verifier: Option, nonce: Option, id_token: Option, + id_token_claims: Option, userinfo: Option, created_at: DateTime, completed_at: Option>, @@ -56,18 +57,20 @@ impl TryFrom for UpstreamOAuthAuthorizationSession { let state = match ( value.upstream_oauth_link_id, value.id_token, + value.id_token_claims, value.extra_callback_parameters, value.userinfo, value.completed_at, value.consumed_at, value.unlinked_at, ) { - (None, None, None, None, None, None, None) => { + (None, None, None, None, None, None, None, None) => { UpstreamOAuthAuthorizationSessionState::Pending } ( Some(link_id), id_token, + id_token_claims, extra_callback_parameters, userinfo, Some(completed_at), @@ -77,12 +80,14 @@ impl TryFrom for UpstreamOAuthAuthorizationSession { completed_at, link_id: link_id.into(), id_token, + id_token_claims, extra_callback_parameters, userinfo, }, ( Some(link_id), id_token, + id_token_claims, extra_callback_parameters, userinfo, Some(completed_at), @@ -92,18 +97,27 @@ impl TryFrom for UpstreamOAuthAuthorizationSession { completed_at, link_id: link_id.into(), id_token, + id_token_claims, extra_callback_parameters, userinfo, consumed_at, }, - (_, id_token, _, _, Some(completed_at), consumed_at, Some(unlinked_at)) => { - UpstreamOAuthAuthorizationSessionState::Unlinked { - completed_at, - id_token, - consumed_at, - unlinked_at, - } - } + ( + _, + id_token, + id_token_claims, + _, + _, + Some(completed_at), + consumed_at, + Some(unlinked_at), + ) => UpstreamOAuthAuthorizationSessionState::Unlinked { + completed_at, + id_token, + id_token_claims, + consumed_at, + unlinked_at, + }, _ => { return Err(DatabaseInconsistencyError::on( "upstream_oauth_authorization_sessions", @@ -152,6 +166,7 @@ impl UpstreamOAuthSessionRepository for PgUpstreamOAuthSessionRepository<'_> { code_challenge_verifier, nonce, id_token, + id_token_claims, extra_callback_parameters, userinfo, created_at, @@ -253,6 +268,7 @@ impl UpstreamOAuthSessionRepository for PgUpstreamOAuthSessionRepository<'_> { upstream_oauth_authorization_session: UpstreamOAuthAuthorizationSession, upstream_oauth_link: &UpstreamOAuthLink, id_token: Option, + id_token_claims: Option, extra_callback_parameters: Option, userinfo: Option, ) -> Result { @@ -261,16 +277,18 @@ impl UpstreamOAuthSessionRepository for PgUpstreamOAuthSessionRepository<'_> { sqlx::query!( r#" UPDATE upstream_oauth_authorization_sessions - SET upstream_oauth_link_id = $1, - completed_at = $2, - id_token = $3, - extra_callback_parameters = $4, - userinfo = $5 - WHERE upstream_oauth_authorization_session_id = $6 + SET upstream_oauth_link_id = 
$1 + , completed_at = $2 + , id_token = $3 + , id_token_claims = $4 + , extra_callback_parameters = $5 + , userinfo = $6 + WHERE upstream_oauth_authorization_session_id = $7 "#, Uuid::from(upstream_oauth_link.id), completed_at, id_token, + id_token_claims, extra_callback_parameters, userinfo, Uuid::from(upstream_oauth_authorization_session.id), @@ -284,6 +302,7 @@ impl UpstreamOAuthSessionRepository for PgUpstreamOAuthSessionRepository<'_> { completed_at, upstream_oauth_link, id_token, + id_token_claims, extra_callback_parameters, userinfo, ) diff --git a/crates/storage/src/upstream_oauth2/session.rs b/crates/storage/src/upstream_oauth2/session.rs index 6aadb673f..273370794 100644 --- a/crates/storage/src/upstream_oauth2/session.rs +++ b/crates/storage/src/upstream_oauth2/session.rs @@ -74,18 +74,23 @@ pub trait UpstreamOAuthSessionRepository: Send + Sync { /// * `upstream_oauth_link`: the link to associate with the session /// * `id_token`: the ID token returned by the upstream OAuth provider, if /// present + /// * `id_token_claims`: the claims contained in the ID token, if present /// * `extra_callback_parameters`: the extra query parameters returned in /// the callback, if any + /// * `userinfo`: the user info returned by the upstream OAuth provider, if + /// requested /// /// # Errors /// /// Returns [`Self::Error`] if the underlying repository fails + #[expect(clippy::too_many_arguments)] async fn complete_with_link( &mut self, clock: &dyn Clock, upstream_oauth_authorization_session: UpstreamOAuthAuthorizationSession, upstream_oauth_link: &UpstreamOAuthLink, id_token: Option, + id_token_claims: Option, extra_callback_parameters: Option, userinfo: Option, ) -> Result; @@ -131,6 +136,7 @@ repository_impl!(UpstreamOAuthSessionRepository: upstream_oauth_authorization_session: UpstreamOAuthAuthorizationSession, upstream_oauth_link: &UpstreamOAuthLink, id_token: Option, + id_token_claims: Option, extra_callback_parameters: Option, userinfo: Option, ) -> Result; From db65a702a71a4db79b9d9fd7f2d7e917dfdbd9f0 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 27 Jun 2025 15:43:37 +0200 Subject: [PATCH 02/10] storage: list and count methods for upstream oauth sessions --- crates/storage-pg/src/iden.rs | 23 ++ crates/storage-pg/src/upstream_oauth2/mod.rs | 157 +++++++++++++- .../storage-pg/src/upstream_oauth2/session.rs | 198 +++++++++++++++++- crates/storage/src/upstream_oauth2/mod.rs | 2 +- crates/storage/src/upstream_oauth2/session.rs | 69 +++++- 5 files changed, 444 insertions(+), 5 deletions(-) diff --git a/crates/storage-pg/src/iden.rs b/crates/storage-pg/src/iden.rs index d401ca144..f7342adf5 100644 --- a/crates/storage-pg/src/iden.rs +++ b/crates/storage-pg/src/iden.rs @@ -140,6 +140,29 @@ pub enum UpstreamOAuthLinks { CreatedAt, } +#[derive(sea_query::Iden)] +#[iden = "upstream_oauth_authorization_sessions"] +pub enum UpstreamOAuthAuthorizationSessions { + Table, + #[iden = "upstream_oauth_authorization_session_id"] + UpstreamOAuthAuthorizationSessionId, + #[iden = "upstream_oauth_provider_id"] + UpstreamOAuthProviderId, + #[iden = "upstream_oauth_link_id"] + UpstreamOAuthLinkId, + State, + CodeChallengeVerifier, + Nonce, + IdToken, + IdTokenClaims, + ExtraCallbackParameters, + Userinfo, + CreatedAt, + CompletedAt, + ConsumedAt, + UnlinkedAt, +} + #[derive(sea_query::Iden)] pub enum UserRegistrationTokens { Table, diff --git a/crates/storage-pg/src/upstream_oauth2/mod.rs b/crates/storage-pg/src/upstream_oauth2/mod.rs index b77e0631d..2158d872c 100644 --- 
a/crates/storage-pg/src/upstream_oauth2/mod.rs +++ b/crates/storage-pg/src/upstream_oauth2/mod.rs @@ -29,7 +29,7 @@ mod tests { upstream_oauth2::{ UpstreamOAuthLinkFilter, UpstreamOAuthLinkRepository, UpstreamOAuthProviderFilter, UpstreamOAuthProviderParams, UpstreamOAuthProviderRepository, - UpstreamOAuthSessionRepository, + UpstreamOAuthSessionFilter, UpstreamOAuthSessionRepository, }, user::UserRepository, }; @@ -262,6 +262,29 @@ mod tests { 1 ); + // Test listing and counting sessions + let session_filter = UpstreamOAuthSessionFilter::new().for_provider(&provider); + + // Count the sessions for the provider + let session_count = repo + .upstream_oauth_session() + .count(session_filter) + .await + .unwrap(); + assert_eq!(session_count, 1); + + // List the sessions for the provider + let session_page = repo + .upstream_oauth_session() + .list(session_filter, Pagination::first(10)) + .await + .unwrap(); + + assert_eq!(session_page.edges.len(), 1); + assert_eq!(session_page.edges[0].id, session.id); + assert!(!session_page.has_next_page); + assert!(!session_page.has_previous_page); + // Try deleting the provider repo.upstream_oauth_provider() .delete(provider) @@ -423,4 +446,136 @@ mod tests { .is_empty() ); } + + /// Test that the pagination works as expected in the upstream OAuth + /// session repository + #[sqlx::test(migrator = "crate::MIGRATOR")] + async fn test_session_repository_pagination(pool: PgPool) { + let scope = Scope::from_iter([OPENID]); + + let mut rng = rand_chacha::ChaChaRng::seed_from_u64(42); + let clock = MockClock::default(); + let mut repo = PgRepository::from_pool(&pool).await.unwrap(); + + // Create a provider + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &clock, + UpstreamOAuthProviderParams { + issuer: Some("https://example.com/".to_owned()), + human_name: None, + brand_name: None, + scope, + token_endpoint_auth_method: UpstreamOAuthProviderTokenAuthMethod::None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + token_endpoint_signing_alg: None, + client_id: "client-id".to_owned(), + encrypted_client_secret: None, + claims_imports: UpstreamOAuthProviderClaimsImports::default(), + token_endpoint_override: None, + authorization_endpoint_override: None, + userinfo_endpoint_override: None, + jwks_uri_override: None, + discovery_mode: mas_data_model::UpstreamOAuthProviderDiscoveryMode::Oidc, + pkce_mode: mas_data_model::UpstreamOAuthProviderPkceMode::Auto, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + ui_order: 0, + }, + ) + .await + .unwrap(); + + let filter = UpstreamOAuthSessionFilter::new().for_provider(&provider); + + // Count the number of sessions before we start + assert_eq!( + repo.upstream_oauth_session().count(filter).await.unwrap(), + 0 + ); + + let mut ids = Vec::with_capacity(20); + // Create 20 sessions + for idx in 0..20 { + let state = format!("state-{idx}"); + let session = repo + .upstream_oauth_session() + .add(&mut rng, &clock, &provider, state, None, None) + .await + .unwrap(); + ids.push(session.id); + clock.advance(Duration::microseconds(10 * 1000 * 1000)); + } + + // Now we have 20 sessions + assert_eq!( + repo.upstream_oauth_session().count(filter).await.unwrap(), + 20 + ); + + // Lookup the first 10 items + let page = repo + .upstream_oauth_session() + .list(filter, Pagination::first(10)) + .await + .unwrap(); + + // It returned the first 10 items + assert!(page.has_next_page); + 
let edge_ids: Vec<_> = page.edges.iter().map(|s| s.id).collect(); + assert_eq!(&edge_ids, &ids[..10]); + + // Lookup the next 10 items + let page = repo + .upstream_oauth_session() + .list(filter, Pagination::first(10).after(ids[9])) + .await + .unwrap(); + + // It returned the next 10 items + assert!(!page.has_next_page); + let edge_ids: Vec<_> = page.edges.iter().map(|s| s.id).collect(); + assert_eq!(&edge_ids, &ids[10..]); + + // Lookup the last 10 items + let page = repo + .upstream_oauth_session() + .list(filter, Pagination::last(10)) + .await + .unwrap(); + + // It returned the last 10 items + assert!(page.has_previous_page); + let edge_ids: Vec<_> = page.edges.iter().map(|s| s.id).collect(); + assert_eq!(&edge_ids, &ids[10..]); + + // Lookup the previous 10 items + let page = repo + .upstream_oauth_session() + .list(filter, Pagination::last(10).before(ids[10])) + .await + .unwrap(); + + // It returned the previous 10 items + assert!(!page.has_previous_page); + let edge_ids: Vec<_> = page.edges.iter().map(|s| s.id).collect(); + assert_eq!(&edge_ids, &ids[..10]); + + // Lookup 5 items between two IDs + let page = repo + .upstream_oauth_session() + .list(filter, Pagination::first(10).after(ids[5]).before(ids[11])) + .await + .unwrap(); + + // It returned the items in between + assert!(!page.has_next_page); + let edge_ids: Vec<_> = page.edges.iter().map(|s| s.id).collect(); + assert_eq!(&edge_ids, &ids[6..11]); + } } diff --git a/crates/storage-pg/src/upstream_oauth2/session.rs b/crates/storage-pg/src/upstream_oauth2/session.rs index 7946d63f0..a595f600d 100644 --- a/crates/storage-pg/src/upstream_oauth2/session.rs +++ b/crates/storage-pg/src/upstream_oauth2/session.rs @@ -10,13 +10,36 @@ use mas_data_model::{ UpstreamOAuthAuthorizationSession, UpstreamOAuthAuthorizationSessionState, UpstreamOAuthLink, UpstreamOAuthProvider, }; -use mas_storage::{Clock, upstream_oauth2::UpstreamOAuthSessionRepository}; +use mas_storage::{ + Clock, Page, Pagination, + upstream_oauth2::{UpstreamOAuthSessionFilter, UpstreamOAuthSessionRepository}, +}; use rand::RngCore; +use sea_query::{Expr, PostgresQueryBuilder, Query, enum_def}; +use sea_query_binder::SqlxBinder; use sqlx::PgConnection; use ulid::Ulid; use uuid::Uuid; -use crate::{DatabaseError, DatabaseInconsistencyError, tracing::ExecuteExt}; +use crate::{ + DatabaseError, DatabaseInconsistencyError, + filter::{Filter, StatementExt}, + iden::UpstreamOAuthAuthorizationSessions, + pagination::QueryBuilderExt, + tracing::ExecuteExt, +}; + +impl Filter for UpstreamOAuthSessionFilter<'_> { + fn generate_condition(&self, _has_joins: bool) -> impl sea_query::IntoCondition { + sea_query::Condition::all().add_option(self.provider().map(|provider| { + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UpstreamOAuthProviderId, + )) + .eq(Uuid::from(provider.id)) + })) + } +} /// An implementation of [`UpstreamOAuthSessionRepository`] for a PostgreSQL /// connection @@ -32,6 +55,8 @@ impl<'c> PgUpstreamOAuthSessionRepository<'c> { } } +#[derive(sqlx::FromRow)] +#[enum_def] struct SessionLookup { upstream_oauth_authorization_session_id: Uuid, upstream_oauth_provider_id: Uuid, @@ -346,4 +371,173 @@ impl UpstreamOAuthSessionRepository for PgUpstreamOAuthSessionRepository<'_> { Ok(upstream_oauth_authorization_session) } + + #[tracing::instrument( + name = "db.upstream_oauth_authorization_session.list", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn list( + &mut self, + filter: 
UpstreamOAuthSessionFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error> { + let (sql, arguments) = Query::select() + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UpstreamOAuthAuthorizationSessionId, + )), + SessionLookupIden::UpstreamOauthAuthorizationSessionId, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UpstreamOAuthProviderId, + )), + SessionLookupIden::UpstreamOauthProviderId, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UpstreamOAuthLinkId, + )), + SessionLookupIden::UpstreamOauthLinkId, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::State, + )), + SessionLookupIden::State, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::CodeChallengeVerifier, + )), + SessionLookupIden::CodeChallengeVerifier, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::Nonce, + )), + SessionLookupIden::Nonce, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::IdToken, + )), + SessionLookupIden::IdToken, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::IdTokenClaims, + )), + SessionLookupIden::IdTokenClaims, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::ExtraCallbackParameters, + )), + SessionLookupIden::ExtraCallbackParameters, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::Userinfo, + )), + SessionLookupIden::Userinfo, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::CreatedAt, + )), + SessionLookupIden::CreatedAt, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::CompletedAt, + )), + SessionLookupIden::CompletedAt, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::ConsumedAt, + )), + SessionLookupIden::ConsumedAt, + ) + .expr_as( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UnlinkedAt, + )), + SessionLookupIden::UnlinkedAt, + ) + .from(UpstreamOAuthAuthorizationSessions::Table) + .apply_filter(filter) + .generate_pagination( + ( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UpstreamOAuthAuthorizationSessionId, + ), + pagination, + ) + .build_sqlx(PostgresQueryBuilder); + + let edges: Vec = sqlx::query_as_with(&sql, arguments) + .traced() + .fetch_all(&mut *self.conn) + .await?; + + let page = pagination + .process(edges) + .try_map(UpstreamOAuthAuthorizationSession::try_from)?; + + Ok(page) + } + + #[tracing::instrument( + name = "db.upstream_oauth_authorization_session.count", + skip_all, + fields( + db.query.text, + ), + err, + )] + async fn count( + &mut self, + filter: UpstreamOAuthSessionFilter<'_>, + ) -> Result { + let (sql, arguments) = Query::select() + .expr( + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UpstreamOAuthAuthorizationSessionId, + )) + .count(), + ) + .from(UpstreamOAuthAuthorizationSessions::Table) + 
.apply_filter(filter) + .build_sqlx(PostgresQueryBuilder); + + let count: i64 = sqlx::query_scalar_with(&sql, arguments) + .traced() + .fetch_one(&mut *self.conn) + .await?; + + count + .try_into() + .map_err(DatabaseError::to_invalid_operation) + } } diff --git a/crates/storage/src/upstream_oauth2/mod.rs b/crates/storage/src/upstream_oauth2/mod.rs index 3fd6666b4..39fefffe8 100644 --- a/crates/storage/src/upstream_oauth2/mod.rs +++ b/crates/storage/src/upstream_oauth2/mod.rs @@ -16,5 +16,5 @@ pub use self::{ provider::{ UpstreamOAuthProviderFilter, UpstreamOAuthProviderParams, UpstreamOAuthProviderRepository, }, - session::UpstreamOAuthSessionRepository, + session::{UpstreamOAuthSessionFilter, UpstreamOAuthSessionRepository}, }; diff --git a/crates/storage/src/upstream_oauth2/session.rs b/crates/storage/src/upstream_oauth2/session.rs index 273370794..66fcb1ba8 100644 --- a/crates/storage/src/upstream_oauth2/session.rs +++ b/crates/storage/src/upstream_oauth2/session.rs @@ -9,7 +9,36 @@ use mas_data_model::{UpstreamOAuthAuthorizationSession, UpstreamOAuthLink, Upstr use rand_core::RngCore; use ulid::Ulid; -use crate::{Clock, repository_impl}; +use crate::{Clock, Pagination, pagination::Page, repository_impl}; + +/// Filter parameters for listing upstream OAuth sessions +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +pub struct UpstreamOAuthSessionFilter<'a> { + provider: Option<&'a UpstreamOAuthProvider>, +} + +impl<'a> UpstreamOAuthSessionFilter<'a> { + /// Create a new [`UpstreamOAuthSessionFilter`] with default values + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Set the upstream OAuth provider for which to list sessions + #[must_use] + pub fn for_provider(mut self, provider: &'a UpstreamOAuthProvider) -> Self { + self.provider = Some(provider); + self + } + + /// Get the upstream OAuth provider filter + /// + /// Returns [`None`] if no filter was set + #[must_use] + pub fn provider(&self) -> Option<&UpstreamOAuthProvider> { + self.provider + } +} /// An [`UpstreamOAuthSessionRepository`] helps interacting with /// [`UpstreamOAuthAuthorizationSession`] saved in the storage backend @@ -112,6 +141,36 @@ pub trait UpstreamOAuthSessionRepository: Send + Sync { clock: &dyn Clock, upstream_oauth_authorization_session: UpstreamOAuthAuthorizationSession, ) -> Result; + + /// List [`UpstreamOAuthAuthorizationSession`] with the given filter and + /// pagination + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// * `pagination`: The pagination parameters + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn list( + &mut self, + filter: UpstreamOAuthSessionFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + /// Count the number of [`UpstreamOAuthAuthorizationSession`] with the given + /// filter + /// + /// # Parameters + /// + /// * `filter`: The filter to apply + /// + /// # Errors + /// + /// Returns [`Self::Error`] if the underlying repository fails + async fn count(&mut self, filter: UpstreamOAuthSessionFilter<'_>) + -> Result; } repository_impl!(UpstreamOAuthSessionRepository: @@ -146,4 +205,12 @@ repository_impl!(UpstreamOAuthSessionRepository: clock: &dyn Clock, upstream_oauth_authorization_session: UpstreamOAuthAuthorizationSession, ) -> Result; + + async fn list( + &mut self, + filter: UpstreamOAuthSessionFilter<'_>, + pagination: Pagination, + ) -> Result, Self::Error>; + + async fn count(&mut self, filter: UpstreamOAuthSessionFilter<'_>) -> Result; ); From 
835b1b5f58508fd7a939c683aa51bd0a2d6c11ee Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 27 Jun 2025 15:52:35 +0200 Subject: [PATCH 03/10] Allow filtering upstream sessions by sub and sid claims --- ...m_oauth2_id_token_claims_sub_sid_index.sql | 15 ++++ ...m_oauth2_id_token_claims_sid_sub_index.sql | 15 ++++ crates/storage-pg/src/upstream_oauth2/mod.rs | 68 ++++++++++++++++++- .../storage-pg/src/upstream_oauth2/session.rs | 33 ++++++--- crates/storage/src/upstream_oauth2/session.rs | 32 +++++++++ 5 files changed, 154 insertions(+), 9 deletions(-) create mode 100644 crates/storage-pg/migrations/20250602212103_upstream_oauth2_id_token_claims_sub_sid_index.sql create mode 100644 crates/storage-pg/migrations/20250602212104_upstream_oauth2_id_token_claims_sid_sub_index.sql diff --git a/crates/storage-pg/migrations/20250602212103_upstream_oauth2_id_token_claims_sub_sid_index.sql b/crates/storage-pg/migrations/20250602212103_upstream_oauth2_id_token_claims_sub_sid_index.sql new file mode 100644 index 000000000..327022168 --- /dev/null +++ b/crates/storage-pg/migrations/20250602212103_upstream_oauth2_id_token_claims_sub_sid_index.sql @@ -0,0 +1,15 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- We'll be requesting authorization sessions by provider, sub and sid, so we'll +-- need to index those columns +CREATE INDEX CONCURRENTLY IF NOT EXISTS + upstream_oauth_authorization_sessions_sub_sid_idx + ON upstream_oauth_authorization_sessions ( + upstream_oauth_provider_id, + (id_token_claims->>'sub'), + (id_token_claims->>'sid') + ); diff --git a/crates/storage-pg/migrations/20250602212104_upstream_oauth2_id_token_claims_sid_sub_index.sql b/crates/storage-pg/migrations/20250602212104_upstream_oauth2_id_token_claims_sid_sub_index.sql new file mode 100644 index 000000000..097c3da32 --- /dev/null +++ b/crates/storage-pg/migrations/20250602212104_upstream_oauth2_id_token_claims_sid_sub_index.sql @@ -0,0 +1,15 @@ +-- no-transaction +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. 
+ +-- We'll be requesting authorization sessions by provider, sub and sid, so we'll +-- need to index those columns +CREATE INDEX CONCURRENTLY IF NOT EXISTS + upstream_oauth_authorization_sessions_sid_sub_idx + ON upstream_oauth_authorization_sessions ( + upstream_oauth_provider_id, + (id_token_claims->>'sid'), + (id_token_claims->>'sub') + ); diff --git a/crates/storage-pg/src/upstream_oauth2/mod.rs b/crates/storage-pg/src/upstream_oauth2/mod.rs index 2158d872c..84a52defd 100644 --- a/crates/storage-pg/src/upstream_oauth2/mod.rs +++ b/crates/storage-pg/src/upstream_oauth2/mod.rs @@ -499,15 +499,45 @@ mod tests { 0 ); + let mut links = Vec::with_capacity(3); + for subject in ["alice", "bob", "charlie"] { + let link = repo + .upstream_oauth_link() + .add(&mut rng, &clock, &provider, subject.to_owned(), None) + .await + .unwrap(); + links.push(link); + } + let mut ids = Vec::with_capacity(20); + let sids = ["one", "two"].into_iter().cycle(); // Create 20 sessions - for idx in 0..20 { + for (idx, (link, sid)) in links.iter().cycle().zip(sids).enumerate().take(20) { let state = format!("state-{idx}"); let session = repo .upstream_oauth_session() .add(&mut rng, &clock, &provider, state, None, None) .await .unwrap(); + let id_token_claims = serde_json::json!({ + "sub": link.subject, + "sid": sid, + "aud": provider.client_id, + "iss": "https://example.com/", + }); + let session = repo + .upstream_oauth_session() + .complete_with_link( + &clock, + session, + link, + None, + Some(id_token_claims), + None, + None, + ) + .await + .unwrap(); ids.push(session.id); clock.advance(Duration::microseconds(10 * 1000 * 1000)); } @@ -577,5 +607,41 @@ mod tests { assert!(!page.has_next_page); let edge_ids: Vec<_> = page.edges.iter().map(|s| s.id).collect(); assert_eq!(&edge_ids, &ids[6..11]); + + // Check the sub/sid filters + assert_eq!( + repo.upstream_oauth_session() + .count(filter.with_sub_claim("alice").with_sid_claim("one")) + .await + .unwrap(), + 4 + ); + assert_eq!( + repo.upstream_oauth_session() + .count(filter.with_sub_claim("bob").with_sid_claim("two")) + .await + .unwrap(), + 4 + ); + + let page = repo + .upstream_oauth_session() + .list( + filter.with_sub_claim("alice").with_sid_claim("one"), + Pagination::first(10), + ) + .await + .unwrap(); + assert_eq!(page.edges.len(), 4); + for edge in page.edges { + assert_eq!( + edge.id_token_claims().unwrap().get("sub").unwrap().as_str(), + Some("alice") + ); + assert_eq!( + edge.id_token_claims().unwrap().get("sid").unwrap().as_str(), + Some("one") + ); + } } } diff --git a/crates/storage-pg/src/upstream_oauth2/session.rs b/crates/storage-pg/src/upstream_oauth2/session.rs index a595f600d..8cc04eeb6 100644 --- a/crates/storage-pg/src/upstream_oauth2/session.rs +++ b/crates/storage-pg/src/upstream_oauth2/session.rs @@ -15,7 +15,7 @@ use mas_storage::{ upstream_oauth2::{UpstreamOAuthSessionFilter, UpstreamOAuthSessionRepository}, }; use rand::RngCore; -use sea_query::{Expr, PostgresQueryBuilder, Query, enum_def}; +use sea_query::{Expr, PostgresQueryBuilder, Query, enum_def, extension::postgres::PgExpr}; use sea_query_binder::SqlxBinder; use sqlx::PgConnection; use ulid::Ulid; @@ -31,13 +31,30 @@ use crate::{ impl Filter for UpstreamOAuthSessionFilter<'_> { fn generate_condition(&self, _has_joins: bool) -> impl sea_query::IntoCondition { - sea_query::Condition::all().add_option(self.provider().map(|provider| { - Expr::col(( - UpstreamOAuthAuthorizationSessions::Table, - UpstreamOAuthAuthorizationSessions::UpstreamOAuthProviderId, - )) - 
.eq(Uuid::from(provider.id)) - })) + sea_query::Condition::all() + .add_option(self.provider().map(|provider| { + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UpstreamOAuthProviderId, + )) + .eq(Uuid::from(provider.id)) + })) + .add_option(self.sub_claim().map(|sub| { + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::IdTokenClaims, + )) + .cast_json_field("sub") + .eq(sub) + })) + .add_option(self.sid_claim().map(|sid| { + Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::IdTokenClaims, + )) + .cast_json_field("sid") + .eq(sid) + })) } } diff --git a/crates/storage/src/upstream_oauth2/session.rs b/crates/storage/src/upstream_oauth2/session.rs index 66fcb1ba8..d6505285b 100644 --- a/crates/storage/src/upstream_oauth2/session.rs +++ b/crates/storage/src/upstream_oauth2/session.rs @@ -15,6 +15,8 @@ use crate::{Clock, Pagination, pagination::Page, repository_impl}; #[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] pub struct UpstreamOAuthSessionFilter<'a> { provider: Option<&'a UpstreamOAuthProvider>, + sub_claim: Option<&'a str>, + sid_claim: Option<&'a str>, } impl<'a> UpstreamOAuthSessionFilter<'a> { @@ -38,6 +40,36 @@ impl<'a> UpstreamOAuthSessionFilter<'a> { pub fn provider(&self) -> Option<&UpstreamOAuthProvider> { self.provider } + + /// Set the `sub` claim to filter by + #[must_use] + pub fn with_sub_claim(mut self, sub_claim: &'a str) -> Self { + self.sub_claim = Some(sub_claim); + self + } + + /// Get the `sub` claim filter + /// + /// Returns [`None`] if no filter was set + #[must_use] + pub fn sub_claim(&self) -> Option<&str> { + self.sub_claim + } + + /// Set the `sid` claim to filter by + #[must_use] + pub fn with_sid_claim(mut self, sid_claim: &'a str) -> Self { + self.sid_claim = Some(sid_claim); + self + } + + /// Get the `sid` claim filter + /// + /// Returns [`None`] if no filter was set + #[must_use] + pub fn sid_claim(&self) -> Option<&str> { + self.sid_claim + } } /// An [`UpstreamOAuthSessionRepository`] helps interacting with From e28ffccc3a7b949480b99f24479a969f968a22dc Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Mon, 30 Jun 2025 16:31:57 +0200 Subject: [PATCH 04/10] Backchannel logout behavior settings on upstream providers --- crates/cli/src/sync.rs | 10 ++++ crates/config/src/sections/mod.rs | 6 ++- crates/config/src/sections/upstream_oauth2.rs | 25 ++++++++++ crates/data-model/src/lib.rs | 6 +-- crates/data-model/src/upstream_oauth2/mod.rs | 1 + .../src/upstream_oauth2/provider.rs | 40 ++++++++++++++++ .../src/admin/v1/upstream_oauth_links/mod.rs | 4 +- crates/handlers/src/upstream_oauth2/cache.rs | 4 +- crates/handlers/src/upstream_oauth2/link.rs | 2 + crates/handlers/src/views/login.rs | 5 +- ...ea304d43c336ce80723789ff3e66c0dd4d86c.json | 46 +++++++++++++++++++ ...ffe11da64835ae297c9277271b8971d5de81.json} | 5 +- ...48b0f551b16f4cb57c022b50212cfc3d8431f.json | 45 ------------------ ...e08bab57721007c64ef2597cb09a62100792.json} | 10 +++- ...44dfab024b42e47ddc7bd9e551897ba6e9b8.json} | 10 +++- ...3_upstream_oauth_on_backchannel_logout.sql | 10 ++++ crates/storage-pg/src/iden.rs | 1 + crates/storage-pg/src/upstream_oauth2/mod.rs | 6 ++- .../src/upstream_oauth2/provider.rs | 42 +++++++++++++---- .../storage/src/upstream_oauth2/provider.rs | 7 ++- ...rite_user_with_upstream_provider_link.snap | 1 + .../syn2mas/src/synapse_reader/config/oidc.rs | 15 +++--- crates/templates/src/context.rs | 7 +-- 
docs/config.schema.json | 27 +++++++++++ 24 files changed, 256 insertions(+), 79 deletions(-) create mode 100644 crates/storage-pg/.sqlx/query-0f2ea548e00b080502edc04ee97ea304d43c336ce80723789ff3e66c0dd4d86c.json rename crates/storage-pg/.sqlx/{query-a711f4c6fa38b98c960ee565038d42ea16db436352b19fcd3b2c620c73d9cc0c.json => query-3312f901f70c3b69e0d315206c31ffe11da64835ae297c9277271b8971d5de81.json} (78%) delete mode 100644 crates/storage-pg/.sqlx/query-585a1e78834c953c80a0af9215348b0f551b16f4cb57c022b50212cfc3d8431f.json rename crates/storage-pg/.sqlx/{query-a82b87ccfaa1de9a8e6433aaa67382fbb5029d5f7adf95aaa0decd668d25ba89.json => query-6589987e88fa9dbbd2bd48acd910e08bab57721007c64ef2597cb09a62100792.json} (91%) rename crates/storage-pg/.sqlx/{query-e6d66a7980933c12ab046958e02d419129ef52ac45bea4345471838016cae917.json => query-99394fbd9c07d6d24429934b3f7344dfab024b42e47ddc7bd9e551897ba6e9b8.json} (89%) create mode 100644 crates/storage-pg/migrations/20250630120643_upstream_oauth_on_backchannel_logout.sql diff --git a/crates/cli/src/sync.rs b/crates/cli/src/sync.rs index aa99e1d35..d8433c291 100644 --- a/crates/cli/src/sync.rs +++ b/crates/cli/src/sync.rs @@ -276,6 +276,15 @@ pub async fn config_sync( } }; + let on_backchannel_logout = match provider.on_backchannel_logout { + mas_config::UpstreamOAuth2OnBackchannelLogout::DoNothing => { + mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::DoNothing + } + mas_config::UpstreamOAuth2OnBackchannelLogout::LogoutBrowserOnly => { + mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::LogoutBrowserOnly + } + }; + repo.upstream_oauth_provider() .upsert( clock, @@ -306,6 +315,7 @@ pub async fn config_sync( .collect(), forward_login_hint: provider.forward_login_hint, ui_order, + on_backchannel_logout, }, ) .await?; diff --git a/crates/config/src/sections/mod.rs b/crates/config/src/sections/mod.rs index f1f880594..850ca0c61 100644 --- a/crates/config/src/sections/mod.rs +++ b/crates/config/src/sections/mod.rs @@ -52,8 +52,10 @@ pub use self::{ upstream_oauth2::{ ClaimsImports as UpstreamOAuth2ClaimsImports, DiscoveryMode as UpstreamOAuth2DiscoveryMode, EmailImportPreference as UpstreamOAuth2EmailImportPreference, - ImportAction as UpstreamOAuth2ImportAction, PkceMethod as UpstreamOAuth2PkceMethod, - Provider as UpstreamOAuth2Provider, ResponseMode as UpstreamOAuth2ResponseMode, + ImportAction as UpstreamOAuth2ImportAction, + OnBackchannelLogout as UpstreamOAuth2OnBackchannelLogout, + PkceMethod as UpstreamOAuth2PkceMethod, Provider as UpstreamOAuth2Provider, + ResponseMode as UpstreamOAuth2ResponseMode, TokenAuthMethod as UpstreamOAuth2TokenAuthMethod, UpstreamOAuth2Config, }, }; diff --git a/crates/config/src/sections/upstream_oauth2.rs b/crates/config/src/sections/upstream_oauth2.rs index a2e62036a..2cf43b530 100644 --- a/crates/config/src/sections/upstream_oauth2.rs +++ b/crates/config/src/sections/upstream_oauth2.rs @@ -408,6 +408,25 @@ fn is_default_scope(scope: &str) -> bool { scope == default_scope() } +/// What to do when receiving an OIDC Backchannel logout request. 
+#[derive(Debug, Clone, Copy, Serialize, Deserialize, JsonSchema, Default)] +#[serde(rename_all = "snake_case")] +pub enum OnBackchannelLogout { + /// Do nothing + #[default] + DoNothing, + + /// Only log out the MAS 'browser session' started by this OIDC session + LogoutBrowserOnly, +} + +impl OnBackchannelLogout { + #[allow(clippy::trivially_copy_pass_by_ref)] + const fn is_default(&self) -> bool { + matches!(self, OnBackchannelLogout::DoNothing) + } +} + /// Configuration for one upstream OAuth 2 provider. #[skip_serializing_none] #[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] @@ -583,4 +602,10 @@ pub struct Provider { /// Defaults to `false`. #[serde(default)] pub forward_login_hint: bool, + + /// What to do when receiving an OIDC Backchannel logout request. + /// + /// Defaults to "do_nothing". + #[serde(default, skip_serializing_if = "OnBackchannelLogout::is_default")] + pub on_backchannel_logout: OnBackchannelLogout, } diff --git a/crates/data-model/src/lib.rs b/crates/data-model/src/lib.rs index ce9631068..1ed15adcc 100644 --- a/crates/data-model/src/lib.rs +++ b/crates/data-model/src/lib.rs @@ -42,9 +42,9 @@ pub use self::{ UpstreamOAuthAuthorizationSession, UpstreamOAuthAuthorizationSessionState, UpstreamOAuthLink, UpstreamOAuthProvider, UpstreamOAuthProviderClaimsImports, UpstreamOAuthProviderDiscoveryMode, UpstreamOAuthProviderImportAction, - UpstreamOAuthProviderImportPreference, UpstreamOAuthProviderPkceMode, - UpstreamOAuthProviderResponseMode, UpstreamOAuthProviderSubjectPreference, - UpstreamOAuthProviderTokenAuthMethod, + UpstreamOAuthProviderImportPreference, UpstreamOAuthProviderOnBackchannelLogout, + UpstreamOAuthProviderPkceMode, UpstreamOAuthProviderResponseMode, + UpstreamOAuthProviderSubjectPreference, UpstreamOAuthProviderTokenAuthMethod, }, user_agent::{DeviceType, UserAgent}, users::{ diff --git a/crates/data-model/src/upstream_oauth2/mod.rs b/crates/data-model/src/upstream_oauth2/mod.rs index 1ed54e337..8e2638b9b 100644 --- a/crates/data-model/src/upstream_oauth2/mod.rs +++ b/crates/data-model/src/upstream_oauth2/mod.rs @@ -15,6 +15,7 @@ pub use self::{ DiscoveryMode as UpstreamOAuthProviderDiscoveryMode, ImportAction as UpstreamOAuthProviderImportAction, ImportPreference as UpstreamOAuthProviderImportPreference, + OnBackchannelLogout as UpstreamOAuthProviderOnBackchannelLogout, PkceMode as UpstreamOAuthProviderPkceMode, ResponseMode as UpstreamOAuthProviderResponseMode, SubjectPreference as UpstreamOAuthProviderSubjectPreference, diff --git a/crates/data-model/src/upstream_oauth2/provider.rs b/crates/data-model/src/upstream_oauth2/provider.rs index c4e990102..c384366df 100644 --- a/crates/data-model/src/upstream_oauth2/provider.rs +++ b/crates/data-model/src/upstream_oauth2/provider.rs @@ -216,6 +216,45 @@ impl std::str::FromStr for TokenAuthMethod { #[error("Invalid upstream OAuth 2.0 token auth method: {0}")] pub struct InvalidUpstreamOAuth2TokenAuthMethod(String); +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum OnBackchannelLogout { + DoNothing, + LogoutBrowserOnly, +} + +impl OnBackchannelLogout { + #[must_use] + pub fn as_str(self) -> &'static str { + match self { + Self::DoNothing => "do_nothing", + Self::LogoutBrowserOnly => "logout_browser_only", + } + } +} + +impl std::fmt::Display for OnBackchannelLogout { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(self.as_str()) + } +} + +impl std::str::FromStr for OnBackchannelLogout { + type 
Err = InvalidUpstreamOAuth2OnBackchannelLogout; + + fn from_str(s: &str) -> Result { + match s { + "do_nothing" => Ok(Self::DoNothing), + "logout_browser_only" => Ok(Self::LogoutBrowserOnly), + s => Err(InvalidUpstreamOAuth2OnBackchannelLogout(s.to_owned())), + } + } +} + +#[derive(Debug, Clone, Error)] +#[error("Invalid upstream OAuth 2.0 'on backchannel logout': {0}")] +pub struct InvalidUpstreamOAuth2OnBackchannelLogout(String); + #[derive(Debug, Clone, PartialEq, Eq, Serialize)] pub struct UpstreamOAuthProvider { pub id: Ulid, @@ -242,6 +281,7 @@ pub struct UpstreamOAuthProvider { pub claims_imports: ClaimsImports, pub additional_authorization_parameters: Vec<(String, String)>, pub forward_login_hint: bool, + pub on_backchannel_logout: OnBackchannelLogout, } impl PartialOrd for UpstreamOAuthProvider { diff --git a/crates/handlers/src/admin/v1/upstream_oauth_links/mod.rs b/crates/handlers/src/admin/v1/upstream_oauth_links/mod.rs index 9d2e6599c..3433aa3ca 100644 --- a/crates/handlers/src/admin/v1/upstream_oauth_links/mod.rs +++ b/crates/handlers/src/admin/v1/upstream_oauth_links/mod.rs @@ -19,7 +19,8 @@ pub use self::{ mod test_utils { use mas_data_model::{ UpstreamOAuthProviderClaimsImports, UpstreamOAuthProviderDiscoveryMode, - UpstreamOAuthProviderPkceMode, UpstreamOAuthProviderTokenAuthMethod, + UpstreamOAuthProviderOnBackchannelLogout, UpstreamOAuthProviderPkceMode, + UpstreamOAuthProviderTokenAuthMethod, }; use mas_iana::jose::JsonWebSignatureAlg; use mas_storage::upstream_oauth2::UpstreamOAuthProviderParams; @@ -49,6 +50,7 @@ mod test_utils { additional_authorization_parameters: Vec::new(), forward_login_hint: false, ui_order: 0, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, } } } diff --git a/crates/handlers/src/upstream_oauth2/cache.rs b/crates/handlers/src/upstream_oauth2/cache.rs index d93743bc3..79a9fe5fb 100644 --- a/crates/handlers/src/upstream_oauth2/cache.rs +++ b/crates/handlers/src/upstream_oauth2/cache.rs @@ -296,7 +296,8 @@ mod tests { // 'insecure' discovery use mas_data_model::{ - UpstreamOAuthProviderClaimsImports, UpstreamOAuthProviderTokenAuthMethod, + UpstreamOAuthProviderClaimsImports, UpstreamOAuthProviderOnBackchannelLogout, + UpstreamOAuthProviderTokenAuthMethod, }; use mas_iana::jose::JsonWebSignatureAlg; use mas_storage::{Clock, clock::MockClock}; @@ -427,6 +428,7 @@ mod tests { claims_imports: UpstreamOAuthProviderClaimsImports::default(), additional_authorization_parameters: Vec::new(), forward_login_hint: false, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, }; // Without any override, it should just use discovery diff --git a/crates/handlers/src/upstream_oauth2/link.rs b/crates/handlers/src/upstream_oauth2/link.rs index feb9e9074..934af3626 100644 --- a/crates/handlers/src/upstream_oauth2/link.rs +++ b/crates/handlers/src/upstream_oauth2/link.rs @@ -986,6 +986,8 @@ mod tests { additional_authorization_parameters: Vec::new(), forward_login_hint: false, ui_order: 0, + on_backchannel_logout: + mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::DoNothing, }, ) .await diff --git a/crates/handlers/src/views/login.rs b/crates/handlers/src/views/login.rs index f684f32ad..cf49dece2 100644 --- a/crates/handlers/src/views/login.rs +++ b/crates/handlers/src/views/login.rs @@ -424,7 +424,8 @@ mod test { header::{CONTENT_TYPE, LOCATION}, }; use mas_data_model::{ - UpstreamOAuthProviderClaimsImports, UpstreamOAuthProviderTokenAuthMethod, + UpstreamOAuthProviderClaimsImports, 
UpstreamOAuthProviderOnBackchannelLogout, + UpstreamOAuthProviderTokenAuthMethod, }; use mas_iana::jose::JsonWebSignatureAlg; use mas_router::Route; @@ -500,6 +501,7 @@ mod test { additional_authorization_parameters: Vec::new(), forward_login_hint: false, ui_order: 0, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, }, ) .await @@ -542,6 +544,7 @@ mod test { additional_authorization_parameters: Vec::new(), forward_login_hint: false, ui_order: 1, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, }, ) .await diff --git a/crates/storage-pg/.sqlx/query-0f2ea548e00b080502edc04ee97ea304d43c336ce80723789ff3e66c0dd4d86c.json b/crates/storage-pg/.sqlx/query-0f2ea548e00b080502edc04ee97ea304d43c336ce80723789ff3e66c0dd4d86c.json new file mode 100644 index 000000000..1eb87fd3f --- /dev/null +++ b/crates/storage-pg/.sqlx/query-0f2ea548e00b080502edc04ee97ea304d43c336ce80723789ff3e66c0dd4d86c.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO upstream_oauth_providers (\n upstream_oauth_provider_id,\n issuer,\n human_name,\n brand_name,\n scope,\n token_endpoint_auth_method,\n token_endpoint_signing_alg,\n id_token_signed_response_alg,\n fetch_userinfo,\n userinfo_signed_response_alg,\n client_id,\n encrypted_client_secret,\n claims_imports,\n authorization_endpoint_override,\n token_endpoint_override,\n userinfo_endpoint_override,\n jwks_uri_override,\n discovery_mode,\n pkce_mode,\n response_mode,\n additional_parameters,\n forward_login_hint,\n ui_order,\n on_backchannel_logout,\n created_at\n ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10,\n $11, $12, $13, $14, $15, $16, $17, $18, $19, $20,\n $21, $22, $23, $24, $25)\n ON CONFLICT (upstream_oauth_provider_id)\n DO UPDATE\n SET\n issuer = EXCLUDED.issuer,\n human_name = EXCLUDED.human_name,\n brand_name = EXCLUDED.brand_name,\n scope = EXCLUDED.scope,\n token_endpoint_auth_method = EXCLUDED.token_endpoint_auth_method,\n token_endpoint_signing_alg = EXCLUDED.token_endpoint_signing_alg,\n id_token_signed_response_alg = EXCLUDED.id_token_signed_response_alg,\n fetch_userinfo = EXCLUDED.fetch_userinfo,\n userinfo_signed_response_alg = EXCLUDED.userinfo_signed_response_alg,\n disabled_at = NULL,\n client_id = EXCLUDED.client_id,\n encrypted_client_secret = EXCLUDED.encrypted_client_secret,\n claims_imports = EXCLUDED.claims_imports,\n authorization_endpoint_override = EXCLUDED.authorization_endpoint_override,\n token_endpoint_override = EXCLUDED.token_endpoint_override,\n userinfo_endpoint_override = EXCLUDED.userinfo_endpoint_override,\n jwks_uri_override = EXCLUDED.jwks_uri_override,\n discovery_mode = EXCLUDED.discovery_mode,\n pkce_mode = EXCLUDED.pkce_mode,\n response_mode = EXCLUDED.response_mode,\n additional_parameters = EXCLUDED.additional_parameters,\n forward_login_hint = EXCLUDED.forward_login_hint,\n ui_order = EXCLUDED.ui_order,\n on_backchannel_logout = EXCLUDED.on_backchannel_logout\n RETURNING created_at\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "created_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Uuid", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Bool", + "Text", + "Text", + "Text", + "Jsonb", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Text", + "Jsonb", + "Bool", + "Int4", + "Text", + "Timestamptz" + ] + }, + "nullable": [ + false + ] + }, + "hash": "0f2ea548e00b080502edc04ee97ea304d43c336ce80723789ff3e66c0dd4d86c" +} diff --git 
a/crates/storage-pg/.sqlx/query-a711f4c6fa38b98c960ee565038d42ea16db436352b19fcd3b2c620c73d9cc0c.json b/crates/storage-pg/.sqlx/query-3312f901f70c3b69e0d315206c31ffe11da64835ae297c9277271b8971d5de81.json similarity index 78% rename from crates/storage-pg/.sqlx/query-a711f4c6fa38b98c960ee565038d42ea16db436352b19fcd3b2c620c73d9cc0c.json rename to crates/storage-pg/.sqlx/query-3312f901f70c3b69e0d315206c31ffe11da64835ae297c9277271b8971d5de81.json index 9944e855b..3f837630f 100644 --- a/crates/storage-pg/.sqlx/query-a711f4c6fa38b98c960ee565038d42ea16db436352b19fcd3b2c620c73d9cc0c.json +++ b/crates/storage-pg/.sqlx/query-3312f901f70c3b69e0d315206c31ffe11da64835ae297c9277271b8971d5de81.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n INSERT INTO upstream_oauth_providers (\n upstream_oauth_provider_id,\n issuer,\n human_name,\n brand_name,\n scope,\n token_endpoint_auth_method,\n token_endpoint_signing_alg,\n id_token_signed_response_alg,\n fetch_userinfo,\n userinfo_signed_response_alg,\n client_id,\n encrypted_client_secret,\n claims_imports,\n authorization_endpoint_override,\n token_endpoint_override,\n userinfo_endpoint_override,\n jwks_uri_override,\n discovery_mode,\n pkce_mode,\n response_mode,\n forward_login_hint,\n created_at\n ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10,\n $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, $21, $22)\n ", + "query": "\n INSERT INTO upstream_oauth_providers (\n upstream_oauth_provider_id,\n issuer,\n human_name,\n brand_name,\n scope,\n token_endpoint_auth_method,\n token_endpoint_signing_alg,\n id_token_signed_response_alg,\n fetch_userinfo,\n userinfo_signed_response_alg,\n client_id,\n encrypted_client_secret,\n claims_imports,\n authorization_endpoint_override,\n token_endpoint_override,\n userinfo_endpoint_override,\n jwks_uri_override,\n discovery_mode,\n pkce_mode,\n response_mode,\n forward_login_hint,\n on_backchannel_logout,\n created_at\n ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11,\n $12, $13, $14, $15, $16, $17, $18, $19, $20,\n $21, $22, $23)\n ", "describe": { "columns": [], "parameters": { @@ -26,10 +26,11 @@ "Text", "Text", "Bool", + "Text", "Timestamptz" ] }, "nullable": [] }, - "hash": "a711f4c6fa38b98c960ee565038d42ea16db436352b19fcd3b2c620c73d9cc0c" + "hash": "3312f901f70c3b69e0d315206c31ffe11da64835ae297c9277271b8971d5de81" } diff --git a/crates/storage-pg/.sqlx/query-585a1e78834c953c80a0af9215348b0f551b16f4cb57c022b50212cfc3d8431f.json b/crates/storage-pg/.sqlx/query-585a1e78834c953c80a0af9215348b0f551b16f4cb57c022b50212cfc3d8431f.json deleted file mode 100644 index a7b63ca21..000000000 --- a/crates/storage-pg/.sqlx/query-585a1e78834c953c80a0af9215348b0f551b16f4cb57c022b50212cfc3d8431f.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO upstream_oauth_providers (\n upstream_oauth_provider_id,\n issuer,\n human_name,\n brand_name,\n scope,\n token_endpoint_auth_method,\n token_endpoint_signing_alg,\n id_token_signed_response_alg,\n fetch_userinfo,\n userinfo_signed_response_alg,\n client_id,\n encrypted_client_secret,\n claims_imports,\n authorization_endpoint_override,\n token_endpoint_override,\n userinfo_endpoint_override,\n jwks_uri_override,\n discovery_mode,\n pkce_mode,\n response_mode,\n additional_parameters,\n forward_login_hint,\n ui_order,\n created_at\n ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11,\n $12, $13, $14, $15, $16, $17, $18, $19, $20,\n $21, $22, $23, $24)\n ON CONFLICT (upstream_oauth_provider_id)\n DO UPDATE\n SET\n issuer = 
EXCLUDED.issuer,\n human_name = EXCLUDED.human_name,\n brand_name = EXCLUDED.brand_name,\n scope = EXCLUDED.scope,\n token_endpoint_auth_method = EXCLUDED.token_endpoint_auth_method,\n token_endpoint_signing_alg = EXCLUDED.token_endpoint_signing_alg,\n id_token_signed_response_alg = EXCLUDED.id_token_signed_response_alg,\n fetch_userinfo = EXCLUDED.fetch_userinfo,\n userinfo_signed_response_alg = EXCLUDED.userinfo_signed_response_alg,\n disabled_at = NULL,\n client_id = EXCLUDED.client_id,\n encrypted_client_secret = EXCLUDED.encrypted_client_secret,\n claims_imports = EXCLUDED.claims_imports,\n authorization_endpoint_override = EXCLUDED.authorization_endpoint_override,\n token_endpoint_override = EXCLUDED.token_endpoint_override,\n userinfo_endpoint_override = EXCLUDED.userinfo_endpoint_override,\n jwks_uri_override = EXCLUDED.jwks_uri_override,\n discovery_mode = EXCLUDED.discovery_mode,\n pkce_mode = EXCLUDED.pkce_mode,\n response_mode = EXCLUDED.response_mode,\n additional_parameters = EXCLUDED.additional_parameters,\n forward_login_hint = EXCLUDED.forward_login_hint,\n ui_order = EXCLUDED.ui_order\n RETURNING created_at\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "created_at", - "type_info": "Timestamptz" - } - ], - "parameters": { - "Left": [ - "Uuid", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Bool", - "Text", - "Text", - "Text", - "Jsonb", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Text", - "Jsonb", - "Bool", - "Int4", - "Timestamptz" - ] - }, - "nullable": [ - false - ] - }, - "hash": "585a1e78834c953c80a0af9215348b0f551b16f4cb57c022b50212cfc3d8431f" -} diff --git a/crates/storage-pg/.sqlx/query-a82b87ccfaa1de9a8e6433aaa67382fbb5029d5f7adf95aaa0decd668d25ba89.json b/crates/storage-pg/.sqlx/query-6589987e88fa9dbbd2bd48acd910e08bab57721007c64ef2597cb09a62100792.json similarity index 91% rename from crates/storage-pg/.sqlx/query-a82b87ccfaa1de9a8e6433aaa67382fbb5029d5f7adf95aaa0decd668d25ba89.json rename to crates/storage-pg/.sqlx/query-6589987e88fa9dbbd2bd48acd910e08bab57721007c64ef2597cb09a62100792.json index 7c1a26a86..6bd2768cc 100644 --- a/crates/storage-pg/.sqlx/query-a82b87ccfaa1de9a8e6433aaa67382fbb5029d5f7adf95aaa0decd668d25ba89.json +++ b/crates/storage-pg/.sqlx/query-6589987e88fa9dbbd2bd48acd910e08bab57721007c64ef2597cb09a62100792.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n upstream_oauth_provider_id,\n issuer,\n human_name,\n brand_name,\n scope,\n client_id,\n encrypted_client_secret,\n token_endpoint_signing_alg,\n token_endpoint_auth_method,\n id_token_signed_response_alg,\n fetch_userinfo,\n userinfo_signed_response_alg,\n created_at,\n disabled_at,\n claims_imports as \"claims_imports: Json\",\n jwks_uri_override,\n authorization_endpoint_override,\n token_endpoint_override,\n userinfo_endpoint_override,\n discovery_mode,\n pkce_mode,\n response_mode,\n additional_parameters as \"additional_parameters: Json>\",\n forward_login_hint\n FROM upstream_oauth_providers\n WHERE upstream_oauth_provider_id = $1\n ", + "query": "\n SELECT\n upstream_oauth_provider_id,\n issuer,\n human_name,\n brand_name,\n scope,\n client_id,\n encrypted_client_secret,\n token_endpoint_signing_alg,\n token_endpoint_auth_method,\n id_token_signed_response_alg,\n fetch_userinfo,\n userinfo_signed_response_alg,\n created_at,\n disabled_at,\n claims_imports as \"claims_imports: Json\",\n jwks_uri_override,\n authorization_endpoint_override,\n token_endpoint_override,\n userinfo_endpoint_override,\n 
discovery_mode,\n pkce_mode,\n response_mode,\n additional_parameters as \"additional_parameters: Json>\",\n forward_login_hint,\n on_backchannel_logout\n FROM upstream_oauth_providers\n WHERE upstream_oauth_provider_id = $1\n ", "describe": { "columns": [ { @@ -122,6 +122,11 @@ "ordinal": 23, "name": "forward_login_hint", "type_info": "Bool" + }, + { + "ordinal": 24, + "name": "on_backchannel_logout", + "type_info": "Text" } ], "parameters": { @@ -153,8 +158,9 @@ false, true, true, + false, false ] }, - "hash": "a82b87ccfaa1de9a8e6433aaa67382fbb5029d5f7adf95aaa0decd668d25ba89" + "hash": "6589987e88fa9dbbd2bd48acd910e08bab57721007c64ef2597cb09a62100792" } diff --git a/crates/storage-pg/.sqlx/query-e6d66a7980933c12ab046958e02d419129ef52ac45bea4345471838016cae917.json b/crates/storage-pg/.sqlx/query-99394fbd9c07d6d24429934b3f7344dfab024b42e47ddc7bd9e551897ba6e9b8.json similarity index 89% rename from crates/storage-pg/.sqlx/query-e6d66a7980933c12ab046958e02d419129ef52ac45bea4345471838016cae917.json rename to crates/storage-pg/.sqlx/query-99394fbd9c07d6d24429934b3f7344dfab024b42e47ddc7bd9e551897ba6e9b8.json index d544590c4..eb1a801c4 100644 --- a/crates/storage-pg/.sqlx/query-e6d66a7980933c12ab046958e02d419129ef52ac45bea4345471838016cae917.json +++ b/crates/storage-pg/.sqlx/query-99394fbd9c07d6d24429934b3f7344dfab024b42e47ddc7bd9e551897ba6e9b8.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n upstream_oauth_provider_id,\n issuer,\n human_name,\n brand_name,\n scope,\n client_id,\n encrypted_client_secret,\n token_endpoint_signing_alg,\n token_endpoint_auth_method,\n id_token_signed_response_alg,\n fetch_userinfo,\n userinfo_signed_response_alg,\n created_at,\n disabled_at,\n claims_imports as \"claims_imports: Json\",\n jwks_uri_override,\n authorization_endpoint_override,\n token_endpoint_override,\n userinfo_endpoint_override,\n discovery_mode,\n pkce_mode,\n response_mode,\n additional_parameters as \"additional_parameters: Json>\",\n forward_login_hint\n FROM upstream_oauth_providers\n WHERE disabled_at IS NULL\n ORDER BY ui_order ASC, upstream_oauth_provider_id ASC\n ", + "query": "\n SELECT\n upstream_oauth_provider_id,\n issuer,\n human_name,\n brand_name,\n scope,\n client_id,\n encrypted_client_secret,\n token_endpoint_signing_alg,\n token_endpoint_auth_method,\n id_token_signed_response_alg,\n fetch_userinfo,\n userinfo_signed_response_alg,\n created_at,\n disabled_at,\n claims_imports as \"claims_imports: Json\",\n jwks_uri_override,\n authorization_endpoint_override,\n token_endpoint_override,\n userinfo_endpoint_override,\n discovery_mode,\n pkce_mode,\n response_mode,\n additional_parameters as \"additional_parameters: Json>\",\n forward_login_hint,\n on_backchannel_logout\n FROM upstream_oauth_providers\n WHERE disabled_at IS NULL\n ORDER BY ui_order ASC, upstream_oauth_provider_id ASC\n ", "describe": { "columns": [ { @@ -122,6 +122,11 @@ "ordinal": 23, "name": "forward_login_hint", "type_info": "Bool" + }, + { + "ordinal": 24, + "name": "on_backchannel_logout", + "type_info": "Text" } ], "parameters": { @@ -151,8 +156,9 @@ false, true, true, + false, false ] }, - "hash": "e6d66a7980933c12ab046958e02d419129ef52ac45bea4345471838016cae917" + "hash": "99394fbd9c07d6d24429934b3f7344dfab024b42e47ddc7bd9e551897ba6e9b8" } diff --git a/crates/storage-pg/migrations/20250630120643_upstream_oauth_on_backchannel_logout.sql b/crates/storage-pg/migrations/20250630120643_upstream_oauth_on_backchannel_logout.sql new file mode 100644 index 000000000..f6031ca62 --- 
/dev/null +++ b/crates/storage-pg/migrations/20250630120643_upstream_oauth_on_backchannel_logout.sql @@ -0,0 +1,10 @@ +-- Copyright 2025 New Vector Ltd. +-- +-- SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +-- Please see LICENSE in the repository root for full details. + +-- This defines the behavior when receiving a backchannel logout notification +ALTER TABLE "upstream_oauth_providers" + ADD COLUMN "on_backchannel_logout" TEXT + NOT NULL + DEFAULT 'do_nothing'; diff --git a/crates/storage-pg/src/iden.rs b/crates/storage-pg/src/iden.rs index f7342adf5..ab3ebe967 100644 --- a/crates/storage-pg/src/iden.rs +++ b/crates/storage-pg/src/iden.rs @@ -124,6 +124,7 @@ pub enum UpstreamOAuthProviders { TokenEndpointOverride, AuthorizationEndpointOverride, UserinfoEndpointOverride, + OnBackchannelLogout, } #[derive(sea_query::Iden)] diff --git a/crates/storage-pg/src/upstream_oauth2/mod.rs b/crates/storage-pg/src/upstream_oauth2/mod.rs index 84a52defd..0ec2c3670 100644 --- a/crates/storage-pg/src/upstream_oauth2/mod.rs +++ b/crates/storage-pg/src/upstream_oauth2/mod.rs @@ -20,7 +20,8 @@ pub use self::{ mod tests { use chrono::Duration; use mas_data_model::{ - UpstreamOAuthProviderClaimsImports, UpstreamOAuthProviderTokenAuthMethod, + UpstreamOAuthProviderClaimsImports, UpstreamOAuthProviderOnBackchannelLogout, + UpstreamOAuthProviderTokenAuthMethod, }; use mas_iana::jose::JsonWebSignatureAlg; use mas_storage::{ @@ -78,6 +79,7 @@ mod tests { additional_authorization_parameters: Vec::new(), forward_login_hint: false, ui_order: 0, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, }, ) .await @@ -349,6 +351,7 @@ mod tests { additional_authorization_parameters: Vec::new(), forward_login_hint: false, ui_order: 0, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, }, ) .await @@ -486,6 +489,7 @@ mod tests { additional_authorization_parameters: Vec::new(), forward_login_hint: false, ui_order: 0, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, }, ) .await diff --git a/crates/storage-pg/src/upstream_oauth2/provider.rs b/crates/storage-pg/src/upstream_oauth2/provider.rs index 4cdb35a11..4086e623c 100644 --- a/crates/storage-pg/src/upstream_oauth2/provider.rs +++ b/crates/storage-pg/src/upstream_oauth2/provider.rs @@ -71,6 +71,7 @@ struct ProviderLookup { response_mode: Option, additional_parameters: Option>>, forward_login_hint: bool, + on_backchannel_logout: String, } impl TryFrom for UpstreamOAuthProvider { @@ -194,6 +195,13 @@ impl TryFrom for UpstreamOAuthProvider { .map(|Json(x)| x) .unwrap_or_default(); + let on_backchannel_logout = value.on_backchannel_logout.parse().map_err(|e| { + DatabaseInconsistencyError::on("upstream_oauth_providers") + .column("on_backchannel_logout") + .row(id) + .source(e) + })?; + Ok(UpstreamOAuthProvider { id, issuer: value.issuer, @@ -219,6 +227,7 @@ impl TryFrom for UpstreamOAuthProvider { response_mode, additional_authorization_parameters, forward_login_hint: value.forward_login_hint, + on_backchannel_logout, }) } } @@ -277,7 +286,8 @@ impl UpstreamOAuthProviderRepository for PgUpstreamOAuthProviderRepository<'_> { pkce_mode, response_mode, additional_parameters as "additional_parameters: Json>", - forward_login_hint + forward_login_hint, + on_backchannel_logout FROM upstream_oauth_providers WHERE upstream_oauth_provider_id = $1 "#, @@ -340,9 +350,11 @@ impl UpstreamOAuthProviderRepository for PgUpstreamOAuthProviderRepository<'_> { pkce_mode, response_mode, 
forward_login_hint, + on_backchannel_logout, created_at - ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, - $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, $21, $22) + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, + $12, $13, $14, $15, $16, $17, $18, $19, $20, + $21, $22, $23) "#, Uuid::from(id), params.issuer.as_deref(), @@ -380,6 +392,7 @@ impl UpstreamOAuthProviderRepository for PgUpstreamOAuthProviderRepository<'_> { params.pkce_mode.as_str(), params.response_mode.as_ref().map(ToString::to_string), params.forward_login_hint, + params.on_backchannel_logout.as_str(), created_at, ) .traced() @@ -410,6 +423,7 @@ impl UpstreamOAuthProviderRepository for PgUpstreamOAuthProviderRepository<'_> { pkce_mode: params.pkce_mode, response_mode: params.response_mode, additional_authorization_parameters: params.additional_authorization_parameters, + on_backchannel_logout: params.on_backchannel_logout, forward_login_hint: params.forward_login_hint, }) } @@ -525,10 +539,11 @@ impl UpstreamOAuthProviderRepository for PgUpstreamOAuthProviderRepository<'_> { additional_parameters, forward_login_hint, ui_order, + on_backchannel_logout, created_at - ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, - $12, $13, $14, $15, $16, $17, $18, $19, $20, - $21, $22, $23, $24) + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, + $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, + $21, $22, $23, $24, $25) ON CONFLICT (upstream_oauth_provider_id) DO UPDATE SET @@ -554,7 +569,8 @@ impl UpstreamOAuthProviderRepository for PgUpstreamOAuthProviderRepository<'_> { response_mode = EXCLUDED.response_mode, additional_parameters = EXCLUDED.additional_parameters, forward_login_hint = EXCLUDED.forward_login_hint, - ui_order = EXCLUDED.ui_order + ui_order = EXCLUDED.ui_order, + on_backchannel_logout = EXCLUDED.on_backchannel_logout RETURNING created_at "#, Uuid::from(id), @@ -595,6 +611,7 @@ impl UpstreamOAuthProviderRepository for PgUpstreamOAuthProviderRepository<'_> { Json(¶ms.additional_authorization_parameters) as _, params.forward_login_hint, params.ui_order, + params.on_backchannel_logout.as_str(), created_at, ) .traced() @@ -626,6 +643,7 @@ impl UpstreamOAuthProviderRepository for PgUpstreamOAuthProviderRepository<'_> { response_mode: params.response_mode, additional_authorization_parameters: params.additional_authorization_parameters, forward_login_hint: params.forward_login_hint, + on_backchannel_logout: params.on_backchannel_logout, }) } @@ -843,6 +861,13 @@ impl UpstreamOAuthProviderRepository for PgUpstreamOAuthProviderRepository<'_> { )), ProviderLookupIden::ForwardLoginHint, ) + .expr_as( + Expr::col(( + UpstreamOAuthProviders::Table, + UpstreamOAuthProviders::OnBackchannelLogout, + )), + ProviderLookupIden::OnBackchannelLogout, + ) .from(UpstreamOAuthProviders::Table) .apply_filter(filter) .generate_pagination( @@ -936,7 +961,8 @@ impl UpstreamOAuthProviderRepository for PgUpstreamOAuthProviderRepository<'_> { pkce_mode, response_mode, additional_parameters as "additional_parameters: Json>", - forward_login_hint + forward_login_hint, + on_backchannel_logout FROM upstream_oauth_providers WHERE disabled_at IS NULL ORDER BY ui_order ASC, upstream_oauth_provider_id ASC diff --git a/crates/storage/src/upstream_oauth2/provider.rs b/crates/storage/src/upstream_oauth2/provider.rs index d28ed5c73..bc44bfab7 100644 --- a/crates/storage/src/upstream_oauth2/provider.rs +++ b/crates/storage/src/upstream_oauth2/provider.rs @@ -9,8 +9,8 @@ use std::marker::PhantomData; use async_trait::async_trait; use 
mas_data_model::{ UpstreamOAuthProvider, UpstreamOAuthProviderClaimsImports, UpstreamOAuthProviderDiscoveryMode, - UpstreamOAuthProviderPkceMode, UpstreamOAuthProviderResponseMode, - UpstreamOAuthProviderTokenAuthMethod, + UpstreamOAuthProviderOnBackchannelLogout, UpstreamOAuthProviderPkceMode, + UpstreamOAuthProviderResponseMode, UpstreamOAuthProviderTokenAuthMethod, }; use mas_iana::jose::JsonWebSignatureAlg; use oauth2_types::scope::Scope; @@ -101,6 +101,9 @@ pub struct UpstreamOAuthProviderParams { /// The position of the provider in the UI pub ui_order: i32, + + /// The behavior when receiving a backchannel logout notification + pub on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout, } /// Filter parameters for listing upstream OAuth 2.0 providers diff --git a/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_upstream_provider_link.snap b/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_upstream_provider_link.snap index a368aa9a5..adb6d4ee4 100644 --- a/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_upstream_provider_link.snap +++ b/crates/syn2mas/src/mas_writer/snapshots/syn2mas__mas_writer__test__write_user_with_upstream_provider_link.snap @@ -25,6 +25,7 @@ upstream_oauth_providers: id_token_signed_response_alg: RS256 issuer: ~ jwks_uri_override: ~ + on_backchannel_logout: do_nothing pkce_mode: auto response_mode: query scope: openid diff --git a/crates/syn2mas/src/synapse_reader/config/oidc.rs b/crates/syn2mas/src/synapse_reader/config/oidc.rs index d59c185d3..c08023bf2 100644 --- a/crates/syn2mas/src/synapse_reader/config/oidc.rs +++ b/crates/syn2mas/src/synapse_reader/config/oidc.rs @@ -8,7 +8,8 @@ use std::{collections::BTreeMap, str::FromStr as _}; use chrono::{DateTime, Utc}; use mas_config::{ UpstreamOAuth2ClaimsImports, UpstreamOAuth2DiscoveryMode, UpstreamOAuth2ImportAction, - UpstreamOAuth2PkceMethod, UpstreamOAuth2ResponseMode, UpstreamOAuth2TokenAuthMethod, + UpstreamOAuth2OnBackchannelLogout, UpstreamOAuth2PkceMethod, UpstreamOAuth2ResponseMode, + UpstreamOAuth2TokenAuthMethod, }; use mas_iana::jose::JsonWebSignatureAlg; use oauth2_types::scope::{OPENID, Scope, ScopeToken}; @@ -159,7 +160,6 @@ pub struct OidcProvider { #[serde(default)] skip_verification: bool, - // Unsupported, we want to shout about it #[serde(default)] backchannel_logout_enabled: bool, @@ -219,10 +219,6 @@ impl OidcProvider { warn!("The `id_token_signing_alg_values_supported` option is not supported, ignoring."); } - if self.backchannel_logout_enabled { - warn!("The `backchannel_logout_enabled` option is not supported, ignoring."); - } - if !self.enable_registration { warn!( "Setting the `enable_registration` option to `false` is not supported, ignoring." 
@@ -319,6 +315,12 @@ impl OidcProvider { self.user_mapping_provider.config.into_mas_config() }; + let on_backchannel_logout = if self.backchannel_logout_enabled { + UpstreamOAuth2OnBackchannelLogout::DoNothing + } else { + UpstreamOAuth2OnBackchannelLogout::LogoutBrowserOnly + }; + Some(mas_config::UpstreamOAuth2Provider { enabled: true, id, @@ -345,6 +347,7 @@ impl OidcProvider { claims_imports, additional_authorization_parameters, forward_login_hint: self.forward_login_hint, + on_backchannel_logout, }) } } diff --git a/crates/templates/src/context.rs b/crates/templates/src/context.rs index d6bf86585..33c973d1e 100644 --- a/crates/templates/src/context.rs +++ b/crates/templates/src/context.rs @@ -21,9 +21,9 @@ use http::{Method, Uri, Version}; use mas_data_model::{ AuthorizationGrant, BrowserSession, Client, CompatSsoLogin, CompatSsoLoginState, DeviceCodeGrant, UpstreamOAuthLink, UpstreamOAuthProvider, UpstreamOAuthProviderClaimsImports, - UpstreamOAuthProviderDiscoveryMode, UpstreamOAuthProviderPkceMode, - UpstreamOAuthProviderTokenAuthMethod, User, UserEmailAuthentication, - UserEmailAuthenticationCode, UserRecoverySession, UserRegistration, + UpstreamOAuthProviderDiscoveryMode, UpstreamOAuthProviderOnBackchannelLogout, + UpstreamOAuthProviderPkceMode, UpstreamOAuthProviderTokenAuthMethod, User, + UserEmailAuthentication, UserEmailAuthenticationCode, UserRecoverySession, UserRegistration, }; use mas_i18n::DataLocale; use mas_iana::jose::JsonWebSignatureAlg; @@ -1543,6 +1543,7 @@ impl TemplateContext for UpstreamRegister { forward_login_hint: false, created_at: now, disabled_at: None, + on_backchannel_logout: UpstreamOAuthProviderOnBackchannelLogout::DoNothing, }, )] } diff --git a/docs/config.schema.json b/docs/config.schema.json index 534165920..cf2793c25 100644 --- a/docs/config.schema.json +++ b/docs/config.schema.json @@ -2136,6 +2136,14 @@ "description": "Whether the `login_hint` should be forwarded to the provider in the authorization request.\n\nDefaults to `false`.", "default": false, "type": "boolean" + }, + "on_backchannel_logout": { + "description": "What to do when receiving an OIDC Backchannel logout request.\n\nDefaults to \"do_nothing\".", + "allOf": [ + { + "$ref": "#/definitions/OnBackchannelLogout" + } + ] } } }, @@ -2435,6 +2443,25 @@ } } }, + "OnBackchannelLogout": { + "description": "What to do when receiving an OIDC Backchannel logout request.", + "oneOf": [ + { + "description": "Do nothing", + "type": "string", + "enum": [ + "do_nothing" + ] + }, + { + "description": "Only log out the MAS 'browser session' started by this OIDC session", + "type": "string", + "enum": [ + "logout_browser_only" + ] + } + ] + }, "BrandingConfig": { "description": "Configuration section for tweaking the branding of the service", "type": "object", From 80065647965393cdbf69338138ec7828ae08cc06 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Thu, 3 Jul 2025 15:14:56 +0200 Subject: [PATCH 05/10] Receive and validate backchannel logout requests We don't yet do anything with them, other than logging them --- crates/handlers/src/lib.rs | 4 + .../src/upstream_oauth2/backchannel_logout.rs | 250 ++++++++++++++++++ crates/handlers/src/upstream_oauth2/mod.rs | 1 + crates/jose/src/claims.rs | 26 +- crates/router/src/endpoints.rs | 23 ++ 5 files changed, 303 insertions(+), 1 deletion(-) create mode 100644 crates/handlers/src/upstream_oauth2/backchannel_logout.rs diff --git a/crates/handlers/src/lib.rs b/crates/handlers/src/lib.rs index 51dde981b..5cba740a5 100644 --- a/crates/handlers/src/lib.rs 
+++ b/crates/handlers/src/lib.rs @@ -440,6 +440,10 @@ where mas_router::UpstreamOAuth2Link::route(), get(self::upstream_oauth2::link::get).post(self::upstream_oauth2::link::post), ) + .route( + mas_router::UpstreamOAuth2BackchannelLogout::route(), + post(self::upstream_oauth2::backchannel_logout::post), + ) .route( mas_router::DeviceCodeLink::route(), get(self::oauth2::device::link::get), diff --git a/crates/handlers/src/upstream_oauth2/backchannel_logout.rs b/crates/handlers/src/upstream_oauth2/backchannel_logout.rs new file mode 100644 index 000000000..082c0465f --- /dev/null +++ b/crates/handlers/src/upstream_oauth2/backchannel_logout.rs @@ -0,0 +1,250 @@ +// Copyright 2025 New Vector Ltd. +// +// SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +// Please see LICENSE files in the repository root for full details. + +use std::collections::HashMap; + +use axum::{ + Form, Json, + extract::{Path, State, rejection::FormRejection}, + response::IntoResponse, +}; +use hyper::StatusCode; +use mas_axum_utils::record_error; +use mas_data_model::UpstreamOAuthProvider; +use mas_jose::{ + claims::{self, Claim, TimeOptions}, + jwt::JwtDecodeError, +}; +use mas_oidc_client::{ + error::JwtVerificationError, + requests::jose::{JwtVerificationData, verify_signed_jwt}, +}; +use mas_storage::{ + BoxClock, BoxRepository, Pagination, upstream_oauth2::UpstreamOAuthSessionFilter, +}; +use oauth2_types::errors::{ClientError, ClientErrorCode}; +use serde::Deserialize; +use serde_json::Value; +use thiserror::Error; +use ulid::Ulid; + +use crate::{MetadataCache, impl_from_error_for_route, upstream_oauth2::cache::LazyProviderInfos}; + +#[derive(Debug, Error)] +pub enum RouteError { + /// An internal error occurred. + #[error(transparent)] + Internal(Box), + + /// Invalid request body + #[error(transparent)] + InvalidRequestBody(#[from] FormRejection), + + /// Logout token is not a JWT + #[error("failed to decode logout token")] + InvalidLogoutToken(#[from] JwtDecodeError), + + /// Logout token failed to be verified + #[error("failed to verify logout token")] + LogoutTokenVerification(#[from] JwtVerificationError), + + /// Logout token had invalid claims + #[error("invalid claims in logout token")] + InvalidLogoutTokenClaims(#[from] claims::ClaimError), + + /// Logout token has neither a sub nor a sid claim + #[error("logout token has neither a sub nor a sid claim")] + NoSubOrSidClaim, + + /// Provider not found + #[error("provider not found")] + ProviderNotFound, +} + +impl IntoResponse for RouteError { + fn into_response(self) -> axum::response::Response { + let sentry_event_id = record_error!(self, Self::Internal(_)); + + let response = match self { + e @ Self::Internal(_) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json( + ClientError::from(ClientErrorCode::ServerError).with_description(e.to_string()), + ), + ) + .into_response(), + + e @ (Self::InvalidLogoutToken(_) + | Self::LogoutTokenVerification(_) + | Self::InvalidRequestBody(_) + | Self::InvalidLogoutTokenClaims(_) + | Self::NoSubOrSidClaim) => ( + StatusCode::BAD_REQUEST, + Json( + ClientError::from(ClientErrorCode::InvalidRequest) + .with_description(e.to_string()), + ), + ) + .into_response(), + + Self::ProviderNotFound => ( + StatusCode::NOT_FOUND, + Json( + ClientError::from(ClientErrorCode::InvalidRequest).with_description( + "Upstream OAuth provider not found, is the backchannel logout URI right?" 
+ .to_owned(), + ), + ), + ) + .into_response(), + }; + + (sentry_event_id, response).into_response() + } +} + +impl_from_error_for_route!(mas_storage::RepositoryError); +impl_from_error_for_route!(mas_oidc_client::error::DiscoveryError); +impl_from_error_for_route!(mas_oidc_client::error::JwksError); + +#[derive(Deserialize)] +pub(crate) struct BackchannelLogoutRequest { + logout_token: String, +} + +#[derive(Deserialize)] +struct LogoutTokenEvents { + #[allow(dead_code)] // We just want to check it deserializes + #[serde(rename = "http://schemas.openid.net/event/backchannel-logout")] + backchannel_logout: HashMap, +} + +const EVENTS: Claim = Claim::new("events"); + +#[tracing::instrument( + name = "handlers.upstream_oauth2.backchannel_logout.post", + fields(upstream_oauth_provider.id = %provider_id), + skip_all, +)] +pub(crate) async fn post( + clock: BoxClock, + mut repo: BoxRepository, + State(metadata_cache): State, + State(client): State, + Path(provider_id): Path, + request: Result, FormRejection>, +) -> Result { + let Form(request) = request?; + let provider = repo + .upstream_oauth_provider() + .lookup(provider_id) + .await? + .filter(UpstreamOAuthProvider::enabled) + .ok_or(RouteError::ProviderNotFound)?; + + let mut lazy_metadata = LazyProviderInfos::new(&metadata_cache, &provider, &client); + + let jwks = + mas_oidc_client::requests::jose::fetch_jwks(&client, lazy_metadata.jwks_uri().await?) + .await?; + + // Validate the logout token. The rules are defined in + // + // + // Upon receiving a logout request at the back-channel logout URI, the RP MUST + // validate the Logout Token as follows: + // + // 1. If the Logout Token is encrypted, decrypt it using the keys and + // algorithms that the Client specified during Registration that the OP was + // to use to encrypt ID Tokens. If ID Token encryption was negotiated with + // the OP at Registration time and the Logout Token is not encrypted, the RP + // SHOULD reject it. + // 2. Validate the Logout Token signature in the same way that an ID Token + // signature is validated, with the following refinements. + // 3. Validate the alg (algorithm) Header Parameter in the same way it is + // validated for ID Tokens. Like ID Tokens, selection of the algorithm used + // is governed by the id_token_signing_alg_values_supported Discovery + // parameter and the id_token_signed_response_alg Registration parameter + // when they are used; otherwise, the value SHOULD be the default of RS256. + // Additionally, an alg with the value none MUST NOT be used for Logout + // Tokens. + // 4. Validate the iss, aud, iat, and exp Claims in the same way they are + // validated in ID Tokens. + // 5. Verify that the Logout Token contains a sub Claim, a sid Claim, or both. + // 6. Verify that the Logout Token contains an events Claim whose value is JSON + // object containing the member name http://schemas.openid.net/event/backchannel-logout. + // 7. Verify that the Logout Token does not contain a nonce Claim. + // 8. Optionally verify that another Logout Token with the same jti value has + // not been recently received. + // 9. Optionally verify that the iss Logout Token Claim matches the iss Claim + // in an ID Token issued for the current session or a recent session of this + // RP with the OP. + // 10. Optionally verify that any sub Logout Token Claim matches the sub Claim + // in an ID Token issued for the current session or a recent session of + // this RP with the OP. + // 11. 
Optionally verify that any sid Logout Token Claim matches the sid Claim + // in an ID Token issued for the current session or a recent session of + // this RP with the OP. + // + // If any of the validation steps fails, reject the Logout Token and return an + // HTTP 400 Bad Request error. Otherwise, proceed to perform the logout actions. + // + // The ISS and AUD claims are already checked by the verify_signed_jwt() + // function. + + // This verifies (1), (2), (3) and the iss and aud claims for (4) + let token = verify_signed_jwt( + &request.logout_token, + JwtVerificationData { + issuer: provider.issuer.as_deref(), + jwks: &jwks, + client_id: &provider.client_id, + signing_algorithm: &provider.id_token_signed_response_alg, + }, + )?; + + let (_header, mut claims) = token.into_parts(); + + let time_options = TimeOptions::new(clock.now()); + claims::EXP.extract_required_with_options(&mut claims, &time_options)?; // (4) + claims::IAT.extract_required_with_options(&mut claims, &time_options)?; // (4) + + let sub = claims::SUB.extract_optional(&mut claims)?; // (5) + let sid = claims::SID.extract_optional(&mut claims)?; // (5) + if sub.is_none() && sid.is_none() { + return Err(RouteError::NoSubOrSidClaim); + } + + EVENTS.extract_required(&mut claims)?; // (6) + claims::NONCE.assert_absent(&claims)?; // (7) + + // Find the corresponding upstream OAuth 2.0 sessions + let mut filter = UpstreamOAuthSessionFilter::new().for_provider(&provider); + if let Some(sub) = &sub { + filter = filter.with_sub_claim(sub); + } + if let Some(sid) = &sid { + filter = filter.with_sid_claim(sid); + } + + let mut cursor = Pagination::first(100); + let mut sessions = Vec::new(); + loop { + let page = repo.upstream_oauth_session().list(filter, cursor).await?; + + for session in page.edges { + cursor = cursor.after(session.id); + sessions.push(session); + } + + if !page.has_next_page { + break; + } + } + + tracing::info!(sub, sid, %provider.id, "Backchannel logout received, found {} corresponding sessions", sessions.len()); + + Ok(()) +} diff --git a/crates/handlers/src/upstream_oauth2/mod.rs b/crates/handlers/src/upstream_oauth2/mod.rs index e4fa88d6f..272af648b 100644 --- a/crates/handlers/src/upstream_oauth2/mod.rs +++ b/crates/handlers/src/upstream_oauth2/mod.rs @@ -16,6 +16,7 @@ use thiserror::Error; use url::Url; pub(crate) mod authorize; +pub(crate) mod backchannel_logout; pub(crate) mod cache; pub(crate) mod callback; mod cookie; diff --git a/crates/jose/src/claims.rs b/crates/jose/src/claims.rs index 8115d2a76..a4449695f 100644 --- a/crates/jose/src/claims.rs +++ b/crates/jose/src/claims.rs @@ -182,6 +182,22 @@ where Err(e) => Err(e), } } + + /// Assert that the claim is absent. + /// + /// # Errors + /// + /// Returns an error if the claim is present. 
+ pub fn assert_absent( + &self, + claims: &HashMap, + ) -> Result<(), ClaimError> { + if claims.contains_key(self.claim) { + Err(ClaimError::InvalidClaim(self.claim)) + } else { + Ok(()) + } + } } #[derive(Debug, Clone)] @@ -525,7 +541,15 @@ mod oidc_core { pub const UPDATED_AT: Claim = Claim::new("updated_at"); } -pub use self::{oidc_core::*, rfc7519::*}; +/// Claims defined in OpenID.FrontChannel +/// +mod oidc_frontchannel { + use super::Claim; + + pub const SID: Claim = Claim::new("sid"); +} + +pub use self::{oidc_core::*, oidc_frontchannel::*, rfc7519::*}; #[cfg(test)] mod tests { diff --git a/crates/router/src/endpoints.rs b/crates/router/src/endpoints.rs index c285a36e1..3440f8bc6 100644 --- a/crates/router/src/endpoints.rs +++ b/crates/router/src/endpoints.rs @@ -738,6 +738,29 @@ impl Route for UpstreamOAuth2Link { } } +/// `POST /upstream/backchannel-logout/{id}` +pub struct UpstreamOAuth2BackchannelLogout { + id: Ulid, +} + +impl UpstreamOAuth2BackchannelLogout { + #[must_use] + pub const fn new(id: Ulid) -> Self { + Self { id } + } +} + +impl Route for UpstreamOAuth2BackchannelLogout { + type Query = (); + fn route() -> &'static str { + "/upstream/backchannel-logout/{provider_id}" + } + + fn path(&self) -> std::borrow::Cow<'static, str> { + format!("/upstream/backchannel-logout/{}", self.id).into() + } +} + /// `GET|POST /link` #[derive(Default, Serialize, Deserialize, Debug, Clone)] pub struct DeviceCodeLink { From ae06e4b5123abbe04e9a5309c135d35074791693 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Thu, 3 Jul 2025 17:57:59 +0200 Subject: [PATCH 06/10] storage: allow filtering browser sessions by which upstream session authd them --- crates/storage-pg/src/iden.rs | 12 ++++ crates/storage-pg/src/user/session.rs | 29 ++++++++- crates/storage-pg/src/user/tests.rs | 94 +++++++++++++++++++++++++++ crates/storage/src/user/session.rs | 20 ++++++ 4 files changed, 153 insertions(+), 2 deletions(-) diff --git a/crates/storage-pg/src/iden.rs b/crates/storage-pg/src/iden.rs index ab3ebe967..e6c03acc4 100644 --- a/crates/storage-pg/src/iden.rs +++ b/crates/storage-pg/src/iden.rs @@ -18,6 +18,18 @@ pub enum UserSessions { LastActiveIp, } +#[derive(sea_query::Iden)] +#[expect(dead_code)] +pub enum UserSessionAuthentications { + Table, + UserSessionAuthenticationId, + UserSessionId, + CreatedAt, + UserPasswordId, + #[iden = "upstream_oauth_authorization_session_id"] + UpstreamOAuthAuthorizationSessionId, +} + #[derive(sea_query::Iden)] pub enum Users { Table, diff --git a/crates/storage-pg/src/user/session.rs b/crates/storage-pg/src/user/session.rs index 9922cef9a..509c887c3 100644 --- a/crates/storage-pg/src/user/session.rs +++ b/crates/storage-pg/src/user/session.rs @@ -17,7 +17,7 @@ use mas_storage::{ user::{BrowserSessionFilter, BrowserSessionRepository}, }; use rand::RngCore; -use sea_query::{Expr, PostgresQueryBuilder}; +use sea_query::{Expr, PgFunc, PostgresQueryBuilder, Query}; use sea_query_binder::SqlxBinder; use sqlx::PgConnection; use ulid::Ulid; @@ -26,7 +26,7 @@ use uuid::Uuid; use crate::{ DatabaseError, DatabaseInconsistencyError, filter::StatementExt, - iden::{UserSessions, Users}, + iden::{UserSessionAuthentications, UserSessions, Users}, pagination::QueryBuilderExt, tracing::ExecuteExt, }; @@ -145,6 +145,31 @@ impl crate::filter::Filter for BrowserSessionFilter<'_> { .add_option(self.last_active_before().map(|last_active_before| { Expr::col((UserSessions::Table, UserSessions::LastActiveAt)).lt(last_active_before) })) + 
.add_option(self.authenticated_by_upstream_sessions().map(|sessions| { + // For filtering by upstream sessions, we need to hop over the + // `user_session_authentications` table + let session_ids: Vec<_> = sessions + .iter() + .map(|session| Uuid::from(session.id)) + .collect(); + + Expr::col((UserSessions::Table, UserSessions::UserSessionId)).in_subquery( + Query::select() + .expr(Expr::col(( + UserSessionAuthentications::Table, + UserSessionAuthentications::UserSessionId, + ))) + .from(UserSessionAuthentications::Table) + .and_where( + Expr::col(( + UserSessionAuthentications::Table, + UserSessionAuthentications::UpstreamOAuthAuthorizationSessionId, + )) + .eq(PgFunc::any(Expr::value(session_ids))), + ) + .take(), + ) + })) } } diff --git a/crates/storage-pg/src/user/tests.rs b/crates/storage-pg/src/user/tests.rs index de17fdeb5..280a44f38 100644 --- a/crates/storage-pg/src/user/tests.rs +++ b/crates/storage-pg/src/user/tests.rs @@ -5,14 +5,17 @@ // Please see LICENSE files in the repository root for full details. use chrono::Duration; +use mas_iana::jose::JsonWebSignatureAlg; use mas_storage::{ Clock, Pagination, RepositoryAccess, clock::MockClock, + upstream_oauth2::UpstreamOAuthProviderParams, user::{ BrowserSessionFilter, BrowserSessionRepository, UserEmailFilter, UserEmailRepository, UserFilter, UserPasswordRepository, UserRepository, }, }; +use oauth2_types::scope::{OPENID, Scope}; use rand::SeedableRng; use rand_chacha::ChaChaRng; use sqlx::PgPool; @@ -717,6 +720,97 @@ async fn test_user_session(pool: PgPool) { assert_eq!(repo.browser_session().count(all_bob).await.unwrap(), 5); assert_eq!(repo.browser_session().count(active_bob).await.unwrap(), 0); assert_eq!(repo.browser_session().count(finished).await.unwrap(), 11); + + // Checking the 'authenticaated by upstream sessions' filter + // We need a provider + let provider = repo + .upstream_oauth_provider() + .add( + &mut rng, + &clock, + UpstreamOAuthProviderParams { + issuer: None, + human_name: None, + brand_name: None, + scope: Scope::from_iter([OPENID]), + token_endpoint_auth_method: + mas_data_model::UpstreamOAuthProviderTokenAuthMethod::None, + token_endpoint_signing_alg: None, + id_token_signed_response_alg: JsonWebSignatureAlg::Rs256, + fetch_userinfo: false, + userinfo_signed_response_alg: None, + client_id: "client".to_owned(), + encrypted_client_secret: None, + claims_imports: mas_data_model::UpstreamOAuthProviderClaimsImports::default(), + authorization_endpoint_override: None, + token_endpoint_override: None, + userinfo_endpoint_override: None, + jwks_uri_override: None, + discovery_mode: mas_data_model::UpstreamOAuthProviderDiscoveryMode::Disabled, + pkce_mode: mas_data_model::UpstreamOAuthProviderPkceMode::Disabled, + response_mode: None, + additional_authorization_parameters: Vec::new(), + forward_login_hint: false, + ui_order: 0, + on_backchannel_logout: + mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::DoNothing, + }, + ) + .await + .unwrap(); + + // Start a authorization session + let upstream_oauth_session = repo + .upstream_oauth_session() + .add(&mut rng, &clock, &provider, "state".to_owned(), None, None) + .await + .unwrap(); + + // Start a browser session + let session = repo + .browser_session() + .add(&mut rng, &clock, &alice, None) + .await + .unwrap(); + + // Make the session from alice authenticated by this session + repo.browser_session() + .authenticate_with_upstream(&mut rng, &clock, &session, &upstream_oauth_session) + .await + .unwrap(); + + let session_list = 
vec![upstream_oauth_session]; + let filter = BrowserSessionFilter::new().authenticated_by_upstream_sessions_only(&session_list); + + // Now try to look it up + let page = repo + .browser_session() + .list(filter, Pagination::first(10)) + .await + .unwrap(); + assert_eq!(page.edges.len(), 1); + assert_eq!(page.edges[0].id, session.id); + + // Try counting + assert_eq!(repo.browser_session().count(filter).await.unwrap(), 1); + + // Try finishing the session + let affected = repo + .browser_session() + .finish_bulk(&clock, filter) + .await + .unwrap(); + assert_eq!(affected, 1); + + // Lookup the session by its ID + let lookup = repo + .browser_session() + .lookup(session.id) + .await + .unwrap() + .expect("session to be found in the database"); + // It should be finished + assert!(lookup.finished_at.is_some()); } #[sqlx::test(migrator = "crate::MIGRATOR")] diff --git a/crates/storage/src/user/session.rs b/crates/storage/src/user/session.rs index 79b377c94..02dceb87e 100644 --- a/crates/storage/src/user/session.rs +++ b/crates/storage/src/user/session.rs @@ -39,6 +39,7 @@ pub struct BrowserSessionFilter<'a> { state: Option, last_active_before: Option>, last_active_after: Option>, + authenticated_by_upstream_sessions: Option<&'a [UpstreamOAuthAuthorizationSession]>, } impl<'a> BrowserSessionFilter<'a> { @@ -110,6 +111,25 @@ impl<'a> BrowserSessionFilter<'a> { pub fn state(&self) -> Option { self.state } + + /// Only return browser sessions authenticated by the given upstream OAuth + /// sessions + #[must_use] + pub fn authenticated_by_upstream_sessions_only( + mut self, + upstream_oauth_sessions: &'a [UpstreamOAuthAuthorizationSession], + ) -> Self { + self.authenticated_by_upstream_sessions = Some(upstream_oauth_sessions); + self + } + + /// Get the upstream OAuth session filter + #[must_use] + pub fn authenticated_by_upstream_sessions( + &self, + ) -> Option<&'a [UpstreamOAuthAuthorizationSession]> { + self.authenticated_by_upstream_sessions + } } /// A [`BrowserSessionRepository`] helps interacting with [`BrowserSession`] From 6ccdca6dbc9ecb1ada3843c2b83441f99e8aa5e9 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Thu, 3 Jul 2025 17:58:57 +0200 Subject: [PATCH 07/10] Log out browser sessions when receiving a backchannel logout notification --- .../src/upstream_oauth2/backchannel_logout.rs | 22 +++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/crates/handlers/src/upstream_oauth2/backchannel_logout.rs b/crates/handlers/src/upstream_oauth2/backchannel_logout.rs index 082c0465f..6b749c591 100644 --- a/crates/handlers/src/upstream_oauth2/backchannel_logout.rs +++ b/crates/handlers/src/upstream_oauth2/backchannel_logout.rs @@ -12,7 +12,7 @@ use axum::{ }; use hyper::StatusCode; use mas_axum_utils::record_error; -use mas_data_model::UpstreamOAuthProvider; +use mas_data_model::{UpstreamOAuthProvider, UpstreamOAuthProviderOnBackchannelLogout}; use mas_jose::{ claims::{self, Claim, TimeOptions}, jwt::JwtDecodeError, @@ -23,6 +23,7 @@ use mas_oidc_client::{ }; use mas_storage::{ BoxClock, BoxRepository, Pagination, upstream_oauth2::UpstreamOAuthSessionFilter, + user::BrowserSessionFilter, }; use oauth2_types::errors::{ClientError, ClientErrorCode}; use serde::Deserialize; @@ -229,6 +230,9 @@ pub(crate) async fn post( filter = filter.with_sid_claim(sid); } + // Load the corresponding authentication sessions, by batches of 100s. It's + // VERY unlikely that we'll ever have more that 100 sessions for a single + // logout notification, but we'll handle it anyway. 
let mut cursor = Pagination::first(100); let mut sessions = Vec::new(); loop { @@ -244,7 +248,21 @@ pub(crate) async fn post( } } - tracing::info!(sub, sid, %provider.id, "Backchannel logout received, found {} corresponding sessions", sessions.len()); + tracing::info!(sub, sid, %provider.id, "Backchannel logout received, found {} corresponding authentication sessions", sessions.len()); + + match provider.on_backchannel_logout { + UpstreamOAuthProviderOnBackchannelLogout::DoNothing => { + tracing::warn!(%provider.id, "Provider configured to do nothing on backchannel logout"); + } + UpstreamOAuthProviderOnBackchannelLogout::LogoutBrowserOnly => { + let filter = + BrowserSessionFilter::new().authenticated_by_upstream_sessions_only(&sessions); + let affected = repo.browser_session().finish_bulk(&clock, filter).await?; + tracing::info!("Finished {affected} browser sessions"); + } + } + + repo.save().await?; Ok(()) } From e245cd831f9cb78eb6243b8e772c85ace2582fb6 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 4 Jul 2025 09:48:25 +0200 Subject: [PATCH 08/10] Compose filters for batch logging out of browser sessions Instead of having to load all authentication sessions in memory, we allow composing browser session filters with a upstream auth sessions filter --- .../src/upstream_oauth2/backchannel_logout.rs | 36 ++++++------------- crates/storage-pg/src/user/session.rs | 27 +++++++------- crates/storage-pg/src/user/tests.rs | 9 +++-- crates/storage/src/user/session.rs | 15 ++++---- 4 files changed, 38 insertions(+), 49 deletions(-) diff --git a/crates/handlers/src/upstream_oauth2/backchannel_logout.rs b/crates/handlers/src/upstream_oauth2/backchannel_logout.rs index 6b749c591..71d8b674f 100644 --- a/crates/handlers/src/upstream_oauth2/backchannel_logout.rs +++ b/crates/handlers/src/upstream_oauth2/backchannel_logout.rs @@ -22,7 +22,7 @@ use mas_oidc_client::{ requests::jose::{JwtVerificationData, verify_signed_jwt}, }; use mas_storage::{ - BoxClock, BoxRepository, Pagination, upstream_oauth2::UpstreamOAuthSessionFilter, + BoxClock, BoxRepository, upstream_oauth2::UpstreamOAuthSessionFilter, user::BrowserSessionFilter, }; use oauth2_types::errors::{ClientError, ClientErrorCode}; @@ -222,41 +222,27 @@ pub(crate) async fn post( claims::NONCE.assert_absent(&claims)?; // (7) // Find the corresponding upstream OAuth 2.0 sessions - let mut filter = UpstreamOAuthSessionFilter::new().for_provider(&provider); + let mut auth_session_filter = UpstreamOAuthSessionFilter::new().for_provider(&provider); if let Some(sub) = &sub { - filter = filter.with_sub_claim(sub); + auth_session_filter = auth_session_filter.with_sub_claim(sub); } if let Some(sid) = &sid { - filter = filter.with_sid_claim(sid); + auth_session_filter = auth_session_filter.with_sid_claim(sid); } + let count = repo + .upstream_oauth_session() + .count(auth_session_filter) + .await?; - // Load the corresponding authentication sessions, by batches of 100s. It's - // VERY unlikely that we'll ever have more that 100 sessions for a single - // logout notification, but we'll handle it anyway. 
- let mut cursor = Pagination::first(100); - let mut sessions = Vec::new(); - loop { - let page = repo.upstream_oauth_session().list(filter, cursor).await?; - - for session in page.edges { - cursor = cursor.after(session.id); - sessions.push(session); - } - - if !page.has_next_page { - break; - } - } - - tracing::info!(sub, sid, %provider.id, "Backchannel logout received, found {} corresponding authentication sessions", sessions.len()); + tracing::info!(sub, sid, %provider.id, "Backchannel logout received, found {count} corresponding authentication sessions"); match provider.on_backchannel_logout { UpstreamOAuthProviderOnBackchannelLogout::DoNothing => { tracing::warn!(%provider.id, "Provider configured to do nothing on backchannel logout"); } UpstreamOAuthProviderOnBackchannelLogout::LogoutBrowserOnly => { - let filter = - BrowserSessionFilter::new().authenticated_by_upstream_sessions_only(&sessions); + let filter = BrowserSessionFilter::new() + .authenticated_by_upstream_sessions_only(auth_session_filter); let affected = repo.browser_session().finish_bulk(&clock, filter).await?; tracing::info!("Finished {affected} browser sessions"); } diff --git a/crates/storage-pg/src/user/session.rs b/crates/storage-pg/src/user/session.rs index 509c887c3..db8a8cacf 100644 --- a/crates/storage-pg/src/user/session.rs +++ b/crates/storage-pg/src/user/session.rs @@ -17,7 +17,7 @@ use mas_storage::{ user::{BrowserSessionFilter, BrowserSessionRepository}, }; use rand::RngCore; -use sea_query::{Expr, PgFunc, PostgresQueryBuilder, Query}; +use sea_query::{Expr, PostgresQueryBuilder, Query}; use sea_query_binder::SqlxBinder; use sqlx::PgConnection; use ulid::Ulid; @@ -26,7 +26,7 @@ use uuid::Uuid; use crate::{ DatabaseError, DatabaseInconsistencyError, filter::StatementExt, - iden::{UserSessionAuthentications, UserSessions, Users}, + iden::{UpstreamOAuthAuthorizationSessions, UserSessionAuthentications, UserSessions, Users}, pagination::QueryBuilderExt, tracing::ExecuteExt, }; @@ -145,13 +145,17 @@ impl crate::filter::Filter for BrowserSessionFilter<'_> { .add_option(self.last_active_before().map(|last_active_before| { Expr::col((UserSessions::Table, UserSessions::LastActiveAt)).lt(last_active_before) })) - .add_option(self.authenticated_by_upstream_sessions().map(|sessions| { + .add_option(self.authenticated_by_upstream_sessions().map(|filter| { // For filtering by upstream sessions, we need to hop over the // `user_session_authentications` table - let session_ids: Vec<_> = sessions - .iter() - .map(|session| Uuid::from(session.id)) - .collect(); + let join_expr = Expr::col(( + UserSessionAuthentications::Table, + UserSessionAuthentications::UpstreamOAuthAuthorizationSessionId, + )) + .eq(Expr::col(( + UpstreamOAuthAuthorizationSessions::Table, + UpstreamOAuthAuthorizationSessions::UpstreamOAuthAuthorizationSessionId, + ))); Expr::col((UserSessions::Table, UserSessions::UserSessionId)).in_subquery( Query::select() @@ -160,13 +164,8 @@ impl crate::filter::Filter for BrowserSessionFilter<'_> { UserSessionAuthentications::UserSessionId, ))) .from(UserSessionAuthentications::Table) - .and_where( - Expr::col(( - UserSessionAuthentications::Table, - UserSessionAuthentications::UpstreamOAuthAuthorizationSessionId, - )) - .eq(PgFunc::any(Expr::value(session_ids))), - ) + .inner_join(UpstreamOAuthAuthorizationSessions::Table, join_expr) + .apply_filter(filter) .take(), ) })) diff --git a/crates/storage-pg/src/user/tests.rs b/crates/storage-pg/src/user/tests.rs index 280a44f38..898a30b65 100644 --- 
a/crates/storage-pg/src/user/tests.rs +++ b/crates/storage-pg/src/user/tests.rs @@ -9,7 +9,7 @@ use mas_iana::jose::JsonWebSignatureAlg; use mas_storage::{ Clock, Pagination, RepositoryAccess, clock::MockClock, - upstream_oauth2::UpstreamOAuthProviderParams, + upstream_oauth2::{UpstreamOAuthProviderParams, UpstreamOAuthSessionFilter}, user::{ BrowserSessionFilter, BrowserSessionRepository, UserEmailFilter, UserEmailRepository, UserFilter, UserPasswordRepository, UserRepository, @@ -779,8 +779,11 @@ async fn test_user_session(pool: PgPool) { .await .unwrap(); - let session_list = vec![upstream_oauth_session]; - let filter = BrowserSessionFilter::new().authenticated_by_upstream_sessions_only(&session_list); + // This will match all authorization sessions, which matches exactly that one + // authorization session + let upstream_oauth_session_filter = UpstreamOAuthSessionFilter::new(); + let filter = BrowserSessionFilter::new() + .authenticated_by_upstream_sessions_only(upstream_oauth_session_filter); // Now try to look it up let page = repo diff --git a/crates/storage/src/user/session.rs b/crates/storage/src/user/session.rs index 02dceb87e..23736bbfc 100644 --- a/crates/storage/src/user/session.rs +++ b/crates/storage/src/user/session.rs @@ -14,7 +14,10 @@ use mas_data_model::{ use rand_core::RngCore; use ulid::Ulid; -use crate::{Clock, Pagination, pagination::Page, repository_impl}; +use crate::{ + Clock, Pagination, pagination::Page, repository_impl, + upstream_oauth2::UpstreamOAuthSessionFilter, +}; #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum BrowserSessionState { @@ -39,7 +42,7 @@ pub struct BrowserSessionFilter<'a> { state: Option, last_active_before: Option>, last_active_after: Option>, - authenticated_by_upstream_sessions: Option<&'a [UpstreamOAuthAuthorizationSession]>, + authenticated_by_upstream_sessions: Option>, } impl<'a> BrowserSessionFilter<'a> { @@ -117,17 +120,15 @@ impl<'a> BrowserSessionFilter<'a> { #[must_use] pub fn authenticated_by_upstream_sessions_only( mut self, - upstream_oauth_sessions: &'a [UpstreamOAuthAuthorizationSession], + filter: UpstreamOAuthSessionFilter<'a>, ) -> Self { - self.authenticated_by_upstream_sessions = Some(upstream_oauth_sessions); + self.authenticated_by_upstream_sessions = Some(filter); self } /// Get the upstream OAuth session filter #[must_use] - pub fn authenticated_by_upstream_sessions( - &self, - ) -> Option<&'a [UpstreamOAuthAuthorizationSession]> { + pub fn authenticated_by_upstream_sessions(&self) -> Option> { self.authenticated_by_upstream_sessions } } From 8d6621f00ece46e011ccefab397603acc9e134a2 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Fri, 4 Jul 2025 12:49:07 +0200 Subject: [PATCH 09/10] Log out oauth & compat sessions when receiving a backchannel logout request --- crates/cli/src/sync.rs | 3 + crates/config/src/sections/upstream_oauth2.rs | 4 ++ .../src/upstream_oauth2/provider.rs | 3 + .../src/upstream_oauth2/backchannel_logout.rs | 68 ++++++++++++++++++- crates/storage-pg/src/compat/session.rs | 14 +++- crates/storage-pg/src/oauth2/session.rs | 14 +++- crates/storage/src/compat/session.rs | 19 +++++- crates/storage/src/oauth2/session.rs | 21 +++++- docs/config.schema.json | 7 ++ 9 files changed, 146 insertions(+), 7 deletions(-) diff --git a/crates/cli/src/sync.rs b/crates/cli/src/sync.rs index d8433c291..363c2a0f8 100644 --- a/crates/cli/src/sync.rs +++ b/crates/cli/src/sync.rs @@ -283,6 +283,9 @@ pub async fn config_sync( mas_config::UpstreamOAuth2OnBackchannelLogout::LogoutBrowserOnly => { 
mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::LogoutBrowserOnly } + mas_config::UpstreamOAuth2OnBackchannelLogout::LogoutAll => { + mas_data_model::UpstreamOAuthProviderOnBackchannelLogout::LogoutAll + } }; repo.upstream_oauth_provider() diff --git a/crates/config/src/sections/upstream_oauth2.rs b/crates/config/src/sections/upstream_oauth2.rs index 2cf43b530..2162c9fe4 100644 --- a/crates/config/src/sections/upstream_oauth2.rs +++ b/crates/config/src/sections/upstream_oauth2.rs @@ -418,6 +418,10 @@ pub enum OnBackchannelLogout { /// Only log out the MAS 'browser session' started by this OIDC session LogoutBrowserOnly, + + /// Log out all sessions started by this OIDC session, including MAS + /// 'browser sessions' and client sessions + LogoutAll, } impl OnBackchannelLogout { diff --git a/crates/data-model/src/upstream_oauth2/provider.rs b/crates/data-model/src/upstream_oauth2/provider.rs index c384366df..3a71c03c3 100644 --- a/crates/data-model/src/upstream_oauth2/provider.rs +++ b/crates/data-model/src/upstream_oauth2/provider.rs @@ -221,6 +221,7 @@ pub struct InvalidUpstreamOAuth2TokenAuthMethod(String); pub enum OnBackchannelLogout { DoNothing, LogoutBrowserOnly, + LogoutAll, } impl OnBackchannelLogout { @@ -229,6 +230,7 @@ impl OnBackchannelLogout { match self { Self::DoNothing => "do_nothing", Self::LogoutBrowserOnly => "logout_browser_only", + Self::LogoutAll => "logout_all", } } } @@ -246,6 +248,7 @@ impl std::str::FromStr for OnBackchannelLogout { match s { "do_nothing" => Ok(Self::DoNothing), "logout_browser_only" => Ok(Self::LogoutBrowserOnly), + "logout_all" => Ok(Self::LogoutAll), s => Err(InvalidUpstreamOAuth2OnBackchannelLogout(s.to_owned())), } } diff --git a/crates/handlers/src/upstream_oauth2/backchannel_logout.rs b/crates/handlers/src/upstream_oauth2/backchannel_logout.rs index 71d8b674f..9e2a034b9 100644 --- a/crates/handlers/src/upstream_oauth2/backchannel_logout.rs +++ b/crates/handlers/src/upstream_oauth2/backchannel_logout.rs @@ -3,7 +3,7 @@ // SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial // Please see LICENSE files in the repository root for full details. 
-use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use axum::{ Form, Json, @@ -22,7 +22,11 @@ use mas_oidc_client::{ requests::jose::{JwtVerificationData, verify_signed_jwt}, }; use mas_storage::{ - BoxClock, BoxRepository, upstream_oauth2::UpstreamOAuthSessionFilter, + BoxClock, BoxRepository, BoxRng, Pagination, + compat::CompatSessionFilter, + oauth2::OAuth2SessionFilter, + queue::{QueueJobRepositoryExt as _, SyncDevicesJob}, + upstream_oauth2::UpstreamOAuthSessionFilter, user::BrowserSessionFilter, }; use oauth2_types::errors::{ClientError, ClientErrorCode}; @@ -131,6 +135,7 @@ const EVENTS: Claim = Claim::new("events"); )] pub(crate) async fn post( clock: BoxClock, + mut rng: BoxRng, mut repo: BoxRepository, State(metadata_cache): State, State(client): State, @@ -242,10 +247,67 @@ pub(crate) async fn post( } UpstreamOAuthProviderOnBackchannelLogout::LogoutBrowserOnly => { let filter = BrowserSessionFilter::new() - .authenticated_by_upstream_sessions_only(auth_session_filter); + .authenticated_by_upstream_sessions_only(auth_session_filter) + .active_only(); let affected = repo.browser_session().finish_bulk(&clock, filter).await?; tracing::info!("Finished {affected} browser sessions"); } + UpstreamOAuthProviderOnBackchannelLogout::LogoutAll => { + let browser_session_filter = BrowserSessionFilter::new() + .authenticated_by_upstream_sessions_only(auth_session_filter); + + // We need to loop through all the browser sessions to find all the + // users affected so that we can trigger a device sync job for them + let mut cursor = Pagination::first(1000); + let mut user_ids = HashSet::new(); + loop { + let browser_sessions = repo + .browser_session() + .list(browser_session_filter, cursor) + .await?; + for browser_session in browser_sessions.edges { + user_ids.insert(browser_session.user.id); + cursor = cursor.after(browser_session.id); + } + + if !browser_sessions.has_next_page { + break; + } + } + + let browser_sessions_affected = repo + .browser_session() + .finish_bulk(&clock, browser_session_filter.active_only()) + .await?; + + let oauth2_session_filter = OAuth2SessionFilter::new() + .active_only() + .for_browser_sessions(browser_session_filter); + + let oauth2_sessions_affected = repo + .oauth2_session() + .finish_bulk(&clock, oauth2_session_filter) + .await?; + + let compat_session_filter = CompatSessionFilter::new() + .active_only() + .for_browser_sessions(browser_session_filter); + + let compat_sessions_affected = repo + .compat_session() + .finish_bulk(&clock, compat_session_filter) + .await?; + + tracing::info!( + "Finished {browser_sessions_affected} browser sessions, {oauth2_sessions_affected} OAuth 2.0 sessions and {compat_sessions_affected} compatibility sessions" + ); + + for user_id in user_ids { + tracing::info!(user.id = %user_id, "Queueing a device sync job for user"); + let job = SyncDevicesJob::new_for_id(user_id); + repo.queue_job().schedule_job(&mut rng, &clock, job).await?; + } + } } repo.save().await?; diff --git a/crates/storage-pg/src/compat/session.rs b/crates/storage-pg/src/compat/session.rs index 19e6366d6..d5d41fb7b 100644 --- a/crates/storage-pg/src/compat/session.rs +++ b/crates/storage-pg/src/compat/session.rs @@ -27,7 +27,7 @@ use uuid::Uuid; use crate::{ DatabaseError, DatabaseInconsistencyError, filter::{Filter, StatementExt, StatementWithJoinsExt}, - iden::{CompatSessions, CompatSsoLogins}, + iden::{CompatSessions, CompatSsoLogins, UserSessions}, pagination::QueryBuilderExt, tracing::ExecuteExt, }; @@ -190,6 +190,18 @@ impl Filter 
for CompatSessionFilter<'_> { Expr::col((CompatSessions::Table, CompatSessions::UserSessionId)) .eq(Uuid::from(browser_session.id)) })) + .add_option(self.browser_session_filter().map(|browser_session_filter| { + Expr::col((CompatSessions::Table, CompatSessions::UserSessionId)).in_subquery( + Query::select() + .expr(Expr::col(( + UserSessions::Table, + UserSessions::UserSessionId, + ))) + .apply_filter(browser_session_filter) + .from(UserSessions::Table) + .take(), + ) + })) .add_option(self.state().map(|state| { if state.is_active() { Expr::col((CompatSessions::Table, CompatSessions::FinishedAt)).is_null() diff --git a/crates/storage-pg/src/oauth2/session.rs b/crates/storage-pg/src/oauth2/session.rs index 3aa3877b1..00fc501a0 100644 --- a/crates/storage-pg/src/oauth2/session.rs +++ b/crates/storage-pg/src/oauth2/session.rs @@ -24,7 +24,7 @@ use uuid::Uuid; use crate::{ DatabaseError, DatabaseInconsistencyError, filter::{Filter, StatementExt}, - iden::{OAuth2Clients, OAuth2Sessions}, + iden::{OAuth2Clients, OAuth2Sessions, UserSessions}, pagination::QueryBuilderExt, tracing::ExecuteExt, }; @@ -141,6 +141,18 @@ impl Filter for OAuth2SessionFilter<'_> { Expr::col((OAuth2Sessions::Table, OAuth2Sessions::UserSessionId)) .eq(Uuid::from(browser_session.id)) })) + .add_option(self.browser_session_filter().map(|browser_session_filter| { + Expr::col((OAuth2Sessions::Table, OAuth2Sessions::UserSessionId)).in_subquery( + Query::select() + .expr(Expr::col(( + UserSessions::Table, + UserSessions::UserSessionId, + ))) + .apply_filter(browser_session_filter) + .from(UserSessions::Table) + .take(), + ) + })) .add_option(self.state().map(|state| { if state.is_active() { Expr::col((OAuth2Sessions::Table, OAuth2Sessions::FinishedAt)).is_null() diff --git a/crates/storage/src/compat/session.rs b/crates/storage/src/compat/session.rs index 2b964ba22..5287b4cee 100644 --- a/crates/storage/src/compat/session.rs +++ b/crates/storage/src/compat/session.rs @@ -12,7 +12,7 @@ use mas_data_model::{BrowserSession, CompatSession, CompatSsoLogin, Device, User use rand_core::RngCore; use ulid::Ulid; -use crate::{Clock, Page, Pagination, repository_impl}; +use crate::{Clock, Page, Pagination, repository_impl, user::BrowserSessionFilter}; #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum CompatSessionState { @@ -59,6 +59,7 @@ impl CompatSessionType { pub struct CompatSessionFilter<'a> { user: Option<&'a User>, browser_session: Option<&'a BrowserSession>, + browser_session_filter: Option>, state: Option, auth_type: Option, device: Option<&'a Device>, @@ -106,12 +107,28 @@ impl<'a> CompatSessionFilter<'a> { self } + /// Set the browser sessions filter + #[must_use] + pub fn for_browser_sessions( + mut self, + browser_session_filter: BrowserSessionFilter<'a>, + ) -> Self { + self.browser_session_filter = Some(browser_session_filter); + self + } + /// Get the browser session filter #[must_use] pub fn browser_session(&self) -> Option<&'a BrowserSession> { self.browser_session } + /// Get the browser sessions filter + #[must_use] + pub fn browser_session_filter(&self) -> Option> { + self.browser_session_filter + } + /// Only return sessions with a last active time before the given time #[must_use] pub fn with_last_active_before(mut self, last_active_before: DateTime) -> Self { diff --git a/crates/storage/src/oauth2/session.rs b/crates/storage/src/oauth2/session.rs index faf933a7f..5d217c1e2 100644 --- a/crates/storage/src/oauth2/session.rs +++ b/crates/storage/src/oauth2/session.rs @@ -13,7 +13,7 @@ use 
 use rand_core::RngCore;
 use ulid::Ulid;
 
-use crate::{Clock, Pagination, pagination::Page, repository_impl};
+use crate::{Clock, Pagination, pagination::Page, repository_impl, user::BrowserSessionFilter};
 
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
 pub enum OAuth2SessionState {
@@ -49,6 +49,7 @@ pub struct OAuth2SessionFilter<'a> {
     user: Option<&'a User>,
     any_user: Option,
     browser_session: Option<&'a BrowserSession>,
+    browser_session_filter: Option<BrowserSessionFilter<'a>>,
     device: Option<&'a Device>,
     client: Option<&'a Client>,
     client_kind: Option,
@@ -109,6 +110,16 @@ impl<'a> OAuth2SessionFilter<'a> {
         self
     }
 
+    /// List sessions started by a set of browser sessions
+    #[must_use]
+    pub fn for_browser_sessions(
+        mut self,
+        browser_session_filter: BrowserSessionFilter<'a>,
+    ) -> Self {
+        self.browser_session_filter = Some(browser_session_filter);
+        self
+    }
+
     /// Get the browser session filter
     ///
     /// Returns [`None`] if no browser session filter was set
     #[must_use]
     pub fn browser_session(&self) -> Option<&'a BrowserSession> {
         self.browser_session
     }
 
+    /// Get the browser sessions filter
+    ///
+    /// Returns [`None`] if no browser session filter was set
+    #[must_use]
+    pub fn browser_session_filter(&self) -> Option<BrowserSessionFilter<'a>> {
+        self.browser_session_filter
+    }
+
     /// List sessions for a specific client
     #[must_use]
     pub fn for_client(mut self, client: &'a Client) -> Self {
diff --git a/docs/config.schema.json b/docs/config.schema.json
index cf2793c25..abb811fca 100644
--- a/docs/config.schema.json
+++ b/docs/config.schema.json
@@ -2459,6 +2459,13 @@
             "enum": [
               "logout_browser_only"
             ]
+          },
+          {
+            "description": "Log out all sessions started by this OIDC session, including MAS 'browser sessions' and client sessions",
+            "type": "string",
+            "enum": [
+              "logout_all"
+            ]
           }
         ]
       },

From 3bc3db15279b108860a76596e8703c17556bbd4b Mon Sep 17 00:00:00 2001
From: Quentin Gliech
Date: Fri, 4 Jul 2025 16:09:48 +0200
Subject: [PATCH 10/10] Add documentation for backchannel logout

---
 docs/reference/configuration.md |  7 +++++++
 docs/setup/sso.md               | 28 +++++++++++++++++++++++++---
 2 files changed, 32 insertions(+), 3 deletions(-)

diff --git a/docs/reference/configuration.md b/docs/reference/configuration.md
index 3d4f5c4ae..4dad3d6a0 100644
--- a/docs/reference/configuration.md
+++ b/docs/reference/configuration.md
@@ -740,6 +740,13 @@ upstream_oauth2:
       # authorization request.
       #forward_login_hint: false
 
+      # What to do when receiving an OIDC Backchannel logout request.
+      # Possible values are:
+      #  - `do_nothing` (default): do nothing, other than validating and logging the request
+      #  - `logout_browser_only`: Only log out the MAS 'browser session' started by this OIDC session
+      #  - `logout_all`: Log out all sessions started by this OIDC session, including MAS 'browser sessions' and client sessions
+      #on_backchannel_logout: do_nothing
+
       # How user attributes should be mapped
       #
       # Most of those attributes have two main properties:
diff --git a/docs/setup/sso.md b/docs/setup/sso.md
index 0f3994825..4d82bd9a3 100644
--- a/docs/setup/sso.md
+++ b/docs/setup/sso.md
@@ -24,6 +24,7 @@ The general configuration usually goes as follows:
   - `response_type`: `code`
   - `response_mode`: `query`
   - `grant_type`: `authorization_code`
+  - (optional) `backchannel_logout_uri`: `https://<auth-service-domain>/upstream/backchannel-logout/<provider-id>`
 - fill the `upstream_oauth2` section of the configuration file with the following parameters:
   - `providers`:
     - `id`: the previously generated ULID
@@ -73,6 +74,25 @@ In such cases, the `human_name` parameter of the provider configuration is used
 
 If there is only one upstream provider configured and the local password database is disabled ([`passwords.enabled`](../reference/configuration.md#passwords) is set to `false`), the authentication service will automatically trigger an authorization flow with this provider.
 
+## Backchannel logout
+
+The service supports receiving [OpenID Connect Back-Channel Logout](https://openid.net/specs/openid-connect-backchannel-1_0.html) requests.
+These are notifications from the upstream provider that the user has logged out of the provider.
+
+The backchannel logout URI must be configured in the provider as `https://<auth-service-domain>/upstream/backchannel-logout/<provider-id>`, where `<provider-id>` is the `id` of the provider.
+
+By default, the authentication service will not perform any action when receiving a backchannel logout request.
+The [`on_backchannel_logout`](../reference/configuration.md#upstream_oauth2) option can be used to configure what to do when receiving a backchannel logout request.
+
+Possible values are:
+
+ - `do_nothing`: Do nothing, other than validating and logging the request
+ - `logout_browser_only`: Only log out the MAS 'browser session' started by this OIDC session
+ - `logout_all`: Log out all sessions started by this OIDC session, including MAS 'browser sessions' and client sessions
+
+One important caveat is that `logout_all` will log out all sessions started by this upstream OIDC session, including 'remote' ones established through the Device Code flow.
+Concretely, this means that if QR-code login is used to log in on a phone from a laptop, when MAS receives a backchannel logout request from the upstream provider for the laptop, MAS will also log out the session on the phone.
+
 ## Sample configurations
 
 This section contains sample configurations for popular OIDC providers.
@@ -93,12 +113,11 @@ upstream_oauth2:
       response_mode: "form_post"
       token_endpoint_auth_method: "sign_in_with_apple"
       sign_in_with_apple:
-        # Only one of the below should be filled for the private key
         private_key_file: "" # TO BE FILLED
         private_key: | # TO BE FILLED
           #
 
-
+
         team_id: "" # TO BE FILLED
         key_id: "" # TO BE FILLED
       claims_imports:
@@ -386,6 +405,9 @@ Follow the [Getting Started Guide](https://www.keycloak.org/guides) to install K
   | Client Protocol | `openid-connect` |
   | Access Type | `confidential` |
   | Valid Redirect URIs | `https://<auth-service-domain>/upstream/callback/<provider-id>` |
+  | Front channel logout | `Off` |
+  | Backchannel logout URL | `https://<auth-service-domain>/upstream/backchannel-logout/<provider-id>` |
+  | Backchannel logout session required | `On` |
 
 5. Click `Save`
 6. On the Credentials tab, update the fields:
@@ -554,4 +576,4 @@ To use a Rauthy-supported [Ephemeral Client](https://sebadob.github.io/rauthy/wo
   "access_token_signed_response_alg": "RS256",
   "id_token_signed_response_alg": "RS256"
 }
-```
\ No newline at end of file
+```
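To illustrate the option this series introduces end to end, here is a minimal sketch of a provider entry with backchannel logout enabled. The `id`, `issuer`, `client_id`, `client_secret` and `scope` values below are placeholders chosen for the example; only `on_backchannel_logout` is the new knob added by these patches.

```yaml
upstream_oauth2:
  providers:
    - id: "01JAYS74TCG3BTWKADN5Q4518C"   # placeholder ULID
      issuer: "https://idp.example.com/" # placeholder issuer
      client_id: "mas"
      client_secret: "verysecret"
      token_endpoint_auth_method: "client_secret_basic"
      scope: "openid profile email"
      # When the provider sends a backchannel logout request for an upstream
      # session, end every MAS browser session, OAuth 2.0 session and
      # compatibility session that was started from it
      on_backchannel_logout: logout_all
```

With such a configuration, the provider must also be set up to send its backchannel logout requests to `https://<auth-service-domain>/upstream/backchannel-logout/<provider-id>`, as described in the documentation added above.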