diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 2973490bc..a9985aaf6 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -225,8 +225,8 @@ jobs:
- name: Install toolchain
run: |
- rustup toolchain install 1.83.0
- rustup default 1.83.0
+ rustup toolchain install 1.84.0
+ rustup default 1.84.0
rustup component add clippy
- name: Setup OPA
diff --git a/Dockerfile b/Dockerfile
index 618a8d732..b7d5fc0a8 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -8,7 +8,7 @@
# The Debian version and version name must be in sync
ARG DEBIAN_VERSION=12
ARG DEBIAN_VERSION_NAME=bookworm
-ARG RUSTC_VERSION=1.83.0
+ARG RUSTC_VERSION=1.84.0
ARG NODEJS_VERSION=20.15.0
ARG OPA_VERSION=0.64.1
ARG CARGO_AUDITABLE_VERSION=0.6.6
diff --git a/crates/cli/src/commands/doctor.rs b/crates/cli/src/commands/doctor.rs
index d507b8308..c0ce4e261 100644
--- a/crates/cli/src/commands/doctor.rs
+++ b/crates/cli/src/commands/doctor.rs
@@ -47,8 +47,8 @@ See {DOCS_BASE}/setup/homeserver.html",
if !issuer.starts_with("https://") {
warn!(
- r#"⚠️ The issuer in the config (`http.issuer`/`http.public_base`) is not an HTTPS URL.
-This means some clients will refuse to use it."#
+ r"⚠️ The issuer in the config (`http.issuer`/`http.public_base`) is not an HTTPS URL.
+This means some clients will refuse to use it."
);
}
diff --git a/crates/handlers/src/upstream_oauth2/cache.rs b/crates/handlers/src/upstream_oauth2/cache.rs
index cac97a410..7d63369d7 100644
--- a/crates/handlers/src/upstream_oauth2/cache.rs
+++ b/crates/handlers/src/upstream_oauth2/cache.rs
@@ -41,9 +41,9 @@ impl<'a> LazyProviderInfos<'a> {
/// Trigger the discovery process and return the metadata if discovery is
/// enabled.
- pub async fn maybe_discover<'b>(
- &'b mut self,
- ) -> Result<Option<&'b VerifiedProviderMetadata>, DiscoveryError> {
+ pub async fn maybe_discover(
+ &mut self,
+ ) -> Result<Option<&VerifiedProviderMetadata>, DiscoveryError> {
match self.load().await {
Ok(metadata) => Ok(Some(metadata)),
Err(DiscoveryError::Disabled) => Ok(None),
@@ -51,7 +51,7 @@ impl<'a> LazyProviderInfos<'a> {
}
}
- async fn load<'b>(&'b mut self) -> Result<&'b VerifiedProviderMetadata, DiscoveryError> {
+ async fn load(&mut self) -> Result<&VerifiedProviderMetadata, DiscoveryError> {
if self.loaded_metadata.is_none() {
let verify = match self.provider.discovery_mode {
UpstreamOAuthProviderDiscoveryMode::Oidc => true,
diff --git a/crates/handlers/src/upstream_oauth2/link.rs b/crates/handlers/src/upstream_oauth2/link.rs
index b37c4fce7..ffe07f646 100644
--- a/crates/handlers/src/upstream_oauth2/link.rs
+++ b/crates/handlers/src/upstream_oauth2/link.rs
@@ -429,8 +429,8 @@ pub(crate) async fn get(
let ctx = ErrorContext::new()
.with_code("User exists")
.with_description(format!(
- r#"Upstream account provider returned {localpart:?} as username,
- which is not linked to that upstream account"#
+ r"Upstream account provider returned {localpart:?} as username,
+ which is not linked to that upstream account"
))
.with_language(&locale);
@@ -449,8 +449,8 @@ pub(crate) async fn get(
let ctx = ErrorContext::new()
.with_code("Policy error")
.with_description(format!(
- r#"Upstream account provider returned {localpart:?} as username,
- which does not pass the policy check: {res}"#
+ r"Upstream account provider returned {localpart:?} as username,
+ which does not pass the policy check: {res}"
))
.with_language(&locale);
@@ -593,7 +593,7 @@ pub(crate) async fn post(
// Is the email verified according to the upstream provider?
let provider_email_verified = env
.render_str("{{ user.email_verified | string }}", &context)
- .map_or(false, |v| v == "true");
+ .is_ok_and(|v| v == "true");
// Create a template context in case we need to re-render because of an error
let ctx = UpstreamRegister::new(link.clone(), provider.clone());
diff --git a/crates/iana-codegen/src/gen.rs b/crates/iana-codegen/src/gen.rs
index 32b03a629..ea84297f8 100644
--- a/crates/iana-codegen/src/gen.rs
+++ b/crates/iana-codegen/src/gen.rs
@@ -22,10 +22,10 @@ pub fn struct_def(
) -> std::fmt::Result {
write!(
f,
- r#"/// {}
+ r"/// {}
///
/// Source: <{}>
-#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]"#,
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]",
section.doc,
section.url.unwrap(),
)?;
@@ -33,15 +33,15 @@ pub fn struct_def(
if !is_exhaustive {
write!(
f,
- r#"
-#[non_exhaustive]"#
+ r"
+#[non_exhaustive]"
)?;
}
write!(
f,
- r#"
-pub enum {} {{"#,
+ r"
+pub enum {} {{",
section.key,
)?;
for member in list {
@@ -72,9 +72,9 @@ pub fn display_impl(
) -> std::fmt::Result {
write!(
f,
- r#"impl core::fmt::Display for {} {{
+ r"impl core::fmt::Display for {} {{
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {{
- match self {{"#,
+ match self {{",
section.key,
)?;
@@ -97,10 +97,10 @@ pub fn display_impl(
writeln!(
f,
- r#"
+ r"
}}
}}
-}}"#,
+}}",
)
}
@@ -117,11 +117,11 @@ pub fn from_str_impl(
};
write!(
f,
- r#"impl core::str::FromStr for {} {{
+ r"impl core::str::FromStr for {} {{
type Err = {err_ty};
fn from_str(s: &str) -> Result<Self, Self::Err> {{
- match s {{"#,
+ match s {{",
section.key,
)?;
@@ -137,23 +137,23 @@ pub fn from_str_impl(
if is_exhaustive {
write!(
f,
- r#"
- _ => Err(crate::ParseError::new()),"#
+ r"
+ _ => Err(crate::ParseError::new()),"
)?;
} else {
write!(
f,
- r#"
- value => Ok(Self::Unknown(value.to_owned())),"#,
+ r"
+ value => Ok(Self::Unknown(value.to_owned())),",
)?;
}
writeln!(
f,
- r#"
+ r"
}}
}}
-}}"#,
+}}",
)
}
@@ -179,22 +179,22 @@ impl schemars::JsonSchema for {} {{
for member in list {
write!(
f,
- r#"
+ r"
// ---
- schemars::schema::SchemaObject {{"#,
+ schemars::schema::SchemaObject {{",
)?;
if let Some(description) = &member.description {
write!(
f,
- r#"
+ r"
metadata: Some(Box::new(schemars::schema::Metadata {{
description: Some(
// ---
{}.to_owned(),
),
..Default::default()
- }})),"#,
+ }})),",
raw_string(description),
)?;
}
@@ -212,7 +212,7 @@ impl schemars::JsonSchema for {} {{
writeln!(
f,
- r#"
+ r"
];
let description = {};
@@ -229,7 +229,7 @@ impl schemars::JsonSchema for {} {{
}}
.into()
}}
-}}"#,
+}}",
raw_string(section.doc),
)
}
diff --git a/crates/iana-codegen/src/main.rs b/crates/iana-codegen/src/main.rs
index 2a779c44d..75d3e660a 100644
--- a/crates/iana-codegen/src/main.rs
+++ b/crates/iana-codegen/src/main.rs
@@ -75,7 +75,7 @@ impl Display for File {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
writeln!(
f,
- r#"// Copyright 2024 New Vector Ltd.
+ r"// Copyright 2024 New Vector Ltd.
// Copyright 2023, 2024 The Matrix.org Foundation C.I.C.
//
// SPDX-License-Identifier: AGPL-3.0-only
@@ -86,7 +86,7 @@ impl Display for File {
//! Enums from the {:?} IANA registry
//! See <{}>
-// Do not edit this file manually"#,
+// Do not edit this file manually",
self.registry_name, self.registry_url,
)?;
diff --git a/crates/listener/src/server.rs b/crates/listener/src/server.rs
index 169a2256f..823ed1003 100644
--- a/crates/listener/src/server.rs
+++ b/crates/listener/src/server.rs
@@ -217,7 +217,7 @@ where
let tls = stream.tls_info();
// Figure out if it's HTTP/2 based on the negotiated ALPN info
- let is_h2 = tls.as_ref().map_or(false, TlsStreamInfo::is_alpn_h2);
+ let is_h2 = tls.as_ref().is_some_and(TlsStreamInfo::is_alpn_h2);
let info = ConnectionInfo {
tls,
diff --git a/crates/storage-pg/.sqlx/query-9ad4e6e9bfedea476d1f47753e4738455e94eade48ad5f577e53278cc70dc266.json b/crates/storage-pg/.sqlx/query-fcd8b4b9e003d1540357c6bf1ff9c715560d011d4c01112703a9c046170c84f1.json
similarity index 50%
rename from crates/storage-pg/.sqlx/query-9ad4e6e9bfedea476d1f47753e4738455e94eade48ad5f577e53278cc70dc266.json
rename to crates/storage-pg/.sqlx/query-fcd8b4b9e003d1540357c6bf1ff9c715560d011d4c01112703a9c046170c84f1.json
index 6a0c3b950..ef1ac0372 100644
--- a/crates/storage-pg/.sqlx/query-9ad4e6e9bfedea476d1f47753e4738455e94eade48ad5f577e53278cc70dc266.json
+++ b/crates/storage-pg/.sqlx/query-fcd8b4b9e003d1540357c6bf1ff9c715560d011d4c01112703a9c046170c84f1.json
@@ -1,11 +1,11 @@
{
"db_name": "PostgreSQL",
- "query": "\n SELECT\n queue_schedules.schedule_name,\n queue_schedules.last_scheduled_at,\n queue_jobs.status IN ('completed', 'failed') as last_scheduled_job_completed\n FROM queue_schedules\n LEFT JOIN queue_jobs\n ON queue_jobs.queue_job_id = queue_schedules.last_scheduled_job_id\n ",
+ "query": "\n SELECT\n queue_schedules.schedule_name as \"schedule_name!\",\n queue_schedules.last_scheduled_at,\n queue_jobs.status IN ('completed', 'failed') as last_scheduled_job_completed\n FROM queue_schedules\n LEFT JOIN queue_jobs\n ON queue_jobs.queue_job_id = queue_schedules.last_scheduled_job_id\n ",
"describe": {
"columns": [
{
"ordinal": 0,
- "name": "schedule_name",
+ "name": "schedule_name!",
"type_info": "Text"
},
{
@@ -23,10 +23,10 @@
"Left": []
},
"nullable": [
- false,
+ true,
true,
null
]
},
- "hash": "9ad4e6e9bfedea476d1f47753e4738455e94eade48ad5f577e53278cc70dc266"
+ "hash": "fcd8b4b9e003d1540357c6bf1ff9c715560d011d4c01112703a9c046170c84f1"
}
diff --git a/crates/storage-pg/src/queue/schedule.rs b/crates/storage-pg/src/queue/schedule.rs
index 3594cee7e..afd09a8e3 100644
--- a/crates/storage-pg/src/queue/schedule.rs
+++ b/crates/storage-pg/src/queue/schedule.rs
@@ -69,7 +69,7 @@ impl QueueScheduleRepository for PgQueueScheduleRepository<'_> {
ScheduleLookup,
r#"
SELECT
- queue_schedules.schedule_name,
+ queue_schedules.schedule_name as "schedule_name!",
queue_schedules.last_scheduled_at,
queue_jobs.status IN ('completed', 'failed') as last_scheduled_job_completed
FROM queue_schedules