Skip to content

Commit bbe935e

Browse files
authored
chore: bump rust-toolchain to 1.88.0 (#7835)
Required to get https://github.com/getsentry/sentry-options working for Snuba.
1 parent 6e28280 commit bbe935e

File tree

8 files changed

+18
-26
lines changed

8 files changed

+18
-26
lines changed

rust_snuba/rust-toolchain.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
11
[toolchain]
2-
channel = "1.85.0"
2+
channel = "1.88.0"

rust_snuba/src/consumer.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -324,7 +324,7 @@ pub fn process_message(
324324
// XXX: Currently only takes the message payload and metadata. This assumes
325325
// key and headers are not used for message processing
326326
let func = processors::get_processing_function(name).ok_or(SnubaRustError::new_err(
327-
format!("processor '{}' not found", name),
327+
format!("processor '{name}' not found"),
328328
))?;
329329

330330
let payload = KafkaPayload::new(None, None, Some(value));
@@ -338,15 +338,15 @@ pub fn process_message(
338338
match func {
339339
processors::ProcessingFunctionType::ProcessingFunction(f) => {
340340
let res = f(payload, meta, &config::ProcessorConfig::default())
341-
.map_err(|e| SnubaRustError::new_err(format!("invalid message: {:?}", e)))?;
341+
.map_err(|e| SnubaRustError::new_err(format!("invalid message: {e:?}")))?;
342342

343343
let payload = PyBytes::new(py, &res.rows.into_encoded_rows()).into();
344344

345345
Ok((Some(payload), None))
346346
}
347347
processors::ProcessingFunctionType::ProcessingFunctionWithReplacements(f) => {
348348
let res = f(payload, meta, &config::ProcessorConfig::default())
349-
.map_err(|e| SnubaRustError::new_err(format!("invalid message: {:?}", e)))?;
349+
.map_err(|e| SnubaRustError::new_err(format!("invalid message: {e:?}")))?;
350350

351351
match res {
352352
InsertOrReplacement::Insert(r) => {

rust_snuba/src/metrics/statsd.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ impl MetricSink for Wrapper {
2929

3030
impl StatsDBackend {
3131
pub fn new(host: &str, port: u16, prefix: &str) -> Self {
32-
let upstream_addr = format!("{}:{}", host, port);
32+
let upstream_addr = format!("{host}:{port}");
3333
let aggregator_sink = StatsdProxyMetricSink::new(move || {
3434
let upstream = Upstream::new(upstream_addr.clone()).unwrap();
3535

rust_snuba/src/processors/eap_items.rs

Lines changed: 8 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1007,7 +1007,7 @@ mod tests {
10071007
// Verify bucketed string attributes have the same content
10081008
// "str_attr" should be in one of the string buckets
10091009
let str_bucket = (fnv_1a("str_attr".as_bytes()) as usize) % 40;
1010-
let json_field = format!("attributes_string_{}", str_bucket);
1010+
let json_field = format!("attributes_string_{str_bucket}");
10111011
let json_str_map: HashMap<String, String> = json_row
10121012
.get(&json_field)
10131013
.map(|v| serde_json::from_value(v.clone()).unwrap())
@@ -1070,8 +1070,7 @@ mod tests {
10701070
assert_eq!(
10711071
rb_str_val,
10721072
Some(&"hello".to_string()),
1073-
"str_attr not found in RowBinary output bucket {}",
1074-
str_bucket
1073+
"str_attr not found in RowBinary output bucket {str_bucket}"
10751074
);
10761075
assert_eq!(
10771076
json_str_map.get("str_attr"),
@@ -1082,7 +1081,7 @@ mod tests {
10821081
// Verify float attribute is in the correct bucket (int_attr and bool_attr are
10831082
// double-written as floats)
10841083
let float_bucket = (fnv_1a("float_attr".as_bytes()) as usize) % 40;
1085-
let json_float_field = format!("attributes_float_{}", float_bucket);
1084+
let json_float_field = format!("attributes_float_{float_bucket}");
10861085
let json_float_map: HashMap<String, f64> = json_row
10871086
.get(&json_float_field)
10881087
.map(|v| serde_json::from_value(v.clone()).unwrap())
@@ -1127,7 +1126,7 @@ mod tests {
11271126
let database = std::env::var("CLICKHOUSE_DATABASE").unwrap_or("default".to_string());
11281127

11291128
let client = clickhouse::Client::default()
1130-
.with_url(format!("http://{}:{}", host, http_port))
1129+
.with_url(format!("http://{host}:{http_port}"))
11311130
.with_database(&database)
11321131
.with_option("input_format_binary_read_json_as_string", "1")
11331132
.with_option("insert_deduplicate", "0");
@@ -1193,8 +1192,7 @@ mod tests {
11931192
// Read it back using organization_id (primary key prefix) for reliable lookup
11941193
let count: u64 = client
11951194
.query(&format!(
1196-
"SELECT count() FROM eap_items_1_local WHERE organization_id = {}",
1197-
unique_org_id
1195+
"SELECT count() FROM eap_items_1_local WHERE organization_id = {unique_org_id}"
11981196
))
11991197
.fetch_one()
12001198
.await
@@ -1208,9 +1206,8 @@ mod tests {
12081206
.query(&format!(
12091207
"SELECT organization_id, project_id, item_type, sampling_weight \
12101208
FROM eap_items_1_local \
1211-
WHERE organization_id = {} \
1212-
LIMIT 1",
1213-
unique_org_id
1209+
WHERE organization_id = {unique_org_id} \
1210+
LIMIT 1"
12141211
))
12151212
.fetch_one::<(u64, u64, u8, u64)>()
12161213
.await
@@ -1224,8 +1221,7 @@ mod tests {
12241221
// Clean up
12251222
client
12261223
.query(&format!(
1227-
"ALTER TABLE eap_items_1_local DELETE WHERE organization_id = {}",
1228-
unique_org_id
1224+
"ALTER TABLE eap_items_1_local DELETE WHERE organization_id = {unique_org_id}"
12291225
))
12301226
.execute()
12311227
.await

rust_snuba/src/processors/errors.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -90,6 +90,7 @@ pub fn process_message_with_replacement(
9090

9191
#[derive(Debug, Deserialize, JsonSchema)]
9292
#[serde(untagged)]
93+
#[allow(clippy::large_enum_variant)]
9394
enum Message {
9495
FourTrain(FourTrain),
9596
ThreeTrain(ThreeTrain),
@@ -548,7 +549,7 @@ impl ErrorRow {
548549
for (key, value) in container.unwrap_or_default() {
549550
if let Some(v) = value.0 {
550551
if key != "type" {
551-
contexts_keys.push(format!("{}.{}", container_name, key));
552+
contexts_keys.push(format!("{container_name}.{key}"));
552553
contexts_values.push(v);
553554
}
554555
}

rust_snuba/src/rebalancing.rs

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -30,11 +30,7 @@ pub fn delay_kafka_rebalance(configured_delay_secs: u64) {
3030

3131
pub fn get_rebalance_delay_secs(consumer_group: &str) -> Option<u64> {
3232
runtime_config::get_str_config(
33-
format!(
34-
"quantized_rebalance_consumer_group_delay_secs__{}",
35-
consumer_group
36-
)
37-
.as_str(),
33+
format!("quantized_rebalance_consumer_group_delay_secs__{consumer_group}").as_str(),
3834
)
3935
.ok()??
4036
.parse()

rust_snuba/src/strategies/clickhouse/writer_v2.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -318,7 +318,7 @@ mod tests {
318318
async fn it_works() -> Result<(), reqwest::Error> {
319319
crate::testutils::initialize_python();
320320
let config = make_test_config();
321-
println!("config: {:?}", config);
321+
println!("config: {config:?}");
322322
let client = ClickhouseClient::new(&config, "querylog_local", "test_storage".to_string());
323323

324324
let url = client.build_url();

rust_snuba/src/strategies/processor.rs

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -535,8 +535,7 @@ mod tests {
535535
assert_eq!(
536536
IP_REGEX.is_match(address),
537537
is_ipv4,
538-
"{} failed IPv4 validation",
539-
address
538+
"{address} failed IPv4 validation"
540539
);
541540
}
542541
}

0 commit comments

Comments (0)