Skip to content

Commit 1f1a038

Browse files
committed
Fix clippy warnings
Fix various clippy warnings in both source and tests
1 parent 9be7eca commit 1f1a038

File tree

14 files changed

+72
-64
lines changed

14 files changed

+72
-64
lines changed

.github/workflows/ci.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ jobs:
2020
components: rustfmt, clippy
2121
- run: cargo fmt -- --check
2222
- run: cargo clippy -- -Dwarnings
23+
- run: cargo clippy --tests -- -Dwarnings
2324
- run: cargo test --doc
2425

2526
check:

rdkafka-sys/build.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ fn main() {
7777
// Ensure that we are in the right directory
7878
let rdkafkasys_root = Path::new("rdkafka-sys");
7979
if rdkafkasys_root.exists() {
80-
assert!(env::set_current_dir(&rdkafkasys_root).is_ok());
80+
assert!(env::set_current_dir(rdkafkasys_root).is_ok());
8181
}
8282
if !Path::new("librdkafka/LICENSE").exists() {
8383
eprintln!("Setting up submodules");

src/consumer/mod.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -100,12 +100,12 @@ pub trait ConsumerContext: ClientContext + Sized {
100100
/// Pre-rebalance callback. This method will run before the rebalance and
101101
/// should terminate its execution quickly.
102102
#[allow(unused_variables)]
103-
fn pre_rebalance<'a>(&self, base_consumer: &BaseConsumer<Self>, rebalance: &Rebalance<'a>) {}
103+
fn pre_rebalance(&self, base_consumer: &BaseConsumer<Self>, rebalance: &Rebalance<'_>) {}
104104

105105
/// Post-rebalance callback. This method will run after the rebalance and
106106
/// should terminate its execution quickly.
107107
#[allow(unused_variables)]
108-
fn post_rebalance<'a>(&self, base_consumer: &BaseConsumer<Self>, rebalance: &Rebalance<'a>) {}
108+
fn post_rebalance(&self, base_consumer: &BaseConsumer<Self>, rebalance: &Rebalance<'_>) {}
109109

110110
// TODO: convert pointer to structure
111111
/// Post commit callback. This method will run after a group of offsets was

src/message.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -425,20 +425,20 @@ impl<'a> Message for BorrowedMessage<'a> {
425425
type Headers = BorrowedHeaders;
426426

427427
fn key(&self) -> Option<&[u8]> {
428-
unsafe { util::ptr_to_opt_slice((*self.ptr).key, (*self.ptr).key_len) }
428+
unsafe { util::ptr_to_opt_slice(self.ptr.key, self.ptr.key_len) }
429429
}
430430

431431
fn payload(&self) -> Option<&[u8]> {
432-
unsafe { util::ptr_to_opt_slice((*self.ptr).payload, (*self.ptr).len) }
432+
unsafe { util::ptr_to_opt_slice(self.ptr.payload, self.ptr.len) }
433433
}
434434

435435
unsafe fn payload_mut(&mut self) -> Option<&mut [u8]> {
436-
util::ptr_to_opt_mut_slice((*self.ptr).payload, (*self.ptr).len)
436+
util::ptr_to_opt_mut_slice(self.ptr.payload, self.ptr.len)
437437
}
438438

439439
fn topic(&self) -> &str {
440440
unsafe {
441-
CStr::from_ptr(rdsys::rd_kafka_topic_name((*self.ptr).rkt))
441+
CStr::from_ptr(rdsys::rd_kafka_topic_name(self.ptr.rkt))
442442
.to_str()
443443
.expect("Topic name is not valid UTF-8")
444444
}

src/producer/base_producer.rs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -425,6 +425,7 @@ where
425425
/// Note that this method will never block.
426426
// Simplifying the return type requires generic associated types, which are
427427
// unstable.
428+
#[allow(clippy::result_large_err)]
428429
pub fn send<'a, K, P>(
429430
&self,
430431
mut record: BaseRecord<'a, K, P, C::DeliveryOpaque>,
@@ -701,6 +702,7 @@ where
701702
/// See the documentation for [`BaseProducer::send`] for details.
702703
// Simplifying the return type requires generic associated types, which are
703704
// unstable.
705+
#[allow(clippy::result_large_err)]
704706
pub fn send<'a, K, P>(
705707
&self,
706708
record: BaseRecord<'a, K, P, C::DeliveryOpaque>,

src/producer/future_producer.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -346,6 +346,7 @@ where
346346

347347
/// Like [`FutureProducer::send`], but if enqueuing fails, an error will be
348348
/// returned immediately, alongside the [`FutureRecord`] provided.
349+
#[allow(clippy::result_large_err)]
349350
pub fn send_result<'a, K, P>(
350351
&self,
351352
record: FutureRecord<'a, K, P>,

src/statistics.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -367,7 +367,7 @@ mod tests {
367367

368368
assert_eq!(stats.brokers.len(), 1);
369369

370-
let broker = stats.brokers.values().into_iter().collect::<Vec<_>>()[0];
370+
let broker = stats.brokers.values().collect::<Vec<_>>()[0];
371371

372372
assert_eq!(
373373
broker.req,
@@ -391,7 +391,7 @@ mod tests {
391391
}
392392

393393
// Example from https://github.com/edenhill/librdkafka/wiki/Statistics
394-
const EXAMPLE: &'static str = r#"
394+
const EXAMPLE: &str = r#"
395395
{
396396
"name": "rdkafka#producer-1",
397397
"client_id": "rdkafka",

src/topic_partition_list.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -317,7 +317,7 @@ impl TopicPartitionList {
317317

318318
/// Sets all partitions in the list to the specified offset.
319319
pub fn set_all_offsets(&mut self, offset: Offset) -> Result<(), KafkaError> {
320-
let slice = unsafe { slice::from_raw_parts_mut((*self.ptr).elems, self.count()) };
320+
let slice = unsafe { slice::from_raw_parts_mut(self.ptr.elems, self.count()) };
321321
for elem_ptr in slice {
322322
let mut elem = TopicPartitionListElem::from_ptr(self, &mut *elem_ptr);
323323
elem.set_offset(offset)?;
@@ -327,7 +327,7 @@ impl TopicPartitionList {
327327

328328
/// Returns all the elements of the list.
329329
pub fn elements(&self) -> Vec<TopicPartitionListElem<'_>> {
330-
let slice = unsafe { slice::from_raw_parts_mut((*self.ptr).elems, self.count()) };
330+
let slice = unsafe { slice::from_raw_parts_mut(self.ptr.elems, self.count()) };
331331
let mut vec = Vec::with_capacity(slice.len());
332332
for elem_ptr in slice {
333333
vec.push(TopicPartitionListElem::from_ptr(self, &mut *elem_ptr));
@@ -337,7 +337,7 @@ impl TopicPartitionList {
337337

338338
/// Returns all the elements of the list that belong to the specified topic.
339339
pub fn elements_for_topic<'a>(&'a self, topic: &str) -> Vec<TopicPartitionListElem<'a>> {
340-
let slice = unsafe { slice::from_raw_parts_mut((*self.ptr).elems, self.count()) };
340+
let slice = unsafe { slice::from_raw_parts_mut(self.ptr.elems, self.count()) };
341341
let mut vec = Vec::with_capacity(slice.len());
342342
for elem_ptr in slice {
343343
let tp = TopicPartitionListElem::from_ptr(self, &mut *elem_ptr);

tests/test_admin.rs

Lines changed: 13 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ async fn create_consumer_group(consumer_group_name: &str) {
3434
let admin_client = create_admin_client();
3535
let topic_name = &rand_test_topic(consumer_group_name);
3636
let consumer: BaseConsumer = create_config()
37-
.set("group.id", consumer_group_name.clone())
37+
.set("group.id", consumer_group_name)
3838
.create()
3939
.expect("create consumer failed");
4040

@@ -74,17 +74,19 @@ fn fetch_metadata(topic: &str) -> Metadata {
7474
create_config().create().expect("consumer creation failed");
7575
let timeout = Some(Duration::from_secs(1));
7676

77-
let mut backoff = ExponentialBackoff::default();
78-
backoff.max_elapsed_time = Some(Duration::from_secs(5));
77+
let mut backoff = ExponentialBackoff {
78+
max_elapsed_time: Some(Duration::from_secs(5)),
79+
..Default::default()
80+
};
7981
(|| {
8082
let metadata = consumer
8183
.fetch_metadata(Some(topic), timeout)
8284
.map_err(|e| e.to_string())?;
83-
if metadata.topics().len() == 0 {
85+
if metadata.topics().is_empty() {
8486
Err("metadata fetch returned no topics".to_string())?
8587
}
8688
let topic = &metadata.topics()[0];
87-
if topic.partitions().len() == 0 {
89+
if topic.partitions().is_empty() {
8890
Err("metadata fetch returned a topic with no partitions".to_string())?
8991
}
9092
Ok(metadata)
@@ -98,16 +100,18 @@ fn verify_delete(topic: &str) {
98100
create_config().create().expect("consumer creation failed");
99101
let timeout = Some(Duration::from_secs(1));
100102

101-
let mut backoff = ExponentialBackoff::default();
102-
backoff.max_elapsed_time = Some(Duration::from_secs(5));
103+
let mut backoff = ExponentialBackoff {
104+
max_elapsed_time: Some(Duration::from_secs(5)),
105+
..Default::default()
106+
};
103107
(|| {
104108
// Asking about the topic specifically will recreate it (under the
105109
// default Kafka configuration, at least) so we have to ask for the list
106110
// of all topics and search through it.
107111
let metadata = consumer
108112
.fetch_metadata(None, timeout)
109113
.map_err(|e| e.to_string())?;
110-
if let Some(_) = metadata.topics().iter().find(|t| t.name() == topic) {
114+
if metadata.topics().iter().any(|t| t.name() == topic) {
111115
Err(format!("topic {} still exists", topic))?
112116
}
113117
Ok(())
@@ -416,7 +420,7 @@ async fn test_configs() {
416420
}
417421
}
418422

419-
let config = AlterConfig::new(broker).set("log.flush.interval.ms", &orig_val);
423+
let config = AlterConfig::new(broker).set("log.flush.interval.ms", orig_val);
420424
let res = admin_client
421425
.alter_configs(&[config], &opts)
422426
.await

tests/test_high_producers.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -64,13 +64,13 @@ async fn test_future_producer_send_full() {
6464

6565
// Fill up the queue.
6666
producer
67-
.send_result(FutureRecord::to(&topic_name).payload("A").key("B"))
67+
.send_result(FutureRecord::to(topic_name).payload("A").key("B"))
6868
.unwrap();
6969

7070
let send_message = |timeout| async move {
7171
let start = Instant::now();
7272
let res = producer
73-
.send(FutureRecord::to(&topic_name).payload("A").key("B"), timeout)
73+
.send(FutureRecord::to(topic_name).payload("A").key("B"), timeout)
7474
.await;
7575
match res {
7676
Ok(_) => panic!("send unexpectedly succeeded"),

0 commit comments

Comments (0)