
Commit e20325a

Improve clang-tidy config and enable more warnings
1 parent 08603f5 commit e20325a

17 files changed (+88, -87 lines)

.clang-tidy

Lines changed: 0 additions & 8 deletions
@@ -2,42 +2,34 @@ Checks: "*,\
 -llvm-header-guard,\
 -llvm-namespace-comment,\
 -fuchsia-*,\
--google-default-arguments,\
 -google-readability-namespace-comments,\
 -google-build-using-namespace,\
 -google-runtime-references,\
 -google-readability-avoid-underscore-in-googletest-name,\
 -modernize-use-nodiscard,\
 -modernize-deprecated-headers,\
--modernize-avoid-c-arrays,\
 -modernize-use-trailing-return-type,\
 -hicpp-special-member-functions,\
 -hicpp-vararg,\
 -hicpp-no-malloc,\
--hicpp-avoid-c-arrays,\
 -hicpp-no-array-decay,\
 -hicpp-deprecated-headers,\
 -hicpp-braces-around-statements,\
 -cppcoreguidelines-special-member-function,\
 -cppcoreguidelines-macro-usage,\
 -cppcoreguidelines-avoid-magic-numbers,\
 -cppcoreguidelines-pro-type-vararg,\
--cppcoreguidelines-pro-type-cstyle-cast,\
 -cppcoreguidelines-pro-bounds-array-to-pointer-decay,\
 -cppcoreguidelines-pro-bounds-pointer-arithmetic,\
 -cppcoreguidelines-special-member-functions,\
 -cppcoreguidelines-owning-memory,\
--cppcoreguidelines-avoid-c-arrays,\
--cppcoreguidelines-no-malloc,\
 -cppcoreguidelines-non-private-member-variables-in-classes,\
--cppcoreguidelines-pro-bounds-constant-array-index,\
 -cppcoreguidelines-pro-type-union-access,\
 -misc-non-private-member-variables-in-classes,\
 -readability-magic-numbers,\
 -readability-implicit-bool-conversion,\
 -readability-braces-around-statements,\
 -readability-isolate-declaration,\
--bugprone-macro-parentheses,\
 -bugprone-unused-return-value,\
 -cert-err58-cpp,\
 -cert-err60-cpp"

CMakeLists.txt

Lines changed: 8 additions & 12 deletions
@@ -3,9 +3,15 @@ cmake_minimum_required(VERSION "3.8")
 project("Modern C++ Kafka API" VERSION 1.0.0)
 
 get_property(parent_directory DIRECTORY PROPERTY PARENT_DIRECTORY)
-if(NOT parent_directory)
+if (NOT parent_directory)
     set(cppkafka_master_project ON)
-endif()
+    # Use Strict Options
+    if ((CMAKE_CXX_COMPILER_ID STREQUAL "Clang") OR (CMAKE_CXX_COMPILER_ID STREQUAL "GNU"))
+        add_compile_options("-Wall" "-Werror" "-Wextra" "-Wshadow" "-Wno-unused-result" "-Wsign-conversion")
+    elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
+        add_definitions(-D_CRT_SECURE_NO_WARNINGS)
+    endif ()
+endif ()
 
 option(CPPKAFKA_ENABLE_TESTS "Generate the test targets" ${cppkafka_master_project})
 
@@ -80,16 +86,6 @@ if (CMAKE_CXX_STANDARD EQUAL 14)
 endif ()
 
 
-#---------------------------
-# Build Options
-#---------------------------
-if ((CMAKE_CXX_COMPILER_ID STREQUAL "Clang") OR (CMAKE_CXX_COMPILER_ID STREQUAL "GNU"))
-    add_compile_options("-Wall" "-Werror" "-Wextra" "-Wshadow" "-Wno-unused-result")
-elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
-    add_definitions(-D_CRT_SECURE_NO_WARNINGS)
-endif ()
-
-
 #---------------------------
 # Build Option: UT stubs
 #---------------------------
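
The strict-warnings block now lives inside the `if (NOT parent_directory)` guard, so the flags apply only when cppkafka is built as the top-level project, and `-Wsign-conversion` joins the list. A minimal sketch (hypothetical code, not from this repository) of the pattern that flag reports under `-Werror`:

// Hypothetical example, not from this repository.
#include <cstddef>
#include <string>

std::size_t length_of(const std::string& s)
{
    // int n = s.size();                    // would now fail the build: size_t -> int changes signedness
    std::size_t n = s.size();               // matching unsigned types compile cleanly
    return n;
}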

examples/CMakeLists.txt

Lines changed: 1 addition & 0 deletions
@@ -1,5 +1,6 @@
 project("kafka-examples")
 
+
 # Target: kafka_sync_producer
 add_executable("kafka_sync_producer" "kafka_sync_producer.cc")
 target_link_libraries("kafka_sync_producer" modern-cpp-kafka-api)

include/kafka/ConsumerRecord.h

Lines changed: 1 addition & 1 deletion
@@ -107,7 +107,7 @@ ConsumerRecord::headers() const
     const char* name = nullptr;
     const void* valuePtr = nullptr;
     std::size_t valueSize = 0;
-    for (int i = 0; !rd_kafka_header_get_all(hdrs, i, &name, &valuePtr, &valueSize); i++)
+    for (std::size_t i = 0; !rd_kafka_header_get_all(hdrs, i, &name, &valuePtr, &valueSize); i++)
     {
         headers.emplace_back(name, Header::Value(valuePtr, valueSize));
     }
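
The loop index now matches the index parameter of `rd_kafka_header_get_all` (a `size_t` in librdkafka), so each call no longer converts an `int` implicitly under the newly enabled `-Wsign-conversion`. A reduced sketch of the same loop shape, using a hypothetical stand-in for the C getter:

// Hypothetical stand-in for a C-style getter such as rd_kafka_header_get_all():
// returns 0 while 'idx' is in range and non-zero once it runs past the end.
#include <array>
#include <cstddef>

static int get_header(std::size_t idx, const char** name)
{
    static const std::array<const char*, 2> names = {"k1", "k2"};
    if (idx >= names.size()) return -1;
    *name = names[idx];
    return 0;
}

void iterate_headers()
{
    const char* name = nullptr;
    // A std::size_t index matches the getter's parameter, so the loop is clean under -Wsign-conversion.
    for (std::size_t i = 0; !get_header(i, &name); ++i)
    {
        // use 'name' ...
    }
}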

include/kafka/KafkaConsumer.h

Lines changed: 13 additions & 5 deletions
@@ -205,6 +205,8 @@ class KafkaConsumer: public KafkaClient
      * Returns the polled records.
      * Note: 1) The result could be fetched through ConsumerRecord (with member function `error`).
      *       2) Make sure the `ConsumerRecord` be destructed before the `KafkaConsumer.close()`.
+     * Throws KafkaException with errors:
+     *   - RD_KAFKA_RESP_ERR__UNKNOWN_PARTITION: Unknow partition
      */
     std::vector<consumer::ConsumerRecord> poll(std::chrono::milliseconds timeout);
 
@@ -213,6 +215,8 @@ class KafkaConsumer: public KafkaClient
      * Returns the number of polled records (which have been saved into parameter `output`).
      * Note: 1) The result could be fetched through ConsumerRecord (with member function `error`).
      *       2) Make sure the `ConsumerRecord` be destructed before the `KafkaConsumer.close()`.
+     * Throws KafkaException with errors:
+     *   - RD_KAFKA_RESP_ERR__UNKNOWN_PARTITION: Unknow partition
      */
     std::size_t poll(std::chrono::milliseconds timeout, std::vector<consumer::ConsumerRecord>& output);
 
@@ -286,8 +290,8 @@ class KafkaConsumer: public KafkaClient
 
     std::string _groupId;
 
-    unsigned int _maxPollRecords = 500; // From "max.poll.records" property, and here is the default for batch-poll
-    bool         _enableAutoCommit = false; // From "enable.auto.commit" property
+    std::size_t _maxPollRecords = 500; // From "max.poll.records" property, and here is the default for batch-poll
+    bool        _enableAutoCommit = false; // From "enable.auto.commit" property
 
     rd_kafka_queue_unique_ptr _rk_queue;
 
@@ -379,7 +383,7 @@ KafkaConsumer::KafkaConsumer(const Properties &properties, EventsPollingOption e
     if (auto maxPollRecordsProperty = properties.getProperty(consumer::Config::MAX_POLL_RECORDS))
     {
         const std::string maxPollRecords = *maxPollRecordsProperty;
-        _maxPollRecords = std::stoi(maxPollRecords);
+        _maxPollRecords = static_cast<std::size_t>(std::stoi(maxPollRecords));
     }
     _properties.put(consumer::Config::MAX_POLL_RECORDS, std::to_string(_maxPollRecords));
 
@@ -812,11 +816,15 @@ KafkaConsumer::pollMessages(int timeoutMs, std::vector<consumer::ConsumerRecord>
 
     // Poll messages with librdkafka's API
     std::vector<rd_kafka_message_t*> msgPtrArray(_maxPollRecords);
-    std::size_t msgReceived = rd_kafka_consume_batch_queue(_rk_queue.get(), timeoutMs, msgPtrArray.data(), _maxPollRecords);
+    auto msgReceived = rd_kafka_consume_batch_queue(_rk_queue.get(), timeoutMs, msgPtrArray.data(), _maxPollRecords);
+    if (msgReceived < 0)
+    {
+        KAFKA_THROW_ERROR(Error(rd_kafka_last_error()));
+    }
 
     // Wrap messages with ConsumerRecord
     output.clear();
-    output.reserve(msgReceived);
+    output.reserve(static_cast<std::size_t>(msgReceived));
     std::for_each(msgPtrArray.begin(), msgPtrArray.begin() + msgReceived, [&output](rd_kafka_message_t* rkMsg) { output.emplace_back(rkMsg); });
 
     // Store the offsets for all these polled messages (for "enable.auto.commit=true" case)
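
`rd_kafka_consume_batch_queue` returns a signed count, with a negative value signalling an error, so the code now keeps the signed result, throws on failure, and only then casts for `reserve`. A simplified sketch of that check-then-cast order, with a hypothetical batch function standing in for the librdkafka call:

// Hypothetical batch API standing in for rd_kafka_consume_batch_queue(): signed count, -1 on error.
#include <cstddef>
#include <stdexcept>
#include <vector>

static long consume_batch(std::vector<int>& slots)
{
    return static_cast<long>(slots.size());          // pretend every slot got filled
}

std::vector<int> poll_once(std::size_t maxRecords)
{
    std::vector<int> slots(maxRecords);

    const long received = consume_batch(slots);      // keep the signed result as-is
    if (received < 0)
    {
        throw std::runtime_error("batch consume failed");   // the real code throws a KafkaException here
    }

    std::vector<int> records;
    records.reserve(static_cast<std::size_t>(received));    // convert only after the error check
    records.assign(slots.begin(), slots.begin() + received);
    return records;
}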

include/kafka/KafkaProducer.h

Lines changed: 2 additions & 1 deletion
@@ -14,6 +14,7 @@
 
 #include <cassert>
 #include <condition_variable>
+#include <cstdint>
 #include <memory>
 #include <mutex>
 #include <unordered_map>
 
@@ -398,7 +399,7 @@ KafkaProducer::send(const producer::ProducerRecord& record,
         vu.vtype = RD_KAFKA_VTYPE_HEADER;
         vu.u.header.name = header.key.c_str();
         vu.u.header.val = header.value.data();
-        vu.u.header.size = header.value.size();
+        vu.u.header.size = static_cast<int64_t>(header.value.size());
     }
 
     assert(uvCount == rkVUs.size());
include/kafka/Log.h

Lines changed: 17 additions & 16 deletions
@@ -4,6 +4,8 @@
 
 #include <kafka/Utility.h>
 
+#include <algorithm>
+#include <array>
 #include <cassert>
 #include <functional>
 #include <iostream>
 
@@ -25,57 +27,56 @@ struct Log
         Debug = 7
     };
 
-    static const std::string& levelString(int level)
+    static const std::string& levelString(std::size_t level)
     {
-        static const std::vector<std::string> levelNames = {"EMERG", "ALERT", "CRIT", "ERR", "WARNING", "NOTICE", "INFO", "DEBUG"};
-        static const int numLevels = static_cast<int>(levelNames.size());
-        static const std::string invalid = "INVALID";
+        static const std::vector<std::string> levelNames = {"EMERG", "ALERT", "CRIT", "ERR", "WARNING", "NOTICE", "INFO", "DEBUG", "INVALID"};
+        static const std::size_t maxIndex = levelNames.size() - 1;
 
-        return (level >= 0 && level < numLevels) ? levelNames[level] : invalid;
+        return levelNames[std::min(level, maxIndex)];
     }
 };
 
-template <int MAX_CAPACITY>
+template <std::size_t MAX_CAPACITY>
 class LogBuffer
 {
 public:
-    LogBuffer():_wptr(_buf) { _buf[0] = 0; } // NOLINT
+    LogBuffer():_wptr(_buf.data()) { _buf[0] = 0; } // NOLINT
 
     LogBuffer& clear()
    {
-        _wptr = _buf;
+        _wptr = _buf.data();
         _buf[0] = 0;
         return *this;
    }
 
     template<class ...Args>
     LogBuffer& print(const char* format, Args... args)
    {
-        assert(!(_buf[0] != 0 && _wptr == _buf)); // means it has already been used as a plain buffer (with `str()`)
+        assert(!(_buf[0] != 0 && _wptr == _buf.data())); // means it has already been used as a plain buffer (with `str()`)
 
         auto cnt = std::snprintf(_wptr, capacity(), format, args...); // returns number of characters written if successful (not including '\0')
         if (cnt > 0)
        {
-            _wptr = std::min(_wptr + cnt, _buf + MAX_CAPACITY - 1);
+            _wptr = std::min(_wptr + cnt, _buf.data() + MAX_CAPACITY - 1);
        }
         return *this;
    }
     LogBuffer& print(const char* format) { return print("%s", format); }
 
-    std::size_t capacity() const { return static_cast<size_t>(_buf + MAX_CAPACITY - _wptr); }
-    char* str() { return _buf; }
-    const char* c_str() const { return _buf; }
+    std::size_t capacity() const { return static_cast<size_t>(_buf.data() + MAX_CAPACITY - _wptr); }
+    char* str() { return _buf.data(); }
+    const char* c_str() const { return _buf.data(); }
 
 private:
-    char* _wptr;
-    char _buf[MAX_CAPACITY];
+    std::array<char, MAX_CAPACITY> _buf;
+    char* _wptr;
 };
 
 using Logger = std::function<void(int, const char*, int, const char* msg)>;
 
 inline void DefaultLogger(int level, const char* /*filename*/, int /*lineno*/, const char* msg)
 {
-    std::cout << "[" << utility::getCurrentTime() << "]" << Log::levelString(level) << " " << msg;
+    std::cout << "[" << utility::getCurrentTime() << "]" << Log::levelString(static_cast<std::size_t>(level)) << " " << msg;
     std::cout << std::endl;
 }
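
`LogBuffer` now keeps its storage in a `std::array` rather than a raw `char` array (in line with the re-enabled `*-avoid-c-arrays` checks), and `levelString` replaces the signed range check with a clamped unsigned lookup. A reduced sketch of that lookup idiom with a shortened, hypothetical name table:

// Reduced, hypothetical version of the clamped lookup: any out-of-range level maps to the last ("INVALID") entry.
#include <algorithm>
#include <array>
#include <cstddef>
#include <string>

inline const std::string& level_name(std::size_t level)
{
    static const std::array<std::string, 4> names = {"ERR", "INFO", "DEBUG", "INVALID"};
    static const std::size_t maxIndex = names.size() - 1;
    return names[std::min(level, maxIndex)];
}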

tests/integration/TestKafkaConsumer.cc

Lines changed: 5 additions & 5 deletions
@@ -1564,15 +1564,15 @@ TEST(KafkaConsumer, OffsetsForTime)
 
     using namespace std::chrono;
 
-    constexpr int MESSAGES_NUM = 5;
+    constexpr std::size_t MESSAGES_NUM = 5;
 
     std::vector<time_point<system_clock>> checkPoints;
     std::vector<kafka::TopicPartitionOffsets> expectedOffsets;
 
     std::cout << "Produce messages:" << std::endl;
     {
         kafka::clients::KafkaProducer producer(KafkaTestUtility::GetKafkaClientCommonConfig());
-        for (int i = 0; i < MESSAGES_NUM; ++i)
+        for (std::size_t i = 0; i < MESSAGES_NUM; ++i)
         {
             checkPoints.emplace_back(system_clock::now());
 
@@ -1602,7 +1602,7 @@ TEST(KafkaConsumer, OffsetsForTime)
     {
         kafka::clients::KafkaConsumer consumer(KafkaTestUtility::GetKafkaClientCommonConfig());
         consumer.subscribe({topic1, topic2});
-        for (int i = 0; i < MESSAGES_NUM; ++i)
+        for (std::size_t i = 0; i < MESSAGES_NUM; ++i)
         {
             const auto timepoint = checkPoints[i];
             const auto expected = expectedOffsets[i];
@@ -1619,7 +1619,7 @@ TEST(KafkaConsumer, OffsetsForTime)
         kafka::clients::KafkaConsumer consumer(KafkaTestUtility::GetKafkaClientCommonConfig());
 
         // Here we doesn't subsribe to topic1 or topic2 (the result is undefined)
-        for (int i = 0; i < MESSAGES_NUM; ++i)
+        for (std::size_t i = 0; i < MESSAGES_NUM; ++i)
         {
             try
             {
@@ -1654,7 +1654,7 @@ TEST(KafkaConsumer, OffsetsForTime)
         kafka::clients::KafkaConsumer consumer(KafkaTestUtility::GetKafkaClientCommonConfig());
         consumer.subscribe({topic1, topic2});
 
-        for (int i = 0; i < MESSAGES_NUM; ++i)
+        for (std::size_t i = 0; i < MESSAGES_NUM; ++i)
        {
             const auto timepoint = checkPoints[i];
             const auto validTp = kafka::TopicPartition{topic1, partition1};

tests/integration/TestKafkaProducer.cc

Lines changed: 3 additions & 3 deletions
@@ -347,7 +347,7 @@ TEST(KafkaProducer, ThreadCount)
 TEST(KafkaProducer, MessageDeliveryCallback)
 {
     // Prepare messages to test
-    const std::vector<std::tuple<std::string, std::string, int>> messages = {
+    const std::vector<std::tuple<std::string, std::string, kafka::clients::producer::ProducerRecord::Id>> messages = {
         {"key1", "value1", 1},
         {"key2", "value2", 2},
         {"key3", "value3", 3},
 
@@ -379,7 +379,7 @@ TEST(KafkaProducer, MessageDeliveryCallback)
         auto record = kafka::clients::producer::ProducerRecord(topic, partition,
                                                                kafka::Key(std::get<0>(msg).c_str(), std::get<0>(msg).size()),
                                                                kafka::Value(std::get<1>(msg).c_str(), std::get<1>(msg).size()),
-                                                               std::get<2>(msg));
+                                                               std::get<2>(msg));
         std::cout << "[" <<kafka::utility::getCurrentTime() << "] ProducerRecord: " << record.toString() << std::endl;
         producer.send(record, drCallback);
     }
 
@@ -397,7 +397,7 @@ TEST(KafkaProducer, MessageDeliveryCallback)
 TEST(KafkaProducer, DeliveryCallback_ManuallyPollEvents)
 {
     // Prepare messages to test
-    const std::vector<std::tuple<std::string, std::string, int>> messages = {
+    const std::vector<std::tuple<std::string, std::string, kafka::clients::producer::ProducerRecord::Id>> messages = {
         {"key1", "value1", 1},
         {"key2", "value2", 2},
         {"key3", "value3", 3},

tests/integration/TestKafkaRecoverableProducer.cc

Lines changed: 3 additions & 3 deletions
@@ -93,8 +93,8 @@ TEST(KafkaRecoverableProducer, MockFatalError)
     // Prepare messages to send
     static constexpr int MSG_NUM = 50;
     std::mutex messagesMutex;
-    std::list<int> messagesToSend;
-    for (int i = 0; i < MSG_NUM; ++i) messagesToSend.push_back(i);
+    std::list<kafka::clients::producer::ProducerRecord::Id> messagesToSend;
+    for (std::size_t i = 0; i < MSG_NUM; ++i) messagesToSend.push_back(i);
 
     int sendCount = 0;
     int deliveryCount = 0;
 
@@ -122,7 +122,7 @@ TEST(KafkaRecoverableProducer, MockFatalError)
             // Would resend the message
             if (error) {
                 std::lock_guard<std::mutex> lock(messagesMutex);
-                messagesToSend.push_front(std::stoi(*payload));
+                messagesToSend.push_front(static_cast<kafka::clients::producer::ProducerRecord::Id>(std::stoi(*payload)));
             }
 
             ++deliveryCount;
