Skip to content

Commit

Permalink
Fix KafkaRecoverableProducer.close()
Browse files Browse the repository at this point in the history
  • Loading branch information
kenneth-jia committed Oct 21, 2021
1 parent 14ce365 commit 82cbd27
Show file tree
Hide file tree
Showing 2 changed files with 49 additions and 45 deletions.
10 changes: 4 additions & 6 deletions include/kafka/addons/KafkaRecoverableProducer.h
Original file line number Diff line number Diff line change
Expand Up @@ -29,12 +29,7 @@ class KafkaRecoverableProducer

~KafkaRecoverableProducer()
{
    // Delegate all shutdown work to close(): it takes _producerMutex,
    // clears _running, joins _pollThread, and closes the underlying
    // producer. Doing that work inline here (as the pre-fix version did)
    // would deadlock if close() were also reachable, since close()
    // re-acquires the same mutex.
    //
    // The _running guard makes the destructor a no-op when the user has
    // already called close() explicitly, so the producer is never closed
    // twice.
    if (_running) close();
}

/**
Expand Down Expand Up @@ -167,6 +162,9 @@ class KafkaRecoverableProducer
{
std::lock_guard<std::mutex> lock(_producerMutex);

_running = false;
if (_pollThread.joinable()) _pollThread.join();

_producer->close(timeout);
}

Expand Down
84 changes: 45 additions & 39 deletions tests/integration/TestKafkaRecoverableProducer.cc
Original file line number Diff line number Diff line change
Expand Up @@ -20,53 +20,59 @@ TEST(KafkaRecoverableProducer, SendMessages)

KafkaTestUtility::CreateKafkaTopic(topic, 5, 3);

// Properties for the producer
const auto props = KafkaTestUtility::GetKafkaClientCommonConfig().put(kafka::clients::producer::Config::ACKS, "all");

// Recoverable producer
kafka::clients::KafkaRecoverableProducer producer(props);

// Send messages
kafka::clients::producer::ProducerRecord::Id id = 0;
for (const auto& msg: messages)
{
auto record = kafka::clients::producer::ProducerRecord(topic, partition,
kafka::Key(msg.first.c_str(), msg.first.size()),
kafka::Value(msg.second.c_str(), msg.second.size()),
id++);
std::cout << "[" <<kafka::utility::getCurrentTime() << "] ProducerRecord: " << record.toString() << std::endl;
// Properties for the producer
const auto props = KafkaTestUtility::GetKafkaClientCommonConfig().put(kafka::clients::producer::Config::ACKS, "all");

// sync-send
{
auto metadata = producer.syncSend(record);
std::cout << "[" <<kafka::utility::getCurrentTime() << "] Message sync-sent. Metadata: " << metadata.toString() << std::endl;
}
// Recoverable producer
kafka::clients::KafkaRecoverableProducer producer(props);

// async-send
// Send messages
kafka::clients::producer::ProducerRecord::Id id = 0;
for (const auto& msg: messages)
{
producer.send(record,
[] (const kafka::clients::producer::RecordMetadata& metadata, const kafka::Error& error) {
EXPECT_FALSE(error);
std::cout << "[" <<kafka::utility::getCurrentTime() << "] Message async-sent. Metadata: " << metadata.toString() << std::endl;
});
}
}
auto record = kafka::clients::producer::ProducerRecord(topic, partition,
kafka::Key(msg.first.c_str(), msg.first.size()),
kafka::Value(msg.second.c_str(), msg.second.size()),
id++);
std::cout << "[" <<kafka::utility::getCurrentTime() << "] ProducerRecord: " << record.toString() << std::endl;

// Prepare a consumer
const auto consumerProps = KafkaTestUtility::GetKafkaClientCommonConfig().put(kafka::clients::consumer::Config::AUTO_OFFSET_RESET, "earliest");
kafka::clients::KafkaConsumer consumer(consumerProps);
consumer.setLogLevel(kafka::Log::Level::Crit);
consumer.subscribe({topic});
// sync-send
{
auto metadata = producer.syncSend(record);
std::cout << "[" <<kafka::utility::getCurrentTime() << "] Message sync-sent. Metadata: " << metadata.toString() << std::endl;
}

// Poll these messages
auto records = KafkaTestUtility::ConsumeMessagesUntilTimeout(consumer);
// async-send
{
producer.send(record,
[] (const kafka::clients::producer::RecordMetadata& metadata, const kafka::Error& error) {
EXPECT_FALSE(error);
std::cout << "[" <<kafka::utility::getCurrentTime() << "] Message async-sent. Metadata: " << metadata.toString() << std::endl;
});
}
}

producer.close();
}

// Check the messages
EXPECT_EQ(messages.size() * 2, records.size());
for (std::size_t i = 0; i < records.size(); ++i)
{
EXPECT_EQ(messages[i/2].first, std::string(static_cast<const char*>(records[i].key().data()), records[i].key().size()));
EXPECT_EQ(messages[i/2].second, std::string(static_cast<const char*>(records[i].value().data()), records[i].value().size()));
// Prepare a consumer
const auto consumerProps = KafkaTestUtility::GetKafkaClientCommonConfig().put(kafka::clients::consumer::Config::AUTO_OFFSET_RESET, "earliest");
kafka::clients::KafkaConsumer consumer(consumerProps);
consumer.setLogLevel(kafka::Log::Level::Crit);
consumer.subscribe({topic});

// Poll these messages
auto records = KafkaTestUtility::ConsumeMessagesUntilTimeout(consumer);

// Check the messages
EXPECT_EQ(messages.size() * 2, records.size());
for (std::size_t i = 0; i < records.size(); ++i)
{
EXPECT_EQ(messages[i/2].first, std::string(static_cast<const char*>(records[i].key().data()), records[i].key().size()));
EXPECT_EQ(messages[i/2].second, std::string(static_cast<const char*>(records[i].value().data()), records[i].value().size()));
}
}
}

Expand Down

0 comments on commit 82cbd27

Please sign in to comment.