Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Allow consumers to start from a specified offset #585

Open
wants to merge 7 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,14 @@ public interface IConsumerConfigurationBuilder
/// <returns></returns>
IConsumerConfigurationBuilder ManualAssignPartitions(string topicName, IEnumerable<int> partitions);

/// <summary>
/// Explicitly defines the topic, partitions and offsets that will be used to read the messages
/// </summary>
/// <param name="topicName">Topic name</param>
/// <param name="partitionOffsets">The partition offset dictionary [Partition ID, Offset]</param>
/// <returns></returns>
IConsumerConfigurationBuilder ManualAssignPartitionOffsets(string topicName, IDictionary<int, long> partitionOffsets);

/// <summary>
/// Sets the topics that will be used to read the messages, the partitions will be automatically assigned
/// </summary>
Expand Down
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
namespace KafkaFlow.Configuration
namespace KafkaFlow.Configuration;

/// <summary>SaslOauthbearerMethod enum values</summary>
public enum SaslOauthbearerMethod
{
/// <summary>SaslOauthbearerMethod enum values</summary>
public enum SaslOauthbearerMethod
{
/// <summary>Default</summary>
Default,
/// <summary>Default</summary>
Default,

/// <summary>Oidc</summary>
Oidc,
}
/// <summary>Oidc</summary>
Oidc,
}
16 changes: 16 additions & 0 deletions src/KafkaFlow.Abstractions/Configuration/TopicPartitionOffsets.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
using System;
using System.Collections.Generic;

namespace KafkaFlow.Configuration;

/// <summary>
/// Represents a manual consumer assignment: a topic name plus the partitions and the
/// offsets at which reading should start for each partition.
/// </summary>
public class TopicPartitionOffsets
{
    /// <summary>
    /// Initializes a new instance of the <see cref="TopicPartitionOffsets"/> class.
    /// </summary>
    /// <param name="name">Topic name</param>
    /// <param name="partitionOffsets">The partition offset dictionary [Partition ID, Offset]</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="name"/> or <paramref name="partitionOffsets"/> is null.</exception>
    public TopicPartitionOffsets(string name, IDictionary<int, long> partitionOffsets)
    {
        // Validate eagerly, consistent with ConsumerConfiguration's ArgumentNullException guards,
        // so a misconfiguration fails at setup time rather than when the consumer starts.
        this.Name = name ?? throw new ArgumentNullException(nameof(name));
        this.PartitionOffsets = partitionOffsets ?? throw new ArgumentNullException(nameof(partitionOffsets));
    }

    /// <summary>
    /// Gets the topic name.
    /// </summary>
    public string Name { get; }

    /// <summary>
    /// Gets the map of partition id to the offset reading should start from.
    /// </summary>
    public IDictionary<int, long> PartitionOffsets { get; }
}
4 changes: 4 additions & 0 deletions src/KafkaFlow/Configuration/ConsumerConfiguration.cs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ public ConsumerConfiguration(
Confluent.Kafka.ConsumerConfig consumerConfig,
IReadOnlyList<string> topics,
IReadOnlyList<TopicPartitions> manualAssignPartitions,
IReadOnlyList<TopicPartitionOffsets> manualAssignPartitionOffsets,
string consumerName,
ClusterConfiguration clusterConfiguration,
bool managementDisabled,
Expand Down Expand Up @@ -48,6 +49,7 @@ public ConsumerConfiguration(
this.AutoCommitInterval = autoCommitInterval;
this.Topics = topics ?? throw new ArgumentNullException(nameof(topics));
this.ManualAssignPartitions = manualAssignPartitions ?? throw new ArgumentNullException(nameof(manualAssignPartitions));
this.ManualAssignPartitionOffsets = manualAssignPartitionOffsets ?? throw new ArgumentNullException(nameof(manualAssignPartitionOffsets));
this.ConsumerName = consumerName ?? Guid.NewGuid().ToString();
this.ClusterConfiguration = clusterConfiguration;
this.ManagementDisabled = managementDisabled;
Expand Down Expand Up @@ -76,6 +78,8 @@ public ConsumerConfiguration(

public IReadOnlyList<TopicPartitions> ManualAssignPartitions { get; }

public IReadOnlyList<TopicPartitionOffsets> ManualAssignPartitionOffsets { get; }

public string ConsumerName { get; }

public ClusterConfiguration ClusterConfiguration { get; }
Expand Down
8 changes: 8 additions & 0 deletions src/KafkaFlow/Configuration/ConsumerConfigurationBuilder.cs
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ internal sealed class ConsumerConfigurationBuilder : IConsumerConfigurationBuild
{
private readonly List<string> _topics = new();
private readonly List<TopicPartitions> _topicsPartitions = new();
private readonly List<TopicPartitionOffsets> _topicsPartitionOffsets = new();
private readonly List<Action<string>> _statisticsHandlers = new();

private readonly List<PendingOffsetsStatisticsHandler> _pendingOffsetsStatisticsHandlers = new();
Expand Down Expand Up @@ -60,6 +61,12 @@ public IConsumerConfigurationBuilder ManualAssignPartitions(string topicName, IE
return this;
}

public IConsumerConfigurationBuilder ManualAssignPartitionOffsets(string topicName, IDictionary<int, long> partitionOffsets)
{
    // Record the explicit topic/partition/offset assignment; it is consumed later by Build().
    var assignment = new TopicPartitionOffsets(topicName, partitionOffsets);
    _topicsPartitionOffsets.Add(assignment);

    return this;
}

public IConsumerConfigurationBuilder WithConsumerConfig(Confluent.Kafka.ConsumerConfig config)
{
_consumerConfig = config;
Expand Down Expand Up @@ -259,6 +266,7 @@ public IConsumerConfiguration Build(ClusterConfiguration clusterConfiguration)
consumerConfigCopy,
_topics,
_topicsPartitions,
_topicsPartitionOffsets,
_name,
clusterConfiguration,
_disableManagement,
Expand Down
5 changes: 5 additions & 0 deletions src/KafkaFlow/Configuration/IConsumerConfiguration.cs
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,11 @@ public interface IConsumerConfiguration
/// </summary>
IReadOnlyList<TopicPartitions> ManualAssignPartitions { get; }

/// <summary>
/// Gets the topic partition offsets to manually assign
/// </summary>
IReadOnlyList<TopicPartitionOffsets> ManualAssignPartitionOffsets { get; }

/// <summary>
/// Gets the consumer name
/// </summary>
Expand Down
22 changes: 20 additions & 2 deletions src/KafkaFlow/Consumers/Consumer.cs
Original file line number Diff line number Diff line change
Expand Up @@ -273,11 +273,16 @@ private void EnsureConsumer()

if (this.Configuration.ManualAssignPartitions.Any())
{
this.ManualAssign(this.Configuration.ManualAssignPartitions);
this.ManualAssignPartitions(this.Configuration.ManualAssignPartitions);
}

if (this.Configuration.ManualAssignPartitionOffsets.Any())
{
this.ManualAssignPartitionOffsets(this.Configuration.ManualAssignPartitionOffsets);
}
}

private void ManualAssign(IEnumerable<TopicPartitions> topics)
private void ManualAssignPartitions(IEnumerable<TopicPartitions> topics)
{
var partitions = topics
.SelectMany(
Expand All @@ -289,6 +294,19 @@ private void ManualAssign(IEnumerable<TopicPartitions> topics)
this.FirePartitionsAssignedHandlers(_consumer, partitions);
}

private void ManualAssignPartitionOffsets(IEnumerable<TopicPartitionOffsets> topics)
{
    // Flatten every topic's [partition -> offset] map into Confluent TopicPartitionOffset entries.
    var assignments = new List<Confluent.Kafka.TopicPartitionOffset>();

    foreach (var topic in topics)
    {
        foreach (var partitionOffset in topic.PartitionOffsets)
        {
            assignments.Add(
                new Confluent.Kafka.TopicPartitionOffset(
                    topic.Name, new Partition(partitionOffset.Key), new Offset(partitionOffset.Value)));
        }
    }

    _consumer.Assign(assignments);

    // Handlers receive plain topic/partition pairs; the starting offsets are not part of that callback.
    var assignedPartitions = assignments.Select(x => x.TopicPartition).ToList();
    this.FirePartitionsAssignedHandlers(_consumer, assignedPartitions);
}

private void FirePartitionsAssignedHandlers(
IConsumer<byte[], byte[]> consumer,
List<TopicPartition> partitions)
Expand Down
62 changes: 62 additions & 0 deletions tests/KafkaFlow.IntegrationTests/ConsumerTest.cs
Original file line number Diff line number Diff line change
@@ -1,14 +1,18 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using AutoFixture;
using Confluent.Kafka;
using global::Microsoft.Extensions.DependencyInjection;
using global::Microsoft.VisualStudio.TestTools.UnitTesting;
using KafkaFlow.Consumers;
using KafkaFlow.IntegrationTests.Core;
using KafkaFlow.IntegrationTests.Core.Handlers;
using KafkaFlow.IntegrationTests.Core.Messages;
using KafkaFlow.IntegrationTests.Core.Producers;
using KafkaFlow.Serializer;

namespace KafkaFlow.IntegrationTests;

Expand Down Expand Up @@ -157,4 +161,62 @@ public void AddConsumer_WithSharedConsumerConfig_ConsumersAreConfiguratedIndepen
Assert.IsNotNull(consumers.FirstOrDefault(x => x.GroupId.Equals(Bootstrapper.ProtobufGroupId)));
Assert.IsNotNull(consumers.FirstOrDefault(x => x.GroupId.Equals(Bootstrapper.ProtobufGzipGroupId)));
}

[TestMethod]
public async Task ManualAssignPartitionOffsetsTest()
{
    // Arrange: produce 10 messages; each delivery report stamps the broker-assigned
    // offset onto its source message via DeliveryHandler.
    var producer = _provider.GetRequiredService<IMessageProducer<OffsetTrackerProducer>>();
    var messages = _fixture
        .Build<OffsetTrackerMessage>()
        .Without(m => m.Offset)
        .CreateMany(10).ToList();

    messages.ForEach(m => producer.Produce(m.Id.ToString(), m, null, report => DeliveryHandler(report, messages)));

    // Wait until every message has been consumed once, so GetOffsetTrack() reflects
    // the highest offset written to the topic.
    foreach (var message in messages)
    {
        await MessageStorage.AssertOffsetTrackerMessageNotReceivedAsync(message);
    }

    var endOffset = MessageStorage.GetOffsetTrack();
    MessageStorage.Clear();

    // Act: start a fresh consumer manually assigned to partition 0 at (endOffset - 4),
    // which should correspond to the 5th-from-last message (assumes contiguous offsets
    // on the single-partition topic — see Bootstrapper topic setup).
    var serviceProviderHelper = new ServiceProviderHelper();

    await serviceProviderHelper.GetServiceProviderAsync(
        consumerConfig =>
        {
            consumerConfig.ManualAssignPartitionOffsets(Bootstrapper.OffsetTrackerTopicName, new Dictionary<int, long> { { 0, endOffset - 4 } })
                .WithGroupId("ManualAssignPartitionOffsetsTest")
                .WithBufferSize(100)
                .WithWorkersCount(10)
                .AddMiddlewares(
                    middlewares => middlewares
                        .AddDeserializer<JsonCoreDeserializer>()
                        .AddTypedHandlers(
                            handlers => handlers.AddHandler<OffsetTrackerMessageHandler>()));
        }, null);

    // Assert: messages before the start offset must NOT be re-consumed
    // (assertInStore: false waits out the timeout and fails if the message shows up)...
    for (var i = 0; i < 5; i++)
    {
        await MessageStorage.AssertOffsetTrackerMessageNotReceivedAsync(messages[i], false);
    }

    // ...while messages at or after the start offset must be consumed again.
    for (var i = 5; i < 10; i++)
    {
        await MessageStorage.AssertOffsetTrackerMessageNotReceivedAsync(messages[i]);
    }

    await serviceProviderHelper.StopBusAsync();
}

private static void DeliveryHandler(DeliveryReport<byte[], byte[]> report, List<OffsetTrackerMessage> messages)
{
    // The Kafka message key is the producing message's Id; use it to find the source
    // message and record the broker-assigned offset on it.
    var messageKey = Encoding.UTF8.GetString(report.Message.Key);
    var producedMessage = messages.First(m => m.Id.ToString() == messageKey);
    producedMessage.Offset = report.Offset;
}
}
25 changes: 25 additions & 0 deletions tests/KafkaFlow.IntegrationTests/Core/Bootstrapper.cs
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ internal static class Bootstrapper
internal const string AvroGroupId = "consumer-avro";
internal const string JsonGroupId = "consumer-json";
internal const string NullGroupId = "consumer-null";
internal const string OffsetTrackerGroupId = "consumer-offset-tracker";


private const string ProtobufTopicName = "test-protobuf";
Expand All @@ -43,6 +44,7 @@ internal static class Bootstrapper
private const string AvroTopicName = "test-avro";
private const string NullTopicName = "test-null";
private const string DefaultParamsTopicName = "test-default-params";
internal const string OffsetTrackerTopicName = "test-offset-tracker";

private static readonly Lazy<IServiceProvider> s_lazyProvider = new(SetupProvider);

Expand Down Expand Up @@ -203,6 +205,7 @@ private static void SetupServices(HostBuilderContext context, IServiceCollection
.CreateTopicIfNotExists(ProtobufGzipTopicName, 2, 1)
.CreateTopicIfNotExists(ProtobufGzipTopicName2, 2, 1)
.CreateTopicIfNotExists(NullTopicName, 1, 1)
.CreateTopicIfNotExists(OffsetTrackerTopicName, 1, 1)
.CreateTopicIfNotExists(DefaultParamsTopicName)
.AddConsumer(
consumer => consumer
Expand Down Expand Up @@ -270,6 +273,22 @@ private static void SetupServices(HostBuilderContext context, IServiceCollection
.WithHandlerLifetime(InstanceLifetime.Singleton)
.AddHandler<NullMessageHandler>()
)))
.AddConsumer(
consumer => consumer
.Topic(OffsetTrackerTopicName)
.WithGroupId(OffsetTrackerGroupId)
.WithBufferSize(100)
.WithWorkersCount(10)
.WithAutoOffsetReset(AutoOffsetReset.Latest)
.AddMiddlewares(
middlewares => middlewares
.AddDeserializer<JsonCoreDeserializer>()
.AddTypedHandlers(
handlers =>
handlers
.WithHandlerLifetime(InstanceLifetime.Singleton)
.AddHandler<OffsetTrackerMessageHandler>()
)))
.AddConsumer(
consumer => consumer
.Topics(GzipTopicName)
Expand Down Expand Up @@ -325,6 +344,12 @@ private static void SetupServices(HostBuilderContext context, IServiceCollection
.AddMiddlewares(
middlewares => middlewares
.AddSerializer<JsonCoreSerializer>()))
.AddProducer<OffsetTrackerProducer>(
producer => producer
.DefaultTopic(OffsetTrackerTopicName)
.AddMiddlewares(
middlewares => middlewares
.AddSerializer<JsonCoreSerializer>()))
.AddProducer<JsonGzipProducer>(
producer => producer
.DefaultTopic(JsonGzipTopicName)
Expand Down
39 changes: 39 additions & 0 deletions tests/KafkaFlow.IntegrationTests/Core/Handlers/MessageStorage.cs
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@ internal static class MessageStorage
private static readonly ConcurrentBag<(long, int)> s_versions = new();
private static readonly ConcurrentBag<byte[]> s_byteMessages = new();
private static readonly ConcurrentBag<byte[]> s_nullMessages = new();
private static readonly ConcurrentBag<OffsetTrackerMessage> s_offsetTrackerMessages = new();
private static long s_offsetTrack;

public static void Add(ITestMessage message)
{
Expand All @@ -34,6 +36,12 @@ public static void Add(TestProtoMessage message)
{
s_protoMessages.Add(message);
}

/// <summary>
/// Stores a consumed <see cref="OffsetTrackerMessage"/> and raises the tracked
/// high-water offset if this message's offset is larger.
/// </summary>
public static void Add(OffsetTrackerMessage message)
{
    s_offsetTrackerMessages.Add(message);

    // Handlers run on multiple workers concurrently, so a plain
    // `s_offsetTrack = Math.Max(...)` read-modify-write could lose an update.
    // Use a CAS loop to make the max update atomic.
    long observed;
    while ((observed = System.Threading.Interlocked.Read(ref s_offsetTrack)) < message.Offset)
    {
        if (System.Threading.Interlocked.CompareExchange(ref s_offsetTrack, message.Offset, observed) == observed)
        {
            break;
        }
    }
}

public static void Add(byte[] message)
{
Expand Down Expand Up @@ -139,6 +147,35 @@ public static async Task AssertNullMessageAsync()
await Task.Delay(100).ConfigureAwait(false);
}
}

/// <summary>
/// Polls the stored messages for one matching <paramref name="message"/>'s Id and Offset.
/// With <paramref name="assertInStore"/> = true, fails if it never arrives within the timeout;
/// with false, fails if it DOES arrive (i.e. asserts the message was not re-consumed).
/// </summary>
/// <param name="message">The message to look for.</param>
/// <param name="assertInStore">True to assert presence; false to assert absence.</param>
public static async Task AssertOffsetTrackerMessageNotReceivedAsync(OffsetTrackerMessage message, bool assertInStore = true)
{
    var start = DateTime.Now;

    while (!s_offsetTrackerMessages.Any(x => x.Id == message.Id && x.Offset == message.Offset))
    {
        // BUG FIX: TimeSpan.Seconds is only the 0-59 seconds component and wraps every
        // minute, so the timeout could silently never fire (or fire late). TotalSeconds
        // is the actual elapsed time.
        if (DateTime.Now.Subtract(start).TotalSeconds > TimeoutSec)
        {
            if (assertInStore)
            {
                Assert.Fail("Message (OffsetTrackerMessage) not received");
            }

            return;
        }

        await Task.Delay(100).ConfigureAwait(false);
    }

    if (!assertInStore)
    {
        Assert.Fail("Message (OffsetTrackerMessage) received when it should not have been.");
    }
}

// Returns the highest offset recorded by Add(OffsetTrackerMessage) since the last Clear().
public static long GetOffsetTrack() => s_offsetTrack;

public static List<(long ticks, int version)> GetVersions()
{
Expand All @@ -151,5 +188,7 @@ public static void Clear()
s_testMessages.Clear();
s_byteMessages.Clear();
s_protoMessages.Clear();
s_offsetTrackerMessages.Clear();
s_offsetTrack = 0;
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
using System.Threading.Tasks;
using KafkaFlow.IntegrationTests.Core.Messages;

namespace KafkaFlow.IntegrationTests.Core.Handlers;

/// <summary>
/// Integration-test handler that stamps each consumed <see cref="OffsetTrackerMessage"/>
/// with the offset it was read from and stores it in <see cref="MessageStorage"/>
/// for the tests to assert on.
/// </summary>
internal class OffsetTrackerMessageHandler : IMessageHandler<OffsetTrackerMessage>
{
    public Task Handle(IMessageContext context, OffsetTrackerMessage message)
    {
        // Record the consumer-side offset on the message itself so tests can compare
        // produced vs. consumed offsets for the same Id.
        message.Offset = context.ConsumerContext.Offset;
        MessageStorage.Add(message);
        return Task.CompletedTask;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
using System;

namespace KafkaFlow.IntegrationTests.Core.Messages;

/// <summary>
/// Test message that carries a correlation Id and the Kafka offset it was written to / read from.
/// </summary>
internal class OffsetTrackerMessage
{
    // Correlation id; used as the Kafka message key when producing (see DeliveryHandler in ConsumerTest).
    public Guid Id { get; set; }

    // Broker-assigned offset: set from the delivery report on produce, and from
    // ConsumerContext.Offset by OffsetTrackerMessageHandler on consume.
    public long Offset { get; set; }
}
Loading