Skip to content

Commit 61f305c

Browse files
author
Plamen Pilev
committed
merge latest develop
2 parents 62b2fb6 + 9cfac50 commit 61f305c

File tree

187 files changed

+8220
-1013
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

187 files changed

+8220
-1013
lines changed

.circleci/config.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
- run: echo "deb http://ftp.debian.org/debian stable main contrib non-free" >> /etc/apt/sources.list
1616
- run: apt update
1717
- run: apt install -y unzip
18-
# Install JAVA 11
18+
# Install JAVA 17
1919
- run: mkdir -p /usr/share/man/man1 # FIX https://github.com/geerlingguy/ansible-role-java/issues/64
2020
- run: apt install -y openjdk-17-jdk
2121
# BEGIN Dependencies for RocksDB
@@ -24,7 +24,7 @@
2424
# END Dependencies for RocksDB
2525
- run: set JAVA_HOME /usr/lib/jvm/java-17-openjdk-amd64/
2626
- run: export JAVA_HOME
27-
- run: dotnet sonarscanner begin /k:LGouellec_kafka-streams-dotnet /o:kafka-streams-dotnet /d:sonar.login=${SONAR_TOKEN} /d:sonar.host.url=https://sonarcloud.io /d:sonar.cs.opencover.reportsPaths="**\coverage*.opencover.xml" /d:sonar.coverage.exclusions="**sample*/*.cs,**test*/*.cs,**Tests*.cs,**Mock*.cs"
27+
- run: dotnet sonarscanner begin /k:LGouellec_kafka-streams-dotnet /o:kafka-streams-dotnet /d:sonar.login=${SONAR_TOKEN} /d:sonar.host.url=https://sonarcloud.io /d:sonar.cs.opencover.reportsPaths="**\coverage*.opencover.xml" /d:sonar.coverage.exclusions="**sample*/*.cs,**test*/*.cs,**Tests*.cs,**Mock*.cs,**State/Cache/Internal/*.cs"
2828
- run: dotnet build
2929
- run: dotnet test --no-restore --no-build --verbosity normal -f net6.0 --collect:"XPlat Code Coverage" /p:CollectCoverage=true /p:CoverletOutputFormat=opencover test/Streamiz.Kafka.Net.Tests/Streamiz.Kafka.Net.Tests.csproj
3030
- run: dotnet sonarscanner end /d:sonar.login=${SONAR_TOKEN}

.gitignore

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,4 +9,6 @@ build
99
TestResults
1010

1111
.idea/
12-
.vscode/
12+
.vscode/
13+
14+
confidential

README.md

Lines changed: 16 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -104,22 +104,22 @@ static async System.Threading.Tasks.Task Main(string[] args)
104104
|:------------------------------------------------------------:|:----------------------------------:|:----------------------:|:------------------------------------------:|
105105
| Stateless processors | X | X | |
106106
| RocksDb store | X | X | |
107-
| Standby replicas | X | | No plan for now |
107+
| Standby replicas | X | | No plan for now |
108108
| InMemory store | X | X | |
109109
| Transformer, Processor API | X | X | |
110-
| Punctuate | X | X | |
110+
| Punctuate | X | X | |
111111
| KStream-KStream Join | X | X | |
112112
| KTable-KTable Join | X | X | |
113-
| KTable-KTable FK Join | X | | Plan for 1.6.0 |
113+
| KTable-KTable FK Join | X | | Plan for future |
114114
| KStream-KTable Join | X | X | |
115115
| KStream-GlobalKTable Join | X | X | |
116-
| KStream Async Processing (external call inside the topology) | | X | Not supported in Kafka Streams JAVA |
116+
| KStream Async Processing (external call inside the topology) | | X | Not supported in Kafka Streams JAVA |
117117
| Hopping window | X | X | |
118118
| Tumbling window | X | X | |
119119
| Sliding window | X | | No plan for now |
120120
| Session window | X | | No plan for now |
121-
| Cache | X | | Plan for 1.6.0 |
122-
| Suppress(..) | X | | No plan for now |
121+
| Cache | X | X | EA 1.6.0 |
122+
| Suppress(..) | X | | No plan for now |
123123
| Interactive Queries | X | | No plan for now |
124124
| State store batch restoring | X | | No plan for now |
125125
| Exactly Once (v1 and v2) | X | X | EOS V1 supported, EOS V2 not supported yet |
@@ -137,3 +137,13 @@ When adding or changing a service please add tests and documentations.
137137
# Support
138138

139139
You can find support [here](https://discord.gg/J7Jtxum)
140+
141+
# Star History
142+
143+
<a href="https://star-history.com/#LGouellec/kafka-streams-dotnet&Date">
144+
<picture>
145+
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=LGouellec/kafka-streams-dotnet&type=Date&theme=dark" />
146+
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=LGouellec/kafka-streams-dotnet&type=Date" />
147+
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=LGouellec/kafka-streams-dotnet&type=Date" />
148+
</picture>
149+
</a>

core/Crosscutting/DictionaryExtensions.cs

Lines changed: 31 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
using System;
22
using System.Collections.Concurrent;
33
using System.Collections.Generic;
4+
using System.Diagnostics.CodeAnalysis;
45
using Confluent.Kafka;
56

67
namespace Streamiz.Kafka.Net.Crosscutting
@@ -26,11 +27,9 @@ public static bool AddOrUpdate<K, V>(this IDictionary<K, V> map, K key, V value)
2627
map[key] = value;
2728
return false;
2829
}
29-
else
30-
{
31-
map.Add(key, value);
32-
return true;
33-
}
30+
31+
map.Add(key, value);
32+
return true;
3433
}
3534

3635
/// <summary>
@@ -134,5 +133,32 @@ public static void CreateListOrAdd<K, V>(this IDictionary<K, List<V>> source, K
134133
source.Add(key, new List<V>{value});
135134
}
136135

136+
#if NETSTANDARD2_0
137+
public static bool TryAdd<K, V>( this IDictionary<K, V> dictionary,
138+
K key,
139+
V value){
140+
if (dictionary == null)
141+
throw new ArgumentNullException(nameof (dictionary));
142+
if (dictionary.ContainsKey(key))
143+
return false;
144+
dictionary.Add(key, value);
145+
return true;
146+
}
147+
148+
public static bool Remove<K, V>( this IDictionary<K, V> dictionary,
149+
K key,
150+
out V value){
151+
bool result = dictionary.TryGetValue(key, out V valueTmp);
152+
if (result)
153+
{
154+
value = valueTmp;
155+
dictionary.Remove(key);
156+
return true;
157+
}
158+
value = default(V);
159+
return false;
160+
}
161+
#endif
162+
137163
}
138164
}

core/Crosscutting/KafkaExtensions.cs

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -86,6 +86,9 @@ internal static Headers Clone(this Headers headers)
8686
originHeader.ForEach(h => copyHeaders.Add(h.Key, h.Item2));
8787
return copyHeaders;
8888
}
89+
90+
internal static long GetEstimatedSize(this Headers headers)
91+
=> headers.Sum(header => header.Key.Length + header.GetValueBytes().LongLength);
8992

9093
internal static Headers AddOrUpdate(this Headers headers, string key, byte[] value)
9194
{

core/Crosscutting/SortedDictionaryExtensions.cs

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,5 +33,19 @@ internal static IEnumerable<KeyValuePair<K, V>> SubMap<K, V>(this SortedDictiona
3333
}
3434
}
3535
}
36+
37+
internal static IEnumerable<KeyValuePair<K, V>> TailMap<K, V>(this SortedDictionary<K, V> sortedDic, K keyFrom,
38+
bool inclusive)
39+
{
40+
foreach (K k in sortedDic.Keys)
41+
{
42+
int rT = sortedDic.Comparer.Compare(keyFrom, k);
43+
44+
if ((inclusive && rT <= 0) || (!inclusive && rT < 0))
45+
{
46+
yield return new KeyValuePair<K, V>(k, sortedDic[k]);
47+
}
48+
}
49+
}
3650
}
3751
}

core/Crosscutting/Utils.cs

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -42,5 +42,11 @@ public static bool IsNumeric(object expression, out Double number)
4242
, NumberFormatInfo.InvariantInfo
4343
, out number);
4444
}
45+
46+
public static void CheckIfNotNull(object parameter, string nameAccessor)
47+
{
48+
if(parameter == null)
49+
throw new ArgumentException($"{nameAccessor} must not be null");
50+
}
4551
}
4652
}

core/Kafka/IRecordCollector.cs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
using System.Collections.Generic;
1+
using System.Collections.Generic;
22
using Confluent.Kafka;
33
using Streamiz.Kafka.Net.SerDes;
44

@@ -12,5 +12,6 @@ internal interface IRecordCollector
1212
void Close();
1313
void Send<K, V>(string topic, K key, V value, Headers headers, long timestamp, ISerDes<K> keySerializer, ISerDes<V> valueSerializer);
1414
void Send<K, V>(string topic, K key, V value, Headers headers, int partition, long timestamp, ISerDes<K> keySerializer, ISerDes<V> valueSerializer);
15+
int PartitionsFor(string topic);
1516
}
1617
}

core/Kafka/Internal/DefaultKafkaClientSupplier.cs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,6 @@ public IProducer<byte[], byte[]> GetProducer(ProducerConfig config)
110110
producerStatisticsHandler.Publish(statistics);
111111
});
112112
}
113-
114113
return builder.Build();
115114
}
116115

core/Kafka/Internal/RecordCollector.cs

Lines changed: 26 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
using System.Collections.Concurrent;
88
using System.Collections.Generic;
99
using System.Diagnostics;
10+
using System.Linq;
1011
using System.Text;
1112
using Microsoft.Extensions.Logging;
1213
using Streamiz.Kafka.Net.Metrics;
@@ -81,24 +82,28 @@ public void Clear()
8182
private readonly IStreamConfig configuration;
8283
private readonly TaskId id;
8384
private readonly Sensor droppedRecordsSensor;
85+
private readonly IAdminClient _adminClient;
8486
private Exception exception = null;
8587

8688
private readonly string logPrefix;
8789
private readonly ILogger log = Logger.GetLogger(typeof(RecordCollector));
8890

89-
private readonly ConcurrentDictionary<TopicPartition, long> collectorsOffsets =
90-
new ConcurrentDictionary<TopicPartition, long>();
91+
private readonly ConcurrentDictionary<TopicPartition, long> collectorsOffsets = new();
9192

92-
private readonly RetryRecordContext retryRecordContext = new RetryRecordContext();
93+
private readonly RetryRecordContext retryRecordContext = new();
9394

9495
public IDictionary<TopicPartition, long> CollectorOffsets => collectorsOffsets.ToDictionary();
9596

96-
public RecordCollector(string logPrefix, IStreamConfig configuration, TaskId id, Sensor droppedRecordsSensor)
97+
public IDictionary<string, (int, DateTime)> cachePartitionsForTopics =
98+
new Dictionary<string, (int, DateTime)>();
99+
100+
public RecordCollector(string logPrefix, IStreamConfig configuration, TaskId id, Sensor droppedRecordsSensor, IAdminClient adminClient)
97101
{
98102
this.logPrefix = $"{logPrefix}";
99103
this.configuration = configuration;
100104
this.id = id;
101105
this.droppedRecordsSensor = droppedRecordsSensor;
106+
_adminClient = adminClient;
102107
}
103108

104109
public void Init(ref IProducer<byte[], byte[]> producer)
@@ -136,6 +141,8 @@ public void Close()
136141
}
137142
}
138143
}
144+
145+
_adminClient?.Dispose();
139146
}
140147

141148
public void Flush()
@@ -416,5 +423,20 @@ private void CheckForException()
416423
throw e;
417424
}
418425
}
426+
427+
public int PartitionsFor(string topic)
428+
{
429+
var adminConfig = configuration.ToAdminConfig("");
430+
var refreshInterval = adminConfig.TopicMetadataRefreshIntervalMs ?? 5 * 60 * 1000;
431+
432+
if (cachePartitionsForTopics.ContainsKey(topic) &&
433+
cachePartitionsForTopics[topic].Item2 + TimeSpan.FromMilliseconds(refreshInterval) > DateTime.Now)
434+
return cachePartitionsForTopics[topic].Item1;
435+
436+
var metadata = _adminClient.GetMetadata(topic, TimeSpan.FromSeconds(5));
437+
var partitionCount = metadata.Topics.FirstOrDefault(t => t.Topic.Equals(topic))!.Partitions.Count;
438+
cachePartitionsForTopics.Add(topic, (partitionCount, DateTime.Now));
439+
return partitionCount;
440+
}
419441
}
420442
}

0 commit comments

Comments
 (0)