Commit 61f305c

Author: Plamen Pilev
Commit message: merge latest develop
2 parents 62b2fb6 + 9cfac50

File tree: 187 files changed, +8220 -1013 lines


.circleci/config.yml (+2 -2)

@@ -15,7 +15,7 @@
       - run: echo "deb http://ftp.debian.org/debian stable main contrib non-free" >> /etc/apt/sources.list
       - run: apt update
       - run: apt install -y unzip
-      # Install JAVA 11
+      # Install JAVA 17
       - run: mkdir -p /usr/share/man/man1 # FIX https://github.com/geerlingguy/ansible-role-java/issues/64
       - run: apt install -y openjdk-17-jdk
       # BEGIN Dependencies for RocksDB
@@ -24,7 +24,7 @@
       # END Dependencies for RocksDB
       - run: set JAVA_HOME /usr/lib/jvm/java-17-openjdk-amd64/
       - run: export JAVA_HOME
-      - run: dotnet sonarscanner begin /k:LGouellec_kafka-streams-dotnet /o:kafka-streams-dotnet /d:sonar.login=${SONAR_TOKEN} /d:sonar.host.url=https://sonarcloud.io /d:sonar.cs.opencover.reportsPaths="**\coverage*.opencover.xml" /d:sonar.coverage.exclusions="**sample*/*.cs,**test*/*.cs,**Tests*.cs,**Mock*.cs"
+      - run: dotnet sonarscanner begin /k:LGouellec_kafka-streams-dotnet /o:kafka-streams-dotnet /d:sonar.login=${SONAR_TOKEN} /d:sonar.host.url=https://sonarcloud.io /d:sonar.cs.opencover.reportsPaths="**\coverage*.opencover.xml" /d:sonar.coverage.exclusions="**sample*/*.cs,**test*/*.cs,**Tests*.cs,**Mock*.cs,**State/Cache/Internal/*.cs"
       - run: dotnet build
       - run: dotnet test --no-restore --no-build --verbosity normal -f net6.0 --collect:"XPlat Code Coverage" /p:CollectCoverage=true /p:CoverletOutputFormat=opencover test/Streamiz.Kafka.Net.Tests/Streamiz.Kafka.Net.Tests.csproj
       - run: dotnet sonarscanner end /d:sonar.login=${SONAR_TOKEN}

.gitignore (+3 -1)

@@ -9,4 +9,6 @@ build
 TestResults

 .idea/
-.vscode/
+.vscode/
+
+confidential

README.md (+16 -6)

@@ -104,22 +104,22 @@ static async System.Threading.Tasks.Task Main(string[] args)
 |:------------------------------------------------------------:|:----------------------------------:|:----------------------:|:------------------------------------------:|
 | Stateless processors                                          | X                                  | X                      |                                            |
 | RocksDb store                                                 | X                                  | X                      |                                            |
 | Standby replicas                                              | X                                  |                        | No plan for now                            |
 | InMemory store                                                | X                                  | X                      |                                            |
 | Transformer, Processor API                                    | X                                  | X                      |                                            |
 | Punctuate                                                     | X                                  | X                      |                                            |
 | KStream-KStream Join                                          | X                                  | X                      |                                            |
 | KTable-KTable Join                                            | X                                  | X                      |                                            |
-| KTable-KTable FK Join                                         | X                                  |                        | Plan for 1.6.0                             |
+| KTable-KTable FK Join                                         | X                                  |                        | Plan for future                            |
 | KStream-KTable Join                                           | X                                  | X                      |                                            |
 | KStream-GlobalKTable Join                                     | X                                  | X                      |                                            |
 | KStream Async Processing (external call inside the topology)  |                                    | X                      | Not supported in Kafka Streams JAVA        |
 | Hopping window                                                | X                                  | X                      |                                            |
 | Tumbling window                                               | X                                  | X                      |                                            |
 | Sliding window                                                | X                                  |                        | No plan for now                            |
 | Session window                                                | X                                  |                        | No plan for now                            |
-| Cache                                                         | X                                  |                        | Plan for 1.6.0                             |
+| Cache                                                         | X                                  | X                      | EA 1.6.0                                   |
 | Suppress(..)                                                  | X                                  |                        | No plan for now                            |
 | Interactive Queries                                           | X                                  |                        | No plan for now                            |
 | State store batch restoring                                   | X                                  |                        | No plan for now                            |
 | Exactly Once (v1 and v2)                                      | X                                  | X                      | EOS V1 supported, EOS V2 not supported yet |
@@ -137,3 +137,13 @@ When adding or changing a service please add tests and documentations.
 # Support

 You can found support [here](https://discord.gg/J7Jtxum)
+
+# Star History
+
+<a href="https://star-history.com/#LGouellec/kafka-streams-dotnet&Date">
+  <picture>
+    <source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=LGouellec/kafka-streams-dotnet&type=Date&theme=dark" />
+    <source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=LGouellec/kafka-streams-dotnet&type=Date" />
+    <img alt="Star History Chart" src="https://api.star-history.com/svg?repos=LGouellec/kafka-streams-dotnet&type=Date" />
+  </picture>
+</a>

core/Crosscutting/DictionaryExtensions.cs (+31 -5)

@@ -1,6 +1,7 @@
 using System;
 using System.Collections.Concurrent;
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using Confluent.Kafka;

 namespace Streamiz.Kafka.Net.Crosscutting
@@ -26,11 +27,9 @@ public static bool AddOrUpdate<K, V>(this IDictionary<K, V> map, K key, V value)
                 map[key] = value;
                 return false;
             }
-            else
-            {
-                map.Add(key, value);
-                return true;
-            }
+
+            map.Add(key, value);
+            return true;
         }

         /// <summary>
@@ -134,5 +133,32 @@ public static void CreateListOrAdd<K, V>(this IDictionary<K, List<V>> source, K
             source.Add(key, new List<V>{value});
         }

+#if NETSTANDARD2_0
+        public static bool TryAdd<K, V>(this IDictionary<K, V> dictionary, K key, V value)
+        {
+            if (dictionary == null)
+                throw new ArgumentNullException(nameof(dictionary));
+            if (dictionary.ContainsKey(key))
+                return false;
+            dictionary.Add(key, value);
+            return true;
+        }
+
+        public static bool Remove<K, V>(this IDictionary<K, V> dictionary, K key, out V value)
+        {
+            bool result = dictionary.TryGetValue(key, out V valueTmp);
+            if (result)
+            {
+                value = valueTmp;
+                dictionary.Remove(key);
+                return true;
+            }
+            value = default(V);
+            return false;
+        }
+#endif
+
     }
 }
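On netstandard2.0 these two extensions backfill the TryAdd and Remove(key, out value) members that newer BCLs expose directly on Dictionary<K, V>. A minimal usage sketch (the dictionary contents are illustrative; typing the variable as IDictionary<K, V> lets the extensions bind even on runtimes that have the instance methods):

    using System;
    using System.Collections.Generic;
    using Streamiz.Kafka.Net.Crosscutting;

    IDictionary<string, long> offsets = new Dictionary<string, long>();

    // TryAdd inserts only when the key is absent, instead of throwing.
    offsets.TryAdd("topic-A", 42L);   // true
    offsets.TryAdd("topic-A", 99L);   // false; stored value stays 42

    // Remove with an out-parameter hands back the value that was removed.
    if (offsets.Remove("topic-A", out long removed))
        Console.WriteLine($"removed offset {removed}");   // prints 42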

core/Crosscutting/KafkaExtensions.cs (+3)

@@ -86,6 +86,9 @@ internal static Headers Clone(this Headers headers)
             originHeader.ForEach(h => copyHeaders.Add(h.Key, h.Item2));
             return copyHeaders;
         }
+
+        internal static long GetEstimatedSize(this Headers headers)
+            => headers.Sum(header => header.Key.Length + header.GetValueBytes().LongLength);

         internal static Headers AddOrUpdate(this Headers headers, string key, byte[] value)
         {
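GetEstimatedSize approximates the in-memory footprint of a record's headers as the sum of each header key's character count plus its value's byte length. The method is internal, so this fragment simply inlines the same computation on a Confluent.Kafka Headers instance (the header names and values are illustrative):

    using System.Linq;
    using System.Text;
    using Confluent.Kafka;

    var headers = new Headers
    {
        { "trace-id", new byte[16] },                            // 8 chars + 16 bytes
        { "source", Encoding.UTF8.GetBytes("orders-service") }   // 6 chars + 14 bytes
    };

    long estimate = headers.Sum(h => h.Key.Length + h.GetValueBytes().LongLength); // 44

Note the key term counts UTF-16 characters rather than encoded bytes, so the result is an estimate, which is presumably sufficient for cache size accounting.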

core/Crosscutting/SortedDictionaryExtensions.cs (+14)

@@ -33,5 +33,19 @@ internal static IEnumerable<KeyValuePair<K, V>> SubMap<K, V>(this SortedDictiona
             }
         }
     }
+
+    internal static IEnumerable<KeyValuePair<K, V>> TailMap<K, V>(this SortedDictionary<K, V> sortedDic, K keyFrom,
+        bool inclusive)
+    {
+        foreach (K k in sortedDic.Keys)
+        {
+            int rT = sortedDic.Comparer.Compare(keyFrom, k);
+
+            if ((inclusive && rT <= 0) || (!inclusive && rT < 0))
+            {
+                yield return new KeyValuePair<K, V>(k, sortedDic[k]);
+            }
+        }
+    }
 }
 }
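TailMap lazily yields the entries whose keys compare greater than or equal to keyFrom (strictly greater when inclusive is false), in the dictionary's sort order, by scanning all keys; it mirrors Java's NavigableMap.tailMap(fromKey, inclusive) but runs in O(n) rather than tree-navigation time. An illustrative call (the extension is internal, so this only compiles inside the assembly):

    using System;
    using System.Collections.Generic;

    var dic = new SortedDictionary<int, string> { [1] = "a", [3] = "b", [5] = "c" };

    // All entries with key >= 3:
    foreach (var kv in dic.TailMap(3, inclusive: true))
        Console.WriteLine($"{kv.Key} => {kv.Value}");   // 3 => b, then 5 => c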

core/Crosscutting/Utils.cs (+6)

@@ -42,5 +42,11 @@ public static bool IsNumeric(object expression, out Double number)
                 , NumberFormatInfo.InvariantInfo
                 , out number);
         }
+
+        public static void CheckIfNotNull(object parameter, string nameAccessor)
+        {
+            if (parameter == null)
+                throw new ArgumentException($"{nameAccessor} must not be null");
+        }
     }
 }

core/Kafka/IRecordCollector.cs (+2 -1)

@@ -1,4 +1,4 @@
-using System.Collections.Generic;
+using System.Collections.Generic;
 using Confluent.Kafka;
 using Streamiz.Kafka.Net.SerDes;

@@ -12,5 +12,6 @@ internal interface IRecordCollector
     void Close();
     void Send<K, V>(string topic, K key, V value, Headers headers, long timestamp, ISerDes<K> keySerializer, ISerDes<V> valueSerializer);
     void Send<K, V>(string topic, K key, V value, Headers headers, int partition, long timestamp, ISerDes<K> keySerializer, ISerDes<V> valueSerializer);
+    int PartitionsFor(string topic);
 }
 }

core/Kafka/Internal/DefaultKafkaClientSupplier.cs (-1)

@@ -110,7 +110,6 @@ public IProducer<byte[], byte[]> GetProducer(ProducerConfig config)
                     producerStatisticsHandler.Publish(statistics);
                 });
             }
-
             return builder.Build();
         }

core/Kafka/Internal/RecordCollector.cs (+26 -4)

@@ -7,6 +7,7 @@
 using System.Collections.Concurrent;
 using System.Collections.Generic;
 using System.Diagnostics;
+using System.Linq;
 using System.Text;
 using Microsoft.Extensions.Logging;
 using Streamiz.Kafka.Net.Metrics;
@@ -81,24 +82,28 @@ public void Clear()
         private readonly IStreamConfig configuration;
         private readonly TaskId id;
         private readonly Sensor droppedRecordsSensor;
+        private readonly IAdminClient _adminClient;
         private Exception exception = null;

         private readonly string logPrefix;
         private readonly ILogger log = Logger.GetLogger(typeof(RecordCollector));

-        private readonly ConcurrentDictionary<TopicPartition, long> collectorsOffsets =
-            new ConcurrentDictionary<TopicPartition, long>();
+        private readonly ConcurrentDictionary<TopicPartition, long> collectorsOffsets = new();

-        private readonly RetryRecordContext retryRecordContext = new RetryRecordContext();
+        private readonly RetryRecordContext retryRecordContext = new();

         public IDictionary<TopicPartition, long> CollectorOffsets => collectorsOffsets.ToDictionary();

-        public RecordCollector(string logPrefix, IStreamConfig configuration, TaskId id, Sensor droppedRecordsSensor)
+        public IDictionary<string, (int, DateTime)> cachePartitionsForTopics =
+            new Dictionary<string, (int, DateTime)>();
+
+        public RecordCollector(string logPrefix, IStreamConfig configuration, TaskId id, Sensor droppedRecordsSensor, IAdminClient adminClient)
         {
             this.logPrefix = $"{logPrefix}";
             this.configuration = configuration;
             this.id = id;
             this.droppedRecordsSensor = droppedRecordsSensor;
+            _adminClient = adminClient;
         }

         public void Init(ref IProducer<byte[], byte[]> producer)
@@ -136,6 +141,8 @@ public void Close()
                 }
             }
         }
+
+        _adminClient?.Dispose();
     }

     public void Flush()
@@ -416,5 +423,20 @@ private void CheckForException()
             throw e;
         }
     }
+
+    public int PartitionsFor(string topic)
+    {
+        var adminConfig = configuration.ToAdminConfig("");
+        var refreshInterval = adminConfig.TopicMetadataRefreshIntervalMs ?? 5 * 60 * 1000;
+
+        if (cachePartitionsForTopics.ContainsKey(topic) &&
+            cachePartitionsForTopics[topic].Item2 + TimeSpan.FromMilliseconds(refreshInterval) > DateTime.Now)
+            return cachePartitionsForTopics[topic].Item1;
+
+        var metadata = _adminClient.GetMetadata(topic, TimeSpan.FromSeconds(5));
+        var partitionCount = metadata.Topics.FirstOrDefault(t => t.Topic.Equals(topic))!.Partitions.Count;
+        cachePartitionsForTopics.Add(topic, (partitionCount, DateTime.Now));
+        return partitionCount;
+    }
 }
 }
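PartitionsFor memoizes the partition count per topic and only re-queries cluster metadata once the topic metadata refresh interval (TopicMetadataRefreshIntervalMs, defaulting to five minutes here) has elapsed, so the hot send path avoids a metadata round-trip. A standalone sketch of the same TTL-cache pattern, with illustrative names; unlike the committed code it uses the indexer assignment, so a refresh simply overwrites a stale entry instead of calling Add on an existing key:

    using System;
    using System.Collections.Generic;

    class PartitionCountCache
    {
        private readonly Dictionary<string, (int Count, DateTime FetchedAt)> cache = new();
        private readonly TimeSpan ttl;
        private readonly Func<string, int> fetchPartitionCount; // e.g. wraps IAdminClient.GetMetadata

        public PartitionCountCache(TimeSpan ttl, Func<string, int> fetchPartitionCount)
        {
            this.ttl = ttl;
            this.fetchPartitionCount = fetchPartitionCount;
        }

        public int Get(string topic)
        {
            // Fast path: entry exists and is still fresh.
            if (cache.TryGetValue(topic, out var entry) && entry.FetchedAt + ttl > DateTime.Now)
                return entry.Count;

            // Slow path: one metadata round-trip, then refresh the cache.
            int count = fetchPartitionCount(topic);
            cache[topic] = (count, DateTime.Now);   // indexer overwrites a stale entry
            return count;
        }
    }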

core/KafkaStream.cs (+1 -1)

@@ -324,7 +324,7 @@ string Protect(string str)
                 () => StreamState != null && StreamState.IsRunning() ? 1 : 0,
                 () => threads.Count(t => t.State != ThreadState.DEAD && t.State != ThreadState.PENDING_SHUTDOWN),
                 metricsRegistry);
-
+
             threads = new IThread[numStreamThreads];
             var threadState = new Dictionary<long, Processors.ThreadState>();

core/Metrics/Internal/CachingMetrics.cs (new file, +69)

@@ -0,0 +1,69 @@
+using System;
+using System.Collections.Generic;
+using System.Threading;
+using Streamiz.Kafka.Net.Processors.Internal;
+
+namespace Streamiz.Kafka.Net.Metrics.Internal
+{
+    internal class CachingMetrics
+    {
+        internal static string CACHE_SIZE_BYTES_TOTAL = "cache-size-bytes-total";
+        private static string CACHE_SIZE_BYTES_TOTAL_DESCRIPTION = "The total size in bytes of this cache state store.";
+
+        internal static string HIT_RATIO = "hit-ratio";
+        private static string HIT_RATIO_DESCRIPTION = "The hit ratio defined as the ratio of cache read hits over the total cache read requests.";
+        private static string HIT_RATIO_AVG_DESCRIPTION = "The average cache hit ratio";
+        private static string HIT_RATIO_MIN_DESCRIPTION = "The minimum cache hit ratio";
+        private static string HIT_RATIO_MAX_DESCRIPTION = "The maximum cache hit ratio";
+
+        public static Sensor HitRatioSensor(
+            TaskId taskId,
+            string storeType,
+            string storeName,
+            StreamMetricsRegistry streamsMetrics) {
+
+            Sensor sensor;
+            string hitMetricName = HIT_RATIO;
+            IDictionary<string, string> tags =
+                streamsMetrics.StoreLevelTags(GetThreadId(), taskId.ToString(), storeName, storeType);
+
+            sensor = streamsMetrics.StoreLevelSensor(GetThreadId(), taskId, storeName, hitMetricName, HIT_RATIO_DESCRIPTION, MetricsRecordingLevel.DEBUG);
+
+            SensorHelper.AddAvgAndMinAndMaxToSensor(
+                sensor,
+                StreamMetricsRegistry.STATE_STORE_LEVEL_GROUP,
+                tags,
+                hitMetricName,
+                HIT_RATIO_AVG_DESCRIPTION,
+                HIT_RATIO_MAX_DESCRIPTION,
+                HIT_RATIO_MIN_DESCRIPTION);
+
+            return sensor;
+        }
+
+        public static Sensor TotalCacheSizeBytesSensor(
+            TaskId taskId,
+            string storeType,
+            string storeName,
+            StreamMetricsRegistry streamsMetrics) {
+
+            Sensor sensor;
+            string totalCacheMetricName = CACHE_SIZE_BYTES_TOTAL;
+            IDictionary<string, string> tags =
+                streamsMetrics.StoreLevelTags(GetThreadId(), taskId.ToString(), storeName, storeType);
+
+            sensor = streamsMetrics.StoreLevelSensor(GetThreadId(), taskId, storeName, totalCacheMetricName, CACHE_SIZE_BYTES_TOTAL_DESCRIPTION, MetricsRecordingLevel.DEBUG);
+
+            SensorHelper.AddValueMetricToSensor(
+                sensor,
+                StreamMetricsRegistry.STATE_STORE_LEVEL_GROUP,
+                tags,
+                totalCacheMetricName,
+                CACHE_SIZE_BYTES_TOTAL_DESCRIPTION);
+
+            return sensor;
+        }
+
+        private static string GetThreadId() => Thread.CurrentThread.Name;
+    }
+}
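Both factories register DEBUG-level sensors tagged with thread, task, and store identity; the hit ratio gets avg/min/max statistics while the total cache size is exposed as a plain value. The fragment below shows how the cache store code might feed them; it is a sketch against internal APIs (taskId, metricsRegistry, isHit, and currentCacheSizeBytes are placeholders), not compilable outside the library:

    // Create the sensors once per store instance.
    Sensor hitRatio = CachingMetrics.HitRatioSensor(taskId, "cache", "my-store", metricsRegistry);
    Sensor cacheSize = CachingMetrics.TotalCacheSizeBytesSensor(taskId, "cache", "my-store", metricsRegistry);

    // On every read: 1.0 for a hit, 0.0 for a miss; the avg statistic
    // over those samples is the hit ratio. Uses the new Record(double)
    // overload added to Sensor in this commit.
    hitRatio.Record(isHit ? 1.0 : 0.0);

    // After each put or eviction: publish the cache's current byte footprint,
    // e.g. as computed with Headers.GetEstimatedSize plus key/value sizes.
    cacheSize.Record((double)currentCacheSizeBytes);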

core/Metrics/Internal/SensorHelper.cs (+34)

@@ -125,6 +125,40 @@ internal static void AddAvgAndMaxToSensor(Sensor sensor,
             );
         }

+        internal static void AddAvgAndMinAndMaxToSensor(Sensor sensor,
+            string group,
+            IDictionary<string, string> tags,
+            string operation,
+            string descriptionOfAvg,
+            string descriptionOfMax,
+            string descriptionOfMin) {
+
+            sensor.AddStatMetric(
+                new MetricName(
+                    operation + StreamMetricsRegistry.AVG_SUFFIX,
+                    group,
+                    descriptionOfAvg,
+                    tags),
+                new Avg()
+            );
+            sensor.AddStatMetric(
+                new MetricName(
+                    operation + StreamMetricsRegistry.MAX_SUFFIX,
+                    group,
+                    descriptionOfMax,
+                    tags),
+                new Max()
+            );
+            sensor.AddStatMetric(
+                new MetricName(
+                    operation + StreamMetricsRegistry.MIN_SUFFIX,
+                    group,
+                    descriptionOfMin,
+                    tags),
+                new Min()
+            );
+        }
+
         internal static void AddRateOfSumAndSumMetricsToSensor(Sensor sensor,
             string group,
             IDictionary<string, string> tags,

core/Metrics/Sensor.cs (+3)

@@ -154,6 +154,9 @@ internal void Record()
         internal void Record(long value)
             => Record(value, DateTime.Now.GetMilliseconds());

+        internal void Record(double value)
+            => Record(value, DateTime.Now.GetMilliseconds());
+
         internal virtual void Record(double value, long timeMs)
             => RecordInternal(value, timeMs);