@@ -14,7 +14,11 @@ import scala.util.Try
14
14
import scala .xml .Elem
15
15
import scala .xml .transform .{RewriteRule , RuleTransformer }
16
16
17
- // Warning: Flink doesn't work correctly with 2.12.11
17
+ // Warning: Flink doesn't work correctly with Scala 2.12.11 and higher.
18
+ // Upgrading to a newer version of Scala 2.12 causes the JavaCollectionsSerializationTest to fail
19
+ // because these versions switched to the same Java collection wrappers used in Scala 2.13.
20
+ // These wrappers lack dedicated Kryo serializers, which we added in the flink-scala-2.13 library: https://github.com/TouK/flink-scala-2.13.
21
+ // To bump Scala 2.12 we would need to create a flink-scala-2.12 library similar to flink-scala-2.13.
18
22
val scala212 = " 2.12.10"
19
23
val scala213 = " 2.13.15"
20
24
@@ -105,6 +109,8 @@ def defaultMergeStrategy: String => MergeStrategy = {
105
109
case PathList (ps @ _* ) if ps.last == " module-info.class" => MergeStrategy .discard
106
110
// we override Spring's class and we want to keep only our implementation
107
111
case PathList (ps @ _* ) if ps.last == " NumberUtils.class" => MergeStrategy .first
112
+ case PathList (ps @ _* ) if ps.last == " Projection.class" => MergeStrategy .first
113
+ case PathList (ps @ _* ) if ps.last == " Selection.class" => MergeStrategy .first
108
114
// merge Netty version information files
109
115
case PathList (ps @ _* ) if ps.last == " io.netty.versions.properties" => MergeStrategy .concat
110
116
// due to swagger-parser dependencies having different schema definitions (json-schema-validator and json-schema-core)
@@ -295,6 +301,7 @@ lazy val commonSettings =
295
301
// You can find versions provided by Flink in it's lib/flink-dist-*.jar/META-INF/DEPENDENCIES file.
296
302
val flinkV = " 1.19.2"
297
303
val flinkConnectorKafkaV = " 3.2.0-1.19"
304
+ val jdbcFlinkConnectorV = " 3.2.0-1.19"
298
305
val flinkCommonsCompressV = " 1.26.0"
299
306
val flinkCommonsLang3V = " 3.12.0"
300
307
val flinkCommonsTextV = " 1.10.0"
@@ -306,6 +313,8 @@ val avroV = "1.11.4"
306
313
// we should use max(version used by confluent, version acceptable by flink), https://docs.confluent.io/platform/current/installation/versions-interoperability.html - confluent version reference
307
314
val kafkaV = " 3.8.1"
308
315
// to update we need configurable SpEL length limit from 6.0.9, but 6.x requires JDK 17
316
+ // when updating, note that we have copied and modified the class org.springframework.expression.spel.ast.Projection
317
+ // and org.springframework.util.NumberUtils and org.springframework.expression.spel.ast.Selection
309
318
val springV = " 5.2.23.RELEASE"
310
319
val scalaTestV = " 3.2.18"
311
320
val scalaCheckV = " 1.17.1"
@@ -323,6 +332,7 @@ val jacksonV = "2.17.2"
323
332
val catsV = " 2.12.0"
324
333
val catsEffectV = " 3.5.4"
325
334
val everitSchemaV = " 1.14.4"
335
+ val fastParseV = " 3.1.1"
326
336
val slf4jV = " 1.7.36"
327
337
val scalaLoggingV = " 3.9.5"
328
338
val scalaCompatV = " 1.0.2"
@@ -358,6 +368,7 @@ val cronParserV = "9.1.6" // 9.1.7+ requires JDK 16+
358
368
val javaxValidationApiV = " 2.0.1.Final"
359
369
val caffeineCacheV = " 3.1.8"
360
370
val sttpV = " 3.9.8"
371
+ val sttpSharedV = " 1.3.22"
361
372
val tapirV = " 1.11.7"
362
373
val openapiCirceYamlV = " 0.11.3"
363
374
// we use legacy version because this one supports Scala 2.12
@@ -636,6 +647,7 @@ lazy val flinkDeploymentManager = (project in flink("management"))
636
647
)
637
648
.dependsOn(
638
649
deploymentManagerApi % Provided ,
650
+ scenarioCompilerFlinkDeps,
639
651
flinkMiniCluster,
640
652
commonUtils % Provided ,
641
653
utilsInternal % Provided ,
@@ -680,6 +692,7 @@ lazy val flinkDevModel = (project in flink("management/dev-model"))
680
692
// It has to be in the default, Compile scope because all components are eagerly loaded so it will be loaded also
681
693
// on the Flink side where this library is missing
682
694
liteComponentsApi,
695
+ defaultHelpers,
683
696
componentsUtils % Provided ,
684
697
// TODO: NodeAdditionalInfoProvider & ComponentExtractor should probably be moved to API?
685
698
scenarioCompiler % Provided ,
@@ -807,6 +820,7 @@ lazy val flinkExecutor = (project in flink("executor"))
807
820
}.toList,
808
821
)
809
822
.dependsOn(
823
+ scenarioCompilerFlinkDeps,
810
824
flinkComponentsUtils,
811
825
flinkExtensionsApi,
812
826
scenarioCompiler,
@@ -818,6 +832,19 @@ lazy val flinkExecutor = (project in flink("executor"))
818
832
flinkTestUtils % Test ,
819
833
)
820
834
835
+ lazy val scenarioCompilerFlinkDeps = (project in flink(" scenario-compiler-deps" ))
836
+ .settings(commonSettings)
837
+ .settings(
838
+ name := " nussknacker-flink-scenario-compiler-deps" ,
839
+ libraryDependencies ++= {
840
+ Seq (
841
+ // Dependencies below are provided by the flink-dist jar in production Flink, or by the Flink DM for scenario testing/state verification purposes
842
+ " org.apache.flink" % " flink-streaming-java" % flinkV % Provided ,
843
+ )
844
+ }
845
+ )
846
+ .dependsOn(componentsApi % Provided )
847
+
821
848
lazy val scenarioCompiler = (project in file(" scenario-compiler" ))
822
849
.settings(commonSettings)
823
850
.settings(
@@ -826,10 +853,12 @@ lazy val scenarioCompiler = (project in file("scenario-compiler"))
826
853
Seq (
827
854
" org.typelevel" %% " cats-effect" % catsEffectV,
828
855
" org.scala-lang.modules" %% " scala-java8-compat" % scalaCompatV,
856
+ " com.lihaoyi" %% " fastparse" % fastParseV,
829
857
" org.apache.avro" % " avro" % avroV % Test ,
830
858
" org.scalacheck" %% " scalacheck" % scalaCheckV % Test ,
831
859
" com.cronutils" % " cron-utils" % cronParserV % Test ,
832
- " org.scalatestplus" %% s " scalacheck- $scalaCheckVshort" % scalaTestPlusV % Test
860
+ " org.scalatestplus" %% s " scalacheck- $scalaCheckVshort" % scalaTestPlusV % Test ,
861
+ " org.apache.flink" % " flink-core" % flinkV % Test ,
833
862
)
834
863
}
835
864
)
@@ -1011,6 +1040,7 @@ lazy val flinkKafkaComponentsUtils = (project in flink("kafka-components-utils")
1011
1040
.dependsOn(
1012
1041
componentsApi % Provided ,
1013
1042
kafkaComponentsUtils,
1043
+ schemedKafkaComponentsUtils,
1014
1044
flinkComponentsUtils % Provided ,
1015
1045
flinkExtensionsApi % Provided ,
1016
1046
componentsUtils % Provided ,
@@ -1131,7 +1161,12 @@ lazy val mathUtils = (project in utils("math-utils"))
1131
1161
lazy val defaultHelpers = (project in utils(" default-helpers" ))
1132
1162
.settings(commonSettings)
1133
1163
.settings(
1134
- name := " nussknacker-default-helpers"
1164
+ name := " nussknacker-default-helpers" ,
1165
+ libraryDependencies ++= {
1166
+ Seq (
1167
+ " org.apache.flink" % " flink-core" % flinkV % Test ,
1168
+ )
1169
+ }
1135
1170
)
1136
1171
.dependsOn(mathUtils, commonUtils, testUtils % Test , scenarioCompiler % " test->test;test->compile" )
1137
1172
@@ -1229,16 +1264,34 @@ lazy val flinkMiniCluster = (project in flink("minicluster"))
1229
1264
name := " nussknacker-flink-minicluster" ,
1230
1265
libraryDependencies ++= {
1231
1266
Seq (
1232
- (" org.apache.flink" % " flink-streaming-java" % flinkV)
1267
+ (" org.apache.flink" % " flink-streaming-java" % flinkV)
1233
1268
.excludeAll(
1234
1269
ExclusionRule (" log4j" , " log4j" ),
1235
1270
ExclusionRule (" org.slf4j" , " slf4j-log4j12" ),
1236
1271
ExclusionRule (" com.esotericsoftware" , " kryo-shaded" ),
1237
1272
),
1238
- " org.apache.flink" % " flink-statebackend-rocksdb" % flinkV,
1239
- " org.scala-lang.modules" %% " scala-collection-compat" % scalaCollectionsCompatV % Provided ,
1240
- " com.typesafe.scala-logging" %% " scala-logging" % scalaLoggingV % Provided ,
1241
- " com.softwaremill.retry" %% " retry" % retryV,
1273
+ " org.apache.flink" % " flink-statebackend-rocksdb" % flinkV,
1274
+ // Below is a list of libs that are available in flink distribution
1275
+ // We want to make flink minicluster as featured as standard flink distribution
1276
+ " org.apache.flink" % " flink-connector-files" % flinkV,
1277
+ " org.apache.flink" % " flink-csv" % flinkV,
1278
+ " org.apache.flink" % " flink-json" % flinkV,
1279
+ (" org.apache.flink" % " flink-table-api-java-bridge" % flinkV)
1280
+ .excludeAll(
1281
+ ExclusionRule (" com.esotericsoftware" , " kryo-shaded" )
1282
+ ),
1283
+ (" org.apache.flink" % " flink-table-runtime" % flinkV)
1284
+ .excludeAll(
1285
+ ExclusionRule (" com.esotericsoftware" , " kryo-shaded" )
1286
+ ),
1287
+ (" org.apache.flink" % " flink-table-planner-loader" % flinkV)
1288
+ .excludeAll(
1289
+ ExclusionRule (" com.esotericsoftware" , " kryo-shaded" )
1290
+ ),
1291
+ // end of list
1292
+ " org.scala-lang.modules" %% " scala-collection-compat" % scalaCollectionsCompatV % Provided ,
1293
+ " com.typesafe.scala-logging" %% " scala-logging" % scalaLoggingV % Provided ,
1294
+ " com.softwaremill.retry" %% " retry" % retryV,
1242
1295
) ++ flinkLibScalaDeps(scalaVersion.value)
1243
1296
}
1244
1297
)
@@ -1329,7 +1382,12 @@ lazy val liteKafkaComponentsTests: Project = (project in lite("components/kafka-
1329
1382
)
1330
1383
},
1331
1384
)
1332
- .dependsOn(liteEngineKafkaComponentsApi % Test , componentsUtils % Test , liteComponentsTestkit % Test )
1385
+ .dependsOn(
1386
+ liteEngineKafkaComponentsApi % Test ,
1387
+ componentsUtils % Test ,
1388
+ liteComponentsTestkit % Test ,
1389
+ kafkaTestUtils % Test
1390
+ )
1333
1391
1334
1392
lazy val liteRequestResponseComponents = (project in lite(" components/request-response" ))
1335
1393
.settings(commonSettings)
@@ -1717,6 +1775,7 @@ lazy val httpUtils = (project in utils("http-utils"))
1717
1775
" com.softwaremill.sttp.client3" %% " circe" % sttpV,
1718
1776
" com.softwaremill.sttp.client3" %% " async-http-client-backend-future" % sttpV,
1719
1777
" io.netty" % " netty-transport-native-epoll" % nettyV,
1778
+ " io.netty" % " netty-handler" % nettyV,
1720
1779
)
1721
1780
}
1722
1781
)
@@ -1739,7 +1798,8 @@ lazy val openapiComponents = (project in component("openapi"))
1739
1798
ExclusionRule (organization = " jakarta.validation" )
1740
1799
),
1741
1800
" org.apache.flink" % " flink-streaming-java" % flinkV % Provided ,
1742
- " org.scalatest" %% " scalatest" % scalaTestV % " it,test"
1801
+ " org.scalatest" %% " scalatest" % scalaTestV % " it,test" ,
1802
+ " org.wiremock" % " wiremock" % wireMockV % Test ,
1743
1803
),
1744
1804
)
1745
1805
.dependsOn(
@@ -1836,9 +1896,7 @@ lazy val flinkBaseComponentsTests = (project in flink("components/base-tests"))
1836
1896
.settings(
1837
1897
name := " nussknacker-flink-base-components-tests" ,
1838
1898
libraryDependencies ++= Seq (
1839
- " org.apache.flink" % " flink-connector-files" % flinkV % Test ,
1840
- " org.apache.flink" % " flink-csv" % flinkV % Test ,
1841
- " org.apache.flink" % " flink-json" % flinkV % Test
1899
+ " org.apache.flink" % " flink-connector-jdbc" % jdbcFlinkConnectorV % Test ,
1842
1900
)
1843
1901
)
1844
1902
.dependsOn(
@@ -1869,16 +1927,12 @@ lazy val flinkTableApiComponents = (project in flink("components/table"))
1869
1927
name := " nussknacker-flink-table-components" ,
1870
1928
libraryDependencies ++= {
1871
1929
Seq (
1872
- " org.apache.calcite" % " calcite-linq4j" % calciteV, // required by fliink-sql-parser
1873
- " org.apache.flink" % " flink-table-api-java" % flinkV,
1874
- " org.apache.flink" % " flink-table-api-java-bridge" % flinkV,
1875
- " org.apache.flink" % " flink-table-planner-loader" % flinkV,
1876
- " org.apache.flink" % " flink-table-runtime" % flinkV,
1877
- " org.apache.flink" % " flink-clients" % flinkV,
1878
1930
" org.apache.flink" % " flink-sql-parser" % flinkV,
1879
- " org.apache.flink" % " flink-connector-files" % flinkV, // needed for testing data generation
1880
- " org.apache.flink" % " flink-json" % flinkV, // needed for testing data generation
1881
- " org.apache.flink" % " flink-csv" % flinkV % Test ,
1931
+ " org.apache.calcite" % " calcite-linq4j" % calciteV, // required by flink-sql-parser
1932
+ " org.apache.flink" % " flink-streaming-java" % flinkV % Provided ,
1933
+ " org.apache.flink" % " flink-table-api-java" % flinkV % Provided ,
1934
+ " org.apache.flink" % " flink-table-api-java-bridge" % flinkV % Provided ,
1935
+ " org.apache.flink" % " flink-connector-jdbc" % jdbcFlinkConnectorV % Test ,
1882
1936
)
1883
1937
}
1884
1938
)
@@ -1889,7 +1943,8 @@ lazy val flinkTableApiComponents = (project in flink("components/table"))
1889
1943
componentsUtils % Provided ,
1890
1944
flinkComponentsUtils % Provided ,
1891
1945
jsonUtils % Provided ,
1892
- flinkMiniCluster % Provided ,
1946
+ extensionsApi % Provided ,
1947
+ flinkMiniCluster % Test ,
1893
1948
testUtils % Test ,
1894
1949
flinkComponentsTestkit % Test ,
1895
1950
)
@@ -1931,7 +1986,8 @@ lazy val customHttpServiceApi = (project in file("designer/custom-http-service-a
1931
1986
name := " nussknacker-custom-http-service-api" ,
1932
1987
libraryDependencies ++= {
1933
1988
Seq (
1934
- " org.apache.pekko" %% " pekko-http" % pekkoHttpV,
1989
+ " org.apache.pekko" %% " pekko-http" % pekkoHttpV,
1990
+ " com.softwaremill.sttp.shared" %% " pekko" % sttpSharedV,
1935
1991
)
1936
1992
}
1937
1993
)
@@ -2237,6 +2293,7 @@ lazy val modules = List[ProjectReference](
2237
2293
utilsInternal,
2238
2294
testUtils,
2239
2295
flinkExecutor,
2296
+ scenarioCompilerFlinkDeps,
2240
2297
flinkSchemedKafkaComponentsUtils,
2241
2298
flinkKafkaComponentsUtils,
2242
2299
flinkComponentsUtils,
0 commit comments