diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml
index 7bdcf6b165a..2b7552b6ac2 100644
--- a/.github/workflows/backend.yml
+++ b/.github/workflows/backend.yml
@@ -353,7 +353,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest', 'windows-latest' ]
timeout-minutes: 60
steps:
@@ -376,7 +376,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 180
steps:
@@ -406,7 +406,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 150
steps:
@@ -436,7 +436,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -466,7 +466,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 150
steps:
@@ -495,7 +495,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -524,7 +524,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -553,7 +553,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -583,7 +583,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -613,7 +613,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -637,7 +637,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 45
steps:
@@ -673,7 +673,7 @@ jobs:
RUN_ZETA_CONTAINER: ${{ needs.changes.outputs.engine }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -701,7 +701,7 @@ jobs:
RUN_ZETA_CONTAINER: ${{ needs.changes.outputs.engine }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -729,7 +729,7 @@ jobs:
RUN_ZETA_CONTAINER: ${{ needs.changes.outputs.engine }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 150
steps:
@@ -760,7 +760,7 @@ jobs:
RUN_ZETA_CONTAINER: ${{ needs.changes.outputs.engine }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 150
steps:
@@ -791,7 +791,7 @@ jobs:
RUN_ZETA_CONTAINER: ${{ needs.changes.outputs.engine }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 150
steps:
@@ -822,7 +822,7 @@ jobs:
RUN_ZETA_CONTAINER: ${{ needs.changes.outputs.engine }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 150
steps:
@@ -853,7 +853,7 @@ jobs:
RUN_ZETA_CONTAINER: ${{ needs.changes.outputs.engine }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 150
steps:
@@ -884,7 +884,7 @@ jobs:
RUN_ZETA_CONTAINER: ${{ needs.changes.outputs.engine }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 150
steps:
@@ -915,7 +915,7 @@ jobs:
RUN_ZETA_CONTAINER: ${{ needs.changes.outputs.engine }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 150
steps:
@@ -946,7 +946,7 @@ jobs:
RUN_ZETA_CONTAINER: ${{ needs.changes.outputs.engine }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -974,7 +974,7 @@ jobs:
RUN_ZETA_CONTAINER: ${{ needs.changes.outputs.engine }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -1002,7 +1002,7 @@ jobs:
RUN_ZETA_CONTAINER: ${{ needs.changes.outputs.engine }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -1030,7 +1030,7 @@ jobs:
RUN_ZETA_CONTAINER: ${{ needs.changes.outputs.engine }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -1058,7 +1058,7 @@ jobs:
RUN_ZETA_CONTAINER: ${{ needs.changes.outputs.engine }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -1086,7 +1086,7 @@ jobs:
RUN_ZETA_CONTAINER: ${{ needs.changes.outputs.engine }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -1114,7 +1114,7 @@ jobs:
RUN_ZETA_CONTAINER: ${{ needs.changes.outputs.engine }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -1139,7 +1139,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 30
steps:
@@ -1164,7 +1164,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -1189,7 +1189,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -1214,7 +1214,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -1240,7 +1240,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -1266,7 +1266,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -1291,7 +1291,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 150
steps:
@@ -1321,7 +1321,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -1346,7 +1346,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
@@ -1371,7 +1371,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- java: [ '8', '11' ]
+ java: [ '8', '17' ]
os: [ 'ubuntu-latest' ]
timeout-minutes: 120
steps:
diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties
index aa1e4df95dd..0700da07892 100644
--- a/.mvn/wrapper/maven-wrapper.properties
+++ b/.mvn/wrapper/maven-wrapper.properties
@@ -15,5 +15,5 @@
# specific language governing permissions and limitations
# under the License.
-distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.4/apache-maven-3.8.4-bin.zip
-wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.1.0/maven-wrapper-3.1.0.jar
+distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.6/apache-maven-3.8.6-bin.zip
+wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar
diff --git a/config/seatunnel-env.cmd b/config/seatunnel-env.cmd
index 79c2d3c117c..5390c0d7641 100644
--- a/config/seatunnel-env.cmd
+++ b/config/seatunnel-env.cmd
@@ -18,4 +18,29 @@ REM Home directory of spark distribution.
if "%SPARK_HOME%" == "" set "SPARK_HOME=C:\Program Files\spark"
REM Home directory of flink distribution.
-if "%FLINK_HOME%" == "" set "FLINK_HOME=C:\Program Files\flink"
\ No newline at end of file
+if "%FLINK_HOME%" == "" set "FLINK_HOME=C:\Program Files\flink"
+
+REM Initialize JAVA_OPTS to avoid unbound variable error.
+if not defined JAVA_OPTS (
+ set "JAVA_OPTS="
+)
+
+REM Determine the major version of the current Java runtime.
+for /f "tokens=2 delims==" %%v in ('java -XshowSettings:properties -version 2>&1 ^| findstr "java.version"') do (
+ set "java_version=%%v"
+)
+for /f "tokens=1 delims=." %%v in ("%java_version%") do (
+ set "java_major_version=%%v"
+)
+
+REM Set JAVA_OPTS if Java version is 17 or newer.
+if %java_major_version% geq 17 (
+ if defined JAVA_OPTS (
+ set "JAVA_OPTS=%JAVA_OPTS% --add-exports=java.base/sun.net.util=ALL-UNNAMED --add-exports=java.rmi/sun.rmi.registry=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED --add-exports=java.security.jgss/sun.security.krb5=ALL-UNNAMED --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.text=ALL-UNNAMED --add-opens=java.base/java.time=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.locks=ALL-UNNAMED"
+ ) else (
+ set "JAVA_OPTS=--add-exports=java.base/sun.net.util=ALL-UNNAMED --add-exports=java.rmi/sun.rmi.registry=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED --add-exports=java.security.jgss/sun.security.krb5=ALL-UNNAMED --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.text=ALL-UNNAMED --add-opens=java.base/java.time=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.locks=ALL-UNNAMED"
+ )
+)
+
+REM Export JAVA_OPTS
+set JAVA_OPTS
\ No newline at end of file
diff --git a/config/seatunnel-env.sh b/config/seatunnel-env.sh
index 1bae8c76254..20583f7dcb8 100644
--- a/config/seatunnel-env.sh
+++ b/config/seatunnel-env.sh
@@ -20,3 +20,26 @@
SPARK_HOME=${SPARK_HOME:-/opt/spark}
# Home directory of flink distribution.
FLINK_HOME=${FLINK_HOME:-/opt/flink}
+
+# Initialize JAVA_OPTS to avoid unbound variable error.
+JAVA_OPTS=${JAVA_OPTS:-}
+
+# Function to check if the current Java version is 17 or higher.
+is_java_17_or_newer() {
+ java_version_output=$(java -version 2>&1)
+ if [[ $java_version_output == *"version \""* ]]; then
+ java_version=$(echo $java_version_output | awk -F '"' '/version/ {print $2}')
+ java_major_version=$(echo $java_version | cut -d'.' -f1)
+ if [[ "$java_major_version" -ge 17 ]]; then
+ return 0
+ fi
+ fi
+ return 1
+}
+
+# Set JAVA_OPTS if Java version is 17 or newer.
+if is_java_17_or_newer; then
+ JAVA_OPTS="$JAVA_OPTS --add-exports=java.base/sun.net.util=ALL-UNNAMED --add-exports=java.rmi/sun.rmi.registry=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED --add-exports=java.security.jgss/sun.security.krb5=ALL-UNNAMED --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.text=ALL-UNNAMED --add-opens=java.base/java.time=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.locks=ALL-UNNAMED"
+fi
+
+export JAVA_OPTS
\ No newline at end of file
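The JPMS switches collected into `JAVA_OPTS` above relax the strong encapsulation that became the default on JDK 16+. A minimal sketch, independent of this patch, of the kind of deep reflection that fails on a stock JDK 17 launch without `--add-opens=java.base/java.lang=ALL-UNNAMED`:

```java
import java.lang.reflect.Field;
import java.lang.reflect.InaccessibleObjectException;

public class StrongEncapsulationDemo {
    public static void main(String[] args) throws Exception {
        // Deep reflection into java.base, similar to what Hadoop/Hazelcast code paths do.
        Field value = String.class.getDeclaredField("value");
        try {
            // Fails on a plain JDK 17 launch; succeeds when the JVM is started with
            // --add-opens=java.base/java.lang=ALL-UNNAMED (one of the flags set above).
            value.setAccessible(true);
            System.out.println("java.base/java.lang is opened to the unnamed module");
        } catch (InaccessibleObjectException e) {
            System.out.println("java.base/java.lang is NOT opened: " + e.getMessage());
        }
    }
}
```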
diff --git a/docs/en/connector-v2/sink/Hive.md b/docs/en/connector-v2/sink/Hive.md
index 20f3d22cb86..393a39ef4ae 100644
--- a/docs/en/connector-v2/sink/Hive.md
+++ b/docs/en/connector-v2/sink/Hive.md
@@ -10,7 +10,7 @@ Write data to Hive.
In order to use this connector, You must ensure your spark/flink cluster already integrated hive. The tested hive version is 2.3.9 and 3.1.3 .
-If you use SeaTunnel Engine, You need put seatunnel-hadoop3-3.1.4-uber.jar and hive-exec-3.1.3.jar and libfb303-0.9.3.jar in $SEATUNNEL_HOME/lib/ dir.
+If you use SeaTunnel Engine, you need to put seatunnel-hadoop3-3.3.4-uber.jar, hive-exec-3.1.3.jar and libfb303-0.9.3.jar in the $SEATUNNEL_HOME/lib/ dir.
:::
## Key features
diff --git a/docs/en/connector-v2/sink/OssFile.md b/docs/en/connector-v2/sink/OssFile.md
index 9cab2b8e561..02556aa840e 100644
--- a/docs/en/connector-v2/sink/OssFile.md
+++ b/docs/en/connector-v2/sink/OssFile.md
@@ -17,7 +17,7 @@
### For SeaTunnel Zeta Engine
-1. You must ensure `seatunnel-hadoop3-3.1.4-uber.jar`, `aliyun-sdk-oss-3.4.1.jar`, `hadoop-aliyun-3.1.4.jar` and `jdom-1.1.jar` in `${SEATUNNEL_HOME}/lib/` dir.
+1. You must ensure `seatunnel-hadoop3-3.3.4-uber.jar`, `aliyun-sdk-oss-3.4.1.jar`, `hadoop-aliyun-3.1.4.jar` and `jdom-1.1.jar` are in the `${SEATUNNEL_HOME}/lib/` dir.
## Key features
diff --git a/docs/en/connector-v2/source/Hive.md b/docs/en/connector-v2/source/Hive.md
index 527a94fc94a..48e20116b22 100644
--- a/docs/en/connector-v2/source/Hive.md
+++ b/docs/en/connector-v2/source/Hive.md
@@ -10,7 +10,7 @@ Read data from Hive.
In order to use this connector, You must ensure your spark/flink cluster already integrated hive. The tested hive version is 2.3.9 and 3.1.3 .
-If you use SeaTunnel Engine, You need put seatunnel-hadoop3-3.1.4-uber.jar and hive-exec-3.1.3.jar and libfb303-0.9.3.jar in $SEATUNNEL_HOME/lib/ dir.
+If you use SeaTunnel Engine, you need to put seatunnel-hadoop3-3.3.4-uber.jar, hive-exec-3.1.3.jar and libfb303-0.9.3.jar in the $SEATUNNEL_HOME/lib/ dir.
:::
## Key features
diff --git a/docs/en/connector-v2/source/OssFile.md b/docs/en/connector-v2/source/OssFile.md
index 42163a9d13e..0d3f3d51987 100644
--- a/docs/en/connector-v2/source/OssFile.md
+++ b/docs/en/connector-v2/source/OssFile.md
@@ -17,7 +17,7 @@
### For SeaTunnel Zeta Engine
-1. You must ensure `seatunnel-hadoop3-3.1.4-uber.jar`, `aliyun-sdk-oss-3.4.1.jar`, `hadoop-aliyun-3.1.4.jar` and `jdom-1.1.jar` in `${SEATUNNEL_HOME}/lib/` dir.
+1. You must ensure `seatunnel-hadoop3-3.3.4-uber.jar`, `aliyun-sdk-oss-3.4.1.jar`, `hadoop-aliyun-3.1.4.jar` and `jdom-1.1.jar` are in the `${SEATUNNEL_HOME}/lib/` dir.
## Key features
diff --git a/docs/en/faq.md b/docs/en/faq.md
index 6a4e838eaed..50610075814 100644
--- a/docs/en/faq.md
+++ b/docs/en/faq.md
@@ -114,3 +114,7 @@ SeaTunnel features a highly abstracted and well-structured architecture, making
## Do I need to understand all of SeaTunnel’s source code if I want to develop my own source, sink, or transform?
No, you only need to focus on the interfaces for source, sink, and transform. If you want to develop your own connector (Connector V2) for the SeaTunnel API, refer to the **[Connector Development Guide](https://github.com/apache/seatunnel/blob/dev/seatunnel-connectors-v2/README.md)**.
+
+## How to solve java.lang.NoClassDefFoundError: org/apache/hadoop/shaded/com/ctc/wstx/io/InputBootstrapper on JDK 17?
+
+Upgrade the Hadoop client version to 3.3.4.
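A quick way to confirm, when triaging this error, which Hadoop client actually ends up on the classpath is to probe it at runtime. A small diagnostic sketch (assuming the uber jar is on the classpath; the class name comes straight from the error message):

```java
import org.apache.hadoop.util.VersionInfo;

public class HadoopClasspathCheck {
    public static void main(String[] args) throws Exception {
        // Should report 3.3.4 (or later) once the old seatunnel-hadoop3-3.1.4-uber.jar is replaced.
        System.out.println("Hadoop on classpath: " + VersionInfo.getVersion());
        // The class from the error message lives in the shaded hadoop-client-runtime jar;
        // this line throws ClassNotFoundException if the classpath still misses it.
        Class.forName("org.apache.hadoop.shaded.com.ctc.wstx.io.InputBootstrapper");
        System.out.println("shaded woodstox InputBootstrapper is present");
    }
}
```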
diff --git a/docs/en/seatunnel-engine/hybrid-cluster-deployment.md b/docs/en/seatunnel-engine/hybrid-cluster-deployment.md
index ac072c494df..f6f3c835875 100644
--- a/docs/en/seatunnel-engine/hybrid-cluster-deployment.md
+++ b/docs/en/seatunnel-engine/hybrid-cluster-deployment.md
@@ -285,7 +285,7 @@ hadoop-aliyun-3.3.6.jar
jdom2-2.0.6.jar
netty-buffer-4.1.89.Final.jar
netty-common-4.1.89.Final.jar
-seatunnel-hadoop3-3.1.4-uber.jar
+seatunnel-hadoop3-3.3.4-uber.jar
```
## 6. Configure The SeaTunnel Engine Client
diff --git a/docs/en/seatunnel-engine/separated-cluster-deployment.md b/docs/en/seatunnel-engine/separated-cluster-deployment.md
index 91215eb459a..392a24770c4 100644
--- a/docs/en/seatunnel-engine/separated-cluster-deployment.md
+++ b/docs/en/seatunnel-engine/separated-cluster-deployment.md
@@ -277,7 +277,7 @@ hadoop-aliyun-3.3.6.jar
jdom2-2.0.6.jar
netty-buffer-4.1.89.Final.jar
netty-common-4.1.89.Final.jar
-seatunnel-hadoop3-3.1.4-uber.jar
+seatunnel-hadoop3-3.3.4-uber.jar
```
### 4.7 Job Scheduling Strategy
diff --git a/docs/zh/faq.md b/docs/zh/faq.md
index 26867e4a188..aa84d420509 100644
--- a/docs/zh/faq.md
+++ b/docs/zh/faq.md
@@ -123,4 +123,6 @@ SeaTunnel 拥有完全抽象、结构化的非常优秀的架构设计和代码
不需要,您只需要关注 source、sink、transform 对应的接口即可。
如果你想针对 SeaTunnel API 开发自己的连接器(Connector V2),请查看**[Connector Development Guide](https://github.com/apache/seatunnel/blob/dev/seatunnel-connectors-v2/README.zh.md)** 。
+## JDK17环境下,提示java.lang.NoClassDefFoundError: org/apache/hadoop/shaded/com/ctc/wstx/io/InputBootstrapper怎么处理?
+升级hadoop客户端版本为3.3.4。
diff --git a/docs/zh/seatunnel-engine/hybrid-cluster-deployment.md b/docs/zh/seatunnel-engine/hybrid-cluster-deployment.md
index 77805273452..8c36b149799 100644
--- a/docs/zh/seatunnel-engine/hybrid-cluster-deployment.md
+++ b/docs/zh/seatunnel-engine/hybrid-cluster-deployment.md
@@ -284,7 +284,7 @@ hadoop-aliyun-3.3.6.jar
jdom2-2.0.6.jar
netty-buffer-4.1.89.Final.jar
netty-common-4.1.89.Final.jar
-seatunnel-hadoop3-3.1.4-uber.jar
+seatunnel-hadoop3-3.3.4-uber.jar
```
## 6. 配置 SeaTunnel Engine 客户端
diff --git a/docs/zh/seatunnel-engine/separated-cluster-deployment.md b/docs/zh/seatunnel-engine/separated-cluster-deployment.md
index bdc369ff8c0..eef83feca88 100644
--- a/docs/zh/seatunnel-engine/separated-cluster-deployment.md
+++ b/docs/zh/seatunnel-engine/separated-cluster-deployment.md
@@ -281,7 +281,7 @@ hadoop-aliyun-3.3.6.jar
jdom2-2.0.6.jar
netty-buffer-4.1.89.Final.jar
netty-common-4.1.89.Final.jar
-seatunnel-hadoop3-3.1.4-uber.jar
+seatunnel-hadoop3-3.3.4-uber.jar
```
### 4.7 作业调度策略
diff --git a/pom.xml b/pom.xml
index ee528ff743f..de8a98da861 100644
--- a/pom.xml
+++ b/pom.xml
@@ -66,7 +66,7 @@
${java.version}
${java.version}
- 1.7.25
+ 1.7.36
2.17.1
3.4.4
1.2.17
@@ -92,7 +92,7 @@
false
false
false
- 2.22.2
+ 3.3.1
2.22.2
1.6.8
3.0.1
@@ -138,7 +138,9 @@
2.29.0
4.9
2.7.0
+ 4.12.0
4.0.16
+
9.4.56.v20240826
4.0.4
@@ -158,10 +160,8 @@
0.16.0
true
-
- 3.1.4
+ 3.3.4
15.0.1
-
@@ -485,7 +485,7 @@
org.apache.seatunnel
- seatunnel-hadoop3-3.1.4-uber
+ seatunnel-hadoop3-3.3.4-uber
${project.version}
optional
provided
@@ -630,6 +630,7 @@
ch.qos.logback:logback-core
org.apache.logging.log4j:log4j-to-slf4j
+ ${surefire.argLine}
@@ -1076,6 +1077,52 @@
false
+
+ jdk8
+
+ 1.8
+
+
+
+
+
+
+ jdk11
+
+ 11
+
+
+
+
+
+
+ jdk17
+
+ [17,)
+
+
+ --add-exports java.base/sun.net.util=ALL-UNNAMED
+ --add-exports java.rmi/sun.rmi.registry=ALL-UNNAMED
+ --add-exports jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED
+ --add-exports jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED
+ --add-exports jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED
+ --add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED
+ --add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED
+ --add-exports java.security.jgss/sun.security.krb5=ALL-UNNAMED
+ --add-opens java.base/java.lang=ALL-UNNAMED
+ --add-opens java.base/java.net=ALL-UNNAMED
+ --add-opens java.base/java.io=ALL-UNNAMED
+ --add-opens java.base/java.nio=ALL-UNNAMED
+ --add-opens java.base/sun.nio.ch=ALL-UNNAMED
+ --add-opens java.base/java.lang.reflect=ALL-UNNAMED
+ --add-opens java.base/java.text=ALL-UNNAMED
+ --add-opens java.base/java.time=ALL-UNNAMED
+ --add-opens java.base/java.util=ALL-UNNAMED
+ --add-opens java.base/java.util.concurrent=ALL-UNNAMED
+ --add-opens java.base/java.util.concurrent.atomic=ALL-UNNAMED
+ --add-opens java.base/java.util.concurrent.locks=ALL-UNNAMED
+
+
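The new `jdk17` profile above forwards the listed `--add-exports`/`--add-opens` switches into the forked test JVM through `${surefire.argLine}`. One way to verify from inside a test run that the switches actually reached the forked JVM — a diagnostic sketch, not part of this patch:

```java
import java.lang.management.ManagementFactory;
import java.util.List;

public class ModuleFlagsCheck {
    public static void main(String[] args) {
        // Arguments the JVM was launched with, including any --add-opens/--add-exports switches.
        List<String> jvmArgs = ManagementFactory.getRuntimeMXBean().getInputArguments();
        boolean hasModuleFlags = jvmArgs.stream().anyMatch(arg -> arg.contains("ALL-UNNAMED"));
        System.out.println("JVM args: " + jvmArgs);
        System.out.println("module access flags present: " + hasModuleFlags);
    }
}
```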
diff --git a/seatunnel-connectors-v2/connector-amazondynamodb/src/main/java/org/apache/seatunnel/connectors/seatunnel/amazondynamodb/serialize/DefaultSeaTunnelRowDeserializer.java b/seatunnel-connectors-v2/connector-amazondynamodb/src/main/java/org/apache/seatunnel/connectors/seatunnel/amazondynamodb/serialize/DefaultSeaTunnelRowDeserializer.java
index a531befe9a6..a764a38d88f 100644
--- a/seatunnel-connectors-v2/connector-amazondynamodb/src/main/java/org/apache/seatunnel/connectors/seatunnel/amazondynamodb/serialize/DefaultSeaTunnelRowDeserializer.java
+++ b/seatunnel-connectors-v2/connector-amazondynamodb/src/main/java/org/apache/seatunnel/connectors/seatunnel/amazondynamodb/serialize/DefaultSeaTunnelRowDeserializer.java
@@ -23,8 +23,10 @@
import org.apache.seatunnel.api.table.type.SeaTunnelRow;
import org.apache.seatunnel.api.table.type.SeaTunnelRowType;
import org.apache.seatunnel.common.exception.CommonError;
+import org.apache.seatunnel.common.utils.DateTimeUtils;
+import org.apache.seatunnel.common.utils.DateUtils;
-import lombok.AllArgsConstructor;
+import lombok.RequiredArgsConstructor;
import software.amazon.awssdk.core.SdkBytes;
import software.amazon.awssdk.services.dynamodb.model.AttributeValue;
@@ -34,16 +36,29 @@
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
+import java.time.format.DateTimeFormatter;
+import java.time.format.DateTimeFormatterBuilder;
+import java.time.temporal.ChronoField;
+import java.time.temporal.TemporalAccessor;
+import java.time.temporal.TemporalQueries;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-@AllArgsConstructor
+@RequiredArgsConstructor
public class DefaultSeaTunnelRowDeserializer implements SeaTunnelRowDeserializer {
private final SeaTunnelRowType typeInfo;
+ public static DateTimeFormatter TIME_FORMAT =
+ new DateTimeFormatterBuilder()
+ .appendPattern("HH:mm:ss")
+ .appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true)
+ .toFormatter();
+
+ public Map fieldFormatterMap = new HashMap<>();
+
@Override
public SeaTunnelRow deserialize(Map item) {
SeaTunnelDataType>[] seaTunnelDataTypes = typeInfo.getFieldTypes();
@@ -63,7 +78,9 @@ private List
org.apache.seatunnel
- seatunnel-hadoop3-3.1.4-uber
+ seatunnel-hadoop3-3.3.4-uber
${project.version}
optional
test
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-file-s3-e2e/pom.xml b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-file-s3-e2e/pom.xml
index e33b6273a89..af63e69e894 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-file-s3-e2e/pom.xml
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-file-s3-e2e/pom.xml
@@ -25,7 +25,7 @@
connector-file-s3-e2e
SeaTunnel : E2E : Connector V2 : File S3
- 3.1.4
+ 3.3.4
@@ -55,7 +55,7 @@
org.apache.seatunnel
- seatunnel-hadoop3-3.1.4-uber
+ seatunnel-hadoop3-3.3.4-uber
${project.version}
optional
test
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-file-s3-e2e/src/test/java/org/apache/seatunnel/e2e/connector/file/s3/S3FileIT.java b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-file-s3-e2e/src/test/java/org/apache/seatunnel/e2e/connector/file/s3/S3FileIT.java
index 3ef03aad700..8e051264e25 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-file-s3-e2e/src/test/java/org/apache/seatunnel/e2e/connector/file/s3/S3FileIT.java
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-file-s3-e2e/src/test/java/org/apache/seatunnel/e2e/connector/file/s3/S3FileIT.java
@@ -44,7 +44,7 @@ public class S3FileIT extends TestSuiteBase {
public static final String S3_SDK_DOWNLOAD =
"https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/1.11.271/aws-java-sdk-bundle-1.11.271.jar";
public static final String HADOOP_S3_DOWNLOAD =
- "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.1.4/hadoop-aws-3.1.4.jar";
+ "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.3.4/hadoop-aws-3.3.4.jar";
@TestContainerExtension
private final ContainerExtendedFactory extendedFactory =
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-file-s3-e2e/src/test/java/org/apache/seatunnel/e2e/connector/file/s3/S3FileWithMultipleTableIT.java b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-file-s3-e2e/src/test/java/org/apache/seatunnel/e2e/connector/file/s3/S3FileWithMultipleTableIT.java
index 34fd443146c..5038ffc5495 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-file-s3-e2e/src/test/java/org/apache/seatunnel/e2e/connector/file/s3/S3FileWithMultipleTableIT.java
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-file-s3-e2e/src/test/java/org/apache/seatunnel/e2e/connector/file/s3/S3FileWithMultipleTableIT.java
@@ -36,7 +36,7 @@ public class S3FileWithMultipleTableIT extends TestSuiteBase {
public static final String S3_SDK_DOWNLOAD =
"https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/1.11.271/aws-java-sdk-bundle-1.11.271.jar";
public static final String HADOOP_S3_DOWNLOAD =
- "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.1.4/hadoop-aws-3.1.4.jar";
+ "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.3.4/hadoop-aws-3.3.4.jar";
@TestContainerExtension
private final ContainerExtendedFactory extendedFactory =
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hive-e2e/pom.xml b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hive-e2e/pom.xml
index f0373d76ca0..a718583764d 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hive-e2e/pom.xml
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hive-e2e/pom.xml
@@ -50,7 +50,7 @@
org.apache.seatunnel
- seatunnel-hadoop3-3.1.4-uber
+ seatunnel-hadoop3-3.3.4-uber
${project.version}
optional
test
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hive-e2e/src/test/java/org/apache/seatunnel/e2e/connector/hive/HiveIT.java b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hive-e2e/src/test/java/org/apache/seatunnel/e2e/connector/hive/HiveIT.java
index bfa83dfb3b9..aaec0da4587 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hive-e2e/src/test/java/org/apache/seatunnel/e2e/connector/hive/HiveIT.java
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hive-e2e/src/test/java/org/apache/seatunnel/e2e/connector/hive/HiveIT.java
@@ -72,7 +72,7 @@ private String libFb303Url() {
}
private String hadoopAwsUrl() {
- return "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.1.4/hadoop-aws-3.1.4.jar";
+ return "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.3.4/hadoop-aws-3.3.4.jar";
}
private String aliyunSdkOssUrl() {
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hive-e2e/src/test/java/org/apache/seatunnel/e2e/connector/hive/HiveKerberosIT.java b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hive-e2e/src/test/java/org/apache/seatunnel/e2e/connector/hive/HiveKerberosIT.java
index c2fca452fa8..9724d270112 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hive-e2e/src/test/java/org/apache/seatunnel/e2e/connector/hive/HiveKerberosIT.java
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hive-e2e/src/test/java/org/apache/seatunnel/e2e/connector/hive/HiveKerberosIT.java
@@ -87,7 +87,7 @@ private String libFb303Url() {
}
private String hadoopAwsUrl() {
- return "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.1.4/hadoop-aws-3.1.4.jar";
+ return "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.3.4/hadoop-aws-3.3.4.jar";
}
private String aliyunSdkOssUrl() {
@@ -99,7 +99,7 @@ private String jdomUrl() {
}
private String hadoopAliyunUrl() {
- return "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aliyun/3.1.4/hadoop-aliyun-3.1.4.jar";
+ return "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aliyun/3.3.4/hadoop-aliyun-3.3.4.jar";
}
private String hadoopCosUrl() {
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hudi-e2e/src/test/java/org/apache/seatunnel/e2e/connector/hudi/HudiSeatunnelS3MultiTableIT.java b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hudi-e2e/src/test/java/org/apache/seatunnel/e2e/connector/hudi/HudiSeatunnelS3MultiTableIT.java
index 237fd100d26..48e5e5d77dc 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hudi-e2e/src/test/java/org/apache/seatunnel/e2e/connector/hudi/HudiSeatunnelS3MultiTableIT.java
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-hudi-e2e/src/test/java/org/apache/seatunnel/e2e/connector/hudi/HudiSeatunnelS3MultiTableIT.java
@@ -72,7 +72,7 @@ public class HudiSeatunnelS3MultiTableIT extends SeaTunnelContainer {
protected static final String AWS_SDK_DOWNLOAD =
"https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/1.11.271/aws-java-sdk-bundle-1.11.271.jar";
protected static final String HADOOP_AWS_DOWNLOAD =
- "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.1.4/hadoop-aws-3.1.4.jar";
+ "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.3.4/hadoop-aws-3.3.4.jar";
@Override
@BeforeAll
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-iceberg-e2e/pom.xml b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-iceberg-e2e/pom.xml
index 11c147432d0..26618e1332e 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-iceberg-e2e/pom.xml
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-iceberg-e2e/pom.xml
@@ -63,7 +63,7 @@
org.apache.seatunnel
- seatunnel-hadoop3-3.1.4-uber
+ seatunnel-hadoop3-3.3.4-uber
${project.version}
optional
test
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-iceberg-hadoop3-e2e/pom.xml b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-iceberg-hadoop3-e2e/pom.xml
index cb6e73c3177..43bbf84fbb4 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-iceberg-hadoop3-e2e/pom.xml
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-iceberg-hadoop3-e2e/pom.xml
@@ -25,10 +25,6 @@
connector-iceberg-hadoop3-e2e
SeaTunnel : E2E : Connector V2 : Iceberg : Hadoop3
-
- 3.3.4
-
-
org.apache.seatunnel
@@ -49,16 +45,11 @@
test
- org.apache.hadoop
- hadoop-client
- ${hadoop-client.version}
+ org.apache.seatunnel
+ seatunnel-hadoop3-3.3.4-uber
+ ${project.version}
+ optional
test
-
-
- org.slf4j
- slf4j-reload4j
-
-
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-iceberg-s3-e2e/pom.xml b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-iceberg-s3-e2e/pom.xml
index a44c8d630fa..af809997558 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-iceberg-s3-e2e/pom.xml
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-iceberg-s3-e2e/pom.xml
@@ -28,7 +28,6 @@
1.19.1
8.5.6
- 3.1.4
@@ -65,21 +64,10 @@
test
- org.apache.hadoop
- hadoop-client
- ${hadoop3.version}
- test
-
-
- org.slf4j
- slf4j-reload4j
-
-
-
-
- org.apache.hadoop
- hadoop-aws
- ${hadoop3.version}
+ org.apache.seatunnel
+ seatunnel-hadoop3-3.3.4-uber
+ ${project.version}
+ optional
test
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-iceberg-s3-e2e/src/test/java/org/apache/seatunnel/e2e/connector/iceberg/s3/IcebergSourceIT.java b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-iceberg-s3-e2e/src/test/java/org/apache/seatunnel/e2e/connector/iceberg/s3/IcebergSourceIT.java
index 35101528929..1edee84722d 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-iceberg-s3-e2e/src/test/java/org/apache/seatunnel/e2e/connector/iceberg/s3/IcebergSourceIT.java
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-iceberg-s3-e2e/src/test/java/org/apache/seatunnel/e2e/connector/iceberg/s3/IcebergSourceIT.java
@@ -83,12 +83,12 @@
value = {TestContainerId.SPARK_2_4},
type = {EngineType.FLINK, EngineType.SEATUNNEL},
disabledReason =
- "Needs hadoop-aws,aws-java-sdk jar for flink, spark2.4. For the seatunnel engine, it crashes on seatunnel-hadoop3-3.1.4-uber.jar.")
+ "Needs hadoop-aws,aws-java-sdk jar for flink, spark2.4. For the seatunnel engine, it crashes on seatunnel-hadoop3-3.3.4-uber.jar.")
@Slf4j
public class IcebergSourceIT extends TestSuiteBase implements TestResource {
public static final String HADOOP_AWS_DOWNLOAD =
- "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.1.4/hadoop-aws-3.1.4.jar";
+ "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.3.4/hadoop-aws-3.3.4.jar";
public static final String AWS_SDK_DOWNLOAD =
"https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/1.11.271/aws-java-sdk-bundle-1.11.271.jar";
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-jdbc-e2e/connector-jdbc-e2e-part-7/src/test/java/org/apache/seatunnel/connectors/seatunnel/jdbc/JdbcIrisIT.java b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-jdbc-e2e/connector-jdbc-e2e-part-7/src/test/java/org/apache/seatunnel/connectors/seatunnel/jdbc/JdbcIrisIT.java
index 609710b2ca0..ac6245fe80c 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-jdbc-e2e/connector-jdbc-e2e-part-7/src/test/java/org/apache/seatunnel/connectors/seatunnel/jdbc/JdbcIrisIT.java
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-jdbc-e2e/connector-jdbc-e2e-part-7/src/test/java/org/apache/seatunnel/connectors/seatunnel/jdbc/JdbcIrisIT.java
@@ -40,6 +40,8 @@
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.api.condition.DisabledOnJre;
+import org.junit.jupiter.api.condition.JRE;
import org.testcontainers.containers.Container;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.output.Slf4jLogConsumer;
@@ -68,6 +70,7 @@
import java.util.stream.Collectors;
@Slf4j
+@DisabledOnJre(JRE.JAVA_17)
public class JdbcIrisIT extends AbstractJdbcIT {
private static final String IRIS_IMAGE = "intersystems/iris-community:2023.1";
private static final String IRIS_NETWORK_ALIASES = "e2e_irisDb";
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-kafka-e2e/pom.xml b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-kafka-e2e/pom.xml
index 668747b9db1..d27478d52c1 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-kafka-e2e/pom.xml
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-kafka-e2e/pom.xml
@@ -115,11 +115,6 @@
${testcontainer.version}
test
-
- mysql
- mysql-connector-java
- test
-
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-paimon-e2e/pom.xml b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-paimon-e2e/pom.xml
index 71784966f81..3b98b5a8e68 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-paimon-e2e/pom.xml
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-paimon-e2e/pom.xml
@@ -55,7 +55,7 @@
org.apache.seatunnel
- seatunnel-hadoop3-3.1.4-uber
+ seatunnel-hadoop3-3.3.4-uber
optional
test
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-paimon-e2e/src/test/java/org/apache/seatunnel/e2e/connector/paimon/PaimonWithS3IT.java b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-paimon-e2e/src/test/java/org/apache/seatunnel/e2e/connector/paimon/PaimonWithS3IT.java
index 2df1a5e49b2..a1251c07de8 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-paimon-e2e/src/test/java/org/apache/seatunnel/e2e/connector/paimon/PaimonWithS3IT.java
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-paimon-e2e/src/test/java/org/apache/seatunnel/e2e/connector/paimon/PaimonWithS3IT.java
@@ -52,7 +52,7 @@ public class PaimonWithS3IT extends SeaTunnelContainer {
protected static final String AWS_SDK_DOWNLOAD =
"https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/1.11.271/aws-java-sdk-bundle-1.11.271.jar";
protected static final String HADOOP_AWS_DOWNLOAD =
- "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.1.4/hadoop-aws-3.1.4.jar";
+ "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.3.4/hadoop-aws-3.3.4.jar";
@Override
@BeforeAll
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-rabbitmq-e2e/src/test/java/org/apache/seatunnel/e2e/connector/rabbitmq/RabbitmqIT.java b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-rabbitmq-e2e/src/test/java/org/apache/seatunnel/e2e/connector/rabbitmq/RabbitmqIT.java
index a846949d857..340116427df 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-rabbitmq-e2e/src/test/java/org/apache/seatunnel/e2e/connector/rabbitmq/RabbitmqIT.java
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-rabbitmq-e2e/src/test/java/org/apache/seatunnel/e2e/connector/rabbitmq/RabbitmqIT.java
@@ -27,6 +27,8 @@
import org.apache.seatunnel.api.table.type.SeaTunnelRow;
import org.apache.seatunnel.api.table.type.SeaTunnelRowType;
import org.apache.seatunnel.common.Handover;
+import org.apache.seatunnel.common.utils.DateTimeUtils;
+import org.apache.seatunnel.common.utils.DateTimeUtils.Formatter;
import org.apache.seatunnel.connectors.seatunnel.rabbitmq.client.RabbitmqClient;
import org.apache.seatunnel.connectors.seatunnel.rabbitmq.config.RabbitmqConfig;
import org.apache.seatunnel.e2e.common.TestResource;
@@ -83,36 +85,9 @@ public class RabbitmqIT extends TestSuiteBase implements TestResource {
generateTestDataSet();
private static final JsonSerializationSchema JSON_SERIALIZATION_SCHEMA =
new JsonSerializationSchema(TEST_DATASET.getKey());
-
- private GenericContainer> rabbitmqContainer;
Connection connection;
RabbitmqClient rabbitmqClient;
-
- @BeforeAll
- @Override
- public void startUp() throws Exception {
- this.rabbitmqContainer =
- new GenericContainer<>(DockerImageName.parse(IMAGE))
- .withNetwork(NETWORK)
- .withNetworkAliases(HOST)
- .withExposedPorts(PORT, 15672)
- .withLogConsumer(new Slf4jLogConsumer(DockerLoggerFactory.getLogger(IMAGE)))
- .waitingFor(
- new HostPortWaitStrategy()
- .withStartupTimeout(Duration.ofMinutes(2)));
- Startables.deepStart(Stream.of(rabbitmqContainer)).join();
- log.info("rabbitmq container started");
- this.initRabbitMQ();
- }
-
- private void initSourceData() throws IOException, InterruptedException {
- List rows = TEST_DATASET.getValue();
- for (int i = 0; i < rows.size(); i++) {
- rabbitmqClient.write(
- new String(JSON_SERIALIZATION_SCHEMA.serialize(rows.get(1)))
- .getBytes(StandardCharsets.UTF_8));
- }
- }
+ private GenericContainer> rabbitmqContainer;
private static Pair> generateTestDataSet() {
@@ -155,6 +130,8 @@ private static Pair> generateTestDataSet()
List rows = new ArrayList<>();
for (int i = 0; i < 10; i++) {
+ final String datetime = "2023-12-22 00:00:00";
+ LocalDateTime parse = DateTimeUtils.parse(datetime, Formatter.YYYY_MM_DD_HH_MM_SS);
SeaTunnelRow row =
new SeaTunnelRow(
new Object[] {
@@ -172,13 +149,48 @@ private static Pair> generateTestDataSet()
BigDecimal.valueOf(11, 1),
"test".getBytes(),
LocalDate.now(),
- LocalDateTime.now()
+ parse
});
rows.add(row);
}
return Pair.of(rowType, rows);
}
+ @BeforeAll
+ @Override
+ public void startUp() throws Exception {
+ this.rabbitmqContainer =
+ new GenericContainer<>(DockerImageName.parse(IMAGE))
+ .withNetwork(NETWORK)
+ .withNetworkAliases(HOST)
+ .withExposedPorts(PORT, 15672)
+ .withLogConsumer(new Slf4jLogConsumer(DockerLoggerFactory.getLogger(IMAGE)))
+ .waitingFor(
+ new HostPortWaitStrategy()
+ .withStartupTimeout(Duration.ofMinutes(2)));
+ Startables.deepStart(Stream.of(rabbitmqContainer)).join();
+ log.info("rabbitmq container started");
+ this.initRabbitMQ();
+ }
+
+ @AfterAll
+ @Override
+ public void tearDown() throws Exception {
+ if (connection != null) {
+ connection.close();
+ }
+ rabbitmqContainer.close();
+ }
+
+ private void initSourceData() throws IOException, InterruptedException {
+ List rows = TEST_DATASET.getValue();
+ for (int i = 0; i < rows.size(); i++) {
+ rabbitmqClient.write(
+ new String(JSON_SERIALIZATION_SCHEMA.serialize(rows.get(1)))
+ .getBytes(StandardCharsets.UTF_8));
+ }
+ }
+
private void initRabbitMQ() {
try {
RabbitmqConfig config = new RabbitmqConfig();
@@ -216,15 +228,6 @@ private RabbitmqClient initSinkRabbitMQ() {
}
}
- @AfterAll
- @Override
- public void tearDown() throws Exception {
- if (connection != null) {
- connection.close();
- }
- rabbitmqContainer.close();
- }
-
@TestTemplate
public void testRabbitMQ(TestContainer container) throws Exception {
// send data to source queue before executeJob start in every testContainer
@@ -254,13 +257,8 @@ public void testRabbitMQ(TestContainer container) throws Exception {
sinkRabbitmqClient.close();
// assert source and sink data
Assertions.assertTrue(resultSet.size() > 0);
- Assertions.assertTrue(
- resultSet.stream()
- .findAny()
- .get()
- .equals(
- new String(
- JSON_SERIALIZATION_SCHEMA.serialize(
- TEST_DATASET.getValue().get(1)))));
+ Assertions.assertEquals(
+ resultSet.stream().findAny().get(),
+ new String(JSON_SERIALIZATION_SCHEMA.serialize(TEST_DATASET.getValue().get(1))));
}
}
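The RabbitMQ test above swaps `LocalDateTime.now()` for a fixed, parsed timestamp and tightens the assertion to `assertEquals`, presumably because `now()` carries different sub-second precision on JDK 8 versus JDK 9+/17, which makes serialized rows unstable across runtimes. A minimal sketch of the idea using plain `java.time` (the test itself goes through the project's `DateTimeUtils`):

```java
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;

public class DeterministicTimestampDemo {
    public static void main(String[] args) {
        // On JDK 8 now() typically carries millisecond precision, on JDK 9+/17 microseconds,
        // so serialized values can differ between runtimes and between write and assert.
        System.out.println(LocalDateTime.now());

        // Two ways to make test data stable: parse a fixed literal (what the test does now) ...
        LocalDateTime fixed = LocalDateTime.parse("2023-12-22T00:00:00");
        // ... or truncate a generated value to a precision every JDK reproduces identically.
        LocalDateTime truncated = LocalDateTime.now().truncatedTo(ChronoUnit.SECONDS);
        System.out.println(fixed + " / " + truncated);
    }
}
```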
diff --git a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-starrocks-e2e/pom.xml b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-starrocks-e2e/pom.xml
index 05829ea893d..cbc61f1fbe6 100644
--- a/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-starrocks-e2e/pom.xml
+++ b/seatunnel-e2e/seatunnel-connector-v2-e2e/connector-starrocks-e2e/pom.xml
@@ -26,7 +26,7 @@
SeaTunnel : E2E : Connector V2 : StarRocks
- 8.0.27
+ 8.0.32
diff --git a/seatunnel-e2e/seatunnel-e2e-common/src/test/java/org/apache/seatunnel/e2e/common/container/seatunnel/ConnectorPackageServiceContainer.java b/seatunnel-e2e/seatunnel-e2e-common/src/test/java/org/apache/seatunnel/e2e/common/container/seatunnel/ConnectorPackageServiceContainer.java
index 54804d10575..ada1bd06476 100644
--- a/seatunnel-e2e/seatunnel-e2e-common/src/test/java/org/apache/seatunnel/e2e/common/container/seatunnel/ConnectorPackageServiceContainer.java
+++ b/seatunnel-e2e/seatunnel-e2e-common/src/test/java/org/apache/seatunnel/e2e/common/container/seatunnel/ConnectorPackageServiceContainer.java
@@ -84,8 +84,8 @@ public void startUp() throws Exception {
server1.withCopyFileToContainer(
MountableFile.forHostPath(
PROJECT_ROOT_PATH
- + "/seatunnel-shade/seatunnel-hadoop3-3.1.4-uber/target/seatunnel-hadoop3-3.1.4-uber.jar"),
- Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.1.4-uber.jar").toString());
+ + "/seatunnel-shade/seatunnel-hadoop3-3.3.4-uber/target/seatunnel-hadoop3-3.3.4-uber.jar"),
+ Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.3.4-uber.jar").toString());
server2 =
new GenericContainer<>(getDockerImage())
@@ -110,8 +110,8 @@ public void startUp() throws Exception {
server2.withCopyFileToContainer(
MountableFile.forHostPath(
PROJECT_ROOT_PATH
- + "/seatunnel-shade/seatunnel-hadoop3-3.1.4-uber/target/seatunnel-hadoop3-3.1.4-uber.jar"),
- Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.1.4-uber.jar").toString());
+ + "/seatunnel-shade/seatunnel-hadoop3-3.3.4-uber/target/seatunnel-hadoop3-3.3.4-uber.jar"),
+ Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.3.4-uber.jar").toString());
server3 =
new GenericContainer<>(getDockerImage())
@@ -136,8 +136,8 @@ public void startUp() throws Exception {
server3.withCopyFileToContainer(
MountableFile.forHostPath(
PROJECT_ROOT_PATH
- + "/seatunnel-shade/seatunnel-hadoop3-3.1.4-uber/target/seatunnel-hadoop3-3.1.4-uber.jar"),
- Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.1.4-uber.jar").toString());
+ + "/seatunnel-shade/seatunnel-hadoop3-3.3.4-uber/target/seatunnel-hadoop3-3.3.4-uber.jar"),
+ Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.3.4-uber.jar").toString());
Startables.deepStart(Stream.of(server1)).join();
Startables.deepStart(Stream.of(server2)).join();
diff --git a/seatunnel-e2e/seatunnel-e2e-common/src/test/java/org/apache/seatunnel/e2e/common/container/seatunnel/SeaTunnelContainer.java b/seatunnel-e2e/seatunnel-e2e-common/src/test/java/org/apache/seatunnel/e2e/common/container/seatunnel/SeaTunnelContainer.java
index ad586153f46..a9c6cd0dc15 100644
--- a/seatunnel-e2e/seatunnel-e2e-common/src/test/java/org/apache/seatunnel/e2e/common/container/seatunnel/SeaTunnelContainer.java
+++ b/seatunnel-e2e/seatunnel-e2e-common/src/test/java/org/apache/seatunnel/e2e/common/container/seatunnel/SeaTunnelContainer.java
@@ -126,8 +126,8 @@ private GenericContainer> createSeaTunnelServer(Network NETWORK)
server.withCopyFileToContainer(
MountableFile.forHostPath(
PROJECT_ROOT_PATH
- + "/seatunnel-shade/seatunnel-hadoop3-3.1.4-uber/target/seatunnel-hadoop3-3.1.4-uber.jar"),
- Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.1.4-uber.jar").toString());
+ + "/seatunnel-shade/seatunnel-hadoop3-3.3.4-uber/target/seatunnel-hadoop3-3.3.4-uber.jar"),
+ Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.3.4-uber.jar").toString());
// execute extra commands
executeExtraCommands(server);
@@ -175,8 +175,8 @@ protected GenericContainer> createSeaTunnelContainerWithFakeSourceAndInMemoryS
server.withCopyFileToContainer(
MountableFile.forHostPath(
PROJECT_ROOT_PATH
- + "/seatunnel-shade/seatunnel-hadoop3-3.1.4-uber/target/seatunnel-hadoop3-3.1.4-uber.jar"),
- Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.1.4-uber.jar").toString());
+ + "/seatunnel-shade/seatunnel-hadoop3-3.3.4-uber/target/seatunnel-hadoop3-3.3.4-uber.jar"),
+ Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.3.4-uber.jar").toString());
server.start();
// execute extra commands
diff --git a/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/pom.xml b/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/pom.xml
index 6fbdfc826bf..2d8e599900a 100644
--- a/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/pom.xml
+++ b/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/pom.xml
@@ -66,7 +66,7 @@
org.apache.seatunnel
- seatunnel-hadoop3-3.1.4-uber
+ seatunnel-hadoop3-3.3.4-uber
${project.version}
optional
test
diff --git a/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/CheckpointEnableIT.java b/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/CheckpointEnableIT.java
index 661da1b7cdc..b94cf79c625 100644
--- a/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/CheckpointEnableIT.java
+++ b/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/CheckpointEnableIT.java
@@ -46,7 +46,9 @@
import static org.awaitility.Awaitility.await;
@Slf4j
-@DisabledOnJre(value = JRE.JAVA_11, disabledReason = "slf4j jar conflict, we should fix it later")
+@DisabledOnJre(
+ value = {JRE.JAVA_11, JRE.JAVA_17},
+ disabledReason = "slf4j jar conflict, we should fix it later")
public class CheckpointEnableIT extends TestSuiteBase {
@TestTemplate
diff --git a/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/ClusterSeaTunnelContainer.java b/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/ClusterSeaTunnelContainer.java
index 6bdf1c24153..7a572cf08f0 100644
--- a/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/ClusterSeaTunnelContainer.java
+++ b/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/ClusterSeaTunnelContainer.java
@@ -73,7 +73,7 @@ public class ClusterSeaTunnelContainer extends SeaTunnelContainer {
private static final Path binPath = Paths.get(SEATUNNEL_HOME, "bin", SERVER_SHELL);
private static final Path config = Paths.get(SEATUNNEL_HOME, "config");
private static final Path hadoopJar =
- Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.1.4-uber.jar");
+ Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.3.4-uber.jar");
private static final long CUSTOM_JOB_ID_1 = 862969647010611201L;
@@ -1324,7 +1324,7 @@ private GenericContainer> createServer(String networkAlias)
server.withCopyFileToContainer(
MountableFile.forHostPath(
PROJECT_ROOT_PATH
- + "/seatunnel-shade/seatunnel-hadoop3-3.1.4-uber/target/seatunnel-hadoop3-3.1.4-uber.jar"),
+ + "/seatunnel-shade/seatunnel-hadoop3-3.3.4-uber/target/seatunnel-hadoop3-3.3.4-uber.jar"),
hadoopJar.toString());
server.start();
// execute extra commands
diff --git a/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/joblog/JobLogIT.java b/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/joblog/JobLogIT.java
index 18d08b7506e..d385571299b 100644
--- a/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/joblog/JobLogIT.java
+++ b/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/joblog/JobLogIT.java
@@ -66,7 +66,7 @@ public class JobLogIT extends SeaTunnelContainer {
private static final Path BIN_PATH = Paths.get(SEATUNNEL_HOME, "bin", SERVER_SHELL);
private static final Path CONFIG_PATH = Paths.get(SEATUNNEL_HOME, "config");
private static final Path HADOOP_JAR_PATH =
- Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.1.4-uber.jar");
+ Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.3.4-uber.jar");
private GenericContainer> secondServer;
private final Network NETWORK = Network.newNetwork();
@@ -310,7 +310,7 @@ private GenericContainer> createServer(String networkAlias)
server.withCopyFileToContainer(
MountableFile.forHostPath(
PROJECT_ROOT_PATH
- + "/seatunnel-shade/seatunnel-hadoop3-3.1.4-uber/target/seatunnel-hadoop3-3.1.4-uber.jar"),
+ + "/seatunnel-shade/seatunnel-hadoop3-3.3.4-uber/target/seatunnel-hadoop3-3.3.4-uber.jar"),
HADOOP_JAR_PATH.toString());
server.withCopyFileToContainer(
MountableFile.forHostPath(
diff --git a/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/telemetry/MasterWorkerClusterSeaTunnelWithTelemetryIT.java b/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/telemetry/MasterWorkerClusterSeaTunnelWithTelemetryIT.java
index 2eca7f5e84b..d0c28519f24 100644
--- a/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/telemetry/MasterWorkerClusterSeaTunnelWithTelemetryIT.java
+++ b/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/telemetry/MasterWorkerClusterSeaTunnelWithTelemetryIT.java
@@ -67,7 +67,7 @@ public class MasterWorkerClusterSeaTunnelWithTelemetryIT extends SeaTunnelContai
private static final Path binPath = Paths.get(SEATUNNEL_HOME, "bin", SERVER_SHELL);
private static final Path config = Paths.get(SEATUNNEL_HOME, "config");
private static final Path hadoopJar =
- Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.1.4-uber.jar");
+ Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.3.4-uber.jar");
@Test
public void testSubmitJobs() throws InterruptedException {
@@ -695,7 +695,7 @@ private GenericContainer> createServer(String networkAlias, String role)
server.withCopyFileToContainer(
MountableFile.forHostPath(
PROJECT_ROOT_PATH
- + "/seatunnel-shade/seatunnel-hadoop3-3.1.4-uber/target/seatunnel-hadoop3-3.1.4-uber.jar"),
+ + "/seatunnel-shade/seatunnel-hadoop3-3.3.4-uber/target/seatunnel-hadoop3-3.3.4-uber.jar"),
hadoopJar.toString());
server.start();
// execute extra commands
diff --git a/seatunnel-e2e/seatunnel-engine-e2e/seatunnel-engine-k8s-e2e/src/test/java/org/apache/seatunnel/engine/e2e/k8s/KubernetesIT.java b/seatunnel-e2e/seatunnel-engine-e2e/seatunnel-engine-k8s-e2e/src/test/java/org/apache/seatunnel/engine/e2e/k8s/KubernetesIT.java
index ce2b73fb10c..89b73872ea8 100644
--- a/seatunnel-e2e/seatunnel-engine-e2e/seatunnel-engine-k8s-e2e/src/test/java/org/apache/seatunnel/engine/e2e/k8s/KubernetesIT.java
+++ b/seatunnel-e2e/seatunnel-engine-e2e/seatunnel-engine-k8s-e2e/src/test/java/org/apache/seatunnel/engine/e2e/k8s/KubernetesIT.java
@@ -186,8 +186,8 @@ private void copyFileToCurrentResources(String hazelCastConfigFile, String targe
Files.copy(
Paths.get(
PROJECT_ROOT_PATH
- + "/seatunnel-shade/seatunnel-hadoop3-3.1.4-uber/target/seatunnel-hadoop3-3.1.4-uber.jar"),
- Paths.get(targetPath + "/jars/seatunnel-hadoop3-3.1.4-uber.jar"),
+ + "/seatunnel-shade/seatunnel-hadoop3-3.3.4-uber/target/seatunnel-hadoop3-3.3.4-uber.jar"),
+ Paths.get(targetPath + "/jars/seatunnel-hadoop3-3.3.4-uber.jar"),
StandardCopyOption.REPLACE_EXISTING);
Files.copy(
Paths.get(
diff --git a/seatunnel-e2e/seatunnel-engine-e2e/seatunnel-engine-k8s-e2e/src/test/resources/seatunnel_dockerfile b/seatunnel-e2e/seatunnel-engine-e2e/seatunnel-engine-k8s-e2e/src/test/resources/seatunnel_dockerfile
index d0a230230f2..844b1be5682 100644
--- a/seatunnel-e2e/seatunnel-engine-e2e/seatunnel-engine-k8s-e2e/src/test/resources/seatunnel_dockerfile
+++ b/seatunnel-e2e/seatunnel-engine-e2e/seatunnel-engine-k8s-e2e/src/test/resources/seatunnel_dockerfile
@@ -17,7 +17,7 @@
FROM openjdk:8u162-jdk
ENV SEATUNNEL_HOME="/opt/seatunnel"
-COPY /jars/seatunnel-hadoop3-3.1.4-uber.jar ${SEATUNNEL_HOME}/lib/seatunnel-hadoop3-3.1.4-uber.jar
+COPY /jars/seatunnel-hadoop3-3.3.4-uber.jar ${SEATUNNEL_HOME}/lib/seatunnel-hadoop3-3.3.4-uber.jar
COPY /jars/seatunnel-transforms-v2.jar ${SEATUNNEL_HOME}/lib/sseatunnel-transforms-v2.jar
COPY /jars/seatunnel-starter.jar ${SEATUNNEL_HOME}/starter/seatunnel-starter.jar
COPY /bin ${SEATUNNEL_HOME}/bin
diff --git a/seatunnel-engine/seatunnel-engine-client/pom.xml b/seatunnel-engine/seatunnel-engine-client/pom.xml
index dcad2e4f184..bd7e430d706 100644
--- a/seatunnel-engine/seatunnel-engine-client/pom.xml
+++ b/seatunnel-engine/seatunnel-engine-client/pom.xml
@@ -88,7 +88,7 @@
org.apache.seatunnel
- seatunnel-hadoop3-3.1.4-uber
+ seatunnel-hadoop3-3.3.4-uber
${project.version}
optional
test
diff --git a/seatunnel-engine/seatunnel-engine-server/pom.xml b/seatunnel-engine/seatunnel-engine-server/pom.xml
index 9bd13dcc4aa..2b577934bed 100644
--- a/seatunnel-engine/seatunnel-engine-server/pom.xml
+++ b/seatunnel-engine/seatunnel-engine-server/pom.xml
@@ -103,11 +103,16 @@
org.apache.seatunnel
- seatunnel-hadoop3-3.1.4-uber
+ seatunnel-hadoop3-3.3.4-uber
${project.version}
optional
provided
+
+ com.squareup.okhttp3
+ okhttp
+ ${okhttp.version}
+
org.apache.seatunnel
seatunnel-e2e-common
@@ -120,6 +125,12 @@
mockwebserver
2.7.5
test
+
+
+ com.squareup.okio
+ okio
+
+
org.junit-pioneer
diff --git a/seatunnel-engine/seatunnel-engine-server/src/main/java/org/apache/seatunnel/engine/server/event/JobEventHttpReportHandler.java b/seatunnel-engine/seatunnel-engine-server/src/main/java/org/apache/seatunnel/engine/server/event/JobEventHttpReportHandler.java
index 1182c726730..37dca3f5d20 100644
--- a/seatunnel-engine/seatunnel-engine-server/src/main/java/org/apache/seatunnel/engine/server/event/JobEventHttpReportHandler.java
+++ b/seatunnel-engine/seatunnel-engine-server/src/main/java/org/apache/seatunnel/engine/server/event/JobEventHttpReportHandler.java
@@ -28,13 +28,13 @@
import com.hazelcast.ringbuffer.ReadResultSet;
import com.hazelcast.ringbuffer.Ringbuffer;
import com.hazelcast.ringbuffer.impl.RingbufferProxy;
-import com.squareup.okhttp.MediaType;
-import com.squareup.okhttp.OkHttpClient;
-import com.squareup.okhttp.Request;
-import com.squareup.okhttp.RequestBody;
-import com.squareup.okhttp.Response;
-import com.squareup.okhttp.ResponseBody;
import lombok.extern.slf4j.Slf4j;
+import okhttp3.MediaType;
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+import okhttp3.ResponseBody;
import java.io.IOException;
import java.time.Duration;
@@ -147,9 +147,9 @@ public void close() {
}
private OkHttpClient createHttpClient() {
- OkHttpClient client = new OkHttpClient();
- client.setConnectTimeout(30, TimeUnit.SECONDS);
- client.setWriteTimeout(10, TimeUnit.SECONDS);
- return client;
+ OkHttpClient.Builder builder = new OkHttpClient.Builder();
+ builder.connectTimeout(30, TimeUnit.SECONDS);
+ builder.writeTimeout(10, TimeUnit.SECONDS);
+ return builder.build();
}
}
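The handler above moves from the legacy `com.squareup.okhttp` client to `okhttp3`, where timeouts are configured on a builder instead of by mutating the client. A minimal usage sketch of the new API for a JSON POST, assuming a reachable endpoint (the URL and payload are illustrative, not part of this patch):

```java
import java.util.concurrent.TimeUnit;

import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;

public class OkHttp3PostExample {
    public static void main(String[] args) throws Exception {
        // Timeouts are fixed at build time; the resulting client is immutable and reusable.
        OkHttpClient client =
                new OkHttpClient.Builder()
                        .connectTimeout(30, TimeUnit.SECONDS)
                        .writeTimeout(10, TimeUnit.SECONDS)
                        .build();

        RequestBody body =
                RequestBody.create(
                        MediaType.parse("application/json; charset=utf-8"),
                        "[{\"eventType\":\"JOB_STATUS\"}]");
        Request request =
                new Request.Builder().url("http://localhost:8080/events").post(body).build();

        // try-with-resources closes the response body, which okhttp3 requires.
        try (Response response = client.newCall(request).execute()) {
            System.out.println("HTTP " + response.code());
        }
    }
}
```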
diff --git a/seatunnel-engine/seatunnel-engine-storage/checkpoint-storage-plugins/checkpoint-storage-hdfs/pom.xml b/seatunnel-engine/seatunnel-engine-storage/checkpoint-storage-plugins/checkpoint-storage-hdfs/pom.xml
index 8ae75cddd55..5918fe190af 100644
--- a/seatunnel-engine/seatunnel-engine-storage/checkpoint-storage-plugins/checkpoint-storage-hdfs/pom.xml
+++ b/seatunnel-engine/seatunnel-engine-storage/checkpoint-storage-plugins/checkpoint-storage-hdfs/pom.xml
@@ -37,7 +37,7 @@
org.apache.seatunnel
- seatunnel-hadoop3-3.1.4-uber
+ seatunnel-hadoop3-3.3.4-uber
${project.version}
optional
provided
diff --git a/seatunnel-engine/seatunnel-engine-storage/imap-storage-plugins/imap-storage-file/pom.xml b/seatunnel-engine/seatunnel-engine-storage/imap-storage-plugins/imap-storage-file/pom.xml
index c7eb61012e6..751890ce136 100644
--- a/seatunnel-engine/seatunnel-engine-storage/imap-storage-plugins/imap-storage-file/pom.xml
+++ b/seatunnel-engine/seatunnel-engine-storage/imap-storage-plugins/imap-storage-file/pom.xml
@@ -34,7 +34,7 @@
3.0.0
2.4.7
- 3.1.4
+ 3.3.4
4.1.60.Final
@@ -47,7 +47,7 @@
org.apache.seatunnel
- seatunnel-hadoop3-3.1.4-uber
+ seatunnel-hadoop3-3.3.4-uber
${project.version}
optional
provided
diff --git a/seatunnel-examples/seatunnel-engine-examples/pom.xml b/seatunnel-examples/seatunnel-engine-examples/pom.xml
index 5ebe8ab7305..d80f5b6b5ac 100644
--- a/seatunnel-examples/seatunnel-engine-examples/pom.xml
+++ b/seatunnel-examples/seatunnel-engine-examples/pom.xml
@@ -44,7 +44,7 @@
org.apache.seatunnel
- seatunnel-hadoop3-3.1.4-uber
+ seatunnel-hadoop3-3.3.4-uber
${project.version}
diff --git a/seatunnel-shade/pom.xml b/seatunnel-shade/pom.xml
index 0011f6b37d2..5c1020de95b 100644
--- a/seatunnel-shade/pom.xml
+++ b/seatunnel-shade/pom.xml
@@ -27,7 +27,7 @@
SeaTunnel : Shade :
- seatunnel-hadoop3-3.1.4-uber
+ seatunnel-hadoop3-3.3.4-uber
seatunnel-jackson
seatunnel-guava
seatunnel-thrift-service
diff --git a/seatunnel-shade/seatunnel-hadoop3-3.1.4-uber/pom.xml b/seatunnel-shade/seatunnel-hadoop3-3.3.4-uber/pom.xml
similarity index 85%
rename from seatunnel-shade/seatunnel-hadoop3-3.1.4-uber/pom.xml
rename to seatunnel-shade/seatunnel-hadoop3-3.3.4-uber/pom.xml
index 5b22bb67e8a..bc18d043766 100644
--- a/seatunnel-shade/seatunnel-hadoop3-3.1.4-uber/pom.xml
+++ b/seatunnel-shade/seatunnel-hadoop3-3.3.4-uber/pom.xml
@@ -22,11 +22,11 @@
${revision}
- seatunnel-hadoop3-3.1.4-uber
+ seatunnel-hadoop3-3.3.4-uber
SeaTunnel : Shade : Hadoop3
- 3.1.4
+ 3.3.4
27.0-jre
@@ -46,6 +46,27 @@
org.apache.hadoop
hadoop-client
${hadoop3.version}
+
+
+ ch.qos.reload4j
+ reload4j
+
+
+
+
+ org.apache.hadoop
+ hadoop-client-runtime
+ ${hadoop3.version}
+
+
+ org.apache.hadoop
+ hadoop-client-api
+ ${hadoop3.version}
+
+
+ org.apache.hadoop
+ hadoop-aws
+ ${hadoop3.version}
org.xerial.snappy
@@ -66,7 +87,7 @@
package
- seatunnel-hadoop3-3.1.4-uber
+ seatunnel-hadoop3-3.3.4-uber
${enableSourceJarCreation}
true
false
@@ -121,7 +142,7 @@
- ${basedir}/target/seatunnel-hadoop3-3.1.4-uber.jar
+ ${basedir}/target/seatunnel-hadoop3-3.3.4-uber.jar
jar
optional
diff --git a/seatunnel-translation/seatunnel-translation-spark/seatunnel-translation-spark-3.3/src/test/java/org/apache/seatunnel/translation/spark/sink/SparkSinkTest.java b/seatunnel-translation/seatunnel-translation-spark/seatunnel-translation-spark-3.3/src/test/java/org/apache/seatunnel/translation/spark/sink/SparkSinkTest.java
index 2e0d0f3f0d3..494fe2e8199 100644
--- a/seatunnel-translation/seatunnel-translation-spark/seatunnel-translation-spark-3.3/src/test/java/org/apache/seatunnel/translation/spark/sink/SparkSinkTest.java
+++ b/seatunnel-translation/seatunnel-translation-spark/seatunnel-translation-spark-3.3/src/test/java/org/apache/seatunnel/translation/spark/sink/SparkSinkTest.java
@@ -61,7 +61,7 @@ public class SparkSinkTest {
@Test
@DisabledOnJre(
- value = JRE.JAVA_11,
+ value = {JRE.JAVA_11, JRE.JAVA_17},
disabledReason =
"We should update apache common lang3 version to 3.8 to avoid NPE, "
+ "see https://github.com/apache/commons-lang/commit/50ce8c44e1601acffa39f5568f0fc140aade0564")
diff --git a/tools/dependencies/checkLicense.sh b/tools/dependencies/checkLicense.sh
index 836ca91d354..99f5baa842f 100755
--- a/tools/dependencies/checkLicense.sh
+++ b/tools/dependencies/checkLicense.sh
@@ -23,7 +23,7 @@ if [ -d "/tmp/seatunnel-dependencies" ]; then
rm -rf /tmp/seatunnel-dependencies/*
fi
-./mvnw clean -pl '!seatunnel-dist' --batch-mode --no-snapshot-updates dependency:copy-dependencies -DincludeScope=runtime -DoutputDirectory=/tmp/seatunnel-dependencies
+./mvnw clean -pl '!seatunnel-dist' --batch-mode --no-snapshot-updates dependency:copy-dependencies -Prelease -DincludeScope=runtime -DoutputDirectory=/tmp/seatunnel-dependencies
# List all modules(jars) that belong to the SeaTunnel itself, these will be ignored when checking the dependency
ls /tmp/seatunnel-dependencies | sort > all-dependencies.txt
diff --git a/tools/dependencies/known-dependencies.txt b/tools/dependencies/known-dependencies.txt
index ce7d25b445f..939ad30b871 100755
--- a/tools/dependencies/known-dependencies.txt
+++ b/tools/dependencies/known-dependencies.txt
@@ -15,7 +15,7 @@ jackson-core-2.13.3.jar
jackson-databind-2.13.3.jar
jackson-dataformat-properties-2.13.3.jar
jackson-datatype-jsr310-2.13.3.jar
-jcl-over-slf4j-1.7.25.jar
+jcl-over-slf4j-1.7.36.jar
jcommander-1.81.jar
log4j-api-2.17.1.jar
log4j-core-2.17.1.jar
@@ -29,7 +29,7 @@ scala-library-2.12.15.jar
seatunnel-jackson-2.3.9-SNAPSHOT-optional.jar
seatunnel-guava-2.3.9-SNAPSHOT-optional.jar
seatunnel-hazelcast-shade-2.3.9-SNAPSHOT-optional.jar
-slf4j-api-1.7.25.jar
+slf4j-api-1.7.36.jar
jsqlparser-4.9.jar
animal-sniffer-annotations-1.17.jar
checker-qual-3.10.0.jar
@@ -45,6 +45,14 @@ json-smart-2.4.7.jar
accessors-smart-2.4.7.jar
asm-9.1.jar
avro-1.11.1.jar
+annotations-13.0.jar
+kotlin-stdlib-1.8.21.jar
+kotlin-stdlib-common-1.9.10.jar
+kotlin-stdlib-jdk7-1.8.21.jar
+kotlin-stdlib-jdk8-1.8.21.jar
+okhttp-4.12.0.jar
+okio-3.6.0.jar
+okio-jvm-3.6.0.jar
groovy-4.0.16.jar
seatunnel-janino-2.3.9-SNAPSHOT-optional.jar
protobuf-java-util-3.25.3.jar
@@ -65,8 +73,8 @@ jetty-io-9.4.56.v20240826.jar
jetty-security-9.4.56.v20240826.jar
jetty-server-9.4.56.v20240826.jar
jetty-servlet-9.4.56.v20240826.jar
-jetty-util-9.4.20.v20190813.jar
jetty-util-9.4.56.v20240826.jar
+jetty-util-9.4.43.v20210629.jar
jetty-util-ajax-9.4.56.v20240826.jar
javax.servlet-api-3.1.0.jar
seatunnel-jetty9-9.4.56-2.3.9-SNAPSHOT-optional.jar