
Commit 344ca12

Try to fix some things
1 parent 148957f commit 344ca12

File tree: 6 files changed, +15 −75 lines


.github/workflows/iceberg_test.yaml

Lines changed: 0 additions & 64 deletions
This file was deleted.

.github/workflows/kernel_test.yaml

Lines changed: 2 additions & 1 deletion
@@ -30,5 +30,6 @@ jobs:
 
       # Run unit tests with JDK 17. These unit tests depend on Spark, and Spark 4.0+ is JDK 17.
       - name: Run unit tests (JDK 17)
+        # Disable coverage for now because it compiles all projects & causes a flink failure
         run: |
-          python run-tests.py --group kernel --coverage
+          python run-tests.py --group kernel

.github/workflows/spark_examples_test.yaml

Lines changed: 1 addition & 1 deletion
@@ -48,6 +48,6 @@ jobs:
         # Thus, we need to publishM2 first so those jars are locally accessible.
         run: |
           build/sbt clean
-          build/sbt "++ $SCALA_VERSION publishM2"
+          build/sbt "++ $SCALA_VERSION sparkGroup/publishM2"
           cd examples/scala && build/sbt "++ $SCALA_VERSION compile"
         if: steps.git-diff.outputs.diff
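
This change narrows the publish step from the whole build to a single sbt aggregate, so only the Spark-related jars the examples need are published to the local Maven repo. A minimal sketch of the mechanism it relies on, assuming `sparkGroup` is an aggregate project defined in build.sbt (the member project names below are placeholders, not the real module list):

    // Illustrative build.sbt sketch: running `sparkGroup/publishM2` fans the
    // publishM2 task out to the aggregated modules only, instead of every
    // project in the build.
    lazy val spark   = (project in file("spark"))
    lazy val sparkV2 = (project in file("spark-v2"))

    lazy val sparkGroup = (project in file("."))
      .aggregate(spark, sparkV2)          // tasks scoped to sparkGroup run on these
      .settings(publish / skip := true)   // the aggregator itself publishes nothing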

.github/workflows/spark_master_test.yaml

Lines changed: 9 additions & 6 deletions
@@ -9,11 +9,11 @@ jobs:
         # These Scala versions must match those in the build.sbt
         scala: [2.13.13]
         # Important: This list of shards must be [0..NUM_SHARDS - 1]
-        shard: [0, 1, 2]
+        shard: [0, 1, 2, 3]
     env:
       SCALA_VERSION: ${{ matrix.scala }}
       # Important: This must be the same as the length of shards in matrix
-      NUM_SHARDS: 3
+      NUM_SHARDS: 4
     steps:
       - uses: actions/checkout@v3
       - uses: technote-space/get-diff-action@v4
@@ -81,11 +81,14 @@ jobs:
           pipenv run pip install numpy==1.21
           pipenv run pip install https://dist.apache.org/repos/dist/dev/spark/v4.0.0-rc4-bin/pyspark-4.0.0.tar.gz
         if: steps.git-diff.outputs.diff
-      - name: Run Spark Master tests
-        # when changing TEST_PARALLELISM_COUNT make sure to also change it in spark_test.yaml
-        # NOTE: in this branch, the default sparkVersion is the SPARK_MASTER_VERSION
+      - name: Run Delta Connect tests
         run: |
           TEST_PARALLELISM_COUNT=4 build/sbt -DsparkVersion=master "++ ${{ matrix.scala }}" clean connectServer/test
           TEST_PARALLELISM_COUNT=4 build/sbt -DsparkVersion=master "++ ${{ matrix.scala }}" clean connectServer/assembly connectClient/test
-          TEST_PARALLELISM_COUNT=4 pipenv run python run-tests.py --group spark
+        if: steps.git-diff.outputs.diff
+      - name: Run Delta Spark master tests
+        # when changing TEST_PARALLELISM_COUNT make sure to also change it in spark_test.yaml
+        # NOTE: in this branch, the default sparkVersion is the SPARK_MASTER_VERSION
+        run: |
+          TEST_PARALLELISM_COUNT=4 pipenv run python run-tests.py --group spark --shard ${{ matrix.shard }}
         if: steps.git-diff.outputs.diff
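
Two things change here: the Delta Connect tests move into their own step, and the Delta Spark master tests are fanned out over four matrix shards via the new `--shard` flag. The comments state the invariant: the `shard` list must be exactly [0..NUM_SHARDS - 1], and NUM_SHARDS must equal the list's length. The diff does not show how run-tests.py splits tests; a hash-based split like the sketch below (an assumption for illustration, not the actual script's logic) shows why the two values must stay in sync, so every test lands in exactly one shard:

    // Illustrative sharding sketch in Scala (not run-tests.py itself).
    object ShardingSketch {
      val NumShards = 4 // must equal the length of the `shard` matrix list

      // Deterministically assign each test to one shard in [0, NumShards).
      def shardFor(testName: String): Int =
        math.floorMod(testName.hashCode, NumShards)

      // The tests a given CI job (matrix.shard) would run.
      def testsForShard(allTests: Seq[String], shard: Int): Seq[String] =
        allTests.filter(shardFor(_) == shard)
    }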

kernel/kernel-api/src/test/scala/io/delta/kernel/TransactionSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -239,7 +239,7 @@ object TransactionSuite extends VectorTestUtils with MockEngineUtils {
       // Update the vectors
       val newColumnVectors = vectors.toBuffer
       newColumnVectors.remove(ordinal)
-      columnarBatch(newSchema, newColumnVectors)
+      columnarBatch(newSchema, newColumnVectors.toSeq)
     }
 
     override def getSize: Int = vectors.head.getSize
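
The added `.toSeq` looks like the usual Scala 2.13 collections fix: `vectors.toBuffer` yields a mutable `Buffer`, and in 2.13 the default `Seq` is `scala.collection.immutable.Seq`, so the buffer is presumably no longer accepted where the `columnarBatch` helper expects a `Seq`. A standalone sketch of that behavior (the `takesSeq` helper is illustrative, not from the suite):

    import scala.collection.mutable

    // On Scala 2.13, `Seq` means scala.collection.immutable.Seq, so a mutable
    // Buffer is not accepted where a plain Seq parameter is expected.
    def takesSeq(xs: Seq[Int]): Int = xs.sum

    val buf: mutable.Buffer[Int] = Seq(1, 2, 3).toBuffer
    buf.remove(0)
    // takesSeq(buf)     // does not compile on 2.13: Buffer is not an immutable Seq
    takesSeq(buf.toSeq)  // .toSeq makes an immutable copy, matching the fix above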

kernel/kernel-api/src/test/scala/io/delta/kernel/internal/DeltaLogActionUtilsSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -257,7 +257,7 @@ class DeltaLogActionUtilsSuite extends AnyFunSuite with MockFileSystemClientUtil
     ).toInMemoryList.asScala
 
     assert(commitFiles.forall(fs => FileNames.isCommitFile(fs.getPath)))
-    assert(extractVersions(commitFiles) == Seq(10, 11, 12, 13, 14, 15, 16, 17))
+    assert(extractVersions(commitFiles.toSeq) == Seq(10, 11, 12, 13, 14, 15, 16, 17))
 
     val checkpointFiles = listDeltaLogFilesAsIter(
       createMockFSListFromEngine(checkpointsAndDeltas),
@@ -269,7 +269,7 @@ class DeltaLogActionUtilsSuite extends AnyFunSuite with MockFileSystemClientUtil
     ).toInMemoryList.asScala
 
     assert(checkpointFiles.forall(fs => FileNames.isCheckpointFile(fs.getPath)))
-    assert(extractVersions(checkpointFiles) == Seq(10, 14, 14, 17))
+    assert(extractVersions(checkpointFiles.toSeq) == Seq(10, 14, 14, 17))
   }
 
   test("listDeltaLogFiles: mustBeRecreatable") {
