Commit 4f8e790

Commit message: update
1 parent c37da01

19 files changed (+55, -62 lines)


.scalafmt.conf

Lines changed: 2 additions & 2 deletions
@@ -47,8 +47,8 @@ rewrite.rules = [AvoidInfix, Imports, RedundantBraces, SortModifiers]
 # Imports
 rewrite.imports.sort = scalastyle
 rewrite.imports.groups = [
-  ["org.apache.paimon\\..*"],
-  ["org.apache.paimon.shade\\..*"],
+  ["org.apache.fluss\\..*"],
+  ["org.apache.fluss.shade\\..*"],
   [".*"],
   ["javax\\..*"],
   ["java\\..*"],

fluss-common/src/main/java/org/apache/fluss/config/Configuration.java

Lines changed: 3 additions & 1 deletion
@@ -18,6 +18,7 @@
 package org.apache.fluss.config;
 
 import org.apache.fluss.annotation.PublicStable;
+import org.apache.fluss.annotation.VisibleForTesting;
 import org.apache.fluss.utils.CollectionUtils;
 
 import org.slf4j.Logger;
@@ -643,7 +644,8 @@ <T> void setValueInternal(String key, T value) {
         }
     }
 
-    private Optional<Object> getRawValue(String key) {
+    @VisibleForTesting
+    public Optional<Object> getRawValue(String key) {
        if (key == null) {
            throw new NullPointerException("Key must not be null.");
        }
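
getRawValue is widened from private to public solely for test access, which is why it also gains @VisibleForTesting. A minimal usage sketch, assuming Configuration has a no-arg constructor; the key name is made up:

```scala
import org.apache.fluss.config.Configuration

// Sketch: an absent key yields an empty Optional; a null key throws
// NullPointerException, as the guard in the diff above shows.
val conf = new Configuration() // assumes the no-arg constructor
assert(!conf.getRawValue("no.such.key").isPresent)
```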

fluss-spark/fluss-spark-3.4/pom.xml

Lines changed: 3 additions & 5 deletions
@@ -26,25 +26,23 @@
         <version>0.9-SNAPSHOT</version>
     </parent>
 
-    <artifactId>fluss-spark-3.4</artifactId>
+    <artifactId>fluss-spark-3.4_${scala.binary.version}</artifactId>
     <name>Fluss : Engine Spark : 3.4</name>
 
     <properties>
         <spark.version>3.4.3</spark.version>
-        <scala.version>${scala212.version}</scala.version>
-        <scala.binary.version>2.12</scala.binary.version>
     </properties>
 
     <dependencies>
         <dependency>
             <groupId>org.apache.fluss</groupId>
-            <artifactId>fluss-spark-common</artifactId>
+            <artifactId>fluss-spark-common_${scala.binary.version}</artifactId>
             <version>${project.version}</version>
         </dependency>
 
         <dependency>
             <groupId>org.apache.fluss</groupId>
-            <artifactId>fluss-spark-ut</artifactId>
+            <artifactId>fluss-spark-ut_${scala.binary.version}</artifactId>
             <version>${project.version}</version>
             <classifier>tests</classifier>
             <scope>test</scope>

fluss-spark/fluss-spark-3.5/pom.xml

Lines changed: 3 additions & 5 deletions
@@ -26,25 +26,23 @@
         <version>0.9-SNAPSHOT</version>
     </parent>
 
-    <artifactId>fluss-spark-3.5</artifactId>
+    <artifactId>fluss-spark-3.5_${scala.binary.version}</artifactId>
     <name>Fluss : Engine Spark : 3.5</name>
 
     <properties>
         <spark.version>3.5.7</spark.version>
-        <scala.version>${scala212.version}</scala.version>
-        <scala.binary.version>2.12</scala.binary.version>
     </properties>
 
     <dependencies>
         <dependency>
             <groupId>org.apache.fluss</groupId>
-            <artifactId>fluss-spark-common</artifactId>
+            <artifactId>fluss-spark-common_${scala.binary.version}</artifactId>
             <version>${project.version}</version>
         </dependency>
 
         <dependency>
             <groupId>org.apache.fluss</groupId>
-            <artifactId>fluss-spark-ut</artifactId>
+            <artifactId>fluss-spark-ut_${scala.binary.version}</artifactId>
             <version>${project.version}</version>
             <classifier>tests</classifier>
             <scope>test</scope>

fluss-spark/fluss-spark-common/pom.xml

Lines changed: 1 addition & 7 deletions
@@ -26,15 +26,9 @@
         <version>0.9-SNAPSHOT</version>
     </parent>
 
-    <artifactId>fluss-spark-common</artifactId>
+    <artifactId>fluss-spark-common_${scala.binary.version}</artifactId>
     <name>Fluss : Engine Spark : Common</name>
 
-    <properties>
-        <spark.version>3.5.7</spark.version>
-        <scala.version>${scala212.version}</scala.version>
-        <scala.binary.version>2.12</scala.binary.version>
-    </properties>
-
     <dependencies>
         <dependency>
             <groupId>org.apache.spark</groupId>
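
With the _${scala.binary.version} suffix on these artifactIds, the Spark modules are published once per Scala binary version (for example fluss-spark-3.5_2.12). A hedged consumer-side sketch in sbt syntax; the exact set of published artifacts is an assumption here:

```scala
// sbt's %% operator appends the build's own Scala binary suffix, so this resolves
// to e.g. org.apache.fluss:fluss-spark-3.5_2.12:0.9-SNAPSHOT (or _2.13).
libraryDependencies += "org.apache.fluss" %% "fluss-spark-3.5" % "0.9-SNAPSHOT"
```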

fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/FlussCatalog.scala renamed to fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/SparkCatalog.scala

Lines changed: 3 additions & 2 deletions
@@ -20,6 +20,7 @@ package org.apache.fluss.spark
 import org.apache.fluss.exception.{DatabaseNotExistException, TableAlreadyExistException, TableNotExistException}
 import org.apache.fluss.metadata.TablePath
 import org.apache.fluss.spark.catalog.{SupportsFlussNamespaces, WithFlussAdmin}
+
 import org.apache.spark.sql.catalyst.analysis.{NoSuchNamespaceException, NoSuchTableException, TableAlreadyExistsException}
 import org.apache.spark.sql.connector.catalog.{Identifier, Table, TableCatalog, TableChange}
 import org.apache.spark.sql.connector.expressions.Transform
@@ -31,7 +32,7 @@ import java.util.concurrent.ExecutionException
 
 import scala.collection.JavaConverters._
 
-class FlussCatalog extends TableCatalog with SupportsFlussNamespaces with WithFlussAdmin {
+class SparkCatalog extends TableCatalog with SupportsFlussNamespaces with WithFlussAdmin {
 
   private var catalogName: String = "fluss"
 
@@ -48,7 +49,7 @@ class FlussCatalog extends TableCatalog with SupportsFlussNamespaces with WithFlussAdmin {
 
   override def loadTable(ident: Identifier): Table = {
     try {
-      FlussTable(admin.getTableInfo(toTablePath(ident)).get())
+      SparkTable(admin.getTableInfo(toTablePath(ident)).get())
     } catch {
       case e: ExecutionException if e.getCause.isInstanceOf[TableNotExistException] =>
        throw new NoSuchTableException(ident)
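
The catalog implementation is now org.apache.fluss.spark.SparkCatalog. A minimal sketch of wiring it into a Spark session through Spark's standard spark.sql.catalog.<name> mechanism; the catalog name "fluss" and the local master are illustrative, and any Fluss connection options (passed as spark.sql.catalog.fluss.<option> entries) are omitted:

```scala
import org.apache.spark.sql.SparkSession

// Register the renamed catalog class under a catalog named "fluss".
val spark = SparkSession
  .builder()
  .master("local[*]")
  .appName("fluss-spark-catalog-sketch")
  .config("spark.sql.catalog.fluss", "org.apache.fluss.spark.SparkCatalog")
  .getOrCreate()
```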

fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/SparkConversions.scala

Lines changed: 4 additions & 3 deletions
@@ -20,8 +20,9 @@ package org.apache.fluss.spark
 import org.apache.fluss.config.FlussConfigUtils
 import org.apache.fluss.metadata.{Schema, TableDescriptor}
 import org.apache.fluss.spark.SparkConnectorOptions._
-import org.apache.fluss.spark.types.{FlussDataTypeToSparkDataType, SparkToFlussTypeVisitor}
+import org.apache.fluss.spark.types.{FlussToSparkTypeVisitor, SparkToFlussTypeVisitor}
 import org.apache.fluss.types.RowType
+
 import org.apache.spark.sql.FlussIdentityTransform
 import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
 import org.apache.spark.sql.connector.expressions.Transform
@@ -38,7 +39,7 @@ object SparkConversions {
     SparkToFlussTypeVisitor.visit(schema).asInstanceOf[RowType]
 
   def toSparkDataType(rowType: RowType): StructType =
-    FlussDataTypeToSparkDataType.visit(rowType).asInstanceOf[StructType]
+    FlussToSparkTypeVisitor.visit(rowType).asInstanceOf[StructType]
 
   def toFlussTable(
       sparkSchema: StructType,
@@ -76,7 +77,7 @@ object SparkConversions {
 
     val (tableProps, customProps) =
       caseInsensitiveProps.filterNot(SPARK_TABLE_OPTIONS.contains).partition {
-        case (key, _) => FlussConfigUtils.TABLE_OPTIONS.containsKey(key)
+        case (key, _) => key.startsWith(FlussConfigUtils.TABLE_PREFIX)
       }
 
     tableDescriptorBuilder
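
The property split in toFlussTable now keys off a name prefix instead of membership in FlussConfigUtils.TABLE_OPTIONS, so any key carrying the table prefix is routed to table properties and everything else to custom properties. A standalone sketch of the rule, assuming the prefix is the literal string "table." (the actual value of TABLE_PREFIX is not shown in this diff) and using made-up keys:

```scala
// Standalone illustration of prefix-based partitioning; the keys and the
// "table." prefix are assumptions for the example, not values from Fluss itself.
val props = Map(
  "table.some.option" -> "x", // routed to tableProps
  "custom.whatever" -> "y") // routed to customProps
val (tableProps, customProps) =
  props.partition { case (key, _) => key.startsWith("table.") }
```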

fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/FlussTable.scala renamed to fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/SparkTable.scala

Lines changed: 3 additions & 3 deletions
@@ -18,8 +18,8 @@
 package org.apache.fluss.spark
 
 import org.apache.fluss.metadata.TableInfo
-import org.apache.fluss.spark.catalog.{FlussTableInfo, SupportsFlussPartitionManagement}
+import org.apache.fluss.spark.catalog.{AbstractSparkTable, SupportsFlussPartitionManagement}
 
-case class FlussTable(table: TableInfo)
-  extends FlussTableInfo(table)
+case class SparkTable(table: TableInfo)
+  extends AbstractSparkTable(table)
   with SupportsFlussPartitionManagement {}

fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/catalog/FlussTableInfo.scala renamed to fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/catalog/AbstractSparkTable.scala

Lines changed: 2 additions & 1 deletion
@@ -19,14 +19,15 @@ package org.apache.fluss.spark.catalog
 
 import org.apache.fluss.metadata.TableInfo
 import org.apache.fluss.spark.SparkConversions
+
 import org.apache.spark.sql.connector.catalog.{Table, TableCapability}
 import org.apache.spark.sql.types.StructType
 
 import java.util
 
 import scala.collection.JavaConverters._
 
-abstract class FlussTableInfo(tableInfo: TableInfo) extends Table {
+abstract class AbstractSparkTable(tableInfo: TableInfo) extends Table {
 
   protected lazy val _schema: StructType =
     SparkConversions.toSparkDataType(tableInfo.getSchema.getRowType)

fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/catalog/SupportsFlussNamespaces.scala

Lines changed: 1 addition & 0 deletions
@@ -20,6 +20,7 @@ package org.apache.fluss.spark.catalog
 import org.apache.fluss.exception.DatabaseNotExistException
 import org.apache.fluss.metadata.DatabaseDescriptor
 import org.apache.fluss.utils.Preconditions
+
 import org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
 import org.apache.spark.sql.connector.catalog.{NamespaceChange, SupportsNamespaces}
 
