Skip to content

Commit daedfdc

Browse files
committed
scala doc
1 parent 96a4902 commit daedfdc

File tree

1 file changed

+10
-6
lines changed
  • clickhouse-integration/clickhouse-integration-spark/src/main/scala/org/apache/spark/sql/jdbc

1 file changed

+10
-6
lines changed

clickhouse-integration/clickhouse-integration-spark/src/main/scala/org/apache/spark/sql/jdbc/ClickHouseDialect.scala

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ import org.apache.spark.sql.types._
2424
import scala.util.matching.Regex
2525

2626
/**
27-
* ClickHouseDialects
27+
* ClickHouse SQL dialect
2828
*/
2929
object ClickHouseDialect extends JdbcDialect with Logging {
3030

@@ -42,7 +42,7 @@ object ClickHouseDialect extends JdbcDialect with Logging {
4242

4343
/**
4444
* Inferred schema is always nullable.
45-
* see [[JDBCRDD.resolveTable(JDBCOptions)]]
45+
* see [[JDBCRDD.resolveTable]]
4646
*/
4747
override def getCatalystType(sqlType: Int,
4848
typeName: String,
@@ -61,8 +61,10 @@ object ClickHouseDialect extends JdbcDialect with Logging {
6161
}
6262
}
6363

64-
// Spark use a widening conversion both ways.
65-
// see https://github.com/apache/spark/pull/26301#discussion_r347725332
64+
/**
65+
* Spark uses a widening conversion both ways; see details at
66+
* [[https://github.com/apache/spark/pull/26301#discussion_r347725332]]
67+
*/
6668
private[jdbc] def toCatalystType(typeName: String,
6769
precision: Int,
6870
scale: Int): Option[(Boolean, DataType)] = {
@@ -95,8 +97,10 @@ object ClickHouseDialect extends JdbcDialect with Logging {
9597
case _ => (false, maybeNullableTypeName)
9698
}
9799

98-
// NOT recommend auto create ClickHouse table by Spark JDBC, the reason is it's hard to handle nullable because
99-
// ClickHouse use `T` to represent ANSI SQL `T NOT NULL` and `Nullable(T)` to represent ANSI SQL `T NULL`,
100+
/**
101+
* Auto-creating ClickHouse tables via Spark JDBC is NOT recommended, because nullability is hard to handle:
102+
* ClickHouse uses `T` to represent ANSI SQL `T NOT NULL` and `Nullable(T)` to represent ANSI SQL `T NULL`.
103+
*/
100104
override def getJDBCType(dt: DataType): Option[JdbcType] = dt match {
101105
case StringType => Some(JdbcType("String", Types.VARCHAR))
102106
// ClickHouse doesn't have the concept of encodings. Strings can contain an arbitrary set of bytes,

0 commit comments

Comments
 (0)