@@ -25,7 +25,6 @@ import io.glutenproject.substrait.rel.{LocalFilesBuilder, LocalFilesNode, SplitI
 import io.glutenproject.substrait.rel.LocalFilesNode.ReadFileFormat
 import io.glutenproject.utils._
 import io.glutenproject.vectorized._
-
 import org.apache.spark.{SparkConf, SparkContext, TaskContext}
 import org.apache.spark.internal.Logging
 import org.apache.spark.rdd.RDD
@@ -35,7 +34,7 @@ import org.apache.spark.sql.catalyst.util.{DateFormatter, TimestampFormatter}
 import org.apache.spark.sql.connector.read.InputPartition
 import org.apache.spark.sql.execution.datasources.{FilePartition, PartitionedFile}
 import org.apache.spark.sql.execution.metric.SQLMetric
-import org.apache.spark.sql.types.{BinaryType, DateType, StructType, TimestampType}
+import org.apache.spark.sql.types.{BinaryType, DateType, Decimal, DecimalType, StructType, TimestampType}
 import org.apache.spark.sql.utils.OASPackageBridge.InputMetricsWrapper
 import org.apache.spark.sql.vectorized.ColumnarBatch
 import org.apache.spark.util.ExecutorManager
@@ -45,7 +44,6 @@ import java.nio.charset.StandardCharsets
 import java.time.ZoneOffset
 import java.util.{ArrayList => JArrayList, HashMap => JHashMap, Map => JMap}
 import java.util.concurrent.TimeUnit
-
 import scala.collection.JavaConverters._

 class IteratorApiImpl extends IteratorApi with Logging {
@@ -118,6 +116,8 @@ class IteratorApiImpl extends IteratorApi with Logging {
         new String(pn.asInstanceOf[Array[Byte]], StandardCharsets.UTF_8)
       case _: DateType =>
         DateFormatter.apply().format(pn.asInstanceOf[Integer])
+      case _: DecimalType =>
+        pn.asInstanceOf[Decimal].toUnscaledLong.toString
       case _: TimestampType =>
         TimestampFormatter
           .getFractionFormatter(ZoneOffset.UTC)
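For context, a minimal standalone sketch (not part of the change) of what the new `DecimalType` branch produces for a decimal partition value, using Spark's `org.apache.spark.sql.types.Decimal` API; the object name and the sample value are illustrative only:

```scala
import org.apache.spark.sql.types.Decimal

// Hypothetical driver showing the rendering applied by the added branch.
object DecimalPartitionValueSketch {
  def main(args: Array[String]): Unit = {
    // A partition column value of type decimal(5,2), e.g. 123.45.
    val pn: Any = Decimal(BigDecimal("123.45"), precision = 5, scale = 2)

    // Mirrors the added case: render the decimal as its unscaled long.
    val rendered = pn.asInstanceOf[Decimal].toUnscaledLong.toString

    println(rendered) // prints "12345"
  }
}
```

In other words, the decimal is passed down as an unscaled integer string rather than a formatted decimal literal, matching how the other partition value types in this match are converted to plain strings.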