This is an automated email from the ASF dual-hosted git repository.

diwu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris-spark-connector.git


The following commit(s) were added to refs/heads/master by this push:
     new 2090251  [fix] write timestamp type precision lost problem (#165)
2090251 is described below

commit 209025175e4b5650751d3a9d0734a96975766150
Author: gnehil <adamlee...@gmail.com>
AuthorDate: Tue Dec 5 14:44:03 2023 +0800

    [fix] write timestamp type precision lost problem (#165)
    
    * fix timestamp type precision lost
    
    * fix unit test
    
    ---------
    
    Co-authored-by: gnehil <gnehil489@github>
---
 .../org/apache/doris/spark/sql/SchemaUtils.scala    |  5 +++--
 .../apache/doris/spark/sql/TestSchemaUtils.scala    | 21 +++++++++++++++++----
 2 files changed, 20 insertions(+), 6 deletions(-)

diff --git a/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/SchemaUtils.scala b/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/SchemaUtils.scala
index 1f0e942..d56a4a3 100644
--- a/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/SchemaUtils.scala
+++ b/spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/SchemaUtils.scala
@@ -32,7 +32,9 @@ import org.apache.spark.sql.types._
 import org.slf4j.LoggerFactory
 
 import java.sql.Timestamp
+import java.time.format.DateTimeFormatter
 import java.time.{LocalDateTime, ZoneOffset}
+import java.util.Locale
 import scala.collection.JavaConversions._
 import scala.collection.mutable
 
@@ -164,8 +166,7 @@ private[spark] object SchemaUtils {
         case DoubleType => row.getDouble(ordinal)
         case StringType => Option(row.getUTF8String(ordinal)).map(_.toString).getOrElse(DataUtil.NULL_VALUE)
         case TimestampType =>
-          LocalDateTime.ofEpochSecond(row.getLong(ordinal) / 100000, (row.getLong(ordinal) % 1000).toInt, ZoneOffset.UTC)
-          new Timestamp(row.getLong(ordinal) / 1000).toString
+          DateTimeUtils.toJavaTimestamp(row.getLong(ordinal)).toString
         case DateType => DateTimeUtils.toJavaDate(row.getInt(ordinal)).toString
         case BinaryType => row.getBinary(ordinal)
         case dt: DecimalType => row.getDecimal(ordinal, dt.precision, dt.scale).toJavaBigDecimal
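
Background on the hunk above: Spark encodes a TimestampType column internally as a Long of microseconds since the Unix epoch, while the removed code converted through java.sql.Timestamp's millisecond constructor, truncating the last three fractional digits. A minimal standalone sketch of the difference (not part of the patch; it only assumes Spark's org.apache.spark.sql.catalyst.util.DateTimeUtils on the classpath):

    import java.sql.Timestamp
    import org.apache.spark.sql.catalyst.util.DateTimeUtils

    object TimestampPrecisionDemo extends App {
      // Build Spark's internal value: microseconds since the Unix epoch.
      val ts = Timestamp.valueOf("2021-01-01 11:12:23.345678")
      val micros = ts.getTime / 1000 * 1000000L + ts.getNanos / 1000

      // Old path: micros / 1000 is milliseconds, so .345678 collapses to .345.
      println(new Timestamp(micros / 1000))          // 2021-01-01 11:12:23.345

      // New path: toJavaTimestamp keeps full microsecond precision.
      println(DateTimeUtils.toJavaTimestamp(micros)) // 2021-01-01 11:12:23.345678
    }
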
diff --git a/spark-doris-connector/src/test/scala/org/apache/doris/spark/sql/TestSchemaUtils.scala b/spark-doris-connector/src/test/scala/org/apache/doris/spark/sql/TestSchemaUtils.scala
index e11fb4f..98ec67f 100644
--- a/spark-doris-connector/src/test/scala/org/apache/doris/spark/sql/TestSchemaUtils.scala
+++ b/spark-doris-connector/src/test/scala/org/apache/doris/spark/sql/TestSchemaUtils.scala
@@ -20,13 +20,14 @@ package org.apache.doris.spark.sql
 import org.apache.doris.sdk.thrift.{TPrimitiveType, TScanColumnDesc}
 import org.apache.doris.spark.exception.DorisException
 import org.apache.doris.spark.rest.models.{Field, Schema}
+import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.types._
 import org.hamcrest.core.StringStartsWith.startsWith
-import org.junit.{Assert, Ignore, Test}
+import org.junit.{Assert, Test}
 
+import java.sql.Timestamp
 import scala.collection.JavaConverters._
 
-@Ignore
 class TestSchemaUtils extends ExpectedExceptionTest {
   @Test
   def testConvertToStruct(): Unit = {
@@ -54,12 +55,12 @@ class TestSchemaUtils extends ExpectedExceptionTest {
     Assert.assertEquals(DataTypes.LongType, SchemaUtils.getCatalystType("BIGINT", 0, 0))
     Assert.assertEquals(DataTypes.FloatType, SchemaUtils.getCatalystType("FLOAT", 0, 0))
     Assert.assertEquals(DataTypes.DoubleType, SchemaUtils.getCatalystType("DOUBLE", 0, 0))
-    Assert.assertEquals(DataTypes.StringType, SchemaUtils.getCatalystType("DATE", 0, 0))
+    Assert.assertEquals(DataTypes.DateType, SchemaUtils.getCatalystType("DATE", 0, 0))
     Assert.assertEquals(DataTypes.StringType, SchemaUtils.getCatalystType("DATETIME", 0, 0))
     Assert.assertEquals(DataTypes.BinaryType, SchemaUtils.getCatalystType("BINARY", 0, 0))
     Assert.assertEquals(DecimalType(9, 3), SchemaUtils.getCatalystType("DECIMAL", 9, 3))
     Assert.assertEquals(DataTypes.StringType, SchemaUtils.getCatalystType("CHAR", 0, 0))
-    Assert.assertEquals(DataTypes.StringType, SchemaUtils.getCatalystType("LARGEINT", 0, 0))
+    Assert.assertEquals(DecimalType(38, 0), SchemaUtils.getCatalystType("LARGEINT", 0, 0))
     Assert.assertEquals(DataTypes.StringType, SchemaUtils.getCatalystType("VARCHAR", 0, 0))
     Assert.assertEquals(DecimalType(10, 5), SchemaUtils.getCatalystType("DECIMALV2", 10, 5))
     Assert.assertEquals(DataTypes.DoubleType, SchemaUtils.getCatalystType("TIME", 0, 0))
@@ -113,4 +114,16 @@ class TestSchemaUtils extends ExpectedExceptionTest {
 
   }
 
+  @Test
+  def rowColumnValueTest(): Unit = {
+
+    val timestamp = Timestamp.valueOf("2021-01-01 11:12:23.345678")
+    val row = InternalRow.fromSeq(Seq(
+      timestamp.getTime / 1000 * 1000000 + timestamp.getNanos / 1000
+    ))
+
+    Assert.assertEquals("2021-01-01 11:12:23.345678", SchemaUtils.rowColumnValue(row, 0, TimestampType))
+
+  }
+
 }
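
For reference, a Scala REPL sketch (mine, not part of the patch) of why the Long built in rowColumnValueTest matches Spark's internal TimestampType encoding:

    import java.sql.Timestamp

    val ts = Timestamp.valueOf("2021-01-01 11:12:23.345678") // parsed in the JVM default zone
    // ts.getTime  -> epoch millis, fraction truncated to .345
    // ts.getNanos -> 345678000, the complete fractional second in nanoseconds
    val micros = ts.getTime / 1000 * 1000000L + ts.getNanos / 1000
    // = whole seconds scaled to microseconds, plus the 345678 sub-second microseconds

    // rowColumnValue converts back via DateTimeUtils.toJavaTimestamp(micros).toString.
    // Both Timestamp.valueOf above and Timestamp.toString use the JVM default zone,
    // so (away from DST transitions) the assertion round-trips in any test time zone.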


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
