Repository: spark
Updated Branches:
  refs/heads/master 3fc6cfd07 -> f8cbb0a4b


[SPARK-7229] [SQL] SpecificMutableRow should take integer type as internal 
representation for Date

Author: Cheng Hao <[email protected]>

Closes #5772 from chenghao-intel/specific_row and squashes the following 
commits:

2cd064d [Cheng Hao] scala style issue
60347a2 [Cheng Hao] SpecificMutableRow should take integer type as internal 
representation for DateType
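
For context: Spark SQL represents a DateType value internally as an Int holding the number of days since the Unix epoch, which is why the mutable row needs an Int slot for Date columns. A minimal, self-contained sketch of that days-since-epoch encoding (illustrative names only, not Spark's actual DateUtils API; time-zone handling simplified):

    import java.sql.Date
    import java.util.concurrent.TimeUnit

    // Hypothetical helpers mirroring the days-since-epoch representation
    // used for DateType; not Spark's real implementation.
    object DateAsInt {
      // java.sql.Date -> days since 1970-01-01 (UTC millis, truncated).
      def fromJavaDate(date: Date): Int =
        TimeUnit.MILLISECONDS.toDays(date.getTime).toInt

      // days since 1970-01-01 -> java.sql.Date
      def toJavaDate(days: Int): Date =
        new Date(TimeUnit.DAYS.toMillis(days.toLong))
    }

    // e.g. Date.valueOf("1996-01-01") maps to 9496 days (time-zone caveats aside)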


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f8cbb0a4
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f8cbb0a4
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f8cbb0a4

Branch: refs/heads/master
Commit: f8cbb0a4b37b0d4ba49515d888cb52dea9eb01f1
Parents: 3fc6cfd
Author: Cheng Hao <[email protected]>
Authored: Wed Apr 29 16:23:34 2015 -0700
Committer: Reynold Xin <[email protected]>
Committed: Wed Apr 29 16:23:34 2015 -0700

----------------------------------------------------------------------
 .../spark/sql/catalyst/expressions/SpecificMutableRow.scala | 1 +
 .../test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala    | 9 +++++++++
 2 files changed, 10 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/f8cbb0a4/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SpecificMutableRow.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SpecificMutableRow.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SpecificMutableRow.scala
index 3475ed0..aa4099e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SpecificMutableRow.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SpecificMutableRow.scala
@@ -202,6 +202,7 @@ final class SpecificMutableRow(val values: Array[MutableValue]) extends MutableR
         case DoubleType => new MutableDouble
         case BooleanType => new MutableBoolean
         case LongType => new MutableLong
+        case DateType => new MutableInt // We use INT for DATE internally
         case _ => new MutableAny
       }.toArray)
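
The mapping above matters because each SpecificMutableRow column is backed by a type-specialized mutable holder, so primitive setters avoid boxing. A simplified sketch of that holder pattern (illustrative names, not Spark's actual class hierarchy):

    // With DateType mapped to an Int slot, setting the column via an Int
    // setter lands in a primitive holder rather than a generic boxed one.
    abstract class MutableSlot { var isNull: Boolean = true; def boxed: Any }

    final class IntSlot extends MutableSlot {
      var value: Int = 0
      def set(v: Int): Unit = { value = v; isNull = false }
      def boxed: Any = if (isNull) null else value
    }

    final class AnySlot extends MutableSlot {
      var value: Any = null
      def set(v: Any): Unit = { value = v; isNull = false }
      def boxed: Any = if (isNull) null else value
    }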
 

http://git-wip-us.apache.org/repos/asf/spark/blob/f8cbb0a4/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index db096af..856a806 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -256,6 +256,15 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
     assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
   }
 
+  test("test DATE types in cache") {
+    val rows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.TIMETYPES").collect()
+    TestSQLContext
+      .jdbc(urlWithUserAndPass, "TEST.TIMETYPES").cache().registerTempTable("mycached_date")
+    val cachedRows = sql("select * from mycached_date").collect()
+    assert(rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
+    assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
+  }
+
   test("H2 floating-point types") {
     val rows = sql("SELECT * FROM flttypes").collect()
     assert(rows(0).getDouble(0) === 1.00000000000000022) // Yes, I meant ==.


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
