Repository: spark
Updated Branches:
  refs/heads/master 507bea5ca -> 90787de86


[SPARK-15033][SQL] fix a flaky test in CachedTableSuite

## What changes were proposed in this pull request?

This is caused by https://github.com/apache/spark/pull/12776, which removes the 
`synchronized` from all methods in `AccumulatorContext`.

However, a test in `CachedTableSuite` synchronizes on `AccumulatorContext` and 
expects that no one else can change it, which is not true anymore.

This PR updates that test so it no longer requires locking on `AccumulatorContext`.

## How was this patch tested?

N/A

Author: Wenchen Fan <[email protected]>

Closes #12811 from cloud-fan/flaky.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/90787de8
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/90787de8
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/90787de8

Branch: refs/heads/master
Commit: 90787de864b58a1079c23e6581381ca8ffe7685f
Parents: 507bea5
Author: Wenchen Fan <[email protected]>
Authored: Sat Apr 30 20:28:22 2016 -0700
Committer: Reynold Xin <[email protected]>
Committed: Sat Apr 30 20:28:22 2016 -0700

----------------------------------------------------------------------
 .../columnar/InMemoryTableScanExec.scala         |  2 +-
 .../org/apache/spark/sql/CachedTableSuite.scala  | 19 +++++++++++++------
 2 files changed, 14 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/90787de8/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryTableScanExec.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryTableScanExec.scala
 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryTableScanExec.scala
index 577c34b..94b87a5 100644
--- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryTableScanExec.scala
+++ 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryTableScanExec.scala
@@ -68,7 +68,7 @@ private[sql] case class InMemoryRelation(
 
   override def producedAttributes: AttributeSet = outputSet
 
-  private val batchStats: Accumulable[ArrayBuffer[InternalRow], InternalRow] =
+  private[sql] val batchStats: Accumulable[ArrayBuffer[InternalRow], 
InternalRow] =
     if (_batchStats == null) {
       
child.sqlContext.sparkContext.accumulableCollection(ArrayBuffer.empty[InternalRow])
     } else {

http://git-wip-us.apache.org/repos/asf/spark/blob/90787de8/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
index 1095a73..12dbed8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
@@ -333,12 +333,19 @@ class CachedTableSuite extends QueryTest with 
SQLTestUtils with SharedSQLContext
     sql("SELECT * FROM t1").count()
     sql("SELECT * FROM t2").count()
 
-    AccumulatorContext.synchronized {
-      val accsSize = AccumulatorContext.numAccums
-      sqlContext.uncacheTable("t1")
-      sqlContext.uncacheTable("t2")
-      assert((accsSize - 2) == AccumulatorContext.numAccums)
-    }
+    val accId1 = sqlContext.table("t1").queryExecution.withCachedData.collect {
+      case i: InMemoryRelation => i.batchStats.id
+    }.head
+
+    val accId2 = sqlContext.table("t1").queryExecution.withCachedData.collect {
+      case i: InMemoryRelation => i.batchStats.id
+    }.head
+
+    sqlContext.uncacheTable("t1")
+    sqlContext.uncacheTable("t2")
+
+    assert(AccumulatorContext.get(accId1).isEmpty)
+    assert(AccumulatorContext.get(accId2).isEmpty)
   }
 
   test("SPARK-10327 Cache Table is not working while subquery has alias in its 
project list") {


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to