This is an automated email from the ASF dual-hosted git repository.

jongyoul pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/zeppelin.git
The following commit(s) were added to refs/heads/master by this push:
     new 07cddfd05e Integration tests with newer spark version (#4604)
07cddfd05e is described below

commit 07cddfd05ed441f9c49a363009cce8f52291ddb2
Author: Philipp Dallig <philipp.dal...@gmail.com>
AuthorDate: Tue May 30 08:51:11 2023 +0200

    Integration tests with newer spark version (#4604)
---
 .../zeppelin/integration/ZSessionIntegrationTest.java | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZSessionIntegrationTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZSessionIntegrationTest.java
index 1c3fb82eae..a30234bcf5 100644
--- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZSessionIntegrationTest.java
+++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZSessionIntegrationTest.java
@@ -67,7 +67,7 @@ public class ZSessionIntegrationTest extends AbstractTestRestApi {
     zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_LIFECYCLE_MANAGER_TIMEOUT_THRESHOLD.getVarName(), "10000");
 
     notebook = TestUtils.getInstance(Notebook.class);
-    sparkHome = DownloadUtils.downloadSpark("2.4.4", "2.7");
+    sparkHome = DownloadUtils.downloadSpark("3.2.4", "3.2");
     flinkHome = DownloadUtils.downloadFlink("1.13.2", "2.11");
   }
@@ -188,11 +188,11 @@ public class ZSessionIntegrationTest extends AbstractTestRestApi {
       assertEquals(result.toString(), Status.FINISHED, result.getStatus());
       assertEquals(1, result.getResults().size());
       assertEquals("TEXT", result.getResults().get(0).getType());
-      assertTrue(result.getResults().get(0).getData(), result.getResults().get(0).getData().contains("2.4.4"));
+      assertTrue(result.getResults().get(0).getData(), result.getResults().get(0).getData().contains("3.2.4"));
       assertEquals(0, result.getJobUrls().size());
 
       // pyspark
-      result = session.execute("pyspark", "df = spark.createDataFrame([(1,'a'),(2,'b')])\ndf.registerTempTable('df')\ndf.show()");
+      result = session.execute("pyspark", "df = spark.createDataFrame([(1,'a'),(2,'b')])\ndf.createOrReplaceTempView('df')\ndf.show()");
       assertEquals(Status.FINISHED, result.getStatus());
       assertEquals(1, result.getResults().size());
       assertEquals("TEXT", result.getResults().get(0).getType());
@@ -211,7 +211,7 @@ public class ZSessionIntegrationTest extends AbstractTestRestApi {
       assertEquals(1, result.getResults().size());
       assertEquals("TEXT", result.getResults().get(0).getType());
       assertTrue(result.getResults().get(0).getData(), result.getResults().get(0).getData().contains("eruptions waiting"));
-      assertEquals(2, result.getJobUrls().size());
+      assertTrue(result.getJobUrls().size() > 0);
 
       // spark sql
       result = session.execute("sql", "select * from df");
@@ -226,7 +226,7 @@ public class ZSessionIntegrationTest extends AbstractTestRestApi {
       assertEquals(Status.ERROR, result.getStatus());
       assertEquals(1, result.getResults().size());
       assertEquals("TEXT", result.getResults().get(0).getType());
-      assertTrue(result.getResults().get(0).getData(), result.getResults().get(0).getData().contains("Table or view 'unknown_table' not found in database"));
+      assertTrue(result.getResults().get(0).getData(), result.getResults().get(0).getData().contains("Table or view not found: unknown_table"));
       assertEquals(0, result.getJobUrls().size());
 
     } finally {
@@ -257,11 +257,11 @@ public class ZSessionIntegrationTest extends AbstractTestRestApi {
       assertEquals(result.toString(), Status.FINISHED, result.getStatus());
       assertEquals(1, result.getResults().size());
       assertEquals("TEXT", result.getResults().get(0).getType());
-      assertTrue(result.getResults().get(0).getData(), result.getResults().get(0).getData().contains("2.4.4"));
+      assertTrue(result.getResults().get(0).getData(), result.getResults().get(0).getData().contains("3.2.4"));
       assertEquals(0, result.getJobUrls().size());
 
       // pyspark
-      result = session.submit("pyspark", "df = spark.createDataFrame([(1,'a'),(2,'b')])\ndf.registerTempTable('df')\ndf.show()");
+      result = session.submit("pyspark", "df = spark.createDataFrame([(1,'a'),(2,'b')])\ndf.createOrReplaceTempView('df')\ndf.show()");
       result = session.waitUntilFinished(result.getStatementId());
       assertEquals(result.toString(), Status.FINISHED, result.getStatus());
       assertEquals(1, result.getResults().size());
@@ -282,7 +282,7 @@ public class ZSessionIntegrationTest extends AbstractTestRestApi {
       assertEquals(1, result.getResults().size());
       assertEquals("TEXT", result.getResults().get(0).getType());
       assertTrue(result.getResults().get(0).getData(), result.getResults().get(0).getData().contains("eruptions waiting"));
-      assertEquals(2, result.getJobUrls().size());
+      assertTrue(result.getJobUrls().size() > 0);
 
       // spark sql
       result = session.submit("sql", "select * from df");
@@ -299,7 +299,7 @@ public class ZSessionIntegrationTest extends AbstractTestRestApi {
       assertEquals(Status.ERROR, result.getStatus());
       assertEquals(1, result.getResults().size());
       assertEquals("TEXT", result.getResults().get(0).getType());
-      assertTrue(result.getResults().get(0).getData(), result.getResults().get(0).getData().contains("Table or view 'unknown_table' not found in database"));
+      assertTrue(result.getResults().get(0).getData(), result.getResults().get(0).getData().contains("Table or view not found: unknown_table"));
       assertEquals(0, result.getJobUrls().size());
 
       // cancel
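
Note on the pyspark snippet change: DataFrame.registerTempTable has been deprecated in favour of createOrReplaceTempView since Spark 2.0, so the test's embedded Python is updated for the Spark 3.2.4 download. A minimal standalone sketch of what that statement does (assuming a local PySpark 3.x install; the SparkSession setup below is illustrative, since inside Zeppelin the spark interpreter already provides `spark`):

    from pyspark.sql import SparkSession

    # Illustrative local session; Zeppelin's pyspark interpreter supplies `spark` itself.
    spark = SparkSession.builder.master("local[1]").appName("temp-view-sketch").getOrCreate()

    df = spark.createDataFrame([(1, 'a'), (2, 'b')])
    df.createOrReplaceTempView('df')   # replaces the deprecated registerTempTable('df')
    df.show()

    # The registered view is what the later "select * from df" sql statements in the test query.
    spark.sql("select * from df").show()

    spark.stop()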