This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 811c3fa76428 [SPARK-55706][SQL][TESTS] Disable DB2 JDBC Driver tests
811c3fa76428 is described below
commit 811c3fa76428ffe01a45a55d7e35302fac4bf940
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Thu Feb 26 18:14:22 2026 +0800
[SPARK-55706][SQL][TESTS] Disable DB2 JDBC Driver tests
### What changes were proposed in this pull request?
This PR aims to disable the DB2 JDBC driver test coverage until the driver's
side-effect is removed.
- `ConnectionProviderSuite` is revised to use another connection provider
instead of `DB2`.
- SPARK-55707 is filed to re-enable the disabled tests.
### Why are the changes needed?
To avoid a side-effect on the LZ4 test dependency. We had better focus on the
non-test dependencies first.
### Does this PR introduce _any_ user-facing change?
No. There is no change to Spark's behavior because this only affects a test
dependency and test coverage.
### How was this patch tested?
Pass the CIs.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #54505 from dongjoon-hyun/SPARK-55706.
Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
---
connector/docker-integration-tests/pom.xml | 5 +++--
.../apache/spark/sql/jdbc/DB2IntegrationSuite.scala | 5 +++--
.../spark/sql/jdbc/DB2KrbIntegrationSuite.scala | 2 ++
.../spark/sql/jdbc/v2/DB2IntegrationSuite.scala | 5 +++--
.../apache/spark/sql/jdbc/v2/DB2NamespaceSuite.scala | 6 +++++-
pom.xml | 5 +++--
sql/core/pom.xml | 5 +++--
.../jdbc/connection/ConnectionProviderSuite.scala | 20 ++++++++++----------
.../jdbc/connection/DB2ConnectionProviderSuite.scala | 3 +++
.../scala/org/apache/spark/sql/jdbc/JDBCSuite.scala | 10 +++++++---
10 files changed, 42 insertions(+), 24 deletions(-)
diff --git a/connector/docker-integration-tests/pom.xml
b/connector/docker-integration-tests/pom.xml
index e060de5c17f4..3c2234857814 100644
--- a/connector/docker-integration-tests/pom.xml
+++ b/connector/docker-integration-tests/pom.xml
@@ -100,11 +100,12 @@
<artifactId>ojdbc17</artifactId>
<scope>test</scope>
</dependency>
- <dependency>
+ <!-- TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests -->
+ <!--dependency>
<groupId>com.ibm.db2</groupId>
<artifactId>jcc</artifactId>
<scope>test</scope>
- </dependency>
+ </dependency-->
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>mssql-jdbc</artifactId>
diff --git
a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala
b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala
index 6a489ffb2d42..39aef6d7c90b 100644
---
a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala
+++
b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala
@@ -21,12 +21,13 @@ import java.math.BigDecimal
import java.sql.{Connection, Date, Timestamp}
import java.util.Properties
+import org.scalatest.Ignore
+
import org.apache.spark.sql.{Row, SaveMode}
import org.apache.spark.sql.catalyst.util.CharVarcharUtils
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{ByteType, ShortType, StructType}
-import org.apache.spark.tags.DockerTest
/**
* To run this test suite for a specific version (e.g.,
icr.io/db2_community/db2:11.5.9.0):
@@ -36,7 +37,7 @@ import org.apache.spark.tags.DockerTest
* "docker-integration-tests/testOnly
org.apache.spark.sql.jdbc.DB2IntegrationSuite"
* }}}
*/
-@DockerTest
+@Ignore // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
class DB2IntegrationSuite extends SharedJDBCIntegrationSuite {
override val db = new DB2DatabaseOnDocker
diff --git
a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala
b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala
index c7d8fc43393e..a34c1f5590a0 100644
---
a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala
+++
b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala
@@ -24,6 +24,7 @@ import javax.security.auth.login.Configuration
import com.github.dockerjava.api.model.{AccessMode, Bind, ContainerConfig,
HostConfig, Volume}
import org.apache.hadoop.security.{SecurityUtil, UserGroupInformation}
import
org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod.KERBEROS
+import org.scalatest.Ignore
import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
import
org.apache.spark.sql.execution.datasources.jdbc.connection.{DB2ConnectionProvider,
SecureConnectionProvider}
@@ -37,6 +38,7 @@ import org.apache.spark.tags.DockerTest
* "docker-integration-tests/testOnly *DB2KrbIntegrationSuite"
* }}}
*/
+@Ignore // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
@DockerTest
class DB2KrbIntegrationSuite extends DockerKrbJDBCIntegrationSuite {
override protected val userName = s"db2/$dockerIp"
diff --git
a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala
b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala
index ecc02f705178..e0b937a61fec 100644
---
a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala
+++
b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala
@@ -20,12 +20,13 @@ package org.apache.spark.sql.jdbc.v2
import java.sql.Connection
import java.util.Locale
+import org.scalatest.Ignore
+
import org.apache.spark.SparkConf
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
import org.apache.spark.sql.jdbc.DB2DatabaseOnDocker
import org.apache.spark.sql.types._
-import org.apache.spark.tags.DockerTest
/**
* To run this test suite for a specific version (e.g.,
icr.io/db2_community/db2:11.5.9.0):
@@ -34,7 +35,7 @@ import org.apache.spark.tags.DockerTest
* ./build/sbt -Pdocker-integration-tests "testOnly
*v2.DB2IntegrationSuite"
* }}}
*/
-@DockerTest
+@Ignore // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
class DB2IntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTest
{
// Following tests are disabled for both single and multiple partition read
diff --git
a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2NamespaceSuite.scala
b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2NamespaceSuite.scala
index 385039fb6bd5..f932727377de 100644
---
a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2NamespaceSuite.scala
+++
b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2NamespaceSuite.scala
@@ -21,6 +21,8 @@ import java.sql.Connection
import scala.jdk.CollectionConverters._
+import org.scalatest.Ignore
+
import org.apache.spark.sql.jdbc.{DB2DatabaseOnDocker,
DockerJDBCIntegrationSuite}
import org.apache.spark.sql.util.CaseInsensitiveStringMap
import org.apache.spark.tags.DockerTest
@@ -32,6 +34,7 @@ import org.apache.spark.tags.DockerTest
* ./build/sbt -Pdocker-integration-tests "testOnly *v2.DB2NamespaceSuite"
* }}}
*/
+@Ignore // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
@DockerTest
class DB2NamespaceSuite extends DockerJDBCIntegrationSuite with
V2JDBCNamespaceTest {
override val db = new DB2DatabaseOnDocker
@@ -39,7 +42,8 @@ class DB2NamespaceSuite extends DockerJDBCIntegrationSuite
with V2JDBCNamespaceT
Map("url" -> db.getJdbcUrl(dockerIp, externalPort),
"driver" -> "com.ibm.db2.jcc.DB2Driver").asJava)
- catalog.initialize("db2", map)
+ // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
+ // catalog.initialize("db2", map)
override def dataPreparation(conn: Connection): Unit = {}
diff --git a/pom.xml b/pom.xml
index 0c46c13871f1..b7b43ab03c8e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1357,12 +1357,13 @@
<version>${postgresql.version}</version>
<scope>test</scope>
</dependency>
- <dependency>
+ <!-- TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests -->
+ <!--dependency>
<groupId>com.ibm.db2</groupId>
<artifactId>jcc</artifactId>
<version>${db2.jcc.version}</version>
<scope>test</scope>
- </dependency>
+ </dependency-->
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>mssql-jdbc</artifactId>
diff --git a/sql/core/pom.xml b/sql/core/pom.xml
index bddc7790594e..18956310a082 100644
--- a/sql/core/pom.xml
+++ b/sql/core/pom.xml
@@ -206,11 +206,12 @@
<artifactId>postgresql</artifactId>
<scope>test</scope>
</dependency>
- <dependency>
+ <!-- TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests -->
+ <!--dependency>
<groupId>com.ibm.db2</groupId>
<artifactId>jcc</artifactId>
<scope>test</scope>
- </dependency>
+ </dependency-->
<dependency>
<groupId>com.ibm.icu</groupId>
<artifactId>icu4j</artifactId>
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuite.scala
index 0d7b133f0e15..b2e788b52109 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuite.scala
@@ -185,28 +185,28 @@ class ConnectionProviderSuite
val postgresDriver = registerDriver(postgresProvider.driverClass)
val postgresOptions = options("jdbc:postgresql://localhost/postgres")
val postgresAppEntry = postgresProvider.appEntry(postgresDriver,
postgresOptions)
- val db2Provider = new DB2ConnectionProvider()
- val db2Driver = registerDriver(db2Provider.driverClass)
- val db2Options = options("jdbc:db2://localhost/db2")
- val db2AppEntry = db2Provider.appEntry(db2Driver, db2Options)
+ val mysqlProvider = new MariaDBConnectionProvider()
+ val mysqlDriver = registerDriver(mysqlProvider.driverClass)
+ val mysqlOptions = options("jdbc:mysql://localhost/db")
+ val mysqlAppEntry = mysqlProvider.appEntry(mysqlDriver, mysqlOptions)
// Make sure no authentication for the databases are set
val rootConfig = Configuration.getConfiguration
assert(rootConfig.getAppConfigurationEntry(postgresAppEntry) == null)
- assert(rootConfig.getAppConfigurationEntry(db2AppEntry) == null)
+ assert(rootConfig.getAppConfigurationEntry(mysqlAppEntry) == null)
postgresProvider.setAuthenticationConfig(postgresDriver, postgresOptions)
val postgresConfig = Configuration.getConfiguration
- db2Provider.setAuthenticationConfig(db2Driver, db2Options)
- val db2Config = Configuration.getConfiguration
+ mysqlProvider.setAuthenticationConfig(mysqlDriver, mysqlOptions)
+ val mysqlConfig = Configuration.getConfiguration
// Make sure authentication for the databases are set
assert(rootConfig != postgresConfig)
- assert(rootConfig != db2Config)
+ assert(rootConfig != mysqlConfig)
// The topmost config in the chain is linked with all the subsequent
entries
- assert(db2Config.getAppConfigurationEntry(postgresAppEntry) != null)
- assert(db2Config.getAppConfigurationEntry(db2AppEntry) != null)
+ assert(mysqlConfig.getAppConfigurationEntry(postgresAppEntry) != null)
+ assert(mysqlConfig.getAppConfigurationEntry(mysqlAppEntry) != null)
Configuration.setConfiguration(null)
}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/DB2ConnectionProviderSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/DB2ConnectionProviderSuite.scala
index 895b3d85d960..f29a1b1e7ab6 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/DB2ConnectionProviderSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/DB2ConnectionProviderSuite.scala
@@ -17,6 +17,9 @@
package org.apache.spark.sql.execution.datasources.jdbc.connection
+import org.scalatest.Ignore
+
+@Ignore // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
class DB2ConnectionProviderSuite extends ConnectionProviderSuiteBase {
test("setAuthenticationConfig must set authentication all the time") {
val provider = new DB2ConnectionProvider()
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index e38648e9468c..3268d532a34c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -1088,7 +1088,8 @@ class JDBCSuite extends QueryTest with SharedSparkSession
{
"SELECT TOP (123) a,b FROM test")
}
- test("SPARK-42534: DB2Dialect Limit query test") {
+ // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
+ ignore("SPARK-42534: DB2Dialect Limit query test") {
// JDBC url is a required option but is not used in this test.
val options = new JDBCOptions(Map("url" -> "jdbc:db2://host:port",
"dbtable" -> "test"))
assert(
@@ -2261,7 +2262,9 @@ class JDBCSuite extends QueryTest with SharedSparkSession
{
}
// not supported
Seq(
- "jdbc:db2://host:port", "jdbc:derby:memory", "jdbc:h2://host:port",
+ // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
+ // "jdbc:db2://host:port",
+ "jdbc:derby:memory", "jdbc:h2://host:port",
"jdbc:sqlserver://host:port", "jdbc:postgresql://host:5432/postgres",
"jdbc:snowflake://host:443?account=test",
"jdbc:teradata://host:port").foreach { url =>
val options = new JDBCOptions(baseParameters + ("url" -> url))
@@ -2282,7 +2285,8 @@ class JDBCSuite extends QueryTest with SharedSparkSession
{
"jdbc:mysql",
"jdbc:postgresql",
"jdbc:sqlserver",
- "jdbc:db2",
+ // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
+ // "jdbc:db2",
"jdbc:h2",
"jdbc:teradata",
"jdbc:databricks"
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]