diff --git a/connector/docker-integration-tests/pom.xml b/connector/docker-integration-tests/pom.xml
index e060de5c17f44..3c2234857814c 100644
--- a/connector/docker-integration-tests/pom.xml
+++ b/connector/docker-integration-tests/pom.xml
@@ -100,11 +100,12 @@ ojdbc17 test - + + com.microsoft.sqlserver mssql-jdbc
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala
index 6a489ffb2d42e..39aef6d7c90b3 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2IntegrationSuite.scala
@@ -21,12 +21,13 @@ import java.math.BigDecimal
 import java.sql.{Connection, Date, Timestamp}
 import java.util.Properties

+import org.scalatest.Ignore
+
 import org.apache.spark.sql.{Row, SaveMode}
 import org.apache.spark.sql.catalyst.util.CharVarcharUtils
 import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.{ByteType, ShortType, StructType}
-import org.apache.spark.tags.DockerTest

 /**
  * To run this test suite for a specific version (e.g., icr.io/db2_community/db2:11.5.9.0):
@@ -36,7 +37,7 @@
  * "docker-integration-tests/testOnly org.apache.spark.sql.jdbc.DB2IntegrationSuite"
  * }}}
  */
-@DockerTest
+@Ignore // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
 class DB2IntegrationSuite extends SharedJDBCIntegrationSuite {
   override val db = new DB2DatabaseOnDocker

diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala
index c7d8fc43393ed..a34c1f5590a0d 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala
@@ -24,6 +24,7 @@ import javax.security.auth.login.Configuration
 import com.github.dockerjava.api.model.{AccessMode, Bind, ContainerConfig, HostConfig, Volume}
 import org.apache.hadoop.security.{SecurityUtil, UserGroupInformation}
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod.KERBEROS
+import org.scalatest.Ignore

 import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
 import org.apache.spark.sql.execution.datasources.jdbc.connection.{DB2ConnectionProvider, SecureConnectionProvider}
@@ -37,6 +38,7 @@ import org.apache.spark.tags.DockerTest
  * "docker-integration-tests/testOnly *DB2KrbIntegrationSuite"
  * }}}
  */
+@Ignore // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
 @DockerTest
 class DB2KrbIntegrationSuite extends DockerKrbJDBCIntegrationSuite {
   override protected val userName = s"db2/$dockerIp"
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala
index ecc02f7051787..e0b937a61fecb 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2IntegrationSuite.scala
@@ -20,12 +20,13 @@ package org.apache.spark.sql.jdbc.v2
 import java.sql.Connection
 import java.util.Locale

+import org.scalatest.Ignore
+
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
 import org.apache.spark.sql.jdbc.DB2DatabaseOnDocker
 import org.apache.spark.sql.types._
-import org.apache.spark.tags.DockerTest

 /**
  * To run this test suite for a specific version (e.g., icr.io/db2_community/db2:11.5.9.0):
@@ -34,7 +35,7 @@ import org.apache.spark.tags.DockerTest
  * ./build/sbt -Pdocker-integration-tests "testOnly *v2.DB2IntegrationSuite"
  * }}}
  */
-@DockerTest
+@Ignore // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
 class DB2IntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTest {

   // Following tests are disabled for both single and multiple partition read
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2NamespaceSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2NamespaceSuite.scala
index 385039fb6bd51..f932727377de0 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2NamespaceSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2NamespaceSuite.scala
@@ -21,6 +21,8 @@ import java.sql.Connection

 import scala.jdk.CollectionConverters._

+import org.scalatest.Ignore
+
 import org.apache.spark.sql.jdbc.{DB2DatabaseOnDocker, DockerJDBCIntegrationSuite}
 import org.apache.spark.sql.util.CaseInsensitiveStringMap
 import org.apache.spark.tags.DockerTest
@@ -32,6 +34,7 @@ import org.apache.spark.tags.DockerTest
  * ./build/sbt -Pdocker-integration-tests "testOnly *v2.DB2NamespaceSuite"
  * }}}
  */
+@Ignore // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
 @DockerTest
 class DB2NamespaceSuite extends DockerJDBCIntegrationSuite with V2JDBCNamespaceTest {
   override val db = new DB2DatabaseOnDocker
@@ -39,7 +42,8 @@ class DB2NamespaceSuite extends DockerJDBCIntegrationSuite with V2JDBCNamespaceT
     Map("url" -> db.getJdbcUrl(dockerIp, externalPort),
       "driver" -> "com.ibm.db2.jcc.DB2Driver").asJava)

-  catalog.initialize("db2", map)
+  // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
+  // catalog.initialize("db2", map)

   override def dataPreparation(conn: Connection): Unit = {}

diff --git a/pom.xml b/pom.xml
index 0c46c13871f15..b7b43ab03c8e8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1357,12 +1357,13 @@ ${postgresql.version} test - + + com.microsoft.sqlserver mssql-jdbc
diff --git a/sql/core/pom.xml b/sql/core/pom.xml
index bddc7790594e2..18956310a0828 100644
--- a/sql/core/pom.xml
+++ b/sql/core/pom.xml
@@ -206,11 +206,12 @@ postgresql test - + + com.ibm.icu icu4j
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuite.scala
index 0d7b133f0e15e..b2e788b52109e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuite.scala
@@ -185,28 +185,28 @@ class ConnectionProviderSuite
     val postgresDriver = registerDriver(postgresProvider.driverClass)
     val postgresOptions = options("jdbc:postgresql://localhost/postgres")
     val postgresAppEntry = postgresProvider.appEntry(postgresDriver, postgresOptions)
-    val db2Provider = new DB2ConnectionProvider()
-    val db2Driver = registerDriver(db2Provider.driverClass)
-    val db2Options = options("jdbc:db2://localhost/db2")
-    val db2AppEntry = db2Provider.appEntry(db2Driver, db2Options)
+    val mysqlProvider = new MariaDBConnectionProvider()
+    val mysqlDriver = registerDriver(mysqlProvider.driverClass)
+    val mysqlOptions = options("jdbc:mysql://localhost/db")
+    val mysqlAppEntry = mysqlProvider.appEntry(mysqlDriver, mysqlOptions)

     // Make sure no authentication for the databases are set
     val rootConfig = Configuration.getConfiguration
     assert(rootConfig.getAppConfigurationEntry(postgresAppEntry) == null)
-    assert(rootConfig.getAppConfigurationEntry(db2AppEntry) == null)
+    assert(rootConfig.getAppConfigurationEntry(mysqlAppEntry) == null)

     postgresProvider.setAuthenticationConfig(postgresDriver, postgresOptions)
     val postgresConfig = Configuration.getConfiguration
-    db2Provider.setAuthenticationConfig(db2Driver, db2Options)
-    val db2Config = Configuration.getConfiguration
+    mysqlProvider.setAuthenticationConfig(mysqlDriver, mysqlOptions)
+    val mysqlConfig = Configuration.getConfiguration

     // Make sure authentication for the databases are set
     assert(rootConfig != postgresConfig)
-    assert(rootConfig != db2Config)
+    assert(rootConfig != mysqlConfig)

     // The topmost config in the chain is linked with all the subsequent entries
-    assert(db2Config.getAppConfigurationEntry(postgresAppEntry) != null)
-    assert(db2Config.getAppConfigurationEntry(db2AppEntry) != null)
+    assert(mysqlConfig.getAppConfigurationEntry(postgresAppEntry) != null)
+    assert(mysqlConfig.getAppConfigurationEntry(mysqlAppEntry) != null)

     Configuration.setConfiguration(null)
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/DB2ConnectionProviderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/DB2ConnectionProviderSuite.scala
index 895b3d85d960b..f29a1b1e7ab62 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/DB2ConnectionProviderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/DB2ConnectionProviderSuite.scala
@@ -17,6 +17,9 @@

 package org.apache.spark.sql.execution.datasources.jdbc.connection

+import org.scalatest.Ignore
+
+@Ignore // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
 class DB2ConnectionProviderSuite extends ConnectionProviderSuiteBase {
   test("setAuthenticationConfig must set authentication all the time") {
     val provider = new DB2ConnectionProvider()
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index e38648e9468ca..3268d532a34cd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -1088,7 +1088,8 @@ class JDBCSuite extends QueryTest with SharedSparkSession {
       "SELECT TOP (123) a,b FROM test")
   }

-  test("SPARK-42534: DB2Dialect Limit query test") {
+  // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
+  ignore("SPARK-42534: DB2Dialect Limit query test") {
     // JDBC url is a required option but is not used in this test.
     val options = new JDBCOptions(Map("url" -> "jdbc:db2://host:port", "dbtable" -> "test"))
     assert(
@@ -2261,7 +2262,9 @@ class JDBCSuite extends QueryTest with SharedSparkSession {
     }
     // not supported
     Seq(
-      "jdbc:db2://host:port", "jdbc:derby:memory", "jdbc:h2://host:port",
+      // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
+      // "jdbc:db2://host:port",
+      "jdbc:derby:memory", "jdbc:h2://host:port",
       "jdbc:sqlserver://host:port", "jdbc:postgresql://host:5432/postgres",
       "jdbc:snowflake://host:443?account=test", "jdbc:teradata://host:port").foreach { url =>
       val options = new JDBCOptions(baseParameters + ("url" -> url))
@@ -2282,7 +2285,8 @@ class JDBCSuite extends QueryTest with SharedSparkSession {
       "jdbc:mysql",
       "jdbc:postgresql",
       "jdbc:sqlserver",
-      "jdbc:db2",
+      // TODO(SPARK-55707): Re-enable DB2 JDBC Driver tests
+      // "jdbc:db2",
       "jdbc:h2",
       "jdbc:teradata",
       "jdbc:databricks"
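
Note: the patch disables the DB2 coverage with two standard ScalaTest mechanisms, a class-level org.scalatest.Ignore annotation on the docker suites and a switch from test(...) to ignore(...) for the single DB2Dialect case in JDBCSuite. The sketch below is not part of the patch; it only illustrates how those two mechanisms behave. The suite names are hypothetical, and it extends plain AnyFunSuite rather than Spark's own base suites.

import org.scalatest.Ignore
import org.scalatest.funsuite.AnyFunSuite

// Class-level @Ignore: every test in the suite is compiled and reported as
// ignored, but none of them run. This mirrors how the DB2 docker suites
// above are disabled wholesale.
@Ignore
class ExampleDb2SuiteDisabled extends AnyFunSuite {
  test("never runs") {
    assert(1 + 1 == 2)
  }
}

// Per-test ignore(...): only the named case is skipped, mirroring the
// JDBCSuite change from test(...) to ignore(...) above.
class ExampleDialectSuite extends AnyFunSuite {
  ignore("DB2-specific case, skipped for now") {
    assert("SELECT 1".nonEmpty)
  }

  test("still runs") {
    assert(2 + 2 == 4)
  }
}

Because both forms keep the test bodies compiling, re-enabling under SPARK-55707 should only require dropping the @Ignore annotations and switching ignore(...) back to test(...), per the TODO markers throughout the patch.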