diff --git a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/ClientE2ETestSuite.scala b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/ClientE2ETestSuite.scala
index 0e8cb9348c7f8..0ac946a19e4ba 100644
--- a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/ClientE2ETestSuite.scala
+++ b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/ClientE2ETestSuite.scala
@@ -42,7 +42,7 @@ import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.connect.ConnectConversions._
 import org.apache.spark.sql.connect.client.{PlanCompressionOptions, RetryPolicy, SparkConnectClient, SparkResult}
-import org.apache.spark.sql.connect.test.{ConnectFunSuite, IntegrationTestUtils, QueryTest, RemoteSparkSession, SQLHelper}
+import org.apache.spark.sql.connect.test.{ConnectFunSuite, IntegrationTestUtils, QueryTest, RemoteSparkSession}
 import org.apache.spark.sql.connect.test.SparkConnectServerUtils.{createSparkSession, port}
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.internal.SqlApiConf
@@ -53,7 +53,6 @@ class ClientE2ETestSuite
     extends QueryTest
     with ConnectFunSuite
     with RemoteSparkSession
-    with SQLHelper
     with PrivateMethodTester {
 
   test("throw SparkException with null filename in stack trace elements") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
index 291aa7cab7256..18e49976f1f57 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
@@ -714,7 +714,7 @@ trait QueryTestBase
  * Subclasses should *not* create `SparkSession`s in the test suite constructor, which is
  * prone to leaving multiple overlapping [[org.apache.spark.SparkContext]]s in the same JVM.
  */
-trait QueryTest extends SparkFunSuite with QueryTestBase with PlanTest {
+trait QueryTest extends SparkFunSuite with QueryTestBase {
 
   /**
    * Creates a temporary directory, which is then passed to `f` and will be deleted after `f`
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
index 389d0d5a29d59..7f4853506150b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
@@ -25,7 +25,6 @@ import java.util.Locale
 import org.apache.spark.{SparkConf, TestUtils}
 import org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator
 import org.apache.spark.sql.catalyst.parser.ParseException
-import org.apache.spark.sql.catalyst.plans.SQLHelper
 import org.apache.spark.sql.catalyst.plans.logical.{Command, LogicalPlan}
 import org.apache.spark.sql.catalyst.rules.RuleExecutor
 import org.apache.spark.sql.catalyst.util.DateTimeConstants.NANOS_PER_SECOND
@@ -153,7 +152,7 @@ import org.apache.spark.util.Utils
  */
 // scalastyle:on line.size.limit
 @ExtendedSQLTest
-class SQLQueryTestSuite extends SharedSparkSession with SQLHelper
+class SQLQueryTestSuite extends SharedSparkSession
   with SQLQueryTestHelper with TPCDSSchema {
 
   import IntegratedUDFTestUtils._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
index ebe6d8858a7e3..d2f7aaaae7b68 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
@@ -32,7 +32,7 @@ import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateExpression, Final, Max, Partial}
 import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, ParserInterface}
-import org.apache.spark.sql.catalyst.plans.{PlanTest, SQLHelper}
+import org.apache.spark.sql.catalyst.plans.PlanTest
 import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, AggregateHint, ColumnStat, Limit, LocalRelation, LogicalPlan, Project, Range, Sort, SortHint, Statistics, UnresolvedHint}
 import org.apache.spark.sql.catalyst.plans.physical.{BroadcastMode, Partitioning, SinglePartition}
 import org.apache.spark.sql.catalyst.rules.Rule
@@ -58,7 +58,7 @@ import org.apache.spark.unsafe.types.UTF8String
 /**
  * Test cases for the [[SparkSessionExtensions]].
  */
-class SparkSessionExtensionSuite extends PlanTest with SQLHelper with AdaptiveSparkPlanHelper {
+class SparkSessionExtensionSuite extends PlanTest with AdaptiveSparkPlanHelper {
   private def create(
       builder: SparkSessionExtensionsProvider): Seq[SparkSessionExtensionsProvider] = Seq(builder)
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
index bfd562086efdc..450dfca25ff42 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
@@ -20,13 +20,12 @@ package org.apache.spark.sql.errors
 import org.apache.spark.SparkThrowable
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.parser.ParseException
-import org.apache.spark.sql.catalyst.plans.SQLHelper
 import org.apache.spark.sql.catalyst.util.TypeUtils.toSQLId
 import org.apache.spark.sql.test.SharedSparkSession
 
 // Turn of the length check because most of the tests check entire error messages
 // scalastyle:off line.size.limit
-class QueryParsingErrorsSuite extends SharedSparkSession with SQLHelper {
+class QueryParsingErrorsSuite extends SharedSparkSession {
 
   private def parseException(sqlText: String): SparkThrowable = {
     intercept[ParseException](sql(sqlText).collect())
diff --git a/sql/core/src/test/scala/org/apache/spark/status/api/v1/sql/SqlResourceWithActualMetricsSuite.scala b/sql/core/src/test/scala/org/apache/spark/status/api/v1/sql/SqlResourceWithActualMetricsSuite.scala
index e375678157b2f..6cae4fb30668e 100644
--- a/sql/core/src/test/scala/org/apache/spark/status/api/v1/sql/SqlResourceWithActualMetricsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/status/api/v1/sql/SqlResourceWithActualMetricsSuite.scala
@@ -30,7 +30,6 @@ import org.apache.spark.SparkConf
 import org.apache.spark.deploy.history.HistoryServerSuite.getContentAndCode
 import org.apache.spark.sql.DataFrame
 import org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
-import org.apache.spark.sql.catalyst.plans.SQLHelper
 import org.apache.spark.sql.execution.metric.SQLMetricsTestUtils
 import org.apache.spark.sql.internal.SQLConf.ADAPTIVE_EXECUTION_ENABLED
 import org.apache.spark.sql.test.SharedSparkSession
@@ -42,7 +41,7 @@ case class Salary(personId: Int, salary: Double)
  * Sql Resource Public API Unit Tests running query and extracting the metrics.
  */
 class SqlResourceWithActualMetricsSuite
-  extends SharedSparkSession with SQLMetricsTestUtils {
+  extends SharedSparkSession with SQLMetricsTestUtils {
 
   import testImplicits._
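
Every hunk above applies the same refactoring: a suite stops mixing in SQLHelper (or PlanTest) explicitly because the trait is already inherited through another parent, so the extra mixin and its import are dead weight. Because Scala's trait linearization de-duplicates parents, dropping the repeated mixin cannot change the behavior of the compiled suite. A minimal sketch of that situation, using hypothetical trait names rather than Spark's real test hierarchy:

// Minimal sketch (not Spark's actual test hierarchy): the trait and suite names
// below are hypothetical, chosen only to illustrate why the removed mixins were redundant.
trait SqlHelperLike {
  // Stand-in for SQLHelper.withSQLConf: runs the body (config handling omitted).
  def withSQLConf(pairs: (String, String)*)(body: => Unit): Unit = body
}

// Plays the role of QueryTest / PlanTest / SharedSparkSession in the diff above:
// a base suite that already mixes the helper in.
trait BaseSuiteLike extends SqlHelperLike

// Before: the helper is listed again even though BaseSuiteLike already provides it.
class SuiteBefore extends BaseSuiteLike with SqlHelperLike

// After: dropping the explicit mixin changes nothing; trait linearization still
// includes SqlHelperLike via BaseSuiteLike, so withSQLConf remains available.
class SuiteAfter extends BaseSuiteLike {
  def demo(): Unit = withSQLConf("spark.sql.someConf" -> "true") {
    // test body would go here
  }
}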