@@ -42,7 +42,6 @@ import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.PlanTest
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.util._
-import org.apache.spark.sql.classic.{ClassicConversions, ColumnConversions, ColumnNodeToExpressionConverter, SQLImplicits}
 import org.apache.spark.sql.classic.ClassicConversions._
 import org.apache.spark.sql.execution.{FilterExec, QueryExecution, SQLExecution}
 import org.apache.spark.sql.execution.adaptive.DisableAdaptiveExecution
@@ -77,23 +76,6 @@ trait QueryTestBase extends PlanTestBase
   // Shorthand for running a query using our SparkSession
   protected lazy val sql: String => DataFrame = spark.sql _
 
-  /**
-   * A helper object for importing SQL implicits.
-   *
-   * Note that the alternative of importing `spark.implicits._` is not possible here.
-   * This is because we create the `SparkSession` immediately before the first test is run,
-   * but the implicits import is needed in the constructor.
-   */
-  protected object testImplicits
-    extends SQLImplicits
-    with ClassicConversions
-    with ColumnConversions {
-    private def classicSession: classic.SparkSession =
-      self.spark.asInstanceOf[classic.SparkSession]
-    override protected def session: classic.SparkSession = classicSession
-    override protected def converter: ColumnNodeToExpressionConverter = classicSession.converter
-  }
-
   protected override def withSQLConf[T](pairs: (String, String)*)(f: => T): T = {
     SparkSession.setActiveSession(spark)
     super.withSQLConf(pairs: _*)(f)
0 commit comments