Skip to content

Commit 38e3b96

Browse files
committed
Use spark.sessionState.artifactManager instead of spark.artifactManager
Avoids needing ClassicConversions import in DDLSuite. Co-authored-by: Isaac
1 parent afcdad2 commit 38e3b96

File tree

1 file changed

+1
-2
lines changed
  • sql/core/src/test/scala/org/apache/spark/sql/execution/command

1 file changed

+1
-2
lines changed

sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -32,7 +32,6 @@ import org.apache.spark.sql.catalyst.analysis.TempTableAlreadyExistsException
32   32     import org.apache.spark.sql.catalyst.catalog._
33   33     import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
34   34     import org.apache.spark.sql.catalyst.parser.ParseException
35        - import org.apache.spark.sql.classic.ClassicConversions._
36   35     import org.apache.spark.sql.connector.catalog.CatalogManager
37   36     import org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
38   37     import org.apache.spark.sql.connector.catalog.SupportsNamespaces.PROP_OWNER
@@ -2235,7 +2234,7 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
2235 2234     // TODO(SPARK-50244): ADD JAR is inside `sql()` thus isolated. This will break an existing Hive
2236 2235     // use case (one session adds JARs and another session uses them). After we sort out the Hive
2237 2236     // isolation issue we will decide if the next assert should be wrapped inside `withResources`.
2238        -     spark.artifactManager.withResources {
     2237  +     spark.sessionState.artifactManager.withResources {
2239 2238           assert(new File(SparkFiles.get(s"${directoryToAdd.getName}/${testFile.getName}")).exists())
2240 2239         }
2241 2240       }

0 commit comments

Comments (0)