diff --git a/backends-velox/src/test/scala/io/glutenproject/benchmarks/NativeBenchmarkPlanGenerator.scala b/backends-velox/src/test/scala/io/glutenproject/benchmarks/NativeBenchmarkPlanGenerator.scala
index b5d59d62a854..875ce3034bd5 100644
--- a/backends-velox/src/test/scala/io/glutenproject/benchmarks/NativeBenchmarkPlanGenerator.scala
+++ b/backends-velox/src/test/scala/io/glutenproject/benchmarks/NativeBenchmarkPlanGenerator.scala
@@ -16,6 +16,7 @@
  */
 package io.glutenproject.benchmarks
 
+import io.glutenproject.GlutenConfig
 import io.glutenproject.execution.{VeloxWholeStageTransformerSuite, WholeStageTransformer}
 
 import org.apache.spark.sql.execution.adaptive.{AdaptiveSparkPlanExec, ShuffleQueryStageExec}
@@ -50,8 +51,9 @@ class NativeBenchmarkPlanGenerator extends VeloxWholeStageTransformerSuite {
   }
 
   test("Test plan json non-empty - AQE off") {
-    spark.sparkContext.setLogLevel("DEBUG")
-    withSQLConf(SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "false") {
+    withSQLConf(
+      SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "false",
+      GlutenConfig.CACHE_WHOLE_STAGE_TRANSFORMER_CONTEXT.key -> "true") {
       val df = spark
         .sql("""
                |select * from lineitem
@@ -69,8 +71,9 @@ class NativeBenchmarkPlanGenerator extends VeloxWholeStageTransformerSuite {
   }
 
   test("Test plan json non-empty - AQE on") {
-    spark.sparkContext.setLogLevel("DEBUG")
-    withSQLConf(SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "true") {
+    withSQLConf(
+      SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "true",
+      GlutenConfig.CACHE_WHOLE_STAGE_TRANSFORMER_CONTEXT.key -> "true") {
       val df = spark
         .sql("""
                |select * from lineitem join orders on l_orderkey = o_orderkey
@@ -90,10 +93,10 @@ class NativeBenchmarkPlanGenerator extends VeloxWholeStageTransformerSuite {
 
   test("generate example", GenerateExample) {
     import testImplicits._
-    spark.sparkContext.setLogLevel("DEBUG")
     withSQLConf(
       SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1",
-      SQLConf.SHUFFLE_PARTITIONS.key -> "2"
+      SQLConf.SHUFFLE_PARTITIONS.key -> "2",
+      GlutenConfig.CACHE_WHOLE_STAGE_TRANSFORMER_CONTEXT.key -> "true"
     ) {
       val q4_lineitem = spark
         .sql(s"""