diff --git a/gluten-substrait/src/main/scala/org/apache/gluten/execution/WriteFilesExecTransformer.scala b/gluten-substrait/src/main/scala/org/apache/gluten/execution/WriteFilesExecTransformer.scala
index ec6edbb3c686..45dccb7cec34 100644
--- a/gluten-substrait/src/main/scala/org/apache/gluten/execution/WriteFilesExecTransformer.scala
+++ b/gluten-substrait/src/main/scala/org/apache/gluten/execution/WriteFilesExecTransformer.scala
@@ -21,6 +21,7 @@ import org.apache.gluten.backendsapi.BackendsApiManager
 import org.apache.gluten.expression.ConverterUtils
 import org.apache.gluten.extension.ValidationResult
 import org.apache.gluten.metrics.MetricsUpdater
+import org.apache.gluten.planner.plan.GlutenPlanModel.GroupLeafExec
 import org.apache.gluten.substrait.`type`.ColumnTypeNode
 import org.apache.gluten.substrait.SubstraitContext
 import org.apache.gluten.substrait.extensions.ExtensionBuilder
@@ -30,6 +31,7 @@ import org.apache.gluten.utils.SubstraitUtil
 import org.apache.spark.sql.catalyst.catalog.BucketSpec
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression, Literal}
+import org.apache.spark.sql.catalyst.plans.logical.Project
 import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
 import org.apache.spark.sql.execution.{ProjectExec, SparkPlan}
 import org.apache.spark.sql.execution.datasources.FileFormat
@@ -142,6 +144,12 @@ case class WriteFilesExecTransformer(
         t.projectList.exists(isConstantComplexType)
       case p: ProjectExec =>
         p.projectList.exists(isConstantComplexType)
+      case g: GroupLeafExec => // support the ras
+        g.metadata.logicalLink().plan.exists {
+          case p: Project =>
+            p.projectList.exists(isConstantComplexType)
+          case _ => false
+        }
       case _ => false
     }
     // TODO: currently the velox don't support parquet write with complex data type