Remove SupportFormat
liujiayi771 committed Nov 29, 2023
1 parent e7e2f42 commit 4a597b9
Showing 3 changed files with 6 additions and 32 deletions.
@@ -16,12 +16,10 @@
*/
package io.glutenproject.execution

-import io.glutenproject.substrait.SupportFormat

import org.apache.spark.sql.connector.read.InputPartition
import org.apache.spark.sql.types.StructType

-trait BaseDataSource extends SupportFormat {
+trait BaseDataSource {

/** Returns the actual schema of this data source scan. */
def getDataSchema: StructType
@@ -27,18 +27,21 @@ import io.glutenproject.substrait.rel.{ReadRelNode, RelBuilder, SplitInfo}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.expressions.{And, Attribute, Expression}
import org.apache.spark.sql.vectorized.ColumnarBatch

import com.google.common.collect.Lists

import scala.collection.JavaConverters._

import io.glutenproject.substrait.rel.LocalFilesNode.ReadFileFormat

trait BasicScanExecTransformer extends LeafTransformSupport with BaseDataSource {

/** Returns the filters that can be pushed down to native file scan */
def filterExprs(): Seq[Expression]

def outputAttributes(): Seq[Attribute]

+/** This can be used to report FileFormat for a file based scan operator. */
+val fileFormat: ReadFileFormat

// TODO: Remove this expensive call when CH support scan custom partition location.
def getInputFilePaths: Seq[String] = {
// This is a heavy operation, and only the required backend executes the corresponding logic.
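For context, here is a minimal, self-contained sketch of the shape this commit leaves behind, using simplified stand-in types (String in place of StructType and Expression, a Scala Enumeration in place of the real ReadFileFormat enum) rather than the actual Gluten classes: the format member now sits on the scan transformer trait itself instead of coming from a separate SupportFormat mix-in.

// Simplified stand-ins for illustration only; the real traits live in
// io.glutenproject.execution and carry many more members.
object ReadFileFormat extends Enumeration {
  val ParquetReadFormat, OrcReadFormat, UnknownFormat = Value
}

trait BaseDataSource {
  // No longer mixes in a separate SupportFormat trait.
  def getDataSchema: String // stand-in for StructType
}

trait BasicScanExecTransformer extends BaseDataSource {
  def filterExprs(): Seq[String] // stand-in for Seq[Expression]

  // Declared directly on the transformer after this commit.
  val fileFormat: ReadFileFormat.Value
}

// A concrete scan reports its format by overriding the member directly:
class ParquetScanStandIn extends BasicScanExecTransformer {
  override val fileFormat: ReadFileFormat.Value = ReadFileFormat.ParquetReadFormat
  override def filterExprs(): Seq[String] = Seq.empty
  override def getDataSchema: String = "id INT, name STRING"
}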

This file was deleted.
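The deleted file's path is not shown here, but the removed import io.glutenproject.substrait.SupportFormat and the commit title indicate it defined the SupportFormat trait. A plausible reconstruction of its core, assuming it declared only the fileFormat member that this commit inlines into BasicScanExecTransformer (license header omitted):

// Presumed shape of the deleted trait; a reconstruction, not the original source.
package io.glutenproject.substrait

import io.glutenproject.substrait.rel.LocalFilesNode.ReadFileFormat

/** Formerly mixed into BaseDataSource to report the file format of a file based scan. */
trait SupportFormat {
  val fileFormat: ReadFileFormat
}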
