Undo from spark-2.X
odeke-em committed Jul 29, 2023
1 parent 05a94db · commit 47456df
Showing 8 changed files with 17 additions and 73 deletions.

This commit also deletes two files; their contents are collapsed in this view.

```diff
@@ -28,6 +28,6 @@ public boolean next() {
   @Override
   public T get() {
     // TODO: Fill me in.
-    return true;
+    return null;
   }
 }
```
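This hunk fixes a type error in what looks like a `PartitionReader` skeleton: `get()` is declared to return the generic type `T`, so `return true;` would not compile, while `null` is a legal placeholder. For orientation, here is a minimal sketch of the Spark 3 interface this stub appears to implement; the class name and comments are illustrative, not from the commit:

```java
import java.io.IOException;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.connector.read.PartitionReader;

// Illustrative stub only: Spark drives a PartitionReader by calling next()
// until it returns false, calling get() after each successful next().
public class ExamplePartitionReader implements PartitionReader<InternalRow> {
  @Override
  public boolean next() throws IOException {
    return false; // a real reader would advance a Spanner ResultSet here
  }

  @Override
  public InternalRow get() {
    return null; // only invoked after next() has returned true
  }

  @Override
  public void close() throws IOException {
    // release the underlying Spanner read context here
  }
}
```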
```diff
@@ -17,9 +17,7 @@
 import java.io.IOException;
 import org.apache.spark.sql.catalyst.InternalRow;
 import org.apache.spark.sql.connector.read.Batch;
-import org.apache.spark.sql.connector.read.ColumnarBatch;
 import org.apache.spark.sql.connector.read.InputPartition;
-import org.apache.spark.sql.connector.read.Partition;
 import org.apache.spark.sql.connector.read.PartitionReader;
 import org.apache.spark.sql.connector.read.Scan;
 import org.apache.spark.sql.types.StructType;
@@ -43,7 +41,6 @@ public void close() throws IOException {
     // TODO: Fill me in.
   }
 
   @Override
   public boolean supportsColumnarReads(InputPartition partition) {
     // TODO: Fill me in.
     return false;
```
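The two dropped imports do not resolve against the Spark 3 connector API: `ColumnarBatch` lives in `org.apache.spark.sql.vectorized`, and `org.apache.spark.sql.connector.read` has no `Partition` class. The `supportsColumnarReads(InputPartition)` method is declared on `PartitionReaderFactory`, where it already defaults to `false`, so a row-based reader can keep or omit the explicit override. A hypothetical factory wired to the reader stub sketched above (names are illustrative):

```java
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.connector.read.InputPartition;
import org.apache.spark.sql.connector.read.PartitionReader;
import org.apache.spark.sql.connector.read.PartitionReaderFactory;

// Illustrative factory reusing the ExamplePartitionReader stub from above.
public class ExampleReaderFactory implements PartitionReaderFactory {
  @Override
  public PartitionReader<InternalRow> createReader(InputPartition partition) {
    return new ExamplePartitionReader();
  }

  @Override
  public boolean supportsColumnarReads(InputPartition partition) {
    return false; // row-oriented reads only; this matches the interface default
  }
}
```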
```diff
@@ -29,6 +29,7 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -42,17 +43,15 @@
 import org.apache.spark.sql.SparkSession;
 import org.apache.spark.sql.connector.catalog.SupportsRead;
 import org.apache.spark.sql.connector.catalog.Table;
+import org.apache.spark.sql.connector.catalog.TableCapability;
 import org.apache.spark.sql.connector.catalog.TableProvider;
 import org.apache.spark.sql.connector.expressions.Transform;
 import org.apache.spark.sql.connector.read.PartitionReader;
 import org.apache.spark.sql.connector.read.ScanBuilder;
-import org.apache.spark.sql.sources.v2.DataSource;
-import org.apache.spark.sql.sources.v2.reader.DataSourceOptions;
-import org.apache.spark.sql.sources.v2.reader.DataSourceReader;
 import org.apache.spark.sql.types.StructType;
 import org.apache.spark.sql.util.CaseInsensitiveStringMap;
 
-public class SpannerSpark implements TableProvider, SupportsRead, ReadSupport {
+public class SpannerSpark implements TableProvider, SupportsRead {
   private BatchClient batchClient;
   private Map<String, String> properties;
 
@@ -195,16 +194,23 @@ public ScanBuilder newScanBuilder(CaseInsensitiveStringMap options) {
     return new SpannerScanBuilder(options);
   }
 
+  @Override
+  public Set<TableCapability> capabilities() {
+    return null;
+  }
+
   @Override
   public Map<String, String> properties() {
     return this.properties;
   }
 
-  /*
-   * The entry point to create a reader.
-   */
   @Override
-  public DataSourceReader createReader(DataSourceOptions options) {
-    return new SpannerDataSourceReader(options);
+  public String name() {
+    return "spanner";
   }
+
+  @Override
+  public StructType schema() {
+    return null;
+  }
 }
```
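This is the core of the "undo": `SpannerSpark` stops implementing the Spark 2.x `ReadSupport`/`DataSourceReader` API and fills in the Spark 3 `Table` surface instead. Two of the new methods still return `null`, though, and `capabilities()` is consulted while Spark plans a query, so a `null` there will fail with a `NullPointerException` as soon as the table is scanned; a readable table normally advertises at least `TableCapability.BATCH_READ`. A sketch of where these placeholders typically end up (illustrative class, not the commit's code):

```java
import java.util.Collections;
import java.util.Set;
import org.apache.spark.sql.connector.catalog.Table;
import org.apache.spark.sql.connector.catalog.TableCapability;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructType;

// Illustrative Table implementation, not the commit's code.
public class ExampleSpannerTable implements Table {
  @Override
  public String name() {
    return "spanner";
  }

  @Override
  public StructType schema() {
    // A real connector would derive this from the Spanner table's DDL.
    return new StructType().add("id", DataTypes.LongType);
  }

  @Override
  public Set<TableCapability> capabilities() {
    // Advertise batch reads instead of returning null.
    return Collections.singleton(TableCapability.BATCH_READ);
  }
}
```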
```diff
@@ -83,6 +83,7 @@ public StructType createSchema(String tableName, ResultSet rs) {
       }
     }
     this.tableSchema = schema;
+    return schema;
   }
 
   public static DataType ofSpannerStrType(String spannerStrType) {
```
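The added `return schema;` fixes a compile error: `createSchema` is declared to return `StructType` but previously fell off the end after assigning `this.tableSchema`. The neighboring `ofSpannerStrType(String)` suggests the schema is built by mapping Cloud Spanner type names onto Spark SQL types; a hypothetical version of that mapping follows (the repository's real implementation may differ, for example around sized types like `STRING(MAX)`):

```java
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;

// Hypothetical mapping from Cloud Spanner type names to Spark SQL types.
final class SpannerTypesSketch {
  static DataType ofSpannerStrType(String spannerStrType) {
    switch (spannerStrType) {
      case "BOOL":      return DataTypes.BooleanType;
      case "INT64":     return DataTypes.LongType;
      case "FLOAT64":   return DataTypes.DoubleType;
      case "STRING":    return DataTypes.StringType;
      case "BYTES":     return DataTypes.BinaryType;
      case "DATE":      return DataTypes.DateType;
      case "TIMESTAMP": return DataTypes.TimestampType;
      default:
        throw new IllegalArgumentException("unhandled Spanner type: " + spannerStrType);
    }
  }
}
```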
