
Commit

mvn spotless:appy
odeke-em committed Jul 29, 2023
1 parent 031bc63 commit 0704388
Showing 5 changed files with 21 additions and 29 deletions.
@@ -15,13 +15,8 @@
package com.google.cloud.spark.spanner;

import java.io.IOException;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.connector.read.Batch;
import org.apache.spark.sql.connector.read.InputPartition;
import org.apache.spark.sql.connector.read.PartitionReader;
import org.apache.spark.sql.connector.read.Scan;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;

public class SpannerPartitionReader<T> implements PartitionReader<T> {
@Override
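The class touched in this first file implements Spark's DataSource V2 `PartitionReader` interface, which is why the `java.io.IOException` and `PartitionReader` imports survive while the unused ones go away. For orientation, a minimal reader of that shape looks roughly like the sketch below; the real `SpannerPartitionReader` method bodies are not part of this diff, so the class name and bodies here are illustrative only.

```java
package com.google.cloud.spark.spanner;

import java.io.IOException;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.connector.read.PartitionReader;

// Illustrative sketch of a DataSource V2 partition reader; the actual
// SpannerPartitionReader implementation is not shown in this commit.
public class ExamplePartitionReader implements PartitionReader<InternalRow> {

  @Override
  public boolean next() throws IOException {
    // Advance to the next row of this partition; return false when exhausted.
    return false;
  }

  @Override
  public InternalRow get() {
    // Return the current row; only valid after next() has returned true.
    throw new IllegalStateException("no rows in this empty sketch");
  }

  @Override
  public void close() throws IOException {
    // Release any underlying Spanner resources (result sets, sessions, ...).
  }
}
```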
@@ -14,7 +14,6 @@

package com.google.cloud.spark.spanner;

import com.google.cloud.spanner.connection.ConnectionOptions;
import org.apache.spark.sql.connector.read.Scan;
import org.apache.spark.sql.connector.read.ScanBuilder;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;
@@ -46,7 +46,6 @@
import org.apache.spark.sql.connector.catalog.TableCapability;
import org.apache.spark.sql.connector.catalog.TableProvider;
import org.apache.spark.sql.connector.expressions.Transform;
import org.apache.spark.sql.connector.read.PartitionReader;
import org.apache.spark.sql.connector.read.ScanBuilder;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;
@@ -14,11 +14,11 @@

package com.google.cloud.spark.spanner;

import com.google.cloud.spanner.connection.Connection;
import com.google.cloud.spanner.connection.ConnectionOptions;
import com.google.cloud.spanner.ResultSet;
import com.google.cloud.spanner.Statement;
import com.google.cloud.spanner.Struct;
import com.google.cloud.spanner.connection.Connection;
import com.google.cloud.spanner.connection.ConnectionOptions;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
@@ -1,5 +1,9 @@
package com.google.cloud.spark;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatCode;

import com.google.api.gax.longrunning.OperationFuture;
import com.google.cloud.spanner.CreateDatabaseMetadata;
import com.google.cloud.spanner.Database;
import com.google.cloud.spanner.DatabaseAdminClient;
@@ -9,35 +13,30 @@
import java.util.Map;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatCode;

import com.google.api.gax.longrunning.OperationFuture;
import org.apache.spark.sql.types.StructType;
import org.junit.Before;
import org.junit.After;
import org.junit.Before;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

@RunWith(JUnit4.class)
public class SpannerSparkTest {

String instanceId = "ins";
String databaseId = "db";
String instanceId = "ins";
String databaseId = "db";

@Before
public void setUp() throws Exception {
SpannerOptions opts = SpannerOptions.newBuilder().build();
Spanner spanner = opts.getService();
DatabaseAdminClient dbAdminClient = spanner.getDatabaseAdminClient();
// 1. Setup the tables with the Cloud Spanner emulator.
OperationFuture<Database, CreateDatabaseMetadata> op = dbAdminClient
.createDatabase(
instanceId,
databaseId,
Arrays.asList(
"CREATE TABLE ATable (\n"
@Before
public void setUp() throws Exception {
SpannerOptions opts = SpannerOptions.newBuilder().build();
Spanner spanner = opts.getService();
DatabaseAdminClient dbAdminClient = spanner.getDatabaseAdminClient();
// 1. Setup the tables with the Cloud Spanner emulator.
OperationFuture<Database, CreateDatabaseMetadata> op = dbAdminClient
.createDatabase(
instanceId,
databaseId,
Arrays.asList(
"CREATE TABLE ATable (\n"
+ " A INT64 NOT NULL,\n"
+ " B STRING(100),\n"
+ " C BYTES(MAX),\n"
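Taken together, the last two files show the import convention this formatting pass enforces, which matches the google-java-format style that Spotless commonly applies to Java projects (an assumption, since the Spotless configuration itself is not part of this commit): static imports first in ASCII order, a blank line, then every remaining import in a single ASCII-sorted block, which is why `com.google.cloud.spanner.connection.Connection` now sorts after `com.google.cloud.spanner.Struct`. A small self-contained sketch in that layout, with a hypothetical class name and placeholder connection URI:

```java
package com.google.cloud.spark.spanner;

// Hypothetical example showing the import layout after formatting:
// static imports first, then one ASCII-sorted block of regular imports.
import static java.util.Objects.requireNonNull;

import com.google.cloud.spanner.ResultSet;
import com.google.cloud.spanner.Statement;
import com.google.cloud.spanner.Struct;
import com.google.cloud.spanner.connection.Connection;
import com.google.cloud.spanner.connection.ConnectionOptions;

public final class ImportOrderExample {

  public static void main(String[] args) throws Exception {
    // Open a connection through ConnectionOptions (placeholder URI) and run a
    // trivial query, mirroring the classes imported in the changed files.
    ConnectionOptions options =
        ConnectionOptions.newBuilder()
            .setUri("cloudspanner:/projects/test-project/instances/ins/databases/db")
            .build();
    try (Connection connection = options.getConnection();
        ResultSet rs = connection.executeQuery(Statement.of("SELECT 'hello' AS greeting"))) {
      while (rs.next()) {
        Struct row = rs.getCurrentRowAsStruct();
        System.out.println(requireNonNull(row.getString("greeting")));
      }
    }
  }
}
```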
