diff --git a/bigquery-connector-common/src/main/java/com/google/cloud/bigquery/connector/common/BigQueryUtil.java b/bigquery-connector-common/src/main/java/com/google/cloud/bigquery/connector/common/BigQueryUtil.java
index 52bea1c83..266b01ad0 100644
--- a/bigquery-connector-common/src/main/java/com/google/cloud/bigquery/connector/common/BigQueryUtil.java
+++ b/bigquery-connector-common/src/main/java/com/google/cloud/bigquery/connector/common/BigQueryUtil.java
@@ -339,10 +339,13 @@ && typeWriteable(sourceField.getType(), destinationField.getType())
   }
 
   // allowing widening narrow numeric into bignumeric
+  // allowing writing long to time
   @VisibleForTesting
   static boolean typeWriteable(LegacySQLTypeName sourceType, LegacySQLTypeName destinationType) {
     return (sourceType.equals(LegacySQLTypeName.NUMERIC)
             && destinationType.equals(LegacySQLTypeName.BIGNUMERIC))
+        || (sourceType.equals(LegacySQLTypeName.INTEGER)
+            && destinationType.equals(LegacySQLTypeName.TIME))
         || sourceType.equals(destinationType);
   }
 
diff --git a/spark-bigquery-connector-common/src/test/java/com/google/cloud/spark/bigquery/integration/WriteIntegrationTestBase.java b/spark-bigquery-connector-common/src/test/java/com/google/cloud/spark/bigquery/integration/WriteIntegrationTestBase.java
index d02dc75fe..6fde1575e 100644
--- a/spark-bigquery-connector-common/src/test/java/com/google/cloud/spark/bigquery/integration/WriteIntegrationTestBase.java
+++ b/spark-bigquery-connector-common/src/test/java/com/google/cloud/spark/bigquery/integration/WriteIntegrationTestBase.java
@@ -1209,6 +1209,43 @@ public void testWriteNumericsToWiderFields() throws Exception {
         .isEqualTo(new BigDecimal("12345.123450000000000"));
   }
 
+  @Test
+  public void testWriteLongToTimeField() throws Exception {
+    IntegrationTestUtils.runQuery(
+        String.format(
+            "CREATE TABLE `%s.%s` (name STRING, wake_up_time TIME)",
+            testDataset, testTable));
+    String name = "abc";
+    Long wakeUpTime = 36000000000L;
+    Dataset<Row> df =
+        spark.createDataFrame(
+            Arrays.asList(RowFactory.create(name, wakeUpTime)),
+            structType(
+                StructField.apply("name", DataTypes.StringType, true, Metadata.empty()),
+                StructField.apply("wake_up_time", DataTypes.LongType, true, Metadata.empty())));
+    df.write()
+        .format("bigquery")
+        .mode(SaveMode.Append)
+        .option("dataset", testDataset.toString())
+        .option("table", testTable)
+        .option("temporaryGcsBucket", TestConstants.TEMPORARY_GCS_BUCKET)
+        .option("writeMethod", writeMethod.toString())
+        .save();
+
+    Dataset<Row> resultDF =
+        spark
+            .read()
+            .format("bigquery")
+            .option("dataset", testDataset.toString())
+            .option("table", testTable)
+            .load();
+    List<Row> result = resultDF.collectAsList();
+    assertThat(result).hasSize(1);
+    Row head = result.get(0);
+    assertThat(head.getString(head.fieldIndex("name"))).isEqualTo("abc");
+    assertThat(head.getLong(head.fieldIndex("wake_up_time"))).isEqualTo(36000000000L);
+  }
+
   public void testWriteSchemaSubset() throws Exception {
     StructType initialSchema =
         structType(
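
Note (not part of the patch): the test constant 36000000000L is 10:00:00 expressed as microseconds since midnight, which is the long encoding BigQuery uses for TIME values and the reason the read-back assertion compares a long rather than a string. Below is a minimal, self-contained sketch of that encoding using only java.time; the class name TimeMicrosExample is made up for illustration and nothing here comes from the connector itself.

import java.time.LocalTime;
import java.time.temporal.ChronoField;

public class TimeMicrosExample {
  public static void main(String[] args) {
    // The value written by the test: microseconds since midnight.
    long wakeUpTimeMicros = 36000000000L;

    // Decode: 36_000_000_000 micros == 36_000 s == 10:00:00.
    LocalTime decoded = LocalTime.ofNanoOfDay(wakeUpTimeMicros * 1_000);
    System.out.println(decoded); // prints 10:00

    // Encode: the long the test expects back from the TIME column.
    long encoded = LocalTime.of(10, 0).getLong(ChronoField.MICRO_OF_DAY);
    System.out.println(encoded == wakeUpTimeMicros); // true
  }
}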