Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[cherrypick][PLUGIN-1809] Fix added to override schema #35

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 50 additions & 0 deletions .github/workflows/build-report.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
# Copyright © 2024 Cask Data, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

# This workflow will build a Java project with Maven
# For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven
# Note: Any changes to this workflow would be used only after merging into develop
name: Build Unit Tests Report

# Triggered after the "Build with unit tests" workflow completes, so that the
# surefire-report artifacts it uploaded are available for download here.
on:
  workflow_run:
    workflows:
      - Build with unit tests
    types:
      - completed

jobs:
  build:
    runs-on: ubuntu-latest

    # Do not publish a report when the triggering workflow was skipped entirely.
    if: ${{ github.event.workflow_run.conclusion != 'skipped' }}

    steps:
      # Mirrors the triggering workflow's status onto this run.
      # Pinned 1.0.0 version
      - uses: marocchino/action-workflow_run-status@54b6e87d6cb552fc5f36dbe9a722a6048725917a

      - name: Download artifact
        uses: actions/download-artifact@v4
        with:
          # Token + run-id are required to fetch artifacts from a *different*
          # workflow run (the one that triggered this workflow_run event).
          github-token: ${{ secrets.GITHUB_TOKEN }}
          run-id: ${{ github.event.workflow_run.id }}
          path: artifacts/

      - name: Surefire Report
        # Pinned 3.5.2 version
        uses: mikepenz/action-junit-report@16a9560bd02f11e7e3bf6b3e2ef6bba6c9d07c32
        # Publish the report even when earlier steps failed.
        if: always()
        with:
          report_paths: '**/target/surefire-reports/TEST-*.xml'
          github_token: ${{ secrets.GITHUB_TOKEN }}
          detailed_summary: true
          # Attach the check to the commit that was actually tested.
          commit: ${{ github.event.workflow_run.head_sha }}
          check_name: Build Test Report
59 changes: 59 additions & 0 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
# Copyright © 2024 Cask Data, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

# This workflow will build a Java project with Maven
# For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven
# Note: Any changes to this workflow would be used only after merging into develop
name: Build with unit tests

on:
  push:
    branches: [ develop, release/** ]
  pull_request:
    branches: [ develop, release/** ]
    # 'labeled' is included so that adding the 'build' label retriggers a build.
    types: [opened, synchronize, reopened, labeled]

jobs:
  build:
    runs-on: k8s-runner-build

    # We allow builds:
    # 1) When it's a merge into a branch
    # 2) For PRs that are labeled as build and
    #    - It's a code change
    #    - A build label was just added
    # A bit complex, but prevents builds when other labels are manipulated
    if: >
      github.event_name == 'push'
      || (contains(github.event.pull_request.labels.*.name, 'build')
          && (github.event.action != 'labeled' || github.event.label.name == 'build')
         )
    steps:
      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.workflow_run.head_sha }}
      - name: Cache
        uses: actions/cache@v3
        with:
          path: ~/.m2/repository
          # Keyed on the POM set so the cache refreshes when dependencies change.
          key: ${{ runner.os }}-maven-${{ github.workflow }}-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-maven-${{ github.workflow }}
      - name: Build with Maven
        # -fae: fail at end so all module test results are collected;
        # wagon flags harden HTTP downloads against transient network failures.
        run: mvn clean test -fae -T 2 -B -V -DcloudBuild -Dmaven.wagon.http.retryHandler.count=3 -Dmaven.wagon.httpconnectionManager.ttlSeconds=25
      - name: Archive build artifacts
        uses: actions/upload-artifact@v4
        # Upload even on failure so the report workflow can surface the results.
        if: always()
        with:
          name: reports-${{ github.run_id }}
          path: |
            **/target/rat.txt
            **/target/surefire-reports/*
8 changes: 4 additions & 4 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,8 @@
<surefire.redirectTestOutputToFile>true</surefire.redirectTestOutputToFile>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<!-- version properties -->
<cdap.version>6.1.0-SNAPSHOT</cdap.version>
<hydrator.version>2.3.0-SNAPSHOT</hydrator.version>
<cdap.version>6.4.1</cdap.version>
<hydrator.version>2.6.1</hydrator.version>
<commons.csv.version>1.6</commons.csv.version>
<hadoop.version>2.8.0</hadoop.version>
<spark2.version>2.3.1</spark2.version>
Expand Down Expand Up @@ -398,8 +398,8 @@
<version>1.1.0</version>
<configuration>
<cdapArtifacts>
<parent>system:cdap-data-pipeline[6.1.0-SNAPSHOT,7.0.0-SNAPSHOT)</parent>
<parent>system:cdap-data-streams[6.1.0-SNAPSHOT,7.0.0-SNAPSHOT)</parent>
<parent>system:cdap-data-pipeline[6.4.1-SNAPSHOT,7.0.0-SNAPSHOT)</parent>
<parent>system:cdap-data-streams[6.4.1-SNAPSHOT,7.0.0-SNAPSHOT)</parent>
</cdapArtifacts>
</configuration>
<executions>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
public class QueryUtil {

// Matches "limit <number>". Also "limit $$$$" and "limit ''" which means unlimited in Snowflake.
private static final String LIMIT_PATTERN = "(?i)LIMIT (''|\\$\\$\\$\\$|\\d+)";
private static final String LIMIT_PATTERN = "(?i)LIMIT (NULL|''|\\$\\$\\$\\$|\\d+)";
private static final String LIMIT_STRING = "limit %s";

private QueryUtil() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,8 +87,7 @@ public void prepareRun(BatchSourceContext context) {
@Override
public void initialize(BatchRuntimeContext context) throws Exception {
  super.initialize(context);
  // Resolve the schema via the config so that a manually overridden output
  // schema is honored instead of always re-deriving it from Snowflake; any
  // resolution problems are reported through the failure collector.
  Schema schema = SchemaHelper.getSchema(config, context.getFailureCollector());
  this.transformer = new SnowflakeMapToRecordTransformer(schema);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,15 @@
package io.cdap.plugin.snowflake.common;

import io.cdap.cdap.etl.mock.test.HydratorTestBase;
import io.cdap.cdap.test.TestConfiguration;
import io.cdap.plugin.snowflake.Constants;
import io.cdap.plugin.snowflake.common.client.SnowflakeAccessorTest;
import io.cdap.plugin.snowflake.source.batch.SnowflakeBatchSourceConfig;
import io.cdap.plugin.snowflake.source.batch.SnowflakeBatchSourceConfigBuilder;
import net.snowflake.client.jdbc.SnowflakeBasicDataSource;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.internal.AssumptionViolatedException;
import org.junit.rules.TestName;
Expand Down Expand Up @@ -55,6 +57,9 @@ public abstract class BaseSnowflakeTest extends HydratorTestBase {

private static final Logger LOG = LoggerFactory.getLogger(SnowflakeAccessorTest.class);

@ClassRule
public static final TestConfiguration TEST_CONFIG = new TestConfiguration("explore.enabled", false);

protected static final String ACCOUNT_NAME = System.getProperty("snowflake.test.account.name");
protected static final String DATABASE = System.getProperty("snowflake.test.database");
protected static final String SCHEMA = System.getProperty("snowflake.test.schema");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,22 +65,24 @@ public void testGetSchemaInvalidJson() {

@Test
public void testGetSchemaFromSnowflakeUnknownType() throws IOException {
  // A column whose JDBC type code (-1000) has no CDAP schema mapping must be
  // reported as a validation failure against the schema property, not thrown.
  String importQuery = "SELECT * FROM someTable";
  MockFailureCollector collector = new MockFailureCollector(MOCK_STAGE);
  SnowflakeSourceAccessor snowflakeAccessor = Mockito.mock(SnowflakeSourceAccessor.class);

  List<SnowflakeFieldDescriptor> sample = new ArrayList<>();
  sample.add(new SnowflakeFieldDescriptor("field1", -1000, false));

  // The accessor must be stubbed for the same query string that is passed to
  // getSchema below, otherwise the mock would return null.
  Mockito.when(snowflakeAccessor.describeQuery(importQuery)).thenReturn(sample);

  SchemaHelper.getSchema(snowflakeAccessor, null, collector, importQuery);

  ValidationAssertions.assertValidationFailed(
    collector, Collections.singletonList(SnowflakeBatchSourceConfig.PROPERTY_SCHEMA));
}

@Test
public void testGetSchemaFromSnowflake() throws IOException {
String importQuery = "SELECT * FROM someTable";
MockFailureCollector collector = new MockFailureCollector(MOCK_STAGE);
SnowflakeSourceAccessor snowflakeAccessor = Mockito.mock(SnowflakeSourceAccessor.class);

Expand Down Expand Up @@ -142,9 +144,9 @@ public void testGetSchemaFromSnowflake() throws IOException {
Schema.Field.of("field134", Schema.nullableOf(Schema.of(Schema.LogicalType.TIMESTAMP_MICROS)))
);

Mockito.when(snowflakeAccessor.describeQuery(null)).thenReturn(sample);
Mockito.when(snowflakeAccessor.describeQuery(importQuery)).thenReturn(sample);

Schema actual = SchemaHelper.getSchema(snowflakeAccessor, null, collector, null);
Schema actual = SchemaHelper.getSchema(snowflakeAccessor, null, collector, importQuery);

Assert.assertTrue(collector.getValidationFailures().isEmpty());
Assert.assertEquals(expected, actual);
Expand Down Expand Up @@ -182,4 +184,21 @@ public void testGetSchemaWhenMacroIsEnabledSchemaIsNull() {
Assert.assertNull(actual);

}

@Test
public void testGetSchemaManuallyUpdatedTheSchema() {
  // When no connection can be established, the manually supplied schema from
  // the config must be parsed and returned as-is, with no validation failures.
  Schema manualSchema = Schema.recordOf("test",
    Schema.Field.of("test_field", Schema.nullableOf(Schema.of(Schema.Type.LONG)))
  );

  SnowflakeBatchSourceConfig config = Mockito.mock(SnowflakeBatchSourceConfig.class);
  Mockito.when(config.canConnect()).thenReturn(false);
  Mockito.when(config.getSchema()).thenReturn(manualSchema.toString());

  MockFailureCollector failureCollector = new MockFailureCollector(MOCK_STAGE);
  Schema resolved = SchemaHelper.getSchema(config, failureCollector);

  Assert.assertTrue(failureCollector.getValidationFailures().isEmpty());
  Assert.assertEquals(manualSchema, resolved);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -50,8 +50,8 @@ public void transform() {
row.put("COLUMN_CHARACTER", "2");
row.put("COLUMN_STRING", "text_115");
row.put("COLUMN_TEXT", "text_116");
row.put("COLUMN_BINARY", "text_117");
row.put("COLUMN_VARBINARY", "text_118");
row.put("COLUMN_BINARY", "746578745f313137");
row.put("COLUMN_VARBINARY", "746578745f313138");
row.put("COLUMN_BOOLEAN", "true");
row.put("COLUMN_DATE", "2019-01-01");
row.put("COLUMN_DATETIME", "2019-01-01T01:01:01+00:00");
Expand Down
Loading