Skip to content

Commit

Permalink
Merge pull request #671 from priyabhatnagar25/Wrangler_parseAvro-Log
Browse files Browse the repository at this point in the history
Parse as Avro and Log
  • Loading branch information
itsankit-google authored Jan 25, 2024
2 parents affc6cd + f29e76c commit ddff013
Show file tree
Hide file tree
Showing 12 changed files with 988 additions and 0 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
# Copyright © 2023 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

@Wrangler
Feature: Wrangler - Run time scenarios for Parse Avro

# End-to-end run: import a prebuilt pipeline that applies the parse-avro directive,
# run it BigQuery -> Wrangler -> BigQuery, and compare the sink table against a
# stored expected-output file ("ExpectedDirective_parse_avro").
# Hook tags: @BQ_SOURCE_AVRO_TEST pre-creates the source table populated with
# Avro binary data; @BQ_SOURCE_TEST / @BQ_SINK_TEST presumably handle table
# cleanup after the scenario — confirm against the hook definitions.
@BQ_SOURCE_AVRO_TEST @BQ_SOURCE_TEST @BQ_SINK_TEST
Scenario: To verify User is able to run a pipeline using parse avro directive
Given Open Datafusion Project to configure pipeline
# Import the canned pipeline JSON referenced by the "Directive_parse_avro" property.
Then Click on the Plus Green Button to import the pipelines
Then Select the file for importing the pipeline for the plugin "Directive_parse_avro"
# Point the BigQuery source plugin at the runtime project/dataset/table values.
Then Navigate to the properties page of plugin: "BigQueryTable"
Then Replace input plugin property: "project" with value: "projectId"
Then Replace input plugin property: "dataset" with value: "dataset"
Then Replace input plugin property: "table" with value: "bqSourceTable"
Then Click on the Get Schema button
Then Validate "BigQueryTable" plugin properties
Then Close the Plugin Properties page
# Point the BigQuery sink plugin at the target table.
Then Navigate to the properties page of plugin: "BigQuery2"
Then Replace input plugin property: "project" with value: "projectId"
Then Replace input plugin property: "table" with value: "bqTargetTable"
Then Replace input plugin property: "dataset" with value: "dataset"
Then Validate "BigQuery2" plugin properties
Then Close the Plugin Properties page
# Deploy, run to completion, capture logs, then validate sink data row-by-row
# against the expected-output file.
Then Rename the pipeline
Then Deploy the pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate The Data From BQ To BQ With Actual And Expected File for: "ExpectedDirective_parse_avro"
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
# Copyright © 2023 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

@Wrangler
Feature: Wrangler - Run time scenarios for Parse Log

# End-to-end run: import a prebuilt pipeline that applies the parse-log directive,
# run it BigQuery -> Wrangler -> BigQuery, and compare the sink table against a
# stored expected-output file ("ExpectedDirective_parse_log").
# Hook tags: @BQ_SOURCE_LOG_TEST pre-creates the source table populated with an
# Apache combined-log sample row; @BQ_SOURCE_TEST / @BQ_SINK_TEST presumably
# handle table cleanup after the scenario — confirm against the hook definitions.
@BQ_SOURCE_LOG_TEST @BQ_SOURCE_TEST @BQ_SINK_TEST
Scenario: To verify User is able to run a pipeline using parse log directive
Given Open Datafusion Project to configure pipeline
# Import the canned pipeline JSON referenced by the "Directive_parse_log" property.
Then Click on the Plus Green Button to import the pipelines
Then Select the file for importing the pipeline for the plugin "Directive_parse_log"
# Point the BigQuery source plugin at the runtime project/dataset/table values.
Then Navigate to the properties page of plugin: "BigQueryTable"
Then Replace input plugin property: "project" with value: "projectId"
Then Replace input plugin property: "dataset" with value: "dataset"
Then Replace input plugin property: "table" with value: "bqSourceTable"
Then Click on the Get Schema button
Then Validate "BigQueryTable" plugin properties
Then Close the Plugin Properties page
# Point the BigQuery sink plugin at the target table.
Then Navigate to the properties page of plugin: "BigQuery2"
Then Replace input plugin property: "project" with value: "projectId"
Then Replace input plugin property: "table" with value: "bqTargetTable"
Then Replace input plugin property: "dataset" with value: "dataset"
Then Validate "BigQuery2" plugin properties
Then Close the Plugin Properties page
# Deploy, run to completion, capture logs, then validate sink data row-by-row
# against the expected-output file.
Then Rename the pipeline
Then Deploy the pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate The Data From BQ To BQ With Actual And Expected File for: "ExpectedDirective_parse_log"
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,16 @@ public static void deleteTempSourceBQTable() throws IOException, InterruptedExce
BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " deleted successfully");
PluginPropertyUtils.removePluginProp("bqSourceTable");
}
/**
 * Cucumber hook for scenarios tagged {@code @BQ_SOURCE_AVRO_TEST}: provisions the
 * temporary BigQuery source table using the Avro DDL/DML query files referenced by
 * the {@code CreateBQTableQueryFileAvro} and {@code InsertBQDataQueryFileAvro}
 * plugin properties.
 *
 * @throws IOException          if reading the query files or calling BigQuery fails
 * @throws InterruptedException if the BigQuery job is interrupted
 */
@Before(order = 1, value = "@BQ_SOURCE_AVRO_TEST")
public static void createTempSourceBQTableAvro() throws IOException, InterruptedException {
  String createQueryFile = PluginPropertyUtils.pluginProp("CreateBQTableQueryFileAvro");
  String insertQueryFile = PluginPropertyUtils.pluginProp("InsertBQDataQueryFileAvro");
  createSourceBQTableWithQueries(createQueryFile, insertQueryFile);
}
/**
 * Cucumber hook for scenarios tagged {@code @BQ_SOURCE_LOG_TEST}: provisions the
 * temporary BigQuery source table using the log-fixture DDL/DML query files
 * referenced by the {@code CreateBQTableQueryFileLog} and
 * {@code InsertBQDataQueryFileLog} plugin properties.
 *
 * @throws IOException          if reading the query files or calling BigQuery fails
 * @throws InterruptedException if the BigQuery job is interrupted
 */
@Before(order = 1, value = "@BQ_SOURCE_LOG_TEST")
public static void createTempSourceBQTableLog() throws IOException, InterruptedException {
  String createQueryFile = PluginPropertyUtils.pluginProp("CreateBQTableQueryFileLog");
  String insertQueryFile = PluginPropertyUtils.pluginProp("InsertBQDataQueryFileLog");
  createSourceBQTableWithQueries(createQueryFile, insertQueryFile);
}


@Before(order = 1, value = "@BQ_SOURCE_JSON_TEST")
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{"id":1,"name":"ABC","nameNew":"BC","value":1}
{"id":2,"name":"DEF","nameNew":"EF","value":0}
{"id":4,"name":"XYZ","nameNew":"YZ","value":null}
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"Body_icd_9_description":"Body Post Catalog lookup","body":"127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] \"GET /apache_pb.gif HTTP/1.0\" 200 2326","bytes_response_body_bytes":"2326","bytes_response_body_bytes_last":"2326","http_firstline_request_firstline":"GET /apache_pb.gif HTTP/1.0","http_method_request_firstline_method":"GET","http_path_request_firstline_uri_path":"/apache_pb.gif","http_protocol_request_firstline_protocol":"HTTP","http_protocol_version_request_firstline_protocol":"HTTP/1.0","http_protocol_version_request_firstline_protocol_version":"1.0","http_ref_request_firstline_uri_ref":"no value","http_uri_request_firstline_uri":"/apache_pb.gif","id":1,"ip_connection_client_host":"127.0.0.1","ip_connection_client_host_1":"127.0.0.1","ip_connection_client_host_last":"127.0.0.1","string_connection_client_user":"frank","string_connection_client_user_last":"frank","string_request_status_last":"200","time_date_request_receive_time_date":"2000-10-10","time_date_request_receive_time_date_utc":"2000-10-10","time_date_request_receive_time_last_date":"2000-10-10","time_date_request_receive_time_last_date_utc":"2000-10-10","time_day_request_receive_time_day":"10","time_day_request_receive_time_day_utc":"10","time_day_request_receive_time_last_day":10,"time_day_request_receive_time_last_day_utc":"10","time_epoch_request_receive_time_epoch":"971211336000","time_epoch_request_receive_time_last_epoch":"971211336000","time_hour_request_receive_time_hour":"13","time_hour_request_receive_time_hour_utc":"20","time_hour_request_receive_time_last_hour":"13","time_hour_request_receive_time_last_hour_utc":"20","time_millisecond_request_receive_time_last_millisecond":"0","time_millisecond_request_receive_time_last_millisecond_utc":"0","time_millisecond_request_receive_time_millisecond":"0","time_millisecond_request_receive_time_millisecond_utc":"0","time_minute_request_receive_time_last_minute":"55","time_minute_request_receive_time_last_minute_utc":"55","time_minute_request_receive_time_minute":"55","time_minute_request_receive_time_minute_utc":"55","time_month_request_receive_time_last_month":"10","time_month_request_receive_time_last_month_utc":"10","time_month_request_receive_time_month":"10","time_month_request_receive_time_month_utc":"10","time_monthname_request_receive_time_last_monthname":"October","time_monthname_request_receive_time_last_monthname_utc":"October","time_monthname_request_receive_time_monthname":"October","time_monthname_request_receive_time_monthname_utc":"October","time_second_request_receive_time_last_second":"36","time_second_request_receive_time_last_second_utc":"36","time_second_request_receive_time_second":"36","time_second_request_receive_time_second_utc":"36","time_stamp_request_receive_time":"10/Oct/2000:13:55:36 -0700","time_stamp_request_receive_time_last":"10/Oct/2000:13:55:36 -0700","time_time_request_receive_time_last_time":"13:55:36","time_time_request_receive_time_last_time_utc":"20:55:36","time_time_request_receive_time_time":"13:55:36","time_time_request_receive_time_time_utc":"20:55:36","time_week_request_receive_time_last_weekofweekyear":"41","time_week_request_receive_time_last_weekofweekyear_utc":"41","time_week_request_receive_time_weekofweekyear":"41","time_week_request_receive_time_weekofweekyear_utc":"41","time_year_request_receive_time_last_weekyear":"2000","time_year_request_receive_time_last_weekyear_utc":"2000","time_year_request_receive_time_last_year":"2000","time_year_request_receive_time_last_year_utc":"2000","time_year_request_receive_time_weekyear":"2000","time_year_request_receive_time_weekyear_utc":"2000","time_year_request_receive_time_year":"2000","time_year_request_receive_time_year_utc":"2000"}
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
create table `DATASET.TABLE_NAME` (id INTEGER, body BYTES)
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
create table `DATASET.TABLE_NAME` (id INTEGER, body STRING(100))
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
INSERT INTO DATASET.TABLE_NAME (body)
VALUES
(FROM_BASE64("T2JqAQQUYXZyby5jb2RlYwhudWxsFmF2cm8uc2NoZW1htgJ7InR5cGUiOiJyZWNvcmQiLCJuYW1lIjoiUmVjb3JkIiwiZmllbGRzIjpbeyJuYW1lIjoiaWQiLCJ0eXBlIjoibG9uZyJ9LHsibmFtZSI6Im5hbWUiLCJ0eXBlIjpbIm51bGwiLCJzdHJpbmciXX0seyJuYW1lIjoidmFsdWUiLCJ0eXBlIjpbIm51bGwiLCJkb3VibGUiXX1dfQDPLTa6WtteXtkAuF/3puB+CGACAgZBQkMCWDm0yHa+8z8EAgZERUYCAAAAAAAAAAAGAAKuR+E6gskyQQgCBlhZWgDPLTa6WtteXtkAuF/3puB+"))
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
INSERT INTO DATASET.TABLE_NAME (id,body)
VALUES
(1,'127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326');
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@ Directive_parse_xml=testData/Wrangler/parse_xmltojson_wrangler-cdap-data-pipelin
Directive_parse_excel=testData/Wrangler/parse_excel_wrangler_copy-cdap-data-pipeline.json
Directive_parse_csv=testData/Wrangler\
/parse_csv_wrangle-cdap-data-pipeline.json
Directive_parse_avro=testData/Wrangler/parseAsAvro-cdap-data-pipeline (1).json
Directive_parse_log=testData/Wrangler/parse_log_wrangler_copy-cdap-data-pipeline.json
Directive_GroupBy=testData/Wrangler/BQ2BQwithWrnglerNGrpby-cdap-data-pipeline (1).json
bqSourceTable=dummy
bqTargetTable=dummy
Expand All @@ -30,6 +32,10 @@ CreateBQDataQueryFileXml=BQtesdata/BigQuery/BigQueryCreateTableQueryXml.txt
InsertBQDataQueryFileXml=BQtesdata/BigQuery/BigQueryInsertDataQueryXml.txt
CreateBQTableQueryFileCsv=BQtesdata/BigQuery/BigQueryCreateTableQueryCsv.txt
InsertBQDataQueryFileCsv=BQtesdata/BigQuery/BigQueryInsertDataQueryCsv.txt
CreateBQTableQueryFileAvro=BQtesdata/BigQuery/BigQueryCreateTableQueryAvro.txt
InsertBQDataQueryFileAvro=BQtesdata/BigQuery/BigQueryInsertDataQueryAvro.txt
CreateBQTableQueryFileLog=BQtesdata/BigQuery/BigQueryCreateTableQueryLog.txt
InsertBQDataQueryFileLog=BQtesdata/BigQuery/BigQueryInsertDataQueryLog.txt
CreateBQTableQueryFile=BQtesdata/BigQuery/BigQueryCreateTableQuery.txt
InsertBQDataQueryFile=BQtesdata/BigQuery/BigQueryInsertDataQuery.txt

Expand All @@ -48,3 +54,5 @@ ExpectedDirective_parse_json=BQValidationExpectedFiles/Directive_parse_json
ExpectedDirective_parse_xml=BQValidationExpectedFiles/Directive_parse_xmltojson
ExpectedDirective_parse_excel=BQValidationExpectedFiles/Directive_parse_excel
ExpectedDirective_parse_csv=BQValidationExpectedFiles/Directive_parse_csv
ExpectedDirective_parse_avro=BQValidationExpectedFiles/Directive_parse_avro
ExpectedDirective_parse_log=BQValidationExpectedFiles/Directive_parse_log
Loading

0 comments on commit ddff013

Please sign in to comment.