diff --git a/pom.xml b/pom.xml
index d9cd73c..c418626 100644
--- a/pom.xml
+++ b/pom.xml
@@ -30,7 +30,8 @@
UTF-8
6.1.0-SNAPSHOT
2.3.0-SNAPSHOT
- 19.0
+ 27.0.1-jre
+ ${project.basedir}/src/test/java/
@@ -83,6 +84,7 @@
+ ${testSourceLocation}
org.apache.maven.plugins
@@ -188,7 +190,123 @@
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+
+ 1.8
+ 1.8
+
+
+
+
+
+ e2e-tests
+
+ src/e2e-test/java
+
+
+
+
+ src/e2e-test/resources
+
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+ 2.18.1
+
+ true
+
+
+
+
+ org.apache.maven.plugins
+ maven-failsafe-plugin
+ 3.0.0-M5
+
+
+ TestRunner.java
+
+
+ classes
+ 2
+ 2
+ true
+
+
+
+ ${GOOGLE_APPLICATION_CREDENTIALS}
+
+
+ ${SERVICE_ACCOUNT_TYPE}
+
+
+ ${SERVICE_ACCOUNT_FILE_PATH}
+
+
+ ${SERVICE_ACCOUNT_JSON}
+
+
+
+
+
+
+ integration-test
+
+
+
+
+
+
+ net.masterthought
+ maven-cucumber-reporting
+ 5.5.0
+
+
+
+ execution
+ verify
+
+ generate
+
+
+ Cucumber Reports
+ target/cucumber-reports/advanced-reports
+ 1
+ false
+ ${project.build.directory}/cucumber-reports
+
+ **/*.json
+
+ ${project.build.directory}/cucumber-reports
+ true
+
+
+
+
+
+
+
+
+
+ io.cdap.tests.e2e
+ cdap-e2e-framework
+ 0.0.1-SNAPSHOT
+ test
+
+
+ ch.qos.logback
+ logback-classic
+ 1.2.8
+ runtime
+
+
+
+
+
diff --git a/src/e2e-test/features/datetransformplugin/DateTransformErrorScenarios.feature b/src/e2e-test/features/datetransformplugin/DateTransformErrorScenarios.feature
new file mode 100644
index 0000000..0f825c0
--- /dev/null
+++ b/src/e2e-test/features/datetransformplugin/DateTransformErrorScenarios.feature
@@ -0,0 +1,105 @@
+@DateTransform
+Feature: DateTransform - Verify DateTransform Plugin Error scenarios
+
+ @BQ_SOURCE_DATETRANSFORM_TEST
+ Scenario:Verify DateTransform plugin validation errors for mandatory fields
+ Given Open Datafusion Project to configure pipeline
+ When Select plugin: "BigQuery" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Transform"
+ When Select plugin: "Date Transform" from the plugins list as: "Transform"
+ Then Connect plugins: "BigQuery" and "DateTransform" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Replace input plugin property: "projectId" with value: "projectId"
+ Then Enter input plugin property: "datasetProjectId" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqSourceTable"
+ Then Click on the Get Schema button
+ Then Capture the generated Output Schema
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "DateTransform"
+ Then Click on the Validate button
+ Then Verify mandatory property error for below listed properties:
+ | sourceField |
+ | targetField |
+
+ @BQ_SOURCE_DATETRANSFORM_TEST
+ Scenario:Verify DateTransform plugin error for invalid Source Field Name
+ Given Open Datafusion Project to configure pipeline
+ When Select plugin: "BigQuery" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Transform"
+ When Select plugin: "Date Transform" from the plugins list as: "Transform"
+ Then Connect plugins: "BigQuery" and "DateTransform" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Replace input plugin property: "projectId" with value: "projectId"
+ Then Enter input plugin property: "datasetProjectId" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqSourceTable"
+ Then Click on the Get Schema button
+ Then Capture the generated Output Schema
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "DateTransform"
+ Then Enter input plugin property: "sourceFieldName" with value: "dateTransform.IncorrectFieldName"
+ Then Enter input plugin property: "targetFieldName" with value: "dateTransform.TargetFieldName"
+ Then Click on the Validate button
+ Then Verify that the Plugin is displaying an error message: "errorMessageDateTransformInvalidSourceFieldName" on the header
+
+ @BQ_SOURCE_DATETRANSFORM_TEST
+ Scenario:Verify DateTransform plugin error for invalid Target Field Name
+ Given Open Datafusion Project to configure pipeline
+ When Select plugin: "BigQuery" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Transform"
+ When Select plugin: "Date Transform" from the plugins list as: "Transform"
+ Then Connect plugins: "BigQuery" and "DateTransform" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Replace input plugin property: "projectId" with value: "projectId"
+ Then Enter input plugin property: "datasetProjectId" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqSourceTable"
+ Then Click on the Get Schema button
+ Then Capture the generated Output Schema
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "DateTransform"
+ Then Enter input plugin property: "sourceFieldName" with value: "dateTransform.SourceFieldName"
+ Then Enter input plugin property: "targetFieldName" with value: "dateTransform.IncorrectFieldName"
+ Then Click on the Validate button
+ Then Verify that the Plugin is displaying an error message: "errorMessageDateTransformInvalidTargetFieldName" on the header
+
+ @BQ_SOURCE_DATETRANSFORM_TEST
+ Scenario:Verify DateTransform plugin error for Source and Target field must have same number of fields
+ Given Open Datafusion Project to configure pipeline
+ When Select plugin: "BigQuery" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Transform"
+ When Select plugin: "Date Transform" from the plugins list as: "Transform"
+ Then Connect plugins: "BigQuery" and "DateTransform" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Replace input plugin property: "projectId" with value: "projectId"
+ Then Enter input plugin property: "datasetProjectId" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqSourceTable"
+ Then Click on the Get Schema button
+ Then Capture the generated Output Schema
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "DateTransform"
+ Then Enter input plugin property: "sourceFieldName" with value: "dateTransform.SourceFieldNames"
+ Then Enter input plugin property: "targetFieldName" with value: "dateTransform.TargetFieldName"
+ Then Click on the Validate button
+ Then Verify that the Plugin is displaying an error message: "errorMessageDateTransformMustHaveSameNumberOfFields" on the header
+
+ @BQ_SOURCE_DATETRANSFORM_TEST
+ Scenario:Verify DateTransform plugin error for No Input Schema available
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Transform"
+ When Select plugin: "Date Transform" from the plugins list as: "Transform"
+ Then Navigate to the properties page of plugin: "DateTransform"
+ Then Enter input plugin property: "sourceFieldName" with value: "dateTransform.SourceFieldName"
+ Then Enter input plugin property: "targetFieldName" with value: "dateTransform.TargetFieldName"
+ Then Click on the Validate button
+ Then Verify that the Plugin is displaying an error message: "errorMessageDateTransformForInputSchema" on the header
diff --git a/src/e2e-test/features/datetransformplugin/DateTransformMacros.feature b/src/e2e-test/features/datetransformplugin/DateTransformMacros.feature
new file mode 100644
index 0000000..ae41974
--- /dev/null
+++ b/src/e2e-test/features/datetransformplugin/DateTransformMacros.feature
@@ -0,0 +1,65 @@
+@DateTransform
+Feature: DateTransform - Verification of DateTransform pipeline with BigQuery as source and target using macros
+
+ @BQ_SINK_TEST @BQ_SOURCE_DATETRANSFORM_TEST @PLUGIN-1224
+ Scenario: To verify data is getting transferred from BigQuery to BigQuery successfully with DateTransform plugin properties as macro arguments
+ Given Open Datafusion Project to configure pipeline
+ When Select plugin: "BigQuery" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Transform"
+ When Select plugin: "Date Transform" from the plugins list as: "Transform"
+ Then Connect plugins: "BigQuery" and "DateTransform" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Replace input plugin property: "projectId" with value: "projectId"
+ Then Enter input plugin property: "datasetProjectId" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqSourceTable"
+ Then Click on the Get Schema button
+ Then Capture the generated Output Schema
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "DateTransform"
+ Then Click on the Macro button of Property: "SourceFieldName" and set the value to: "dateTransform.SourceFieldName"
+ Then Click on the Macro button of Property: "SourceFieldDateFormat" and set the value to: "dateTransform.SourceFieldDateFormat"
+ Then Click on the Macro button of Property: "TargetFieldName" and set the value to: "dateTransform.TargetFieldName"
+ Then Click on the Macro button of Property: "TargetFieldDateFormat" and set the value to: "dateTransform.TargetFieldDateFormat"
+    Then Validate "DateTransform" plugin properties
+ Then Close the Plugin Properties page
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQuery" from the plugins list as: "Sink"
+ Then Connect plugins: "DateTransform" and "BigQuery2" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery2"
+ Then Replace input plugin property: "projectId" with value: "projectId"
+ Then Enter input plugin property: "datasetProjectId" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqTargetTable"
+ Then Validate "BigQuery2" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Enter runtime argument value "SourceFieldName" for key "dateTransform.SourceFieldName"
+ Then Enter runtime argument value "SourceFieldDateFormat" for key "dateTransform.SourceFieldDateFormat"
+ Then Enter runtime argument value "TargetFieldName" for key "dateTransform.TargetFieldName"
+ Then Enter runtime argument value "TargetFieldDateFormat" for key "dateTransform.TargetFieldDateFormat"
+ Then Run the preview of pipeline with runtime arguments
+ Then Wait till pipeline preview is in running state
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Click on the Preview Data link on the Sink plugin node: "BigQueryTable"
+ Then Verify sink plugin's Preview Data for Input Records table and the Input Schema matches the Output Schema of Source plugin
+ Then Close the preview data
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Enter runtime argument value "SourceFieldName" for key "dateTransform.SourceFieldName"
+ Then Enter runtime argument value "SourceFieldDateFormat" for key "dateTransform.SourceFieldDateFormat"
+ Then Enter runtime argument value "TargetFieldName" for key "dateTransform.TargetFieldName"
+ Then Enter runtime argument value "TargetFieldDateFormat" for key "dateTransform.TargetFieldDateFormat"
+ Then Run the Pipeline in Runtime with runtime arguments
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate OUT record count is equal to IN record count
+ Then Validate dateFormat "dateTransform.TargetFieldDateFormat" of the fields "dateTransform.TargetFieldName" in target BQ table "bqTargetTable"
diff --git a/src/e2e-test/features/datetransformplugin/DateTransformWithBQ.feature b/src/e2e-test/features/datetransformplugin/DateTransformWithBQ.feature
new file mode 100644
index 0000000..22dffb5
--- /dev/null
+++ b/src/e2e-test/features/datetransformplugin/DateTransformWithBQ.feature
@@ -0,0 +1,162 @@
+@DateTransform
+Feature: DateTransform - Verification of DateTransform pipeline with BigQuery as source and target
+
+ @BQ_SINK_TEST @BQ_SOURCE_DATETRANSFORM_TEST
+ Scenario: To verify complete flow of data extract and transfer from BQ source to BQ sink using DateTransform Plugin for string datatype field
+ Given Open Datafusion Project to configure pipeline
+ When Select plugin: "BigQuery" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Transform"
+ When Select plugin: "Date Transform" from the plugins list as: "Transform"
+ Then Connect plugins: "BigQuery" and "DateTransform" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Replace input plugin property: "projectId" with value: "projectId"
+ Then Enter input plugin property: "datasetProjectId" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqSourceTable"
+ Then Click on the Get Schema button
+ Then Capture the generated Output Schema
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "DateTransform"
+ Then Enter input plugin property: "sourceFieldName" with value: "dateTransform.SourceFieldName"
+ Then Enter input plugin property: "sourceFieldDateFormat" with value: "dateTransform.SourceFieldDateFormat"
+ Then Enter input plugin property: "targetFieldName" with value: "dateTransform.TargetFieldName"
+ Then Enter input plugin property: "targetFieldDateFormat" with value: "dateTransform.TargetFieldDateFormat"
+ Then Validate "DateTransform" plugin properties
+ Then Close the Plugin Properties page
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQuery" from the plugins list as: "Sink"
+ Then Connect plugins: "DateTransform" and "BigQuery2" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery2"
+ Then Replace input plugin property: "projectId" with value: "projectId"
+ Then Enter input plugin property: "datasetProjectId" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqTargetTable"
+ Then Validate "BigQuery2" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Wait till pipeline preview is in running state
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Click on the Preview Data link on the Sink plugin node: "BigQueryTable"
+ Then Verify sink plugin's Preview Data for Input Records table and the Input Schema matches the Output Schema of Source plugin
+ Then Close the preview data
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate OUT record count is equal to IN record count
+ Then Validate dateFormat "dateTransform.TargetFieldDateFormat" of the fields "dateTransform.TargetFieldName" in target BQ table "bqTargetTable"
+
+ @BQ_SINK_TEST @BQ_SOURCE_DATETRANSFORM_TEST
+ Scenario: To verify complete flow of data extract and transfer from BQ source to BQ sink using DateTransform Plugin for multiple fields
+ Given Open Datafusion Project to configure pipeline
+ When Select plugin: "BigQuery" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Transform"
+ When Select plugin: "Date Transform" from the plugins list as: "Transform"
+ Then Connect plugins: "BigQuery" and "DateTransform" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Replace input plugin property: "projectId" with value: "projectId"
+ Then Enter input plugin property: "datasetProjectId" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqSourceTable"
+ Then Click on the Get Schema button
+ Then Capture the generated Output Schema
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "DateTransform"
+ Then Enter input plugin property: "sourceFieldName" with value: "dateTransform.SourceFieldNames"
+ Then Enter input plugin property: "sourceFieldDateFormat" with value: "dateTransform.SourceFieldDateFormat"
+ Then Enter input plugin property: "targetFieldName" with value: "dateTransform.TargetFieldNames"
+ Then Enter input plugin property: "targetFieldDateFormat" with value: "dateTransform.TargetFieldDateFormat"
+ Then Validate "DateTransform" plugin properties
+ Then Close the Plugin Properties page
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQuery" from the plugins list as: "Sink"
+ Then Connect plugins: "DateTransform" and "BigQuery2" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery2"
+ Then Replace input plugin property: "projectId" with value: "projectId"
+ Then Enter input plugin property: "datasetProjectId" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqTargetTable"
+ Then Validate "BigQuery2" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Wait till pipeline preview is in running state
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Click on the Preview Data link on the Sink plugin node: "BigQueryTable"
+ Then Verify sink plugin's Preview Data for Input Records table and the Input Schema matches the Output Schema of Source plugin
+ Then Close the preview data
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate OUT record count is equal to IN record count
+ Then Validate dateFormat "dateTransform.TargetFieldDateFormat" of the fields "dateTransform.TargetFieldNames" in target BQ table "bqTargetTable"
+
+ @BQ_SINK_TEST @BQ_SOURCE_DATETRANSFORM_TEST @PLUGIN-1225
+ Scenario: To verify complete flow of data extract and transfer from BQ source to BQ sink using DateTransform Plugin for long datatype field
+ Given Open Datafusion Project to configure pipeline
+ When Select plugin: "BigQuery" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Transform"
+ When Select plugin: "Date Transform" from the plugins list as: "Transform"
+ Then Connect plugins: "BigQuery" and "DateTransform" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery"
+ Then Replace input plugin property: "projectId" with value: "projectId"
+ Then Enter input plugin property: "datasetProjectId" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqSourceTable"
+ Then Click on the Get Schema button
+ Then Capture the generated Output Schema
+ Then Validate "BigQuery" plugin properties
+ Then Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "DateTransform"
+ Then Enter input plugin property: "sourceFieldName" with value: "dateTransform.SourceFieldNameWithLongDataType"
+ Then Enter input plugin property: "sourceFieldDateFormat" with value: "dateTransform.SourceFieldDateFormat"
+ Then Select dropdown plugin property: "SourceInSecondsOrMilliseconds" with option value: "dateTransform.sourceInSecondsOrMilliseconds"
+ Then Enter input plugin property: "targetFieldName" with value: "dateTransform.TargetFieldNameWithLongDataType"
+ Then Enter input plugin property: "targetFieldDateFormat" with value: "dateTransform.TargetFieldDateFormat"
+ Then Validate "DateTransform" plugin properties
+ Then Close the Plugin Properties page
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQuery" from the plugins list as: "Sink"
+ Then Connect plugins: "DateTransform" and "BigQuery2" to establish connection
+ Then Navigate to the properties page of plugin: "BigQuery2"
+ Then Replace input plugin property: "projectId" with value: "projectId"
+ Then Enter input plugin property: "datasetProjectId" with value: "projectId"
+ Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+ Then Enter input plugin property: "dataset" with value: "dataset"
+ Then Enter input plugin property: "table" with value: "bqTargetTable"
+ Then Validate "BigQuery2" plugin properties
+ Then Close the Plugin Properties page
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Wait till pipeline preview is in running state
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Click on the Preview Data link on the Sink plugin node: "BigQueryTable"
+ Then Verify sink plugin's Preview Data for Input Records table and the Input Schema matches the Output Schema of Source plugin
+ Then Close the preview data
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs
+ Then Validate OUT record count is equal to IN record count
+    Then Validate dateFormat "dateTransform.TargetFieldDateFormat" of the fields "dateTransform.TargetFieldNameWithLongDataType" in target BQ table "bqTargetTable"
diff --git a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
new file mode 100644
index 0000000..1c7cee3
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
@@ -0,0 +1,141 @@
+/*
+ * Copyright © 2022 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.common.stepsdesign;
+
+import com.google.cloud.bigquery.BigQueryException;
+import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cucumber.java.After;
+import io.cucumber.java.Before;
+import org.apache.commons.lang3.StringUtils;
+import org.junit.Assert;
+import stepsdesign.BeforeActions;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.NoSuchElementException;
+import java.util.UUID;
+
+/**
+ * GCP test hooks.
+ */
+public class TestSetupHooks {
+
+ @Before(order = 1, value = "@BQ_SINK_TEST")
+ public static void setTempTargetBQTableName() {
+ String bqTargetTableName = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_");
+ PluginPropertyUtils.addPluginProp("bqTargetTable", bqTargetTableName);
+ BeforeActions.scenario.write("BQ Target table name - " + bqTargetTableName);
+ }
+
+ @After(order = 1, value = "@BQ_SINK_TEST")
+ public static void deleteTempTargetBQTable() throws IOException, InterruptedException {
+ String bqTargetTableName = PluginPropertyUtils.pluginProp("bqTargetTable");
+ try {
+ BigQueryClient.dropBqQuery(bqTargetTableName);
+ BeforeActions.scenario.write("BQ Target table - " + bqTargetTableName + " deleted successfully");
+ PluginPropertyUtils.removePluginProp("bqTargetTable");
+ } catch (BigQueryException e) {
+ if (e.getMessage().contains("Not found: Table")) {
+ BeforeActions.scenario.write("BQ Target Table " + bqTargetTableName + " does not exist");
+ } else {
+ Assert.fail(e.getMessage());
+ }
+ }
+ }
+
+ /**
+ * Create BigQuery table with 3 columns (Id - Int, Value - Int, UID - string) containing random testdata.
+ * Sample row:
+ * Id | Value | UID
+ * 22 | 968 | 245308db-6088-4db2-a933-f0eea650846a
+ */
+ @Before(order = 1, value = "@BQ_SOURCE_TEST")
+ public static void createTempSourceBQTable() throws IOException, InterruptedException {
+ String bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().replaceAll("-", "_");
+ StringBuilder records = new StringBuilder(StringUtils.EMPTY);
+ for (int index = 2; index <= 25; index++) {
+ records.append(" (").append(index).append(", ").append((int) (Math.random() * 1000 + 1)).append(", '")
+ .append(UUID.randomUUID()).append("'), ");
+ }
+    BigQueryClient.getSoleQueryResult("create table `" + PluginPropertyUtils.pluginProp("dataset") + "." + bqSourceTable + "` as " +
+ "SELECT * FROM UNNEST([ " +
+ " STRUCT(1 AS Id, " + ((int) (Math.random() * 1000 + 1)) + " as Value, " +
+ "'" + UUID.randomUUID() + "' as UID), " +
+ records +
+ " (26, " + ((int) (Math.random() * 1000 + 1)) + ", " +
+ "'" + UUID.randomUUID() + "') " +
+ "])");
+ PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable);
+ BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " created successfully");
+ }
+
+ @After(order = 1, value = "@BQ_SOURCE_TEST or @BQ_SOURCE_DATETRANSFORM_TEST")
+ public static void deleteTempSourceBQTable() throws IOException, InterruptedException {
+ String bqSourceTable = PluginPropertyUtils.pluginProp("bqSourceTable");
+ BigQueryClient.dropBqQuery(bqSourceTable);
+ BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " deleted successfully");
+ PluginPropertyUtils.removePluginProp("bqSourceTable");
+ }
+
+ @Before(order = 1, value = "@BQ_SOURCE_DATETRANSFORM_TEST")
+ public static void createSourceBQTableWithDifferentDataTypes() throws IOException, InterruptedException {
+ createSourceBQTableWithQueries(PluginPropertyUtils.pluginProp("dateTransformBQCreateTableQueryFile"),
+ PluginPropertyUtils.pluginProp("dateTransformBQInsertDataQueryFile"));
+ }
+
+ private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile, String bqInsertDataQueryFile) throws
+ IOException, InterruptedException {
+ String bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().replaceAll("-", "_");
+
+ String createTableQuery = StringUtils.EMPTY;
+ try {
+ createTableQuery = new String(Files.readAllBytes(Paths.get(TestSetupHooks.class.getResource
+ ("/" + bqCreateTableQueryFile).toURI()))
+ , StandardCharsets.UTF_8);
+ createTableQuery = createTableQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset"))
+ .replace("TABLE_NAME", bqSourceTable);
+ } catch (Exception e) {
+ BeforeActions.scenario.write("Exception in reading " + bqCreateTableQueryFile + " - " + e.getMessage());
+ Assert.fail("Exception in BigQuery testdata prerequisite setup " +
+ "- error in reading create table query file " + e.getMessage());
+ }
+
+ String insertDataQuery = StringUtils.EMPTY;
+ try {
+ insertDataQuery = new String(Files.readAllBytes(Paths.get(TestSetupHooks.class.getResource
+ ("/" + bqInsertDataQueryFile).toURI()))
+ , StandardCharsets.UTF_8);
+ insertDataQuery = insertDataQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset"))
+ .replace("TABLE_NAME", bqSourceTable);
+ } catch (Exception e) {
+ BeforeActions.scenario.write("Exception in reading " + bqInsertDataQueryFile + " - " + e.getMessage());
+ Assert.fail("Exception in BigQuery testdata prerequisite setup " +
+ "- error in reading insert data query file " + e.getMessage());
+ }
+ BigQueryClient.getSoleQueryResult(createTableQuery);
+ try {
+ BigQueryClient.getSoleQueryResult(insertDataQuery);
+ } catch (NoSuchElementException e) {
+ // Insert query does not return any record.
+ // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException
+ }
+ PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable);
+ BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " created successfully");
+ }
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java
new file mode 100644
index 0000000..c904873
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Package contains the stepDesign for the common features.
+ */
+package io.cdap.plugin.common.stepsdesign;
diff --git a/src/e2e-test/java/io/cdap/plugin/datetransform/runners/TestRunner.java b/src/e2e-test/java/io/cdap/plugin/datetransform/runners/TestRunner.java
new file mode 100644
index 0000000..37511d3
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/datetransform/runners/TestRunner.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright © 2022 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.datetransform.runners;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute DateTransform plugin testcases.
+ */
+@RunWith(Cucumber.class)
+@CucumberOptions(
+ features = {"src/e2e-test/features"},
+ glue = {"io.cdap.plugin.datetransform.stepsdesign", "stepsdesign", "io.cdap.plugin.common.stepsdesign"},
+ tags = {"@DateTransform and not @PLUGIN-1224 and not @PLUGIN-1225"},
+ /*TODO: Enable test once issue is fixed https://cdap.atlassian.net/browse/PLUGIN-1224
+ TODO: Enable test once issue is fixed https://cdap.atlassian.net/browse/PLUGIN-1225*/
+ monochrome = true,
+ plugin = {"pretty", "html:target/cucumber-html-report/datetransform-action",
+ "json:target/cucumber-reports/cucumber-datetransform-action.json",
+ "junit:target/cucumber-reports/cucumber-datetransform-action.xml"}
+)
+public class TestRunner {
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/datetransform/runners/package-info.java b/src/e2e-test/java/io/cdap/plugin/datetransform/runners/package-info.java
new file mode 100644
index 0000000..223e9ee
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/datetransform/runners/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright © 2022 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/**
+ * Package contains the runner for the DateTransform plugin.
+ */
+package io.cdap.plugin.datetransform.runners;
+
diff --git a/src/e2e-test/java/io/cdap/plugin/datetransform/stepsdesign/DateTransform.java b/src/e2e-test/java/io/cdap/plugin/datetransform/stepsdesign/DateTransform.java
new file mode 100644
index 0000000..e6ab7b5
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/datetransform/stepsdesign/DateTransform.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright © 2022 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.datetransform.stepsdesign;
+
+import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.CdfHelper;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cucumber.java.en.Then;
+import org.apache.commons.lang.StringUtils;
+import org.junit.Assert;
+import stepsdesign.BeforeActions;
+
+import java.io.IOException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Optional;
+
+/**
+ * DateTransform related Stepsdesign.
+ */
+
+public class DateTransform implements CdfHelper {
+
+ /**
+ * Verifies that every listed column in the target BigQuery table holds a value
+ * parseable with the expected date pattern; fails the scenario otherwise.
+ *
+ * @param dateFormat property key resolving to a {@link SimpleDateFormat} pattern
+ * @param fields property key resolving to a comma-separated list of column names
+ * @param targetBQTable property key resolving to the target BigQuery table name
+ * @throws IOException if the BigQuery query cannot be executed
+ * @throws InterruptedException if the BigQuery job is interrupted
+ */
+ @Then("Validate dateFormat {string} of the fields {string} in target BQ table {string}")
+ public void validateDateFormatOfTheFieldsInTargetBQTable(String dateFormat, String fields, String targetBQTable)
+ throws IOException, InterruptedException {
+ String[] fieldNames = PluginPropertyUtils.pluginProp(fields).split(",");
+ for (String field : fieldNames) {
+ // NOTE(review): the type parameter on Optional appears stripped by diff sanitization —
+ // presumably Optional<String> (raw Optional would not compile with the assignment below); verify.
+ // Assumes the table holds a single row (getSoleQueryResult) — TODO confirm against test fixtures.
+ Optional result = BigQueryClient
+ .getSoleQueryResult("SELECT " + field + " FROM `" + (PluginPropertyUtils.pluginProp("projectId")) + "."
+ + (PluginPropertyUtils.pluginProp("dataset")) + "."
+ + PluginPropertyUtils.pluginProp(targetBQTable) + "`");
+ String dateInTargetTable = StringUtils.EMPTY;
+ if (result.isPresent()) {
+ dateInTargetTable = result.get();
+ }
+ SimpleDateFormat sdf = new SimpleDateFormat(PluginPropertyUtils.pluginProp(dateFormat));
+ // Strict parsing: a value that does not exactly match the pattern must fail the check.
+ sdf.setLenient(false);
+ try {
+ sdf.parse(dateInTargetTable);
+ BeforeActions.scenario.write("Date field " + field + " is formatted to " + PluginPropertyUtils.pluginProp
+ (dateFormat) + " format in target table");
+ } catch (ParseException e) {
+ Assert.fail("Date transformation is not done for the field " + field + " in target table");
+ }
+ }
+ }
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/datetransform/stepsdesign/package-info.java b/src/e2e-test/java/io/cdap/plugin/datetransform/stepsdesign/package-info.java
new file mode 100644
index 0000000..5bcff04
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/datetransform/stepsdesign/package-info.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright © 2022 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/**
+ * Package contains the step designs for the DateTransform Plugin features.
+ */
+package io.cdap.plugin.datetransform.stepsdesign;
diff --git a/src/e2e-test/resources/errorMessage.properties b/src/e2e-test/resources/errorMessage.properties
new file mode 100644
index 0000000..dcd9e61
--- /dev/null
+++ b/src/e2e-test/resources/errorMessage.properties
@@ -0,0 +1,10 @@
+validationSuccessMessage=No errors found.
+validationErrorMessage=COUNT ERROR found
+errorMessageInputPath=Invalid schema: Input path not found
+errorMessageFileInvalidOutputField=Path field 'PATH_FIELD' must exist in input schema.
+errorMessageDateTransformInvalidSourceFieldName=Null error occurred while configuring the stage DateTransform.
+errorMessageDateTransformInvalidTargetFieldName=Error encountered while configuring the stage: 'Target Field must exist \
+ in output schema.'
+errorMessageDateTransformMustHaveSameNumberOfFields=Error encountered while configuring the stage: 'Target and source\
+ \ fields must have the same number of fields.'
+errorMessageDateTransformForInputSchema=Required property 'schema' has no value.
diff --git a/src/e2e-test/resources/logback-test.xml b/src/e2e-test/resources/logback-test.xml
new file mode 100644
index 0000000..bd2a458
--- /dev/null
+++ b/src/e2e-test/resources/logback-test.xml
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+
+ %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/e2e-test/resources/pluginDataCyAttributes.properties b/src/e2e-test/resources/pluginDataCyAttributes.properties
new file mode 100644
index 0000000..1867d94
--- /dev/null
+++ b/src/e2e-test/resources/pluginDataCyAttributes.properties
@@ -0,0 +1,11 @@
+referenceName=referenceName
+projectId=project
+datasetProjectId=datasetProject
+dataset=dataset
+table=table
+truncateTable=switch-truncateTable
+sourceFieldName=sourceField
+sourceFieldDateFormat=sourceFormat
+targetFieldName=targetField
+targetFieldDateFormat=targetFormat
+sourceInSecondsOrMilliseconds=secondsOrMilliseconds
diff --git a/src/e2e-test/resources/pluginParameters.properties b/src/e2e-test/resources/pluginParameters.properties
new file mode 100644
index 0000000..63b7377
--- /dev/null
+++ b/src/e2e-test/resources/pluginParameters.properties
@@ -0,0 +1,17 @@
+projectId=cdf-athena
+dataset=test_automation
+
+## DateTransform-PLUGIN-PROPERTIES-START
+dateTransformBQCreateTableQueryFile=testdata/BigQuery/bqDateTransformCreateTableQuery.txt
+dateTransformBQInsertDataQueryFile=testdata/BigQuery/bqDateTransformInsertQuery.txt
+dateTransform.SourceFieldName=date_of_admission
+dateTransform.TargetFieldName=date_of_admission
+dateTransform.SourceFieldDateFormat=yyyy-MM-dd
+dateTransform.TargetFieldDateFormat=dd-MM-yyyy
+dateTransform.SourceFieldNames=date_of_admission,date_of_joining
+dateTransform.TargetFieldNames=date_of_admission,date_of_joining
+dateTransform.SourceFieldNameWithLongDataType=transaction_date
+dateTransform.TargetFieldNameWithLongDataType=transaction_date
+dateTransform.IncorrectFieldName=@#$%^&
+dateTransform.sourceInSecondsOrMilliseconds=Milliseconds
+## DateTransform-PLUGIN-PROPERTIES-END
diff --git a/src/e2e-test/resources/testdata/BigQuery/bqDateTransformCreateTableQuery.txt b/src/e2e-test/resources/testdata/BigQuery/bqDateTransformCreateTableQuery.txt
new file mode 100644
index 0000000..b0d6115
--- /dev/null
+++ b/src/e2e-test/resources/testdata/BigQuery/bqDateTransformCreateTableQuery.txt
@@ -0,0 +1,2 @@
+create table `DATASET.TABLE_NAME` ( transaction_uid STRING,
+transaction_date INT64, date_of_admission STRING, date_of_joining STRING)
diff --git a/src/e2e-test/resources/testdata/BigQuery/bqDateTransformInsertQuery.txt b/src/e2e-test/resources/testdata/BigQuery/bqDateTransformInsertQuery.txt
new file mode 100644
index 0000000..790b7ff
--- /dev/null
+++ b/src/e2e-test/resources/testdata/BigQuery/bqDateTransformInsertQuery.txt
@@ -0,0 +1,2 @@
+insert into `DATASET.TABLE_NAME` (transaction_uid, transaction_date, date_of_admission, date_of_joining ) values
+('10',10111981,'2019-07-07','2021-09-06'), ('11',10111981,'2019-08-07','2020-09-06'), ('12',10111981,'2001-07-07','2021-09-06'),('13',10111981,'2010-07-07','2004-09-06');