diff --git a/.github/workflows/run_release_tests.yml b/.github/workflows/run_release_tests.yml index a7a1b9bd11..feebd4bb3d 100644 --- a/.github/workflows/run_release_tests.yml +++ b/.github/workflows/run_release_tests.yml @@ -50,5 +50,4 @@ jobs: RESULTS_LOCATION: ${{ secrets.RESULTS_LOCATION }} DATABASE_PASSWORD: ${{ secrets.DATABASE_PASSWORD }} S3_DATA_PATH: ${{ secrets.S3_DATA_PATH }} - S3_JARS_BUCKET: ${{ secrets.S3_JARS_BUCKET }} SPILL_BUCKET: ${{ secrets.SPILL_BUCKET }} diff --git a/validation_testing/cdk_federation_infra_provisioning/app/lib/stacks/dynamo-stack.ts b/validation_testing/cdk_federation_infra_provisioning/app/lib/stacks/dynamo-stack.ts index 0cd3bacb84..7e1dfbee85 100644 --- a/validation_testing/cdk_federation_infra_provisioning/app/lib/stacks/dynamo-stack.ts +++ b/validation_testing/cdk_federation_infra_provisioning/app/lib/stacks/dynamo-stack.ts @@ -2,6 +2,7 @@ import * as cdk from 'aws-cdk-lib'; import * as glue from '@aws-cdk/aws-glue-alpha'; import * as ddb from 'aws-cdk-lib/aws-dynamodb'; import * as iam from 'aws-cdk-lib/aws-iam'; +import { Repository } from 'aws-cdk-lib/aws-ecr'; import { CfnInclude } from 'aws-cdk-lib/cloudformation-include'; import { Construct } from 'constructs'; import tpcdsJson from '../../resources/tpcds_specs.json' @@ -56,6 +57,31 @@ export class DynamoDBStack extends cdk.Stack { SpillBucket: spill_bucket } }); + const ecrRepo = new Repository(this, 'DynamoDBRepository', { + repositoryName: 'athena-federation-repository-dynamodb', + emptyOnDelete: true + }); + ecrRepo.addToResourcePolicy( + new iam.PolicyStatement({ + sid: 'CrossAccountPermission', + effect: iam.Effect.ALLOW, + actions: ['ecr:BatchGetImage', 'ecr:GetDownloadUrlForLayer'], + principals: [new iam.AnyPrincipal()], + }), + ); + ecrRepo.addToResourcePolicy( + new iam.PolicyStatement({ + sid: 'LambdaECRImageCrossAccountRetrievalPolicy', + effect: iam.Effect.ALLOW, + actions: ['ecr:BatchGetImage', 'ecr:GetDownloadUrlForLayer'], + principals: [new 
iam.ServicePrincipal('lambda.amazonaws.com')], + conditions: { + StringLike: { + 'aws:sourceArn': 'arn:aws:lambda:*:*:function:*', + }, + }, + }), + ); } initDdbTableWithHashKey(tableName: string, hashKey: string) { diff --git a/validation_testing/cdk_federation_infra_provisioning/app/lib/stacks/opensearch-stack.ts b/validation_testing/cdk_federation_infra_provisioning/app/lib/stacks/opensearch-stack.ts index 5b2378e815..78c00b85c4 100644 --- a/validation_testing/cdk_federation_infra_provisioning/app/lib/stacks/opensearch-stack.ts +++ b/validation_testing/cdk_federation_infra_provisioning/app/lib/stacks/opensearch-stack.ts @@ -5,6 +5,7 @@ import * as ec2 from 'aws-cdk-lib/aws-ec2'; import * as glue from '@aws-cdk/aws-glue-alpha'; import * as s3 from 'aws-cdk-lib/aws-s3'; import * as iam from 'aws-cdk-lib/aws-iam'; +import { Repository } from 'aws-cdk-lib/aws-ecr'; import { CfnInclude } from 'aws-cdk-lib/cloudformation-include'; const path = require('path'); import {FederationStackProps} from './stack-props' @@ -176,5 +177,31 @@ export class OpenSearchStack extends cdk.Stack { 'SpillBucket': spill_bucket, } }); + + const ecrRepo = new Repository(this, 'ElasticsearchRepository', { + repositoryName: 'athena-federation-repository-elasticsearch', + emptyOnDelete: true + }); + ecrRepo.addToResourcePolicy( + new iam.PolicyStatement({ + sid: 'CrossAccountPermission', + effect: iam.Effect.ALLOW, + actions: ['ecr:BatchGetImage', 'ecr:GetDownloadUrlForLayer'], + principals: [new iam.AnyPrincipal()], + }), + ); + ecrRepo.addToResourcePolicy( + new iam.PolicyStatement({ + sid: 'LambdaECRImageCrossAccountRetrievalPolicy', + effect: iam.Effect.ALLOW, + actions: ['ecr:BatchGetImage', 'ecr:GetDownloadUrlForLayer'], + principals: [new iam.ServicePrincipal('lambda.amazonaws.com')], + conditions: { + StringLike: { + 'aws:sourceArn': 'arn:aws:lambda:*:*:function:*', + }, + }, + }), + ); } } diff --git 
a/validation_testing/cdk_federation_infra_provisioning/app/lib/stacks/rds-generic-stack.ts b/validation_testing/cdk_federation_infra_provisioning/app/lib/stacks/rds-generic-stack.ts index c6bde711fc..65b7c1e327 100644 --- a/validation_testing/cdk_federation_infra_provisioning/app/lib/stacks/rds-generic-stack.ts +++ b/validation_testing/cdk_federation_infra_provisioning/app/lib/stacks/rds-generic-stack.ts @@ -3,6 +3,7 @@ import * as rds from 'aws-cdk-lib/aws-rds'; import * as ec2 from 'aws-cdk-lib/aws-ec2'; import * as glue from '@aws-cdk/aws-glue-alpha'; import * as iam from 'aws-cdk-lib/aws-iam' +import { Repository } from 'aws-cdk-lib/aws-ecr'; import { CfnInclude } from 'aws-cdk-lib/cloudformation-include'; import { Construct } from 'constructs'; const path = require('path') @@ -152,6 +153,32 @@ export class RdsGenericStack extends cdk.Stack { 'SpillBucket': spill_bucket, } }); + + const ecrRepo = new Repository(this, `${db_type}Repository`, { + repositoryName: `athena-federation-repository-${db_type}`, + emptyOnDelete: true + }); + ecrRepo.addToResourcePolicy( + new iam.PolicyStatement({ + sid: 'CrossAccountPermission', + effect: iam.Effect.ALLOW, + actions: ['ecr:BatchGetImage', 'ecr:GetDownloadUrlForLayer'], + principals: [new iam.AnyPrincipal()], + }), + ); + ecrRepo.addToResourcePolicy( + new iam.PolicyStatement({ + sid: 'LambdaECRImageCrossAccountRetrievalPolicy', + effect: iam.Effect.ALLOW, + actions: ['ecr:BatchGetImage', 'ecr:GetDownloadUrlForLayer'], + principals: [new iam.ServicePrincipal('lambda.amazonaws.com')], + conditions: { + StringLike: { + 'aws:sourceArn': 'arn:aws:lambda:*:*:function:*', + }, + }, + }), + ); } getEngineVersion(db_type: string): rds.IClusterEngine { diff --git a/validation_testing/cdk_federation_infra_provisioning/app/lib/stacks/redshift-stack.ts b/validation_testing/cdk_federation_infra_provisioning/app/lib/stacks/redshift-stack.ts index 8f70345b02..fcc6191cba 100644 --- 
a/validation_testing/cdk_federation_infra_provisioning/app/lib/stacks/redshift-stack.ts +++ b/validation_testing/cdk_federation_infra_provisioning/app/lib/stacks/redshift-stack.ts @@ -148,6 +148,32 @@ export class RedshiftStack extends cdk.Stack { 'SpillBucket': spill_bucket, } }); + + const ecrRepo = new Repository(this, 'RedshiftRepository', { + repositoryName: 'athena-federation-repository-redshift', + emptyOnDelete: true + }); + ecrRepo.addToResourcePolicy( + new iam.PolicyStatement({ + sid: 'CrossAccountPermission', + effect: iam.Effect.ALLOW, + actions: ['ecr:BatchGetImage', 'ecr:GetDownloadUrlForLayer'], + principals: [new iam.AnyPrincipal()], + }), + ); + ecrRepo.addToResourcePolicy( + new iam.PolicyStatement({ + sid: 'LambdaECRImageCrossAccountRetrievalPolicy', + effect: iam.Effect.ALLOW, + actions: ['ecr:BatchGetImage', 'ecr:GetDownloadUrlForLayer'], + principals: [new iam.ServicePrincipal('lambda.amazonaws.com')], + conditions: { + StringLike: { + 'aws:sourceArn': 'arn:aws:lambda:*:*:function:*', + }, + }, + }), + ); } } diff --git a/validation_testing/deploy_infra.sh b/validation_testing/deploy_infra.sh index a02961918b..efe0b9d4b2 100644 --- a/validation_testing/deploy_infra.sh +++ b/validation_testing/deploy_infra.sh @@ -1,11 +1,6 @@ - CONNECTOR_NAME=$1 VALIDATION_TESTING_ROOT=$REPOSITORY_ROOT/validation_testing -# upload connector jar to s3 and update yaml to s3 uri, redirect to /dev/null to not log the s3 path -aws s3 cp $REPOSITORY_ROOT/athena-$CONNECTOR_NAME/target/athena-$CONNECTOR_NAME-2022.47.1.jar "$S3_JARS_BUCKET/" > /dev/null -sed -i "s#CodeUri: \"./target/athena-$CONNECTOR_NAME-2022.47.1.jar\"#CodeUri: \"$S3_JARS_BUCKET/athena-$CONNECTOR_NAME-2022.47.1.jar\"#" $REPOSITORY_ROOT/athena-$CONNECTOR_NAME/athena-$CONNECTOR_NAME.yaml - # go to cdk dir, build/synth/deploy cd $(dirname $(find . 
-name ATHENA_INFRA_SPINUP_ROOT))/app; @@ -20,6 +15,22 @@ npm run build; npm run cdk synth; npm run cdk deploy ${CONNECTOR_NAME}CdkStack > /dev/null; -sed -i "s#CodeUri: \"$S3_JARS_BUCKET/athena-$CONNECTOR_NAME-2022.47.1.jar\"#CodeUri: \"./target/athena-$CONNECTOR_NAME-2022.47.1.jar\"#" $REPOSITORY_ROOT/athena-$CONNECTOR_NAME/athena-$CONNECTOR_NAME.yaml - echo "FINISHED DEPLOYING INFRA FOR ${CONNECTOR_NAME}." + +# cd back to validation root +cd $VALIDATION_TESTING_ROOT + +# get the AWS account ID from the current role (for use in ECR repo name) +ACCOUNT_ID="$(aws sts get-caller-identity --query Account --output text)" + +# now we push the ECR image for the connector to the ECR repository created in the CDK stack +aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin $ACCOUNT_ID.dkr.ecr.us-east-1.amazonaws.com +docker build -t athena-federation-repository-$CONNECTOR_NAME $REPOSITORY_ROOT/athena-$CONNECTOR_NAME +docker tag athena-federation-repository-$CONNECTOR_NAME\:latest $ACCOUNT_ID.dkr.ecr.us-east-1.amazonaws.com/athena-federation-repository-$CONNECTOR_NAME\:latest +docker push $ACCOUNT_ID.dkr.ecr.us-east-1.amazonaws.com/athena-federation-repository-$CONNECTOR_NAME\:latest + +# update the template to use the correct ImageUri +sed -i "s|292517598671|$ACCOUNT_ID|g" "$REPOSITORY_ROOT/athena-$CONNECTOR_NAME/athena-$CONNECTOR_NAME.yaml" +sed -i "s#\(/athena-federation-repository-$CONNECTOR_NAME:\)[0-9]\{4\}\.[0-9]\{1,2\}\.[0-9]\{1\}#\1latest#" $REPOSITORY_ROOT/athena-$CONNECTOR_NAME/athena-$CONNECTOR_NAME.yaml + +echo "FINISHED PUSHING CONNECTOR IMAGE TO ECR REPOSITORY" diff --git a/validation_testing/main.py b/validation_testing/main.py index bf03f13670..9bb8f10274 100644 --- a/validation_testing/main.py +++ b/validation_testing/main.py @@ -19,7 +19,6 @@ def assert_required_env_vars_set(): 'REPOSITORY_ROOT', 'DATABASE_PASSWORD', 'S3_DATA_PATH', - 'S3_JARS_BUCKET', 'SPILL_BUCKET' ] if not all([os.environ.get(env_var) for env_var
in required_env_vars]): diff --git a/validation_testing/run_release_tests.sh b/validation_testing/run_release_tests.sh index 5b0a7c815b..0561c70959 100644 --- a/validation_testing/run_release_tests.sh +++ b/validation_testing/run_release_tests.sh @@ -7,10 +7,6 @@ CONNECTOR_NAME=$1 VALIDATION_TESTING_ROOT=$REPOSITORY_ROOT/validation_testing -# upload connector jar to s3 and update yaml to s3 uri, redirect to /dev/null to not log the s3 path -aws s3 cp $REPOSITORY_ROOT/athena-$CONNECTOR_NAME/target/athena-$CONNECTOR_NAME-2022.47.1.jar $S3_JARS_BUCKET > /dev/null -sed -i "s#CodeUri: \"./target/athena-$CONNECTOR_NAME-2022.47.1.jar\"#CodeUri: \"$S3_JARS_BUCKET/athena-$CONNECTOR_NAME-2022.47.1.jar\"#" $REPOSITORY_ROOT/athena-$CONNECTOR_NAME/athena-$CONNECTOR_NAME.yaml - # go to cdk dir, build/synth/deploy cd $(dirname $(find . -name ATHENA_INFRA_SPINUP_ROOT))/app; @@ -25,13 +21,26 @@ npm run build; npm run cdk synth; npm run cdk deploy ${CONNECTOR_NAME}CdkStack > /dev/null; -sed -i "s#CodeUri: \"$S3_JARS_BUCKET/athena-$CONNECTOR_NAME-2022.47.1.jar\"#CodeUri: \"./target/athena-$CONNECTOR_NAME-2022.47.1.jar\"#" $REPOSITORY_ROOT/athena-$CONNECTOR_NAME/athena-$CONNECTOR_NAME.yaml - echo "FINISHED DEPLOYING INFRA FOR ${CONNECTOR_NAME}." 
# cd back to validation root cd $VALIDATION_TESTING_ROOT +# get the AWS account ID from the current role (for use in ECR repo name) +ACCOUNT_ID="$(aws sts get-caller-identity --query Account --output text)" + +# now we push the ECR image for the connector to the ECR repository created in the CDK stack +aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin $ACCOUNT_ID.dkr.ecr.us-east-1.amazonaws.com +docker build -t athena-federation-repository-$CONNECTOR_NAME $REPOSITORY_ROOT/athena-$CONNECTOR_NAME +docker tag athena-federation-repository-$CONNECTOR_NAME\:latest $ACCOUNT_ID.dkr.ecr.us-east-1.amazonaws.com/athena-federation-repository-$CONNECTOR_NAME\:latest +docker push $ACCOUNT_ID.dkr.ecr.us-east-1.amazonaws.com/athena-federation-repository-$CONNECTOR_NAME\:latest + +# update the template to use the correct ImageUri +sed -i "s|292517598671|$ACCOUNT_ID|g" "$REPOSITORY_ROOT/athena-$CONNECTOR_NAME/athena-$CONNECTOR_NAME.yaml" +sed -i "s#\(/athena-federation-repository-$CONNECTOR_NAME:\)[0-9]\{4\}\.[0-9]\{1,2\}\.[0-9]\{1\}#\1latest#" $REPOSITORY_ROOT/athena-$CONNECTOR_NAME/athena-$CONNECTOR_NAME.yaml + +echo "FINISHED PUSHING CONNECTOR IMAGE TO ECR REPOSITORY" + # now we run the glue jobs that the CDK stack created # If there is any output to glue_job_synchronous_execution.py, we will exit this script with a failure code. # The 2>&1 lets us pipe both stdout and stderr to grep, as opposed to just the stdout. https://stackoverflow.com/questions/818255/what-does-21-mean