cicd: refactor pipeline logic
Signed-off-by: Matthew Fala <[email protected]>
matthewfala committed Dec 12, 2023
1 parent 0ab5be1 commit b687cc4
Showing 12 changed files with 547 additions and 723 deletions.
17 changes: 11 additions & 6 deletions buildspec_load_test.yml
@@ -8,6 +8,7 @@ env:
LOG_STORAGE_STACK_NAME: 'load-test-fluent-bit-log-storage'
EKS_CLUSTER_NAME: 'load-test-fluent-bit-eks-cluster'
PREFIX: 'load-test-fluent-bit-'
LOAD_TEST_RUN_TIME_IN_SECONDS: 10 # Default 600

phases:
install:
@@ -61,17 +62,21 @@ phases:
- export AWS_ACCESS_KEY_ID=$(echo "${CREDS}" | jq -r '.Credentials.AccessKeyId')
- export AWS_SECRET_ACCESS_KEY=$(echo "${CREDS}" | jq -r '.Credentials.SecretAccessKey')
- export AWS_SESSION_TOKEN=$(echo "${CREDS}" | jq -r '.Credentials.SessionToken')
# Create and set up related testing resources
- python ./load_tests/load_test.py create_testing_resources
- source ./load_tests/setup_test_environment.sh
- export AWS_ACCESS_KEY_ID=""
- export AWS_SECRET_ACCESS_KEY=""
- export AWS_SESSION_TOKEN=""
# Run load tests on corresponding platform
- python ./load_tests/load_test.py ${PLATFORM}
finally:
# Clear up testing resources
- python ./load_tests/load_test.py delete_testing_resources
- |
for i in {1..2}; do
python ./load_tests/load_test.py ${PLATFORM} && break \
|| (
if [ $i == 2 ]; then
echo "Load test retries failed. Exiting."
exit 1
fi
echo "Retrying load tests. Attempt" $(($i+1)))
done
artifacts:
files:
- '**/*'
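The refactored load-test buildspec wraps the per-platform run in a small retry loop: the test gets one extra attempt, and only the final failure aborts the build. The same pattern, pulled out of the YAML as a standalone bash sketch (MAX_ATTEMPTS is an illustrative name, not a variable the pipeline defines):

    # Retry the load test up to MAX_ATTEMPTS times; only the last failure stops the build.
    MAX_ATTEMPTS=2
    for i in $(seq 1 "$MAX_ATTEMPTS"); do
      python ./load_tests/load_test.py "${PLATFORM}" && break || {
        if [ "$i" -eq "$MAX_ATTEMPTS" ]; then
          echo "Load test retries failed. Exiting."
          exit 1
        fi
        echo "Retrying load tests. Attempt $((i + 1))"
      }
    done

The committed version uses a subshell ( ... ) rather than a brace group; with braces the exit aborts the whole command block immediately, while with the subshell the final attempt's failure propagates as the loop's exit status, so either form fails the build phase.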
70 changes: 70 additions & 0 deletions buildspec_load_test_create_infra_ecs.yml
@@ -0,0 +1,70 @@
version: 0.2
env:
shell: bash
variables:
THROUGHPUT_LIST: '["20m", "25m", "30m"]'
CW_THROUGHPUT_LIST: '["1m", "2m", "3m"]'
TESTING_RESOURCES_STACK_NAME: 'load-test-fluent-bit-testing-resources'
LOG_STORAGE_STACK_NAME: 'load-test-fluent-bit-log-storage'
EKS_CLUSTER_NAME: 'load-test-fluent-bit-eks-cluster'
PREFIX: 'load-test-fluent-bit-'

phases:
install:
runtime-versions:
golang: 1.16
python: 3.x
nodejs: 14
pre_build:
commands:
- echo Running the load tests
# upgrade node version
- npm install -g n
- n stable
# install cdk
- npm config set prefix /usr/local
- npm install -g [email protected]
# install eksctl
- curl --silent --location "https://github.com/weaveworks/eksctl/releases/latest/download/eksctl_$(uname -s)_amd64.tar.gz" | tar xz -C /tmp
- mv /tmp/eksctl /usr/local/bin
# install kubectl
- curl -o kubectl https://amazon-eks.s3.us-west-2.amazonaws.com/1.21.2/2021-07-05/bin/linux/amd64/kubectl
- chmod +x ./kubectl
- mkdir -p $HOME/bin && cp ./kubectl $HOME/bin/kubectl && export PATH=$PATH:$HOME/bin
- echo 'export PATH=$PATH:$HOME/bin' >> ~/.bashrc
# install aws
- pip3 uninstall awscli -y
- curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
- unzip awscliv2.zip
- ./aws/install --bin-dir /usr/local/bin --install-dir /usr/local/bin --update
- which aws
- aws --version
# install aws-iam-authenticator
- curl -o aws-iam-authenticator https://amazon-eks.s3.us-west-2.amazonaws.com/1.21.2/2021-07-05/bin/linux/amd64/aws-iam-authenticator
- chmod +x ./aws-iam-authenticator
- mkdir -p $HOME/bin && cp ./aws-iam-authenticator $HOME/bin/aws-iam-authenticator && export PATH=$PATH:$HOME/bin
- echo 'export PATH=$PATH:$HOME/bin' >> ~/.bashrc
# pre-config
- aws configure set default.region us-west-2
build:
commands:
# Activate Python virtual environment and install the AWS CDK core dependencies
- python -m venv venv
- source venv/bin/activate
- pip install -r ./load_tests/requirements.txt
- |
if [ "${PLATFORM}" == "EKS" ]; then
aws eks update-kubeconfig --name $EKS_CLUSTER_NAME
fi
- CREDS=$(aws sts assume-role --role-arn $LOAD_TEST_CFN_ROLE_ARN --role-session-name load-test-cfn --duration-seconds 3600)
- export CREDS
- export AWS_ACCESS_KEY_ID=$(echo "${CREDS}" | jq -r '.Credentials.AccessKeyId')
- export AWS_SECRET_ACCESS_KEY=$(echo "${CREDS}" | jq -r '.Credentials.SecretAccessKey')
- export AWS_SESSION_TOKEN=$(echo "${CREDS}" | jq -r '.Credentials.SessionToken')
# Clear up testing resources
- python ./load_tests/load_test.py delete_testing_resources
# Create and set up related testing resources
- python ./load_tests/load_test.py create_testing_resources
artifacts:
files:
- '**/*'
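Both new infra buildspecs follow the same credential pattern used elsewhere in the refactor: assume the CloudFormation role and export its temporary credentials before driving the CDK-backed resource setup or teardown; the main load-test buildspec additionally clears them afterward so later commands fall back to the build role. A condensed sketch of that sequence, assuming the same LOAD_TEST_CFN_ROLE_ARN variable and load_test.py entry point shown above:

    # Assume the CFN role and export its temporary credentials.
    CREDS=$(aws sts assume-role \
      --role-arn "$LOAD_TEST_CFN_ROLE_ARN" \
      --role-session-name load-test-cfn \
      --duration-seconds 3600)
    export AWS_ACCESS_KEY_ID=$(echo "$CREDS" | jq -r '.Credentials.AccessKeyId')
    export AWS_SECRET_ACCESS_KEY=$(echo "$CREDS" | jq -r '.Credentials.SecretAccessKey')
    export AWS_SESSION_TOKEN=$(echo "$CREDS" | jq -r '.Credentials.SessionToken')

    # Run the resource setup under the assumed role.
    python ./load_tests/load_test.py create_testing_resources

    # Clear the temporary credentials so subsequent commands use the build role again.
    export AWS_ACCESS_KEY_ID=""
    export AWS_SECRET_ACCESS_KEY=""
    export AWS_SESSION_TOKEN=""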
68 changes: 68 additions & 0 deletions buildspec_load_test_delete_infra_ecs.yml
@@ -0,0 +1,68 @@
version: 0.2
env:
shell: bash
variables:
THROUGHPUT_LIST: '["20m", "25m", "30m"]'
CW_THROUGHPUT_LIST: '["1m", "2m", "3m"]'
TESTING_RESOURCES_STACK_NAME: 'load-test-fluent-bit-testing-resources'
LOG_STORAGE_STACK_NAME: 'load-test-fluent-bit-log-storage'
EKS_CLUSTER_NAME: 'load-test-fluent-bit-eks-cluster'
PREFIX: 'load-test-fluent-bit-'

phases:
install:
runtime-versions:
golang: 1.16
python: 3.x
nodejs: 14
pre_build:
commands:
- echo Running the load tests
# upgrade node version
- npm install -g n
- n stable
# install cdk
- npm config set prefix /usr/local
- npm install -g [email protected]
# install eksctl
- curl --silent --location "https://github.com/weaveworks/eksctl/releases/latest/download/eksctl_$(uname -s)_amd64.tar.gz" | tar xz -C /tmp
- mv /tmp/eksctl /usr/local/bin
# install kubectl
- curl -o kubectl https://amazon-eks.s3.us-west-2.amazonaws.com/1.21.2/2021-07-05/bin/linux/amd64/kubectl
- chmod +x ./kubectl
- mkdir -p $HOME/bin && cp ./kubectl $HOME/bin/kubectl && export PATH=$PATH:$HOME/bin
- echo 'export PATH=$PATH:$HOME/bin' >> ~/.bashrc
# install aws
- pip3 uninstall awscli -y
- curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
- unzip awscliv2.zip
- ./aws/install --bin-dir /usr/local/bin --install-dir /usr/local/bin --update
- which aws
- aws --version
# install aws-iam-authenticator
- curl -o aws-iam-authenticator https://amazon-eks.s3.us-west-2.amazonaws.com/1.21.2/2021-07-05/bin/linux/amd64/aws-iam-authenticator
- chmod +x ./aws-iam-authenticator
- mkdir -p $HOME/bin && cp ./aws-iam-authenticator $HOME/bin/aws-iam-authenticator && export PATH=$PATH:$HOME/bin
- echo 'export PATH=$PATH:$HOME/bin' >> ~/.bashrc
# pre-config
- aws configure set default.region us-west-2
build:
commands:
# Activate Python virtual environment and install the AWS CDK core dependencies
- python -m venv venv
- source venv/bin/activate
- pip install -r ./load_tests/requirements.txt
- |
if [ "${PLATFORM}" == "EKS" ]; then
aws eks update-kubeconfig --name $EKS_CLUSTER_NAME
fi
- CREDS=$(aws sts assume-role --role-arn $LOAD_TEST_CFN_ROLE_ARN --role-session-name load-test-cfn --duration-seconds 3600)
- export CREDS
- export AWS_ACCESS_KEY_ID=$(echo "${CREDS}" | jq -r '.Credentials.AccessKeyId')
- export AWS_SECRET_ACCESS_KEY=$(echo "${CREDS}" | jq -r '.Credentials.SecretAccessKey')
- export AWS_SESSION_TOKEN=$(echo "${CREDS}" | jq -r '.Credentials.SessionToken')
# Clear up testing resources
- python ./load_tests/load_test.py delete_testing_resources
artifacts:
files:
- '**/*'
2 changes: 1 addition & 1 deletion buildspec_publish_dockerhub.yml
@@ -38,7 +38,7 @@ phases:
- 'DRY_RUN="false" ./scripts/publish.sh cicd-publish dockerhub'

# Pull the image from dockerhub and verify
- './scripts/publish.sh cicd-verify dockerhub'
- './scripts/publish.sh cicd-verify-publish dockerhub'
artifacts:
files:
- '**/*'
4 changes: 2 additions & 2 deletions buildspec_publish_ecr.yml
@@ -36,7 +36,7 @@ phases:
fi
# Push the image to ECR
- './scripts/publish.sh cicd-publish ${REGION_TO_PUSH}'
- './scripts/publish.sh cicd-publish private-ecr'

# Nullify the temporary credentials for the assumed role to publish
- |
@@ -55,7 +55,7 @@ phases:
- export AWS_SESSION_TOKEN=`echo $CREDS | jq -r .Credentials.SessionToken`

# Verify from the verification account
- './scripts/publish.sh cicd-verify ${REGION_TO_PUSH}'
- './scripts/publish.sh cicd-verify-publish private-ecr'
artifacts:
files:
- '**/*'
2 changes: 1 addition & 1 deletion buildspec_publish_public_ecr.yml
@@ -61,7 +61,7 @@ phases:
fi
# Pull the image from Public ECR and verify
- './scripts/publish.sh cicd-verify public-ecr'
- './scripts/publish.sh cicd-verify-publish public-ecr'
artifacts:
files:
- '**/*'
2 changes: 1 addition & 1 deletion buildspec_publish_ssm.yml
@@ -11,7 +11,7 @@ phases:
commands:
# Enforce STS regional endpoints
- export AWS_STS_REGIONAL_ENDPOINTS=regional
- './scripts/publish.sh cicd-publish-ssm ${AWS_REGION}'
- './scripts/publish.sh cicd-publish-ssm'

# Assume role to verify, get the credentials, and set them as environment variables.
# Verification should be done using the credentials from a different account. It ensures that
51 changes: 33 additions & 18 deletions buildspec_sync.yml
@@ -14,34 +14,40 @@ phases:
- aws --version
build:
commands:
# Note: The "sync" task is used both on the primary distribution account
# and replica accounts. In the primary account, it is used for stable
# tag updates exclusively and their respective ssm param updates.
#
# In the replica accounts, it is used for replicating images to regional
# Private ECRs from the primary Public ECR, and replicating ssm params.
- export AWS_ACCOUNT=$(aws sts get-caller-identity --query Account --output text)

# Enforce STS regional endpoints
- export AWS_STS_REGIONAL_ENDPOINTS=regional

- './scripts/publish.sh cicd-publish ${AWS_REGION} stable'

# Publish stable tag to Dockerhub when AWS_REGION is us-west-2
- './scripts/publish.sh cicd-publish public-dockerhub-stable ${AWS_REGION}'

# Assume role to publish, get the credentials, and set them as environment variables
# Replica accounts: Private ECR sync
- './scripts/publish.sh cicd-replicate ${AWS_REGION}'

# Primary account: Private ECR stable update
- './scripts/publish.sh cicd-publish private-ecr-stable'
- './scripts/publish.sh cicd-publish dockerhub-stable'
# Assume role to publish, get the credentials, and set them as environment variables
- |
if [ "${PUBLISH_ROLE_ARN_PUBLIC_ECR}" != "" ]; then
CREDS=`aws sts assume-role --role-arn ${PUBLISH_ROLE_ARN_PUBLIC_ECR} --role-session-name publicECR`
export AWS_ACCESS_KEY_ID=`echo $CREDS | jq -r .Credentials.AccessKeyId`
export AWS_SECRET_ACCESS_KEY=`echo $CREDS | jq -r .Credentials.SecretAccessKey`
export AWS_SESSION_TOKEN=`echo $CREDS | jq -r .Credentials.SessionToken`
fi
# Publish stable tag to Public ECR when AWS_REGION is us-west-2
- './scripts/publish.sh cicd-publish public-ecr-stable ${AWS_REGION}'

# Nullify the temporary credentials for the assumed role to publish
- './scripts/publish.sh cicd-publish public-ecr-stable'
# Nullify the temporary credentials for the assumed role to publish
- |
if [ "${PUBLISH_ROLE_ARN_PUBLIC_ECR}" != "" ]; then
export AWS_ACCESS_KEY_ID=
export AWS_SECRET_ACCESS_KEY=
export AWS_SESSION_TOKEN=
fi
# Assume role to verify, get the credentials, and set them as environment variables.
# Verification should be done using the credentials from a different account. It ensures that
# the images we published are public and accessible from any account.
@@ -50,17 +56,26 @@ phases:
- export AWS_SECRET_ACCESS_KEY=`echo $CREDS | jq -r .Credentials.SecretAccessKey`
- export AWS_SESSION_TOKEN=`echo $CREDS | jq -r .Credentials.SessionToken`

# Verify from the verification account
- './scripts/publish.sh cicd-verify ${AWS_REGION} stable'
- './scripts/publish.sh cicd-verify-ssm ${AWS_REGION} stable'
# Replica accounts: Verify from the verification account
- './scripts/publish.sh cicd-verify-replicate ${AWS_REGION}'

# Primary account: Verify stable update
- './scripts/publish.sh cicd-verify-publish private-ecr-stable'

# Nullify the temporary credentials for the assumed role to verify
# Note: stable updates on primary account and sync task includes SSM updates

# Replica & primary accounts: Verify ssm update
- './scripts/publish.sh cicd-verify-ssm ${AWS_REGION} true'

# Nullify the temporary credentials for the assumed role to verify
- export AWS_ACCESS_KEY_ID=
- export AWS_SECRET_ACCESS_KEY=
- export AWS_SESSION_TOKEN=

# Verify the publishing on Public ECR and Dockerhub when AWS_REGION is us-west-2
- './scripts/publish.sh cicd-verify stable ${AWS_REGION}'
# Run verification in the pipeline account
- './scripts/publish.sh cicd-verify-publish dockerhub-stable'
- './scripts/publish.sh cicd-verify-publish public-ecr-stable'

artifacts:
files:
- '**/*'
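As the new comment block explains, the reworked sync buildspec runs on both the primary distribution account and the replica accounts, and it now records the caller's account via aws sts get-caller-identity. The committed spec invokes each publish.sh target unconditionally, presumably leaving it to the script to decide what applies in a given account. If that dispatch were made explicit in the buildspec instead, a sketch might look like the following (PRIMARY_ACCOUNT_ID is a hypothetical variable, not something the pipeline defines):

    # Hypothetical dispatch on the caller's account; the actual buildspec does not branch here.
    AWS_ACCOUNT=$(aws sts get-caller-identity --query Account --output text)
    if [ "$AWS_ACCOUNT" = "$PRIMARY_ACCOUNT_ID" ]; then
      # Primary account: stable tag updates.
      ./scripts/publish.sh cicd-publish private-ecr-stable
      ./scripts/publish.sh cicd-publish dockerhub-stable
    else
      # Replica accounts: sync images from the primary Public ECR into the regional private ECR.
      ./scripts/publish.sh cicd-replicate "$AWS_REGION"
    fi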
4 changes: 3 additions & 1 deletion load_tests/.gitignore
@@ -1,5 +1,7 @@
create_testing_resources/cdk.out
create_testing_resources/kinesis_s3_firehose/cdk.out
create_testing_resources/kinesis_s3_firehose/load-test-fluent-bit-
task_definitions/*_*m.json
__pycache__
.venv
.venv
.env