diff --git a/README.md b/README.md
index 4b0095e..3e5f17f 100644
--- a/README.md
+++ b/README.md
@@ -283,7 +283,7 @@ MINOR, and PATCH versions on each release to indicate any incompatibilities.
| Name | Version |
|------|---------|
-| [aws](#provider\_aws) | 3.54.0 |
+| [aws](#provider\_aws) | 3.56.0 |
## Modules
diff --git a/docs/deployment/part2.md b/docs/deployment/part2.md
index 1ff5736..be526a7 100644
--- a/docs/deployment/part2.md
+++ b/docs/deployment/part2.md
@@ -55,6 +55,7 @@ No modules.
| [codebuild\_environment\_image](#input\_codebuild\_environment\_image) | Docker image to use for this build project. | `string` | `"aws/codebuild/amazonlinux2-x86_64-standard:3.0"` | no |
| [codebuild\_environment\_type](#input\_codebuild\_environment\_type) | Type of build environment to use for related builds. | `string` | `"LINUX_CONTAINER"` | no |
| [codebuild\_role\_arn](#input\_codebuild\_role\_arn) | ARN of an existing IAM role for CodeBuild execution. If empty, a dedicated role for your Lambda function with minimal required permissions will be created. | `string` | `""` | no |
+| [codepipeline\_artifact\_store\_bucket](#input\_codepipeline\_artifact\_store\_bucket) | Name of an existing S3 bucket used by AWS CodePipeline to store pipeline artifacts. Use the same bucket name as in `s3_bucket` to store deployment packages and pipeline artifacts in one bucket for `package_type=Zip` functions. If empty, a dedicated S3 bucket for your Lambda function will be created. | `string` | `""` | no |
| [codepipeline\_role\_arn](#input\_codepipeline\_role\_arn) | ARN of an existing IAM role for CodePipeline execution. If empty, a dedicated role for your Lambda function with minimal required permissions will be created. | `string` | `""` | no |
| [codestar\_notifications\_detail\_type](#input\_codestar\_notifications\_detail\_type) | The level of detail to include in the notifications for this resource. Possible values are BASIC and FULL. | `string` | `"BASIC"` | no |
| [codestar\_notifications\_enabled](#input\_codestar\_notifications\_enabled) | Enable CodeStar notifications for your pipeline. | `bool` | `true` | no |
diff --git a/docs/part2.md b/docs/part2.md
index 451d931..5c75349 100644
--- a/docs/part2.md
+++ b/docs/part2.md
@@ -9,7 +9,7 @@
| Name | Version |
|------|---------|
-| [aws](#provider\_aws) | 3.54.0 |
+| [aws](#provider\_aws) | 3.56.0 |
## Modules
diff --git a/examples/deployment/container-image/README.md b/examples/deployment/container-image/README.md
index c2af711..1f968e0 100644
--- a/examples/deployment/container-image/README.md
+++ b/examples/deployment/container-image/README.md
@@ -11,6 +11,17 @@ terraform plan
Note that this example may create resources which cost money. Run `terraform destroy` to destroy those resources.
+### Deploy
+
+Push an updated container image to `ECR` to start the deployment pipeline:
+
+```shell
+aws ecr get-login-password --region {region} | docker login --username AWS --password-stdin {account_id}.dkr.ecr.{region}.amazonaws.com
+docker build --tag {account_id}.dkr.ecr.{region}.amazonaws.com/with-ecr-codepipeline:production {context}
+docker push {account_id}.dkr.ecr.{region}.amazonaws.com/with-ecr-codepipeline:production
+```
+
+
## Requirements
| Name | Version |
diff --git a/examples/deployment/container-image/main.tf b/examples/deployment/container-image/main.tf
index 0fdfc6e..c00b176 100644
--- a/examples/deployment/container-image/main.tf
+++ b/examples/deployment/container-image/main.tf
@@ -1,6 +1,6 @@
locals {
environment = "production"
- function_name = "example-with-ecr-codepipeline"
+ function_name = "with-ecr-codepipeline"
}
module "lambda" {
diff --git a/examples/deployment/s3/README.md b/examples/deployment/s3/README.md
index 4b0ebbc..b9eb884 100644
--- a/examples/deployment/s3/README.md
+++ b/examples/deployment/s3/README.md
@@ -11,6 +11,14 @@ terraform plan
Note that this example may create resources which cost money. Run `terraform destroy` to destroy those resources.
+### Deploy
+
+Upload a new `zip` package to S3 to start the deployment pipeline:
+
+```shell
+aws s3api put-object --bucket example-ci-{account_id}-{region} --key with-s3-codepipeline/package/lambda.zip --body lambda.zip
+```
+
## Requirements
| Name | Version |
diff --git a/examples/deployment/s3/main.tf b/examples/deployment/s3/main.tf
index 2a16373..20c451c 100644
--- a/examples/deployment/s3/main.tf
+++ b/examples/deployment/s3/main.tf
@@ -8,8 +8,8 @@ module "function" {
locals {
cloudtrail_s3_prefix = "cloudtrail"
environment = "production"
- function_name = "example-with-s3-codepipeline"
- s3_key = "package/lambda.zip"
+ function_name = "with-s3-codepipeline"
+ s3_key = "${local.function_name}/package/lambda.zip"
}
module "lambda" {
@@ -20,9 +20,9 @@ module "lambda" {
ignore_external_function_updates = true
publish = true
runtime = "nodejs14.x"
- s3_bucket = aws_s3_bucket_object.source.bucket
+ s3_bucket = aws_s3_bucket.source.bucket
s3_key = local.s3_key
- s3_object_version = aws_s3_bucket_object.source.version_id
+ s3_object_version = aws_s3_bucket_object.initial.version_id
}
# ---------------------------------------------------------------------------------------------------------------------
@@ -42,11 +42,12 @@ resource "aws_lambda_alias" "this" {
module "deployment" {
source = "../../../modules/deployment"
- alias_name = aws_lambda_alias.this.name
- create_codepipeline_cloudtrail = false // for brevity only, it's recommended to create a central CloudTrail for all S3 based Lambda functions externally to this module (see below)
- function_name = local.function_name
- s3_bucket = aws_s3_bucket_object.source.bucket
- s3_key = local.s3_key
+ alias_name = aws_lambda_alias.this.name
+  create_codepipeline_cloudtrail     = false // for brevity only, it's recommended to create a central CloudTrail for all S3-based Lambda functions externally to this module (see resources below)
+ codepipeline_artifact_store_bucket = aws_s3_bucket.source.bucket // example to (optionally) use the same bucket for deployment packages and pipeline artifacts
+ function_name = local.function_name
+ s3_bucket = aws_s3_bucket.source.bucket
+ s3_key = local.s3_key
}
# ---------------------------------------------------------------------------------------------------------------------
@@ -55,7 +56,7 @@ module "deployment" {
resource "aws_s3_bucket" "source" {
acl = "private"
- bucket = "${local.function_name}-sources-${data.aws_caller_identity.current.account_id}-${data.aws_region.current.name}"
+ bucket = "example-ci-${data.aws_caller_identity.current.account_id}-${data.aws_region.current.name}"
force_destroy = true
versioning {
@@ -74,7 +75,7 @@ resource "aws_s3_bucket_public_access_block" "source" {
// this resource is only used for the initial `terraform apply` - all further
// deployments are running on CodePipeline
-resource "aws_s3_bucket_object" "source" {
+resource "aws_s3_bucket_object" "initial" {
bucket = aws_s3_bucket.source.bucket
key = local.s3_key
source = module.function.output_path
diff --git a/modules/deployment/README.md b/modules/deployment/README.md
index 6bd22ff..577442a 100644
--- a/modules/deployment/README.md
+++ b/modules/deployment/README.md
@@ -219,6 +219,7 @@ No modules.
| [codebuild\_environment\_image](#input\_codebuild\_environment\_image) | Docker image to use for this build project. | `string` | `"aws/codebuild/amazonlinux2-x86_64-standard:3.0"` | no |
| [codebuild\_environment\_type](#input\_codebuild\_environment\_type) | Type of build environment to use for related builds. | `string` | `"LINUX_CONTAINER"` | no |
| [codebuild\_role\_arn](#input\_codebuild\_role\_arn) | ARN of an existing IAM role for CodeBuild execution. If empty, a dedicated role for your Lambda function with minimal required permissions will be created. | `string` | `""` | no |
+| [codepipeline\_artifact\_store\_bucket](#input\_codepipeline\_artifact\_store\_bucket) | Name of an existing S3 bucket used by AWS CodePipeline to store pipeline artifacts. Use the same bucket name as in `s3_bucket` to store deployment packages and pipeline artifacts in one bucket for `package_type=Zip` functions. If empty, a dedicated S3 bucket for your Lambda function will be created. | `string` | `""` | no |
| [codepipeline\_role\_arn](#input\_codepipeline\_role\_arn) | ARN of an existing IAM role for CodePipeline execution. If empty, a dedicated role for your Lambda function with minimal required permissions will be created. | `string` | `""` | no |
| [codestar\_notifications\_detail\_type](#input\_codestar\_notifications\_detail\_type) | The level of detail to include in the notifications for this resource. Possible values are BASIC and FULL. | `string` | `"BASIC"` | no |
| [codestar\_notifications\_enabled](#input\_codestar\_notifications\_enabled) | Enable CodeStar notifications for your pipeline. | `bool` | `true` | no |
diff --git a/modules/deployment/iam_codebuild.tf b/modules/deployment/iam_codebuild.tf
index 0693a02..a280540 100644
--- a/modules/deployment/iam_codebuild.tf
+++ b/modules/deployment/iam_codebuild.tf
@@ -64,11 +64,10 @@ resource "aws_iam_role" "codebuild_role" {
},
{
Action = [
- "s3:Get*",
- "s3:PutObject"
+ "s3:Get*"
]
Effect = "Allow"
- Resource = "${aws_s3_bucket.pipeline.arn}/*"
+ Resource = "${local.artifact_store_bucket_arn}/*"
}
]
})
diff --git a/modules/deployment/iam_codepipeline.tf b/modules/deployment/iam_codepipeline.tf
index c7c883f..43b26dd 100644
--- a/modules/deployment/iam_codepipeline.tf
+++ b/modules/deployment/iam_codepipeline.tf
@@ -82,8 +82,8 @@ resource "aws_iam_role" "codepipeline_role" {
]
Effect = "Allow"
Resource = [
- aws_s3_bucket.pipeline.arn,
- "${aws_s3_bucket.pipeline.arn}/*"
+ local.artifact_store_bucket_arn,
+ "${local.artifact_store_bucket_arn}/*"
]
}
]
diff --git a/modules/deployment/main.tf b/modules/deployment/main.tf
index 3b474d8..84bc593 100644
--- a/modules/deployment/main.tf
+++ b/modules/deployment/main.tf
@@ -1,13 +1,18 @@
data "aws_caller_identity" "current" {}
data "aws_region" "current" {}
+locals {
+ artifact_store_bucket = var.codepipeline_artifact_store_bucket != "" ? var.codepipeline_artifact_store_bucket : aws_s3_bucket.pipeline[0].bucket
+ artifact_store_bucket_arn = "arn:aws:s3:::${local.artifact_store_bucket}"
+}
+
resource "aws_codepipeline" "this" {
name = var.function_name
role_arn = var.codepipeline_role_arn == "" ? aws_iam_role.codepipeline_role[0].arn : var.codepipeline_role_arn
tags = var.tags
artifact_store {
- location = aws_s3_bucket.pipeline.bucket
+ location = local.artifact_store_bucket
type = "S3"
}
@@ -86,6 +91,8 @@ resource "aws_codepipeline" "this" {
}
resource "aws_s3_bucket" "pipeline" {
+ count = var.codepipeline_artifact_store_bucket == "" ? 1 : 0
+
acl = "private"
bucket = "${var.function_name}-pipeline-${data.aws_caller_identity.current.account_id}-${data.aws_region.current.name}"
force_destroy = true
@@ -93,8 +100,9 @@ resource "aws_s3_bucket" "pipeline" {
}
resource "aws_s3_bucket_public_access_block" "source" {
- bucket = aws_s3_bucket.pipeline.id
+ count = var.codepipeline_artifact_store_bucket == "" ? 1 : 0
+ bucket = aws_s3_bucket.pipeline[count.index].id
block_public_acls = true
block_public_policy = true
ignore_public_acls = true
diff --git a/modules/deployment/variables.tf b/modules/deployment/variables.tf
index 942d905..d9a52e1 100644
--- a/modules/deployment/variables.tf
+++ b/modules/deployment/variables.tf
@@ -24,6 +24,12 @@ variable "create_codepipeline_cloudtrail" {
type = bool
}
+variable "codepipeline_artifact_store_bucket" {
+ description = "Name of an existing S3 bucket used by AWS CodePipeline to store pipeline artifacts. Use the same bucket name as in `s3_bucket` to store deployment packages and pipeline artifacts in one bucket for `package_type=Zip` functions. If empty, a dedicated S3 bucket for your Lambda function will be created."
+ default = ""
+ type = string
+}
+
variable "codepipeline_role_arn" {
description = "ARN of an existing IAM role for CodePipeline execution. If empty, a dedicated role for your Lambda function with minimal required permissions will be created."
default = ""