Merge pull request #6 from aidanmelen/confluent_platform_tls_only
autogenerated_tls_only
aidanmelen authored Aug 6, 2022
2 parents 7458e15 + ecf5efa commit 66b8f2b
Showing 54 changed files with 1,181 additions and 127 deletions.
1 change: 1 addition & 0 deletions .make.docs
@@ -12,4 +12,5 @@ test-complete Test the complete example
test-kafka-topic Test the kafka_topic example
test-schema Test the schema example
test-connector Test the connector example
test-confluent-role-binding Test the confluent_role_binding example
clean Clean project
2 changes: 1 addition & 1 deletion .terraform-docs.yml
@@ -6,7 +6,7 @@ content: |-
### Confluent Platform
```hcl
{{ include "examples/complete/.main.tf.docs" }}
{{ include "examples/confluent_platform/.main.tf.docs" }}
```
## Usage
14 changes: 13 additions & 1 deletion Makefile
@@ -23,13 +23,16 @@ setup: apply-cfk-crds ## Setup project
# terraform
terraform init
cd modules/confluent_operator && terraform init
cd modules/kafka_rest_class && terraform init
cd modules/confluent_role_binding && terraform init
cd modules/kafka_topic && terraform init
cd modules/connector && terraform init
cd modules/schema && terraform init
cd examples/confluent_operator && terraform init
cd examples/confluent_platform && terraform init
cd examples/confluent_platform_singlenode && terraform init
cd examples/complete && terraform init
cd examples/autogenerated_tls_only && terraform init
cd examples/kafka_topic && terraform init
cd examples/schema && terraform init
cd examples/connector && terraform init
@@ -58,7 +61,7 @@ lint-all: docs ## Lint all files with pre-commit
pre-commit run --all-files
git add -A

tests: test-confluent-operator test-confluent-platform test-confluent-platform-singlenode test-complete test-kafka-topic test-schema test-connector ## Tests with Terratest
tests: test-confluent-operator test-confluent-platform test-confluent-platform-singlenode test-complete test-kafka-topic test-schema test-connector test-confluent-role-binding ## Tests with Terratest

test-confluent-operator: ## Test the confluent_operator example
go test test/terraform_confluent_operator_test.go -timeout 5m -v |& tee test/terraform_confluent_operator_test.log
@@ -92,33 +95,42 @@ test-schema: ## Test the schema example
test-connector: ## Test the connector example
go test test/terraform_connector_test.go -timeout 10m -v |& tee test/terraform_connector_test.log

test-confluent-role-binding: ## Test the confluent_role_binding example
go test test/terraform_confluent_role_binding_test.go -timeout 10m -v |& tee test/terraform_confluent_role_binding_test.log

delete-cfk-crds:
kubectl config set-cluster docker-desktop
kubectl delete -f ./crds/2.4.0

clean: delete-cfk-crds ## Clean project
@rm -f .terraform.lock.hcl
@rm -f modules/confluent_operator/.terraform.lock.hcl
@rm -f modules/kafka_rest_class/.terraform.lock.hcl
@rm -f modules/confluent_role_binding/.terraform.lock.hcl
@rm -f modules/kafka_topic/.terraform.lock.hcl
@rm -f modules/connector/.terraform.lock.hcl
@rm -f modules/schema/.terraform.lock.hcl
@rm -f examples/confluent_operator/.terraform.lock.hcl
@rm -f examples/confluent_platform/.terraform.lock.hcl
@rm -f examples/confluent_platform_singlenode/.terraform.lock.hcl
@rm -f examples/complete/.terraform.lock.hcl
@rm -f examples/autogenerated_tls_only/.terraform.lock.hcl
@rm -rf examples/kafka_topic/.terraform.lock.hcl
@rm -rf examples/schema/.terraform.lock.hcl
@rm -rf examples/connector/.terraform.lock.hcl

@rm -rf .terraform
@rm -rf modules/confluent_operator/.terraform
@rm -rf modules/kafka_rest_class/.terraform
@rm -rf modules/confluent_role_binding/.terraform
@rm -rf modules/kafka_topic/.terraform
@rm -rf modules/connector/.terraform
@rm -rf modules/schema/.terraform
@rm -rf examples/confluent_operator/.terraform
@rm -rf examples/confluent_platform/.terraform
@rm -rf examples/confluent_platform_singlenode/.terraform
@rm -rf examples/complete/.terraform
@rm -rf examples/autogenerated_tls_only/.terraform
@rm -rf examples/kafka_topic/.terraform
@rm -rf examples/schema/.terraform
@rm -rf examples/connector/.terraform
70 changes: 26 additions & 44 deletions README.md
@@ -37,56 +37,27 @@ Similar to the [values file for Helm](https://helm.sh/docs/chart_template_guide/
```hcl
module "confluent_platform" {
source = "aidanmelen/confluent-platform/kubernetes"
version = ">= 0.8.0"
version = ">= 0.9.0"
namespace = var.namespace
# assumes the confluent operator was deployed in another terraform run
confluent_operator = {
create_namespace = true
name = "confluent-operator"
chart_version = "0.517.12"
create = false
}
# value overrides
zookeeper = {
"spec" = {
"replicas" = "3"
}
}
# yaml inline value overrides
kafka = yamldecode(<<-EOF
spec:
replicas: 3
EOF
)
# yaml file value overrides
connect = yamldecode(file("${path.module}/values/connect.yaml"))
create_ksqldb = false
create_controlcenter = var.create_controlcenter
create_schemaregistry = true # create with default values
create_kafkarestproxy = false
kafka_topics = {
"pageviews" = {}
"my-other-topic" = {
"values" = { "spec" = { "configs" = { "cleanup.policy" = "compact" } } }
}
}
schemas = {
"pageviews-value" = {
"data" = file("${path.module}/schemas/pageviews_schema.avro")
}
}
connectors = {
"pageviews-source" = {
"values" = yamldecode(file("${path.module}/values/connector.yaml"))
}
}
# uncomment to override the modules default local values
/*
zookeeper = yamldecode(file("${path.module}/values/zookeeper.yaml"))
kafka = yamldecode(file("${path.module}/values/kafka.yaml"))
connect = yamldecode(file("${path.module}/values/connect.yaml"))
ksqldb = yamldecode(file("${path.module}/values/ksqldb.yaml"))
controlcenter = yamldecode(file("${path.module}/values/controlcenter.yaml"))
schemaregistry = yamldecode(file("${path.module}/values/schemaregistry.yaml"))
kafkarestproxy = yamldecode(file("${path.module}/values/kafkarestproxy.yaml"))
*/
create_controlcenter = var.create_controlcenter
}
```

@@ -132,6 +103,7 @@ test-complete Test the complete example
test-kafka-topic Test the kafka_topic example
test-schema Test the schema example
test-connector Test the connector example
test-confluent-role-binding Test the confluent_role_binding example
clean Clean project
```

@@ -153,7 +125,9 @@ clean Clean project
|------|--------|---------|
| <a name="module_confluent_operator"></a> [confluent\_operator](#module\_confluent\_operator) | ./modules/confluent_operator | n/a |
| <a name="module_confluent_platform_override_values"></a> [confluent\_platform\_override\_values](#module\_confluent\_platform\_override\_values) | Invicton-Labs/deepmerge/null | 0.1.5 |
| <a name="module_confluent_role_bindings"></a> [confluent\_role\_bindings](#module\_confluent\_role\_bindings) | ./modules/confluent_role_binding | n/a |
| <a name="module_connectors"></a> [connectors](#module\_connectors) | ./modules/connector | n/a |
| <a name="module_kafka_rest_classes"></a> [kafka\_rest\_classes](#module\_kafka\_rest\_classes) | ./modules/kafka_rest_class | n/a |
| <a name="module_kafka_topics"></a> [kafka\_topics](#module\_kafka\_topics) | ./modules/kafka_topic | n/a |
| <a name="module_schemas"></a> [schemas](#module\_schemas) | ./modules/schema | n/a |
## Resources
@@ -169,6 +143,7 @@ clean Clean project
| <a name="input_confluent_operator"></a> [confluent\_operator](#input\_confluent\_operator) | Controls if the Confluent Operator resources should be created. This is required when the Confluent Operator is not already running on the kubernetes cluster. | `any` | <pre>{<br> "create": true<br>}</pre> | no |
| <a name="input_confluent_operator_app_version"></a> [confluent\_operator\_app\_version](#input\_confluent\_operator\_app\_version) | The default Confluent Operator app version. This may be overriden by component override values. This version must be compatible with the `confluent_platform_version`. Please see confluent docs for more information: https://docs.confluent.io/platform/current/installation/versions-interoperability.html#confluent-operator | `string` | `"2.4.0"` | no |
| <a name="input_confluent_platform_version"></a> [confluent\_platform\_version](#input\_confluent\_platform\_version) | The default Confluent Platform app version. This may be overriden by component override values. This version must be compatible with the `confluent_operator_app_version`. Please see confluent docs for more information: https://docs.confluent.io/platform/current/installation/versions-interoperability.html#confluent-operator | `string` | `"7.2.0"` | no |
| <a name="input_confluent_role_bindings"></a> [confluent\_role\_bindings](#input\_confluent\_role\_bindings) | A map of Confluent Role Bindings to create. The key is the confluent role binding name. The value map is the input for the `confluent_role_binding` submodule. | `any` | `{}` | no |
| <a name="input_connect"></a> [connect](#input\_connect) | The Connect override values. | `any` | `{}` | no |
| <a name="input_connectors"></a> [connectors](#input\_connectors) | A map of Connectors to create. The key is the connector name. The value map is the input for the `connector` submodule. | `any` | `{}` | no |
| <a name="input_controlcenter"></a> [controlcenter](#input\_controlcenter) | The ControlCenter override values. | `any` | `{}` | no |
Expand All @@ -183,6 +158,7 @@ clean Clean project
| <a name="input_create_zookeeper"></a> [create\_zookeeper](#input\_create\_zookeeper) | Controls if the Zookeeper component of the Confluent Platform should be created. | `bool` | `true` | no |
| <a name="input_delete_timeout"></a> [delete\_timeout](#input\_delete\_timeout) | The delete timeout for each Confluent Platform component. | `string` | `"10m"` | no |
| <a name="input_kafka"></a> [kafka](#input\_kafka) | The Kafka override values. | `any` | `{}` | no |
| <a name="input_kafka_rest_classes"></a> [kafka\_rest\_classes](#input\_kafka\_rest\_classes) | A map of Kafka Rest Classes to create. The key is the kafka rest class name. The value map is the input for the `kafka_rest_class` submodule. | `any` | `{}` | no |
| <a name="input_kafka_topics"></a> [kafka\_topics](#input\_kafka\_topics) | A map of Kafka Topics to create. The key is the topic name. The value map is the input for the `kafka_topic` submodule. | `any` | `{}` | no |
| <a name="input_kafkarestproxy"></a> [kafkarestproxy](#input\_kafkarestproxy) | The KafkaRestProxy override values. | `any` | `{}` | no |
| <a name="input_ksqldb"></a> [ksqldb](#input\_ksqldb) | The KsqlDB override values. | `any` | `{}` | no |
@@ -196,6 +172,9 @@ clean Clean project
| Name | Description |
|------|-------------|
| <a name="output_confluent_operator"></a> [confluent\_operator](#output\_confluent\_operator) | Map of attributes for the Confluent Operator. |
| <a name="output_confluent_role_binding_manifests"></a> [confluent\_role\_binding\_manifests](#output\_confluent\_role\_binding\_manifests) | Map of attribute maps for all the ConfluentRoleBinding manifests created. |
| <a name="output_confluent_role_binding_objects"></a> [confluent\_role\_binding\_objects](#output\_confluent\_role\_binding\_objects) | Map of attribute maps for all the ConfluentRoleBinding objects created. |
| <a name="output_confluent_role_bindings"></a> [confluent\_role\_bindings](#output\_confluent\_role\_bindings) | Map of attribute maps for all ConfluentRoleBinding submodules created. |
| <a name="output_connect_manifest"></a> [connect\_manifest](#output\_connect\_manifest) | The Connect manifest. |
| <a name="output_connect_object"></a> [connect\_object](#output\_connect\_object) | The Connect object. |
| <a name="output_connector_manifests"></a> [connector\_manifests](#output\_connector\_manifests) | Map of attribute maps for all the Connector manifests created. |
Expand All @@ -205,6 +184,9 @@ clean Clean project
| <a name="output_controlcenter_object"></a> [controlcenter\_object](#output\_controlcenter\_object) | The ControlCenter object. |
| <a name="output_kafka_manifest"></a> [kafka\_manifest](#output\_kafka\_manifest) | The Kafka manifest. |
| <a name="output_kafka_object"></a> [kafka\_object](#output\_kafka\_object) | The Kafka object. |
| <a name="output_kafka_rest_class_manifests"></a> [kafka\_rest\_class\_manifests](#output\_kafka\_rest\_class\_manifests) | Map of attribute maps for all the KafkaRestClass manifests created. |
| <a name="output_kafka_rest_class_objects"></a> [kafka\_rest\_class\_objects](#output\_kafka\_rest\_class\_objects) | Map of attribute maps for all the KafkaRestClass objects created. |
| <a name="output_kafka_rest_classes"></a> [kafka\_rest\_classes](#output\_kafka\_rest\_classes) | Map of attribute maps for all KafkaRestClass submodules created. |
| <a name="output_kafka_topic_manifests"></a> [kafka\_topic\_manifests](#output\_kafka\_topic\_manifests) | Map of attribute maps for all the KafkaTopic manifests created. |
| <a name="output_kafka_topic_objects"></a> [kafka\_topic\_objects](#output\_kafka\_topic\_objects) | Map of attribute maps for all the KafkaTopic objects created. |
| <a name="output_kafka_topics"></a> [kafka\_topics](#output\_kafka\_topics) | Map of attribute maps for all KafkaTopic submodules created. |
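The new `confluent_role_bindings` and `kafka_rest_classes` inputs follow the same map pattern as `kafka_topics`: each key is the resource name and each value map is passed through to the corresponding submodule. A minimal sketch of wiring them into the root module follows; the role binding `values` override is an assumption for illustration, since the submodule's exact input schema is not shown in this diff.

```hcl
module "confluent_platform" {
  source  = "aidanmelen/confluent-platform/kubernetes"
  version = ">= 0.9.0"

  namespace = var.namespace

  # Key is the KafkaRestClass name; an empty value map uses the submodule defaults.
  kafka_rest_classes = {
    "default" = {}
  }

  # Key is the ConfluentRolebinding name; the override values below are
  # hypothetical and depend on the ConfluentRolebinding CRD spec.
  confluent_role_bindings = {
    "example-role-binding" = {
      "values" = {
        "spec" = {
          "principal" = { "type" = "user", "name" = "c3" }
          "role"      = "ClusterAdmin"
        }
      }
    }
  }
}
```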
2 changes: 1 addition & 1 deletion TODO.txt
@@ -1,6 +1,6 @@
Implement the remaining submodules:
- cluster_link
- confluent_cluster_role_binding
- confluent_role_binding

Create examples:

15 changes: 8 additions & 7 deletions bin/render-docs.sh
@@ -3,13 +3,14 @@
# render terraform-docs code examples

## examples
sed -z 's/source[^\r\n]*/source = "aidanmelen\/confluent-platform\/kubernetes\/\/modules\/confluent_operator"\n version = ">= 0.8.0"\n/g' examples/confluent_operator/main.tf > examples/confluent_operator/.main.tf.docs
sed -z 's/source[^\r\n]*/source = "aidanmelen\/confluent-platform\/kubernetes"\n version = ">= 0.8.0"\n/g' examples/confluent_platform/main.tf > examples/confluent_platform/.main.tf.docs
sed -z 's/source[^\r\n]*/source = "aidanmelen\/confluent-platform\/kubernetes"\n version = ">= 0.8.0"\n/g' examples/confluent_platform_singlenode/main.tf > examples/confluent_platform_singlenode/.main.tf.docs
sed -z 's/source[^\r\n]*/source = "aidanmelen\/confluent-platform\/kubernetes"\n version = ">= 0.8.0"/1' examples/complete/main.tf > examples/complete/.main.tf.docs
sed -z 's/source[^\r\n]*/source = "aidanmelen\/confluent-platform\/kubernetes\/\/modules\/kafka_topic"\n version = ">= 0.8.0"/g' examples/kafka_topic/main.tf > examples/kafka_topic/.main.tf.docs
sed -z 's/source[^\r\n]*/source = "aidanmelen\/confluent-platform\/kubernetes\/\/modules\/schema"\n version = ">= 0.8.0"\n/g' examples/schema/main.tf > examples/schema/.main.tf.docs
sed -z 's/source[^\r\n]*/source = "aidanmelen\/confluent-platform\/kubernetes\/\/modules\/connector"\n version = ">= 0.8.0"/1' examples/connector/main.tf > examples/connector/.main.tf.docs
sed -z 's/source[^\r\n]*/source = "aidanmelen\/confluent-platform\/kubernetes\/\/modules\/confluent_operator"\n version = ">= 0.9.0"\n/g' examples/confluent_operator/main.tf > examples/confluent_operator/.main.tf.docs
sed -z 's/source[^\r\n]*/source = "aidanmelen\/confluent-platform\/kubernetes"\n version = ">= 0.9.0"\n/g' examples/confluent_platform/main.tf > examples/confluent_platform/.main.tf.docs
sed -z 's/source[^\r\n]*/source = "aidanmelen\/confluent-platform\/kubernetes"\n version = ">= 0.9.0"\n/g' examples/confluent_platform_singlenode/main.tf > examples/confluent_platform_singlenode/.main.tf.docs
sed -z 's/source[^\r\n]*/source = "aidanmelen\/confluent-platform\/kubernetes"\n version = ">= 0.9.0"/1' examples/complete/main.tf > examples/complete/.main.tf.docs
sed -z 's/source[^\r\n]*/source = "aidanmelen\/confluent-platform\/kubernetes"\n version = ">= 0.9.0"\n/g' examples/autogenerated_tls_only/main.tf > examples/autogenerated_tls_only/.main.tf.docs
sed -z 's/source[^\r\n]*/source = "aidanmelen\/confluent-platform\/kubernetes\/\/modules\/kafka_topic"\n version = ">= 0.9.0"/g' examples/kafka_topic/main.tf > examples/kafka_topic/.main.tf.docs
sed -z 's/source[^\r\n]*/source = "aidanmelen\/confluent-platform\/kubernetes\/\/modules\/schema"\n version = ">= 0.9.0"\n/g' examples/schema/main.tf > examples/schema/.main.tf.docs
sed -z 's/source[^\r\n]*/source = "aidanmelen\/confluent-platform\/kubernetes\/\/modules\/connector"\n version = ">= 0.9.0"/1' examples/connector/main.tf > examples/connector/.main.tf.docs

# render Makefile targets examples
make > .make.docs
126 changes: 126 additions & 0 deletions examples/autogenerated_tls_only/.main.tf.docs
@@ -0,0 +1,126 @@
module "confluent_platform" {
source = "aidanmelen/confluent-platform/kubernetes"
version = ">= 0.9.0"

namespace = var.namespace

zookeeper = yamldecode(<<-EOF
spec:
tls:
# For this component, Confluent for Kubernetes will autogenerate and
# configure server certs, using a certificate authority specified in
# the secret `ca-pair-sslcerts`.
# This same configuration is specified for all other components.
autoGeneratedCerts: true
EOF
)

kafka = yamldecode(<<-EOF
spec:
configOverrides:
server:
- "log.file.size=${100 * 1024 * 1024}"
tls:
autoGeneratedCerts: true
listeners:
internal:
# The `internal` listener will be TLS enabled.
tls:
enabled: true
# Since no secretRef is specified, the Kafka auto-generated tls
# configuration specified above will be used for this listener.
metricReporter:
enabled: true
bootstrapEndpoint: kafka:9071
tls:
enabled: true
dependencies:
zookeeper:
endpoint: zookeeper.${var.namespace}.svc.cluster.local:2182
tls:
enabled: true
EOF
)

connect = yamldecode(<<-EOF
spec:
tls:
autoGeneratedCerts: true
dependencies:
kafka:
bootstrapEndpoint: kafka:9071
tls:
enabled: true
EOF
)

ksqldb = yamldecode(<<-EOF
spec:
tls:
autoGeneratedCerts: true
dependencies:
kafka:
bootstrapEndpoint: kafka:9071
tls:
enabled: true
EOF
)

controlcenter = yamldecode(<<-EOF
spec:
tls:
autoGeneratedCerts: true
dependencies:
kafka:
bootstrapEndpoint: kafka.${var.namespace}.svc.cluster.local:9071
tls:
enabled: true
schemaRegistry:
url: https://schemaregistry.${var.namespace}.svc.cluster.local:8081
tls:
enabled: true
ksqldb:
- name: ksql
url: https://ksqldb.${var.namespace}.svc.cluster.local:8088
tls:
enabled: true
connect:
- name: connect-dev
url: https://connect.${var.namespace}.svc.cluster.local:8083
tls:
enabled: true
EOF
)

schemaregistry = yamldecode(<<-EOF
spec:
tls:
autoGeneratedCerts: true
dependencies:
kafka:
bootstrapEndpoint: kafka:9071
tls:
enabled: true
EOF
)

kafkarestproxy = yamldecode(<<-EOF
spec:
tls:
autoGeneratedCerts: true
dependencies:
schemaRegistry:
url: https://schemaregistry.${var.namespace}.svc.cluster.local:8081
tls:
enabled: true
EOF
)

kafka_rest_classes = {
"default" = {}
}

kafka_topics = {
"my-topic" = {}
}
}
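The TLS-only example assumes a certificate authority is available to Confluent for Kubernetes in the `ca-pair-sslcerts` secret, and it references `var.namespace`. A minimal companion sketch (not part of this commit; the secret layout and file paths are assumptions based on the comment in the example above):

```hcl
variable "namespace" {
  description = "Namespace the Confluent Platform components are deployed into."
  type        = string
  default     = "confluent"
}

# Confluent for Kubernetes reads the CA used for autogenerated certificates
# from a TLS secret named `ca-pair-sslcerts` in the same namespace.
resource "kubernetes_secret" "ca_pair_sslcerts" {
  metadata {
    name      = "ca-pair-sslcerts"
    namespace = var.namespace
  }

  type = "kubernetes.io/tls"

  data = {
    "tls.crt" = file("${path.module}/certs/ca.pem")     # assumed local path
    "tls.key" = file("${path.module}/certs/ca-key.pem") # assumed local path
  }
}
```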
12 changes: 12 additions & 0 deletions examples/autogenerated_tls_only/.terraform-docs.yml
@@ -0,0 +1,12 @@
content: |-
{{ .Header }}
## Example
```hcl
{{ include ".main.tf.docs" }}
```
{{ .Requirements }}
{{ .Modules }}
{{ .Inputs }}
{{ .Outputs }}