diff --git a/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/rule.yml b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/rule.yml
new file mode 100644
index 000000000000..bbf7464de8d3
--- /dev/null
+++ b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/rule.yml
@@ -0,0 +1,83 @@
+documentation_complete: true
+
+title: 'Ensure deployments have either anti-affinity rules or topology spread constraints'
+
+description: |-
+ Distributing Kubernetes pods across nodes and availability zones using pod topology spread
+ constraints and anti-affinity rules enhances high availability, fault tolerance, performance,
+ and security by balancing workloads, reducing single points of failure, and ensuring compliance
+ and data protection.
+
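+ For example, a Deployment can ask the scheduler to spread its pods across nodes either
+ with a pod anti-affinity rule or with topology spread constraints. A minimal sketch of the
+ latter, using a placeholder label selector, looks like this:
+
+ spec:
+   template:
+     spec:
+       topologySpreadConstraints:
+       - maxSkew: 1
+         topologyKey: kubernetes.io/hostname
+         whenUnsatisfiable: DoNotSchedule
+         labelSelector:
+           matchLabels:
+             app.kubernetes.io/name: my-app
+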
+ There might be deployments that do not require high availability or spreading across nodes.
+ To limit the number of false positives, this rule only checks deployments with a replica count
+ of more than one. For deployments with a single replica, neither anti-affinity rules nor
+ topology spread constraints provide any value.
+
+ To exclude further deployments from this rule, set the variable
+ var_deployments_without_high_availability to a regular expression matching their names.
+ Deployments whose names match this expression are ignored in all namespaces.
+
+ An example allowing all deployments named uncritical-service is as follows:
+
+
+ apiVersion: compliance.openshift.io/v1alpha1
+ kind: TailoredProfile
+ metadata:
+   name: bsi-additional-deployments
+ spec:
+   description: Allows additional deployments to not be highly available and evenly spread
+   setValues:
+   - name: upstream-ocp4-var_deployments_without_high_availability
+     rationale: Ignore our uncritical service
+     value: ^uncritical-service$
+   extends: upstream-ocp4-bsi
+   title: Modified BSI allowing non-highly-available deployments
+
+
+ Finally, reference this TailoredProfile in a ScanSettingBinding.
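+
+ A minimal ScanSettingBinding, assuming the default ScanSetting shipped with the Compliance
+ Operator, could look like this:
+
+ apiVersion: compliance.openshift.io/v1alpha1
+ kind: ScanSettingBinding
+ metadata:
+   name: bsi-additional-deployments
+ profiles:
+ - apiGroup: compliance.openshift.io/v1alpha1
+   kind: TailoredProfile
+   name: bsi-additional-deployments
+ settingsRef:
+   apiGroup: compliance.openshift.io/v1alpha1
+   kind: ScanSetting
+   name: default
+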
+ For more information on Tailoring the Compliance Operator, please consult the
+ OpenShift documentation:
+ {{{ weblink(link="https://docs.openshift.com/container-platform/4.16/security/compliance_operator/co-scans/compliance-operator-tailor.html") }}}
+
+
+rationale: |-
+ Distributing Kubernetes pods across nodes and availability zones using pod topology spread
+ constraints and anti-affinity rules is essential for enhancing high availability, fault
+ tolerance, and security.
+ This approach ensures that a single node or AZ failure does not lead to total application
+ downtime, as workloads are balanced and resources are efficiently utilized.
+
+identifiers: {}
+
+references:
+ bsi: APP.4.4.A19
+
+severity: medium
+
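+# The filter below keeps only deployments that are not matched by var_deployments_without_high_availability,
+# have more than one replica, and define neither podAntiAffinity nor topologySpreadConstraints.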
+{{% set jqfilter = '[ .items[] | select(.metadata.name | test("{{.var_deployments_without_high_availability}}"; "") | not) | select (.spec.replicas == 0 or .spec.replicas == 1 | not) | select(.spec.template.spec.affinity.podAntiAffinity == null and .spec.template.spec.topologySpreadConstraints == null) | .metadata.namespace + "/" + .metadata.name ]' %}}
+
+ocil_clause: 'Deployments with neither anti-affinity rules nor topology spread constraints exist'
+
+ocil: |-
+ Run the following command to determine anti-affinity rules and topology spread constraints of
+ all deployments:
+ $ oc get deployments -A -o json | jq '{{{ jqfilter }}}'
+ Make sure that either suitable anti-affinity rules or topology spread constraints are
+ configured for all workloads that require high availability.
+
+warnings:
+- general: |-
+    {{{ openshift_filtered_cluster_setting({'/apis/apps/v1/deployments?limit=500': jqfilter}) | indent(4) }}}
+
+template:
+  name: yamlfile_value
+  vars:
+    ocp_data: "true"
+    filepath: |-
+      {{{ openshift_filtered_path('/apis/apps/v1/deployments?limit=500', jqfilter) }}}
+    yamlpath: "[:]"
+    check_existence: "none_exist"
+    entity_check: "all"
+    values:
+    - value: "(.*?)"
+      operation: "pattern match"
diff --git a/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/deployments_anti_affinity.pass.sh b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/deployments_anti_affinity.pass.sh
new file mode 100644
index 000000000000..a07a5cc89507
--- /dev/null
+++ b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/deployments_anti_affinity.pass.sh
@@ -0,0 +1,1389 @@
+#!/bin/bash
+
+# remediation = none
+yum install -y jq
+
+kube_apipath="/kubernetes-api-resources"
+
+mkdir -p "$kube_apipath/apis/apps/v1/deployments"
+
+deployment_apipath="/apis/apps/v1/deployments?limit=500"
+
+cat << EOF > "$kube_apipath$deployment_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "5"
+ },
+ "creationTimestamp": "2022-04-04T12:44:36Z",
+ "generation": 5,
+ "labels": {
+ "app.kubernetes.io/component": "exporter",
+ "app.kubernetes.io/managed-by": "cluster-monitoring-operator",
+ "app.kubernetes.io/name": "kube-state-metrics",
+ "app.kubernetes.io/part-of": "openshift-monitoring",
+ "app.kubernetes.io/version": "2.6.0"
+ },
+ "name": "kube-state-metrics",
+ "namespace": "openshift-monitoring",
+ "resourceVersion": "1357677010",
+ "uid": "681b1826-0401-4596-a5a1-1b354f569908"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 1,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app.kubernetes.io/component": "exporter",
+ "app.kubernetes.io/name": "kube-state-metrics",
+ "app.kubernetes.io/part-of": "openshift-monitoring"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "annotations": {
+ "kubectl.kubernetes.io/default-container": "kube-state-metrics",
+ "target.workload.openshift.io/management": "{\"effect\": \"PreferredDuringScheduling\"}"
+ },
+ "creationTimestamp": null,
+ "labels": {
+ "app.kubernetes.io/component": "exporter",
+ "app.kubernetes.io/managed-by": "cluster-monitoring-operator",
+ "app.kubernetes.io/name": "kube-state-metrics",
+ "app.kubernetes.io/part-of": "openshift-monitoring",
+ "app.kubernetes.io/version": "2.6.0"
+ }
+ },
+ "spec": {
+ "automountServiceAccountToken": true,
+ "containers": [
+ {
+ "args": [
+ "--host=127.0.0.1",
+ "--port=8081",
+ "--telemetry-host=127.0.0.1",
+ "--telemetry-port=8082",
+ "--metric-denylist=\n^kube_secret_labels$,\n^kube_.+_annotations$\n",
+ "--metric-labels-allowlist=pods=[*],nodes=[*],namespaces=[*],persistentvolumes=[*],persistentvolumeclaims=[*],poddisruptionbudgets=[*],poddisruptionbudget=[*]",
+ "--metric-denylist=\n^kube_.+_created$,\n^kube_.+_metadata_resource_version$,\n^kube_replicaset_metadata_generation$,\n^kube_replicaset_status_observed_generation$,\n^kube_pod_restart_policy$,\n^kube_pod_init_container_status_terminated$,\n^kube_pod_init_container_status_running$,\n^kube_pod_container_status_terminated$,\n^kube_pod_container_status_running$,\n^kube_pod_completion_time$,\n^kube_pod_status_scheduled$\n"
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:bb0303469ff9ac257efe236775f5c746458e3d55126666de80e460e451dfa383",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "kube-state-metrics",
+ "resources": {
+ "requests": {
+ "cpu": "2m",
+ "memory": "80Mi"
+ }
+ },
+ "securityContext": {},
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "volumeMounts": [
+ {
+ "mountPath": "/tmp",
+ "name": "volume-directive-shadow"
+ }
+ ]
+ },
+ {
+ "args": [
+ "--logtostderr",
+ "--secure-listen-address=:8443",
+ "--tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256",
+ "--upstream=http://127.0.0.1:8081/",
+ "--tls-cert-file=/etc/tls/private/tls.crt",
+ "--tls-private-key-file=/etc/tls/private/tls.key",
+ "--client-ca-file=/etc/tls/client/client-ca.crt",
+ "--config-file=/etc/kube-rbac-policy/config.yaml",
+ "--tls-min-version=VersionTLS12"
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d63bf13113fa7224bdeb21f4b07d53dce96f9fcc955048b870a97e7c1d054e11",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "kube-rbac-proxy-main",
+ "ports": [
+ {
+ "containerPort": 8443,
+ "name": "https-main",
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "15Mi"
+ }
+ },
+ "securityContext": {},
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError",
+ "volumeMounts": [
+ {
+ "mountPath": "/etc/tls/private",
+ "name": "kube-state-metrics-tls"
+ },
+ {
+ "mountPath": "/etc/tls/client",
+ "name": "metrics-client-ca"
+ },
+ {
+ "mountPath": "/etc/kube-rbac-policy",
+ "name": "kube-state-metrics-kube-rbac-proxy-config",
+ "readOnly": true
+ }
+ ]
+ },
+ {
+ "args": [
+ "--logtostderr",
+ "--secure-listen-address=:9443",
+ "--tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256",
+ "--upstream=http://127.0.0.1:8082/",
+ "--tls-cert-file=/etc/tls/private/tls.crt",
+ "--tls-private-key-file=/etc/tls/private/tls.key",
+ "--client-ca-file=/etc/tls/client/client-ca.crt",
+ "--config-file=/etc/kube-rbac-policy/config.yaml",
+ "--tls-min-version=VersionTLS12"
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d63bf13113fa7224bdeb21f4b07d53dce96f9fcc955048b870a97e7c1d054e11",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "kube-rbac-proxy-self",
+ "ports": [
+ {
+ "containerPort": 9443,
+ "name": "https-self",
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "15Mi"
+ }
+ },
+ "securityContext": {},
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError",
+ "volumeMounts": [
+ {
+ "mountPath": "/etc/tls/private",
+ "name": "kube-state-metrics-tls"
+ },
+ {
+ "mountPath": "/etc/tls/client",
+ "name": "metrics-client-ca"
+ },
+ {
+ "mountPath": "/etc/kube-rbac-policy",
+ "name": "kube-state-metrics-kube-rbac-proxy-config",
+ "readOnly": true
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "nodeSelector": {
+ "node-role.kubernetes.io/infra": ""
+ },
+ "priorityClassName": "system-cluster-critical",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "kube-state-metrics",
+ "serviceAccountName": "kube-state-metrics",
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra",
+ "operator": "Exists"
+ }
+ ],
+ "volumes": [
+ {
+ "emptyDir": {},
+ "name": "volume-directive-shadow"
+ },
+ {
+ "name": "kube-state-metrics-tls",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "kube-state-metrics-tls"
+ }
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "name": "metrics-client-ca"
+ },
+ "name": "metrics-client-ca"
+ },
+ {
+ "name": "kube-state-metrics-kube-rbac-proxy-config",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "kube-state-metrics-kube-rbac-proxy-config"
+ }
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "3"
+ },
+ "creationTimestamp": "2023-01-30T10:35:46Z",
+ "generation": 3,
+ "labels": {
+ "app.kubernetes.io/component": "exporter",
+ "app.kubernetes.io/managed-by": "cluster-monitoring-operator",
+ "app.kubernetes.io/name": "openshift-state-metrics",
+ "app.kubernetes.io/part-of": "openshift-monitoring"
+ },
+ "name": "openshift-state-metrics",
+ "namespace": "openshift-monitoring",
+ "resourceVersion": "1357676976",
+ "uid": "0d4971e0-ec8c-424f-a4f4-ab6041c769d1"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 1,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app.kubernetes.io/component": "exporter",
+ "app.kubernetes.io/name": "openshift-state-metrics"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "annotations": {
+ "target.workload.openshift.io/management": "{\"effect\": \"PreferredDuringScheduling\"}"
+ },
+ "creationTimestamp": null,
+ "labels": {
+ "app.kubernetes.io/component": "exporter",
+ "app.kubernetes.io/managed-by": "cluster-monitoring-operator",
+ "app.kubernetes.io/name": "openshift-state-metrics",
+ "app.kubernetes.io/part-of": "openshift-monitoring"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "args": [
+ "--logtostderr",
+ "--secure-listen-address=:8443",
+ "--tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256",
+ "--upstream=http://127.0.0.1:8081/",
+ "--tls-cert-file=/etc/tls/private/tls.crt",
+ "--tls-private-key-file=/etc/tls/private/tls.key",
+ "--config-file=/etc/kube-rbac-policy/config.yaml",
+ "--tls-min-version=VersionTLS12"
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d63bf13113fa7224bdeb21f4b07d53dce96f9fcc955048b870a97e7c1d054e11",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "kube-rbac-proxy-main",
+ "ports": [
+ {
+ "containerPort": 8443,
+ "name": "https-main",
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "20Mi"
+ }
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "volumeMounts": [
+ {
+ "mountPath": "/etc/tls/private",
+ "name": "openshift-state-metrics-tls"
+ },
+ {
+ "mountPath": "/etc/kube-rbac-policy",
+ "name": "openshift-state-metrics-kube-rbac-proxy-config",
+ "readOnly": true
+ }
+ ]
+ },
+ {
+ "args": [
+ "--logtostderr",
+ "--secure-listen-address=:9443",
+ "--tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256",
+ "--upstream=http://127.0.0.1:8082/",
+ "--tls-cert-file=/etc/tls/private/tls.crt",
+ "--tls-private-key-file=/etc/tls/private/tls.key",
+ "--config-file=/etc/kube-rbac-policy/config.yaml",
+ "--tls-min-version=VersionTLS12"
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d63bf13113fa7224bdeb21f4b07d53dce96f9fcc955048b870a97e7c1d054e11",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "kube-rbac-proxy-self",
+ "ports": [
+ {
+ "containerPort": 9443,
+ "name": "https-self",
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "20Mi"
+ }
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "volumeMounts": [
+ {
+ "mountPath": "/etc/tls/private",
+ "name": "openshift-state-metrics-tls"
+ },
+ {
+ "mountPath": "/etc/kube-rbac-policy",
+ "name": "openshift-state-metrics-kube-rbac-proxy-config",
+ "readOnly": true
+ }
+ ]
+ },
+ {
+ "args": [
+ "--host=127.0.0.1",
+ "--port=8081",
+ "--telemetry-host=127.0.0.1",
+ "--telemetry-port=8082"
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5501a4680652bbd1aafd6435771725f6462bd2061f4ebe82a22a66f630bc6f72",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "openshift-state-metrics",
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "32Mi"
+ }
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File"
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "nodeSelector": {
+ "node-role.kubernetes.io/infra": ""
+ },
+ "priorityClassName": "system-cluster-critical",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "openshift-state-metrics",
+ "serviceAccountName": "openshift-state-metrics",
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra",
+ "operator": "Exists"
+ }
+ ],
+ "volumes": [
+ {
+ "name": "openshift-state-metrics-tls",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "openshift-state-metrics-tls"
+ }
+ },
+ {
+ "name": "openshift-state-metrics-kube-rbac-proxy-config",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "openshift-state-metrics-kube-rbac-proxy-config"
+ }
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "347"
+ },
+ "creationTimestamp": "2022-04-04T12:44:37Z",
+ "generation": 347,
+ "labels": {
+ "app.kubernetes.io/component": "metrics-adapter",
+ "app.kubernetes.io/managed-by": "cluster-monitoring-operator",
+ "app.kubernetes.io/name": "prometheus-adapter",
+ "app.kubernetes.io/part-of": "openshift-monitoring",
+ "app.kubernetes.io/version": "0.10.0"
+ },
+ "name": "prometheus-adapter",
+ "namespace": "openshift-monitoring",
+ "resourceVersion": "1348266955",
+ "uid": "d2c2d49c-dbe6-40a4-85e8-ce9329cb55c0"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 2,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app.kubernetes.io/component": "metrics-adapter",
+ "app.kubernetes.io/name": "prometheus-adapter",
+ "app.kubernetes.io/part-of": "openshift-monitoring"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": 1
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "annotations": {
+ "target.workload.openshift.io/management": "{\"effect\": \"PreferredDuringScheduling\"}"
+ },
+ "creationTimestamp": null,
+ "labels": {
+ "app.kubernetes.io/component": "metrics-adapter",
+ "app.kubernetes.io/managed-by": "cluster-monitoring-operator",
+ "app.kubernetes.io/name": "prometheus-adapter",
+ "app.kubernetes.io/part-of": "openshift-monitoring",
+ "app.kubernetes.io/version": "0.10.0"
+ }
+ },
+ "spec": {
+ "affinity": {
+ "podAntiAffinity": {
+ "requiredDuringSchedulingIgnoredDuringExecution": [
+ {
+ "labelSelector": {
+ "matchLabels": {
+ "app.kubernetes.io/component": "metrics-adapter",
+ "app.kubernetes.io/name": "prometheus-adapter",
+ "app.kubernetes.io/part-of": "openshift-monitoring"
+ }
+ },
+ "namespaces": [
+ "openshift-monitoring"
+ ],
+ "topologyKey": "kubernetes.io/hostname"
+ }
+ ]
+ }
+ },
+ "automountServiceAccountToken": true,
+ "containers": [
+ {
+ "args": [
+ "--prometheus-auth-config=/etc/prometheus-config/prometheus-config.yaml",
+ "--config=/etc/adapter/config.yaml",
+ "--logtostderr=true",
+ "--metrics-relist-interval=1m",
+ "--prometheus-url=https://prometheus-k8s.openshift-monitoring.svc:9091",
+ "--secure-port=6443",
+ "--tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256",
+ "--client-ca-file=/etc/tls/private/client-ca-file",
+ "--requestheader-client-ca-file=/etc/tls/private/requestheader-client-ca-file",
+ "--requestheader-allowed-names=kube-apiserver-proxy,system:kube-apiserver-proxy,system:openshift-aggregator",
+ "--requestheader-extra-headers-prefix=X-Remote-Extra-",
+ "--requestheader-group-headers=X-Remote-Group",
+ "--requestheader-username-headers=X-Remote-User",
+ "--tls-cert-file=/etc/tls/private/tls.crt",
+ "--tls-private-key-file=/etc/tls/private/tls.key",
+ "--audit-policy-file=/etc/audit/metadata-profile.yaml",
+ "--audit-log-path=/var/log/adapter/audit.log",
+ "--audit-log-maxsize=100",
+ "--audit-log-maxbackup=5",
+ "--audit-log-compress=true",
+ "--tls-min-version=VersionTLS12"
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3cade03b16237889606ab1e3b1b7fc12d160cacc36ae3df2de05d281bccc7f20",
+ "imagePullPolicy": "IfNotPresent",
+ "livenessProbe": {
+ "failureThreshold": 5,
+ "httpGet": {
+ "path": "/livez",
+ "port": "https",
+ "scheme": "HTTPS"
+ },
+ "periodSeconds": 5,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "name": "prometheus-adapter",
+ "ports": [
+ {
+ "containerPort": 6443,
+ "name": "https",
+ "protocol": "TCP"
+ }
+ ],
+ "readinessProbe": {
+ "failureThreshold": 5,
+ "httpGet": {
+ "path": "/readyz",
+ "port": "https",
+ "scheme": "HTTPS"
+ },
+ "periodSeconds": 5,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "40Mi"
+ }
+ },
+ "securityContext": {
+ "allowPrivilegeEscalation": false,
+ "capabilities": {
+ "drop": [
+ "ALL"
+ ]
+ },
+ "readOnlyRootFilesystem": true
+ },
+ "startupProbe": {
+ "failureThreshold": 18,
+ "httpGet": {
+ "path": "/livez",
+ "port": "https",
+ "scheme": "HTTPS"
+ },
+ "periodSeconds": 10,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError",
+ "volumeMounts": [
+ {
+ "mountPath": "/tmp",
+ "name": "tmpfs"
+ },
+ {
+ "mountPath": "/etc/adapter",
+ "name": "config"
+ },
+ {
+ "mountPath": "/etc/prometheus-config",
+ "name": "prometheus-adapter-prometheus-config"
+ },
+ {
+ "mountPath": "/etc/ssl/certs",
+ "name": "serving-certs-ca-bundle"
+ },
+ {
+ "mountPath": "/etc/audit",
+ "name": "prometheus-adapter-audit-profiles",
+ "readOnly": true
+ },
+ {
+ "mountPath": "/var/log/adapter",
+ "name": "audit-log"
+ },
+ {
+ "mountPath": "/etc/tls/private",
+ "name": "tls",
+ "readOnly": true
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "nodeSelector": {
+ "node-role.kubernetes.io/infra": ""
+ },
+ "priorityClassName": "system-cluster-critical",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "prometheus-adapter",
+ "serviceAccountName": "prometheus-adapter",
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra",
+ "operator": "Exists"
+ }
+ ],
+ "volumes": [
+ {
+ "emptyDir": {},
+ "name": "tmpfs"
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "name": "prometheus-adapter-prometheus-config"
+ },
+ "name": "prometheus-adapter-prometheus-config"
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "items": [
+ {
+ "key": "service-ca.crt",
+ "path": "service-ca.crt"
+ }
+ ],
+ "name": "serving-certs-ca-bundle"
+ },
+ "name": "serving-certs-ca-bundle"
+ },
+ {
+ "emptyDir": {},
+ "name": "audit-log"
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "name": "prometheus-adapter-audit-profiles"
+ },
+ "name": "prometheus-adapter-audit-profiles"
+ },
+ {
+ "name": "tls",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "prometheus-adapter-6fk0fnclda7g1"
+ }
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "name": "adapter-config"
+ },
+ "name": "config"
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "2"
+ },
+ "creationTimestamp": "2023-10-05T11:34:18Z",
+ "generation": 2,
+ "labels": {
+ "app.kubernetes.io/managed-by": "cluster-monitoring-operator",
+ "app.kubernetes.io/name": "prometheus-operator-admission-webhook",
+ "app.kubernetes.io/part-of": "openshift-monitoring",
+ "app.kubernetes.io/version": "0.60.1"
+ },
+ "name": "prometheus-operator-admission-webhook",
+ "namespace": "openshift-monitoring",
+ "resourceVersion": "1357676847",
+ "uid": "6e07ce87-4124-452f-86ac-124613d0046b"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 2,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app.kubernetes.io/name": "prometheus-operator-admission-webhook",
+ "app.kubernetes.io/part-of": "openshift-monitoring"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": 1
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "annotations": {
+ "kubectl.kubernetes.io/default-container": "prometheus-operator-admission-webhook",
+ "target.workload.openshift.io/management": "{\"effect\": \"PreferredDuringScheduling\"}"
+ },
+ "creationTimestamp": null,
+ "labels": {
+ "app.kubernetes.io/managed-by": "cluster-monitoring-operator",
+ "app.kubernetes.io/name": "prometheus-operator-admission-webhook",
+ "app.kubernetes.io/part-of": "openshift-monitoring",
+ "app.kubernetes.io/version": "0.60.1"
+ }
+ },
+ "spec": {
+ "affinity": {
+ "podAntiAffinity": {
+ "requiredDuringSchedulingIgnoredDuringExecution": [
+ {
+ "labelSelector": {
+ "matchLabels": {
+ "app.kubernetes.io/name": "prometheus-operator-admission-webhook",
+ "app.kubernetes.io/part-of": "openshift-monitoring"
+ }
+ },
+ "namespaces": [
+ "openshift-monitoring"
+ ],
+ "topologyKey": "kubernetes.io/hostname"
+ }
+ ]
+ }
+ },
+ "automountServiceAccountToken": false,
+ "containers": [
+ {
+ "args": [
+ "--web.enable-tls=true",
+ "--web.tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256",
+ "--web.tls-min-version=VersionTLS12",
+ "--web.cert-file=/etc/tls/private/tls.crt",
+ "--web.key-file=/etc/tls/private/tls.key"
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7812668217067f9038cc63d1542d3363ccacc30bf9047e2fcb9446136f48ca01",
+ "imagePullPolicy": "IfNotPresent",
+ "livenessProbe": {
+ "failureThreshold": 3,
+ "httpGet": {
+ "path": "/healthz",
+ "port": "https",
+ "scheme": "HTTPS"
+ },
+ "periodSeconds": 10,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "name": "prometheus-operator-admission-webhook",
+ "ports": [
+ {
+ "containerPort": 8443,
+ "name": "https",
+ "protocol": "TCP"
+ }
+ ],
+ "readinessProbe": {
+ "failureThreshold": 3,
+ "httpGet": {
+ "path": "/healthz",
+ "port": "https",
+ "scheme": "HTTPS"
+ },
+ "periodSeconds": 10,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "resources": {
+ "requests": {
+ "cpu": "5m",
+ "memory": "30Mi"
+ }
+ },
+ "securityContext": {},
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError",
+ "volumeMounts": [
+ {
+ "mountPath": "/etc/tls/private",
+ "name": "tls-certificates",
+ "readOnly": true
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "nodeSelector": {
+ "node-role.kubernetes.io/infra": ""
+ },
+ "priorityClassName": "system-cluster-critical",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "prometheus-operator-admission-webhook",
+ "serviceAccountName": "prometheus-operator-admission-webhook",
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra",
+ "operator": "Exists"
+ }
+ ],
+ "volumes": [
+ {
+ "name": "tls-certificates",
+ "secret": {
+ "defaultMode": 420,
+ "items": [
+ {
+ "key": "tls.crt",
+ "path": "tls.crt"
+ },
+ {
+ "key": "tls.key",
+ "path": "tls.key"
+ }
+ ],
+ "secretName": "prometheus-operator-admission-webhook-tls"
+ }
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "5"
+ },
+ "creationTimestamp": "2023-01-30T10:35:49Z",
+ "generation": 5,
+ "labels": {
+ "app.kubernetes.io/component": "query-layer",
+ "app.kubernetes.io/instance": "thanos-querier",
+ "app.kubernetes.io/managed-by": "cluster-monitoring-operator",
+ "app.kubernetes.io/name": "thanos-query",
+ "app.kubernetes.io/part-of": "openshift-monitoring",
+ "app.kubernetes.io/version": "0.28.1"
+ },
+ "name": "thanos-querier",
+ "namespace": "openshift-monitoring",
+ "resourceVersion": "1357676663",
+ "uid": "0a687885-1b09-42d6-8479-311afc4e59c4"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 2,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app.kubernetes.io/component": "query-layer",
+ "app.kubernetes.io/instance": "thanos-querier",
+ "app.kubernetes.io/name": "thanos-query",
+ "app.kubernetes.io/part-of": "openshift-monitoring"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": 1
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "annotations": {
+ "target.workload.openshift.io/management": "{\"effect\": \"PreferredDuringScheduling\"}"
+ },
+ "creationTimestamp": null,
+ "labels": {
+ "app.kubernetes.io/component": "query-layer",
+ "app.kubernetes.io/instance": "thanos-querier",
+ "app.kubernetes.io/managed-by": "cluster-monitoring-operator",
+ "app.kubernetes.io/name": "thanos-query",
+ "app.kubernetes.io/part-of": "openshift-monitoring",
+ "app.kubernetes.io/version": "0.28.1"
+ }
+ },
+ "spec": {
+ "affinity": {
+ "podAntiAffinity": {
+ "requiredDuringSchedulingIgnoredDuringExecution": [
+ {
+ "labelSelector": {
+ "matchLabels": {
+ "app.kubernetes.io/component": "query-layer",
+ "app.kubernetes.io/instance": "thanos-querier",
+ "app.kubernetes.io/name": "thanos-query",
+ "app.kubernetes.io/part-of": "openshift-monitoring"
+ }
+ },
+ "topologyKey": "kubernetes.io/hostname"
+ }
+ ]
+ }
+ },
+ "containers": [
+ {
+ "args": [
+ "query",
+ "--grpc-address=127.0.0.1:10901",
+ "--http-address=127.0.0.1:9090",
+ "--log.format=logfmt",
+ "--query.replica-label=prometheus_replica",
+ "--query.replica-label=thanos_ruler_replica",
+ "--store=dnssrv+_grpc._tcp.prometheus-operated.openshift-monitoring.svc.cluster.local",
+ "--query.auto-downsampling",
+ "--store.sd-dns-resolver=miekgdns",
+ "--grpc-client-tls-secure",
+ "--grpc-client-tls-cert=/etc/tls/grpc/client.crt",
+ "--grpc-client-tls-key=/etc/tls/grpc/client.key",
+ "--grpc-client-tls-ca=/etc/tls/grpc/ca.crt",
+ "--grpc-client-server-name=prometheus-grpc",
+ "--rule=dnssrv+_grpc._tcp.prometheus-operated.openshift-monitoring.svc.cluster.local",
+ "--target=dnssrv+_grpc._tcp.prometheus-operated.openshift-monitoring.svc.cluster.local"
+ ],
+ "env": [
+ {
+ "name": "HOST_IP_ADDRESS",
+ "valueFrom": {
+ "fieldRef": {
+ "apiVersion": "v1",
+ "fieldPath": "status.hostIP"
+ }
+ }
+ }
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a2a0a1b4f08e2c5b3e5c1fe527400315e6532063af6c6e2dce7a0eac79a1d1bf",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "thanos-query",
+ "ports": [
+ {
+ "containerPort": 9090,
+ "name": "http",
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {
+ "requests": {
+ "cpu": "10m",
+ "memory": "12Mi"
+ }
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError",
+ "volumeMounts": [
+ {
+ "mountPath": "/etc/tls/grpc",
+ "name": "secret-grpc-tls"
+ }
+ ]
+ },
+ {
+ "args": [
+ "-provider=openshift",
+ "-https-address=:9091",
+ "-http-address=",
+ "-email-domain=*",
+ "-upstream=http://localhost:9090",
+ "-openshift-service-account=thanos-querier",
+ "-openshift-sar={\"resource\": \"namespaces\", \"verb\": \"get\"}",
+ "-openshift-delegate-urls={\"/\": {\"resource\": \"namespaces\", \"verb\": \"get\"}}",
+ "-tls-cert=/etc/tls/private/tls.crt",
+ "-tls-key=/etc/tls/private/tls.key",
+ "-client-secret-file=/var/run/secrets/kubernetes.io/serviceaccount/token",
+ "-cookie-secret-file=/etc/proxy/secrets/session_secret",
+ "-openshift-ca=/etc/pki/tls/cert.pem",
+ "-openshift-ca=/var/run/secrets/kubernetes.io/serviceaccount/ca.crt",
+ "-bypass-auth-for=^/-/(healthy|ready)$"
+ ],
+ "env": [
+ {
+ "name": "HTTP_PROXY"
+ },
+ {
+ "name": "HTTPS_PROXY"
+ },
+ {
+ "name": "NO_PROXY"
+ }
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8cee1c6d7316b2108cc2d0272ebf2932ee999c9eb05d5c6e296df362da58e9ce",
+ "imagePullPolicy": "IfNotPresent",
+ "livenessProbe": {
+ "failureThreshold": 4,
+ "httpGet": {
+ "path": "/-/healthy",
+ "port": 9091,
+ "scheme": "HTTPS"
+ },
+ "initialDelaySeconds": 5,
+ "periodSeconds": 30,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "name": "oauth-proxy",
+ "ports": [
+ {
+ "containerPort": 9091,
+ "name": "web",
+ "protocol": "TCP"
+ }
+ ],
+ "readinessProbe": {
+ "failureThreshold": 20,
+ "httpGet": {
+ "path": "/-/ready",
+ "port": 9091,
+ "scheme": "HTTPS"
+ },
+ "initialDelaySeconds": 5,
+ "periodSeconds": 5,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "20Mi"
+ }
+ },
+ "securityContext": {
+ "allowPrivilegeEscalation": false,
+ "capabilities": {
+ "drop": [
+ "ALL"
+ ]
+ }
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError",
+ "volumeMounts": [
+ {
+ "mountPath": "/etc/tls/private",
+ "name": "secret-thanos-querier-tls"
+ },
+ {
+ "mountPath": "/etc/proxy/secrets",
+ "name": "secret-thanos-querier-oauth-cookie"
+ },
+ {
+ "mountPath": "/etc/pki/ca-trust/extracted/pem/",
+ "name": "thanos-querier-trusted-ca-bundle",
+ "readOnly": true
+ }
+ ]
+ },
+ {
+ "args": [
+ "--secure-listen-address=0.0.0.0:9092",
+ "--upstream=http://127.0.0.1:9095",
+ "--config-file=/etc/kube-rbac-proxy/config.yaml",
+ "--tls-cert-file=/etc/tls/private/tls.crt",
+ "--tls-private-key-file=/etc/tls/private/tls.key",
+ "--tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256",
+ "--logtostderr=true",
+ "--allow-paths=/api/v1/query,/api/v1/query_range,/api/v1/labels,/api/v1/label/*/values,/api/v1/series",
+ "--tls-min-version=VersionTLS12"
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d63bf13113fa7224bdeb21f4b07d53dce96f9fcc955048b870a97e7c1d054e11",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "kube-rbac-proxy",
+ "ports": [
+ {
+ "containerPort": 9092,
+ "name": "tenancy",
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "15Mi"
+ }
+ },
+ "securityContext": {
+ "allowPrivilegeEscalation": false,
+ "capabilities": {
+ "drop": [
+ "ALL"
+ ]
+ }
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError",
+ "volumeMounts": [
+ {
+ "mountPath": "/etc/tls/private",
+ "name": "secret-thanos-querier-tls"
+ },
+ {
+ "mountPath": "/etc/kube-rbac-proxy",
+ "name": "secret-thanos-querier-kube-rbac-proxy"
+ }
+ ]
+ },
+ {
+ "args": [
+ "--insecure-listen-address=127.0.0.1:9095",
+ "--upstream=http://127.0.0.1:9090",
+ "--label=namespace",
+ "--enable-label-apis",
+ "--error-on-replace"
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:4626710ac6a341bf707b2d5be57607ebc39ddd9d300ca9496e40fcfc75f20f3e",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "prom-label-proxy",
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "15Mi"
+ }
+ },
+ "securityContext": {
+ "allowPrivilegeEscalation": false,
+ "capabilities": {
+ "drop": [
+ "ALL"
+ ]
+ }
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError"
+ },
+ {
+ "args": [
+ "--secure-listen-address=0.0.0.0:9093",
+ "--upstream=http://127.0.0.1:9095",
+ "--config-file=/etc/kube-rbac-proxy/config.yaml",
+ "--tls-cert-file=/etc/tls/private/tls.crt",
+ "--tls-private-key-file=/etc/tls/private/tls.key",
+ "--tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256",
+ "--logtostderr=true",
+ "--allow-paths=/api/v1/rules",
+ "--tls-min-version=VersionTLS12"
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d63bf13113fa7224bdeb21f4b07d53dce96f9fcc955048b870a97e7c1d054e11",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "kube-rbac-proxy-rules",
+ "ports": [
+ {
+ "containerPort": 9093,
+ "name": "tenancy-rules",
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "15Mi"
+ }
+ },
+ "securityContext": {
+ "allowPrivilegeEscalation": false,
+ "capabilities": {
+ "drop": [
+ "ALL"
+ ]
+ }
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError",
+ "volumeMounts": [
+ {
+ "mountPath": "/etc/tls/private",
+ "name": "secret-thanos-querier-tls"
+ },
+ {
+ "mountPath": "/etc/kube-rbac-proxy",
+ "name": "secret-thanos-querier-kube-rbac-proxy-rules"
+ }
+ ]
+ },
+ {
+ "args": [
+ "--secure-listen-address=0.0.0.0:9094",
+ "--upstream=http://127.0.0.1:9090",
+ "--config-file=/etc/kube-rbac-proxy/config.yaml",
+ "--tls-cert-file=/etc/tls/private/tls.crt",
+ "--tls-private-key-file=/etc/tls/private/tls.key",
+ "--tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256",
+ "--client-ca-file=/etc/tls/client/client-ca.crt",
+ "--logtostderr=true",
+ "--allow-paths=/metrics",
+ "--tls-min-version=VersionTLS12"
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d63bf13113fa7224bdeb21f4b07d53dce96f9fcc955048b870a97e7c1d054e11",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "kube-rbac-proxy-metrics",
+ "ports": [
+ {
+ "containerPort": 9094,
+ "name": "metrics",
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "15Mi"
+ }
+ },
+ "securityContext": {
+ "allowPrivilegeEscalation": false,
+ "capabilities": {
+ "drop": [
+ "ALL"
+ ]
+ }
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError",
+ "volumeMounts": [
+ {
+ "mountPath": "/etc/tls/private",
+ "name": "secret-thanos-querier-tls"
+ },
+ {
+ "mountPath": "/etc/kube-rbac-proxy",
+ "name": "secret-thanos-querier-kube-rbac-proxy-metrics"
+ },
+ {
+ "mountPath": "/etc/tls/client",
+ "name": "metrics-client-ca",
+ "readOnly": true
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "nodeSelector": {
+ "node-role.kubernetes.io/infra": ""
+ },
+ "priorityClassName": "system-cluster-critical",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {
+ "runAsNonRoot": true,
+ "seccompProfile": {
+ "type": "RuntimeDefault"
+ }
+ },
+ "serviceAccount": "thanos-querier",
+ "serviceAccountName": "thanos-querier",
+ "terminationGracePeriodSeconds": 120,
+ "tolerations": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra",
+ "operator": "Exists"
+ }
+ ],
+ "volumes": [
+ {
+ "name": "secret-thanos-querier-tls",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "thanos-querier-tls"
+ }
+ },
+ {
+ "name": "secret-thanos-querier-oauth-cookie",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "thanos-querier-oauth-cookie"
+ }
+ },
+ {
+ "name": "secret-thanos-querier-kube-rbac-proxy",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "thanos-querier-kube-rbac-proxy"
+ }
+ },
+ {
+ "name": "secret-thanos-querier-kube-rbac-proxy-rules",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "thanos-querier-kube-rbac-proxy-rules"
+ }
+ },
+ {
+ "name": "secret-thanos-querier-kube-rbac-proxy-metrics",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "thanos-querier-kube-rbac-proxy-metrics"
+ }
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "name": "metrics-client-ca"
+ },
+ "name": "metrics-client-ca"
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "items": [
+ {
+ "key": "ca-bundle.crt",
+ "path": "tls-ca-bundle.pem"
+ }
+ ],
+ "name": "thanos-querier-trusted-ca-bundle-b4a61vnd2as9r",
+ "optional": true
+ },
+ "name": "thanos-querier-trusted-ca-bundle"
+ },
+ {
+ "name": "secret-grpc-tls",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "thanos-querier-grpc-tls-ciisrsmf0gg3i"
+ }
+ }
+ ]
+ }
+ }
+ }
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+
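+# This filter must be identical to the jqfilter in rule.yml, because the scan reads the
+# pre-filtered file whose name embeds a sha256 of the API path plus the filter string.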
+jq_filter='[ .items[] | select(.metadata.name | test("{{.var_deployments_without_high_availability}}"; "") | not) | select (.spec.replicas == 0 or .spec.replicas == 1 | not) | select(.spec.template.spec.affinity.podAntiAffinity == null and .spec.template.spec.topologySpreadConstraints == null) | .metadata.namespace + "/" + .metadata.name ]'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$deployment_apipath#$(echo -n "$deployment_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$deployment_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/deployments_excluded.pass.sh b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/deployments_excluded.pass.sh
new file mode 100644
index 000000000000..05690fa1e38d
--- /dev/null
+++ b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/deployments_excluded.pass.sh
@@ -0,0 +1,219 @@
+#!/bin/bash
+
+# remediation = none
+yum install -y jq
+
+kube_apipath="/kubernetes-api-resources"
+
+mkdir -p "$kube_apipath/apis/apps/v1/deployments"
+
+deployment_apipath="/apis/apps/v1/deployments?limit=500"
+
+cat << EOF > "$kube_apipath$deployment_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "143"
+ },
+ "creationTimestamp": "2022-04-19T12:58:24Z",
+ "generation": 143,
+ "labels": {
+ "app.kubernetes.io/component": "server",
+ "app.kubernetes.io/managed-by": "argocd",
+ "app.kubernetes.io/name": "argocd-server",
+ "app.kubernetes.io/part-of": "argocd"
+ },
+ "name": "argocd-server",
+ "namespace": "argocd",
+ "ownerReferences": [
+ {
+ "apiVersion": "argoproj.io/v1alpha1",
+ "blockOwnerDeletion": true,
+ "controller": true,
+ "kind": "ArgoCD",
+ "name": "argocd",
+ "uid": "366e4fb4-f3b1-4f1e-b319-a886aaae928a"
+ }
+ ],
+ "resourceVersion": "1357676941",
+ "uid": "4572963f-3e9d-4260-a8d7-bda9e557e093"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 3,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app.kubernetes.io/name": "argocd-server"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "creationTimestamp": null,
+ "labels": {
+ "app.kubernetes.io/name": "argocd-server",
+ "image.upgraded": "11082023-014723-UTC"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "command": [
+ "argocd-server",
+ "--insecure",
+ "--staticassets",
+ "/shared/app",
+ "--dex-server",
+ "https://argocd-dex-server.argocd.svc.cluster.local:5556",
+ "--repo-server",
+ "argocd-repo-server.argocd.svc.cluster.local:8081",
+ "--redis",
+ "argocd-redis.argocd.svc.cluster.local:6379",
+ "--loglevel",
+ "info",
+ "--logformat",
+ "text"
+ ],
+ "image": "registry.redhat.io/openshift-gitops-1/argocd-rhel8@sha256:f77594bc053be144b33ff9603ee9675c7e82946ec0dbfb04d8f942c8d73155da",
+ "imagePullPolicy": "Always",
+ "livenessProbe": {
+ "failureThreshold": 3,
+ "httpGet": {
+ "path": "/healthz",
+ "port": 8080,
+ "scheme": "HTTP"
+ },
+ "initialDelaySeconds": 3,
+ "periodSeconds": 30,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "name": "argocd-server",
+ "ports": [
+ {
+ "containerPort": 8080,
+ "protocol": "TCP"
+ },
+ {
+ "containerPort": 8083,
+ "protocol": "TCP"
+ }
+ ],
+ "readinessProbe": {
+ "failureThreshold": 3,
+ "httpGet": {
+ "path": "/healthz",
+ "port": 8080,
+ "scheme": "HTTP"
+ },
+ "initialDelaySeconds": 3,
+ "periodSeconds": 30,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "resources": {},
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "volumeMounts": [
+ {
+ "mountPath": "/app/config/ssh",
+ "name": "ssh-known-hosts"
+ },
+ {
+ "mountPath": "/app/config/tls",
+ "name": "tls-certs"
+ },
+ {
+ "mountPath": "/app/config/server/tls",
+ "name": "argocd-repo-server-tls"
+ },
+ {
+ "mountPath": "/app/config/server/tls/redis",
+ "name": "argocd-operator-redis-tls"
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "nodeSelector": {
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/infra": ""
+ },
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "argocd-argocd-server",
+ "serviceAccountName": "argocd-argocd-server",
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra",
+ "operator": "Exists"
+ }
+ ],
+ "volumes": [
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "name": "argocd-ssh-known-hosts-cm"
+ },
+ "name": "ssh-known-hosts"
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "name": "argocd-tls-certs-cm"
+ },
+ "name": "tls-certs"
+ },
+ {
+ "name": "argocd-repo-server-tls",
+ "secret": {
+ "defaultMode": 420,
+ "optional": true,
+ "secretName": "argocd-repo-server-tls"
+ }
+ },
+ {
+ "name": "argocd-operator-redis-tls",
+ "secret": {
+ "defaultMode": 420,
+ "optional": true,
+ "secretName": "argocd-operator-redis-tls"
+ }
+ }
+ ]
+ }
+ }
+ }
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+
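+# jq_filter matches the rule's jqfilter and is only used to derive the hashed file name;
+# jq_filter_with_var mimics the variable substitution (^argocd-server$), so the excluded
+# argocd-server deployment does not appear in the filtered result.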
+jq_filter='[ .items[] | select(.metadata.name | test("{{.var_deployments_without_high_availability}}"; "") | not) | select (.spec.replicas == 0 or .spec.replicas == 1 | not) | select(.spec.template.spec.affinity.podAntiAffinity == null and .spec.template.spec.topologySpreadConstraints == null) | .metadata.namespace + "/" + .metadata.name ]'
+jq_filter_with_var='[ .items[] | select(.metadata.name | test("^argocd-server$"; "") | not) | select (.spec.replicas == 0 or .spec.replicas == 1 | not) | select(.spec.template.spec.affinity.podAntiAffinity == null and .spec.template.spec.topologySpreadConstraints == null) | .metadata.namespace + "/" + .metadata.name ]'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$deployment_apipath#$(echo -n "$deployment_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter_with_var" "$kube_apipath$deployment_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/deployments_no_rules.fail.sh b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/deployments_no_rules.fail.sh
new file mode 100644
index 000000000000..e6ed11b84f23
--- /dev/null
+++ b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/deployments_no_rules.fail.sh
@@ -0,0 +1,739 @@
+#!/bin/bash
+
+# remediation = none
+yum install -y jq
+
+kube_apipath="/kubernetes-api-resources"
+
+mkdir -p "$kube_apipath/apis/apps/v1/deployments"
+
+deployment_apipath="/apis/apps/v1/deployments?limit=500"
+
+cat << EOF > "$kube_apipath$deployment_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "198"
+ },
+ "creationTimestamp": "2022-08-19T13:10:14Z",
+ "generation": 216,
+ "labels": {
+ "app.kubernetes.io/component": "dex-server",
+ "app.kubernetes.io/managed-by": "argocd",
+ "app.kubernetes.io/name": "argocd-dex-server",
+ "app.kubernetes.io/part-of": "argocd"
+ },
+ "name": "argocd-dex-server",
+ "namespace": "argocd",
+ "ownerReferences": [
+ {
+ "apiVersion": "argoproj.io/v1alpha1",
+ "blockOwnerDeletion": true,
+ "controller": true,
+ "kind": "ArgoCD",
+ "name": "argocd",
+ "uid": "366e4fb4-f3b1-4f1e-b319-a886aaae928a"
+ }
+ ],
+ "resourceVersion": "1303859027",
+ "uid": "5a0e160e-371d-4412-a697-bd07453e71c1"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 1,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app.kubernetes.io/name": "argocd-dex-server"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "creationTimestamp": null,
+ "labels": {
+ "app.kubernetes.io/name": "argocd-dex-server",
+ "dex.config.changed": "10242023-134623-UTC",
+ "image.upgraded": "11082023-014723-UTC"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "command": [
+ "/shared/argocd-dex",
+ "rundex"
+ ],
+ "image": "registry.redhat.io/openshift-gitops-1/dex-rhel8@sha256:8cc59901689858e06f5c2942f8c975449c17454fa8364da6153d9b5a3538a985",
+ "imagePullPolicy": "Always",
+ "livenessProbe": {
+ "failureThreshold": 3,
+ "httpGet": {
+ "path": "/healthz/live",
+ "port": 5558,
+ "scheme": "HTTP"
+ },
+ "initialDelaySeconds": 60,
+ "periodSeconds": 30,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "name": "dex",
+ "ports": [
+ {
+ "containerPort": 5556,
+ "name": "http",
+ "protocol": "TCP"
+ },
+ {
+ "containerPort": 5557,
+ "name": "grpc",
+ "protocol": "TCP"
+ },
+ {
+ "containerPort": 5558,
+ "name": "metrics",
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {},
+ "securityContext": {
+ "allowPrivilegeEscalation": false,
+ "capabilities": {
+ "drop": [
+ "ALL"
+ ]
+ },
+ "runAsNonRoot": true
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "volumeMounts": [
+ {
+ "mountPath": "/shared",
+ "name": "static-files"
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "initContainers": [
+ {
+ "command": [
+ "cp",
+ "-n",
+ "/usr/local/bin/argocd",
+ "/shared/argocd-dex"
+ ],
+ "image": "registry.redhat.io/openshift-gitops-1/argocd-rhel8@sha256:f77594bc053be144b33ff9603ee9675c7e82946ec0dbfb04d8f942c8d73155da",
+ "imagePullPolicy": "Always",
+ "name": "copyutil",
+ "resources": {},
+ "securityContext": {
+ "allowPrivilegeEscalation": false,
+ "capabilities": {
+ "drop": [
+ "ALL"
+ ]
+ },
+ "runAsNonRoot": true
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "volumeMounts": [
+ {
+ "mountPath": "/shared",
+ "name": "static-files"
+ }
+ ]
+ }
+ ],
+ "nodeSelector": {
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/infra": ""
+ },
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "argocd-argocd-dex-server",
+ "serviceAccountName": "argocd-argocd-dex-server",
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra",
+ "operator": "Exists"
+ }
+ ],
+ "volumes": [
+ {
+ "emptyDir": {},
+ "name": "static-files"
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "27"
+ },
+ "creationTimestamp": "2022-04-19T12:58:24Z",
+ "generation": 29,
+ "labels": {
+ "app.kubernetes.io/component": "redis",
+ "app.kubernetes.io/managed-by": "argocd",
+ "app.kubernetes.io/name": "argocd-redis",
+ "app.kubernetes.io/part-of": "argocd"
+ },
+ "name": "argocd-redis",
+ "namespace": "argocd",
+ "ownerReferences": [
+ {
+ "apiVersion": "argoproj.io/v1alpha1",
+ "blockOwnerDeletion": true,
+ "controller": true,
+ "kind": "ArgoCD",
+ "name": "argocd",
+ "uid": "366e4fb4-f3b1-4f1e-b319-a886aaae928a"
+ }
+ ],
+ "resourceVersion": "1357676855",
+ "uid": "269ad8b0-2de5-4302-94b1-66bfe9460483"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 1,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app.kubernetes.io/name": "argocd-redis"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "creationTimestamp": null,
+ "labels": {
+ "app.kubernetes.io/name": "argocd-redis",
+ "image.upgraded": "11072023-102823-UTC"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "args": [
+ "redis-server",
+ "--protected-mode",
+ "no",
+ "--save",
+ "",
+ "--appendonly",
+ "no"
+ ],
+ "image": "registry.redhat.io/rhel8/redis-6@sha256:edbd40185ed8c20ee61ebdf9f2e1e1d7594598fceff963b4dee3201472d6deda",
+ "imagePullPolicy": "Always",
+ "name": "redis",
+ "ports": [
+ {
+ "containerPort": 6379,
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {},
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File"
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "nodeSelector": {
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/infra": ""
+ },
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra",
+ "operator": "Exists"
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "161"
+ },
+ "creationTimestamp": "2022-04-19T12:58:24Z",
+ "generation": 163,
+ "labels": {
+ "app.kubernetes.io/component": "repo-server",
+ "app.kubernetes.io/managed-by": "argocd",
+ "app.kubernetes.io/name": "argocd-repo-server",
+ "app.kubernetes.io/part-of": "argocd"
+ },
+ "name": "argocd-repo-server",
+ "namespace": "argocd",
+ "ownerReferences": [
+ {
+ "apiVersion": "argoproj.io/v1alpha1",
+ "blockOwnerDeletion": true,
+ "controller": true,
+ "kind": "ArgoCD",
+ "name": "argocd",
+ "uid": "366e4fb4-f3b1-4f1e-b319-a886aaae928a"
+ }
+ ],
+ "resourceVersion": "1357676885",
+ "uid": "f099a55f-a7f9-48d1-8809-828868f83bcf"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 3,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app.kubernetes.io/name": "argocd-repo-server"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "creationTimestamp": null,
+ "labels": {
+ "app.kubernetes.io/name": "argocd-repo-server",
+ "image.upgraded": "11082023-014723-UTC"
+ }
+ },
+ "spec": {
+ "automountServiceAccountToken": false,
+ "containers": [
+ {
+ "command": [
+ "uid_entrypoint.sh",
+ "argocd-repo-server",
+ "--redis",
+ "argocd-redis.argocd.svc.cluster.local:6379",
+ "--loglevel",
+ "info",
+ "--logformat",
+ "text"
+ ],
+ "image": "registry.redhat.io/openshift-gitops-1/argocd-rhel8@sha256:f77594bc053be144b33ff9603ee9675c7e82946ec0dbfb04d8f942c8d73155da",
+ "imagePullPolicy": "Always",
+ "livenessProbe": {
+ "failureThreshold": 3,
+ "initialDelaySeconds": 5,
+ "periodSeconds": 10,
+ "successThreshold": 1,
+ "tcpSocket": {
+ "port": 8081
+ },
+ "timeoutSeconds": 1
+ },
+ "name": "argocd-repo-server",
+ "ports": [
+ {
+ "containerPort": 8081,
+ "name": "server",
+ "protocol": "TCP"
+ },
+ {
+ "containerPort": 8084,
+ "name": "metrics",
+ "protocol": "TCP"
+ }
+ ],
+ "readinessProbe": {
+ "failureThreshold": 3,
+ "initialDelaySeconds": 5,
+ "periodSeconds": 10,
+ "successThreshold": 1,
+ "tcpSocket": {
+ "port": 8081
+ },
+ "timeoutSeconds": 1
+ },
+ "resources": {},
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "volumeMounts": [
+ {
+ "mountPath": "/app/config/ssh",
+ "name": "ssh-known-hosts"
+ },
+ {
+ "mountPath": "/app/config/tls",
+ "name": "tls-certs"
+ },
+ {
+ "mountPath": "/app/config/gpg/source",
+ "name": "gpg-keys"
+ },
+ {
+ "mountPath": "/app/config/gpg/keys",
+ "name": "gpg-keyring"
+ },
+ {
+ "mountPath": "/tmp",
+ "name": "tmp"
+ },
+ {
+ "mountPath": "/app/config/reposerver/tls",
+ "name": "argocd-repo-server-tls"
+ },
+ {
+ "mountPath": "/app/config/reposerver/tls/redis",
+ "name": "argocd-operator-redis-tls"
+ },
+ {
+ "mountPath": "/home/argocd/cmp-server/plugins",
+ "name": "plugins"
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "initContainers": [
+ {
+ "command": [
+ "cp",
+ "-n",
+ "/usr/local/bin/argocd",
+ "/var/run/argocd/argocd-cmp-server"
+ ],
+ "image": "registry.redhat.io/openshift-gitops-1/argocd-rhel8@sha256:f77594bc053be144b33ff9603ee9675c7e82946ec0dbfb04d8f942c8d73155da",
+ "imagePullPolicy": "Always",
+ "name": "copyutil",
+ "resources": {},
+ "securityContext": {
+ "allowPrivilegeEscalation": false,
+ "capabilities": {
+ "drop": [
+ "ALL"
+ ]
+ },
+ "runAsNonRoot": true
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "volumeMounts": [
+ {
+ "mountPath": "/var/run/argocd",
+ "name": "var-files"
+ }
+ ]
+ }
+ ],
+ "nodeSelector": {
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/infra": ""
+ },
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra",
+ "operator": "Exists"
+ }
+ ],
+ "volumes": [
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "name": "argocd-ssh-known-hosts-cm"
+ },
+ "name": "ssh-known-hosts"
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "name": "argocd-tls-certs-cm"
+ },
+ "name": "tls-certs"
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "name": "argocd-gpg-keys-cm"
+ },
+ "name": "gpg-keys"
+ },
+ {
+ "emptyDir": {},
+ "name": "gpg-keyring"
+ },
+ {
+ "emptyDir": {},
+ "name": "tmp"
+ },
+ {
+ "name": "argocd-repo-server-tls",
+ "secret": {
+ "defaultMode": 420,
+ "optional": true,
+ "secretName": "argocd-repo-server-tls"
+ }
+ },
+ {
+ "name": "argocd-operator-redis-tls",
+ "secret": {
+ "defaultMode": 420,
+ "optional": true,
+ "secretName": "argocd-operator-redis-tls"
+ }
+ },
+ {
+ "emptyDir": {},
+ "name": "var-files"
+ },
+ {
+ "emptyDir": {},
+ "name": "plugins"
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "143"
+ },
+ "creationTimestamp": "2022-04-19T12:58:24Z",
+ "generation": 143,
+ "labels": {
+ "app.kubernetes.io/component": "server",
+ "app.kubernetes.io/managed-by": "argocd",
+ "app.kubernetes.io/name": "argocd-server",
+ "app.kubernetes.io/part-of": "argocd"
+ },
+ "name": "argocd-server",
+ "namespace": "argocd",
+ "ownerReferences": [
+ {
+ "apiVersion": "argoproj.io/v1alpha1",
+ "blockOwnerDeletion": true,
+ "controller": true,
+ "kind": "ArgoCD",
+ "name": "argocd",
+ "uid": "366e4fb4-f3b1-4f1e-b319-a886aaae928a"
+ }
+ ],
+ "resourceVersion": "1357676941",
+ "uid": "4572963f-3e9d-4260-a8d7-bda9e557e093"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 3,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app.kubernetes.io/name": "argocd-server"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "creationTimestamp": null,
+ "labels": {
+ "app.kubernetes.io/name": "argocd-server",
+ "image.upgraded": "11082023-014723-UTC"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "command": [
+ "argocd-server",
+ "--insecure",
+ "--staticassets",
+ "/shared/app",
+ "--dex-server",
+ "https://argocd-dex-server.argocd.svc.cluster.local:5556",
+ "--repo-server",
+ "argocd-repo-server.argocd.svc.cluster.local:8081",
+ "--redis",
+ "argocd-redis.argocd.svc.cluster.local:6379",
+ "--loglevel",
+ "info",
+ "--logformat",
+ "text"
+ ],
+ "image": "registry.redhat.io/openshift-gitops-1/argocd-rhel8@sha256:f77594bc053be144b33ff9603ee9675c7e82946ec0dbfb04d8f942c8d73155da",
+ "imagePullPolicy": "Always",
+ "livenessProbe": {
+ "failureThreshold": 3,
+ "httpGet": {
+ "path": "/healthz",
+ "port": 8080,
+ "scheme": "HTTP"
+ },
+ "initialDelaySeconds": 3,
+ "periodSeconds": 30,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "name": "argocd-server",
+ "ports": [
+ {
+ "containerPort": 8080,
+ "protocol": "TCP"
+ },
+ {
+ "containerPort": 8083,
+ "protocol": "TCP"
+ }
+ ],
+ "readinessProbe": {
+ "failureThreshold": 3,
+ "httpGet": {
+ "path": "/healthz",
+ "port": 8080,
+ "scheme": "HTTP"
+ },
+ "initialDelaySeconds": 3,
+ "periodSeconds": 30,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "resources": {},
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "volumeMounts": [
+ {
+ "mountPath": "/app/config/ssh",
+ "name": "ssh-known-hosts"
+ },
+ {
+ "mountPath": "/app/config/tls",
+ "name": "tls-certs"
+ },
+ {
+ "mountPath": "/app/config/server/tls",
+ "name": "argocd-repo-server-tls"
+ },
+ {
+ "mountPath": "/app/config/server/tls/redis",
+ "name": "argocd-operator-redis-tls"
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "nodeSelector": {
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/infra": ""
+ },
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "argocd-argocd-server",
+ "serviceAccountName": "argocd-argocd-server",
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra",
+ "operator": "Exists"
+ }
+ ],
+ "volumes": [
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "name": "argocd-ssh-known-hosts-cm"
+ },
+ "name": "ssh-known-hosts"
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "name": "argocd-tls-certs-cm"
+ },
+ "name": "tls-certs"
+ },
+ {
+ "name": "argocd-repo-server-tls",
+ "secret": {
+ "defaultMode": 420,
+ "optional": true,
+ "secretName": "argocd-repo-server-tls"
+ }
+ },
+ {
+ "name": "argocd-operator-redis-tls",
+ "secret": {
+ "defaultMode": 420,
+ "optional": true,
+ "secretName": "argocd-operator-redis-tls"
+ }
+ }
+ ]
+ }
+ }
+ }
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+
+jq_filter='[ .items[] | select(.metadata.name | test("{{.var_deployments_without_high_availability}}"; "") | not) | select (.spec.replicas == 0 or .spec.replicas == 1 | not) | select(.spec.template.spec.affinity.podAntiAffinity == null and .spec.template.spec.topologySpreadConstraints == null) | .metadata.namespace + "/" + .metadata.name ]'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$deployment_apipath#$(echo -n "$deployment_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$deployment_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/deployments_single_replica.pass.sh b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/deployments_single_replica.pass.sh
new file mode 100644
index 000000000000..3e3866b6df6a
--- /dev/null
+++ b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/deployments_single_replica.pass.sh
@@ -0,0 +1,329 @@
+#!/bin/bash
+
+# remediation = none
+yum install -y jq
+
+kube_apipath="/kubernetes-api-resources"
+
+mkdir -p "$kube_apipath/apis/apps/v1/deployments"
+
+deployment_apipath="/apis/apps/v1/deployments?limit=500"
+
+cat < "$kube_apipath$deployment_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "198"
+ },
+ "creationTimestamp": "2022-08-19T13:10:14Z",
+ "generation": 216,
+ "labels": {
+ "app.kubernetes.io/component": "dex-server",
+ "app.kubernetes.io/managed-by": "argocd",
+ "app.kubernetes.io/name": "argocd-dex-server",
+ "app.kubernetes.io/part-of": "argocd"
+ },
+ "name": "argocd-dex-server",
+ "namespace": "argocd",
+ "ownerReferences": [
+ {
+ "apiVersion": "argoproj.io/v1alpha1",
+ "blockOwnerDeletion": true,
+ "controller": true,
+ "kind": "ArgoCD",
+ "name": "argocd",
+ "uid": "366e4fb4-f3b1-4f1e-b319-a886aaae928a"
+ }
+ ],
+ "resourceVersion": "1303859027",
+ "uid": "5a0e160e-371d-4412-a697-bd07453e71c1"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 1,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app.kubernetes.io/name": "argocd-dex-server"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "creationTimestamp": null,
+ "labels": {
+ "app.kubernetes.io/name": "argocd-dex-server",
+ "dex.config.changed": "10242023-134623-UTC",
+ "image.upgraded": "11082023-014723-UTC"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "command": [
+ "/shared/argocd-dex",
+ "rundex"
+ ],
+ "image": "registry.redhat.io/openshift-gitops-1/dex-rhel8@sha256:8cc59901689858e06f5c2942f8c975449c17454fa8364da6153d9b5a3538a985",
+ "imagePullPolicy": "Always",
+ "livenessProbe": {
+ "failureThreshold": 3,
+ "httpGet": {
+ "path": "/healthz/live",
+ "port": 5558,
+ "scheme": "HTTP"
+ },
+ "initialDelaySeconds": 60,
+ "periodSeconds": 30,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "name": "dex",
+ "ports": [
+ {
+ "containerPort": 5556,
+ "name": "http",
+ "protocol": "TCP"
+ },
+ {
+ "containerPort": 5557,
+ "name": "grpc",
+ "protocol": "TCP"
+ },
+ {
+ "containerPort": 5558,
+ "name": "metrics",
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {},
+ "securityContext": {
+ "allowPrivilegeEscalation": false,
+ "capabilities": {
+ "drop": [
+ "ALL"
+ ]
+ },
+ "runAsNonRoot": true
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "volumeMounts": [
+ {
+ "mountPath": "/shared",
+ "name": "static-files"
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "initContainers": [
+ {
+ "command": [
+ "cp",
+ "-n",
+ "/usr/local/bin/argocd",
+ "/shared/argocd-dex"
+ ],
+ "image": "registry.redhat.io/openshift-gitops-1/argocd-rhel8@sha256:f77594bc053be144b33ff9603ee9675c7e82946ec0dbfb04d8f942c8d73155da",
+ "imagePullPolicy": "Always",
+ "name": "copyutil",
+ "resources": {},
+ "securityContext": {
+ "allowPrivilegeEscalation": false,
+ "capabilities": {
+ "drop": [
+ "ALL"
+ ]
+ },
+ "runAsNonRoot": true
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "volumeMounts": [
+ {
+ "mountPath": "/shared",
+ "name": "static-files"
+ }
+ ]
+ }
+ ],
+ "nodeSelector": {
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/infra": ""
+ },
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "argocd-argocd-dex-server",
+ "serviceAccountName": "argocd-argocd-dex-server",
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra",
+ "operator": "Exists"
+ }
+ ],
+ "volumes": [
+ {
+ "emptyDir": {},
+ "name": "static-files"
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "27"
+ },
+ "creationTimestamp": "2022-04-19T12:58:24Z",
+ "generation": 29,
+ "labels": {
+ "app.kubernetes.io/component": "redis",
+ "app.kubernetes.io/managed-by": "argocd",
+ "app.kubernetes.io/name": "argocd-redis",
+ "app.kubernetes.io/part-of": "argocd"
+ },
+ "name": "argocd-redis",
+ "namespace": "argocd",
+ "ownerReferences": [
+ {
+ "apiVersion": "argoproj.io/v1alpha1",
+ "blockOwnerDeletion": true,
+ "controller": true,
+ "kind": "ArgoCD",
+ "name": "argocd",
+ "uid": "366e4fb4-f3b1-4f1e-b319-a886aaae928a"
+ }
+ ],
+ "resourceVersion": "1357676855",
+ "uid": "269ad8b0-2de5-4302-94b1-66bfe9460483"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 1,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app.kubernetes.io/name": "argocd-redis"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "creationTimestamp": null,
+ "labels": {
+ "app.kubernetes.io/name": "argocd-redis",
+ "image.upgraded": "11072023-102823-UTC"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "args": [
+ "redis-server",
+ "--protected-mode",
+ "no",
+ "--save",
+ "",
+ "--appendonly",
+ "no"
+ ],
+ "image": "registry.redhat.io/rhel8/redis-6@sha256:edbd40185ed8c20ee61ebdf9f2e1e1d7594598fceff963b4dee3201472d6deda",
+ "imagePullPolicy": "Always",
+ "name": "redis",
+ "ports": [
+ {
+ "containerPort": 6379,
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {},
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File"
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "nodeSelector": {
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/infra": ""
+ },
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra",
+ "operator": "Exists"
+ }
+ ]
+ }
+ }
+ },
+ "status": {
+ "availableReplicas": 1,
+ "conditions": [
+ {
+ "lastTransitionTime": "2022-04-19T12:58:24Z",
+ "lastUpdateTime": "2023-11-07T10:28:41Z",
+ "message": "ReplicaSet \"argocd-redis-6bfd7df9fd\" has successfully progressed.",
+ "reason": "NewReplicaSetAvailable",
+ "status": "True",
+ "type": "Progressing"
+ },
+ {
+ "lastTransitionTime": "2024-07-11T13:58:40Z",
+ "lastUpdateTime": "2024-07-11T13:58:40Z",
+ "message": "Deployment has minimum availability.",
+ "reason": "MinimumReplicasAvailable",
+ "status": "True",
+ "type": "Available"
+ }
+ ],
+ "observedGeneration": 29,
+ "readyReplicas": 1,
+ "replicas": 1,
+ "updatedReplicas": 1
+ }
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+
+jq_filter='[ .items[] | select(.metadata.name | test("{{.var_deployments_without_high_availability}}"; "") | not) | select (.spec.replicas == 0 or .spec.replicas == 1 | not) | select(.spec.template.spec.affinity.podAntiAffinity == null and .spec.template.spec.topologySpreadConstraints == null) | .metadata.namespace + "/" + .metadata.name ]'
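+# Note: both deployments in this fixture (argocd-dex-server and argocd-redis) run a single
+# replica, so the filter above is expected to drop them and yield an empty list, which makes
+# this a passing test.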
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$deployment_apipath#$(echo -n "$deployment_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$deployment_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/deployments_topology_spread_constraints.pass.sh b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/deployments_topology_spread_constraints.pass.sh
new file mode 100644
index 000000000000..423d4db6cc30
--- /dev/null
+++ b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/deployments_topology_spread_constraints.pass.sh
@@ -0,0 +1,466 @@
+#!/bin/bash
+
+# remediation = none
+yum install -y jq
+
+kube_apipath="/kubernetes-api-resources"
+
+mkdir -p "$kube_apipath/apis/apps/v1/deployments"
+
+deployment_apipath="/apis/apps/v1/deployments?limit=500"
+
+cat < "$kube_apipath$deployment_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "1",
+ "openshift.io/generated-by": "OpenShiftWebConsole"
+ },
+ "creationTimestamp": "2021-05-18T20:18:35Z",
+ "generation": 1,
+ "labels": {
+ "app": "nextcloud",
+ "app.kubernetes.io/component": "nextcloud",
+ "app.kubernetes.io/instance": "nextcloud",
+ "app.openshift.io/runtime-namespace": "myapp"
+ },
+ "name": "nextcloud",
+ "namespace": "myapp",
+ "resourceVersion": "1303859019",
+ "uid": "f3ddd586-f034-41ae-845f-472b7026e966"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 1,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app": "nextcloud"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "annotations": {
+ "openshift.io/generated-by": "OpenShiftWebConsole"
+ },
+ "creationTimestamp": null,
+ "labels": {
+ "app": "nextcloud",
+ "deploymentconfig": "nextcloud"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "image": "nextcloud@sha256:3edcc23febe484fff37f9121f96bc634512a56d318477e81316de24cfdec7380",
+ "imagePullPolicy": "Always",
+ "name": "nextcloud",
+ "ports": [
+ {
+ "containerPort": 80,
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {},
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "volumeMounts": [
+ {
+ "mountPath": "/var/www/html",
+ "name": "nextcloud-1"
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "terminationGracePeriodSeconds": 30,
+ "volumes": [
+ {
+ "emptyDir": {},
+ "name": "nextcloud-1"
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "1"
+ },
+ "creationTimestamp": "2024-07-15T09:40:37Z",
+ "generation": 1,
+ "labels": {
+ "app": "webserver"
+ },
+ "name": "webserver",
+ "namespace": "myapp",
+ "resourceVersion": "1363603995",
+ "uid": "5f2f4752-3f8a-4dd6-9c74-e6766336d579"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 6,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app": "webserver"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "creationTimestamp": null,
+ "labels": {
+ "app": "webserver"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "command": [
+ "nginx",
+ "-g",
+ "daemon off;"
+ ],
+ "image": "registry.access.redhat.com/ubi9/nginx-120:1-148.1719561315",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "webserver",
+ "ports": [
+ {
+ "containerPort": 80,
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {},
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File"
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "terminationGracePeriodSeconds": 30,
+ "topologySpreadConstraints": [
+ {
+ "labelSelector": {
+ "matchLabels": {
+ "app": "webserver"
+ }
+ },
+ "maxSkew": 1,
+ "topologyKey": "topology.kubernetes.io/zone",
+ "whenUnsatisfiable": "DoNotSchedule"
+ }
+ ]
+ }
+ }
+ }
+ },
+ {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "347"
+ },
+ "creationTimestamp": "2022-04-04T12:44:37Z",
+ "generation": 347,
+ "labels": {
+ "app.kubernetes.io/component": "metrics-adapter",
+ "app.kubernetes.io/managed-by": "cluster-monitoring-operator",
+ "app.kubernetes.io/name": "prometheus-adapter",
+ "app.kubernetes.io/part-of": "openshift-monitoring",
+ "app.kubernetes.io/version": "0.10.0"
+ },
+ "name": "prometheus-adapter",
+ "namespace": "openshift-monitoring",
+ "resourceVersion": "1348266955",
+ "uid": "d2c2d49c-dbe6-40a4-85e8-ce9329cb55c0"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 2,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app.kubernetes.io/component": "metrics-adapter",
+ "app.kubernetes.io/name": "prometheus-adapter",
+ "app.kubernetes.io/part-of": "openshift-monitoring"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": 1
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "annotations": {
+ "target.workload.openshift.io/management": "{\"effect\": \"PreferredDuringScheduling\"}"
+ },
+ "creationTimestamp": null,
+ "labels": {
+ "app.kubernetes.io/component": "metrics-adapter",
+ "app.kubernetes.io/managed-by": "cluster-monitoring-operator",
+ "app.kubernetes.io/name": "prometheus-adapter",
+ "app.kubernetes.io/part-of": "openshift-monitoring",
+ "app.kubernetes.io/version": "0.10.0"
+ }
+ },
+ "spec": {
+ "affinity": {
+ "podAntiAffinity": {
+ "requiredDuringSchedulingIgnoredDuringExecution": [
+ {
+ "labelSelector": {
+ "matchLabels": {
+ "app.kubernetes.io/component": "metrics-adapter",
+ "app.kubernetes.io/name": "prometheus-adapter",
+ "app.kubernetes.io/part-of": "openshift-monitoring"
+ }
+ },
+ "namespaces": [
+ "openshift-monitoring"
+ ],
+ "topologyKey": "kubernetes.io/hostname"
+ }
+ ]
+ }
+ },
+ "automountServiceAccountToken": true,
+ "containers": [
+ {
+ "args": [
+ "--prometheus-auth-config=/etc/prometheus-config/prometheus-config.yaml",
+ "--config=/etc/adapter/config.yaml",
+ "--logtostderr=true",
+ "--metrics-relist-interval=1m",
+ "--prometheus-url=https://prometheus-k8s.openshift-monitoring.svc:9091",
+ "--secure-port=6443",
+ "--tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256",
+ "--client-ca-file=/etc/tls/private/client-ca-file",
+ "--requestheader-client-ca-file=/etc/tls/private/requestheader-client-ca-file",
+ "--requestheader-allowed-names=kube-apiserver-proxy,system:kube-apiserver-proxy,system:openshift-aggregator",
+ "--requestheader-extra-headers-prefix=X-Remote-Extra-",
+ "--requestheader-group-headers=X-Remote-Group",
+ "--requestheader-username-headers=X-Remote-User",
+ "--tls-cert-file=/etc/tls/private/tls.crt",
+ "--tls-private-key-file=/etc/tls/private/tls.key",
+ "--audit-policy-file=/etc/audit/metadata-profile.yaml",
+ "--audit-log-path=/var/log/adapter/audit.log",
+ "--audit-log-maxsize=100",
+ "--audit-log-maxbackup=5",
+ "--audit-log-compress=true",
+ "--tls-min-version=VersionTLS12"
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3cade03b16237889606ab1e3b1b7fc12d160cacc36ae3df2de05d281bccc7f20",
+ "imagePullPolicy": "IfNotPresent",
+ "livenessProbe": {
+ "failureThreshold": 5,
+ "httpGet": {
+ "path": "/livez",
+ "port": "https",
+ "scheme": "HTTPS"
+ },
+ "periodSeconds": 5,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "name": "prometheus-adapter",
+ "ports": [
+ {
+ "containerPort": 6443,
+ "name": "https",
+ "protocol": "TCP"
+ }
+ ],
+ "readinessProbe": {
+ "failureThreshold": 5,
+ "httpGet": {
+ "path": "/readyz",
+ "port": "https",
+ "scheme": "HTTPS"
+ },
+ "periodSeconds": 5,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "40Mi"
+ }
+ },
+ "securityContext": {
+ "allowPrivilegeEscalation": false,
+ "capabilities": {
+ "drop": [
+ "ALL"
+ ]
+ },
+ "readOnlyRootFilesystem": true
+ },
+ "startupProbe": {
+ "failureThreshold": 18,
+ "httpGet": {
+ "path": "/livez",
+ "port": "https",
+ "scheme": "HTTPS"
+ },
+ "periodSeconds": 10,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError",
+ "volumeMounts": [
+ {
+ "mountPath": "/tmp",
+ "name": "tmpfs"
+ },
+ {
+ "mountPath": "/etc/adapter",
+ "name": "config"
+ },
+ {
+ "mountPath": "/etc/prometheus-config",
+ "name": "prometheus-adapter-prometheus-config"
+ },
+ {
+ "mountPath": "/etc/ssl/certs",
+ "name": "serving-certs-ca-bundle"
+ },
+ {
+ "mountPath": "/etc/audit",
+ "name": "prometheus-adapter-audit-profiles",
+ "readOnly": true
+ },
+ {
+ "mountPath": "/var/log/adapter",
+ "name": "audit-log"
+ },
+ {
+ "mountPath": "/etc/tls/private",
+ "name": "tls",
+ "readOnly": true
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "nodeSelector": {
+ "node-role.kubernetes.io/infra": ""
+ },
+ "priorityClassName": "system-cluster-critical",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "prometheus-adapter",
+ "serviceAccountName": "prometheus-adapter",
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra",
+ "operator": "Exists"
+ }
+ ],
+ "volumes": [
+ {
+ "emptyDir": {},
+ "name": "tmpfs"
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "name": "prometheus-adapter-prometheus-config"
+ },
+ "name": "prometheus-adapter-prometheus-config"
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "items": [
+ {
+ "key": "service-ca.crt",
+ "path": "service-ca.crt"
+ }
+ ],
+ "name": "serving-certs-ca-bundle"
+ },
+ "name": "serving-certs-ca-bundle"
+ },
+ {
+ "emptyDir": {},
+ "name": "audit-log"
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "name": "prometheus-adapter-audit-profiles"
+ },
+ "name": "prometheus-adapter-audit-profiles"
+ },
+ {
+ "name": "tls",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "prometheus-adapter-6fk0fnclda7g1"
+ }
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "name": "adapter-config"
+ },
+ "name": "config"
+ }
+ ]
+ }
+ }
+ }
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+
+jq_filter='[ .items[] | select(.metadata.name | test("{{.var_deployments_without_high_availability}}"; "") | not) | select (.spec.replicas == 0 or .spec.replicas == 1 | not) | select(.spec.template.spec.affinity.podAntiAffinity == null and .spec.template.spec.topologySpreadConstraints == null) | .metadata.namespace + "/" + .metadata.name ]'
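+# Note: nextcloud runs a single replica, webserver defines topologySpreadConstraints and
+# prometheus-adapter defines podAntiAffinity, so the filter above is expected to yield an
+# empty list, which makes this a passing test.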
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$deployment_apipath#$(echo -n "$deployment_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$deployment_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/no_deployments.pass.sh b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/no_deployments.pass.sh
new file mode 100644
index 000000000000..18de23a94e96
--- /dev/null
+++ b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/tests/no_deployments.pass.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+# remediation = none
+yum install -y jq
+
+kube_apipath="/kubernetes-api-resources"
+
+mkdir -p "$kube_apipath/apis/apps/v1/deployments"
+
+deployment_apipath="/apis/apps/v1/deployments?limit=500"
+
+# This file assumes that we don't have any deployments.
+cat < "$kube_apipath$deployment_apipath"
+{
+ "apiVersion": "v1",
+ "items": [],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": "",
+ "selfLink": ""
+ }
+}
+EOF
+
+
+jq_filter='[ .items[] | select(.metadata.name | test("{{.var_deployments_without_high_availability}}"; "") | not) | select (.spec.replicas == 0 or .spec.replicas == 1 | not) | select(.spec.template.spec.affinity.podAntiAffinity == null and .spec.template.spec.topologySpreadConstraints == null) | .metadata.namespace + "/" + .metadata.name ]'
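+# Note: with an empty item list the filter trivially yields an empty list, so the rule passes.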
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$deployment_apipath#$(echo -n "$deployment_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$deployment_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/var_deployments_without_high_availability.var b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/var_deployments_without_high_availability.var
new file mode 100644
index 000000000000..8e6101508272
--- /dev/null
+++ b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_deployment/var_deployments_without_high_availability.var
@@ -0,0 +1,14 @@
+documentation_complete: true
+
+title: 'Permitted Deployments without high availability'
+
+description: 'A regular expression matching the names of deployments that are exempted from the rules checking for high availability and even spreading across nodes and zones'
+
+type: string
+
+operator: equals
+
+interactive: false
+
+options:
+ default: ""
diff --git a/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/rule.yml b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/rule.yml
new file mode 100644
index 000000000000..13bc4b24d54d
--- /dev/null
+++ b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/rule.yml
@@ -0,0 +1,83 @@
+documentation_complete: true
+
+title: 'Ensure statefulsets have either anti-affinity rules or topology spread constraints'
+
+description: |-
+ Distributing Kubernetes pods across nodes and availability zones using pod topology spread
+ constraints and anti-affinity rules enhances high availability, fault tolerance, performance,
+ and security by balancing workloads, reducing single points of failure, and ensuring compliance
+ and data protection.
+
+ There might be statefulsets that do not require high availability or spreading across nodes.
+ To limit the number of false positives, this rule only checks statefulsets with a replica count
+ of more than one. For statefulsets with a single replica, neither anti-affinity rules nor
+ topology spread constraints provide any value.
+
+ To exclude other statefulsets from this rule, set the variable
+ var_statefulsets_without_high_availability to a regular expression matching their names.
+ Statefulsets whose names match this expression are ignored in all namespaces.
+
+ An example allowing all statefulsets named uncritical-service is as follows:
+
+
+ apiVersion: compliance.openshift.io/v1alpha1
+ kind: TailoredProfile
+ metadata:
+ name: bsi-additional-statefulsets
+ spec:
+ description: Allows additional statefulsets to not be highly available and evenly spread
+ setValues:
+ - name: upstream-ocp4-var_statefulsets_without_high_availability
+ rationale: Ignore our uncritical service
+ value: ^uncritical-service$
+ extends: upstream-ocp4-bsi
+ title: Modified BSI allowing non-highly-available statefulsets
+
+
+ Finally, reference this TailoredProfile in a ScanSettingBinding, as shown in the example below.
+ For more information on Tailoring the Compliance Operator, please consult the
+ OpenShift documentation:
+ {{{ weblink(link="https://docs.openshift.com/container-platform/4.16/security/compliance_operator/co-scans/compliance-operator-tailor.html") }}}
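+
+ An example ScanSettingBinding that activates scanning with this TailoredProfile could look
+ like the following. The binding name and the referenced ScanSetting are illustrative and may
+ differ in your environment:
+
+
+ apiVersion: compliance.openshift.io/v1alpha1
+ kind: ScanSettingBinding
+ metadata:
+   name: bsi-additional-statefulsets
+ profiles:
+ - apiGroup: compliance.openshift.io/v1alpha1
+   kind: TailoredProfile
+   name: bsi-additional-statefulsets
+ settingsRef:
+   apiGroup: compliance.openshift.io/v1alpha1
+   kind: ScanSetting
+   name: default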
+
+
+rationale: |-
+ Distributing Kubernetes pods across nodes and availability zones using pod topology spread
+ constraints and anti-affinity rules is essential for enhancing high availability, fault
+ tolerance, and security.
+ This approach ensures that a single node or AZ failure does not lead to total application
+ downtime, as workloads are balanced and resources are efficiently utilized.
+
+identifiers: {}
+
+references:
+ bsi: APP.4.4.A19
+
+severity: medium
+
+{{% set jqfilter = '[ .items[] | select(.metadata.name | test("{{.var_statefulsets_without_high_availability}}"; "") | not) | select (.spec.replicas == 0 or .spec.replicas == 1 | not) | select(.spec.template.spec.affinity.podAntiAffinity == null and .spec.template.spec.topologySpreadConstraints == null) | .metadata.namespace + "/" + .metadata.name ]' %}}
+
+ocil_clause: 'StatefulSets with neither anti-affinity rules nor topology spread constraints exist'
+
+ocil: |-
+ Run the following command to determine anti-affinity rules and topology spread constraints of
+ all statefulsets:
+ $ oc get statefulsets -A -o json | jq '{{{ jqfilter }}}'
+ Make sure that either suitable anti-affinity rules or topology spread constraints are configured
+ for all workloads that require high availability.
+
+warnings:
+- general: |-
+ {{{ openshift_filtered_cluster_setting({'/apis/apps/v1/statefulsets?limit=500': jqfilter}) | indent(4) }}}
+
+template:
+ name: yamlfile_value
+ vars:
+ ocp_data: "true"
+ filepath: |-
+ {{{ openshift_filtered_path('/apis/apps/v1/statefulsets?limit=500', jqfilter) }}}
+ yamlpath: "[:]"
+ check_existence: "none_exist"
+ entity_check: "all"
+ values:
+ - value: "(.*?)"
+ operation: "pattern match"
diff --git a/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/no_statefulsets.pass.sh b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/no_statefulsets.pass.sh
new file mode 100644
index 000000000000..09523d5d5e80
--- /dev/null
+++ b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/no_statefulsets.pass.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+# remediation = none
+yum install -y jq
+
+kube_apipath="/kubernetes-api-resources"
+
+mkdir -p "$kube_apipath/apis/apps/v1/statefulsets"
+
+statefulset_apipath="/apis/apps/v1/statefulsets?limit=500"
+
+# This file assumes that we don't have any statefulsets.
+cat < "$kube_apipath$statefulset_apipath"
+{
+ "apiVersion": "v1",
+ "items": [],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": "",
+ "selfLink": ""
+ }
+}
+EOF
+
+
+jq_filter='[ .items[] | select(.metadata.name | test("{{.var_statefulsets_without_high_availability}}"; "") | not) | select (.spec.replicas == 0 or .spec.replicas == 1 | not) | select(.spec.template.spec.affinity.podAntiAffinity == null and .spec.template.spec.topologySpreadConstraints == null) | .metadata.namespace + "/" + .metadata.name ]'
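+# Note: with an empty item list the filter trivially yields an empty list, so the rule passes.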
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$statefulset_apipath#$(echo -n "$statefulset_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$statefulset_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/statefulsets_anti_affinity.pass.sh b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/statefulsets_anti_affinity.pass.sh
new file mode 100644
index 000000000000..1ee138aa2219
--- /dev/null
+++ b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/statefulsets_anti_affinity.pass.sh
@@ -0,0 +1,613 @@
+#!/bin/bash
+
+# remediation = none
+yum install -y jq
+
+kube_apipath="/kubernetes-api-resources"
+
+mkdir -p "$kube_apipath/apis/apps/v1/statefulsets"
+
+statefulset_apipath="/apis/apps/v1/statefulsets?limit=500"
+
+cat < "$kube_apipath$statefulset_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "apps/v1",
+ "kind": "StatefulSet",
+ "metadata": {
+ "annotations": {
+ "prometheus-operator-input-hash": "13692666772834160214"
+ },
+ "creationTimestamp": "2023-01-30T10:30:41Z",
+ "generation": 44,
+ "labels": {
+ "alertmanager": "main",
+ "app.kubernetes.io/component": "alert-router",
+ "app.kubernetes.io/instance": "main",
+ "app.kubernetes.io/name": "alertmanager",
+ "app.kubernetes.io/part-of": "openshift-monitoring",
+ "app.kubernetes.io/version": "0.24.0"
+ },
+ "name": "alertmanager-main",
+ "namespace": "openshift-monitoring",
+ "ownerReferences": [
+ {
+ "apiVersion": "monitoring.coreos.com/v1",
+ "blockOwnerDeletion": true,
+ "controller": true,
+ "kind": "Alertmanager",
+ "name": "main",
+ "uid": "ffe3937b-78f2-44b2-961f-71147eee311f"
+ }
+ ],
+ "resourceVersion": "1357676826",
+ "uid": "8f4fda56-13c8-41b6-aa84-c650030da9e2"
+ },
+ "spec": {
+ "podManagementPolicy": "Parallel",
+ "replicas": 2,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "alertmanager": "main",
+ "app.kubernetes.io/instance": "main",
+ "app.kubernetes.io/managed-by": "prometheus-operator",
+ "app.kubernetes.io/name": "alertmanager"
+ }
+ },
+ "serviceName": "alertmanager-operated",
+ "template": {
+ "metadata": {
+ "annotations": {
+ "kubectl.kubernetes.io/default-container": "alertmanager",
+ "target.workload.openshift.io/management": "{\"effect\": \"PreferredDuringScheduling\"}"
+ },
+ "creationTimestamp": null,
+ "labels": {
+ "alertmanager": "main",
+ "app.kubernetes.io/component": "alert-router",
+ "app.kubernetes.io/instance": "main",
+ "app.kubernetes.io/managed-by": "prometheus-operator",
+ "app.kubernetes.io/name": "alertmanager",
+ "app.kubernetes.io/part-of": "openshift-monitoring",
+ "app.kubernetes.io/version": "0.24.0"
+ }
+ },
+ "spec": {
+ "affinity": {
+ "podAntiAffinity": {
+ "requiredDuringSchedulingIgnoredDuringExecution": [
+ {
+ "labelSelector": {
+ "matchLabels": {
+ "app.kubernetes.io/component": "alert-router",
+ "app.kubernetes.io/instance": "main",
+ "app.kubernetes.io/name": "alertmanager",
+ "app.kubernetes.io/part-of": "openshift-monitoring"
+ }
+ },
+ "namespaces": [
+ "openshift-monitoring"
+ ],
+ "topologyKey": "kubernetes.io/hostname"
+ }
+ ]
+ }
+ },
+ "containers": [
+ {
+ "args": [
+ "--config.file=/etc/alertmanager/config_out/alertmanager.env.yaml",
+ "--storage.path=/alertmanager",
+ "--data.retention=120h",
+ "--cluster.listen-address=[$(POD_IP)]:9094",
+ "--web.listen-address=127.0.0.1:9093",
+ "--web.external-url=https://console-openshift-console.apps.ccloud-ocp-dev.ccloud.ninja/monitoring",
+ "--web.route-prefix=/",
+ "--cluster.peer=alertmanager-main-0.alertmanager-operated:9094",
+ "--cluster.peer=alertmanager-main-1.alertmanager-operated:9094",
+ "--cluster.reconnect-timeout=5m",
+ "--web.config.file=/etc/alertmanager/web_config/web-config.yaml"
+ ],
+ "env": [
+ {
+ "name": "POD_IP",
+ "valueFrom": {
+ "fieldRef": {
+ "apiVersion": "v1",
+ "fieldPath": "status.podIP"
+ }
+ }
+ }
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:738076ced738ea22a37704ba3e0dab4925ea85c0c16e41d33556818977358f50",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "alertmanager",
+ "ports": [
+ {
+ "containerPort": 9094,
+ "name": "mesh-tcp",
+ "protocol": "TCP"
+ },
+ {
+ "containerPort": 9094,
+ "name": "mesh-udp",
+ "protocol": "UDP"
+ }
+ ],
+ "resources": {
+ "requests": {
+ "cpu": "4m",
+ "memory": "40Mi"
+ }
+ },
+ "securityContext": {
+ "allowPrivilegeEscalation": false,
+ "capabilities": {
+ "drop": [
+ "ALL"
+ ]
+ },
+ "readOnlyRootFilesystem": true
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError",
+ "volumeMounts": [
+ {
+ "mountPath": "/etc/alertmanager/config",
+ "name": "config-volume"
+ },
+ {
+ "mountPath": "/etc/alertmanager/config_out",
+ "name": "config-out",
+ "readOnly": true
+ },
+ {
+ "mountPath": "/etc/alertmanager/certs",
+ "name": "tls-assets",
+ "readOnly": true
+ },
+ {
+ "mountPath": "/alertmanager",
+ "name": "alertmanager-main-db",
+ "subPath": "alertmanager-db"
+ },
+ {
+ "mountPath": "/etc/alertmanager/secrets/alertmanager-main-tls",
+ "name": "secret-alertmanager-main-tls",
+ "readOnly": true
+ },
+ {
+ "mountPath": "/etc/alertmanager/secrets/alertmanager-main-proxy",
+ "name": "secret-alertmanager-main-proxy",
+ "readOnly": true
+ },
+ {
+ "mountPath": "/etc/alertmanager/secrets/alertmanager-kube-rbac-proxy",
+ "name": "secret-alertmanager-kube-rbac-proxy",
+ "readOnly": true
+ },
+ {
+ "mountPath": "/etc/alertmanager/secrets/alertmanager-kube-rbac-proxy-metric",
+ "name": "secret-alertmanager-kube-rbac-proxy-metric",
+ "readOnly": true
+ },
+ {
+ "mountPath": "/etc/pki/ca-trust/extracted/pem/",
+ "name": "alertmanager-trusted-ca-bundle",
+ "readOnly": true
+ },
+ {
+ "mountPath": "/etc/alertmanager/web_config/web-config.yaml",
+ "name": "web-config",
+ "readOnly": true,
+ "subPath": "web-config.yaml"
+ }
+ ]
+ },
+ {
+ "args": [
+ "--listen-address=localhost:8080",
+ "--reload-url=http://localhost:9093/-/reload",
+ "--config-file=/etc/alertmanager/config/alertmanager.yaml.gz",
+ "--config-envsubst-file=/etc/alertmanager/config_out/alertmanager.env.yaml",
+ "--watched-dir=/etc/alertmanager/secrets/alertmanager-main-tls",
+ "--watched-dir=/etc/alertmanager/secrets/alertmanager-main-proxy",
+ "--watched-dir=/etc/alertmanager/secrets/alertmanager-kube-rbac-proxy",
+ "--watched-dir=/etc/alertmanager/secrets/alertmanager-kube-rbac-proxy-metric"
+ ],
+ "command": [
+ "/bin/prometheus-config-reloader"
+ ],
+ "env": [
+ {
+ "name": "POD_NAME",
+ "valueFrom": {
+ "fieldRef": {
+ "apiVersion": "v1",
+ "fieldPath": "metadata.name"
+ }
+ }
+ },
+ {
+ "name": "SHARD",
+ "value": "-1"
+ }
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3cfeba7a98901ea510f476268f0fc520f73329d6bac8939070f20cab36c235dc",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "config-reloader",
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "10Mi"
+ }
+ },
+ "securityContext": {
+ "allowPrivilegeEscalation": false,
+ "capabilities": {
+ "drop": [
+ "ALL"
+ ]
+ },
+ "readOnlyRootFilesystem": true
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError",
+ "volumeMounts": [
+ {
+ "mountPath": "/etc/alertmanager/config",
+ "name": "config-volume",
+ "readOnly": true
+ },
+ {
+ "mountPath": "/etc/alertmanager/config_out",
+ "name": "config-out"
+ },
+ {
+ "mountPath": "/etc/alertmanager/secrets/alertmanager-main-tls",
+ "name": "secret-alertmanager-main-tls",
+ "readOnly": true
+ },
+ {
+ "mountPath": "/etc/alertmanager/secrets/alertmanager-main-proxy",
+ "name": "secret-alertmanager-main-proxy",
+ "readOnly": true
+ },
+ {
+ "mountPath": "/etc/alertmanager/secrets/alertmanager-kube-rbac-proxy",
+ "name": "secret-alertmanager-kube-rbac-proxy",
+ "readOnly": true
+ },
+ {
+ "mountPath": "/etc/alertmanager/secrets/alertmanager-kube-rbac-proxy-metric",
+ "name": "secret-alertmanager-kube-rbac-proxy-metric",
+ "readOnly": true
+ }
+ ]
+ },
+ {
+ "args": [
+ "-provider=openshift",
+ "-https-address=:9095",
+ "-http-address=",
+ "-email-domain=*",
+ "-upstream=http://localhost:9093",
+ "-openshift-sar=[{\"resource\": \"namespaces\", \"verb\": \"get\"}, {\"resource\": \"alertmanagers\", \"resourceAPIGroup\": \"monitoring.coreos.com\", \"namespace\": \"openshift-monitoring\", \"verb\": \"patch\", \"resourceName\": \"non-existant\"}]",
+ "-openshift-delegate-urls={\"/\": {\"resource\": \"namespaces\", \"verb\": \"get\"}, \"/\": {\"resource\":\"alertmanagers\", \"group\": \"monitoring.coreos.com\", \"namespace\": \"openshift-monitoring\", \"verb\": \"patch\", \"name\": \"non-existant\"}}",
+ "-tls-cert=/etc/tls/private/tls.crt",
+ "-tls-key=/etc/tls/private/tls.key",
+ "-client-secret-file=/var/run/secrets/kubernetes.io/serviceaccount/token",
+ "-cookie-secret-file=/etc/proxy/secrets/session_secret",
+ "-openshift-service-account=alertmanager-main",
+ "-openshift-ca=/etc/pki/tls/cert.pem",
+ "-openshift-ca=/var/run/secrets/kubernetes.io/serviceaccount/ca.crt"
+ ],
+ "env": [
+ {
+ "name": "HTTP_PROXY"
+ },
+ {
+ "name": "HTTPS_PROXY"
+ },
+ {
+ "name": "NO_PROXY"
+ }
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8cee1c6d7316b2108cc2d0272ebf2932ee999c9eb05d5c6e296df362da58e9ce",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "alertmanager-proxy",
+ "ports": [
+ {
+ "containerPort": 9095,
+ "name": "web",
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "20Mi"
+ }
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError",
+ "volumeMounts": [
+ {
+ "mountPath": "/etc/tls/private",
+ "name": "secret-alertmanager-main-tls"
+ },
+ {
+ "mountPath": "/etc/proxy/secrets",
+ "name": "secret-alertmanager-main-proxy"
+ },
+ {
+ "mountPath": "/etc/pki/ca-trust/extracted/pem/",
+ "name": "alertmanager-trusted-ca-bundle",
+ "readOnly": true
+ }
+ ]
+ },
+ {
+ "args": [
+ "--secure-listen-address=0.0.0.0:9092",
+ "--upstream=http://127.0.0.1:9096",
+ "--config-file=/etc/kube-rbac-proxy/config.yaml",
+ "--tls-cert-file=/etc/tls/private/tls.crt",
+ "--tls-private-key-file=/etc/tls/private/tls.key",
+ "--tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256",
+ "--logtostderr=true",
+ "--tls-min-version=VersionTLS12"
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d63bf13113fa7224bdeb21f4b07d53dce96f9fcc955048b870a97e7c1d054e11",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "kube-rbac-proxy",
+ "ports": [
+ {
+ "containerPort": 9092,
+ "name": "tenancy",
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "15Mi"
+ }
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError",
+ "volumeMounts": [
+ {
+ "mountPath": "/etc/kube-rbac-proxy",
+ "name": "secret-alertmanager-kube-rbac-proxy"
+ },
+ {
+ "mountPath": "/etc/tls/private",
+ "name": "secret-alertmanager-main-tls"
+ }
+ ]
+ },
+ {
+ "args": [
+ "--secure-listen-address=0.0.0.0:9097",
+ "--upstream=http://127.0.0.1:9093",
+ "--config-file=/etc/kube-rbac-proxy/config.yaml",
+ "--tls-cert-file=/etc/tls/private/tls.crt",
+ "--tls-private-key-file=/etc/tls/private/tls.key",
+ "--tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256",
+ "--client-ca-file=/etc/tls/client/client-ca.crt",
+ "--logtostderr=true",
+ "--allow-paths=/metrics",
+ "--tls-min-version=VersionTLS12"
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d63bf13113fa7224bdeb21f4b07d53dce96f9fcc955048b870a97e7c1d054e11",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "kube-rbac-proxy-metric",
+ "ports": [
+ {
+ "containerPort": 9097,
+ "name": "metrics",
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "15Mi"
+ }
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError",
+ "volumeMounts": [
+ {
+ "mountPath": "/etc/kube-rbac-proxy",
+ "name": "secret-alertmanager-kube-rbac-proxy-metric",
+ "readOnly": true
+ },
+ {
+ "mountPath": "/etc/tls/private",
+ "name": "secret-alertmanager-main-tls",
+ "readOnly": true
+ },
+ {
+ "mountPath": "/etc/tls/client",
+ "name": "metrics-client-ca",
+ "readOnly": true
+ }
+ ]
+ },
+ {
+ "args": [
+ "--insecure-listen-address=127.0.0.1:9096",
+ "--upstream=http://127.0.0.1:9093",
+ "--label=namespace",
+ "--error-on-replace"
+ ],
+ "image": "quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:4626710ac6a341bf707b2d5be57607ebc39ddd9d300ca9496e40fcfc75f20f3e",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "prom-label-proxy",
+ "resources": {
+ "requests": {
+ "cpu": "1m",
+ "memory": "20Mi"
+ }
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "FallbackToLogsOnError"
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "nodeSelector": {
+ "node-role.kubernetes.io/infra": ""
+ },
+ "priorityClassName": "system-cluster-critical",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {
+ "fsGroup": 65534,
+ "runAsNonRoot": true,
+ "runAsUser": 65534
+ },
+ "serviceAccount": "alertmanager-main",
+ "serviceAccountName": "alertmanager-main",
+ "terminationGracePeriodSeconds": 120,
+ "tolerations": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra",
+ "operator": "Exists"
+ }
+ ],
+ "volumes": [
+ {
+ "name": "config-volume",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "alertmanager-main-generated"
+ }
+ },
+ {
+ "name": "tls-assets",
+ "projected": {
+ "defaultMode": 420,
+ "sources": [
+ {
+ "secret": {
+ "name": "alertmanager-main-tls-assets-0"
+ }
+ }
+ ]
+ }
+ },
+ {
+ "emptyDir": {},
+ "name": "config-out"
+ },
+ {
+ "name": "secret-alertmanager-main-tls",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "alertmanager-main-tls"
+ }
+ },
+ {
+ "name": "secret-alertmanager-main-proxy",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "alertmanager-main-proxy"
+ }
+ },
+ {
+ "name": "secret-alertmanager-kube-rbac-proxy",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "alertmanager-kube-rbac-proxy"
+ }
+ },
+ {
+ "name": "secret-alertmanager-kube-rbac-proxy-metric",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "alertmanager-kube-rbac-proxy-metric"
+ }
+ },
+ {
+ "name": "web-config",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "alertmanager-main-web-config"
+ }
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "name": "metrics-client-ca"
+ },
+ "name": "metrics-client-ca"
+ },
+ {
+ "configMap": {
+ "defaultMode": 420,
+ "items": [
+ {
+ "key": "ca-bundle.crt",
+ "path": "tls-ca-bundle.pem"
+ }
+ ],
+ "name": "alertmanager-trusted-ca-bundle-b4a61vnd2as9r",
+ "optional": true
+ },
+ "name": "alertmanager-trusted-ca-bundle"
+ }
+ ]
+ }
+ },
+ "updateStrategy": {
+ "type": "RollingUpdate"
+ },
+ "volumeClaimTemplates": [
+ {
+ "apiVersion": "v1",
+ "kind": "PersistentVolumeClaim",
+ "metadata": {
+ "creationTimestamp": null,
+ "name": "alertmanager-main-db"
+ },
+ "spec": {
+ "accessModes": [
+ "ReadWriteOnce"
+ ],
+ "resources": {
+ "requests": {
+ "storage": "10Gi"
+ }
+ },
+ "storageClassName": "ontapnas-ai-at-the-edge-central",
+ "volumeMode": "Filesystem"
+ },
+ "status": {
+ "phase": "Pending"
+ }
+ }
+ ]
+ }
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+
+jq_filter='[ .items[] | select(.metadata.name | test("{{.var_statefulsets_without_high_availability}}"; "") | not) | select (.spec.replicas == 0 or .spec.replicas == 1 | not) | select(.spec.template.spec.affinity.podAntiAffinity == null and .spec.template.spec.topologySpreadConstraints == null) | .metadata.namespace + "/" + .metadata.name ]'
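+# Note: alertmanager-main runs two replicas but defines a required podAntiAffinity rule, so the
+# filter above is expected to yield an empty list, which makes this a passing test.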
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$statefulset_apipath#$(echo -n "$statefulset_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$statefulset_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/statefulsets_excluded.pass.sh b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/statefulsets_excluded.pass.sh
new file mode 100644
index 000000000000..5c82ff035659
--- /dev/null
+++ b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/statefulsets_excluded.pass.sh
@@ -0,0 +1,197 @@
+#!/bin/bash
+
+# remediation = none
+yum install -y jq
+
+kube_apipath="/kubernetes-api-resources"
+
+mkdir -p "$kube_apipath/apis/apps/v1/statefulsets"
+
+statefulset_apipath="/apis/apps/v1/statefulsets?limit=500"
+
+cat < "$kube_apipath$statefulset_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "apps/v1",
+ "kind": "StatefulSet",
+ "metadata": {
+ "creationTimestamp": "2022-05-18T08:17:44Z",
+ "generation": 1,
+ "labels": {
+ "app.kubernetes.io/instance": "trivy",
+ "app.kubernetes.io/managed-by": "Helm",
+ "app.kubernetes.io/name": "trivy",
+ "app.kubernetes.io/version": "0.27.0",
+ "helm.sh/chart": "trivy-0.4.13"
+ },
+ "name": "trivy",
+ "namespace": "trivy",
+ "resourceVersion": "1345155135",
+ "uid": "3ff7b9c1-df8f-4531-8534-bf11bdd2124a"
+ },
+ "spec": {
+ "podManagementPolicy": "Parallel",
+ "replicas": 2,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app.kubernetes.io/instance": "trivy",
+ "app.kubernetes.io/name": "trivy"
+ }
+ },
+ "serviceName": "trivy",
+ "template": {
+ "metadata": {
+ "creationTimestamp": null,
+ "labels": {
+ "app.kubernetes.io/instance": "trivy",
+ "app.kubernetes.io/name": "trivy"
+ }
+ },
+ "spec": {
+ "automountServiceAccountToken": false,
+ "containers": [
+ {
+ "args": [
+ "server"
+ ],
+ "envFrom": [
+ {
+ "configMapRef": {
+ "name": "trivy"
+ }
+ },
+ {
+ "secretRef": {
+ "name": "trivy"
+ }
+ }
+ ],
+ "image": "docker.io/aquasec/trivy:0.27.0",
+ "imagePullPolicy": "IfNotPresent",
+ "livenessProbe": {
+ "failureThreshold": 10,
+ "httpGet": {
+ "path": "/healthz",
+ "port": "trivy-http",
+ "scheme": "HTTP"
+ },
+ "initialDelaySeconds": 5,
+ "periodSeconds": 10,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "name": "main",
+ "ports": [
+ {
+ "containerPort": 4954,
+ "name": "trivy-http",
+ "protocol": "TCP"
+ }
+ ],
+ "readinessProbe": {
+ "failureThreshold": 3,
+ "httpGet": {
+ "path": "/healthz",
+ "port": "trivy-http",
+ "scheme": "HTTP"
+ },
+ "initialDelaySeconds": 5,
+ "periodSeconds": 10,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "resources": {
+ "limits": {
+ "cpu": "1",
+ "memory": "1Gi"
+ },
+ "requests": {
+ "cpu": "200m",
+ "memory": "512Mi"
+ }
+ },
+ "securityContext": {
+ "privileged": false,
+ "readOnlyRootFilesystem": true
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "volumeMounts": [
+ {
+ "mountPath": "/tmp",
+ "name": "tmp-data"
+ },
+ {
+ "mountPath": "/home/scanner/.cache",
+ "name": "data"
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "trivy",
+ "serviceAccountName": "trivy",
+ "terminationGracePeriodSeconds": 30,
+ "volumes": [
+ {
+ "emptyDir": {},
+ "name": "tmp-data"
+ }
+ ]
+ }
+ },
+ "updateStrategy": {
+ "rollingUpdate": {
+ "partition": 0
+ },
+ "type": "RollingUpdate"
+ },
+ "volumeClaimTemplates": [
+ {
+ "apiVersion": "v1",
+ "kind": "PersistentVolumeClaim",
+ "metadata": {
+ "creationTimestamp": null,
+ "name": "data"
+ },
+ "spec": {
+ "accessModes": [
+ "ReadWriteOnce"
+ ],
+ "resources": {
+ "requests": {
+ "storage": "5Gi"
+ }
+ },
+ "volumeMode": "Filesystem"
+ },
+ "status": {
+ "phase": "Pending"
+ }
+ }
+ ]
+ }
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+
+jq_filter='[ .items[] | select(.metadata.name | test("{{.var_statefulsets_without_high_availability}}"; "") | not) | select (.spec.replicas == 0 or .spec.replicas == 1 | not) | select(.spec.template.spec.affinity.podAntiAffinity == null and .spec.template.spec.topologySpreadConstraints == null) | .metadata.namespace + "/" + .metadata.name ]'
+jq_filter_with_var='[ .items[] | select(.metadata.name | test("^trivy$"; "") | not) | select (.spec.replicas == 0 or .spec.replicas == 1 | not) | select(.spec.template.spec.affinity.podAntiAffinity == null and .spec.template.spec.topologySpreadConstraints == null) | .metadata.namespace + "/" + .metadata.name ]'
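+# Note: jq_filter_with_var mimics the filter after the tailored variable
+# var_statefulsets_without_high_availability has been set to ^trivy$; with the trivy statefulset
+# excluded, the result is expected to be an empty list, which makes this a passing test.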
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$statefulset_apipath#$(echo -n "$statefulset_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter_with_var" "$kube_apipath$statefulset_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/statefulsets_no_rules.fail.sh b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/statefulsets_no_rules.fail.sh
new file mode 100644
index 000000000000..e38a6dfc7e0a
--- /dev/null
+++ b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/statefulsets_no_rules.fail.sh
@@ -0,0 +1,197 @@
+#!/bin/bash
+
+# remediation = none
+yum install -y jq
+
+kube_apipath="/kubernetes-api-resources"
+
+mkdir -p "$kube_apipath/apis/apps/v1/statefulsets"
+
+statefulset_apipath="/apis/apps/v1/statefulsets?limit=500"
+
+cat < "$kube_apipath$statefulset_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "apps/v1",
+ "kind": "StatefulSet",
+ "metadata": {
+ "creationTimestamp": "2022-05-18T08:17:44Z",
+ "generation": 1,
+ "labels": {
+ "app.kubernetes.io/instance": "trivy",
+ "app.kubernetes.io/managed-by": "Helm",
+ "app.kubernetes.io/name": "trivy",
+ "app.kubernetes.io/version": "0.27.0",
+ "helm.sh/chart": "trivy-0.4.13"
+ },
+ "name": "trivy",
+ "namespace": "trivy",
+ "resourceVersion": "1345155135",
+ "uid": "3ff7b9c1-df8f-4531-8534-bf11bdd2124a"
+ },
+ "spec": {
+ "podManagementPolicy": "Parallel",
+ "replicas": 2,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app.kubernetes.io/instance": "trivy",
+ "app.kubernetes.io/name": "trivy"
+ }
+ },
+ "serviceName": "trivy",
+ "template": {
+ "metadata": {
+ "creationTimestamp": null,
+ "labels": {
+ "app.kubernetes.io/instance": "trivy",
+ "app.kubernetes.io/name": "trivy"
+ }
+ },
+ "spec": {
+ "automountServiceAccountToken": false,
+ "containers": [
+ {
+ "args": [
+ "server"
+ ],
+ "envFrom": [
+ {
+ "configMapRef": {
+ "name": "trivy"
+ }
+ },
+ {
+ "secretRef": {
+ "name": "trivy"
+ }
+ }
+ ],
+ "image": "docker.io/aquasec/trivy:0.27.0",
+ "imagePullPolicy": "IfNotPresent",
+ "livenessProbe": {
+ "failureThreshold": 10,
+ "httpGet": {
+ "path": "/healthz",
+ "port": "trivy-http",
+ "scheme": "HTTP"
+ },
+ "initialDelaySeconds": 5,
+ "periodSeconds": 10,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "name": "main",
+ "ports": [
+ {
+ "containerPort": 4954,
+ "name": "trivy-http",
+ "protocol": "TCP"
+ }
+ ],
+ "readinessProbe": {
+ "failureThreshold": 3,
+ "httpGet": {
+ "path": "/healthz",
+ "port": "trivy-http",
+ "scheme": "HTTP"
+ },
+ "initialDelaySeconds": 5,
+ "periodSeconds": 10,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "resources": {
+ "limits": {
+ "cpu": "1",
+ "memory": "1Gi"
+ },
+ "requests": {
+ "cpu": "200m",
+ "memory": "512Mi"
+ }
+ },
+ "securityContext": {
+ "privileged": false,
+ "readOnlyRootFilesystem": true
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "volumeMounts": [
+ {
+ "mountPath": "/tmp",
+ "name": "tmp-data"
+ },
+ {
+ "mountPath": "/home/scanner/.cache",
+ "name": "data"
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "trivy",
+ "serviceAccountName": "trivy",
+ "terminationGracePeriodSeconds": 30,
+ "volumes": [
+ {
+ "emptyDir": {},
+ "name": "tmp-data"
+ }
+ ]
+ }
+ },
+ "updateStrategy": {
+ "rollingUpdate": {
+ "partition": 0
+ },
+ "type": "RollingUpdate"
+ },
+ "volumeClaimTemplates": [
+ {
+ "apiVersion": "v1",
+ "kind": "PersistentVolumeClaim",
+ "metadata": {
+ "creationTimestamp": null,
+ "name": "data"
+ },
+ "spec": {
+ "accessModes": [
+ "ReadWriteOnce"
+ ],
+ "resources": {
+ "requests": {
+ "storage": "5Gi"
+ }
+ },
+ "volumeMode": "Filesystem"
+ },
+ "status": {
+ "phase": "Pending"
+ }
+ }
+ ]
+ }
+ }
+
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+
+jq_filter='[ .items[] | select(.metadata.name | test("{{.var_statefulsets_without_high_availability}}"; "") | not) | select (.spec.replicas == 0 or .spec.replicas == 1 | not) | select(.spec.template.spec.affinity.podAntiAffinity == null and .spec.template.spec.topologySpreadConstraints == null) | .metadata.namespace + "/" + .metadata.name ]'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$statefulset_apipath#$(echo -n "$statefulset_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$statefulset_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/statefulsets_single_replica.pass.sh b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/statefulsets_single_replica.pass.sh
new file mode 100644
index 000000000000..c22e0975f273
--- /dev/null
+++ b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/statefulsets_single_replica.pass.sh
@@ -0,0 +1,196 @@
+#!/bin/bash
+
+# remediation = none
+yum install -y jq
+
+kube_apipath="/kubernetes-api-resources"
+
+mkdir -p "$kube_apipath/apis/apps/v1/statefulsets"
+
+statefulset_apipath="/apis/apps/v1/statefulsets?limit=500"
+
+cat < "$kube_apipath$statefulset_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "apps/v1",
+ "kind": "StatefulSet",
+ "metadata": {
+ "creationTimestamp": "2022-05-18T08:17:44Z",
+ "generation": 1,
+ "labels": {
+ "app.kubernetes.io/instance": "trivy",
+ "app.kubernetes.io/managed-by": "Helm",
+ "app.kubernetes.io/name": "trivy",
+ "app.kubernetes.io/version": "0.27.0",
+ "helm.sh/chart": "trivy-0.4.13"
+ },
+ "name": "trivy",
+ "namespace": "trivy",
+ "resourceVersion": "1345155135",
+ "uid": "3ff7b9c1-df8f-4531-8534-bf11bdd2124a"
+ },
+ "spec": {
+ "podManagementPolicy": "Parallel",
+ "replicas": 1,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app.kubernetes.io/instance": "trivy",
+ "app.kubernetes.io/name": "trivy"
+ }
+ },
+ "serviceName": "trivy",
+ "template": {
+ "metadata": {
+ "creationTimestamp": null,
+ "labels": {
+ "app.kubernetes.io/instance": "trivy",
+ "app.kubernetes.io/name": "trivy"
+ }
+ },
+ "spec": {
+ "automountServiceAccountToken": false,
+ "containers": [
+ {
+ "args": [
+ "server"
+ ],
+ "envFrom": [
+ {
+ "configMapRef": {
+ "name": "trivy"
+ }
+ },
+ {
+ "secretRef": {
+ "name": "trivy"
+ }
+ }
+ ],
+ "image": "docker.io/aquasec/trivy:0.27.0",
+ "imagePullPolicy": "IfNotPresent",
+ "livenessProbe": {
+ "failureThreshold": 10,
+ "httpGet": {
+ "path": "/healthz",
+ "port": "trivy-http",
+ "scheme": "HTTP"
+ },
+ "initialDelaySeconds": 5,
+ "periodSeconds": 10,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "name": "main",
+ "ports": [
+ {
+ "containerPort": 4954,
+ "name": "trivy-http",
+ "protocol": "TCP"
+ }
+ ],
+ "readinessProbe": {
+ "failureThreshold": 3,
+ "httpGet": {
+ "path": "/healthz",
+ "port": "trivy-http",
+ "scheme": "HTTP"
+ },
+ "initialDelaySeconds": 5,
+ "periodSeconds": 10,
+ "successThreshold": 1,
+ "timeoutSeconds": 1
+ },
+ "resources": {
+ "limits": {
+ "cpu": "1",
+ "memory": "1Gi"
+ },
+ "requests": {
+ "cpu": "200m",
+ "memory": "512Mi"
+ }
+ },
+ "securityContext": {
+ "privileged": false,
+ "readOnlyRootFilesystem": true
+ },
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "volumeMounts": [
+ {
+ "mountPath": "/tmp",
+ "name": "tmp-data"
+ },
+ {
+ "mountPath": "/home/scanner/.cache",
+ "name": "data"
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "trivy",
+ "serviceAccountName": "trivy",
+ "terminationGracePeriodSeconds": 30,
+ "volumes": [
+ {
+ "emptyDir": {},
+ "name": "tmp-data"
+ }
+ ]
+ }
+ },
+ "updateStrategy": {
+ "rollingUpdate": {
+ "partition": 0
+ },
+ "type": "RollingUpdate"
+ },
+ "volumeClaimTemplates": [
+ {
+ "apiVersion": "v1",
+ "kind": "PersistentVolumeClaim",
+ "metadata": {
+ "creationTimestamp": null,
+ "name": "data"
+ },
+ "spec": {
+ "accessModes": [
+ "ReadWriteOnce"
+ ],
+ "resources": {
+ "requests": {
+ "storage": "5Gi"
+ }
+ },
+ "volumeMode": "Filesystem"
+ },
+ "status": {
+ "phase": "Pending"
+ }
+ }
+ ]
+ }
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+
+jq_filter='[ .items[] | select(.metadata.name | test("{{.var_statefulsets_without_high_availability}}"; "") | not) | select (.spec.replicas == 0 or .spec.replicas == 1 | not) | select(.spec.template.spec.affinity.podAntiAffinity == null and .spec.template.spec.topologySpreadConstraints == null) | .metadata.namespace + "/" + .metadata.name ]'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$statefulset_apipath#$(echo -n "$statefulset_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$statefulset_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/statefulsets_topology_spread_constraints.pass.sh b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/statefulsets_topology_spread_constraints.pass.sh
new file mode 100644
index 000000000000..fc0205c16125
--- /dev/null
+++ b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/tests/statefulsets_topology_spread_constraints.pass.sh
@@ -0,0 +1,112 @@
+#!/bin/bash
+
+# remediation = none
+yum install -y jq
+
+kube_apipath="/kubernetes-api-resources"
+
+mkdir -p "$kube_apipath/apis/apps/v1/statefulsets"
+
+statefulset_apipath="/apis/apps/v1/statefulsets?limit=500"
+
+# This fixture contains a StatefulSet that defines topology spread constraints.
+cat << EOF > "$kube_apipath$statefulset_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "apps/v1",
+ "kind": "StatefulSet",
+ "metadata": {
+ "creationTimestamp": "2024-07-15T10:14:13Z",
+ "generation": 1,
+ "labels": {
+ "app": "webserver"
+ },
+ "name": "webserver",
+ "namespace": "aiate",
+ "resourceVersion": "1363639972",
+ "uid": "3b008246-7297-4492-a093-d30102d94c9c"
+ },
+ "spec": {
+ "podManagementPolicy": "OrderedReady",
+ "replicas": 2,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app": "webserver"
+ }
+ },
+ "serviceName": "",
+ "template": {
+ "metadata": {
+ "creationTimestamp": null,
+ "labels": {
+ "app": "webserver"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "command": [
+ "nginx",
+ "-g",
+ "daemon off;"
+ ],
+ "image": "registry.access.redhat.com/ubi9/nginx-120:1-148.1719561315",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "webserver",
+ "ports": [
+ {
+ "containerPort": 80,
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {},
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File"
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "terminationGracePeriodSeconds": 30,
+ "topologySpreadConstraints": [
+ {
+ "labelSelector": {
+ "matchLabels": {
+ "app": "webserver"
+ }
+ },
+ "maxSkew": 1,
+ "topologyKey": "topology.kubernetes.io/zone",
+ "whenUnsatisfiable": "DoNotSchedule"
+ }
+ ]
+ }
+ },
+ "updateStrategy": {
+ "rollingUpdate": {
+ "partition": 0
+ },
+ "type": "RollingUpdate"
+ }
+ }
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+
+jq_filter='[ .items[] | select(.metadata.name | test("{{.var_statefulsets_without_high_availability}}"; "") | not) | select (.spec.replicas == 0 or .spec.replicas == 1 | not) | select(.spec.template.spec.affinity.podAntiAffinity == null and .spec.template.spec.topologySpreadConstraints == null) | .metadata.namespace + "/" + .metadata.name ]'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$statefulset_apipath#$(echo -n "$statefulset_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$statefulset_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/var_statefulsets_without_high_availability.var b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/var_statefulsets_without_high_availability.var
new file mode 100644
index 000000000000..88a90e530635
--- /dev/null
+++ b/applications/openshift/high-availability/anti_affinity_or_topology_spread_constraints_in_statefulset/var_statefulsets_without_high_availability.var
@@ -0,0 +1,14 @@
+documentation_complete: true
+
+title: 'Permitted StatefulSets without high availability'
+
+description: 'A regular expression matching the names of StatefulSets that are exempted from the rules for high availability and even spreading across nodes and zones'
+
+type: string
+
+operator: equals
+
+interactive: false
+
+options:
+ default: ""
diff --git a/applications/openshift/high-availability/control_plane_nodes_in_three_zones/rule.yml b/applications/openshift/high-availability/control_plane_nodes_in_three_zones/rule.yml
new file mode 100644
index 000000000000..e0f3c246d442
--- /dev/null
+++ b/applications/openshift/high-availability/control_plane_nodes_in_three_zones/rule.yml
@@ -0,0 +1,54 @@
+documentation_complete: true
+
+title: 'Ensure control plane / master nodes are distributed across three failure zones'
+
+description: |-
+ Distributing Kubernetes control plane nodes across failure zones enhances security by mitigating
+ the risk of a single point of failure and reducing the impact of API unavailability or attacks
+ targeting a specific zone. Maintaining three instances of etcd across different failure zones
+ ensures fault tolerance and enables the system to reach quorum, thereby safeguarding critical data
+ integrity and ensuring continued operation even in the event of zone failures.
+
+rationale: |-
+ Distributing Kubernetes control plane nodes across failure zones is crucial for enhancing overall
+ system resilience and security. By spreading control plane components across different zones,
+ the system becomes more fault-tolerant, reducing the risk of widespread outages due to failures or
+ attacks in a single zone. Having multiple instances of etcd spread across these zones ensures data
+ integrity and availability, as it requires a quorum of nodes to reach consensus.
+ With three zones, Kubernetes can achieve a quorum with a simple majority (i.e., two out of three)
+ for critical components like etcd, ensuring system stability even if one zone fails.
+ Failure zones are marked on nodes using a well-known label called "topology.kubernetes.io/zone".
+ This label is automatically assigned to each node by cloud providers but might need to be managed
+ manually in other environments.
+
+identifiers: {}
+
+references:
+ bsi: APP.4.4.A19
+
+severity: medium
+
+ocil_clause: 'Kubernetes control plane not distributed across three failure zones'
+
+ocil: |-
+ Run the following command to determine the failure zones of the control plane nodes, indicated by
+ the label topology.kubernetes.io/zone:
+ $ oc get nodes --selector "node-role.kubernetes.io/master" -o custom-columns='NAME:.metadata.name,ZONE:.metadata.labels.topology\.kubernetes\.io/zone'
+ Make sure that three distinct zones are listed.
+
+{{% set jqfilter = '.items | map(select(.metadata.labels["node-role.kubernetes.io/master"] == "") | .metadata.labels["topology.kubernetes.io/zone"]) | unique | length' %}}
+
+warnings:
+- general: |-
+ {{{ openshift_filtered_cluster_setting({'/api/v1/nodes': jqfilter}) | indent(4) }}}
+
+template:
+ name: yamlfile_value
+ vars:
+ ocp_data: "true"
+ filepath: |-
+ {{{ openshift_filtered_path('/api/v1/nodes', jqfilter) }}}
+ yamlpath: "$"
+ values:
+ - value: 3
+ type: int
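Outside the Compliance Operator, the same jqfilter can be applied directly to live node data; this is only a convenience sketch and assumes an authenticated oc session:

    # Count the distinct failure zones of control plane nodes; the rule expects exactly 3.
    oc get nodes -o json | jq '.items
        | map(select(.metadata.labels["node-role.kubernetes.io/master"] == "")
              | .metadata.labels["topology.kubernetes.io/zone"])
        | unique | length'
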
diff --git a/applications/openshift/high-availability/control_plane_nodes_in_three_zones/tests/no_zones.fail.sh b/applications/openshift/high-availability/control_plane_nodes_in_three_zones/tests/no_zones.fail.sh
new file mode 100644
index 000000000000..8df992530675
--- /dev/null
+++ b/applications/openshift/high-availability/control_plane_nodes_in_three_zones/tests/no_zones.fail.sh
@@ -0,0 +1,152 @@
+#!/bin/bash
+# remediation = none
+# packages = jq
+
+kube_apipath="/kubernetes-api-resources"
+mkdir -p "$kube_apipath/api/v1"
+nodes_apipath="/api/v1/nodes"
+
+cat < "$kube_apipath$nodes_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:23:02Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos"
+ },
+ "name": "ocp-control1.domain.local",
+ "resourceVersion": "1192119588",
+ "uid": "c0aa2f3d-71ed-428d-9d11-4824f0e914da"
+ },
+ "spec": {
+ "podCIDR": "10.128.0.0/24",
+ "podCIDRs": [
+ "10.128.0.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:24:11Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos"
+ },
+ "name": "ocp-control2.domain.local",
+ "resourceVersion": "1192119593",
+ "uid": "33735f94-a745-4d7d-8707-73df67cbc8e1"
+ },
+ "spec": {
+ "podCIDR": "10.128.1.0/24",
+ "podCIDRs": [
+ "10.128.1.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:25:24Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos"
+ },
+ "name": "ocp-control3.domain.local",
+ "resourceVersion": "1192117923",
+ "uid": "ffd0364a-b48d-4b53-bb69-47568e6511b5"
+ },
+ "spec": {
+ "podCIDR": "10.128.2.0/24",
+ "podCIDRs": [
+ "10.128.2.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+jq_filter='.items | map(select(.metadata.labels["node-role.kubernetes.io/master"] == "") | .metadata.labels["topology.kubernetes.io/zone"]) | unique | length'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$nodes_apipath#$(echo -n "$nodes_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$nodes_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/control_plane_nodes_in_three_zones/tests/ocp4/e2e.yml b/applications/openshift/high-availability/control_plane_nodes_in_three_zones/tests/ocp4/e2e.yml
new file mode 100644
index 000000000000..f426dc3d7ea4
--- /dev/null
+++ b/applications/openshift/high-availability/control_plane_nodes_in_three_zones/tests/ocp4/e2e.yml
@@ -0,0 +1,2 @@
+---
+default_result: FAIL
diff --git a/applications/openshift/high-availability/control_plane_nodes_in_three_zones/tests/three_zones.pass.sh b/applications/openshift/high-availability/control_plane_nodes_in_three_zones/tests/three_zones.pass.sh
new file mode 100644
index 000000000000..bdc12ecdd397
--- /dev/null
+++ b/applications/openshift/high-availability/control_plane_nodes_in_three_zones/tests/three_zones.pass.sh
@@ -0,0 +1,158 @@
+#!/bin/bash
+# remediation = none
+# packages = jq
+
+kube_apipath="/kubernetes-api-resources"
+mkdir -p "$kube_apipath/api/v1"
+nodes_apipath="/api/v1/nodes"
+
+cat < "$kube_apipath$nodes_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:23:02Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1b"
+ },
+ "name": "ocp-control1.domain.local",
+ "resourceVersion": "1192119588",
+ "uid": "c0aa2f3d-71ed-428d-9d11-4824f0e914da"
+ },
+ "spec": {
+ "podCIDR": "10.128.0.0/24",
+ "podCIDRs": [
+ "10.128.0.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:24:11Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-control2.domain.local",
+ "resourceVersion": "1192119593",
+ "uid": "33735f94-a745-4d7d-8707-73df67cbc8e1"
+ },
+ "spec": {
+ "podCIDR": "10.128.1.0/24",
+ "podCIDRs": [
+ "10.128.1.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:25:24Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1c"
+ },
+ "name": "ocp-control3.domain.local",
+ "resourceVersion": "1192117923",
+ "uid": "ffd0364a-b48d-4b53-bb69-47568e6511b5"
+ },
+ "spec": {
+ "podCIDR": "10.128.2.0/24",
+ "podCIDRs": [
+ "10.128.2.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+jq_filter='.items | map(select(.metadata.labels["node-role.kubernetes.io/master"] == "") | .metadata.labels["topology.kubernetes.io/zone"]) | unique | length'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$nodes_apipath#$(echo -n "$nodes_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$nodes_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/control_plane_nodes_in_three_zones/tests/two_nodes.fail.sh b/applications/openshift/high-availability/control_plane_nodes_in_three_zones/tests/two_nodes.fail.sh
new file mode 100644
index 000000000000..ee46daceb6ad
--- /dev/null
+++ b/applications/openshift/high-availability/control_plane_nodes_in_three_zones/tests/two_nodes.fail.sh
@@ -0,0 +1,115 @@
+#!/bin/bash
+# remediation = none
+# packages = jq
+
+kube_apipath="/kubernetes-api-resources"
+mkdir -p "$kube_apipath/api/v1"
+nodes_apipath="/api/v1/nodes"
+
+cat < "$kube_apipath$nodes_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:23:02Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-control1.domain.local",
+ "resourceVersion": "1192119588",
+ "uid": "c0aa2f3d-71ed-428d-9d11-4824f0e914da"
+ },
+ "spec": {
+ "podCIDR": "10.128.0.0/24",
+ "podCIDRs": [
+ "10.128.0.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:24:11Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1b"
+ },
+ "name": "ocp-control2.domain.local",
+ "resourceVersion": "1192119593",
+ "uid": "33735f94-a745-4d7d-8707-73df67cbc8e1"
+ },
+ "spec": {
+ "podCIDR": "10.128.1.0/24",
+ "podCIDRs": [
+ "10.128.1.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+jq_filter='.items | map(select(.metadata.labels["node-role.kubernetes.io/master"] == "") | .metadata.labels["topology.kubernetes.io/zone"]) | unique | length'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$nodes_apipath#$(echo -n "$nodes_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$nodes_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/control_plane_nodes_in_three_zones/tests/two_zones.fail.sh b/applications/openshift/high-availability/control_plane_nodes_in_three_zones/tests/two_zones.fail.sh
new file mode 100644
index 000000000000..0370a61b65b6
--- /dev/null
+++ b/applications/openshift/high-availability/control_plane_nodes_in_three_zones/tests/two_zones.fail.sh
@@ -0,0 +1,158 @@
+#!/bin/bash
+# remediation = none
+# packages = jq
+
+kube_apipath="/kubernetes-api-resources"
+mkdir -p "$kube_apipath/api/v1"
+nodes_apipath="/api/v1/nodes"
+
+cat < "$kube_apipath$nodes_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:23:02Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-control1.domain.local",
+ "resourceVersion": "1192119588",
+ "uid": "c0aa2f3d-71ed-428d-9d11-4824f0e914da"
+ },
+ "spec": {
+ "podCIDR": "10.128.0.0/24",
+ "podCIDRs": [
+ "10.128.0.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:24:11Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-control2.domain.local",
+ "resourceVersion": "1192119593",
+ "uid": "33735f94-a745-4d7d-8707-73df67cbc8e1"
+ },
+ "spec": {
+ "podCIDR": "10.128.1.0/24",
+ "podCIDRs": [
+ "10.128.1.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:25:24Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1c"
+ },
+ "name": "ocp-control3.domain.local",
+ "resourceVersion": "1192117923",
+ "uid": "ffd0364a-b48d-4b53-bb69-47568e6511b5"
+ },
+ "spec": {
+ "podCIDR": "10.128.2.0/24",
+ "podCIDRs": [
+ "10.128.2.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+jq_filter='.items | map(select(.metadata.labels["node-role.kubernetes.io/master"] == "") | .metadata.labels["topology.kubernetes.io/zone"]) | unique | length'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$nodes_apipath#$(echo -n "$nodes_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$nodes_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/group.yml b/applications/openshift/high-availability/group.yml
new file mode 100644
index 000000000000..def8ee5a6489
--- /dev/null
+++ b/applications/openshift/high-availability/group.yml
@@ -0,0 +1,6 @@
+documentation_complete: true
+
+
+title: 'OpenShift - High Availability Practices'
+
+description: 'Contains evaluations for practices to achieve high availability in an OpenShift environment.'
diff --git a/applications/openshift/high-availability/infra_nodes_in_two_zones_or_more/rule.yml b/applications/openshift/high-availability/infra_nodes_in_two_zones_or_more/rule.yml
new file mode 100644
index 000000000000..ece22dd411bf
--- /dev/null
+++ b/applications/openshift/high-availability/infra_nodes_in_two_zones_or_more/rule.yml
@@ -0,0 +1,54 @@
+documentation_complete: true
+
+title: 'Ensure infrastructure nodes are distributed across two or more failure zones'
+
+description: |-
+ Distributing Kubernetes infrastructure nodes across failure zones enhances security by mitigating
+ the risk of a single point of failure and reducing the impact of infrastructure service
+ unavailability or attacks targeting a specific zone.
+
+rationale: |-
+ Distributing Kubernetes infrastructure nodes across failure zones is crucial for enhancing overall
+ system resilience and security. By spreading infrastructure components across different zones,
+ the system becomes more fault-tolerant, reducing the risk of widespread outages due to failures or
+ attacks in a single zone.
+ For infrastructure nodes, two failure zones are often deemed sufficient, because infrastructure
+ services do not depend on quorum-based coordination in the way etcd does.
+ Services such as Ingress load balancers, logging, or monitoring systems usually operate
+ independently across zones without requiring coordination for data consistency or failover.
+ Failure zones are marked on nodes using a well-known label called "topology.kubernetes.io/zone".
+ This label is automatically assigned to each node by cloud providers but might need to be managed
+ manually in other environments.
+
+identifiers: {}
+
+references:
+ bsi: APP.4.4.A19
+
+severity: medium
+
+ocil_clause: 'Kubernetes infrastructure nodes not distributed across at least two failure zones'
+
+ocil: |-
+ Run the following command to determine the failure zones of the infrastructure nodes, indicated by
+ the label topology.kubernetes.io/zone:
+ $ oc get nodes --selector "node-role.kubernetes.io/infra" -o custom-columns='NAME:.metadata.name,ZONE:.metadata.labels.topology\.kubernetes\.io/zone'
+ Make sure that at least two distinct zones are listed.
+
+{{% set jqfilter = '.items | map(select(.metadata.labels["node-role.kubernetes.io/infra"] == "") | .metadata.labels["topology.kubernetes.io/zone"]) | unique | length' %}}
+
+warnings:
+- general: |-
+ {{{ openshift_filtered_cluster_setting({'/api/v1/nodes': jqfilter}) | indent(4) }}}
+
+template:
+ name: yamlfile_value
+ vars:
+ ocp_data: "true"
+ filepath: |-
+ {{{ openshift_filtered_path('/api/v1/nodes', jqfilter) }}}
+ yamlpath: "$"
+ values:
+ - value: 2
+ type: int
+ operation: "greater than or equal"
diff --git a/applications/openshift/high-availability/infra_nodes_in_two_zones_or_more/tests/no_infra_nodes.pass.sh b/applications/openshift/high-availability/infra_nodes_in_two_zones_or_more/tests/no_infra_nodes.pass.sh
new file mode 100644
index 000000000000..488be7409293
--- /dev/null
+++ b/applications/openshift/high-availability/infra_nodes_in_two_zones_or_more/tests/no_infra_nodes.pass.sh
@@ -0,0 +1,324 @@
+#!/bin/bash
+# remediation = none
+# packages = jq
+
+kube_apipath="/kubernetes-api-resources"
+mkdir -p "$kube_apipath/api/v1"
+nodes_apipath="/api/v1/nodes"
+
+cat < "$kube_apipath$nodes_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:23:02Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1b"
+ },
+ "name": "ocp-control1.domain.local",
+ "resourceVersion": "1192119588",
+ "uid": "c0aa2f3d-71ed-428d-9d11-4824f0e914da"
+ },
+ "spec": {
+ "podCIDR": "10.128.0.0/24",
+ "podCIDRs": [
+ "10.128.0.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:24:11Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-control2.domain.local",
+ "resourceVersion": "1192119593",
+ "uid": "33735f94-a745-4d7d-8707-73df67cbc8e1"
+ },
+ "spec": {
+ "podCIDR": "10.128.1.0/24",
+ "podCIDRs": [
+ "10.128.1.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:25:24Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1c"
+ },
+ "name": "ocp-control3.domain.local",
+ "resourceVersion": "1192117923",
+ "uid": "ffd0364a-b48d-4b53-bb69-47568e6511b5"
+ },
+ "spec": {
+ "podCIDR": "10.128.2.0/24",
+ "podCIDRs": [
+ "10.128.2.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:48:16Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/infra": "",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-worker1.domain.local",
+ "resourceVersion": "1192122216",
+ "uid": "1667ec5a-ca3d-4994-88bd-27da3644e338"
+ },
+ "spec": {
+ "podCIDR": "10.128.5.0/24",
+ "podCIDRs": [
+ "10.128.5.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:48:12Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/infra": "",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1b"
+ },
+ "name": "ocp-worker2.domain.local",
+ "resourceVersion": "1192122353",
+ "uid": "bd56f83c-e625-4365-a838-47fa496b7d93"
+ },
+ "spec": {
+ "podCIDR": "10.128.4.0/24",
+ "podCIDRs": [
+ "10.128.4.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:47:56Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/infra": "",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1c"
+ },
+ "name": "ocp-worker3.domain.local",
+ "resourceVersion": "1192119492",
+ "uid": "65395ca7-8181-4a0b-95cf-128922f105f5"
+ },
+ "spec": {
+ "podCIDR": "10.128.3.0/24",
+ "podCIDRs": [
+ "10.128.3.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-14T07:25:59Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker4.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos"
+ },
+ "name": "ocp-worker4.domain.local",
+ "resourceVersion": "1192119420",
+ "uid": "6e993021-17e9-4945-9c46-65e60c90c65a"
+ },
+ "spec": {}
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+jq_filter='.items | map(select(.metadata.labels["node-role.kubernetes.io/infra"] == "") | .metadata.labels["topology.kubernetes.io/zone"]) | unique | length'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$nodes_apipath#$(echo -n "$nodes_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$nodes_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/infra_nodes_in_two_zones_or_more/tests/ocp4/e2e.yml b/applications/openshift/high-availability/infra_nodes_in_two_zones_or_more/tests/ocp4/e2e.yml
new file mode 100644
index 000000000000..f426dc3d7ea4
--- /dev/null
+++ b/applications/openshift/high-availability/infra_nodes_in_two_zones_or_more/tests/ocp4/e2e.yml
@@ -0,0 +1,2 @@
+---
+default_result: FAIL
diff --git a/applications/openshift/high-availability/infra_nodes_in_two_zones_or_more/tests/one_zone.fail.sh b/applications/openshift/high-availability/infra_nodes_in_two_zones_or_more/tests/one_zone.fail.sh
new file mode 100644
index 000000000000..4553f6e3f358
--- /dev/null
+++ b/applications/openshift/high-availability/infra_nodes_in_two_zones_or_more/tests/one_zone.fail.sh
@@ -0,0 +1,324 @@
+#!/bin/bash
+# remediation = none
+# packages = jq
+
+kube_apipath="/kubernetes-api-resources"
+mkdir -p "$kube_apipath/api/v1"
+nodes_apipath="/api/v1/nodes"
+
+cat < "$kube_apipath$nodes_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:23:02Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1b"
+ },
+ "name": "ocp-control1.domain.local",
+ "resourceVersion": "1192119588",
+ "uid": "c0aa2f3d-71ed-428d-9d11-4824f0e914da"
+ },
+ "spec": {
+ "podCIDR": "10.128.0.0/24",
+ "podCIDRs": [
+ "10.128.0.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:24:11Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-control2.domain.local",
+ "resourceVersion": "1192119593",
+ "uid": "33735f94-a745-4d7d-8707-73df67cbc8e1"
+ },
+ "spec": {
+ "podCIDR": "10.128.1.0/24",
+ "podCIDRs": [
+ "10.128.1.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:25:24Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1c"
+ },
+ "name": "ocp-control3.domain.local",
+ "resourceVersion": "1192117923",
+ "uid": "ffd0364a-b48d-4b53-bb69-47568e6511b5"
+ },
+ "spec": {
+ "podCIDR": "10.128.2.0/24",
+ "podCIDRs": [
+ "10.128.2.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:48:16Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/infra": "",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-worker1.domain.local",
+ "resourceVersion": "1192122216",
+ "uid": "1667ec5a-ca3d-4994-88bd-27da3644e338"
+ },
+ "spec": {
+ "podCIDR": "10.128.5.0/24",
+ "podCIDRs": [
+ "10.128.5.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:48:12Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/infra": "",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-worker2.domain.local",
+ "resourceVersion": "1192122353",
+ "uid": "bd56f83c-e625-4365-a838-47fa496b7d93"
+ },
+ "spec": {
+ "podCIDR": "10.128.4.0/24",
+ "podCIDRs": [
+ "10.128.4.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:47:56Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/infra": "",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-worker3.domain.local",
+ "resourceVersion": "1192119492",
+ "uid": "65395ca7-8181-4a0b-95cf-128922f105f5"
+ },
+ "spec": {
+ "podCIDR": "10.128.3.0/24",
+ "podCIDRs": [
+ "10.128.3.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-14T07:25:59Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker4.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos"
+ },
+ "name": "ocp-worker4.domain.local",
+ "resourceVersion": "1192119420",
+ "uid": "6e993021-17e9-4945-9c46-65e60c90c65a"
+ },
+ "spec": {}
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+jq_filter='.items | map(select(.metadata.labels["node-role.kubernetes.io/infra"] == "") | .metadata.labels["topology.kubernetes.io/zone"]) | unique | length'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$nodes_apipath#$(echo -n "$nodes_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$nodes_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/infra_nodes_in_two_zones_or_more/tests/three_zones.pass.sh b/applications/openshift/high-availability/infra_nodes_in_two_zones_or_more/tests/three_zones.pass.sh
new file mode 100644
index 000000000000..488be7409293
--- /dev/null
+++ b/applications/openshift/high-availability/infra_nodes_in_two_zones_or_more/tests/three_zones.pass.sh
@@ -0,0 +1,324 @@
+#!/bin/bash
+# remediation = none
+# packages = jq
+
+kube_apipath="/kubernetes-api-resources"
+mkdir -p "$kube_apipath/api/v1"
+nodes_apipath="/api/v1/nodes"
+
+cat < "$kube_apipath$nodes_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:23:02Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1b"
+ },
+ "name": "ocp-control1.domain.local",
+ "resourceVersion": "1192119588",
+ "uid": "c0aa2f3d-71ed-428d-9d11-4824f0e914da"
+ },
+ "spec": {
+ "podCIDR": "10.128.0.0/24",
+ "podCIDRs": [
+ "10.128.0.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:24:11Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-control2.domain.local",
+ "resourceVersion": "1192119593",
+ "uid": "33735f94-a745-4d7d-8707-73df67cbc8e1"
+ },
+ "spec": {
+ "podCIDR": "10.128.1.0/24",
+ "podCIDRs": [
+ "10.128.1.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:25:24Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1c"
+ },
+ "name": "ocp-control3.domain.local",
+ "resourceVersion": "1192117923",
+ "uid": "ffd0364a-b48d-4b53-bb69-47568e6511b5"
+ },
+ "spec": {
+ "podCIDR": "10.128.2.0/24",
+ "podCIDRs": [
+ "10.128.2.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:48:16Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/infra": "",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-worker1.domain.local",
+ "resourceVersion": "1192122216",
+ "uid": "1667ec5a-ca3d-4994-88bd-27da3644e338"
+ },
+ "spec": {
+ "podCIDR": "10.128.5.0/24",
+ "podCIDRs": [
+ "10.128.5.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:48:12Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/infra": "",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1b"
+ },
+ "name": "ocp-worker2.domain.local",
+ "resourceVersion": "1192122353",
+ "uid": "bd56f83c-e625-4365-a838-47fa496b7d93"
+ },
+ "spec": {
+ "podCIDR": "10.128.4.0/24",
+ "podCIDRs": [
+ "10.128.4.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:47:56Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/infra": "",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1c"
+ },
+ "name": "ocp-worker3.domain.local",
+ "resourceVersion": "1192119492",
+ "uid": "65395ca7-8181-4a0b-95cf-128922f105f5"
+ },
+ "spec": {
+ "podCIDR": "10.128.3.0/24",
+ "podCIDRs": [
+ "10.128.3.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/infra"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-14T07:25:59Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker4.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos"
+ },
+ "name": "ocp-worker4.domain.local",
+ "resourceVersion": "1192119420",
+ "uid": "6e993021-17e9-4945-9c46-65e60c90c65a"
+ },
+ "spec": {}
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+jq_filter='.items | map(select(.metadata.labels["node-role.kubernetes.io/infra"] == "") | .metadata.labels["topology.kubernetes.io/zone"]) | unique | length'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$nodes_apipath#$(echo -n "$nodes_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$nodes_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/multiple_nodes_in_every_role/rule.yml b/applications/openshift/high-availability/multiple_nodes_in_every_role/rule.yml
new file mode 100644
index 000000000000..cc42e73cadb8
--- /dev/null
+++ b/applications/openshift/high-availability/multiple_nodes_in_every_role/rule.yml
@@ -0,0 +1,48 @@
+documentation_complete: true
+
+title: 'Ensure every MachineConfigPool consists of more than one node'
+
+description: |-
+ To ensure that workloads can be provisioned in a highly available manner, every node role should
+ consist of more than one node. This enables workloads to be scheduled across multiple nodes and
+ to stay available if one node of a role becomes unavailable. Different node roles may exist to
+ isolate control plane, infrastructure, and application workloads. There might be additional use
+ cases that call for further node roles to achieve stronger isolation.
+
+rationale: |-
+ To ensure that workloads can be provisioned in a highly available manner, every node role should
+ consist of more than one node. This enables workloads to be scheduled across multiple nodes and
+ to stay available if one node of a role becomes unavailable.
+
+{{% set jqfilter = '[.items[] | select(.status.machineCount == 1 or .status.machineCount == 0) | .metadata.name]' %}}
+
+ocil_clause: 'MachineConfigPools with fewer than two nodes exist'
+
+ocil: |-
+ Run the following command to retrieve a list of MachineConfigPools that have fewer than two nodes.
+ $ oc get machineconfigpools -o json | jq '{{{ jqfilter }}}'
+ Make sure that the resulting list is empty.
+
+identifiers: {}
+
+references:
+ bsi: APP.4.4.A19
+
+severity: medium
+
+warnings:
+- general: |-
+ {{{ openshift_filtered_cluster_setting({'/apis/machineconfiguration.openshift.io/v1/machineconfigpools': jqfilter}) | indent(4) }}}
+
+template:
+ name: yamlfile_value
+ vars:
+ ocp_data: "true"
+ filepath: |-
+ {{{ openshift_filtered_path('/apis/machineconfiguration.openshift.io/v1/machineconfigpools', jqfilter) }}}
+ yamlpath: "[:]"
+ check_existence: "none_exist"
+ entity_check: "all"
+ values:
+ - value: "(.*?)"
+ operation: "pattern match"
diff --git a/applications/openshift/high-availability/multiple_nodes_in_every_role/tests/master_infra_three_nodes.pass.sh b/applications/openshift/high-availability/multiple_nodes_in_every_role/tests/master_infra_three_nodes.pass.sh
new file mode 100644
index 000000000000..9691ae016468
--- /dev/null
+++ b/applications/openshift/high-availability/multiple_nodes_in_every_role/tests/master_infra_three_nodes.pass.sh
@@ -0,0 +1,363 @@
+#!/bin/bash
+# remediation = none
+# packages = jq
+
+kube_apipath="/kubernetes-api-resources"
+mkdir -p "$kube_apipath/apis/machineconfiguration.openshift.io/v1"
+master_apipath="/apis/machineconfiguration.openshift.io/v1/machineconfigpools"
+
+cat < "$kube_apipath$master_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfigPool",
+ "metadata": {
+ "creationTimestamp": "2021-01-04T14:27:26Z",
+ "generation": 28,
+ "labels": {
+ "machineconfiguration.openshift.io/mco-built-in": "",
+ "operator.machineconfiguration.openshift.io/required-for-upgrade": "",
+ "pools.operator.machineconfiguration.openshift.io/master": ""
+ },
+ "name": "master",
+ "resourceVersion": "1155401403",
+ "uid": "4ae68800-4d14-4d0e-a2b1-7104f28bf80a"
+ },
+ "spec": {
+ "configuration": {
+ "name": "rendered-master-20de05f95332a16cf0e41fc15fd58039",
+ "source": [
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "00-master"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-master-container-runtime"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-master-kubelet"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-chrony-configuration"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-crio-add-inheritable-capabilities"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-crio-seccomp-use-default"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-registries"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-ssh"
+ }
+ ]
+ },
+ "machineConfigSelector": {
+ "matchLabels": {
+ "machineconfiguration.openshift.io/role": "master"
+ }
+ },
+ "nodeSelector": {
+ "matchLabels": {
+ "node-role.kubernetes.io/master": ""
+ }
+ },
+ "paused": false
+ },
+ "status": {
+ "conditions": [
+ {
+ "lastTransitionTime": "2023-11-07T11:00:52Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "NodeDegraded"
+ },
+ {
+ "lastTransitionTime": "2024-02-29T14:42:13Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "RenderDegraded"
+ },
+ {
+ "lastTransitionTime": "2024-02-29T14:42:16Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "Degraded"
+ },
+ {
+ "lastTransitionTime": "2024-03-03T22:26:49Z",
+ "message": "All nodes are updated with rendered-master-20de05f95332a16cf0e41fc15fd58039",
+ "reason": "",
+ "status": "True",
+ "type": "Updated"
+ },
+ {
+ "lastTransitionTime": "2024-03-03T22:26:49Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "Updating"
+ }
+ ],
+ "configuration": {
+ "name": "rendered-master-20de05f95332a16cf0e41fc15fd58039",
+ "source": [
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "00-master"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-master-container-runtime"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-master-kubelet"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-chrony-configuration"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-crio-add-inheritable-capabilities"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-crio-seccomp-use-default"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-registries"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-ssh"
+ }
+ ]
+ },
+ "degradedMachineCount": 0,
+ "machineCount": 3,
+ "observedGeneration": 28,
+ "readyMachineCount": 3,
+ "unavailableMachineCount": 0,
+ "updatedMachineCount": 3
+ }
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfigPool",
+ "metadata": {
+ "annotations": {
+ "kubectl.kubernetes.io/last-applied-configuration": "{\"apiVersion\":\"machineconfiguration.openshift.io/v1\",\"kind\":\"MachineConfigPool\",\"metadata\":{\"annotations\":{},\"labels\":{\"app.kubernetes.io/instance\":\"infra-nodes\"},\"name\":\"infra\"},\"spec\":{\"machineConfigSelector\":{\"matchExpressions\":[{\"key\":\"machineconfiguration.openshift.io/role\",\"operator\":\"In\",\"values\":[\"worker\",\"infra\"]}]},\"nodeSelector\":{\"matchLabels\":{\"node-role.kubernetes.io/infra\":\"\"}}}}\n"
+ },
+ "creationTimestamp": "2021-02-10T13:31:34Z",
+ "generation": 23,
+ "labels": {
+ "app.kubernetes.io/instance": "infra-nodes"
+ },
+ "name": "infra",
+ "resourceVersion": "1155388232",
+ "uid": "1a7e48fb-e6db-4a15-98dd-5f1a7516e077"
+ },
+ "spec": {
+ "configuration": {
+ "name": "rendered-infra-f4e5b015e49ebe23158b1ac1029b13fb",
+ "source": [
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "00-worker"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-worker-container-runtime"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-worker-kubelet"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-chrony-configuration"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-generated-crio-add-inheritable-capabilities"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-generated-crio-seccomp-use-default"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-generated-registries"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-ssh"
+ }
+ ]
+ },
+ "machineConfigSelector": {
+ "matchExpressions": [
+ {
+ "key": "machineconfiguration.openshift.io/role",
+ "operator": "In",
+ "values": [
+ "worker",
+ "infra"
+ ]
+ }
+ ]
+ },
+ "nodeSelector": {
+ "matchLabels": {
+ "node-role.kubernetes.io/infra": ""
+ }
+ },
+ "paused": false
+ },
+ "status": {
+ "conditions": [
+ {
+ "lastTransitionTime": "2021-06-30T16:37:26Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "NodeDegraded"
+ },
+ {
+ "lastTransitionTime": "2024-02-29T14:42:12Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "RenderDegraded"
+ },
+ {
+ "lastTransitionTime": "2024-02-29T14:42:16Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "Degraded"
+ },
+ {
+ "lastTransitionTime": "2024-03-03T22:14:21Z",
+ "message": "All nodes are updated with rendered-infra-f4e5b015e49ebe23158b1ac1029b13fb",
+ "reason": "",
+ "status": "True",
+ "type": "Updated"
+ },
+ {
+ "lastTransitionTime": "2024-03-03T22:14:21Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "Updating"
+ }
+ ],
+ "configuration": {
+ "name": "rendered-infra-f4e5b015e49ebe23158b1ac1029b13fb",
+ "source": [
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "00-worker"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-worker-container-runtime"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-worker-kubelet"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-chrony-configuration"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-generated-crio-add-inheritable-capabilities"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-generated-crio-seccomp-use-default"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-generated-registries"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-ssh"
+ }
+ ]
+ },
+ "degradedMachineCount": 0,
+ "machineCount": 3,
+ "observedGeneration": 23,
+ "readyMachineCount": 3,
+ "unavailableMachineCount": 0,
+ "updatedMachineCount": 3
+ }
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+jq_filter='[.items[] | select(.status.machineCount == 1 or .status.machineCount == 0) | .metadata.name]'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$master_apipath#$(echo -n "$master_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$master_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/multiple_nodes_in_every_role/tests/ocp4/e2e.yml b/applications/openshift/high-availability/multiple_nodes_in_every_role/tests/ocp4/e2e.yml
new file mode 100644
index 000000000000..b49fd368b988
--- /dev/null
+++ b/applications/openshift/high-availability/multiple_nodes_in_every_role/tests/ocp4/e2e.yml
@@ -0,0 +1,2 @@
+---
+default_result: PASS
diff --git a/applications/openshift/high-availability/multiple_nodes_in_every_role/tests/single_worker.fail.sh b/applications/openshift/high-availability/multiple_nodes_in_every_role/tests/single_worker.fail.sh
new file mode 100644
index 000000000000..3ec11577fd08
--- /dev/null
+++ b/applications/openshift/high-availability/multiple_nodes_in_every_role/tests/single_worker.fail.sh
@@ -0,0 +1,354 @@
+#!/bin/bash
+# remediation = none
+# packages = jq
+
+kube_apipath="/kubernetes-api-resources"
+mkdir -p "$kube_apipath/apis/machineconfiguration.openshift.io/v1"
+master_apipath="/apis/machineconfiguration.openshift.io/v1/machineconfigpools"
+
+cat < "$kube_apipath$master_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfigPool",
+ "metadata": {
+ "creationTimestamp": "2021-01-04T14:27:26Z",
+ "generation": 28,
+ "labels": {
+ "machineconfiguration.openshift.io/mco-built-in": "",
+ "operator.machineconfiguration.openshift.io/required-for-upgrade": "",
+ "pools.operator.machineconfiguration.openshift.io/master": ""
+ },
+ "name": "master",
+ "resourceVersion": "1155401403",
+ "uid": "4ae68800-4d14-4d0e-a2b1-7104f28bf80a"
+ },
+ "spec": {
+ "configuration": {
+ "name": "rendered-master-20de05f95332a16cf0e41fc15fd58039",
+ "source": [
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "00-master"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-master-container-runtime"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-master-kubelet"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-chrony-configuration"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-crio-add-inheritable-capabilities"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-crio-seccomp-use-default"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-registries"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-ssh"
+ }
+ ]
+ },
+ "machineConfigSelector": {
+ "matchLabels": {
+ "machineconfiguration.openshift.io/role": "master"
+ }
+ },
+ "nodeSelector": {
+ "matchLabels": {
+ "node-role.kubernetes.io/master": ""
+ }
+ },
+ "paused": false
+ },
+ "status": {
+ "conditions": [
+ {
+ "lastTransitionTime": "2023-11-07T11:00:52Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "NodeDegraded"
+ },
+ {
+ "lastTransitionTime": "2024-02-29T14:42:13Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "RenderDegraded"
+ },
+ {
+ "lastTransitionTime": "2024-02-29T14:42:16Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "Degraded"
+ },
+ {
+ "lastTransitionTime": "2024-03-03T22:26:49Z",
+ "message": "All nodes are updated with rendered-master-20de05f95332a16cf0e41fc15fd58039",
+ "reason": "",
+ "status": "True",
+ "type": "Updated"
+ },
+ {
+ "lastTransitionTime": "2024-03-03T22:26:49Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "Updating"
+ }
+ ],
+ "configuration": {
+ "name": "rendered-master-20de05f95332a16cf0e41fc15fd58039",
+ "source": [
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "00-master"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-master-container-runtime"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-master-kubelet"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-chrony-configuration"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-crio-add-inheritable-capabilities"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-crio-seccomp-use-default"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-registries"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-ssh"
+ }
+ ]
+ },
+ "degradedMachineCount": 0,
+ "machineCount": 3,
+ "observedGeneration": 28,
+ "readyMachineCount": 3,
+ "unavailableMachineCount": 0,
+ "updatedMachineCount": 3
+ }
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfigPool",
+ "metadata": {
+ "creationTimestamp": "2021-01-04T14:27:26Z",
+ "generation": 28,
+ "labels": {
+ "machineconfiguration.openshift.io/mco-built-in": "",
+ "pools.operator.machineconfiguration.openshift.io/worker": ""
+ },
+ "name": "worker",
+ "resourceVersion": "1145698233",
+ "uid": "dfba5d38-515c-4715-b30e-8a2f7d78ff23"
+ },
+ "spec": {
+ "configuration": {
+ "name": "rendered-worker-f4e5b015e49ebe23158b1ac1029b13fb",
+ "source": [
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "00-worker"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-worker-container-runtime"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-worker-kubelet"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-chrony-configuration"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-generated-crio-add-inheritable-capabilities"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-generated-crio-seccomp-use-default"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-generated-registries"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-ssh"
+ }
+ ]
+ },
+ "machineConfigSelector": {
+ "matchLabels": {
+ "machineconfiguration.openshift.io/role": "worker"
+ }
+ },
+ "nodeSelector": {
+ "matchLabels": {
+ "node-role.kubernetes.io/worker": ""
+ }
+ },
+ "paused": false
+ },
+ "status": {
+ "conditions": [
+ {
+ "lastTransitionTime": "2021-09-16T13:54:19Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "NodeDegraded"
+ },
+ {
+ "lastTransitionTime": "2024-01-14T00:00:08Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "RenderDegraded"
+ },
+ {
+ "lastTransitionTime": "2024-01-14T00:00:13Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "Degraded"
+ },
+ {
+ "lastTransitionTime": "2024-02-26T16:30:22Z",
+ "message": "All nodes are updated with rendered-worker-f4e5b015e49ebe23158b1ac1029b13fb",
+ "reason": "",
+ "status": "True",
+ "type": "Updated"
+ },
+ {
+ "lastTransitionTime": "2024-02-26T16:30:22Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "Updating"
+ }
+ ],
+ "configuration": {
+ "name": "rendered-worker-f4e5b015e49ebe23158b1ac1029b13fb",
+ "source": [
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "00-worker"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-worker-container-runtime"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-worker-kubelet"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-chrony-configuration"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-generated-crio-add-inheritable-capabilities"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-generated-crio-seccomp-use-default"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-generated-registries"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-worker-ssh"
+ }
+ ]
+ },
+ "degradedMachineCount": 0,
+ "machineCount": 1,
+ "observedGeneration": 28,
+ "readyMachineCount": 1,
+ "unavailableMachineCount": 0,
+ "updatedMachineCount": 1
+ }
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+jq_filter='[.items[] | select(.status.machineCount == 1 or .status.machineCount == 0) | .metadata.name]'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$master_apipath#$(echo -n "$master_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$master_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/three_control_plane_nodes/rule.yml b/applications/openshift/high-availability/three_control_plane_nodes/rule.yml
new file mode 100644
index 000000000000..8532f7769d11
--- /dev/null
+++ b/applications/openshift/high-availability/three_control_plane_nodes/rule.yml
@@ -0,0 +1,39 @@
+documentation_complete: true
+
+title: 'Ensure machine count of MachineConfigPool master is 3'
+
+description: |-
+ To ensure that the OpenShift control plane stays accessible during the outage of a single
+ control plane node, three control plane nodes are required.
+
+rationale: |-
+ A highly available OpenShift control plane requires three control plane nodes. This allows etcd
+ to retain a functional quorum when a single control plane node is unavailable.
+
+identifiers: {}
+
+references:
+ bsi: APP.4.4.A19
+
+severity: medium
+
+ocil_clause: 'MachineConfigPool master has fewer than three nodes'
+
+ocil: |-
+ Run the following command to retrieve the count of nodes in the MachineConfigPool master.
+ $ oc get machineconfigpool master -o jsonpath='{.status.machineCount}'
+ Make sure that the number equals 3.
+
+warnings:
+- general: |-
+ {{{ openshift_cluster_setting("/apis/machineconfiguration.openshift.io/v1/machineconfigpools/master") | indent(4) }}}
+
+template:
+ name: yamlfile_value
+ vars:
+ ocp_data: 'true'
+ filepath: /apis/machineconfiguration.openshift.io/v1/machineconfigpools/master
+ yamlpath: .status.machineCount
+ entity_check: at least one
+ values:
+ - value: '3'
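
Outside the scanner, the same condition can be verified manually; the snippet below is only a convenience sketch built on the command already shown in the ocil section, not part of the rule itself:

    count=$(oc get machineconfigpool master -o jsonpath='{.status.machineCount}')
    if [ "$count" -eq 3 ]; then
        echo "PASS: MachineConfigPool master has 3 machines"
    else
        echo "FAIL: MachineConfigPool master has $count machines"
    fi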
diff --git a/applications/openshift/high-availability/three_control_plane_nodes/tests/ocp4/e2e.yml b/applications/openshift/high-availability/three_control_plane_nodes/tests/ocp4/e2e.yml
new file mode 100644
index 000000000000..b49fd368b988
--- /dev/null
+++ b/applications/openshift/high-availability/three_control_plane_nodes/tests/ocp4/e2e.yml
@@ -0,0 +1,2 @@
+---
+default_result: PASS
diff --git a/applications/openshift/high-availability/three_control_plane_nodes/tests/three_masters.pass.sh b/applications/openshift/high-availability/three_control_plane_nodes/tests/three_masters.pass.sh
new file mode 100644
index 000000000000..13fb5b20f274
--- /dev/null
+++ b/applications/openshift/high-availability/three_control_plane_nodes/tests/three_masters.pass.sh
@@ -0,0 +1,173 @@
+#!/bin/bash
+# remediation = none
+
+kube_apipath="/kubernetes-api-resources"
+mkdir -p "$kube_apipath/apis/machineconfiguration.openshift.io/v1/machineconfigpools"
+master_apipath="/apis/machineconfiguration.openshift.io/v1/machineconfigpools/master"
+
+cat < "$kube_apipath$master_apipath"
+{
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfigPool",
+ "metadata": {
+ "creationTimestamp": "2021-01-04T14:27:26Z",
+ "generation": 28,
+ "labels": {
+ "machineconfiguration.openshift.io/mco-built-in": "",
+ "operator.machineconfiguration.openshift.io/required-for-upgrade": "",
+ "pools.operator.machineconfiguration.openshift.io/master": ""
+ },
+ "name": "master",
+ "resourceVersion": "1155401403",
+ "uid": "4ae68800-4d14-4d0e-a2b1-7104f28bf80a"
+ },
+ "spec": {
+ "configuration": {
+ "name": "rendered-master-20de05f95332a16cf0e41fc15fd58039",
+ "source": [
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "00-master"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-master-container-runtime"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-master-kubelet"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-chrony-configuration"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-crio-add-inheritable-capabilities"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-crio-seccomp-use-default"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-registries"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-ssh"
+ }
+ ]
+ },
+ "machineConfigSelector": {
+ "matchLabels": {
+ "machineconfiguration.openshift.io/role": "master"
+ }
+ },
+ "nodeSelector": {
+ "matchLabels": {
+ "node-role.kubernetes.io/master": ""
+ }
+ },
+ "paused": false
+ },
+ "status": {
+ "conditions": [
+ {
+ "lastTransitionTime": "2023-11-07T11:00:52Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "NodeDegraded"
+ },
+ {
+ "lastTransitionTime": "2024-02-29T14:42:13Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "RenderDegraded"
+ },
+ {
+ "lastTransitionTime": "2024-02-29T14:42:16Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "Degraded"
+ },
+ {
+ "lastTransitionTime": "2024-03-03T22:26:49Z",
+ "message": "All nodes are updated with rendered-master-20de05f95332a16cf0e41fc15fd58039",
+ "reason": "",
+ "status": "True",
+ "type": "Updated"
+ },
+ {
+ "lastTransitionTime": "2024-03-03T22:26:49Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "Updating"
+ }
+ ],
+ "configuration": {
+ "name": "rendered-master-20de05f95332a16cf0e41fc15fd58039",
+ "source": [
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "00-master"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-master-container-runtime"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-master-kubelet"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-chrony-configuration"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-crio-add-inheritable-capabilities"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-crio-seccomp-use-default"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-registries"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-ssh"
+ }
+ ]
+ },
+ "degradedMachineCount": 0,
+ "machineCount": 3,
+ "observedGeneration": 28,
+ "readyMachineCount": 3,
+ "unavailableMachineCount": 0,
+ "updatedMachineCount": 3
+ }
+}
+EOF
diff --git a/applications/openshift/high-availability/three_control_plane_nodes/tests/two_masters.fail.sh b/applications/openshift/high-availability/three_control_plane_nodes/tests/two_masters.fail.sh
new file mode 100644
index 000000000000..f64d3adfd0f0
--- /dev/null
+++ b/applications/openshift/high-availability/three_control_plane_nodes/tests/two_masters.fail.sh
@@ -0,0 +1,173 @@
+#!/bin/bash
+# remediation = none
+
+kube_apipath="/kubernetes-api-resources"
+mkdir -p "$kube_apipath/apis/machineconfiguration.openshift.io/v1/machineconfigpools"
+master_apipath="/apis/machineconfiguration.openshift.io/v1/machineconfigpools/master"
+
+cat < "$kube_apipath$master_apipath"
+{
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfigPool",
+ "metadata": {
+ "creationTimestamp": "2021-01-04T14:27:26Z",
+ "generation": 28,
+ "labels": {
+ "machineconfiguration.openshift.io/mco-built-in": "",
+ "operator.machineconfiguration.openshift.io/required-for-upgrade": "",
+ "pools.operator.machineconfiguration.openshift.io/master": ""
+ },
+ "name": "master",
+ "resourceVersion": "1155401403",
+ "uid": "4ae68800-4d14-4d0e-a2b1-7104f28bf80a"
+ },
+ "spec": {
+ "configuration": {
+ "name": "rendered-master-20de05f95332a16cf0e41fc15fd58039",
+ "source": [
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "00-master"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-master-container-runtime"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-master-kubelet"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-chrony-configuration"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-crio-add-inheritable-capabilities"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-crio-seccomp-use-default"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-registries"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-ssh"
+ }
+ ]
+ },
+ "machineConfigSelector": {
+ "matchLabels": {
+ "machineconfiguration.openshift.io/role": "master"
+ }
+ },
+ "nodeSelector": {
+ "matchLabels": {
+ "node-role.kubernetes.io/master": ""
+ }
+ },
+ "paused": false
+ },
+ "status": {
+ "conditions": [
+ {
+ "lastTransitionTime": "2023-11-07T11:00:52Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "NodeDegraded"
+ },
+ {
+ "lastTransitionTime": "2024-02-29T14:42:13Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "RenderDegraded"
+ },
+ {
+ "lastTransitionTime": "2024-02-29T14:42:16Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "Degraded"
+ },
+ {
+ "lastTransitionTime": "2024-03-03T22:26:49Z",
+ "message": "All nodes are updated with rendered-master-20de05f95332a16cf0e41fc15fd58039",
+ "reason": "",
+ "status": "True",
+ "type": "Updated"
+ },
+ {
+ "lastTransitionTime": "2024-03-03T22:26:49Z",
+ "message": "",
+ "reason": "",
+ "status": "False",
+ "type": "Updating"
+ }
+ ],
+ "configuration": {
+ "name": "rendered-master-20de05f95332a16cf0e41fc15fd58039",
+ "source": [
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "00-master"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-master-container-runtime"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "01-master-kubelet"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-chrony-configuration"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-crio-add-inheritable-capabilities"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-crio-seccomp-use-default"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-generated-registries"
+ },
+ {
+ "apiVersion": "machineconfiguration.openshift.io/v1",
+ "kind": "MachineConfig",
+ "name": "99-master-ssh"
+ }
+ ]
+ },
+ "degradedMachineCount": 0,
+ "machineCount": 2,
+ "observedGeneration": 28,
+ "readyMachineCount": 2,
+ "unavailableMachineCount": 0,
+ "updatedMachineCount": 2
+ }
+}
+EOF
diff --git a/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/rule.yml b/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/rule.yml
new file mode 100644
index 000000000000..373585afc6ef
--- /dev/null
+++ b/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/rule.yml
@@ -0,0 +1,52 @@
+documentation_complete: true
+
+title: 'Ensure worker nodes are distributed across two or more failure zones'
+
+description: |-
+ Distributing Kubernetes worker nodes across failure zones enhances security by mitigating
+ the risk of a single point of failure and reducing the impact of application workload
+ unavailability or attacks targeting a specific zone.
+
+rationale: |-
+ Distributing Kubernetes worker nodes across failure zones is crucial for enhancing overall
+ system resilience and security. By spreading applications across different zones, the system
+ becomes more fault-tolerant, reducing the risk of widespread outages due to failures or
+ attacks in a single zone.
+ For application workload nodes, two failure zones are often deemed sufficient, because typical
+ application workloads do not rely on quorum-based components such as etcd.
+ Failure zones are marked on nodes using the well-known label "topology.kubernetes.io/zone".
+ This label is automatically assigned to each node by cloud providers, but it might need to be
+ managed manually in other environments.
+
+identifiers: {}
+
+references:
+ bsi: APP.4.4.A19
+
+severity: medium
+
+ocil_clause: 'Kubernetes worker nodes are not distributed across two or more failure zones'
+
+ocil: |-
+ Run the following command to determine the failure zones of the worker nodes, indicated by
+ the label topology.kubernetes.io/zone.
+ $ oc get nodes --selector "node-role.kubernetes.io/worker" -o custom-columns='NAME:.metadata.name,ZONE:.metadata.labels.topology\.kubernetes\.io/zone'
+ Make sure that at least two distinct zones are listed.
+
+{{% set jqfilter = '.items | map(select(.metadata.labels["node-role.kubernetes.io/worker"] == "") | .metadata.labels["topology.kubernetes.io/zone"]) | unique | length' %}}
+
+warnings:
+- general: |-
+ {{{ openshift_filtered_cluster_setting({'/api/v1/nodes': jqfilter}) | indent(4) }}}
+
+template:
+ name: yamlfile_value
+ vars:
+ ocp_data: "true"
+ filepath: |-
+ {{{ openshift_filtered_path('/api/v1/nodes', jqfilter) }}}
+ yamlpath: "$"
+ values:
+ - value: 2
+ type: int
+ operation: "greater than or equal"
diff --git a/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/tests/four_zones.pass.sh b/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/tests/four_zones.pass.sh
new file mode 100644
index 000000000000..7a54c11b23c1
--- /dev/null
+++ b/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/tests/four_zones.pass.sh
@@ -0,0 +1,305 @@
+#!/bin/bash
+# remediation = none
+# packages = jq
+
+kube_apipath="/kubernetes-api-resources"
+mkdir -p "$kube_apipath/api/v1"
+nodes_apipath="/api/v1/nodes"
+
+cat < "$kube_apipath$nodes_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:23:02Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1b"
+ },
+ "name": "ocp-control1.domain.local",
+ "resourceVersion": "1192119588",
+ "uid": "c0aa2f3d-71ed-428d-9d11-4824f0e914da"
+ },
+ "spec": {
+ "podCIDR": "10.128.0.0/24",
+ "podCIDRs": [
+ "10.128.0.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:24:11Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-control2.domain.local",
+ "resourceVersion": "1192119593",
+ "uid": "33735f94-a745-4d7d-8707-73df67cbc8e1"
+ },
+ "spec": {
+ "podCIDR": "10.128.1.0/24",
+ "podCIDRs": [
+ "10.128.1.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:25:24Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1c"
+ },
+ "name": "ocp-control3.domain.local",
+ "resourceVersion": "1192117923",
+ "uid": "ffd0364a-b48d-4b53-bb69-47568e6511b5"
+ },
+ "spec": {
+ "podCIDR": "10.128.2.0/24",
+ "podCIDRs": [
+ "10.128.2.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:48:16Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-worker1.domain.local",
+ "resourceVersion": "1192122216",
+ "uid": "1667ec5a-ca3d-4994-88bd-27da3644e338"
+ },
+ "spec": {
+ "podCIDR": "10.128.5.0/24",
+ "podCIDRs": [
+ "10.128.5.0/24"
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:48:12Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1b"
+ },
+ "name": "ocp-worker2.domain.local",
+ "resourceVersion": "1192122353",
+ "uid": "bd56f83c-e625-4365-a838-47fa496b7d93"
+ },
+ "spec": {
+ "podCIDR": "10.128.4.0/24",
+ "podCIDRs": [
+ "10.128.4.0/24"
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:47:56Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1c"
+ },
+ "name": "ocp-worker3.domain.local",
+ "resourceVersion": "1192119492",
+ "uid": "65395ca7-8181-4a0b-95cf-128922f105f5"
+ },
+ "spec": {
+ "podCIDR": "10.128.3.0/24",
+ "podCIDRs": [
+ "10.128.3.0/24"
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-14T07:25:59Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker4.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1d"
+ },
+ "name": "ocp-worker4.domain.local",
+ "resourceVersion": "1192119420",
+ "uid": "6e993021-17e9-4945-9c46-65e60c90c65a"
+ },
+ "spec": {}
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+jq_filter='.items | map(select(.metadata.labels["node-role.kubernetes.io/worker"] == "") | .metadata.labels["topology.kubernetes.io/zone"]) | unique | length'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$nodes_apipath#$(echo -n "$nodes_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$nodes_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/tests/no_zone.fail.sh b/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/tests/no_zone.fail.sh
new file mode 100644
index 000000000000..02b1cd2a4765
--- /dev/null
+++ b/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/tests/no_zone.fail.sh
@@ -0,0 +1,297 @@
+#!/bin/bash
+# remediation = none
+# packages = jq
+
+kube_apipath="/kubernetes-api-resources"
+mkdir -p "$kube_apipath/api/v1"
+nodes_apipath="/api/v1/nodes"
+
+cat < "$kube_apipath$nodes_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:23:02Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1b"
+ },
+ "name": "ocp-control1.domain.local",
+ "resourceVersion": "1192119588",
+ "uid": "c0aa2f3d-71ed-428d-9d11-4824f0e914da"
+ },
+ "spec": {
+ "podCIDR": "10.128.0.0/24",
+ "podCIDRs": [
+ "10.128.0.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:24:11Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-control2.domain.local",
+ "resourceVersion": "1192119593",
+ "uid": "33735f94-a745-4d7d-8707-73df67cbc8e1"
+ },
+ "spec": {
+ "podCIDR": "10.128.1.0/24",
+ "podCIDRs": [
+ "10.128.1.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:25:24Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1c"
+ },
+ "name": "ocp-control3.domain.local",
+ "resourceVersion": "1192117923",
+ "uid": "ffd0364a-b48d-4b53-bb69-47568e6511b5"
+ },
+ "spec": {
+ "podCIDR": "10.128.2.0/24",
+ "podCIDRs": [
+ "10.128.2.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:48:16Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos"
+ },
+ "name": "ocp-worker1.domain.local",
+ "resourceVersion": "1192122216",
+ "uid": "1667ec5a-ca3d-4994-88bd-27da3644e338"
+ },
+ "spec": {
+ "podCIDR": "10.128.5.0/24",
+ "podCIDRs": [
+ "10.128.5.0/24"
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:48:12Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos"
+ },
+ "name": "ocp-worker2.domain.local",
+ "resourceVersion": "1192122353",
+ "uid": "bd56f83c-e625-4365-a838-47fa496b7d93"
+ },
+ "spec": {
+ "podCIDR": "10.128.4.0/24",
+ "podCIDRs": [
+ "10.128.4.0/24"
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:47:56Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos"
+ },
+ "name": "ocp-worker3.domain.local",
+ "resourceVersion": "1192119492",
+ "uid": "65395ca7-8181-4a0b-95cf-128922f105f5"
+ },
+ "spec": {
+ "podCIDR": "10.128.3.0/24",
+ "podCIDRs": [
+ "10.128.3.0/24"
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-14T07:25:59Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker4.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos"
+ },
+ "name": "ocp-worker4.domain.local",
+ "resourceVersion": "1192119420",
+ "uid": "6e993021-17e9-4945-9c46-65e60c90c65a"
+ },
+ "spec": {}
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+jq_filter='.items | map(select(.metadata.labels["node-role.kubernetes.io/worker"] == "") | .metadata.labels["topology.kubernetes.io/zone"]) | unique | length'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$nodes_apipath#$(echo -n "$nodes_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$nodes_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/tests/ocp4/e2e.yml b/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/tests/ocp4/e2e.yml
new file mode 100644
index 000000000000..f426dc3d7ea4
--- /dev/null
+++ b/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/tests/ocp4/e2e.yml
@@ -0,0 +1,2 @@
+---
+default_result: FAIL
diff --git a/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/tests/one_zone.fail.sh b/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/tests/one_zone.fail.sh
new file mode 100644
index 000000000000..d977f106a54b
--- /dev/null
+++ b/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/tests/one_zone.fail.sh
@@ -0,0 +1,305 @@
+#!/bin/bash
+# remediation = none
+# packages = jq
+
+kube_apipath="/kubernetes-api-resources"
+mkdir -p "$kube_apipath/api/v1"
+nodes_apipath="/api/v1/nodes"
+
+cat < "$kube_apipath$nodes_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:23:02Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1b"
+ },
+ "name": "ocp-control1.domain.local",
+ "resourceVersion": "1192119588",
+ "uid": "c0aa2f3d-71ed-428d-9d11-4824f0e914da"
+ },
+ "spec": {
+ "podCIDR": "10.128.0.0/24",
+ "podCIDRs": [
+ "10.128.0.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:24:11Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-control2.domain.local",
+ "resourceVersion": "1192119593",
+ "uid": "33735f94-a745-4d7d-8707-73df67cbc8e1"
+ },
+ "spec": {
+ "podCIDR": "10.128.1.0/24",
+ "podCIDRs": [
+ "10.128.1.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:25:24Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1c"
+ },
+ "name": "ocp-control3.domain.local",
+ "resourceVersion": "1192117923",
+ "uid": "ffd0364a-b48d-4b53-bb69-47568e6511b5"
+ },
+ "spec": {
+ "podCIDR": "10.128.2.0/24",
+ "podCIDRs": [
+ "10.128.2.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:48:16Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-worker1.domain.local",
+ "resourceVersion": "1192122216",
+ "uid": "1667ec5a-ca3d-4994-88bd-27da3644e338"
+ },
+ "spec": {
+ "podCIDR": "10.128.5.0/24",
+ "podCIDRs": [
+ "10.128.5.0/24"
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:48:12Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-worker2.domain.local",
+ "resourceVersion": "1192122353",
+ "uid": "bd56f83c-e625-4365-a838-47fa496b7d93"
+ },
+ "spec": {
+ "podCIDR": "10.128.4.0/24",
+ "podCIDRs": [
+ "10.128.4.0/24"
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:47:56Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-worker3.domain.local",
+ "resourceVersion": "1192119492",
+ "uid": "65395ca7-8181-4a0b-95cf-128922f105f5"
+ },
+ "spec": {
+ "podCIDR": "10.128.3.0/24",
+ "podCIDRs": [
+ "10.128.3.0/24"
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-14T07:25:59Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker4.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-worker4.domain.local",
+ "resourceVersion": "1192119420",
+ "uid": "6e993021-17e9-4945-9c46-65e60c90c65a"
+ },
+ "spec": {}
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+jq_filter='.items | map(select(.metadata.labels["node-role.kubernetes.io/worker"] == "") | .metadata.labels["topology.kubernetes.io/zone"]) | unique | length'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$nodes_apipath#$(echo -n "$nodes_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$nodes_apipath" > "$filteredpath"
diff --git a/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/tests/three_zones.pass.sh b/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/tests/three_zones.pass.sh
new file mode 100644
index 000000000000..854ebf3393fd
--- /dev/null
+++ b/applications/openshift/high-availability/worker_nodes_in_two_zones_or_more/tests/three_zones.pass.sh
@@ -0,0 +1,305 @@
+#!/bin/bash
+# remediation = none
+# packages = jq
+
+kube_apipath="/kubernetes-api-resources"
+mkdir -p "$kube_apipath/api/v1"
+nodes_apipath="/api/v1/nodes"
+
+cat < "$kube_apipath$nodes_apipath"
+{
+ "apiVersion": "v1",
+ "items": [
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:23:02Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1b"
+ },
+ "name": "ocp-control1.domain.local",
+ "resourceVersion": "1192119588",
+ "uid": "c0aa2f3d-71ed-428d-9d11-4824f0e914da"
+ },
+ "spec": {
+ "podCIDR": "10.128.0.0/24",
+ "podCIDRs": [
+ "10.128.0.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:24:11Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-control2.domain.local",
+ "resourceVersion": "1192119593",
+ "uid": "33735f94-a745-4d7d-8707-73df67cbc8e1"
+ },
+ "spec": {
+ "podCIDR": "10.128.1.0/24",
+ "podCIDRs": [
+ "10.128.1.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-master-d0a23f1409780adbe3913473e3e42154",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:25:24Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-control3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/master": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1c"
+ },
+ "name": "ocp-control3.domain.local",
+ "resourceVersion": "1192117923",
+ "uid": "ffd0364a-b48d-4b53-bb69-47568e6511b5"
+ },
+ "spec": {
+ "podCIDR": "10.128.2.0/24",
+ "podCIDRs": [
+ "10.128.2.0/24"
+ ],
+ "taints": [
+ {
+ "effect": "NoSchedule",
+ "key": "node-role.kubernetes.io/master"
+ }
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-worker-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/ssh": "accessed",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:48:16Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker1.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-worker1.domain.local",
+ "resourceVersion": "1192122216",
+ "uid": "1667ec5a-ca3d-4994-88bd-27da3644e338"
+ },
+ "spec": {
+ "podCIDR": "10.128.5.0/24",
+ "podCIDRs": [
+ "10.128.5.0/24"
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:48:12Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker2.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1b"
+ },
+ "name": "ocp-worker2.domain.local",
+ "resourceVersion": "1192122353",
+ "uid": "bd56f83c-e625-4365-a838-47fa496b7d93"
+ },
+ "spec": {
+ "podCIDR": "10.128.4.0/24",
+ "podCIDRs": [
+ "10.128.4.0/24"
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-04T14:47:56Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker3.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1c"
+ },
+ "name": "ocp-worker3.domain.local",
+ "resourceVersion": "1192119492",
+ "uid": "65395ca7-8181-4a0b-95cf-128922f105f5"
+ },
+ "spec": {
+ "podCIDR": "10.128.3.0/24",
+ "podCIDRs": [
+ "10.128.3.0/24"
+ ]
+ }
+ },
+ {
+ "apiVersion": "v1",
+ "kind": "Node",
+ "metadata": {
+ "annotations": {
+ "machineconfiguration.openshift.io/controlPlaneTopology": "HighlyAvailable",
+ "machineconfiguration.openshift.io/currentConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredConfig": "rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/desiredDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/lastAppliedDrain": "uncordon-rendered-infra-2bc1dcecc35503442d9102830613c52b",
+ "machineconfiguration.openshift.io/reason": "",
+ "machineconfiguration.openshift.io/state": "Done",
+ "volumes.kubernetes.io/controller-managed-attach-detach": "true"
+ },
+ "creationTimestamp": "2023-01-14T07:25:59Z",
+ "labels": {
+ "beta.kubernetes.io/arch": "amd64",
+ "beta.kubernetes.io/os": "linux",
+ "cluster.ocs.openshift.io/openshift-storage": "",
+ "kubernetes.io/arch": "amd64",
+ "kubernetes.io/hostname": "ocp-worker4.domain.local",
+ "kubernetes.io/os": "linux",
+ "node-role.kubernetes.io/worker": "",
+ "node.openshift.io/os_id": "rhcos",
+ "topology.kubernetes.io/region": "eu-central-1",
+ "topology.kubernetes.io/zone": "eu-central-1a"
+ },
+ "name": "ocp-worker4.domain.local",
+ "resourceVersion": "1192119420",
+ "uid": "6e993021-17e9-4945-9c46-65e60c90c65a"
+ },
+ "spec": {}
+ }
+ ],
+ "kind": "List",
+ "metadata": {
+ "resourceVersion": ""
+ }
+}
+EOF
+
+jq_filter='.items | map(select(.metadata.labels["node-role.kubernetes.io/worker"] == "") | .metadata.labels["topology.kubernetes.io/zone"]) | unique | length'
+
+# Get file path. This will actually be read by the scan
+filteredpath="$kube_apipath$nodes_apipath#$(echo -n "$nodes_apipath$jq_filter" | sha256sum | awk '{print $1}')"
+
+# populate filtered path with jq-filtered result
+jq "$jq_filter" "$kube_apipath$nodes_apipath" > "$filteredpath"
diff --git a/applications/openshift/worker/file_owner_worker_ca/rule.yml b/applications/openshift/worker/file_owner_worker_ca/rule.yml
index 2cd85ac2901e..f4eef80c9c14 100644
--- a/applications/openshift/worker/file_owner_worker_ca/rule.yml
+++ b/applications/openshift/worker/file_owner_worker_ca/rule.yml
@@ -18,6 +18,7 @@ identifiers:
cce@ocp4: CCE-83495-2
references:
+ bsi: APP.4.4.A17
cis@ocp4: 4.1.8
nerc-cip: CIP-003-8 R6,CIP-004-6 R3,CIP-007-3 R6.1
nist: CM-6,CM-6(1)
diff --git a/controls/bsi_app_4_4.yml b/controls/bsi_app_4_4.yml
index 9b55dec984e1..8f00a907eb2a 100644
--- a/controls/bsi_app_4_4.yml
+++ b/controls/bsi_app_4_4.yml
@@ -443,20 +443,47 @@ controls:
levels:
- elevated
description: >-
- A Kubernetes operation SHOULD be set up in such a way that if a site fails, the clusters (and
- thus the applications in the pods) either continue to run without interruption or can be
+ (1) A Kubernetes operation SHOULD be set up in such a way that if a site fails, the clusters
+ (and thus the applications in the pods) either continue to run without interruption or can be
restarted in a short time at another site.
- Should a restart be required, all the necessary configuration files, images, user data, network
- connections, and other resources required for operation (including the necessary hardware)
+ (2) Should a restart be required, all the necessary configuration files, images, user data,
+ network connections, and other resources required for operation (including the necessary hardware)
SHOULD already be available at the alternative site.
- For the uninterrupted operation of clusters, the control plane of Kubernetes, the infrastructure
+ (3) For the uninterrupted operation of clusters, the control plane of Kubernetes, the infrastructure
applications of the clusters, and the pods of the applications SHOULD be distributed across
several fire zones based on the location data of the corresponding nodes so that the failure of a
fire zone will not lead to the failure of an application.
notes: >-
- TBD
+ Section 1: OpenShift supports topology labels to differentiate between failure zones. To achieve
+ continued operation without interruption, nodes of every role need to be spread across zones.
+ For quorum-based applications, such as the Kubernetes control plane, three zones are required.
+ A sufficient number of control plane nodes and sufficient spreading across zones are checked by
+ the rules referenced below. If a restart-based approach is chosen instead, its adequacy needs to
+ be ensured organizationally.
+
+ Section 2: The availability of all resources required for operation after a restart at a
+ different site needs to be ensured organizationally. Regular tests are essential. Persistent
+ data used by pods must be stored in PVs/PVCs backed by a storage provider that is also
+ available at the alternative site.
+
+ Section 3: The OpenShift control plane is evenly distributed across the control plane nodes
+ out of the box. If the control plane nodes are spread across failure zones, the control plane
+ can therefore tolerate the outage of a node or of an entire zone. For infrastructure and
+ application workloads, distribution across nodes and zones needs to be configured at deployment
+ time using affinity/anti-affinity rules or topology spread constraints, as sketched below.
+
+ Single Node OpenShift (SNO) is not highly available and therefore not compliant with this control.
status: pending
- rules: []
+ rules:
+ # Section 1, 3
+ - multiple_nodes_in_every_role
+ - control_plane_nodes_in_three_zones
+ - worker_nodes_in_two_zones_or_more
+ - infra_nodes_in_two_zones_or_more
+ # Section 3
+ - three_control_plane_nodes
+ - anti_affinity_or_topology_spread_constraints_in_deployment
+ - anti_affinity_or_topology_spread_constraints_in_statefulset
+
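As an illustration of what the anti-affinity/topology-spread rules listed above look for, a Deployment can spread its replicas across zones with topologySpreadConstraints. The snippet below is only a sketch using the standard Kubernetes fields; the name, labels, and image are placeholders:

    apiVersion: apps/v1
    kind: Deployment
    metadata:
      name: example-service            # placeholder name
    spec:
      replicas: 3
      selector:
        matchLabels:
          app: example-service
      template:
        metadata:
          labels:
            app: example-service
        spec:
          # Spread pods evenly across availability zones
          topologySpreadConstraints:
            - maxSkew: 1
              topologyKey: topology.kubernetes.io/zone
              whenUnsatisfiable: DoNotSchedule
              labelSelector:
                matchLabels:
                  app: example-service
          containers:
            - name: example
              image: registry.example.com/example:latest   # placeholder image

An equivalent effect can be achieved with anti-affinity rules (podAntiAffinity) keyed on the same topology label.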
- id: APP.4.4.A20
title: Encrypted Data Storage for Pods