-
Notifications
You must be signed in to change notification settings - Fork 62
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
ci: add workflow to check for vulnerabilities in images
- Loading branch information
1 parent
0133259
commit 4efc18f
Showing
4 changed files
with
394 additions
and
1 deletion.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,148 @@ | ||
---
# Updates `ci/security-scan/security_scan_results.md` with the latest image
# vulnerability results reported by Quay, then opens a pull request with them.
name: Update notebook image security reports

on:
  workflow_dispatch:
    inputs:
      branch:
        required: true
        # NOTE(review): this input is currently unused — BRANCH_NAME below is
        # hard-coded to `main`; confirm whether the input should feed it.
        description: "Provide the name of the branch you want to update ex main, vYYYYx etc: "
  schedule:
    - cron: "0 0 * * 0"  # every Sunday at 00:00 UTC (0 in the day-of-week field is Sunday)

env:
  SEC_SCAN_BRANCH: sec-scan-${{ github.run_id }}
  BRANCH_NAME: main
  RELEASE_VERSION_N: 2023b
  RELEASE_VERSION_N_1: 2023a

jobs:
  initialize:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Install Skopeo CLI
        shell: bash
        run: |
          sudo apt-get -y update
          sudo apt-get -y install skopeo

      # Check out the base branch the scan results will be merged back into.
      - name: Checkout branch
        uses: actions/checkout@v3
        with:
          ref: ${{ env.BRANCH_NAME }}

      # Create a dedicated working branch for this run's scan results.
      - name: Create a new branch
        run: |
          echo ${{ env.SEC_SCAN_BRANCH }}
          git checkout -b ${{ env.SEC_SCAN_BRANCH }}
          git push --set-upstream origin ${{ env.SEC_SCAN_BRANCH }}

  update-n-version:
    needs: [initialize]
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Configure Git
        run: |
          git config --global user.email "github-actions[bot]@users.noreply.github.com"
          git config --global user.name "GitHub Actions"

      # Get the latest weekly build commit hash: https://github.com/opendatahub-io/notebooks/commits/2023b
      - name: Checkout upstream notebooks repo
        uses: actions/checkout@v3
        with:
          repository: opendatahub-io/notebooks.git
          ref: ${{ env.RELEASE_VERSION_N }}

      - name: Retrieve latest weekly commit hash from the "N" branch
        id: hash-n
        shell: bash
        run: |
          echo "HASH_N=$(git rev-parse --short HEAD)" >> ${GITHUB_OUTPUT}

      - name: Checkout "N - 1" branch
        uses: actions/checkout@v3
        with:
          repository: opendatahub-io/notebooks.git
          ref: ${{ env.RELEASE_VERSION_N_1 }}

      - name: Retrieve latest weekly commit hash from the "N - 1" branch
        id: hash-n-1
        shell: bash
        run: |
          echo "HASH_N_1=$(git rev-parse --short HEAD)" >> ${GITHUB_OUTPUT}

      - name: Checkout "main" branch
        uses: actions/checkout@v3
        with:
          repository: opendatahub-io/notebooks.git
          ref: main

      - name: Retrieve latest weekly commit hash from the "main" branch
        id: hash-main
        shell: bash
        run: |
          echo "LATEST_MAIN_COMMIT=$(git rev-parse --short HEAD)" >> ${GITHUB_OUTPUT}

      # Check out the working branch created by the `initialize` job to apply the updates.
      - name: Checkout release branch
        uses: actions/checkout@v3
        with:
          ref: ${{ env.SEC_SCAN_BRANCH }}

      - name: setup python
        uses: actions/setup-python@v4
        with:
          python-version: '3.10'  # install the python version needed

      - name: install python packages
        run: |
          python -m pip install --upgrade pip
          pip install requests

      - name: execute py script
        env:
          HASH_N: ${{ steps.hash-n.outputs.HASH_N }}
          RELEASE_VERSION_N: 2023b

          HASH_N_1: ${{ steps.hash-n-1.outputs.HASH_N_1 }}
          RELEASE_VERSION_N_1: 2023a

          LATEST_MAIN_COMMIT: ${{ steps.hash-main.outputs.LATEST_MAIN_COMMIT }}
        run: make scan-image-vulnerabilities

      - name: Push the files
        run: |
          git fetch origin ${{ env.SEC_SCAN_BRANCH }} && git pull origin ${{ env.SEC_SCAN_BRANCH }} && git add . && git commit -m "Update security scans" && git push origin ${{ env.SEC_SCAN_BRANCH }}

  # Creates the pull request that merges the scan results back into BRANCH_NAME.
  open-pull-request:
    needs: [update-n-version]
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
    steps:
      - name: Checkout repo
        uses: actions/checkout@v3

      - name: pull-request
        uses: repo-sync/pull-request@v2
        with:
          source_branch: ${{ env.SEC_SCAN_BRANCH }}
          destination_branch: ${{ env.BRANCH_NAME }}
          github_token: ${{ secrets.GH_TOKEN }}
          pr_label: "automated pr"
          pr_title: "[Security Scanner Action] Weekly update of security vulnerabilities reported by Quay"
          pr_body: |
            :rocket: This is an automated Pull Request.

            This PR updates:
            * `ci/security-scan/security_scan_results.md` file with the latest security vulnerabilities reported by Quay.
            * `ci/security-scan/weekly_commit_ids` with the latest updated SHA digests of the notebooks (N & N-1)

            Created by `/.github/workflows/sec-scan.yaml`

            :exclamation: **IMPORTANT NOTE**: Remember to delete the `${{ env.SEC_SCAN_BRANCH }}` branch after merging the changes
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,197 @@ | ||
import fileinput
import json
import os
import re
import subprocess
from collections import Counter
from datetime import date

import requests
||
# Accumulates per-tag vulnerability severity counts for the branch currently
# being processed; reset between the main / N / N-1 passes further below.
branch_dictionary = {}

# Env file that pins each image name to its `registry@digest` reference.
commit_id_path = "ci/security-scan/weekly_commit_ids.env"
||
# Image streams built from the `main` branch.
# NOTE(review): unlike IMAGES below, this list has no runtime-pytorch /
# runtime-tensorflow entries — confirm whether that omission is intentional.
IMAGES_MAIN = [
    "odh-minimal-notebook-image-main",
    "odh-runtime-minimal-notebook-image-main",
    "odh-runtime-data-science-notebook-image-main",
    "odh-minimal-gpu-notebook-image-main",
    "odh-pytorch-gpu-notebook-image-main",
    "odh-generic-data-science-notebook-image-main",
    "odh-tensorflow-gpu-notebook-image-main",
    "odh-trustyai-notebook-image-main",
    "odh-habana-notebook-image-main",
    "odh-codeserver-notebook-main",
    "odh-rstudio-notebook-main",
    "odh-rstudio-gpu-notebook-main"
]

# Image streams built from the current release branch ("N").
IMAGES = [
    "odh-minimal-notebook-image-n",
    "odh-runtime-minimal-notebook-image-n",
    "odh-runtime-data-science-notebook-image-n",
    "odh-minimal-gpu-notebook-image-n",
    "odh-pytorch-gpu-notebook-image-n",
    "odh-runtime-pytorch-notebook-image-n",
    "odh-generic-data-science-notebook-image-n",
    "odh-tensorflow-gpu-notebook-image-n",
    "odh-runtime-tensorflow-notebook-image-n",
    "odh-trustyai-notebook-image-n",
    "odh-habana-notebook-image-n",
    "odh-codeserver-notebook-n",
    "odh-rstudio-notebook-n",
    "odh-rstudio-gpu-notebook-n"
]

# Image streams built from the previous release branch ("N - 1").
# NOTE(review): has no habana entry, unlike IMAGES — verify that is expected.
IMAGES_N_1 = [
    "odh-minimal-notebook-image-n-1",
    "odh-runtime-minimal-notebook-image-n-1",
    "odh-minimal-gpu-notebook-image-n-1",
    "odh-pytorch-gpu-notebook-image-n-1",
    "odh-runtime-pytorch-notebook-image-n-1",
    "odh-runtime-data-science-notebook-image-n-1",
    "odh-generic-data-science-notebook-image-n-1",
    "odh-tensorflow-gpu-notebook-image-n-1",
    "odh-runtime-tensorflow-notebook-image-n-1",
    "odh-trustyai-notebook-image-n-1",
    "odh-codeserver-notebook-n-1",
    "odh-rstudio-notebook-n-1",
    "odh-rstudio-gpu-notebook-n-1"
]
||
def process_image(image, commit_id_path, RELEASE_VERSION_N, HASH_N):
    """Refresh the pinned digest and vulnerability counts for one image.

    Finds the ``image=registry@digest`` line in *commit_id_path*, asks the
    registry (via ``skopeo inspect``) for the newest tag built from commit
    *HASH_N* on release *RELEASE_VERSION_N* (empty string selects the `main`
    branch tag scheme), fetches that tag's Quay security report, records the
    per-severity counts in the module-level ``branch_dictionary``, and
    rewrites the image's line in *commit_id_path* with the new digest.

    Returns None; returns early (leaving the file untouched) when no tag
    matching the commit hash, or no digest, can be resolved.
    """
    with open(commit_id_path, 'r') as params_file:
        img_line = next(line for line in params_file if re.search(f"{image}=", line))
        img = img_line.split('=')[1].strip()

    registry = img.split('@')[0]

    # One `skopeo inspect` call parsed in Python. The original piped through
    # `jq` via shell=True with interpolated values — a shell-injection hazard.
    manifest = json.loads(
        subprocess.check_output(["skopeo", "inspect", f"docker://{img}"], text=True)
    )

    # OPENSHIFT_BUILD_NAME carries the source tag; builds are suffixed -amd64.
    build_name = next(
        (var.split("=", 1)[1] for var in manifest.get("Env", [])
         if var.startswith("OPENSHIFT_BUILD_NAME=")),
        "",
    )
    src_tag = build_name.replace('-amd64', '')

    # Tag naming differs between `main` and release branches.
    if RELEASE_VERSION_N == "":
        tag_regex = re.compile(f"{src_tag}-(\\d+-)?{HASH_N}")
    else:
        tag_regex = re.compile(f"{src_tag}-{RELEASE_VERSION_N}-\\d+-{HASH_N}")

    latest_tag = next(
        (tag for tag in manifest.get("RepoTags", []) if tag_regex.search(tag)), None
    )
    if latest_tag is None:
        # No build for this commit hash was pushed yet. (The original jq
        # pipeline yielded the string "null" here and crashed on the next
        # inspect call.)
        return

    tag_manifest = json.loads(
        subprocess.check_output(
            ["skopeo", "inspect", f"docker://{registry}:{latest_tag}"], text=True
        )
    )
    digest = tag_manifest.get("Digest", "")
    if not digest:
        return

    output = f"{registry}@{digest}"
    sha_ = output.split(":")[1]

    url = f"https://quay.io/api/v1/repository/opendatahub/workbench-images/manifest/sha256:{sha_}/security"
    # SECURITY: the original revision hard-coded a Quay bearer token here.
    # A credential committed to a repository must be treated as leaked and
    # revoked; supply a fresh token through the environment instead.
    headers = {"X-Requested-With": "XMLHttpRequest"}
    token = os.environ.get("QUAY_BEARER_TOKEN", "")
    if token:
        headers["Authorization"] = f"Bearer {token}"

    response = requests.get(url, headers=headers)
    data = response.json()

    # NOTE(review): assumes the response always carries data.Layer.Features;
    # features with no findings may omit "Vulnerabilities", hence the .get().
    vulnerabilities = []
    for feature in data['data']['Layer']['Features']:
        vulnerabilities.extend(feature.get('Vulnerabilities') or [])

    severity_counts = Counter(
        entry.get("Severity", "Unknown") for entry in vulnerabilities
    )

    branch_dictionary[latest_tag] = {'sha': digest}
    for severity, count in severity_counts.items():
        branch_dictionary[latest_tag][severity] = count

    # Rewrite the env file in place, pinning this image to the new digest.
    for line in fileinput.input(commit_id_path, inplace=True):
        if line.startswith(f"{image}="):
            line = f"{image}={output}\n"
        print(line, end="")
||
def _render_rows():
    """Render one markdown table row per scanned tag currently held in the
    module-level ``branch_dictionary`` (populated by ``process_image``)."""
    rows = ""
    for tag, info in branch_dictionary.items():
        rows += (
            f"| [{tag}](https://quay.io/repository/opendatahub/workbench-images/"
            f"manifest/{info['sha']}?tab=vulnerabilities) |"
        )
        for severity in ['Medium', 'Low', 'Unknown', 'High', 'Critical']:
            rows += f" {info.get(severity, 0)} |"  # 0 when severity absent
        rows += "\n"
    return rows


# --- Branch `main` ---------------------------------------------------------
LATEST_MAIN_COMMIT = os.environ['LATEST_MAIN_COMMIT']

for image in IMAGES_MAIN:
    process_image(image, commit_id_path, "", LATEST_MAIN_COMMIT)
branch_main_data = _render_rows()
branch_dictionary = {}  # reset for the next pass

# --- Branch N --------------------------------------------------------------
RELEASE_VERSION_N = os.environ['RELEASE_VERSION_N']
HASH_N = os.environ['HASH_N']

for image in IMAGES:
    process_image(image, commit_id_path, RELEASE_VERSION_N, HASH_N)
branch_n_data = _render_rows()
branch_dictionary = {}  # reset for the next pass

# --- Branch N - 1 ----------------------------------------------------------
RELEASE_VERSION_N_1 = os.environ['RELEASE_VERSION_N_1']
HASH_N_1 = os.environ['HASH_N_1']

for image in IMAGES_N_1:
    process_image(image, commit_id_path, RELEASE_VERSION_N_1, HASH_N_1)
branch_n_1_data = _render_rows()

today = date.today()
d2 = today.strftime("%B %d, %Y")  # e.g. "January 05, 2024"

markdown_content = """# Security Scan Results
Date: {todays_date}
# Branch main
| Image Name | Medium | Low | Unknown | High | Critical |
|------------|-------|-----|---------|------|------|
{branch_main}
# Branch N
| Image Name | Medium | Low | Unknown | High | Critical |
|------------|-------|-----|---------|------|------|
{branch_n}
# Branch N - 1
| Image Name | Medium | Low | Unknown | High | Critical |
|------------|-------|-----|---------|------|------|
{branch_n_1}
"""

final_markdown = markdown_content.format(
    todays_date=d2,
    branch_main=branch_main_data,
    branch_n=branch_n_data,
    branch_n_1=branch_n_1_data,
)

# Write the report that the pull request publishes.
with open("ci/security-scan/security_scan_results.md", "w") as markdown_file:
    markdown_file.write(final_markdown)
Oops, something went wrong.