Skip to content

Commit

Permalink
ci: add workflow to check for vulnerabilities in images
Browse files Browse the repository at this point in the history
  • Loading branch information
rkpattnaik780 committed Jan 2, 2024
1 parent 32f7670 commit c52c2bf
Show file tree
Hide file tree
Showing 7 changed files with 502 additions and 1 deletion.
135 changes: 135 additions & 0 deletions .github/workflows/sec-scan.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,135 @@
---
# The aim of this GitHub workflow is to update the `ci/security-scan/security_scan_results.md` with latest security scan results.
name: Update notebook image security reports
on:  # yamllint disable-line rule:truthy
  workflow_dispatch:
    inputs:
      branch:
        required: true
        description: "Provide the name of the branch you want to update ex main, vYYYYx etc: "
  # Put the scheduler on comment until automate the full release procedure
  # schedule:
  #   - cron: "0 0 * * 5"  # Scheduled every Friday

env:
  SEC_SCAN_BRANCH: sec-scan-${{ github.run_id }}
  BRANCH_NAME: ${{ github.event.inputs.branch || 'main' }}
  RELEASE_VERSION_N: 2023b
  RELEASE_VERSION_N_1: 2023a

jobs:
  initialize:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Install Skopeo CLI
        shell: bash
        run: |
          sudo apt-get -y update
          sudo apt-get -y install skopeo

      # Checkout the branch
      - name: Checkout branch
        uses: actions/checkout@v3
        with:
          ref: ${{ env.BRANCH_NAME }}

      # Create a new branch
      - name: Create a new branch
        run: |
          echo ${{ env.SEC_SCAN_BRANCH }}
          git checkout -b ${{ env.SEC_SCAN_BRANCH }}
          git push --set-upstream origin ${{ env.SEC_SCAN_BRANCH }}

  update-n-version:
    needs: [initialize]
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: Configure Git
        run: |
          git config --global user.email "github-actions[bot]@users.noreply.github.com"
          git config --global user.name "GitHub Actions"

      # Get the latest weekly build commit hash: https://github.com/opendatahub-io/notebooks/commits/2023b
      - name: Checkout upstream notebooks repo
        uses: actions/checkout@v3
        with:
          repository: opendatahub-io/notebooks.git
          ref: ${{ env.RELEASE_VERSION_N }}

      - name: Retrieve latest weekly commit hash from the release branch
        id: hash-n
        shell: bash
        run: |
          echo "HASH_N=$(git rev-parse --short HEAD)" >> ${GITHUB_OUTPUT}

      - name: Checkout "N - 1" branch
        uses: actions/checkout@v3
        with:
          repository: opendatahub-io/notebooks.git
          ref: ${{ env.RELEASE_VERSION_N_1 }}

      - name: Retrieve latest weekly commit hash from the release branch
        id: hash-n-1
        shell: bash
        run: |
          echo "HASH_N_1=$(git rev-parse --short HEAD)" >> ${GITHUB_OUTPUT}

      # Checkout the release branch to apply the updates
      - name: Checkout release branch
        uses: actions/checkout@v3
        with:
          ref: ${{ env.SEC_SCAN_BRANCH }}

      - name: setup python
        uses: actions/setup-python@v4
        with:
          python-version: '3.10'  # install the python version needed

      - name: install python packages
        run: |
          python -m pip install --upgrade pip
          pip install requests

      - name: execute py script  # run trial.py
        env:
          HASH_N: ${{ steps.hash-n.outputs.HASH_N }}
          RELEASE_VERSION_N: 2023b
          HASH_N_1: ${{ steps.hash-n-1.outputs.HASH_N_1 }}
          RELEASE_VERSION_N_1: 2023a
        run: make scan-image-vulnerabilities

      - name: Push the files
        run: |
          git fetch origin ${{ env.SEC_SCAN_BRANCH }} && git pull origin ${{ env.SEC_SCAN_BRANCH }} && git add . && git commit -m "Update security scans" && git push origin ${{ env.SEC_SCAN_BRANCH }}

  # Creates the Pull Request
  open-pull-request:
    needs: [update-n-version]
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
    steps:
      - name: Checkout repo
        uses: actions/checkout@v3

      - name: pull-request
        uses: repo-sync/pull-request@v2
        with:
          source_branch: ${{ env.SEC_SCAN_BRANCH }}
          destination_branch: ${{ env.BRANCH_NAME }}
          github_token: ${{ secrets.GH_TOKEN }}
          pr_label: "automated pr"
          pr_title: "[Security Scanner Action] Weekly update of security vulnerabilities reported by Quay"
          pr_body: |
            :rocket: This is an automated Pull Request.

            This PR updates:
            * `ci/security-scan/security_scan_results.md` file with the latest security vulnerabilities reported by Quay.
            * `ci/security-scan/weekly_commit_ids` with the latest updated SHA digests of the notebooks (N & N-1)

            Created by `/.github/workflows/sec-scan.yml`

            :exclamation: **IMPORTANT NOTE**: Remember to delete the `${{ env.SEC_SCAN_BRANCH }}` branch after merging the changes
7 changes: 6 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -468,4 +468,9 @@ refresh-pipfilelock-files:
cd runtimes/tensorflow/ubi8-python-3.8 && pipenv lock
cd runtimes/tensorflow/ubi9-python-3.9 && pipenv lock
cd base/c9s-python-3.9 && pipenv lock


# Run the Quay vulnerability scan script (invoked by .github/workflows/sec-scan.yml).
# For running manually, set the environment variables "RELEASE_VERSION_N", "HASH_N",
# "RELEASE_VERSION_N_1" and "HASH_N_1" (the script reads all four — see
# ci/security-scan/quay_security_analysis.py).
.PHONY: scan-image-vulnerabilities
scan-image-vulnerabilities:
	python ci/security-scan/quay_security_analysis.py
164 changes: 164 additions & 0 deletions ci/security-scan/quay_security_analysis.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,164 @@
import os
import subprocess
import re
from datetime import date
import requests
from collections import Counter
import fileinput

# Accumulates scan results per image tag: {tag: {"sha": digest, <Severity>: count, ...}}.
# Filled in by process_image(); reset to {} between the N and N-1 scan passes.
my_dictionary = {}

# Env file mapping image names to pinned "registry@digest" references;
# read and rewritten in place by process_image().
commit_id_path = "ci/security-scan/weekly_commit_ids.env"


# Image keys (as they appear in weekly_commit_ids.env) for the current
# release branch ("N").
IMAGES = [
    "odh-minimal-notebook-image-n",
    "odh-runtime-minimal-notebook-image-n",
    "odh-runtime-data-science-notebook-image-n",
    "odh-minimal-gpu-notebook-image-n",
    "odh-pytorch-gpu-notebook-image-n",
    "odh-generic-data-science-notebook-image-n",
    "odh-tensorflow-gpu-notebook-image-n",
    "odh-trustyai-notebook-image-n",
    "odh-habana-notebook-image-n",
    "odh-codeserver-notebook-n",
    "odh-rstudio-notebook-n",
    "odh-rstudio-gpu-notebook-n"
]

# Image keys for the previous release branch ("N - 1"). Note this list is a
# subset of IMAGES — not every image is published for the older release.
IMAGES_N_1 = [
    "odh-minimal-notebook-image-n-1",
    "odh-minimal-gpu-notebook-image-n-1",
    "odh-pytorch-gpu-notebook-image-n-1",
    "odh-runtime-data-science-notebook-image-n-1",
    "odh-generic-data-science-notebook-image-n-1",
    "odh-tensorflow-gpu-notebook-image-n-1",
    "odh-trustyai-notebook-image-n-1",
    "odh-codeserver-notebook-n-1",
    "odh-rstudio-notebook-n-1",
    "odh-rstudio-gpu-notebook-n-1"
]

def process_image(image, commit_id_path, RELEASE_VERSION_N, HASH_N):
    """Scan one image and record its vulnerability counts.

    Looks up ``image`` in the env file at ``commit_id_path``, resolves the
    newest repository tag matching the release/commit-hash pattern via
    skopeo + jq, queries the Quay security endpoint for that manifest's
    digest, stores per-severity vulnerability counts in the module-level
    ``my_dictionary`` (keyed by the resolved tag), and rewrites the image's
    line in ``commit_id_path`` with the new pinned ``registry@digest``.

    Parameters:
        image: key of the image in the env file (e.g. "odh-...-notebook-image-n").
        commit_id_path: path to the weekly_commit_ids.env file.
        RELEASE_VERSION_N: release label expected in the tag (e.g. "2023b");
            an empty string relaxes the pattern to tag-hash only.
        HASH_N: short git commit hash expected at the end of the tag.

    Returns:
        None. Skips (with a warning, without raising) when the image line,
        a matching tag, or a digest cannot be found.

    Raises:
        subprocess.CalledProcessError: if a skopeo/jq pipeline fails.
        requests.HTTPError: if the Quay API returns an error status.
    """
    with open(commit_id_path, 'r') as params_file:
        # Find the "<image>=<registry>@<digest>" line; None if absent.
        img_line = next((line for line in params_file if re.search(f"{image}=", line)), None)
    if img_line is None:
        print(f"WARNING: no entry for {image} in {commit_id_path}; skipping")
        return
    img = img_line.split('=')[1].strip()

    registry = img.split('@')[0]

    # OPENSHIFT_BUILD_NAME in the image env gives the source build tag;
    # the "-amd64" arch suffix is stripped to get the base tag.
    src_tag_cmd = f'skopeo inspect docker://{img} | jq \'.Env[] | select(startswith("OPENSHIFT_BUILD_NAME=")) | split("=")[1]\''
    src_tag = subprocess.check_output(src_tag_cmd, shell=True, text=True).strip().strip('"').replace('-amd64', '')

    # Tag pattern: "<src_tag>-<release>-<build#>-<hash>"; when no release
    # label is given, the release/build segment is optional.
    if RELEASE_VERSION_N == "":
        regex = f"{src_tag}-(\\d+-)?{HASH_N}"
    else:
        regex = f"{src_tag}-{RELEASE_VERSION_N}-\\d+-{HASH_N}"

    latest_tag_cmd = f'skopeo inspect docker://{img} | jq -r --arg regex "{regex}" \'.RepoTags | map(select(. | test($regex))) | .[0]\''
    latest_tag = subprocess.check_output(latest_tag_cmd, shell=True, text=True).strip()
    # jq -r prints the literal string "null" when no tag matches the regex.
    if not latest_tag or latest_tag == "null":
        print(f"WARNING: no tag matching {regex} for {image}; skipping")
        return

    digest_cmd = f'skopeo inspect docker://{registry}:{latest_tag} | jq .Digest | tr -d \'"\''
    digest = subprocess.check_output(digest_cmd, shell=True, text=True).strip()

    if not digest:
        return

    output = f"{registry}@{digest}"

    sha_ = output.split(":")[1]

    url = f"https://quay.io/api/v1/repository/opendatahub/workbench-images/manifest/sha256:{sha_}/security"
    # SECURITY: a bearer token was previously hard-coded here — a leaked
    # credential committed to VCS. It must come from the environment instead,
    # and the old token must be revoked in Quay.
    headers = {
        "X-Requested-With": "XMLHttpRequest",
        "Authorization": f"Bearer {os.environ.get('QUAY_API_TOKEN', '')}"
    }

    response = requests.get(url, headers=headers)
    response.raise_for_status()  # fail loudly on auth/availability errors
    data = response.json()

    vulnerabilities = []
    for feature in data['data']['Layer']['Features']:
        # 'Vulnerabilities' may be absent (or null) for clean features;
        # the original indexed it directly and could raise KeyError.
        vulnerabilities.extend(feature.get('Vulnerabilities') or [])

    severity_counts = Counter(entry.get("Severity", "Unknown") for entry in vulnerabilities)

    my_dictionary[latest_tag] = {'sha': digest}
    for severity, count in severity_counts.items():
        my_dictionary[latest_tag][severity] = count

    # Rewrite the image's pinned reference in the env file in place.
    for line in fileinput.input(commit_id_path, inplace=True):
        if line.startswith(f"{image}="):
            line = f"{image}={output}\n"
        print(line, end="")


def _format_severity_rows(results):
    """Render one markdown table row per scanned tag in *results*.

    Each row links to the Quay vulnerabilities tab for the tag's digest and
    lists counts in the fixed column order Medium/Low/Unknown/High/Critical
    (0 when a severity was not reported for that tag).
    """
    rows = ""
    for tag, counts in results.items():
        rows += (
            f"| [{tag}](https://quay.io/repository/opendatahub/"
            f"workbench-images/manifest/{counts['sha']}?tab=vulnerabilities) |"
        )
        for severity in ('Medium', 'Low', 'Unknown', 'High', 'Critical'):
            rows += f" {counts.get(severity, 0)} |"
        rows += "\n"
    return rows


# --- Pass 1: current release ("N") — scan images and update pinned digests ---
RELEASE_VERSION_N = os.environ['RELEASE_VERSION_N']
HASH_N = os.environ['HASH_N']

for image in IMAGES:
    process_image(image, commit_id_path, RELEASE_VERSION_N, HASH_N)

today = date.today()
d2 = today.strftime("%B %d, %Y")  # e.g. "January 02, 2024" — report header date

formatted_data = _format_severity_rows(my_dictionary)

# Reset the accumulator before scanning the previous-release images.
my_dictionary = {}

# --- Pass 2: previous release ("N - 1") ---
RELEASE_VERSION_N_1 = os.environ['RELEASE_VERSION_N_1']
HASH_N_1 = os.environ['HASH_N_1']

for image in IMAGES_N_1:
    process_image(image, commit_id_path, RELEASE_VERSION_N_1, HASH_N_1)

branch_n_data = _format_severity_rows(my_dictionary)

markdown_content = """# Security Scan Results
Date: {todays_date}
# Branch N
| Image Name | Medium | Low | Unknown | High | Critical |
|------------|-------|-----|---------|------|------|
{table_content}
# Branch N - 1
| Image Name | Medium | Low | Unknown | High | Critical |
|------------|-------|-----|---------|------|------|
{branch_n}
"""

final_markdown = markdown_content.format(table_content=formatted_data, todays_date=d2, branch_n=branch_n_data)

# Writing to the markdown file
with open("ci/security-scan/security_scan_results.md", "w") as markdown_file:
    markdown_file.write(final_markdown)
Loading

0 comments on commit c52c2bf

Please sign in to comment.