Skip to content

WIP: testing PR, ignore me for now #1004

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 7 commits into
base: main
Choose a base branch
from
Draft
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
70 changes: 70 additions & 0 deletions .tekton/jupyter-minimal-ubi9-python-3-11-pull-request.yaml
Original file line number Diff line number Diff line change
@@ -608,6 +608,76 @@ spec:
operator: in
values:
- "false"
# Runs the software-version check script from the freshly built image's source
# revision against the image produced by build-image-index.
# NOTE(review): the scraped diff lost the original indentation — re-indented to
# standard Tekton pipeline-task layout; verify nesting against the real file.
- name: check-image-software
  params:
    - name: IMAGE_URL
      value: $(tasks.build-image-index.results.IMAGE_URL)
    # - name: PLATFORM
    #   value: $(tasks.build-image-index.results.IMAGE_URL)
    - name: CHAINS-GIT_URL
      value: $(tasks.clone-repository.results.url)
    - name: CHAINS-GIT_COMMIT
      value: $(tasks.clone-repository.results.commit)
    - name: COSIGN_VERSION
      value: v2.4.3
  runAfter:
    # - show-sbom
    - build-image-index
  # taskRef:
  #   params:
  #     - name: name
  #       value: show-sbom
  #     - name: bundle
  #       value: quay.io/konflux-ci/tekton-catalog/task-show-sbom:0.1@sha256:04f15cbce548e1db7770eee3f155ccb2cc0140a6c371dc67e9a34d83673ea0c0
  #     - name: kind
  #       value: task
  #   resolver: bundles
  taskSpec:
    params:
      - name: IMAGE_URL
      # - name: PLATFORM
      - name: CHAINS-GIT_URL
      - name: CHAINS-GIT_COMMIT
      - name: COSIGN_VERSION
    results:
      # NOTE(review): CHECK_OUTPUT is declared but the script below never
      # writes $(results.CHECK_OUTPUT.path) — TODO populate or drop it.
      - name: CHECK_OUTPUT
        description: Check output
    steps:
      - name: check-image-software
        image: registry.redhat.io/openshift4/ose-cli:latest
        env:
          - name: IMAGE_URL
            value: $(params.IMAGE_URL)
          # - name: PLATFORM
          #   value: $(params.PLATFORM)
          - name: GIT_URL
            value: $(params.CHAINS-GIT_URL)
          - name: GIT_COMMIT
            value: $(params.CHAINS-GIT_COMMIT)
          - name: COSIGN_VERSION
            value: $(params.COSIGN_VERSION)
        script: |
          #!/bin/bash
          env

          set -x
          REPO_SPACE=$(echo "${GIT_URL}" | sed 's#.*github.com/\(.*\)#\1#')
          FILE_NAME="konflux-check-software.sh"
          FILE_PATH="ci/${FILE_NAME}"

          #wget --progress=bar "https://raw.githubusercontent.com/${REPO_SPACE}/${GIT_COMMIT}/${FILE_PATH}"
          # BUG FIX: '--branch "GIT_COMMIT"' passed the literal string instead of
          # expanding the variable; also, git's --branch only accepts branch/tag
          # names, not commit SHAs, so clone first and check the commit out.
          git clone -- "${GIT_URL}" notebooks
          cd notebooks || exit 1
          git checkout "${GIT_COMMIT}"
          #chmod a+x "${FILE_NAME}"
          set +x

          wget --progress=bar --output-document=cosign "https://github.com/sigstore/cosign/releases/download/${COSIGN_VERSION}/cosign-linux-amd64"
          chmod a+x cosign
          export PATH="${PWD}:${PATH}"

          dnf install -y jq skopeo

          "./${FILE_PATH}"
    workspaces:
      - name: git-auth
        optional: true
70 changes: 70 additions & 0 deletions .tekton/jupyter-minimal-ubi9-python-3-11-push.yaml
Original file line number Diff line number Diff line change
@@ -604,6 +604,76 @@ spec:
operator: in
values:
- "false"
# Runs the software-version check script from the freshly built image's source
# revision against the image produced by build-image-index.
# NOTE(review): the scraped diff lost the original indentation — re-indented to
# standard Tekton pipeline-task layout; verify nesting against the real file.
- name: check-image-software
  params:
    - name: IMAGE_URL
      value: $(tasks.build-image-index.results.IMAGE_URL)
    # - name: PLATFORM
    #   value: $(tasks.build-image-index.results.IMAGE_URL)
    - name: CHAINS-GIT_URL
      value: $(tasks.clone-repository.results.url)
    - name: CHAINS-GIT_COMMIT
      value: $(tasks.clone-repository.results.commit)
    - name: COSIGN_VERSION
      value: v2.4.3
  runAfter:
    # - show-sbom
    - build-image-index
  # taskRef:
  #   params:
  #     - name: name
  #       value: show-sbom
  #     - name: bundle
  #       value: quay.io/konflux-ci/tekton-catalog/task-show-sbom:0.1@sha256:04f15cbce548e1db7770eee3f155ccb2cc0140a6c371dc67e9a34d83673ea0c0
  #     - name: kind
  #       value: task
  #   resolver: bundles
  taskSpec:
    params:
      - name: IMAGE_URL
      # - name: PLATFORM
      - name: CHAINS-GIT_URL
      - name: CHAINS-GIT_COMMIT
      - name: COSIGN_VERSION
    results:
      # NOTE(review): CHECK_OUTPUT is declared but the script below never
      # writes $(results.CHECK_OUTPUT.path) — TODO populate or drop it.
      - name: CHECK_OUTPUT
        description: Check output
    steps:
      - name: check-image-software
        image: registry.redhat.io/openshift4/ose-cli:latest
        env:
          - name: IMAGE_URL
            value: $(params.IMAGE_URL)
          # - name: PLATFORM
          #   value: $(params.PLATFORM)
          - name: GIT_URL
            value: $(params.CHAINS-GIT_URL)
          - name: GIT_COMMIT
            value: $(params.CHAINS-GIT_COMMIT)
          - name: COSIGN_VERSION
            value: $(params.COSIGN_VERSION)
        script: |
          #!/bin/bash
          env

          set -x
          REPO_SPACE=$(echo "${GIT_URL}" | sed 's#.*github.com/\(.*\)#\1#')
          FILE_NAME="konflux-check-software.sh"
          FILE_PATH="ci/${FILE_NAME}"

          #wget --progress=bar "https://raw.githubusercontent.com/${REPO_SPACE}/${GIT_COMMIT}/${FILE_PATH}"
          # BUG FIX: '--branch "GIT_COMMIT"' passed the literal string instead of
          # expanding the variable; also, git's --branch only accepts branch/tag
          # names, not commit SHAs, so clone first and check the commit out.
          git clone -- "${GIT_URL}" notebooks
          cd notebooks || exit 1
          git checkout "${GIT_COMMIT}"
          #chmod a+x "${FILE_NAME}"
          set +x

          wget --progress=bar --output-document=cosign "https://github.com/sigstore/cosign/releases/download/${COSIGN_VERSION}/cosign-linux-amd64"
          chmod a+x cosign
          export PATH="${PWD}:${PATH}"

          dnf install -y jq skopeo

          "./${FILE_PATH}"
    workspaces:
      - name: git-auth
        optional: true
409 changes: 330 additions & 79 deletions ci/check-software-versions.py
Original file line number Diff line number Diff line change
@@ -40,6 +40,10 @@ class AnnotationType(Enum):
prune_podman_data = False


def raise_exception(error_msg):
    """Log ``error_msg`` at error level, then raise it as a generic ``Exception``.

    Helper used to convert failure conditions into exceptions instead of
    non-zero return codes.

    Args:
        error_msg: Human-readable description of the failure.

    Raises:
        Exception: always, carrying ``error_msg``.
    """
    log.error(error_msg)
    raise Exception(error_msg)

def find_imagestream_files(directory="."):
"""Finds all ImageStream YAML files in the given directory and its subdirectories."""

@@ -122,19 +126,15 @@ def stop_and_remove_container(container_id):
"""Stops and removes a Podman container."""

if not container_id:
log.error("Given undefined value in 'container_id' argument!")
return 1
raise_exception("Given undefined value in 'container_id' argument!")
try:
subprocess.run(["podman", "stop", container_id], check=True)
subprocess.run(["podman", "rm", container_id], check=True)
if prune_podman_data:
subprocess.run(["podman", "system", "prune", "--all", "--force"], check=True)
log.info(f"Container {container_id} stopped and removed.")
except (subprocess.CalledProcessError, Exception) as e:
log.error(f"Error stopping/removing container '{container_id}': {e}")
return 1

return 0
raise_exception(f"Error stopping/removing container '{container_id}': {e}")


def parse_json_string(json_string):
@@ -143,19 +143,87 @@ def parse_json_string(json_string):
try:
return json.loads(json_string)
except (json.JSONDecodeError, Exception) as e:
log.error(f"Error parsing JSON: {e}")
return None
raise_exception(f"Error parsing JSON: {e}")


import subprocess
import time
import sys
import os

def download_sbom_with_retry(platform_arg: str, image_url: str, sbom: str):
    """Download the SBOM for an image via cosign, retrying on failure.

    Args:
        platform_arg: The platform argument for the cosign command.
            NOTE(review): currently unused — the flag is commented out below.
        image_url: The URL of the image to download the SBOM for.
        sbom: The path to the file where the SBOM should be saved.

    Raises:
        Exception: (via raise_exception) when all download attempts fail.
    """
    # TODO improve by ./cosign tree image and check for the "SBOMs" string - if present, the sboms is there, if missing it's not there
    # max_try = 5
    max_try = 1  # WIP: retries temporarily reduced to a single attempt
    wait_sec = 2
    status = -1
    err_file = "err"  # Temporary file to store stderr
    # command_bin = "cosign"
    command_bin = "/tmp/cosign-linux-amd64"  # TODO confirm hard-coded path vs. a PATH lookup

    for run in range(1, max_try + 1):
        status = 0
        command = [
            command_bin,
            "download",
            "sbom",
            # platform_arg,
            image_url,
        ]

        try:
            with open(sbom, "w") as outfile, open(err_file, "w") as errfile:
                result = subprocess.run(
                    command,
                    stdout=outfile,
                    stderr=errfile,
                    check=False,  # Don't raise an exception on non-zero exit code
                )
            status = result.returncode
        except FileNotFoundError:
            # NOTE(review): this silently returns None, so the caller cannot
            # distinguish "binary missing" from success — consider raising.
            print(
                f"Error: The '{command_bin}' command was not found. "
                "Make sure it's in your PATH or the current directory.",
                file=sys.stderr,
            )
            return

        if status == 0:
            break
        print(f"Attempt {run} failed with status {status}. Retrying in {wait_sec} seconds...", file=sys.stderr)
        time.sleep(wait_sec)

    if status != 0:
        print(f"Failed to get SBOM after {max_try} tries", file=sys.stderr)
        try:
            with open(err_file, "r") as f:
                print(f.read(), file=sys.stderr)
        except FileNotFoundError:
            print(f"Error file '{err_file}' not found.", file=sys.stderr)
        finally:
            # BUG FIX: was an f-string with no placeholders.
            raise_exception("SBOM download failed!")
    else:
        print(f"Successfully downloaded SBOM to: {sbom}")

    # Clean up the temporary error file. Only reached on success — the failure
    # path raises above, leaving 'err' behind for inspection.
    if os.path.exists(err_file):
        os.remove(err_file)


def process_dependency_item(item, container_id, annotation_type):
"""Processes a single item (dictionary) from the JSON data."""

name, version = item.get("name"), item.get("version")
if not name or not version:
log.error(f"Missing name or version in item: {item}")
return 1
raise_exception(f"Missing name or version in item: {item}")

log.info(f"Checking {name} (version {version}) in container...")
log.info(f"Checking '{name}' (version '{version}') in container...")

command_mapping = {
"PyTorch": ["/bin/bash", "-c", "pip show torch | grep 'Version: '"],
@@ -184,123 +252,300 @@ def process_dependency_item(item, container_id, annotation_type):
if output and version.lstrip("v") in output:
log.info(f"{name} version check passed.")
else:
log.error(f"{name} version check failed. Expected '{version}', found '{output}'.")
return 1
raise_exception(f"{name} version check failed. Expected '{version}', found '{output}'.")

return 0

def check_sbom_available(image):
    """Report whether an SBOM can be fetched for ``image``.

    TODO: stub — currently always returns True regardless of ``image``;
    implement the real probe (e.g. inspect ``cosign tree`` output).
    """
    # TODO
    return True

def process_tag(tag):
ret_code = 0

tag_annotations = tag.get("annotations", {})
def load_json_file(filepath):
"""
Loads data from a JSON file.
if "name" not in tag:
log.error(f"Missing 'name' field for {tag}!")
return 1
Args:
filepath (str): The path to the JSON file.
log.info(f"Processing tag: {tag['name']}.")
outdated_annotation = "opendatahub.io/image-tag-outdated"
if tag_annotations.get(outdated_annotation) == "true":
log.info("Skipping processing of this tag as it is marked as outdated.")
print_delimiter()
return 0
if "from" not in tag or "name" not in tag["from"]:
log.error(f"Missing 'from.name' in tag {tag['name']}")
return 1
Returns:
dict or list: The data loaded from the JSON file,
or None if an error occurs.
"""
try:
with open(filepath, 'r') as f:
data = json.load(f)
return data
except FileNotFoundError:
print(f"Error: File not found at {filepath}")
return None
except json.JSONDecodeError:
print(f"Error: Could not decode JSON from {filepath}")
return None
except Exception as e:
print(f"An unexpected error occurred: {e}")
return None

image_ref = tag["from"]["name"]
image_var = extract_variable(image_ref)
image_val = get_variable_value(image_var)
log.debug(f"Retrieved image link: '{image_val}'")

if not image_val:
log.error(f"Failed to parse image value reference pointing by '{image_ref}'!")
return 1
def find_item_in_array_by_name(json_data, array_key, target_name):
"""
Finds an item in a JSON array (list of dictionaries) by matching a 'name' value.
container_id = run_podman_container(image_var, image_val)
if not container_id:
log.error(f"Failed to start a container from image '{image_val}' for the '{image_ref}' tag!")
return 1
Args:
json_data (dict or list): The loaded JSON data.
array_key (str): The key in json_data that holds the array.
target_name (str): The value of the 'name' key to search for.
Returns:
dict or None: The dictionary item if found, otherwise None.
"""
if isinstance(json_data, dict) and array_key in json_data:
data_array = json_data[array_key]
if isinstance(data_array, list):
for item in data_array:
# Check if the item is a dictionary and has a 'name' key
if isinstance(item, dict) and 'name' in item:
if item['name'] == target_name:
return item # Return the first matching item
return None # Return None if the array or item is not found


def check_sbom_item(item, sbom_data):
    """Check one software record from the ImageStream annotations against SBOM data.

    Args:
        item: Dict with at least 'name' and 'version' keys (one entry of the
            notebook software / python-dependencies annotation).
        sbom_data: Parsed SBOM document; packages are looked up in its
            'packages' array via find_item_in_array_by_name.

    Raises:
        Exception: (via raise_exception) when the item is malformed, the package
            is missing from the SBOM, or the versions do not match.
    """
    name, version = item.get("name"), item.get("version")
    if not name or not version:
        raise_exception(f"Missing name or version in item: {item}")

    log.info(f"Checking '{name}' (version '{version}') in given SBOM data.")

    # Maps annotation names to [SBOM package name, expected version substring].
    # NOTE(review): the "Python" entry hard-codes 3.11 regardless of the
    # annotation's version value — TODO confirm this is intended.
    item_mapping = {
        "Python": ["python3.11", "3.11"],
        "ROCm-TensorFlow": ["tensorflow-rocm", version],
        # "PyTorch": ["/bin/bash", "-c", "pip show torch | grep 'Version: '"],
        # "ROCm": ["/bin/bash", "-c", "rpm -q --queryformat '%{VERSION}\n' rocm-core"],
        # "ROCm-PyTorch": ["/bin/bash", "-c", "pip show torch | grep 'Version: ' | grep rocm"],
        # "TensorFlow": ["/bin/bash", "-c", "pip show tensorflow | grep 'Version: '"],
        # "R": ["/bin/bash", "-c", "R --version"],
        # "rstudio-server": ["/bin/bash", "-c", "rpm -q --queryformat '%{VERSION}\n' rstudio-server"],
        # "Sklearn-onnx": ["/bin/bash", "-c", "pip show skl2onnx | grep 'Version: '"],
        # "MySQL Connector/Python": ["/bin/bash", "-c", "pip show mysql-connector-python | grep 'Version: '"],
        # "Nvidia-CUDA-CU12-Bundle": ["/bin/bash", "-c", "pip show nvidia-cuda-runtime-cu12 | grep 'Version: '"],
        # "CUDA": ["/bin/bash", "-c", "nvcc --version"],
    }

    # Anything unmapped falls back to the lower-cased annotation name with the
    # annotation's own version string.
    mapped_item = item_mapping.get(name)
    if not mapped_item:
        mapped_item = [name.lower(), version]

    sbom_item = find_item_in_array_by_name(sbom_data, "packages", mapped_item[0])

    # FIX: identity comparison instead of `== None` (PEP 8).
    if sbom_item is None:
        raise_exception(f"Can't find the package record for the '{mapped_item[0]}' in the SBOM file!")

    # Substring match tolerates build/release suffixes in the SBOM version.
    sbom_version = sbom_item["versionInfo"]
    if mapped_item[1] not in sbom_version:
        raise_exception(f"The expected version '{mapped_item[1]}' doesn't match the data in the SBOM file '{sbom_version}'!")


def check_against_image(tag, tag_annotations, tag_name, image):
ntb_sw_annotation = "opendatahub.io/notebook-software"
python_dep_annotation = "opendatahub.io/notebook-python-dependencies"

try:
software = tag_annotations.get(ntb_sw_annotation)
if not software:
log.error(f"Missing '{ntb_sw_annotation}' in ImageStream tag '{tag}'!")
return 1
raise_exception(f"Missing '{ntb_sw_annotation}' in ImageStream tag '{tag}'!")

python_deps = tag_annotations.get(python_dep_annotation)
if not python_deps:
log.error(f"Missing '{python_dep_annotation}' in ImageStream tag '{tag}'!")
return 1

for item in parse_json_string(software) or []:
if process_dependency_item(item, container_id, AnnotationType.SOFTWARE) != 0:
log.error(f"Failed check for the '{image_ref}' tag!")
ret_code = 1

for item in parse_json_string(python_deps) or []:
if process_dependency_item(item, container_id, AnnotationType.PYTHON_DEPS) != 0:
log.error(f"Failed check for the '{image_ref}' tag!")
ret_code = 1
raise_exception(f"Missing '{python_dep_annotation}' in ImageStream tag '{tag}'!")
finally:
print_delimiter()

# Check if the sbom for the image is available
sbom_downloaded = False
output_file = "sbom.json"
if check_sbom_available:
log.info(f"SBOM for image '{image}' is available.")
platform = "--platform=linux/amd64"
download_sbom_with_retry(platform, image, output_file)
sbom_downloaded = True

container_id = 0
if sbom_downloaded == False:
# SBOM not available -> gather data directly from the running image
container_id = run_podman_container(f"{tag_name}-container", image)
if not container_id:
raise_exception(f"Failed to start a container from image '{image}' for the '{tag_name}' tag!")

errors = []
try:
if sbom_downloaded == True:
sbom_data = load_json_file(output_file)
if sbom_data:
print(f"Successfully loaded JSON data from {output_file}")
else:
raise_exception(f"Can't load JSON data from {output_file}!")

try:
for item in parse_json_string(software) or []:
if sbom_downloaded == True:
check_sbom_item(item, sbom_data)
else:
process_dependency_item(item, container_id, AnnotationType.SOFTWARE)
except Exception as e:
log.error(f"Failed check for the '{tag_name}' tag: {str(e)}")
errors.append(str(e))

try:
for item in parse_json_string(python_deps) or []:
if sbom_downloaded == True:
check_sbom_item(item, sbom_data)
else:
process_dependency_item(item, container_id, AnnotationType.PYTHON_DEPS)
except Exception as e:
log.error(f"Failed check for the '{tag_name}' tag: {str(e)}")
errors.append(str(e))
finally:
if stop_and_remove_container(container_id) != 0:
log.error(f"Failed to stop/remove the container '{container_id}' for the '{image_ref}' tag!")
print_delimiter()
return 1 # noqa: B012 `return` inside `finally` blocks cause exceptions to be silenced
print_delimiter()
if sbom_downloaded == False:
try:
stop_and_remove_container(container_id)
except Exception as e:
log.error(f"Failed to stop/remove the container '{container_id}' for the '{tag_name}' tag: {str(e)}")
errors.append(str(e))

if errors:
raise Exception(errors)

return ret_code

def process_tag(tag, image):
tag_annotations = tag.get("annotations", {})

if "name" not in tag:
raise_exception(f"Missing 'name' field for {tag}!")

def process_imagestream(imagestream):
log.info(f"Processing tag: {tag['name']}.")
outdated_annotation = "opendatahub.io/image-tag-outdated"
if tag_annotations.get(outdated_annotation) == "true":
log.info("Skipping processing of this tag as it is marked as outdated.")
print_delimiter()
return 0
if "from" not in tag or "name" not in tag["from"]:
raise_exception(f"Missing 'from.name' in tag {tag['name']}")

tag_name = tag["from"]["name"]
if (image == None):
image_var = extract_variable(tag_name)
image_val = get_variable_value(image_var)
log.debug(f"Retrieved image link: '{image_val}'")

if not image_val:
raise_exception(f"Failed to parse image value reference pointing by '{tag_name}'!")
else:
image_val = image
log.debug(f"Using the given image link: '{image_val}'")

# Now, with the image known and the tag with the manifest data, let's compare what is on the image
check_against_image(tag, tag_annotations, tag_name, image_val)


def process_imagestream(imagestream, image, given_tag):
"""Processes a single ImageStream file and check images that it is referencing."""

ret_code = 0
log.info(f"Processing ImageStream: {imagestream}.")

yaml_data = load_yaml(imagestream)
if not yaml_data or "spec" not in yaml_data or "tags" not in yaml_data["spec"]:
log.error(f"Invalid YAML in {imagestream} as ImageStream file!")
return 1
raise_exception(f"Invalid YAML content in {imagestream} as ImageStream file!")

tags = yaml_data["spec"]["tags"]

# Process each image version in the ImageStream:
for tag in yaml_data["spec"]["tags"]:
if process_tag(tag) != 0:
log.error(f"Failed to process tag {tag} in ImageStream {imagestream}!")
# Let's move on the next tag if any
ret_code = 1
errors = []
for tag in tags:
tag_name = tag["name"]
if (given_tag != None) and given_tag != tag_name:
log.info(f"Skipping the processing of the {tag_name}.")
continue

return ret_code
try:
process_tag(tag, image)
except Exception as e:
# We want to continue to process the next tag if possible
log.error(f"Failed to process tag {tag} in ImageStream '{imagestream}': {str(e)}")
# errors.append(f"{tag}:" + str(e))
errors.append(f"///:" + str(e))

if (len(errors) > 0):
raise Exception(errors)


def print_delimiter():
    """Log a horizontal rule followed by a blank line to separate log sections."""
    log.info("----------------------------------------------------------------------")
    log.info("")


def main():
def parse_arguments():
    """Parse the command-line options for the software-version check.

    Side effects:
        Sets the module-global ``prune_podman_data`` flag.

    Returns:
        tuple: (prune_podman_data, image, image_stream, tag) where the last
        three are either all set or all None; any other combination aborts
        via ``parser.error``.
    """
    parser = argparse.ArgumentParser(description="Process command-line arguments.")
    parser.add_argument(
        "-p",
        "--prune-podman-data",
        action="store_true",
        help="Prune Podman data after each image is processed. This is useful when running in GHA workers.",
    )
    explicit_group = parser.add_argument_group("Explicit image and manifest tag information")
    explicit_group.add_argument(
        "-i",
        "--image",
        type=str,
        help="Particular image to check.",
    )
    explicit_group.add_argument(
        "-s",
        "--image-stream",
        type=str,
        help="Particular ImageStream definition selected to check.",
    )
    explicit_group.add_argument(
        "-t",
        "--tag",
        type=str,
        help="Particular tag name to process from the given ImageStream.",
    )

    args = parser.parse_args()
    global prune_podman_data  # noqa: PLW0603 Using the global statement to update `prune_podman_data` is discouraged
    prune_podman_data = args.prune_podman_data

    image, image_stream, tag = args.image, args.image_stream, args.tag
    # The three "explicit" options only make sense together: reject any
    # partial combination up front.
    all_given = bool(image and image_stream and tag)
    none_given = not (image or image_stream or tag)
    if not (all_given or none_given):
        parser.error("The arguments --image, --image-stream, and --tag must be either all set or none of them should be set.")

    if image:
        print(f"Processing the explicitly given image and ImageStream/tag: {image}, {image_stream}, {tag}")
    else:
        print("Running the check against all ImageStreams we'll find.")

    return prune_podman_data, image, image_stream, tag


def main():
global prune_podman_data # noqa: PLW0603 Using the global statement to update `prune_podman_data` is discouraged
prune_podman_data, image, image_stream, tag = parse_arguments()


log.info(f"{prune_podman_data}, {tag}, {image}, {image_stream}")

ret_code = 0
log.info("Starting the check ImageStream software version references.")

imagestreams = find_imagestream_files()
log.info("Following list of ImageStream manifests has been found:")
if (image_stream == None):
imagestreams = find_imagestream_files()
else:
imagestreams = [image_stream]

log.info("Following list of ImageStream manifests will be processed:")
for imagestream in imagestreams:
log.info(imagestream)

@@ -310,16 +555,22 @@ def main():

print_delimiter()

errors = []
for imagestream in imagestreams:
if process_imagestream(imagestream) != 0:
log.error(f"Failed to process {imagestream} ImageStream manifest file!")
# Let's move on the next imagestream if any
ret_code = 1
continue
try:
process_imagestream(imagestream, image, tag)
except Exception as e:
log.error(f"Failed to process {imagestream} ImageStream manifest file: {str(e)}")
errors.append(f"ImageStream path: {imagestream} --- " + str(e))

print_delimiter()
log.info("Test results:")

if ret_code == 0:
if len(errors) == 0:
log.info("The software versions check in manifests was successful. Congrats! :)")
else:
for error in errors:
log.error(error)
log.error("The software version check failed, see errors above in the log for more information!")

sys.exit(ret_code)
104 changes: 104 additions & 0 deletions ci/konflux-check-software.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
#!/bin/bash
#
# TODO - description, prereq and usage etc...

download_sbom_with_retry() {
    # Download the SBOM for an image via cosign, retrying up to max_try times.
    #   $1: extra platform argument(s) for cosign (may be empty)
    #   $2: image URL to fetch the SBOM for
    #   $3: output file path the SBOM is appended to
    # Returns the exit status of the last cosign attempt.
    local status=-1
    local max_try=5
    local wait_sec=2

    local platform_arg="${1}"
    local image_url="${2}"
    local sbom="${3}"

    for run in $(seq 1 "${max_try}"); do
        status=0
        # platform_arg is intentionally unquoted: when empty it must expand to
        # no argument at all (quoting would pass an empty string to cosign).
        ./cosign download sbom ${platform_arg} "${image_url}" 2>>err 1>>"${sbom}"
        status=$?
        if [ "${status}" -eq 0 ]; then
            break
        fi
        sleep "${wait_sec}"
    done
    if [ "${status}" -ne 0 ]; then
        echo "Failed to get SBOM after ${max_try} tries" >&2
        cat err >&2
    fi
    # BUG FIX: propagate the result — previously the function returned the exit
    # status of the last command run (the successful 'cat'/'if'), so callers
    # always saw success even when every download attempt failed.
    return "${status}"
}

# Find all YAML files in the specified directory and select the one that matches the expected metadata.name value.
# Find all YAML files in the specified directory and select the one that matches the expected metadata.name value.
#   $1: directory to search
#   $2: expected name element; the manifest must be named "<name>-notebook"
# Prints the path of the first matching file (if any) to stdout.
select_manifest() {
    local yaml_directory="${1}"
    local expected_name="${2}"

    # BUG FIX: the predicates must be grouped. In the original
    # '-type f -name "*imagestream.yaml" -o -name "*.yml" -print0', find's
    # implicit AND binds tighter than -o, so -print0 applied only to the
    # "*.yml" branch (imagestream.yaml matches were never printed) and
    # -type f applied only to the first branch.
    find "${yaml_directory}" -type f \( -name "*imagestream.yaml" -o -name "*.yml" \) -print0 | while IFS= read -r -d $'\0' file; do
        # Check if the metadata.name field exists and contains the specified name element
        if yq '.metadata.name' "${file}" | grep -q "^${expected_name}-notebook$"; then
            echo "${file}"
            # NOTE(review): the while loop runs in a pipeline subshell, so this
            # 'return' only exits the subshell (stopping after the first match);
            # the function itself still returns normally — confirm intended.
            return
        fi
    done
}


process_the_software_versions() {
    # FIXME(review): unfinished WIP — the bare 'for' below is a shell syntax
    # error, and the body references variables never defined in this script
    # ('file', 'selected_fields') plus a helper 'extract_yaml_data' that does
    # not exist here. The 'sbom' parameter is also unused: the jq example
    # hard-codes ./sbom.json and the package name "boto3".
    local manifest_file="${1}"
    local sbom="${2}"

    for

    jq -r '.packages[] | select(.name == "boto3") | .versionInfo' ./sbom.json

    echo "Processing file: ${file}"
    echo "---"

    # Iterate over the selected fields and extract the data
    for field in "${selected_fields[@]}"; do
        echo " $field:"
        if value=$(extract_yaml_data "$file" "$field"); then
            echo " $value"
        else
            echo " (Not found or error)"
        fi
    done
    echo "---"
}

# --- Main --------------------------------------------------------------------
# Inspect the image manifest to decide whether IMAGE_URL is a multi-arch index
# and, if so, which "<os>-<arch>" suffix the per-arch SBOM image carries.
RAW_OUTPUT=$(skopeo inspect --no-tags --raw "docker://${IMAGE_URL}")
if [ "$(jq 'has("manifests")' <<< "${RAW_OUTPUT}")" == "true" ] ; then
    # Multi arch
    # NOTE(review): '.manifests[]' emits one value per manifest entry, so OS and
    # ARCH become multi-line strings on true multi-arch indexes — TODO confirm
    # whether a single platform should be selected here (e.g. '.manifests[0]').
    OS=$(jq -r '.manifests[].platform.os' <<< "${RAW_OUTPUT}")
    ARCH=$(jq -r '.manifests[].platform.architecture' <<< "${RAW_OUTPUT}")
    if test "${ARCH}" = "amd64"; then
        ARCH="x86-64"
    fi
    PLATFORM="${OS}-${ARCH}"
else
    PLATFORM=""
fi

RAW_OUTPUT_CONFIG=$(skopeo inspect --no-tags --config --raw "docker://${IMAGE_URL}")

# LABEL name="odh-notebook-jupyter-datascience-ubi9-python-3.11" \
# NOTE(review): OCI image configs expose labels under '.config.Labels';
# '.container_config' is a legacy Docker field — confirm against the actual
# skopeo output for these images.
LABEL_NAME=$(jq -r '.container_config.Labels.name' <<< "${RAW_OUTPUT_CONFIG}")
echo "Image label name: ${LABEL_NAME}"
# Filter the required value from the image label name
LABEL=$(echo "${LABEL_NAME}" | sed 's/odh-notebook-\(.*\)-ubi9.*/\1/')


MANIFEST_TO_PROCESS=$(select_manifest "manifests" "${LABEL}")

SBOM_FILE="./sbom.json"

if [ -z "${PLATFORM}" ] ; then
    # Single arch image: the SBOM lives directly at the image URL.
    # BUG FIX: this branch previously appended "-${PLATFORM}" even though
    # PLATFORM is empty here, producing a URL with a trailing '-'.
    download_sbom_with_retry "" "${IMAGE_URL}" "${SBOM_FILE}"
else
    # download_sbom_with_retry " --platform=${PLATFORM} " "${IMAGE_URL}"-
    download_sbom_with_retry "" "${IMAGE_URL}-${PLATFORM}" "${SBOM_FILE}"
fi


process_the_software_versions "${MANIFEST_TO_PROCESS}" "${SBOM_FILE}"
32 changes: 16 additions & 16 deletions jupyter/minimal/ubi9-python-3.11/Dockerfile.cpu
Original file line number Diff line number Diff line change
@@ -26,7 +26,7 @@ RUN curl -L https://mirror.openshift.com/pub/openshift-v4/$(uname -m)/clients/oc
####################
# jupyter-minimal #
####################
FROM base AS jupyter-minimal
FROM base AS jupyter-minimal

ARG JUPYTER_REUSABLE_UTILS=jupyter/utils
ARG MINIMAL_SOURCE_CODE=jupyter/minimal/ubi9-python-3.11
@@ -47,20 +47,20 @@ COPY ${JUPYTER_REUSABLE_UTILS} utils/

COPY ${MINIMAL_SOURCE_CODE}/Pipfile.lock ${MINIMAL_SOURCE_CODE}/start-notebook.sh ./

# Install Python dependencies from Pipfile.lock file
RUN echo "Installing softwares and packages" && \
micropipenv install && \
rm -f ./Pipfile.lock && \
# Disable announcement plugin of jupyterlab \
jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \
# Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \
sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \
# Fix permissions to support pip in Openshift environments \
chmod -R g+w /opt/app-root/lib/python3.11/site-packages && \
fix-permissions /opt/app-root -P && \
# Apply JupyterLab addons \
/opt/app-root/bin/utils/addons/apply.sh
# # Install Python dependencies from Pipfile.lock file
# RUN echo "Installing softwares and packages" && \
# micropipenv install && \
# rm -f ./Pipfile.lock && \
# # Disable announcement plugin of jupyterlab \
# jupyter labextension disable "@jupyterlab/apputils-extension:announcements" && \
# # Replace Notebook's launcher, "(ipykernel)" with Python's version 3.x.y \
# sed -i -e "s/Python.*/$(python --version | cut -d '.' -f-2)\",/" /opt/app-root/share/jupyter/kernels/python3/kernel.json && \
# # Fix permissions to support pip in Openshift environments \
# chmod -R g+w /opt/app-root/lib/python3.11/site-packages && \
# fix-permissions /opt/app-root -P && \
# # Apply JupyterLab addons \
# /opt/app-root/bin/utils/addons/apply.sh

WORKDIR /opt/app-root/src

ENTRYPOINT ["start-notebook.sh"]
ENTRYPOINT ["start-notebook.sh"]