Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,19 @@ RUN apt-get update \
&& apt-get autoremove --yes \
&& rm -rf /var/lib/{apt,dpkg,cache,log}/

# Install the Rosa CLI
RUN curl -L https://mirror.openshift.com/pub/openshift-v4/clients/rosa/latest/rosa-linux.tar.gz --output /tmp/rosa-linux.tar.gz \
&& tar xvf /tmp/rosa-linux.tar.gz --no-same-owner \
&& mv rosa /usr/bin/rosa \
&& chmod +x /usr/bin/rosa \
&& rosa version

# Install the OpenShift CLI (OC)
RUN curl -L https://mirror.openshift.com/pub/openshift-v4/x86_64/clients/ocp/stable/openshift-client-linux.tar.gz --output /tmp/openshift-client-linux.tar.gz \
&& tar xvf /tmp/openshift-client-linux.tar.gz --no-same-owner \
&& mv oc /usr/bin/oc \
&& chmod +x /usr/bin/oc
Comment on lines +27 to +31
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

Add verification command for OC CLI installation.

The Rosa CLI installation includes a verification step (rosa version), but the OC CLI installation lacks similar verification. This could lead to silent failures during the build process.

# Install the OpenShift CLI (OC)
RUN curl -L https://mirror.openshift.com/pub/openshift-v4/x86_64/clients/ocp/stable/openshift-client-linux.tar.gz --output /tmp/openshift-client-linux.tar.gz \
    && tar xvf /tmp/openshift-client-linux.tar.gz --no-same-owner \
    && mv oc /usr/bin/oc \
-    && chmod +x /usr/bin/oc
+    && chmod +x /usr/bin/oc \
+    && rm -f /tmp/openshift-client-linux.tar.gz \
+    && oc version --client
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
# Install the OpenShift CLI (OC)
RUN curl -L https://mirror.openshift.com/pub/openshift-v4/x86_64/clients/ocp/stable/openshift-client-linux.tar.gz --output /tmp/openshift-client-linux.tar.gz \
&& tar xvf /tmp/openshift-client-linux.tar.gz --no-same-owner \
&& mv oc /usr/bin/oc \
&& chmod +x /usr/bin/oc
# Install the OpenShift CLI (OC)
RUN curl -L https://mirror.openshift.com/pub/openshift-v4/x86_64/clients/ocp/stable/openshift-client-linux.tar.gz --output /tmp/openshift-client-linux.tar.gz \
&& tar xvf /tmp/openshift-client-linux.tar.gz --no-same-owner \
&& mv oc /usr/bin/oc \
&& chmod +x /usr/bin/oc \
&& rm -f /tmp/openshift-client-linux.tar.gz \
&& oc version --client
🤖 Prompt for AI Agents
In Dockerfile lines 27 to 31, the OpenShift CLI (OC) installation lacks a
verification step to confirm successful installation. Add a command after
setting permissions to run `oc version` or a similar command to verify the OC
CLI is correctly installed and executable, ensuring any installation issues are
caught during the build process.


# Install grpcurl
RUN curl -sSL "https://github.com/fullstorydev/grpcurl/releases/download/v1.9.2/grpcurl_1.9.2_linux_x86_64.tar.gz" --output /tmp/grpcurl_1.2.tar.gz \
&& tar xvf /tmp/grpcurl_1.2.tar.gz --no-same-owner \
Expand Down
55 changes: 26 additions & 29 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@
import pytest
import shortuuid
import yaml
from _pytest._py.path import LocalPath
from _pytest.legacypath import TempdirFactory
from _pytest.tmpdir import TempPathFactory
from ocp_resources.config_map import ConfigMap
from ocp_resources.dsc_initialization import DSCInitialization
Expand All @@ -34,7 +32,6 @@
create_ns,
login_with_user_password,
get_openshift_token,
download_oc_console_cli,
)
from utilities.constants import (
AcceleratorType,
Expand Down Expand Up @@ -530,38 +527,38 @@ def related_images_refs(admin_client: DynamicClient) -> set[str]:
return related_images_refs


@pytest.fixture(scope="session")
def os_path_environment() -> str:
return os.environ["PATH"]
# @pytest.fixture(scope="session")
# def os_path_environment() -> str:
# return os.environ["PATH"]


@pytest.fixture(scope="session")
def bin_directory(tmpdir_factory: TempdirFactory) -> LocalPath:
return tmpdir_factory.mktemp(basename="bin")
# @pytest.fixture(scope="session")
# def bin_directory(tmpdir_factory: TempdirFactory) -> LocalPath:
# return tmpdir_factory.mktemp(basename="bin")


@pytest.fixture(scope="session")
def bin_directory_to_os_path(os_path_environment: str, bin_directory: LocalPath, oc_binary_path: str) -> None:
LOGGER.info(f"OC binary path: {oc_binary_path}")
LOGGER.info(f"Adding {bin_directory} to $PATH")
os.environ["PATH"] = f"{bin_directory}:{os_path_environment}"
# @pytest.fixture(scope="session")
# def bin_directory_to_os_path(os_path_environment: str, bin_directory: LocalPath, oc_binary_path: str) -> None:
# LOGGER.info(f"OC binary path: {oc_binary_path}")
# LOGGER.info(f"Adding {bin_directory} to $PATH")
# os.environ["PATH"] = f"{bin_directory}:{os_path_environment}"


@pytest.fixture(scope="session")
def oc_binary_path(bin_directory: LocalPath) -> str:
installed_oc_binary_path = os.getenv("OC_BINARY_PATH")
if installed_oc_binary_path:
LOGGER.warning(f"Using previously installed: {installed_oc_binary_path}")
return installed_oc_binary_path
# @pytest.fixture(scope="session")
# def oc_binary_path(bin_directory: LocalPath) -> str:
# installed_oc_binary_path = os.getenv("OC_BINARY_PATH")
# if installed_oc_binary_path:
# LOGGER.warning(f"Using previously installed: {installed_oc_binary_path}")
# return installed_oc_binary_path

return download_oc_console_cli(tmpdir=bin_directory)
# return download_oc_console_cli(tmpdir=bin_directory)


@pytest.fixture(scope="session", autouse=True)
@pytest.mark.early(order=0)
def autouse_fixtures(
bin_directory_to_os_path: None,
cluster_sanity_scope_session: None,
) -> None:
"""Fixture to control the order of execution of some of the fixtures"""
return
# @pytest.fixture(scope="session", autouse=True)
# @pytest.mark.early(order=0)
# def autouse_fixtures(
# bin_directory_to_os_path: None,
# cluster_sanity_scope_session: None,
# ) -> None:
# """Fixture to control the order of execution of some of the fixtures"""
# return
173 changes: 83 additions & 90 deletions utilities/infra.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,20 +3,14 @@
import os
import re
import shlex
import stat
import tarfile
import tempfile
import zipfile
from contextlib import contextmanager
from functools import cache
from typing import Any, Generator, Optional, Set, Callable
from json import JSONDecodeError

import kubernetes
import platform
import pytest
import requests
from _pytest._py.path import LocalPath
from _pytest.fixtures import FixtureRequest
from kubernetes.dynamic import DynamicClient
from kubernetes.dynamic.exceptions import (
Expand All @@ -26,7 +20,6 @@
from ocp_resources.catalog_source import CatalogSource
from ocp_resources.cluster_service_version import ClusterServiceVersion
from ocp_resources.config_map import ConfigMap
from ocp_resources.console_cli_download import ConsoleCLIDownload
from ocp_resources.data_science_cluster import DataScienceCluster
from ocp_resources.deployment import Deployment
from ocp_resources.dsc_initialization import DSCInitialization
Expand Down Expand Up @@ -1153,86 +1146,86 @@ def _get_image_json(cmd: str) -> Any:
raise


def get_machine_platform() -> str:
os_machine_type = platform.machine()
return "amd64" if os_machine_type == "x86_64" else os_machine_type


def get_os_system() -> str:
os_system = platform.system().lower()
if os_system == "darwin" and platform.mac_ver()[0]:
os_system = "mac"
return os_system


def get_oc_console_cli_download_link() -> str:
oc_console_cli_download = ConsoleCLIDownload(name="oc-cli-downloads", ensure_exists=True)
os_system = get_os_system()
machine_platform = get_machine_platform()
oc_links = oc_console_cli_download.instance.spec.links
all_links = [
link_ref.href
for link_ref in oc_links
if link_ref.href.endswith(("oc.tar", "oc.zip"))
and os_system in link_ref.href
and machine_platform in link_ref.href
]
LOGGER.info(f"All oc console cli download links: {all_links}")
if not all_links:
raise ValueError(f"No oc console cli download link found for {os_system} {machine_platform} in {oc_links}")

return all_links[0]


def get_server_cert(tmpdir: LocalPath) -> str:
data = ConfigMap(name="kube-root-ca.crt", namespace="openshift-apiserver", ensure_exists=True).instance.data[
"ca.crt"
]
file_path = os.path.join(tmpdir, "cluster-ca.cert")
with open(file_path, "w") as fd:
fd.write(data)
return file_path


def download_oc_console_cli(tmpdir: LocalPath) -> str:
"""
Download and extract the OpenShift CLI binary.

Args:
tmpdir (str): Directory to download and extract the binary to

Returns:
str: Path to the extracted binary

Raises:
ValueError: If multiple files are found in the archive or if no download link is found
"""
oc_console_cli_download_link = get_oc_console_cli_download_link()
LOGGER.info(f"Downloading archive using: url={oc_console_cli_download_link}")
cert_file = get_server_cert(tmpdir=tmpdir)
local_file_name = os.path.join(tmpdir, oc_console_cli_download_link.split("/")[-1])
with requests.get(oc_console_cli_download_link, verify=cert_file, stream=True) as created_request:
created_request.raise_for_status()
with open(local_file_name, "wb") as file_downloaded:
for chunk in created_request.iter_content(chunk_size=8192):
file_downloaded.write(chunk)
LOGGER.info("Extract the downloaded archive.")
extracted_filenames = []
if oc_console_cli_download_link.endswith(".zip"):
zip_file = zipfile.ZipFile(file=local_file_name)
zip_file.extractall(path=tmpdir)
extracted_filenames = zip_file.namelist()
else:
with tarfile.open(name=local_file_name, mode="r") as tar_file:
tar_file.extractall(path=tmpdir)
extracted_filenames = tar_file.getnames()
LOGGER.info(f"Downloaded file: {extracted_filenames}")

if len(extracted_filenames) > 1:
raise ValueError(f"Multiple files found in {extracted_filenames}")
# Remove the downloaded file
if os.path.isfile(local_file_name):
os.remove(local_file_name)
binary_path = os.path.join(tmpdir, extracted_filenames[0])
os.chmod(binary_path, stat.S_IRUSR | stat.S_IXUSR)
return binary_path
# def get_machine_platform() -> str:
# os_machine_type = platform.machine()
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm wondering whether commenting out the code is the best way to stow away these changes; I won't block the PR over it, though.

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Tox seems to be complaining about unused code: https://github.com/opendatahub-io/opendatahub-tests/actions/runs/15758251029/job/44418477232?pr=364
I might be wrong, but if that's the case, deleting this code and reverting it later may be the better option; otherwise Tox will complain every time it runs.

# return "amd64" if os_machine_type == "x86_64" else os_machine_type


# def get_os_system() -> str:
# os_system = platform.system().lower()
# if os_system == "darwin" and platform.mac_ver()[0]:
# os_system = "mac"
# return os_system


# def get_oc_console_cli_download_link() -> str:
# oc_console_cli_download = ConsoleCLIDownload(name="oc-cli-downloads", ensure_exists=True)
# os_system = get_os_system()
# machine_platform = get_machine_platform()
# oc_links = oc_console_cli_download.instance.spec.links
# all_links = [
# link_ref.href
# for link_ref in oc_links
# if link_ref.href.endswith(("oc.tar", "oc.zip"))
# and os_system in link_ref.href
# and machine_platform in link_ref.href
# ]
# LOGGER.info(f"All oc console cli download links: {all_links}")
# if not all_links:
# raise ValueError(f"No oc console cli download link found for {os_system} {machine_platform} in {oc_links}")

# return all_links[0]


# def get_server_cert(tmpdir: LocalPath) -> str:
# data = ConfigMap(name="kube-root-ca.crt", namespace="openshift-apiserver", ensure_exists=True).instance.data[
# "ca.crt"
# ]
# file_path = os.path.join(tmpdir, "cluster-ca.cert")
# with open(file_path, "w") as fd:
# fd.write(data)
# return file_path


# def download_oc_console_cli(tmpdir: LocalPath) -> str:
# """
# Download and extract the OpenShift CLI binary.

# Args:
# tmpdir (str): Directory to download and extract the binary to

# Returns:
# str: Path to the extracted binary

# Raises:
# ValueError: If multiple files are found in the archive or if no download link is found
# """
# oc_console_cli_download_link = get_oc_console_cli_download_link()
# LOGGER.info(f"Downloading archive using: url={oc_console_cli_download_link}")
# cert_file = get_server_cert(tmpdir=tmpdir)
# local_file_name = os.path.join(tmpdir, oc_console_cli_download_link.split("/")[-1])
# with requests.get(oc_console_cli_download_link, verify=cert_file, stream=True) as created_request:
# created_request.raise_for_status()
# with open(local_file_name, "wb") as file_downloaded:
# for chunk in created_request.iter_content(chunk_size=8192):
# file_downloaded.write(chunk)
# LOGGER.info("Extract the downloaded archive.")
# extracted_filenames = []
# if oc_console_cli_download_link.endswith(".zip"):
# zip_file = zipfile.ZipFile(file=local_file_name)
# zip_file.extractall(path=tmpdir)
# extracted_filenames = zip_file.namelist()
# else:
# with tarfile.open(name=local_file_name, mode="r") as tar_file:
# tar_file.extractall(path=tmpdir)
# extracted_filenames = tar_file.getnames()
# LOGGER.info(f"Downloaded file: {extracted_filenames}")

# if len(extracted_filenames) > 1:
# raise ValueError(f"Multiple files found in {extracted_filenames}")
# # Remove the downloaded file
# if os.path.isfile(local_file_name):
# os.remove(local_file_name)
# binary_path = os.path.join(tmpdir, extracted_filenames[0])
# os.chmod(binary_path, stat.S_IRUSR | stat.S_IXUSR)
# return binary_path
Loading