Skip to content

Commit 5f7d91e

Browse files
committed
Archive the collected must-gather data
1 parent 91fc08c commit 5f7d91e

File tree

3 files changed

+35
-87
lines changed

3 files changed

+35
-87
lines changed

conftest.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -449,6 +449,7 @@ def pytest_exception_interact(node: Item | Collector, call: CallInfo[Any], repor
449449

450450
try:
451451
collect_rhoai_must_gather(
452+
base_file_name=f"mg-{test_start_time}",
452453
since=calculate_must_gather_timer(test_start_time=test_start_time),
453454
target_dir=os.path.join(get_must_gather_collector_dir(), "pytest_exception_interact"),
454455
)

utilities/infra.py

Lines changed: 0 additions & 62 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,13 @@
1-
import base64
21
import json
32
import os
43
import re
54
import shlex
65
import stat
76
import tarfile
8-
import tempfile
97
import zipfile
108
from contextlib import contextmanager
119
from functools import cache
1210
from typing import Any, Generator, Optional, Set, Callable
13-
from json import JSONDecodeError
1411

1512
import kubernetes
1613
import platform
@@ -1025,15 +1022,6 @@ def get_rhods_operator_installed_csv() -> ClusterServiceVersion | None:
10251022
return None
10261023

10271024

1028-
def get_rhods_csv_version() -> Version | None:
1029-
rhoai_csv = get_rhods_operator_installed_csv()
1030-
if rhoai_csv:
1031-
LOGGER.info(f"RHOAI CSV version: {rhoai_csv.instance.spec.version}")
1032-
return Version.parse(version=rhoai_csv.instance.spec.version)
1033-
LOGGER.warning("No RHOAI CSV found. Potentially ODH cluster")
1034-
return None
1035-
1036-
10371025
@retry(
10381026
wait_timeout=120,
10391027
sleep=5,
@@ -1115,56 +1103,6 @@ def verify_cluster_sanity(
11151103
pytest.exit(reason=error_msg, returncode=return_code)
11161104

11171105

1118-
def get_openshift_pull_secret(client: DynamicClient = None) -> Secret:
1119-
openshift_config_namespace = "openshift-config"
1120-
pull_secret_name = "pull-secret" # pragma: allowlist secret
1121-
secret = Secret(
1122-
client=client or get_client(),
1123-
name=pull_secret_name,
1124-
namespace=openshift_config_namespace,
1125-
)
1126-
assert secret.exists, f"Pull-secret {pull_secret_name} not found in namespace {openshift_config_namespace}"
1127-
return secret
1128-
1129-
1130-
def generate_openshift_pull_secret_file(client: DynamicClient = None) -> str:
1131-
pull_secret = get_openshift_pull_secret(client=client)
1132-
pull_secret_path = tempfile.mkdtemp(suffix="odh-pull-secret")
1133-
json_file = os.path.join(pull_secret_path, "pull-secrets.json")
1134-
secret = base64.b64decode(pull_secret.instance.data[".dockerconfigjson"]).decode(encoding="utf-8")
1135-
with open(file=json_file, mode="w") as outfile:
1136-
outfile.write(secret)
1137-
return json_file
1138-
1139-
1140-
def get_oc_image_info(
1141-
image: str,
1142-
architecture: str,
1143-
pull_secret: str | None = None,
1144-
) -> Any:
1145-
def _get_image_json(cmd: str) -> Any:
1146-
return json.loads(run_command(command=shlex.split(cmd), check=False)[1])
1147-
1148-
base_command = f"oc image -o json info {image} --filter-by-os {architecture}"
1149-
if pull_secret:
1150-
base_command = f"{base_command} --registry-config={pull_secret}"
1151-
1152-
sample = None
1153-
try:
1154-
for sample in TimeoutSampler(
1155-
wait_timeout=10,
1156-
sleep=5,
1157-
exceptions_dict={JSONDecodeError: [], TypeError: []},
1158-
func=_get_image_json,
1159-
cmd=base_command,
1160-
):
1161-
if sample:
1162-
return sample
1163-
except TimeoutExpiredError:
1164-
LOGGER.error(f"Failed to parse {base_command}")
1165-
raise
1166-
1167-
11681106
def get_machine_platform() -> str:
11691107
os_machine_type = platform.machine()
11701108
return "amd64" if os_machine_type == "x86_64" else os_machine_type

utilities/must_gather_collector.py

Lines changed: 34 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,13 @@
11
import os
22
import shlex
3+
import shutil
34

45
from pytest_testconfig import config as py_config
56
from pytest import Item
67
from pyhelper_utils.shell import run_command
78
from simple_logger.logger import get_logger
89
from utilities.exceptions import InvalidArgumentsError
9-
from utilities.infra import get_rhods_csv_version, get_oc_image_info, generate_openshift_pull_secret_file
10+
from utilities.infra import get_rhods_operator_installed_csv
1011

1112
BASE_DIRECTORY_NAME = "must-gather-collected"
1213
BASE_RESULTS_DIR = "/home/odh/opendatahub-tests/"
@@ -132,33 +133,32 @@ def run_must_gather(
132133
return run_command(command=shlex.split(must_gather_command), check=False)[1]
133134

134135

135-
def get_must_gather_image_info(architecture: str = "linux/amd64") -> str:
136-
try:
137-
csv_version = get_rhods_csv_version()
138-
if csv_version:
139-
must_gather_image_manifest = f"quay.io/modh/must-gather:rhoai-{csv_version.major}.{csv_version.minor}"
140-
pull_secret = generate_openshift_pull_secret_file()
141-
image_info = get_oc_image_info(
142-
image=must_gather_image_manifest, architecture=architecture, pull_secret=pull_secret
143-
)
144-
return f"quay.io/modh/must-gather@{image_info['digest']}"
145-
else:
146-
LOGGER.warning(
147-
"No RHAOI CSV found. Potentially ODH cluster and must-gather collection is not "
148-
"relevant for this cluster"
149-
)
150-
return ""
151-
except Exception as exec:
152-
raise RuntimeError(f"Failed to retrieve must-gather image info: {str(exec)}") from exec
136+
def get_must_gather_image_info() -> str:
137+
csv_object = get_rhods_operator_installed_csv()
138+
if not csv_object:
139+
return ""
140+
must_gather_image = [
141+
image["image"] for image in csv_object.instance.spec.relatedImages if "odh-must-gather" in image["image"]
142+
]
143+
if not must_gather_image:
144+
LOGGER.warning(
145+
"No RHAOI CSV found. Potentially ODH cluster and must-gather collection is not relevant for this cluster"
146+
)
147+
return ""
148+
return must_gather_image[0]
153149

154150

155151
def collect_rhoai_must_gather(
156-
target_dir: str, since: int, save_collection_output: bool = True, architecture: str = "linux/amd64"
157-
) -> str:
152+
base_file_name: str,
153+
target_dir: str,
154+
since: int,
155+
save_collection_output: bool = True,
156+
) -> None:
158157
"""
159158
Collect must-gather data for RHOAI cluster.
160159
161160
Args:
161+
base_file_name (str): Base file name for the must-gather compressed archive
162162
target_dir (str): Directory to store the must-gather output
163163
since (int): Time in seconds to collect logs from
164164
save_collection_output (bool, optional): Whether to save must-gather command output. Defaults to True.
@@ -167,13 +167,22 @@ def collect_rhoai_must_gather(
167167
Returns:
168168
None: The must-gather output is archived as a zip file inside target_dir; nothing is returned
169169
"""
170-
must_gather_image = get_must_gather_image_info(architecture=architecture)
170+
must_gather_image = get_must_gather_image_info()
171171
if must_gather_image:
172172
output = run_must_gather(image_url=must_gather_image, target_dir=target_dir, since=f"{since}s")
173173
if save_collection_output:
174174
with open(os.path.join(target_dir, "output.log"), "w") as _file:
175175
_file.write(output)
176-
return get_must_gather_output_dir(must_gather_path=target_dir)
176+
# get must gather directory to archive
177+
path = get_must_gather_output_dir(must_gather_path=target_dir)
178+
# archive the folder and get the zip file's name
179+
file_name = shutil.make_archive(base_name=base_file_name, format="zip", base_dir=path)
180+
# remove the folder that was archived
181+
shutil.rmtree(path=path, ignore_errors=True)
182+
# copy back the archived file to the same path
183+
dest_file = os.path.join(target_dir, file_name)
184+
shutil.copy(src=file_name, dst=dest_file)
185+
LOGGER.info(f"{dest_file} is collected successfully")
186+
os.unlink(file_name)
177187
else:
178-
LOGGER.warning("Must-gather collection would be skipped.")
179-
return ""
188+
LOGGER.error("No must-gather image is found from the csv. Must-gather collection would be skipped.")

0 commit comments

Comments
 (0)