Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ minimum_pre_commit_version: 3.3.0
default_install_hook_types: [pre-commit, commit-msg]

default_language_version:
python: python3.13
python: python3.14

repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
Expand Down
8 changes: 4 additions & 4 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,19 +1,19 @@
FROM fedora:42
FROM fedora:43

ARG USER=odh
ARG HOME=/home/$USER
ARG TESTS_DIR=$HOME/opendatahub-tests/
ENV UV_PYTHON=python3.13
ENV UV_PYTHON=python3.14
ENV UV_COMPILE_BYTECODE=1
ENV UV_NO_SYNC=1
ENV UV_NO_CACHE=1

ENV BIN_DIR="$HOME_DIR/.local/bin"
ENV PATH="$PATH:$BIN_DIR"

# Install Python 3.13 and other dependencies using dnf
# Install system dependencies using dnf
RUN dnf update -y \
&& dnf install -y python3.13 python3.13-pip ssh gnupg curl gpg wget vim httpd-tools rsync openssl openssl-devel\
&& dnf install -y python3 python3-pip ssh gnupg curl gpg wget vim httpd-tools rsync openssl openssl-devel\
&& dnf clean all \
Comment thread
dbasunag marked this conversation as resolved.
&& rm -rf /var/cache/dnf

Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ dev = [
]

[project]
requires-python = "==3.13.*"
requires-python = "==3.14.*"
name = "opendatahub-tests"
version = "0.1.0"
description = "Tests repository for Open Data Hub (ODH)"
Expand Down
2 changes: 1 addition & 1 deletion tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -835,7 +835,7 @@ def gpu_count_on_cluster(nodes: list[Any]) -> int:
if key in allowed_exact or any(key.startswith(p) for p in allowed_prefixes):
try:
total_gpus += int(val)
except (ValueError, TypeError):
except (ValueError, TypeError):
LOGGER.debug(f"Skipping non-integer allocatable for {key} on {node.name}: {val!r}")
continue
return total_gpus
Expand Down
2 changes: 1 addition & 1 deletion tests/model_registry/model_catalog/search/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -405,7 +405,7 @@ def _validate_single_criterion(
else:
LOGGER.warning(f"Unknown key_type: {key_type}")
return False, f"{key_name}: unknown type {key_type}"
except (ValueError, TypeError):
except (ValueError, TypeError):
return False, f"{key_name}: conversion error"

# Perform comparison based on type
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -128,13 +128,13 @@ def dog_breed_inference_graph(

try:
name = request.param["name"]
except (AttributeError, KeyError):
except (AttributeError, KeyError):
name = "dog-breed-pipeline"

try:
if not request.param["external-route"]:
labels[networking_label] = "cluster-local"
except (AttributeError, KeyError):
except (AttributeError, KeyError):
pass

with InferenceGraph(
Expand Down Expand Up @@ -256,7 +256,7 @@ def bare_service_account(
try:
if request.param["name"]:
name = request.param["name"]
except (AttributeError, KeyError):
except (AttributeError, KeyError):
name = "sa-" + token_hex(4)

with ServiceAccount(
Expand Down
4 changes: 2 additions & 2 deletions tests/model_serving/model_server/maas_billing/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ def mint_token(
)
try:
body = resp.json()
except (JSONDecodeError, ValueError):
except (JSONDecodeError, ValueError):
body = {}
return resp, body

Expand Down Expand Up @@ -400,7 +400,7 @@ def get_total_tokens(resp: Response, *, fail_if_missing: bool = False) -> int |
if header_val is not None:
try:
return int(header_val)
except (TypeError, ValueError):
except (TypeError, ValueError):
if fail_if_missing:
raise AssertionError(
f"Token usage header is not parseable as int; headers={dict(resp.headers)} body={resp.text[:500]}"
Expand Down
2 changes: 1 addition & 1 deletion tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ skipsdist = True

#Unused code
[testenv:unused-code]
basepython = python3
basepython = python3.14
recreate=True
setenv =
PYTHONPATH = {toxinidir}
Expand Down
2 changes: 1 addition & 1 deletion utilities/infra.py
Original file line number Diff line number Diff line change
Expand Up @@ -969,7 +969,7 @@ def wait_for_serverless_pods_deletion(resource: Project | Namespace, admin_clien
LOGGER.info(f"Waiting for {KServeDeploymentType.SERVERLESS} pod {pod.name} to be deleted")
pod.wait_deleted(timeout=Timeout.TIMEOUT_1MIN)

except (ResourceNotFoundError, NotFoundError):
except (ResourceNotFoundError, NotFoundError):
LOGGER.info(f"Pod {pod.name} is deleted")


Expand Down
2 changes: 1 addition & 1 deletion utilities/logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ class RedactedString(str):
Used to redact the representation of a sensitive string.
"""

def __new__(cls, *, value: object) -> "RedactedString": # noqa: PYI034
def __new__(cls, *, value: object) -> "RedactedString":  # noqa: PYI034
return super().__new__(cls, value)

def __repr__(self) -> str:
Expand Down
4 changes: 2 additions & 2 deletions utilities/plugins/openai_plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ def streaming_request_http(
message = json.loads(data)
token = self._parse_streaming_response(endpoint, message)
tokens.append(token)
except (requests.exceptions.RequestException, json.JSONDecodeError):
except (requests.exceptions.RequestException, json.JSONDecodeError):
LOGGER.error("Streaming request error")
raise
return "".join(tokens)
Expand Down Expand Up @@ -137,7 +137,7 @@ def get_request_http(host: str, endpoint: str) -> Any:
if data:
data = OpenAIClient._remove_keys(data, keys_to_remove)
return data # noqa: TRY300
except (requests.exceptions.RequestException, json.JSONDecodeError):
except (requests.exceptions.RequestException, json.JSONDecodeError):
LOGGER.exception("Request error")

@retry(stop=stop_after_attempt(MAX_RETRIES), wait=wait_exponential(min=1, max=6))
Expand Down
Loading