Skip to content

Commit 41a3ab1

Browse files
authored
Fix authentication tests for InferenceService (#1175)
* update authentication tests * skip test with bug * precommit fixes
1 parent 7ac82f6 commit 41a3ab1

File tree

3 files changed

+21
-38
lines changed

3 files changed

+21
-38
lines changed

tests/model_serving/model_server/kserve/authentication/conftest.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
from ocp_resources.secret import Secret
1414
from ocp_resources.service_account import ServiceAccount
1515
from ocp_resources.serving_runtime import ServingRuntime
16+
from simple_logger.logger import get_logger
1617

1718
from utilities.constants import (
1819
Annotations,
@@ -32,6 +33,8 @@
3233
from utilities.logger import RedactedString
3334
from utilities.serving_runtime import ServingRuntimeFromTemplate
3435

36+
LOGGER = get_logger(name=__name__)
37+
3538

3639
# HTTP/REST model serving
3740
@pytest.fixture(scope="class")
@@ -92,7 +95,8 @@ def patched_remove_raw_authentication_isvc(
9295
}
9396
}
9497
):
95-
if is_jira_open(jira_id="RHOAIENG-19275", admin_client=admin_client):
98+
if is_jira_open(jira_id="RHOAIENG-52129", admin_client=admin_client):
99+
LOGGER.info("RHOAIENG-52129 is open; waiting for predictor pod rollout after auth toggle")
96100
predictor_pod.wait_deleted()
97101

98102
yield http_s3_ovms_raw_inference_service

tests/model_serving/model_server/kserve/authentication/test_kserve_token_authentication_raw.py

Lines changed: 12 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -3,9 +3,8 @@
33

44
from tests.model_serving.model_server.utils import verify_inference_response
55
from utilities.constants import Annotations, Protocols
6-
from utilities.inference_utils import Inference, UserInference
6+
from utilities.inference_utils import Inference
77
from utilities.infra import check_pod_status_in_time, get_pods_by_isvc_label
8-
from utilities.jira import is_jira_open
98
from utilities.manifests.onnx import ONNX_INFERENCE_CONFIG
109

1110
pytestmark = pytest.mark.usefixtures("valid_aws_config")
@@ -50,7 +49,7 @@ def test_disabled_raw_model_authentication(self, patched_remove_raw_authenticati
5049
)
5150

5251
@pytest.mark.sanity
53-
@pytest.mark.jira("RHOAIENG-19275", run=False)
52+
@pytest.mark.jira("RHOAIENG-52129", run=False)
5453
def test_raw_disable_enable_authentication_no_pod_rollout(self, http_s3_ovms_raw_inference_service):
5554
"""Verify no pod rollout when disabling and enabling authentication"""
5655
pod = get_pods_by_isvc_label(
@@ -100,38 +99,14 @@ def test_re_enabled_raw_model_authentication(self, http_s3_ovms_raw_inference_se
10099
indirect=True,
101100
)
102101
@pytest.mark.dependency(name="test_cross_model_authentication_raw")
103-
def test_cross_model_authentication_raw(
104-
self, http_s3_ovms_raw_inference_service_2, http_raw_inference_token, admin_client
105-
):
102+
def test_cross_model_authentication_raw(self, http_s3_ovms_raw_inference_service_2, http_raw_inference_token):
106103
"""Verify model with another model token"""
107-
if is_jira_open(jira_id="RHOAIENG-19645", admin_client=admin_client):
108-
inference = UserInference(
109-
inference_service=http_s3_ovms_raw_inference_service_2,
110-
inference_config=ONNX_INFERENCE_CONFIG,
111-
inference_type=Inference.INFER,
112-
protocol=Protocols.HTTPS,
113-
)
114-
115-
res = inference.run_inference_flow(
116-
model_name=http_s3_ovms_raw_inference_service_2.name,
117-
use_default_query=True,
118-
token=http_raw_inference_token,
119-
insecure=False,
120-
)
121-
output = res.get("output", res)
122-
if isinstance(output, dict):
123-
output = str(output)
124-
status_line = output.splitlines()[0]
125-
# Updated: Now expecting 403 Forbidden for cross-model authentication
126-
# (token from service 1 cannot access service 2)
127-
assert "403 Forbidden" in status_line, f"Expected '403 Forbidden' in status line, got: {status_line}"
128-
else:
129-
verify_inference_response(
130-
inference_service=http_s3_ovms_raw_inference_service_2,
131-
inference_config=ONNX_INFERENCE_CONFIG,
132-
inference_type=Inference.INFER,
133-
protocol=Protocols.HTTPS,
134-
use_default_query=True,
135-
token=http_raw_inference_token,
136-
authorized_user=False,
137-
)
104+
verify_inference_response(
105+
inference_service=http_s3_ovms_raw_inference_service_2,
106+
inference_config=ONNX_INFERENCE_CONFIG,
107+
inference_type=Inference.INFER,
108+
protocol=Protocols.HTTPS,
109+
use_default_query=True,
110+
token=http_raw_inference_token,
111+
authorized_user=False,
112+
)

tests/model_serving/model_server/utils.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -92,6 +92,10 @@ def verify_inference_response(
9292
):
9393
assert "x-forbidden-reason: Access to the InferenceGraph is not allowed" in res["output"]
9494

95+
elif "403 Forbidden" in res["output"]:
96+
resource = f"{inference_service.kind.lower()}s"
97+
assert re.search(rf"Forbidden \(user=.*verb=get.*resource={resource}", res["output"])
98+
9599
else:
96100
raise ValueError(f"Auth header {auth_header} not found in response. Response: {res['output']}")
97101

0 commit comments

Comments (0)