Skip to content

Commit ef9ee0c

Browse files
authored
Add model mesh authentication tests (#115)
* Create size-labeler.yml * Delete .github/workflows/size-labeler.yml * model mesh - add auth tests * model mesh - add auth tests * fix code * update * use isvc as role * update runtime kwargs * use service for mm auth * fix svc * fix: address comments * fix: resolve review comments * fix: removed bug that will not be fixed
1 parent f8ddbc1 commit ef9ee0c

File tree

11 files changed

+264
-150
lines changed

11 files changed

+264
-150
lines changed

tests/model_serving/model_server/authentication/conftest.py

Lines changed: 61 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
import shlex
21
from typing import Any, Generator
32
from urllib.parse import urlparse
43

@@ -13,12 +12,11 @@
1312
from ocp_resources.secret import Secret
1413
from ocp_resources.service_account import ServiceAccount
1514
from ocp_resources.serving_runtime import ServingRuntime
16-
from pyhelper_utils.shell import run_command
1715

1816
from utilities.inference_utils import create_isvc
1917
from utilities.infra import (
20-
create_isvc_view_role,
2118
create_ns,
19+
create_isvc_view_role,
2220
get_pods_by_isvc_label,
2321
s3_endpoint_secret,
2422
create_inference_token,
@@ -255,11 +253,7 @@ def grpc_role_binding(
255253

256254
@pytest.fixture(scope="class")
def grpc_inference_token(grpc_model_service_account: ServiceAccount, grpc_role_binding: RoleBinding) -> str:
    """Return a bearer token for the gRPC model service account.

    Depends on ``grpc_role_binding`` so the binding exists before the
    token is handed to a test.
    """
    sa = grpc_model_service_account
    return create_inference_token(model_service_account=sa)
263257

264258

265259
@pytest.fixture(scope="class")
@@ -444,3 +438,62 @@ def unprivileged_s3_caikit_raw_inference_service(
444438
storage_path=ModelStoragePath.FLAN_T5_SMALL_CAIKIT,
445439
) as isvc:
446440
yield isvc
441+
442+
443+
@pytest.fixture()
def patched_remove_authentication_model_mesh_runtime(
    admin_client: DynamicClient,
    http_s3_ovms_model_mesh_serving_runtime: ServingRuntime,
) -> Generator[ServingRuntime, Any, Any]:
    """Temporarily disable authentication on the model-mesh serving runtime.

    Patches the runtime's ``enable-auth`` annotation to ``"false"``; the
    ResourceEditor context manager restores the original annotation when
    the fixture is torn down.
    """
    runtime = http_s3_ovms_model_mesh_serving_runtime
    disable_auth_patch = {
        runtime: {
            "metadata": {
                "annotations": {"enable-auth": "false"},
            }
        }
    }
    with ResourceEditor(patches=disable_auth_patch):
        yield runtime
458+
459+
460+
@pytest.fixture(scope="class")
def http_model_mesh_view_role(
    admin_client: DynamicClient,
    http_s3_openvino_model_mesh_inference_service: InferenceService,
    http_s3_ovms_model_mesh_serving_runtime: ServingRuntime,
) -> Generator[Role, Any, Any]:
    """Class-scoped Role that allows ``get`` on services in the isvc namespace.

    Model-mesh authentication is service-based, so viewing the Service is
    what the inference token ultimately needs.
    """
    isvc = http_s3_openvino_model_mesh_inference_service
    view_rules = [
        {"apiGroups": [""], "resources": ["services"], "verbs": ["get"]},
    ]
    with Role(
        client=admin_client,
        name=f"{isvc.name}-view",
        namespace=isvc.namespace,
        rules=view_rules,
    ) as role:
        yield role
475+
476+
477+
@pytest.fixture(scope="class")
def http_model_mesh_role_binding(
    admin_client: DynamicClient,
    http_model_mesh_view_role: Role,
    ci_service_account: ServiceAccount,
) -> Generator[RoleBinding, Any, Any]:
    """Bind the model-mesh view Role to the CI service account (class scope)."""
    sa = ci_service_account
    role = http_model_mesh_view_role
    binding = RoleBinding(
        client=admin_client,
        namespace=sa.namespace,
        name=f"{Protocols.HTTP}-{sa.name}-view",
        role_ref_name=role.name,
        role_ref_kind=role.kind,
        subjects_kind=sa.kind,
        subjects_name=sa.name,
    )
    with binding as rb:
        yield rb
493+
494+
495+
@pytest.fixture(scope="class")
def http_model_mesh_inference_token(
    ci_service_account: ServiceAccount, http_model_mesh_role_binding: RoleBinding
) -> str:
    """Return an inference bearer token for the CI service account.

    Depends on ``http_model_mesh_role_binding`` so the token is only minted
    after the view permissions are in place.
    """
    token = create_inference_token(model_service_account=ci_service_account)
    return token

tests/model_serving/model_server/authentication/test_kserve_token_authentication_serverless.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
],
2121
indirect=True,
2222
)
23-
class TestKserveTokenAuthentication:
23+
class TestKserveServerlessTokenAuthentication:
2424
@pytest.mark.smoke
2525
@pytest.mark.dependency(name="test_model_authentication_using_rest")
2626
def test_model_authentication_using_rest(self, http_s3_caikit_serverless_inference_service, http_inference_token):
Lines changed: 98 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,98 @@
1+
import pytest
2+
3+
from tests.model_serving.model_server.utils import verify_inference_response
4+
from utilities.constants import (
5+
ModelStoragePath,
6+
Protocols,
7+
)
8+
from utilities.inference_utils import Inference
9+
from utilities.manifests.openvino import OPENVINO_INFERENCE_CONFIG
10+
# Every test in this module runs under the model-mesh and sanity suites.
pytestmark = [pytest.mark.modelmesh, pytest.mark.sanity]
12+
13+
14+
@pytest.mark.parametrize(
    "model_namespace, http_s3_ovms_model_mesh_serving_runtime, http_s3_openvino_model_mesh_inference_service",
    [
        pytest.param(
            {"name": "model-mesh-authentication", "modelmesh-enabled": True},
            {"enable-auth": True, "enable-external-route": True},
            {"model-path": ModelStoragePath.OPENVINO_EXAMPLE_MODEL},
        )
    ],
    indirect=True,
)
class TestModelMeshAuthentication:
    """Model Mesh Authentication is based on the created Service; cross-model authentication is not blocked"""

    def _query_model(self, isvc, **extra_kwargs):
        # Shared inference call: every test queries the same OpenVINO model
        # over HTTPS with the default payload; tests only vary token/expectation.
        verify_inference_response(
            inference_service=isvc,
            inference_config=OPENVINO_INFERENCE_CONFIG,
            inference_type=Inference.INFER,
            protocol=Protocols.HTTPS,
            use_default_query=True,
            **extra_kwargs,
        )

    @pytest.mark.dependency(name="test_model_mesh_model_authentication_openvino_inference_with_tensorflow")
    def test_model_mesh_model_authentication_openvino_inference_with_tensorflow(
        self,
        http_s3_openvino_model_mesh_inference_service,
        http_model_mesh_inference_token,
    ):
        """Verify model query with token using REST"""
        self._query_model(
            http_s3_openvino_model_mesh_inference_service,
            token=http_model_mesh_inference_token,
        )

    @pytest.mark.dependency(name="test_model_mesh_disabled_model_authentication")
    def test_model_mesh_disabled_model_authentication(
        self,
        patched_remove_authentication_model_mesh_runtime,
        http_s3_openvino_model_mesh_inference_service,
    ):
        """Verify model query after authentication is disabled"""
        # No token passed: with auth disabled on the runtime the request
        # must still succeed.
        self._query_model(http_s3_openvino_model_mesh_inference_service)

    @pytest.mark.dependency(depends=["test_model_mesh_disabled_model_authentication"])
    def test_model_mesh_re_enabled_model_authentication(
        self,
        http_s3_openvino_model_mesh_inference_service,
        http_model_mesh_inference_token,
    ):
        """Verify model query after authentication is re-enabled"""
        self._query_model(
            http_s3_openvino_model_mesh_inference_service,
            token=http_model_mesh_inference_token,
        )

    @pytest.mark.dependency(depends=["test_model_mesh_model_authentication_openvino_inference_with_tensorflow"])
    def test_model_mesh_model_authentication_using_invalid_token(self, http_s3_openvino_model_mesh_inference_service):
        """Verify model query with an invalid token"""
        self._query_model(
            http_s3_openvino_model_mesh_inference_service,
            token="dummy",
            authorized_user=False,
        )

    @pytest.mark.dependency(depends=["test_model_mesh_model_authentication_openvino_inference_with_tensorflow"])
    def test_model_mesh_model_authentication_without_token(self, http_s3_openvino_model_mesh_inference_service):
        """Verify model query without providing a token"""
        self._query_model(
            http_s3_openvino_model_mesh_inference_service,
            authorized_user=False,
        )

tests/model_serving/model_server/components/kserve_dsc_deployment_mode/test_kserve_dsc_default_deployment_mode.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -170,7 +170,7 @@ def test_isvc_on_dsc_default_deployment_mode_change_to_serverless(
170170
],
171171
indirect=True,
172172
)
173-
def test_restarted_pod_is_serverless(
173+
def test_restarted_pod_is_raw(
174174
self,
175175
patched_default_deployment_mode_in_dsc,
176176
restarted_inference_pod,

0 commit comments

Comments (0)