Skip to content

Commit 58cc592

Browse files
israel-hdez, pre-commit-ci[bot], and Jooho
authored
Add more coverage for InferenceGraph
* Add more coverage for InferenceGraph
* Add more tests for Serverless InferenceGraph to cover private cases and auth
* Add test cases for Raw InferenceGraph covering basic deployment, auth, private cases, and combinations. Additionally, this moves to using the unprivileged client for IG-related fixtures.

---

## Summary by CodeRabbit

* New Features
  * Added comprehensive tests covering multiple deployment and authentication scenarios for inference graphs, including raw and serverless modes.
  * Introduced utilities for creating and managing service accounts and role-based access for inference graph viewing.
  * Added new helper functions for handling inference graph pod selection and Kubernetes role creation.
  * Enhanced inference utilities to support InferenceGraph objects with improved exposure detection and inference execution.
* Bug Fixes
  * Improved handling of authentication and authorization checks in inference response verification, especially for raw deployment scenarios.
* Tests
  * Expanded test coverage for inference graphs to validate access control, deployment modes, and service exposure.
  * Added fixtures to support flexible, parameterized test setups for various user privilege levels.
* Chores
  * Updated fixtures to use unprivileged clients and namespaces, enhancing security and test reliability.
  * Refined configuration for the ONNX inference endpoint to improve deployment compatibility.

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Jooho Lee <ljhiyh@gmail.com>
1 parent 67a427c commit 58cc592

File tree

9 files changed

+483
-44
lines changed

9 files changed

+483
-44
lines changed
Lines changed: 111 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
from secrets import token_hex
12
from typing import Generator, Any
23

34
import pytest
@@ -6,17 +7,21 @@
67
from ocp_resources.inference_graph import InferenceGraph
78
from ocp_resources.inference_service import InferenceService
89
from ocp_resources.namespace import Namespace
10+
from ocp_resources.role_binding import RoleBinding
911
from ocp_resources.secret import Secret
12+
from ocp_resources.service_account import ServiceAccount
1013
from ocp_resources.serving_runtime import ServingRuntime
1114

12-
from utilities.constants import ModelFormat, KServeDeploymentType, ModelStoragePath
15+
from utilities.constants import ModelFormat, KServeDeploymentType, ModelStoragePath, Annotations, Labels
1316
from utilities.inference_utils import create_isvc
17+
from utilities.infra import create_inference_token, create_inference_graph_view_role
1418

1519

1620
@pytest.fixture
1721
def dog_breed_inference_graph(
18-
admin_client: DynamicClient,
19-
model_namespace: Namespace,
22+
request: FixtureRequest,
23+
unprivileged_client: DynamicClient,
24+
unprivileged_model_namespace: Namespace,
2025
dog_cat_inference_service: InferenceService,
2126
dog_breed_inference_service: InferenceService,
2227
) -> Generator[InferenceGraph, Any, Any]:
@@ -34,28 +39,58 @@ def dog_breed_inference_graph(
3439
],
3540
}
3641
}
42+
43+
annotations = {}
44+
labels = {}
45+
networking_label = Labels.Kserve.NETWORKING_KNATIVE_IO
46+
try:
47+
if request.param.get("deployment-mode"):
48+
annotations[Annotations.KserveIo.DEPLOYMENT_MODE] = request.param["deployment-mode"]
49+
if request.param["deployment-mode"] == KServeDeploymentType.RAW_DEPLOYMENT:
50+
networking_label = Labels.Kserve.NETWORKING_KSERVE_IO
51+
except AttributeError:
52+
pass
53+
54+
try:
55+
if request.param.get("enable-auth"):
56+
annotations[Annotations.KserveAuth.SECURITY] = "true"
57+
except AttributeError:
58+
pass
59+
60+
try:
61+
name = request.param["name"]
62+
except (AttributeError, KeyError):
63+
name = "dog-breed-pipeline"
64+
65+
try:
66+
if not request.param["external-route"]:
67+
labels[networking_label] = "cluster-local"
68+
except (AttributeError, KeyError):
69+
pass
70+
3771
with InferenceGraph(
38-
client=admin_client,
39-
name="dog-breed-pipeline",
40-
namespace=model_namespace.name,
72+
client=unprivileged_client,
73+
name=name,
74+
namespace=unprivileged_model_namespace.name,
4175
nodes=nodes,
76+
annotations=annotations,
77+
label=labels,
4278
) as inference_graph:
4379
inference_graph.wait_for_condition(condition=inference_graph.Condition.READY, status="True")
4480
yield inference_graph
4581

4682

47-
@pytest.fixture
83+
@pytest.fixture(scope="class")
4884
def dog_cat_inference_service(
49-
request: FixtureRequest,
50-
admin_client: DynamicClient,
51-
model_namespace: Namespace,
85+
unprivileged_client: DynamicClient,
86+
unprivileged_model_namespace: Namespace,
5287
ovms_kserve_serving_runtime: ServingRuntime,
5388
models_endpoint_s3_secret: Secret,
5489
) -> Generator[InferenceService, Any, Any]:
5590
with create_isvc(
56-
client=admin_client,
91+
client=unprivileged_client,
5792
name="dog-cat-classifier",
58-
namespace=model_namespace.name,
93+
namespace=unprivileged_model_namespace.name,
5994
runtime=ovms_kserve_serving_runtime.name,
6095
storage_key=models_endpoint_s3_secret.name,
6196
storage_path=ModelStoragePath.CAT_DOG_ONNX,
@@ -66,18 +101,17 @@ def dog_cat_inference_service(
66101
yield isvc
67102

68103

69-
@pytest.fixture
104+
@pytest.fixture(scope="class")
70105
def dog_breed_inference_service(
71-
request: FixtureRequest,
72-
admin_client: DynamicClient,
73-
model_namespace: Namespace,
106+
unprivileged_client: DynamicClient,
107+
unprivileged_model_namespace: Namespace,
74108
ovms_kserve_serving_runtime: ServingRuntime,
75109
models_endpoint_s3_secret: Secret,
76110
) -> Generator[InferenceService, Any, Any]:
77111
with create_isvc(
78-
client=admin_client,
112+
client=unprivileged_client,
79113
name="dog-breed-classifier",
80-
namespace=model_namespace.name,
114+
namespace=unprivileged_model_namespace.name,
81115
runtime=ovms_kserve_serving_runtime.name,
82116
storage_key=models_endpoint_s3_secret.name,
83117
storage_path=ModelStoragePath.DOG_BREED_ONNX,
@@ -86,3 +120,62 @@ def dog_breed_inference_service(
86120
protocol_version="v2",
87121
) as isvc:
88122
yield isvc
123+
124+
125+
@pytest.fixture
126+
def inference_graph_unprivileged_sa_token(
127+
bare_service_account: ServiceAccount,
128+
) -> str:
129+
return create_inference_token(model_service_account=bare_service_account)
130+
131+
132+
@pytest.fixture
133+
def inference_graph_sa_token_with_access(
134+
service_account_with_access: ServiceAccount,
135+
) -> str:
136+
return create_inference_token(model_service_account=service_account_with_access)
137+
138+
139+
@pytest.fixture
140+
def service_account_with_access(
141+
unprivileged_client: DynamicClient,
142+
unprivileged_model_namespace: Namespace,
143+
dog_breed_inference_graph: InferenceGraph,
144+
bare_service_account: ServiceAccount,
145+
) -> Generator[ServiceAccount, Any, Any]:
146+
with create_inference_graph_view_role(
147+
client=unprivileged_client,
148+
name=f"{dog_breed_inference_graph.name}-view",
149+
namespace=unprivileged_model_namespace.name,
150+
resource_names=[dog_breed_inference_graph.name],
151+
) as role:
152+
with RoleBinding(
153+
client=unprivileged_client,
154+
namespace=unprivileged_model_namespace.name,
155+
name=f"{bare_service_account.name}-view",
156+
role_ref_name=role.name,
157+
role_ref_kind=role.kind,
158+
subjects_kind=bare_service_account.kind,
159+
subjects_name=bare_service_account.name,
160+
):
161+
yield bare_service_account
162+
163+
164+
@pytest.fixture
165+
def bare_service_account(
166+
request: FixtureRequest,
167+
unprivileged_client: DynamicClient,
168+
unprivileged_model_namespace: Namespace,
169+
) -> Generator[ServiceAccount, Any, Any]:
170+
try:
171+
if request.param["name"]:
172+
name = request.param["name"]
173+
except (AttributeError, KeyError):
174+
name = "sa-" + token_hex(4)
175+
176+
with ServiceAccount(
177+
client=unprivileged_client,
178+
namespace=unprivileged_model_namespace.name,
179+
name=name,
180+
) as sa:
181+
yield sa

tests/model_serving/model_server/inference_graph/test_inference_graph_deployment.py

Lines changed: 68 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -6,18 +6,83 @@
66
from utilities.manifests.onnx import ONNX_INFERENCE_CONFIG
77

88

9+
@pytest.mark.serverless
10+
@pytest.mark.sanity
911
@pytest.mark.parametrize(
10-
"model_namespace,ovms_kserve_serving_runtime",
11-
[pytest.param({"name": "kserve-inference-graph-deploy"}, {"runtime-name": ModelInferenceRuntime.ONNX_RUNTIME})],
12+
"unprivileged_model_namespace,ovms_kserve_serving_runtime",
13+
[
14+
pytest.param(
15+
{"name": "kserve-inference-graph-deploy"},
16+
{"runtime-name": ModelInferenceRuntime.ONNX_RUNTIME},
17+
)
18+
],
1219
indirect=True,
1320
)
1421
class TestInferenceGraphDeployment:
15-
def test_inference_graph_deployment(self, dog_breed_inference_graph):
22+
@pytest.mark.parametrize(
23+
"dog_breed_inference_graph",
24+
[{"name": "dog-breed-serverless-pipeline"}],
25+
indirect=True,
26+
)
27+
def test_inference_graph_serverless_deployment(self, dog_breed_inference_graph):
28+
verify_inference_response(
29+
inference_service=dog_breed_inference_graph,
30+
inference_config=ONNX_INFERENCE_CONFIG,
31+
inference_type=Inference.GRAPH,
32+
model_name="dog-breed-classifier",
33+
protocol=Protocols.HTTPS,
34+
use_default_query=True,
35+
)
36+
37+
@pytest.mark.parametrize(
38+
"dog_breed_inference_graph",
39+
[{"name": "dog-breed-private-serverless-ig", "external-route": False}],
40+
indirect=True,
41+
)
42+
def test_private_inference_graph_serverless_deployment(self, dog_breed_inference_graph):
43+
verify_inference_response(
44+
inference_service=dog_breed_inference_graph,
45+
inference_config=ONNX_INFERENCE_CONFIG,
46+
inference_type=Inference.GRAPH,
47+
model_name="dog-breed-classifier",
48+
protocol=Protocols.HTTP,
49+
use_default_query=True,
50+
)
51+
52+
@pytest.mark.smoke
53+
@pytest.mark.parametrize(
54+
"dog_breed_inference_graph",
55+
[{"name": "dog-breed-auth-serverless-ig", "enable-auth": True}],
56+
indirect=True,
57+
)
58+
def test_inference_graph_serverless_authentication(
59+
self, dog_breed_inference_graph, inference_graph_sa_token_with_access
60+
):
61+
verify_inference_response(
62+
inference_service=dog_breed_inference_graph,
63+
inference_config=ONNX_INFERENCE_CONFIG,
64+
inference_type=Inference.GRAPH,
65+
model_name="dog-breed-classifier",
66+
protocol=Protocols.HTTPS,
67+
use_default_query=True,
68+
token=inference_graph_sa_token_with_access,
69+
)
70+
71+
@pytest.mark.parametrize(
72+
"dog_breed_inference_graph",
73+
[{"name": "dog-breed-bad-auth-serverless-ig", "enable-auth": True}],
74+
indirect=True,
75+
)
76+
def test_inference_graph_serverless_authentication_without_privileges(
77+
self, dog_breed_inference_graph, inference_graph_unprivileged_sa_token
78+
):
1679
verify_inference_response(
1780
inference_service=dog_breed_inference_graph,
1881
inference_config=ONNX_INFERENCE_CONFIG,
1982
inference_type=Inference.GRAPH,
2083
model_name="dog-breed-classifier",
2184
protocol=Protocols.HTTPS,
2285
use_default_query=True,
86+
token=inference_graph_unprivileged_sa_token,
87+
authorized_user=False,
2388
)

0 commit comments

Comments (0)