Skip to content

Commit 5c6271b

Browse files
authored
[model server] add raw with external route to serverless interop tests (#178)
* Create size-labeler.yml * Delete .github/workflows/size-labeler.yml * model mesh - add auth tests * xx * feat: add raw external with serverless * feat: add raw with external route to serverless interop tests * fix: fixture usage * fix: split to classes * fix: fix protocol
1 parent 1d02fe1 commit 5c6271b

File tree

1 file changed

+96
-8
lines changed

1 file changed

+96
-8
lines changed

tests/model_serving/model_server/components/raw_deployment_serverless_co_exist/test_raw_deployment_serverless_inference_co_exist.py

Lines changed: 96 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -57,11 +57,52 @@
5757
],
5858
indirect=True,
5959
)
60-
class TestServerlessRawDeploymentInferenceCoExist:
61-
def test_serverless_openvino_created_before_raw_deployment_caikit_inference(
60+
class TestServerlessRawInternalDeploymentInferenceCoExist:
61+
def test_serverless_openvino_created_before_raw_internal_deployment_caikit_inference(
62+
self, ovms_serverless_inference_service, s3_models_inference_service
63+
):
64+
"""Verify that Serverless model can be queried when running with raw deployment inference service"""
65+
verify_inference_response(
66+
inference_service=ovms_serverless_inference_service,
67+
inference_config=OPENVINO_KSERVE_INFERENCE_CONFIG,
68+
inference_type=Inference.INFER,
69+
protocol=Protocols.HTTPS,
70+
use_default_query=True,
71+
)
72+
73+
def test_raw_internal_deployment_caikit_created_after_serverless_in_namespace_rest_inference(
6274
self,
6375
ovms_serverless_inference_service,
6476
s3_models_inference_service,
77+
):
78+
"""Verify that raw deployment model can be queried when running with kserve inference service"""
79+
verify_inference_response(
80+
inference_service=s3_models_inference_service,
81+
inference_config=CAIKIT_STANDALONE_INFERENCE_CONFIG,
82+
inference_type="embedding",
83+
protocol=Protocols.HTTP,
84+
model_name=ModelName.CAIKIT_BGE_LARGE_EN,
85+
use_default_query=True,
86+
)
87+
88+
89+
@pytest.mark.parametrize(
90+
"model_namespace, openvino_kserve_serving_runtime, ovms_serverless_inference_service, "
91+
"serving_runtime_from_template, s3_models_inference_service",
92+
[
93+
pytest.param(
94+
{"name": "serverless-raw-ext-deployment"},
95+
SERVERLESS_RUNTIME_PARAMS,
96+
SERVERLESS_ISVC_PARAMS,
97+
RAW_RUNTIME_PARAMS,
98+
{**RAW_ISVC_PARAMS, "external-route": True},
99+
),
100+
],
101+
indirect=True,
102+
)
103+
class TestServerlessRawExternalDeploymentInferenceCoExist:
104+
def test_serverless_openvino_created_before_raw_external_deployment_caikit_inference(
105+
self, ovms_serverless_inference_service, s3_models_inference_service
65106
):
66107
"""Verify that Serverless model can be queried when running with raw deployment inference service"""
67108
verify_inference_response(
@@ -72,7 +113,7 @@ def test_serverless_openvino_created_before_raw_deployment_caikit_inference(
72113
use_default_query=True,
73114
)
74115

75-
def test_raw_deployment_caikit_created_after_serverless_in_namespace_rest_inference(
116+
def test_raw_external_deployment_caikit_created_after_serverless_in_namespace_rest_inference(
76117
self,
77118
ovms_serverless_inference_service,
78119
s3_models_inference_service,
@@ -82,7 +123,7 @@ def test_raw_deployment_caikit_created_after_serverless_in_namespace_rest_infere
82123
inference_service=s3_models_inference_service,
83124
inference_config=CAIKIT_STANDALONE_INFERENCE_CONFIG,
84125
inference_type="embedding",
85-
protocol=Protocols.HTTP,
126+
protocol=Protocols.HTTPS,
86127
model_name=ModelName.CAIKIT_BGE_LARGE_EN,
87128
use_default_query=True,
88129
)
@@ -102,9 +143,11 @@ def test_raw_deployment_caikit_created_after_serverless_in_namespace_rest_infere
102143
],
103144
indirect=True,
104145
)
105-
class TestRawDeploymentServerlessInferenceCoExist:
106-
def test_raw_deployment_caikit_created_before_serverless_openvino_in_namespace_rest_inference(
107-
self, s3_models_inference_service, ovms_serverless_inference_service
146+
class TestRawInternalDeploymentServerlessInferenceCoExist:
147+
def test_raw_internal_deployment_caikit_created_before_serverless_openvino_in_namespace_rest_inference(
148+
self,
149+
s3_models_inference_service,
150+
ovms_serverless_inference_service,
108151
):
109152
"""Verify that raw deployment model can be queried when running with kserve inference service"""
110153
verify_inference_response(
@@ -116,7 +159,52 @@ def test_raw_deployment_caikit_created_before_serverless_openvino_in_namespace_r
116159
use_default_query=True,
117160
)
118161

119-
def test_serverless_openvino_created_after_raw_deployment_caikit_ns_rest_inference(
162+
def test_serverless_openvino_created_after_raw_internal_deployment_caikit_ns_rest_inference(
163+
self,
164+
s3_models_inference_service,
165+
ovms_serverless_inference_service,
166+
):
167+
"""Verify that Serverless model can be queried when running with raw deployment exists"""
168+
verify_inference_response(
169+
inference_service=ovms_serverless_inference_service,
170+
inference_config=OPENVINO_KSERVE_INFERENCE_CONFIG,
171+
inference_type=Inference.INFER,
172+
protocol=Protocols.HTTPS,
173+
use_default_query=True,
174+
)
175+
176+
177+
@pytest.mark.parametrize(
178+
"model_namespace, serving_runtime_from_template, s3_models_inference_service,"
179+
"openvino_kserve_serving_runtime, ovms_serverless_inference_service",
180+
[
181+
pytest.param(
182+
{"name": "raw-etx-deployment-serverless"},
183+
RAW_RUNTIME_PARAMS,
184+
{**RAW_ISVC_PARAMS, "external-route": True},
185+
SERVERLESS_RUNTIME_PARAMS,
186+
SERVERLESS_ISVC_PARAMS,
187+
),
188+
],
189+
indirect=True,
190+
)
191+
class TestRawExternalDeploymentServerlessInferenceCoExist:
192+
def test_raw_external_deployment_caikit_created_before_serverless_openvino_in_namespace_rest_inference(
193+
self,
194+
s3_models_inference_service,
195+
ovms_serverless_inference_service,
196+
):
197+
"""Verify that raw deployment model can be queried when running with kserve inference service"""
198+
verify_inference_response(
199+
inference_service=s3_models_inference_service,
200+
inference_config=CAIKIT_STANDALONE_INFERENCE_CONFIG,
201+
inference_type="embedding",
202+
protocol=Protocols.HTTPS,
203+
model_name=ModelName.CAIKIT_BGE_LARGE_EN,
204+
use_default_query=True,
205+
)
206+
207+
def test_serverless_openvino_created_after_raw_external_deployment_caikit_ns_rest_inference(
120208
self, s3_models_inference_service, ovms_serverless_inference_service
121209
):
122210
"""Verify that Serverless model can be queried when running with raw deployment exists"""

0 commit comments

Comments
 (0)