Skip to content

Commit de11af3

Browse files
committed
feat: add raw with external route to serverless interop tests
1 parent 262da0f commit de11af3

File tree

1 file changed

+12
-9
lines changed

1 file changed

+12
-9
lines changed

tests/model_serving/model_server/components/raw_deployment_serverless_co_exist/test_raw_deployment_serverless_inference_co_exist.py

Lines changed: 12 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -45,30 +45,30 @@
4545

4646
@pytest.mark.parametrize(
4747
"model_namespace, openvino_kserve_serving_runtime, ovms_serverless_inference_service, "
48-
"serving_runtime_from_template, s3_models_inference_service",
48+
"serving_runtime_from_template, s3_models_inference_service, protocol",
4949
[
5050
pytest.param(
5151
{"name": "serverless-raw-deployment"},
5252
SERVERLESS_RUNTIME_PARAMS,
5353
SERVERLESS_ISVC_PARAMS,
5454
RAW_RUNTIME_PARAMS,
5555
RAW_ISVC_PARAMS,
56+
Protocols.HTTP,
5657
),
5758
pytest.param(
5859
{"name": "serverless-raw-ext-deployment"},
5960
SERVERLESS_RUNTIME_PARAMS,
6061
SERVERLESS_ISVC_PARAMS,
6162
RAW_RUNTIME_PARAMS,
6263
{**RAW_ISVC_PARAMS, "external-route": True},
64+
Protocols.HTTPS,
6365
),
6466
],
6567
indirect=True,
6668
)
6769
class TestServerlessRawDeploymentInferenceCoExist:
6870
def test_serverless_openvino_created_before_raw_deployment_caikit_inference(
69-
self,
70-
ovms_serverless_inference_service,
71-
s3_models_inference_service,
71+
self, ovms_serverless_inference_service, s3_models_inference_service, protocol
7272
):
7373
"""Verify that Serverless model can be queried when running with raw deployment inference service"""
7474
verify_inference_response(
@@ -83,55 +83,58 @@ def test_raw_deployment_caikit_created_after_serverless_in_namespace_rest_infere
8383
self,
8484
ovms_serverless_inference_service,
8585
s3_models_inference_service,
86+
protocol,
8687
):
8788
"""Verify that raw deployment model can be queried when running with kserve inference service"""
8889
verify_inference_response(
8990
inference_service=s3_models_inference_service,
9091
inference_config=CAIKIT_STANDALONE_INFERENCE_CONFIG,
9192
inference_type="embedding",
92-
protocol=Protocols.HTTP,
93+
protocol=protocol,
9394
model_name=ModelName.CAIKIT_BGE_LARGE_EN,
9495
use_default_query=True,
9596
)
9697

9798

9899
@pytest.mark.parametrize(
99100
"model_namespace, serving_runtime_from_template, s3_models_inference_service,"
100-
"openvino_kserve_serving_runtime, ovms_serverless_inference_service",
101+
"openvino_kserve_serving_runtime, ovms_serverless_inference_service, protocol",
101102
[
102103
pytest.param(
103104
{"name": "raw-deployment-serverless"},
104105
RAW_RUNTIME_PARAMS,
105106
RAW_ISVC_PARAMS,
106107
SERVERLESS_RUNTIME_PARAMS,
107108
SERVERLESS_ISVC_PARAMS,
109+
Protocols.HTTP,
108110
),
109111
pytest.param(
110112
{"name": "raw-etx-deployment-serverless"},
111113
RAW_RUNTIME_PARAMS,
112114
{**RAW_ISVC_PARAMS, "external-route": True},
113115
SERVERLESS_RUNTIME_PARAMS,
114116
SERVERLESS_ISVC_PARAMS,
117+
Protocols.HTTPS,
115118
),
116119
],
117120
indirect=True,
118121
)
119122
class TestRawDeploymentServerlessInferenceCoExist:
120123
def test_raw_deployment_caikit_created_before_serverless_openvino_in_namespace_rest_inference(
121-
self, s3_models_inference_service, ovms_serverless_inference_service
124+
self, s3_models_inference_service, ovms_serverless_inference_service, protocol
122125
):
123126
"""Verify that raw deployment model can be queried when running with kserve inference service"""
124127
verify_inference_response(
125128
inference_service=s3_models_inference_service,
126129
inference_config=CAIKIT_STANDALONE_INFERENCE_CONFIG,
127130
inference_type="embedding",
128-
protocol=Protocols.HTTP,
131+
protocol=protocol,
129132
model_name=ModelName.CAIKIT_BGE_LARGE_EN,
130133
use_default_query=True,
131134
)
132135

133136
def test_serverless_openvino_created_after_raw_deployment_caikit_ns_rest_inference(
134-
self, s3_models_inference_service, ovms_serverless_inference_service
137+
self, s3_models_inference_service, ovms_serverless_inference_service, protocol
135138
):
136139
"""Verify that Serverless model can be queried when running with raw deployment exists"""
137140
verify_inference_response(

0 commit comments

Comments
 (0)