|
2 | 2 |
|
3 | 3 | import pytest |
4 | 4 | from ocp_resources.route import Route |
| 5 | +from ocp_resources.secret import Secret |
5 | 6 | from ocp_resources.service import Service |
6 | 7 | from pytest import FixtureRequest |
7 | 8 | from kubernetes.dynamic import DynamicClient |
@@ -307,3 +308,133 @@ def vllm_emulator_route( |
307 | 308 | service=vllm_emulator_service.name, |
308 | 309 | ) as route: |
309 | 310 | yield route |
| 311 | + |
| 312 | + |
@pytest.fixture(scope="function")
def lmeval_minio_pvc(
    admin_client: DynamicClient, model_namespace: Namespace
) -> Generator[PersistentVolumeClaim, Any, Any]:
    """Create a 10Gi RWO PersistentVolumeClaim ("minio-pvc") backing the MinIO server.

    The PVC is created in the test namespace and torn down when the fixture
    exits its context manager.
    """
    # Annotations mirror the upstream helm chart so the resource ordering matches.
    helm_hook_annotations = {
        "helm.sh/hook": "pre-install",
        "helm.sh/hook-weight": "0",
    }
    with PersistentVolumeClaim(
        client=admin_client,
        name="minio-pvc",
        namespace=model_namespace.name,
        annotations=helm_hook_annotations,
        accessmodes=PersistentVolumeClaim.AccessMode.RWO,
        size="10Gi",
    ) as pvc:
        yield pvc
| 329 | + |
| 330 | + |
@pytest.fixture(scope="function")
def lmeval_minio_deployment(
    admin_client: DynamicClient, model_namespace: Namespace, lmeval_minio_pvc: PersistentVolumeClaim
) -> Generator[Deployment, Any, Any]:
    """Deploy a single-replica MinIO server backed by the ``lmeval_minio_pvc`` PVC.

    Yields the Deployment only after all replicas report ready (up to 10 min).
    The console listens on 9001, the S3 API on 9000.
    """
    with Deployment(
        client=admin_client,
        name="minio",
        namespace=model_namespace.name,
        annotations={
            "helm.sh/hook": "pre-install",
            "helm.sh/hook-weight": "0",
        },
        replicas=1,
        selector={"matchLabels": {"app": "minio"}},
        template={
            "metadata": {"labels": {"app": "minio"}},
            "spec": {
                # Reference the PVC fixture by name instead of hardcoding "minio-pvc",
                # so a rename of the PVC cannot silently break the volume mount.
                "volumes": [
                    {"name": "minio-storage", "persistentVolumeClaim": {"claimName": lmeval_minio_pvc.name}}
                ],
                "containers": [
                    {
                        "name": "minio",
                        "image": "quay.io/minio/minio:latest",
                        "args": ["server", "/data", "--console-address", ":9001"],
                        # NOTE(review): static test-only credentials; fine for ephemeral CI namespaces.
                        "env": [
                            {"name": "MINIO_ROOT_USER", "value": "minioadmin"},
                            {"name": "MINIO_ROOT_PASSWORD", "value": "minioadmin"},
                        ],
                        "ports": [{"containerPort": 9000}, {"containerPort": 9001}],
                        "volumeMounts": [{"name": "minio-storage", "mountPath": "/data"}],
                    }
                ],
            },
        },
        label={"app": "minio"},
        wait_for_resource=True,
    ) as deployment:
        deployment.wait_for_replicas(timeout=Timeout.TIMEOUT_10MIN)
        yield deployment
| 369 | + |
| 370 | + |
@pytest.fixture(scope="function")
def lmeval_minio_copy_pod(
    admin_client: DynamicClient, model_namespace: Namespace, lmeval_minio_deployment: Deployment, minio_service: Service
) -> Generator[Pod, Any, Any]:
    """Run a one-shot Pod that seeds the MinIO ``models`` bucket with LMEval assets.

    An init container copies the baked-in dataset from the assets image into a
    shared emptyDir; the main container then uses ``mc`` to create the bucket
    and upload the data. Yields the Pod after it reaches ``Succeeded``.
    """
    with Pod(
        client=admin_client,
        name="copy-to-minio",
        namespace=model_namespace.name,
        restart_policy="Never",
        volumes=[{"name": "shared-data", "emptyDir": {}}],
        init_containers=[
            {
                "name": "copy-data",
                "image": "quay.io/ruimvieira/lmeval-assets-flan-arceasy:latest",
                "command": ["/bin/sh", "-c"],
                "args": ["cp -r /mnt/data /shared"],
                "volumeMounts": [{"name": "shared-data", "mountPath": "/shared"}],
            }
        ],
        containers=[
            {
                "name": "minio-uploader",
                "image": "quay.io/minio/mc:latest",
                "command": ["/bin/sh", "-c"],
                # Derive the endpoint host from the Service fixture this Pod already
                # depends on, instead of hardcoding "minio" — keeps the two in sync.
                "args": [
                    f"mc alias set myminio http://{minio_service.name}:9000 minioadmin minioadmin &&\n"
                    "mc mb --ignore-existing myminio/models &&\n"
                    "mc cp --recursive /shared/data/ myminio/models"
                ],
                "volumeMounts": [{"name": "shared-data", "mountPath": "/shared"}],
            }
        ],
        wait_for_resource=True,
    ) as pod:
        pod.wait_for_status(status=Pod.Status.SUCCEEDED)
        yield pod
| 407 | + |
| 408 | + |
@pytest.fixture(scope="function")
def lmevaljob_s3_offline(
    admin_client: DynamicClient,
    model_namespace: Namespace,
    lmeval_minio_deployment: Deployment,
    lmeval_minio_copy_pod: Pod,
    minio_data_connection: Secret,
) -> Generator[LMEvalJob, Any, Any]:
    """Create an offline LMEvalJob that pulls its model/data from MinIO via S3.

    All S3 credentials and connection details are resolved from keys of the
    ``minio_data_connection`` Secret. Online access is disabled; the job runs
    the ``arc_easy`` task against the locally-downloaded flan model.
    """

    def _secret_ref(key: str) -> dict:
        # Every S3 field reads a key from the same data-connection Secret.
        return {"name": minio_data_connection.name, "key": key}

    s3_storage = {
        "accessKeyId": _secret_ref("AWS_ACCESS_KEY_ID"),
        "secretAccessKey": _secret_ref("AWS_SECRET_ACCESS_KEY"),
        "bucket": _secret_ref("AWS_S3_BUCKET"),
        "endpoint": _secret_ref("AWS_S3_ENDPOINT"),
        "region": _secret_ref("AWS_DEFAULT_REGION"),
        "path": "",
        "verifySSL": False,
    }

    with LMEvalJob(
        client=admin_client,
        name="evaljob-sample",
        namespace=model_namespace.name,
        model="hf",
        model_args=[{"name": "pretrained", "value": "/opt/app-root/src/hf_home/flan"}],
        task_list={"taskNames": ["arc_easy"]},
        log_samples=True,
        allow_online=False,
        offline={"storage": {"s3": s3_storage}},
    ) as job:
        yield job
0 commit comments