diff --git a/tests/model_registry/conftest.py b/tests/model_registry/conftest.py index 05c07e6d4..bf6b3bca8 100644 --- a/tests/model_registry/conftest.py +++ b/tests/model_registry/conftest.py @@ -62,124 +62,173 @@ def model_registry_namespace(updated_dsc_component_state_scope_class: DataScienc @pytest.fixture(scope="class") def model_registry_db_service( - admin_client: DynamicClient, model_registry_namespace: str, is_model_registry_oauth: bool + pytestconfig: Config, + admin_client: DynamicClient, + model_registry_namespace: str, + teardown_resources: bool, + is_model_registry_oauth: bool, ) -> Generator[Service, Any, Any]: - with Service( - client=admin_client, - name=DB_RESOURCES_NAME, - namespace=model_registry_namespace, - ports=[ - { - "name": "mysql", - "nodePort": 0, - "port": 3306, - "protocol": "TCP", - "appProtocol": "tcp", - "targetPort": 3306, - } - ], - selector={ - "name": DB_RESOURCES_NAME, - }, - label=get_model_registry_db_label_dict(db_resource_name=DB_RESOURCES_NAME), - annotations={ - "template.openshift.io/expose-uri": r"mysql://{.spec.clusterIP}:{.spec.ports[?(.name==\mysql\)].port}", - }, - ) as mr_db_service: + if pytestconfig.option.post_upgrade: + mr_db_service = Service(name=DB_RESOURCES_NAME, namespace=model_registry_namespace, ensure_exists=True) yield mr_db_service + mr_db_service.delete(wait=True) + else: + with Service( + client=admin_client, + name=DB_RESOURCES_NAME, + namespace=model_registry_namespace, + ports=[ + { + "name": "mysql", + "nodePort": 0, + "port": 3306, + "protocol": "TCP", + "appProtocol": "tcp", + "targetPort": 3306, + } + ], + selector={ + "name": DB_RESOURCES_NAME, + }, + label=get_model_registry_db_label_dict(db_resource_name=DB_RESOURCES_NAME), + annotations={ + "template.openshift.io/expose-uri": r"mysql://{.spec.clusterIP}:{.spec.ports[?(.name==\mysql\)].port}", + }, + teardown=teardown_resources, + ) as mr_db_service: + yield mr_db_service @pytest.fixture(scope="class") def model_registry_db_pvc( - 
admin_client: DynamicClient, model_registry_namespace: str, is_model_registry_oauth: bool + pytestconfig: Config, + admin_client: DynamicClient, + model_registry_namespace: str, + teardown_resources: bool, + is_model_registry_oauth: bool, ) -> Generator[PersistentVolumeClaim, Any, Any]: - with PersistentVolumeClaim( - accessmodes="ReadWriteOnce", - name=DB_RESOURCES_NAME, - namespace=model_registry_namespace, - client=admin_client, - size="5Gi", - label=get_model_registry_db_label_dict(db_resource_name=DB_RESOURCES_NAME), - ) as pvc: - yield pvc + if pytestconfig.option.post_upgrade: + mr_db_pvc = PersistentVolumeClaim( + name=DB_RESOURCES_NAME, namespace=model_registry_namespace, ensure_exists=True + ) + yield mr_db_pvc + mr_db_pvc.delete(wait=True) + else: + with PersistentVolumeClaim( + accessmodes="ReadWriteOnce", + name=DB_RESOURCES_NAME, + namespace=model_registry_namespace, + client=admin_client, + size="5Gi", + label=get_model_registry_db_label_dict(db_resource_name=DB_RESOURCES_NAME), + teardown=teardown_resources, + ) as pvc: + yield pvc @pytest.fixture(scope="class") def model_registry_db_secret( + pytestconfig: Config, admin_client: DynamicClient, model_registry_namespace: str, + teardown_resources: bool, is_model_registry_oauth: bool, ) -> Generator[Secret, Any, Any]: - with Secret( - client=admin_client, - name=DB_RESOURCES_NAME, - namespace=model_registry_namespace, - string_data=MODEL_REGISTRY_DB_SECRET_STR_DATA, - label=get_model_registry_db_label_dict(db_resource_name=DB_RESOURCES_NAME), - annotations=MODEL_REGISTRY_DB_SECRET_ANNOTATIONS, - ) as mr_db_secret: + if pytestconfig.option.post_upgrade: + mr_db_secret = Secret(name=DB_RESOURCES_NAME, namespace=model_registry_namespace, ensure_exists=True) yield mr_db_secret + mr_db_secret.delete(wait=True) + else: + with Secret( + client=admin_client, + name=DB_RESOURCES_NAME, + namespace=model_registry_namespace, + string_data=MODEL_REGISTRY_DB_SECRET_STR_DATA, + 
label=get_model_registry_db_label_dict(db_resource_name=DB_RESOURCES_NAME), + annotations=MODEL_REGISTRY_DB_SECRET_ANNOTATIONS, + teardown=teardown_resources, + ) as mr_db_secret: + yield mr_db_secret @pytest.fixture(scope="class") def model_registry_db_deployment( + pytestconfig: Config, admin_client: DynamicClient, model_registry_namespace: str, model_registry_db_secret: Secret, model_registry_db_pvc: PersistentVolumeClaim, model_registry_db_service: Service, + teardown_resources: bool, is_model_registry_oauth: bool, ) -> Generator[Deployment, Any, Any]: - with Deployment( - name=DB_RESOURCES_NAME, - namespace=model_registry_namespace, - annotations={ - "template.alpha.openshift.io/wait-for-ready": "true", - }, - label=get_model_registry_db_label_dict(db_resource_name=DB_RESOURCES_NAME), - replicas=1, - revision_history_limit=0, - selector={"matchLabels": {"name": DB_RESOURCES_NAME}}, - strategy={"type": "Recreate"}, - template=get_model_registry_deployment_template_dict( - secret_name=model_registry_db_secret.name, resource_name=DB_RESOURCES_NAME - ), - wait_for_resource=True, - ) as mr_db_deployment: - mr_db_deployment.wait_for_replicas(deployed=True) - yield mr_db_deployment + if pytestconfig.option.post_upgrade: + db_deployment = Deployment(name=DB_RESOURCES_NAME, namespace=model_registry_namespace, ensure_exists=True) + yield db_deployment + db_deployment.delete(wait=True) + else: + with Deployment( + name=DB_RESOURCES_NAME, + namespace=model_registry_namespace, + annotations={ + "template.alpha.openshift.io/wait-for-ready": "true", + }, + label=get_model_registry_db_label_dict(db_resource_name=DB_RESOURCES_NAME), + replicas=1, + revision_history_limit=0, + selector={"matchLabels": {"name": DB_RESOURCES_NAME}}, + strategy={"type": "Recreate"}, + template=get_model_registry_deployment_template_dict( + secret_name=model_registry_db_secret.name, resource_name=DB_RESOURCES_NAME + ), + wait_for_resource=True, + teardown=teardown_resources, + ) as 
mr_db_deployment: + mr_db_deployment.wait_for_replicas(deployed=True) + yield mr_db_deployment @pytest.fixture(scope="class") def model_registry_instance( - model_registry_namespace: str, model_registry_mysql_config: dict[str, Any], is_model_registry_oauth: bool + pytestconfig: Config, + admin_client: DynamicClient, + model_registry_namespace: str, + model_registry_mysql_config: dict[str, Any], + teardown_resources: bool, + is_model_registry_oauth: bool, ) -> Generator[ModelRegistry, Any, Any]: - istio_config = None - oauth_config = None - mr_class_name = ModelRegistry - if is_model_registry_oauth: - LOGGER.warning("Requested Ouath Proxy configuration:") - oauth_config = OAUTH_PROXY_CONFIG_DICT - else: - LOGGER.warning("Requested OSSM configuration:") - istio_config = ISTIO_CONFIG_DICT - mr_class_name = ModelRegistryV1Alpha1 """Creates a model registry instance with oauth proxy configuration.""" - with mr_class_name( - name=MR_INSTANCE_NAME, - namespace=model_registry_namespace, - label=MODEL_REGISTRY_STANDARD_LABELS, - grpc={}, - rest={}, - istio=istio_config, - oauth_proxy=oauth_config, - mysql=model_registry_mysql_config, - wait_for_resource=True, - ) as mr: - mr.wait_for_condition(condition="Available", status="True") - mr.wait_for_condition(condition="OAuthProxyAvailable", status="True") - - yield mr + if pytestconfig.option.post_upgrade: + mr_instance = ModelRegistry(name=MR_INSTANCE_NAME, namespace=model_registry_namespace, ensure_exists=True) + yield mr_instance + mr_instance.delete(wait=True) + else: + istio_config = None + oauth_config = None + mr_class_name = ModelRegistry + if is_model_registry_oauth: + LOGGER.warning("Requested Ouath Proxy configuration:") + oauth_config = OAUTH_PROXY_CONFIG_DICT + else: + LOGGER.warning("Requested OSSM configuration:") + istio_config = ISTIO_CONFIG_DICT + mr_class_name = ModelRegistryV1Alpha1 + with mr_class_name( + name=MR_INSTANCE_NAME, + namespace=model_registry_namespace, + label=MODEL_REGISTRY_STANDARD_LABELS, + 
grpc={}, + rest={}, + istio=istio_config, + oauth_proxy=oauth_config, + mysql=model_registry_mysql_config, + wait_for_resource=True, + teardown=teardown_resources, + ) as mr: + mr.wait_for_condition(condition="Available", status="True") + mr.wait_for_condition(condition="OAuthProxyAvailable", status="True") + + yield mr @pytest.fixture(scope="class") @@ -204,11 +253,11 @@ def model_registry_mysql_config( param = request.param if hasattr(request, "param") else {} config = { "host": f"{model_registry_db_deployment.name}.{model_registry_db_deployment.namespace}.svc.cluster.local", - "database": model_registry_db_secret.string_data["database-name"], + "database": MODEL_REGISTRY_DB_SECRET_STR_DATA["database-name"], "passwordSecret": {"key": "database-password", "name": model_registry_db_deployment.name}, "port": param.get("port", 3306), "skipDBCreation": False, - "username": model_registry_db_secret.string_data["database-user"], + "username": MODEL_REGISTRY_DB_SECRET_STR_DATA["database-user"], } if "sslRootCertificateConfigMap" in param: config["sslRootCertificateConfigMap"] = param["sslRootCertificateConfigMap"] @@ -289,46 +338,114 @@ def after_call(self, response: Response, case: Case) -> None: @pytest.fixture(scope="class") def updated_dsc_component_state_scope_class( + pytestconfig: Config, request: FixtureRequest, dsc_resource: DataScienceCluster, admin_client: DynamicClient, + teardown_resources: bool, is_model_registry_oauth: bool, ) -> Generator[DataScienceCluster, Any, Any]: - original_components = dsc_resource.instance.spec.components - component_patch = request.param["component_patch"] - - with ResourceEditor(patches={dsc_resource: {"spec": {"components": component_patch}}}): - for component_name in component_patch: - dsc_resource.wait_for_condition(condition=DscComponents.COMPONENT_MAPPING[component_name], status="True") - if component_patch.get(DscComponents.MODELREGISTRY): - namespace = Namespace( - 
name=dsc_resource.instance.spec.components.modelregistry.registriesNamespace, ensure_exists=True + if not teardown_resources or pytestconfig.option.post_upgrade: + # if we are not tearing down resources or we are in post upgrade, we don't need to do anything + # the pre_upgrade/post_upgrade fixtures will handle the rest + yield dsc_resource + else: + original_components = dsc_resource.instance.spec.components + component_patch = request.param["component_patch"] + + with ResourceEditor(patches={dsc_resource: {"spec": {"components": component_patch}}}): + for component_name in component_patch: + dsc_resource.wait_for_condition( + condition=DscComponents.COMPONENT_MAPPING[component_name], status="True" + ) + if component_patch.get(DscComponents.MODELREGISTRY): + namespace = Namespace( + name=dsc_resource.instance.spec.components.modelregistry.registriesNamespace, ensure_exists=True + ) - namespace.wait_for_status(status=Namespace.Status.ACTIVE) + namespace.wait_for_status(status=Namespace.Status.ACTIVE) + wait_for_pods_running( + admin_client=admin_client, + namespace_name=py_config["applications_namespace"], + number_of_consecutive_checks=6, ) - yield dsc_resource + yield dsc_resource + + for component_name, value in component_patch.items(): + LOGGER.info(f"Waiting for component {component_name} to be updated.") + if original_components[component_name]["managementState"] == DscComponents.ManagementState.MANAGED: + dsc_resource.wait_for_condition( + condition=DscComponents.COMPONENT_MAPPING[component_name], status="True" + ) + if ( + component_name == DscComponents.MODELREGISTRY + and value.get("managementState") == DscComponents.ManagementState.MANAGED + ): + # Since namespace specified in registriesNamespace is automatically created after setting + # managementState to Managed. We need to explicitly delete it on cleanup. 
+ namespace = Namespace(name=value["registriesNamespace"], ensure_exists=True) + if namespace: + namespace.delete(wait=True) + + +@pytest.fixture(scope="class") +def pre_upgrade_dsc_patch( + dsc_resource: DataScienceCluster, + admin_client: DynamicClient, +) -> DataScienceCluster: + original_components = dsc_resource.instance.spec.components + component_patch = {DscComponents.MODELREGISTRY: {"managementState": DscComponents.ManagementState.MANAGED}} + if ( + original_components.get(DscComponents.MODELREGISTRY).get("managementState") + == DscComponents.ManagementState.MANAGED + ): + pytest.fail("Model Registry is already set to Managed before upgrade - was this intentional?") + else: + editor = ResourceEditor(patches={dsc_resource: {"spec": {"components": component_patch}}}) + editor.update() + dsc_resource.wait_for_condition(condition=DscComponents.COMPONENT_MAPPING["modelregistry"], status="True") + namespace = Namespace( + name=dsc_resource.instance.spec.components.modelregistry.registriesNamespace, ensure_exists=True + ) + namespace.wait_for_status(status=Namespace.Status.ACTIVE) wait_for_pods_running( admin_client=admin_client, namespace_name=py_config["applications_namespace"], number_of_consecutive_checks=6, ) - yield dsc_resource + return dsc_resource + + +@pytest.fixture(scope="class") +def post_upgrade_dsc_patch( + dsc_resource: DataScienceCluster, +) -> Generator[DataScienceCluster, Any, Any]: + # yield right away so that the rest of the fixture is executed at teardown time + yield dsc_resource - for component_name, value in component_patch.items(): - LOGGER.info(f"Waiting for component {component_name} to be updated.") - if original_components[component_name]["managementState"] == DscComponents.ManagementState.MANAGED: - dsc_resource.wait_for_condition(condition=DscComponents.COMPONENT_MAPPING[component_name], status="True") - if ( - component_name == DscComponents.MODELREGISTRY - and value.get("managementState") == DscComponents.ManagementState.MANAGED 
- ): - # Since namespace specified in registriesNamespace is automatically created after setting - # managementStateto Managed. We need to explicitly delete it on clean up. - namespace = Namespace(name=value["registriesNamespace"], ensure_exists=True) - if namespace: - namespace.delete(wait=True) + # the state we found after the upgrade + original_components = dsc_resource.instance.spec.components + # We don't have an easy way to figure out the state of the components before the upgrade at runtime + # For now we know that MR has to go back to Removed after post upgrade tests are run + component_patch = {DscComponents.MODELREGISTRY: {"managementState": DscComponents.ManagementState.REMOVED}} + if ( + original_components.get(DscComponents.MODELREGISTRY).get("managementState") + == DscComponents.ManagementState.REMOVED + ): + pytest.fail("Model Registry is already set to Removed after upgrade - was this intentional?") + else: + editor = ResourceEditor(patches={dsc_resource: {"spec": {"components": component_patch}}}) + editor.update() + ns = original_components.get(DscComponents.MODELREGISTRY).get("registriesNamespace") + namespace = Namespace(name=ns, ensure_exists=True) + if namespace: + namespace.delete(wait=True) @pytest.fixture(scope="class") -def model_registry_client(current_client_token: str, model_registry_instance_rest_endpoint: str) -> ModelRegistryClient: +def model_registry_client( + current_client_token: str, + model_registry_instance_rest_endpoint: str, +) -> ModelRegistryClient: """ Get a client for the model registry instance. 
Args: diff --git a/tests/model_registry/upgrade/test_model_registry_upgrade.py b/tests/model_registry/upgrade/test_model_registry_upgrade.py new file mode 100644 index 000000000..04450b738 --- /dev/null +++ b/tests/model_registry/upgrade/test_model_registry_upgrade.py @@ -0,0 +1,87 @@ +import pytest +from typing import Self +from tests.model_registry.constants import MODEL_NAME, MODEL_DICT +from model_registry.types import RegisteredModel +from model_registry import ModelRegistry as ModelRegistryClient +from ocp_resources.model_registry_modelregistry_opendatahub_io import ModelRegistry +from simple_logger.logger import get_logger +from tests.model_registry.rest_api.utils import ModelRegistryV1Alpha1 +from tests.model_registry.utils import get_and_validate_registered_model + +LOGGER = get_logger(name=__name__) + + +@pytest.mark.parametrize( + "registered_model", + [ + pytest.param( + MODEL_DICT, + ), + ], + indirect=True, +) +@pytest.mark.usefixtures("pre_upgrade_dsc_patch") +class TestPreUpgradeModelRegistry: + @pytest.mark.pre_upgrade + def test_registering_model_pre_upgrade( + self: Self, + model_registry_client: ModelRegistryClient, + registered_model: RegisteredModel, + ): + errors = get_and_validate_registered_model( + model_registry_client=model_registry_client, model_name=MODEL_NAME, registered_model=registered_model + ) + if errors: + pytest.fail("errors found in model registry response validation:\n{}".format("\n".join(errors))) + + # TODO: if we are in <=2.21, we can create a servicemesh MR here instead of oauth (v1alpha1), and then in + # post-upgrade check that it automatically gets converted to oauth (v1beta1) - to be done in 2.21 branch directly. 
+ + +@pytest.mark.usefixtures("post_upgrade_dsc_patch") +class TestPostUpgradeModelRegistry: + @pytest.mark.post_upgrade + def test_retrieving_model_post_upgrade( + self: Self, + model_registry_client: ModelRegistryClient, + model_registry_instance: ModelRegistry, + ): + errors = get_and_validate_registered_model( + model_registry_client=model_registry_client, + model_name=MODEL_NAME, + ) + if errors: + pytest.fail("errors found in model registry response validation:\n{}".format("\n".join(errors))) + + @pytest.mark.post_upgrade + def test_model_registry_instance_api_version_post_upgrade( + self: Self, + model_registry_instance: ModelRegistry, + ): + # the following is valid for 2.22+ + api_version = model_registry_instance.instance.apiVersion + expected_version = f"{ModelRegistry.ApiGroup.MODELREGISTRY_OPENDATAHUB_IO}/{ModelRegistry.ApiVersion.V1BETA1}" + assert api_version == expected_version + + @pytest.mark.post_upgrade + def test_model_registry_instance_spec_post_upgrade( + self: Self, + model_registry_instance: ModelRegistry, + ): + model_registry_instance_spec = model_registry_instance.instance.spec + assert not model_registry_instance_spec.istio + assert model_registry_instance_spec.oauthProxy.serviceRoute == "enabled" + + @pytest.mark.post_upgrade + def test_model_registry_instance_status_conversion_post_upgrade( + self: Self, + model_registry_instance: ModelRegistry, + ): + # TODO: After v1alpha1 is removed (2.24?) 
this has to be removed + mr_instance = ModelRegistryV1Alpha1( + name=model_registry_instance.name, namespace=model_registry_instance.namespace, ensure_exists=True + ).instance + status = mr_instance.status.to_dict() + LOGGER.info(f"Validating MR status {status}") + if not status: + pytest.fail(f"Empty status found for {mr_instance}") diff --git a/tests/model_registry/utils.py b/tests/model_registry/utils.py index 5e1a15f3d..569029e74 100644 --- a/tests/model_registry/utils.py +++ b/tests/model_registry/utils.py @@ -1,5 +1,5 @@ import uuid -from typing import Any +from typing import Any, List from kubernetes.dynamic import DynamicClient from ocp_resources.pod import Pod @@ -12,6 +12,8 @@ from tests.model_registry.constants import MR_DB_IMAGE_DIGEST from utilities.exceptions import ProtocolNotSupportedError, TooManyServicesError from utilities.constants import Protocols, Annotations +from model_registry import ModelRegistry as ModelRegistryClient +from model_registry.types import RegisteredModel ADDRESS_ANNOTATION_PREFIX: str = "routing.opendatahub.io/external-address-" @@ -330,3 +332,32 @@ def apply_mysql_args_and_volume_mounts( my_sql_container["args"] = mysql_args my_sql_container["volumeMounts"] = volumes_mounts return my_sql_container + + +def get_and_validate_registered_model( + model_registry_client: ModelRegistryClient, + model_name: str, + registered_model: RegisteredModel = None, +) -> List[str]: + """ + Get and validate a registered model. 
+ """ + model = model_registry_client.get_registered_model(name=model_name) + if registered_model is not None: + expected_attrs = { + "id": registered_model.id, + "name": registered_model.name, + "description": registered_model.description, + "owner": registered_model.owner, + "state": registered_model.state, + } + else: + expected_attrs = { + "name": model_name, + } + errors = [ + f"Unexpected {attr} expected: {expected}, received {getattr(model, attr)}" + for attr, expected in expected_attrs.items() + if getattr(model, attr) != expected + ] + return errors