Skip to content

Refactor determination of storageclass for deployments #12042

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 1 addition & 15 deletions ocs_ci/deployment/aws.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
from ocs_ci.ocs import constants, exceptions, ocp, machine
from ocs_ci.ocs.resources import pod
from ocs_ci.ocs.node import drain_nodes
from ocs_ci.utility import cco, templating, version
from ocs_ci.utility import cco, templating
from ocs_ci.utility.aws import (
AWS as AWSUtil,
create_and_attach_volume_for_all_workers,
Expand Down Expand Up @@ -56,9 +56,6 @@

class AWSBase(CloudDeploymentBase):

# default storage class for StorageCluster CRD on AWS platform
DEFAULT_STORAGECLASS = "gp2"

def __init__(self):
"""
This would be base for both IPI and UPI deployment
Expand All @@ -68,20 +65,9 @@ def __init__(self):
# dict of cluster prefixes with special handling rules (for existence
# check or during a cluster cleanup)
self.cluster_prefixes_special_rules = CLUSTER_PREFIXES_SPECIAL_RULES
ocp_version = version.get_semantic_ocp_version_from_config()
if ocp_version >= version.VERSION_4_12:
self.DEFAULT_STORAGECLASS = "gp2-csi"

def deploy_ocp(self, log_cli_level="DEBUG"):
super(AWSBase, self).deploy_ocp(log_cli_level)
ocp_version = version.get_semantic_ocp_version_from_config()
ocs_version = version.get_semantic_ocs_version_from_config()

if ocs_version >= version.VERSION_4_10 and ocp_version >= version.VERSION_4_9:
# If we don't customize the storage class, we will use the default one
self.DEFAULT_STORAGECLASS = config.DEPLOYMENT.get(
"customized_deployment_storage_class", self.DEFAULT_STORAGECLASS
)

def host_network_update(self):
"""
Expand Down
9 changes: 1 addition & 8 deletions ocs_ci/deployment/azure.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from ocs_ci.deployment.cloud import IPIOCPDeployment
from ocs_ci.deployment.ocp import OCPDeployment as BaseOCPDeployment
from ocs_ci.ocs import constants
from ocs_ci.utility import cco, version
from ocs_ci.utility import cco
from ocs_ci.utility.azure_utils import AZURE as AzureUtil, AzureAroUtil
from ocs_ci.utility.deployment import get_ocp_release_image_from_installer
from ocs_ci.utility.utils import exec_cmd
Expand All @@ -34,13 +34,6 @@ class AZUREBase(CloudDeploymentBase):
comparable with other platforms.
"""

# default storage class for StorageCluster CRD on Azure platform
# From OCP 4.11, default storage class is managed-csi
if version.get_semantic_ocp_version_from_config() >= version.VERSION_4_11:
DEFAULT_STORAGECLASS = "managed-csi"
else:
DEFAULT_STORAGECLASS = "managed-premium"

def __init__(self):
super(AZUREBase, self).__init__()
self.azure_util = AzureUtil()
Expand Down
65 changes: 18 additions & 47 deletions ocs_ci/deployment/deployment.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

from botocore.exceptions import EndpointConnectionError, BotoCoreError

from ocs_ci.deployment.helpers import storage_class
from ocs_ci.deployment.ocp import OCPDeployment as BaseOCPDeployment
from ocs_ci.deployment.helpers.external_cluster_helpers import (
ExternalCluster,
Expand Down Expand Up @@ -191,32 +192,14 @@ class Deployment(object):
Base for all deployment platforms
"""

# Default storage class for StorageCluster CRD,
# every platform specific class which extending this base class should
# define it
DEFAULT_STORAGECLASS = None

# Default storage class for LSO deployments. While each platform specific
# subclass can redefine it, there is a well established platform
# independent default value (based on OCS Installation guide), and it's
# redefinition is not necessary in normal cases.
DEFAULT_STORAGECLASS_LSO = "localblock"

CUSTOM_STORAGE_CLASS_PATH = None
"""str: filepath of yaml file with custom storage class if necessary

For some platforms, one have to create custom storage class for OCS to make
sure ceph uses disks of expected type and parameters (eg. OCS requires
ssd). This variable is either None (meaning that such custom storage class
is not needed), or point to a yaml file with custom storage class.
"""

def __init__(self):
    """
    Initialize deployment settings from the framework config.
    """
    self.platform = config.ENV_DATA["platform"]
    self.ocp_deployment_type = config.ENV_DATA["deployment_type"]
    self.cluster_path = config.ENV_DATA["cluster_path"]
    self.namespace = config.ENV_DATA["cluster_namespace"]
    # NOTE(review): presumably populated later for STS-based deployments —
    # confirm where this is assigned.
    self.sts_role_arn = None
    # storage class for the StorageCluster CRD, resolved from the config or
    # the platform default map
    self.storage_class = storage_class.get_storageclass()
    # path to a yaml file with a custom storage class; platform subclasses
    # that need one (e.g. GCP) set this in their own __init__
    self.custom_storage_class_path = None

class OCPDeployment(BaseOCPDeployment):
"""
Expand Down Expand Up @@ -475,7 +458,6 @@ def do_deploy_mce(self):

"""
if config.ENV_DATA["skip_ocs_deployment"]:

if config.ENV_DATA.get("deploy_mce"):
mce_installer = MCEInstaller()
mce_installer.deploy_mce()
Expand Down Expand Up @@ -621,7 +603,6 @@ def do_deploy_hyperconverged(self):
Should run on OCP deployment phase
"""
if config.ENV_DATA["skip_ocs_deployment"]:

if config.ENV_DATA.get(
"deploy_hyperconverged"
) and not config.DEPLOYMENT.get("cnv_deployment"):
Expand Down Expand Up @@ -712,11 +693,11 @@ def deploy_cluster(self, log_cli_level="DEBUG"):
perform_lso_standalone_deployment = config.DEPLOYMENT.get(
"lso_standalone_deployment", False
) and not ocp.OCP(kind=constants.STORAGECLASS).is_exist(
resource_name=self.DEFAULT_STORAGECLASS_LSO
resource_name=constants.DEFAULT_STORAGECLASS_LSO
)
if perform_lso_standalone_deployment:
cleanup_nodes_for_lso_install()
setup_local_storage(storageclass=self.DEFAULT_STORAGECLASS_LSO)
setup_local_storage(storageclass=constants.DEFAULT_STORAGECLASS_LSO)
self.do_deploy_lvmo()
self.do_deploy_submariner()
self.do_gitops_deploy()
Expand Down Expand Up @@ -1130,14 +1111,13 @@ def deploy_ocs_via_operator(self, image=None):

if local_storage:
log_step("Deploy and setup Local Storage Operator")
setup_local_storage(storageclass=self.DEFAULT_STORAGECLASS_LSO)
setup_local_storage(storageclass=constants.DEFAULT_STORAGECLASS_LSO)

log_step("Creating namespace and operator group")
# patch OLM YAML with the namespace
olm_ns_op_group_data = list(templating.load_yaml(constants.OLM_YAML, True))

if self.namespace != constants.OPENSHIFT_STORAGE_NAMESPACE:

for cr in olm_ns_op_group_data:
if cr["kind"] == "Namespace":
cr["metadata"]["name"] = self.namespace
Expand Down Expand Up @@ -1372,13 +1352,10 @@ def deploy_ocs_via_operator(self, image=None):
)

# create custom storage class for StorageCluster CR if necessary
if self.CUSTOM_STORAGE_CLASS_PATH is not None:
with open(self.CUSTOM_STORAGE_CLASS_PATH, "r") as custom_sc_fo:
custom_sc = yaml.load(custom_sc_fo, Loader=yaml.SafeLoader)
# set value of DEFAULT_STORAGECLASS to mach the custom storage cls
self.DEFAULT_STORAGECLASS = custom_sc["metadata"]["name"]
log_step(f"Creating custom storage class {self.DEFAULT_STORAGECLASS}")
run_cmd(f"oc create -f {self.CUSTOM_STORAGE_CLASS_PATH}")
if self.custom_storage_class_path is not None:
self.storage_class_name = storage_class.create_custom_storageclass(
self.custom_storage_class_path
)

# Set rook log level
self.set_rook_log_level()
Expand Down Expand Up @@ -1466,7 +1443,7 @@ def deploy_ocs_via_operator(self, image=None):
constants.HCI_BAREMETAL,
]:
pv_size_list = helpers.get_pv_size(
storageclass=self.DEFAULT_STORAGECLASS_LSO
storageclass=constants.DEFAULT_STORAGECLASS_LSO
)
pv_size_list.sort()
deviceset_data["dataPVCTemplate"]["spec"]["resources"]["requests"][
Expand All @@ -1477,16 +1454,11 @@ def deploy_ocs_via_operator(self, image=None):
"storage"
] = f"{device_size}Gi"

if self.platform.lower() == constants.ROSA_HCP_PLATFORM:
self.DEFAULT_STORAGECLASS = config.DEPLOYMENT.get(
"customized_deployment_storage_class", self.DEFAULT_STORAGECLASS
)

# set storage class to OCS default on current platform
if self.DEFAULT_STORAGECLASS:
if self.storage_class:
deviceset_data["dataPVCTemplate"]["spec"][
"storageClassName"
] = self.DEFAULT_STORAGECLASS
] = self.storage_class

# StorageCluster tweaks for LSO
if local_storage:
Expand All @@ -1496,7 +1468,7 @@ def deploy_ocs_via_operator(self, image=None):
deviceset_data["portable"] = False
deviceset_data["dataPVCTemplate"]["spec"][
"storageClassName"
] = self.DEFAULT_STORAGECLASS_LSO
] = constants.DEFAULT_STORAGECLASS_LSO
lso_type = config.DEPLOYMENT.get("type")
if (
self.platform.lower() == constants.AWS_PLATFORM
Expand Down Expand Up @@ -1581,7 +1553,7 @@ def deploy_ocs_via_operator(self, image=None):
"spec": {
"accessModes": ["ReadWriteOnce"],
"resources": {"requests": {"storage": "20Gi"}},
"storageClassName": self.DEFAULT_STORAGECLASS,
"storageClassName": self.storage_class,
"volumeMode": "Filesystem",
}
}
Expand Down Expand Up @@ -2410,14 +2382,13 @@ def patch_default_sc_to_non_default(self):
"""
Patch storage class which comes as default with installation to non-default
"""
if not self.DEFAULT_STORAGECLASS:
if not self.storage_class:
logger.info(
"Default StorageClass is not set for this class: "
f"{self.__class__.__name__}"
f"Default StorageClass is not set for this class: {self.__class__.__name__}"
)
return

sc_to_patch = self.DEFAULT_STORAGECLASS
sc_to_patch = self.storage_class
if (
config.ENV_DATA.get("use_custom_sc_in_deployment")
and self.platform.lower() == constants.VSPHERE_PLATFORM
Expand Down
13 changes: 6 additions & 7 deletions ocs_ci/deployment/gcp.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,15 +73,14 @@ class GCPIPI(GCPBase):
A class to handle GCP IPI specific deployment
"""

# storage class for StorageCluster CRD on Google Cloud platform
# uses a custom storageclass, which is created prior creating
# StorageCluster CR during OCS installation
CUSTOM_STORAGE_CLASS_PATH = os.path.join(
TEMPLATE_DEPLOYMENT_DIR, "storageclass.gcp.yaml"
)

OCPDeployment = IPIOCPDeployment

def __init__(self):
    """
    Initialize GCP IPI deployment.
    """
    self.name = self.__class__.__name__
    super(GCPIPI, self).__init__()
    # Storage class for the StorageCluster CRD on the Google Cloud platform:
    # GCP uses a custom storage class, created from this yaml file prior to
    # creating the StorageCluster CR during OCS installation.
    self.custom_storage_class_path = os.path.join(
        TEMPLATE_DEPLOYMENT_DIR, "storageclass.gcp.yaml"
    )
64 changes: 64 additions & 0 deletions ocs_ci/deployment/helpers/storage_class.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
import logging
import yaml

from ocs_ci.framework import config
from ocs_ci.framework.logger_helper import log_step
from ocs_ci.ocs import constants
from ocs_ci.utility.utils import run_cmd


logger = logging.getLogger(__name__)

DEFAULT_STORAGE_CLASS_MAP = {
constants.AWS_PLATFORM: "gp2-csi",
constants.IBMCLOUD_PLATFORM: "ibmc-vpc-block-10iops-tier",
constants.VSPHERE_PLATFORM: "thin-csi",
constants.AZURE_PLATFORM: "managed-csi",
constants.GCP_PLATFORM: None,
constants.ROSA_HCP_PLATFORM: None,
constants.RHV_PLATFORM: "ovirt-csi-sc",
}
Comment on lines +12 to +20
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The thin-csi value is also available as constants.THIN_CSI_STORAGECLASS, but I'm not sure, if it is worth to change all the values and get them from constants, so maybe as it is now is ok.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I noticed that as well and came to the same conclusion. I didn't think it was necessary to convert them each to their own constant. If you think it's worth it to move the map to the constants module I can do that, but for now I will leave it as is.



def get_storageclass() -> str:
    """
    Retrieve the storageclass to use from the config or based on platform.

    The ``DEPLOYMENT["customized_deployment_storage_class"]`` config value
    takes precedence; otherwise the platform default from
    ``DEFAULT_STORAGE_CLASS_MAP`` is used.

    Returns:
        str: Name of the storageclass, or None when neither the config nor
            the platform map defines one

    """
    logger.info("Getting storageclass")
    platform = config.ENV_DATA.get("platform")
    customized_deployment_storage_class = config.DEPLOYMENT.get(
        "customized_deployment_storage_class"
    )

    if customized_deployment_storage_class:
        storage_class = customized_deployment_storage_class
    else:
        # Use dict.get() so platforms without an entry in the map (e.g. bare
        # metal) resolve to None instead of raising KeyError, matching the
        # previous behavior where Deployment.DEFAULT_STORAGECLASS was None
        # unless a platform subclass overrode it.
        storage_class = DEFAULT_STORAGE_CLASS_MAP.get(platform)

    logger.info(f"Using storage class: {storage_class}")
    return storage_class


def create_custom_storageclass(storage_class_path: str) -> str:
    """
    Create a custom storageclass using the yaml file defined at the storage_class_path

    Args:
        storage_class_path (str): Filepath to storageclass yaml definition

    Returns:
        str: Name of the storageclass

    """
    # Parse the definition first so we can report the storage class name
    # before invoking oc.
    with open(storage_class_path, "r") as sc_file:
        sc_definition = yaml.safe_load(sc_file)
    sc_name = sc_definition["metadata"]["name"]

    log_step(f"Creating custom storage class {sc_name}")
    run_cmd(f"oc create -f {storage_class_path}")
    return sc_name
5 changes: 1 addition & 4 deletions ocs_ci/deployment/ibmcloud.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,8 +89,6 @@ class IBMCloud(CloudDeploymentBase):
Deployment class for IBM Cloud
"""

DEFAULT_STORAGECLASS = "ibmc-vpc-block-10iops-tier"

OCPDeployment = IBMCloudOCPDeployment

def __init__(self):
Expand Down Expand Up @@ -139,7 +137,6 @@ class IBMCloudIPI(CloudDeploymentBase):
A class to handle IBM Cloud IPI specific deployment
"""

DEFAULT_STORAGECLASS = "ibmc-vpc-block-10iops-tier"
OCPDeployment = IPIOCPDeployment

def __init__(self):
Expand Down Expand Up @@ -207,7 +204,7 @@ def destroy_cluster(self, log_level="DEBUG"):
if resource_group:
try:
self.delete_bucket()
scale_down_pods_and_remove_pvcs(self.DEFAULT_STORAGECLASS)
scale_down_pods_and_remove_pvcs(self.storage_class)
except Exception as err:
logger.warning(
f"Failed to scale down mon/osd pods or failed to remove PVC's. Error: {err}"
Expand Down
3 changes: 0 additions & 3 deletions ocs_ci/deployment/rhv.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,6 @@ class RHVBASE(OnPremDeploymentBase):
RHV deployment base class, with code common to both IPI and UPI.
"""

# default storage class for StorageCluster CRD on RHV platform
DEFAULT_STORAGECLASS = "ovirt-csi-sc"

def __init__(self):
super(RHVBASE, self).__init__()
if config.ENV_DATA.get("default_cluster_name"):
Expand Down
3 changes: 0 additions & 3 deletions ocs_ci/deployment/tests/test_cloud.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,6 @@ class TestCloudDeployment(CloudDeploymentBase):
will fail (which is expected for such base class).
"""

# avoid raising NotImplementedError so that testing base class is possible
DEFAULT_STORAGECLASS = "cloudstorage"


def test_clouddeploymentbase_init(clusterdir):
"""
Expand Down
Loading