From 1c00e587244cffeb89456b01644bea0934bcfaff Mon Sep 17 00:00:00 2001
From: Jakub Stejskal
Date: Tue, 30 Jan 2024 13:30:14 +0100
Subject: [PATCH] Add Modelmesh, Ray and TrustyAI operands (#79)

Signed-off-by: Jakub Stejskal
---
 .gitignore                                     |  2 +-
 .../io/odh/test/OdhAnnotationsLabels.java      |  2 +
 src/main/java/io/odh/test/TestConstants.java   | 10 +++
 .../framework/manager/ResourceManager.java     |  8 +-
 .../requirements/PipelinesOperator.java        | 50 +++++++++++++
 .../requirements/ServerlessOperator.java       | 75 +++++++++++++++++++
 .../requirements/ServiceMeshOperator.java      | 51 +++++++++++++
 .../resources/DataScienceClusterResource.java  | 52 ++++++++++++-
 .../java/io/odh/test/install/OlmInstall.java   |  3 +-
 src/test/java/io/odh/test/e2e/Abstract.java    | 11 +++
 .../e2e/standard/DataScienceClusterST.java     | 26 ++++++-
 .../io/odh/test/e2e/standard/NotebookST.java   | 22 +++++-
 .../odh/test/e2e/upgrade/UpgradeAbstract.java  | 19 ++++-
 13 files changed, 314 insertions(+), 17 deletions(-)
 create mode 100644 src/main/java/io/odh/test/framework/manager/requirements/PipelinesOperator.java
 create mode 100644 src/main/java/io/odh/test/framework/manager/requirements/ServerlessOperator.java
 create mode 100644 src/main/java/io/odh/test/framework/manager/requirements/ServiceMeshOperator.java

diff --git a/.gitignore b/.gitignore
index cbd99741..4248e009 100644
--- a/.gitignore
+++ b/.gitignore
@@ -38,5 +38,5 @@ build/
 ## Suite specific files
 **/*.kubeconfig
-env
+*.env
 config.yaml
diff --git a/src/main/java/io/odh/test/OdhAnnotationsLabels.java b/src/main/java/io/odh/test/OdhAnnotationsLabels.java
index fdf78e5b..d52a683b 100644
--- a/src/main/java/io/odh/test/OdhAnnotationsLabels.java
+++ b/src/main/java/io/odh/test/OdhAnnotationsLabels.java
@@ -14,5 +14,7 @@ public class OdhAnnotationsLabels {
     public static final String ANNO_SERVICE_MESH = ODH_DOMAIN + "service-mesh";
     public static final String ANNO_NTB_INJECT_OAUTH = "notebooks." + ODH_DOMAIN + "inject-oauth";
+    public static final String APP_LABEL_KEY = "app";
+    public static final String APP_LABEL_VALUE = "odh-e2e";
 }
diff --git a/src/main/java/io/odh/test/TestConstants.java b/src/main/java/io/odh/test/TestConstants.java
index f7a5de73..340e7651 100644
--- a/src/main/java/io/odh/test/TestConstants.java
+++ b/src/main/java/io/odh/test/TestConstants.java
@@ -12,6 +12,9 @@ public class TestConstants {
     public static final String SUBSCRIPTION = "Subscription";
     public static final String OPERATOR_GROUP = "OperatorGroup";
+    public static final String APPROVAL_AUTOMATIC = "Automatic";
+    public static final String APPROVAL_MANUAL = "Manual";
+
     public static final String LATEST_BUNDLE_DEPLOY_FILE = "install-files/latest.yaml";
     public static final String RELEASED_BUNDLE_DEPLOY_FILE = "install-files/released.yaml";
@@ -23,6 +26,13 @@ public class TestConstants {
     public static final long GLOBAL_STABILITY_TIME = Duration.ofMinutes(1).toSeconds();
     public static final String LOG_COLLECT_LABEL = "io.odh-e2e.collect-logs";
+    // OLM Constants
+    public static final String OPENSHIFT_MARKETPLACE_NS = "openshift-marketplace";
+    public static final String OPENSHIFT_OPERATORS_NS = "openshift-operators";
+    public static final String REDHAT_CATALOG = "redhat-operators";
+    public static final String CHANNEL_STABLE = "stable";
+    public static final String CHANNEL_LATEST = "latest";
+
     private TestConstants() {
     }
 }
diff --git a/src/main/java/io/odh/test/framework/manager/ResourceManager.java b/src/main/java/io/odh/test/framework/manager/ResourceManager.java
index 4daf3b10..ed9de604 100644
--- a/src/main/java/io/odh/test/framework/manager/ResourceManager.java
+++ b/src/main/java/io/odh/test/framework/manager/ResourceManager.java
@@ -147,10 +147,14 @@ private void createResource(boolean waitReady, T... reso
             } else {
                 client.getClient().resource(resource).create();
             }
-            }
         } else {
-            type.create(resource);
+            if (client.getClient().resource(resource).get() != null) {
+                type.update(resource);
+            } else {
+                type.create(resource);
+            }
+
         if (waitReady) {
             assertTrue(waitResourceCondition(resource, ResourceCondition.readiness(type)),
                 String.format("Timed out waiting for %s %s/%s to be ready", resource.getKind(), resource.getMetadata().getNamespace(), resource.getMetadata().getName()));
diff --git a/src/main/java/io/odh/test/framework/manager/requirements/PipelinesOperator.java b/src/main/java/io/odh/test/framework/manager/requirements/PipelinesOperator.java
new file mode 100644
index 00000000..a1892a6d
--- /dev/null
+++ b/src/main/java/io/odh/test/framework/manager/requirements/PipelinesOperator.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright Skodjob authors.
+ * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
+ */
+package io.odh.test.framework.manager.requirements;
+
+import io.fabric8.openshift.api.model.operatorhub.v1alpha1.Subscription;
+import io.fabric8.openshift.api.model.operatorhub.v1alpha1.SubscriptionBuilder;
+import io.odh.test.OdhAnnotationsLabels;
+import io.odh.test.TestConstants;
+import io.odh.test.framework.manager.ResourceItem;
+import io.odh.test.framework.manager.ResourceManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Arrays;
+import java.util.Collections;
+
+public class PipelinesOperator {
+    private static final Logger LOGGER = LoggerFactory.getLogger(PipelinesOperator.class);
+
+    public static final String SUBSCRIPTION_NAME = "openshift-pipelines-operator";
+    public static final String OPERATOR_NAME = "openshift-pipelines-operator-rh";
+
+    public static void deployOperator() {
+        Subscription subscription = new SubscriptionBuilder()
+            .editOrNewMetadata()
+            .withName(SUBSCRIPTION_NAME)
+            .withNamespace(TestConstants.OPENSHIFT_OPERATORS_NS)
+            .withLabels(Collections.singletonMap(OdhAnnotationsLabels.APP_LABEL_KEY, OdhAnnotationsLabels.APP_LABEL_VALUE))
+            .endMetadata()
+            .editOrNewSpec()
+            .withName(OPERATOR_NAME)
+            .withChannel(TestConstants.CHANNEL_LATEST)
+            .withSource(TestConstants.REDHAT_CATALOG)
+            .withSourceNamespace(TestConstants.OPENSHIFT_MARKETPLACE_NS)
+            .withInstallPlanApproval(TestConstants.APPROVAL_AUTOMATIC)
+            .editOrNewConfig()
+            .endConfig()
+            .endSpec()
+            .build();
+
+        ResourceManager.getInstance().createResourceWithWait(subscription);
+        ResourceManager.getInstance().pushToStack(new ResourceItem(() -> deleteOperator(subscription), null));
+    }
+
+    public static void deleteOperator(Subscription subscription) {
+        ResourceManager.getClient().delete(Arrays.asList(subscription));
+    }
+}
diff --git a/src/main/java/io/odh/test/framework/manager/requirements/ServerlessOperator.java b/src/main/java/io/odh/test/framework/manager/requirements/ServerlessOperator.java
new file mode 100644
index 00000000..b3056588
--- /dev/null
+++ b/src/main/java/io/odh/test/framework/manager/requirements/ServerlessOperator.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright Skodjob authors.
+ * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
+ */
+package io.odh.test.framework.manager.requirements;
+
+import io.fabric8.kubernetes.api.model.Namespace;
+import io.fabric8.kubernetes.api.model.NamespaceBuilder;
+import io.fabric8.openshift.api.model.operatorhub.v1.OperatorGroupBuilder;
+import io.fabric8.openshift.api.model.operatorhub.v1alpha1.Subscription;
+import io.fabric8.openshift.api.model.operatorhub.v1alpha1.SubscriptionBuilder;
+import io.odh.test.OdhAnnotationsLabels;
+import io.odh.test.TestConstants;
+import io.odh.test.framework.manager.ResourceItem;
+import io.odh.test.framework.manager.ResourceManager;
+import io.odh.test.framework.manager.resources.OperatorGroupResource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Arrays;
+import java.util.Collections;
+
+public class ServerlessOperator {
+    private static final Logger LOGGER = LoggerFactory.getLogger(ServerlessOperator.class);
+    public static final String SUBSCRIPTION_NAME = "serverless-operator";
+    public static final String OPERATOR_NAME = "serverless-operator";
+    public static final String OPERATOR_NAMESPACE = "openshift-serverless";
+    public static void deployOperator() {
+        // Create ns for the operator
+        Namespace ns = new NamespaceBuilder()
+            .withNewMetadata()
+            .withName(OPERATOR_NAMESPACE)
+            .withLabels(Collections.singletonMap(OdhAnnotationsLabels.APP_LABEL_KEY, OdhAnnotationsLabels.APP_LABEL_VALUE))
+            .endMetadata()
+            .build();
+        ResourceManager.getInstance().createResourceWithoutWait(ns);
+        //Create operator group for the operator
+        if (OperatorGroupResource.operatorGroupClient().inNamespace(OPERATOR_NAMESPACE).list().getItems().isEmpty()) {
+            OperatorGroupBuilder operatorGroup = new OperatorGroupBuilder()
+                .editOrNewMetadata()
+                .withName("odh-group")
+                .withNamespace(OPERATOR_NAMESPACE)
+                .withLabels(Collections.singletonMap(OdhAnnotationsLabels.APP_LABEL_KEY, OdhAnnotationsLabels.APP_LABEL_VALUE))
+                .endMetadata();
+
+            ResourceManager.getInstance().createResourceWithWait(operatorGroup.build());
+        } else {
+            LOGGER.info("OperatorGroup already exists.");
+        }
+        // Create subscription
+        Subscription subscription = new SubscriptionBuilder()
+            .editOrNewMetadata()
+            .withName(SUBSCRIPTION_NAME)
+            .withNamespace(OPERATOR_NAMESPACE)
+            .withLabels(Collections.singletonMap(OdhAnnotationsLabels.APP_LABEL_KEY, OdhAnnotationsLabels.APP_LABEL_VALUE))
+            .endMetadata()
+            .editOrNewSpec()
+            .withName(OPERATOR_NAME)
+            .withChannel(TestConstants.CHANNEL_STABLE)
+            .withSource(TestConstants.REDHAT_CATALOG)
+            .withSourceNamespace(TestConstants.OPENSHIFT_MARKETPLACE_NS)
+            .withInstallPlanApproval(TestConstants.APPROVAL_AUTOMATIC)
+            .editOrNewConfig()
+            .endConfig()
+            .endSpec()
+            .build();
+
+        ResourceManager.getInstance().createResourceWithWait(subscription);
+        ResourceManager.getInstance().pushToStack(new ResourceItem(() -> deleteOperator(ns), null));
+    }
+
+    public static void deleteOperator(Namespace namespace) {
+        ResourceManager.getClient().delete(Arrays.asList(namespace));
+    }
+}
diff --git a/src/main/java/io/odh/test/framework/manager/requirements/ServiceMeshOperator.java b/src/main/java/io/odh/test/framework/manager/requirements/ServiceMeshOperator.java
new file mode 100644
index 00000000..f47223b2
--- /dev/null
+++ b/src/main/java/io/odh/test/framework/manager/requirements/ServiceMeshOperator.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright Skodjob authors.
+ * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
+ */
+package io.odh.test.framework.manager.requirements;
+
+import io.fabric8.openshift.api.model.operatorhub.v1alpha1.Subscription;
+import io.fabric8.openshift.api.model.operatorhub.v1alpha1.SubscriptionBuilder;
+import io.odh.test.OdhAnnotationsLabels;
+import io.odh.test.TestConstants;
+import io.odh.test.framework.manager.ResourceItem;
+import io.odh.test.framework.manager.ResourceManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Arrays;
+import java.util.Collections;
+
+public class ServiceMeshOperator {
+    private static final Logger LOGGER = LoggerFactory.getLogger(ServiceMeshOperator.class);
+    public static final String SUBSCRIPTION_NAME = "servicemeshoperator";
+    public static final String OPERATOR_NAME = "servicemeshoperator";
+    public static final String SERVICE_MESH_NAMESPACE = "istio-system";
+    public static final String SERVICE_MESH_NAME = "data-science-smcp";
+
+    public static void deployOperator() {
+        Subscription subscription = new SubscriptionBuilder()
+            .editOrNewMetadata()
+            .withName(SUBSCRIPTION_NAME)
+            .withNamespace(TestConstants.OPENSHIFT_OPERATORS_NS)
+            .withLabels(Collections.singletonMap(OdhAnnotationsLabels.APP_LABEL_KEY, OdhAnnotationsLabels.APP_LABEL_VALUE))
+            .endMetadata()
+            .editOrNewSpec()
+            .withName(OPERATOR_NAME)
+            .withChannel(TestConstants.CHANNEL_STABLE)
+            .withSource(TestConstants.REDHAT_CATALOG)
+            .withSourceNamespace(TestConstants.OPENSHIFT_MARKETPLACE_NS)
+            .withInstallPlanApproval(TestConstants.APPROVAL_AUTOMATIC)
+            .editOrNewConfig()
+            .endConfig()
+            .endSpec()
+            .build();
+
+        ResourceManager.getInstance().createResourceWithWait(subscription);
+        ResourceManager.getInstance().pushToStack(new ResourceItem(() -> deleteOperator(subscription), null));
+    }
+
+    public static void deleteOperator(Subscription subscription) {
+        ResourceManager.getClient().delete(Arrays.asList(subscription));
+    }
+}
diff --git a/src/main/java/io/odh/test/framework/manager/resources/DataScienceClusterResource.java b/src/main/java/io/odh/test/framework/manager/resources/DataScienceClusterResource.java
index a0cc095f..66c78cb0 100644
--- a/src/main/java/io/odh/test/framework/manager/resources/DataScienceClusterResource.java
+++ b/src/main/java/io/odh/test/framework/manager/resources/DataScienceClusterResource.java
@@ -15,6 +15,12 @@
 import io.odh.test.platform.KubeUtils;
 import io.odh.test.utils.PodUtils;
 import io.opendatahub.datasciencecluster.v1.DataScienceCluster;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Codeflare;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Datasciencepipelines;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Kserve;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Modelmeshserving;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Ray;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Trustyai;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -55,13 +61,55 @@ public boolean waitForReadiness(DataScienceCluster resource) {
             DataScienceCluster dsc = dataScienceCLusterClient().withName(resource.getMetadata().getName()).get();
             String dashboardStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "dashboardReady").getStatus();
-            LOGGER.debug("DataScienceCluster {} dashboard status: {}", resource.getMetadata().getName(), dashboardStatus);
+            LOGGER.debug("DataScienceCluster {} Dashboard status: {}", resource.getMetadata().getName(), dashboardStatus);
             dscReady = dashboardStatus.equals("True");
             String workbenchesStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "workbenchesReady").getStatus();
-            LOGGER.debug("DataScienceCluster {} workbenches status: {}", resource.getMetadata().getName(), workbenchesStatus);
+            LOGGER.debug("DataScienceCluster {} Workbenches status: {}", resource.getMetadata().getName(), workbenchesStatus);
             dscReady = dscReady && workbenchesStatus.equals("True");
+            // Wait for CodeFlare
+            if (resource.getSpec().getComponents().getCodeflare().getManagementState().equals(Codeflare.ManagementState.MANAGED)) {
+                String codeflareStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "codeflareReady").getStatus();
+                LOGGER.debug("DataScienceCluster {} CodeFlare status: {}", resource.getMetadata().getName(), codeflareStatus);
+                dscReady = dscReady && codeflareStatus.equals("True");
+            }
+
+            // Wait for ModelMesh
+            if (resource.getSpec().getComponents().getModelmeshserving().getManagementState().equals(Modelmeshserving.ManagementState.MANAGED)) {
+                String modelmeshStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "model-meshReady").getStatus();
+                LOGGER.debug("DataScienceCluster {} ModelMesh status: {}", resource.getMetadata().getName(), modelmeshStatus);
+                dscReady = dscReady && modelmeshStatus.equals("True");
+            }
+
+            // Wait for Ray
+            if (resource.getSpec().getComponents().getRay().getManagementState().equals(Ray.ManagementState.MANAGED)) {
+                String rayStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "rayReady").getStatus();
+                LOGGER.debug("DataScienceCluster {} Ray status: {}", resource.getMetadata().getName(), rayStatus);
+                dscReady = dscReady && rayStatus.equals("True");
+            }
+
+            // Wait for TrustyAI
+            if (resource.getSpec().getComponents().getTrustyai().getManagementState().equals(Trustyai.ManagementState.MANAGED)) {
+                String trustyAiStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "trustyaiReady").getStatus();
+                LOGGER.debug("DataScienceCluster {} TrustyAI status: {}", resource.getMetadata().getName(), trustyAiStatus);
+                dscReady = dscReady && trustyAiStatus.equals("True");
+            }
+
+            // Wait for KServe
+            if (resource.getSpec().getComponents().getKserve().getManagementState().equals(Kserve.ManagementState.MANAGED)) {
+                String kserveStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "kserveReady").getStatus();
+                LOGGER.debug("DataScienceCluster {} KServe status: {}", resource.getMetadata().getName(), kserveStatus);
+                dscReady = dscReady && kserveStatus.equals("True");
+            }
+
+            // Wait for PipelinesOperator
+            if (resource.getSpec().getComponents().getDatasciencepipelines().getManagementState().equals(Datasciencepipelines.ManagementState.MANAGED)) {
+                String pipelinesStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "data-science-pipelines-operatorReady").getStatus();
+                LOGGER.debug("DataScienceCluster {} DataSciencePipelines status: {}", resource.getMetadata().getName(), pipelinesStatus);
+                dscReady = dscReady && pipelinesStatus.equals("True");
+            }
+
             return dscReady;
         }, () -> { });
diff --git a/src/main/java/io/odh/test/install/OlmInstall.java b/src/main/java/io/odh/test/install/OlmInstall.java
index cbbcfe51..da8ddc00 100644
--- a/src/main/java/io/odh/test/install/OlmInstall.java
+++ b/src/main/java/io/odh/test/install/OlmInstall.java
@@ -12,6 +12,7 @@
 import io.fabric8.openshift.client.OpenShiftClient;
 import io.odh.test.Environment;
 import io.odh.test.OdhConstants;
+import io.odh.test.TestConstants;
 import io.odh.test.framework.manager.ResourceItem;
 import io.odh.test.framework.manager.ResourceManager;
 import io.odh.test.framework.manager.resources.OperatorGroupResource;
@@ -37,7 +38,7 @@ public class OlmInstall {
     private String operatorVersion = Environment.OLM_OPERATOR_VERSION;
     private String csvName = operatorName + "." + operatorVersion;
-    private String approval = "Automatic";
+    private String approval = TestConstants.APPROVAL_AUTOMATIC;
     public void create() {
         createNamespace();
diff --git a/src/test/java/io/odh/test/e2e/Abstract.java b/src/test/java/io/odh/test/e2e/Abstract.java
index 105b2aeb..65956e23 100644
--- a/src/test/java/io/odh/test/e2e/Abstract.java
+++ b/src/test/java/io/odh/test/e2e/Abstract.java
@@ -8,6 +8,10 @@
 import io.odh.test.framework.listeners.TestVisualSeparator;
 import io.odh.test.framework.manager.ResourceManager;
 import io.odh.test.framework.listeners.TestExceptionCallbackListener;
+import io.odh.test.framework.manager.requirements.PipelinesOperator;
+import io.odh.test.framework.manager.requirements.ServerlessOperator;
+import io.odh.test.framework.manager.requirements.ServiceMeshOperator;
+import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.TestInstance;
 import org.junit.jupiter.api.extension.ExtendWith;
@@ -19,4 +23,11 @@ public abstract class Abstract implements TestVisualSeparator {
     static {
         ResourceManager.getInstance();
     }
+
+    @BeforeAll
+    void setupDependencies() {
+        PipelinesOperator.deployOperator();
+        ServiceMeshOperator.deployOperator();
+        ServerlessOperator.deployOperator();
+    }
 }
diff --git a/src/test/java/io/odh/test/e2e/standard/DataScienceClusterST.java b/src/test/java/io/odh/test/e2e/standard/DataScienceClusterST.java
index 493b7003..3028d175 100644
--- a/src/test/java/io/odh/test/e2e/standard/DataScienceClusterST.java
+++ b/src/test/java/io/odh/test/e2e/standard/DataScienceClusterST.java
@@ -7,6 +7,7 @@
 import io.odh.test.OdhConstants;
 import io.odh.test.TestSuite;
 import io.odh.test.framework.manager.ResourceManager;
+import io.odh.test.framework.manager.requirements.ServiceMeshOperator;
 import io.odh.test.framework.manager.resources.DataScienceClusterResource;
 import io.opendatahub.datasciencecluster.v1.DataScienceCluster;
 import io.opendatahub.datasciencecluster.v1.DataScienceClusterBuilder;
@@ -19,6 +20,12 @@
 import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.DatasciencepipelinesBuilder;
 import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Kserve;
 import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.KserveBuilder;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Modelmeshserving;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.ModelmeshservingBuilder;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Ray;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.RayBuilder;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Trustyai;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.TrustyaiBuilder;
 import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Workbenches;
 import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.WorkbenchesBuilder;
 import io.opendatahub.dscinitialization.v1.DSCInitialization;
@@ -49,10 +56,10 @@ void createDataScienceCluster() {
                 .withNamespace(OdhConstants.MONITORING_NAMESPACE)
                 .endMonitoring()
                 .withNewServiceMesh()
-                    .withManagementState(ServiceMesh.ManagementState.REMOVED)
+                    .withManagementState(ServiceMesh.ManagementState.MANAGED)
                     .withNewControlPlane()
-                        .withName("data-science-smcp")
-                        .withNamespace("istio-system")
+                        .withName(ServiceMeshOperator.SERVICE_MESH_NAME)
+                        .withNamespace(ServiceMeshOperator.SERVICE_MESH_NAMESPACE)
                         .withMetricsCollection(ControlPlane.MetricsCollection.ISTIO)
                     .endControlPlane()
                 .endServiceMesh()
@@ -81,11 +88,19 @@ void createDataScienceCluster() {
                 .withDatasciencepipelines(
                     new DatasciencepipelinesBuilder().withManagementState(Datasciencepipelines.ManagementState.MANAGED).build()
                 )
+                .withModelmeshserving(
+                    new ModelmeshservingBuilder().withManagementState(Modelmeshserving.ManagementState.MANAGED).build()
+                )
+                .withRay(
+                    new RayBuilder().withManagementState(Ray.ManagementState.MANAGED).build()
+                )
+                .withTrustyai(
+                    new TrustyaiBuilder().withManagementState(Trustyai.ManagementState.MANAGED).build()
+                )
                 .build())
             .endSpec()
             .build();
-
         ResourceManager.getInstance().createResourceWithWait(dsci);
         ResourceManager.getInstance().createResourceWithWait(c);
@@ -96,5 +111,8 @@ void createDataScienceCluster() {
         assertEquals(Dashboard.ManagementState.MANAGED, cluster.getSpec().getComponents().getDashboard().getManagementState());
         assertEquals(Datasciencepipelines.ManagementState.MANAGED, cluster.getSpec().getComponents().getDatasciencepipelines().getManagementState());
         assertEquals(Workbenches.ManagementState.MANAGED, cluster.getSpec().getComponents().getWorkbenches().getManagementState());
+        assertEquals(Modelmeshserving.ManagementState.MANAGED, cluster.getSpec().getComponents().getModelmeshserving().getManagementState());
+        assertEquals(Ray.ManagementState.MANAGED, cluster.getSpec().getComponents().getRay().getManagementState());
+        assertEquals(Trustyai.ManagementState.MANAGED, cluster.getSpec().getComponents().getTrustyai().getManagementState());
     }
 }
diff --git a/src/test/java/io/odh/test/e2e/standard/NotebookST.java b/src/test/java/io/odh/test/e2e/standard/NotebookST.java
index dcd55722..f3012b25 100644
--- a/src/test/java/io/odh/test/e2e/standard/NotebookST.java
+++ b/src/test/java/io/odh/test/e2e/standard/NotebookST.java
@@ -14,6 +14,7 @@
 import io.odh.test.OdhAnnotationsLabels;
 import io.odh.test.OdhConstants;
 import io.odh.test.framework.manager.ResourceManager;
+import io.odh.test.framework.manager.requirements.ServiceMeshOperator;
 import io.odh.test.framework.manager.resources.NotebookResource;
 import io.odh.test.utils.PodUtils;
 import io.opendatahub.datasciencecluster.v1.DataScienceCluster;
@@ -27,6 +28,12 @@
 import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.DatasciencepipelinesBuilder;
 import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Kserve;
 import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.KserveBuilder;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Modelmeshserving;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.ModelmeshservingBuilder;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Ray;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.RayBuilder;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Trustyai;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.TrustyaiBuilder;
 import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Workbenches;
 import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.WorkbenchesBuilder;
 import io.opendatahub.dscinitialization.v1.DSCInitialization;
@@ -101,10 +108,10 @@ void deployDataScienceCluster() {
                 .withNamespace(OdhConstants.MONITORING_NAMESPACE)
                 .endMonitoring()
                 .withNewServiceMesh()
-                    .withManagementState(ServiceMesh.ManagementState.REMOVED)
+                    .withManagementState(ServiceMesh.ManagementState.MANAGED)
                     .withNewControlPlane()
-                        .withName("data-science-smcp")
-                        .withNamespace("istio-system")
+                        .withName(ServiceMeshOperator.SERVICE_MESH_NAME)
+                        .withNamespace(ServiceMeshOperator.SERVICE_MESH_NAMESPACE)
                         .withMetricsCollection(ControlPlane.MetricsCollection.ISTIO)
                     .endControlPlane()
                 .endServiceMesh()
@@ -134,6 +141,15 @@ void deployDataScienceCluster() {
                 .withDatasciencepipelines(
                     new DatasciencepipelinesBuilder().withManagementState(Datasciencepipelines.ManagementState.REMOVED).build()
                 )
+                .withModelmeshserving(
+                    new ModelmeshservingBuilder().withManagementState(Modelmeshserving.ManagementState.REMOVED).build()
+                )
+                .withRay(
+                    new RayBuilder().withManagementState(Ray.ManagementState.REMOVED).build()
+                )
+                .withTrustyai(
+                    new TrustyaiBuilder().withManagementState(Trustyai.ManagementState.REMOVED).build()
+                )
                 .build())
             .endSpec()
             .build();
diff --git a/src/test/java/io/odh/test/e2e/upgrade/UpgradeAbstract.java b/src/test/java/io/odh/test/e2e/upgrade/UpgradeAbstract.java
index 62bb1a47..ebcea9ed 100644
--- a/src/test/java/io/odh/test/e2e/upgrade/UpgradeAbstract.java
+++ b/src/test/java/io/odh/test/e2e/upgrade/UpgradeAbstract.java
@@ -17,6 +17,7 @@
 import io.odh.test.framework.listeners.OdhResourceCleaner;
 import io.odh.test.framework.listeners.ResourceManagerDeleteHandler;
 import io.odh.test.framework.manager.ResourceManager;
+import io.odh.test.framework.manager.requirements.ServiceMeshOperator;
 import io.odh.test.framework.manager.resources.NotebookResource;
 import io.opendatahub.datasciencecluster.v1.DataScienceCluster;
 import io.opendatahub.datasciencecluster.v1.DataScienceClusterBuilder;
@@ -31,6 +32,10 @@
 import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.KserveBuilder;
 import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Modelmeshserving;
 import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.ModelmeshservingBuilder;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Ray;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.RayBuilder;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Trustyai;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.TrustyaiBuilder;
 import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Workbenches;
 import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.WorkbenchesBuilder;
 import io.opendatahub.dscinitialization.v1.DSCInitialization;
@@ -63,10 +68,10 @@ protected void deployDsc(String name) {
                 .withNamespace(OdhConstants.MONITORING_NAMESPACE)
                 .endMonitoring()
                 .withNewServiceMesh()
-                    .withManagementState(ServiceMesh.ManagementState.REMOVED)
+                    .withManagementState(ServiceMesh.ManagementState.MANAGED)
                     .withNewControlPlane()
-                        .withName("data-science-smcp")
-                        .withNamespace("istio-system")
+                        .withName(ServiceMeshOperator.SERVICE_MESH_NAME)
+                        .withNamespace(ServiceMeshOperator.SERVICE_MESH_NAMESPACE)
                         .withMetricsCollection(ControlPlane.MetricsCollection.ISTIO)
                     .endControlPlane()
                 .endServiceMesh()
@@ -97,7 +102,13 @@ protected void deployDsc(String name) {
                     new DatasciencepipelinesBuilder().withManagementState(Datasciencepipelines.ManagementState.MANAGED).build()
                 )
                 .withModelmeshserving(
-                    new ModelmeshservingBuilder().withManagementState(Modelmeshserving.ManagementState.REMOVED).build()
+                    new ModelmeshservingBuilder().withManagementState(Modelmeshserving.ManagementState.MANAGED).build()
+                )
+                .withRay(
+                    new RayBuilder().withManagementState(Ray.ManagementState.MANAGED).build()
+                )
+                .withTrustyai(
+                    new TrustyaiBuilder().withManagementState(Trustyai.ManagementState.MANAGED).build()
                 )
                 .build())
             .endSpec()