Add Modelmesh, Ray and TrustyAI operands (#79)
Signed-off-by: Jakub Stejskal <[email protected]>
Frawless authored Jan 30, 2024
1 parent a00e90a commit 1c00e58
Showing 13 changed files with 314 additions and 17 deletions.
2 changes: 1 addition & 1 deletion .gitignore
@@ -38,5 +38,5 @@ build/

## Suite specific files
**/*.kubeconfig
env
*.env
config.yaml
2 changes: 2 additions & 0 deletions src/main/java/io/odh/test/OdhAnnotationsLabels.java
@@ -14,5 +14,7 @@ public class OdhAnnotationsLabels {

public static final String ANNO_SERVICE_MESH = ODH_DOMAIN + "service-mesh";
public static final String ANNO_NTB_INJECT_OAUTH = "notebooks." + ODH_DOMAIN + "inject-oauth";
public static final String APP_LABEL_KEY = "app";
public static final String APP_LABEL_VALUE = "odh-e2e";

}
10 changes: 10 additions & 0 deletions src/main/java/io/odh/test/TestConstants.java
@@ -12,6 +12,9 @@ public class TestConstants {
public static final String SUBSCRIPTION = "Subscription";
public static final String OPERATOR_GROUP = "OperatorGroup";

public static final String APPROVAL_AUTOMATIC = "Automatic";
public static final String APPROVAL_MANUAL = "Manual";

public static final String LATEST_BUNDLE_DEPLOY_FILE = "install-files/latest.yaml";
public static final String RELEASED_BUNDLE_DEPLOY_FILE = "install-files/released.yaml";

@@ -23,6 +26,13 @@ public class TestConstants {
public static final long GLOBAL_STABILITY_TIME = Duration.ofMinutes(1).toSeconds();
public static final String LOG_COLLECT_LABEL = "io.odh-e2e.collect-logs";

// OLM Constants
public static final String OPENSHIFT_MARKETPLACE_NS = "openshift-marketplace";
public static final String OPENSHIFT_OPERATORS_NS = "openshift-operators";
public static final String REDHAT_CATALOG = "redhat-operators";
public static final String CHANNEL_STABLE = "stable";
public static final String CHANNEL_LATEST = "latest";

private TestConstants() {
}
}
@@ -147,10 +147,14 @@ private <T extends HasMetadata> void createResource(boolean waitReady, T... resources)
} else {
client.getClient().resource(resource).create();
}

}
} else {
type.create(resource);
if (client.getClient().resource(resource).get() != null) {
type.update(resource);
} else {
type.create(resource);
}

if (waitReady) {
assertTrue(waitResourceCondition(resource, ResourceCondition.readiness(type)),
String.format("Timed out waiting for %s %s/%s to be ready", resource.getKind(), resource.getMetadata().getNamespace(), resource.getMetadata().getName()));
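The change above makes createResource tolerant of pre-existing objects: if the resource is already on the cluster it is updated in place, otherwise it is created as before. A minimal sketch of the same create-or-update pattern against the fabric8 client directly (the client and createOrUpdate names here are illustrative and not part of this commit; assumes a fabric8 6.x KubernetesClient):

import io.fabric8.kubernetes.api.model.HasMetadata;
import io.fabric8.kubernetes.client.KubernetesClient;

public class CreateOrUpdateSketch {
    // Create the resource if it is absent, otherwise update the existing object.
    static <T extends HasMetadata> void createOrUpdate(KubernetesClient client, T resource) {
        if (client.resource(resource).get() != null) {
            client.resource(resource).update();
        } else {
            client.resource(resource).create();
        }
    }
}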
New file — io.odh.test.framework.manager.requirements.PipelinesOperator
@@ -0,0 +1,50 @@
/*
* Copyright Skodjob authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.odh.test.framework.manager.requirements;

import io.fabric8.openshift.api.model.operatorhub.v1alpha1.Subscription;
import io.fabric8.openshift.api.model.operatorhub.v1alpha1.SubscriptionBuilder;
import io.odh.test.OdhAnnotationsLabels;
import io.odh.test.TestConstants;
import io.odh.test.framework.manager.ResourceItem;
import io.odh.test.framework.manager.ResourceManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.Collections;

public class PipelinesOperator {
private static final Logger LOGGER = LoggerFactory.getLogger(PipelinesOperator.class);

public static final String SUBSCRIPTION_NAME = "openshift-pipelines-operator";
public static final String OPERATOR_NAME = "openshift-pipelines-operator-rh";

public static void deployOperator() {
Subscription subscription = new SubscriptionBuilder()
.editOrNewMetadata()
.withName(SUBSCRIPTION_NAME)
.withNamespace(TestConstants.OPENSHIFT_OPERATORS_NS)
.withLabels(Collections.singletonMap(OdhAnnotationsLabels.APP_LABEL_KEY, OdhAnnotationsLabels.APP_LABEL_VALUE))
.endMetadata()
.editOrNewSpec()
.withName(OPERATOR_NAME)
.withChannel(TestConstants.CHANNEL_LATEST)
.withSource(TestConstants.REDHAT_CATALOG)
.withSourceNamespace(TestConstants.OPENSHIFT_MARKETPLACE_NS)
.withInstallPlanApproval(TestConstants.APPROVAL_AUTOMATIC)
.editOrNewConfig()
.endConfig()
.endSpec()
.build();

ResourceManager.getInstance().createResourceWithWait(subscription);
ResourceManager.getInstance().pushToStack(new ResourceItem(() -> deleteOperator(subscription), null));
}

public static void deleteOperator(Subscription subscription) {
ResourceManager.getClient().delete(Arrays.asList(subscription));
}
}
New file — io.odh.test.framework.manager.requirements.ServerlessOperator
@@ -0,0 +1,75 @@
/*
* Copyright Skodjob authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.odh.test.framework.manager.requirements;

import io.fabric8.kubernetes.api.model.Namespace;
import io.fabric8.kubernetes.api.model.NamespaceBuilder;
import io.fabric8.openshift.api.model.operatorhub.v1.OperatorGroupBuilder;
import io.fabric8.openshift.api.model.operatorhub.v1alpha1.Subscription;
import io.fabric8.openshift.api.model.operatorhub.v1alpha1.SubscriptionBuilder;
import io.odh.test.OdhAnnotationsLabels;
import io.odh.test.TestConstants;
import io.odh.test.framework.manager.ResourceItem;
import io.odh.test.framework.manager.ResourceManager;
import io.odh.test.framework.manager.resources.OperatorGroupResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.Collections;

public class ServerlessOperator {
private static final Logger LOGGER = LoggerFactory.getLogger(ServerlessOperator.class);
public static final String SUBSCRIPTION_NAME = "serverless-operator";
public static final String OPERATOR_NAME = "serverless-operator";
public static final String OPERATOR_NAMESPACE = "openshift-serverless";
public static void deployOperator() {
// Create ns for the operator
Namespace ns = new NamespaceBuilder()
.withNewMetadata()
.withName(OPERATOR_NAMESPACE)
.withLabels(Collections.singletonMap(OdhAnnotationsLabels.APP_LABEL_KEY, OdhAnnotationsLabels.APP_LABEL_VALUE))
.endMetadata()
.build();
ResourceManager.getInstance().createResourceWithoutWait(ns);
//Create operator group for the operator
if (OperatorGroupResource.operatorGroupClient().inNamespace(OPERATOR_NAMESPACE).list().getItems().isEmpty()) {
OperatorGroupBuilder operatorGroup = new OperatorGroupBuilder()
.editOrNewMetadata()
.withName("odh-group")
.withNamespace(OPERATOR_NAMESPACE)
.withLabels(Collections.singletonMap(OdhAnnotationsLabels.APP_LABEL_KEY, OdhAnnotationsLabels.APP_LABEL_VALUE))
.endMetadata();

ResourceManager.getInstance().createResourceWithWait(operatorGroup.build());
} else {
LOGGER.info("OperatorGroup is already exists.");
}
// Create subscription
Subscription subscription = new SubscriptionBuilder()
.editOrNewMetadata()
.withName(SUBSCRIPTION_NAME)
.withNamespace(OPERATOR_NAMESPACE)
.withLabels(Collections.singletonMap(OdhAnnotationsLabels.APP_LABEL_KEY, OdhAnnotationsLabels.APP_LABEL_VALUE))
.endMetadata()
.editOrNewSpec()
.withName(OPERATOR_NAME)
.withChannel(TestConstants.CHANNEL_STABLE)
.withSource(TestConstants.REDHAT_CATALOG)
.withSourceNamespace(TestConstants.OPENSHIFT_MARKETPLACE_NS)
.withInstallPlanApproval(TestConstants.APPROVAL_AUTOMATIC)
.editOrNewConfig()
.endConfig()
.endSpec()
.build();

ResourceManager.getInstance().createResourceWithWait(subscription);
ResourceManager.getInstance().pushToStack(new ResourceItem(() -> deleteOperator(ns), null));
}

public static void deleteOperator(Namespace namespace) {
ResourceManager.getClient().delete(Arrays.asList(namespace));
}
}
New file — io.odh.test.framework.manager.requirements.ServiceMeshOperator
@@ -0,0 +1,51 @@
/*
* Copyright Skodjob authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.odh.test.framework.manager.requirements;

import io.fabric8.openshift.api.model.operatorhub.v1alpha1.Subscription;
import io.fabric8.openshift.api.model.operatorhub.v1alpha1.SubscriptionBuilder;
import io.odh.test.OdhAnnotationsLabels;
import io.odh.test.TestConstants;
import io.odh.test.framework.manager.ResourceItem;
import io.odh.test.framework.manager.ResourceManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.Collections;

public class ServiceMeshOperator {
private static final Logger LOGGER = LoggerFactory.getLogger(ServiceMeshOperator.class);
public static final String SUBSCRIPTION_NAME = "servicemeshoperator";
public static final String OPERATOR_NAME = "servicemeshoperator";
public static final String SERVICE_MESH_NAMESPACE = "istio-system";
public static final String SERVICE_MESH_NAME = "data-science-smcp";

public static void deployOperator() {
Subscription subscription = new SubscriptionBuilder()
.editOrNewMetadata()
.withName(SUBSCRIPTION_NAME)
.withNamespace(TestConstants.OPENSHIFT_OPERATORS_NS)
.withLabels(Collections.singletonMap(OdhAnnotationsLabels.APP_LABEL_KEY, OdhAnnotationsLabels.APP_LABEL_VALUE))
.endMetadata()
.editOrNewSpec()
.withName(OPERATOR_NAME)
.withChannel(TestConstants.CHANNEL_STABLE)
.withSource(TestConstants.REDHAT_CATALOG)
.withSourceNamespace(TestConstants.OPENSHIFT_MARKETPLACE_NS)
.withInstallPlanApproval(TestConstants.APPROVAL_AUTOMATIC)
.editOrNewConfig()
.endConfig()
.endSpec()
.build();

ResourceManager.getInstance().createResourceWithWait(subscription);
ResourceManager.getInstance().pushToStack(new ResourceItem(() -> deleteOperator(subscription), null));
}

public static void deleteOperator(Subscription subscription) {
ResourceManager.getClient().delete(Arrays.asList(subscription));
}
}
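The three requirement classes above share one pattern: build an OLM Subscription (ServerlessOperator additionally creates its own Namespace and OperatorGroup), create it through the ResourceManager, and push a ResourceItem onto the cleanup stack so the operator is removed when the test run unwinds. A hypothetical call site, mirroring what the Abstract test base wires up further below:

// Hypothetical usage sketch; the real wiring is Abstract#setupDependencies shown later in this diff.
PipelinesOperator.deployOperator();    // Subscription "openshift-pipelines-operator" in openshift-operators, channel "latest"
ServiceMeshOperator.deployOperator();  // Subscription "servicemeshoperator" in openshift-operators, channel "stable"
ServerlessOperator.deployOperator();   // Namespace "openshift-serverless" + OperatorGroup + Subscription, channel "stable"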
@@ -15,6 +15,12 @@
import io.odh.test.platform.KubeUtils;
import io.odh.test.utils.PodUtils;
import io.opendatahub.datasciencecluster.v1.DataScienceCluster;
import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Codeflare;
import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Datasciencepipelines;
import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Kserve;
import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Modelmeshserving;
import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Ray;
import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Trustyai;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -55,13 +61,55 @@ public boolean waitForReadiness(DataScienceCluster resource) {
DataScienceCluster dsc = dataScienceCLusterClient().withName(resource.getMetadata().getName()).get();

String dashboardStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "dashboardReady").getStatus();
LOGGER.debug("DataScienceCluster {} dashboard status: {}", resource.getMetadata().getName(), dashboardStatus);
LOGGER.debug("DataScienceCluster {} Dashboard status: {}", resource.getMetadata().getName(), dashboardStatus);
dscReady = dashboardStatus.equals("True");

String workbenchesStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "workbenchesReady").getStatus();
LOGGER.debug("DataScienceCluster {} workbenches status: {}", resource.getMetadata().getName(), workbenchesStatus);
LOGGER.debug("DataScienceCluster {} Workbenches status: {}", resource.getMetadata().getName(), workbenchesStatus);
dscReady = dscReady && workbenchesStatus.equals("True");

// Wait for CodeFlare
if (resource.getSpec().getComponents().getCodeflare().getManagementState().equals(Codeflare.ManagementState.MANAGED)) {
String codeflareStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "codeflareReady").getStatus();
LOGGER.debug("DataScienceCluster {} CodeFlare status: {}", resource.getMetadata().getName(), codeflareStatus);
dscReady = dscReady && codeflareStatus.equals("True");
}

// Wait for ModelMesh
if (resource.getSpec().getComponents().getModelmeshserving().getManagementState().equals(Modelmeshserving.ManagementState.MANAGED)) {
String modelMeshStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "model-meshReady").getStatus();
LOGGER.debug("DataScienceCluster {} ModelMesh status: {}", resource.getMetadata().getName(), modelMeshStatus);
dscReady = dscReady && modelMeshStatus.equals("True");
}

// Wait for Ray
if (resource.getSpec().getComponents().getRay().getManagementState().equals(Ray.ManagementState.MANAGED)) {
String rayStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "rayReady").getStatus();
LOGGER.debug("DataScienceCluster {} Ray status: {}", resource.getMetadata().getName(), rayStatus);
dscReady = dscReady && rayStatus.equals("True");
}

// Wait for TrustyAI
if (resource.getSpec().getComponents().getTrustyai().getManagementState().equals(Trustyai.ManagementState.MANAGED)) {
String trustyAiStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "trustyaiReady").getStatus();
LOGGER.debug("DataScienceCluster {} TrustyAI status: {}", resource.getMetadata().getName(), trustyAiStatus);
dscReady = dscReady && trustyAiStatus.equals("True");
}

// Wait for KServe
if (resource.getSpec().getComponents().getKserve().getManagementState().equals(Kserve.ManagementState.MANAGED)) {
String kserveStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "kserveReady").getStatus();
LOGGER.debug("DataScienceCluster {} KServe status: {}", resource.getMetadata().getName(), kserveStatus);
dscReady = dscReady && kserveStatus.equals("True");
}

// Wait for PipelinesOperator
if (resource.getSpec().getComponents().getDatasciencepipelines().getManagementState().equals(Datasciencepipelines.ManagementState.MANAGED)) {
String pipelinesStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "data-science-pipelines-operatorReady").getStatus();
LOGGER.debug("DataScienceCluster {} DataSciencePipelines status: {}", resource.getMetadata().getName(), pipelinesStatus);
dscReady = dscReady && pipelinesStatus.equals("True");
}

return dscReady;
}, () -> { });

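Each new operand check repeats the same two steps: look up the component's readiness condition by type and require its status to be "True". A self-contained sketch of that pattern with an illustrative stand-in condition type (the real code goes through KubeUtils.getDscConditionByType and the generated DataScienceCluster status model):

import java.util.List;
import java.util.Objects;

public class DscReadinessSketch {
    // Illustrative stand-in for the generated condition type.
    record Condition(String type, String status) {}

    // True only if a condition with the given type exists and reports status "True".
    static boolean isConditionTrue(List<Condition> conditions, String type) {
        return conditions.stream()
                .filter(c -> Objects.equals(c.type(), type))
                .map(Condition::status)
                .anyMatch("True"::equals);
    }
    // e.g. dscReady = dscReady && isConditionTrue(conditions, "rayReady");
}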
3 changes: 2 additions & 1 deletion src/main/java/io/odh/test/install/OlmInstall.java
@@ -12,6 +12,7 @@
import io.fabric8.openshift.client.OpenShiftClient;
import io.odh.test.Environment;
import io.odh.test.OdhConstants;
import io.odh.test.TestConstants;
import io.odh.test.framework.manager.ResourceItem;
import io.odh.test.framework.manager.ResourceManager;
import io.odh.test.framework.manager.resources.OperatorGroupResource;
@@ -37,7 +38,7 @@ public class OlmInstall {
private String operatorVersion = Environment.OLM_OPERATOR_VERSION;
private String csvName = operatorName + "." + operatorVersion;

private String approval = "Automatic";
private String approval = TestConstants.APPROVAL_AUTOMATIC;

public void create() {
createNamespace();
11 changes: 11 additions & 0 deletions src/test/java/io/odh/test/e2e/Abstract.java
@@ -8,6 +8,10 @@
import io.odh.test.framework.listeners.TestVisualSeparator;
import io.odh.test.framework.manager.ResourceManager;
import io.odh.test.framework.listeners.TestExceptionCallbackListener;
import io.odh.test.framework.manager.requirements.PipelinesOperator;
import io.odh.test.framework.manager.requirements.ServerlessOperator;
import io.odh.test.framework.manager.requirements.ServiceMeshOperator;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.extension.ExtendWith;

@@ -19,4 +23,11 @@ public abstract class Abstract implements TestVisualSeparator {
static {
ResourceManager.getInstance();
}

@BeforeAll
void setupDependencies() {
PipelinesOperator.deployOperator();
ServiceMeshOperator.deployOperator();
ServerlessOperator.deployOperator();
}
}
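Note that setupDependencies is a non-static @BeforeAll method, which JUnit 5 only allows with a per-class test instance lifecycle; the TestInstance import suggests the class carries that annotation outside the shown hunk. Assumed class-level shape (a sketch, not part of this diff):

@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public abstract class Abstract implements TestVisualSeparator {
    @BeforeAll
    void setupDependencies() {
        // deploy OpenShift Pipelines, Service Mesh and Serverless operators before any test class runs
    }
}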