From 82a5240f62436c649148d40b636d770eef9d1011 Mon Sep 17 00:00:00 2001 From: Jakub Stejskal Date: Tue, 5 Dec 2023 16:29:10 +0100 Subject: [PATCH 1/2] Add support for Notebooks to create them in tests Signed-off-by: Jakub Stejskal --- pom.xml | 8 + src/main/java/io/odh/test/Environment.java | 5 + .../io/odh/test/OdhAnnotationsLabels.java | 18 +++ src/main/java/io/odh/test/TestUtils.java | 15 ++ .../framework/manager/ResourceManager.java | 6 +- .../resources/DataScienceClusterResource.java | 49 ++++++ .../manager/resources/NotebookResource.java | 63 ++++++++ .../io/odh/test/install/InstallTypes.java | 10 ++ .../java/io/odh/test/platform/KubeClient.java | 4 + .../io/odh/test/utils/NamespaceUtils.java | 37 +++++ src/main/java/io/odh/test/utils/PodUtils.java | 64 ++++++++ src/main/resources/notebook.yaml | 146 ++++++++++++++++++ src/test/java/io/odh/test/e2e/Abstract.java | 26 ++++ .../io/odh/test/e2e/standard/NotebookST.java | 122 +++++++++++++++ 14 files changed, 571 insertions(+), 2 deletions(-) create mode 100644 src/main/java/io/odh/test/OdhAnnotationsLabels.java create mode 100644 src/main/java/io/odh/test/framework/manager/resources/DataScienceClusterResource.java create mode 100644 src/main/java/io/odh/test/framework/manager/resources/NotebookResource.java create mode 100644 src/main/java/io/odh/test/install/InstallTypes.java create mode 100644 src/main/java/io/odh/test/utils/NamespaceUtils.java create mode 100644 src/main/java/io/odh/test/utils/PodUtils.java create mode 100644 src/main/resources/notebook.yaml create mode 100644 src/test/java/io/odh/test/e2e/standard/NotebookST.java diff --git a/pom.xml b/pom.xml index 6b1004f7..4354e587 100644 --- a/pom.xml +++ b/pom.xml @@ -57,6 +57,7 @@ 10.12.5 3.3.1 1.7.1 + 2.15.1 @@ -161,6 +162,13 @@ ${slf4j.version} compile + + + commons-io + commons-io + ${commons.io.version} + compile + diff --git a/src/main/java/io/odh/test/Environment.java b/src/main/java/io/odh/test/Environment.java index 4ff5d115..b8e79794 100644 --- a/src/main/java/io/odh/test/Environment.java +++ b/src/main/java/io/odh/test/Environment.java @@ -4,6 +4,7 @@ */ package io.odh.test; +import io.odh.test.install.InstallTypes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -44,6 +45,7 @@ public class Environment { private static final String OLM_APP_BUNDLE_PREFIX_ENV = "OLM_APP_BUNDLE_PREFIX"; private static final String OLM_OPERATOR_VERSION_ENV = "OLM_OPERATOR_VERSION"; private static final String OLM_OPERATOR_CHANNEL_ENV = "OLM_OPERATOR_CHANNEL"; + private static final String OPERATOR_INSTALL_TYPE_ENV = "OPERATOR_INSTALL_TYPE"; /** * Defaults @@ -78,6 +80,9 @@ public class Environment { public static final String OLM_APP_BUNDLE_PREFIX = getOrDefault(OLM_APP_BUNDLE_PREFIX_ENV, OLM_APP_BUNDLE_PREFIX_DEFAULT); public static final String OLM_OPERATOR_CHANNEL = getOrDefault(OLM_OPERATOR_CHANNEL_ENV, OLM_OPERATOR_CHANNEL_DEFAULT); public static final String OLM_OPERATOR_VERSION = getOrDefault(OLM_OPERATOR_VERSION_ENV, OLM_OPERATOR_VERSION_DEFAULT); + + public static final String OPERATOR_INSTALL_TYPE = getOrDefault(OPERATOR_INSTALL_TYPE_ENV, InstallTypes.OLM.toString()); + private Environment() { } static { diff --git a/src/main/java/io/odh/test/OdhAnnotationsLabels.java b/src/main/java/io/odh/test/OdhAnnotationsLabels.java new file mode 100644 index 00000000..fdf78e5b --- /dev/null +++ b/src/main/java/io/odh/test/OdhAnnotationsLabels.java @@ -0,0 +1,18 @@ +/* + * Copyright Skodjob authors. 
+ * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). + */ +package io.odh.test; + +public class OdhAnnotationsLabels { + public static final String OPENSHIFT_DOMAIN = "openshift.io/"; + public static final String ODH_DOMAIN = "opendatahub.io/"; + + public static final String LABEL_DASHBOARD = ODH_DOMAIN + "dashboard"; + public static final String LABEL_ODH_MANAGED = ODH_DOMAIN + "odh-managed"; + public static final String LABEL_SIDECAR_ISTIO_INJECT = "sidecar.istio.io/inject"; + + public static final String ANNO_SERVICE_MESH = ODH_DOMAIN + "service-mesh"; + public static final String ANNO_NTB_INJECT_OAUTH = "notebooks." + ODH_DOMAIN + "inject-oauth"; + +} diff --git a/src/main/java/io/odh/test/TestUtils.java b/src/main/java/io/odh/test/TestUtils.java index 28127461..528a441d 100644 --- a/src/main/java/io/odh/test/TestUtils.java +++ b/src/main/java/io/odh/test/TestUtils.java @@ -4,10 +4,14 @@ */ package io.odh.test; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.exc.InvalidFormatException; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import io.odh.test.framework.WaitException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; import java.io.StringWriter; @@ -174,4 +178,15 @@ public static InputStream getFileFromResourceAsStream(String fileName) { } } + + public static T configFromYaml(String yamlFile, Class c) { + ObjectMapper mapper = new ObjectMapper(new YAMLFactory()); + try { + return mapper.readValue(yamlFile, c); + } catch (InvalidFormatException e) { + throw new IllegalArgumentException(e); + } catch (IOException e) { + throw new RuntimeException(e); + } + } } diff --git a/src/main/java/io/odh/test/framework/manager/ResourceManager.java b/src/main/java/io/odh/test/framework/manager/ResourceManager.java index 205bed82..4e07c9d3 100644 --- a/src/main/java/io/odh/test/framework/manager/ResourceManager.java +++ b/src/main/java/io/odh/test/framework/manager/ResourceManager.java @@ -7,15 +7,15 @@ import io.fabric8.kubernetes.api.model.HasMetadata; import io.fabric8.kubernetes.api.model.admissionregistration.v1.ValidatingWebhookConfiguration; import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; -import io.fabric8.kubernetes.api.model.apps.Deployment; import io.fabric8.kubernetes.api.model.rbac.ClusterRole; import io.fabric8.kubernetes.api.model.rbac.ClusterRoleBinding; import io.odh.test.TestConstants; import io.odh.test.TestUtils; +import io.odh.test.framework.manager.resources.DataScienceClusterResource; +import io.odh.test.framework.manager.resources.NotebookResource; import io.odh.test.framework.manager.resources.OperatorGroupResource; import io.odh.test.framework.manager.resources.SubscriptionResource; import io.odh.test.platform.KubeClient; -import io.odh.test.utils.DeploymentUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -51,6 +51,8 @@ public static KubeClient getClient() { private final ResourceType[] resourceTypes = new ResourceType[]{ new SubscriptionResource(), new OperatorGroupResource(), + new DataScienceClusterResource(), + new NotebookResource(), }; public final void switchToTestResourceStack() { diff --git a/src/main/java/io/odh/test/framework/manager/resources/DataScienceClusterResource.java b/src/main/java/io/odh/test/framework/manager/resources/DataScienceClusterResource.java new file mode 100644 index 00000000..7f3f85ca --- 
/dev/null +++ b/src/main/java/io/odh/test/framework/manager/resources/DataScienceClusterResource.java @@ -0,0 +1,49 @@ +/* + * Copyright Skodjob authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). + */ +package io.odh.test.framework.manager.resources; + +import io.fabric8.kubernetes.api.model.KubernetesResourceList; +import io.fabric8.kubernetes.client.dsl.MixedOperation; +import io.fabric8.kubernetes.client.dsl.Resource; +import io.odh.test.framework.manager.ResourceManager; +import io.odh.test.framework.manager.ResourceType; +import io.opendatahub.datasciencecluster.v1.DataScienceCluster; + +public class DataScienceClusterResource implements ResourceType { + @Override + public String getKind() { + return "DataScienceCluster"; + } + + @Override + public DataScienceCluster get(String namespace, String name) { + return dataScienceCLusterClient().inNamespace(namespace).withName(name).get(); + } + + @Override + public void create(DataScienceCluster resource) { + dataScienceCLusterClient().inNamespace(resource.getMetadata().getNamespace()).resource(resource).create(); + } + + @Override + public void delete(DataScienceCluster resource) { + dataScienceCLusterClient().inNamespace(resource.getMetadata().getNamespace()).withName(resource.getMetadata().getName()).delete(); + } + + @Override + public void update(DataScienceCluster resource) { + dataScienceCLusterClient().inNamespace(resource.getMetadata().getNamespace()).resource(resource).update(); + } + + @Override + public boolean waitForReadiness(DataScienceCluster resource) { + return resource != null; + } + + public static MixedOperation, Resource> dataScienceCLusterClient() { + return ResourceManager.getClient().getClient().resources(DataScienceCluster.class); + } + +} diff --git a/src/main/java/io/odh/test/framework/manager/resources/NotebookResource.java b/src/main/java/io/odh/test/framework/manager/resources/NotebookResource.java new file mode 100644 index 00000000..f2afd1ba --- /dev/null +++ b/src/main/java/io/odh/test/framework/manager/resources/NotebookResource.java @@ -0,0 +1,63 @@ +/* + * Copyright Skodjob authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). 
+ */ +package io.odh.test.framework.manager.resources; + +import io.fabric8.kubernetes.api.model.KubernetesResourceList; +import io.fabric8.kubernetes.client.dsl.MixedOperation; +import io.fabric8.kubernetes.client.dsl.Resource; +import io.odh.test.TestUtils; +import io.odh.test.framework.manager.ResourceManager; +import io.odh.test.framework.manager.ResourceType; +import org.kubeflow.v1.Notebook; + +import java.io.IOException; +import java.io.InputStream; +import org.apache.commons.io.IOUtils; + + +public class NotebookResource implements ResourceType { + + private static final String NOTEBOOK_TEMPLATE_PATH = "notebook.yaml"; + @Override + public String getKind() { + return "Notebook"; + } + + @Override + public Notebook get(String namespace, String name) { + return notebookClient().inNamespace(namespace).withName(name).get(); + } + + @Override + public void create(Notebook resource) { + notebookClient().inNamespace(resource.getMetadata().getNamespace()).resource(resource).create(); + } + + @Override + public void delete(Notebook resource) { + notebookClient().inNamespace(resource.getMetadata().getNamespace()).withName(resource.getMetadata().getName()).delete(); + } + + @Override + public void update(Notebook resource) { + notebookClient().inNamespace(resource.getMetadata().getNamespace()).resource(resource).update(); + } + + @Override + public boolean waitForReadiness(Notebook resource) { + return resource != null; + } + + public static MixedOperation, Resource> notebookClient() { + return ResourceManager.getClient().getClient().resources(Notebook.class); + } + + public static Notebook loadDefaultNotebook(String namespace, String name) throws IOException { + InputStream is = TestUtils.getFileFromResourceAsStream(NOTEBOOK_TEMPLATE_PATH); + String notebookString = IOUtils.toString(is, "UTF-8"); + notebookString = notebookString.replace("my-project", namespace).replace("my-workbench", name); + return TestUtils.configFromYaml(notebookString, Notebook.class); + } +} diff --git a/src/main/java/io/odh/test/install/InstallTypes.java b/src/main/java/io/odh/test/install/InstallTypes.java new file mode 100644 index 00000000..ad33d18a --- /dev/null +++ b/src/main/java/io/odh/test/install/InstallTypes.java @@ -0,0 +1,10 @@ +/* + * Copyright Skodjob authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). + */ +package io.odh.test.install; + +public enum InstallTypes { + OLM, + BUNDLE +} diff --git a/src/main/java/io/odh/test/platform/KubeClient.java b/src/main/java/io/odh/test/platform/KubeClient.java index 0e12898c..97cde87a 100644 --- a/src/main/java/io/odh/test/platform/KubeClient.java +++ b/src/main/java/io/odh/test/platform/KubeClient.java @@ -185,6 +185,10 @@ public List listPods(String namespaceName) { return client.pods().inNamespace(namespaceName).list().getItems(); } + public List listPods(String namespaceName, LabelSelector selector) { + return client.pods().inNamespace(namespaceName).withLabelSelector(selector).list().getItems(); + } + /** * Returns list of pods by prefix in pod name * diff --git a/src/main/java/io/odh/test/utils/NamespaceUtils.java b/src/main/java/io/odh/test/utils/NamespaceUtils.java new file mode 100644 index 00000000..72398ee7 --- /dev/null +++ b/src/main/java/io/odh/test/utils/NamespaceUtils.java @@ -0,0 +1,37 @@ +/* + * Copyright Skodjob authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). 
 */ +package io.odh.test.utils; + +import io.odh.test.TestConstants; +import io.odh.test.TestUtils; +import io.odh.test.framework.manager.ResourceManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.time.Duration; + +public class NamespaceUtils { + + private static final Logger LOGGER = LoggerFactory.getLogger(NamespaceUtils.class); + private static final long DELETION_TIMEOUT = Duration.ofMinutes(2).toMillis(); + + private NamespaceUtils() { } + + public static void waitForNamespaceReadiness(String name) { + LOGGER.info("Waiting for Namespace: {} readiness", name); + + TestUtils.waitFor("Namespace: " + name, TestConstants.GLOBAL_POLL_INTERVAL_SHORT, DELETION_TIMEOUT, + () -> ResourceManager.getClient().getNamespace(name) != null); + LOGGER.info("Namespace: {} is ready", name); + } + + public static void waitForNamespaceDeletion(String name) { + LOGGER.info("Waiting for Namespace: {} deletion", name); + + TestUtils.waitFor("Namespace: " + name, TestConstants.GLOBAL_POLL_INTERVAL_SHORT, DELETION_TIMEOUT, + () -> ResourceManager.getClient().getNamespace(name) == null); + LOGGER.info("Namespace: {} was deleted", name); + } +} diff --git a/src/main/java/io/odh/test/utils/PodUtils.java b/src/main/java/io/odh/test/utils/PodUtils.java new file mode 100644 index 00000000..57da893f --- /dev/null +++ b/src/main/java/io/odh/test/utils/PodUtils.java @@ -0,0 +1,64 @@ +/* + * Copyright Skodjob authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). + */ +package io.odh.test.utils; + +import io.fabric8.kubernetes.api.model.ContainerStatus; +import io.fabric8.kubernetes.api.model.LabelSelector; +import io.fabric8.kubernetes.api.model.Pod; +import io.fabric8.kubernetes.client.readiness.Readiness; +import io.odh.test.TestConstants; +import io.odh.test.TestUtils; +import io.odh.test.framework.manager.ResourceManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.time.Duration; +import java.util.List; + +public class PodUtils { + + private static final Logger LOGGER = LoggerFactory.getLogger(PodUtils.class); + private static final long DELETION_TIMEOUT = Duration.ofMinutes(5).toMillis(); + private static final long READINESS_TIMEOUT = Duration.ofMinutes(10).toMillis(); + + private PodUtils() { } + + public static void waitForPodsReady(String namespaceName, LabelSelector selector, int expectPods, boolean containers, Runnable onTimeout) { + TestUtils.waitFor("readiness of all Pods matching: " + selector, + TestConstants.GLOBAL_POLL_INTERVAL_MEDIUM, READINESS_TIMEOUT, + () -> { + List pods = ResourceManager.getClient().listPods(namespaceName, selector); + if (pods.isEmpty() && expectPods == 0) { + LOGGER.debug("Expected Pods are ready"); + return true; + } + if (pods.isEmpty()) { + LOGGER.debug("Pods matching: {}/{} are not ready", namespaceName, selector); + return false; + } + if (pods.size() != expectPods) { + LOGGER.debug("Expected Pods: {}/{} are not ready", namespaceName, selector); + return false; + } + for (Pod pod : pods) { + if (!Readiness.isPodReady(pod)) { + LOGGER.debug("Pod not ready: {}/{}", namespaceName, pod.getMetadata().getName()); + return false; + } else { + if (containers) { + for (ContainerStatus cs : pod.getStatus().getContainerStatuses()) { + if (!Boolean.TRUE.equals(cs.getReady())) { + LOGGER.debug("Container: {} of Pod: {}/{} not ready", cs.getName(), namespaceName, pod.getMetadata().getName()); + return false; + } + } + } + } + } + LOGGER.info("Pods matching: {}/{} are
ready", namespaceName, selector); + return true; + }, onTimeout); + } +} diff --git a/src/main/resources/notebook.yaml b/src/main/resources/notebook.yaml new file mode 100644 index 00000000..15d91261 --- /dev/null +++ b/src/main/resources/notebook.yaml @@ -0,0 +1,146 @@ +apiVersion: kubeflow.org/v1 +kind: Notebook +metadata: + annotations: + notebooks.opendatahub.io/inject-oauth: 'true' + opendatahub.io/service-mesh: 'false' + opendatahub.io/accelerator-name: '' + labels: + app: my-workbench + opendatahub.io/dashboard: 'true' + opendatahub.io/odh-managed: 'true' + sidecar.istio.io/inject: 'false' + name: my-workbench + namespace: my-project +spec: + template: + spec: + affinity: {} + containers: + - env: + - name: NOTEBOOK_ARGS + value: |- + --ServerApp.port=8888 + --ServerApp.token='' + --ServerApp.password='' + --ServerApp.base_url=/notebook/my-project/my-workbench + --ServerApp.quit_button=False + --ServerApp.tornado_settings={"user":"","hub_host":"odh_dashboard_route","hub_prefix":"/projects/my-project"} + - name: JUPYTER_IMAGE + value: image-registry.openshift-image-registry.svc:5000/opendatahub/jupyter-pytorch-notebook:2023.2 + image: image-registry.openshift-image-registry.svc:5000/opendatahub/jupyter-pytorch-notebook:2023.2 + imagePullPolicy: Always + livenessProbe: + failureThreshold: 3 + httpGet: + path: /notebook/my-project/my-workbench/api + port: notebook-port + scheme: HTTP + initialDelaySeconds: 10 + periodSeconds: 5 + successThreshold: 1 + timeoutSeconds: 1 + name: my-workbench + ports: + - containerPort: 8888 + name: notebook-port + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: /notebook/my-project/my-workbench/api + port: notebook-port + scheme: HTTP + initialDelaySeconds: 10 + periodSeconds: 5 + successThreshold: 1 + timeoutSeconds: 1 + resources: + limits: + cpu: "2" + memory: 4Gi + requests: + cpu: "1" + memory: 1Gi + volumeMounts: + - mountPath: /opt/app-root/src + name: my-workbench + - mountPath: /dev/shm + name: shm + workingDir: /opt/app-root/src + - args: + - --provider=openshift + - --https-address=:8443 + - --http-address= + - --openshift-service-account=my-workbench + - --cookie-secret-file=/etc/oauth/config/cookie_secret + - --cookie-expire=24h0m0s + - --tls-cert=/etc/tls/private/tls.crt + - --tls-key=/etc/tls/private/tls.key + - --upstream=http://localhost:8888 + - --upstream-ca=/var/run/secrets/kubernetes.io/serviceaccount/ca.crt + - --email-domain=* + - --skip-provider-button + - --openshift-sar={"verb":"get","resource":"notebooks","resourceAPIGroup":"kubeflow.org","resourceName":"my-workbench","namespace":"$(NAMESPACE)"} + - --logout-url=odh_dashboard_route/projects/my-project?notebookLogout=my-workbench + env: + - name: NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace + image: registry.redhat.io/openshift4/ose-oauth-proxy:v4.10 + imagePullPolicy: Always + livenessProbe: + failureThreshold: 3 + httpGet: + path: /oauth/healthz + port: oauth-proxy + scheme: HTTPS + initialDelaySeconds: 30 + periodSeconds: 5 + successThreshold: 1 + timeoutSeconds: 1 + name: oauth-proxy + ports: + - containerPort: 8443 + name: oauth-proxy + protocol: TCP + readinessProbe: + failureThreshold: 3 + httpGet: + path: /oauth/healthz + port: oauth-proxy + scheme: HTTPS + initialDelaySeconds: 5 + periodSeconds: 5 + successThreshold: 1 + timeoutSeconds: 1 + resources: + limits: + cpu: 100m + memory: 64Mi + requests: + cpu: 100m + memory: 64Mi + volumeMounts: + - mountPath: /etc/oauth/config + name: oauth-config + - mountPath: 
/etc/tls/private + name: tls-certificates + enableServiceLinks: false + serviceAccountName: my-workbench + volumes: + - name: my-workbench + persistentVolumeClaim: + claimName: my-workbench + - emptyDir: + medium: Memory + name: shm + - name: oauth-config + secret: + defaultMode: 420 + secretName: my-workbench-oauth-config + - name: tls-certificates + secret: + defaultMode: 420 + secretName: my-workbench-tls diff --git a/src/test/java/io/odh/test/e2e/Abstract.java b/src/test/java/io/odh/test/e2e/Abstract.java index 7a5078ec..691ec6cf 100644 --- a/src/test/java/io/odh/test/e2e/Abstract.java +++ b/src/test/java/io/odh/test/e2e/Abstract.java @@ -6,13 +6,21 @@ import io.odh.test.framework.ExtensionContextParameterResolver; import io.odh.test.framework.TestCallbackListener; +import io.odh.test.Environment; import io.odh.test.framework.manager.ResourceManager; import io.odh.test.framework.TestExceptionCallbackListener; +import io.odh.test.install.InstallTypes; +import io.odh.test.install.OlmInstall; +import io.odh.test.framework.TestSeparator; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.DisplayNameGeneration; import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.TestInstance; import org.junit.jupiter.api.extension.ExtendWith; +import static org.junit.jupiter.api.Assertions.assertTrue; + @DisplayNameGeneration(DisplayNameGenerator.IndicativeSentences.class) @ExtendWith(TestExceptionCallbackListener.class) @ExtendWith(TestCallbackListener.class) @@ -24,4 +32,22 @@ public class Abstract { ResourceManager.getInstance(); } + @BeforeAll + void setupEnvironment() { + if (Environment.OPERATOR_INSTALL_TYPE.equals(InstallTypes.OLM.toString())) { + OlmInstall olmInstall = new OlmInstall(); + olmInstall.create(); + } else if (Environment.OPERATOR_INSTALL_TYPE.equals(InstallTypes.BUNDLE.toString())) { + LOGGER.error("Bundle install is not implemented yet!"); + assertTrue(false); + } else { + LOGGER.error("Unknown install type {}! You should implement it at first!", Environment.OPERATOR_INSTALL_TYPE); + assertTrue(false); + } + } + + @AfterAll + void teardownEnvironment() { + ResourceManager.getInstance().deleteResources(); + } } diff --git a/src/test/java/io/odh/test/e2e/standard/NotebookST.java b/src/test/java/io/odh/test/e2e/standard/NotebookST.java new file mode 100644 index 00000000..e65f0059 --- /dev/null +++ b/src/test/java/io/odh/test/e2e/standard/NotebookST.java @@ -0,0 +1,122 @@ +/* + * Copyright Skodjob authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). 
+ */ +package io.odh.test.e2e.standard; + +import io.fabric8.kubernetes.api.model.LabelSelector; +import io.fabric8.kubernetes.api.model.LabelSelectorBuilder; +import io.fabric8.kubernetes.api.model.Namespace; +import io.fabric8.kubernetes.api.model.NamespaceBuilder; +import io.fabric8.kubernetes.api.model.PersistentVolumeClaim; +import io.fabric8.kubernetes.api.model.PersistentVolumeClaimBuilder; +import io.fabric8.kubernetes.api.model.Quantity; +import io.odh.test.OdhAnnotationsLabels; +import io.odh.test.e2e.Abstract; +import io.odh.test.framework.manager.ResourceManager; +import io.odh.test.framework.manager.resources.NotebookResource; +import io.odh.test.utils.PodUtils; +import io.opendatahub.datasciencecluster.v1.DataScienceCluster; +import io.opendatahub.datasciencecluster.v1.DataScienceClusterBuilder; +import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.ComponentsBuilder; +import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Codeflare; +import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.CodeflareBuilder; +import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Dashboard; +import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.DashboardBuilder; +import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Datasciencepipelines; +import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.DatasciencepipelinesBuilder; +import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Kserve; +import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.KserveBuilder; +import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Workbenches; +import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.WorkbenchesBuilder; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.kubeflow.v1.Notebook; +import org.kubeflow.v1.NotebookBuilder; + +import java.io.IOException; + +public class NotebookST extends Abstract { + + private static final String DS_PROJECT_NAME = "test-notebooks"; + private static final String DS_PROJECT_NAMESPACE = "test-notebooks"; + + private static final String NTB_NAME = "test-odh-notebook"; + private static final String NTB_NAMESPACE = "test-odh-notebook"; + @Test + void testCreateSimpleNotebook() throws IOException { + // Create namespace + Namespace ns = new NamespaceBuilder() + .withNewMetadata() + .withName(NTB_NAMESPACE) + .addToLabels(OdhAnnotationsLabels.LABEL_DASHBOARD, "true") + .addToAnnotations(OdhAnnotationsLabels.ANNO_SERVICE_MESH, "false") + .endMetadata() + .build(); + ResourceManager.getInstance().createResourceWithoutWait(ns); + + PersistentVolumeClaim pvc = new PersistentVolumeClaimBuilder() + .withNewMetadata() + .withName(NTB_NAME) + .withNamespace(NTB_NAMESPACE) + .addToLabels(OdhAnnotationsLabels.LABEL_DASHBOARD, "true") + .endMetadata() + .withNewSpec() + .addToAccessModes("ReadWriteOnce") + .withNewResources() + .addToRequests("storage", new Quantity("10Gi")) + .endResources() + .withVolumeMode("Filesystem") + .endSpec() + .build(); + ResourceManager.getInstance().createResourceWithoutWait(pvc); + + Notebook notebook = new NotebookBuilder(NotebookResource.loadDefaultNotebook(NTB_NAMESPACE, NTB_NAME)).build(); + ResourceManager.getInstance().createResourceWithoutWait(notebook); + + LabelSelector lblSelector = new LabelSelectorBuilder() + 
.withMatchLabels(ResourceManager.getClient().listPods(NTB_NAMESPACE).get(0).getMetadata().getLabels()) + .build(); + + PodUtils.waitForPodsReady(NTB_NAMESPACE, lblSelector, 1, true, () -> { }); + + } + + @BeforeAll + void deployDataScienceCluster() { + // Create namespace + Namespace ns = new NamespaceBuilder().withNewMetadata().withName(DS_PROJECT_NAMESPACE).endMetadata().build(); + ResourceManager.getInstance().createResourceWithoutWait(ns); + + // Create DSC + DataScienceCluster dsc = new DataScienceClusterBuilder() + .withNewMetadata() + .withName(DS_PROJECT_NAME) + .withNamespace(DS_PROJECT_NAMESPACE) + .endMetadata() + .withNewSpec() + .withComponents( + new ComponentsBuilder() + .withWorkbenches( + new WorkbenchesBuilder().withManagementState(Workbenches.ManagementState.MANAGED).build() + ) + .withDashboard( + new DashboardBuilder().withManagementState(Dashboard.ManagementState.MANAGED).build() + ) + .withKserve( + new KserveBuilder().withManagementState(Kserve.ManagementState.REMOVED).build() + ) + .withCodeflare( + new CodeflareBuilder().withManagementState(Codeflare.ManagementState.REMOVED).build() + ) + .withDatasciencepipelines( + new DatasciencepipelinesBuilder().withManagementState(Datasciencepipelines.ManagementState.REMOVED).build() + ) + .build()) + .endSpec() + .build(); + // Deploy DSC + ResourceManager.getInstance().createResourceWithWait(dsc); + // TODO - there must be a wait for DSC pods here + } +} From 28015104ae441b044ac74c272283a8656fbfcaaa Mon Sep 17 00:00:00 2001 From: Jakub Stejskal Date: Wed, 6 Dec 2023 13:26:53 +0100 Subject: [PATCH 2/2] Add some fixes, proper cleanup, etc Signed-off-by: Jakub Stejskal --- src/main/java/io/odh/test/Environment.java | 2 +- .../test/framework/TestCallbackListener.java | 2 + .../framework/manager/ResourceManager.java | 42 +++++++++++++----- .../resources/DataScienceClusterResource.java | 33 +++++++++++--- .../io/odh/test/install/BundleInstall.java | 5 ++- .../java/io/odh/test/install/OlmInstall.java | 2 + .../java/io/odh/test/platform/KubeUtils.java | 18 ++++++++ src/test/java/io/odh/test/e2e/Abstract.java | 27 ------------ .../e2e/standard/DataScienceClusterIT.java | 3 +- .../io/odh/test/e2e/standard/NotebookST.java | 8 ++-- .../io/odh/test/e2e/standard/OdhInstall.java | 38 ---------------- .../test/e2e/standard/StandardAbstract.java | 44 +++++++++++++++++++ 12 files changed, 134 insertions(+), 90 deletions(-) delete mode 100644 src/test/java/io/odh/test/e2e/standard/OdhInstall.java create mode 100644 src/test/java/io/odh/test/e2e/standard/StandardAbstract.java diff --git a/src/main/java/io/odh/test/Environment.java b/src/main/java/io/odh/test/Environment.java index b8e79794..e47f0856 100644 --- a/src/main/java/io/odh/test/Environment.java +++ b/src/main/java/io/odh/test/Environment.java @@ -81,7 +81,7 @@ public class Environment { public static final String OLM_OPERATOR_CHANNEL = getOrDefault(OLM_OPERATOR_CHANNEL_ENV, OLM_OPERATOR_CHANNEL_DEFAULT); public static final String OLM_OPERATOR_VERSION = getOrDefault(OLM_OPERATOR_VERSION_ENV, OLM_OPERATOR_VERSION_DEFAULT); - public static final String OPERATOR_INSTALL_TYPE = getOrDefault(OPERATOR_INSTALL_TYPE_ENV, InstallTypes.OLM.toString()); + public static final String OPERATOR_INSTALL_TYPE = getOrDefault(OPERATOR_INSTALL_TYPE_ENV, InstallTypes.BUNDLE.toString()); private Environment() { } diff --git a/src/main/java/io/odh/test/framework/TestCallbackListener.java b/src/main/java/io/odh/test/framework/TestCallbackListener.java index 4efddb4b..ed04bcea 100644 ---
a/src/main/java/io/odh/test/framework/TestCallbackListener.java +++ b/src/main/java/io/odh/test/framework/TestCallbackListener.java @@ -5,6 +5,7 @@ package io.odh.test.framework; import io.odh.test.framework.manager.ResourceManager; +import io.odh.test.platform.KubeUtils; import org.junit.jupiter.api.extension.AfterAllCallback; import org.junit.jupiter.api.extension.AfterEachCallback; import org.junit.jupiter.api.extension.BeforeAllCallback; @@ -41,6 +42,7 @@ public void beforeEach(ExtensionContext extensionContext) throws Exception { public void afterAll(ExtensionContext extensionContext) throws Exception { ResourceManager.getInstance().switchToClassResourceStack(); ResourceManager.getInstance().deleteResources(); + KubeUtils.clearOdhCRDs(); } @Override diff --git a/src/main/java/io/odh/test/framework/manager/ResourceManager.java b/src/main/java/io/odh/test/framework/manager/ResourceManager.java index 4e07c9d3..c2040eb6 100644 --- a/src/main/java/io/odh/test/framework/manager/ResourceManager.java +++ b/src/main/java/io/odh/test/framework/manager/ResourceManager.java @@ -7,6 +7,7 @@ import io.fabric8.kubernetes.api.model.HasMetadata; import io.fabric8.kubernetes.api.model.admissionregistration.v1.ValidatingWebhookConfiguration; import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition; +import io.fabric8.kubernetes.api.model.apps.Deployment; import io.fabric8.kubernetes.api.model.rbac.ClusterRole; import io.fabric8.kubernetes.api.model.rbac.ClusterRoleBinding; import io.odh.test.TestConstants; @@ -16,6 +17,10 @@ import io.odh.test.framework.manager.resources.OperatorGroupResource; import io.odh.test.framework.manager.resources.SubscriptionResource; import io.odh.test.platform.KubeClient; +import io.odh.test.platform.cmdClient.KubeCmdClient; +import io.odh.test.platform.cmdClient.Oc; +import io.odh.test.utils.DeploymentUtils; +import io.opendatahub.datasciencecluster.v1.DataScienceCluster; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,6 +35,7 @@ public class ResourceManager { private static ResourceManager instance; private static KubeClient client; + private static KubeCmdClient kubeCmdClient; static final Stack CLASS_RESOURCE_STACK = new Stack<>(); static final Stack METHOD_RESOURCE_STACK = new Stack<>(); @@ -40,6 +46,7 @@ public static synchronized ResourceManager getInstance() { if (instance == null) { instance = new ResourceManager(); client = new KubeClient(TestConstants.DEFAULT_NAMESPACE); + kubeCmdClient = new Oc(); } return instance; } @@ -48,6 +55,10 @@ public static KubeClient getClient() { return client; } + public static KubeCmdClient getKubeCmdClient() { + return kubeCmdClient; + } + private final ResourceType[] resourceTypes = new ResourceType[]{ new SubscriptionResource(), new OperatorGroupResource(), @@ -82,6 +93,14 @@ private void createResource(boolean waitReady, T... reso for (T resource : resources) { ResourceType type = findResourceType(resource); + synchronized (this) { + resourceStackPointer.push( + new ResourceItem( + () -> deleteResource(resource), + resource + )); + } + if (resource.getMetadata().getNamespace() == null) { LOGGER.info("Creating/Updating {} {}", resource.getKind(), resource.getMetadata().getName()); @@ -93,12 +112,21 @@ private void createResource(boolean waitReady, T... 
reso if (type == null) { if (resource instanceof Deployment) { Deployment deployment = (Deployment) resource; - client.getClient().apps().deployments().resource(deployment).create(); + if (client.getClient().apps().deployments().resource(deployment).get() != null) { + client.getClient().apps().deployments().resource(deployment).update(); + } else { + client.getClient().apps().deployments().resource(deployment).create(); + } if (waitReady) { DeploymentUtils.waitForDeploymentReady(resource.getMetadata().getNamespace(), resource.getMetadata().getName()); } } else { - client.getClient().resource(resource).create(); + if (client.getClient().resource(resource).get() != null) { + client.getClient().resource(resource).update(); + } else { + client.getClient().resource(resource).create(); + } + } } else { type.create(resource); @@ -107,14 +135,6 @@ private void createResource(boolean waitReady, T... reso String.format("Timed out waiting for %s %s/%s to be ready", resource.getKind(), resource.getMetadata().getNamespace(), resource.getMetadata().getName())); } } - - synchronized (this) { - resourceStackPointer.push( - new ResourceItem( - () -> deleteResource(resource), - resource - )); - } } } @@ -174,7 +194,7 @@ public final boolean waitResourceCondition(T resource, R assertNotNull(resource.getMetadata().getName()); // cluster role binding and custom resource definition does not need namespace... - if (!(resource instanceof ClusterRoleBinding || resource instanceof CustomResourceDefinition || resource instanceof ClusterRole || resource instanceof ValidatingWebhookConfiguration)) { + if (!(resource instanceof ClusterRoleBinding || resource instanceof CustomResourceDefinition || resource instanceof ClusterRole || resource instanceof ValidatingWebhookConfiguration || resource instanceof DataScienceCluster)) { assertNotNull(resource.getMetadata().getNamespace()); } diff --git a/src/main/java/io/odh/test/framework/manager/resources/DataScienceClusterResource.java b/src/main/java/io/odh/test/framework/manager/resources/DataScienceClusterResource.java index 7f3f85ca..dc7a6c14 100644 --- a/src/main/java/io/odh/test/framework/manager/resources/DataScienceClusterResource.java +++ b/src/main/java/io/odh/test/framework/manager/resources/DataScienceClusterResource.java @@ -7,11 +7,18 @@ import io.fabric8.kubernetes.api.model.KubernetesResourceList; import io.fabric8.kubernetes.client.dsl.MixedOperation; import io.fabric8.kubernetes.client.dsl.Resource; +import io.odh.test.TestConstants; +import io.odh.test.TestUtils; import io.odh.test.framework.manager.ResourceManager; import io.odh.test.framework.manager.ResourceType; +import io.odh.test.platform.KubeUtils; import io.opendatahub.datasciencecluster.v1.DataScienceCluster; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class DataScienceClusterResource implements ResourceType { + + private static final Logger LOGGER = LoggerFactory.getLogger(DataScienceClusterResource.class); @Override public String getKind() { return "DataScienceCluster"; @@ -19,27 +26,43 @@ public String getKind() { @Override public DataScienceCluster get(String namespace, String name) { - return dataScienceCLusterClient().inNamespace(namespace).withName(name).get(); + return dataScienceCLusterClient().withName(name).get(); } @Override public void create(DataScienceCluster resource) { - dataScienceCLusterClient().inNamespace(resource.getMetadata().getNamespace()).resource(resource).create(); + dataScienceCLusterClient().resource(resource).create(); } @Override public void 
delete(DataScienceCluster resource) { - dataScienceCLusterClient().inNamespace(resource.getMetadata().getNamespace()).withName(resource.getMetadata().getName()).delete(); + dataScienceCLusterClient().withName(resource.getMetadata().getName()).delete(); } @Override public void update(DataScienceCluster resource) { - dataScienceCLusterClient().inNamespace(resource.getMetadata().getNamespace()).resource(resource).update(); + dataScienceCLusterClient().resource(resource).update(); } @Override public boolean waitForReadiness(DataScienceCluster resource) { - return resource != null; + String message = String.format("DataScienceCluster %s readiness", resource.getMetadata().getName()); + TestUtils.waitFor(message, TestConstants.GLOBAL_POLL_INTERVAL_SHORT, TestConstants.GLOBAL_TIMEOUT, () -> { + boolean dscReady; + + DataScienceCluster dsc = dataScienceCLusterClient().withName(resource.getMetadata().getName()).get(); + + String dashboardStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "dashboardReady").getStatus(); + LOGGER.debug("DataScienceCluster {} dashboard status: {}", resource.getMetadata().getName(), dashboardStatus); + dscReady = dashboardStatus.equals("True"); + + String workbenchesStatus = KubeUtils.getDscConditionByType(dsc.getStatus().getConditions(), "workbenchesReady").getStatus(); + LOGGER.debug("DataScienceCluster {} workbenches status: {}", resource.getMetadata().getName(), workbenchesStatus); + dscReady = dscReady && workbenchesStatus.equals("True"); + + return dscReady; + }, () -> { }); + return true; } public static MixedOperation, Resource> dataScienceCLusterClient() { diff --git a/src/main/java/io/odh/test/install/BundleInstall.java b/src/main/java/io/odh/test/install/BundleInstall.java index 52b0e521..34633346 100644 --- a/src/main/java/io/odh/test/install/BundleInstall.java +++ b/src/main/java/io/odh/test/install/BundleInstall.java @@ -10,7 +10,9 @@ import io.odh.test.Environment; import io.odh.test.TestConstants; import io.odh.test.TestUtils; +import io.odh.test.framework.manager.ResourceItem; import io.odh.test.framework.manager.ResourceManager; +import io.odh.test.platform.KubeUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -55,7 +57,8 @@ public void printResources() { }); } - public void installBundle() { + public void create() { ResourceManager.getInstance().createResourceWithWait(resources.toArray(new HasMetadata[0])); + ResourceManager.getInstance().pushToStack(new ResourceItem(KubeUtils::deleteDefaultDSCI, null)); } } diff --git a/src/main/java/io/odh/test/install/OlmInstall.java b/src/main/java/io/odh/test/install/OlmInstall.java index 04290348..51baa53a 100644 --- a/src/main/java/io/odh/test/install/OlmInstall.java +++ b/src/main/java/io/odh/test/install/OlmInstall.java @@ -12,6 +12,7 @@ import io.odh.test.framework.manager.ResourceItem; import io.odh.test.framework.manager.ResourceManager; import io.odh.test.framework.manager.resources.OperatorGroupResource; +import io.odh.test.platform.KubeUtils; import io.odh.test.utils.DeploymentUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -68,6 +69,7 @@ private void createAndModifySubscription() { Subscription subscription = prepareSubscription(); ResourceManager.getInstance().createResourceWithWait(subscription); + ResourceManager.getInstance().pushToStack(new ResourceItem(KubeUtils::deleteDefaultDSCI, null)); } public void updateSubscription() { Subscription subscription = prepareSubscription(); diff --git a/src/main/java/io/odh/test/platform/KubeUtils.java 
b/src/main/java/io/odh/test/platform/KubeUtils.java index 73f470e6..32345795 100644 --- a/src/main/java/io/odh/test/platform/KubeUtils.java +++ b/src/main/java/io/odh/test/platform/KubeUtils.java @@ -4,6 +4,7 @@ */ package io.odh.test.platform; +import io.odh.test.framework.manager.ResourceManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -21,6 +22,23 @@ public static org.kubeflow.v1.notebookstatus.Conditions getNotebookConditionByTy return conditions.stream().filter(c -> c.getType().equals(type)).findFirst().orElseGet(null); } + public static void clearOdhCRDs() { + ResourceManager.getClient().getClient().apiextensions().v1().customResourceDefinitions().list().getItems() + .stream().filter(crd -> crd.getMetadata().getName().contains("opendatahub.io")).toList() + .forEach(crd -> { + LOGGER.info("Deleting CRD {}", crd.getMetadata().getName()); + ResourceManager.getClient().getClient().resource(crd).delete(); + }); + } + + /** + * TODO - this should be removed when https://github.com/opendatahub-io/opendatahub-operator/issues/765 will be resolved + */ + public static void deleteDefaultDSCI() { + LOGGER.info("Clearing DSCI ..."); + ResourceManager.getKubeCmdClient().exec("delete", "dsci", "--all"); + } + private KubeUtils() { } } diff --git a/src/test/java/io/odh/test/e2e/Abstract.java b/src/test/java/io/odh/test/e2e/Abstract.java index 691ec6cf..6a69f105 100644 --- a/src/test/java/io/odh/test/e2e/Abstract.java +++ b/src/test/java/io/odh/test/e2e/Abstract.java @@ -6,21 +6,13 @@ import io.odh.test.framework.ExtensionContextParameterResolver; import io.odh.test.framework.TestCallbackListener; -import io.odh.test.Environment; import io.odh.test.framework.manager.ResourceManager; import io.odh.test.framework.TestExceptionCallbackListener; -import io.odh.test.install.InstallTypes; -import io.odh.test.install.OlmInstall; -import io.odh.test.framework.TestSeparator; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.DisplayNameGeneration; import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.TestInstance; import org.junit.jupiter.api.extension.ExtendWith; -import static org.junit.jupiter.api.Assertions.assertTrue; - @DisplayNameGeneration(DisplayNameGenerator.IndicativeSentences.class) @ExtendWith(TestExceptionCallbackListener.class) @ExtendWith(TestCallbackListener.class) @@ -31,23 +23,4 @@ public class Abstract { static { ResourceManager.getInstance(); } - - @BeforeAll - void setupEnvironment() { - if (Environment.OPERATOR_INSTALL_TYPE.equals(InstallTypes.OLM.toString())) { - OlmInstall olmInstall = new OlmInstall(); - olmInstall.create(); - } else if (Environment.OPERATOR_INSTALL_TYPE.equals(InstallTypes.BUNDLE.toString())) { - LOGGER.error("Bundle install is not implemented yet!"); - assertTrue(false); - } else { - LOGGER.error("Unknown install type {}! 
You should implement it at first!", Environment.OPERATOR_INSTALL_TYPE); - assertTrue(false); - } - } - - @AfterAll - void teardownEnvironment() { - ResourceManager.getInstance().deleteResources(); - } } diff --git a/src/test/java/io/odh/test/e2e/standard/DataScienceClusterIT.java b/src/test/java/io/odh/test/e2e/standard/DataScienceClusterIT.java index 46848745..f234ebd3 100644 --- a/src/test/java/io/odh/test/e2e/standard/DataScienceClusterIT.java +++ b/src/test/java/io/odh/test/e2e/standard/DataScienceClusterIT.java @@ -8,7 +8,6 @@ import io.fabric8.kubernetes.api.model.NamespaceBuilder; import io.fabric8.kubernetes.client.dsl.MixedOperation; import io.fabric8.kubernetes.client.dsl.Resource; -import io.odh.test.e2e.Abstract; import io.odh.test.framework.manager.ResourceManager; import io.opendatahub.datasciencecluster.v1.DataScienceCluster; import io.opendatahub.datasciencecluster.v1.DataScienceClusterBuilder; @@ -31,7 +30,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; @Tag("standard") -public class DataScienceClusterIT extends Abstract { +public class DataScienceClusterIT extends StandardAbstract { private static final String DS_PROJECT_NAME = "test-dsp"; private static final String DS_PROJECT_NAMESPACE = "test-ns-ds"; diff --git a/src/test/java/io/odh/test/e2e/standard/NotebookST.java b/src/test/java/io/odh/test/e2e/standard/NotebookST.java index e65f0059..ae1267dd 100644 --- a/src/test/java/io/odh/test/e2e/standard/NotebookST.java +++ b/src/test/java/io/odh/test/e2e/standard/NotebookST.java @@ -12,7 +12,6 @@ import io.fabric8.kubernetes.api.model.PersistentVolumeClaimBuilder; import io.fabric8.kubernetes.api.model.Quantity; import io.odh.test.OdhAnnotationsLabels; -import io.odh.test.e2e.Abstract; import io.odh.test.framework.manager.ResourceManager; import io.odh.test.framework.manager.resources.NotebookResource; import io.odh.test.utils.PodUtils; @@ -35,8 +34,9 @@ import org.kubeflow.v1.NotebookBuilder; import java.io.IOException; +import java.util.Map; -public class NotebookST extends Abstract { +public class NotebookST extends StandardAbstract { private static final String DS_PROJECT_NAME = "test-notebooks"; private static final String DS_PROJECT_NAMESPACE = "test-notebooks"; @@ -75,7 +75,7 @@ void testCreateSimpleNotebook() throws IOException { ResourceManager.getInstance().createResourceWithoutWait(notebook); LabelSelector lblSelector = new LabelSelectorBuilder() - .withMatchLabels(ResourceManager.getClient().listPods(NTB_NAMESPACE).get(0).getMetadata().getLabels()) + .withMatchLabels(Map.of("app", NTB_NAME)) .build(); PodUtils.waitForPodsReady(NTB_NAMESPACE, lblSelector, 1, true, () -> { }); @@ -92,7 +92,6 @@ void deployDataScienceCluster() { DataScienceCluster dsc = new DataScienceClusterBuilder() .withNewMetadata() .withName(DS_PROJECT_NAME) - .withNamespace(DS_PROJECT_NAMESPACE) .endMetadata() .withNewSpec() .withComponents( @@ -117,6 +116,5 @@ void deployDataScienceCluster() { .build(); // Deploy DSC ResourceManager.getInstance().createResourceWithWait(dsc); - // TODO - tady musi byt wait na DSC pody } } diff --git a/src/test/java/io/odh/test/e2e/standard/OdhInstall.java b/src/test/java/io/odh/test/e2e/standard/OdhInstall.java deleted file mode 100644 index 0e406177..00000000 --- a/src/test/java/io/odh/test/e2e/standard/OdhInstall.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright Skodjob authors. - * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). 
 */ -package io.odh.test.e2e.standard; - -import io.fabric8.kubernetes.api.model.apps.Deployment; -import io.odh.test.e2e.Abstract; -import io.odh.test.framework.manager.ResourceManager; -import io.odh.test.install.BundleInstall; -import io.odh.test.install.OlmInstall; -import org.junit.jupiter.api.Test; - -import java.io.IOException; - -import static org.junit.jupiter.api.Assertions.assertNotNull; - -public class OdhInstall extends Abstract { - - @Test - void testInstallOdhOlm() { - OlmInstall olmInstall = new OlmInstall(); - olmInstall.create(); - - Deployment dep = ResourceManager.getClient().getDeployment(olmInstall.getNamespace(), olmInstall.getDeploymentName()); - assertNotNull(dep); - } - - @Test - void testInstallOdhBundle() throws IOException { - BundleInstall bundle = new BundleInstall(); - bundle.printResources(); - bundle.installBundle(); - - Deployment dep = ResourceManager.getClient().getDeployment(bundle.getNamespace(), bundle.getDeploymentName()); - assertNotNull(dep); - } -} diff --git a/src/test/java/io/odh/test/e2e/standard/StandardAbstract.java b/src/test/java/io/odh/test/e2e/standard/StandardAbstract.java new file mode 100644 index 00000000..418e1ed2 --- /dev/null +++ b/src/test/java/io/odh/test/e2e/standard/StandardAbstract.java @@ -0,0 +1,44 @@ +/* + * Copyright Skodjob authors. + * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). + */ +package io.odh.test.e2e.standard; + +import io.odh.test.Environment; +import io.odh.test.e2e.Abstract; +import io.odh.test.framework.manager.ResourceManager; +import io.odh.test.install.BundleInstall; +import io.odh.test.install.InstallTypes; +import io.odh.test.install.OlmInstall; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; + +import static org.junit.jupiter.api.Assertions.fail; + +public class StandardAbstract extends Abstract { + + private static final Logger LOGGER = LoggerFactory.getLogger(StandardAbstract.class); + + @BeforeAll + void setupEnvironment() throws IOException { + if (Environment.OPERATOR_INSTALL_TYPE.equals(InstallTypes.OLM.toString())) { + OlmInstall olmInstall = new OlmInstall(); + olmInstall.create(); + } else if (Environment.OPERATOR_INSTALL_TYPE.equals(InstallTypes.BUNDLE.toString())) { + BundleInstall bundleInstall = new BundleInstall(); + bundleInstall.create(); + } else { + LOGGER.error("Unknown install type {}! You should implement it first!", Environment.OPERATOR_INSTALL_TYPE); + fail(String.format("Unknown install type %s! You should implement it first!", Environment.OPERATOR_INSTALL_TYPE)); + } + } + + @AfterAll + void teardownEnvironment() { + ResourceManager.getInstance().deleteResources(); + } +}