diff --git a/pom.xml b/pom.xml
index 6b1004f7..4354e587 100644
--- a/pom.xml
+++ b/pom.xml
@@ -57,6 +57,7 @@
10.12.5
3.3.1
1.7.1
+    <commons.io.version>2.15.1</commons.io.version>
@@ -161,6 +162,13 @@
       <version>${slf4j.version}</version>
       <scope>compile</scope>
+
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <version>${commons.io.version}</version>
+      <scope>compile</scope>
+    </dependency>
diff --git a/src/main/java/io/odh/test/Environment.java b/src/main/java/io/odh/test/Environment.java
index 4ff5d115..b8e79794 100644
--- a/src/main/java/io/odh/test/Environment.java
+++ b/src/main/java/io/odh/test/Environment.java
@@ -4,6 +4,7 @@
*/
package io.odh.test;
+import io.odh.test.install.InstallTypes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -44,6 +45,7 @@ public class Environment {
private static final String OLM_APP_BUNDLE_PREFIX_ENV = "OLM_APP_BUNDLE_PREFIX";
private static final String OLM_OPERATOR_VERSION_ENV = "OLM_OPERATOR_VERSION";
private static final String OLM_OPERATOR_CHANNEL_ENV = "OLM_OPERATOR_CHANNEL";
+ private static final String OPERATOR_INSTALL_TYPE_ENV = "OPERATOR_INSTALL_TYPE";
/**
* Defaults
@@ -78,6 +80,9 @@ public class Environment {
public static final String OLM_APP_BUNDLE_PREFIX = getOrDefault(OLM_APP_BUNDLE_PREFIX_ENV, OLM_APP_BUNDLE_PREFIX_DEFAULT);
public static final String OLM_OPERATOR_CHANNEL = getOrDefault(OLM_OPERATOR_CHANNEL_ENV, OLM_OPERATOR_CHANNEL_DEFAULT);
public static final String OLM_OPERATOR_VERSION = getOrDefault(OLM_OPERATOR_VERSION_ENV, OLM_OPERATOR_VERSION_DEFAULT);
+
+ public static final String OPERATOR_INSTALL_TYPE = getOrDefault(OPERATOR_INSTALL_TYPE_ENV, InstallTypes.OLM.toString());
+
private Environment() { }
static {
diff --git a/src/main/java/io/odh/test/OdhAnnotationsLabels.java b/src/main/java/io/odh/test/OdhAnnotationsLabels.java
new file mode 100644
index 00000000..fdf78e5b
--- /dev/null
+++ b/src/main/java/io/odh/test/OdhAnnotationsLabels.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright Skodjob authors.
+ * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
+ */
+package io.odh.test;
+
+public class OdhAnnotationsLabels {
+ public static final String OPENSHIFT_DOMAIN = "openshift.io/";
+ public static final String ODH_DOMAIN = "opendatahub.io/";
+
+ public static final String LABEL_DASHBOARD = ODH_DOMAIN + "dashboard";
+ public static final String LABEL_ODH_MANAGED = ODH_DOMAIN + "odh-managed";
+ public static final String LABEL_SIDECAR_ISTIO_INJECT = "sidecar.istio.io/inject";
+
+ public static final String ANNO_SERVICE_MESH = ODH_DOMAIN + "service-mesh";
+ public static final String ANNO_NTB_INJECT_OAUTH = "notebooks." + ODH_DOMAIN + "inject-oauth";
+
+}
diff --git a/src/main/java/io/odh/test/TestUtils.java b/src/main/java/io/odh/test/TestUtils.java
index 28127461..528a441d 100644
--- a/src/main/java/io/odh/test/TestUtils.java
+++ b/src/main/java/io/odh/test/TestUtils.java
@@ -4,10 +4,14 @@
*/
package io.odh.test;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.exc.InvalidFormatException;
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import io.odh.test.framework.WaitException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
@@ -174,4 +178,15 @@ public static InputStream getFileFromResourceAsStream(String fileName) {
}
}
+
+    public static <T> T configFromYaml(String yamlFile, Class<T> c) {
+ ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
+ try {
+ return mapper.readValue(yamlFile, c);
+ } catch (InvalidFormatException e) {
+ throw new IllegalArgumentException(e);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
}
diff --git a/src/main/java/io/odh/test/framework/manager/ResourceManager.java b/src/main/java/io/odh/test/framework/manager/ResourceManager.java
index 5019276c..946e22a7 100644
--- a/src/main/java/io/odh/test/framework/manager/ResourceManager.java
+++ b/src/main/java/io/odh/test/framework/manager/ResourceManager.java
@@ -7,15 +7,15 @@
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.fabric8.kubernetes.api.model.admissionregistration.v1.ValidatingWebhookConfiguration;
import io.fabric8.kubernetes.api.model.apiextensions.v1.CustomResourceDefinition;
-import io.fabric8.kubernetes.api.model.apps.Deployment;
import io.fabric8.kubernetes.api.model.rbac.ClusterRole;
import io.fabric8.kubernetes.api.model.rbac.ClusterRoleBinding;
import io.odh.test.TestConstants;
import io.odh.test.TestUtils;
+import io.odh.test.framework.manager.resources.DataScienceClusterResource;
+import io.odh.test.framework.manager.resources.NotebookResource;
import io.odh.test.framework.manager.resources.OperatorGroupResource;
import io.odh.test.framework.manager.resources.SubscriptionResource;
import io.odh.test.platform.KubeClient;
-import io.odh.test.utils.DeploymentUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -48,6 +48,8 @@ public static KubeClient getClient() {
    private final ResourceType<?>[] resourceTypes = new ResourceType[]{
new SubscriptionResource(),
new OperatorGroupResource(),
+ new DataScienceClusterResource(),
+ new NotebookResource(),
};
@SafeVarargs
@@ -74,18 +76,10 @@ private void createResource(boolean waitReady, T... reso
}
if (type == null) {
- if (resource instanceof Deployment) {
- Deployment deployment = (Deployment) resource;
- client.getClient().apps().deployments().resource(deployment).create();
- if (waitReady) {
- DeploymentUtils.waitForDeploymentReady(resource.getMetadata().getNamespace(), resource.getMetadata().getName());
- }
- continue;
- } else {
- LOGGER.error("Invalid resource {} {}/{}. Please implement it in ResourceManager",
- resource.getKind(), resource.getMetadata().getNamespace(), resource.getMetadata().getName());
- continue;
- }
+ // Dealing with default Kubernetes objects
+ client.getClient().resource(resource).create();
+            LOGGER.info("Creating resource {} {}/{}.",
+ resource.getKind(), resource.getMetadata().getNamespace(), resource.getMetadata().getName());
} else {
type.create(resource);
if (waitReady) {
@@ -109,14 +103,10 @@ public final void deleteResource(T... resources) {
for (T resource : resources) {
            ResourceType<T> type = findResourceType(resource);
if (type == null) {
- if (resource instanceof Deployment) {
- Deployment deployment = (Deployment) resource;
- client.getClient().apps().deployments().resource(deployment).delete();
- DeploymentUtils.waitForDeploymentDeletion(resource.getMetadata().getNamespace(), resource.getMetadata().getName());
- } else {
- LOGGER.error("Invalid resource {} {}/{}. Please implement it in ResourceManager",
- resource.getKind(), resource.getMetadata().getNamespace(), resource.getMetadata().getName());
- }
+ // Dealing with default Kubernetes objects
+ client.getClient().resource(resource).delete();
+ LOGGER.info("Deleting resource {} {}/{}.",
+ resource.getKind(), resource.getMetadata().getNamespace(), resource.getMetadata().getName());
} else {
if (resource.getMetadata().getNamespace() == null) {
LOGGER.info("Deleting of {} {}",
diff --git a/src/main/java/io/odh/test/framework/manager/resources/DataScienceClusterResource.java b/src/main/java/io/odh/test/framework/manager/resources/DataScienceClusterResource.java
new file mode 100644
index 00000000..7f3f85ca
--- /dev/null
+++ b/src/main/java/io/odh/test/framework/manager/resources/DataScienceClusterResource.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright Skodjob authors.
+ * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
+ */
+package io.odh.test.framework.manager.resources;
+
+import io.fabric8.kubernetes.api.model.KubernetesResourceList;
+import io.fabric8.kubernetes.client.dsl.MixedOperation;
+import io.fabric8.kubernetes.client.dsl.Resource;
+import io.odh.test.framework.manager.ResourceManager;
+import io.odh.test.framework.manager.ResourceType;
+import io.opendatahub.datasciencecluster.v1.DataScienceCluster;
+
+public class DataScienceClusterResource implements ResourceType<DataScienceCluster> {
+ @Override
+ public String getKind() {
+ return "DataScienceCluster";
+ }
+
+ @Override
+ public DataScienceCluster get(String namespace, String name) {
+ return dataScienceCLusterClient().inNamespace(namespace).withName(name).get();
+ }
+
+ @Override
+ public void create(DataScienceCluster resource) {
+ dataScienceCLusterClient().inNamespace(resource.getMetadata().getNamespace()).resource(resource).create();
+ }
+
+ @Override
+ public void delete(DataScienceCluster resource) {
+ dataScienceCLusterClient().inNamespace(resource.getMetadata().getNamespace()).withName(resource.getMetadata().getName()).delete();
+ }
+
+ @Override
+ public void update(DataScienceCluster resource) {
+ dataScienceCLusterClient().inNamespace(resource.getMetadata().getNamespace()).resource(resource).update();
+ }
+
+ @Override
+ public boolean waitForReadiness(DataScienceCluster resource) {
+ return resource != null;
+ }
+
+    public static MixedOperation<DataScienceCluster, KubernetesResourceList<DataScienceCluster>, Resource<DataScienceCluster>> dataScienceCLusterClient() {
+ return ResourceManager.getClient().getClient().resources(DataScienceCluster.class);
+ }
+
+}
diff --git a/src/main/java/io/odh/test/framework/manager/resources/NotebookResource.java b/src/main/java/io/odh/test/framework/manager/resources/NotebookResource.java
new file mode 100644
index 00000000..f2afd1ba
--- /dev/null
+++ b/src/main/java/io/odh/test/framework/manager/resources/NotebookResource.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright Skodjob authors.
+ * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
+ */
+package io.odh.test.framework.manager.resources;
+
+import io.fabric8.kubernetes.api.model.KubernetesResourceList;
+import io.fabric8.kubernetes.client.dsl.MixedOperation;
+import io.fabric8.kubernetes.client.dsl.Resource;
+import io.odh.test.TestUtils;
+import io.odh.test.framework.manager.ResourceManager;
+import io.odh.test.framework.manager.ResourceType;
+import org.kubeflow.v1.Notebook;
+
+import java.io.IOException;
+import java.io.InputStream;
+import org.apache.commons.io.IOUtils;
+
+
+public class NotebookResource implements ResourceType<Notebook> {
+
+ private static final String NOTEBOOK_TEMPLATE_PATH = "notebook.yaml";
+ @Override
+ public String getKind() {
+ return "Notebook";
+ }
+
+ @Override
+ public Notebook get(String namespace, String name) {
+ return notebookClient().inNamespace(namespace).withName(name).get();
+ }
+
+ @Override
+ public void create(Notebook resource) {
+ notebookClient().inNamespace(resource.getMetadata().getNamespace()).resource(resource).create();
+ }
+
+ @Override
+ public void delete(Notebook resource) {
+ notebookClient().inNamespace(resource.getMetadata().getNamespace()).withName(resource.getMetadata().getName()).delete();
+ }
+
+ @Override
+ public void update(Notebook resource) {
+ notebookClient().inNamespace(resource.getMetadata().getNamespace()).resource(resource).update();
+ }
+
+ @Override
+ public boolean waitForReadiness(Notebook resource) {
+ return resource != null;
+ }
+
+    public static MixedOperation<Notebook, KubernetesResourceList<Notebook>, Resource<Notebook>> notebookClient() {
+ return ResourceManager.getClient().getClient().resources(Notebook.class);
+ }
+
+ public static Notebook loadDefaultNotebook(String namespace, String name) throws IOException {
+ InputStream is = TestUtils.getFileFromResourceAsStream(NOTEBOOK_TEMPLATE_PATH);
+ String notebookString = IOUtils.toString(is, "UTF-8");
+ notebookString = notebookString.replace("my-project", namespace).replace("my-workbench", name);
+ return TestUtils.configFromYaml(notebookString, Notebook.class);
+ }
+}
diff --git a/src/main/java/io/odh/test/install/InstallTypes.java b/src/main/java/io/odh/test/install/InstallTypes.java
new file mode 100644
index 00000000..ad33d18a
--- /dev/null
+++ b/src/main/java/io/odh/test/install/InstallTypes.java
@@ -0,0 +1,10 @@
+/*
+ * Copyright Skodjob authors.
+ * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
+ */
+package io.odh.test.install;
+
+public enum InstallTypes {
+ OLM,
+ BUNDLE
+}
diff --git a/src/main/java/io/odh/test/install/OlmInstall.java b/src/main/java/io/odh/test/install/OlmInstall.java
index 45f45568..9c9a3ebf 100644
--- a/src/main/java/io/odh/test/install/OlmInstall.java
+++ b/src/main/java/io/odh/test/install/OlmInstall.java
@@ -68,8 +68,6 @@ private void createAndModifySubscription() {
Subscription subscription = prepareSubscription();
ResourceManager.getInstance().createResourceWithWait(subscription);
-// ResourceManager.RESOURCE_STACK.push(new ResourceItem(this::deleteCSV));
-
}
public void updateSubscription() {
Subscription subscription = prepareSubscription();
diff --git a/src/main/java/io/odh/test/platform/KubeClient.java b/src/main/java/io/odh/test/platform/KubeClient.java
index 0e12898c..97cde87a 100644
--- a/src/main/java/io/odh/test/platform/KubeClient.java
+++ b/src/main/java/io/odh/test/platform/KubeClient.java
@@ -185,6 +185,10 @@ public List listPods(String namespaceName) {
return client.pods().inNamespace(namespaceName).list().getItems();
}
+    public List<Pod> listPods(String namespaceName, LabelSelector selector) {
+ return client.pods().inNamespace(namespaceName).withLabelSelector(selector).list().getItems();
+ }
+
/**
* Returns list of pods by prefix in pod name
*
diff --git a/src/main/java/io/odh/test/utils/NamespaceUtils.java b/src/main/java/io/odh/test/utils/NamespaceUtils.java
new file mode 100644
index 00000000..72398ee7
--- /dev/null
+++ b/src/main/java/io/odh/test/utils/NamespaceUtils.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright Skodjob authors.
+ * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
+ */
+package io.odh.test.utils;
+
+import io.odh.test.TestConstants;
+import io.odh.test.TestUtils;
+import io.odh.test.framework.manager.ResourceManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.time.Duration;
+
+public class NamespaceUtils {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(NamespaceUtils.class);
+ private static final long DELETION_TIMEOUT = Duration.ofMinutes(2).toMillis();
+
+ private NamespaceUtils() { }
+
+ public static void waitForNamespaceReadiness(String name) {
+ LOGGER.info("Waiting for Namespace: {} readiness", name);
+
+ TestUtils.waitFor("Namespace: " + name, TestConstants.GLOBAL_POLL_INTERVAL_SHORT, DELETION_TIMEOUT,
+ () -> ResourceManager.getClient().getNamespace(name) != null);
+ LOGGER.info("Namespace: {} is ready", name);
+ }
+
+ public static void waitForNamespaceDeletion(String name) {
+ LOGGER.info("Waiting for Namespace: {} deletion", name);
+
+ TestUtils.waitFor("Namespace: " + name, TestConstants.GLOBAL_POLL_INTERVAL_SHORT, DELETION_TIMEOUT,
+ () -> ResourceManager.getClient().getNamespace(name) == null);
+ LOGGER.info("Namespace: {} was deleted", name);
+ }
+}
diff --git a/src/main/java/io/odh/test/utils/PodUtils.java b/src/main/java/io/odh/test/utils/PodUtils.java
new file mode 100644
index 00000000..57da893f
--- /dev/null
+++ b/src/main/java/io/odh/test/utils/PodUtils.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright Skodjob authors.
+ * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
+ */
+package io.odh.test.utils;
+
+import io.fabric8.kubernetes.api.model.ContainerStatus;
+import io.fabric8.kubernetes.api.model.LabelSelector;
+import io.fabric8.kubernetes.api.model.Pod;
+import io.fabric8.kubernetes.client.readiness.Readiness;
+import io.odh.test.TestConstants;
+import io.odh.test.TestUtils;
+import io.odh.test.framework.manager.ResourceManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.time.Duration;
+import java.util.List;
+
+public class PodUtils {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(PodUtils.class);
+ private static final long DELETION_TIMEOUT = Duration.ofMinutes(5).toMillis();
+ private static final long READINESS_TIMEOUT = Duration.ofMinutes(10).toMillis();
+
+ private PodUtils() { }
+
+ public static void waitForPodsReady(String namespaceName, LabelSelector selector, int expectPods, boolean containers, Runnable onTimeout) {
+ TestUtils.waitFor("readiness of all Pods matching: " + selector,
+ TestConstants.GLOBAL_POLL_INTERVAL_MEDIUM, READINESS_TIMEOUT,
+ () -> {
+                List<Pod> pods = ResourceManager.getClient().listPods(namespaceName, selector);
+ if (pods.isEmpty() && expectPods == 0) {
+ LOGGER.debug("Expected Pods are ready");
+ return true;
+ }
+ if (pods.isEmpty()) {
+ LOGGER.debug("Pods matching: {}/{} are not ready", namespaceName, selector);
+ return false;
+ }
+ if (pods.size() != expectPods) {
+ LOGGER.debug("Expected Pods: {}/{} are not ready", namespaceName, selector);
+ return false;
+ }
+ for (Pod pod : pods) {
+ if (!Readiness.isPodReady(pod)) {
+ LOGGER.debug("Pod not ready: {}/{}", namespaceName, pod.getMetadata().getName());
+ return false;
+ } else {
+ if (containers) {
+ for (ContainerStatus cs : pod.getStatus().getContainerStatuses()) {
+ if (!Boolean.TRUE.equals(cs.getReady())) {
+                            LOGGER.debug("Container: {} of Pod: {}/{} not ready", cs.getName(), namespaceName, pod.getMetadata().getName());
+ return false;
+ }
+ }
+ }
+ }
+ }
+ LOGGER.info("Pods matching: {}/{} are ready", namespaceName, selector);
+ return true;
+ }, onTimeout);
+ }
+}
diff --git a/src/main/resources/notebook.yaml b/src/main/resources/notebook.yaml
new file mode 100644
index 00000000..15d91261
--- /dev/null
+++ b/src/main/resources/notebook.yaml
@@ -0,0 +1,146 @@
+apiVersion: kubeflow.org/v1
+kind: Notebook
+metadata:
+ annotations:
+ notebooks.opendatahub.io/inject-oauth: 'true'
+ opendatahub.io/service-mesh: 'false'
+ opendatahub.io/accelerator-name: ''
+ labels:
+ app: my-workbench
+ opendatahub.io/dashboard: 'true'
+ opendatahub.io/odh-managed: 'true'
+ sidecar.istio.io/inject: 'false'
+ name: my-workbench
+ namespace: my-project
+spec:
+ template:
+ spec:
+ affinity: {}
+ containers:
+ - env:
+ - name: NOTEBOOK_ARGS
+ value: |-
+ --ServerApp.port=8888
+ --ServerApp.token=''
+ --ServerApp.password=''
+ --ServerApp.base_url=/notebook/my-project/my-workbench
+ --ServerApp.quit_button=False
+ --ServerApp.tornado_settings={"user":"","hub_host":"odh_dashboard_route","hub_prefix":"/projects/my-project"}
+ - name: JUPYTER_IMAGE
+ value: image-registry.openshift-image-registry.svc:5000/opendatahub/jupyter-pytorch-notebook:2023.2
+ image: image-registry.openshift-image-registry.svc:5000/opendatahub/jupyter-pytorch-notebook:2023.2
+ imagePullPolicy: Always
+ livenessProbe:
+ failureThreshold: 3
+ httpGet:
+ path: /notebook/my-project/my-workbench/api
+ port: notebook-port
+ scheme: HTTP
+ initialDelaySeconds: 10
+ periodSeconds: 5
+ successThreshold: 1
+ timeoutSeconds: 1
+ name: my-workbench
+ ports:
+ - containerPort: 8888
+ name: notebook-port
+ protocol: TCP
+ readinessProbe:
+ failureThreshold: 3
+ httpGet:
+ path: /notebook/my-project/my-workbench/api
+ port: notebook-port
+ scheme: HTTP
+ initialDelaySeconds: 10
+ periodSeconds: 5
+ successThreshold: 1
+ timeoutSeconds: 1
+ resources:
+ limits:
+ cpu: "2"
+ memory: 4Gi
+ requests:
+ cpu: "1"
+ memory: 1Gi
+ volumeMounts:
+ - mountPath: /opt/app-root/src
+ name: my-workbench
+ - mountPath: /dev/shm
+ name: shm
+ workingDir: /opt/app-root/src
+ - args:
+ - --provider=openshift
+ - --https-address=:8443
+ - --http-address=
+ - --openshift-service-account=my-workbench
+ - --cookie-secret-file=/etc/oauth/config/cookie_secret
+ - --cookie-expire=24h0m0s
+ - --tls-cert=/etc/tls/private/tls.crt
+ - --tls-key=/etc/tls/private/tls.key
+ - --upstream=http://localhost:8888
+ - --upstream-ca=/var/run/secrets/kubernetes.io/serviceaccount/ca.crt
+ - --email-domain=*
+ - --skip-provider-button
+ - --openshift-sar={"verb":"get","resource":"notebooks","resourceAPIGroup":"kubeflow.org","resourceName":"my-workbench","namespace":"$(NAMESPACE)"}
+ - --logout-url=odh_dashboard_route/projects/my-project?notebookLogout=my-workbench
+ env:
+ - name: NAMESPACE
+ valueFrom:
+ fieldRef:
+ fieldPath: metadata.namespace
+ image: registry.redhat.io/openshift4/ose-oauth-proxy:v4.10
+ imagePullPolicy: Always
+ livenessProbe:
+ failureThreshold: 3
+ httpGet:
+ path: /oauth/healthz
+ port: oauth-proxy
+ scheme: HTTPS
+ initialDelaySeconds: 30
+ periodSeconds: 5
+ successThreshold: 1
+ timeoutSeconds: 1
+ name: oauth-proxy
+ ports:
+ - containerPort: 8443
+ name: oauth-proxy
+ protocol: TCP
+ readinessProbe:
+ failureThreshold: 3
+ httpGet:
+ path: /oauth/healthz
+ port: oauth-proxy
+ scheme: HTTPS
+ initialDelaySeconds: 5
+ periodSeconds: 5
+ successThreshold: 1
+ timeoutSeconds: 1
+ resources:
+ limits:
+ cpu: 100m
+ memory: 64Mi
+ requests:
+ cpu: 100m
+ memory: 64Mi
+ volumeMounts:
+ - mountPath: /etc/oauth/config
+ name: oauth-config
+ - mountPath: /etc/tls/private
+ name: tls-certificates
+ enableServiceLinks: false
+ serviceAccountName: my-workbench
+ volumes:
+ - name: my-workbench
+ persistentVolumeClaim:
+ claimName: my-workbench
+ - emptyDir:
+ medium: Memory
+ name: shm
+ - name: oauth-config
+ secret:
+ defaultMode: 420
+ secretName: my-workbench-oauth-config
+ - name: tls-certificates
+ secret:
+ defaultMode: 420
+ secretName: my-workbench-tls
diff --git a/src/test/java/io/odh/test/e2e/Abstract.java b/src/test/java/io/odh/test/e2e/Abstract.java
index 7ff8a288..c0e0231e 100644
--- a/src/test/java/io/odh/test/e2e/Abstract.java
+++ b/src/test/java/io/odh/test/e2e/Abstract.java
@@ -4,15 +4,21 @@
*/
package io.odh.test.e2e;
+import io.odh.test.Environment;
import io.odh.test.framework.manager.ResourceManager;
import io.odh.test.framework.TestExceptionCallbackListener;
+import io.odh.test.install.InstallTypes;
+import io.odh.test.install.OlmInstall;
import io.odh.test.framework.TestSeparator;
import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.DisplayNameGeneration;
import org.junit.jupiter.api.DisplayNameGenerator;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.extension.ExtendWith;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
@DisplayNameGeneration(DisplayNameGenerator.IndicativeSentences.class)
@ExtendWith(TestExceptionCallbackListener.class)
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@@ -22,6 +28,20 @@ public class Abstract implements TestSeparator {
ResourceManager.getInstance();
}
+ @BeforeAll
+ void setupEnvironment() {
+ if (Environment.OPERATOR_INSTALL_TYPE.equals(InstallTypes.OLM.toString())) {
+ OlmInstall olmInstall = new OlmInstall();
+ olmInstall.create();
+ } else if (Environment.OPERATOR_INSTALL_TYPE.equals(InstallTypes.BUNDLE.toString())) {
+ LOGGER.error("Bundle install is not implemented yet!");
+ assertTrue(false);
+ } else {
+ LOGGER.error("Unknown install type {}! You should implement it at first!", Environment.OPERATOR_INSTALL_TYPE);
+ assertTrue(false);
+ }
+ }
+
@AfterAll
void teardownEnvironment() {
ResourceManager.getInstance().deleteResources();
diff --git a/src/test/java/io/odh/test/e2e/standard/NotebookST.java b/src/test/java/io/odh/test/e2e/standard/NotebookST.java
new file mode 100644
index 00000000..e65f0059
--- /dev/null
+++ b/src/test/java/io/odh/test/e2e/standard/NotebookST.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright Skodjob authors.
+ * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
+ */
+package io.odh.test.e2e.standard;
+
+import io.fabric8.kubernetes.api.model.LabelSelector;
+import io.fabric8.kubernetes.api.model.LabelSelectorBuilder;
+import io.fabric8.kubernetes.api.model.Namespace;
+import io.fabric8.kubernetes.api.model.NamespaceBuilder;
+import io.fabric8.kubernetes.api.model.PersistentVolumeClaim;
+import io.fabric8.kubernetes.api.model.PersistentVolumeClaimBuilder;
+import io.fabric8.kubernetes.api.model.Quantity;
+import io.odh.test.OdhAnnotationsLabels;
+import io.odh.test.e2e.Abstract;
+import io.odh.test.framework.manager.ResourceManager;
+import io.odh.test.framework.manager.resources.NotebookResource;
+import io.odh.test.utils.PodUtils;
+import io.opendatahub.datasciencecluster.v1.DataScienceCluster;
+import io.opendatahub.datasciencecluster.v1.DataScienceClusterBuilder;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.ComponentsBuilder;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Codeflare;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.CodeflareBuilder;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Dashboard;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.DashboardBuilder;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Datasciencepipelines;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.DatasciencepipelinesBuilder;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Kserve;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.KserveBuilder;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Workbenches;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.WorkbenchesBuilder;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.kubeflow.v1.Notebook;
+import org.kubeflow.v1.NotebookBuilder;
+
+import java.io.IOException;
+
+public class NotebookST extends Abstract {
+
+ private static final String DS_PROJECT_NAME = "test-notebooks";
+ private static final String DS_PROJECT_NAMESPACE = "test-notebooks";
+
+ private static final String NTB_NAME = "test-odh-notebook";
+ private static final String NTB_NAMESPACE = "test-odh-notebook";
+ @Test
+ void testCreateSimpleNotebook() throws IOException {
+ // Create namespace
+ Namespace ns = new NamespaceBuilder()
+ .withNewMetadata()
+ .withName(NTB_NAMESPACE)
+ .addToLabels(OdhAnnotationsLabels.LABEL_DASHBOARD, "true")
+ .addToAnnotations(OdhAnnotationsLabels.ANNO_SERVICE_MESH, "false")
+ .endMetadata()
+ .build();
+ ResourceManager.getInstance().createResourceWithoutWait(ns);
+
+ PersistentVolumeClaim pvc = new PersistentVolumeClaimBuilder()
+ .withNewMetadata()
+ .withName(NTB_NAME)
+ .withNamespace(NTB_NAMESPACE)
+ .addToLabels(OdhAnnotationsLabels.LABEL_DASHBOARD, "true")
+ .endMetadata()
+ .withNewSpec()
+ .addToAccessModes("ReadWriteOnce")
+ .withNewResources()
+ .addToRequests("storage", new Quantity("10Gi"))
+ .endResources()
+ .withVolumeMode("Filesystem")
+ .endSpec()
+ .build();
+ ResourceManager.getInstance().createResourceWithoutWait(pvc);
+
+ Notebook notebook = new NotebookBuilder(NotebookResource.loadDefaultNotebook(NTB_NAMESPACE, NTB_NAME)).build();
+ ResourceManager.getInstance().createResourceWithoutWait(notebook);
+
+ LabelSelector lblSelector = new LabelSelectorBuilder()
+ .withMatchLabels(ResourceManager.getClient().listPods(NTB_NAMESPACE).get(0).getMetadata().getLabels())
+ .build();
+
+ PodUtils.waitForPodsReady(NTB_NAMESPACE, lblSelector, 1, true, () -> { });
+
+ }
+
+ @BeforeAll
+ void deployDataScienceCluster() {
+ // Create namespace
+ Namespace ns = new NamespaceBuilder().withNewMetadata().withName(DS_PROJECT_NAMESPACE).endMetadata().build();
+ ResourceManager.getInstance().createResourceWithoutWait(ns);
+
+ // Create DSC
+ DataScienceCluster dsc = new DataScienceClusterBuilder()
+ .withNewMetadata()
+ .withName(DS_PROJECT_NAME)
+ .withNamespace(DS_PROJECT_NAMESPACE)
+ .endMetadata()
+ .withNewSpec()
+ .withComponents(
+ new ComponentsBuilder()
+ .withWorkbenches(
+ new WorkbenchesBuilder().withManagementState(Workbenches.ManagementState.MANAGED).build()
+ )
+ .withDashboard(
+ new DashboardBuilder().withManagementState(Dashboard.ManagementState.MANAGED).build()
+ )
+ .withKserve(
+ new KserveBuilder().withManagementState(Kserve.ManagementState.REMOVED).build()
+ )
+ .withCodeflare(
+ new CodeflareBuilder().withManagementState(Codeflare.ManagementState.REMOVED).build()
+ )
+ .withDatasciencepipelines(
+ new DatasciencepipelinesBuilder().withManagementState(Datasciencepipelines.ManagementState.REMOVED).build()
+ )
+ .build())
+ .endSpec()
+ .build();
+ // Deploy DSC
+ ResourceManager.getInstance().createResourceWithWait(dsc);
+        // TODO - there must be a wait for the DSC pods here
+ }
+}