From 0d75f576ed343ee4841227250ecc4247167a7db2 Mon Sep 17 00:00:00 2001
From: David Kornel
Date: Fri, 10 Nov 2023 13:51:25 +0100
Subject: [PATCH] Add first test for GitOps-deployed OpenDataScience cluster

---
 pom.xml                                       |  36 ++-
 src/main/java/io/odh/test/TestConstants.java  |   2 +-
 .../java/io/odh/test/platform/KubeClient.java | 273 ++++++++++++++++++
 .../ExtensionContextParameterResolver.java    |  22 ++
 .../io/odh/test/separator/TestSeparator.java  |  32 ++
 src/main/resources/logback.xml                |  30 ++
 src/test/java/io/odh/test/e2e/Abstract.java   |  17 ++
 src/test/java/io/odh/test/e2e/TestBase.java   |   4 -
 .../e2e/deployed/DataScienceClusterIT.java    |  39 +++
 src/test/java/io/odh/test/unit/UnitTests.java |   3 +-
 10 files changed, 447 insertions(+), 11 deletions(-)
 create mode 100644 src/main/java/io/odh/test/platform/KubeClient.java
 create mode 100644 src/main/java/io/odh/test/separator/ExtensionContextParameterResolver.java
 create mode 100644 src/main/java/io/odh/test/separator/TestSeparator.java
 create mode 100644 src/main/resources/logback.xml
 create mode 100644 src/test/java/io/odh/test/e2e/Abstract.java
 delete mode 100644 src/test/java/io/odh/test/e2e/TestBase.java
 create mode 100644 src/test/java/io/odh/test/e2e/deployed/DataScienceClusterIT.java

diff --git a/pom.xml b/pom.xml
index f4d7a8ae..0097e170 100644
--- a/pom.xml
+++ b/pom.xml
@@ -9,7 +9,6 @@
     <version>1.0-SNAPSHOT</version>
 
     <properties>
-        <it.skip>true</it.skip>
        <maven.compiler.source>17</maven.compiler.source>
        <maven.compiler.target>17</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
@@ -20,6 +19,9 @@
        <junit.jupiter.version>5.8.2</junit.jupiter.version>
        <junit.platform.version>1.8.2</junit.platform.version>
        <maven.surefire.version>3.2.2</maven.surefire.version>
+        <logback.version>1.4.11</logback.version>
+        <slf4j.version>2.0.9</slf4j.version>
+        <hamcrest.version>2.2</hamcrest.version>
     </properties>
 
     <dependencies>
@@ -62,13 +64,11 @@
             <groupId>org.junit.jupiter</groupId>
             <artifactId>junit-jupiter-api</artifactId>
             <version>${junit.jupiter.version}</version>
-            <scope>test</scope>
         </dependency>
         <dependency>
             <groupId>org.junit.jupiter</groupId>
             <artifactId>junit-jupiter-params</artifactId>
             <version>${junit.jupiter.version}</version>
-            <scope>test</scope>
         </dependency>
         <dependency>
             <groupId>org.junit.platform</groupId>
@@ -108,6 +108,24 @@
             <artifactId>opendatahub-crds</artifactId>
             <version>1.0-SNAPSHOT</version>
         </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest</artifactId>
+            <scope>compile</scope>
+            <version>${hamcrest.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-classic</artifactId>
+            <scope>runtime</scope>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+            <version>${slf4j.version}</version>
+            <scope>compile</scope>
+        </dependency>
     </dependencies>
 
     <build>
@@ -118,6 +136,11 @@
                 <version>${maven.surefire.version}</version>
                 <configuration>
                     <groups>UnitTests</groups>
+                    <properties>
+                        <configurationParameters>
+                            junit.jupiter.extensions.autodetection.enabled = true
+                        </configurationParameters>
+                    </properties>
                 </configuration>
             </plugin>
             <plugin>
@@ -141,8 +164,11 @@
                    <includes>
                        <include>**/ST*.java</include>
                        <include>**/*ST.java</include>
                    </includes>
-                    <skipTests>${it.skip}</skipTests>
-                    <skipITs>${it.skip}</skipITs>
+                    <properties>
+                        <configurationParameters>
+                            junit.jupiter.extensions.autodetection.enabled = true
+                        </configurationParameters>
+                    </properties>
                 </configuration>
             </plugin>
diff --git a/src/main/java/io/odh/test/TestConstants.java b/src/main/java/io/odh/test/TestConstants.java
index 324c7c4a..dc1475f0 100644
--- a/src/main/java/io/odh/test/TestConstants.java
+++ b/src/main/java/io/odh/test/TestConstants.java
@@ -1,5 +1,5 @@
 package io.odh.test;
 
 public class TestConstants {
-    public static final String test = "test";
+    public static final String ODH_NAMESPACE = "opendatahub";
 }
diff --git a/src/main/java/io/odh/test/platform/KubeClient.java b/src/main/java/io/odh/test/platform/KubeClient.java
new file mode 100644
index 00000000..212026f6
--- /dev/null
+++ b/src/main/java/io/odh/test/platform/KubeClient.java
@@ -0,0 +1,273 @@
+package io.odh.test.platform;
+
+import io.fabric8.kubernetes.api.model.ConfigMap;
+import io.fabric8.kubernetes.api.model.KubernetesResourceList;
+import io.fabric8.kubernetes.api.model.LabelSelector;
+import io.fabric8.kubernetes.api.model.Namespace;
+import io.fabric8.kubernetes.api.model.Node;
+import io.fabric8.kubernetes.api.model.Pod;
+import io.fabric8.kubernetes.api.model.apps.Deployment;
+import io.fabric8.kubernetes.api.model.apps.StatefulSet;
+import io.fabric8.kubernetes.api.model.batch.v1.Job;
+import io.fabric8.kubernetes.api.model.batch.v1.JobList;
+import io.fabric8.kubernetes.api.model.batch.v1.JobStatus;
+import io.fabric8.kubernetes.client.Config;
+import io.fabric8.kubernetes.client.KubernetesClient;
+import io.fabric8.kubernetes.client.KubernetesClientBuilder;
+import io.fabric8.kubernetes.client.dsl.MixedOperation;
+import io.fabric8.kubernetes.client.dsl.Resource;
+import io.fabric8.kubernetes.client.dsl.RollableScalableResource;
+import io.fabric8.openshift.client.OpenShiftClient;
+import io.opendatahub.datasciencecluster.v1.DataScienceCluster;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class KubeClient {
+    protected final KubernetesClient client;
+    protected String namespace;
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(KubeClient.class);
+
+    public KubeClient(String namespace) {
+        LOGGER.debug("Creating client in namespace: {}", namespace);
+        Config config = Config.autoConfigure(System.getenv().getOrDefault("KUBE_CONTEXT", null));
+
+        this.client = new KubernetesClientBuilder()
+            .withConfig(config)
+            .build()
+            .adapt(OpenShiftClient.class);
+        this.namespace = namespace;
+    }
+
+    public KubeClient(Config config, String namespace) {
+        this.client = new KubernetesClientBuilder()
+            .withConfig(config)
+            .build()
+            .adapt(OpenShiftClient.class);
+        this.namespace = namespace;
+    }
+
+    public KubeClient(KubernetesClient client, String namespace) {
+        LOGGER.debug("Creating client in namespace: {}", namespace);
+        this.client = client;
+        this.namespace = namespace;
+    }
+
+    // ============================
+    // ---------> CLIENT <---------
+    // ============================
+
+    public KubernetesClient getClient() {
+        return client;
+    }
+
+    // ===============================
+    // ---------> NAMESPACE <---------
+    // ===============================
+
+    public KubeClient inNamespace(String namespace) {
+        LOGGER.debug("Using namespace: {}", namespace);
+        this.namespace = namespace;
+        return this;
+    }
+
+    public String getNamespace() {
+        return namespace;
+    }
+
+    public Namespace getNamespace(String namespace) {
+        return client.namespaces().withName(namespace).get();
+    }
+
+    public boolean namespaceExists(String namespace) {
+        return client.namespaces().list().getItems().stream().map(n -> n.getMetadata().getName())
+                .collect(Collectors.toList()).contains(namespace);
+    }
+
+    /**
+     * Gets namespace status
+     */
+    public boolean getNamespaceStatus(String namespaceName) {
+        return client.namespaces().withName(namespaceName).isReady();
+    }
+
+    // ================================
+    // ---------> CONFIG MAP <---------
+    // ================================
+    public ConfigMap getConfigMap(String namespaceName, String configMapName) {
+        return client.configMaps().inNamespace(namespaceName).withName(configMapName).get();
+    }
+
+    public ConfigMap getConfigMap(String configMapName) {
+        return getConfigMap(namespace, configMapName);
+    }
+
+
+    public boolean getConfigMapStatus(String configMapName) {
+        return client.configMaps().inNamespace(getNamespace()).withName(configMapName).isReady();
+    }
+
+    // =========================
+    // ---------> POD <---------
+    // =========================
+    public List<Pod> listPods() {
+        return client.pods().inNamespace(namespace).list().getItems();
+    }
+
+    public List<Pod> listPods(String namespaceName) {
+        return client.pods().inNamespace(namespaceName).list().getItems();
+    }
+
+    /**
+     * Returns list of pods by prefix in pod name
+     * @param namespaceName Namespace name
+     * @param podNamePrefix prefix with which the name should begin
+     * @return List of pods
+     */
+    public List<Pod> listPodsByPrefixInName(String namespaceName, String podNamePrefix) {
+        return listPods(namespaceName)
+                .stream().filter(p -> p.getMetadata().getName().startsWith(podNamePrefix))
+                .collect(Collectors.toList());
+    }
+
+    /**
+     * Gets pod
+     */
+    public Pod getPod(String namespaceName, String name) {
+        return client.pods().inNamespace(namespaceName).withName(name).get();
+    }
+
+    public Pod getPod(String name) {
+        return getPod(namespace, name);
+    }
+
+    public String getLogs(String namespaceName, String podName) {
+        return client.pods().inNamespace(namespaceName).withName(podName).getLog();
+    }
+
+    // ==================================
+    // ---------> STATEFUL SET <---------
+    // ==================================
+
+    /**
+     * Gets stateful set
+     */
+    public StatefulSet getStatefulSet(String namespaceName, String statefulSetName) {
+        return client.apps().statefulSets().inNamespace(namespaceName).withName(statefulSetName).get();
+    }
+
+    public StatefulSet getStatefulSet(String statefulSetName) {
+        return getStatefulSet(namespace, statefulSetName);
+    }
+
+    /**
+     * Gets stateful set resource
+     */
+    public RollableScalableResource<StatefulSet> statefulSet(String namespaceName, String statefulSetName) {
+        return client.apps().statefulSets().inNamespace(namespaceName).withName(statefulSetName);
+    }
+
+    public RollableScalableResource<StatefulSet> statefulSet(String statefulSetName) {
+        return statefulSet(namespace, statefulSetName);
+    }
+
+    // ================================
+    // ---------> DEPLOYMENT <---------
+    // ================================
+
+    /**
+     * Gets deployment
+     */
+    public Deployment getDeployment(String namespaceName, String deploymentName) {
+        return client.apps().deployments().inNamespace(namespaceName).withName(deploymentName).get();
+    }
+
+    public Deployment getDeployment(String deploymentName) {
+        return client.apps().deployments().inNamespace(namespace).withName(deploymentName).get();
+    }
+
+    public Deployment getDeploymentFromAnyNamespaces(String deploymentName) {
+        return client.apps().deployments().inAnyNamespace().list().getItems().stream().filter(
+                deployment -> deployment.getMetadata().getName().equals(deploymentName))
+            .findFirst()
+            .orElseThrow();
+    }
+
+    /**
+     * Gets deployment selector
+     */
+    public LabelSelector getDeploymentSelectors(String namespaceName, String deploymentName) {
+        return client.apps().deployments().inNamespace(namespaceName).withName(deploymentName).get().getSpec().getSelector();
+    }
+
+    // ==========================
+    // ---------> NODE <---------
+    // ==========================
+
+    public String getNodeAddress() {
+        return listNodes().get(0).getStatus().getAddresses().get(0).getAddress();
+    }
+
+    public List<Node> listNodes() {
+        return client.nodes().list().getItems();
+    }
+
+    public List<Node> listWorkerNodes() {
+        return listNodes().stream().filter(node -> node.getMetadata().getLabels().containsKey("node-role.kubernetes.io/worker")).collect(Collectors.toList());
+    }
+
+    public List<Node> listMasterNodes() {
+        return listNodes().stream().filter(node -> node.getMetadata().getLabels().containsKey("node-role.kubernetes.io/master")).collect(Collectors.toList());
+    }
+
+    // =========================
+    // ---------> JOB <---------
+    // =========================
+
+    public boolean jobExists(String jobName) {
+        return client.batch().v1().jobs().inNamespace(namespace).list().getItems().stream().anyMatch(j -> j.getMetadata().getName().startsWith(jobName));
+    }
+
+    public Job getJob(String jobName) {
+        return client.batch().v1().jobs().inNamespace(namespace).withName(jobName).get();
+    }
+
+    public boolean checkSucceededJobStatus(String jobName) {
+        return checkSucceededJobStatus(getNamespace(), jobName, 1);
+    }
+
+    public boolean checkSucceededJobStatus(String namespaceName, String jobName, int expectedSucceededPods) {
+        return getJobStatus(namespaceName, jobName).getSucceeded().equals(expectedSucceededPods);
+    }
+
+    public boolean checkFailedJobStatus(String namespaceName, String jobName, int expectedFailedPods) {
+        return getJobStatus(namespaceName, jobName).getFailed().equals(expectedFailedPods);
+    }
+
+    // Returns the job status, e.g. "0 Running / 0 Succeeded / 1 Failed"
+    public JobStatus getJobStatus(String namespaceName, String jobName) {
+        return client.batch().v1().jobs().inNamespace(namespaceName).withName(jobName).get().getStatus();
+    }
+
+    public JobStatus getJobStatus(String jobName) {
+        return getJobStatus(namespace, jobName);
+    }
+
+    public JobList getJobList() {
+        return client.batch().v1().jobs().inNamespace(namespace).list();
+    }
+
+    public List<Job> listJobs(String namePrefix) {
+        return client.batch().v1().jobs().inNamespace(getNamespace()).list().getItems().stream()
+                .filter(job -> job.getMetadata().getName().startsWith(namePrefix)).collect(Collectors.toList());
+    }
+
+    public MixedOperation<DataScienceCluster, KubernetesResourceList<DataScienceCluster>, Resource<DataScienceCluster>> dataScienceClusterClient() {
+        return client.resources(DataScienceCluster.class);
+    }
+
+}
diff --git a/src/main/java/io/odh/test/separator/ExtensionContextParameterResolver.java b/src/main/java/io/odh/test/separator/ExtensionContextParameterResolver.java
new file mode 100644
index 00000000..b9870631
--- /dev/null
+++ b/src/main/java/io/odh/test/separator/ExtensionContextParameterResolver.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright Tealc authors.
+ * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
+ */
+package io.odh.test.separator;
+
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.api.extension.ParameterContext;
+import org.junit.jupiter.api.extension.ParameterResolutionException;
+import org.junit.jupiter.api.extension.ParameterResolver;
+
+public class ExtensionContextParameterResolver implements ParameterResolver {
+    @Override
+    public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) throws ParameterResolutionException {
+        return parameterContext.getParameter().getType() == ExtensionContext.class;
+    }
+
+    @Override
+    public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) throws ParameterResolutionException {
+        return extensionContext;
+    }
+}
\ No newline at end of file
diff --git a/src/main/java/io/odh/test/separator/TestSeparator.java b/src/main/java/io/odh/test/separator/TestSeparator.java
new file mode 100644
index 00000000..f4396df5
--- /dev/null
+++ b/src/main/java/io/odh/test/separator/TestSeparator.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright Tealc authors.
+ * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
+ */
+package io.odh.test.separator;
+
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+
+@ExtendWith(ExtensionContextParameterResolver.class)
+public interface TestSeparator {
+    Logger LOGGER = LoggerFactory.getLogger(TestSeparator.class);
+    String SEPARATOR_CHAR = "#";
+
+    @BeforeEach
+    default void beforeEachTest(ExtensionContext testContext) {
+        LOGGER.info(String.join("", Collections.nCopies(76, SEPARATOR_CHAR)));
+        LOGGER.info(String.format("%s.%s-STARTED", testContext.getRequiredTestClass().getName(), testContext.getRequiredTestMethod().getName()));
+    }
+
+    @AfterEach
+    default void afterEachTest(ExtensionContext testContext) {
+        LOGGER.info(String.format("%s.%s-FINISHED", testContext.getRequiredTestClass().getName(), testContext.getRequiredTestMethod().getName()));
+        LOGGER.info(String.join("", Collections.nCopies(76, SEPARATOR_CHAR)));
+    }
+}
diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml
new file mode 100644
index 00000000..a8aaf08d
--- /dev/null
+++ b/src/main/resources/logback.xml
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+            %d{yyyy-MM-dd'T'HH:mm:ss,GMT} %highlight(%-5p) [%c{0}:%L] %m%n
+
+
+
+
+        ${LOG_DIR}/testFile.log
+        true
+        true
+
+            %d{yyyy-MM-dd'T'HH:mm:ss.SSS'Z',GMT} [%thread] %-5p [%c{0}:%L] %m%n
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/test/java/io/odh/test/e2e/Abstract.java b/src/test/java/io/odh/test/e2e/Abstract.java
new file mode 100644
index 00000000..0ebfaf53
--- /dev/null
+++ b/src/test/java/io/odh/test/e2e/Abstract.java
@@ -0,0 +1,17 @@
+package io.odh.test.e2e;
+
+import io.odh.test.platform.KubeClient;
+import io.odh.test.TestConstants;
+import io.odh.test.separator.TestSeparator;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.TestInstance;
+
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
+public abstract class Abstract implements TestSeparator {
+    protected KubeClient kubeClient;
+
+    @BeforeAll
+    void init() {
+        kubeClient = new KubeClient(TestConstants.ODH_NAMESPACE);
+    }
+}
diff --git a/src/test/java/io/odh/test/e2e/TestBase.java b/src/test/java/io/odh/test/e2e/TestBase.java
deleted file mode 100644
index c748285f..00000000
--- a/src/test/java/io/odh/test/e2e/TestBase.java
+++ /dev/null
@@ -1,4 +0,0 @@
-package io.odh.test.e2e;
-
-public class TestBase {
-}
diff --git a/src/test/java/io/odh/test/e2e/deployed/DataScienceClusterIT.java b/src/test/java/io/odh/test/e2e/deployed/DataScienceClusterIT.java
new file mode 100644
index 00000000..d3a48617
--- /dev/null
+++ b/src/test/java/io/odh/test/e2e/deployed/DataScienceClusterIT.java
@@ -0,0 +1,39 @@
+package io.odh.test.e2e.deployed;
+
+import io.fabric8.kubernetes.api.model.KubernetesResourceList;
+import io.fabric8.kubernetes.client.dsl.MixedOperation;
+import io.fabric8.kubernetes.client.dsl.Resource;
+import io.odh.test.TestConstants;
+import io.odh.test.e2e.Abstract;
+import io.opendatahub.datasciencecluster.v1.DataScienceCluster;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Codeflare;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Dashboard;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Datasciencepipelines;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Kserve;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Modelmeshserving;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Ray;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Trustyai;
+import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Workbenches;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class DataScienceClusterIT extends Abstract {
+
+    @Test
+    void checkDataScienceClusterExists() {
+        MixedOperation<DataScienceCluster, KubernetesResourceList<DataScienceCluster>, Resource<DataScienceCluster>> cli =
+            kubeClient.dataScienceClusterClient();
+
+        DataScienceCluster cluster = cli.inNamespace(TestConstants.ODH_NAMESPACE).withName("default").get();
+
+        assertEquals(Kserve.ManagementState.MANAGED, cluster.getSpec().getComponents().getKserve().getManagementState());
+        assertEquals(Codeflare.ManagementState.MANAGED, cluster.getSpec().getComponents().getCodeflare().getManagementState());
+        assertEquals(Dashboard.ManagementState.MANAGED, cluster.getSpec().getComponents().getDashboard().getManagementState());
+        assertEquals(Ray.ManagementState.MANAGED, cluster.getSpec().getComponents().getRay().getManagementState());
+        assertEquals(Modelmeshserving.ManagementState.MANAGED, cluster.getSpec().getComponents().getModelmeshserving().getManagementState());
+        assertEquals(Datasciencepipelines.ManagementState.MANAGED, cluster.getSpec().getComponents().getDatasciencepipelines().getManagementState());
+        assertEquals(Trustyai.ManagementState.MANAGED, cluster.getSpec().getComponents().getTrustyai().getManagementState());
+        assertEquals(Workbenches.ManagementState.MANAGED, cluster.getSpec().getComponents().getWorkbenches().getManagementState());
+    }
+}
diff --git a/src/test/java/io/odh/test/unit/UnitTests.java b/src/test/java/io/odh/test/unit/UnitTests.java
index 7699eb5b..0114c9a5 100644
--- a/src/test/java/io/odh/test/unit/UnitTests.java
+++ b/src/test/java/io/odh/test/unit/UnitTests.java
@@ -6,6 +6,7 @@
 import io.fabric8.kubernetes.client.dsl.Resource;
 import io.fabric8.kubernetes.client.server.mock.EnableKubernetesMockClient;
 import io.fabric8.kubernetes.client.server.mock.KubernetesMockServer;
+import io.odh.test.separator.TestSeparator;
 import io.opendatahub.datasciencecluster.v1.DataScienceCluster;
 import io.opendatahub.datasciencecluster.v1.DataScienceClusterBuilder;
 import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.ComponentsBuilder;
@@ -16,7 +17,7 @@
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
 @EnableKubernetesMockClient(crud = true)
-public class UnitTests {
+public class UnitTests implements TestSeparator {
 
     private KubernetesClient kubernetesClient;
     private KubernetesMockServer server;
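
For reviewers, a minimal sketch of how the new typed dataScienceClusterClient() accessor can be driven outside the JUnit lifecycle. The snippet is not part of the patch: the DataScienceClusterProbe class name is hypothetical, while the "opendatahub" namespace and the "default" DataScienceCluster name are the values the test above relies on; a reachable cluster with the Open Data Hub CRDs installed is assumed.

import io.odh.test.TestConstants;
import io.odh.test.platform.KubeClient;
import io.opendatahub.datasciencecluster.v1.DataScienceCluster;

// Hypothetical helper, not part of the patch: prints one component's management state.
public class DataScienceClusterProbe {
    public static void main(String[] args) {
        // KubeClient auto-configures itself from the current kubeconfig (or KUBE_CONTEXT).
        KubeClient kubeClient = new KubeClient(TestConstants.ODH_NAMESPACE);

        // Typed access to the DataScienceCluster custom resource added by this patch.
        DataScienceCluster cluster = kubeClient.dataScienceClusterClient()
                .inNamespace(TestConstants.ODH_NAMESPACE)
                .withName("default")
                .get();

        if (cluster == null) {
            System.out.println("No DataScienceCluster named 'default' in " + TestConstants.ODH_NAMESPACE);
        } else {
            System.out.println("Dashboard management state: "
                    + cluster.getSpec().getComponents().getDashboard().getManagementState());
        }
    }
}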