
Commit

Delete kubernetes utils
Signed-off-by: David Kornel <[email protected]>
kornys committed May 6, 2024
1 parent b943c98 commit 6f9a311
Showing 14 changed files with 112 additions and 137 deletions.
5 changes: 0 additions & 5 deletions src/main/java/io/odh/test/Environment.java
@@ -38,8 +38,6 @@ public class Environment {
public static final String USER_PATH = System.getProperty("user.dir");

private static final String CONFIG_FILE_PATH_ENV = "ENV_FILE";
private static final String TOKEN_ENV = "KUBE_TOKEN";
private static final String URL_ENV = "KUBE_URL";
private static final String PRODUCT_ENV = "PRODUCT";
private static final String LOG_DIR_ENV = "LOG_DIR";

@@ -74,9 +72,6 @@ public class Environment {
* Set values
*/
public static final String PRODUCT = getOrDefault(PRODUCT_ENV, PRODUCT_ODH);
public static final String RUN_USER = getOrDefault("USER", null);
public static final String KUBE_TOKEN = getOrDefault(TOKEN_ENV, null);
public static final String KUBE_URL = getOrDefault(URL_ENV, null);

//Install
public static final boolean SKIP_INSTALL_OPERATOR_DEPS = getOrDefault(SKIP_INSTALL_OPERATOR_DEPS_ENV, Boolean::valueOf, false);
78 changes: 78 additions & 0 deletions src/main/java/io/odh/test/TestUtils.java
@@ -7,6 +7,14 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.exc.InvalidFormatException;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import io.fabric8.kubernetes.api.model.EndpointSubset;
import io.fabric8.kubernetes.api.model.Endpoints;
import io.fabric8.kubernetes.client.KubernetesClientException;
import io.fabric8.kubernetes.client.dsl.Resource;
import io.fabric8.openshift.api.model.operatorhub.v1alpha1.InstallPlan;
import io.opendatahub.datasciencecluster.v1.datascienceclusterstatus.Conditions;
import io.skodjob.testframe.resources.KubeResourceManager;
import io.skodjob.testframe.utils.KubeUtils;
import io.skodjob.testframe.wait.Wait;
import org.junit.jupiter.api.TestInfo;
import org.junit.jupiter.api.extension.ExtensionContext;
@@ -22,6 +30,8 @@
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Duration;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@@ -157,4 +167,72 @@ public static <T> T runUntilPass(int retry, Callable<T> fn) {
}
throw new IllegalStateException(String.format("Command didn't pass in %s attempts", retry));
}

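/**
 * Finds the first DataScienceCluster status condition with the given type, or returns null if no such condition is present.
 */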
public static io.opendatahub.datasciencecluster.v1.datascienceclusterstatus.Conditions getDscConditionByType(List<Conditions> conditions, String type) {
return conditions.stream().filter(c -> c.getType().equals(type)).findFirst().orElse(null);
}

public static org.kubeflow.v1.notebookstatus.Conditions getNotebookConditionByType(List<org.kubeflow.v1.notebookstatus.Conditions> conditions, String type) {
return conditions.stream().filter(c -> c.getType().equals(type)).findFirst().orElse(null);
}

public static io.kserve.serving.v1beta1.inferenceservicestatus.Conditions getInferenceServiceConditionByType(List<io.kserve.serving.v1beta1.inferenceservicestatus.Conditions> conditions, String type) {
return conditions.stream().filter(c -> c.getType().equals(type)).findFirst().orElse(null);
}

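/**
 * Deletes every CustomResourceDefinition whose name contains "opendatahub.io" and removes the "opendatahub" namespace.
 */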
public static void clearOdhRemainingResources() {
KubeResourceManager.getKubeClient().getClient().apiextensions().v1().customResourceDefinitions().list().getItems()
.stream().filter(crd -> crd.getMetadata().getName().contains("opendatahub.io")).toList()
.forEach(crd -> {
LOGGER.info("Deleting CRD {}", crd.getMetadata().getName());
KubeResourceManager.getKubeClient().getClient().resource(crd).delete();
});
KubeResourceManager.getKubeClient().getClient().namespaces().withName("opendatahub").delete();
}

/**
* TODO - this should be removed when https://github.com/opendatahub-io/opendatahub-operator/issues/765 is resolved
*/
public static void deleteDefaultDSCI() {
LOGGER.info("Clearing DSCI ...");
KubeResourceManager.getKubeCmdClient().exec(false, true, Long.valueOf(GLOBAL_TIMEOUT).intValue(), "delete", "dsci", "--all");
}

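/**
 * Waits until a not-yet-approved InstallPlan referencing the given CSV name appears in the namespace.
 */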
public static void waitForInstallPlan(String namespace, String csvName) {
Wait.until(String.format("Install plan with new version: %s:%s", namespace, csvName),
GLOBAL_POLL_INTERVAL_SHORT, GLOBAL_TIMEOUT, () -> {
try {
InstallPlan ip = KubeUtils.getNonApprovedInstallPlan(namespace, csvName);
LOGGER.debug("Found InstallPlan {} - {}", ip.getMetadata().getName(), ip.getSpec().getClusterServiceVersionNames());
return true;
} catch (NoSuchElementException ex) {
LOGGER.debug("No new install plan available. Checking again ...");
return false;
}
}, () -> { });
}

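/**
 * Waits until the Endpoints resource exists and its first subset reports at least one ready address.
 */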
public static void waitForEndpoints(String name, Resource<Endpoints> endpoints) {
Wait.until("%s service endpoints to come up".formatted(name), GLOBAL_POLL_INTERVAL_SHORT, GLOBAL_TIMEOUT, () -> {
try {
Endpoints endpointset = endpoints.get();
if (endpointset == null) {
return false;
}
List<EndpointSubset> subsets = endpointset.getSubsets();
if (subsets.isEmpty()) {
return false;
}
for (EndpointSubset subset : subsets) {
return !subset.getAddresses().isEmpty();
}
} catch (KubernetesClientException e) {
if (e.getCode() == 404) {
return false;
}
throw e;
}
return false;
});
}
}
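
For context, a minimal usage sketch of the relocated helpers; the namespace and service name below are illustrative assumptions, not values taken from this commit:

// Illustrative only: resolve a Service's Endpoints and wait for it to report addresses.
Resource<Endpoints> dashboardEndpoints = KubeResourceManager.getKubeClient().getClient()
        .endpoints().inNamespace("opendatahub").withName("odh-dashboard");
TestUtils.waitForEndpoints("odh-dashboard", dashboardEndpoints);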
@@ -5,7 +5,7 @@
package io.odh.test.framework.listeners;

import io.odh.test.Environment;
import io.odh.test.platform.KubernetesUtils;
import io.odh.test.TestUtils;
import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext;

@@ -14,7 +14,7 @@ public class OdhResourceCleaner implements AfterAllCallback {
@Override
public void afterAll(ExtensionContext extensionContext) {
if (!Environment.SKIP_INSTALL_OPERATOR && !Environment.SKIP_DEPLOY_DSCI_DSC) {
KubernetesUtils.clearOdhRemainingResources();
TestUtils.clearOdhRemainingResources();
}
}
}
@@ -12,7 +12,7 @@
import io.fabric8.kubernetes.client.dsl.Resource;
import io.odh.test.OdhConstants;
import io.odh.test.TestConstants;
import io.odh.test.platform.KubernetesUtils;
import io.odh.test.TestUtils;
import io.odh.test.utils.PodUtils;
import io.opendatahub.datasciencecluster.v1.DataScienceCluster;
import io.skodjob.testframe.interfaces.ResourceType;
@@ -77,11 +77,11 @@ public boolean waitForReadiness(DataScienceCluster resource) {

DataScienceCluster dsc = dataScienceCLusterClient().withName(resource.getMetadata().getName()).get();

String dashboardStatus = KubernetesUtils.getDscConditionByType(dsc.getStatus().getConditions(), "dashboardReady").getStatus();
String dashboardStatus = TestUtils.getDscConditionByType(dsc.getStatus().getConditions(), "dashboardReady").getStatus();
LOGGER.debug("DataScienceCluster {} Dashboard status: {}", resource.getMetadata().getName(), dashboardStatus);
dscReady = dashboardStatus.equals("True");

String workbenchesStatus = KubernetesUtils.getDscConditionByType(dsc.getStatus().getConditions(), "workbenchesReady").getStatus();
String workbenchesStatus = TestUtils.getDscConditionByType(dsc.getStatus().getConditions(), "workbenchesReady").getStatus();
LOGGER.debug("DataScienceCluster {} Workbenches status: {}", resource.getMetadata().getName(), workbenchesStatus);
dscReady = dscReady && workbenchesStatus.equals("True");

@@ -143,13 +143,13 @@ record ConditionExpectation(String conditionType, String expectedStatus) {
for (ConditionExpectation conditionExpectation : conditionExpectations) {
String conditionType = conditionExpectation.conditionType;
String expectedStatus = conditionExpectation.expectedStatus;
String conditionStatus = KubernetesUtils.getDscConditionByType(dsc.getStatus().getConditions(), conditionType).getStatus();
String conditionStatus = TestUtils.getDscConditionByType(dsc.getStatus().getConditions(), conditionType).getStatus();
LOGGER.debug("DataScienceCluster {} {} status: {}", resource.getMetadata().getName(), conditionType, conditionStatus);
dscReady = dscReady && Objects.equals(conditionStatus, expectedStatus);
}

// Wait for ReconcileComplete condition (for the whole DSC)
String reconcileStatus = KubernetesUtils.getDscConditionByType(dsc.getStatus().getConditions(), "ReconcileComplete").getStatus();
String reconcileStatus = TestUtils.getDscConditionByType(dsc.getStatus().getConditions(), "ReconcileComplete").getStatus();
LOGGER.debug("DataScienceCluster {} ReconcileComplete status: {}", resource.getMetadata().getName(), reconcileStatus);
dscReady = dscReady && reconcileStatus.equals("True");

@@ -9,7 +9,7 @@
import io.fabric8.kubernetes.client.dsl.Resource;
import io.kserve.serving.v1beta1.InferenceService;
import io.odh.test.TestConstants;
import io.odh.test.platform.KubernetesUtils;
import io.odh.test.TestUtils;
import io.odh.test.utils.PodUtils;
import io.skodjob.testframe.interfaces.NamespacedResourceType;
import io.skodjob.testframe.resources.KubeResourceManager;
@@ -62,11 +62,11 @@ public boolean waitForReadiness(InferenceService resource) {

InferenceService inferenceService = get(resource.getMetadata().getNamespace(), resource.getMetadata().getName());

String predictorReadyStatus = KubernetesUtils.getInferenceServiceConditionByType(inferenceService.getStatus().getConditions(), "PredictorReady").getStatus();
String predictorReadyStatus = TestUtils.getInferenceServiceConditionByType(inferenceService.getStatus().getConditions(), "PredictorReady").getStatus();
LOGGER.debug("InferenceService {} PredictorReady status: {}", resource.getMetadata().getName(), predictorReadyStatus);
isReady = predictorReadyStatus.equals("True");

String readyStatus = KubernetesUtils.getInferenceServiceConditionByType(inferenceService.getStatus().getConditions(), "Ready").getStatus();
String readyStatus = TestUtils.getInferenceServiceConditionByType(inferenceService.getStatus().getConditions(), "Ready").getStatus();
LOGGER.debug("InferenceService {} Ready status: {}", resource.getMetadata().getName(), readyStatus);
isReady = isReady && readyStatus.equals("True");

5 changes: 2 additions & 3 deletions src/main/java/io/odh/test/install/BundleInstall.java
@@ -10,7 +10,6 @@
import io.odh.test.Environment;
import io.odh.test.TestConstants;
import io.odh.test.TestUtils;
import io.odh.test.platform.KubernetesUtils;
import io.skodjob.testframe.resources.KubeResourceManager;
import io.skodjob.testframe.resources.ResourceItem;
import org.slf4j.Logger;
@@ -90,7 +89,7 @@ private void modifyOperatorImage() {
public void create() {
modifyOperatorImage();
KubeResourceManager.getInstance().createOrUpdateResourceWithWait(resources.toArray(new HasMetadata[0]));
KubeResourceManager.getInstance().pushToStack(new ResourceItem<>(KubernetesUtils::deleteDefaultDSCI, null));
KubeResourceManager.getInstance().pushToStack(new ResourceItem<>(TestUtils::deleteDefaultDSCI, null));
}

public void createWithoutResourceManager() {
@@ -99,7 +98,7 @@ public void createWithoutResourceManager() {
}

public void deleteWithoutResourceManager() {
KubernetesUtils.deleteDefaultDSCI();
TestUtils.deleteDefaultDSCI();
KubeResourceManager.getKubeClient().delete(resources);
}
}
4 changes: 2 additions & 2 deletions src/main/java/io/odh/test/install/OlmInstall.java
@@ -13,7 +13,7 @@
import io.odh.test.Environment;
import io.odh.test.OdhConstants;
import io.odh.test.TestConstants;
import io.odh.test.platform.KubernetesUtils;
import io.odh.test.TestUtils;
import io.odh.test.utils.DeploymentUtils;
import io.skodjob.testframe.resources.KubeResourceManager;
import io.skodjob.testframe.resources.ResourceItem;
@@ -97,7 +97,7 @@ private void createAndModifySubscription() {
Subscription subscription = prepareSubscription();

KubeResourceManager.getInstance().createOrUpdateResourceWithWait(subscription);
KubeResourceManager.getInstance().pushToStack(new ResourceItem<>(KubernetesUtils::deleteDefaultDSCI, null));
KubeResourceManager.getInstance().pushToStack(new ResourceItem<>(TestUtils::deleteDefaultDSCI, null));
}
public void updateSubscription() {
Subscription subscription = prepareSubscription();
96 changes: 0 additions & 96 deletions src/main/java/io/odh/test/platform/KubernetesUtils.java

This file was deleted.

@@ -10,10 +10,10 @@
import io.odh.test.Environment;
import io.odh.test.OdhConstants;
import io.odh.test.TestSuite;
import io.odh.test.TestUtils;
import io.odh.test.e2e.Abstract;
import io.odh.test.framework.manager.resources.DataScienceClusterResource;
import io.odh.test.install.InstallTypes;
import io.odh.test.platform.KubernetesUtils;
import io.odh.test.utils.CsvUtils;
import io.opendatahub.datasciencecluster.v1.DataScienceCluster;
import io.opendatahub.datasciencecluster.v1.datascienceclusterspec.components.Codeflare;
@@ -81,13 +81,13 @@ void checkDataScienceClusterStatus() {
assertEquals("Ready", cluster.getStatus().getPhase());
assertNull(cluster.getStatus().getErrorMessage());

assertEquals("True", KubernetesUtils.getDscConditionByType(cluster.getStatus().getConditions(), "dashboardReady").getStatus());
assertEquals("True", KubernetesUtils.getDscConditionByType(cluster.getStatus().getConditions(), "workbenchesReady").getStatus());
assertEquals("True", KubernetesUtils.getDscConditionByType(cluster.getStatus().getConditions(), "data-science-pipelines-operatorReady").getStatus());
assertEquals("True", KubernetesUtils.getDscConditionByType(cluster.getStatus().getConditions(), "kserveReady").getStatus());
assertEquals("True", KubernetesUtils.getDscConditionByType(cluster.getStatus().getConditions(), "codeflareReady").getStatus());
assertEquals("True", KubernetesUtils.getDscConditionByType(cluster.getStatus().getConditions(), "model-meshReady").getStatus());
assertEquals("True", KubernetesUtils.getDscConditionByType(cluster.getStatus().getConditions(), "kueueReady").getStatus());
assertEquals("True", TestUtils.getDscConditionByType(cluster.getStatus().getConditions(), "dashboardReady").getStatus());
assertEquals("True", TestUtils.getDscConditionByType(cluster.getStatus().getConditions(), "workbenchesReady").getStatus());
assertEquals("True", TestUtils.getDscConditionByType(cluster.getStatus().getConditions(), "data-science-pipelines-operatorReady").getStatus());
assertEquals("True", TestUtils.getDscConditionByType(cluster.getStatus().getConditions(), "kserveReady").getStatus());
assertEquals("True", TestUtils.getDscConditionByType(cluster.getStatus().getConditions(), "codeflareReady").getStatus());
assertEquals("True", TestUtils.getDscConditionByType(cluster.getStatus().getConditions(), "model-meshReady").getStatus());
assertEquals("True", TestUtils.getDscConditionByType(cluster.getStatus().getConditions(), "kueueReady").getStatus());
}

@Test