diff --git a/dataproc_jupyter_plugin/controllers/dataproc.py b/dataproc_jupyter_plugin/controllers/dataproc.py
index db822234..06ae8448 100644
--- a/dataproc_jupyter_plugin/controllers/dataproc.py
+++ b/dataproc_jupyter_plugin/controllers/dataproc.py
@@ -13,44 +13,111 @@
 # limitations under the License.
 
 import json
-
 import aiohttp
 import tornado
 from jupyter_server.base.handlers import APIHandler
-
-from dataproc_jupyter_plugin import credentials
+from dataproc_jupyter_plugin.commons.constants import DATAPROC_SERVICE_NAME
+from dataproc_jupyter_plugin import credentials, urls
 from dataproc_jupyter_plugin.services import dataproc
 
 
-class ClusterListController(APIHandler):
+class RuntimeController(APIHandler):
     @tornado.web.authenticated
     async def get(self):
         try:
             page_token = self.get_argument("pageToken")
             page_size = self.get_argument("pageSize")
+            dataproc_url = await urls.gcp_service_url(DATAPROC_SERVICE_NAME)
             async with aiohttp.ClientSession() as client_session:
                 client = dataproc.Client(
-                    await credentials.get_cached(), self.log, client_session
+                    await credentials.get_cached(),
+                    self.log,
+                    dataproc_url,
+                    client_session,
                 )
-                cluster_list = await client.list_clusters(page_size, page_token)
-                self.finish(json.dumps(cluster_list))
+                runtime_list = await client.list_runtime(page_size, page_token)
+                self.finish(json.dumps(runtime_list))
         except Exception as e:
-            self.log.exception("Error fetching cluster list")
+            self.log.exception(f"Error fetching runtime template list: {str(e)}")
             self.finish({"error": str(e)})
 
 
-class RuntimeController(APIHandler):
+class ClusterListController(APIHandler):
     @tornado.web.authenticated
     async def get(self):
         try:
             page_token = self.get_argument("pageToken")
             page_size = self.get_argument("pageSize")
-            async with aiohttp.ClientSession() as client_session:
-                client = dataproc.Client(
-                    await credentials.get_cached(), self.log, client_session
-                )
-                runtime_list = await client.list_runtime(page_size, page_token)
-                self.finish(json.dumps(runtime_list))
+            dataproc_url = await urls.gcp_service_url(DATAPROC_SERVICE_NAME)
+            client = dataproc.Client(
+                await credentials.get_cached(), self.log, dataproc_url
+            )
+            cluster_list = await client.list_clusters(page_size, page_token)
+            self.finish(json.dumps(cluster_list))
         except Exception as e:
-            self.log.exception(f"Error fetching runtime template list: {str(e)}")
+            self.log.exception(f"Error fetching cluster list: {str(e)}")
+            self.finish({"error": str(e)})
+
+
+class ClusterDetailController(APIHandler):
+    @tornado.web.authenticated
+    async def get(self):
+        try:
+            cluster = self.get_argument("cluster")
+            dataproc_url = await urls.gcp_service_url(DATAPROC_SERVICE_NAME)
+            client = dataproc.Client(
+                await credentials.get_cached(), self.log, dataproc_url
+            )
+            get_cluster = await client.get_cluster_detail(cluster)
+            self.finish(json.dumps(get_cluster))
+        except Exception as e:
+            self.log.exception(f"Error fetching a cluster: {str(e)}")
+            self.finish({"error": str(e)})
+
+
+class StopClusterController(APIHandler):
+    @tornado.web.authenticated
+    async def post(self):
+        try:
+            cluster = self.get_argument("cluster")
+            dataproc_url = await urls.gcp_service_url(DATAPROC_SERVICE_NAME)
+            client = dataproc.Client(
+                await credentials.get_cached(), self.log, dataproc_url
+            )
+            stop_cluster = await client.stop_cluster(cluster)
+            self.finish(json.dumps(stop_cluster))
+        except Exception as e:
+            self.log.exception(f"Error stopping a cluster: {str(e)}")
+            self.finish({"error": str(e)})
+
+
+class StartClusterController(APIHandler):
+    @tornado.web.authenticated
+    async def post(self):
+        try:
+            cluster = self.get_argument("cluster")
+            dataproc_url = await urls.gcp_service_url(DATAPROC_SERVICE_NAME)
+            client = dataproc.Client(
+                await credentials.get_cached(), self.log, dataproc_url
+            )
+            start_cluster = await client.start_cluster(cluster)
+            self.finish(json.dumps(start_cluster))
+        except Exception as e:
+            self.log.exception(f"Error starting a cluster: {str(e)}")
+            self.finish({"error": str(e)})
+
+
+class DeleteClusterController(APIHandler):
+    @tornado.web.authenticated
+    async def delete(self):
+        try:
+            cluster = self.get_argument("cluster")
+            dataproc_url = await urls.gcp_service_url(DATAPROC_SERVICE_NAME)
+            client = dataproc.Client(
+                await credentials.get_cached(), self.log, dataproc_url
+            )
+            delete_cluster = await client.delete_cluster(cluster)
+            self.finish(json.dumps(delete_cluster))
+        except Exception as e:
+            self.log.exception(f"Error deleting a cluster: {str(e)}")
             self.finish({"error": str(e)})
diff --git a/dataproc_jupyter_plugin/handlers.py b/dataproc_jupyter_plugin/handlers.py
index a5822c3a..43cc098e 100644
--- a/dataproc_jupyter_plugin/handlers.py
+++ b/dataproc_jupyter_plugin/handlers.py
@@ -193,6 +193,10 @@ def full_path(name):
         "dagRunTask": airflow.DagRunTaskController,
         "dagRunTaskLogs": airflow.DagRunTaskLogsController,
         "clusterList": dataproc.ClusterListController,
+        "clusterDetail": dataproc.ClusterDetailController,
+        "stopCluster": dataproc.StopClusterController,
+        "startCluster": dataproc.StartClusterController,
+        "deleteCluster": dataproc.DeleteClusterController,
         "runtimeList": dataproc.RuntimeController,
         "createJobScheduler": executor.ExecutorController,
         "dagList": airflow.DagListController,
diff --git a/dataproc_jupyter_plugin/services/dataproc.py b/dataproc_jupyter_plugin/services/dataproc.py
index 249fb12c..8c739351 100644
--- a/dataproc_jupyter_plugin/services/dataproc.py
+++ b/dataproc_jupyter_plugin/services/dataproc.py
@@ -12,6 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import proto
+import google.oauth2.credentials as oauth2
+from google.cloud import dataproc_v1 as dataproc
+from google.protobuf.empty_pb2 import Empty
 from dataproc_jupyter_plugin import urls
 from dataproc_jupyter_plugin.commons.constants import (
     CONTENT_TYPE,
@@ -20,7 +24,7 @@
 
 
 class Client:
-    def __init__(self, credentials, log, client_session):
+    def __init__(self, credentials, log, dataproc_url, client_session=None):
         self.log = log
         if not (
             ("access_token" in credentials)
@@ -33,6 +37,8 @@ def __init__(self, credentials, log, client_session):
         self.project_id = credentials["project_id"]
         self.region_id = credentials["region_id"]
         self.client_session = client_session
+        self.dataproc_url = dataproc_url
+        self.api_endpoint = f"{self.region_id}-{dataproc_url.split('/')[2]}:443"
 
     def create_headers(self):
         return {
@@ -40,10 +46,9 @@
             "Authorization": f"Bearer {self._access_token}",
         }
 
-    async def list_clusters(self, page_size, page_token):
+    async def list_runtime(self, page_size, page_token):
         try:
-            dataproc_url = await urls.gcp_service_url(DATAPROC_SERVICE_NAME)
-            api_endpoint = f"{dataproc_url}/v1/projects/{self.project_id}/regions/{self.region_id}/clusters?pageSize={page_size}&pageToken={page_token}"
+            api_endpoint = f"{self.dataproc_url}/v1/projects/{self.project_id}/locations/{self.region_id}/sessionTemplates?pageSize={page_size}&pageToken={page_token}"
             async with self.client_session.get(
                 api_endpoint, headers=self.create_headers()
             ) as response:
@@ -52,27 +57,157 @@ async def list_clusters(self, page_size, page_token):
                     resp = await response.json()
                     return resp
                 else:
                     return {
-                        "error": f"Failed to fetch clusters: {response.status} {await response.text()}"
+                        "error": f"Failed to fetch runtimes: {response.status} {await response.text()}"
                     }
+        except Exception as e:
+            self.log.exception(f"Error fetching runtime list: {str(e)}")
+            return {"error": str(e)}
+
+    async def list_clusters(self, page_size, page_token):
+        try:
+            # Create a client
+            client = dataproc.ClusterControllerAsyncClient(
+                client_options={"api_endpoint": self.api_endpoint},
+                credentials=oauth2.Credentials(self._access_token),
+            )
+
+            # Initialize request argument(s)
+            request = dataproc.ListClustersRequest(
+                project_id=self.project_id,
+                page_size=int(page_size),
+                page_token=page_token,
+                region=self.region_id,
+            )
+
+            # Make the request
+            page_result = await client.list_clusters(request=request)
+            clusters_list = []
+
+            # Handle the response
+            async for response in page_result:
+                clusters_list.append(
+                    proto.Message.to_dict(
+                        response,
+                        use_integers_for_enums=False,
+                        preserving_proto_field_name=False,
+                    )
+                )
+            return clusters_list
         except Exception as e:
-            self.log.exception("Error fetching cluster list")
+            self.log.exception(f"Error fetching cluster list: {str(e)}")
             return {"error": str(e)}
 
-    async def list_runtime(self, page_size, page_token):
+    async def get_cluster_detail(self, cluster):
         try:
-            dataproc_url = await urls.gcp_service_url(DATAPROC_SERVICE_NAME)
-            api_endpoint = f"{dataproc_url}/v1/projects/{self.project_id}/locations/{self.region_id}/sessionTemplates?pageSize={page_size}&pageToken={page_token}"
-            async with self.client_session.get(
-                api_endpoint, headers=self.create_headers()
-            ) as response:
-                if response.status == 200:
-                    resp = await response.json()
-                    return resp
-                else:
-                    return {
-                        "error": f"Failed to fetch runtimes: {response.status} {await response.text()}"
-                    }
+            # Create a client
+            client = dataproc.ClusterControllerAsyncClient(
+                client_options={"api_endpoint": self.api_endpoint},
+                credentials=oauth2.Credentials(self._access_token),
+            )
+
+            # Initialize request argument(s)
+            request = dataproc.GetClusterRequest(
+                project_id=self.project_id,
+                region=self.region_id,
+                cluster_name=cluster,
+            )
+
+            # Make the request
+            response = await client.get_cluster(request=request)
+
+            # Handle the response
+            return proto.Message.to_dict(
+                response,
+                use_integers_for_enums=False,
+                preserving_proto_field_name=False,
+            )
         except Exception as e:
-            self.log.exception(f"Error fetching runtime list: {str(e)}")
+            self.log.exception(f"Error fetching cluster detail: {str(e)}")
+            return {"error": str(e)}
+
+    async def stop_cluster(self, cluster):
+        try:
+            # Create a client
+            client = dataproc.ClusterControllerAsyncClient(
+                client_options={"api_endpoint": self.api_endpoint},
+                credentials=oauth2.Credentials(self._access_token),
+            )
+
+            # Initialize request argument(s)
+            request = dataproc.StopClusterRequest(
+                project_id=self.project_id,
+                region=self.region_id,
+                cluster_name=cluster,
+            )
+
+            operation = await client.stop_cluster(request=request)
+
+            response = await operation.result()
+            # Handle the response
+            return proto.Message.to_dict(
+                response,
+                use_integers_for_enums=False,
+                preserving_proto_field_name=False,
+            )
+        except Exception as e:
+            self.log.exception(f"Error stopping a cluster: {str(e)}")
+            return {"error": str(e)}
+
+    async def start_cluster(self, cluster):
+        try:
+            # Create a client
+            client = dataproc.ClusterControllerAsyncClient(
+                client_options={"api_endpoint": self.api_endpoint},
+                credentials=oauth2.Credentials(self._access_token),
+            )
+
+            # Initialize request argument(s)
+            request = dataproc.StartClusterRequest(
+                project_id=self.project_id,
+                region=self.region_id,
+                cluster_name=cluster,
+            )
+
+            operation = await client.start_cluster(request=request)
+
+            response = await operation.result()
+            # Handle the response
+            return proto.Message.to_dict(
+                response,
+                use_integers_for_enums=False,
+                preserving_proto_field_name=False,
+            )
+        except Exception as e:
+            self.log.exception(f"Error starting a cluster: {str(e)}")
+            return {"error": str(e)}
+
+    async def delete_cluster(self, cluster):
+        try:
+            # Create a client
+            client = dataproc.ClusterControllerAsyncClient(
+                client_options={"api_endpoint": self.api_endpoint},
+                credentials=oauth2.Credentials(self._access_token),
+            )
+
+            # Initialize request argument(s)
+            request = dataproc.DeleteClusterRequest(
+                project_id=self.project_id,
+                region=self.region_id,
+                cluster_name=cluster,
+            )
+
+            operation = await client.delete_cluster(request=request)
+
+            response = await operation.result()
+            # Handle the response
+            if isinstance(response, Empty):
+                return "Deleted successfully"
+            else:
+                return proto.Message.to_dict(
+                    response,
+                    use_integers_for_enums=False,
+                    preserving_proto_field_name=False,
+                )
+        except Exception as e:
+            self.log.exception(f"Error deleting a cluster: {str(e)}")
             return {"error": str(e)}
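Note on the derived gRPC endpoint: `Client.__init__` now computes a regional `api_endpoint` once from the configured service URL instead of resolving `urls.gcp_service_url` per request. A standalone sketch of that derivation, with illustrative inputs (the default Dataproc URL and a sample region are assumptions here):

```python
# Standalone sketch of the api_endpoint derivation in Client.__init__;
# both input values are illustrative placeholders.
dataproc_url = "https://dataproc.googleapis.com"  # assumed service URL
region_id = "us-central1"                         # assumed region

host = dataproc_url.split("/")[2]  # -> "dataproc.googleapis.com"
api_endpoint = f"{region_id}-{host}:443"
print(api_endpoint)  # -> "us-central1-dataproc.googleapis.com:443"
```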
diff --git a/dataproc_jupyter_plugin/tests/test_dataproc.py b/dataproc_jupyter_plugin/tests/test_dataproc.py
index d914c32b..c9e53df1 100644
--- a/dataproc_jupyter_plugin/tests/test_dataproc.py
+++ b/dataproc_jupyter_plugin/tests/test_dataproc.py
@@ -20,22 +20,14 @@
 async def test_list_clusters(monkeypatch, jp_fetch):
     mocks.patch_mocks(monkeypatch)
 
-    mock_project_id = "credentials-project"
     mock_page_token = "mock-page-token"
-    mock_region_id = "mock-region"
-    mock_page_size = "mock_page_size"
+    mock_page_size = 1
     response = await jp_fetch(
         "dataproc-plugin",
         "clusterList",
         params={"pageSize": mock_page_size, "pageToken": mock_page_token},
     )
     assert response.code == 200
-    payload = json.loads(response.body)
-    assert (
-        payload["api_endpoint"]
-        == f"https://dataproc.googleapis.com//v1/projects/credentials-project/regions/{mock_region_id}/clusters?pageSize={mock_page_size}&pageToken={mock_page_token}"
-    )
-    assert payload["headers"]["Authorization"] == f"Bearer mock-token"
 
 
 async def test_list_runtime(monkeypatch, jp_fetch):
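The rewritten test only asserts the status code now that the handler no longer echoes the REST URL back. A possible companion test for one of the new endpoints, following the same pattern — this is a sketch only, and it assumes `mocks.patch_mocks` also stubs the new `ClusterControllerAsyncClient` path, which may require extending the mock helpers:

```python
# Sketch only: assumes the shared mock layer covers the gRPC-backed
# get_cluster_detail call; the cluster name is a placeholder.
async def test_get_cluster_detail(monkeypatch, jp_fetch):
    mocks.patch_mocks(monkeypatch)

    response = await jp_fetch(
        "dataproc-plugin",
        "clusterDetail",
        params={"cluster": "mock-cluster"},
    )
    assert response.code == 200
```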
"pageToken": mock_page_token}, ) assert response.code == 200 - payload = json.loads(response.body) - assert ( - payload["api_endpoint"] - == f"https://dataproc.googleapis.com//v1/projects/credentials-project/regions/{mock_region_id}/clusters?pageSize={mock_page_size}&pageToken={mock_page_token}" - ) - assert payload["headers"]["Authorization"] == f"Bearer mock-token" async def test_list_runtime(monkeypatch, jp_fetch): diff --git a/pyproject.toml b/pyproject.toml index f126ad39..2be63789 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,10 +27,11 @@ dependencies = [ "google-cloud-jupyter-config>=0.0.10", "kernels-mixer>=0.0.13", "pendulum>=3.0.0", - "pydantic~=1.10.0", - "bigframes~=0.22.0", + "pydantic>=1.10.0", + "bigframes>=0.22.0", "aiohttp~=3.9.5", - "google-cloud-storage~=2.18.2" + "google-cloud-dataproc>=5.10.2", + "google-cloud-storage>=2.18.2" ] dynamic = ["version", "description", "authors", "urls", "keywords"] diff --git a/src/cluster/clusterServices.tsx b/src/cluster/clusterServices.tsx index 0a8d6252..e7401ce8 100644 --- a/src/cluster/clusterServices.tsx +++ b/src/cluster/clusterServices.tsx @@ -17,23 +17,15 @@ import { toast } from 'react-toastify'; import 'react-toastify/dist/ReactToastify.css'; -import { - API_HEADER_BEARER, - API_HEADER_CONTENT_TYPE, - ClusterStatus, - HTTP_METHOD, - POLLING_TIME_LIMIT, - gcpServiceUrls -} from '../utils/const'; +import { ClusterStatus, POLLING_TIME_LIMIT } from '../utils/const'; import { authApi, toastifyCustomStyle, - loggedFetch, getProjectId, - authenticatedFetch, statusValue } from '../utils/utils'; import { DataprocLoggingService, LOG_LEVEL } from '../utils/loggingService'; +import { requestAPI } from '../handler/handler'; interface IClusterRenderData { status: { state: ClusterStatus }; @@ -80,42 +72,33 @@ export class ClusterService { const projectId = await getProjectId(); setProjectId(projectId); - const queryParams = new URLSearchParams(); - queryParams.append('pageSize', '50'); - queryParams.append('pageToken', pageToken); + const serviceURL = `clusterList?pageSize=50&pageToken=${pageToken}`; + + const formattedResponse: any = await requestAPI(serviceURL); - const response = await authenticatedFetch({ - uri: 'clusters', - regionIdentifier: 'regions', - method: HTTP_METHOD.GET, - queryParams: queryParams - }); - const formattedResponse = await response.json(); let transformClusterListData = []; - if (formattedResponse && formattedResponse.clusters) { - transformClusterListData = formattedResponse.clusters.map( - (data: any) => { - const statusVal = statusValue(data); - // Extracting zone from zoneUri - // Example: "projects/{project}/zones/{zone}" + if (formattedResponse) { + transformClusterListData = formattedResponse.map((data: any) => { + const statusVal = statusValue(data); + // Extracting zone from zoneUri + // Example: "projects/{project}/zones/{zone}" - const zoneUri = data.config.gceClusterConfig.zoneUri.split('/'); + const zoneUri = data.config.gceClusterConfig.zoneUri.split('/'); - return { - clusterUuid: data.clusterUuid, - status: statusVal, - clusterName: data.clusterName, - clusterImage: data.config.softwareConfig.imageVersion, - region: data.labels['goog-dataproc-location'], - zone: zoneUri[zoneUri.length - 1], - totalWorkersNode: data.config.workerConfig - ? data.config.workerConfig.numInstances - : 0, - schedulesDeletion: data.config.lifecycleConfig ? 
diff --git a/src/cluster/clusterServices.tsx b/src/cluster/clusterServices.tsx
index 0a8d6252..e7401ce8 100644
--- a/src/cluster/clusterServices.tsx
+++ b/src/cluster/clusterServices.tsx
@@ -17,23 +17,15 @@
 
 import { toast } from 'react-toastify';
 import 'react-toastify/dist/ReactToastify.css';
-import {
-  API_HEADER_BEARER,
-  API_HEADER_CONTENT_TYPE,
-  ClusterStatus,
-  HTTP_METHOD,
-  POLLING_TIME_LIMIT,
-  gcpServiceUrls
-} from '../utils/const';
+import { ClusterStatus, POLLING_TIME_LIMIT } from '../utils/const';
 import {
   authApi,
   toastifyCustomStyle,
-  loggedFetch,
   getProjectId,
-  authenticatedFetch,
   statusValue
 } from '../utils/utils';
 import { DataprocLoggingService, LOG_LEVEL } from '../utils/loggingService';
+import { requestAPI } from '../handler/handler';
 
 interface IClusterRenderData {
   status: { state: ClusterStatus };
@@ -80,42 +72,33 @@
       const projectId = await getProjectId();
       setProjectId(projectId);
 
-      const queryParams = new URLSearchParams();
-      queryParams.append('pageSize', '50');
-      queryParams.append('pageToken', pageToken);
+      const serviceURL = `clusterList?pageSize=50&pageToken=${pageToken}`;
+
+      const formattedResponse: any = await requestAPI(serviceURL);
 
-      const response = await authenticatedFetch({
-        uri: 'clusters',
-        regionIdentifier: 'regions',
-        method: HTTP_METHOD.GET,
-        queryParams: queryParams
-      });
-      const formattedResponse = await response.json();
       let transformClusterListData = [];
-      if (formattedResponse && formattedResponse.clusters) {
-        transformClusterListData = formattedResponse.clusters.map(
-          (data: any) => {
-            const statusVal = statusValue(data);
-            // Extracting zone from zoneUri
-            // Example: "projects/{project}/zones/{zone}"
-
-            const zoneUri = data.config.gceClusterConfig.zoneUri.split('/');
-
-            return {
-              clusterUuid: data.clusterUuid,
-              status: statusVal,
-              clusterName: data.clusterName,
-              clusterImage: data.config.softwareConfig.imageVersion,
-              region: data.labels['goog-dataproc-location'],
-              zone: zoneUri[zoneUri.length - 1],
-              totalWorkersNode: data.config.workerConfig
-                ? data.config.workerConfig.numInstances
-                : 0,
-              schedulesDeletion: data.config.lifecycleConfig ? 'On' : 'Off',
-              actions: renderActions(data)
-            };
-          }
-        );
+      if (Array.isArray(formattedResponse)) {
+        transformClusterListData = formattedResponse.map((data: any) => {
+          const statusVal = statusValue(data);
+          // Extracting zone from zoneUri
+          // Example: "projects/{project}/zones/{zone}"
+
+          const zoneUri = data.config.gceClusterConfig.zoneUri.split('/');
+
+          return {
+            clusterUuid: data.clusterUuid,
+            status: statusVal,
+            clusterName: data.clusterName,
+            clusterImage: data.config.softwareConfig.imageVersion,
+            region: data.labels['goog-dataproc-location'],
+            zone: zoneUri[zoneUri.length - 1],
+            totalWorkersNode: data.config.workerConfig
+              ? data.config.workerConfig.numInstances
+              : 0,
+            schedulesDeletion: data.config.lifecycleConfig ? 'On' : 'Off',
+            actions: renderActions(data)
+          };
+        });
       }
       const existingClusterData = previousClustersList ?? [];
       //setStateAction never type issue
@@ -139,9 +122,9 @@
         setIsLoading(false);
         setLoggedIn(true);
       }
-      if (formattedResponse?.error?.code) {
+      if (formattedResponse?.error) {
         if (!toast.isActive('clusterListingError')) {
-          toast.error(formattedResponse?.error?.message, {
+          toast.error(formattedResponse?.error, {
             ...toastifyCustomStyle,
             toastId: 'clusterListingError'
           });
@@ -167,51 +150,35 @@
     setClusterInfo: (value: IClusterDetailsResponse) => void
   ) => {
     const credentials = await authApi();
-    const { DATAPROC } = await gcpServiceUrls;
     if (credentials) {
       setProjectName(credentials.project_id || '');
-      loggedFetch(
-        `${DATAPROC}/projects/${credentials.project_id}/regions/${credentials.region_id}/clusters/${clusterSelected}`,
-        {
-          method: 'GET',
-          headers: {
-            'Content-Type': API_HEADER_CONTENT_TYPE,
-            Authorization: API_HEADER_BEARER + credentials.access_token
-          }
-        }
-      )
-        .then((response: Response) => {
-          response
-            .json()
-            .then((responseResult: IClusterDetailsResponse) => {
-              if (responseResult.error && responseResult.error.code === 404) {
-                setErrorView(true);
-              }
-              if (responseResult?.error?.code) {
-                toast.error(
-                  responseResult?.error?.message,
-                  toastifyCustomStyle
-                );
-              }
-              setClusterInfo(responseResult);
-              setIsLoading(false);
-            })
-            .catch((e: Error) => {
-              console.log(e);
-              setIsLoading(false);
-            });
-        })
-        .catch((err: Error) => {
-          setIsLoading(false);
-          DataprocLoggingService.log(
-            'Error listing clusters Details',
-            LOG_LEVEL.ERROR
-          );
-          toast.error(
-            `Failed to fetch cluster details ${clusterSelected} : ${err}`,
-            toastifyCustomStyle
-          );
-        });
+
+      try {
+        const serviceURL = `clusterDetail?cluster=${clusterSelected}`;
+
+        let responseResult: any = await requestAPI(serviceURL);
+        if (responseResult) {
+          if (responseResult.error && responseResult.error.code === 404) {
+            setErrorView(true);
+          }
+          if (responseResult?.error?.code) {
+            toast.error(responseResult?.error?.message, toastifyCustomStyle);
+          }
+          setClusterInfo(responseResult);
+          setIsLoading(false);
+        }
+      } catch (err) {
+        setIsLoading(false);
+        console.error('Error listing clusters Details', err);
+        DataprocLoggingService.log(
+          'Error listing clusters Details',
+          LOG_LEVEL.ERROR
+        );
+        toast.error(
+          `Failed to fetch cluster details ${clusterSelected} : ${err}`,
+          toastifyCustomStyle
+        );
+      }
     }
   };
@@ -221,19 +188,15 @@
     timer: any
   ) => {
     try {
-      const response = await authenticatedFetch({
-        uri: `clusters/${selectedCluster}`,
-        method: HTTP_METHOD.GET,
-        regionIdentifier: 'regions'
-      });
-      const formattedResponse = await response.json();
+      const serviceURL = `clusterDetail?cluster=${selectedCluster}`;
+      let formattedResponse: any = await requestAPI(serviceURL);
 
       if (formattedResponse.status.state === ClusterStatus.STATUS_STOPPED) {
         ClusterService.startClusterApi(selectedCluster);
         clearInterval(timer.current);
       }
-      if (formattedResponse?.error?.code) {
-        toast.error(formattedResponse?.error?.message, toastifyCustomStyle);
+      if (formattedResponse?.error) {
+        toast.error(formattedResponse?.error, toastifyCustomStyle);
       }
       listClustersAPI();
     } catch (error) {
@@ -253,21 +216,19 @@
     statusApi: (value: string) => void
   ) => {
     setRestartEnabled(true);
     try {
-      const response = await authenticatedFetch({
-        uri: `clusters/${selectedCluster}:stop`,
-        method: HTTP_METHOD.POST,
-        regionIdentifier: 'regions'
+      const serviceURL = `stopCluster?cluster=${selectedCluster}`;
+
+      let formattedResponse: any = await requestAPI(serviceURL, {
+        method: 'POST'
       });
-      const formattedResponse = await response.json();
-      console.log(formattedResponse);
+      listClustersAPI();
       timer.current = setInterval(() => {
         statusApi(selectedCluster);
       }, POLLING_TIME_LIMIT);
-      if (formattedResponse?.error?.code) {
-        toast.error(formattedResponse?.error?.message, toastifyCustomStyle);
+      if (formattedResponse?.error) {
+        toast.error(formattedResponse?.error, toastifyCustomStyle);
       }
       // This is an artifact of the refactoring
       listClustersAPI();
@@ -283,43 +244,27 @@
   };
 
   static deleteClusterApi = async (selectedcluster: string) => {
-    const credentials = await authApi();
-    const { DATAPROC } = await gcpServiceUrls;
-    if (credentials) {
-      loggedFetch(
-        `${DATAPROC}/projects/${credentials.project_id}/regions/${credentials.region_id}/clusters/${selectedcluster}`,
-        {
-          method: 'DELETE',
-          headers: {
-            'Content-Type': API_HEADER_CONTENT_TYPE,
-            Authorization: API_HEADER_BEARER + credentials.access_token
-          }
-        }
-      )
-        .then((response: Response) => {
-          response
-            .json()
-            .then(async (responseResult: Response) => {
-              console.log(responseResult);
-              const formattedResponse = await responseResult.json();
-              if (formattedResponse?.error?.code) {
-                toast.error(
-                  formattedResponse?.error?.message,
-                  toastifyCustomStyle
-                );
-              } else {
-                toast.success(
-                  `Cluster ${selectedcluster} deleted successfully`,
-                  toastifyCustomStyle
-                );
-              }
-            })
-            .catch((e: Error) => console.log(e));
-        })
-        .catch((err: Error) => {
-          DataprocLoggingService.log('Error deleting cluster', LOG_LEVEL.ERROR);
-          toast.error(`Error deleting cluster : ${err}`, toastifyCustomStyle);
-        });
+    try {
+      const serviceURL = `deleteCluster?cluster=${selectedcluster}`;
+
+      let formattedResponse: any = await requestAPI(serviceURL, {
+        method: 'DELETE'
+      });
+
+      if (formattedResponse?.error) {
+        toast.error(formattedResponse?.error, toastifyCustomStyle);
+      } else {
+        toast.success(
+          `Cluster ${selectedcluster} deleted successfully`,
+          toastifyCustomStyle
+        );
+      }
+    } catch (error) {
+      DataprocLoggingService.log('Error deleting cluster', LOG_LEVEL.ERROR);
+      toast.error(
+        `Error deleting cluster ${selectedcluster} : ${error}`,
+        toastifyCustomStyle
+      );
     }
   };
@@ -327,44 +272,25 @@
     selectedcluster: string,
     operation: 'start' | 'stop'
   ) => {
-    const credentials = await authApi();
-    const { DATAPROC } = await gcpServiceUrls;
-    if (credentials) {
-      loggedFetch(
-        `${DATAPROC}/projects/${credentials.project_id}/regions/${credentials.region_id}/clusters/${selectedcluster}:${operation}`,
-        {
-          method: 'POST',
-          headers: {
-            'Content-Type': API_HEADER_CONTENT_TYPE,
-            Authorization: API_HEADER_BEARER + credentials.access_token
-          }
-        }
-      )
-        .then((response: Response) => {
-          response
-            .json()
-            .then(async (responseResult: Response) => {
-              console.log(responseResult);
-              const formattedResponse = await responseResult.json();
-              if (formattedResponse?.error?.code) {
-                toast.error(
-                  formattedResponse?.error?.message,
-                  toastifyCustomStyle
-                );
-              }
-            })
-            .catch((e: Error) => console.log(e));
-        })
-        .catch((err: Error) => {
-          DataprocLoggingService.log(
-            `Error ${operation} cluster`,
-            LOG_LEVEL.ERROR
-          );
-          toast.error(
-            `Failed to ${operation} the cluster ${selectedcluster} : ${err}`,
-            toastifyCustomStyle
-          );
-        });
+    try {
+      const serviceURL =
+        operation === 'stop'
+          ? `stopCluster?cluster=${selectedcluster}`
+          : `startCluster?cluster=${selectedcluster}`;
+
+      let formattedResponse: any = await requestAPI(serviceURL, {
+        method: 'POST'
+      });
+
+      if (formattedResponse?.error) {
+        toast.error(formattedResponse?.error, toastifyCustomStyle);
+      }
+    } catch (err) {
+      DataprocLoggingService.log(`Error ${operation} cluster`, LOG_LEVEL.ERROR);
+      toast.error(
+        `Failed to ${operation} the cluster ${selectedcluster} : ${err}`,
+        toastifyCustomStyle
+      );
     }
   };
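The frontend above keeps reading camelCase properties such as `data.clusterName` and `data.config.softwareConfig.imageVersion`. That still works because the backend serializes protos with `preserving_proto_field_name=False`, which makes proto-plus emit lowerCamelCase JSON names, matching what the old REST responses returned. A minimal demonstration:

```python
# Why camelCase property access still works on the frontend: proto-plus
# emits lowerCamelCase keys when preserving_proto_field_name=False.
import proto
from google.cloud import dataproc_v1 as dataproc

cluster = dataproc.Cluster(cluster_name="demo-cluster")
as_dict = proto.Message.to_dict(
    cluster,
    use_integers_for_enums=False,
    preserving_proto_field_name=False,
)
print(as_dict["clusterName"])  # -> "demo-cluster"
```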
diff --git a/src/scheduler/schedulerServices.tsx b/src/scheduler/schedulerServices.tsx
index 2116edc8..12f1f228 100644
--- a/src/scheduler/schedulerServices.tsx
+++ b/src/scheduler/schedulerServices.tsx
@@ -102,8 +102,8 @@
       const formattedResponse: any = await requestAPI(serviceURL);
       let transformClusterListData = [];
-      if (formattedResponse && formattedResponse.clusters) {
-        transformClusterListData = formattedResponse.clusters.map(
+      if (Array.isArray(formattedResponse)) {
+        transformClusterListData = formattedResponse.map(
           (data: IClusterAPIResponse) => {
             return {
               clusterName: data.clusterName
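For manual verification outside Jupyter, the refactored service client can be driven directly. A rough sketch under stated assumptions — every credential value below is a placeholder, and the dict shape mirrors what `credentials.get_cached()` supplies per the constructor's validation:

```python
# Hypothetical driver, not part of the PR; all credential values are
# placeholders. client_session is omitted because the gRPC-backed methods
# (get_cluster_detail, stop/start/delete_cluster) do not use it.
import asyncio
import logging

from dataproc_jupyter_plugin.services import dataproc


async def main():
    creds = {
        "access_token": "<oauth2-access-token>",
        "project_id": "<project-id>",
        "region_id": "us-central1",
    }
    client = dataproc.Client(
        creds, logging.getLogger(__name__), "https://dataproc.googleapis.com"
    )
    print(await client.get_cluster_detail("<cluster-name>"))


asyncio.run(main())
```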