From ce8c1edf56fe8122034293279bdef0bd45ad3226 Mon Sep 17 00:00:00 2001
From: Christian Vogt
Date: Fri, 20 Oct 2023 16:04:05 -0400
Subject: [PATCH] handle kserve in global model serving page

---
 frontend/src/k8sTypes.ts                      |  5 +++
 .../modelServing/__tests__/utils.spec.ts      | 37 +++++++++++++++++++
 .../global/DeleteInferenceServiceModal.tsx    | 26 +++++++++----
 .../screens/global/InferenceServiceModel.tsx  | 36 ------------------
 .../global/InferenceServiceServingRuntime.tsx | 13 +++++++
 .../screens/global/InferenceServiceTable.tsx  | 27 ++++++++++----
 .../global/InferenceServiceTableRow.tsx       | 29 +++++++++++----
 .../screens/global/ModelServingGlobal.tsx     |  2 +-
 .../InferenceServiceServingRuntime.spec.tsx   | 17 +++++++++
 .../pages/modelServing/screens/global/data.ts |  8 ++--
 frontend/src/pages/modelServing/utils.ts      |  4 ++
 11 files changed, 141 insertions(+), 63 deletions(-)
 delete mode 100644 frontend/src/pages/modelServing/screens/global/InferenceServiceModel.tsx
 create mode 100644 frontend/src/pages/modelServing/screens/global/InferenceServiceServingRuntime.tsx
 create mode 100644 frontend/src/pages/modelServing/screens/global/__tests__/InferenceServiceServingRuntime.spec.tsx

diff --git a/frontend/src/k8sTypes.ts b/frontend/src/k8sTypes.ts
index f717de949c..9743e4b244 100644
--- a/frontend/src/k8sTypes.ts
+++ b/frontend/src/k8sTypes.ts
@@ -100,6 +100,7 @@ export type ServingRuntimeAnnotations = Partial<{
   'opendatahub.io/accelerator-name': string;
   'enable-route': string;
   'enable-auth': string;
+  'modelmesh-enabled': 'true' | 'false';
 }>;

 export type BuildConfigKind = K8sResourceCommon & {
@@ -356,6 +357,10 @@ export type InferenceServiceKind = K8sResourceCommon & {
   metadata: {
     name: string;
     namespace: string;
+    annotations?: DisplayNameAnnotations &
+      Partial<{
+        'serving.kserve.io/deploymentMode': 'ModelMesh';
+      }>;
   };
   spec: {
     predictor: {
diff --git a/frontend/src/pages/modelServing/__tests__/utils.spec.ts b/frontend/src/pages/modelServing/__tests__/utils.spec.ts
index c6ac177ce3..e799250b2b 100644
--- a/frontend/src/pages/modelServing/__tests__/utils.spec.ts
+++ b/frontend/src/pages/modelServing/__tests__/utils.spec.ts
@@ -1,3 +1,9 @@
+import {
+  mockServingRuntimeK8sResource,
+  mockServingRuntimeK8sResourceLegacy,
+} from '~/__mocks__/mockServingRuntimeK8sResource';
+import { ServingRuntimeKind } from '~/k8sTypes';
+import { getDisplayNameFromServingRuntimeTemplate } from '~/pages/modelServing/customServingRuntimes/utils';
 import { resourcesArePositive } from '~/pages/modelServing/utils';
 import { ContainerResources } from '~/types';

@@ -50,3 +56,34 @@ describe('resourcesArePositive', () => {
     expect(resourcesArePositive(resources)).toBe(true);
   });
 });
+
+describe('getDisplayNameFromServingRuntimeTemplate', () => {
+  it('should provide default name if not found', () => {
+    const servingRuntime = getDisplayNameFromServingRuntimeTemplate({
+      metadata: {},
+      spec: {},
+    } as ServingRuntimeKind);
+    expect(servingRuntime).toBe('Unknown Serving Runtime');
+  });
+
+  it('should prioritize name from annotation "opendatahub.io/template-display-name"', () => {
+    const servingRuntime = getDisplayNameFromServingRuntimeTemplate(
+      mockServingRuntimeK8sResource({}),
+    );
+    expect(servingRuntime).toBe('OpenVINO Serving Runtime (Supports GPUs)');
+  });
+
+  it('should fallback first to name from annotation "opendatahub.io/template-name"', () => {
+    const mockServingRuntime = mockServingRuntimeK8sResource({});
+    delete mockServingRuntime.metadata.annotations?.['opendatahub.io/template-display-name'];
+    const servingRuntime = getDisplayNameFromServingRuntimeTemplate(mockServingRuntime);
+    expect(servingRuntime).toBe('ovms');
+  });
+
+  it('should fallback to ovms serverType', () => {
+    const servingRuntime = getDisplayNameFromServingRuntimeTemplate(
+      mockServingRuntimeK8sResourceLegacy({}),
+    );
+    expect(servingRuntime).toBe('OpenVINO Model Server');
+  });
+});
diff --git a/frontend/src/pages/modelServing/screens/global/DeleteInferenceServiceModal.tsx b/frontend/src/pages/modelServing/screens/global/DeleteInferenceServiceModal.tsx
index e8d18b45cb..12cd6c2608 100644
--- a/frontend/src/pages/modelServing/screens/global/DeleteInferenceServiceModal.tsx
+++ b/frontend/src/pages/modelServing/screens/global/DeleteInferenceServiceModal.tsx
@@ -1,17 +1,21 @@
 import * as React from 'react';
 import DeleteModal from '~/pages/projects/components/DeleteModal';
-import { InferenceServiceKind } from '~/k8sTypes';
-import { deleteInferenceService } from '~/api';
+import { InferenceServiceKind, ServingRuntimeKind } from '~/k8sTypes';
+import { deleteInferenceService, deleteServingRuntime } from '~/api';
 import { getInferenceServiceDisplayName } from './utils';

 type DeleteInferenceServiceModalProps = {
   inferenceService?: InferenceServiceKind;
+  servingRuntime?: ServingRuntimeKind;
   onClose: (deleted: boolean) => void;
+  isOpen?: boolean;
 };

 const DeleteInferenceServiceModal: React.FC<DeleteInferenceServiceModalProps> = ({
   inferenceService,
+  servingRuntime,
   onClose,
+  isOpen = false,
 }) => {
   const [isDeleting, setIsDeleting] = React.useState(false);
   const [error, setError] = React.useState<Error | undefined>();
@@ -29,16 +33,24 @@ const DeleteInferenceServiceModal: React.FC<DeleteInferenceServiceModalProps> =
   return (
     <DeleteModal
       title="Delete deployed model?"
-      isOpen={!!inferenceService}
+      isOpen={isOpen}
       onClose={() => onBeforeClose(false)}
       submitButtonLabel="Delete deployed model"
       onDelete={() => {
         if (inferenceService) {
           setIsDeleting(true);
-          deleteInferenceService(
-            inferenceService.metadata.name,
-            inferenceService.metadata.namespace,
-          )
+          Promise.all([
+            deleteInferenceService(
+              inferenceService.metadata.name,
+              inferenceService.metadata.namespace,
+            ),
+            servingRuntime
+              ? deleteServingRuntime(
+                  servingRuntime.metadata.name,
+                  servingRuntime.metadata.namespace,
+                )
+              : undefined,
+          ])
             .then(() => {
               onBeforeClose(true);
             })
diff --git a/frontend/src/pages/modelServing/screens/global/InferenceServiceModel.tsx b/frontend/src/pages/modelServing/screens/global/InferenceServiceModel.tsx
deleted file mode 100644
index ff7f73286b..0000000000
--- a/frontend/src/pages/modelServing/screens/global/InferenceServiceModel.tsx
+++ /dev/null
@@ -1,36 +0,0 @@
-import * as React from 'react';
-import { HelperText, HelperTextItem, Skeleton } from '@patternfly/react-core';
-import { InferenceServiceKind } from '~/k8sTypes';
-import { getDisplayNameFromK8sResource } from '~/pages/projects/utils';
-import { ModelServingContext } from '~/pages/modelServing/ModelServingContext';
-
-type InferenceServiceModelProps = {
-  inferenceService: InferenceServiceKind;
-};
-
-const InferenceServiceModel: React.FC<InferenceServiceModelProps> = ({ inferenceService }) => {
-  const {
-    servingRuntimes: { data: servingRuntimes, loaded, error },
-  } = React.useContext(ModelServingContext);
-  const servingRuntime = servingRuntimes.find(
-    ({ metadata: { name } }) => name === inferenceService.spec.predictor.model.runtime,
-  );
-
-  if (!loaded) {
-    return <Skeleton />;
-  }
-
-  if (error) {
-    return (
-      <HelperText>
-        <HelperTextItem>
-          Failed to get model server for this deployed model. {error.message}.
-        </HelperTextItem>
-      </HelperText>
-    );
-  }
-
-  return <>{servingRuntime ? getDisplayNameFromK8sResource(servingRuntime) : 'Unknown'}</>;
-};
-
-export default InferenceServiceModel;
diff --git a/frontend/src/pages/modelServing/screens/global/InferenceServiceServingRuntime.tsx b/frontend/src/pages/modelServing/screens/global/InferenceServiceServingRuntime.tsx
new file mode 100644
index 0000000000..2365eae69e
--- /dev/null
+++ b/frontend/src/pages/modelServing/screens/global/InferenceServiceServingRuntime.tsx
@@ -0,0 +1,13 @@
+import * as React from 'react';
+import { ServingRuntimeKind } from '~/k8sTypes';
+import { getDisplayNameFromServingRuntimeTemplate } from '~/pages/modelServing/customServingRuntimes/utils';
+
+type Props = {
+  servingRuntime?: ServingRuntimeKind;
+};
+
+const InferenceServiceServingRuntime: React.FC<Props> = ({ servingRuntime }) => (
+  <>{servingRuntime ? getDisplayNameFromServingRuntimeTemplate(servingRuntime) : 'Unknown'}</>
+);
+
+export default InferenceServiceServingRuntime;
diff --git a/frontend/src/pages/modelServing/screens/global/InferenceServiceTable.tsx b/frontend/src/pages/modelServing/screens/global/InferenceServiceTable.tsx
index e6ec893a3f..f835d44733 100644
--- a/frontend/src/pages/modelServing/screens/global/InferenceServiceTable.tsx
+++ b/frontend/src/pages/modelServing/screens/global/InferenceServiceTable.tsx
@@ -24,9 +24,14 @@ const InferenceServiceTable: React.FC<InferenceServiceTableProps> = ({
   toolbarContent,
 }) => {
   const { modelServingProjects: projects } = React.useContext(ProjectsContext);
-  const [deleteInferenceService, setDeleteInferenceService] =
-    React.useState<InferenceServiceKind>();
-  const [editInferenceService, setEditInferenceService] = React.useState<InferenceServiceKind>();
+  const [deleteInferenceService, setDeleteInferenceService] = React.useState<{
+    inferenceService: InferenceServiceKind;
+    servingRuntime?: ServingRuntimeKind;
+  }>();
+  const [editInferenceService, setEditInferenceService] = React.useState<{
+    inferenceService: InferenceServiceKind;
+    servingRuntime?: ServingRuntimeKind;
+  }>();
   const isGlobal = !!clearFilters;
   const mappedColumns = isGlobal
     ? getGlobalInferenceServiceColumns(projects)
@@ -57,13 +62,19 @@ const InferenceServiceTable: React.FC<InferenceServiceTableProps> = ({
             (sr) => sr.metadata.name === is.spec.predictor.model.runtime,
           )}
           isGlobal={isGlobal}
-          onDeleteInferenceService={setDeleteInferenceService}
-          onEditInferenceService={setEditInferenceService}
+          onDeleteInferenceService={(inferenceService, servingRuntime) =>
+            setDeleteInferenceService({ inferenceService, servingRuntime })
+          }
+          onEditInferenceService={(inferenceService, servingRuntime) =>
+            setEditInferenceService({ inferenceService, servingRuntime })
+          }
         />
       )}
     />
     <DeleteInferenceServiceModal
-      inferenceService={deleteInferenceService}
+      isOpen={!!deleteInferenceService}
+      inferenceService={deleteInferenceService?.inferenceService}
+      servingRuntime={deleteInferenceService?.servingRuntime}
       onClose={(deleted) => {
         if (deleted) {
           refresh();
@@ -72,8 +83,8 @@ const InferenceServiceTable: React.FC<InferenceServiceTableProps> = ({
       }}
     />
     <ManageInferenceServiceModal
       isOpen={!!editInferenceService}
-      editInfo={editInferenceService}
+      editInfo={editInferenceService?.inferenceService}
       onClose={(edited) => {
         if (edited) {
           refresh();
diff --git a/frontend/src/pages/modelServing/screens/global/InferenceServiceTableRow.tsx b/frontend/src/pages/modelServing/screens/global/InferenceServiceTableRow.tsx
index 705e4152da..d702517675 100644
--- a/frontend/src/pages/modelServing/screens/global/InferenceServiceTableRow.tsx
+++ b/frontend/src/pages/modelServing/screens/global/InferenceServiceTableRow.tsx
@@ -3,20 +3,27 @@ import { DropdownDirection } from '@patternfly/react-core';
 import { ActionsColumn, Td, Tr } from '@patternfly/react-table';
 import { Link } from 'react-router-dom';
 import ResourceNameTooltip from '~/components/ResourceNameTooltip';
+import { isModelMesh } from '~/pages/modelServing/utils';
 import useModelMetricsEnabled from '~/pages/modelServing/useModelMetricsEnabled';
 import { InferenceServiceKind, ServingRuntimeKind } from '~/k8sTypes';
 import { getInferenceServiceDisplayName } from './utils';
 import InferenceServiceEndpoint from './InferenceServiceEndpoint';
 import InferenceServiceProject from './InferenceServiceProject';
-import InferenceServiceModel from './InferenceServiceModel';
 import InferenceServiceStatus from './InferenceServiceStatus';
+import InferenceServiceServingRuntime from './InferenceServiceServingRuntime';

 type InferenceServiceTableRowProps = {
   obj: InferenceServiceKind;
   isGlobal: boolean;
   servingRuntime?: ServingRuntimeKind;
-  onDeleteInferenceService: (obj: InferenceServiceKind) => void;
-  onEditInferenceService: (obj: InferenceServiceKind) => void;
+  onDeleteInferenceService: (
+    inferenceService: InferenceServiceKind,
+    servingRuntime?: ServingRuntimeKind,
+  ) => void;
+  onEditInferenceService: (
+    inferenceService: InferenceServiceKind,
+    servingRuntime?: ServingRuntimeKind,
+  ) => void;
 };

 const InferenceServiceTableRow: React.FC<InferenceServiceTableRowProps> = ({
@@ -53,8 +60,8 @@ const InferenceServiceTableRow: React.FC<InferenceServiceTableRowProps> = ({
         </Td>
       )}
       {isGlobal && (
-        <Td dataLabel="Model server">
-          <InferenceServiceModel inferenceService={inferenceService} />
+        <Td dataLabel="Serving runtime">
+          <InferenceServiceServingRuntime servingRuntime={servingRuntime} />
         </Td>
       )}
       <Td dataLabel="Inference endpoint">
@@ -71,15 +78,23 @@ const InferenceServiceTableRow: React.FC<InferenceServiceTableRowProps> = ({
           dropdownDirection={isGlobal ? DropdownDirection.down : DropdownDirection.up}
           items={[
             {
+              // TODO re-enable edit when supported
+              isDisabled: !isModelMesh(inferenceService),
               title: 'Edit',
               onClick: () => {
-                onEditInferenceService(inferenceService);
+                onEditInferenceService(
+                  inferenceService,
+                  !isModelMesh(inferenceService) ? servingRuntime : undefined,
+                );
               },
             },
             {
               title: 'Delete',
               onClick: () => {
-                onDeleteInferenceService(inferenceService);
+                onDeleteInferenceService(
+                  inferenceService,
+                  !isModelMesh(inferenceService) ? servingRuntime : undefined,
+                );
               },
             },
           ]}
diff --git a/frontend/src/pages/modelServing/screens/global/ModelServingGlobal.tsx b/frontend/src/pages/modelServing/screens/global/ModelServingGlobal.tsx
index 6d1d1fe044..abd7911b46 100644
--- a/frontend/src/pages/modelServing/screens/global/ModelServingGlobal.tsx
+++ b/frontend/src/pages/modelServing/screens/global/ModelServingGlobal.tsx
@@ -12,7 +12,7 @@ const ModelServingGlobal: React.FC = () => {
   return (
diff --git a/frontend/src/pages/modelServing/screens/global/__tests__/InferenceServiceServingRuntime.spec.tsx b/frontend/src/pages/modelServing/screens/global/__tests__/InferenceServiceServingRuntime.spec.tsx
new file mode 100644
--- /dev/null
+++ b/frontend/src/pages/modelServing/screens/global/__tests__/InferenceServiceServingRuntime.spec.tsx
@@ -0,0 +1,17 @@
+import * as React from 'react';
+import { render } from '@testing-library/react';
+import { mockServingRuntimeK8sResource } from '~/__mocks__/mockServingRuntimeK8sResource';
+import InferenceServiceServingRuntime from '~/pages/modelServing/screens/global/InferenceServiceServingRuntime';
+
+describe('InferenceServiceServingRuntime', () => {
+  it('should handle undefined serving runtime', () => {
+    const wrapper = render(<InferenceServiceServingRuntime />);
+    expect(wrapper.container.textContent).toBe('Unknown');
+  });
+
+  it('should display serving runtime name', () => {
+    const mockServingRuntime = mockServingRuntimeK8sResource({});
+    const wrapper = render(<InferenceServiceServingRuntime servingRuntime={mockServingRuntime} />);
+    expect(wrapper.container.textContent).toBe('OpenVINO Serving Runtime (Supports GPUs)');
+  });
+});
diff --git a/frontend/src/pages/modelServing/screens/global/data.ts b/frontend/src/pages/modelServing/screens/global/data.ts
index 50a9ec2ca0..7a4b6e3690 100644
--- a/frontend/src/pages/modelServing/screens/global/data.ts
+++ b/frontend/src/pages/modelServing/screens/global/data.ts
@@ -39,9 +39,9 @@ const COL_ENDPOINT: SortableData<InferenceServiceKind> = {
   sortable: false,
 };

-const COL_MODEL_SERVER: SortableData<InferenceServiceKind> = {
-  field: 'model',
-  label: 'Model server',
+const COL_SERVING_RUNTIME: SortableData<InferenceServiceKind> = {
+  field: 'servingRuntime',
+  label: 'Serving runtime',
   width: 20,
   sortable: false,
 };
@@ -62,7 +62,7 @@ export const getGlobalInferenceServiceColumns = (
 ): SortableData<InferenceServiceKind>[] => [
   COL_NAME,
   buildProjectCol(projects),
-  COL_MODEL_SERVER,
+  COL_SERVING_RUNTIME,
   COL_ENDPOINT,
   COL_STATUS,
   COL_KEBAB,
diff --git a/frontend/src/pages/modelServing/utils.ts b/frontend/src/pages/modelServing/utils.ts
index 4da138966a..05e6932654 100644
--- a/frontend/src/pages/modelServing/utils.ts
+++ b/frontend/src/pages/modelServing/utils.ts
@@ -22,6 +22,7 @@ import {
   K8sAPIOptions,
   RoleBindingKind,
   ServingRuntimeKind,
+  InferenceServiceKind,
 } from '~/k8sTypes';
 import { ContainerResources } from '~/types';
 import { getDisplayNameFromK8sResource, translateDisplayNameForK8s } from '~/pages/projects/utils';
@@ -211,3 +212,6 @@ export const isModelServerEditInfoChanged = (
         createData.tokens.map((token) => token.name).sort(),
       ))
     : true;
+
+export const isModelMesh = (inferenceService: InferenceServiceKind) =>
+  inferenceService.metadata.annotations?.['serving.kserve.io/deploymentMode'] === 'ModelMesh';