Skip to content

Commit

Permalink
handle kserve in global model serving page
Browse files Browse the repository at this point in the history
  • Loading branch information
christianvogt committed Oct 23, 2023
1 parent 5c15171 commit ce8c1ed
Show file tree
Hide file tree
Showing 11 changed files with 141 additions and 63 deletions.
5 changes: 5 additions & 0 deletions frontend/src/k8sTypes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,7 @@ export type ServingRuntimeAnnotations = Partial<{
'opendatahub.io/accelerator-name': string;
'enable-route': string;
'enable-auth': string;
'modelmesh-enabled': 'true' | 'false';
}>;

export type BuildConfigKind = K8sResourceCommon & {
Expand Down Expand Up @@ -356,6 +357,10 @@ export type InferenceServiceKind = K8sResourceCommon & {
metadata: {
name: string;
namespace: string;
annotations?: DisplayNameAnnotations &
Partial<{
'serving.kserve.io/deploymentMode': 'ModelMesh';
}>;
};
spec: {
predictor: {
Expand Down
37 changes: 37 additions & 0 deletions frontend/src/pages/modelServing/__tests__/utils.spec.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,9 @@
import {
mockServingRuntimeK8sResource,
mockServingRuntimeK8sResourceLegacy,
} from '~/__mocks__/mockServingRuntimeK8sResource';
import { ServingRuntimeKind } from '~/k8sTypes';
import { getDisplayNameFromServingRuntimeTemplate } from '~/pages/modelServing/customServingRuntimes/utils';
import { resourcesArePositive } from '~/pages/modelServing/utils';
import { ContainerResources } from '~/types';

Expand Down Expand Up @@ -50,3 +56,34 @@ describe('resourcesArePositive', () => {
expect(resourcesArePositive(resources)).toBe(true);
});
});

// Tests for getDisplayNameFromServingRuntimeTemplate, which resolves a
// human-readable serving-runtime name from template annotations with a
// chain of fallbacks (display-name annotation -> template-name annotation
// -> legacy ovms serverType -> fixed placeholder).
describe('getDisplayNameFromServingRuntimeTemplate', () => {
  it('should provide default name if not found', () => {
    // A ServingRuntime with no annotations/labels exercises the final
    // fallback: a fixed placeholder string.
    const servingRuntime = getDisplayNameFromServingRuntimeTemplate({
      metadata: {},
      spec: {},
    } as ServingRuntimeKind);
    expect(servingRuntime).toBe('Unknown Serving Runtime');
  });

  it('should prioritize name from annotation "opendatahub.io/template-display-name"', () => {
    // The default mock carries the display-name annotation, which should
    // win over every other source.
    const servingRuntime = getDisplayNameFromServingRuntimeTemplate(
      mockServingRuntimeK8sResource({}),
    );
    expect(servingRuntime).toBe('OpenVINO Serving Runtime (Supports GPUs)');
  });

  it('should fallback first to name from annotation "opendatahub.io/template-name"', () => {
    // Remove the display-name annotation so the helper must fall back to
    // the template-name annotation ('ovms' in the mock).
    const mockServingRuntime = mockServingRuntimeK8sResource({});
    delete mockServingRuntime.metadata.annotations?.['opendatahub.io/template-display-name'];
    const servingRuntime = getDisplayNameFromServingRuntimeTemplate(mockServingRuntime);
    expect(servingRuntime).toBe('ovms');
  });

  it('should fallback to ovms serverType', () => {
    // Legacy resources predate the template annotations; the helper should
    // derive the name from the ovms serverType instead.
    const servingRuntime = getDisplayNameFromServingRuntimeTemplate(
      mockServingRuntimeK8sResourceLegacy({}),
    );
    expect(servingRuntime).toBe('OpenVINO Model Server');
  });
});
Original file line number Diff line number Diff line change
@@ -1,17 +1,21 @@
import * as React from 'react';
import DeleteModal from '~/pages/projects/components/DeleteModal';
import { InferenceServiceKind } from '~/k8sTypes';
import { deleteInferenceService } from '~/api';
import { InferenceServiceKind, ServingRuntimeKind } from '~/k8sTypes';
import { deleteInferenceService, deleteServingRuntime } from '~/api';
import { getInferenceServiceDisplayName } from './utils';

type DeleteInferenceServiceModalProps = {
inferenceService?: InferenceServiceKind;
servingRuntime?: ServingRuntimeKind;
onClose: (deleted: boolean) => void;
isOpen?: boolean;
};

const DeleteInferenceServiceModal: React.FC<DeleteInferenceServiceModalProps> = ({
inferenceService,
servingRuntime,
onClose,
isOpen = false,
}) => {
const [isDeleting, setIsDeleting] = React.useState(false);
const [error, setError] = React.useState<Error | undefined>();
Expand All @@ -29,16 +33,24 @@ const DeleteInferenceServiceModal: React.FC<DeleteInferenceServiceModalProps> =
return (
<DeleteModal
title="Delete deployed model?"
isOpen={!!inferenceService}
isOpen={isOpen}
onClose={() => onBeforeClose(false)}
submitButtonLabel="Delete deployed model"
onDelete={() => {
if (inferenceService) {
setIsDeleting(true);
deleteInferenceService(
inferenceService.metadata.name,
inferenceService.metadata.namespace,
)
Promise.all([
deleteInferenceService(
inferenceService.metadata.name,
inferenceService.metadata.namespace,
),
servingRuntime
? deleteServingRuntime(
servingRuntime.metadata.name,
servingRuntime.metadata.namespace,
)
: undefined,
])
.then(() => {
onBeforeClose(true);
})
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import * as React from 'react';
import { ServingRuntimeKind } from '~/k8sTypes';
import { getDisplayNameFromServingRuntimeTemplate } from '~/pages/modelServing/customServingRuntimes/utils';

type Props = {
  servingRuntime?: ServingRuntimeKind;
};

/**
 * Renders the display name of the serving runtime backing an inference
 * service. Falls back to the literal text 'Unknown' when no runtime is
 * supplied (e.g. the runtime referenced by the service was not found).
 */
const InferenceServiceServingRuntime: React.FC<Props> = ({ servingRuntime }) => {
  if (!servingRuntime) {
    return <>Unknown</>;
  }
  return <>{getDisplayNameFromServingRuntimeTemplate(servingRuntime)}</>;
};

export default InferenceServiceServingRuntime;
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,14 @@ const InferenceServiceTable: React.FC<InferenceServiceTableProps> = ({
toolbarContent,
}) => {
const { modelServingProjects: projects } = React.useContext(ProjectsContext);
const [deleteInferenceService, setDeleteInferenceService] =
React.useState<InferenceServiceKind>();
const [editInferenceService, setEditInferenceService] = React.useState<InferenceServiceKind>();
const [deleteInferenceService, setDeleteInferenceService] = React.useState<{
inferenceService: InferenceServiceKind;
servingRuntime?: ServingRuntimeKind;
}>();
const [editInferenceService, setEditInferenceService] = React.useState<{
inferenceService: InferenceServiceKind;
servingRuntime?: ServingRuntimeKind;
}>();
const isGlobal = !!clearFilters;
const mappedColumns = isGlobal
? getGlobalInferenceServiceColumns(projects)
Expand Down Expand Up @@ -57,13 +62,19 @@ const InferenceServiceTable: React.FC<InferenceServiceTableProps> = ({
(sr) => sr.metadata.name === is.spec.predictor.model.runtime,
)}
isGlobal={isGlobal}
onDeleteInferenceService={setDeleteInferenceService}
onEditInferenceService={setEditInferenceService}
onDeleteInferenceService={(inferenceService, servingRuntime) =>
setDeleteInferenceService({ inferenceService, servingRuntime })
}
onEditInferenceService={(inferenceService, servingRuntime) =>
setEditInferenceService({ inferenceService, servingRuntime })
}
/>
)}
/>
<DeleteInferenceServiceModal
inferenceService={deleteInferenceService}
isOpen={!!deleteInferenceService?.inferenceService}
inferenceService={deleteInferenceService?.inferenceService}
servingRuntime={deleteInferenceService?.servingRuntime}
onClose={(deleted) => {
if (deleted) {
refresh();
Expand All @@ -72,8 +83,8 @@ const InferenceServiceTable: React.FC<InferenceServiceTableProps> = ({
}}
/>
<ManageInferenceServiceModal
isOpen={editInferenceService !== undefined}
editInfo={editInferenceService}
isOpen={!!editInferenceService?.inferenceService}
editInfo={editInferenceService?.inferenceService}
onClose={(edited) => {
if (edited) {
refresh();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,20 +3,27 @@ import { DropdownDirection } from '@patternfly/react-core';
import { ActionsColumn, Td, Tr } from '@patternfly/react-table';
import { Link } from 'react-router-dom';
import ResourceNameTooltip from '~/components/ResourceNameTooltip';
import { isModelMesh } from '~/pages/modelServing/utils';
import useModelMetricsEnabled from '~/pages/modelServing/useModelMetricsEnabled';
import { InferenceServiceKind, ServingRuntimeKind } from '~/k8sTypes';
import { getInferenceServiceDisplayName } from './utils';
import InferenceServiceEndpoint from './InferenceServiceEndpoint';
import InferenceServiceProject from './InferenceServiceProject';
import InferenceServiceModel from './InferenceServiceModel';
import InferenceServiceStatus from './InferenceServiceStatus';
import InferenceServiceServingRuntime from './InferenceServiceServingRuntime';

type InferenceServiceTableRowProps = {
obj: InferenceServiceKind;
isGlobal: boolean;
servingRuntime?: ServingRuntimeKind;
onDeleteInferenceService: (obj: InferenceServiceKind) => void;
onEditInferenceService: (obj: InferenceServiceKind) => void;
onDeleteInferenceService: (
inferenceService: InferenceServiceKind,
servingRuntime?: ServingRuntimeKind,
) => void;
onEditInferenceService: (
inferenceService: InferenceServiceKind,
servingRuntime?: ServingRuntimeKind,
) => void;
};

const InferenceServiceTableRow: React.FC<InferenceServiceTableRowProps> = ({
Expand Down Expand Up @@ -53,8 +60,8 @@ const InferenceServiceTableRow: React.FC<InferenceServiceTableRowProps> = ({
</Td>
)}
{isGlobal && (
<Td dataLabel="Model server">
<InferenceServiceModel inferenceService={inferenceService} />
<Td dataLabel="Serving Runtime">
<InferenceServiceServingRuntime servingRuntime={servingRuntime} />
</Td>
)}
<Td dataLabel="Inference endpoint">
Expand All @@ -71,15 +78,23 @@ const InferenceServiceTableRow: React.FC<InferenceServiceTableRowProps> = ({
dropdownDirection={isGlobal ? DropdownDirection.down : DropdownDirection.up}
items={[
{
// TODO re-enable edit when supported
isDisabled: !isModelMesh(inferenceService),
title: 'Edit',
onClick: () => {
onEditInferenceService(inferenceService);
onEditInferenceService(
inferenceService,
!isModelMesh(inferenceService) ? servingRuntime : undefined,
);
},
},
{
title: 'Delete',
onClick: () => {
onDeleteInferenceService(inferenceService);
onDeleteInferenceService(
inferenceService,
!isModelMesh(inferenceService) ? servingRuntime : undefined,
);
},
},
]}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ const ModelServingGlobal: React.FC = () => {

return (
<ApplicationsPage
title="Model serving"
title="Deployed models"
description="Manage and view the health and performance of your deployed models."
loaded
empty={servingRuntimes.length === 0 || inferenceServices.length === 0}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import * as React from 'react';
import { render } from '@testing-library/react';
import InferenceServiceServingRuntime from '~/pages/modelServing/screens/global/InferenceServiceServingRuntime';
import { mockServingRuntimeK8sResource } from '~/__mocks__/mockServingRuntimeK8sResource';

// Render tests for the InferenceServiceServingRuntime display component.
describe('InferenceServiceServingRuntime', () => {
  it('should handle undefined serving runtime', () => {
    // No servingRuntime prop -> the component renders the 'Unknown' fallback.
    const wrapper = render(<InferenceServiceServingRuntime />);
    expect(wrapper.container.textContent).toBe('Unknown');
  });

  it('should display serving runtime name', () => {
    // With a runtime supplied, the component shows the resolved display name
    // (taken from the mock's template-display-name annotation).
    const mockServingRuntime = mockServingRuntimeK8sResource({});
    const wrapper = render(<InferenceServiceServingRuntime servingRuntime={mockServingRuntime} />);
    expect(wrapper.container.textContent).toBe('OpenVINO Serving Runtime (Supports GPUs)');
  });
});
8 changes: 4 additions & 4 deletions frontend/src/pages/modelServing/screens/global/data.ts
Original file line number Diff line number Diff line change
Expand Up @@ -39,9 +39,9 @@ const COL_ENDPOINT: SortableData<InferenceServiceKind> = {
sortable: false,
};

const COL_MODEL_SERVER: SortableData<InferenceServiceKind> = {
field: 'model',
label: 'Model server',
const COL_SERVING_RUNTIME: SortableData<InferenceServiceKind> = {
field: 'servingRuntime',
label: 'Serving runtime',
width: 20,
sortable: false,
};
Expand All @@ -62,7 +62,7 @@ export const getGlobalInferenceServiceColumns = (
): SortableData<InferenceServiceKind>[] => [
COL_NAME,
buildProjectCol(projects),
COL_MODEL_SERVER,
COL_SERVING_RUNTIME,
COL_ENDPOINT,
COL_STATUS,
COL_KEBAB,
Expand Down
4 changes: 4 additions & 0 deletions frontend/src/pages/modelServing/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ import {
K8sAPIOptions,
RoleBindingKind,
ServingRuntimeKind,
InferenceServiceKind,
} from '~/k8sTypes';
import { ContainerResources } from '~/types';
import { getDisplayNameFromK8sResource, translateDisplayNameForK8s } from '~/pages/projects/utils';
Expand Down Expand Up @@ -211,3 +212,6 @@ export const isModelServerEditInfoChanged = (
createData.tokens.map((token) => token.name).sort(),
))
: true;

/**
 * Whether an InferenceService is deployed through ModelMesh (as opposed to
 * KServe), determined by its 'serving.kserve.io/deploymentMode' annotation.
 */
export const isModelMesh = (inferenceService: InferenceServiceKind): boolean => {
  const deploymentMode =
    inferenceService.metadata.annotations?.['serving.kserve.io/deploymentMode'];
  return deploymentMode === 'ModelMesh';
};

0 comments on commit ce8c1ed

Please sign in to comment.