Skip to content

Commit

Permalink
Improve tests
Browse files Browse the repository at this point in the history
  • Loading branch information
alexcreasy committed Jun 12, 2024
1 parent 8e5a181 commit 95c5fdd
Show file tree
Hide file tree
Showing 4 changed files with 111 additions and 36 deletions.
65 changes: 42 additions & 23 deletions frontend/src/__mocks__/mockKserveMetricsConfigMap.ts
Original file line number Diff line number Diff line change
@@ -1,33 +1,14 @@
// import { ConfigMapKind } from '~/k8sTypes';
//
// type MockConfigMapType = {
// data?: Record<string, string>;
// namespace?: string;
// };
// export const mockConfigMap = ({
// data = { key: 'value' },
// namespace = 'test-project',
// }: MockConfigMapType): ConfigMapKind => ({
// kind: 'ConfigMap',
// apiVersion: 'v1',
// metadata: {
// name: 'config-test',
// labels: { 'opendatahub.io/dashboard': 'true' },
// namespace,
// },
// data,
// });

import { ConfigMapKind } from '~/k8sTypes';
import { mockConfigMap } from '~/__mocks__/mockConfigMap';

/**
 * Options accepted by `mockKserveMetricsConfigMap`.
 *
 * All fields are optional; the mock supplies sensible defaults so tests only
 * override what they care about.
 */
type MockKserveMetricsConfigMapType = {
  /** Namespace the mocked ConfigMap is created in. */
  namespace?: string;
  /** Inference service name; the ConfigMap is named `${modelName}-metrics-dashboard`. */
  modelName?: string;
  /** Value written to the `supported` data key (serialized with String()). */
  supported?: boolean;
  /** Raw JSON metrics-dashboard definition stored under the `metrics` data key. */
  config?: string;
};

const MOCK_DATA_METRICS = `
export const MOCK_KSERVE_METRICS_CONFIG_1 = `
{
"config": [
{
Expand Down Expand Up @@ -81,13 +62,51 @@ const MOCK_DATA_METRICS = `
]
}`;

// Deliberately malformed JSON payload. Used by tests to exercise the
// dashboard's error path when the metrics ConfigMap cannot be parsed.
export const MOCK_KSERVE_METRICS_CONFIG_2 =
'{ I am malformed JSON and I am here to ruin your day }';

// Well-formed metrics config defining exactly two graphs (request count and
// mean model latency). Tests use it to assert that one chart is rendered per
// config entry — see the "should display 2 graphs" spec.
export const MOCK_KSERVE_METRICS_CONFIG_3 = `
{
  "config": [
    {
      "title": "Number of incoming requests",
      "type": "REQUEST_COUNT",
      "queries": [
        {
          "title": "Successful requests",
          "query": "sum(increase(ovms_requests_success{namespace='models',name='mnist'}[5m]))"
        },
        {
          "title": "Failed requests",
          "query": "sum(increase(ovms_requests_fail{namespace='models',name='mnist'}[5m]))"
        }
      ]
    },
    {
      "title": "Mean Model Latency",
      "type": "MEAN_LATENCY",
      "queries": [
        {
          "title": "Mean inference latency",
          "query": "sum by (name) (rate(ovms_inference_time_us_sum{namespace='models', name='mnist'}[1m])) / sum by (name) (rate(ovms_inference_time_us_count{namespace='models', name='mnist'}[1m]))"
        },
        {
          "title": "Mean request latency",
          "query": "sum by (name) (rate(ovms_request_time_us_sum{name='mnist'}[1m])) / sum by (name) (rate(ovms_request_time_us_count{name='mnist'}[1m]))"
        }
      ]
    }
  ]
}`;

export const mockKserveMetricsConfigMap = ({
namespace = 'test-project',
modelName = 'test-inference-service',
supported = true,
config = MOCK_KSERVE_METRICS_CONFIG_1,
}: MockKserveMetricsConfigMapType): ConfigMapKind => {
const data = {
metrics: MOCK_DATA_METRICS,
metrics: config,
supported: String(supported),
};
return mockConfigMap({ data, namespace, name: `${modelName}-metrics-dashboard` });
Expand Down
10 changes: 9 additions & 1 deletion frontend/src/__tests__/cypress/cypress/pages/modelMetrics.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,10 @@ class ModelMetricsGlobal {
getMetricsChart(title: string) {
return new ModelMetricsChart(() => cy.findByTestId(`metrics-card-${title}`).parents());
}

getAllMetricsCharts() {
  // Every rendered metrics card carries a `metrics-card-<title>` test id,
  // so a single pattern match collects all charts on the page.
  const metricsCardTestId = /metrics-card-.*/;
  return cy.findAllByTestId(metricsCardTestId);
}
}

class ModelMetricsChart extends Contextual<HTMLTableRowElement> {
Expand Down Expand Up @@ -47,13 +51,17 @@ class ModelMetricsKserve extends ModelMetricsPerformance {
return cy.findByTestId('kserve-configmap-error');
}

// Card shown when KServe performance metrics are disabled for the deployment.
// (Renamed from findKserveAreasDisabledCard; the leftover old header line is removed.)
findKserveAreaDisabledCard() {
  return cy.findByTestId('kserve-metrics-disabled');
}

// Card shown when the model's serving runtime has no metrics support.
findUnsupportedRuntimeCard() {
return cy.findByTestId('kserve-metrics-runtime-unsupported');
}

// Error state shown when the metrics ConfigMap contains an unparsable
// graph definition (exercised with MOCK_KSERVE_METRICS_CONFIG_2).
findInvalidDefinitionError() {
return cy.findByTestId('kserve-invalid-definition-error');
}
}

class ModelMetricsBias extends ModelMetricsGlobal {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,11 @@ import {
TemplateModel,
TrustyAIApplicationsModel,
} from '~/__tests__/cypress/cypress/utils/models';
import { mockKserveMetricsConfigMap } from '~/__mocks__/mockKserveMetricsConfigMap';
import {
MOCK_KSERVE_METRICS_CONFIG_2,
MOCK_KSERVE_METRICS_CONFIG_3,
mockKserveMetricsConfigMap,
} from '~/__mocks__/mockKserveMetricsConfigMap';

type HandlersProps = {
disablePerformanceMetrics?: boolean;
Expand Down Expand Up @@ -621,7 +625,7 @@ describe('Model Metrics', () => {
});
});

describe('KServe performance metrics', () => {

Check failure on line 628 in frontend/src/__tests__/cypress/cypress/tests/mocked/modelServing/modelMetrics.cy.ts

View workflow job for this annotation

GitHub Actions / Tests (18.x)

describe.only not permitted
it('should inform user when area disabled', () => {
initIntercepts({
disableBiasMetrics: false,
Expand All @@ -632,7 +636,7 @@ describe('KServe performance metrics', () => {
inferenceServices: [mockInferenceServiceK8sResource({ isModelMesh: false })],
});
modelMetricsKserve.visit('test-project', 'test-inference-service');
modelMetricsKserve.findKserveAreasDisabledCard().should('be.visible');
modelMetricsKserve.findKserveAreaDisabledCard().should('be.visible');
});

it('should show error when ConfigMap is missing', () => {
Expand Down Expand Up @@ -665,6 +669,46 @@ describe('KServe performance metrics', () => {
modelMetricsKserve.findUnsupportedRuntimeCard().should('be.visible');
});

// Verifies the dashboard fails soft when the metrics ConfigMap holds
// unparsable JSON: an inline error card is shown instead of charts.
it('should handle a malformed graph definition gracefully', () => {
initIntercepts({
disableBiasMetrics: false,
disablePerformanceMetrics: false,
disableKServeMetrics: false,
hasServingData: true,
hasBiasData: false,
inferenceServices: [mockInferenceServiceK8sResource({ isModelMesh: false })],
});

// Override the default ConfigMap intercept with the malformed payload.
cy.interceptK8s(
ConfigMapModel,
mockKserveMetricsConfigMap({ config: MOCK_KSERVE_METRICS_CONFIG_2 }),
);

modelMetricsKserve.visit('test-project', 'test-inference-service');
modelMetricsKserve.findInvalidDefinitionError().should('be.visible');
});

it.only('should display 2 graphs with the given config', () => {

Check failure on line 691 in frontend/src/__tests__/cypress/cypress/tests/mocked/modelServing/modelMetrics.cy.ts

View workflow job for this annotation

GitHub Actions / Tests (18.x)

it.only not permitted
initIntercepts({
disableBiasMetrics: false,
disablePerformanceMetrics: false,
disableKServeMetrics: false,
hasServingData: true,
hasBiasData: false,
inferenceServices: [mockInferenceServiceK8sResource({ isModelMesh: false })],
});

cy.interceptK8s(
ConfigMapModel,
mockKserveMetricsConfigMap({ config: MOCK_KSERVE_METRICS_CONFIG_3 }),
);

modelMetricsKserve.visit('test-project', 'test-inference-service');
modelMetricsKserve.getMetricsChart('Number of incoming requests').shouldHaveData();
modelMetricsKserve.getMetricsChart('Mean Model Latency').shouldHaveData();
modelMetricsKserve.getAllMetricsCharts().should('have.length', 2);
});

it('charts should function when data is available', () => {
initIntercepts({
disableBiasMetrics: false,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,15 +24,19 @@ const PerformanceTab: React.FC<PerformanceTabsProps> = ({ model }) => {

if (!modelMesh && !kserveMetricsEnabled) {
return (
<EmptyState variant="full">
<EmptyStateHeader
titleText="Single-model serving platform model metrics are not enabled."
headingLevel="h4"
icon={<EmptyStateIcon icon={WarningTriangleIcon} />}
alt=""
data-testid="kserve-metrics-disabled"
/>
</EmptyState>
<Stack data-testid="performance-metrics-loaded">
<StackItem>
<EmptyState variant="full">
<EmptyStateHeader
titleText="Single-model serving platform model metrics are not enabled."
headingLevel="h4"
icon={<EmptyStateIcon icon={WarningTriangleIcon} />}
alt=""
data-testid="kserve-metrics-disabled"
/>
</EmptyState>
</StackItem>
</Stack>
);
}

Expand Down

0 comments on commit 95c5fdd

Please sign in to comment.