Skip to content

Commit

Permalink
fix(azure-openai): error handling
Browse files Browse the repository at this point in the history
  • Loading branch information
arielweinberger committed Oct 22, 2023
1 parent f8203b6 commit a63515e
Show file tree
Hide file tree
Showing 6 changed files with 47 additions and 43 deletions.
3 changes: 2 additions & 1 deletion .prettierignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
# Add files here to ignore them from prettier formatting
**/.next
apps/docs
apps/docs
**/dist
2 changes: 1 addition & 1 deletion apps/docs/theme.config.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ const config: DocsThemeConfig = {
link: 'https://github.com/pezzolabs/unillm',
},
chat: {
link: 'https://pezzo.cc/discord',
link: 'https://discord.gg/XcEVPePwn2',
},
docsRepositoryBase: 'https://github.com/pezzolabs/unillm/tree/main/apps/docs',
footer: {
Expand Down
53 changes: 35 additions & 18 deletions packages/unillm-node/providers/azure-openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,10 @@ import { BaseProvider } from "./baseProvider";
import { UnifiedErrorResponse } from "../utils/UnifiedErrorResponse";

/**
 * Shape of an error payload returned by the Azure OpenAI service.
 *
 * Azure's error responses are inconsistent: sometimes only a `code` is
 * present (numeric HTTP status, a numeric string, or a symbolic string such
 * as "DeploymentNotFound"), and sometimes the native OpenAI fields
 * (`message`, `type`, `param`) appear instead — so every field is optional.
 * NOTE(review): `code` includes `number` because the consuming code checks
 * `typeof error.code === "number"`; a string-only type would make that
 * branch unreachable per the type system.
 */
type AzureOpenAIError = {
  message?: string;
  type?: string;
  param?: string | null;
  code?: string | number | null;
};

export class AzureOpenAIProvider extends BaseProvider<Providers.AzureOpenAI> {
Expand All @@ -50,10 +50,6 @@ export class AzureOpenAIProvider extends BaseProvider<Providers.AzureOpenAI> {
},
);
} catch (_error: any) {
if (!_error.type) {
throw _error;
}

const error = this.getUnifiedErrorFromAzureOpenAIError(
_error as AzureOpenAIError,
model,
Expand Down Expand Up @@ -167,21 +163,42 @@ export class AzureOpenAIProvider extends BaseProvider<Providers.AzureOpenAI> {

private getUnifiedErrorFromAzureOpenAIError(
error: AzureOpenAIError,
model: ModelTypes[Providers.AzureOpenAI],
deployment: ModelTypes[Providers.AzureOpenAI],
): UnifiedErrorResponse {
let status;

switch (error.type) {
case "invalid_request_error":
status = 400;
break;
default:
status = 500;
let status = 500;

// Sometimes Azure returns a status code
if (typeof error.code === "number") {
status = error.code;
} else if (typeof error.code === "string") {
if (!isNaN(Number(error.code))) {
status = Number(error.code);
} else {
// Sometimes it returns strings
switch (error.code) {
case "DeploymentNotFound":
status = 404;
break;

// Need to handle more cases, but this isn't documented anywhere.
}
}
}

// And sometimes it returns the native OpenAI error type, if the endpoint and deployment exist
if (error.type) {
switch (error.type) {
case "invalid_request_error":
status = 400;
break;

// Need to handle more cases, but this isn't documented anywhere.
}
}

return new UnifiedErrorResponse(
{
model,
model: `azure:openai:${deployment}`,
},
status,
error,
Expand Down
2 changes: 1 addition & 1 deletion packages/unillm-node/tests/azure-openai.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ describe("#createChatCompletion - Azure OpenAI", () => {
).not.toThrow();
});

it("Should throw an error and return a unified error response", async () => {
it.only("Should throw an error and return a unified error response", async () => {
let errorOccurred = false;
try {
await uniLLM.createChatCompletion(`azure:${deployment}`, {
Expand Down
20 changes: 8 additions & 12 deletions packages/unillm-node/tests/utils/validation.util.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,7 @@ import type {
} from "openai/resources/chat";

import { UnifiedCreateChatCompletionNonStreamResult } from "../../utils/types";
import {
UnifiedErrorResponse,
} from "../../utils/UnifiedErrorResponse";
import { UnifiedErrorResponse } from "../../utils/UnifiedErrorResponse";

/**
* Validates an object against the OpenAI ChatCompletion response schema.
Expand Down Expand Up @@ -101,19 +99,17 @@ export function validateOpenAIChatCompletionChunk(obj: ChatCompletionChunk) {
export function validateOpenAIChatCompletionErrorResponse(
error: UnifiedErrorResponse,
) {
const schema: z.ZodType<UnifiedErrorResponse> = z.strictObject({
name: z.string(),
message: z.string(),
const schema = z.strictObject({
status: z.number(),
headers: z.record(z.string()),
param: z.string().nullable(),
code: z.string().nullable(),
type: z.string(),
param: z.string().nullable().optional(),
code: z.string().nullable().optional(),
type: z.string().optional(),
error: z.strictObject({
message: z.string(),
type: z.string(),
param: z.string().nullable(),
code: z.string().nullable(),
type: z.string().optional(),
param: z.string().nullable().optional(),
code: z.string().nullable().optional(),
}),
metadata: z.strictObject({
model: z.string(),
Expand Down
10 changes: 0 additions & 10 deletions packages/unillm-node/utils/UnifiedErrorResponse.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,15 +15,5 @@ export class UnifiedErrorResponse extends APIError {
headers: Headers | undefined,
) {
super(status, error, message, headers);

if ((error as any).param === undefined) {
(error as any).param = null;
(this as any).param = null;
}

if ((error as any).code === undefined) {
(error as any).code = null;
(this as any).code = null;
}
}
}

0 comments on commit a63515e

Please sign in to comment.