Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feature/onfinish #5759

Merged
merged 2 commits into from
Nov 4, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion app/client/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ export interface ChatOptions {
config: LLMConfig;

onUpdate?: (message: string, chunk: string) => void;
onFinish: (message: string) => void;
onFinish: (message: string, responseRes: Response) => void;
onError?: (err: Error) => void;
onController?: (controller: AbortController) => void;
onBeforeTool?: (tool: ChatMessageTool) => void;
Expand Down
6 changes: 4 additions & 2 deletions app/client/platforms/alibaba.ts
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,7 @@ export class QwenApi implements LLMApi {
let responseText = "";
let remainText = "";
let finished = false;
let responseRes: Response;

// animate the response to make it look smooth
function animateResponseText() {
Expand Down Expand Up @@ -172,7 +173,7 @@ export class QwenApi implements LLMApi {
const finish = () => {
if (!finished) {
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes);
}
};

Expand All @@ -188,6 +189,7 @@ export class QwenApi implements LLMApi {
"[Alibaba] request response content type: ",
contentType,
);
responseRes = res;

if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
Expand Down Expand Up @@ -254,7 +256,7 @@ export class QwenApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
5 changes: 3 additions & 2 deletions app/client/platforms/anthropic.ts
Original file line number Diff line number Diff line change
Expand Up @@ -317,13 +317,14 @@ export class ClaudeApi implements LLMApi {
};

try {
controller.signal.onabort = () => options.onFinish("");
controller.signal.onabort = () =>
options.onFinish("", new Response(null, { status: 400 }));

const res = await fetch(path, payload);
const resJson = await res.json();

const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
} catch (e) {
console.error("failed to chat", e);
options.onError?.(e as Error);
Expand Down
7 changes: 4 additions & 3 deletions app/client/platforms/baidu.ts
Original file line number Diff line number Diff line change
Expand Up @@ -162,6 +162,7 @@ export class ErnieApi implements LLMApi {
let responseText = "";
let remainText = "";
let finished = false;
let responseRes: Response;

// animate the response to make it look smooth
function animateResponseText() {
Expand Down Expand Up @@ -191,7 +192,7 @@ export class ErnieApi implements LLMApi {
const finish = () => {
if (!finished) {
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes);
}
};

Expand All @@ -204,7 +205,7 @@ export class ErnieApi implements LLMApi {
clearTimeout(requestTimeoutId);
const contentType = res.headers.get("content-type");
console.log("[Baidu] request response content type: ", contentType);

responseRes = res;
if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
return finish();
Expand Down Expand Up @@ -267,7 +268,7 @@ export class ErnieApi implements LLMApi {

const resJson = await res.json();
const message = resJson?.result;
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
7 changes: 4 additions & 3 deletions app/client/platforms/bytedance.ts
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,7 @@ export class DoubaoApi implements LLMApi {
let responseText = "";
let remainText = "";
let finished = false;
let responseRes: Response;

// animate the response to make it look smooth
function animateResponseText() {
Expand Down Expand Up @@ -159,7 +160,7 @@ export class DoubaoApi implements LLMApi {
const finish = () => {
if (!finished) {
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes);
}
};

Expand All @@ -175,7 +176,7 @@ export class DoubaoApi implements LLMApi {
"[ByteDance] request response content type: ",
contentType,
);

responseRes = res;
if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
return finish();
Expand Down Expand Up @@ -241,7 +242,7 @@ export class DoubaoApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
2 changes: 1 addition & 1 deletion app/client/platforms/glm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ export class ChatGLMApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
2 changes: 1 addition & 1 deletion app/client/platforms/google.ts
Original file line number Diff line number Diff line change
Expand Up @@ -274,7 +274,7 @@ export class GeminiProApi implements LLMApi {
);
}
const message = apiClient.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
7 changes: 4 additions & 3 deletions app/client/platforms/iflytek.ts
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,7 @@ export class SparkApi implements LLMApi {
let responseText = "";
let remainText = "";
let finished = false;
let responseRes: Response;

// Animate response text to make it look smooth
function animateResponseText() {
Expand All @@ -143,7 +144,7 @@ export class SparkApi implements LLMApi {
const finish = () => {
if (!finished) {
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes);
}
};

Expand All @@ -156,7 +157,7 @@ export class SparkApi implements LLMApi {
clearTimeout(requestTimeoutId);
const contentType = res.headers.get("content-type");
console.log("[Spark] request response content type: ", contentType);

responseRes = res;
if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
return finish();
Expand Down Expand Up @@ -231,7 +232,7 @@ export class SparkApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
2 changes: 1 addition & 1 deletion app/client/platforms/moonshot.ts
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,7 @@ export class MoonshotApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
2 changes: 1 addition & 1 deletion app/client/platforms/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -361,7 +361,7 @@ export class ChatGPTApi implements LLMApi {

const resJson = await res.json();
const message = await this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
7 changes: 4 additions & 3 deletions app/client/platforms/tencent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,7 @@ export class HunyuanApi implements LLMApi {
let responseText = "";
let remainText = "";
let finished = false;
let responseRes: Response;

// animate the response to make it look smooth
function animateResponseText() {
Expand Down Expand Up @@ -171,7 +172,7 @@ export class HunyuanApi implements LLMApi {
const finish = () => {
if (!finished) {
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes);
}
};

Expand All @@ -187,7 +188,7 @@ export class HunyuanApi implements LLMApi {
"[Tencent] request response content type: ",
contentType,
);

responseRes = res;
if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
return finish();
Expand Down Expand Up @@ -253,7 +254,7 @@ export class HunyuanApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
2 changes: 1 addition & 1 deletion app/client/platforms/xai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ export class XAIApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue

Caution: Response object might be consumed.

The Response object being passed to onFinish has already been consumed by res.json(). This could lead to issues if the caller tries to read the response body again.

Consider one of these approaches:

- const resJson = await res.json();
- const message = this.extractMessage(resJson);
- options.onFinish(message, res);
+ const resClone = res.clone();
+ const resJson = await res.json();
+ const message = this.extractMessage(resJson);
+ options.onFinish(message, resClone);

Or:

- const resJson = await res.json();
- const message = this.extractMessage(resJson);
- options.onFinish(message, res);
+ const resJson = await res.json();
+ const message = this.extractMessage(resJson);
+ options.onFinish(message, { status: res.status, statusText: res.statusText, headers: res.headers });
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
options.onFinish(message, res);
const resClone = res.clone();
const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message, resClone);
```
Solution 2 (using response metadata):
```suggestion
const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message, { status: res.status, statusText: res.statusText, headers: res.headers });

}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
Expand Down
35 changes: 17 additions & 18 deletions app/store/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -649,13 +649,14 @@ export const useChatStore = createPersistStore(
stream: false,
providerName,
},
onFinish(message) {
if (!isValidMessage(message)) return;
get().updateCurrentSession(
(session) =>
(session.topic =
message.length > 0 ? trimTopic(message) : DEFAULT_TOPIC),
);
onFinish(message, responseRes) {
if (responseRes?.status === 200) {
get().updateCurrentSession(
(session) =>
(session.topic =
message.length > 0 ? trimTopic(message) : DEFAULT_TOPIC),
);
}
Comment on lines +652 to +659
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

Improve error handling and code clarity.

While the status check is a good addition, consider these improvements:

  1. Handle non-200 status codes explicitly
  2. Restructure the assignment for better readability
-            onFinish(message, responseRes) {
-              if (responseRes?.status === 200) {
-                get().updateCurrentSession(
-                  (session) =>
-                    (session.topic =
-                      message.length > 0 ? trimTopic(message) : DEFAULT_TOPIC),
-                );
-              }
+            onFinish(message, responseRes) {
+              if (responseRes?.status !== 200) {
+                console.error("[Topic] Failed to update topic:", responseRes?.status);
+                return;
+              }
+              get().updateCurrentSession((session) => {
+                const newTopic = message.length > 0 ? trimTopic(message) : DEFAULT_TOPIC;
+                session.topic = newTopic;
+              });
             },
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
onFinish(message, responseRes) {
if (responseRes?.status === 200) {
get().updateCurrentSession(
(session) =>
(session.topic =
message.length > 0 ? trimTopic(message) : DEFAULT_TOPIC),
);
}
onFinish(message, responseRes) {
if (responseRes?.status !== 200) {
console.error("[Topic] Failed to update topic:", responseRes?.status);
return;
}
get().updateCurrentSession((session) => {
const newTopic = message.length > 0 ? trimTopic(message) : DEFAULT_TOPIC;
session.topic = newTopic;
});
},
🧰 Tools
🪛 Biome

[error] 656-658: The assignment should not be in an expression.

The use of assignments in expressions is confusing.
Expressions are often considered as side-effect free.

(lint/suspicious/noAssignInExpressions)

},
});
}
Expand All @@ -669,7 +670,7 @@ export const useChatStore = createPersistStore(

const historyMsgLength = countMessages(toBeSummarizedMsgs);

if (historyMsgLength > modelConfig?.max_tokens ?? 4000) {
if (historyMsgLength > (modelConfig?.max_tokens || 4000)) {
const n = toBeSummarizedMsgs.length;
toBeSummarizedMsgs = toBeSummarizedMsgs.slice(
Math.max(0, n - modelConfig.historyMessageCount),
Expand Down Expand Up @@ -715,22 +716,20 @@ export const useChatStore = createPersistStore(
onUpdate(message) {
session.memoryPrompt = message;
},
onFinish(message) {
console.log("[Memory] ", message);
get().updateCurrentSession((session) => {
session.lastSummarizeIndex = lastSummarizeIndex;
session.memoryPrompt = message; // Update the memory prompt for stored it in local storage
});
onFinish(message, responseRes) {
if (responseRes?.status === 200) {
console.log("[Memory] ", message);
get().updateCurrentSession((session) => {
session.lastSummarizeIndex = lastSummarizeIndex;
session.memoryPrompt = message; // Update the memory prompt for stored it in local storage
});
}
Comment on lines +719 to +726
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

Improve error handling and remove redundant comment.

Consider these improvements:

  1. Handle non-200 status codes explicitly
  2. Remove the redundant comment about local storage
-            onFinish(message, responseRes) {
-              if (responseRes?.status === 200) {
-                console.log("[Memory] ", message);
-                get().updateCurrentSession((session) => {
-                  session.lastSummarizeIndex = lastSummarizeIndex;
-                  session.memoryPrompt = message; // Update the memory prompt for stored it in local storage
-                });
-              }
+            onFinish(message, responseRes) {
+              if (responseRes?.status !== 200) {
+                console.error("[Memory] Failed to update memory prompt:", responseRes?.status);
+                return;
+              }
+              console.log("[Memory] ", message);
+              get().updateCurrentSession((session) => {
+                session.lastSummarizeIndex = lastSummarizeIndex;
+                session.memoryPrompt = message;
+              });
             },
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
onFinish(message, responseRes) {
if (responseRes?.status === 200) {
console.log("[Memory] ", message);
get().updateCurrentSession((session) => {
session.lastSummarizeIndex = lastSummarizeIndex;
session.memoryPrompt = message; // Update the memory prompt for stored it in local storage
});
}
onFinish(message, responseRes) {
if (responseRes?.status !== 200) {
console.error("[Memory] Failed to update memory prompt:", responseRes?.status);
return;
}
console.log("[Memory] ", message);
get().updateCurrentSession((session) => {
session.lastSummarizeIndex = lastSummarizeIndex;
session.memoryPrompt = message;
});
},

},
onError(err) {
console.error("[Summarize] ", err);
},
});
}

/**
 * Checks whether a model reply is usable as plain text (e.g. as a session topic).
 *
 * @param message - The value returned by the model; may be any shape, so it is
 *   typed `unknown` and narrowed at runtime instead of `any`.
 * @returns `true` only when `message` is a string that does not start with a
 *   ```json code fence (replies opening with ```json are treated as structured
 *   output, not valid display text).
 */
function isValidMessage(message: unknown): boolean {
  // typeof narrows `message` to string before calling startsWith,
  // so non-string values (null, numbers, objects) safely return false.
  return typeof message === "string" && !message.startsWith("```json");
}
},

updateStat(message: ChatMessage) {
Expand Down
4 changes: 3 additions & 1 deletion app/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -266,7 +266,9 @@ export function isVisionModel(model: string) {
model.includes("gpt-4-turbo") && !model.includes("preview");

return (
visionKeywords.some((keyword) => model.includes(keyword)) || isGpt4Turbo
visionKeywords.some((keyword) => model.includes(keyword)) ||
isGpt4Turbo ||
isDalle3(model)
);
}

Expand Down
4 changes: 3 additions & 1 deletion app/utils/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -174,6 +174,7 @@ export function stream(
let finished = false;
let running = false;
let runTools: any[] = [];
let responseRes: Response;
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue

Potential undefined responseRes in options.onFinish

There's a risk that responseRes may be undefined when options.onFinish is called. Since responseRes is assigned within the onopen callback, if finish() is invoked before onopen is triggered (e.g., due to an early error or abort), responseRes will remain undefined. This could lead to runtime errors if onFinish expects a defined Response object.

Consider updating the variable declaration to allow for undefined and ensure that onFinish can handle this case. Apply this diff to adjust the variable declaration:

- let responseRes: Response;
+ let responseRes: Response | undefined;

Additionally, ensure that any usage of responseRes in onFinish accounts for the possibility of it being undefined.

Also applies to: 276-276


// animate the response to make it look smooth
function animateResponseText() {
Expand Down Expand Up @@ -272,7 +273,7 @@ export function stream(
}
console.debug("[ChatAPI] end");
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes); // pass the Response on to onFinish
}
};

Expand Down Expand Up @@ -304,6 +305,7 @@ export function stream(
clearTimeout(requestTimeoutId);
const contentType = res.headers.get("content-type");
console.log("[Request] response content type: ", contentType);
responseRes = res;

if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
Expand Down
2 changes: 1 addition & 1 deletion app/utils/stream.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ type StreamResponse = {
headers: Record<string, string>;
};

export function fetch(url: string, options?: RequestInit): Promise<any> {
export function fetch(url: string, options?: RequestInit): Promise<Response> {
if (window.__TAURI__) {
const {
signal,
Expand Down
Loading