update max_tokens to max_completions_tokens #5677
@@ -63,7 +63,7 @@ export interface RequestPayload {
   presence_penalty: number;
   frequency_penalty: number;
   top_p: number;
-  max_tokens?: number;
+  max_completions_tokens?: number;
 }

 export interface DalleRequestPayload {

@@ -228,13 +228,16 @@ export class ChatGPTApi implements LLMApi {
       presence_penalty: !isO1 ? modelConfig.presence_penalty : 0,
       frequency_penalty: !isO1 ? modelConfig.frequency_penalty : 0,
       top_p: !isO1 ? modelConfig.top_p : 1,
-      // max_tokens: Math.max(modelConfig.max_tokens, 1024),
-      // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
+      // max_completions_tokens: Math.max(modelConfig.max_completions_tokens, 1024),
+      // Please do not ask me why not send max_completions_tokens, no reason, this param is just shit, I dont want to explain anymore.
Comment on lines +231 to +232

Clarify the decision to comment out max_completions_tokens.

The commented-out max_completions_tokens assignment (and the note above it) does not explain why the cap is omitted from the request payload. Could you please provide more context on why max_completions_tokens is not sent?
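For illustration only, here is a minimal, self-contained sketch of one possible resolution: send the cap instead of leaving it commented out. The max_completions_tokens field name and the 1024 floor come from the surrounding diff; the helper and type names below are assumptions, not code from this PR.

// Hypothetical sketch, not part of this PR: include the cap in the payload
// with the same 1024 floor the commented-out line used.
type TokenCapPayload = { max_completions_tokens?: number };

function applyTokenCap<T extends TokenCapPayload>(
  payload: T,
  configuredCap: number,
): T {
  // Mirror the floor from the commented-out line: never send less than 1024.
  return { ...payload, max_completions_tokens: Math.max(configuredCap, 1024) };
}

// Usage (names from the surrounding file, assumed):
// applyTokenCap(requestPayload, modelConfig.max_completions_tokens)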
     };

-    // add max_tokens to vision model
+    // add max_completions_tokens to vision model
     if (visionModel) {
-      requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
+      requestPayload["max_completions_tokens"] = Math.max(
+        modelConfig.max_completions_tokens,
+        4000,
+      );
     }
   }
@@ -65,7 +65,7 @@ export const DEFAULT_CONFIG = {
     providerName: "OpenAI" as ServiceProvider,
     temperature: 0.5,
     top_p: 1,
-    max_tokens: 4000,
+    max_completions_tokens: 4000,
💡 Codebase verification

Incomplete refactor: 'max_tokens' still in use

The renaming of max_tokens to max_completions_tokens is incomplete: occurrences of max_tokens remain elsewhere in the codebase. Please update all remaining instances of max_tokens to max_completions_tokens.

🔗 Analysis chain

LGTM! Verify usage across the codebase.

The renaming of max_tokens to max_completions_tokens here is consistent with the rest of this PR. To ensure this change doesn't break any existing functionality, please run the following script to check for any remaining usage of max_tokens. If any occurrences are found, they may need to be updated to use max_completions_tokens.

🏁 Scripts executed

The following scripts were executed for the analysis:

Script:

#!/bin/bash
# Description: Check for any remaining usage of 'max_tokens' in the codebase
# Test: Search for 'max_tokens' in all TypeScript and JavaScript files
rg --type-add 'script:*.{ts,js}' --type script 'max_tokens'

Length of output: 160

Script:

#!/bin/bash
# Description: Check for any remaining usage of 'max_tokens' in the codebase
# Test: Search for 'max_tokens' in all TypeScript and JavaScript files
rg --type-add 'script:*.{ts,js}' --type script 'max_tokens'

Length of output: 3812
     presence_penalty: 0,
     frequency_penalty: 0,
     sendMemory: true,

@@ -127,7 +127,7 @@ export const ModalConfigValidator = {
   model(x: string) {
     return x as ModelType;
   },
-  max_tokens(x: number) {
+  max_completions_tokens(x: number) {
     return limitNumber(x, 0, 512000, 1024);
   },
   presence_penalty(x: number) {
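For context, limitNumber is referenced by the validator above but not shown in this diff. The sketch below is a plausible stand-in with the same call signature, limitNumber(x, min, max, defaultValue); it is an assumption about its behavior, not the repository's actual implementation.

// Hypothetical stand-in for the limitNumber helper used by the validator.
function limitNumber(
  x: number,
  min: number,
  max: number,
  defaultValue: number,
): number {
  if (typeof x !== "number" || isNaN(x)) {
    return defaultValue; // fall back when the input is not a usable number
  }
  return Math.min(max, Math.max(min, x)); // clamp into [min, max]
}

// Under this reading, the validator would clamp max_completions_tokens into
// [0, 512000] and fall back to 1024 for non-numeric input.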
Only the o1 parameter changed, not the parameter for every model. How can you change it like this? Look at the issue carefully.
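To illustrate the reviewer's point: only o1-family models expect the newer completion-token field, while other chat models still accept max_tokens, so a blanket rename changes behavior for every model. Below is a minimal sketch of a per-model approach; the isO1 check and field names are assumptions for illustration, not code from this PR.

// Hypothetical sketch: keep the old field for regular models and switch only
// for the o1 family.
interface TokenLimitFields {
  max_tokens?: number;
  max_completion_tokens?: number;
}

function tokenLimitFor(model: string, limit: number): TokenLimitFields {
  const isO1 = model.startsWith("o1"); // assumption: a model-family check like the one used elsewhere in the client
  // o1-family endpoints reject `max_tokens` in favor of `max_completion_tokens`;
  // other chat models keep the original field name.
  return isO1 ? { max_completion_tokens: limit } : { max_tokens: limit };
}

With something along these lines, DEFAULT_CONFIG could keep max_tokens unchanged and only the request builder would translate the field for o1 models.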