Commit 032876e (parent 0e662e6): 7 changed files with 366 additions and 28 deletions.
Empty file.
@@ -0,0 +1,81 @@
import OpenAI from "npm:openai";

import { config } from "npm:dotenv";
config();

export let isEnabled = true;

type ChatCompletionError = {
  error: {
    message: string;
    type: string;
    param: null; // observed as null so far; may also be a string naming the offending parameter
    code: string;
  };
};

type response = {
  oaires: OpenAI.Chat.Completions.ChatCompletion;
  messages: OpenAI.Chat.ChatCompletionMessage[];
};

// Type guard: OpenRouter returns an { error: ... } body instead of a completion when a request fails.
function isError(
  value: ChatCompletionError | OpenAI.Chat.Completions.ChatCompletion,
): value is ChatCompletionError {
  return "error" in value;
}

const db = await Deno.openKv("./db.sqlite"); // opened here for later use; not referenced below yet

export async function send(
  messages: OpenAI.Chat.ChatCompletionMessage[],
  prompt: string,
  userid: string, // not used in this function yet
  model: string,
  api_key: string,
): Promise<response> {
  if (!isEnabled) {
    throw "not_enabled";
  }

  // Seed a fresh conversation with a system message identifying the model.
  if (messages.length === 0) {
    messages.push({
      role: "system",
      content: `You are ${model}, an LLM hosted by OpenRouter.`,
    });
  }

  messages.push({
    role: "user",
    content: prompt,
  });

  const res = await fetch("https://openrouter.ai/api/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${api_key}`,
    },
    body: JSON.stringify({
      model,
      messages,
    }),
  });

  const resp: OpenAI.Chat.Completions.ChatCompletion | ChatCompletionError =
    await res.json();

  if (isError(resp)) {
    // Surface the upstream error message so the caller knows why the request failed.
    throw resp.error.message;
  }

  // Append the assistant's reply so the caller gets the full running history back.
  messages.push(resp.choices[0].message);

  return {
    oaires: resp,
    messages,
  };
}
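For context, a minimal sketch of how this helper might be called from another module. The file name (openrouter.ts), the OPENROUTER_API_KEY environment variable, and the model slug are assumptions for illustration and are not part of this commit:

// Hypothetical caller, assuming the module above is saved as openrouter.ts.
import OpenAI from "npm:openai";
import { send } from "./openrouter.ts";

const history: OpenAI.Chat.ChatCompletionMessage[] = [];

const { oaires, messages } = await send(
  history,
  "Summarize what OpenRouter does in one sentence.",
  "user-123",                          // userid (not yet used inside send)
  "openai/gpt-3.5-turbo",              // example OpenRouter model slug (assumed)
  Deno.env.get("OPENROUTER_API_KEY")!, // assumed env var name; dotenv loads it from .env
);

console.log(oaires.choices[0].message.content); // assistant reply
console.log(messages.length); // history now holds the system, user, and assistant turns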
@@ -0,0 +1,132 @@
// Definitions of subtypes are below

type Request = {
  // Either "messages" or "prompt" is required
  messages?: Message[];
  prompt?: string;

  // If "model" is unspecified, uses the user's default
  model?: string; // See "Supported Models" section

  // Additional optional parameters
  frequency_penalty?: number;
  logit_bias?: { [key: number]: number }; // Only available for OpenAI models
  max_tokens?: number; // Required for some models, so defaults to 512
  n?: number;
  presence_penalty?: number;
  response_format?: { type: 'text' | 'json_object' };
  seed?: number; // Only available for OpenAI models
  stop?: string | string[];
  stream?: boolean; // Enable streaming
  temperature?: number;
  top_p?: number;

  // Function-calling
  tools?: Tool[];
  tool_choice?: ToolChoice;

  // OpenRouter-only parameters
  transforms?: string[]; // See "Prompt Transforms" section
  models?: string[]; // See "Fallback Models" section
  route?: 'fallback'; // See "Fallback Models" section
};

// Subtypes:

type TextContent = {
  type: 'text';
  text: string;
};

type ImageContentPart = {
  type: 'image_url';
  image_url: {
    url: string; // URL or base64 encoded image data
    detail?: string; // Optional, defaults to 'auto'
  };
};

type ContentPart = TextContent | ImageContentPart;

type Message = {
  role: 'user' | 'assistant' | 'system' | 'tool';
  content: string
    | ContentPart[]; // Only for the 'user' role
  name?: string;
};

type FunctionDescription = {
  description?: string;
  name: string;
  parameters: object; // JSON Schema object
};

type Tool = {
  type: 'function';
  function: FunctionDescription;
};

type ToolChoice = 'none' | 'auto' | {
  type: 'function';
  function: {
    name: string;
  };
};

// Definitions of subtypes are below

type Response = {
  id: string;
  // Depending on whether you set "stream" to "true" and
  // whether you passed in "messages" or a "prompt", you
  // will get a different output shape
  choices: (NonStreamingChoice | StreamingChoice | NonChatChoice | Error)[];
  created: number; // Unix timestamp
  model: string;
  object: 'chat.completion';
};

// Subtypes:

type NonChatChoice = {
  finish_reason: string | null;
  text: string;
};

type NonStreamingChoice = {
  finish_reason: string | null; // Depends on the model. Ex: 'stop' | 'length' | 'content_filter' | 'tool_calls' | 'function_call'
  message: {
    content: string | null;
    role: string;
    tool_calls?: ToolCall[];
    // Deprecated, replaced by tool_calls
    function_call?: FunctionCall;
  };
};

type StreamingChoice = {
  finish_reason: string | null;
  delta: {
    content: string | null;
    role?: string;
    tool_calls?: ToolCall[];
    // Deprecated, replaced by tool_calls
    function_call?: FunctionCall;
  };
};

type Error = {
  code: number; // See "Error Handling" section
  message: string;
};

type FunctionCall = {
  name: string;
  arguments: string; // JSON format arguments
};

type ToolCall = {
  id: string;
  type: 'function';
  function: FunctionCall;
};
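To make these shapes concrete, here is a small illustrative sketch (not part of the commit) that builds a Request with a tool definition and narrows one returned choice. It assumes the type aliases above are in scope; the model slug and the get_weather tool are placeholders:

// Illustrative request body matching the Request type above.
const body: Request = {
  model: "openai/gpt-4-1106-preview", // placeholder model slug
  messages: [
    { role: "system", content: "You are a helpful assistant." },
    { role: "user", content: "What is the weather in Berlin?" },
  ],
  tools: [
    {
      type: "function",
      function: {
        name: "get_weather", // hypothetical tool, for illustration only
        description: "Look up the current weather for a city",
        parameters: {
          type: "object",
          properties: { city: { type: "string" } },
          required: ["city"],
        },
      },
    },
  ],
  tool_choice: "auto",
};

// Narrowing a choice from the Response union: each variant has a distinguishing key.
function readChoice(res: Response): string | FunctionCall | null {
  const choice = res.choices[0];
  if ("message" in choice) {
    // NonStreamingChoice: either plain text or a tool call the caller should execute
    return choice.message.tool_calls?.[0]?.function ?? choice.message.content;
  }
  if ("text" in choice) return choice.text; // NonChatChoice (prompt-style request)
  if ("delta" in choice) return choice.delta.content; // StreamingChoice chunk
  return null; // Error choice: code and message describe what went wrong
}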
@@ -0,0 +1,8 @@
# 11/2/2023

- Scrapped the llama2.ts file in favor of OpenRouter
- Added OpenRouter support
- Worked on the basis for model swapping
- Rethought a bit of the code I had in place
- Switched GPT-4 back to turbo so I can use function calling
- Began integrating the VDB (GPT-4 only for now while the rest comes together)