Skip to content

Commit

Permalink
begin tool integration
Browse files Browse the repository at this point in the history
  • Loading branch information
Erisfiregamer1 committed Dec 4, 2023
1 parent 032876e commit 15e1c6f
Show file tree
Hide file tree
Showing 2 changed files with 89 additions and 43 deletions.
65 changes: 40 additions & 25 deletions bots/chatgpt.ts
Original file line number Diff line number Diff line change
@@ -1,39 +1,45 @@
import OpenAI from "npm:openai";

import { config } from "npm:dotenv";
// Load variables from a local .env file into the process environment.
config();

// Whether the ChatGPT backend is usable; flipped off below when no API key is present.
export let isEnabled = true;

import * as types from "./types.ts"

// Without an API key every request would fail, so disable the bot up front
// and warn once at startup instead of erroring per-message.
if (!Deno.env.get("OPENAI_API_KEY")) {
console.warn("No OpenAI API key provided! ChatGPT will be unavailable.");
isEnabled = false;
}

// Error envelope returned by the OpenAI chat-completions endpoint on failure.
type ChatCompletionError = {
  error: {
    message: string; // Human-readable description of the failure.
    type: string; // Error category, e.g. "invalid_request_error".
    // Per the OpenAI API reference, `param` names the offending request
    // parameter, or is null when the error is not parameter-specific.
    param: string | null;
    code: string; // Machine-readable error code.
  };
};

// Bundle returned by send(): the raw API response plus the updated
// conversation history (with the assistant's reply appended).
// NOTE: diff artifact in source (old and new lines interleaved); this is the
// post-commit shape using the shared types module.
type response = {
  oaires: types.Response; // Raw chat-completion payload from the API.
  messages: types.Message[]; // History including the new assistant message.
};

// Type guard: an API reply is an error envelope exactly when it carries
// an own "error" key.
function isError(
  value: ChatCompletionError | OpenAI.Chat.Completions.ChatCompletion,
): value is ChatCompletionError {
  return Object.keys(value).includes("error");
}

// const db = await Deno.openKv("./db.sqlite")

// The single (placeholder) tool advertised to the model: a vector-database
// lookup. Execution is not wired up yet -- see the "tool_calls" branch in send().
const useDatabaseTool: types.Tool = {
  type: "function",
  function: {
    name: "use-database",
    description:
      "Check the Vector Database for information on a subject. Irrelevant data means no relevant data is available.",
    parameters: {
      type: "object",
      properties: {
        test: {
          type: "string",
          description: "This is the 'test' parameter.",
        },
      },
      required: ["test"],
    },
  },
};

// Tool manifest sent with every chat-completion request.
const tools: types.Tool[] = [useDatabaseTool];

/*async function doTools(
): Promise<response> {
}*/

export async function send(
messages: OpenAI.Chat.ChatCompletionMessage[],
messages: types.Message[],
prompt: string,
userid: string,
): Promise<response> {
Expand Down Expand Up @@ -65,17 +71,26 @@ export async function send(
model: "gpt-3.5-turbo-16k",
messages: messages,
user: userid,
tools
}),
});

const resp: OpenAI.Chat.Completions.ChatCompletion | ChatCompletionError =
const resp: types.OpenAIResponse | types.OpenAIError =
await res.json();

if (isError(resp)) {
if (types.isError(resp)) {
// Fuck.
throw resp.error.message; // well at least they know why the fuck it crashed??
}

if (types.isStreaming(resp.choices[0])) {
throw "oh no"
}

if (resp.choices[0].finish_reason === "tool_calls") {
// Do nothing for now. Bot should complain about passing critical component null again
}

messages.push(resp.choices[0].message);

return {
Expand Down
67 changes: 49 additions & 18 deletions bots/types.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
// Definitions of subtypes are below

type Request = {
export type Request = {
// Either "messages" or "prompt" is required
messages?: Message[];
prompt?: string;
Expand Down Expand Up @@ -33,40 +33,41 @@ type Request = {

// Subtypes:

type TextContent = {
export type TextContent = {
type: 'text';
text: string;
};

type ImageContentPart = {
export type ImageContentPart = {
type: 'image_url';
image_url: {
url: string; // URL or base64 encoded image data
detail?: string; // Optional, defaults to 'auto'
};
};

// One segment of a multi-part message body: either text or an image.
export type ContentPart = TextContent | ImageContentPart;

type Message = {
export type Message = {
role: 'user' | 'assistant' | 'system' | 'tool';
content: string
| ContentPart[]; // Only for the 'user' role
| ContentPart[]
| null;
name?: string;
};

type FunctionDescription = {
export type FunctionDescription = {
description?: string;
name: string;
parameters: object; // JSON Schema object
};

type Tool = {
export type Tool = {
type: 'function';
function: FunctionDescription;
};

type ToolChoice = 'none' | 'auto' | {
export type ToolChoice = 'none' | 'auto' | {
type: 'function';
function: {
name: string;
Expand All @@ -75,7 +76,7 @@ type ToolChoice = 'none' | 'auto' | {

// Definitions of subtypes are below

type Response = {
export type Response = {
id: string;
// Depending on whether you set "stream" to "true" and
// whether you passed in "messages" or a "prompt", you
Expand All @@ -86,25 +87,36 @@ type Response = {
object: 'chat.completion';
};

// Raw chat-completion response from the OpenAI-compatible endpoint.
// NOTE(review): structurally identical to `Response` above -- consider
// aliasing one to the other to avoid the two drifting apart.
export type OpenAIResponse = {
id: string;
// Depending on whether you set "stream" to "true" and
// whether you passed in "messages" or a "prompt", you
// will get a different output shape
choices: (NonStreamingChoice | StreamingChoice)[];
created: number; // Unix timestamp
model: string;
object: 'chat.completion';
};

// Subtypes:

type NonChatChoice = {
export type NonChatChoice = {
finish_reason: string | null;
text: string;
}

type NonStreamingChoice = {
export type NonStreamingChoice = {
finish_reason: string | null; // Depends on the model. Ex: 'stop' | 'length' | 'content_filter' | 'tool_calls' | 'function_call'
message: {
content: string | null;
role: string;
role: 'assistant';
tool_calls?: ToolCall[];
// Deprecated, replaced by tool_calls
function_call?: FunctionCall;
};
};

type StreamingChoice = {
export type StreamingChoice = {
finish_reason: string | null;
delta: {
content: string | null;
Expand All @@ -115,18 +127,37 @@ type StreamingChoice = {
};
};

type Error = {
export type Error = {
code: number; // See "Error Handling" section
message: string;
}

type FunctionCall = {
// Top-level error envelope returned by the OpenAI-compatible endpoint.
// NOTE(review): the inner shape duplicates the exported `Error` type above --
// consider reusing it to keep the two in sync.
export type OpenAIError = {
error: {
code: number; // See "Error Handling" section
message: string;
}
}

// A function invocation emitted by the model as part of a tool call.
export type FunctionCall = {
name: string;
arguments: string; // JSON format arguments -- parse with JSON.parse before use
};

type ToolCall = {
export type ToolCall = {
id: string;
type: 'function';
function: FunctionCall;
};
};

// Type guard: an API reply is an error envelope exactly when it carries
// an own "error" key.
export function isError(
  value: OpenAIError | OpenAIResponse,
): value is OpenAIError {
  return Object.keys(value).includes("error");
}

// Type guard: streaming choices carry a "delta" field where non-streaming
// choices carry a "message".
export function isStreaming(
  value: StreamingChoice | NonStreamingChoice,
): value is StreamingChoice {
  return Object.keys(value).includes("delta");
}

0 comments on commit 15e1c6f

Please sign in to comment.