Skip to content

Commit

Permalink
i said WATCH YO CODE
Browse files Browse the repository at this point in the history
  • Loading branch information
Erisfiregamer1 committed Dec 4, 2023
1 parent 15e1c6f commit b281e93
Show file tree
Hide file tree
Showing 4 changed files with 93 additions and 93 deletions.
35 changes: 23 additions & 12 deletions bots/chatgpt.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,15 @@ export let isEnabled = true;

import * as types from "./types.ts"

import * as vdb from "../vdb.ts"

if (!Deno.env.get("OPENAI_API_KEY")) {
console.warn("No OpenAI API key provided! ChatGPT will be unavailable.");
isEnabled = false;
}

type response = {
oaires: types.Response;
oaires: types.OpenAIResponse;
messages: types.Message[];
};

Expand All @@ -32,11 +34,22 @@ const tools: types.Tool[] = [{
}
}]

/*async function doTools(
async function doTools(
oaires: types.OpenAIResponse,
messages: types.Message[]
): Promise<response> {
if (oaires.choices[0].finish_reason !== "tool_calls") {
throw "What The Shit?"
}

}*/
const toolCalls = oaires.choices[0].message.tool_calls!

toolCalls.forEach((tool) => {
if (tool.function.name === "use-database") {

}
})
}

export async function send(
messages: types.Message[],
Expand Down Expand Up @@ -75,26 +88,24 @@ export async function send(
}),
});

const resp: types.OpenAIResponse | types.OpenAIError =
let resp: types.OpenAIResponse | types.OpenAIError =
await res.json();

if (types.isError(resp)) {
// Fuck.
throw resp.error.message; // well at least they know why the fuck it crashed??
}

if (types.isStreaming(resp.choices[0])) {
throw "oh no"
let finalresp = {
oaires: resp,
messages
}

if (resp.choices[0].finish_reason === "tool_calls") {
// Do nothing for now. Bot should complain about passing critical component null again
finalresp = await doTools(resp, messages)
}

messages.push(resp.choices[0].message);

return {
oaires: resp,
messages,
};
return finalresp
}
112 changes: 59 additions & 53 deletions bots/gpt_4.ts
Original file line number Diff line number Diff line change
@@ -1,73 +1,74 @@
import OpenAI from "npm:openai";
export let isEnabled = true;

import { config } from "npm:dotenv";
config();
import * as types from "./types.ts"

export let isEnabled = true;
import * as vdb from "../vdb.ts"

if (!Deno.env.get("OPENAI_API_KEY")) {
console.warn("No OpenAI API key provided! GPT-4 will be unavailable.");
console.warn("No OpenAI API key provided! ChatGPT will be unavailable.");
isEnabled = false;
}

type ChatCompletionError = {
error: {
message: string;
type: string;
param: null; // Not sure about this one tbh,
code: string;
};
};

type response = {
oaires: OpenAI.Chat.Completions.ChatCompletion;
messages: OpenAI.Chat.ChatCompletionMessage[];
oaires: types.OpenAIResponse;
messages: types.Message[];
};

function isError(
value: ChatCompletionError | OpenAI.Chat.Completions.ChatCompletion,
): value is ChatCompletionError {
return "error" in value;
}

// const db = await Deno.openKv("./db.sqlite")

// OpenAI function-calling tool schema advertised to the model.
// Declares a single "use-database" function intended to query the vector
// database (see the vdb import); the schema is JSON-Schema-shaped per the
// Chat Completions `tools` parameter.
const tools: types.Tool[] = [{
type: "function",
function: {
name: "use-database",
description: "Check the Vector Database for information on a subject. Irrelevant data means no relevant data is available.",
parameters: {
type: "object",
properties: {
// NOTE(review): placeholder parameter — the description says it is
// "the 'test' parameter", so the real query schema is presumably
// still to be written. TODO confirm intended shape.
test: {
type: "string",
description: "This is the 'test' parameter."
}
},
required: ["test"]
}
}
}]

/**
 * Handle a completion whose first choice finished with "tool_calls" by
 * dispatching each requested tool call.
 *
 * Currently only the "use-database" tool is recognized, and its handler is
 * still a stub — no vector-database lookup is performed yet.
 *
 * @param oaires the OpenAI response to process; its first choice must have
 *               finish_reason === "tool_calls"
 * @param messages running conversation history (not modified here)
 * @returns the { oaires, messages } pair that send() forwards to its caller
 * @throws a string when the first choice did not finish with "tool_calls"
 *         (this file's convention is to throw plain strings)
 */
async function doTools(
  oaires: types.OpenAIResponse,
  messages: types.Message[]
): Promise<response> {
  if (oaires.choices[0].finish_reason !== "tool_calls") {
    throw "What The Shit?"
  }

  // finish_reason === "tool_calls" guarantees tool_calls is present,
  // so the non-null assertion is safe here.
  const toolCalls = oaires.choices[0].message.tool_calls!

  for (const tool of toolCalls) {
    if (tool.function.name === "use-database") {
      // TODO: query the vector database and append a "tool" result message.
    }
  }

  // BUG FIX: the original body had no return statement, so the function
  // resolved to undefined even though callers assign the result to a
  // `response`-typed variable and return it from send().
  return { oaires, messages }
}

export async function send(
messages: OpenAI.Chat.ChatCompletionMessage[],
messages: types.Message[],
prompt: string,
userid: string,
): Promise<response> {
// here we go

if (!isEnabled) {
throw "not_enabled"; // how did you get here.
throw "not_enabled";
}

if (messages.length === 0) {
messages.push({
role: "system",
content: "You are GPT-4, an LLM by OpenAI.",
content: "You are ChatGPT, an LLM by OpenAI.",
});
}

/*const content_arr = []
content_arr.push({
type: "text",
text: prompt
})
if (images.length !== 0) {
images.forEach((imgurl) => {
content_arr.push({
type: "image_url",
image_url: imgurl
})
})
}*/

messages.push({
role: "user",
content: prompt,
Expand All @@ -80,26 +81,31 @@ export async function send(
Authorization: `Bearer ${Deno.env.get("OPENAI_API_KEY")}`,
},
body: JSON.stringify({
max_tokens: 4096,
model: "gpt-4-1106-preview",
model: "gpt-3.5-turbo-16k",
messages: messages,
user: userid,
tools
}),
});

const resp: OpenAI.Chat.Completions.ChatCompletion | ChatCompletionError =
let resp: types.OpenAIResponse | types.OpenAIError =
await res.json();

if (isError(resp)) {
if (types.isError(resp)) {
// Fuck.
console.log(resp.error.message)
throw resp.error.message; // well at least they know why the fuck it crashed??
}

console.log(resp);

return {
let finalresp = {
oaires: resp,
messages,
};
messages
}

if (resp.choices[0].finish_reason === "tool_calls") {
finalresp = await doTools(resp, messages)
}

messages.push(resp.choices[0].message);

return finalresp
}
23 changes: 3 additions & 20 deletions bots/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ export type Response = {
// Depending on whether you set "stream" to "true" and
// whether you passed in "messages" or a "prompt", you
// will get a different output shape
choices: (NonStreamingChoice | StreamingChoice | NonChatChoice | Error)[];
choices: (NonStreamingChoice | NonChatChoice | Error)[];
created: number; // Unix timestamp
model: string;
object: 'chat.completion';
Expand All @@ -92,7 +92,7 @@ export type OpenAIResponse = {
// Depending on whether you set "stream" to "true" and
// whether you passed in "messages" or a "prompt", you
// will get a different output shape
choices: (NonStreamingChoice | StreamingChoice)[];
choices: NonStreamingChoice[];
created: number; // Unix timestamp
model: string;
object: 'chat.completion';
Expand All @@ -116,17 +116,6 @@ export type NonStreamingChoice = {
};
};

// Shape of one streamed (SSE) chat-completion chunk choice: incremental
// content arrives in `delta` rather than `message`.
export type StreamingChoice = {
// null until the final chunk of the stream
finish_reason: string | null;
delta: {
content: string | null;
role?: string;
tool_calls?: ToolCall[];
// Deprecated, replaced by tool_calls
function_call?: FunctionCall;
};
};

export type Error = {
code: number; // See "Error Handling" section
message: string;
Expand Down Expand Up @@ -154,10 +143,4 @@ export function isError(
value: OpenAIError | OpenAIResponse,
): value is OpenAIError {
return "error" in value;
}

export function isStreaming(
value: StreamingChoice | NonStreamingChoice,
): value is StreamingChoice {
return "delta" in value;
}
}
16 changes: 8 additions & 8 deletions main.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,17 +4,16 @@ import * as gpt4 from "./bots/gpt_4.ts";
// import * as palm from "./bots/palm.ts";
import * as openrouter from "./bots/openrouter.ts";


import OpenAI from "npm:openai";
import * as types from "./bots/types.ts"

type messagedata = {
id: string;
messages: OpenAI.Chat.ChatCompletionMessage[];
};
messages: types.Message[]
}

type gptresponse = {
oaires: OpenAI.Chat.Completions.ChatCompletion;
messages: OpenAI.Chat.ChatCompletionMessage[];
oaires: types.OpenAIResponse;
messages: types.Message[];
};

import "./slashcode.ts";
Expand Down Expand Up @@ -169,7 +168,7 @@ client.on("messageCreate", async (message) => {
const msg = await message.reply("Sending message...");

let resp: gptresponse;
if (llm.startsWith("openrouter^")) {
/*if (llm.startsWith("openrouter^")) {
const llm_real = llm.split("^")
const api_key = (await db.get<string>([
Expand Down Expand Up @@ -210,7 +209,8 @@ client.on("messageCreate", async (message) => {
await message.reply(chunk);
}
});
} else if (llm === "chatgpt") {
} else */
if (llm === "chatgpt") {
if (!chatgpt.isEnabled) {
msg.edit(
"This LLM isn't enabled! Please switch to a different LLM to use this bot.",
Expand Down

0 comments on commit b281e93

Please sign in to comment.