Skip to content

Commit

Permalink
Jesus Fucking Finally (No message persistence for ChatGPT right now!)
Browse files Browse the repository at this point in the history
  • Loading branch information
Erisfiregamer1 committed Feb 10, 2024
1 parent 2fb1203 commit fa0df26
Show file tree
Hide file tree
Showing 7 changed files with 192 additions and 82 deletions.
53 changes: 40 additions & 13 deletions bots/chatgpt.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ import * as types from "./types.ts";

import * as vdb from "../vdb.ts";

import { safeEval } from "../lib/eval.ts";

if (!Deno.env.get("OPENAI_API_KEY")) {
console.warn("No OpenAI API key provided! ChatGPT will be unavailable.");
isEnabled = false;
Expand Down Expand Up @@ -33,12 +35,30 @@ const tools: types.Tool[] = [{
required: ["query"],
},
},
}, {
type: "function",
function: {
name: "eval",
description:
"Evaluates JS code within a heavily limited sandbox (a worker with no access other then network). Times out after 10 seconds.",
parameters: {
type: "object",
properties: {
code: {
type: "string",
description: "Code to be evaluated",
},
},
required: ["code"],
},
},
}];

async function doTools(
oaires: types.Response,
messages: types.Message[],
): Promise<response> {
callback: Function,
): Promise<types.Response> {
if (oaires.choices[0].finish_reason !== "tool_calls") {
throw "What The Shit?";
}
Expand All @@ -56,6 +76,13 @@ async function doTools(
content: databaseResponse,
tool_call_id: tool.id,
};
} else if (tool.function.name === "eval") {
const respons = await safeEval(JSON.parse(tool.function.arguments).code);
return {
role: "tool",
content: respons,
tool_call_id: tool.id,
};
} else {
return {
role: "tool",
Expand All @@ -72,9 +99,7 @@ async function doTools(
messages.push(result);
});

const newres = await send(messages, null, "tool_res");

console.log(newres);
const newres = await send(messages, null, "tool_res", callback);

return newres;
}
Expand All @@ -83,7 +108,8 @@ export async function send(
messages: types.Message[],
prompt: string | null,
userid: string,
): Promise<response> {
callback: Function,
): Promise<types.Response> {
// here we go

if (!isEnabled) {
Expand All @@ -93,7 +119,7 @@ export async function send(
if (messages.length === 0) {
messages.push({
role: "system",
content: "You are ChatGPT, an LLM by OpenAI.",
content: "You are ChatGPT, an LLM by OpenAI. You are running through a Discord bot named LLM Bot, by Eris.",
});
}

Expand All @@ -104,8 +130,6 @@ export async function send(
});
}

console.log(messages);

const res = await fetch("https://api.openai.com/v1/chat/completions", {
method: "POST",
headers: {
Expand All @@ -127,15 +151,18 @@ export async function send(
throw resp.error.message; // well at least they know why the fuck it crashed??
}

let finalresp: response = {
oaires: resp,
messages,
};
let finalresp = resp

messages.push(resp.choices[0].message);

if (resp.choices[0].finish_reason === "tool_calls") {
finalresp = await doTools(resp, messages);
callback("function", resp);

finalresp = await doTools(resp, messages, callback);
} else {

callback("complete", finalresp)

}

return finalresp;
Expand Down
46 changes: 24 additions & 22 deletions bots/gemini.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,9 @@ type response = {

// const db = await Deno.openKv("./db.sqlite")

async function processGeminiMessages(messages: types.Message[]): Promise<(types.GeminiContentPartImage | types.GeminiContentPartText)[]> {
async function processGeminiMessages(
messages: types.Message[],
): Promise<(types.GeminiContentPartImage | types.GeminiContentPartText)[]> {
const geminiFormattedMessages = [];

for (const message of messages) {
Expand Down Expand Up @@ -47,7 +49,7 @@ async function getImageData(url: string) {
try {
const response = await fetch(url);

const contentType = response.headers.get('Content-Type');
const contentType = response.headers.get("Content-Type");

const blob = await response.blob();

Expand All @@ -63,20 +65,20 @@ async function getImageData(url: string) {

// Step 6: Get the base64-encoded image data

const resultString = reader.result as string
const resultString = reader.result as string;

const base64ImageData = resultString.split(',')[1];
const base64ImageData = resultString.split(",")[1];

return { contentType, base64ImageData };
} catch (error) {
console.error('Error:', error);
console.error("Error:", error);
}
}

export async function send(
messages: types.Message[],
prompt: string | null,
images: string[]
images: string[],
): Promise<response> {
// here we go

Expand All @@ -102,26 +104,26 @@ export async function send(
images.forEach((image) => {
messages.push({
role: "image",
content: image
})
})

content: image,
});
});

let useImageModel = false;

console.log(useImageModel)
console.log(useImageModel);

// Check if any object has the specified property set to the target value
for (let i = 0; i < messages.length; i++) {
if (messages[i].role === "image") {
useImageModel = true;
break; // Stop the loop since we found a match
// Check if any object has the specified property set to the target value
for (let i = 0; i < messages.length; i++) {
if (messages[i].role === "image") {
useImageModel = true;
break; // Stop the loop since we found a match
}
}
}

let geminiFormattedMessages: (types.GeminiContentPartText | types.GeminiContentPartImage)[] = [];
let geminiFormattedMessages:
(types.GeminiContentPartText | types.GeminiContentPartImage)[] = [];

geminiFormattedMessages = await processGeminiMessages(messages)
geminiFormattedMessages = await processGeminiMessages(messages);

// Gemini message system is a motherfucker and I hate it but we gotta deal with it. Messages look like this:

Expand All @@ -135,9 +137,9 @@ for (let i = 0; i < messages.length; i++) {
*/

const res = await fetch(
`https://generativelanguage.googleapis.com/v1beta/models/${useImageModel === true ? 'gemini-pro-vision' : 'gemini-pro'}:generateContent?key=${
Deno.env.get("GEMINI_API_KEY")
}`,
`https://generativelanguage.googleapis.com/v1beta/models/${
useImageModel === true ? "gemini-pro-vision" : "gemini-pro"
}:generateContent?key=${Deno.env.get("GEMINI_API_KEY")}`,
{
method: "POST",
headers: {
Expand Down
10 changes: 5 additions & 5 deletions bots/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -141,13 +141,13 @@ export type GeminiContentPartText = {

export type GeminiContentPartImage = {
inlineData: {
mimeType: string;
data: string;
}
}
mimeType: string;
data: string;
};
};

type GeminiContent = {
parts: GeminiContentPartText[]
parts: GeminiContentPartText[];
role: string;
};

Expand Down
46 changes: 46 additions & 0 deletions lib/eval.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
export function safeEval(code: string): Promise<string> {
return new Promise((resolve, reject) => {
const worker = new Worker(
import.meta.resolve("./eval_worker.js"),
{
type: "module",
name,
deno: {
//@ts-ignore ignore the namespace annotation. Deno < 1.22 required this
namespace: false,
permissions: {
env: false,
hrtime: false,
net: true,
ffi: false,
read: false,
run: false,
write: false,
},
},
},
);

let timeoutId: number;

worker.onmessage = (msg) => {
console.log(msg.data);
clearTimeout(timeoutId);
if (typeof msg.data !== "string") {
worker.terminate();
reject("Worker returned a corrupt message!");
} else {
worker.terminate();
resolve(msg.data);
}
};

worker.postMessage(code);

timeoutId = setTimeout(() => {
console.log("early termination");
worker.terminate(); // What's taking YOU so long, hmm?
reject("Worker did not respond in time!");
}, 10000);
});
}
14 changes: 14 additions & 0 deletions lib/eval_worker.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
// Evaluation sandbox: receives a code string from the parent, eval()s it, and
// posts the stringified result (or the error) back.

// deno-lint-ignore no-global-assign
console = null // keep evaluated code from writing to the host's stdout

self.onmessage = async (e) => {
  try {

    // Await the eval result so code that evaluates to a Promise yields its
    // resolved value instead of "[object Promise]". `await` on a non-thenable
    // is a pass-through, so synchronous results are unaffected.
    const response = `${await eval(e.data)}`

    postMessage(response)

  } catch (err) {
    // Fixed typo ("occured") in the user-facing error message.
    postMessage(`Error occurred during code processing: ${err}`)
  }
}
Loading

0 comments on commit fa0df26

Please sign in to comment.