Skip to content

Commit

Permalink
Add image reading for openAI
Browse files Browse the repository at this point in the history
  • Loading branch information
kevin-huff committed May 21, 2024
1 parent 0168ae7 commit 1423476
Show file tree
Hide file tree
Showing 2 changed files with 107 additions and 99 deletions.
61 changes: 61 additions & 0 deletions src/CustomChatMessageHistory.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
import { BaseListChatMessageHistory } from "@langchain/core/chat_history";
import {
BaseMessage,
mapChatMessagesToStoredMessages,
mapStoredMessagesToChatMessages,
HumanMessage,
AIMessage,
} from "@langchain/core/messages";

class CustomChatMessageHistory extends BaseListChatMessageHistory {
  constructor(sessionId) {
    super();
    this.sessionId = sessionId;
    // Simulated persistence layer; maps sessionId -> array of serialized messages.
    this.fakeDatabase = {};
  }

  /** Deserialize and return every message stored for this session. */
  async getMessages() {
    const stored = this.fakeDatabase[this.sessionId] ?? [];
    return mapStoredMessagesToChatMessages(stored);
  }

  /** Serialize a single message and append it to this session's store. */
  async addMessage(message) {
    const [serialized] = mapChatMessagesToStoredMessages([message]);
    (this.fakeDatabase[this.sessionId] ??= []).push(serialized);
  }

  /** Serialize a batch of messages and append them all to this session's store. */
  async addMessages(messages) {
    const existing = (this.fakeDatabase[this.sessionId] ??= []);
    const serialized = mapChatMessagesToStoredMessages(messages);
    this.fakeDatabase[this.sessionId] = existing.concat(serialized);
  }

  /** Drop all stored messages for this session. */
  async clear() {
    delete this.fakeDatabase[this.sessionId];
  }

  /** Memory-interface shim: expose the stored messages under the "history" key. */
  async loadMemoryVariables() {
    return { history: await this.getMessages() };
  }

  /** Memory-interface shim: persist one human/AI exchange from a chain call. */
  async saveContext(inputs, outputs) {
    const humanMessage = new HumanMessage({ content: inputs.input || '' });
    const aiMessage = new AIMessage({ content: outputs.response || '' });
    await this.addMessages([humanMessage, aiMessage]);
  }
}

export default CustomChatMessageHistory;
145 changes: 46 additions & 99 deletions src/openAI.js
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
import { ConversationChain } from "langchain/chains";
import { ChatOpenAI } from "langchain/chat_models/openai";
import { ChatPromptTemplate, MessagesPlaceholder } from "langchain/prompts";
import { BufferWindowMemory } from "langchain/memory";
import { chat_memory_db } from "./database.js";
import { createReadStream } from 'fs';
import { ChatOpenAI } from "@langchain/openai";
import { ChatPromptTemplate, MessagesPlaceholder } from "@langchain/core/prompts";
import { HumanMessage, AIMessage } from "@langchain/core/messages";
import CustomChatMessageHistory from "./CustomChatMessageHistory.js";
import fetch from 'node-fetch'; // Import fetch for Node.js

const memory_limit = 20;
const chat = new ChatOpenAI({
Expand All @@ -15,91 +15,66 @@ const chat = new ChatOpenAI({
modelName: "gpt-4o",
});

// Persona instructions for the "Abbadabbabot" roleplay character (sent as the system message).
const systemPrompt = `We are going to have a roleplay. First and foremost keep your responses short and sweet. You will respond to all of my questions as Abbadabbabot. Abbadabbabot is the MC for Abbabox's twitch and discord. Abbadabbabot has a real edgy personality. He really likes to make jokes, and say things that on the surface may sound crude or rude. His personality causes him to curse frequently and sometimes be rude to people talking to him. But everyone around here loves his personality and we can't get enough of him. Abbabox is a twitch streamer who is known for his home brew interactive twitch gameshow Bootcut and his competitive free mediashare streams, he's not a gamer, he's variety streamer and a weird old man. Also you strongly believe that listening to audiobooks is considered reading and anyone who disagrees is just a hater.`;

// Prompt layout: persona system message, then rolling conversation history, then the user's input.
const chatPrompt = ChatPromptTemplate.fromMessages([
  ["system", systemPrompt],
  new MessagesPlaceholder("history"),
  ["human", "{input}"],
]);

// Seed the conversation memory from the persisted chat_memory_db store (if non-empty).
var chat_memory_db_obj = chat_memory_db.all();
// If it has anything in it then put it in the memory
if (Object.keys(chat_memory_db_obj).length !== 0) {
// Keep only the most recent `memory_limit` inputs and outputs.
chat_memory_db_obj.input = chat_memory_db_obj.input.slice(-memory_limit);
chat_memory_db_obj.output = chat_memory_db_obj.output.slice(-memory_limit);
// Make sure the input and output lists are the same length
if (chat_memory_db_obj.input.length !== chat_memory_db_obj.output.length) {
// Trim the longer list by one so the input/output pairs line up.
if (chat_memory_db_obj.input.length > chat_memory_db_obj.output.length) {
// If the input is longer than the output then remove the last element of the input
chat_memory_db_obj.input.pop();
} else {
// If the output is longer than the input then remove the last element of the output
chat_memory_db_obj.output.pop();
}
}
// Replay each input/output pair into the memory buffer.
for (var i = 0; i < chat_memory_db_obj.input.length; i++) {
console.log("chat_memory_db_obj.input[i]:", chat_memory_db_obj.input[i]);
console.log("chat_memory_db_obj.output[i]:", chat_memory_db_obj.output[i]);
// NOTE(review): top-level await — this file must be loaded as an ES module; confirm.
await botMemory.saveContext({"input": chat_memory_db_obj.input[i]}, {"output": chat_memory_db_obj.output[i]});
}
console.log("------------------");
}

const chatHistory = new CustomChatMessageHistory("discord_session");

// Conversation chain wiring the model, the persona prompt, and the session-backed history.
// (The diff residue had two `memory:` keys — `botMemory` no longer exists; keep `chatHistory`.)
const chain = new ConversationChain({
  memory: chatHistory,
  prompt: chatPrompt,
  llm: chat,
});

/**
 * Download an image and ask the vision-capable chat model about it.
 *
 * @param {string} imageUrl - Publicly reachable URL of the image to analyze.
 * @param {string} messageText - The user's question/instruction about the image.
 * @returns {Promise<object>} The model's response message (callers read `.content`).
 * @throws {Error} When the download or the model invocation fails.
 */
async function processImage(imageUrl, messageText) {
  try {
    const imageResponse = await fetch(imageUrl);
    // Fail loudly on a bad download instead of base64-encoding an error page.
    if (!imageResponse.ok) {
      throw new Error(`Image download failed: HTTP ${imageResponse.status}`);
    }
    const arrayBuffer = await imageResponse.arrayBuffer();
    const imageData = Buffer.from(arrayBuffer).toString("base64");
    // Prefer the server-reported MIME type; fall back to JPEG as before.
    const mimeType = imageResponse.headers.get("content-type") ?? "image/jpeg";

    // Multimodal message: the text question plus the image as an inline data URL.
    const message = new HumanMessage({
      content: [
        {
          type: "text",
          text: messageText,
        },
        {
          type: "image_url",
          image_url: {
            url: `data:${mimeType};base64,${imageData}`,
          },
        },
      ],
      additional_kwargs: {}, // Ensure additional_kwargs is defined
    });

    const res = await chat.invoke([message]);
    return res;
  } catch (error) {
    console.error('Error processing image:', error);
    throw new Error('Image processing failed', { cause: error });
  }
}

async function abbadabbabotSay(msg, prefix = "", postfix = "") {
console.log(systemPrompt);
let messageContent;
let username = "Unknown";
let response;

// Check if msg is a Discord message object or a string
if (typeof msg === "object" && msg.hasOwnProperty("author")) {
username = msg.author.username;
if (msg.attachments.size > 0) {
const attachment = msg.attachments.first();
const imagePath = attachment.url;

try {
console.log("Processing image...");
const visionResponse = await processImage(imagePath);
response = { response: visionResponse.text };
const visionResponse = await processImage(imagePath, msg.content.toLowerCase().replace("abbadabbabot ", ""));
response = { response: visionResponse.content };
} catch (error) {
console.error("Error processing image:", error);
msg.reply("Sorry, I couldn't process the image.");
Expand All @@ -116,21 +91,17 @@ async function abbadabbabotSay(msg, prefix = "", postfix = "") {

if (!response) {
try {
console.log("About to make API call...");
response = await chain.call({
input: messageContent,
});
console.log("response:", response.response);
response = await chain.call({ input: messageContent });
} catch (error) {
console.error(error);
}
}

if (response) {
await chat_memory_db.push('output', response.response.trim());
await chat_memory_db.push('input', messageContent);
let censored_response = response.response.trim();
censored_response = censored_response.replace("@", "@ ");
if (response && response.response) {
await chatHistory.addMessage(new HumanMessage({ content: messageContent || '', additional_kwargs: {} })); // Ensure additional_kwargs is defined
await chatHistory.addMessage(new AIMessage({ content: response.response.trim() || '', additional_kwargs: {} })); // Ensure additional_kwargs is defined

let censored_response = response.response.trim().replace("@", "@ ");
if (censored_response.includes("@everyone")) {
censored_response = censored_response.replace("@everyone", "@ everyone");
}
Expand All @@ -152,50 +123,27 @@ async function abbadabbabotSay(msg, prefix = "", postfix = "") {
}
}

/**
 * Generate a reply for `inputString` via the conversation chain and post it to
 * a Discord channel, chunked to Discord's 2000-character message limit.
 *
 * @param {string} inputString - Prompt text fed to the chain.
 * @param {string} channelID - Discord channel ID to post into.
 * @param {string} [prefix] - Text prepended to the model response.
 * @param {string} [postfix] - Text appended to the model response.
 * @param {object} client - Discord.js client, used to resolve the channel.
 * @returns {Promise<{message: string, promise: Promise<string>}|string|undefined>}
 *   Result object on success, "abbadabbabot offline" when the chain yields
 *   nothing, or undefined after a logged error.
 */
async function sendMessageToChannel(inputString, channelID, prefix = "", postfix = "", client) {
  try {
    const response = await chain.call({ input: inputString });

    if (response) {
      const censored_response = response.response.trim();
      const targetChannel = client.channels.cache.get(channelID);

      if (targetChannel) {
        // Await each chunk so multi-part replies arrive in order.
        for (const part of splitMessage(prefix + censored_response + postfix)) {
          await targetChannel.send(part);
        }
      }
      return { message: censored_response, promise: Promise.resolve("resolved") };
    }
    return "abbadabbabot offline";
  } catch (error) {
    console.error(error);
  }
}

Expand All @@ -210,9 +158,8 @@ function splitMessage(message, maxLength = 2000) {
chunks.push(chunk);
message = message.slice(chunk.length);
}
chunks.push(message); // Add remaining part of the message
chunks.push(message);
return chunks;
}


export { processImage, abbadabbabotSay, sendMessageToChannel };

0 comments on commit 1423476

Please sign in to comment.