Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add command mode and allow changing the active mode #6

Open
wants to merge 10 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions .editorconfig
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Top-most EditorConfig file; editors stop searching parent directories here.
root = true

# Defaults applied to every file in the repository.
[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

# NOTE(review): these repeat the [*] defaults above — redundant but harmless.
[*.mjs]
indent_style = space
indent_size = 2
93 changes: 72 additions & 21 deletions Chat.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ import { encode } from "gpt-tokenizer/esm/model/davinci-codex"; // tokenizer

// Map of model shortcodes to full model names
export const MODELS = {
g: 'gpt-4o',
g: 'gpt-4o',
G: 'gpt-4-32k-0314',
h: 'claude-3-haiku-20240307',
s: 'claude-3-5-sonnet-20240620',
Expand All @@ -20,18 +20,40 @@ export const MODELS = {
I: 'gemini-1.5-pro-latest'
};

const DEFAULT_MODEL = "s";

// Resolve a model shortcode to its full model name.
// Falls back to the argument itself when it is not a known shortcode,
// and to the default model when the argument is empty/undefined.
export function selectModel(model) {
  const resolved = MODELS[model];
  if (resolved) {
    return resolved;
  }
  return model || MODELS[DEFAULT_MODEL];
}

// Bundle an `ask` function and a `getMessages` accessor into a Chat
// interface object. Both values are stored as-is, under the same names.
function newChat(ask, getMessages) {
  const chat = { ask, getMessages };
  return chat;
}

// Factory function to create a stateful OpenAI chat
export function openAIChat(clientClass) {
function openAIChat(clientClass, { system, model, temperature = 0.0, max_tokens = 4096, stream = true, old_messages}) {
const messages = [];

async function ask(userMessage, { system, model, temperature = 0.0, max_tokens = 4096, stream = true }) {
if (system) {
messages.push({ role: "system", content: system })
}


// Ignore "system" messages (as they are added above).
if (old_messages) {
old_messages.forEach(m => {
if (m.role != "system") {
messages.push(m);
}
});
}

async function ask(userMessage) {
model = MODELS[model] || model;
const client = new clientClass({ apiKey: await getToken(clientClass.name.toLowerCase()) });

if (messages.length === 0) {
messages.push({ role: "system", content: system });
}

messages.push({ role: "user", content: userMessage });

const params = { messages, model, temperature, max_tokens, stream };
Expand All @@ -50,14 +72,25 @@ export function openAIChat(clientClass) {
return result;
}

return ask;
const getMessages = () => messages;

return newChat(ask, getMessages);
}

// Factory function to create a stateful Anthropic chat
export function anthropicChat(clientClass) {
function anthropicChat(clientClass, { system, model, temperature = 0.0, max_tokens = 4096, stream = true, old_messages }) {
const messages = [];

async function ask(userMessage, { system, model, temperature = 0.0, max_tokens = 4096, stream = true }) {
// Ignore "system" messages (they are an arg in the call).
if (old_messages) {
old_messages.forEach(m => {
if (m.role != "system") {
messages.push(m);
}
});
}

async function ask(userMessage) {
model = MODELS[model] || model;
const client = new clientClass({ apiKey: await getToken(clientClass.name.toLowerCase()) });

Expand All @@ -70,7 +103,7 @@ export function anthropicChat(clientClass) {
.stream({ ...params, messages })
.on('text', (text) => {
process.stdout.write(text);
result += text;
result += text;
});
await response.finalMessage();

Expand All @@ -79,13 +112,24 @@ export function anthropicChat(clientClass) {
return result;
}

return ask;
const getMessages = () => messages;

return newChat(ask, getMessages);
}

export function geminiChat(clientClass) {
function geminiChat(clientClass, { system, model, temperature = 0.0, max_tokens = 4096, stream = true }) {
const messages = [];

async function ask(userMessage, { system, model, temperature = 0.0, max_tokens = 4096, stream = true }) {
// Convert to the format used by gemini.
if (old_messages) {
old_messages.forEach(m => {
if (m.role != "system") {
messages.push({ role: m.role, parts: [{ text: m.content }] });
}
});
}

async function ask(userMessage) {
model = MODELS[model] || model;
const client = new clientClass(await getToken(clientClass.name.toLowerCase()));

Expand Down Expand Up @@ -117,20 +161,27 @@ export function geminiChat(clientClass) {
return result;
}

return ask;
// Gemini messages do not have the same format as openAI/Anthropic. Pop
// the inner list and rename 'model' to 'assistant'.
const getMessages = () => messages.map(m => ({
role: m.role == "model" ? "assistant" : m.role,
content: m.parts[0].text,
}));

return newChat(ask, getMessages);
}

// Generic asker function that dispatches to the correct asker based on the model name
export function chat(model) {
export function createChat(model, opts) {
model = MODELS[model] || model;
if (model.startsWith('gpt')) {
return openAIChat(OpenAI);
return openAIChat(OpenAI, { model, ... opts });
} else if (model.startsWith('claude')) {
return anthropicChat(Anthropic);
return anthropicChat(Anthropic, { model, ... opts });
} else if (model.startsWith('llama')) {
return openAIChat(Groq);
return openAIChat(Groq, { model, ... opts });
} else if (model.startsWith('gemini')) {
return geminiChat(GoogleGenerativeAI);
return geminiChat(GoogleGenerativeAI, { model, ... opts });
} else {
throw new Error(`Unsupported model: ${model}`);
}
Expand All @@ -151,7 +202,7 @@ export function tokenCount(inputText) {
// Encode the input string into tokens
const tokens = encode(inputText);

// Get the number of tokens
// Get the number of tokens
const numberOfTokens = tokens.length;

// Return the number of tokens
Expand Down
33 changes: 0 additions & 33 deletions Claude.mjs

This file was deleted.

57 changes: 0 additions & 57 deletions GPT.mjs

This file was deleted.

2 changes: 0 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,5 +8,3 @@ Some AI scripts I use daily.
- `aiemu`: moved to [here](https://github.com/victorTaelin/aiemu)

- `chatsh [model]`: like ChatGPT but in the terminal

TODO: remove `Claude.mjs`/`GPT.mjs` and just use `Ask.mjs` in all files
Loading