Merge pull request #5 from genericness/testing
Llama 3 Groq
Showing 1 changed file with 74 additions and 0 deletions.
@@ -0,0 +1,74 @@
import * as types from "../main.d.ts";

export const information: types.information = {
  llmFileVersion: "1.0",
  env: ["GROQ_API_KEY"],
  functions: false,
  multiModal: false,
  callbackSupport: true,
  streamingSupport: false,
  id: "llama3-groq",
  name: "Llama 3 Groq",
  description: "Meta's new flagship model. Powered by Groq!",
  highCostLLM: false,
};

// const db = await Deno.openKv("./db.sqlite")

export async function send(
  prompt: string | null,
  messages: types.Message[],
  callback?:
    | ((information: types.callbackData, complete: boolean) => void)
    | null,
  requirements?: types.Requirements,
): Promise<types.Response> {
  if (!requirements?.env?.GROQ_API_KEY) {
    throw new DOMException("env.GROQ_API_KEY", "NotFoundError");
  }

  if (requirements.streaming) {
    throw new DOMException("streaming", "NotSupportedError");
  }

  // If the conversation is empty, seed it with the default system prompt.
  if (messages.length === 0) {
    messages.push({
      role: "system",
      content:
        "You are Llama, an LLM by Meta. You are running through a Discord bot named LLM Bot, by Eris.",
    });
  }

  messages.push({
    role: "user",
    content: prompt,
  });

  const res = await fetch("https://api.groq.com/openai/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${requirements.env.GROQ_API_KEY}`,
    },
    body: JSON.stringify({
      model: "llama3-70b-8192",
      messages: messages,
    }),
  });

  const resp: types.Response = await res.json();

  if (resp.error) {
    // Surface the API error message to the caller.
    throw resp.error.message;
  }

  messages.push(resp.choices[0].message);

  resp.messages = messages;

  if (callback) callback({ data: resp.choices[0].message.content }, true);

  return resp;
}
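For context, a minimal sketch of how this provider could be called. The shapes of types.Requirements and types.callbackData are inferred from how send() uses them above; the import path, the Deno.env.get() lookup, and the example prompt are illustrative and not part of this commit.

// Hypothetical caller; only the fields that send() actually reads are populated.
import { information, send } from "./llama3-groq.ts";

const response = await send(
  "Give me one fun fact about llamas.",
  [], // empty history, so send() injects its default system prompt
  (data, complete) => {
    // callbackSupport is true; send() invokes this once with the final content
    if (complete) console.log("callback:", data.data);
  },
  {
    env: { GROQ_API_KEY: Deno.env.get("GROQ_API_KEY") ?? "" },
    streaming: false, // streamingSupport is false, so streaming must stay off
  },
);

console.log(`${information.name}:`, response.choices[0].message.content);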