update moa example

kousun12 committed Jul 29, 2024
1 parent 1b4de00 commit 2a7b20b
Showing 5 changed files with 59 additions and 33 deletions.
11 changes: 9 additions & 2 deletions examples/descript/generate.ts
@@ -1,6 +1,6 @@
#!/usr/bin/env -S npx ts-node --transpileOnly
import fs from "fs";
import { Substrate, TranscribeSpeech } from "substrate";
import { ComputeText, sb, Substrate, TranscribeSpeech } from "substrate";
import { currentDir } from "./util";

/**
@@ -9,7 +9,8 @@ import { currentDir } from "./util";
* https://media.substrate.run/kaufman-bafta-short.mp3
* https://media.substrate.run/dfw-clip.m4a
*/
const sample = "https://media.substrate.run/my-dinner-andre.m4a"; // NB: this is a ~2hr long file
// const sample = "https://media.substrate.run/my-dinner-andre.m4a"; // NB: this is a ~2hr long file
const sample = "https://media.substrate.run/federer-dartmouth.m4a";
const substrate = new Substrate({ apiKey: process.env["SUBSTRATE_API_KEY"] });

const audio_uri = process.argv[2] || sample;
@@ -19,6 +20,12 @@ async function main() {
{ audio_uri, segment: true, align: true },
{ cache_age: 60 * 60 * 24 * 7 },
);
// const summarize = new ComputeText({
// model: "Llama3Instruct70B",
// prompt: sb.interpolate`summarize this transcript: <TRANSCRIPT>${transcribe.future.text}</TRANSCRIPT>`,
// max_tokens: 800,
// });

const res = await substrate.run(transcribe);
const transcript = res.get(transcribe);

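The commented-out summarize node above sketches a downstream ComputeText step that consumes the transcription future. Below is a minimal, hypothetical sketch of wiring that up end to end, assuming substrate.run accepts multiple nodes and that the ComputeText result exposes a text field, as the other examples in this commit suggest; it is not part of the commit itself.

#!/usr/bin/env -S npx ts-node --transpileOnly
// Hypothetical sketch, not part of this commit: chain a summary off the
// transcript future. Only node types shown in this diff are used; the
// multi-node run(...) call and the result's `text` field are assumptions.
import { ComputeText, sb, Substrate, TranscribeSpeech } from "substrate";

const substrate = new Substrate({ apiKey: process.env["SUBSTRATE_API_KEY"] });

async function main() {
  const transcribe = new TranscribeSpeech(
    {
      audio_uri: "https://media.substrate.run/federer-dartmouth.m4a",
      segment: true,
      align: true,
    },
    { cache_age: 60 * 60 * 24 * 7 },
  );
  const summarize = new ComputeText({
    model: "Llama3Instruct70B",
    prompt: sb.interpolate`summarize this transcript: <TRANSCRIPT>${transcribe.future.text}</TRANSCRIPT>`,
    max_tokens: 800,
  });

  const res = await substrate.run(transcribe, summarize);
  console.log(res.get(summarize).text); // assumed result shape
}

main();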
6 changes: 2 additions & 4 deletions examples/image-generation.ts
@@ -3,13 +3,11 @@
import { Substrate, ComputeText, GenerateImage } from "substrate";

async function main() {
const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"];

const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY });
const substrate = new Substrate({ apiKey: process.env["SUBSTRATE_API_KEY"] });

const scene = new ComputeText({
prompt:
"describe a highly detailed forest scene with something suprising happening in one sentence, be concise, like hemmingway would write it.",
"describe a highly detailed forest scene with something surprising happening in one sentence, be concise, like Hemingway would write it.",
});

const styles = [
56 changes: 37 additions & 19 deletions examples/mixture-of-agents/ask.ts
@@ -4,46 +4,64 @@ import { Substrate, Box, sb, ComputeText } from "substrate";
import fs from "fs";
import { currentDir, sampleQuestion, aggregate, jqList } from "./util";

const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"];
const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY });

const models = [
"Mistral7BInstruct",
"Mixtral8x7BInstruct",
"Llama3Instruct8B",
"Llama3Instruct405B",
"claude-3-5-sonnet-20240620",
"Llama3Instruct70B",
"gpt-4o-mini",
"Llama3Instruct8B",
"Mixtral8x7BInstruct",
];
const max_tokens = 800;
const aggregatorModel = "claude-3-5-sonnet-20240620";
const max_tokens = 400;
const temperature = 0.4;
const opts = { cache_age: 60 * 60 * 24 * 7 };

const numLayers = 3;
const question = process.argv[2] || sampleQuestion;

function getMixture(q: string, prev: any = null) {
const prompt = prev
? sb.concat(aggregate, "\n\nquestion: ", q, "\n\nprevious:\n\n", prev)
: q;
function getPrompt(prev: any = null) {
return prev
? sb.concat(
aggregate,
"\n\nuser query: ",
question,
"\n\nprevious responses:\n\n",
prev,
)
: question;
}

function getMixture(prev: any = null) {
return new Box({
value: models.map(
(model) =>
new ComputeText({ prompt, model, max_tokens }, opts).future.text,
new ComputeText(
{ prompt: getPrompt(prev), model, max_tokens, temperature },
opts,
).future.text,
),
});
}

function getLastLayer(layers: Box[]) {
return sb.jq<"string">(layers[layers.length - 1]!.future.value, jqList);
}

async function main() {
const SUBSTRATE_API_KEY = process.env["SUBSTRATE_API_KEY"];
const substrate = new Substrate({ apiKey: SUBSTRATE_API_KEY });
const layers: Box[] = [getMixture(question)];
const lastLayer = () =>
sb.jq<"string">(layers[layers.length - 1]!.future.value, jqList);

for (let i = 0; i < numLayers - 1; i++) {
layers.push(getMixture(question, lastLayer()));
layers.push(getMixture(getLastLayer(layers)));
}

const final = new ComputeText(
{
prompt: sb.concat(aggregate, "\n\n", lastLayer()),
model: "Llama3Instruct70B",
max_tokens,
prompt: getPrompt(getLastLayer(layers)),
model: aggregatorModel,
max_tokens: 800,
temperature,
},
opts,
);
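For readers skimming the diff: each layer is a Box whose value is the list of parallel ComputeText futures (one per model), the previous layer is flattened into a numbered string via sb.jq with jqList, and a single aggregator model synthesizes the last layer. The condensed sketch below is illustrative only, since the tail of ask.ts is collapsed in this view; the multi-node run call and the result's text field are assumptions.

// Illustrative condensation of the mixture-of-agents flow above; not the repo
// file. Only API calls visible in this diff are used, except where noted.
import { Substrate, Box, sb, ComputeText } from "substrate";

const substrate = new Substrate({ apiKey: process.env["SUBSTRATE_API_KEY"] });
const models = ["Llama3Instruct70B", "Llama3Instruct8B"]; // trimmed list
// copied from examples/mixture-of-agents/util.ts
const jqList = `to_entries | map(((.key + 1) | tostring) + ". " + .value) | join("\n")`;
const question = "What is a mixture of agents?";

// One layer: every model answers the same prompt in parallel.
const layer = (prompt: any) =>
  new Box({
    value: models.map(
      (model) =>
        new ComputeText({ prompt, model, max_tokens: 400 }).future.text,
    ),
  });

// Flatten a layer's outputs into one numbered string for the next prompt.
const flatten = (box: Box) => sb.jq<"string">(box.future.value, jqList);

const first = layer(question);
const second = layer(
  sb.concat("user query: ", question, "\n\nprevious responses:\n\n", flatten(first)),
);
const final = new ComputeText({
  prompt: sb.concat("synthesize these responses:\n\n", flatten(second)),
  model: "claude-3-5-sonnet-20240620",
  max_tokens: 800,
});

async function main() {
  // Assumed: run accepts multiple nodes, and results expose `text`.
  const res = await substrate.run(first, second, final);
  console.log(res.get(final).text);
}

main();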
15 changes: 9 additions & 6 deletions examples/mixture-of-agents/index.html
@@ -159,11 +159,13 @@
<script>
const question = "{{ question }}";
const modelNames = [
"Mistral7BInstruct",
"Mixtral8x7BInstruct",
"Llama3Instruct8B",
"Llama3Instruct70B",
];
"Llama 3.1 405B",
"Claude 3.5",
"Llama 3.1 70B",
"GPT-4o Mini",
"Llama 3.1 8B",
"Mixtral 8x7B",
]
const individualResults = "{{ individual }}";
const aggResults = "{{ summaries }}";

@@ -187,7 +189,8 @@
if (showingIndividual) {
contentArea.textContent =
individualResults[currentLayer][currentIndex].trim();
cardTitle.textContent = modelNames[currentIndex];

cardTitle.textContent = `${modelNames[currentIndex]} - Layer ${ currentLayer + 1 }`;
} else {
contentArea.textContent = aggResults[currentLayer].trim();
cardTitle.textContent = `MoA Layer ${currentLayer + 1}`;
4 changes: 2 additions & 2 deletions examples/mixture-of-agents/util.ts
@@ -2,8 +2,8 @@ import { fileURLToPath } from "url";
import { dirname } from "path";

export const sampleQuestion =
"What was Arendt's notion of Freedom? How did she distinguish it from Action?";
export const aggregate = `You have been provided with a set of responses from various open-source models to the latest user query. Your task is to synthesize these responses into a single, high-quality response. It is crucial to critically evaluate the information provided in these responses, recognizing that some of it may be biased or incorrect. Your response should not simply replicate the given answers but should offer a refined, accurate, and comprehensive reply to the instruction. Ensure your response is well-structured, well-considered, and adheres to the highest standards of accuracy and reliability. Do not respond as if we're having a conversation, just output an objective response.`;
"The following is a hypothetical short story written by Asimov after seeing the world in 2024. Go beyond the obvious, and come up with a creative story that is incisive, allegorical, and relevant. Respond starting with the title on the first line, followed by two newlines, and then the story.";
export const aggregate = `You have been provided with a set of responses to a user query. Your task is to synthesize these responses into a single, high-quality response. It is crucial to critically evaluate the information provided in these responses, recognizing that some of it may be biased or incorrect. Your response should not simply replicate the given answers but should offer a refined, accurate, and comprehensive reply to the original user query. Ensure your response is well-structured, well-considered, and adheres to the highest standards of accuracy and reliability. Do not respond conversationally or acknowledge the asking of the query, just output an objective response.`;
export const jqList = `to_entries | map(((.key + 1) | tostring) + ". " + .value) | join("\n")`;

// @ts-ignore
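The jqList filter above numbers a layer's responses and joins them with newlines before they are handed to the next layer. A plain-TypeScript illustration of the same transformation (not part of the repo):

// Equivalent of the jqList jq filter, shown only to illustrate the format
// handed to the next layer of models.
const responses = ["First model's answer", "Second model's answer"];
const numbered = responses.map((v, i) => `${i + 1}. ${v}`).join("\n");
console.log(numbered);
// 1. First model's answer
// 2. Second model's answer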
