Skip to content

Commit

Permalink
use gpt-3.5-turbo
Browse files Browse the repository at this point in the history
  • Loading branch information
mckaywrigley committed Mar 1, 2023
1 parent 72744dd commit 4466a62
Show file tree
Hide file tree
Showing 6 changed files with 24 additions and 143 deletions.
47 changes: 9 additions & 38 deletions components/Search.tsx
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { OpenAIModel, SearchQuery, Source } from "@/types";
import { createPrompt } from "@/utils/answer";
import { SearchQuery, Source } from "@/types";
import { IconArrowRight, IconBolt, IconSearch } from "@tabler/icons-react";
import endent from "endent";
import { FC, KeyboardEvent, useEffect, useRef, useState } from "react";

interface SearchProps {
Expand All @@ -13,7 +13,6 @@ export const Search: FC<SearchProps> = ({ onSearch, onAnswerUpdate, onDone }) =>
const inputRef = useRef<HTMLInputElement>(null);

const [query, setQuery] = useState<string>("");
const [model, setModel] = useState<OpenAIModel>(OpenAIModel.DAVINCI_CODE);
const [apiKey, setApiKey] = useState<string>("");
const [showSettings, setShowSettings] = useState<boolean>(false);
const [loading, setLoading] = useState<boolean>(false);
Expand All @@ -35,7 +34,7 @@ export const Search: FC<SearchProps> = ({ onSearch, onAnswerUpdate, onDone }) =>
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify({ query, model })
body: JSON.stringify({ query })
});

if (!response.ok) {
Expand All @@ -50,13 +49,17 @@ export const Search: FC<SearchProps> = ({ onSearch, onAnswerUpdate, onDone }) =>

const handleStream = async (sources: Source[]) => {
try {
let prompt = createPrompt(query, sources, model);
const prompt = endent`Provide a 2-3 sentence answer to the query based on the following sources. Be original, concise, accurate, and helpful. Cite sources as [1] or [2] or [3] after each sentence (not just the very end) to back up your answer (Ex: Correct: [1], Correct: [2][3], Incorrect: [1, 2]).
${sources.map((source, idx) => `Source [${idx + 1}]:\n${source.text}`).join("\n\n")}
`;

const response = await fetch("/api/answer", {
method: "POST",
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify({ prompt, model, apiKey })
body: JSON.stringify({ prompt, apiKey })
});

if (!response.ok) {
Expand Down Expand Up @@ -102,43 +105,27 @@ export const Search: FC<SearchProps> = ({ onSearch, onAnswerUpdate, onDone }) =>
return;
}

if (!model) {
alert("Please select a model.");
return;
}

localStorage.setItem("CLARITY_KEY", apiKey);
localStorage.setItem("CLARITY_MODEL", model);

setShowSettings(false);
inputRef.current?.focus();
};

const handleClear = () => {
localStorage.removeItem("CLARITY_KEY");
localStorage.removeItem("CLARITY_MODEL");

setApiKey("");
setModel(OpenAIModel.DAVINCI_CODE);
};

useEffect(() => {
const CLARITY_KEY = localStorage.getItem("CLARITY_KEY");
const CLARITY_MODEL = localStorage.getItem("CLARITY_MODEL");

if (CLARITY_KEY) {
setApiKey(CLARITY_KEY);
} else {
setShowSettings(true);
}

if (CLARITY_MODEL) {
setModel(CLARITY_MODEL as OpenAIModel);
} else {
setShowSettings(true);
setModel(OpenAIModel.DAVINCI_CODE);
}

inputRef.current?.focus();
}, []);

Expand Down Expand Up @@ -190,22 +177,6 @@ export const Search: FC<SearchProps> = ({ onSearch, onAnswerUpdate, onDone }) =>

{showSettings && (
<>
<select
value={model}
onChange={(e) => setModel(e.target.value as OpenAIModel)}
className="max-w-[400px] block w-full cursor-pointer rounded-md border border-gray-300 p-2 text-black shadow-sm focus:border-blue-500 focus:outline-none focus:ring-2 focus:ring-blue-500 sm:text-sm"
>
{Object.values(OpenAIModel).map((model) => (
<option
key={model}
value={model}
className="bg-gray-900 text-white"
>
{model}
</option>
))}
</select>

<input
type="password"
className="max-w-[400px] block w-full rounded-md border border-gray-300 p-2 text-black shadow-sm focus:border-blue-500 focus:outline-none focus:ring-2 focus:ring-blue-500 sm:text-sm"
Expand Down
6 changes: 2 additions & 4 deletions pages/api/answer.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import { OpenAIModel } from "@/types";
import { OpenAIStream } from "@/utils/answer";

export const config = {
Expand All @@ -7,13 +6,12 @@ export const config = {

const handler = async (req: Request): Promise<Response> => {
try {
const { prompt, model, apiKey } = (await req.json()) as {
const { prompt, apiKey } = (await req.json()) as {
prompt: string;
model: OpenAIModel;
apiKey: string;
};

const stream = await OpenAIStream(prompt, model, apiKey);
const stream = await OpenAIStream(prompt, apiKey);

return new Response(stream);
} catch (error) {
Expand Down
6 changes: 3 additions & 3 deletions pages/api/sources.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { Readability } from "@mozilla/readability";
import * as cheerio from "cheerio";
import { JSDOM } from "jsdom";
import type { NextApiRequest, NextApiResponse } from "next";
import { cleanSourceText, getSourceCount, shortenSourceText } from "../../utils/sources";
import { cleanSourceText } from "../../utils/sources";

type Data = {
sources: Source[];
Expand All @@ -16,7 +16,7 @@ const searchHandler = async (req: NextApiRequest, res: NextApiResponse<Data>) =>
model: OpenAIModel;
};

const sourceCount = getSourceCount(model);
const sourceCount = 4;

// GET LINKS
const response = await fetch(`https://www.google.com/search?q=${query}`);
Expand Down Expand Up @@ -69,7 +69,7 @@ const searchHandler = async (req: NextApiRequest, res: NextApiResponse<Data>) =>
const filteredSources = sources.filter((source) => source !== undefined);

for (const source of filteredSources) {
source.text = shortenSourceText(source.text, model);
source.text = source.text.slice(0, 1500);
}

res.status(200).json({ sources: filteredSources });
Expand Down
4 changes: 1 addition & 3 deletions types/index.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
export enum OpenAIModel {
DAVINCI_TEXT = "text-davinci-003",
CURIE_TEXT = "text-curie-001",
DAVINCI_CODE = "code-davinci-002"
DAVINCI_TURBO = "gpt-3.5-turbo"
}

export type Source = {
Expand Down
76 changes: 9 additions & 67 deletions utils/answer.ts
Original file line number Diff line number Diff line change
@@ -1,82 +1,24 @@
import { OpenAIModel, Source } from "@/types";
import endent from "endent";
import { OpenAIModel } from "@/types";
import { createParser, ParsedEvent, ReconnectInterval } from "eventsource-parser";

const createTextDavinciPrompt = (query: string, sources: Source[]) => {
return endent`INSTRUCTIONS
Provide a 2-3 sentence answer to the query based on the sources. Be original, concise, accurate, and helpful. Cite sources as [1] or [2] or [3] after each sentence to back up your answer (Ex: Correct: [1], Correct: [2][3], Incorrect: [1, 2]).
###
SOURCES
${sources.map((source, idx) => `Source [${idx + 1}]:\n${source.text}`).join("\n\n")}
###
QUERY
${query}
###
ANSWER`;
};

const createTextCuriePrompt = (query: string, sources: Source[]) => {
return endent`INSTRUCTIONS
Provide a 2-3 sentence answer to the query based on the sources. Be original, concise, accurate, and helpful.
###
SOURCES
${sources.map((source, idx) => `Source [${idx + 1}]:\n${source.text}`).join("\n\n")}
###
QUERY
${query}
###
ANSWER`;
};

const createCodeDavinciPrompt = (query: string, sources: Source[]) => {
return endent`INSTRUCTIONS
Provide a 2-3 sentence answer to the query based on the sources. Be original, concise, accurate, and helpful.
###
SOURCES
${sources.map((source, idx) => `Source [${idx + 1}]:\n${source.text}`).join("\n\n")}
###
QUERY
${query}
###
ANSWER`;
};

export const createPrompt = (query: string, sources: Source[], model: OpenAIModel) => {
switch (model) {
case OpenAIModel.DAVINCI_TEXT:
return createTextDavinciPrompt(query, sources);
case OpenAIModel.CURIE_TEXT:
return createTextCuriePrompt(query, sources);
case OpenAIModel.DAVINCI_CODE:
return createCodeDavinciPrompt(query, sources);
default:
return createCodeDavinciPrompt(query, sources);
}
};

export const OpenAIStream = async (prompt: string, model: OpenAIModel, apiKey: string) => {
export const OpenAIStream = async (prompt: string, apiKey: string) => {
const encoder = new TextEncoder();
const decoder = new TextDecoder();

const res = await fetch("https://api.openai.com/v1/completions", {
const res = await fetch("https://api.openai.com/v1/chat/completions", {
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${apiKey}`
},
method: "POST",
body: JSON.stringify({
model,
prompt,
model: OpenAIModel.DAVINCI_TURBO,
messages: [
{ role: "system", content: "You are a helpful assistant that accurately answers the user's queries based on the given text." },
{ role: "user", content: prompt }
],
max_tokens: 120,
temperature: 0.0,
top_p: 1,
frequency_penalty: 0,
presence_penalty: 0,
n: 1,
stop: ["###"],
stream: true
})
});
Expand All @@ -98,7 +40,7 @@ export const OpenAIStream = async (prompt: string, model: OpenAIModel, apiKey: s

try {
const json = JSON.parse(data);
const text = json.choices[0].text;
const text = json.choices[0].delta.content;
const queue = encoder.encode(text);
controller.enqueue(queue);
} catch (e) {
Expand Down
28 changes: 0 additions & 28 deletions utils/sources.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
import { OpenAIModel } from "@/types";

export const cleanSourceText = (text: string) => {
return text
.trim()
Expand All @@ -9,29 +7,3 @@ export const cleanSourceText = (text: string) => {
.replace(/\t/g, "")
.replace(/\n+(\s*\n)*/g, "\n");
};

export const getSourceCount = (model: OpenAIModel) => {
switch (model) {
case OpenAIModel.DAVINCI_TEXT:
return 3;
case OpenAIModel.CURIE_TEXT:
return 4;
case OpenAIModel.DAVINCI_CODE:
return 5;
default:
return 3;
}
};

export const shortenSourceText = (text: string, model: OpenAIModel) => {
switch (model) {
case OpenAIModel.DAVINCI_TEXT:
return text.slice(0, 1500);
case OpenAIModel.CURIE_TEXT:
return text.slice(0, 1500);
case OpenAIModel.DAVINCI_CODE:
return text.slice(0, 3000);
default:
return text.slice(0, 1500);
}
};

0 comments on commit 4466a62

Please sign in to comment.