diff --git a/components/Search.tsx b/components/Search.tsx index fc9c85e..a3f2566 100644 --- a/components/Search.tsx +++ b/components/Search.tsx @@ -1,6 +1,6 @@ -import { OpenAIModel, SearchQuery, Source } from "@/types"; -import { createPrompt } from "@/utils/answer"; +import { SearchQuery, Source } from "@/types"; import { IconArrowRight, IconBolt, IconSearch } from "@tabler/icons-react"; +import endent from "endent"; import { FC, KeyboardEvent, useEffect, useRef, useState } from "react"; interface SearchProps { @@ -13,7 +13,6 @@ export const Search: FC = ({ onSearch, onAnswerUpdate, onDone }) => const inputRef = useRef(null); const [query, setQuery] = useState(""); - const [model, setModel] = useState(OpenAIModel.DAVINCI_CODE); const [apiKey, setApiKey] = useState(""); const [showSettings, setShowSettings] = useState(false); const [loading, setLoading] = useState(false); @@ -35,7 +34,7 @@ export const Search: FC = ({ onSearch, onAnswerUpdate, onDone }) => headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ query, model }) + body: JSON.stringify({ query }) }); if (!response.ok) { @@ -50,13 +49,17 @@ export const Search: FC = ({ onSearch, onAnswerUpdate, onDone }) => const handleStream = async (sources: Source[]) => { try { - let prompt = createPrompt(query, sources, model); + const prompt = endent`Provide a 2-3 sentence answer to the query based on the following sources. Be original, concise, accurate, and helpful. Cite sources as [1] or [2] or [3] after each sentence (not just the very end) to back up your answer (Ex: Correct: [1], Correct: [2][3], Incorrect: [1, 2]). 
+ + ${sources.map((source, idx) => `Source [${idx + 1}]:\n${source.text}`).join("\n\n")} + `; + const response = await fetch("/api/answer", { method: "POST", headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ prompt, model, apiKey }) + body: JSON.stringify({ prompt, apiKey }) }); if (!response.ok) { @@ -102,13 +105,7 @@ export const Search: FC = ({ onSearch, onAnswerUpdate, onDone }) => return; } - if (!model) { - alert("Please select a model."); - return; - } - localStorage.setItem("CLARITY_KEY", apiKey); - localStorage.setItem("CLARITY_MODEL", model); setShowSettings(false); inputRef.current?.focus(); @@ -116,15 +113,12 @@ export const Search: FC = ({ onSearch, onAnswerUpdate, onDone }) => const handleClear = () => { localStorage.removeItem("CLARITY_KEY"); - localStorage.removeItem("CLARITY_MODEL"); setApiKey(""); - setModel(OpenAIModel.DAVINCI_CODE); }; useEffect(() => { const CLARITY_KEY = localStorage.getItem("CLARITY_KEY"); - const CLARITY_MODEL = localStorage.getItem("CLARITY_MODEL"); if (CLARITY_KEY) { setApiKey(CLARITY_KEY); @@ -132,13 +126,6 @@ export const Search: FC = ({ onSearch, onAnswerUpdate, onDone }) => setShowSettings(true); } - if (CLARITY_MODEL) { - setModel(CLARITY_MODEL as OpenAIModel); - } else { - setShowSettings(true); - setModel(OpenAIModel.DAVINCI_CODE); - } - inputRef.current?.focus(); }, []); @@ -190,22 +177,6 @@ export const Search: FC = ({ onSearch, onAnswerUpdate, onDone }) => {showSettings && ( <> - - => { try { - const { prompt, model, apiKey } = (await req.json()) as { + const { prompt, apiKey } = (await req.json()) as { prompt: string; - model: OpenAIModel; apiKey: string; }; - const stream = await OpenAIStream(prompt, model, apiKey); + const stream = await OpenAIStream(prompt, apiKey); return new Response(stream); } catch (error) { diff --git a/pages/api/sources.ts b/pages/api/sources.ts index e44f6ee..7b4c14e 100644 --- a/pages/api/sources.ts +++ b/pages/api/sources.ts @@ -3,7 +3,7 @@ import { 
Readability } from "@mozilla/readability"; import * as cheerio from "cheerio"; import { JSDOM } from "jsdom"; import type { NextApiRequest, NextApiResponse } from "next"; -import { cleanSourceText, getSourceCount, shortenSourceText } from "../../utils/sources"; +import { cleanSourceText } from "../../utils/sources"; type Data = { sources: Source[]; @@ -16,7 +16,7 @@ const searchHandler = async (req: NextApiRequest, res: NextApiResponse) => model: OpenAIModel; }; - const sourceCount = getSourceCount(model); + const sourceCount = 4; // GET LINKS const response = await fetch(`https://www.google.com/search?q=${query}`); @@ -69,7 +69,7 @@ const searchHandler = async (req: NextApiRequest, res: NextApiResponse) => const filteredSources = sources.filter((source) => source !== undefined); for (const source of filteredSources) { - source.text = shortenSourceText(source.text, model); + source.text = source.text.slice(0, 1500); } res.status(200).json({ sources: filteredSources }); diff --git a/types/index.ts b/types/index.ts index 47140da..84cab3b 100644 --- a/types/index.ts +++ b/types/index.ts @@ -1,7 +1,5 @@ export enum OpenAIModel { - DAVINCI_TEXT = "text-davinci-003", - CURIE_TEXT = "text-curie-001", - DAVINCI_CODE = "code-davinci-002" + DAVINCI_TURBO = "gpt-3.5-turbo" } export type Source = { diff --git a/utils/answer.ts b/utils/answer.ts index 2f4e356..dd954a2 100644 --- a/utils/answer.ts +++ b/utils/answer.ts @@ -1,82 +1,24 @@ -import { OpenAIModel, Source } from "@/types"; -import endent from "endent"; +import { OpenAIModel } from "@/types"; import { createParser, ParsedEvent, ReconnectInterval } from "eventsource-parser"; -const createTextDavinciPrompt = (query: string, sources: Source[]) => { - return endent`INSTRUCTIONS - Provide a 2-3 sentence answer to the query based on the sources. Be original, concise, accurate, and helpful. Cite sources as [1] or [2] or [3] after each sentence to back up your answer (Ex: Correct: [1], Correct: [2][3], Incorrect: [1, 2]). 
- ### - SOURCES - - ${sources.map((source, idx) => `Source [${idx + 1}]:\n${source.text}`).join("\n\n")} - ### - QUERY - ${query} - ### - ANSWER`; -}; - -const createTextCuriePrompt = (query: string, sources: Source[]) => { - return endent`INSTRUCTIONS - Provide a 2-3 sentence answer to the query based on the sources. Be original, concise, accurate, and helpful. - ### - SOURCES - - ${sources.map((source, idx) => `Source [${idx + 1}]:\n${source.text}`).join("\n\n")} - ### - QUERY - ${query} - ### - ANSWER`; -}; - -const createCodeDavinciPrompt = (query: string, sources: Source[]) => { - return endent`INSTRUCTIONS - Provide a 2-3 sentence answer to the query based on the sources. Be original, concise, accurate, and helpful. - ### - SOURCES - - ${sources.map((source, idx) => `Source [${idx + 1}]:\n${source.text}`).join("\n\n")} - ### - QUERY - ${query} - ### - ANSWER`; -}; - -export const createPrompt = (query: string, sources: Source[], model: OpenAIModel) => { - switch (model) { - case OpenAIModel.DAVINCI_TEXT: - return createTextDavinciPrompt(query, sources); - case OpenAIModel.CURIE_TEXT: - return createTextCuriePrompt(query, sources); - case OpenAIModel.DAVINCI_CODE: - return createCodeDavinciPrompt(query, sources); - default: - return createCodeDavinciPrompt(query, sources); - } -}; - -export const OpenAIStream = async (prompt: string, model: OpenAIModel, apiKey: string) => { +export const OpenAIStream = async (prompt: string, apiKey: string) => { const encoder = new TextEncoder(); const decoder = new TextDecoder(); - const res = await fetch("https://api.openai.com/v1/completions", { + const res = await fetch("https://api.openai.com/v1/chat/completions", { headers: { "Content-Type": "application/json", Authorization: `Bearer ${apiKey}` }, method: "POST", body: JSON.stringify({ - model, - prompt, + model: OpenAIModel.DAVINCI_TURBO, + messages: [ + { role: "system", content: "You are a helpful assistant that accurately answers the user's queries based on the given 
text." }, + { role: "user", content: prompt } + ], max_tokens: 120, temperature: 0.0, - top_p: 1, - frequency_penalty: 0, - presence_penalty: 0, - n: 1, - stop: ["###"], stream: true }) }); @@ -98,7 +40,7 @@ export const OpenAIStream = async (prompt: string, model: OpenAIModel, apiKey: s try { const json = JSON.parse(data); - const text = json.choices[0].text; + const text = json.choices[0].delta.content; const queue = encoder.encode(text); controller.enqueue(queue); } catch (e) { diff --git a/utils/sources.ts b/utils/sources.ts index 6057e2c..5a3ba0a 100644 --- a/utils/sources.ts +++ b/utils/sources.ts @@ -1,5 +1,3 @@ -import { OpenAIModel } from "@/types"; - export const cleanSourceText = (text: string) => { return text .trim() @@ -9,29 +7,3 @@ export const cleanSourceText = (text: string) => { .replace(/\t/g, "") .replace(/\n+(\s*\n)*/g, "\n"); }; - -export const getSourceCount = (model: OpenAIModel) => { - switch (model) { - case OpenAIModel.DAVINCI_TEXT: - return 3; - case OpenAIModel.CURIE_TEXT: - return 4; - case OpenAIModel.DAVINCI_CODE: - return 5; - default: - return 3; - } -}; - -export const shortenSourceText = (text: string, model: OpenAIModel) => { - switch (model) { - case OpenAIModel.DAVINCI_TEXT: - return text.slice(0, 1500); - case OpenAIModel.CURIE_TEXT: - return text.slice(0, 1500); - case OpenAIModel.DAVINCI_CODE: - return text.slice(0, 3000); - default: - return text.slice(0, 1500); - } -};