Commit: more examples

functorism committed Jul 6, 2024
1 parent d999d18 · commit 42a05a8

Showing 7 changed files with 144 additions and 19 deletions.
16 changes: 16 additions & 0 deletions README.md
@@ -66,3 +66,19 @@ UI Settings:
- API Key: sk-...
- Headers
- x-portkey-provider: anthropic

### HuggingFace Text Generation Inference

Run

```
docker compose -f docker-compose-hf-tgi.yml up
```

UI Settings:

- Model: HuggingFaceH4/zephyr-7b-beta
- OpenAI API URL: http://localhost:8080/v1
- API Key: unused-for-hf-tgi
- Headers
- Check "Send Minimal Headers"
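
Before pointing the UI at the server, the endpoint can be smoke-tested directly, since TGI exposes the same OpenAI-compatible chat completions route the UI uses. A minimal TypeScript sketch, assuming the container from the compose file below is up on localhost:8080 (the prompt and token limit are just placeholders):

```
// Smoke test for the OpenAI-compatible TGI endpoint configured above.
const res = await fetch("http://localhost:8080/v1/chat/completions", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    model: "HuggingFaceH4/zephyr-7b-beta",
    messages: [{ role: "user", content: "Say hello in one word." }],
    max_tokens: 16,
  }),
});

const data = await res.json();
console.log(data.choices?.[0]?.message?.content);
```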
29 changes: 29 additions & 0 deletions docker-compose-hf-tgi.yml
@@ -0,0 +1,29 @@
services:
minichat:
build: .
ports:
- "3216:3216"

hf-tgi:
image: ghcr.io/huggingface/text-generation-inference
ports:
- "8080:80"
volumes:
- hf-tgi:/data
env_file:
- .env
environment:
MODEL_ID: "HuggingFaceH4/zephyr-7b-beta"
CORS_ALLOW_ORIGIN: "http://localhost:3216"
HF_API_TOKEN: "${HF_API_TOKEN}"
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: 1
capabilities: [ gpu ]

volumes:
hf-tgi:
driver: local
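
The same endpoint can also be driven with the openai SDK, roughly mirroring what src/control/send.ts does in the app; a sketch only, assuming the hf-tgi service above is running (TGI does not validate the API key):

```
import OpenAI from "openai";

// Sketch: point the OpenAI SDK at the local TGI container.
const openai = new OpenAI({
  baseURL: "http://localhost:8080/v1",
  apiKey: "unused-for-hf-tgi",
});

const stream = await openai.chat.completions.create({
  model: "HuggingFaceH4/zephyr-7b-beta",
  messages: [{ role: "user", content: "Hello!" }],
  max_tokens: 64,
  stream: true,
});

for await (const chunk of stream) {
  process.stdout.write(chunk.choices[0]?.delta?.content ?? "");
}
```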
27 changes: 27 additions & 0 deletions docker-compose-vllm.yml
@@ -0,0 +1,27 @@
# untested due to https://github.com/vllm-project/vllm/issues/2393

services:
minichat:
build: .
ports:
- "3216:3216"

vllm:
image: vllm/vllm-openai:latest
ports:
- "8000:8000"
volumes:
- vllm:/root/.cache/huggingface
environment:
HUGGING_FACE_HUB_TOKEN: "${HUGGING_FACE_HUB_TOKEN}"
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: 1
capabilities: [ gpu ]

volumes:
vllm:
driver: local
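
Since this compose file is untested and does not pin a model, the model ID to enter in the UI's Model field can be read back from vLLM's OpenAI-compatible model listing once the server is up; a sketch, assuming it listens on localhost:8000 as mapped above:

```
// Sketch: ask the vLLM server which model it is serving.
const res = await fetch("http://localhost:8000/v1/models");
const { data } = await res.json();
for (const model of data ?? []) {
  console.log(model.id); // use this as the Model value in the UI
}
```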
61 changes: 47 additions & 14 deletions src/components/gen-options-panel.tsx
@@ -1,7 +1,14 @@
import { useContext, useEffect, useRef } from "react";
import { AppStateContext } from "../control/state.js";
import { twMerge } from "tailwind-merge";
import { LucideDelete, LucidePlus, LucideX } from "lucide-react";
import {
LucideCheck,
LucideDelete,
LucidePlus,
LucideToggleLeft,
LucideToggleRight,
LucideX,
} from "lucide-react";
import { Tooltip } from "./tooltip.js";

export const GenOptionsPanel = ({}: {}) => {
@@ -118,7 +125,7 @@ const Headers = () => {
<dialog
ref={dialogRef}
className={twMerge(
"w-[80svw] h-[80svh] rounded shadow p-4 relative",
"w-[80svw] h-[80svh] rounded shadow p-4 relative outline-none",
state.darkMode ? "bg-zinc-900 text-white" : "bg-yellow-50 text-black"
)}
>
@@ -132,18 +139,44 @@ const Headers = () => {
/>
</Tooltip>
</div>
<span
className="flex gap-1 text-xs place-items-center select-none"
onClick={() => {
api.setOptions((o) => ({
...o,
headers: [...o.headers, ["", ""] as [string, string]],
}));
}}
>
<LucidePlus className="inline text-xs" width={12} />
Add Header
</span>
<div className="flex gap-4">
<span
className="flex gap-2 text-xs place-items-center select-none"
onClick={() => {
api.setOptions((o) => ({
...o,
headers: [...o.headers, ["", ""] as [string, string]],
}));
}}
>
<LucidePlus className="inline text-xs" width={12} />
Add Header
</span>
<Tooltip tooltip="Omit default headers from OpenAI SDK">
<span
className="flex gap-2 text-xs place-items-center select-none"
onClick={() => {
api.setOptions((o) => ({
...o,
sendMinimalHeaders: !o.sendMinimalHeaders,
}));
}}
>
{state.options.sendMinimalHeaders ? (
<LucideToggleRight
className="inline text-xs text-green-500"
width={12}
/>
) : (
<LucideToggleLeft
className="inline text-xs text-red-800"
width={12}
/>
)}
Send Minimal Headers
</span>
</Tooltip>
</div>

{state.options.headers.map(([key, value], i) => (
<div className="flex gap-2 p-2" key={i}>
Empty file removed src/components/textarea.tsx
28 changes: 23 additions & 5 deletions src/control/send.ts
@@ -1,3 +1,4 @@
import "openai/shims/web";
import OpenAI from "openai";
import { AppStateContext, GenOptions } from "./state.js";
import { useCallback, useContext } from "react";
@@ -9,16 +10,33 @@ async function* chatComplete(messages: Messages, opts: GenOptions) {
baseURL: opts.apiUrl,
apiKey: opts.apiKey,
dangerouslyAllowBrowser: true,
defaultHeaders: {
Authorization: `Bearer ${opts.apiKey}`,
...Object.fromEntries(opts.headers),
fetch: async (url: RequestInfo, init?: RequestInit) => {
const headers = {
...(opts.sendMinimalHeaders
? {
"Content-Type": "application/json",
}
: init?.headers),
...Object.fromEntries(opts.headers),
};

const res = await fetch(url, {
...init,
headers,
});

return res;
},
});

const { top_p, temperature, max_tokens, model } = opts;

const stream = await openai.chat.completions.create({
...opts,
model,
top_p,
temperature,
max_tokens,
messages,
max_tokens: 4096,
stream: true,
});

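The custom fetch above is what the new "Send Minimal Headers" toggle feeds into: when it is on, the headers prepared by the OpenAI SDK are discarded and only Content-Type plus the user-defined headers are sent; when it is off, the SDK's headers are kept and the user-defined headers are layered on top. A standalone sketch of that merge, using a hypothetical buildHeaders helper for illustration:

```
type HeaderPairs = Array<[string, string]>;

// Hypothetical helper mirroring the merge performed in the custom fetch above.
function buildHeaders(
  sdkHeaders: Record<string, string> | undefined,
  userHeaders: HeaderPairs,
  sendMinimalHeaders: boolean
): Record<string, string> {
  return {
    // Minimal mode: drop whatever the SDK prepared and start from Content-Type only.
    ...(sendMinimalHeaders ? { "Content-Type": "application/json" } : sdkHeaders),
    // User-defined headers always win.
    ...Object.fromEntries(userHeaders),
  };
}

// With the toggle on, only Content-Type and the user's headers survive.
console.log(
  buildHeaders(
    { Authorization: "Bearer sk-...", "Content-Type": "application/json" },
    [["x-portkey-provider", "anthropic"]],
    true
  )
);
```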
2 changes: 2 additions & 0 deletions src/control/state.ts
@@ -25,6 +25,7 @@ const zGenOptions = z.object({
apiKey: z.string().default("unused-ollama-key"),
apiUrl: z.string().default("http://localhost:11434/v1"),
headers: z.array(z.tuple([z.string(), z.string()])).default([]),
sendMinimalHeaders: z.boolean().default(false),
});

const getGenOptions = (): GenOptions => {
@@ -53,6 +54,7 @@ export type GenOptions = {
apiKey: string;
apiUrl: string;
headers: Array<[string, string]>;
sendMinimalHeaders: boolean;
};

export type AppState = {
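Because the new field carries a zod default, options persisted by an older version of the app still parse and simply come back with the toggle off. A small sketch of that behavior, using a trimmed-down copy of the schema above:

```
import { z } from "zod";

// Trimmed-down copy of zGenOptions with only the fields relevant here.
const zGenOptions = z.object({
  headers: z.array(z.tuple([z.string(), z.string()])).default([]),
  sendMinimalHeaders: z.boolean().default(false),
});

// Options saved before this commit lack the new key...
const legacy = zGenOptions.parse({ headers: [] });
console.log(legacy.sendMinimalHeaders); // false (the default applies)
```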
