diff --git a/.env.example b/.env.example
index 968e93484..126bd5f20 100644
--- a/.env.example
+++ b/.env.example
@@ -29,5 +29,11 @@ GOOGLE_GENERATIVE_AI_API_KEY=
# EXAMPLE http://localhost:11434
OLLAMA_API_BASE_URL=
+# You only need this environment variable set if you want to use OpenAI-compatible ("OpenAI Like") models
+OPENAI_LIKE_API_BASE_URL=
+
+# API key for the OpenAI-compatible provider configured above
+OPENAI_LIKE_API_KEY=
+
# Include this environment variable if you want more logging for debugging locally
VITE_LOG_LEVEL=debug
diff --git a/app/components/chat/BaseChat.tsx b/app/components/chat/BaseChat.tsx
index b7421349e..c1175f700 100644
--- a/app/components/chat/BaseChat.tsx
+++ b/app/components/chat/BaseChat.tsx
@@ -28,7 +28,7 @@ const ModelSelector = ({ model, setModel, modelList, providerList }) => {
const [provider, setProvider] = useState(DEFAULT_PROVIDER);
return (
- {
setProvider(e.target.value);
@@ -42,9 +42,12 @@ const ModelSelector = ({ model, setModel, modelList, providerList }) => {
{provider}
))}
-
- Ollama
-
+
+ Ollama
+
+
+ OpenAILike
+
(
);
},
-);
\ No newline at end of file
+);
diff --git a/app/lib/.server/llm/api-key.ts b/app/lib/.server/llm/api-key.ts
index b1a47f428..fedb0bc5f 100644
--- a/app/lib/.server/llm/api-key.ts
+++ b/app/lib/.server/llm/api-key.ts
@@ -19,6 +19,17 @@ export function getAPIKey(cloudflareEnv: Env, provider: string) {
return env.GROQ_API_KEY || cloudflareEnv.GROQ_API_KEY;
case 'OpenRouter':
return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
+ case "OpenAILike":
+ return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
+ default:
+ return "";
+ }
+}
+
+export function getBaseURL(cloudflareEnv: Env, provider: string) {
+ switch (provider) {
+ case 'OpenAILike':
+ return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
default:
return "";
}
diff --git a/app/lib/.server/llm/model.ts b/app/lib/.server/llm/model.ts
index 68983756d..17881980d 100644
--- a/app/lib/.server/llm/model.ts
+++ b/app/lib/.server/llm/model.ts
@@ -1,6 +1,6 @@
// @ts-nocheck
// Preventing TS checks with files presented in the video for a better presentation.
-import { getAPIKey } from '~/lib/.server/llm/api-key';
+import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key';
import { createAnthropic } from '@ai-sdk/anthropic';
import { createOpenAI } from '@ai-sdk/openai';
import { createGoogleGenerativeAI } from '@ai-sdk/google';
@@ -14,7 +14,14 @@ export function getAnthropicModel(apiKey: string, model: string) {
return anthropic(model);
}
+export function getOpenAILikeModel(baseURL:string,apiKey: string, model: string) {
+ const openai = createOpenAI({
+ baseURL,
+ apiKey,
+ });
+ return openai(model);
+}
export function getOpenAIModel(apiKey: string, model: string) {
const openai = createOpenAI({
apiKey,
@@ -54,7 +61,7 @@ export function getOpenRouterModel(apiKey: string, model: string) {
export function getModel(provider: string, model: string, env: Env) {
const apiKey = getAPIKey(env, provider);
-
+ const baseURL = getBaseURL(env, provider);
switch (provider) {
case 'Anthropic':
@@ -67,6 +74,8 @@ export function getModel(provider: string, model: string, env: Env) {
return getOpenRouterModel(apiKey, model);
case 'Google':
return getGoogleModel(apiKey, model)
+ case 'OpenAILike':
+ return getOpenAILikeModel(baseURL,apiKey, model);
default:
return getOllamaModel(model);
}
diff --git a/app/utils/constants.ts b/app/utils/constants.ts
index 6db860c53..0a806741d 100644
--- a/app/utils/constants.ts
+++ b/app/utils/constants.ts
@@ -36,7 +36,9 @@ export let MODEL_LIST: ModelInfo[] = [...staticModels];
async function getOllamaModels(): Promise {
try {
- const response = await fetch(`http://localhost:11434/api/tags`);
+ const base_url =import.meta.env.OLLAMA_API_BASE_URL || "http://localhost:11434";
+ const url = new URL(base_url).toString();
+ const response = await fetch(`${url}/api/tags`);
const data = await response.json();
return data.models.map((model: any) => ({
@@ -49,9 +51,36 @@ async function getOllamaModels(): Promise {
}
}
+async function getOpenAILikeModels(): Promise {
+
+ try {
+ const base_url =import.meta.env.OPENAI_LIKE_API_BASE_URL || "";
+ if (!base_url) {
+ return [];
+ }
+ const url = new URL(base_url).toString();
+ const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? "";
+ const response = await fetch(`${url}/models`, {
+ headers: {
+ Authorization: `Bearer ${api_key}`,
+ }
+ });
+ const res = await response.json();
+ return res.data.map((model: any) => ({
+ name: model.id,
+ label: model.id,
+ provider: 'OpenAILike',
+ }));
+ }catch (e) {
+ return []
+ }
+
+}
async function initializeModelList(): Promise {
const ollamaModels = await getOllamaModels();
- MODEL_LIST = [...ollamaModels, ...staticModels];
+ const openAiLikeModels = await getOpenAILikeModels();
+ console.log(openAiLikeModels);
+ MODEL_LIST = [...ollamaModels,...openAiLikeModels, ...staticModels];
}
initializeModelList().then();
export { getOllamaModels, initializeModelList };
diff --git a/vite.config.ts b/vite.config.ts
index 58e76cde5..1afe2b117 100644
--- a/vite.config.ts
+++ b/vite.config.ts
@@ -27,6 +27,7 @@ export default defineConfig((config) => {
chrome129IssuePlugin(),
config.mode === 'production' && optimizeCssModules({ apply: 'build' }),
],
+ envPrefix:["VITE_","OPENAI_LIKE_API_","OLLAMA_API_BASE_URL"],
};
});
diff --git a/worker-configuration.d.ts b/worker-configuration.d.ts
index f3259893e..dcca11908 100644
--- a/worker-configuration.d.ts
+++ b/worker-configuration.d.ts
@@ -4,4 +4,6 @@ interface Env {
GROQ_API_KEY: string;
OPEN_ROUTER_API_KEY: string;
OLLAMA_API_BASE_URL: string;
+ OPENAI_LIKE_API_KEY: string;
+ OPENAI_LIKE_API_BASE_URL: string;
}