Skip to content

Commit

Permalink
Merge branch 'lobehub:main' into main
Browse files Browse the repository at this point in the history
  • Loading branch information
123hi123 authored Nov 19, 2024
2 parents 8d682a6 + 41d92d8 commit ccde4ba
Show file tree
Hide file tree
Showing 20 changed files with 495 additions and 19 deletions.
58 changes: 58 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,64 @@

# Changelog

### [Version 1.32.1](https://github.com/lobehub/lobe-chat/compare/v1.32.0...v1.32.1)

<sup>Released on **2024-11-19**</sup>

#### 🐛 Bug Fixes

- **misc**: Keyword search for chat history & sessions.

#### 💄 Styles

- **misc**: Support o1 models using streaming.

<br/>

<details>
<summary><kbd>Improvements and Fixes</kbd></summary>

#### What's fixed

- **misc**: Keyword search for chat history & sessions, closes [#4725](https://github.com/lobehub/lobe-chat/issues/4725) ([415d772](https://github.com/lobehub/lobe-chat/commit/415d772))

#### Styles

- **misc**: Support o1 models using streaming, closes [#4732](https://github.com/lobehub/lobe-chat/issues/4732) ([7e9e71a](https://github.com/lobehub/lobe-chat/commit/7e9e71a))

</details>

<div align="right">

[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)

</div>

## [Version 1.32.0](https://github.com/lobehub/lobe-chat/compare/v1.31.11...v1.32.0)

<sup>Released on **2024-11-19**</sup>

#### ✨ Features

- **misc**: Add support InternLM (书生浦语) provider.

<br/>

<details>
<summary><kbd>Improvements and Fixes</kbd></summary>

#### What's improved

- **misc**: Add support InternLM (书生浦语) provider, closes [#4711](https://github.com/lobehub/lobe-chat/issues/4711) ([aaae059](https://github.com/lobehub/lobe-chat/commit/aaae059))

</details>

<div align="right">

[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)

</div>

### [Version 1.31.11](https://github.com/lobehub/lobe-chat/compare/v1.31.10...v1.31.11)

<sup>Released on **2024-11-18**</sup>
Expand Down
2 changes: 2 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,8 @@ ENV \
HUGGINGFACE_API_KEY="" HUGGINGFACE_MODEL_LIST="" HUGGINGFACE_PROXY_URL="" \
# Hunyuan
HUNYUAN_API_KEY="" HUNYUAN_MODEL_LIST="" \
# InternLM
INTERNLM_API_KEY="" INTERNLM_MODEL_LIST="" \
# Minimax
MINIMAX_API_KEY="" MINIMAX_MODEL_LIST="" \
# Mistral
Expand Down
2 changes: 2 additions & 0 deletions Dockerfile.database
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,8 @@ ENV \
HUGGINGFACE_API_KEY="" HUGGINGFACE_MODEL_LIST="" HUGGINGFACE_PROXY_URL="" \
# Hunyuan
HUNYUAN_API_KEY="" HUNYUAN_MODEL_LIST="" \
# InternLM
INTERNLM_API_KEY="" INTERNLM_MODEL_LIST="" \
# Minimax
MINIMAX_API_KEY="" MINIMAX_MODEL_LIST="" \
# Mistral
Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@lobehub/chat",
"version": "1.31.11",
"version": "1.32.1",
"description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
"keywords": [
"framework",
Expand Down
2 changes: 2 additions & 0 deletions src/app/(main)/settings/llm/ProviderList/providers.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import {
GoogleProviderCard,
GroqProviderCard,
HunyuanProviderCard,
InternLMProviderCard,
MinimaxProviderCard,
MistralProviderCard,
MoonshotProviderCard,
Expand Down Expand Up @@ -85,6 +86,7 @@ export const useProviderList = (): ProviderItem[] => {
MinimaxProviderCard,
Ai360ProviderCard,
TaichuProviderCard,
InternLMProviderCard,
SiliconCloudProviderCard,
],
[
Expand Down
6 changes: 6 additions & 0 deletions src/config/llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,9 @@ export const getLLMConfig = () => {

ENABLED_XAI: z.boolean(),
XAI_API_KEY: z.string().optional(),

ENABLED_INTERNLM: z.boolean(),
INTERNLM_API_KEY: z.string().optional(),
},
runtimeEnv: {
API_KEY_SELECT_MODE: process.env.API_KEY_SELECT_MODE,
Expand Down Expand Up @@ -246,6 +249,9 @@ export const getLLMConfig = () => {

ENABLED_XAI: !!process.env.XAI_API_KEY,
XAI_API_KEY: process.env.XAI_API_KEY,

ENABLED_INTERNLM: !!process.env.INTERNLM_API_KEY,
INTERNLM_API_KEY: process.env.INTERNLM_API_KEY,
},
});
};
Expand Down
4 changes: 4 additions & 0 deletions src/config/modelProviders/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import GoogleProvider from './google';
import GroqProvider from './groq';
import HuggingFaceProvider from './huggingface';
import HunyuanProvider from './hunyuan';
import InternLMProvider from './internlm';
import MinimaxProvider from './minimax';
import MistralProvider from './mistral';
import MoonshotProvider from './moonshot';
Expand Down Expand Up @@ -69,6 +70,7 @@ export const LOBE_DEFAULT_MODEL_LIST: ChatModelCard[] = [
HunyuanProvider.chatModels,
WenxinProvider.chatModels,
SenseNovaProvider.chatModels,
InternLMProvider.chatModels,
].flat();

export const DEFAULT_MODEL_PROVIDER_LIST = [
Expand Down Expand Up @@ -105,6 +107,7 @@ export const DEFAULT_MODEL_PROVIDER_LIST = [
MinimaxProvider,
Ai360Provider,
TaichuProvider,
InternLMProvider,
SiliconCloudProvider,
];

Expand All @@ -131,6 +134,7 @@ export { default as GoogleProviderCard } from './google';
export { default as GroqProviderCard } from './groq';
export { default as HuggingFaceProviderCard } from './huggingface';
export { default as HunyuanProviderCard } from './hunyuan';
export { default as InternLMProviderCard } from './internlm';
export { default as MinimaxProviderCard } from './minimax';
export { default as MistralProviderCard } from './mistral';
export { default as MoonshotProviderCard } from './moonshot';
Expand Down
42 changes: 42 additions & 0 deletions src/config/modelProviders/internlm.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
import { ModelProviderCard } from '@/types/llm';

// Chat models exposed by the InternLM (书生浦语) OpenAI-compatible API.
// Pricing of 0 reflects the free tier documented on the provider's site.
const internLMChatModels: ModelProviderCard['chatModels'] = [
  {
    description: '我们最新的模型系列,有着卓越的推理性能,支持 1M 的上下文长度以及更强的指令跟随和工具调用能力。',
    displayName: 'InternLM2.5',
    enabled: true,
    functionCall: true,
    id: 'internlm2.5-latest',
    maxOutput: 4096,
    pricing: { input: 0, output: 0 },
    tokens: 32_768,
  },
  {
    description: '我们仍在维护的老版本模型,有 7B、20B 多种模型参数量可选。',
    displayName: 'InternLM2 Pro Chat',
    functionCall: true,
    id: 'internlm2-pro-chat',
    maxOutput: 4096,
    pricing: { input: 0, output: 0 },
    tokens: 32_768,
  },
];

/**
 * Provider card for InternLM (书生浦语).
 * Browser-direct requests are disabled; calls are proxied through the server.
 */
const InternLM: ModelProviderCard = {
  chatModels: internLMChatModels,
  checkModel: 'internlm2.5-latest',
  description:
    '致力于大模型研究与开发工具链的开源组织。为所有 AI 开发者提供高效、易用的开源平台,让最前沿的大模型与算法技术触手可及',
  disableBrowserRequest: true,
  id: 'internlm',
  modelList: { showModelFetcher: true },
  modelsUrl: 'https://internlm.intern-ai.org.cn/doc/docs/Models#%E8%8E%B7%E5%8F%96%E6%A8%A1%E5%9E%8B%E5%88%97%E8%A1%A8',
  name: 'InternLM',
  url: 'https://internlm.intern-ai.org.cn',
};

export default InternLM;
42 changes: 38 additions & 4 deletions src/database/server/models/__tests__/session.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -231,8 +231,18 @@ describe('SessionModel', () => {

it('should return sessions with matching title', async () => {
await serverDB.insert(sessions).values([
{ id: '1', userId, title: 'Hello World', description: 'Some description' },
{ id: '2', userId, title: 'Another Session', description: 'Another description' },
{ id: '1', userId },
{ id: '2', userId },
]);

await serverDB.insert(agents).values([
{ id: 'agent-1', userId, model: 'gpt-3.5-turbo', title: 'Hello, Agent 1' },
{ id: 'agent-2', userId, model: 'gpt-4', title: 'Agent 2' },
]);

await serverDB.insert(agentsToSessions).values([
{ agentId: 'agent-1', sessionId: '1' },
{ agentId: 'agent-2', sessionId: '2' },
]);

const result = await sessionModel.queryByKeyword('hello');
Expand All @@ -241,9 +251,21 @@ describe('SessionModel', () => {
});

it('should return sessions with matching description', async () => {
// Sessions have no title or description of their own — those fields live on the agent;
// see: https://github.com/lobehub/lobe-chat/pull/4725
await serverDB.insert(sessions).values([
{ id: '1', userId, title: 'Session 1', description: 'Description with keyword' },
{ id: '2', userId, title: 'Session 2', description: 'Another description' },
{ id: '1', userId },
{ id: '2', userId },
]);

await serverDB.insert(agents).values([
{ id: 'agent-1', userId, model: 'gpt-3.5-turbo', title: 'Agent 1', description: 'Description with Keyword' },
{ id: 'agent-2', userId, model: 'gpt-4', title: 'Agent 2' },
]);

await serverDB.insert(agentsToSessions).values([
{ agentId: 'agent-1', sessionId: '1' },
{ agentId: 'agent-2', sessionId: '2' },
]);

const result = await sessionModel.queryByKeyword('keyword');
Expand All @@ -253,11 +275,23 @@ describe('SessionModel', () => {

it('should return sessions with matching title or description', async () => {
await serverDB.insert(sessions).values([
{ id: '1', userId },
{ id: '2', userId },
{ id: '3', userId },
]);

await serverDB.insert(agents).values([
{ id: '1', userId, title: 'Title with keyword', description: 'Some description' },
{ id: '2', userId, title: 'Another Session', description: 'Description with keyword' },
{ id: '3', userId, title: 'Third Session', description: 'Third description' },
]);

await serverDB.insert(agentsToSessions).values([
{ agentId: '1', sessionId: '1' },
{ agentId: '2', sessionId: '2' },
{ agentId: '3', sessionId: '3' },
]);

const result = await sessionModel.queryByKeyword('keyword');
expect(result).toHaveLength(2);
expect(result.map((s) => s.id)).toEqual(['1', '2']);
Expand Down
53 changes: 43 additions & 10 deletions src/database/server/models/session.ts
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ export class SessionModel {

const keywordLowerCase = keyword.toLowerCase();

const data = await this.findSessions({ keyword: keywordLowerCase });
const data = await this.findSessionsByKeywords({ keyword: keywordLowerCase });

return data.map((item) => this.mapSessionItem(item as any));
}
Expand Down Expand Up @@ -281,20 +281,53 @@ export class SessionModel {
pinned !== undefined ? eq(sessions.pinned, pinned) : eq(sessions.userId, this.userId),
keyword
? or(
like(
sql`lower(${sessions.title})` as unknown as Column,
`%${keyword.toLowerCase()}%`,
),
like(
sql`lower(${sessions.description})` as unknown as Column,
`%${keyword.toLowerCase()}%`,
),
)
like(
sql`lower(${sessions.title})` as unknown as Column,
`%${keyword.toLowerCase()}%`,
),
like(
sql`lower(${sessions.description})` as unknown as Column,
`%${keyword.toLowerCase()}%`,
),
)
: eq(sessions.userId, this.userId),
group ? eq(sessions.groupId, group) : isNull(sessions.groupId),
),

with: { agentsToSessions: { columns: {}, with: { agent: true } }, group: true },
});
}

/**
 * Find sessions whose linked agent's title or description matches the
 * keyword (case-insensitive substring match). Sessions themselves carry no
 * title/description — those live on the agent; see lobe-chat PR #4725.
 *
 * @param params.keyword  search term; matched case-insensitively
 * @param params.current  zero-based page index (default 0)
 * @param params.pageSize page size (default 9999, i.e. effectively unpaged)
 * @returns the sessions linked to the matching agents, newest agents first
 */
async findSessionsByKeywords(params: {
  current?: number;
  keyword: string;
  pageSize?: number;
}) {
  const { keyword, pageSize = 9999, current = 0 } = params;
  const offset = current * pageSize;
  // Lowercase once; the SQL side is also lowered so the match is
  // case-insensitive regardless of collation.
  const pattern = `%${keyword.toLowerCase()}%`;

  const results = await serverDB.query.agents.findMany({
    limit: pageSize,
    offset,
    orderBy: [desc(agents.updatedAt)],
    where: and(
      eq(agents.userId, this.userId),
      or(
        like(sql`lower(${agents.title})` as unknown as Column, pattern),
        like(sql`lower(${agents.description})` as unknown as Column, pattern),
      ),
    ),
    with: { agentsToSessions: { columns: {}, with: { session: true } } },
  });

  // An agent may not be linked to any session yet. Skip those entries
  // instead of throwing: the previous try/catch returned [] for the whole
  // query as soon as one agent lacked a session, silently discarding every
  // other match.
  return results.flatMap((item) => {
    const session = (item as any).agentsToSessions?.[0]?.session;
    return session ? [session] : [];
  });
}
}
7 changes: 6 additions & 1 deletion src/database/server/models/topic.ts
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,12 @@ export class TopicModel {
serverDB
.select()
.from(messages)
.where(and(eq(messages.topicId, topics.id), or(matchKeyword(messages.content)))),
.where(
and(
eq(messages.topicId, topics.id),
matchKeyword(messages.content)
)
),
),
),
),
Expand Down
7 changes: 7 additions & 0 deletions src/libs/agent-runtime/AgentRuntime.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ import { LobeGoogleAI } from './google';
import { LobeGroq } from './groq';
import { LobeHuggingFaceAI } from './huggingface';
import { LobeHunyuanAI } from './hunyuan';
import { LobeInternLMAI } from './internlm';
import { LobeMinimaxAI } from './minimax';
import { LobeMistralAI } from './mistral';
import { LobeMoonshotAI } from './moonshot';
Expand Down Expand Up @@ -141,6 +142,7 @@ class AgentRuntime {
groq: Partial<ClientOptions>;
huggingface: { apiKey?: string; baseURL?: string };
hunyuan: Partial<ClientOptions>;
internlm: Partial<ClientOptions>;
minimax: Partial<ClientOptions>;
mistral: Partial<ClientOptions>;
moonshot: Partial<ClientOptions>;
Expand Down Expand Up @@ -335,6 +337,11 @@ class AgentRuntime {
runtimeModel = new LobeCloudflareAI(params.cloudflare ?? {});
break;
}

case ModelProvider.InternLM: {
runtimeModel = new LobeInternLMAI(params.internlm);
break;
}
}
return new AgentRuntime(runtimeModel);
}
Expand Down
2 changes: 1 addition & 1 deletion src/libs/agent-runtime/github/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ export const LobeGithubAI = LobeOpenAICompatibleFactory({
const { model } = payload;

if (o1Models.has(model)) {
return pruneO1Payload(payload) as any;
return { ...pruneO1Payload(payload), stream: false } as any;
}

return { ...payload, stream: payload.stream ?? true };
Expand Down
Loading

0 comments on commit ccde4ba

Please sign in to comment.