Skip to content

Commit

Permalink
4.8.9 test (#2299)
Browse files Browse the repository at this point in the history
* perf: read file prompt

* perf: read file prompt

* perf: free plan tip

* feat: cron job usage

* perf: app templates

* perf: get llm model by name

* feat: support outlink upload file

* fix: upload limit
  • Loading branch information
c121914yu authored Aug 8, 2024
1 parent c6dd307 commit d682a82
Show file tree
Hide file tree
Showing 18 changed files with 1,056 additions and 782 deletions.
18 changes: 13 additions & 5 deletions packages/service/core/ai/model.ts
Original file line number Diff line number Diff line change
@@ -1,20 +1,28 @@
/**
 * Resolve an LLM model config by its `model` id or display `name`.
 * Falls back to the first configured LLM model when no match is found
 * (or when `model` is omitted).
 */
export const getLLMModel = (model?: string) => {
  return (
    global.llmModels.find((item) => item.model === model || item.name === model) ??
    global.llmModels[0]
  );
};
export const getDatasetModel = (model?: string) => {
return (
global.llmModels?.filter((item) => item.datasetProcess)?.find((item) => item.model === model) ??
global.llmModels[0]
global.llmModels
?.filter((item) => item.datasetProcess)
?.find((item) => item.model === model || item.name === model) ?? global.llmModels[0]
);
};

/**
 * Resolve a vector (embedding) model config by `model` id or display `name`.
 * Falls back to the first configured vector model when no match is found.
 */
export const getVectorModel = (model?: string) => {
  return (
    global.vectorModels.find((item) => item.model === model || item.name === model) ||
    global.vectorModels[0]
  );
};

export function getAudioSpeechModel(model?: string) {
return (
global.audioSpeechModels.find((item) => item.model === model) || global.audioSpeechModels[0]
global.audioSpeechModels.find((item) => item.model === model || item.name === model) ||
global.audioSpeechModels[0]
);
}

Expand Down
2 changes: 1 addition & 1 deletion packages/service/core/workflow/dispatch/tools/readFiles.ts
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ export const dispatchReadFiles = async (props: Props): Promise<Response> => {
})
.filter(Boolean)
.slice(0, maxFiles);
console.log(parseUrlList);

const readFilesResult = await Promise.all(
parseUrlList
.map(async (url) => {
Expand Down
1 change: 1 addition & 0 deletions projects/app/public/imgs/app/templates/flux.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
9 changes: 5 additions & 4 deletions projects/app/src/components/core/ai/AISettingModal/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ import { getDocPath } from '@/web/common/system/doc';
import AIModelSelector from '@/components/Select/AIModelSelector';
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
import { getWebLLMModel } from '@/web/common/system/utils';

const AIChatSettingsModal = ({
onClose,
Expand All @@ -44,18 +45,18 @@ const AIChatSettingsModal = ({
const showVisionSwitch = watch(NodeInputKeyEnum.aiChatVision) !== undefined;
const showMaxHistoriesSlider = watch('maxHistories') !== undefined;
const useVision = watch('aiChatVision');
const selectedModel = llmModelList.find((item) => item.model === model) || llmModelList[0];
const selectedModel = getWebLLMModel(model);
const llmSupportVision = !!selectedModel?.vision;

const tokenLimit = useMemo(() => {
return llmModelList.find((item) => item.model === model)?.maxResponse || 4096;
}, [llmModelList, model]);
return selectedModel?.maxResponse || 4096;
}, [selectedModel?.maxResponse]);

const onChangeModel = (e: string) => {
setValue('model', e);

// update max tokens
const modelData = llmModelList.find((item) => item.model === e);
const modelData = getWebLLMModel(e);
if (modelData) {
setValue('maxToken', modelData.maxResponse / 2);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,7 @@ const ChatInput = ({
const { previewUrl } = await uploadFile2DB({
file: copyFile.rawFile,
bucketName: 'chat',
outLinkAuthData,
metadata: {
chatId
},
Expand Down Expand Up @@ -168,7 +169,7 @@ const ChatInput = ({
{
manual: false,
errorToast: t('common:upload_file_error'),
refreshDeps: [fileList]
refreshDeps: [fileList, outLinkAuthData, chatId]
}
);
const onSelectFile = useCallback(
Expand Down
8 changes: 3 additions & 5 deletions projects/app/src/components/core/dataset/SearchParamsTip.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import {
import { useTranslation } from 'next-i18next';
import React, { useMemo } from 'react';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { getWebLLMModel } from '@/web/common/system/utils';

const SearchParamsTip = ({
searchMode,
Expand Down Expand Up @@ -34,11 +35,8 @@ const SearchParamsTip = ({

const extensionModelName = useMemo(
() =>
datasetSearchUsingExtensionQuery
? llmModelList.find((item) => item.model === queryExtensionModel)?.name ??
llmModelList[0]?.name
: undefined,
[datasetSearchUsingExtensionQuery, llmModelList, queryExtensionModel]
datasetSearchUsingExtensionQuery ? getWebLLMModel(queryExtensionModel)?.name : undefined,
[datasetSearchUsingExtensionQuery, queryExtensionModel, llmModelList]
);

return (
Expand Down
41 changes: 22 additions & 19 deletions projects/app/src/pages/api/common/file/upload.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller';
import { getUploadModel } from '@fastgpt/service/common/file/multer';
import { removeFilesByPaths } from '@fastgpt/service/common/file/utils';
Expand All @@ -10,6 +9,7 @@ import { ReadFileBaseUrl } from '@fastgpt/global/common/file/constants';
import { addLog } from '@fastgpt/service/common/system/log';
import { authFrequencyLimit } from '@/service/common/frequencyLimit/api';
import { addSeconds } from 'date-fns';
import { authChatCert } from '@/service/support/permission/auth/chat';

const authUploadLimit = (tmbId: string) => {
if (!global.feConfigs.uploadFileMaxAmount) return;
Expand All @@ -21,19 +21,18 @@ const authUploadLimit = (tmbId: string) => {
};

async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
const start = Date.now();
/* Creates the multer uploader */
const upload = getUploadModel({
maxSize: (global.feConfigs?.uploadFileMaxSize || 500) * 1024 * 1024
});
const filePaths: string[] = [];

try {
const { teamId, tmbId } = await authCert({ req, authToken: true });
const start = Date.now();
/* Creates the multer uploader */
const upload = getUploadModel({
maxSize: (global.feConfigs?.uploadFileMaxSize || 500) * 1024 * 1024
});
const { file, bucketName, metadata } = await upload.doUpload(req, res);

await authUploadLimit(tmbId);
const { teamId, tmbId, outLinkUid } = await authChatCert({ req, authToken: true });

const { file, bucketName, metadata } = await upload.doUpload(req, res);
await authUploadLimit(outLinkUid || tmbId);

addLog.info(`Upload file success ${file.originalname}, cost ${Date.now() - start}ms`);

Expand All @@ -51,15 +50,19 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
metadata: metadata
});

return {
fileId,
previewUrl: `${ReadFileBaseUrl}?filename=${file.originalname}&token=${await createFileToken({
bucketName,
teamId,
tmbId,
fileId
})}`
};
jsonRes(res, {
data: {
fileId,
previewUrl: `${ReadFileBaseUrl}?filename=${file.originalname}&token=${await createFileToken(
{
bucketName,
teamId,
tmbId,
fileId
}
)}`
}
});
} catch (error) {
jsonRes(res, {
code: 500,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ import { AppContext } from '@/pages/app/detail/components/context';
import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
import VariableTip from '@/components/common/Textarea/MyTextarea/VariableTip';
import { getWebLLMModel } from '@/web/common/system/utils';

const DatasetSelectModal = dynamic(() => import('@/components/core/app/DatasetSelectModal'));
const DatasetParamsModal = dynamic(() => import('@/components/core/app/DatasetParamsModal'));
Expand Down Expand Up @@ -121,8 +122,7 @@ const EditForm = ({
[appForm.chatConfig.variables, t]
);

const selectedModel =
llmModelList.find((item) => item.model === appForm.aiSettings.model) ?? llmModelList[0];
const selectedModel = getWebLLMModel(appForm.aiSettings.model);
const tokenLimit = useMemo(() => {
return selectedModel?.quoteMaxToken || 3000;
}, [selectedModel.quoteMaxToken]);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
import ValueTypeLabel from './render/ValueTypeLabel';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { isWorkflowStartOutput } from '@fastgpt/global/core/workflow/template/system/workflowStart';
import { getWebLLMModel } from '@/web/common/system/utils';

const NodeDatasetConcat = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
const { t } = useTranslation();
Expand All @@ -46,8 +47,7 @@ const NodeDatasetConcat = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
if (item.flowNodeType === FlowNodeTypeEnum.chatNode) {
const model =
item.inputs.find((item) => item.key === NodeInputKeyEnum.aiModel)?.value || '';
const quoteMaxToken =
llmModelList.find((item) => item.model === model)?.quoteMaxToken || 3000;
const quoteMaxToken = getWebLLMModel(model)?.quoteMaxToken || 3000;

maxTokens = Math.max(maxTokens, quoteMaxToken);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import { useSystemStore } from '@/web/common/system/useSystemStore';
import SearchParamsTip from '@/components/core/dataset/SearchParamsTip';
import { useContextSelector } from 'use-context-selector';
import { WorkflowContext } from '@/pages/app/detail/components/WorkflowComponents/context';
import { getWebLLMModel } from '@/web/common/system/utils';

const SelectDatasetParam = ({ inputs = [], nodeId }: RenderInputProps) => {
const onChangeNode = useContextSelector(WorkflowContext, (v) => v.onChangeNode);
Expand All @@ -36,8 +37,7 @@ const SelectDatasetParam = ({ inputs = [], nodeId }: RenderInputProps) => {
if (item.flowNodeType === FlowNodeTypeEnum.chatNode) {
const model =
item.inputs.find((item) => item.key === NodeInputKeyEnum.aiModel)?.value || '';
const quoteMaxToken =
llmModelList.find((item) => item.model === model)?.quoteMaxToken || 3000;
const quoteMaxToken = getWebLLMModel(model)?.quoteMaxToken || 3000;

maxTokens = Math.max(maxTokens, quoteMaxToken);
}
Expand Down
2 changes: 1 addition & 1 deletion projects/app/src/pages/chat/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -338,7 +338,7 @@ export async function getServerSideProps(context: any) {
props: {
appId: context?.query?.appId || '',
chatId: context?.query?.chatId || '',
...(await serviceSideProps(context, ['file', 'app', 'chat']))
...(await serviceSideProps(context, ['file', 'app', 'chat', 'workflow']))
}
};
}
Expand Down
2 changes: 1 addition & 1 deletion projects/app/src/pages/chat/share.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -394,7 +394,7 @@ export async function getServerSideProps(context: any) {
appIntro: app?.appId?.intro ?? 'intro',
shareId: shareId ?? '',
authToken: authToken ?? '',
...(await serviceSideProps(context, ['file', 'app', 'chat']))
...(await serviceSideProps(context, ['file', 'app', 'chat', 'workflow']))
}
};
}
2 changes: 1 addition & 1 deletion projects/app/src/pages/chat/team.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -337,7 +337,7 @@ export async function getServerSideProps(context: any) {
chatId: context?.query?.chatId || '',
teamId: context?.query?.teamId || '',
teamToken: context?.query?.teamToken || '',
...(await serviceSideProps(context, ['file', 'app', 'chat']))
...(await serviceSideProps(context, ['file', 'app', 'chat', 'workflow']))
}
};
}
Expand Down
10 changes: 9 additions & 1 deletion projects/app/src/service/support/permission/auth/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,15 @@ export async function authChatCrud({
3. share page (body: shareId outLinkUid)
4. team chat page (body: teamId teamToken)
*/
export async function authChatCert(props: AuthModeType) {
export async function authChatCert(props: AuthModeType): Promise<{
teamId: string;
tmbId: string;
authType: AuthUserTypeEnum;
apikey: string;
isOwner: boolean;
canWrite: boolean;
outLinkUid?: string;
}> {
const { teamId, teamToken, shareId, outLinkUid } = props.req.body as OutLinkChatAuthProps;

if (shareId && outLinkUid) {
Expand Down
11 changes: 11 additions & 0 deletions projects/app/src/web/common/file/controller.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,25 +3,36 @@ import { UploadImgProps } from '@fastgpt/global/common/file/api';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import { preUploadImgProps } from '@fastgpt/global/common/file/api';
import { compressBase64Img, type CompressImgProps } from '@fastgpt/web/common/file/img';
import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';

/**
* upload file to mongo gridfs
*/
export const uploadFile2DB = ({
file,
bucketName,
outLinkAuthData,
metadata = {},
percentListen
}: {
file: File;
bucketName: `${BucketNameEnum}`;
outLinkAuthData?: OutLinkChatAuthProps;
metadata?: Record<string, any>;
percentListen?: (percent: number) => void;
}) => {
const form = new FormData();
form.append('metadata', JSON.stringify(metadata));
form.append('bucketName', bucketName);
form.append('file', file, encodeURIComponent(file.name));

if (outLinkAuthData) {
for (const key in outLinkAuthData) {
// @ts-ignore
outLinkAuthData[key] && form.append(key, outLinkAuthData[key]);
}
}

return postUploadFiles(form, (e) => {
if (!e.total) return;

Expand Down
7 changes: 7 additions & 0 deletions projects/app/src/web/common/system/utils.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import { useSystemStore } from './useSystemStore';

export const downloadFetch = async ({ url, filename }: { url: string; filename: string }) => {
const a = document.createElement('a');
a.href = url;
Expand All @@ -6,3 +8,8 @@ export const downloadFetch = async ({ url, filename }: { url: string; filename:
a.click();
document.body.removeChild(a);
};

/**
 * Client-side LLM model lookup: matches the store's model list by
 * `model` id or display `name`, defaulting to the first entry.
 */
export const getWebLLMModel = (model?: string) => {
  const { llmModelList } = useSystemStore.getState();
  const matched = llmModelList.find((item) => item.model === model || item.name === model);
  return matched ?? llmModelList[0];
};
Loading

0 comments on commit d682a82

Please sign in to comment.