4.7.1 production (#1173)
Co-authored-by: heheer <[email protected]>
c121914yu and newfish-cmyk authored Apr 11, 2024
1 parent db2dd91 commit c314312
Showing 19 changed files with 198 additions and 119 deletions.
14 changes: 8 additions & 6 deletions docSite/content/docs/development/upgrading/471.md
@@ -1,5 +1,5 @@
---
title: 'V4.7.1 (in progress)'
title: 'V4.7.1 (initialization required)'
description: 'FastGPT V4.7.1 release notes'
icon: 'upgrade'
draft: false
@@ -32,8 +32,10 @@ curl --location --request POST 'https://{{host}}/api/admin/clearInvalidData' \
3. New - Laf cloud function integration: cloud functions from a Laf account can be read and used as HTTP modules.
4. New - Scheduled job that cleans up garbage data. (It cleans in small batches, covering only the last n hours, so keep the service running continuously; if it has been stopped for a long time, you can still call the clearInvalidData API for a full cleanup.)
5. New (commercial edition) - System notifications configurable from the admin console.
6. Changed - CSV import template: header validation removed; the first two columns are read automatically.
7. Fixed - Data type validation error on tool-call module connections.
8. Fixed - Data destructuring failure when entering custom indexes.
9. Fixed - rerank model data format.
10. Fixed - Question-completion chat history bug.
6. Improved - Support exporting datasets when the service is accessed by IP.
7. Changed - CSV import template: header validation removed; the first two columns are read automatically.
8. Fixed - Data type validation error on tool-call module connections.
9. Fixed - Data destructuring failure when entering custom indexes.
10. Fixed - rerank model data format.
11. Fixed - Question-completion chat history bug.
12. Fixed - Slow loading of the share page in some cases (the database connection is not triggered during SSR).
4 changes: 2 additions & 2 deletions packages/global/common/file/tools.ts
@@ -10,6 +10,6 @@ export const formatFileSize = (bytes: number): string => {
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
};

export const detectFileEncoding = (buffers: string | Buffer) => {
return (detect(buffers)?.encoding || 'utf-8') as BufferEncoding;
export const detectFileEncoding = (buffer: Buffer) => {
return detect(buffer.slice(0, 200))?.encoding?.toLocaleLowerCase();
};
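For context, the new `detectFileEncoding` samples only the first 200 bytes and lower-cases whatever the detector reports. A minimal sketch of the same idea, assuming a jschardet-style `detect()` that returns `{ encoding, confidence }` (the repo's actual `detect` import may come from a different package):

```ts
import jschardet from 'jschardet';

// Guess a file's text encoding from a small prefix of the buffer.
// Returns e.g. 'utf-8' or 'gb2312', or undefined when detection fails.
export const detectEncodingFromBuffer = (buffer: Buffer): string | undefined => {
  // 200 bytes is usually enough for BOM/byte-pattern based detection
  // and avoids scanning large files.
  const sample = buffer.subarray(0, 200);
  const result = jschardet.detect(sample); // { encoding: string | null, confidence: number }
  return result?.encoding?.toLowerCase();
};
```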
62 changes: 35 additions & 27 deletions packages/service/common/file/gridfs/controller.ts
@@ -9,6 +9,7 @@ import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
import { ReadFileByBufferParams } from '../read/type';
import { MongoRwaTextBuffer } from '../../buffer/rawText/schema';
import { readFileRawContent } from '../read/utils';
import { PassThrough } from 'stream';

export function getGFSCollection(bucket: `${BucketNameEnum}`) {
MongoFileSchema;
@@ -113,31 +114,39 @@ export async function getDownloadStream({
fileId: string;
}) {
const bucket = getGridBucket(bucketName);
const stream = bucket.openDownloadStream(new Types.ObjectId(fileId));
const copyStream = stream.pipe(new PassThrough());

return bucket.openDownloadStream(new Types.ObjectId(fileId));
}

export const readFileEncode = async ({
bucketName,
fileId
}: {
bucketName: `${BucketNameEnum}`;
fileId: string;
}) => {
const encodeStream = await getDownloadStream({ bucketName, fileId });
let buffers: Buffer = Buffer.from([]);
for await (const chunk of encodeStream) {
buffers = Buffer.concat([buffers, chunk]);
if (buffers.length > 10) {
encodeStream.abort();
break;
}
}
/* get encoding */
const buffer = await (() => {
return new Promise<Buffer>((resolve, reject) => {
let tmpBuffer: Buffer = Buffer.from([]);

stream.on('data', (chunk) => {
if (tmpBuffer.length < 20) {
tmpBuffer = Buffer.concat([tmpBuffer, chunk]);
}
if (tmpBuffer.length >= 20) {
resolve(tmpBuffer);
}
});
stream.on('end', () => {
resolve(tmpBuffer);
});
stream.on('error', (err) => {
reject(err);
});
});
})();

const encoding = detectFileEncoding(buffers);
const encoding = detectFileEncoding(buffer);

return encoding as BufferEncoding;
};
return {
fileStream: copyStream,
encoding
// encoding: 'utf-8'
};
}

export const readFileContentFromMongo = async ({
teamId,
@@ -162,9 +171,8 @@ export const readFileContentFromMongo = async ({
};
}

const [file, encoding, fileStream] = await Promise.all([
const [file, { encoding, fileStream }] = await Promise.all([
getFileById({ bucketName, fileId }),
readFileEncode({ bucketName, fileId }),
getDownloadStream({ bucketName, fileId })
]);

@@ -176,12 +184,12 @@ export const readFileContentFromMongo = async ({

const fileBuffers = await (() => {
return new Promise<Buffer>((resolve, reject) => {
let buffers = Buffer.from([]);
let buffer = Buffer.from([]);
fileStream.on('data', (chunk) => {
buffers = Buffer.concat([buffers, chunk]);
buffer = Buffer.concat([buffer, chunk]);
});
fileStream.on('end', () => {
resolve(buffers);
resolve(buffer);
});
fileStream.on('error', (err) => {
reject(err);
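The reworked `getDownloadStream` pipes the GridFS stream through a `PassThrough` so the source can be sampled for its first ~20 bytes (for encoding detection) while the copy is returned for full consumption. A minimal sketch of that tee pattern with hypothetical names, relying on the fact that both the piped `PassThrough` and an extra `'data'` listener receive every chunk:

```ts
import { PassThrough, Readable } from 'stream';

// Tee a readable stream: return a PassThrough copy for downstream consumers,
// plus a promise resolving with the first `sampleSize` bytes so the caller
// can run encoding detection without buffering the whole file.
export function teeWithSample(source: Readable, sampleSize = 20) {
  const copy = source.pipe(new PassThrough());

  const sample = new Promise<Buffer>((resolve, reject) => {
    let tmp = Buffer.from([]);
    source.on('data', (chunk: Buffer) => {
      if (tmp.length < sampleSize) {
        tmp = Buffer.concat([tmp, chunk]);
      }
      if (tmp.length >= sampleSize) {
        resolve(tmp); // later resolve calls are no-ops
      }
    });
    source.on('end', () => resolve(tmp)); // short files never reach sampleSize
    source.on('error', reject);
  });

  return { stream: copy, sample };
}
```

Resolving on `'end'` as well matters for the same reason the hunk above does it: a file shorter than the sample size would otherwise leave the promise pending.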
6 changes: 5 additions & 1 deletion packages/service/common/file/read/pptx.ts
@@ -6,7 +6,11 @@ export const readPptxRawText = async ({
buffer,
encoding
}: ReadFileByBufferParams): Promise<ReadFileResponse> => {
const result = await parseOffice({ buffer, encoding, extension: 'pptx' });
const result = await parseOffice({
buffer,
encoding: encoding as BufferEncoding,
extension: 'pptx'
});

return {
rawText: result
20 changes: 19 additions & 1 deletion packages/service/common/file/read/rawText.ts
@@ -1,8 +1,26 @@
import { ReadFileByBufferParams, ReadFileResponse } from './type.d';
import iconv from 'iconv-lite';

const rawEncodingList = [
'ascii',
'utf8',
'utf-8',
'utf16le',
'utf-16le',
'ucs2',
'ucs-2',
'base64',
'base64url',
'latin1',
'binary',
'hex'
];

// Load the raw content of the source file
export const readFileRawText = ({ buffer, encoding }: ReadFileByBufferParams): ReadFileResponse => {
const content = buffer.toString(encoding);
const content = rawEncodingList.includes(encoding)
? buffer.toString(encoding as BufferEncoding)
: iconv.decode(buffer, 'gbk');

return {
rawText: content
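`readFileRawText` now only trusts `Buffer.toString` for encodings Node supports natively and otherwise falls back to `iconv-lite`; in this patch the fallback is hard-coded to GBK. A self-contained sketch of that decision, using a hypothetical `decodeRawText` helper:

```ts
import iconv from 'iconv-lite';

// Encodings Buffer.toString can handle directly (the list shown in the patch).
const nativeEncodings = new Set([
  'ascii', 'utf8', 'utf-8', 'utf16le', 'utf-16le',
  'ucs2', 'ucs-2', 'base64', 'base64url', 'latin1', 'binary', 'hex'
]);

export function decodeRawText(buffer: Buffer, encoding?: string): string {
  if (encoding && nativeEncodings.has(encoding)) {
    return buffer.toString(encoding as BufferEncoding);
  }
  // Anything else (e.g. gb2312/gbk reported by the encoding sniffer)
  // goes through iconv-lite; the patch falls back to GBK unconditionally.
  return iconv.decode(buffer, 'gbk');
}
```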
2 changes: 1 addition & 1 deletion packages/service/common/file/read/type.d.ts
@@ -1,7 +1,7 @@
export type ReadFileByBufferParams = {
teamId: string;
buffer: Buffer;
encoding: BufferEncoding;
encoding: string;
metadata?: Record<string, any>;
};

2 changes: 1 addition & 1 deletion packages/service/common/vectorStore/pg/controller.ts
@@ -103,7 +103,7 @@ export const deleteDatasetDataVector = async (
}
return Promise.reject('deleteDatasetData: no where');
})();
console.log(where, '===');

try {
await PgClient.delete(PgDatasetTableName, {
where: [where]
7 changes: 5 additions & 2 deletions packages/service/common/vectorStore/pg/index.ts
@@ -13,8 +13,11 @@ export const connectPg = async (): Promise<Pool> => {
max: Number(process.env.DB_MAX_LINK || 20),
min: 10,
keepAlive: true,
idleTimeoutMillis: 60000,
connectionTimeoutMillis: 20000
idleTimeoutMillis: 600000,
connectionTimeoutMillis: 20000,
query_timeout: 30000,
statement_timeout: 40000,
idle_in_transaction_session_timeout: 60000
});

global.pgClient.on('error', async (err) => {
1 change: 1 addition & 0 deletions packages/service/package.json
@@ -13,6 +13,7 @@
"decompress": "^4.2.1",
"encoding": "^0.1.13",
"file-type": "^19.0.0",
"iconv-lite": "^0.6.3",
"json5": "^2.2.3",
"jsonwebtoken": "^9.0.2",
"mammoth": "^1.6.0",
23 changes: 17 additions & 6 deletions packages/web/components/common/Textarea/PromptEditor/utils.ts
@@ -173,7 +173,6 @@ export function registerLexicalTextEntity<T extends TextNode>(

export function textToEditorState(text: string = '') {
const paragraph = text?.split('\n');

return JSON.stringify({
root: {
children: paragraph.map((p) => {
@@ -206,11 +205,23 @@ export function textToEditorState(text: string = '') {
}

export function editorStateToText(editor: LexicalEditor) {
const stringifiedEditorState = JSON.stringify(editor.getEditorState().toJSON());
const parsedEditorState = editor.parseEditorState(stringifiedEditorState);
const editorStateTextString = parsedEditorState.read(() => $getRoot().getTextContent());

return editorStateTextString;
const editorStateTextString: string[] = [];
const paragraphs = editor.getEditorState().toJSON().root.children;
paragraphs.forEach((paragraph: any) => {
const children = paragraph.children;
const paragraphText: string[] = [];
children.forEach((child: any) => {
if (child.type === 'linebreak') {
paragraphText.push(`
`);
} else if (child.text) {
paragraphText.push(child.text);
}
});
editorStateTextString.push(paragraphText.join(''));
});
return editorStateTextString.join(`
`);
}

const varRegex = /\{\{([a-zA-Z_][a-zA-Z0-9_]*)\}\}/g;
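The new `editorStateToText` walks the serialized Lexical state directly instead of round-tripping it through `parseEditorState`. A stripped-down sketch of the same traversal on a plain JSON object (the types are simplified assumptions, not Lexical's real `SerializedEditorState`):

```ts
// Simplified shapes; the real Lexical serialized state has more fields.
type SerializedChild = { type?: string; text?: string };
type SerializedParagraph = { children: SerializedChild[] };
type SerializedState = { root: { children: SerializedParagraph[] } };

export function serializedStateToText(state: SerializedState): string {
  return state.root.children
    .map((paragraph) =>
      paragraph.children
        .map((child) => (child.type === 'linebreak' ? '\n' : child.text ?? ''))
        .join('')
    )
    .join('\n');
}

// Example: two paragraphs, the first containing an explicit line break node.
const text = serializedStateToText({
  root: {
    children: [
      { children: [{ text: 'Hello' }, { type: 'linebreak' }, { text: 'world' }] },
      { children: [{ text: 'Second paragraph' }] }
    ]
  }
});
// text === 'Hello\nworld\nSecond paragraph'
```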
4 changes: 4 additions & 0 deletions pnpm-lock.yaml

Some generated files are not rendered by default.

3 changes: 2 additions & 1 deletion projects/app/src/components/ChatBox/components/ChatItem.tsx
@@ -97,10 +97,11 @@ const ChatItem = ({
<Flex flexDirection={'column'} key={chat.dataId} gap={2}>
{chat.value.map((value, i) => {
const key = `${chat.dataId}-ai-${i}`;

if (value.text) {
let source = (value.text?.content || '').trim();

if (!source && chat.value.length > 1) return <></>;
if (!source && chat.value.length > 1) return null;

if (
isLastChild &&
@@ -90,10 +90,23 @@ const NodeLaf = (props: NodeProps<FlowModuleItemType>) => {

const lafFunctionSelectList = useMemo(
() =>
lafData?.lafFunctions.map((item) => ({
label: item.description ? `${item.name} (${item.description})` : item.name,
value: item.requestUrl
})) || [],
lafData?.lafFunctions.map((item) => {
const functionName = item.path.slice(1);
return {
alias: functionName,
label: item.description ? (
<Box>
<Box>{functionName}</Box>
<Box fontSize={'xs'} color={'gray.500'}>
{item.description}
</Box>
</Box>
) : (
functionName
),
value: item.requestUrl
};
}) || [],
[lafData?.lafFunctions]
);

@@ -111,6 +124,16 @@ const NodeLaf = (props: NodeProps<FlowModuleItemType>) => {

if (!lafFunction) return;

// update intro
if (lafFunction.description) {
onChangeNode({
moduleId,
type: 'attr',
key: 'intro',
value: lafFunction.description
});
}

const bodyParams =
lafFunction?.request?.content?.['application/json']?.schema?.properties || {};

@@ -232,7 +255,7 @@ const NodeLaf = (props: NodeProps<FlowModuleItemType>) => {
);

if (!lafFunction) return;
const url = `${feConfigs.lafEnv}/app/${lafData?.lafApp?.appid}/function${lafFunction?.path}?templateid=fastgptflow`;
const url = `${feConfigs.lafEnv}/app/${lafData?.lafApp?.appid}/function${lafFunction?.path}?templateid=FastGPT_Laf`;
window.open(url, '_blank');
}}
>
9 changes: 2 additions & 7 deletions projects/app/src/pages/api/common/file/read.ts
@@ -2,11 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authFileToken } from '@fastgpt/service/support/permission/controller';
import {
getDownloadStream,
getFileById,
readFileEncode
} from '@fastgpt/service/common/file/gridfs/controller';
import { getDownloadStream, getFileById } from '@fastgpt/service/common/file/gridfs/controller';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -21,9 +17,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
throw new Error('fileId is empty');
}

const [file, encoding, fileStream] = await Promise.all([
const [file, { fileStream, encoding }] = await Promise.all([
getFileById({ bucketName, fileId }),
readFileEncode({ bucketName, fileId }),
getDownloadStream({ bucketName, fileId })
]);

6 changes: 5 additions & 1 deletion projects/app/src/pages/chat/share.tsx
@@ -25,10 +25,12 @@ import { useTranslation } from 'next-i18next';
import { getInitOutLinkChatInfo } from '@/web/core/chat/api';
import { getChatTitleFromChatMessage } from '@fastgpt/global/core/chat/utils';
import { useChatStore } from '@/web/core/chat/storeChat';
import { ChatRoleEnum, ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
import { ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
import MyBox from '@/components/common/MyBox';
import { MongoOutLink } from '@fastgpt/service/support/outLink/schema';
import { OutLinkWithAppType } from '@fastgpt/global/support/outLink/type';
import { addLog } from '@fastgpt/service/common/system/log';
import { connectToDatabase } from '@/service/mongo';

const OutLink = ({
appName,
@@ -397,6 +399,7 @@ export async function getServerSideProps(context: any) {

const app = await (async () => {
try {
await connectToDatabase();
const app = (await MongoOutLink.findOne(
{
shareId
@@ -407,6 +410,7 @@
.lean()) as OutLinkWithAppType;
return app;
} catch (error) {
addLog.error('getServerSideProps', error);
return undefined;
}
})();