perf: logger (#186)
* feat: finish response

* perf: logger

* docs

* perf: log

* docs
c121914yu authored Aug 17, 2023
1 parent 324e4a0 commit 40168c5
Showing 26 changed files with 500 additions and 93 deletions.
9 changes: 2 additions & 7 deletions client/.env.template
@@ -17,10 +17,5 @@ OPENAI_BASE_URL=https://api.openai.com/v1
# 此处逻辑:优先走 ONEAPI_URL,如果填写了 ONEAPI_URL,key 也需要是 ONEAPI 的 key
CHAT_API_KEY=sk-xxxx
# db
MONGODB_URI=mongodb://username:[email protected]:27017/?authSource=admin
MONGODB_NAME=fastgpt
PG_HOST=0.0.0.0
PG_PORT=8100
PG_USER=root
PG_PASSWORD=psw
PG_DB_NAME=dbname
MONGODB_URI=mongodb://username:[email protected]:27017/fastgpt
PG_URL=postgresql://username:password@host:port/postgres
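Note: the five discrete PG_* variables collapse into a single libpq-style connection string, and the MongoDB database name now rides in the URI path (/fastgpt) rather than in a separate MONGODB_NAME variable. Purely as an illustration using the old template placeholders, the equivalent new value would be roughly PG_URL=postgresql://root:psw@0.0.0.0:8100/dbname; substitute your own host, port and credentials.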
2 changes: 2 additions & 0 deletions client/package.json
@@ -58,6 +58,8 @@
"request-ip": "^3.3.0",
"sass": "^1.58.3",
"tunnel": "^0.0.6",
"winston": "^3.10.0",
"winston-mongodb": "^5.1.1",
"zustand": "^4.3.5"
},
"devDependencies": {
285 changes: 283 additions & 2 deletions client/pnpm-lock.yaml

Large diffs are not rendered by default.

16 changes: 13 additions & 3 deletions client/src/constants/flow/ModuleTemplate.ts
@@ -182,8 +182,9 @@ export const ChatModule: FlowModuleTemplateType = {
{
key: TaskResponseKeyEnum.answerText,
label: '模型回复',
description: '直接响应,无需配置',
type: FlowOutputItemTypeEnum.hidden,
description: '如果外接了内容,会在回复结束时自动添加\n\n',
valueType: FlowValueTypeEnum.string,
type: FlowOutputItemTypeEnum.source,
targets: []
},
{
@@ -285,7 +286,16 @@ export const AnswerModule: FlowModuleTemplateType = {
'可以使用 \\n 来实现换行。也可以通过外部模块输入实现回复,外部模块输入时会覆盖当前填写的内容'
}
],
outputs: []
outputs: [
{
key: 'finish',
label: '回复结束',
description: '回复完成后触发',
valueType: FlowValueTypeEnum.boolean,
type: FlowOutputItemTypeEnum.source,
targets: []
}
]
};
export const TFSwitchModule: FlowModuleTemplateType = {
logo: '',
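Two template changes worth noting: the ChatModule answerText output switches from a hidden output to a connectable source, and the AnswerModule gains a boolean 'finish' source output that fires once its reply has been sent. Wiring another module to that new output is not shown in this commit; a populated output item would presumably look roughly like the sketch below (TypeScript, with the target shape { moduleId, key } assumed rather than taken from this diff):

{
  key: 'finish',
  label: '回复结束',
  description: '回复完成后触发',
  valueType: FlowValueTypeEnum.boolean,
  type: FlowOutputItemTypeEnum.source,
  // hypothetical wiring: point at an input key on the next module
  targets: [{ moduleId: 'nextModuleId', key: 'inputKeyOnThatModule' }]
}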
5 changes: 3 additions & 2 deletions client/src/pages/api/openapi/v1/chat/completions.ts
@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authUser, authApp, authShareChat, AuthUserTypeEnum } from '@/service/utils/auth';
import { sseErrRes, jsonRes } from '@/service/response';
import { withNextCors } from '@/service/utils/tools';
import { addLog, withNextCors } from '@/service/utils/tools';
import { ChatRoleEnum, ChatSourceEnum, sseResponseEventEnum } from '@/constants/chat';
import {
dispatchHistory,
@@ -181,7 +181,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
});
}

console.log(`finish time: ${(Date.now() - startTime) / 1000}s`);
addLog.info(`completions running time: ${(Date.now() - startTime) / 1000}s`);

if (stream) {
sseResponse({
@@ -351,6 +351,7 @@ export async function dispatchModules({
res,
stream,
detail,
outputs: module.outputs,
userOpenaiAccount: user?.openaiAccount,
...params
};
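The addLog helper imported above lives in client/src/service/utils/tools.ts, whose diff is not rendered in this view. Judging from the call sites in this commit and the winston logger initialised in client/src/service/mongo.ts further down, it is presumably a thin wrapper along these lines (a sketch under that assumption, not the actual implementation):

// Hypothetical sketch of addLog; the real code is in client/src/service/utils/tools.ts (not shown).
export const addLog = {
  info: (msg: string, meta?: Record<string, any>) => {
    // extra data is passed under 'meta' so the MongoDB transport (metaKey: 'meta') picks it up
    global.logger?.info(msg, { meta });
  },
  error: (msg: string, meta?: Record<string, any>) => {
    global.logger?.error(msg, { meta });
  }
};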
@@ -4,13 +4,15 @@ import NodeCard from '../modules/NodeCard';
import { FlowModuleItemType } from '@/types/flow';
import Container from '../modules/Container';
import RenderInput from '../render/RenderInput';
import RenderOutput from '../render/RenderOutput';

const NodeAnswer = ({ data }: NodeProps<FlowModuleItemType>) => {
const { moduleId, inputs, outputs, onChangeNode } = data;
return (
<NodeCard minW={'400px'} {...data}>
<Container borderTop={'2px solid'} borderTopColor={'myGray.200'}>
<RenderInput moduleId={moduleId} onChangeNode={onChangeNode} flowInputList={inputs} />
<RenderOutput onChangeNode={onChangeNode} moduleId={moduleId} flowOutputList={outputs} />
</Container>
</NodeCard>
);
2 changes: 1 addition & 1 deletion client/src/service/events/generateQA.ts
@@ -43,7 +43,7 @@ export async function generateQA(): Promise<any> {
// task preemption
if (!data) {
reduceQueue();
global.qaQueueLen <= 0 && console.log(`没有需要【QA】的数据, ${global.qaQueueLen}`);
global.qaQueueLen <= 0 && console.log(`【QA】任务完成`);
return;
}

2 changes: 1 addition & 1 deletion client/src/service/events/generateVector.ts
@@ -44,7 +44,7 @@ export async function generateVector(): Promise<any> {
// task preemption
if (!data) {
reduceQueue();
global.vectorQueueLen <= 0 && console.log(`没有需要【索引】的数据, ${global.vectorQueueLen}`);
global.vectorQueueLen <= 0 && console.log(`【索引】任务完成`);
return;
}

21 changes: 13 additions & 8 deletions client/src/service/events/pushBill.ts
@@ -3,6 +3,7 @@ import { BillSourceEnum } from '@/constants/user';
import { getModel } from '../utils/data';
import { ChatHistoryItemResType } from '@/types/chat';
import { formatPrice } from '@/utils/user';
import { addLog } from '../utils/tools';

export const pushTaskBill = async ({
appName,
@@ -48,7 +49,11 @@ export const pushTaskBill = async ({
: [])
]);

console.log('finish bill:', formatPrice(total));
addLog.info(`finish completions`, {
source,
userId,
price: formatPrice(total)
});
};

export const updateShareChatBill = async ({
@@ -66,8 +71,8 @@ export const updateShareChatBill = async ({
lastTime: new Date()
}
);
} catch (error) {
console.log('update shareChat error', error);
} catch (err) {
addLog.error('update shareChat error', { err });
}
};

@@ -82,7 +87,7 @@ export const pushSplitDataBill = async ({
totalTokens: number;
appName: string;
}) => {
console.log(`splitData generate success. token len: ${totalTokens}.`);
addLog.info('splitData generate success', { totalTokens });

let billId;

@@ -107,8 +112,8 @@ export const pushSplitDataBill = async ({
await User.findByIdAndUpdate(userId, {
$inc: { balance: -total }
});
} catch (error) {
console.log('创建账单失败:', error);
} catch (err) {
addLog.error('Create completions bill error', { err });
billId && Bill.findByIdAndDelete(billId);
}
};
@@ -156,8 +161,8 @@ export const pushGenerateVectorBill = async ({
await User.findByIdAndUpdate(userId, {
$inc: { balance: -total }
});
} catch (error) {
console.log('创建账单失败:', error);
} catch (err) {
addLog.error('Create generateVector bill error', { err });
billId && Bill.findByIdAndDelete(billId);
}
} catch (error) {
32 changes: 31 additions & 1 deletion client/src/service/moduleDispatch/chat/oneapi.ts
@@ -17,6 +17,7 @@ import { ChatModelItemType } from '@/types/model';
import { UserModelSchema } from '@/types/mongoSchema';
import { textCensor } from '@/service/api/plugins';
import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import { AppModuleItemType } from '@/types/app';

export type ChatProps = {
res: NextApiResponse;
@@ -31,6 +32,7 @@
systemPrompt?: string;
limitPrompt?: string;
userOpenaiAccount: UserModelSchema['openaiAccount'];
outputs: AppModuleItemType['outputs'];
};
export type ChatResponse = {
[TaskResponseKeyEnum.answerText]: string;
@@ -52,8 +54,12 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
userChatInput,
systemPrompt = '',
limitPrompt = '',
userOpenaiAccount
userOpenaiAccount,
outputs
} = props as ChatProps;
if (!userChatInput) {
return Promise.reject('Question is empty');
}

// temperature adapt
const modelConstantsData = getChatModel(model);
@@ -142,6 +148,8 @@
messages: completeMessages
});

targetResponse({ res, detail, outputs });

return {
answerText: answer,
totalTokens,
@@ -304,6 +312,28 @@ function getMaxTokens({
};
}

function targetResponse({
res,
outputs,
detail
}: {
res: NextApiResponse;
outputs: AppModuleItemType['outputs'];
detail: boolean;
}) {
const targets =
outputs.find((output) => output.key === TaskResponseKeyEnum.answerText)?.targets || [];

if (targets.length === 0) return;
sseResponse({
res,
event: detail ? sseResponseEventEnum.answer : undefined,
data: textAdaptGptResponse({
text: '\n'
})
});
}

async function streamResponse({
res,
detail,
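Two behavioural additions in this module: dispatchChatCompletion now rejects immediately when userChatInput is empty, and the new targetResponse helper checks whether the answerText output has any connected targets. If it does, one extra newline chunk (text: '\n') is pushed over SSE so the streamed model reply is separated from whatever the connected module appends next, in line with the updated answerText description in ModuleTemplate.ts (roughly: if downstream content is attached, a separator is added automatically when the reply ends).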
4 changes: 3 additions & 1 deletion client/src/service/moduleDispatch/tools/answer.ts
@@ -11,6 +11,7 @@ export type AnswerProps = {
};
export type AnswerResponse = {
[TaskResponseKeyEnum.answerText]: string;
finish: boolean;
};

export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
@@ -27,6 +28,7 @@ export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
}

return {
[TaskResponseKeyEnum.answerText]: text
[TaskResponseKeyEnum.answerText]: text,
finish: true
};
};
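The finish: true value returned here backs the new '回复结束' (reply finished) output that this commit declares for the AnswerModule in ModuleTemplate.ts, whose description ('回复完成后触发') indicates it triggers connected modules once the fixed-text reply has been sent.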
37 changes: 36 additions & 1 deletion client/src/service/mongo.ts
@@ -7,6 +7,8 @@ import { PRICE_SCALE } from '@/constants/common';
import { connectPg, PgClient } from './pg';
import { createHashPassword } from '@/utils/tools';
import { PgTrainingTableName } from '@/constants/plugin';
import { createLogger, format, transports } from 'winston';
import 'winston-mongodb';

/**
* connect MongoDB and init data
@@ -32,14 +34,16 @@ export async function connectToDatabase(): Promise<void> {
});
}

// logger
initLogger();

// init function
getInitConfig();

try {
mongoose.set('strictQuery', true);
global.mongodb = await mongoose.connect(process.env.MONGODB_URI as string, {
bufferCommands: true,
dbName: process.env.MONGODB_NAME,
maxConnecting: Number(process.env.DB_MAX_LINK || 5),
maxPoolSize: Number(process.env.DB_MAX_LINK || 5),
minPoolSize: 2
@@ -57,6 +61,37 @@ export async function connectToDatabase(): Promise<void> {
startQueue();
}

function initLogger() {
global.logger = createLogger({
transports: [
new transports.MongoDB({
db: process.env.MONGODB_URI as string,
collection: 'server_logs',
options: {
useUnifiedTopology: true
},
cappedSize: 500000000,
tryReconnect: true,
metaKey: 'meta',
format: format.combine(format.timestamp(), format.json())
}),
new transports.Console({
format: format.combine(
format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
format.printf((info) => {
if (info.level === 'error') {
console.log(info.meta);
return `${info.level}: ${[info.timestamp]}: ${info.message}`;
}
return `${info.level}: ${[info.timestamp]}: ${info.message}${
info.meta ? `: ${JSON.stringify(info.meta)}` : ''
}`;
})
)
})
]
});
}
async function initRootUser() {
try {
const rootUser = await User.findOne({
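For a sense of what this configuration produces (sample values only, not taken from the diff): a call such as addLog.info('finish completions', { source, userId, price }) should end up in two places. The MongoDB transport writes a document into the server_logs collection (capped at roughly 500 MB via cappedSize, with the payload stored under the meta key), and the console transport prints a line shaped by the printf format above, roughly:

info: 2023-08-17 12:00:00: finish completions: {"source":"api","userId":"64db1234abcd","price":0.012}

Error-level entries log the raw meta object to the console separately and omit the JSON suffix from the formatted line.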
8 changes: 2 additions & 6 deletions client/src/service/pg.ts
@@ -8,13 +8,9 @@ export const connectPg = async () => {
}

global.pgClient = new Pool({
host: process.env.PG_HOST,
port: process.env.PG_PORT ? +process.env.PG_PORT : 5432,
user: process.env.PG_USER,
password: process.env.PG_PASSWORD,
database: process.env.PG_DB_NAME,
connectionString: process.env.PG_URL,
max: Number(process.env.DB_MAX_LINK || 5),
idleTimeoutMillis: 30000,
keepAlive: true,
connectionTimeoutMillis: 5000
});
