mirror of
https://github.com/vastxie/99AI.git
synced 2025-11-13 12:13:43 +08:00
NineAI 2.4.2
This commit is contained in:
93
dist/modules/chatgpt/baidu.js
vendored
Normal file
93
dist/modules/chatgpt/baidu.js
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
"use strict";
// CommonJS module emitted by the TypeScript compiler.
Object.defineProperty(exports, "__esModule", { value: true });
// Pre-declare exports so circular requires see the names before assignment.
exports.sendMessageFromBaidu = exports.getAccessToken = void 0;
const axios = require('axios');
|
||||
/**
 * Build a lookup table from lower-cased public model name to the
 * Baidu Wenxin Workshop endpoint path segment for that model.
 * @returns {Record<string, string>} e.g. { 'ernie-bot': 'completions', ... }
 */
const getApiModelMaps = () => {
    const maps = {
        'ERNIE-Bot': 'completions',
        'ERNIE-Bot-turbo': 'eb-instant',
        'BLOOMZ-7B': 'bloomz_7b1',
        'ERNIE-Bot-4': 'completions_pro',
        'Llama-2-7b-chat': 'llama_2_7b',
        'Llama-2-13b-chat': 'llama_2_13b',
        'ChatGLM2-6B-32K': 'chatglm2_6b_32k',
        'Qianfan-Chinese-Llama-2-7B': 'qianfan_chinese_llama_2_7b',
    };
    // Was `Object.keys(maps).map(...)` with a discarded result — use a plain
    // loop since this is a side-effecting build, not a transformation.
    const res = {};
    for (const key of Object.keys(maps)) {
        res[key.toLowerCase()] = maps[key];
    }
    return res;
};
|
||||
/**
 * Exchange a Baidu API key/secret pair for an OAuth2 access token
 * (grant_type=client_credentials).
 * @param {string} key - Baidu API key (client_id).
 * @param {string} secret - Baidu secret key (client_secret).
 * @returns {Promise<string>} resolves with the access_token string;
 *   rejects with the underlying axios error on failure.
 */
function getAccessToken(key, secret) {
    // Escape the credentials so unusual characters cannot corrupt the query string.
    const url = `https://aip.baidubce.com/oauth/2.0/token?grant_type=client_credentials&client_id=${encodeURIComponent(key)}&client_secret=${encodeURIComponent(secret)}`;
    // axios.post already returns a promise — wrapping it in `new Promise` was
    // the explicit-construction anti-pattern; rejections now propagate as-is.
    return axios.post(url).then((response) => response.data.access_token);
}
exports.getAccessToken = getAccessToken;
|
||||
/**
 * Send a streamed chat request to the Baidu Wenxin Workshop chat API.
 *
 * @param {Array<object>} messagesHistory - chat messages in Baidu's format.
 * @param {object} opts
 * @param {(data: object) => void} opts.onProgress - called with each parsed SSE frame.
 * @param {string} opts.accessToken - token from getAccessToken().
 * @param {string} opts.model - public model name; resolved via getApiModelMaps().
 * @param {number} [opts.temperature=0.95] - sampling temperature forwarded to the API.
 * @returns {Promise<object>} resolves with the final frame (is_end === true),
 *   augmented with `.text` holding the full concatenated reply.
 */
function sendMessageFromBaidu(messagesHistory, { onProgress, accessToken, model, temperature = 0.95 }) {
    const endUrl = getApiModelMaps()[model.trim().toLowerCase()];
    return new Promise((resolve, reject) => {
        const url = `https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/${endUrl}?access_token=${accessToken}`;
        const options = {
            method: 'POST',
            url,
            responseType: 'stream',
            headers: {
                'Content-Type': 'application/json',
            },
            data: {
                stream: true,
                // BUG FIX: `temperature` was destructured (with a default) but never
                // sent to the API, so the parameter silently had no effect.
                temperature,
                messages: messagesHistory,
            },
        };
        axios(options)
            .then((response) => {
                const stream = response.data;
                let resData = {};
                let cacheChunk = '';   // buffer for a JSON frame split across chunks
                let cacheResText = ''; // accumulated reply text
                stream.on('data', (chunk) => {
                    // SSE frames are separated by blank lines; drop empty segments.
                    const lines = chunk
                        .toString()
                        .split('\n\n')
                        .filter((line) => line.trim() !== '');
                    for (const line of lines) {
                        const message = line.replace('data: ', '');
                        try {
                            const msg = cacheChunk + message;
                            const parseData = JSON.parse(msg);
                            cacheChunk = '';
                            const { is_end, result } = parseData;
                            result && (cacheResText += result);
                            if (is_end) {
                                resData = parseData;
                                resData.text = cacheResText;
                            }
                            onProgress(parseData);
                        }
                        catch (error) {
                            // Incomplete JSON (frame split mid-chunk): buffer it and
                            // retry once the next chunk arrives.
                            cacheChunk = message;
                        }
                    }
                });
                // BUG FIX: without an 'error' handler the promise never settled
                // when the stream failed mid-transfer.
                stream.on('error', (error) => {
                    reject(error);
                });
                stream.on('end', () => {
                    cacheResText = '';
                    cacheChunk = '';
                    resolve(resData);
                });
            })
            .catch((error) => {
                // Preserve the original Error; `new Error(error)` stringified
                // non-string errors into "[object Object]".
                reject(error instanceof Error ? error : new Error(String(error)));
            });
    });
}
exports.sendMessageFromBaidu = sendMessageFromBaidu;
|
||||
48
dist/modules/chatgpt/chatBox.entity.js
vendored
Normal file
48
dist/modules/chatgpt/chatBox.entity.js
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
"use strict";
// TypeScript-emitted helper: applies a list of decorators to a class or member.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// TypeScript-emitted helper: records design-time type metadata (needs reflect-metadata).
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChatBoxEntity = void 0;
const typeorm_1 = require("typeorm");
const baseEntity_1 = require("../../common/entity/baseEntity");
// TypeORM entity mapped to the `chat_box` table (see @Entity decorator below).
let ChatBoxEntity = class ChatBoxEntity extends baseEntity_1.BaseEntity {
};
// Column registrations emitted from the original TS property decorators.
__decorate([
    (0, typeorm_1.Column)({ comment: '分类ID' }), // category id (FK into chat_box_type, presumably — confirm against service)
    __metadata("design:type", Number)
], ChatBoxEntity.prototype, "typeId", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '应用ID', nullable: true }), // optional app id
    __metadata("design:type", Number)
], ChatBoxEntity.prototype, "appId", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '快速描述词', nullable: true, type: 'text' }), // optional quick prompt text
    __metadata("design:type", String)
], ChatBoxEntity.prototype, "prompt", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '标题名称' }), // display title
    __metadata("design:type", String)
], ChatBoxEntity.prototype, "title", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '排序ID', default: 100 }), // sort order, defaults to 100
    __metadata("design:type", Number)
], ChatBoxEntity.prototype, "order", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '开启状态', default: true }), // enabled flag, defaults to true
    __metadata("design:type", Boolean)
], ChatBoxEntity.prototype, "status", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '跳转地址' }), // jump/redirect URL
    __metadata("design:type", String)
], ChatBoxEntity.prototype, "url", void 0);
ChatBoxEntity = __decorate([
    (0, typeorm_1.Entity)({ name: 'chat_box' })
], ChatBoxEntity);
exports.ChatBoxEntity = ChatBoxEntity;
|
||||
36
dist/modules/chatgpt/chatBoxType.entity.js
vendored
Normal file
36
dist/modules/chatgpt/chatBoxType.entity.js
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
"use strict";
// TypeScript-emitted helper: applies a list of decorators to a class or member.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// TypeScript-emitted helper: records design-time type metadata (needs reflect-metadata).
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChatBoxTypeEntity = void 0;
const typeorm_1 = require("typeorm");
const baseEntity_1 = require("../../common/entity/baseEntity");
// TypeORM entity mapped to the `chat_box_type` table: categories for chat boxes.
let ChatBoxTypeEntity = class ChatBoxTypeEntity extends baseEntity_1.BaseEntity {
};
__decorate([
    (0, typeorm_1.Column)({ comment: '分类名称' }), // category name
    __metadata("design:type", String)
], ChatBoxTypeEntity.prototype, "name", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: 'icon图标' }), // icon identifier/URL
    __metadata("design:type", String)
], ChatBoxTypeEntity.prototype, "icon", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '排序ID', default: 10 }), // sort order, defaults to 10
    __metadata("design:type", Number)
], ChatBoxTypeEntity.prototype, "order", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '是否打开', default: true }), // enabled flag, defaults to true
    __metadata("design:type", Boolean)
], ChatBoxTypeEntity.prototype, "status", void 0);
ChatBoxTypeEntity = __decorate([
    (0, typeorm_1.Entity)({ name: 'chat_box_type' })
], ChatBoxTypeEntity);
exports.ChatBoxTypeEntity = ChatBoxTypeEntity;
|
||||
40
dist/modules/chatgpt/chatPre.entity.js
vendored
Normal file
40
dist/modules/chatgpt/chatPre.entity.js
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
"use strict";
// TypeScript-emitted helper: applies a list of decorators to a class or member.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// TypeScript-emitted helper: records design-time type metadata (needs reflect-metadata).
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChatPreEntity = void 0;
const typeorm_1 = require("typeorm");
const baseEntity_1 = require("../../common/entity/baseEntity");
// TypeORM entity mapped to the `chat_pre` table: preset question entries.
let ChatPreEntity = class ChatPreEntity extends baseEntity_1.BaseEntity {
};
__decorate([
    (0, typeorm_1.Column)({ comment: '分类ID' }), // category id (FK into chat_pre_type, presumably — confirm against service)
    __metadata("design:type", Number)
], ChatPreEntity.prototype, "typeId", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '预设问题描述词', nullable: true, type: 'text' }), // optional preset prompt text
    __metadata("design:type", String)
], ChatPreEntity.prototype, "prompt", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '标题名称' }), // display title
    __metadata("design:type", String)
], ChatPreEntity.prototype, "title", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '排序ID', default: 100 }), // sort order, defaults to 100
    __metadata("design:type", Number)
], ChatPreEntity.prototype, "order", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '开启状态', default: true }), // enabled flag, defaults to true
    __metadata("design:type", Boolean)
], ChatPreEntity.prototype, "status", void 0);
ChatPreEntity = __decorate([
    (0, typeorm_1.Entity)({ name: 'chat_pre' })
], ChatPreEntity);
exports.ChatPreEntity = ChatPreEntity;
|
||||
36
dist/modules/chatgpt/chatPreType.entity.js
vendored
Normal file
36
dist/modules/chatgpt/chatPreType.entity.js
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
"use strict";
// TypeScript-emitted helper: applies a list of decorators to a class or member.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// TypeScript-emitted helper: records design-time type metadata (needs reflect-metadata).
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChatPreTypeEntity = void 0;
const typeorm_1 = require("typeorm");
const baseEntity_1 = require("../../common/entity/baseEntity");
// TypeORM entity mapped to the `chat_pre_type` table: categories for preset questions.
let ChatPreTypeEntity = class ChatPreTypeEntity extends baseEntity_1.BaseEntity {
};
__decorate([
    (0, typeorm_1.Column)({ comment: '分类名称' }), // category name
    __metadata("design:type", String)
], ChatPreTypeEntity.prototype, "name", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: 'icon图标', nullable: true }), // optional icon identifier/URL
    __metadata("design:type", String)
], ChatPreTypeEntity.prototype, "icon", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '排序ID', default: 10 }), // sort order, defaults to 10
    __metadata("design:type", Number)
], ChatPreTypeEntity.prototype, "order", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '是否打开', default: true }), // enabled flag, defaults to true
    __metadata("design:type", Boolean)
], ChatPreTypeEntity.prototype, "status", void 0);
ChatPreTypeEntity = __decorate([
    (0, typeorm_1.Entity)({ name: 'chat_pre_type' })
], ChatPreTypeEntity);
exports.ChatPreTypeEntity = ChatPreTypeEntity;
|
||||
310
dist/modules/chatgpt/chatgpt.controller.js
vendored
Normal file
310
dist/modules/chatgpt/chatgpt.controller.js
vendored
Normal file
@@ -0,0 +1,310 @@
|
||||
"use strict";
// TypeScript-emitted helper: applies a list of decorators to a class or member.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// TypeScript-emitted helper: records design-time type metadata (needs reflect-metadata).
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
// TypeScript-emitted helper: applies a parameter decorator at a given index.
var __param = (this && this.__param) || function (paramIndex, decorator) {
    return function (target, key) { decorator(target, key, paramIndex); }
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChatgptController = void 0;
const jwtAuth_guard_1 = require("./../../common/auth/jwtAuth.guard");
const swagger_1 = require("@nestjs/swagger");
const chatgpt_service_1 = require("./chatgpt.service");
const common_1 = require("@nestjs/common");
const chatProcess_dto_1 = require("./dto/chatProcess.dto");
const chatDraw_dto_1 = require("./dto/chatDraw.dto");
const adminAuth_guard_1 = require("../../common/auth/adminAuth.guard");
const superAuth_guard_1 = require("../../common/auth/superAuth.guard");
const globalConfig_service_1 = require("../globalConfig/globalConfig.service");
// REST controller for chat, drawing, and the ChatBox/ChatPre admin CRUD.
// Routing, guards, and Swagger metadata are attached via the __decorate
// registrations below the class body.
let ChatgptController = class ChatgptController {
    constructor(chatgptService, globalConfigService) {
        this.chatgptService = chatgptService;
        this.globalConfigService = globalConfigService;
    }
    // Streaming chat completion; `res` is passed so the service can write the response itself.
    chatProcess(body, req, res) {
        return this.chatgptService.chatProcess(body, req, res);
    }
    // Non-streaming variant: no `res`, result is returned to Nest.
    chatProcessSync(body, req) {
        return this.chatgptService.chatProcess(Object.assign({}, body), req);
    }
    // Expands a short drawing prompt via GPT; system prompt is configurable, with a built-in fallback.
    async mjAssociate(body, req) {
        const mjCustomLianxiangPrompt = await this.globalConfigService.getConfigs(['mjCustomLianxiangPrompt']);
        body.systemMessage =
            mjCustomLianxiangPrompt ||
                `midjourney是一款AI绘画工具,只要你输入你想到的文字,就能通过人工智能产出相对应的图片、我希望你作为MidJourney程序的提示词(prompt)生成器。你的工作是根据我给你的一段提示内容扩展为更详细和更有创意的描述,以激发人工智能的独特和有趣的图像。请记住,人工智能能够理解广泛的语言,并能解释抽象的概念,所以请自由发挥想象力和描述力,尽可能地发挥。例如,你可以描述一个未来城市的场景,或一个充满奇怪生物的超现实景观。你的描述越详细、越有想象力,产生的图像就越有趣、Midjourney prompt的标准公式为:(image we're prompting).(5 descriptivekeywords). (camera type). (camera lens type). (time of day)(style of photograph).(type offilm)、请记住这个公式,后续统一使用该公式进行prompt生成、最终把我给你的提示变成一整段连续不分开的完整内容,并且只需要用英文回复您的联想、一定不要回复别内容、包括解释、我只需要纯粹的内容。`;
        // NOTE: `cusromPrompt` [sic] — misspelled key kept as-is; the service reads this exact name.
        return this.chatgptService.chatProcess(Object.assign(Object.assign({}, body), { cusromPrompt: true }), req);
    }
    // Translates/refines a drawing prompt to English; configurable system prompt with fallback.
    async mjFanyi(body, req) {
        const mjCustomFanyiPrompt = await this.globalConfigService.getConfigs(['mjCustomFanyiPrompt']);
        body.systemMessage =
            mjCustomFanyiPrompt ||
                `接下来我会给你一些内容、我希望你帮我翻译成英文、不管我给你任何语言、你都回复我英文、如果给你了英文、依然回复我更加优化的英文、并且期望你不需要做任何多余的解释、给我英文即可、不要加任何东西、我只需要英文!`;
        return this.chatgptService.chatProcess(Object.assign(Object.assign({}, body), { cusromPrompt: true }), req);
    }
    // Produces a markdown outline for mind-map rendering; streams via `res`.
    async chatmind(body, req, res) {
        const mindCustomPrompt = await this.globalConfigService.getConfigs(['mindCustomPrompt']);
        body.systemMessage =
            mindCustomPrompt ||
                `我希望你使用markdown格式回答我得问题、我的需求是得到一份markdown格式的大纲、尽量做的精细、层级多一点、不管我问你什么、都需要您回复我一个大纲出来、我想使用大纲做思维导图、除了大纲之外、不要无关内容和总结。`;
        return this.chatgptService.chatProcess(Object.assign(Object.assign({}, body), { cusromPrompt: true }), req, res);
    }
    // Image generation endpoint.
    async draw(body, req) {
        return await this.chatgptService.draw(body, req);
    }
    // --- ChatBox type/item CRUD (thin delegations to the service) ---
    async setChatBoxType(req, body) {
        return await this.chatgptService.setChatBoxType(req, body);
    }
    async delChatBoxType(req, body) {
        return await this.chatgptService.delChatBoxType(req, body);
    }
    async queryChatBoxType() {
        return await this.chatgptService.queryChatBoxType();
    }
    async setChatBox(req, body) {
        return await this.chatgptService.setChatBox(req, body);
    }
    async delChatBox(req, body) {
        return await this.chatgptService.delChatBox(req, body);
    }
    async queryChatBox() {
        return await this.chatgptService.queryChatBox();
    }
    async queryChatBoxFrontend() {
        return await this.chatgptService.queryChatBoxFrontend();
    }
    // --- ChatPre type/item CRUD (thin delegations to the service) ---
    async setChatPreType(req, body) {
        return await this.chatgptService.setChatPreType(req, body);
    }
    async delChatPreType(req, body) {
        return await this.chatgptService.delChatPreType(req, body);
    }
    async queryChatPreType() {
        return await this.chatgptService.queryChatPreType();
    }
    async setChatPre(req, body) {
        return await this.chatgptService.setChatPre(req, body);
    }
    async delChatPre(req, body) {
        return await this.chatgptService.delChatPre(req, body);
    }
    async queryChatPre() {
        return await this.chatgptService.queryChatPre();
    }
    async queryChatPreList() {
        return await this.chatgptService.queryChatPreList();
    }
};
// Route/guard/Swagger registrations emitted from the original TS decorators.
// POST chatgpt/chat-process — JWT-guarded streaming chat.
__decorate([
    (0, common_1.Post)('chat-process'),
    (0, swagger_1.ApiOperation)({ summary: 'gpt聊天对话' }),
    (0, common_1.UseGuards)(jwtAuth_guard_1.JwtAuthGuard),
    (0, swagger_1.ApiBearerAuth)(),
    __param(0, (0, common_1.Body)()),
    __param(1, (0, common_1.Req)()),
    __param(2, (0, common_1.Res)()),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [chatProcess_dto_1.ChatProcessDto, Object, Object]),
    __metadata("design:returntype", void 0)
], ChatgptController.prototype, "chatProcess", null);
// POST chatgpt/chat-sync — JWT-guarded non-streaming chat.
__decorate([
    (0, common_1.Post)('chat-sync'),
    (0, swagger_1.ApiOperation)({ summary: 'gpt聊天对话' }),
    (0, common_1.UseGuards)(jwtAuth_guard_1.JwtAuthGuard),
    (0, swagger_1.ApiBearerAuth)(),
    __param(0, (0, common_1.Body)()),
    __param(1, (0, common_1.Req)()),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [chatProcess_dto_1.ChatProcessDto, Object]),
    __metadata("design:returntype", void 0)
], ChatgptController.prototype, "chatProcessSync", null);
// POST chatgpt/mj-associate — JWT-guarded prompt expansion.
__decorate([
    (0, common_1.Post)('mj-associate'),
    (0, swagger_1.ApiOperation)({ summary: 'gpt描述词绘画联想' }),
    (0, common_1.UseGuards)(jwtAuth_guard_1.JwtAuthGuard),
    (0, swagger_1.ApiBearerAuth)(),
    __param(0, (0, common_1.Body)()),
    __param(1, (0, common_1.Req)()),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [chatProcess_dto_1.ChatProcessDto, Object]),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "mjAssociate", null);
// POST chatgpt/mj-fy — JWT-guarded prompt translation.
__decorate([
    (0, common_1.Post)('mj-fy'),
    (0, swagger_1.ApiOperation)({ summary: 'gpt描述词绘画翻译' }),
    (0, common_1.UseGuards)(jwtAuth_guard_1.JwtAuthGuard),
    (0, swagger_1.ApiBearerAuth)(),
    __param(0, (0, common_1.Body)()),
    __param(1, (0, common_1.Req)()),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [chatProcess_dto_1.ChatProcessDto, Object]),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "mjFanyi", null);
// POST chatgpt/chat-mind — JWT-guarded mind-map outline stream.
__decorate([
    (0, common_1.Post)('chat-mind'),
    (0, swagger_1.ApiOperation)({ summary: 'mind思维导图提示' }),
    (0, common_1.UseGuards)(jwtAuth_guard_1.JwtAuthGuard),
    (0, swagger_1.ApiBearerAuth)(),
    __param(0, (0, common_1.Body)()),
    __param(1, (0, common_1.Req)()),
    __param(2, (0, common_1.Res)()),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [chatProcess_dto_1.ChatProcessDto, Object, Object]),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "chatmind", null);
// POST chatgpt/chat-draw — JWT-guarded drawing.
__decorate([
    (0, common_1.Post)('chat-draw'),
    (0, swagger_1.ApiOperation)({ summary: 'gpt绘画' }),
    (0, common_1.UseGuards)(jwtAuth_guard_1.JwtAuthGuard),
    (0, swagger_1.ApiBearerAuth)(),
    __param(0, (0, common_1.Body)()),
    __param(1, (0, common_1.Req)()),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [chatDraw_dto_1.ChatDrawDto, Object]),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "draw", null);
// Admin CRUD routes: mutations require SuperAuthGuard, queries AdminAuthGuard;
// the two *Frontend/*List queries are intentionally unguarded (public reads).
__decorate([
    (0, common_1.Post)('setChatBoxType'),
    (0, swagger_1.ApiOperation)({ summary: '添加修改分类类型' }),
    (0, common_1.UseGuards)(superAuth_guard_1.SuperAuthGuard),
    (0, swagger_1.ApiBearerAuth)(),
    __param(0, (0, common_1.Req)()),
    __param(1, (0, common_1.Body)()),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [Object, Object]),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "setChatBoxType", null);
__decorate([
    (0, common_1.Post)('delChatBoxType'),
    (0, swagger_1.ApiOperation)({ summary: '添加修改ChatBoxType' }),
    (0, common_1.UseGuards)(superAuth_guard_1.SuperAuthGuard),
    (0, swagger_1.ApiBearerAuth)(),
    __param(0, (0, common_1.Req)()),
    __param(1, (0, common_1.Body)()),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [Object, Object]),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "delChatBoxType", null);
__decorate([
    (0, common_1.Get)('queryChatBoxTypes'),
    (0, swagger_1.ApiOperation)({ summary: '查询ChatBoxType' }),
    (0, common_1.UseGuards)(adminAuth_guard_1.AdminAuthGuard),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", []),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "queryChatBoxType", null);
__decorate([
    (0, common_1.Post)('setChatBox'),
    (0, swagger_1.ApiOperation)({ summary: '添加修改ChatBox' }),
    (0, common_1.UseGuards)(superAuth_guard_1.SuperAuthGuard),
    (0, swagger_1.ApiBearerAuth)(),
    __param(0, (0, common_1.Req)()),
    __param(1, (0, common_1.Body)()),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [Object, Object]),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "setChatBox", null);
__decorate([
    (0, common_1.Post)('delChatBox'),
    (0, swagger_1.ApiOperation)({ summary: '添加修改ChatBox提示词' }),
    (0, common_1.UseGuards)(superAuth_guard_1.SuperAuthGuard),
    (0, swagger_1.ApiBearerAuth)(),
    __param(0, (0, common_1.Req)()),
    __param(1, (0, common_1.Body)()),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [Object, Object]),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "delChatBox", null);
__decorate([
    (0, common_1.Get)('queryChatBoxs'),
    (0, swagger_1.ApiOperation)({ summary: '查询ChatBox列表' }),
    (0, common_1.UseGuards)(adminAuth_guard_1.AdminAuthGuard),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", []),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "queryChatBox", null);
__decorate([
    (0, common_1.Get)('queryChatBoxFrontend'),
    (0, swagger_1.ApiOperation)({ summary: '查询ChatBox分类加详细' }),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", []),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "queryChatBoxFrontend", null);
__decorate([
    (0, common_1.Post)('setChatPreType'),
    (0, swagger_1.ApiOperation)({ summary: '添加修改预设分类类型' }),
    (0, common_1.UseGuards)(superAuth_guard_1.SuperAuthGuard),
    (0, swagger_1.ApiBearerAuth)(),
    __param(0, (0, common_1.Req)()),
    __param(1, (0, common_1.Body)()),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [Object, Object]),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "setChatPreType", null);
// NOTE(review): route casing 'delChatPretype' / 'queryChatPretypes' is
// inconsistent with 'setChatPreType' — kept as-is; clients depend on it.
__decorate([
    (0, common_1.Post)('delChatPretype'),
    (0, swagger_1.ApiOperation)({ summary: '添加修改ChatPretype' }),
    (0, common_1.UseGuards)(superAuth_guard_1.SuperAuthGuard),
    (0, swagger_1.ApiBearerAuth)(),
    __param(0, (0, common_1.Req)()),
    __param(1, (0, common_1.Body)()),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [Object, Object]),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "delChatPreType", null);
__decorate([
    (0, common_1.Get)('queryChatPretypes'),
    (0, swagger_1.ApiOperation)({ summary: '查询ChatPretype' }),
    (0, common_1.UseGuards)(adminAuth_guard_1.AdminAuthGuard),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", []),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "queryChatPreType", null);
__decorate([
    (0, common_1.Post)('setChatPre'),
    (0, swagger_1.ApiOperation)({ summary: '添加修改ChatPre' }),
    (0, common_1.UseGuards)(superAuth_guard_1.SuperAuthGuard),
    (0, swagger_1.ApiBearerAuth)(),
    __param(0, (0, common_1.Req)()),
    __param(1, (0, common_1.Body)()),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [Object, Object]),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "setChatPre", null);
__decorate([
    (0, common_1.Post)('delChatPre'),
    (0, swagger_1.ApiOperation)({ summary: '添加修改ChatPre提示词' }),
    (0, common_1.UseGuards)(superAuth_guard_1.SuperAuthGuard),
    (0, swagger_1.ApiBearerAuth)(),
    __param(0, (0, common_1.Req)()),
    __param(1, (0, common_1.Body)()),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", [Object, Object]),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "delChatPre", null);
__decorate([
    (0, common_1.Get)('queryChatPres'),
    (0, swagger_1.ApiOperation)({ summary: '查询ChatPre列表' }),
    (0, common_1.UseGuards)(adminAuth_guard_1.AdminAuthGuard),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", []),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "queryChatPre", null);
__decorate([
    (0, common_1.Get)('queryChatPreList'),
    (0, swagger_1.ApiOperation)({ summary: '查询ChatPre列表' }),
    __metadata("design:type", Function),
    __metadata("design:paramtypes", []),
    __metadata("design:returntype", Promise)
], ChatgptController.prototype, "queryChatPreList", null);
ChatgptController = __decorate([
    (0, swagger_1.ApiTags)('chatgpt'),
    (0, common_1.Controller)('chatgpt'),
    __metadata("design:paramtypes", [chatgpt_service_1.ChatgptService, globalConfig_service_1.GlobalConfigService])
], ChatgptController);
exports.ChatgptController = ChatgptController;
|
||||
72
dist/modules/chatgpt/chatgpt.module.js
vendored
Normal file
72
dist/modules/chatgpt/chatgpt.module.js
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
"use strict";
// TypeScript-emitted helper: applies a list of decorators to a class or member.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChatgptModule = void 0;
const common_1 = require("@nestjs/common");
const chatgpt_controller_1 = require("./chatgpt.controller");
const chatgpt_service_1 = require("./chatgpt.service");
const userBalance_service_1 = require("../userBalance/userBalance.service");
const typeorm_1 = require("@nestjs/typeorm");
const balance_entity_1 = require("../userBalance/balance.entity");
const user_service_1 = require("../user/user.service");
const user_entity_1 = require("../user/user.entity");
const verification_service_1 = require("../verification/verification.service");
const verifycation_entity_1 = require("../verification/verifycation.entity");
const chatLog_service_1 = require("../chatLog/chatLog.service");
const chatLog_entity_1 = require("../chatLog/chatLog.entity");
const accountLog_entity_1 = require("../userBalance/accountLog.entity");
const config_entity_1 = require("../globalConfig/config.entity");
const gptkeys_entity_1 = require("./gptkeys.entity");
const whiteList_entity_1 = require("./whiteList.entity");
const cramiPackage_entity_1 = require("../crami/cramiPackage.entity");
const chatGroup_entity_1 = require("../chatGroup/chatGroup.entity");
const app_entity_1 = require("../app/app.entity");
const userBalance_entity_1 = require("../userBalance/userBalance.entity");
const salesUsers_entity_1 = require("../sales/salesUsers.entity");
const redisCache_service_1 = require("../redisCache/redisCache.service");
const fingerprint_entity_1 = require("../userBalance/fingerprint.entity");
const midjourney_entity_1 = require("../midjourney/midjourney.entity");
const chatBoxType_entity_1 = require("./chatBoxType.entity");
const chatBox_entity_1 = require("./chatBox.entity");
const chatPreType_entity_1 = require("./chatPreType.entity");
const chatPre_entity_1 = require("./chatPre.entity");
// Global Nest module wiring the chatgpt feature: registers entity repositories,
// the controller, and the service providers it depends on.
let ChatgptModule = class ChatgptModule {
};
ChatgptModule = __decorate([
    (0, common_1.Global)(),
    (0, common_1.Module)({
        imports: [
            typeorm_1.TypeOrmModule.forFeature([
                balance_entity_1.BalanceEntity,
                user_entity_1.UserEntity,
                verifycation_entity_1.VerifycationEntity,
                chatLog_entity_1.ChatLogEntity,
                accountLog_entity_1.AccountLogEntity,
                config_entity_1.ConfigEntity,
                gptkeys_entity_1.GptKeysEntity,
                whiteList_entity_1.WhiteListEntity,
                // NOTE(review): UserEntity is already listed above — this duplicate
                // registration is harmless to TypeORM but redundant.
                user_entity_1.UserEntity,
                cramiPackage_entity_1.CramiPackageEntity,
                chatGroup_entity_1.ChatGroupEntity,
                app_entity_1.AppEntity,
                userBalance_entity_1.UserBalanceEntity,
                salesUsers_entity_1.SalesUsersEntity,
                fingerprint_entity_1.FingerprintLogEntity,
                midjourney_entity_1.MidjourneyEntity,
                chatBoxType_entity_1.ChatBoxTypeEntity,
                chatBox_entity_1.ChatBoxEntity,
                chatPreType_entity_1.ChatPreTypeEntity,
                chatPre_entity_1.ChatPreEntity
            ]),
        ],
        controllers: [chatgpt_controller_1.ChatgptController],
        providers: [chatgpt_service_1.ChatgptService, userBalance_service_1.UserBalanceService, user_service_1.UserService, verification_service_1.VerificationService, chatLog_service_1.ChatLogService, redisCache_service_1.RedisCacheService],
        exports: [chatgpt_service_1.ChatgptService]
    })
], ChatgptModule);
exports.ChatgptModule = ChatgptModule;
|
||||
850
dist/modules/chatgpt/chatgpt.service.js
vendored
Normal file
850
dist/modules/chatgpt/chatgpt.service.js
vendored
Normal file
@@ -0,0 +1,850 @@
|
||||
"use strict";
|
||||
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
||||
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
||||
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
||||
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
||||
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
||||
};
|
||||
var __metadata = (this && this.__metadata) || function (k, v) {
|
||||
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
|
||||
};
|
||||
var __param = (this && this.__param) || function (paramIndex, decorator) {
|
||||
return function (target, key) { decorator(target, key, paramIndex); }
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ChatgptService = void 0;
|
||||
const upload_service_1 = require("./../upload/upload.service");
|
||||
const user_service_1 = require("./../user/user.service");
|
||||
const nestjs_config_1 = require("nestjs-config");
|
||||
const common_1 = require("@nestjs/common");
|
||||
const errorMessage_constant_1 = require("../../common/constants/errorMessage.constant");
|
||||
const utils_1 = require("../../common/utils");
|
||||
const axios_1 = require("axios");
|
||||
const userBalance_service_1 = require("../userBalance/userBalance.service");
|
||||
const balance_constant_1 = require("../../common/constants/balance.constant");
|
||||
const chatLog_service_1 = require("../chatLog/chatLog.service");
|
||||
const uuid = require("uuid");
|
||||
const config_entity_1 = require("../globalConfig/config.entity");
|
||||
const typeorm_1 = require("typeorm");
|
||||
const typeorm_2 = require("@nestjs/typeorm");
|
||||
const badwords_service_1 = require("../badwords/badwords.service");
|
||||
const autoreply_service_1 = require("../autoreply/autoreply.service");
|
||||
const gptkeys_entity_1 = require("./gptkeys.entity");
|
||||
const globalConfig_service_1 = require("../globalConfig/globalConfig.service");
|
||||
const fanyi_service_1 = require("../fanyi/fanyi.service");
|
||||
const app_entity_1 = require("../app/app.entity");
|
||||
const chatGroup_service_1 = require("../chatGroup/chatGroup.service");
|
||||
const models_service_1 = require("../models/models.service");
|
||||
const baidu_1 = require("./baidu");
|
||||
const helper_1 = require("./helper");
|
||||
const store_1 = require("./store");
|
||||
const zhipu_1 = require("./zhipu");
|
||||
const openai_1 = require("./openai");
|
||||
const chatBoxType_entity_1 = require("./chatBoxType.entity");
|
||||
const chatBox_entity_1 = require("./chatBox.entity");
|
||||
const chatPre_entity_1 = require("./chatPre.entity");
|
||||
const chatPreType_entity_1 = require("./chatPreType.entity");
|
||||
let ChatgptService = class ChatgptService {
|
||||
// Dependency-injection constructor: stores the injected repositories and
// services on the instance and initializes mutable state.
//   nineStore     - Redis-backed message store, created later in onModuleInit.
//   whiteListUser - in-memory whitelist cache (populated elsewhere).
//   keyPool       - API keys bucketed by model generation (filled by getAllKeyList).
constructor(gptKeysEntity, configEntity, chatBoxTypeEntity, chatBoxEntity, appEntity, chatPreTypeEntity, chatPreEntity, configService, userBalanceService, chatLogService, userService, uploadService, badwordsService, autoreplyService, globalConfigService, fanyiService, chatGroupService, modelsService) {
    this.gptKeysEntity = gptKeysEntity;
    this.configEntity = configEntity;
    this.chatBoxTypeEntity = chatBoxTypeEntity;
    this.chatBoxEntity = chatBoxEntity;
    this.appEntity = appEntity;
    this.chatPreTypeEntity = chatPreTypeEntity;
    this.chatPreEntity = chatPreEntity;
    this.configService = configService;
    this.userBalanceService = userBalanceService;
    this.chatLogService = chatLogService;
    this.userService = userService;
    this.uploadService = uploadService;
    this.badwordsService = badwordsService;
    this.autoreplyService = autoreplyService;
    this.globalConfigService = globalConfigService;
    this.fanyiService = fanyiService;
    this.chatGroupService = chatGroupService;
    this.modelsService = modelsService;
    this.nineStore = null;
    this.whiteListUser = [];
    this.keyPool = {
        list3: [],
        list4: [],
    };
}
|
||||
// Nest lifecycle hook: dynamically imports ESM-only packages (chatgpt-nine-ai,
// @keyv/redis, keyv) from this CommonJS bundle, then wires a Redis-backed
// Keyv store used as the chat message history (this.nineStore).
async onModuleInit() {
    let chatgpt = await (0, utils_1.importDynamic)('chatgpt-nine-ai');
    let KeyvRedis = await (0, utils_1.importDynamic)('@keyv/redis');
    let Keyv = await (0, utils_1.importDynamic)('keyv');
    // Unwrap interop default exports when present.
    chatgpt = (chatgpt === null || chatgpt === void 0 ? void 0 : chatgpt.default) ? chatgpt.default : chatgpt;
    KeyvRedis = (KeyvRedis === null || KeyvRedis === void 0 ? void 0 : KeyvRedis.default) ? KeyvRedis.default : KeyvRedis;
    Keyv = (Keyv === null || Keyv === void 0 ? void 0 : Keyv.default) ? Keyv.default : Keyv;
    const { ChatGPTAPI, ChatGPTError, ChatGPTUnofficialProxyAPI } = chatgpt;
    // Connection settings come from the environment; user/password may be empty.
    const port = +process.env.REDIS_PORT;
    const host = process.env.REDIS_HOST;
    const password = process.env.REDIS_PASSWORD;
    const username = process.env.REDIS_USER;
    const redisUrl = `redis://${username || ''}:${password || ''}@${host}:${port}`;
    const store = new KeyvRedis(redisUrl);
    const messageStore = new Keyv({ store, namespace: 'nineai-chatlog' });
    this.nineStore = new store_1.NineStore({ store: messageStore, namespace: 'chat' });
}
|
||||
async getRequestParams(inputOpt, systemMessage, currentRequestModelKey, modelInfo = null) {
|
||||
var _a;
|
||||
if (!modelInfo) {
|
||||
modelInfo = (_a = (await this.modelsService.getBaseConfig())) === null || _a === void 0 ? void 0 : _a.modelInfo;
|
||||
}
|
||||
const { timeout = 60 } = currentRequestModelKey;
|
||||
const { topN: temperature, model } = modelInfo;
|
||||
const { parentMessageId = 0 } = inputOpt;
|
||||
const globalTimeoutMs = await this.globalConfigService.getConfigs(['openaiTimeoutMs']);
|
||||
const timeoutMs = timeout * 1000 || globalTimeoutMs || 100 * 1000;
|
||||
const options = {
|
||||
parentMessageId,
|
||||
timeoutMs: +timeoutMs,
|
||||
completionParams: {
|
||||
model,
|
||||
temperature: temperature,
|
||||
},
|
||||
};
|
||||
systemMessage && (options.systemMessage = systemMessage);
|
||||
return options;
|
||||
}
|
||||
// One-shot, non-streaming completion using a random draw key and the global
// system prompt; no history (empty parentMessageId) and no billing.
// Returns the response text, or undefined on failure.
async chatSyncFree(prompt) {
    const currentRequestModelKey = await this.modelsService.getRandomDrawKey();
    const systemMessage = await this.globalConfigService.getConfigs(['systemPreMessage']);
    const { maxModelTokens = 8000, maxResponseTokens = 4096, key, model } = currentRequestModelKey;
    const proxyUrl = await this.getModelProxyUrl(currentRequestModelKey);
    const { context: messagesHistory } = await this.nineStore.buildMessageFromParentMessageId(prompt, { parentMessageId: '', systemMessage });
    try {
        const response = await (0, openai_1.sendMessageFromOpenAi)(messagesHistory, {
            apiKey: (0, utils_1.removeSpecialCharacters)(key),
            model,
            proxyUrl: proxyUrl,
            onProgress: null,
        });
        return response === null || response === void 0 ? void 0 : response.text;
    }
    catch (error) {
        // NOTE(review): the error is logged and swallowed, so callers receive
        // undefined on failure — confirm callers handle that.
        console.log('error: ', error);
    }
}
|
||||
// Core chat entry point. Validates the user, resolves the model key for the
// conversation group, streams the reply to `res` (or returns the text when
// `res` is absent), logs both sides of the exchange, and deducts balance.
// keyType routes the request: 1 = OpenAI, 2 = Baidu, 3 = Zhipu.
async chatProcess(body, req, res) {
    var _a, _b, _c;
    const abortController = req.abortController;
    const { options = {}, appId, cusromPrompt, systemMessage = '' } = body;
    let setSystemMessage = systemMessage;
    const { parentMessageId } = options;
    const { prompt } = body;
    const { groupId, usingNetwork } = options;
    // Per-group model config wins; otherwise fall back to the global base config.
    const groupInfo = await this.chatGroupService.getGroupInfoFromId(groupId);
    const groupConfig = (groupInfo === null || groupInfo === void 0 ? void 0 : groupInfo.config) ? JSON.parse(groupInfo.config) : await this.modelsService.getBaseConfig();
    const { keyType, model, topN: temperature, systemMessage: customSystemMessage, rounds } = groupConfig.modelInfo;
    let currentRequestModelKey = null;
    if (!cusromPrompt) {
        currentRequestModelKey = await this.modelsService.getCurrentModelKeyInfo(model);
    }
    else {
        currentRequestModelKey = await this.modelsService.getRandomDrawKey();
    }
    if (!currentRequestModelKey) {
        throw new common_1.HttpException('当前流程所需要的模型已被管理员下架、请联系管理员上架专属模型!', common_1.HttpStatus.BAD_REQUEST);
    }
    const { deduct, deductType, key: modelKey, secret, modelName, id: keyId, accessToken } = currentRequestModelKey;
    // Gatekeeping: account status, balance, bad words, canned auto-replies.
    await this.userService.checkUserStatus(req.user);
    await this.userBalanceService.validateBalance(req, deductType === 1 ? 'model3' : 'model4', deduct);
    res && res.setHeader('Content-type', 'application/octet-stream; charset=utf-8');
    await this.badwordsService.checkBadWords(prompt, req.user.id);
    const autoReplyRes = await this.autoreplyService.checkAutoReply(prompt);
    if (autoReplyRes && res) {
        const msg = { message: autoReplyRes, code: 500 };
        res.write(JSON.stringify(msg));
        return res.end();
    }
    // Resolve the effective system prompt: app preset > custom prompt >
    // group-level message > global default (with current date appended).
    if (appId) {
        const appInfo = await this.appEntity.findOne({ where: { id: appId, status: (0, typeorm_1.In)([1, 3, 4, 5]) } });
        if (!appInfo) {
            throw new common_1.HttpException('你当前使用的应用已被下架、请删除当前对话开启新的对话吧!', common_1.HttpStatus.BAD_REQUEST);
        }
        appInfo.preset && (setSystemMessage = appInfo.preset);
    }
    else if (cusromPrompt) {
        setSystemMessage = systemMessage;
    }
    else if (customSystemMessage) {
        setSystemMessage = customSystemMessage;
    }
    else {
        const currentDate = new Date().toISOString().split('T')[0];
        const systemPreMessage = await this.globalConfigService.getConfigs(['systemPreMessage']);
        setSystemMessage = systemPreMessage + `\n Current date: ${currentDate}`;
    }
    let netWorkPrompt = '';
    if (usingNetwork) {
        // Augment the prompt with web-search context before sending it upstream.
        netWorkPrompt = await (0, utils_1.compileNetwork)(prompt);
        const currentDate = new Date().toISOString().split('T')[0];
        const systemPreMessage = await this.globalConfigService.getConfigs(['systemPreMessage']);
        setSystemMessage = systemPreMessage + `\n Current date: ${currentDate}`;
    }
    const mergedOptions = await this.getRequestParams(options, setSystemMessage, currentRequestModelKey, groupConfig.modelInfo);
    const { maxModelTokens = 8000, maxResponseTokens = 4096, key } = currentRequestModelKey;
    res && res.status(200);
    let response = null;
    let othersInfo = null;
    try {
        if (res) {
            let lastChat = null;
            let isSuccess = false;
            // If the client disconnects mid-stream, abort the upstream call and
            // still persist logs / deduct balance for the partial answer.
            res.on('close', async () => {
                if (isSuccess)
                    return;
                abortController.abort();
                const prompt_tokens = (await (0, openai_1.getTokenCount)(prompt)) || 0;
                const completion_tokens = (await (0, openai_1.getTokenCount)(lastChat === null || lastChat === void 0 ? void 0 : lastChat.text)) || 0;
                const total_tokens = prompt_tokens + completion_tokens;
                const curIp = (0, utils_1.getClientIp)(req);
                await this.chatLogService.saveChatLog({
                    appId,
                    curIp,
                    userId: req.user.id,
                    type: balance_constant_1.DeductionKey.CHAT_TYPE,
                    prompt,
                    answer: '',
                    promptTokens: prompt_tokens,
                    completionTokens: 0,
                    totalTokens: prompt_tokens,
                    model: model,
                    role: 'user',
                    groupId,
                    requestOptions: JSON.stringify({
                        options: null,
                        prompt,
                    }),
                });
                await this.chatLogService.saveChatLog({
                    appId,
                    curIp,
                    userId: req.user.id,
                    type: balance_constant_1.DeductionKey.CHAT_TYPE,
                    prompt: prompt,
                    answer: lastChat === null || lastChat === void 0 ? void 0 : lastChat.text,
                    promptTokens: prompt_tokens,
                    completionTokens: completion_tokens,
                    totalTokens: total_tokens,
                    model: model,
                    role: 'assistant',
                    groupId,
                    requestOptions: JSON.stringify({
                        options: {
                            model: model,
                            temperature,
                        },
                        prompt,
                    }),
                    conversationOptions: JSON.stringify({
                        conversationId: lastChat === null || lastChat === void 0 ? void 0 : lastChat.conversationId,
                        model: model,
                        parentMessageId: lastChat === null || lastChat === void 0 ? void 0 : lastChat.id,
                        temperature,
                    }),
                });
                await this.userBalanceService.deductFromBalance(req.user.id, `model${deductType === 1 ? 3 : 4}`, deduct, total_tokens);
            });
            // keyType 1: OpenAI-compatible, streamed with token limits applied.
            if (Number(keyType) === 1) {
                const { key, maxToken, maxTokenRes, proxyResUrl } = await this.formatModelToken(currentRequestModelKey);
                const { parentMessageId, completionParams, systemMessage } = mergedOptions;
                const { model, temperature } = completionParams;
                const { context: messagesHistory } = await this.nineStore.buildMessageFromParentMessageId(usingNetwork ? netWorkPrompt : prompt, {
                    parentMessageId,
                    systemMessage,
                    maxModelToken: maxToken,
                    maxResponseTokens: maxTokenRes,
                    maxRounds: (0, helper_1.addOneIfOdd)(rounds),
                });
                let firstChunk = true;
                response = await (0, openai_1.sendMessageFromOpenAi)(messagesHistory, {
                    maxToken,
                    maxTokenRes,
                    apiKey: modelKey,
                    model,
                    temperature,
                    proxyUrl: proxyResUrl,
                    onProgress: (chat) => {
                        // Chunks after the first are newline-delimited JSON.
                        res.write(firstChunk ? JSON.stringify(chat) : `\n${JSON.stringify(chat)}`);
                        lastChat = chat;
                        firstChunk = false;
                    },
                });
                isSuccess = true;
            }
            // keyType 2: Baidu Wenxin, streamed.
            if (Number(keyType) === 2) {
                let firstChunk = true;
                const { context: messagesHistory } = await this.nineStore.buildMessageFromParentMessageId(usingNetwork ? netWorkPrompt : prompt, {
                    parentMessageId,
                    maxRounds: (0, helper_1.addOneIfOdd)(rounds),
                });
                response = await (0, baidu_1.sendMessageFromBaidu)(usingNetwork ? netWorkPrompt : messagesHistory, {
                    temperature,
                    accessToken,
                    model,
                    onProgress: (data) => {
                        res.write(firstChunk ? JSON.stringify(data) : `\n${JSON.stringify(data)}`);
                        firstChunk = false;
                        lastChat = data;
                    },
                });
                isSuccess = true;
            }
            // keyType 3: Zhipu, streamed.
            if (Number(keyType) === 3) {
                let firstChunk = true;
                const { context: messagesHistory } = await this.nineStore.buildMessageFromParentMessageId(usingNetwork ? netWorkPrompt : prompt, {
                    parentMessageId,
                    maxRounds: (0, helper_1.addOneIfOdd)(rounds),
                });
                response = await (0, zhipu_1.sendMessageFromZhipu)(usingNetwork ? netWorkPrompt : messagesHistory, {
                    temperature,
                    key,
                    model,
                    onProgress: (data) => {
                        res.write(firstChunk ? JSON.stringify(data) : `\n${JSON.stringify(data)}`);
                        firstChunk = false;
                        lastChat = data;
                    },
                });
                isSuccess = true;
            }
            // Persist both sides of the exchange into the message store so the
            // next turn can rebuild history from parentMessageId.
            const userMessageData = {
                id: this.nineStore.getUuid(),
                text: prompt,
                role: 'user',
                name: undefined,
                usage: null,
                parentMessageId: parentMessageId,
                conversationId: response === null || response === void 0 ? void 0 : response.conversationId,
            };
            othersInfo = { model, parentMessageId };
            await this.nineStore.setData(userMessageData);
            const assistantMessageData = {
                id: response.id,
                text: response.text,
                role: 'assistant',
                name: undefined,
                usage: response.usage,
                parentMessageId: userMessageData.id,
                conversationId: response === null || response === void 0 ? void 0 : response.conversationId,
            };
            await this.nineStore.setData(assistantMessageData);
            othersInfo = { model, parentMessageId: userMessageData.id };
        }
        else {
            // No response stream: synchronous (non-SSE) OpenAI call.
            const { key, maxToken, maxTokenRes, proxyResUrl } = await this.formatModelToken(currentRequestModelKey);
            const { parentMessageId, completionParams, systemMessage } = mergedOptions;
            const { model, temperature } = completionParams;
            const { context: messagesHistory } = await this.nineStore.buildMessageFromParentMessageId(usingNetwork ? netWorkPrompt : prompt, {
                parentMessageId,
                systemMessage,
                maxRounds: (0, helper_1.addOneIfOdd)(rounds),
            });
            response = await (0, openai_1.sendMessageFromOpenAi)(messagesHistory, {
                apiKey: modelKey,
                model,
                temperature,
                proxyUrl: proxyResUrl,
                onProgress: null,
            });
        }
        // Normalize vendor-specific responses, bill the user, and log usage.
        const formatResponse = await (0, helper_1.unifiedFormattingResponse)(keyType, response, othersInfo);
        const { prompt_tokens = 0, completion_tokens = 0, total_tokens = 0 } = formatResponse.usage;
        await this.userBalanceService.deductFromBalance(req.user.id, `model${deductType === 1 ? 3 : 4}`, deduct, total_tokens);
        await this.modelsService.saveUseLog(keyId, total_tokens);
        const curIp = (0, utils_1.getClientIp)(req);
        await this.chatLogService.saveChatLog({
            appId,
            curIp,
            userId: req.user.id,
            type: balance_constant_1.DeductionKey.CHAT_TYPE,
            prompt,
            answer: '',
            promptTokens: prompt_tokens,
            completionTokens: 0,
            totalTokens: total_tokens,
            model: formatResponse.model,
            role: 'user',
            groupId,
            requestOptions: JSON.stringify({
                options: null,
                prompt,
            }),
        });
        await this.chatLogService.saveChatLog({
            appId,
            curIp,
            userId: req.user.id,
            type: balance_constant_1.DeductionKey.CHAT_TYPE,
            prompt: prompt,
            answer: formatResponse === null || formatResponse === void 0 ? void 0 : formatResponse.text,
            promptTokens: prompt_tokens,
            completionTokens: completion_tokens,
            totalTokens: total_tokens,
            model: model,
            role: 'assistant',
            groupId,
            requestOptions: JSON.stringify({
                options: {
                    model: model,
                    temperature,
                },
                prompt,
            }),
            conversationOptions: JSON.stringify({
                conversationId: response.conversationId,
                model: model,
                parentMessageId: response.id,
                temperature,
            }),
        });
        common_1.Logger.debug(`本次调用: ${req.user.id} model: ${model} key -> ${key}, 模型名称: ${modelName}, 最大回复token: ${maxResponseTokens}`, 'ChatgptService');
        const userBalance = await this.userBalanceService.queryUserBalance(req.user.id);
        response.userBanance = Object.assign({}, userBalance);
        response.result && (response.result = '');
        response.is_end = true;
        if (res) {
            return res.write(`\n${JSON.stringify(response)}`);
        }
        else {
            return response.text;
        }
    }
    catch (error) {
        // Map upstream failures to user-facing messages; some errors also
        // freeze the offending key so it is not drawn again.
        console.log('chat-error <----------------------------------------->', modelKey, error);
        const code = (error === null || error === void 0 ? void 0 : error.statusCode) || 400;
        const status = ((_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.status) || (error === null || error === void 0 ? void 0 : error.statusCode) || 400;
        console.log('chat-error-detail <----------------------------------------->', 'code: ', code, 'message', error === null || error === void 0 ? void 0 : error.message, 'statusText:', (_b = error === null || error === void 0 ? void 0 : error.response) === null || _b === void 0 ? void 0 : _b.statusText, 'status', (_c = error === null || error === void 0 ? void 0 : error.response) === null || _c === void 0 ? void 0 : _c.status);
        if (error.status && error.status === 402) {
            const errMsg = { message: `Catch Error ${error.message}`, code: 402 };
            if (res) {
                return res.write(JSON.stringify(errMsg));
            }
            else {
                throw new common_1.HttpException(error.message, common_1.HttpStatus.PAYMENT_REQUIRED);
            }
        }
        if (!status) {
            if (res) {
                return res.write(JSON.stringify({ message: error.message, code: 500 }));
            }
            else {
                throw new common_1.HttpException(error.message, common_1.HttpStatus.BAD_REQUEST);
            }
        }
        let message = errorMessage_constant_1.OpenAiErrorCodeMessage[status] ? errorMessage_constant_1.OpenAiErrorCodeMessage[status] : '服务异常、请重新试试吧!!!';
        if ((error === null || error === void 0 ? void 0 : error.message.includes('The OpenAI account associated with this API key has been deactivated.')) && Number(keyType) === 1) {
            await this.modelsService.lockKey(keyId, '当前模型key已被封禁、已冻结当前调用Key、尝试重新对话试试吧!', -1);
            message = '当前模型key已被封禁';
        }
        if ((error === null || error === void 0 ? void 0 : error.statusCode) === 429 && error.message.includes('billing') && Number(keyType) === 1) {
            await this.modelsService.lockKey(keyId, '当前模型key余额已耗尽、已冻结当前调用Key、尝试重新对话试试吧!', -3);
            message = '当前模型key余额已耗尽';
        }
        if ((error === null || error === void 0 ? void 0 : error.statusCode) === 429 && (error === null || error === void 0 ? void 0 : error.statusText) === 'Too Many Requests') {
            message = '当前模型调用过于频繁、请重新试试吧!';
        }
        if ((error === null || error === void 0 ? void 0 : error.statusCode) === 401 && error.message.includes('Incorrect API key provided') && Number(keyType) === 1) {
            await this.modelsService.lockKey(keyId, '提供了错误的模型秘钥', -2);
            message = '提供了错误的模型秘钥、已冻结当前调用Key、请重新尝试对话!';
        }
        if ((error === null || error === void 0 ? void 0 : error.statusCode) === 404 && error.message.includes('This is not a chat model and thus not supported') && Number(keyType) === 1) {
            await this.modelsService.lockKey(keyId, '当前模型不是聊天模型', -4);
            message = '当前模型不是聊天模型、已冻结当前调用Key、请重新尝试对话!';
        }
        if (code === 400) {
            console.log('400 error', error, error.message);
        }
        const errMsg = { message: message || 'Please check the back-end console', code: code === 401 ? 400 : code || 500 };
        if (res) {
            return res.write(JSON.stringify(errMsg));
        }
        else {
            throw new common_1.HttpException(errMsg.message, common_1.HttpStatus.BAD_REQUEST);
        }
    }
    finally {
        // Always terminate the HTTP stream, success or failure.
        res && res.end();
    }
}
|
||||
// DALL-E 3 image generation: validates the user and balance (hd quality
// costs 4, otherwise 2), calls /v1/images/generations with a random draw
// key, uploads each returned base64 image, logs the results, and returns
// the uploaded URLs.
async draw(body, req) {
    var _a, _b, _c, _d;
    await this.badwordsService.checkBadWords(body.prompt, req.user.id);
    await this.userService.checkUserStatus(req.user);
    const money = (body === null || body === void 0 ? void 0 : body.quality) === 'hd' ? 4 : 2;
    await this.userBalanceService.validateBalance(req, 'mjDraw', money);
    let images = [];
    const detailKeyInfo = await this.modelsService.getRandomDrawKey();
    const keyId = detailKeyInfo === null || detailKeyInfo === void 0 ? void 0 : detailKeyInfo.id;
    const { key, proxyResUrl } = await this.formatModelToken(detailKeyInfo);
    // NOTE(review): 'paompt' is a typo in the log text; left untouched here
    // since runtime strings must not change in a doc-only edit.
    common_1.Logger.log(`draw paompt info <==**==> ${body.prompt}, key ===> ${key}`, 'DrawService');
    try {
        const api = `${proxyResUrl}/v1/images/generations`;
        const params = Object.assign(Object.assign({}, body), { model: 'dall-e-3' });
        console.log('dall-e draw params: ', params);
        // b64_json so images can be uploaded to our own storage below.
        const res = await axios_1.default.post(api, Object.assign(Object.assign({}, params), { response_format: 'b64_json' }), { headers: { Authorization: `Bearer ${key}` } });
        images = res.data.data;
        const task = [];
        for (const item of images) {
            const filename = uuid.v4().slice(0, 10) + '.png';
            const buffer = Buffer.from(item.b64_json, 'base64');
            task.push(this.uploadService.uploadFile({ filename, buffer }));
        }
        const urls = await Promise.all(task);
        await this.userBalanceService.deductFromBalance(req.user.id, 'mjDraw', (params === null || params === void 0 ? void 0 : params.quality) === 'standard' ? 2 : 4, money);
        const curIp = (0, utils_1.getClientIp)(req);
        const taskLog = [];
        const cosType = await this.uploadService.getUploadType();
        // size is e.g. '1024x1024'; split into width/height for the log record.
        const [width, height] = body.size.split('x');
        urls.forEach((url) => {
            taskLog.push(this.chatLogService.saveChatLog({
                curIp,
                userId: req.user.id,
                type: balance_constant_1.DeductionKey.PAINT_TYPE,
                prompt: body.prompt,
                answer: url,
                fileInfo: JSON.stringify({
                    cosType,
                    width,
                    height,
                    cosUrl: url,
                }),
                promptTokens: 0,
                completionTokens: 0,
                totalTokens: 0,
                model: 'dall-e-3',
            }));
        });
        await Promise.all(taskLog);
        return urls;
    }
    catch (error) {
        const status = ((_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.status) || 500;
        console.log('openai-draw error: ', JSON.stringify(error), key, status);
        const message = (_d = (_c = (_b = error === null || error === void 0 ? void 0 : error.response) === null || _b === void 0 ? void 0 : _b.data) === null || _c === void 0 ? void 0 : _c.error) === null || _d === void 0 ? void 0 : _d.message;
        if (status === 429) {
            throw new common_1.HttpException('当前请求已过载、请稍等会儿再试试吧!', common_1.HttpStatus.BAD_REQUEST);
        }
        // NOTE(review): `message` can be undefined here, making
        // message.includes(...) throw on a 400 without a body — confirm.
        if (status === 400 && message.includes('This request has been blocked by our content filters')) {
            throw new common_1.HttpException('您的请求已被系统拒绝。您的提示可能存在一些非法的文本。', common_1.HttpStatus.BAD_REQUEST);
        }
        if (status === 400 && message.includes('Billing hard limit has been reached')) {
            await this.modelsService.lockKey(keyId, '当前模型key已被封禁、已冻结当前调用Key、尝试重新对话试试吧!', -1);
            throw new common_1.HttpException('当前Key余额已不足、请重新再试一次吧!', common_1.HttpStatus.BAD_REQUEST);
        }
        if (status === 500) {
            throw new common_1.HttpException('绘制图片失败,请检查你的提示词是否有非法描述!', common_1.HttpStatus.BAD_REQUEST);
        }
        if (status === 401) {
            throw new common_1.HttpException('绘制图片失败,此次绘画被拒绝了!', common_1.HttpStatus.BAD_REQUEST);
        }
        throw new common_1.HttpException('绘制图片失败,请稍后试试吧!', common_1.HttpStatus.BAD_REQUEST);
    }
}
|
||||
async getAllKeyList() {
|
||||
const list = await this.gptKeysEntity.find({
|
||||
where: { status: 1 },
|
||||
select: ['id', 'key', 'weight', 'model', 'maxModelTokens', 'maxResponseTokens', 'openaiProxyUrl', 'openaiTimeoutMs'],
|
||||
});
|
||||
const list3 = list.filter((t) => t.model.includes('gpt-3'));
|
||||
const list4 = list.filter((t) => t.model.includes('gpt-4'));
|
||||
this.keyPool = {
|
||||
list3,
|
||||
list4,
|
||||
};
|
||||
}
|
||||
async getModelProxyUrl(modelKey) {
|
||||
const openaiBaseUrl = await this.globalConfigService.getConfigs(['openaiBaseUrl']);
|
||||
return (modelKey === null || modelKey === void 0 ? void 0 : modelKey.proxyUrl) || openaiBaseUrl || 'https://api.openai.com';
|
||||
}
|
||||
async formatModelToken(detailKeyInfo) {
|
||||
const { openaiModel3MaxTokens = 0, openaiModel3MaxTokensRes = 0, openaiModel3MaxTokens16k = 0, openaiModel3MaxTokens16kRes = 0, openaiModel4MaxTokens = 0, openaiModel4MaxTokensRes = 0, openaiModel4MaxTokens32k = 0, openaiModel4MaxTokens32kRes = 0, openaiBaseUrl = '', } = await this.globalConfigService.getConfigs([
|
||||
'openaiModel3MaxTokens',
|
||||
'openaiModel3MaxTokensRes',
|
||||
'openaiModel3MaxTokens16k',
|
||||
'openaiModel3MaxTokens16kRes',
|
||||
'openaiModel4MaxTokens',
|
||||
'openaiModel4MaxTokensRes',
|
||||
'openaiModel4MaxTokens32k',
|
||||
'openaiModel4MaxTokens32kRes',
|
||||
'openaiBaseUrl',
|
||||
]);
|
||||
let maxToken = null;
|
||||
let maxTokenRes = null;
|
||||
let proxyResUrl = null;
|
||||
let { model, maxModelTokens = 0, maxResponseTokens = 0, proxyUrl = '', key } = detailKeyInfo;
|
||||
if (model.toLowerCase().includes('gpt-4')) {
|
||||
maxModelTokens >= 8192 && (maxModelTokens = 8192);
|
||||
maxTokenRes >= 4096 && (maxModelTokens = 4096);
|
||||
maxToken = maxModelTokens || openaiModel4MaxTokens || 8192;
|
||||
maxTokenRes = maxResponseTokens || openaiModel4MaxTokensRes || 4096;
|
||||
if (model.toLowerCase().includes('32k')) {
|
||||
maxModelTokens >= 32768 && (maxModelTokens = 32768);
|
||||
maxTokenRes >= 16384 && (maxModelTokens = 16384);
|
||||
maxToken = maxModelTokens || openaiModel4MaxTokens32k || 32768;
|
||||
maxTokenRes = maxResponseTokens || openaiModel4MaxTokens32kRes || 16384;
|
||||
}
|
||||
if (model.toLowerCase().includes('1106')) {
|
||||
maxModelTokens >= 16380 && (maxModelTokens = 16380);
|
||||
maxTokenRes >= 4096 && (maxModelTokens = 4096);
|
||||
maxToken = maxModelTokens || 16380;
|
||||
maxTokenRes = maxResponseTokens || 4096;
|
||||
}
|
||||
}
|
||||
if (model.toLowerCase().includes('gpt-3')) {
|
||||
maxModelTokens >= 4096 && (maxModelTokens = 4096);
|
||||
maxTokenRes >= 2000 && (maxModelTokens = 2000);
|
||||
maxToken = maxModelTokens || openaiModel3MaxTokens || 4096;
|
||||
maxTokenRes = maxResponseTokens || openaiModel3MaxTokensRes || 2000;
|
||||
if (model.toLowerCase().includes('16k')) {
|
||||
maxModelTokens >= 16384 && (maxModelTokens = 16384);
|
||||
maxTokenRes >= 8192 && (maxModelTokens = 8192);
|
||||
maxToken = maxModelTokens || openaiModel3MaxTokens16k || 16384;
|
||||
maxTokenRes = maxResponseTokens || openaiModel3MaxTokens16kRes || 8192;
|
||||
}
|
||||
if (model.toLowerCase().includes('1106')) {
|
||||
maxModelTokens >= 16384 && (maxModelTokens = 16384);
|
||||
maxTokenRes >= 4096 && (maxModelTokens = 4096);
|
||||
maxToken = maxModelTokens || 16384;
|
||||
maxTokenRes = maxResponseTokens || 4096;
|
||||
}
|
||||
}
|
||||
proxyResUrl = proxyUrl || openaiBaseUrl || 'https://api.openai.com';
|
||||
if (maxTokenRes >= maxToken) {
|
||||
maxTokenRes = Math.floor(maxToken / 2);
|
||||
}
|
||||
return {
|
||||
key,
|
||||
maxToken,
|
||||
maxTokenRes,
|
||||
proxyResUrl,
|
||||
};
|
||||
}
|
||||
async setChatBoxType(req, body) {
|
||||
try {
|
||||
const { name, icon, order, id, status } = body;
|
||||
if (id) {
|
||||
return await this.chatBoxTypeEntity.update({ id }, { name, icon, order, status });
|
||||
}
|
||||
else {
|
||||
return await this.chatBoxTypeEntity.save({ name, icon, order, status });
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
console.log('error: ', error);
|
||||
}
|
||||
}
|
||||
async delChatBoxType(req, body) {
|
||||
const { id } = body;
|
||||
if (!id) {
|
||||
throw new common_1.HttpException('非法操作!', common_1.HttpStatus.BAD_REQUEST);
|
||||
}
|
||||
const count = await this.chatBoxEntity.count({ where: { typeId: id } });
|
||||
if (count) {
|
||||
throw new common_1.HttpException('当前分类下有未处理数据不可移除!', common_1.HttpStatus.BAD_REQUEST);
|
||||
}
|
||||
return await this.chatBoxTypeEntity.delete({ id });
|
||||
}
|
||||
async queryChatBoxType() {
|
||||
return await this.chatBoxTypeEntity.find({
|
||||
order: { order: 'DESC' },
|
||||
});
|
||||
}
|
||||
async setChatBox(req, body) {
|
||||
const { title, prompt, appId, order, status, typeId, id, url } = body;
|
||||
if (!typeId) {
|
||||
throw new common_1.HttpException('缺失必要参数!', common_1.HttpStatus.BAD_REQUEST);
|
||||
}
|
||||
try {
|
||||
const params = { title, order, status, typeId, url };
|
||||
params.appId = appId || 0;
|
||||
params.prompt = prompt || '';
|
||||
if (id) {
|
||||
return await this.chatBoxEntity.update({ id }, params);
|
||||
}
|
||||
else {
|
||||
return await this.chatBoxEntity.save(params);
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
console.log('error: ', error);
|
||||
}
|
||||
}
|
||||
async delChatBox(req, body) {
|
||||
const { id } = body;
|
||||
if (!id) {
|
||||
throw new common_1.HttpException('非法操作!', common_1.HttpStatus.BAD_REQUEST);
|
||||
}
|
||||
return await this.chatBoxEntity.delete({ id });
|
||||
}
|
||||
async queryChatBox() {
|
||||
const data = await this.chatBoxEntity.find({
|
||||
order: { order: 'DESC' },
|
||||
});
|
||||
const typeIds = [...new Set(data.map((t) => t.typeId))];
|
||||
const appIds = [...new Set(data.map((t) => t.appId))];
|
||||
const typeRes = await this.chatBoxTypeEntity.find({ where: { id: (0, typeorm_1.In)(typeIds) } });
|
||||
const appRes = await this.appEntity.find({ where: { id: (0, typeorm_1.In)(appIds) } });
|
||||
return data.map((item) => {
|
||||
const { typeId, appId } = item;
|
||||
item.typeInfo = typeRes.find((t) => t.id === typeId);
|
||||
item.appInfo = appRes.find((t) => t.id === appId);
|
||||
return item;
|
||||
});
|
||||
}
|
||||
async queryChatBoxFrontend() {
|
||||
const typeRes = await this.chatBoxTypeEntity.find({ order: { order: 'DESC' }, where: { status: true } });
|
||||
const boxinfos = await this.chatBoxEntity.find({ where: { status: true } });
|
||||
const appIds = [...new Set(boxinfos.map((t) => t.appId))];
|
||||
const appInfos = await this.appEntity.find({ where: { id: (0, typeorm_1.In)(appIds) } });
|
||||
boxinfos.forEach((item) => {
|
||||
const app = appInfos.find((k) => k.id === item.appId);
|
||||
item.coverImg = app === null || app === void 0 ? void 0 : app.coverImg;
|
||||
return item;
|
||||
});
|
||||
return typeRes.map((t) => {
|
||||
t.childList = boxinfos.filter((box) => box.typeId === t.id && box.status);
|
||||
return t;
|
||||
});
|
||||
}
|
||||
async setChatPreType(req, body) {
|
||||
try {
|
||||
const { name, icon, order, id, status } = body;
|
||||
if (id) {
|
||||
return await this.chatPreTypeEntity.update({ id }, { name, icon, order, status });
|
||||
}
|
||||
else {
|
||||
return await this.chatPreTypeEntity.save({ name, icon, order, status });
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
console.log('error: ', error);
|
||||
}
|
||||
}
|
||||
async delChatPreType(req, body) {
|
||||
const { id } = body;
|
||||
if (!id) {
|
||||
throw new common_1.HttpException('非法操作!', common_1.HttpStatus.BAD_REQUEST);
|
||||
}
|
||||
const count = await this.chatBoxEntity.count({ where: { typeId: id } });
|
||||
if (count) {
|
||||
throw new common_1.HttpException('当前分类下有未处理数据不可移除!', common_1.HttpStatus.BAD_REQUEST);
|
||||
}
|
||||
return await this.chatPreTypeEntity.delete({ id });
|
||||
}
|
||||
async queryChatPreType() {
|
||||
return await this.chatPreTypeEntity.find({
|
||||
order: { order: 'DESC' },
|
||||
});
|
||||
}
|
||||
async setChatPre(req, body) {
|
||||
const { title, prompt, appId, order, status, typeId, id, url } = body;
|
||||
if (!typeId) {
|
||||
throw new common_1.HttpException('缺失必要参数!', common_1.HttpStatus.BAD_REQUEST);
|
||||
}
|
||||
try {
|
||||
const params = { title, prompt, order, status, typeId, url };
|
||||
if (id) {
|
||||
return await this.chatPreEntity.update({ id }, params);
|
||||
}
|
||||
else {
|
||||
return await this.chatPreEntity.save(params);
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
console.log('error: ', error);
|
||||
}
|
||||
}
|
||||
async delChatPre(req, body) {
|
||||
const { id } = body;
|
||||
if (!id) {
|
||||
throw new common_1.HttpException('非法操作!', common_1.HttpStatus.BAD_REQUEST);
|
||||
}
|
||||
return await this.chatPreEntity.delete({ id });
|
||||
}
|
||||
async queryChatPre() {
|
||||
const data = await this.chatPreEntity.find({
|
||||
order: { order: 'DESC' },
|
||||
});
|
||||
const typeIds = [...new Set(data.map((t) => t.typeId))];
|
||||
const typeRes = await this.chatPreTypeEntity.find({ where: { id: (0, typeorm_1.In)(typeIds) } });
|
||||
return data.map((item) => {
|
||||
const { typeId, appId } = item;
|
||||
item.typeInfo = typeRes.find((t) => t.id === typeId);
|
||||
return item;
|
||||
});
|
||||
}
|
||||
async queryChatPreList() {
|
||||
const typeRes = await this.chatPreTypeEntity.find({ order: { order: 'DESC' }, where: { status: true } });
|
||||
const chatPreData = await this.chatPreEntity.find({ where: { status: true } });
|
||||
return typeRes.map((t) => {
|
||||
t.childList = chatPreData.filter((box) => box.typeId === t.id && box.status);
|
||||
return t;
|
||||
});
|
||||
}
|
||||
async getMaxTokenFromModelWithOpenAi(model, maxModelToken, maxResToken) {
|
||||
let maxToken = 4096;
|
||||
let maxRes = 2048;
|
||||
if (model.toLowerCase().includes('gpt-4')) {
|
||||
maxToken = maxModelToken >= 8196 ? 8196 : maxModelToken;
|
||||
maxRes = maxResToken >= 4096 ? 4096 : maxResToken;
|
||||
if (model.toLowerCase().includes('32k')) {
|
||||
maxToken = maxModelToken >= 32768 ? 32768 : maxModelToken;
|
||||
maxRes = maxResToken >= 16000 ? 16000 : maxResToken;
|
||||
}
|
||||
if (model.toLowerCase().includes('gpt-4-1106') || model.toLowerCase().includes('gpt-4-vision-preview')) {
|
||||
maxToken = maxModelToken >= 128000 ? 128000 : maxModelToken;
|
||||
maxRes = maxResToken >= 4096 ? 4096 : maxResToken;
|
||||
}
|
||||
}
|
||||
if (model.toLowerCase().includes('gpt-3')) {
|
||||
maxToken = maxModelToken >= 4096 ? 4096 : maxModelToken;
|
||||
maxRes = maxResToken >= 2048 ? 2048 : maxResToken;
|
||||
if (model.toLowerCase().includes('16k')) {
|
||||
maxToken = maxModelToken >= 16384 ? 16384 : maxModelToken;
|
||||
maxRes = maxResToken >= 8000 ? 8000 : maxResToken;
|
||||
}
|
||||
if (model.toLowerCase().includes('1106')) {
|
||||
maxToken = maxModelToken >= 16384 ? 16384 : maxModelToken;
|
||||
maxRes = maxResToken >= 8000 ? 8000 : maxResToken;
|
||||
}
|
||||
}
|
||||
return {
|
||||
maxToken,
|
||||
maxRes,
|
||||
};
|
||||
}
|
||||
};
// Nest DI registration (TypeScript compiler output).
// The seven @InjectRepository __param calls bind the constructor's leading
// Repository parameters; "design:paramtypes" lists every constructor
// dependency in positional order for Nest's injector.
ChatgptService = __decorate([
    (0, common_1.Injectable)(),
    __param(0, (0, typeorm_2.InjectRepository)(gptkeys_entity_1.GptKeysEntity)),
    __param(1, (0, typeorm_2.InjectRepository)(config_entity_1.ConfigEntity)),
    __param(2, (0, typeorm_2.InjectRepository)(chatBoxType_entity_1.ChatBoxTypeEntity)),
    __param(3, (0, typeorm_2.InjectRepository)(chatBox_entity_1.ChatBoxEntity)),
    __param(4, (0, typeorm_2.InjectRepository)(app_entity_1.AppEntity)),
    __param(5, (0, typeorm_2.InjectRepository)(chatPreType_entity_1.ChatPreTypeEntity)),
    __param(6, (0, typeorm_2.InjectRepository)(chatPre_entity_1.ChatPreEntity)),
    __metadata("design:paramtypes", [typeorm_1.Repository,
        typeorm_1.Repository,
        typeorm_1.Repository,
        typeorm_1.Repository,
        typeorm_1.Repository,
        typeorm_1.Repository,
        typeorm_1.Repository,
        nestjs_config_1.ConfigService,
        userBalance_service_1.UserBalanceService,
        chatLog_service_1.ChatLogService,
        user_service_1.UserService,
        upload_service_1.UploadService,
        badwords_service_1.BadwordsService,
        autoreply_service_1.AutoreplyService,
        globalConfig_service_1.GlobalConfigService,
        fanyi_service_1.FanyiService,
        chatGroup_service_1.ChatGroupService,
        models_service_1.ModelsService])
], ChatgptService);
exports.ChatgptService = ChatgptService;
|
||||
32
dist/modules/chatgpt/dto/chatDraw.dto.js
vendored
Normal file
32
dist/modules/chatgpt/dto/chatDraw.dto.js
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
"use strict";
// TypeScript emit helper: applies decorators to a class or class member.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// TypeScript emit helper: records design-time type metadata for reflection.
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChatDrawDto = void 0;
const swagger_1 = require("@nestjs/swagger");
// Request payload for the image-generation endpoint. Fields (documented for
// Swagger via the @ApiProperty calls below):
//   prompt  — drawing description text
//   n       — number of images to generate
//   size    — image dimensions, e.g. '1024x1024'
//   quality — image quality, e.g. 'standard'
class ChatDrawDto {
}
__decorate([
    (0, swagger_1.ApiProperty)({ example: 'Draw a cute little dog', description: '绘画描述信息' }),
    __metadata("design:type", String)
], ChatDrawDto.prototype, "prompt", void 0);
__decorate([
    (0, swagger_1.ApiProperty)({ example: 1, description: '绘画张数', required: true }),
    __metadata("design:type", Number)
], ChatDrawDto.prototype, "n", void 0);
__decorate([
    (0, swagger_1.ApiProperty)({ example: '1024x1024', description: '图片尺寸', required: true }),
    __metadata("design:type", String)
], ChatDrawDto.prototype, "size", void 0);
__decorate([
    (0, swagger_1.ApiProperty)({ example: 'standard', description: '图片质量', required: true }),
    __metadata("design:type", String)
], ChatDrawDto.prototype, "quality", void 0);
exports.ChatDrawDto = ChatDrawDto;
|
||||
48
dist/modules/chatgpt/dto/chatProcess.dto.js
vendored
Normal file
48
dist/modules/chatgpt/dto/chatProcess.dto.js
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
"use strict";
// TypeScript emit helper: applies decorators to a class or class member.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// TypeScript emit helper: records design-time type metadata for reflection.
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChatProcessDto = exports.Options = void 0;
const class_validator_1 = require("class-validator");
const swagger_1 = require("@nestjs/swagger");
const class_transformer_1 = require("class-transformer");
// Nested conversation options: carries the id of the previous message so the
// server can rebuild the conversation context.
class Options {
}
__decorate([
    (0, class_validator_1.IsString)(),
    __metadata("design:type", String)
], Options.prototype, "parentMessageId", void 0);
exports.Options = Options;
// Request payload for the chat endpoint:
//   prompt        — user question (required, validated non-empty)
//   options       — previous-message linkage (optional, transformed to Options)
//   systemMessage — system preset text (optional)
//   appId         — bound application id (optional)
class ChatProcessDto {
}
__decorate([
    (0, swagger_1.ApiProperty)({ example: 'hello, Who are you', description: '对话信息' }),
    (0, class_validator_1.IsNotEmpty)({ message: '提问信息不能为空!' }),
    __metadata("design:type", String)
], ChatProcessDto.prototype, "prompt", void 0);
__decorate([
    (0, swagger_1.ApiProperty)({ example: '{ parentMessageId: 0 }', description: '上次对话信息', required: false }),
    (0, class_transformer_1.Type)(() => Options),
    __metadata("design:type", Options)
], ChatProcessDto.prototype, "options", void 0);
__decorate([
    (0, swagger_1.ApiProperty)({
        example: "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.",
        description: '系统预设信息',
    }),
    (0, class_validator_1.IsOptional)(),
    __metadata("design:type", String)
], ChatProcessDto.prototype, "systemMessage", void 0);
__decorate([
    (0, swagger_1.ApiProperty)({ example: 1, description: '应用id', required: false }),
    (0, class_validator_1.IsOptional)(),
    __metadata("design:type", Number)
], ChatProcessDto.prototype, "appId", void 0);
exports.ChatProcessDto = ChatProcessDto;
|
||||
72
dist/modules/chatgpt/gptkeys.entity.js
vendored
Normal file
72
dist/modules/chatgpt/gptkeys.entity.js
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
"use strict";
// TypeScript emit helper: applies decorators to a class or class member.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// TypeScript emit helper: records design-time type metadata for reflection.
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.GptKeysEntity = void 0;
const typeorm_1 = require("typeorm");
const baseEntity_1 = require("../../common/entity/baseEntity");
// TypeORM entity for the `gpt_keys` table: one row per OpenAI API key, with
// its bound model, limits, proxy settings and usage bookkeeping. Column
// semantics are given in each @Column `comment` below.
let GptKeysEntity = class GptKeysEntity extends baseEntity_1.BaseEntity {
};
__decorate([
    (0, typeorm_1.Column)({ unique: true, comment: 'gpt key', length: 255 }),
    __metadata("design:type", String)
], GptKeysEntity.prototype, "key", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '使用的状态: 0:禁用 1:启用', default: 0 }),
    __metadata("design:type", Number)
], GptKeysEntity.prototype, "status", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '绑定的模型是?', default: 'gpt-3.5-turbo' }),
    __metadata("design:type", String)
], GptKeysEntity.prototype, "model", void 0);
__decorate([
    // decimal column — TypeORM returns decimals as strings, hence design:type String.
    (0, typeorm_1.Column)({ comment: 'key的余额', type: 'decimal', precision: 10, scale: 2, default: 0 }),
    __metadata("design:type", String)
], GptKeysEntity.prototype, "balance", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: 'key的余额类型', default: '', nullable: true }),
    __metadata("design:type", String)
], GptKeysEntity.prototype, "type", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: 'key的状态: 1:有效 2:余额耗尽 -1:被封号', default: 1 }),
    __metadata("design:type", Number)
], GptKeysEntity.prototype, "keyStatus", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: 'key的到期时间', nullable: true }),
    __metadata("design:type", Date)
], GptKeysEntity.prototype, "expireTime", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: 'key权重', default: 1 }),
    __metadata("design:type", Number)
], GptKeysEntity.prototype, "weight", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: 'key的使用次数', default: 0 }),
    __metadata("design:type", Number)
], GptKeysEntity.prototype, "useCount", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '模型支持的最大Token', nullable: true }),
    __metadata("design:type", Number)
], GptKeysEntity.prototype, "maxModelTokens", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '模型设置的最大回复Token', nullable: true }),
    __metadata("design:type", Number)
], GptKeysEntity.prototype, "maxResponseTokens", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '当前模型的代理地址', nullable: true }),
    __metadata("design:type", String)
], GptKeysEntity.prototype, "openaiProxyUrl", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '当前模型的超时时间单位ms', nullable: true }),
    __metadata("design:type", Number)
], GptKeysEntity.prototype, "openaiTimeoutMs", void 0);
GptKeysEntity = __decorate([
    (0, typeorm_1.Entity)({ name: 'gpt_keys' })
], GptKeysEntity);
exports.GptKeysEntity = GptKeysEntity;
|
||||
66
dist/modules/chatgpt/helper.js
vendored
Normal file
66
dist/modules/chatgpt/helper.js
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.addOneIfOdd = exports.unifiedFormattingResponse = void 0;
|
||||
/**
 * Normalize a model response into a single shape
 * { keyType, model, parentMessageId, text, usage } regardless of vendor.
 * keyType 1 reads from `response.detail` (OpenAI-style); keyTypes 2 and 3
 * read usage/text from `response` and model/parentMessageId from `others`.
 * Any other keyType yields the empty default record.
 */
function unifiedFormattingResponse(keyType, response, others) {
    const zeroUsage = { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
    let formatRes = {
        keyType,
        model: '',
        parentMessageId: '',
        text: '',
        usage: Object.assign({}, zeroUsage),
    };
    const kind = Number(keyType);
    if (kind === 1) {
        const detail = response === null || response === void 0 ? void 0 : response.detail;
        const { model, parentMessageId } = detail;
        // Missing usage (e.g. estimated streams) falls back to zeros.
        const usage = detail.usage
            ? detail.usage
            : { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
        formatRes = {
            keyType,
            model,
            parentMessageId,
            text: response.text,
            usage: {
                prompt_tokens: usage.prompt_tokens,
                completion_tokens: usage.completion_tokens,
                total_tokens: usage.total_tokens,
            },
        };
    }
    if (kind === 2 || kind === 3) {
        const { usage, text } = response;
        const { model, parentMessageId } = others;
        formatRes = {
            keyType,
            model,
            parentMessageId,
            text,
            usage: {
                prompt_tokens: usage.prompt_tokens,
                completion_tokens: usage.completion_tokens,
                total_tokens: usage.total_tokens,
            },
        };
    }
    return formatRes;
}
|
||||
exports.unifiedFormattingResponse = unifiedFormattingResponse;
|
||||
/** Round an odd number up to the next even number; even inputs pass through. */
function addOneIfOdd(num) {
    return num % 2 !== 0 ? num + 1 : num;
}
|
||||
exports.addOneIfOdd = addOneIfOdd;
|
||||
120
dist/modules/chatgpt/openai.js
vendored
Normal file
120
dist/modules/chatgpt/openai.js
vendored
Normal file
@@ -0,0 +1,120 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getTokenCount = exports.sendMessageFromOpenAi = void 0;
|
||||
const axios_1 = require("axios");
|
||||
const tiktoken_1 = require("@dqbd/tiktoken");
|
||||
const utils_1 = require("../../common/utils");
|
||||
const tokenizer = (0, tiktoken_1.get_encoding)('cl100k_base');
|
||||
/**
 * Build the chat-completions endpoint from an optional proxy base URL.
 * A trailing slash is stripped; an empty proxy falls back to api.openai.com.
 */
function getFullUrl(proxyUrl) {
    const trimmed = proxyUrl.endsWith('/') ? proxyUrl.slice(0, -1) : proxyUrl;
    return `${trimmed || 'https://api.openai.com'}/v1/chat/completions`;
}
|
||||
/**
 * Stream a chat completion from the OpenAI-compatible endpoint.
 * Accumulates SSE deltas into `result.text`, invoking `onProgress` with the
 * partial text after each chunk, and resolves with the final result object
 * when the stream ends. Rejects only if the initial request itself fails.
 */
function sendMessageFromOpenAi(messagesHistory, inputs) {
    var _a;
    const { onProgress, maxToken, apiKey, model, temperature = 0.95, proxyUrl } = inputs;
    // NOTE(review): this logs the raw API key — consider redacting in production.
    console.log('current request options: ', apiKey, model, maxToken, proxyUrl);
    // Clamp the completion budget to the model family's window.
    const max_tokens = compilerToken(model, maxToken);
    const options = {
        method: 'POST',
        url: getFullUrl(proxyUrl),
        responseType: 'stream',
        headers: {
            'Content-Type': 'application/json',
            Authorization: `Bearer ${(0, utils_1.removeSpecialCharacters)(apiKey)}`,
        },
        data: {
            max_tokens,
            stream: true,
            temperature,
            model,
            messages: messagesHistory
        },
    };
    // Last user message — used only for the estimated token count at the end.
    const prompt = (_a = messagesHistory[messagesHistory.length - 1]) === null || _a === void 0 ? void 0 : _a.content;
    return new Promise(async (resolve, reject) => {
        try {
            const response = await (0, axios_1.default)(options);
            const stream = response.data;
            let result = { text: '' };
            stream.on('data', (chunk) => {
                var _a;
                // SSE events are separated by blank lines; drop empty fragments.
                const splitArr = chunk.toString().split('\n\n').filter((line) => line.trim() !== '');
                for (const line of splitArr) {
                    const data = line.replace('data:', '');
                    let ISEND = false;
                    try {
                        ISEND = JSON.parse(data).choices[0].finish_reason === 'stop';
                    }
                    catch (error) {
                        ISEND = false;
                    }
                    if (data === '[DONE]' || ISEND) {
                        result.text = result.text.trim();
                        // NOTE(review): this `return` only exits the data callback —
                        // the promise is resolved later by the 'end' handler.
                        return result;
                    }
                    try {
                        const parsedData = JSON.parse(data);
                        if (parsedData.id) {
                            result.id = parsedData.id;
                        }
                        if ((_a = parsedData.choices) === null || _a === void 0 ? void 0 : _a.length) {
                            const delta = parsedData.choices[0].delta;
                            result.delta = delta.content;
                            if (delta === null || delta === void 0 ? void 0 : delta.content)
                                result.text += delta.content;
                            if (delta.role) {
                                result.role = delta.role;
                            }
                            // Keep the last raw payload so 'end' can attach usage to it.
                            result.detail = parsedData;
                        }
                        onProgress && onProgress({ text: result.text });
                    }
                    catch (error) {
                        console.log('parse Error', data);
                    }
                }
            });
            stream.on('end', () => {
                if (result.detail && result.text) {
                    // Streaming responses carry no usage block — estimate it locally.
                    const promptTokens = getTokenCount(prompt);
                    const completionTokens = getTokenCount(result.text);
                    result.detail.usage = {
                        prompt_tokens: promptTokens,
                        completion_tokens: completionTokens,
                        total_tokens: promptTokens + completionTokens,
                        estimated: true
                    };
                }
                return resolve(result);
            });
        }
        catch (error) {
            reject(error);
        }
    });
}
exports.sendMessageFromOpenAi = sendMessageFromOpenAi;
|
||||
/**
 * Count tokens in `text` with the module-level cl100k_base tokenizer.
 * The endoftext sentinel is stripped first; falsy input counts as 0.
 */
function getTokenCount(text) {
    if (!text)
        return 0;
    const cleaned = text.replace(/<\|endoftext\|>/g, '');
    return tokenizer.encode(cleaned).length;
}
exports.getTokenCount = getTokenCount;
|
||||
/**
 * Clamp the requested completion budget to the model family's window.
 * Checks run in sequence, so e.g. 'gpt-4-32k' is first clamped by the
 * 'gpt-4' rule and then widened by the '32k' rule.
 * NOTE(review): an unrecognized model returns 0 — verify callers never send
 * a model outside these families.
 */
function compilerToken(model, maxToken) {
    let max = 0;
    // BUGFIX(idiom): was `model.includes(3.5)` — passing a Number relied on
    // implicit ToString coercion; use the string literal explicitly.
    if (model.includes('3.5')) {
        max = maxToken > 4096 ? 4096 : maxToken;
    }
    if (model.includes('gpt-4')) {
        max = maxToken > 8192 ? 8192 : maxToken;
    }
    if (model.includes('preview')) {
        max = maxToken > 4096 ? 4096 : maxToken;
    }
    if (model.includes('32k')) {
        max = maxToken > 32768 ? 32768 : maxToken;
    }
    return max;
}
|
||||
89
dist/modules/chatgpt/store.js
vendored
Normal file
89
dist/modules/chatgpt/store.js
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.NineStore = void 0;
|
||||
const uuid_1 = require("uuid");
|
||||
const tiktoken_1 = require("@dqbd/tiktoken");
|
||||
const tokenizer = (0, tiktoken_1.get_encoding)('cl100k_base');
|
||||
/**
 * Conversation store: persists messages in a pluggable key/value backend and
 * rebuilds a token-bounded message context by walking parentMessageId links.
 */
class NineStore {
    constructor(options) {
        const { store, namespace, expires } = this.formatOptions(options);
        this.store = store;          // backing key/value store (get/set)
        this.namespace = namespace;  // key prefix, default 'chat'
        this.expires = expires;      // TTL in ms, default 3 days
    }
    // Apply option defaults: 3-day TTL, 'chat' namespace.
    formatOptions(options) {
        const { store, expires = 1000 * 60 * 60 * 24 * 3, namespace = 'chat' } = options;
        return { store, namespace, expires };
    }
    // Prefix a raw key with the namespace.
    // NOTE(review): getData/setData below use the raw id directly and never
    // call this — confirm whether namespacing is intended to be applied.
    generateKey(key) {
        return this.namespace ? `${this.namespace}-${key}` : key;
    }
    // Fetch a stored message by id.
    async getData(id) {
        const res = await this.store.get(id);
        return res;
    }
    // Persist a message under its own id with the given TTL.
    async setData(message, expires = this.expires) {
        await this.store.set(message.id, message, expires);
    }
    /**
     * Build the message array for the next model call: optional system
     * message, the chain of ancestor messages (walked via parentMessageId,
     * newest inserted just after the system message), and the new user text.
     * Stops at maxRounds ancestors and prunes oldest messages when the token
     * estimate would exceed maxModelToken - maxResponseTokens.
     * Returns { context, round, historyToken }.
     */
    async buildMessageFromParentMessageId(text, options) {
        let { maxRounds, maxModelToken, maxResponseTokens, systemMessage = '', name } = options;
        let { parentMessageId } = options;
        let messages = [];
        let nextNumTokensEstimate = 0;
        if (systemMessage) {
            messages.push({ role: 'system', content: systemMessage });
        }
        const systemMessageOffset = messages.length;
        let round = 0;
        let nextMessages = text ? messages.concat([{ role: 'user', content: text, name }]) : messages;
        do {
            if (!parentMessageId) {
                break;
            }
            const parentMessage = await this.getData(parentMessageId);
            if (!parentMessage) {
                break;
            }
            // Shadowed on purpose: these come from the ancestor message.
            const { text, name, role } = parentMessage;
            // Insert the ancestor right after the system message so older
            // history ends up earlier in the array.
            nextMessages = nextMessages.slice(0, systemMessageOffset).concat([
                { role, content: text, name },
                ...nextMessages.slice(systemMessageOffset)
            ]);
            round++;
            if (maxRounds && round >= maxRounds) {
                break;
            }
            if (maxModelToken && maxResponseTokens) {
                const maxNumTokens = maxModelToken - maxResponseTokens;
                nextNumTokensEstimate = await this._getTokenCount(nextMessages);
                // 200-token safety margin before forcing a prune.
                const isValidPrompt = nextNumTokensEstimate + 200 <= maxNumTokens;
                if (!isValidPrompt) {
                    nextMessages = this._recursivePruning(nextMessages, maxNumTokens, systemMessage);
                }
            }
            parentMessageId = parentMessage.parentMessageId;
        } while (true);
        // NOTE(review): maxTokens is computed but never used or returned —
        // confirm whether it was meant to be part of the result.
        const maxTokens = Math.max(1, Math.min(maxModelToken - nextNumTokensEstimate, maxResponseTokens));
        console.log('本次携带上下文的长度', nextMessages.length, nextNumTokensEstimate);
        return { context: nextMessages, round: nextMessages.length, historyToken: nextNumTokensEstimate };
    }
    // Token count of all message contents concatenated (endoftext stripped).
    _getTokenCount(messages) {
        let text = messages.reduce((pre, cur) => {
            return pre += cur.content;
        }, '');
        text = text.replace(/<\|endoftext\|>/g, '');
        return tokenizer.encode(text).length;
    }
    // Drop the oldest non-system message until the estimate fits the budget.
    _recursivePruning(messages, maxNumTokens, systemMessage) {
        const currentTokens = this._getTokenCount(messages);
        if (currentTokens <= maxNumTokens) {
            return messages;
        }
        // Index 0 is the system message when present — skip it.
        messages.splice(systemMessage ? 1 : 0, 1);
        return this._recursivePruning(messages, maxNumTokens, systemMessage);
    }
    // Fresh v4 UUID for new message ids.
    getUuid() {
        return (0, uuid_1.v4)();
    }
}
exports.NineStore = NineStore;
|
||||
36
dist/modules/chatgpt/whiteList.entity.js
vendored
Normal file
36
dist/modules/chatgpt/whiteList.entity.js
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
"use strict";
// TypeScript emit helper: applies decorators to a class or class member.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
// TypeScript emit helper: records design-time type metadata for reflection.
var __metadata = (this && this.__metadata) || function (k, v) {
    if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.WhiteListEntity = void 0;
const typeorm_1 = require("typeorm");
const baseEntity_1 = require("../../common/entity/baseEntity");
// TypeORM entity for the `white_list` table: per-user usage allowance and
// status tracking. Column semantics are in each @Column `comment` below.
let WhiteListEntity = class WhiteListEntity extends baseEntity_1.BaseEntity {
};
__decorate([
    (0, typeorm_1.Column)({ unique: true, comment: '用户ID' }),
    __metadata("design:type", Number)
], WhiteListEntity.prototype, "userId", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '使用次数限制', default: 0 }),
    __metadata("design:type", Number)
], WhiteListEntity.prototype, "count", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '当前用户状态', default: 1 }),
    __metadata("design:type", Number)
], WhiteListEntity.prototype, "status", void 0);
__decorate([
    (0, typeorm_1.Column)({ comment: '已经使用的次数', default: 0 }),
    __metadata("design:type", Number)
], WhiteListEntity.prototype, "useCount", void 0);
WhiteListEntity = __decorate([
    (0, typeorm_1.Entity)({ name: 'white_list' })
], WhiteListEntity);
exports.WhiteListEntity = WhiteListEntity;
|
||||
101
dist/modules/chatgpt/zhipu.js
vendored
Normal file
101
dist/modules/chatgpt/zhipu.js
vendored
Normal file
@@ -0,0 +1,101 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.sendMessageFromZhipu = exports.compilerStream = exports.compilerMetaJsonStr = exports.generateToken = void 0;
|
||||
const axios = require('axios');
|
||||
const jwt = require('jsonwebtoken');
|
||||
/**
 * Build a signed JWT for the Zhipu API from an `id.secret` API key.
 * Timestamps are in milliseconds (Zhipu's convention).
 * NOTE(review): the default `expSeconds` (1000*60*60*24*360) already looks
 * like a millisecond value, yet it is multiplied by 1000 again below —
 * confirm the intended expiry unit; as written the token effectively
 * never expires within any realistic horizon.
 */
function generateToken(apikey, expSeconds = 1000 * 60 * 60 * 24 * 360) {
    const [id, secret] = apikey.split('.');
    const payload = {
        api_key: id,
        exp: Math.round(Date.now()) + expSeconds * 1000,
        timestamp: Math.round(Date.now()),
    };
    // Zhipu requires the custom `sign_type: 'SIGN'` header field.
    return jwt.sign(payload, secret, { algorithm: 'HS256', header: { alg: 'HS256', sign_type: 'SIGN' } });
}
exports.generateToken = generateToken;
|
||||
/**
 * Parse the `meta:` JSON payload of a Zhipu SSE event.
 * On malformed input, logs the raw data and returns a placeholder object
 * carrying fixed fallback usage numbers (original behavior preserved).
 */
function compilerMetaJsonStr(data) {
    try {
        return JSON.parse(data);
    }
    catch (error) {
        console.error('json parse error from zhipu!', data);
        return {
            usage: {
                completion_tokens: 49,
                prompt_tokens: 333,
                total_tokens: 399
            },
        };
    }
}
|
||||
exports.compilerMetaJsonStr = compilerMetaJsonStr;
|
||||
/**
 * Decode one Zhipu SSE event from its raw lines.
 * 3 lines => in-progress chunk (is_end false); 4 lines => final chunk with a
 * trailing `meta:` usage payload (is_end true). Any other shape => undefined.
 */
function compilerStream(streamArr) {
    var _a;
    if (streamArr.length !== 3 && streamArr.length !== 4) {
        return undefined;
    }
    const base = {
        event: streamArr[0].replace('event:', ''),
        id: streamArr[1].replace('id:', ''),
        result: streamArr[2].replace('data:', '').trim(),
    };
    if (streamArr.length === 3) {
        return Object.assign({ is_end: false }, base);
    }
    const meta = compilerMetaJsonStr(streamArr[3].replace('meta:', ''));
    return Object.assign({ is_end: true, usage: (_a = meta) === null || _a === void 0 ? void 0 : _a.usage }, base);
}
|
||||
exports.compilerStream = compilerStream;
|
||||
/**
 * Stream a completion from the Zhipu SSE endpoint.
 * Calls `onProgress` for each decoded chunk and resolves with the final
 * chunk (its `text` holding the full accumulated reply) when the stream
 * ends. Rejects when the HTTP request fails.
 */
async function sendMessageFromZhipu(messagesHistory, { onProgress, key, model, temperature = 0.95 }) {
    const token = await generateToken(key);
    return new Promise((resolve, reject) => {
        const url = `https://open.bigmodel.cn/api/paas/v3/model-api/${model}/sse-invoke`;
        const options = {
            method: 'POST',
            url,
            responseType: 'stream',
            headers: {
                'Content-Type': 'application/json',
                'Authorization': token
            },
            data: {
                prompt: messagesHistory,
                temperature
            }
        };
        axios(options)
            .then(response => {
                const stream = response.data;
                let resData;
                let cacheResText = '';
                stream.on('data', chunk => {
                    const stramArr = chunk.toString().split("\n").filter((line) => line.trim() !== "");
                    const parseData = compilerStream(stramArr);
                    if (!parseData)
                        return;
                    const { id, result, is_end } = parseData;
                    result && (cacheResText += result.trim());
                    if (is_end) {
                        // Final chunk: mark it not-end for the consumer and attach
                        // the accumulated text (original behavior preserved).
                        parseData.is_end = false;
                        resData = parseData;
                        resData.text = cacheResText;
                    }
                    onProgress(parseData);
                });
                stream.on('end', () => {
                    resolve(resData);
                    cacheResText = '';
                });
            })
            .catch(error => {
                console.log('error: ', error);
                // BUGFIX: previously the error was only logged and the promise
                // never settled, leaving callers awaiting forever.
                reject(error);
            });
    });
}
exports.sendMessageFromZhipu = sendMessageFromZhipu;
|
||||
Reference in New Issue
Block a user