Mirror of https://github.com/Jerryplusy/crystelf-plugin.git (synced 2025-12-05 15:41:56 +00:00)
Compare commits a7e701cbb2...875ca65ed9 (8 commits)

Commits: 875ca65ed9, f2de86d421, 8237606ed3, dfd585409a, 619f2a3295, 7a3df0af50, 61a9462247, d0b6b939cd
apps/ai.js (85 changed lines)
```diff
@@ -91,11 +91,11 @@ async function index(e) {
   if (e.user_id === e.bot.uin) {
     return;
   }
-  const userMessage = await extractUserMessage(e.msg, nickname, e);
-  if (!userMessage || userMessage.length === 0) {
+  const messageData = await extractUserMessage(e.msg, nickname, e);
+  if (!messageData || !messageData.text || messageData.text.length === 0) {
     return e.reply(segment.image(await Meme.getMeme(aiConfig.character, 'default')));
   }
-  const result = await processMessage(userMessage, e, aiConfig);
+  const result = await processMessage(messageData, e, aiConfig);
   if (result && result.length > 0) {
     await sendResponse(e, result);
   }
```
```diff
@@ -113,6 +113,7 @@ async function extractUserMessage(msg, nickname, e) {
     let at = [];
     const aiConfig = await ConfigControl.get('ai');
     const maxMessageLength = aiConfig?.maxMessageLength || 100;
+    const originalMessages = [];
     e.message.forEach((message) => {
       logger.info(message);
       if (message.type === 'text' && message.text !== '' && message.text !== '\n'){
```
```diff
@@ -124,19 +125,34 @@ async function extractUserMessage(msg, nickname, e) {
         text.push(displayText);
       } else if (message.type === 'at') {
         at.push(message.qq);
+      } else if (message.type === 'image') {
+        if (message.image) {
+          originalMessages.push({
+            type: 'image_url',
+            image_url: {
+              url: message.url
+            }
+          });
+        }
       }
     });
     let returnMessage = '';
     if (text.length > 0) {
       text.forEach((message) => {
         if(message === '') {
         } else {
-          returnMessage += `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]说:${message}\n`
+          const tempMessage = `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]说:${message}\n`
+          returnMessage += tempMessage;
+          originalMessages.push({
+            type: 'text',
+            content: tempMessage
+          });
         }
       });
     }
     if(at.length == 1 && at[0] == e.bot.uin && text.length == 0){
-      return [];
+      return { text: [], originalMessages: originalMessages };
     }
     if (at.length > 0) {
       for (const at1 of at) {
```
```diff
@@ -144,14 +160,15 @@ async function extractUserMessage(msg, nickname, e) {
           //returnMessage += `[${e.sender?.nickname},id:${e.user_id}]@(at)了你,你的id是${at}\n`;
         } else {
           const atNickname = await e.group.pickMember(at1).nickname || '一个人';
-          returnMessage += `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]@(at)了${atNickname},id是${at1}\n`;
+          const tempMessage = `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]@(at)了${atNickname},id是${at1}\n`
+          returnMessage += tempMessage;
+          originalMessages.push({
+            type: 'text',
+            content: tempMessage
+          });
         }
       }
     }
-    const imgUrls = await YunzaiUtils.getImages(e, 1, true);
-    if (imgUrls) {
-      returnMessage += `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]发送了一张图片(你可能暂时无法查看)\n`;
-    }
     if(e.source || e.reply_id){
       let reply;
       if(e.getReply) reply = await e.getReply();
```
```diff
@@ -163,18 +180,33 @@ async function extractUserMessage(msg, nickname, e) {
         const msgArr = Array.isArray(reply) ? reply : reply.message || [];
         msgArr.forEach((msg) => {
           if(msg.type === 'text'){
-            returnMessage += `[${e.sender?.nickname}]引用了[被引用消息:${reply.user_id == e.bot.uin ? '你' : reply.sender?.nickname},id:${reply.user_id},seq:${reply.message_id}]发的一段文本:${msg.text}\n`
+            const tempMessage = `[${e.sender?.nickname}]引用了[被引用消息:${reply.user_id == e.bot.uin ? '你' : reply.sender?.nickname},id:${reply.user_id},seq:${reply.message_id}]发的一段文本:${msg.text}\n`
+            returnMessage += tempMessage;
+            originalMessages.push({
+              type: 'text',
+              content: tempMessage
+            });
           }
           if(msg.type === 'image'){
             returnMessage += `[${e.sender?.nickname}]引用了[被引用消息:${reply.user_id == e.bot.uin ? '你' : reply.sender?.nickname},id:${reply.user_id},seq:${reply.message_id}]发的一张图片(你可能暂时无法查看)\n`;
+            originalMessages.push({
+              type: 'image_url',
+              image_url: {
+                url: msg.url
+              }
+            });
           }
         })
       }
     }
-    return returnMessage;
+    const imgUrls = await YunzaiUtils.getImages(e, 1, true);
+    if (imgUrls) {
+      returnMessage += `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]发送了一张图片(你可能暂时无法查看)\n`;
+    }
+    return { text: returnMessage, originalMessages: originalMessages };
   }
   logger.warn('[crystelf-ai] 字符串匹配失败');
-  return [];
+  return { text: [], originalMessages: [] };
 }
 
 /**
```
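After this change extractUserMessage returns an object instead of a plain string. A minimal sketch of the shape it now produces (the field names come from the diff above; the values are illustrative):

```js
// Illustrative shape of the value returned by extractUserMessage() after this change.
const messageData = {
  // flattened text transcript of the incoming message, as before
  text: '[Alice,id:10001,seq:42]说:hello\n',
  // raw parts kept for the multimodal path, in OpenAI-style content-part format
  originalMessages: [
    { type: 'text', content: '[Alice,id:10001,seq:42]说:hello\n' },
    { type: 'image_url', image_url: { url: 'https://example.com/pic.jpg' } },
  ],
};
```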
```diff
@@ -202,11 +234,11 @@ async function processMessage(userMessage, e, aiConfig) {
 
 /**
  * 关键词模式
- * @param userMessage
+ * @param messageData
  * @param e
  * @returns {Promise<[{type: string, data: string}]>}
  */
-async function handleKeywordMode(userMessage, e) {
+async function handleKeywordMode(messageData, e) {
   const matchResult = await KeywordMatcher.matchKeywords(e.msg, 'ai');
 
   if (matchResult && matchResult.matched) {
```
```diff
@@ -229,17 +261,17 @@ async function handleKeywordMode(userMessage, e) {
   ];
 }
 
-async function handleAiMode(userMessage, e, aiConfig) {
-  return await callAiForResponse(userMessage, e, aiConfig);
+async function handleAiMode(messageData, e, aiConfig) {
+  return await callAiForResponse(messageData, e, aiConfig);
 }
 
-async function handleMixMode(userMessage, e, aiConfig) {
+async function handleMixMode(messageData, e, aiConfig) {
   const isTooLong = await KeywordMatcher.isMessageTooLong(e.msg);
 
   if (isTooLong) {
     //消息太长,使用AI回复
     logger.info('[crystelf-ai] 消息过长,使用ai回复');
-    return await callAiForResponse(userMessage, e, aiConfig);
+    return await callAiForResponse(messageData, e, aiConfig);
   } else {
     const matchResult = await KeywordMatcher.matchKeywords(e.msg, 'ai');
     if (matchResult && matchResult.matched) {
```
```diff
@@ -264,7 +296,7 @@ async function handleMixMode(userMessage, e, aiConfig) {
       };
       const newChatHistory = [
         ...chatHistory,
-        { role: 'user', content: userMessage },
+        { role: 'user', content: messageData.text },
         { role: 'assistant', content: JSON.stringify(resMessage) },
       ];
       SessionManager.updateChatHistory(e.group_id, newChatHistory);
```
```diff
@@ -274,12 +306,12 @@ async function handleMixMode(userMessage, e, aiConfig) {
     } else {
       logger.info('[crystelf-ai] 关键词匹配失败,使用ai回复');
       //关键词匹配失败,使用AI回复
-      return await callAiForResponse(userMessage, e, aiConfig);
+      return await callAiForResponse(messageData, e, aiConfig);
     }
   }
 }
 
-async function callAiForResponse(userMessage, e, aiConfig) {
+async function callAiForResponse(messageData, e, aiConfig) {
   try {
     //创建session
     const session = SessionManager.createOrGetSession(e.group_id, e.user_id, e);
```
```diff
@@ -299,7 +331,10 @@ async function callAiForResponse(userMessage, e, aiConfig) {
     //构建聊天历史
     const historyLen = aiConfig.chatHistory;
     const chatHistory = session.chatHistory.slice(-historyLen | -10);
-    const aiResult = await AiCaller.callAi(userMessage, chatHistory, memories, e);
+    // 根据多模态开关决定调用方式
+    const aiResult = await AiCaller.callAi(messageData.text, chatHistory, memories, e, messageData.originalMessages);
+
     if (!aiResult.success) {
       logger.error(`[crystelf-ai] AI调用失败: ${aiResult.error}`);
       SessionManager.deactivateSession(e.group_id, e.user_id);
```
```diff
@@ -313,14 +348,14 @@ async function callAiForResponse(userMessage, e, aiConfig) {
     //处理响应
     const processedResponse = await ResponseHandler.processResponse(
       aiResult.response,
-      userMessage,
+      messageData.text,
       e.group_id,
       e.user_id
     );
     //更新session
     const newChatHistory = [
       ...chatHistory,
-      { role: 'user', content: userMessage },
+      { role: 'user', content: messageData.text },
       { role: 'assistant', content: aiResult.response },
     ];
     SessionManager.updateChatHistory(e.group_id, newChatHistory);
```
```diff
@@ -133,10 +133,10 @@ export class CrystelfMusic extends plugin {
     }
     const adapter = await YunzaiUtils.getAdapter(e);
     await Message.emojiLike(e,e.message_id,60,e.group_id,adapter);
+    await musicSearch.clearGroupSearch(e.group_id);
     const result = await musicSearch.handleSelection(e, index);
     if (result.success) {
       await this.sendMusicResult(e, result);
-      await musicSearch.clearGroupSearch(e.group_id);
     } else {
       await e.reply(`${result.message}`, true);
     }
```
```diff
@@ -159,15 +159,22 @@ export class CrystelfMusic extends plugin {
         await Group.sendGroupRecord(e, e.group_id, `file://${audioFile}`, adapter);
       } else {
         const extension = await this.getFileExtension();
-        const sanitizedTitle = song.displayTitle.replace(/\s+/g, '_');
-        const sanitizedArtist = song.displayArtist.replace(/\s+/g, '_');
+        // 过滤非法字符
+        const sanitize = (str) => str.replace(/[\\/:*?"<>|]/g, '').replace(/\s+/g, '_');
+
+        const sanitizedTitle = sanitize(song.displayTitle);
+        const sanitizedArtist = sanitize(song.displayArtist);
         const filename = `${sanitizedTitle} - ${sanitizedArtist}.${extension}`;
+        try {
           await Group.sendGroupFile(e, e.group_id, `file://${audioFile}`, filename, adapter);
+        } catch (fileErr) {
+          logger.warn(`[crystelf-music] 文件发送失败,尝试转为语音: ${fileErr.message}`);
+          await Group.sendGroupRecord(e, e.group_id, `file://${audioFile}`, adapter);
+        }
       }
       musicSearch.clearUserSelection(e.group_id, e.user_id);
-      logger.info(`[crystelf-music] 音乐发送成功: ${song.displayTitle}`);
     } catch (error) {
-      logger.error('[crystelf-music] 发送音乐结果失败:', error);
+      logger.error('[crystelf-music] 发送音乐失败:', error);
       await e.reply('发送音乐失败,请稍后重试', true);
     }
   }
```
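The new sanitize helper keeps the old whitespace-to-underscore behaviour and additionally strips characters that are not valid in filenames on common platforms. A quick illustration of what it produces (inputs hypothetical):

```js
// Strips filename-unsafe characters, then collapses whitespace runs to '_'.
const sanitize = (str) => str.replace(/[\\/:*?"<>|]/g, '').replace(/\s+/g, '_');

sanitize('AC/DC: Back In Black'); // -> 'ACDC_Back_In_Black'
sanitize('My Song  (Live)');      // -> 'My_Song_(Live)'
```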
```diff
@@ -8,6 +8,10 @@
   "apiKey": "",
   "?modelType": "模型名称,请根据baseApi填写的服务商的对应的模型",
   "modelType": "deepseek-ai/DeepSeek-V3.2-Exp",
+  "?multimodalEnabled": "是否启用多模态模型模式,启用后将忽略文本模型",
+  "multimodalEnabled": false,
+  "?multimodalModel": "多模态模型名称",
+  "multimodalModel": "Qwen/Qwen2.5-VL-72B-Instruct",
   "?temperature": "聊天温度,可选0-2.0,温度越高创造性越高",
   "temperature": 1.2,
   "?concurrency": "最大同时聊天群数,一个群最多一个人聊天",
```
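For context, a minimal sketch of the relevant ai config block as the plugin would see it with the new switch turned on (the plugin reads it via ConfigControl.get('ai'); values illustrative, other keys omitted):

```js
// Illustrative: what ConfigControl.get('ai') would return with multimodal enabled.
const aiConfig = {
  modelType: 'deepseek-ai/DeepSeek-V3.2-Exp',      // text model, skipped when multimodal is on
  multimodalEnabled: true,                         // shipped default is false
  multimodalModel: 'Qwen/Qwen2.5-VL-72B-Instruct', // vision-capable model used instead
  temperature: 1.2,
};
```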
```diff
@@ -36,14 +36,39 @@ class AiCaller {
    * @param chatHistory 聊天历史
    * @param memories 记忆
    * @param e
+   * @param originalMessages 原始消息数组
    * @returns {Promise<{success: boolean, response: (*|string), rawResponse: (*|string)}|{success: boolean, error: string}|{success: boolean, error}>}
    */
-  async callAi(prompt, chatHistory = [], memories = [], e) {
+  async callAi(prompt, chatHistory = [], memories = [], e, originalMessages = []) {
     if (!this.isInitialized || !this.config) {
       logger.error('[crystelf-ai] 未初始化或配置无效');
       return { success: false, error: 'AI调用器未初始化' };
     }
+    try {
+      if (this.config.multimodalEnabled) {
+        return await this.callMultimodalAi(originalMessages, chatHistory, memories, e);
+      } else {
+        return await this.callTextAi(prompt, chatHistory, memories, e);
+      }
+    } catch (error) {
+      logger.error(`[crystelf-ai] 调用失败: ${error.message}`);
+      SessionManager.deactivateSession(e.group_id, e.user_id);
+      return {
+        success: false,
+        error: error.message,
+      };
+    }
+  }
 
+  /**
+   * 文本AI模型
+   * @param prompt 用户输入
+   * @param chatHistory 聊天历史
+   * @param memories 记忆
+   * @param e
+   * @returns {Promise<{success: boolean, response: (*|string), rawResponse: (*|string)}|{success: boolean, error: string}>}
+   */
+  async callTextAi(prompt, chatHistory = [], memories = [], e) {
     try {
       const fullPrompt = this.buildPrompt(prompt);
       const apiCaller = this.openaiChat;
```
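Putting the two halves together, the call site in apps/ai.js shown earlier now passes both forms of the message; roughly:

```js
// Sketch of the call made from callAiForResponse() in apps/ai.js after this change.
// With multimodalEnabled the text prompt is ignored in favour of originalMessages;
// otherwise the existing text-only path is used.
const aiResult = await AiCaller.callAi(
  messageData.text,               // flattened text prompt (text-model path)
  chatHistory,                    // prior turns as { role, content } pairs
  memories,                       // long-term memory entries
  e,                              // Yunzai event object
  messageData.originalMessages    // raw parts for the multimodal path
);
if (!aiResult.success) {
  logger.error(`[crystelf-ai] AI调用失败: ${aiResult.error}`);
}
```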
```diff
@@ -68,14 +93,96 @@ class AiCaller {
         };
       }
     } catch (error) {
-      logger.error(`[crystelf-ai] 调用失败: ${error.message}`);
-      SessionManager.deactivateSession(e.group_id, e.user_id);
-      return {
-        success: false,
-        error: error.message,
-      };
+      throw error;
     }
   }
 
+  /**
+   * 多模态AI调用
+   * @param originalMessages 原始消息数组
+   * @param chatHistory 聊天历史
+   * @param memories 记忆
+   * @param e
+   * @returns {Promise<{success: boolean, response: (*|string), rawResponse: (*|string)}|{success: boolean, error: string}>}
+   */
+  async callMultimodalAi(originalMessages, chatHistory = [], memories = [], e) {
+    try {
+      const messages = await this.formatMultimodalMessages(originalMessages, chatHistory, memories, e);
+      const apiCaller = this.openaiChat;
+      const result = await apiCaller.callAi({
+        messages: messages,
+        model: this.config.multimodalModel,
+        temperature: this.config.temperature,
+      });
+
+      if (result.success) {
+        return {
+          success: true,
+          response: result.aiResponse,
+          rawResponse: result.aiResponse,
+        };
+      } else {
+        return {
+          success: false,
+          error: '多模态AI调用失败',
+        };
+      }
+    } catch (error) {
+      throw error;
+    }
+  }
+
+  /**
+   * 将原始消息格式转换为多模态格式
+   * @param originalMessages 原始消息数组
+   * @param chatHistory 聊天历史
+   * @param memories 记忆
+   * @param e
+   * @returns {Array} 多模态格式的消息数组
+   */
+  async formatMultimodalMessages(originalMessages, chatHistory = [], memories = [], e) {
+    const messages = [];
+    const systemPrompt = await this.getSystemPrompt(e, memories);
+    messages.push({
+      role: 'system',
+      content: [
+        { type: 'text', text: systemPrompt }
+      ]
+    });
+    for (const history of chatHistory) {
+      const role = history.role === 'user' ? 'user' : 'assistant';
+      messages.push({
+        role,
+        content: [
+          { type: 'text', text: history.content }
+        ]
+      });
+    }
+    const mergedUserContent = [];
+    for (const msg of originalMessages) {
+      if (msg.type === 'text' && msg.content) {
+        mergedUserContent.push({
+          type: 'text',
+          text: msg.content
+        });
+      }
+
+      if (msg.type === 'image_url' && msg.image_url?.url) {
+        mergedUserContent.push({
+          type: 'image_url',
+          image_url: { url: msg.image_url.url }
+        });
+      }
+    }
+    if (mergedUserContent.length > 0) {
+      messages.push({
+        role: 'user',
+        content: mergedUserContent
+      });
+    }
+    return messages;
+  }
+
   /**
    * 构造完整的prompt
```
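For reference, formatMultimodalMessages builds an OpenAI-style chat payload in which every content field is an array of typed parts: the system prompt and each history turn are wrapped as single text parts, and the current message's text and image parts are merged into one user turn. A minimal sketch of its output (values illustrative):

```js
// Illustrative output of formatMultimodalMessages() for one text part and one image.
const messages = [
  { role: 'system', content: [{ type: 'text', text: '<system prompt>' }] },
  { role: 'user', content: [{ type: 'text', text: '<earlier user turn>' }] },
  { role: 'assistant', content: [{ type: 'text', text: '<earlier reply>' }] },
  {
    role: 'user',
    content: [
      { type: 'text', text: '[Alice,id:10001,seq:42]说:看看这张图\n' },
      { type: 'image_url', image_url: { url: 'https://example.com/pic.jpg' } },
    ],
  },
];
```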
```diff
@@ -22,18 +22,23 @@ class OpenaiChat {
    * @param model 模型
    * @param temperature 温度
    * @param customPrompt 提示词
+   * @param messages 多模态消息数组
    * @returns {Promise<{success: boolean, aiResponse: string}|{}>}
    */
-  async callAi({ prompt, chatHistory = [], model, temperature, customPrompt }) {
+  async callAi({ prompt, chatHistory = [], model, temperature, customPrompt, messages = [] }) {
     if (!this.openai) {
       logger.error('[crystelf-ai] ai未初始化..');
       return { success: false };
     }
+    let finalMessages;
+    if (messages.length > 0) {
+      finalMessages = messages;
+    } else {
     let systemMessage = {
       role: 'system',
       content: customPrompt || '',
     };
-    const messages = [
+    finalMessages = [
       systemMessage,
       ...chatHistory,
       {
```
```diff
@@ -41,15 +46,16 @@ class OpenaiChat {
         content: prompt,
       },
     ];
+    }
 
     try {
       // logger.info("[DEBUG] 请求体:", {
       //model: model,
-      // messages,
+      // messages: finalMessages,
       //});
 
       const completion = await this.openai.chat.completions.create({
-        messages: messages,
+        messages: finalMessages,
         model: model,
         temperature: temperature,
         frequency_penalty: 0.2,
```
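With this change OpenaiChat.callAi accepts either a plain prompt or a pre-built multimodal messages array; when messages is non-empty it is sent as-is and prompt/chatHistory/customPrompt are not used to build the payload. A rough sketch of the two call styles (option names as in the diff, values illustrative):

```js
// Text path: prompt, history and customPrompt are assembled into messages internally.
const textResult = await openaiChat.callAi({
  prompt: '[Alice,id:10001]说:你好\n',
  chatHistory,                        // prior { role, content } pairs
  model: aiConfig.modelType,
  temperature: aiConfig.temperature,
  customPrompt: systemPrompt,         // becomes the system message
});

// Multimodal path: a ready-made content-part array is passed straight through.
const mmResult = await openaiChat.callAi({
  messages: formattedMultimodalMessages, // e.g. from AiCaller.formatMultimodalMessages()
  model: aiConfig.multimodalModel,
  temperature: aiConfig.temperature,
});
```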