Compare commits

..

8 Commits

Author SHA1 Message Date
875ca65ed9 🚀 fix(openaiChat): simplify messages handling by removing unnecessary checks and defaults
 feat(aiCaller): enhance multimodal messages formatting for better processing of user content

 feat(ai): update image URL references for consistency across message handling
2025-11-30 13:02:22 +08:00
f2de86d421 Merge remote-tracking branch 'origin/main'
# Conflicts:
#	apps/music.js
2025-11-30 11:13:59 +08:00
8237606ed3 🧹 chore(apps/music.js): clean up group search after sending music results to improve functionality 2025-11-30 11:11:57 +08:00
dfd585409a feat(openaiChat): add support for multi-modal messages in AI chat function. 2025-11-30 11:10:54 +08:00
619f2a3295 🔧 fix(apps/music.js): simplify music result sending and sanitize filenames for better compatibility 2025-11-30 11:07:38 +08:00
Jerry
7a3df0af50
Merge pull request #11 from devil233-ui/patch-2
Sanitize filenames and enhance error handling
2025-11-30 11:05:25 +08:00
61a9462247 feat(apps/ai.js): enhance message processing to support multimodal inputs and improve user message extraction.
 feat(config/ai.json): add configuration options for multimodal model support.
 feat(lib/ai/aiCaller.js): implement multimodal AI call handling and formatting for diverse message types.
2025-11-30 10:50:12 +08:00
devil233
d0b6b939cd
Sanitize filenames and enhance error handling
Refactor music file handling to sanitize filenames and improve error logging.
2025-11-30 06:58:03 +08:00
5 changed files with 214 additions and 55 deletions

View File

@@ -91,11 +91,11 @@ async function index(e) {
if (e.user_id === e.bot.uin) {
return;
}
const userMessage = await extractUserMessage(e.msg, nickname, e);
if (!userMessage || userMessage.length === 0) {
const messageData = await extractUserMessage(e.msg, nickname, e);
if (!messageData || !messageData.text || messageData.text.length === 0) {
return e.reply(segment.image(await Meme.getMeme(aiConfig.character, 'default')));
}
const result = await processMessage(userMessage, e, aiConfig);
const result = await processMessage(messageData, e, aiConfig);
if (result && result.length > 0) {
await sendResponse(e, result);
}
@@ -113,6 +113,7 @@ async function extractUserMessage(msg, nickname, e) {
let at = [];
const aiConfig = await ConfigControl.get('ai');
const maxMessageLength = aiConfig?.maxMessageLength || 100;
const originalMessages = [];
e.message.forEach((message) => {
logger.info(message);
if (message.type === 'text' && message.text !== '' && message.text !== '\n'){
@@ -124,19 +125,34 @@ async function extractUserMessage(msg, nickname, e) {
text.push(displayText);
} else if (message.type === 'at') {
at.push(message.qq);
} else if (message.type === 'image') {
if (message.image) {
originalMessages.push({
type: 'image_url',
image_url: {
url: message.url
}
});
}
}
});
let returnMessage = '';
if (text.length > 0) {
text.forEach((message) => {
if(message === '') {
} else {
returnMessage += `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]说:${message}\n`
const tempMessage = `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]说:${message}\n`
returnMessage += tempMessage;
originalMessages.push({
type: 'text',
content: tempMessage
});
}
});
}
if(at.length == 1 && at[0] == e.bot.uin && text.length == 0){
return [];
return { text: [], originalMessages: originalMessages };
}
if (at.length > 0) {
for (const at1 of at) {
@@ -144,14 +160,15 @@ async function extractUserMessage(msg, nickname, e) {
//returnMessage += `[${e.sender?.nickname},id:${e.user_id}]@(at)了你,你的id是${at}\n`;
} else {
const atNickname = await e.group.pickMember(at1).nickname || '一个人';
returnMessage += `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]@(at)了${atNickname},id是${at1}\n`;
const tempMessage = `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]@(at)了${atNickname},id是${at1}\n`
returnMessage += tempMessage;
originalMessages.push({
type: 'text',
content: tempMessage
});
}
}
}
const imgUrls = await YunzaiUtils.getImages(e, 1, true);
if (imgUrls) {
returnMessage += `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]发送了一张图片(你可能暂时无法查看)\n`;
}
if(e.source || e.reply_id){
let reply;
if(e.getReply) reply = await e.getReply();
@@ -163,18 +180,33 @@ async function extractUserMessage(msg, nickname, e) {
const msgArr = Array.isArray(reply) ? reply : reply.message || [];
msgArr.forEach((msg) => {
if(msg.type === 'text'){
returnMessage += `[${e.sender?.nickname}]引用了[被引用消息:${reply.user_id == e.bot.uin ? '你' : reply.sender?.nickname},id:${reply.user_id},seq:${reply.message_id}]发的一段文本:${msg.text}\n`
const tempMessage = `[${e.sender?.nickname}]引用了[被引用消息:${reply.user_id == e.bot.uin ? '你' : reply.sender?.nickname},id:${reply.user_id},seq:${reply.message_id}]发的一段文本:${msg.text}\n`
returnMessage += tempMessage;
originalMessages.push({
type: 'text',
content: tempMessage
});
}
if(msg.type === 'image'){
returnMessage += `[${e.sender?.nickname}]引用了[被引用消息:${reply.user_id == e.bot.uin ? '你' : reply.sender?.nickname},id:${reply.user_id},seq:${reply.message_id}]发的一张图片(你可能暂时无法查看)\n`;
originalMessages.push({
type: 'image_url',
image_url: {
url: msg.url
}
});
}
})
}
}
return returnMessage;
const imgUrls = await YunzaiUtils.getImages(e, 1, true);
if (imgUrls) {
returnMessage += `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]发送了一张图片(你可能暂时无法查看)\n`;
}
return { text: returnMessage, originalMessages: originalMessages };
}
logger.warn('[crystelf-ai] 字符串匹配失败');
return [];
return { text: [], originalMessages: [] };
}
/**
@@ -202,11 +234,11 @@ async function processMessage(userMessage, e, aiConfig) {
/**
* 关键词模式
* @param userMessage
* @param messageData
* @param e
* @returns {Promise<[{type: string, data: string}]>}
*/
async function handleKeywordMode(userMessage, e) {
async function handleKeywordMode(messageData, e) {
const matchResult = await KeywordMatcher.matchKeywords(e.msg, 'ai');
if (matchResult && matchResult.matched) {
@@ -229,17 +261,17 @@ async function handleMixMode(userMessage, e, aiConfig) {
];
}
async function handleAiMode(userMessage, e, aiConfig) {
return await callAiForResponse(userMessage, e, aiConfig);
async function handleAiMode(messageData, e, aiConfig) {
return await callAiForResponse(messageData, e, aiConfig);
}
async function handleMixMode(userMessage, e, aiConfig) {
async function handleMixMode(messageData, e, aiConfig) {
const isTooLong = await KeywordMatcher.isMessageTooLong(e.msg);
if (isTooLong) {
//消息太长,使用AI回复
logger.info('[crystelf-ai] 消息过长,使用ai回复');
return await callAiForResponse(userMessage, e, aiConfig);
return await callAiForResponse(messageData, e, aiConfig);
} else {
const matchResult = await KeywordMatcher.matchKeywords(e.msg, 'ai');
if (matchResult && matchResult.matched) {
@@ -264,7 +296,7 @@ async function handleMixMode(userMessage, e, aiConfig) {
};
const newChatHistory = [
...chatHistory,
{ role: 'user', content: userMessage },
{ role: 'user', content: messageData.text },
{ role: 'assistant', content: JSON.stringify(resMessage) },
];
SessionManager.updateChatHistory(e.group_id, newChatHistory);
@@ -274,12 +306,12 @@ async function handleMixMode(userMessage, e, aiConfig) {
} else {
logger.info('[crystelf-ai] 关键词匹配失败,使用ai回复');
//关键词匹配失败,使用AI回复
return await callAiForResponse(userMessage, e, aiConfig);
return await callAiForResponse(messageData, e, aiConfig);
}
}
}
async function callAiForResponse(userMessage, e, aiConfig) {
async function callAiForResponse(messageData, e, aiConfig) {
try {
//创建session
const session = SessionManager.createOrGetSession(e.group_id, e.user_id, e);
@@ -299,7 +331,10 @@ async function callAiForResponse(userMessage, e, aiConfig) {
//构建聊天历史
const historyLen = aiConfig.chatHistory;
const chatHistory = session.chatHistory.slice(-historyLen | -10);
const aiResult = await AiCaller.callAi(userMessage, chatHistory, memories, e);
// 根据多模态开关决定调用方式
const aiResult = await AiCaller.callAi(messageData.text, chatHistory, memories, e, messageData.originalMessages);
if (!aiResult.success) {
logger.error(`[crystelf-ai] AI调用失败: ${aiResult.error}`);
SessionManager.deactivateSession(e.group_id, e.user_id);
@@ -313,14 +348,14 @@ async function callAiForResponse(userMessage, e, aiConfig) {
//处理响应
const processedResponse = await ResponseHandler.processResponse(
aiResult.response,
userMessage,
messageData.text,
e.group_id,
e.user_id
);
//更新session
const newChatHistory = [
...chatHistory,
{ role: 'user', content: userMessage },
{ role: 'user', content: messageData.text },
{ role: 'assistant', content: aiResult.response },
];
SessionManager.updateChatHistory(e.group_id, newChatHistory);

View File

@@ -133,10 +133,10 @@ export class CrystelfMusic extends plugin {
}
const adapter = await YunzaiUtils.getAdapter(e);
await Message.emojiLike(e,e.message_id,60,e.group_id,adapter);
await musicSearch.clearGroupSearch(e.group_id);
const result = await musicSearch.handleSelection(e, index);
if (result.success) {
await this.sendMusicResult(e, result);
await musicSearch.clearGroupSearch(e.group_id);
} else {
await e.reply(`${result.message}`, true);
}
@ -159,15 +159,22 @@ export class CrystelfMusic extends plugin {
await Group.sendGroupRecord(e, e.group_id, `file://${audioFile}`, adapter);
} else {
const extension = await this.getFileExtension();
const sanitizedTitle = song.displayTitle.replace(/\s+/g, '_');
const sanitizedArtist = song.displayArtist.replace(/\s+/g, '_');
// 过滤非法字符
const sanitize = (str) => str.replace(/[\\/:*?"<>|]/g, '').replace(/\s+/g, '_');
const sanitizedTitle = sanitize(song.displayTitle);
const sanitizedArtist = sanitize(song.displayArtist);
const filename = `${sanitizedTitle} - ${sanitizedArtist}.${extension}`;
await Group.sendGroupFile(e, e.group_id, `file://${audioFile}`, filename, adapter);
try {
await Group.sendGroupFile(e, e.group_id, `file://${audioFile}`, filename, adapter);
} catch (fileErr) {
logger.warn(`[crystelf-music] 文件发送失败,尝试转为语音: ${fileErr.message}`);
await Group.sendGroupRecord(e, e.group_id, `file://${audioFile}`, adapter);
}
}
musicSearch.clearUserSelection(e.group_id, e.user_id);
logger.info(`[crystelf-music] 音乐发送成功: ${song.displayTitle}`);
} catch (error) {
logger.error('[crystelf-music] 发送音乐结果失败:', error);
logger.error('[crystelf-music] 发送音乐失败:', error);
await e.reply('发送音乐失败,请稍后重试', true);
}
}
@@ -177,7 +184,7 @@ export class CrystelfMusic extends plugin {
* @returns {string} 文件扩展名
*/
async getFileExtension() {
const musicConfig =await ConfigControl.get('music');
const musicConfig = await ConfigControl.get('music');
//if(musicConfig.quality === '3'){
//return 'flac'
//}

View File

@@ -8,6 +8,10 @@
"apiKey": "",
"?modelType": "模型名称,请根据baseApi填写的服务商的对应的模型",
"modelType": "deepseek-ai/DeepSeek-V3.2-Exp",
"?multimodalEnabled": "是否启用多模态模型模式,启用后将忽略文本模型",
"multimodalEnabled": false,
"?multimodalModel": "多模态模型名称",
"multimodalModel": "Qwen/Qwen2.5-VL-72B-Instruct",
"?temperature": "聊天温度,可选0-2.0,温度越高创造性越高",
"temperature": 1.2,
"?concurrency": "最大同时聊天群数,一个群最多一个人聊天",

View File

@@ -36,14 +36,39 @@ class AiCaller {
* @param chatHistory 聊天历史
* @param memories 记忆
* @param e
* @param originalMessages 原始消息数组
* @returns {Promise<{success: boolean, response: (*|string), rawResponse: (*|string)}|{success: boolean, error: string}|{success: boolean, error}>}
*/
async callAi(prompt, chatHistory = [], memories = [], e) {
async callAi(prompt, chatHistory = [], memories = [], e, originalMessages = []) {
if (!this.isInitialized || !this.config) {
logger.error('[crystelf-ai] 未初始化或配置无效');
return { success: false, error: 'AI调用器未初始化' };
}
try {
if (this.config.multimodalEnabled) {
return await this.callMultimodalAi(originalMessages, chatHistory, memories, e);
} else {
return await this.callTextAi(prompt, chatHistory, memories, e);
}
} catch (error) {
logger.error(`[crystelf-ai] 调用失败: ${error.message}`);
SessionManager.deactivateSession(e.group_id, e.user_id);
return {
success: false,
error: error.message,
};
}
}
/**
* 文本AI模型
* @param prompt 用户输入
* @param chatHistory 聊天历史
* @param memories 记忆
* @param e
* @returns {Promise<{success: boolean, response: (*|string), rawResponse: (*|string)}|{success: boolean, error: string}>}
*/
async callTextAi(prompt, chatHistory = [], memories = [], e) {
try {
const fullPrompt = this.buildPrompt(prompt);
const apiCaller = this.openaiChat;
@@ -52,7 +77,7 @@ class AiCaller {
chatHistory: chatHistory,
model: this.config.modelType,
temperature: this.config.temperature,
customPrompt: await this.getSystemPrompt(e,memories),
customPrompt: await this.getSystemPrompt(e, memories),
});
if (result.success) {
@@ -68,15 +93,97 @@ class AiCaller {
};
}
} catch (error) {
logger.error(`[crystelf-ai] 调用失败: ${error.message}`);
SessionManager.deactivateSession(e.group_id, e.user_id);
return {
success: false,
error: error.message,
};
throw error;
}
}
/**
* 多模态AI调用
* @param originalMessages 原始消息数组
* @param chatHistory 聊天历史
* @param memories 记忆
* @param e
* @returns {Promise<{success: boolean, response: (*|string), rawResponse: (*|string)}|{success: boolean, error: string}>}
*/
async callMultimodalAi(originalMessages, chatHistory = [], memories = [], e) {
try {
const messages = await this.formatMultimodalMessages(originalMessages, chatHistory, memories, e);
const apiCaller = this.openaiChat;
const result = await apiCaller.callAi({
messages: messages,
model: this.config.multimodalModel,
temperature: this.config.temperature,
});
if (result.success) {
return {
success: true,
response: result.aiResponse,
rawResponse: result.aiResponse,
};
} else {
return {
success: false,
error: '多模态AI调用失败',
};
}
} catch (error) {
throw error;
}
}
/**
* 将原始消息格式转换为多模态格式
* @param originalMessages 原始消息数组
* @param chatHistory 聊天历史
* @param memories 记忆
* @param e
* @returns {Array} 多模态格式的消息数组
*/
async formatMultimodalMessages(originalMessages, chatHistory = [], memories = [], e) {
const messages = [];
const systemPrompt = await this.getSystemPrompt(e, memories);
messages.push({
role: 'system',
content: [
{ type: 'text', text: systemPrompt }
]
});
for (const history of chatHistory) {
const role = history.role === 'user' ? 'user' : 'assistant';
messages.push({
role,
content: [
{ type: 'text', text: history.content }
]
});
}
const mergedUserContent = [];
for (const msg of originalMessages) {
if (msg.type === 'text' && msg.content) {
mergedUserContent.push({
type: 'text',
text: msg.content
});
}
if (msg.type === 'image_url' && msg.image_url?.url) {
mergedUserContent.push({
type: 'image_url',
image_url: { url: msg.image_url.url }
});
}
}
if (mergedUserContent.length > 0) {
messages.push({
role: 'user',
content: mergedUserContent
});
}
return messages;
}
/**
* 构造完整的prompt
* @param prompt

View File

@@ -22,34 +22,40 @@ class OpenaiChat {
* @param model 模型
* @param temperature 温度
* @param customPrompt 提示词
* @param messages 多模态消息数组
* @returns {Promise<{success: boolean, aiResponse: string}|{}>}
*/
async callAi({ prompt, chatHistory = [], model, temperature, customPrompt }) {
async callAi({ prompt, chatHistory = [], model, temperature, customPrompt, messages = [] }) {
if (!this.openai) {
logger.error('[crystelf-ai] ai未初始化..');
return { success: false };
}
let systemMessage = {
role: 'system',
content: customPrompt || '',
};
const messages = [
systemMessage,
...chatHistory,
{
role: 'user',
content: prompt,
},
];
let finalMessages;
if (messages.length > 0) {
finalMessages = messages;
} else {
let systemMessage = {
role: 'system',
content: customPrompt || '',
};
finalMessages = [
systemMessage,
...chatHistory,
{
role: 'user',
content: prompt,
},
];
}
try {
// logger.info("[DEBUG] 请求体:", {
//model: model,
// messages,
// messages: finalMessages,
//});
const completion = await this.openai.chat.completions.create({
messages: messages,
messages: finalMessages,
model: model,
temperature: temperature,
frequency_penalty: 0.2,