Compare commits

875ca65ed9c66c0a74936e0a517906cb2fa91208..a7e701cbb25866cc47355a75567428e229626bc0

No commits in common. "875ca65ed9c66c0a74936e0a517906cb2fa91208" and "a7e701cbb25866cc47355a75567428e229626bc0" have entirely different histories.

5 changed files with 55 additions and 214 deletions

View File

@@ -91,11 +91,11 @@ async function index(e) {
   if (e.user_id === e.bot.uin) {
     return;
   }
-  const messageData = await extractUserMessage(e.msg, nickname, e);
-  if (!messageData || !messageData.text || messageData.text.length === 0) {
+  const userMessage = await extractUserMessage(e.msg, nickname, e);
+  if (!userMessage || userMessage.length === 0) {
     return e.reply(segment.image(await Meme.getMeme(aiConfig.character, 'default')));
   }
-  const result = await processMessage(messageData, e, aiConfig);
+  const result = await processMessage(userMessage, e, aiConfig);
   if (result && result.length > 0) {
     await sendResponse(e, result);
   }
@@ -113,7 +113,6 @@ async function extractUserMessage(msg, nickname, e) {
     let at = [];
     const aiConfig = await ConfigControl.get('ai');
     const maxMessageLength = aiConfig?.maxMessageLength || 100;
-    const originalMessages = [];
     e.message.forEach((message) => {
       logger.info(message);
       if (message.type === 'text' && message.text !== '' && message.text !== '\n'){
@@ -125,34 +124,19 @@ async function extractUserMessage(msg, nickname, e) {
         text.push(displayText);
       } else if (message.type === 'at') {
         at.push(message.qq);
-      } else if (message.type === 'image') {
-        if (message.image) {
-          originalMessages.push({
-            type: 'image_url',
-            image_url: {
-              url: message.url
-            }
-          });
-        }
       }
     });
     let returnMessage = '';
     if (text.length > 0) {
       text.forEach((message) => {
         if(message === '') {
         } else {
-          const tempMessage = `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]说:${message}\n`
-          returnMessage += tempMessage;
-          originalMessages.push({
-            type: 'text',
-            content: tempMessage
-          });
+          returnMessage += `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]说:${message}\n`
         }
       });
     }
     if(at.length == 1 && at[0] == e.bot.uin && text.length == 0){
-      return { text: [], originalMessages: originalMessages };
+      return [];
     }
     if (at.length > 0) {
       for (const at1 of at) {
@@ -160,15 +144,14 @@ async function extractUserMessage(msg, nickname, e) {
           //returnMessage += `[${e.sender?.nickname},id:${e.user_id}]@(at)了你,你的id是${at}\n`;
         } else {
           const atNickname = await e.group.pickMember(at1).nickname || '一个人';
-          const tempMessage = `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]@(at)了${atNickname},id是${at1}\n`
-          returnMessage += tempMessage;
-          originalMessages.push({
-            type: 'text',
-            content: tempMessage
-          });
+          returnMessage += `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]@(at)了${atNickname},id是${at1}\n`;
         }
       }
     }
+    const imgUrls = await YunzaiUtils.getImages(e, 1, true);
+    if (imgUrls) {
+      returnMessage += `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]发送了一张图片(你可能暂时无法查看)\n`;
+    }
     if(e.source || e.reply_id){
       let reply;
       if(e.getReply) reply = await e.getReply();
@@ -180,33 +163,18 @@ async function extractUserMessage(msg, nickname, e) {
         const msgArr = Array.isArray(reply) ? reply : reply.message || [];
         msgArr.forEach((msg) => {
           if(msg.type === 'text'){
-            const tempMessage = `[${e.sender?.nickname}]引用了[被引用消息:${reply.user_id == e.bot.uin ? '你' : reply.sender?.nickname},id:${reply.user_id},seq:${reply.message_id}]发的一段文本:${msg.text}\n`
-            returnMessage += tempMessage;
-            originalMessages.push({
-              type: 'text',
-              content: tempMessage
-            });
+            returnMessage += `[${e.sender?.nickname}]引用了[被引用消息:${reply.user_id == e.bot.uin ? '你' : reply.sender?.nickname},id:${reply.user_id},seq:${reply.message_id}]发的一段文本:${msg.text}\n`
           }
           if(msg.type === 'image'){
             returnMessage += `[${e.sender?.nickname}]引用了[被引用消息:${reply.user_id == e.bot.uin ? '你' : reply.sender?.nickname},id:${reply.user_id},seq:${reply.message_id}]发的一张图片(你可能暂时无法查看)\n`;
-            originalMessages.push({
-              type: 'image_url',
-              image_url: {
-                url: msg.url
-              }
-            });
           }
         })
       }
     }
-    const imgUrls = await YunzaiUtils.getImages(e, 1, true);
-    if (imgUrls) {
-      returnMessage += `[${e.sender?.nickname},id:${e.user_id},seq:${e.message_id}]发送了一张图片(你可能暂时无法查看)\n`;
-    }
-    return { text: returnMessage, originalMessages: originalMessages };
+    return returnMessage;
   }
   logger.warn('[crystelf-ai] 字符串匹配失败');
-  return { text: [], originalMessages: [] };
+  return [];
 }
 /**
@@ -234,11 +202,11 @@ async function processMessage(userMessage, e, aiConfig) {
 /**
  * 关键词模式
- * @param messageData
+ * @param userMessage
  * @param e
  * @returns {Promise<[{type: string, data: string}]>}
  */
-async function handleKeywordMode(messageData, e) {
+async function handleKeywordMode(userMessage, e) {
   const matchResult = await KeywordMatcher.matchKeywords(e.msg, 'ai');
   if (matchResult && matchResult.matched) {
@@ -261,17 +229,17 @@ async function handleKeywordMode(messageData, e) {
   ];
 }
-async function handleAiMode(messageData, e, aiConfig) {
-  return await callAiForResponse(messageData, e, aiConfig);
+async function handleAiMode(userMessage, e, aiConfig) {
+  return await callAiForResponse(userMessage, e, aiConfig);
 }
-async function handleMixMode(messageData, e, aiConfig) {
+async function handleMixMode(userMessage, e, aiConfig) {
   const isTooLong = await KeywordMatcher.isMessageTooLong(e.msg);
   if (isTooLong) {
     //消息太长,使用AI回复
     logger.info('[crystelf-ai] 消息过长,使用ai回复');
-    return await callAiForResponse(messageData, e, aiConfig);
+    return await callAiForResponse(userMessage, e, aiConfig);
   } else {
     const matchResult = await KeywordMatcher.matchKeywords(e.msg, 'ai');
     if (matchResult && matchResult.matched) {
@@ -296,7 +264,7 @@ async function handleMixMode(messageData, e, aiConfig) {
       };
       const newChatHistory = [
         ...chatHistory,
-        { role: 'user', content: messageData.text },
+        { role: 'user', content: userMessage },
         { role: 'assistant', content: JSON.stringify(resMessage) },
       ];
       SessionManager.updateChatHistory(e.group_id, newChatHistory);
@@ -306,12 +274,12 @@ async function handleMixMode(messageData, e, aiConfig) {
     } else {
       logger.info('[crystelf-ai] 关键词匹配失败,使用ai回复');
       //关键词匹配失败,使用AI回复
-      return await callAiForResponse(messageData, e, aiConfig);
+      return await callAiForResponse(userMessage, e, aiConfig);
     }
   }
 }
-async function callAiForResponse(messageData, e, aiConfig) {
+async function callAiForResponse(userMessage, e, aiConfig) {
   try {
     //创建session
     const session = SessionManager.createOrGetSession(e.group_id, e.user_id, e);
@@ -331,10 +299,7 @@ async function callAiForResponse(messageData, e, aiConfig) {
     //构建聊天历史
     const historyLen = aiConfig.chatHistory;
     const chatHistory = session.chatHistory.slice(-historyLen | -10);
-    // 根据多模态开关决定调用方式
-    const aiResult = await AiCaller.callAi(messageData.text, chatHistory, memories, e, messageData.originalMessages);
+    const aiResult = await AiCaller.callAi(userMessage, chatHistory, memories, e);
     if (!aiResult.success) {
       logger.error(`[crystelf-ai] AI调用失败: ${aiResult.error}`);
       SessionManager.deactivateSession(e.group_id, e.user_id);
@@ -348,14 +313,14 @@ async function callAiForResponse(messageData, e, aiConfig) {
     //处理响应
     const processedResponse = await ResponseHandler.processResponse(
       aiResult.response,
-      messageData.text,
+      userMessage,
       e.group_id,
       e.user_id
     );
     //更新session
     const newChatHistory = [
       ...chatHistory,
-      { role: 'user', content: messageData.text },
+      { role: 'user', content: userMessage },
       { role: 'assistant', content: aiResult.response },
     ];
     SessionManager.updateChatHistory(e.group_id, newChatHistory);

View File

@@ -133,10 +133,10 @@ export class CrystelfMusic extends plugin {
     }
     const adapter = await YunzaiUtils.getAdapter(e);
     await Message.emojiLike(e,e.message_id,60,e.group_id,adapter);
-    await musicSearch.clearGroupSearch(e.group_id);
     const result = await musicSearch.handleSelection(e, index);
     if (result.success) {
       await this.sendMusicResult(e, result);
+      await musicSearch.clearGroupSearch(e.group_id);
     } else {
       await e.reply(`${result.message}`, true);
     }
@@ -159,22 +159,15 @@ export class CrystelfMusic extends plugin {
         await Group.sendGroupRecord(e, e.group_id, `file://${audioFile}`, adapter);
       } else {
         const extension = await this.getFileExtension();
-        // 过滤非法字符
-        const sanitize = (str) => str.replace(/[\\/:*?"<>|]/g, '').replace(/\s+/g, '_');
-        const sanitizedTitle = sanitize(song.displayTitle);
-        const sanitizedArtist = sanitize(song.displayArtist);
+        const sanitizedTitle = song.displayTitle.replace(/\s+/g, '_');
+        const sanitizedArtist = song.displayArtist.replace(/\s+/g, '_');
         const filename = `${sanitizedTitle} - ${sanitizedArtist}.${extension}`;
-        try {
-          await Group.sendGroupFile(e, e.group_id, `file://${audioFile}`, filename, adapter);
-        } catch (fileErr) {
-          logger.warn(`[crystelf-music] 文件发送失败,尝试转为语音: ${fileErr.message}`);
-          await Group.sendGroupRecord(e, e.group_id, `file://${audioFile}`, adapter);
-        }
+        await Group.sendGroupFile(e, e.group_id, `file://${audioFile}`, filename, adapter);
       }
       musicSearch.clearUserSelection(e.group_id, e.user_id);
-      logger.info(`[crystelf-music] 音乐发送成功: ${song.displayTitle}`);
     } catch (error) {
-      logger.error('[crystelf-music] 发送音乐失败:', error);
+      logger.error('[crystelf-music] 发送音乐结果失败:', error);
       await e.reply('发送音乐失败,请稍后重试', true);
     }
   }
@@ -184,7 +177,7 @@ export class CrystelfMusic extends plugin {
   * @returns {string} 文件扩展名
   */
  async getFileExtension() {
-    const musicConfig = await ConfigControl.get('music');
+    const musicConfig =await ConfigControl.get('music');
    //if(musicConfig.quality === '3'){
      //return 'flac'
    //}

View File

@@ -8,10 +8,6 @@
   "apiKey": "",
   "?modelType": "模型名称,请根据baseApi填写的服务商的对应的模型",
   "modelType": "deepseek-ai/DeepSeek-V3.2-Exp",
-  "?multimodalEnabled": "是否启用多模态模型模式,启用后将忽略文本模型",
-  "multimodalEnabled": false,
-  "?multimodalModel": "多模态模型名称",
-  "multimodalModel": "Qwen/Qwen2.5-VL-72B-Instruct",
   "?temperature": "聊天温度,可选0-2.0,温度越高创造性越高",
   "temperature": 1.2,
   "?concurrency": "最大同时聊天群数,一个群最多一个人聊天",

View File

@@ -36,39 +36,14 @@ class AiCaller {
    * @param chatHistory 聊天历史
    * @param memories 记忆
    * @param e
-   * @param originalMessages 原始消息数组
    * @returns {Promise<{success: boolean, response: (*|string), rawResponse: (*|string)}|{success: boolean, error: string}|{success: boolean, error}>}
    */
-  async callAi(prompt, chatHistory = [], memories = [], e, originalMessages = []) {
+  async callAi(prompt, chatHistory = [], memories = [], e) {
     if (!this.isInitialized || !this.config) {
       logger.error('[crystelf-ai] 未初始化或配置无效');
       return { success: false, error: 'AI调用器未初始化' };
     }
-    try {
-      if (this.config.multimodalEnabled) {
-        return await this.callMultimodalAi(originalMessages, chatHistory, memories, e);
-      } else {
-        return await this.callTextAi(prompt, chatHistory, memories, e);
-      }
-    } catch (error) {
-      logger.error(`[crystelf-ai] 调用失败: ${error.message}`);
-      SessionManager.deactivateSession(e.group_id, e.user_id);
-      return {
-        success: false,
-        error: error.message,
-      };
-    }
-  }
-  /**
-   * 文本AI模型
-   * @param prompt 用户输入
-   * @param chatHistory 聊天历史
-   * @param memories 记忆
-   * @param e
-   * @returns {Promise<{success: boolean, response: (*|string), rawResponse: (*|string)}|{success: boolean, error: string}>}
-   */
-  async callTextAi(prompt, chatHistory = [], memories = [], e) {
     try {
       const fullPrompt = this.buildPrompt(prompt);
       const apiCaller = this.openaiChat;
@@ -77,7 +52,7 @@ class AiCaller {
         chatHistory: chatHistory,
         model: this.config.modelType,
         temperature: this.config.temperature,
-        customPrompt: await this.getSystemPrompt(e, memories),
+        customPrompt: await this.getSystemPrompt(e,memories),
       });
       if (result.success) {
@@ -93,97 +68,15 @@ class AiCaller {
         };
       }
     } catch (error) {
-      throw error;
+      logger.error(`[crystelf-ai] 调用失败: ${error.message}`);
+      SessionManager.deactivateSession(e.group_id, e.user_id);
+      return {
+        success: false,
+        error: error.message,
+      };
     }
   }
-  /**
-   * 多模态AI调用
-   * @param originalMessages 原始消息数组
-   * @param chatHistory 聊天历史
-   * @param memories 记忆
-   * @param e
-   * @returns {Promise<{success: boolean, response: (*|string), rawResponse: (*|string)}|{success: boolean, error: string}>}
-   */
-  async callMultimodalAi(originalMessages, chatHistory = [], memories = [], e) {
-    try {
-      const messages = await this.formatMultimodalMessages(originalMessages, chatHistory, memories, e);
-      const apiCaller = this.openaiChat;
-      const result = await apiCaller.callAi({
-        messages: messages,
-        model: this.config.multimodalModel,
-        temperature: this.config.temperature,
-      });
-      if (result.success) {
-        return {
-          success: true,
-          response: result.aiResponse,
-          rawResponse: result.aiResponse,
-        };
-      } else {
-        return {
-          success: false,
-          error: '多模态AI调用失败',
-        };
-      }
-    } catch (error) {
-      throw error;
-    }
-  }
-  /**
-   * 将原始消息格式转换为多模态格式
-   * @param originalMessages 原始消息数组
-   * @param chatHistory 聊天历史
-   * @param memories 记忆
-   * @param e
-   * @returns {Array} 多模态格式的消息数组
-   */
-  async formatMultimodalMessages(originalMessages, chatHistory = [], memories = [], e) {
-    const messages = [];
-    const systemPrompt = await this.getSystemPrompt(e, memories);
-    messages.push({
-      role: 'system',
-      content: [
-        { type: 'text', text: systemPrompt }
-      ]
-    });
-    for (const history of chatHistory) {
-      const role = history.role === 'user' ? 'user' : 'assistant';
-      messages.push({
-        role,
-        content: [
-          { type: 'text', text: history.content }
-        ]
-      });
-    }
-    const mergedUserContent = [];
-    for (const msg of originalMessages) {
-      if (msg.type === 'text' && msg.content) {
-        mergedUserContent.push({
-          type: 'text',
-          text: msg.content
-        });
-      }
-      if (msg.type === 'image_url' && msg.image_url?.url) {
-        mergedUserContent.push({
-          type: 'image_url',
-          image_url: { url: msg.image_url.url }
-        });
-      }
-    }
-    if (mergedUserContent.length > 0) {
-      messages.push({
-        role: 'user',
-        content: mergedUserContent
-      });
-    }
-    return messages;
-  }
   /**
    * 构造完整的prompt
    * @param prompt

View File

@@ -22,40 +22,34 @@ class OpenaiChat {
    * @param model 模型
    * @param temperature 温度
    * @param customPrompt 提示词
-   * @param messages 多模态消息数组
    * @returns {Promise<{success: boolean, aiResponse: string}|{}>}
    */
-  async callAi({ prompt, chatHistory = [], model, temperature, customPrompt, messages = [] }) {
+  async callAi({ prompt, chatHistory = [], model, temperature, customPrompt }) {
     if (!this.openai) {
       logger.error('[crystelf-ai] ai未初始化..');
       return { success: false };
     }
-    let finalMessages;
-    if (messages.length > 0) {
-      finalMessages = messages;
-    } else {
-      let systemMessage = {
-        role: 'system',
-        content: customPrompt || '',
-      };
-      finalMessages = [
-        systemMessage,
-        ...chatHistory,
-        {
-          role: 'user',
-          content: prompt,
-        },
-      ];
-    }
+    let systemMessage = {
+      role: 'system',
+      content: customPrompt || '',
+    };
+    const messages = [
+      systemMessage,
+      ...chatHistory,
+      {
+        role: 'user',
+        content: prompt,
+      },
+    ];
     try {
       // logger.info("[DEBUG] 请求体:", {
       //model: model,
-      // messages: finalMessages,
+      // messages,
       //});
       const completion = await this.openai.chat.completions.create({
-        messages: finalMessages,
+        messages: messages,
         model: model,
         temperature: temperature,
         frequency_penalty: 0.2,