Compare commits

..

No commits in common. "23f6013e19df4589447f3d5ab2afd04cc98d5b2a" and "894dba0aa05b0ce5cc89c63c85e2167be2bb01e8" have entirely different histories.

4 changed files with 14 additions and 259 deletions

View File

@ -352,23 +352,10 @@ async function callAiForResponse(messageData, e, aiConfig) {
e.user_id
);
//更新session
let userMessageContent, assistantMessageContent;
const usedMultimodal = aiConfig.multimodalEnabled &&
(!aiConfig.smartMultimodal || messageData.originalMessages?.some(msg => msg.type === 'image_url'));
if (usedMultimodal && messageData.originalMessages) {
userMessageContent = messageData.originalMessages.map(msg => {
if (msg.type === 'text') return msg.content;
if (msg.type === 'image_url') return `[图片消息]`;
}).filter(Boolean).join('');
} else {
userMessageContent = messageData.text;
}
assistantMessageContent = aiResult.response;
const newChatHistory = [
...chatHistory,
{ role: 'user', content: userMessageContent },
{ role: 'assistant', content: assistantMessageContent },
{ role: 'user', content: messageData.text },
{ role: 'assistant', content: aiResult.response },
];
SessionManager.updateChatHistory(e.group_id, newChatHistory);
SessionManager.deactivateSession(e.group_id, e.user_id);

View File

@ -10,8 +10,6 @@
"modelType": "deepseek-ai/DeepSeek-V3.2-Exp",
"?multimodalEnabled": "是否启用多模态模型模式,启用后将忽略文本模型",
"multimodalEnabled": false,
"?smartMultimodal": "智能多模态模式,开启时只有文字用文本模型,有图片才用多模态模型",
"smartMultimodal": false,
"?multimodalModel": "多模态模型名称",
"multimodalModel": "Qwen/Qwen2.5-VL-72B-Instruct",
"?temperature": "聊天温度,可选0-2.0,温度越高创造性越高",

View File

@ -2,7 +2,6 @@ import ConfigControl from '../config/configControl.js';
import OpenaiChat from '../../modules/openai/openaiChat.js';
import { getSystemPrompt } from '../../constants/ai/prompts.js';
import SessionManager from "./sessionManager.js";
import UserConfigManager from './userConfigManager.js';
//ai调用器
class AiCaller {
@ -10,7 +9,6 @@ class AiCaller {
this.openaiChat = new OpenaiChat();
this.isInitialized = false;
this.config = null;
this.userOpenaiInstances = new Map();
}
/**
@ -24,7 +22,6 @@ class AiCaller {
return;
}
this.openaiChat.init(this.config.apiKey, this.config.baseApi);
await UserConfigManager.init();
this.isInitialized = true;
logger.info('[crystelf-ai] 初始化完成');
@ -47,26 +44,11 @@ class AiCaller {
logger.error('[crystelf-ai] 未初始化或配置无效');
return { success: false, error: 'AI调用器未初始化' };
}
try {
const userId = e.user_id;
const userConfig = await UserConfigManager.getUserConfig(String(userId));
logger.info(`[crystelf-ai] 用户 ${userId} 使用配置 - 智能多模态: ${userConfig.smartMultimodal}, 多模态启用: ${userConfig.multimodalEnabled}`);
if (userConfig.smartMultimodal && userConfig.multimodalEnabled) {
const hasImage = originalMessages.some(msg => msg.type === 'image_url');
logger.info(`[crystelf-ai] 智能多模态模式 - 检测到图片: ${hasImage}, 消息类型统计: ${JSON.stringify(originalMessages.map(msg => msg.type))}`);
if (hasImage) {
logger.info('[crystelf-ai] 检测到图片,使用多模态模型');
return await this.callMultimodalAi(originalMessages, chatHistory, memories, e, userConfig);
if (this.config.multimodalEnabled) {
return await this.callMultimodalAi(originalMessages, chatHistory, memories, e);
} else {
logger.info('[crystelf-ai] 纯文本消息,使用文本模型');
return await this.callTextAi(prompt, chatHistory, memories, e, userConfig);
}
} else if (userConfig.multimodalEnabled) {
return await this.callMultimodalAi(originalMessages, chatHistory, memories, e, userConfig);
} else {
return await this.callTextAi(prompt, chatHistory, memories, e, userConfig);
return await this.callTextAi(prompt, chatHistory, memories, e);
}
} catch (error) {
logger.error(`[crystelf-ai] 调用失败: ${error.message}`);
@ -84,25 +66,17 @@ class AiCaller {
* @param chatHistory 聊天历史
* @param memories 记忆
* @param e
* @param userConfig 用户特定配置
* @returns {Promise<{success: boolean, response: (*|string), rawResponse: (*|string)}|{success: boolean, error: string}>}
*/
async callTextAi(prompt, chatHistory = [], memories = [], e, userConfig = null) {
async callTextAi(prompt, chatHistory = [], memories = [], e) {
try {
const config = userConfig || this.config;
const fullPrompt = this.buildPrompt(prompt);
const apiCaller = await this.getUserOpenaiInstance(e.user_id, config);
const formattedChatHistory = chatHistory.map(msg => ({
role: msg.role,
content: msg.content
}));
const apiCaller = this.openaiChat;
const result = await apiCaller.callAi({
prompt: fullPrompt,
chatHistory: formattedChatHistory,
model: config.modelType,
temperature: config.temperature,
chatHistory: chatHistory,
model: this.config.modelType,
temperature: this.config.temperature,
customPrompt: await this.getSystemPrompt(e, memories),
});
@ -129,18 +103,16 @@ class AiCaller {
* @param chatHistory 聊天历史
* @param memories 记忆
* @param e
* @param userConfig 用户特定配置
* @returns {Promise<{success: boolean, response: (*|string), rawResponse: (*|string)}|{success: boolean, error: string}>}
*/
async callMultimodalAi(originalMessages, chatHistory = [], memories = [], e, userConfig = null) {
async callMultimodalAi(originalMessages, chatHistory = [], memories = [], e) {
try {
const config = userConfig || this.config;
const messages = await this.formatMultimodalMessages(originalMessages, chatHistory, memories, e);
const apiCaller = await this.getUserOpenaiInstance(e.user_id, config);
const apiCaller = this.openaiChat;
const result = await apiCaller.callAi({
messages: messages,
model: config.multimodalModel,
temperature: config.temperature,
model: this.config.multimodalModel,
temperature: this.config.temperature,
});
if (result.success) {
@ -257,29 +229,6 @@ class AiCaller {
return result || '刚刚';
}
/**
 * Get (or lazily create) the OpenAI client instance for a specific user.
 * Reuses the shared global instance when the user's apiKey/baseApi match the
 * global config; otherwise creates and caches one instance per
 * userId+apiKey+baseApi combination so repeated calls don't re-init clients.
 * @param {string} userId - user QQ number
 * @param {Object} config - merged user config (must carry apiKey and baseApi)
 * @returns {Promise<OpenaiChat>} the OpenAI client to use for this user
 */
async getUserOpenaiInstance(userId, config) {
// Credentials identical to the global config -> no private instance needed.
if (config.apiKey === this.config.apiKey && config.baseApi === this.config.baseApi) {
logger.info(`[crystelf-ai] 用户 ${userId} 使用全局OpenAI实例`);
return this.openaiChat;
}
// NOTE(review): the cache key embeds the raw apiKey — keep it out of logs.
const cacheKey = `${userId}_${config.apiKey}_${config.baseApi}`;
if (this.userOpenaiInstances.has(cacheKey)) {
logger.info(`[crystelf-ai] 用户 ${userId} 使用缓存的OpenAI实例`);
return this.userOpenaiInstances.get(cacheKey);
}
// First use of these credentials: create, init, cache, and hand back a new client.
const userOpenaiChat = new OpenaiChat();
userOpenaiChat.init(config.apiKey, config.baseApi);
this.userOpenaiInstances.set(cacheKey, userOpenaiChat);
logger.info(`[crystelf-ai] 为用户 ${userId} 创建新的OpenAI实例`);
return userOpenaiChat;
}
/**
* 获取系统提示词
* @param {object} e 上下文事件对象

View File

@ -1,179 +0,0 @@
import fs from 'fs/promises';
import path from 'path';
import ConfigControl from '../config/configControl.js';
/**
* 用户AI配置管理器
* 处理每个用户的独立AI配置支持用户自定义API密钥模型等设置
*/
/**
 * Per-user AI configuration manager.
 * Each user's overrides live in data/crystelf/ai/<userId>/ai.json and are
 * merged over the global 'ai' config; merged results are cached in memory.
 * Sensitive list-type settings (black/white lists, group switches) are never
 * user-overridable — see isUserConfigurable().
 */
class UserConfigManager {
  constructor() {
    // Root directory for per-user config files: <cwd>/data/crystelf
    this.basePath = path.join(process.cwd(), 'data', 'crystelf');
    // In-memory cache: userId -> merged (global + user) config object
    this.userConfigs = new Map();
    // Snapshot of the global 'ai' config; populated by init()/reloadGlobalConfig()
    this.globalConfig = null;
  }

  /**
   * Ensure the base directory exists and load the global config.
   * Best-effort: failures are logged, not rethrown, so callers keep running.
   */
  async init() {
    try {
      await fs.mkdir(this.basePath, { recursive: true });
      this.globalConfig = await ConfigControl.get('ai');
    } catch (error) {
      logger.error(`[crystelf-ai] 用户配置管理器初始化失败: ${error.message}`);
    }
  }

  /**
   * Get the effective AI config for a user (global config with the user's
   * permitted overrides applied). Results are cached per userId.
   * @param {string} userId - user QQ number
   * @returns {Promise<Object>} merged config; falls back to the global config
   *   on unexpected errors (may be null if init() previously failed)
   */
  async getUserConfig(userId) {
    try {
      if (this.userConfigs.has(userId)) {
        const cachedConfig = this.userConfigs.get(userId);
        logger.info(`[crystelf-ai] 使用缓存的用户配置 ${userId}: apiKey=${!!cachedConfig.apiKey}, model=${cachedConfig.modelType}`);
        return cachedConfig;
      }
      const userConfigPath = path.join(this.basePath, 'ai', userId, 'ai.json');
      logger.info(`[crystelf-ai] 尝试加载用户配置: ${userConfigPath}`);
      let userConfig = {};
      try {
        const configData = await fs.readFile(userConfigPath, 'utf-8');
        userConfig = JSON.parse(configData);
      } catch (error) {
        // ENOENT simply means the user has no custom config file — silently
        // fall through to the global config. Anything else (bad JSON,
        // permissions) is worth a warning. (Original had an empty ENOENT
        // branch with the warning in the else; condition inverted here.)
        if (error.code !== 'ENOENT') {
          logger.warn(`[crystelf-ai] 用户 ${userId} 的配置文件解析失败,使用默认配置: ${error.message}`);
        }
      }
      const mergedConfig = this.mergeConfigs(this.globalConfig, userConfig);
      this.userConfigs.set(userId, mergedConfig);
      return mergedConfig;
    } catch (error) {
      logger.error(`[crystelf-ai] 获取用户 ${userId} 配置失败: ${error.message}`);
      // NOTE(review): may be null if init() never succeeded — callers should cope.
      return this.globalConfig;
    }
  }

  /**
   * Persist a user's config (forbidden keys stripped) and refresh the cache.
   * @param {string} userId - user QQ number
   * @param {Object} config - raw user-supplied config
   * @throws rethrows any filesystem error after logging it
   */
  async saveUserConfig(userId, config) {
    try {
      const userConfigDir = path.join(this.basePath, 'ai', userId);
      const userConfigPath = path.join(userConfigDir, 'ai.json');
      const filteredConfig = this.filterUserConfig(config);
      await fs.mkdir(userConfigDir, { recursive: true });
      await fs.writeFile(userConfigPath, JSON.stringify(filteredConfig, null, 2));
      // Keep the in-memory cache consistent with what was just written.
      const mergedConfig = this.mergeConfigs(this.globalConfig, filteredConfig);
      this.userConfigs.set(userId, mergedConfig);
    } catch (error) {
      logger.error(`[crystelf-ai] 保存用户 ${userId} 配置失败: ${error.message}`);
      throw error;
    }
  }

  /**
   * Merge user overrides over the global config without mutating either.
   * Only user-configurable keys are taken from userConfig.
   * @param {Object} globalConfig - global config (may be null)
   * @param {Object} userConfig - user overrides (may be empty)
   * @returns {Object} merged config
   */
  mergeConfigs(globalConfig, userConfig) {
    if (!globalConfig) return userConfig;
    if (!userConfig || Object.keys(userConfig).length === 0) return globalConfig;
    // Deep-copy via JSON round-trip: the config originates from JSON files,
    // so this is lossless here and avoids mutating the shared globalConfig.
    const mergedConfig = JSON.parse(JSON.stringify(globalConfig));
    for (const [key, value] of Object.entries(userConfig)) {
      if (this.isUserConfigurable(key)) {
        mergedConfig[key] = value;
      }
    }
    return mergedConfig;
  }

  /**
   * Whether a config key may be overridden per-user.
   * Moderation/routing lists stay admin-only.
   * @param {string} key - config key name
   * @returns {boolean} true if the user may set this key
   */
  isUserConfigurable(key) {
    const forbiddenKeys = [
      'blacklist', 'whitelist', 'blackWords',
      'enableGroups', 'disableGroups'
    ];
    return !forbiddenKeys.includes(key);
  }

  /**
   * Strip keys the user is not allowed to set.
   * @param {Object} config - raw config
   * @returns {Object} new object containing only user-configurable entries
   */
  filterUserConfig(config) {
    return Object.fromEntries(
      Object.entries(config).filter(([key]) => this.isUserConfigurable(key))
    );
  }

  /**
   * Drop one user's cached merged config, or all of them.
   * @param {string} [userId] - user QQ number; omit to clear every entry
   */
  clearCache(userId) {
    if (userId) {
      this.userConfigs.delete(userId);
    } else {
      this.userConfigs.clear();
    }
  }

  /**
   * Re-read the global config and invalidate all cached merges so the next
   * getUserConfig() re-merges against the fresh global values.
   */
  async reloadGlobalConfig() {
    this.globalConfig = await ConfigControl.get('ai');
    this.clearCache();
  }

  /**
   * @returns {string} root directory for per-user config storage
   */
  getUserConfigPath() {
    return this.basePath;
  }

  /**
   * Check whether a user has a custom config file on disk.
   * @param {string} userId - user QQ number
   * @returns {Promise<boolean>} true if ai.json exists for this user
   */
  async hasUserConfig(userId) {
    try {
      const userConfigPath = path.join(this.basePath, 'ai', userId, 'ai.json');
      await fs.access(userConfigPath);
      return true;
    } catch {
      return false;
    }
  }
}
export default new UserConfigManager();