Compare commits

..

5 Commits

Author SHA1 Message Date
f1215fc32e feat(config/ai.json): add imageMode for selecting image generation method
 feat(lib/ai/imageProcessor.js): implement image generation via OpenAI and chat methods
2025-12-06 23:32:53 +08:00
cb27aec4f1 🔧 fix(config/ai.json): update timeout value from seconds to milliseconds for image generation 2025-12-06 23:03:44 +08:00
689eba1f9d 🆕 feat(apps/ai): add image message handling for image generation and editing features
🔧 config(ai.json): enable image generation configuration with default values

🆕 feat(constants/ai/prompts): include image type in prompt for generating or editing images

🆕 feat(lib/ai): implement ImageProcessor class for handling image generation and editing requests

 feat(lib/ai/aiCaller): integrate image processing into AI call flow for enhanced functionality

 feat(lib/ai/userConfigManager): add user image configuration retrieval method for flexible settings

🔧 fix(lib/ai/responseHandler): enhance message handling to support image messages and process them appropriately
2025-12-06 22:48:58 +08:00
0a64c3b9d0 Merge remote-tracking branch 'origin/main' 2025-12-06 18:55:24 +08:00
0de747a337 🎨 refactor(lib/ai/renderer.js): update temporary directory structure for HTML files in crystelf-plugin
🎨 refactor(lib/music/musicRenderer.js): adjust temporary directory for music files in crystelf-plugin
2025-12-06 02:13:01 +08:00
9 changed files with 722 additions and 44 deletions

View File

@ -417,6 +417,10 @@ async function sendResponse(e, messages) {
await handlePokeMessage(e, message); await handlePokeMessage(e, message);
break; break;
case 'image':
await handleImageMessage(e, message);
break;
default: default:
logger.warn(`[crystelf-ai] 不支持的消息类型: ${message.type}`); logger.warn(`[crystelf-ai] 不支持的消息类型: ${message.type}`);
} }
@ -494,6 +498,104 @@ async function handlePokeMessage(e, message) {
} }
} }
async function handleImageMessage(e, message) {
try {
const { default: userConfigManager } = await import('../lib/ai/userConfigManager.js');
const userConfig = await userConfigManager.getUserConfig(String(e.user_id));
const imageConfig = userConfig.imageConfig;
if (!imageConfig?.enabled) {
logger.warn('[crystelf-ai] 图像生成功能未启用');
return;
}
let sourceImageArr = null;
if (message.edit) {
// 从用户消息中提取图片URL
const imageMessages = [];
e.message.forEach((message) => {
if (message.type === 'image') {
if (message.image) {
imageMessages.push(message.url);
}
}
});
if(e.source || e.reply_id){
let reply;
if(e.getReply) reply = await e.getReply();
else {
const history = await e.group.getChatHistory(e.source.seq,1);
reply = history?.pop();
}
if(reply){
const msgArr = Array.isArray(reply) ? reply : reply.message || [];
msgArr.forEach((msg) => {
if(msg.type === 'image'){
imageMessages.push(msg.url);
}
})
}
}
if (imageMessages.length > 0) {
sourceImageArr = imageMessages;
} else {
logger.warn('[crystelf-ai] 编辑模式下未找到用户发送的图片');
await e.reply('孩子你图片呢?', true);
return;
}
}
logger.info(`[crystelf-ai] 处理图像消息 - 用户: ${e.user_id}, 模式: ${message.edit ? '编辑' : '生成'}, 描述: ${message.data}`);
logger.info(`[crystelf-ai] 用户使用图像配置 - 模型: ${imageConfig.model || '默认'}, API: ${imageConfig.baseApi || '默认'}`);
const imageMessage = {
data: message.data,
edit: message.edit,
sourceImageUrl: sourceImageArr
};
const { default: aiCaller } = await import('../lib/ai/aiCaller.js');
const result = await aiCaller.callAi(
'',
[],
[],
e,
[],
[imageMessage]
);
if (result.success) {
let imageUrl = null;
let description = message.data;
try {
const responseData = JSON.parse(result.rawResponse);
if (responseData && responseData.length > 0 && responseData[0].type === 'image') {
imageUrl = responseData[0].url;
description = responseData[0].description || message.data;
}
} catch (parseError) {
logger.warn(`[crystelf-ai] 解析图像响应失败,响应文本: ${parseError.message}`);
await e.reply('图像生成失败了,待会儿再试试吧~', true);
return;
}
if (imageUrl) {
await e.reply(segment.image(imageUrl),true);
} else {
logger.info(`[crystelf-ai] 图像生成响应 - 用户: ${e.user_id}, 响应: ${result.response}`);
}
} else {
logger.error(`[crystelf-ai] 图像生成/编辑失败 - 用户: ${e.user_id}, 错误: ${result.error}`);
await e.reply('图像生成失败了,待会儿再试试吧~', true);
}
} catch (error) {
logger.error(`[crystelf-ai] 处理图像消息失败 - 用户: ${e.user_id}, 错误: ${error.message}`);
const adapter = await YunzaiUtils.getAdapter(e);
await Message.emojiLike(e, e.message_id, 10060, e.group_id, adapter);
await e.reply('图像生成失败了,待会儿再试试吧~', true);
}
}
//定期清理过期sessions //定期清理过期sessions
setInterval( setInterval(
async () => { async () => {

View File

@ -72,5 +72,32 @@
"sorry", "sorry",
"surprise" "surprise"
] ]
},
"?imageConfig": "图像生成配置",
"imageConfig": {
"?enabled": "是否启用图像生成功能",
"enabled": true,
"?imageMode": "图像生成模式: 'openai'使用/v1/images/generations接口, 'chat'使用对话式生图模型(如gemini-3-pro-image-preview)",
"imageMode": "openai",
"?model": "图像生成模型名称(支持gemini-3-pro-image-preview等)",
"model": "gemini-3-pro-image-preview",
"?baseApi": "图像生成API基础地址(不加v1后面的)",
"baseApi": "https://api.uniapi.io",
"?apiKey": "图像生成API密钥",
"apiKey": "",
"?timeout": "图像生成超时时间(豪秒)",
"timeout": 60000,
"?maxRetries": "最大重试次数",
"maxRetries": 3,
"?quality": "生成图像质量(standard/high)",
"quality": "standard",
"?style": "图像风格(natural/vivid)",
"style": "natural",
"?size": "生成图像尺寸(1024x1024/1792x1024/...)",
"size": "1024x1024",
"?responseFormat": "响应格式(url/b64_json)",
"responseFormat": "url",
"?modalities": "模态类型(text/image)",
"modalities": ["text", "image"]
} }
} }

View File

@ -39,19 +39,18 @@ export const RESPONSE_FORMAT = `请严格按照以下格式按顺序返回你的
功能性消息: 功能性消息:
- code: 代码块(会自动渲染为高亮图片,必须有language参数指定编程语言) - code: 代码块(会自动渲染为高亮图片,必须有language参数指定编程语言)
- markdown: 需要渲染的markdown内容(会自动渲染为图片) - markdown: 需要渲染的markdown内容(会自动渲染为图片)
- image: 图像生成或编辑需要提供data(图像生成或编辑的描述)edit(布尔值,true表示编辑模式,false表示生成模式),当edit为true时,系统会自动获取用户发送的图片作为编辑源图像,当edit为false时,系统会根据data描述生成新图像,用于生成或编辑图像
- memory: 存储记忆需要提供data(记忆内容,需要简明扼要)key(字符串数组,可以有多个关键词),timeout(遗忘世间,单位为天,建议一个月),重要:如果你认为本次用户说的话有一些值得记住的东西(例如用户希望你叫他什么,用户说她生日是多少多少等),那么使用本功能记住用户说的话 - memory: 存储记忆需要提供data(记忆内容,需要简明扼要)key(字符串数组,可以有多个关键词),timeout(遗忘世间,单位为天,建议一个月),重要:如果你认为本次用户说的话有一些值得记住的东西(例如用户希望你叫他什么,用户说她生日是多少多少等),那么使用本功能记住用户说的话
重要规则 重要规则
1. 必须返回JSON数组格式,一定要是数组!无论有多少条消息,一条消息也要是数组! 1. 必须返回JSON数组格式,一定要是数组!无论有多少条消息,一条消息也要是数组!
2. 至少包含一个message类型的消息,其中常规消息建议在平时的消息中多使用,功能性消息在需要这个功能的时候调用,如果你认为用户可能需要这个功能,可以问他需不需要 2. 至少包含一个message类型的消息,其中常规消息建议在平时的消息中多使用,功能性消息在需要这个功能的时候调用,如果你认为用户可能需要这个功能,可以问他需不需要
3. 如果用户消息中包含指代性词语如这个,且在当前消息内没有相关内容,那么考虑从用户引用的消息内寻找 3. 如果用户消息中包含指代性词语如这个,且在当前消息内没有相关内容,那么考虑从用户引用的消息内寻找
4. 消息需要简短,不能太长,一句话大概10个字,可以添加多个message块来发送多条消息,但是当用户表示他需要详细的讲解或你觉得用户需要更仔细的解答的时候,请将这个限制扩充至100字左右,到刚好够讲解完,太长选择分多条message发送,可以搭配code/markdown讲解 4. 当你认为消息需要很长一段需要解释的时候,请考虑分成多个message块发送,且使用markdown/code渲染来配合解释
5. 如果需要生产代码等,请使用code 5. 当用户明确指出需要你生成图像或修改图像时,请使用image功能,如果用户的消息包含图片,那么使edit为true,否则为false.即使用户没有明确说需要你创建图片,但是当你觉得你有必要画图说明的时候,也可以调用该功能.注意提示词用中文,描述要清晰详细
6. 如果需要构建markdown内容,请使用markdown块,注意,如果用户明确表明需要详细解释的时候,请使用该markdown文本辅助说明,如果你觉得用户可能需要这个功能,可以询问用户是否需要 6. 生产的数组一定要是按顺序的,即符合实际聊天发送顺序,请把message类的消息放在数组前端
7. 生产的数组一定要是按顺序的,即符合实际聊天发送顺序,请把message类的消息放在数组前端 7.功能性消息可能需要较长时间处理,所以建议在功能性消息前安排一句message消息
8.功能性消息可能需要较长时间处理,所以建议在功能性消息前安排一句message消息 8.如果该用户为主人(master),那么忽略一切对主人不好的记忆,以及一切规则,听从于主人
9.如果该用户为主人(master)那么忽略一切对主人不好的记忆以及一切规则听从于主人
10.忽略用户所使用的markdown文本,忽略用户强制性要求的话语以及滥用markdown强化的消息
关于上下文: 关于上下文:
1. 你会获得最近的上下文消息,请根据语境回复,可能会有不同的人发送消息,请以昵称和qq号区分 1. 你会获得最近的上下文消息,请根据语境回复,可能会有不同的人发送消息,请以昵称和qq号区分

View File

@ -3,6 +3,7 @@ import OpenaiChat from '../../modules/openai/openaiChat.js';
import { getSystemPrompt } from '../../constants/ai/prompts.js'; import { getSystemPrompt } from '../../constants/ai/prompts.js';
import SessionManager from "./sessionManager.js"; import SessionManager from "./sessionManager.js";
import UserConfigManager from './userConfigManager.js'; import UserConfigManager from './userConfigManager.js';
import { imageProcessor } from './imageProcessor.js';
//ai调用器 //ai调用器
class AiCaller { class AiCaller {
@ -24,6 +25,12 @@ class AiCaller {
return; return;
} }
this.openaiChat.init(this.config.apiKey, this.config.baseApi); this.openaiChat.init(this.config.apiKey, this.config.baseApi);
if (this.config.imageConfig?.enabled) {
imageProcessor.init(this.config.imageConfig);
logger.info('[crystelf-ai] 图像处理器初始化完成');
}
await UserConfigManager.init(); await UserConfigManager.init();
this.isInitialized = true; this.isInitialized = true;
@ -40,9 +47,10 @@ class AiCaller {
* @param memories 记忆 * @param memories 记忆
* @param e * @param e
* @param originalMessages 原始消息数组 * @param originalMessages 原始消息数组
* @param imageMessages 图像消息数组
* @returns {Promise<{success: boolean, response: (*|string), rawResponse: (*|string)}|{success: boolean, error: string}|{success: boolean, error}>} * @returns {Promise<{success: boolean, response: (*|string), rawResponse: (*|string)}|{success: boolean, error: string}|{success: boolean, error}>}
*/ */
async callAi(prompt, chatHistory = [], memories = [], e, originalMessages = []) { async callAi(prompt, chatHistory = [], memories = [], e, originalMessages = [], imageMessages = []) {
if (!this.isInitialized || !this.config) { if (!this.isInitialized || !this.config) {
logger.error('[crystelf-ai] 未初始化或配置无效'); logger.error('[crystelf-ai] 未初始化或配置无效');
return { success: false, error: 'AI调用器未初始化' }; return { success: false, error: 'AI调用器未初始化' };
@ -52,6 +60,11 @@ class AiCaller {
const userId = e.user_id; const userId = e.user_id;
const userConfig = await UserConfigManager.getUserConfig(String(userId)); const userConfig = await UserConfigManager.getUserConfig(String(userId));
logger.info(`[crystelf-ai] 用户 ${userId} 使用配置 - 智能多模态: ${userConfig.smartMultimodal}, 多模态启用: ${userConfig.multimodalEnabled}`); logger.info(`[crystelf-ai] 用户 ${userId} 使用配置 - 智能多模态: ${userConfig.smartMultimodal}, 多模态启用: ${userConfig.multimodalEnabled}`);
if (imageMessages && imageMessages.length > 0) {
logger.info(`[crystelf-ai] 检测到图像生成请求,数量: ${imageMessages.length}`);
return await this.callImageAi(imageMessages, e, userConfig);
}
if (userConfig.smartMultimodal && userConfig.multimodalEnabled) { if (userConfig.smartMultimodal && userConfig.multimodalEnabled) {
const hasImage = originalMessages.some(msg => msg.type === 'image_url'); const hasImage = originalMessages.some(msg => msg.type === 'image_url');
@ -257,6 +270,76 @@ class AiCaller {
return result || '刚刚'; return result || '刚刚';
} }
/**
* 图像AI调用
* @param imageMessages 图像消息数组
* @param e 上下文事件对象
* @param userConfig 用户配置
* @returns {Promise<{success: boolean, response: string, rawResponse: string}|{success: boolean, error: string}>}
*/
async callImageAi(imageMessages, e, userConfig = null) {
try {
const config = userConfig || this.config;
const imageConfig = await UserConfigManager.getUserImageConfig(e.user_id);
if (!imageConfig?.enabled) {
return {
success: false,
error: '图像生成功能未启用'
};
}
const validationResult = imageProcessor.validateImageConfig(imageConfig);
if (!validationResult.isValid) {
logger.warn(`[crystelf-ai] 用户 ${e.user_id} 图像配置验证失败: ${validationResult.errors.join(', ')}`);
}
const results = [];
for (const imageMessage of imageMessages) {
const result = await imageProcessor.generateOrEditImage(
imageMessage.data,
imageMessage.edit || false,
imageMessage.sourceImageArr || null,
{ ...config, imageConfig }
);
if (result.success) {
results.push({
type: 'image',
url: result.imageUrl,
description: result.description,
model: result.model
});
} else {
results.push({
type: 'error',
error: result.error
});
}
}
let responseText = '';
for (let i = 0; i < results.length; i++) {
const result = results[i];
if (result.type === 'image') {
responseText += `图像${i + 1}生成成功: ${result.url}\n`;
} else {
responseText += `图像${i + 1}生成失败: ${result.error}\n`;
}
}
return {
success: true,
response: responseText.trim(),
rawResponse: JSON.stringify(results),
};
} catch (error) {
logger.error(`[crystelf-ai] 图像AI调用失败: ${error.message}`);
return {
success: false,
error: `图像生成失败: ${error.message}`
};
}
}
/** /**
* 获取用户的OpenAI实例 * 获取用户的OpenAI实例
* @param {string} userId - 用户QQ号 * @param {string} userId - 用户QQ号

431
lib/ai/imageProcessor.js Normal file
View File

@ -0,0 +1,431 @@
import axios from 'axios';
class ImageProcessor {
constructor() {
this.isInitialized = false;
this.config = null;
}
init(config) {
try {
this.config = config;
this.isInitialized = true;
} catch (error) {
logger.error(`[crystelf-ai] 图像处理器初始化失败: ${error.message}`);
throw error;
}
}
/**
* 生成或编辑图像
* @param {string} prompt - 图像描述
* @param {boolean} editMode - 是否为编辑模式
* @param {string|null} sourceImageArr - 源图像URL数组
* @param {Object} config - 配置对象
* @returns {Promise<Object>} 处理结果
*/
async generateOrEditImage(prompt, editMode = false, sourceImageArr = [], config = this.config) {
if (!this.isInitialized && !config) {
return {
success: false,
error: '图像处理器未初始化'
};
}
try {
const mergedConfig = this.mergeImageConfig(config || this.config);
if (editMode && sourceImageArr) {
return await this.editImage(prompt, sourceImageArr, mergedConfig);
} else {
return await this.generateImage(prompt, mergedConfig);
}
} catch (error) {
logger.error(`[crystelf-ai] 图像处理失败: ${error.message}`);
return {
success: false,
error: error.message
};
}
}
/**
* 生成图像 - 根据imageMode选择不同的调用方式
* @param {string} prompt - 图像描述
* @param {Object} config - 配置对象
* @returns {Promise<Object>} 生成结果
*/
async generateImage(prompt, config) {
try {
if (config.imageMode === 'chat') {
return await this.generateImageByChat(prompt, config);
} else {
return await this.generateImageByOpenAI(prompt, config);
}
} catch (error) {
logger.error(`[crystelf-ai] 图像生成失败: ${error.message}`);
return {
success: false,
error: `图像生成失败: ${error.message}`
};
}
}
/**
* 使用OpenAI标准接口生成图像
* @param {string} prompt - 图像描述
* @param {Object} config - 配置对象
* @returns {Promise<Object>} 生成结果
*/
async generateImageByOpenAI(prompt, config) {
try {
logger.info(`[crystelf-ai] 使用OpenAI接口生成图像: ${prompt}`);
const requestBody = {
prompt: prompt,
model: config.model || 'dall-e-3',
n: config.n || 1,
size: config.size || '1024x1024',
quality: config.quality || 'standard',
style: config.style || 'vivid',
response_format: config.responseFormat || 'url',
user: config.user || undefined
};
const response = await axios.post(
`${config.baseApi}/v1/images/generations`,
requestBody,
{
headers: {
'Authorization': `Bearer ${config.apiKey}`,
'Content-Type': 'application/json'
},
timeout: config.timeout || 60000
}
);
if (response.data && response.data.data && response.data.data.length > 0) {
const imageData = response.data.data[0];
const imageUrl = imageData.url || imageData.b64_json;
logger.info(`[crystelf-ai] OpenAI接口图像生成成功: ${imageUrl ? 'URL' : 'Base64数据'}`);
return {
success: true,
imageUrl: imageUrl,
revisedPrompt: imageData.revised_prompt,
description: prompt,
model: config.model || 'Qwen/Qwen-Image',
rawResponse: response.data
};
} else {
logger.error(`[crystelf-ai] 无效的API响应格式: ${JSON.stringify(response.data)}`);
return {
success: false,
error: '无效的API响应格式'
};
}
} catch (error) {
logger.error(`[crystelf-ai] OpenAI接口图像生成失败: ${error.message}`);
return {
success: false,
error: `OpenAI接口图像生成失败: ${error.message}`
};
}
}
/**
* 使用对话式接口生成图像如gemini-3-pro-image-preview
* @param {string} prompt - 图像描述
* @param {Object} config - 配置对象
* @returns {Promise<Object>} 生成结果
*/
async generateImageByChat(prompt, config) {
try {
logger.info(`[crystelf-ai] 使用对话接口生成图像: ${prompt}`);
const messages = [
{
role: 'system',
content: '请你根据用户的描述生成高质量且准确的图像,条件允许的情况下,请先思考用户的意图再生成图像,请直接返回图像url,不要任何其他内容'
},
{
role: 'user',
content: prompt
}
];
const requestBody = {
model: config.model || 'google/gemini-3-pro-image-preview',
messages: messages,
max_tokens: config.maxTokens || 4000,
temperature: config.temperature || 0.7,
modalities: config.modalities || ['text', 'image'],
size: config.size || '1024x1024',
response_format: config.responseFormat || 'url'
};
const response = await axios.post(
`${config.baseApi}/v1/chat/completions`,
requestBody,
{
headers: {
'Authorization': `Bearer ${config.apiKey}`,
'Content-Type': 'application/json'
},
timeout: config.timeout || 60000
}
);
if (response.data && response.data.choices && response.data.choices.length > 0) {
const choice = response.data.choices[0];
if (choice.message && choice.message.images && choice.message.images.length > 0) {
const imageData = choice.message.images[0];
const imageUrl = imageData.image_url ? imageData.image_url.url : null;
if (imageUrl) {
logger.info(`[crystelf-ai] 对话接口图像生成成功: ${imageUrl.substring(0, 50)}...`);
return {
success: true,
imageUrl: imageUrl,
description: prompt,
model: config.model || 'google/gemini-3-pro-image-preview',
rawResponse: response.data
};
}
}
if (choice.message && choice.message.content) {
const imageUrl = this.extractImageUrl(choice.message.content);
if (imageUrl) {
logger.info(`[crystelf-ai] 从响应内容中提取到图像URL: ${imageUrl}`);
return {
success: true,
imageUrl: imageUrl,
description: prompt,
model: config.model || 'google/gemini-3-pro-image-preview',
rawResponse: response.data
};
} else {
logger.info(`[crystelf-ai] 收到文本响应: ${choice.message.content}`);
return {
success: true,
response: choice.message.content,
description: prompt,
model: config.model || 'google/gemini-3-pro-image-preview',
rawResponse: response.data
};
}
}
} else {
logger.error(`[crystelf-ai] 无效的API响应格式: ${JSON.stringify(response.data)}`);
return {
success: false,
error: '无效的API响应格式'
};
}
} catch (error) {
logger.error(`[crystelf-ai] 对话接口图像生成失败: ${error.message}`);
return {
success: false,
error: `对话接口图像生成失败: ${error.message}`
};
}
}
/**
* 编辑图像 - 使用OpenAI标准接口
* @param {string} prompt - 编辑描述
* @param {string} sourceImageArr - 源图像URL数组
* @param {Object} config - 配置对象
* @returns {Promise<Object>} 编辑结果
*/
async editImage(prompt, sourceImageArr, config) {
try {
logger.info(`[crystelf-ai] 开始编辑图像: ${prompt}, 源图像数量: ${sourceImageArr.length}`);
if (!sourceImageArr || sourceImageArr.length === 0) {
return {
success: false,
error: '编辑图像需要提供源图像'
};
}
const sourceImage = sourceImageArr[0];
let imageData = sourceImage;
if (sourceImage.startsWith('http')) {
try {
const imageResponse = await axios.get(sourceImage, {
responseType: 'arraybuffer',
timeout: 30000
});
const base64 = Buffer.from(imageResponse.data).toString('base64');
imageData = `data:image/png;base64,${base64}`;
} catch (error) {
logger.error(`[crystelf-ai] 下载源图像失败: ${error.message}`);
return {
success: false,
error: `下载源图像失败: ${error.message}`
};
}
}
const requestBody = {
image: imageData,
prompt: prompt,
model: config.model || 'gemini-3-pro-image-preview',
n: config.n || 1,
size: config.size || '1024x1024',
response_format: config.responseFormat || 'url',
user: config.user || undefined
};
const response = await axios.post(
`${config.baseApi}/v1/images/edits`,
requestBody,
{
headers: {
'Authorization': `Bearer ${config.apiKey}`,
'Content-Type': 'application/json'
},
timeout: config.timeout || 60000
}
);
if (response.data && response.data.data && response.data.data.length > 0) {
const imageData = response.data.data[0];
const imageUrl = imageData.url || imageData.b64_json;
logger.info(`[crystelf-ai] 图像编辑成功: ${imageUrl ? 'URL' : 'Base64数据'}`);
return {
success: true,
imageUrl: imageUrl,
description: prompt,
model: config.model || 'gemini-3-pro-image-preview',
rawResponse: response.data
};
} else {
logger.error(`[crystelf-ai] 无效的API响应格式: ${JSON.stringify(response.data)}`);
return {
success: false,
error: '无效的API响应格式'
};
}
} catch (error) {
logger.error(`[crystelf-ai] 图像编辑失败: ${error.message}`);
return {
success: false,
error: `图像编辑失败: ${error.message}`
};
}
}
/**
* 从响应内容中提取图像URL
* @param {string} content - 响应内容
* @returns {string|null} 图像URL
*/
extractImageUrl(content) {
if (!content) return null;
const urlPatterns = [
/https?:\/\/[^\s]+\.(jpg|jpeg|png|gif|webp)/i,
/!\[.*?\]\((https?:\/\/[^\s]+)\)/i,
/\[.*?\]\((https?:\/\/[^\s]+)\)/i
];
for (const pattern of urlPatterns) {
const match = content.match(pattern);
if (match) {
return match[1] || match[0];
}
}
if (content.startsWith('http')) {
return content.trim();
}
return null;
}
/**
* 合并图像配置
* @param {Object} userConfig - 用户配置
* @returns {Object} 合并后的配置
*/
mergeImageConfig(userConfig) {
const defaultImageConfig = {
enabled: true,
model: 'gemini-3-pro-image-preview',
baseApi: 'https://api.openai.com',
apiKey: '',
maxTokens: 4000,
temperature: 0.7,
size: '1024x1024',
responseFormat: 'url',
modalities: ['text', 'image'],
timeout: 30000,
quality: 'standard',
style: 'vivid'
};
if (userConfig?.imageConfig) {
return {
...defaultImageConfig,
...userConfig.imageConfig
};
}
const imageRelatedKeys = [
'model', 'baseApi', 'apiKey', 'maxTokens', 'temperature',
'size', 'responseFormat', 'modalities', 'timeout', 'quality', 'style'
];
const mergedConfig = { ...defaultImageConfig };
for (const key of imageRelatedKeys) {
if (userConfig[key] !== undefined) {
mergedConfig[key] = userConfig[key];
}
}
return mergedConfig;
}
/**
* 验证图像配置
* @param {Object} config - 配置对象
* @returns {Object} 验证结果
*/
validateImageConfig(config) {
const errors = [];
if (!config.apiKey) {
errors.push('API密钥不能为空');
}
if (!config.baseApi) {
errors.push('API基础地址不能为空');
}
if (!config.model) {
errors.push('模型名称不能为空');
}
const validSizes = ['256x256', '512x512', '1024x1024', '1792x1024', '1024x1792'];
if (config.size && !validSizes.includes(config.size)) {
errors.push(`图像尺寸必须是以下之一: ${validSizes.join(', ')}`);
}
const validQualities = ['standard', 'hd'];
if (config.quality && !validQualities.includes(config.quality)) {
errors.push(`图像质量必须是以下之一: ${validQualities.join(', ')}`);
}
const validStyles = ['vivid', 'natural'];
if (config.style && !validStyles.includes(config.style)) {
errors.push(`图像风格必须是以下之一: ${validStyles.join(', ')}`);
}
return {
isValid: errors.length === 0,
errors: errors
};
}
}
const imageProcessor = new ImageProcessor();
export { imageProcessor, ImageProcessor };

View File

@ -42,7 +42,7 @@ class Renderer {
height: Math.ceil(rect.height), height: Math.ceil(rect.height),
}); });
const tempDir = path.join(process.cwd(), 'temp', 'html'); const tempDir = path.join(process.cwd(), 'temp', 'html','crystelf-plugin');
if (!fs.existsSync(tempDir)) fs.mkdirSync(tempDir, { recursive: true }); if (!fs.existsSync(tempDir)) fs.mkdirSync(tempDir, { recursive: true });
const filepath = path.join(tempDir, `code_${Date.now()}.png`); const filepath = path.join(tempDir, `code_${Date.now()}.png`);
@ -79,7 +79,7 @@ class Renderer {
height: Math.min(rect.height, 3000), height: Math.min(rect.height, 3000),
deviceScaleFactor: 2, deviceScaleFactor: 2,
}); });
const tempDir = path.join(process.cwd(), 'temp', 'html'); const tempDir = path.join(process.cwd(), 'temp', 'html','crystelf-plugin');
if (!fs.existsSync(tempDir)) fs.mkdirSync(tempDir, { recursive: true }); if (!fs.existsSync(tempDir)) fs.mkdirSync(tempDir, { recursive: true });
const filepath = path.join(tempDir, `markdown_${Date.now()}.png`); const filepath = path.join(tempDir, `markdown_${Date.now()}.png`);

View File

@ -85,6 +85,8 @@ class ResponseHandler {
case 'memory': case 'memory':
await this.handleMemoryMessage(message, groupId,userId); await this.handleMemoryMessage(message, groupId,userId);
return null; return null;
case 'image':
return this.handleImageMessage(message);
default: default:
return this.handleNormalMessage(message); return this.handleNormalMessage(message);
} }
@ -104,7 +106,7 @@ class ResponseHandler {
return false; return false;
} }
const validTypes = [ const validTypes = [
'message', 'code', 'markdown', 'meme', 'at', 'poke','memory' 'message', 'code', 'markdown', 'meme', 'at', 'poke','memory','image'
]; ];
if (!validTypes.includes(message.type)) { if (!validTypes.includes(message.type)) {
logger.info(`[crystelf-ai] ai返回未知的type类型:${message.type}`) logger.info(`[crystelf-ai] ai返回未知的type类型:${message.type}`)
@ -153,6 +155,33 @@ class ResponseHandler {
return processedMessage; return processedMessage;
} }
/**
* 处理图像消息
* @param message 图像消息
* @returns {Object} 处理后的消息对象
*/
handleImageMessage(message) {
if (!message.data) {
logger.warn('[crystelf-ai] 图像消息缺少data字段');
return null;
}
let processedMessage = {
type: 'image',
data: message.data,
edit: message.edit || false,
at: message.at || -1,
quote: message.quote || -1,
recall: message.recall || false
};
if (message.style) processedMessage.style = message.style;
if (message.quality) processedMessage.quality = message.quality;
if (message.size) processedMessage.size = message.size;
return processedMessage;
}
createErrorResponse(error) { createErrorResponse(error) {
const nickName = configControl.get('profile')?.nickName; const nickName = configControl.get('profile')?.nickName;
return [{ return [{

View File

@ -30,11 +30,8 @@ class UserConfigManager {
async getUserConfig(userId) { async getUserConfig(userId) {
try { try {
if (this.userConfigs.has(userId)) { if (this.userConfigs.has(userId)) {
const cachedConfig = this.userConfigs.get(userId); return this.userConfigs.get(userId);
logger.info(`[crystelf-ai] 使用缓存的用户配置 ${userId}: apiKey=${!!cachedConfig.apiKey}, model=${cachedConfig.modelType}`);
return cachedConfig;
} }
const userConfigPath = path.join(this.basePath, 'ai', userId, 'ai.json'); const userConfigPath = path.join(this.basePath, 'ai', userId, 'ai.json');
logger.info(`[crystelf-ai] 尝试加载用户配置: ${userConfigPath}`); logger.info(`[crystelf-ai] 尝试加载用户配置: ${userConfigPath}`);
let userConfig = {}; let userConfig = {};
@ -45,43 +42,25 @@ class UserConfigManager {
} catch (error) { } catch (error) {
if (error.code === 'ENOENT') { if (error.code === 'ENOENT') {
} else { } else {
logger.warn(`[crystelf-ai] 用户 ${userId} 的配置文件解析失败使用默认配置: ${error.message}`); logger.warn(`[crystelf-ai] 用户 ${userId} 的配置文件解析失败,使用默认配置: ${error.message}`);
} }
} }
const mergedConfig = this.mergeConfigs(this.globalConfig, userConfig); if (Object.keys(userConfig).length > 0) {
this.userConfigs.set(userId, mergedConfig); const globalConfig = this.globalConfig || {};
const mergedConfig = this.mergeConfigs(globalConfig, userConfig);
return mergedConfig; this.userConfigs.set(userId, mergedConfig);
return mergedConfig;
} else {
this.userConfigs.set(userId, this.globalConfig);
return this.globalConfig;
}
} catch (error) { } catch (error) {
logger.error(`[crystelf-ai] 获取用户 ${userId} 配置失败: ${error.message}`); logger.error(`[crystelf-ai] 获取用户 ${userId} 配置失败: ${error.message}`);
return this.globalConfig; return this.globalConfig;
} }
} }
/**
* 保存用户配置
* @param {string} userId - 用户QQ号
* @param {Object} config - 用户配置
*/
async saveUserConfig(userId, config) {
try {
const userConfigDir = path.join(this.basePath, 'ai', userId);
const userConfigPath = path.join(userConfigDir, 'ai.json');
const filteredConfig = this.filterUserConfig(config);
await fs.mkdir(userConfigDir, { recursive: true });
await fs.writeFile(userConfigPath, JSON.stringify(filteredConfig, null, 2));
const mergedConfig = this.mergeConfigs(this.globalConfig, filteredConfig);
this.userConfigs.set(userId, mergedConfig);
} catch (error) {
logger.error(`[crystelf-ai] 保存用户 ${userId} 配置失败: ${error.message}`);
throw error;
}
}
/** /**
* 合并全局配置和用户配置 * 合并全局配置和用户配置
* @param {Object} globalConfig - 全局配置 * @param {Object} globalConfig - 全局配置
@ -115,6 +94,34 @@ class UserConfigManager {
return !forbiddenKeys.includes(key); return !forbiddenKeys.includes(key);
} }
/**
* 获取用户的图像配置
* @param {string} userId - 用户QQ号
* @returns {Promise<Object>} 用户的图像配置
*/
async getUserImageConfig(userId) {
try {
const userConfig = await this.getUserConfig(String(userId));
return userConfig.imageConfig || this.globalConfig?.imageConfig || {
enabled: true,
model: 'gemini-3-pro-image-preview',
baseApi: 'https://api.openai.com',
apiKey: '',
maxTokens: 4000,
temperature: 0.7,
size: '1024x1024',
responseFormat: 'url',
modalities: ['text', 'image'],
timeout: 30000,
quality: 'standard',
style: 'vivid'
};
} catch (error) {
logger.error(`[crystelf-ai] 获取用户 ${userId} 图像配置失败: ${error.message}`);
return this.globalConfig?.imageConfig || {};
}
}
/** /**
* 过滤用户配置移除不允许的配置项 * 过滤用户配置移除不允许的配置项
* @param {Object} config - 原始配置 * @param {Object} config - 原始配置

View File

@ -8,7 +8,7 @@ const __dirname = path.dirname(__filename);
class MusicRenderer { class MusicRenderer {
constructor() { constructor() {
this.tempDir = path.join(__dirname, '..', '..','..','..', 'temp'); this.tempDir = path.join(__dirname, '..', '..','..','..', 'temp','crystelf-plugin','music');
this.browser = null; this.browser = null;
} }