feat(openaiChat): add support for multi-modal messages in AI chat function.

Author: Jerry
Date:   2025-11-30 11:10:54 +08:00
Parent: 619f2a3295
Commit: dfd585409a


@@ -22,34 +22,40 @@ class OpenaiChat {
   * @param model 模型
   * @param temperature 温度
   * @param customPrompt 提示词
+  * @param messages 多模态消息数组
   * @returns {Promise<{success: boolean, aiResponse: string}|{}>}
   */
-  async callAi({ prompt, chatHistory = [], model, temperature, customPrompt }) {
+  async callAi({ prompt, chatHistory = [], model, temperature, customPrompt, messages }) {
     if (!this.openai) {
       logger.error('[crystelf-ai] ai未初始化..');
       return { success: false };
     }
-    let systemMessage = {
-      role: 'system',
-      content: customPrompt || '',
-    };
-    const messages = [
-      systemMessage,
-      ...chatHistory,
-      {
-        role: 'user',
-        content: prompt,
-      },
-    ];
+    let finalMessages;
+    if (messages && messages.length > 0) {
+      finalMessages = messages;
+    } else {
+      let systemMessage = {
+        role: 'system',
+        content: customPrompt || '',
+      };
+      finalMessages = [
+        systemMessage,
+        ...chatHistory,
+        {
+          role: 'user',
+          content: prompt,
+        },
+      ];
+    }
     try {
       // logger.info("[DEBUG] 请求体:", {
       //model: model,
-      // messages,
+      // messages: finalMessages,
       //});
       const completion = await this.openai.chat.completions.create({
-        messages: messages,
+        messages: finalMessages,
         model: model,
         temperature: temperature,
         frequency_penalty: 0.2,
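
For context, a minimal usage sketch of the new `messages` parameter (not part of the commit). The instance name `openaiChat`, the model name, and the image URL are placeholders, and the text/image_url content-part shape assumes the OpenAI chat-completions multi-modal message format; only the callAi signature and return shape are taken from the diff above. The snippet assumes an ES module or other async context so that await is valid.

// Hypothetical caller: pass a prepared multi-modal message array directly.
const multiModalMessages = [
  { role: 'system', content: 'You are a helpful assistant.' },
  {
    role: 'user',
    content: [
      { type: 'text', text: 'What is in this picture?' },
      { type: 'image_url', image_url: { url: 'https://example.com/cat.png' } },
    ],
  },
];

// Because `messages` is non-empty, callAi sends it as-is and skips the
// customPrompt/chatHistory fallback branch added in this commit.
const result = await openaiChat.callAi({
  messages: multiModalMessages,
  model: 'gpt-4o', // placeholder model name
  temperature: 0.7,
});
if (result.success) {
  console.log(result.aiResponse);
}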