feat(openaiChat): add support for multi-modal messages in AI chat function.

Jerry 2025-11-30 11:10:54 +08:00
parent 619f2a3295
commit dfd585409a


@@ -22,18 +22,23 @@ class OpenaiChat {
    * @param model model
    * @param temperature temperature
    * @param customPrompt prompt
+   * @param messages array of multi-modal messages
    * @returns {Promise<{success: boolean, aiResponse: string}|{}>}
    */
-  async callAi({ prompt, chatHistory = [], model, temperature, customPrompt }) {
+  async callAi({ prompt, chatHistory = [], model, temperature, customPrompt, messages }) {
     if (!this.openai) {
       logger.error('[crystelf-ai] ai未初始化..');
       return { success: false };
     }
+    let finalMessages;
+    if (messages && messages.length > 0) {
+      finalMessages = messages;
+    } else {
     let systemMessage = {
       role: 'system',
       content: customPrompt || '',
     };
-    const messages = [
+      finalMessages = [
       systemMessage,
       ...chatHistory,
       {
@@ -41,15 +46,16 @@ class OpenaiChat {
         content: prompt,
       },
     ];
+    }
     try {
       // logger.info("[DEBUG] request body:", {
       //model: model,
-      // messages,
+      // messages: finalMessages,
       //});
       const completion = await this.openai.chat.completions.create({
-        messages: messages,
+        messages: finalMessages,
         model: model,
         temperature: temperature,
         frequency_penalty: 0.2,
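
A minimal usage sketch of the new parameter, assuming an already-initialized OpenaiChat instance (here called openaiChat), an async calling context, and a vision-capable model; the content-part shape follows the OpenAI chat.completions multi-modal format, and the exact fields accepted depend on the provider behind this.openai. The model name and image URL below are placeholders, not values from the repository.

// Assumption: `openaiChat` is an initialized OpenaiChat instance and the target
// model accepts OpenAI-style content parts (text + image_url).
const multiModalMessages = [
  { role: 'system', content: 'You are a helpful assistant.' },
  {
    role: 'user',
    content: [
      { type: 'text', text: 'What is in this picture?' },
      { type: 'image_url', image_url: { url: 'https://example.com/cat.png' } },
    ],
  },
];

// When `messages` is non-empty, callAi sends it as-is; prompt, chatHistory and
// customPrompt are only used to build the message list when `messages` is absent.
const result = await openaiChat.callAi({
  messages: multiModalMessages,
  model: 'gpt-4o-mini', // placeholder: any vision-capable model name
  temperature: 0.7,
});
if (result.success) {
  console.log(result.aiResponse);
}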