Jerrypluay 875ca65ed9 🚀 fix(openaiChat): simplify messages handling by removing unnecessary checks and defaults
 feat(aiCaller): enhance multimodal messages formatting for better processing of user content

 feat(ai): update image URL references for consistency across message handling
2025-11-30 13:02:22 +08:00

80 lines
1.8 KiB
JavaScript

import OpenAI from 'openai';
class OpenaiChat {
  constructor() {
    // Lazily created OpenAI client; stays null until init() is called.
    this.openai = null;
  }

  /**
   * Initialize the OpenAI-compatible client.
   * @param {string} apiKey - API key
   * @param {string} baseUrl - Base URL of the OpenAI-compatible API
   */
  init(apiKey, baseUrl) {
    this.openai = new OpenAI({
      apiKey,
      baseURL: baseUrl,
    });
  }

  /**
   * Send a chat-completion request.
   * @param {Object} options
   * @param {string} [options.prompt] - Latest user utterance (ignored when `messages` is provided)
   * @param {Array<Object>} [options.chatHistory=[]] - Prior conversation turns
   * @param {string} options.model - Model identifier
   * @param {number} [options.temperature] - Sampling temperature
   * @param {string} [options.customPrompt] - System prompt text
   * @param {Array<Object>} [options.messages=[]] - Pre-built (e.g. multimodal) message array; takes precedence over prompt/history
   * @returns {Promise<{success: boolean, aiResponse?: string}>} success flag plus the model reply on success
   */
  async callAi({ prompt, chatHistory = [], model, temperature, customPrompt, messages = [] }) {
    if (!this.openai) {
      logger.error('[crystelf-ai] ai未初始化..');
      return { success: false };
    }

    // A caller-supplied (multimodal) message array wins over the assembled prompt.
    const finalMessages =
      messages.length > 0
        ? messages
        : [
            { role: 'system', content: customPrompt ?? '' },
            ...chatHistory,
            { role: 'user', content: prompt },
          ];

    try {
      const completion = await this.openai.chat.completions.create({
        messages: finalMessages,
        model,
        temperature,
        frequency_penalty: 0.2,
        presence_penalty: 0.2,
        stream: false,
      });
      // Guard against a missing/empty choices array or null content instead of
      // letting a TypeError fall into the generic catch below.
      const aiResponse = completion?.choices?.[0]?.message?.content;
      if (aiResponse == null) {
        logger.error('[crystelf-ai] 模型返回内容为空');
        return { success: false };
      }
      return {
        success: true,
        aiResponse,
      };
    } catch (err) {
      logger.error(err);
      return { success: false };
    }
  }
}

export default OpenaiChat;