Mirror of https://github.com/Jerryplusy/crystelf-plugin.git, synced 2025-12-05 15:41:56 +00:00.

feat: 完善ai功能 (improve the AI features)

This commit is contained in:
parent 1c3ce32678
commit b084c71dea
apps/ai.js (12 changed lines)
@@ -31,11 +31,11 @@ export class crystelfAI extends plugin {
   async init() {
     try {
       logger.info('[crystelf-ai] 开始初始化...');
-      await SessionManager.init();
-      await KeywordMatcher.init();
-      await AiCaller.init();
-      await MemorySystem.init();
-      await Renderer.init();
+      SessionManager.init();
+      KeywordMatcher.init();
+      AiCaller.init();
+      MemorySystem.init();
+      Renderer.init();
       this.isInitialized = true;
       logger.info('[crystelf-ai] 初始化完成');
     } catch (error) {
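As reconstructed above, this hunk drops the awaits on the subsystem start-up calls. A minimal sketch of what that difference means, with the subsystems stubbed out here since their real implementations live elsewhere in the plugin:

    // Hypothetical stand-ins for the plugin's subsystems (the real modules are imported in apps/ai.js).
    const SessionManager = { async init() { /* e.g. load persisted sessions */ } };
    const KeywordMatcher = { async init() { /* e.g. load keyword tables */ } };

    // Awaited, sequential start-up: init() only resolves after every subsystem is ready,
    // and any failure is caught by the surrounding try/catch.
    async function initSequential() {
      await SessionManager.init();
      await KeywordMatcher.init();
    }

    // Fire-and-forget start-up (the style the hunk switches to): returns immediately,
    // so a rejection inside a subsystem's init() surfaces as an unhandled rejection
    // instead of reaching the try/catch shown in the hunk.
    function initFireAndForget() {
      SessionManager.init();
      KeywordMatcher.init();
    }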
@@ -181,6 +181,7 @@ async function handleMixMode(userMessage, e, aiConfig) {
 
   if (isTooLong) {
     //消息太长,使用AI回复
+    logger.info('[crystelf-ai] 消息过长,使用ai回复')
     return await callAiForResponse(userMessage, e, aiConfig);
   } else {
     const matchResult = await KeywordMatcher.matchKeywords(userMessage, 'ai');
@@ -195,6 +196,7 @@ async function handleMixMode(userMessage, e, aiConfig) {
       },
     ];
   } else {
+    logger.info('[crystelf-ai] 关键词匹配失败,使用ai回复')
     //关键词匹配失败,使用AI回复
     return await callAiForResponse(userMessage, e, aiConfig);
   }
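The two hunks above only add log lines, but together they outline the mix-mode dispatch: long messages go straight to the AI, short ones try keyword matching first and fall back to the AI on a miss. A condensed sketch of that flow; the length threshold, the stubbed helpers, and the returned message shape are assumptions, not code from the repository:

    const logger = console; // stand-in for the global Yunzai logger

    // Hypothetical stubs for the helpers whose call sites appear in the hunks.
    const KeywordMatcher = { async matchKeywords(text, scope) { return null; } };
    async function callAiForResponse(userMessage, e, aiConfig) {
      return [{ type: 'message', data: { text: '...' } }];
    }

    async function handleMixMode(userMessage, e, aiConfig) {
      const isTooLong = userMessage.length > (aiConfig.maxKeywordLength ?? 20); // assumed threshold
      if (isTooLong) {
        logger.info('[crystelf-ai] message too long, using the AI reply');
        return await callAiForResponse(userMessage, e, aiConfig);
      }
      const matchResult = await KeywordMatcher.matchKeywords(userMessage, 'ai');
      if (matchResult) {
        return [{ type: 'message', data: { text: matchResult } }]; // assumed reply shape
      }
      logger.info('[crystelf-ai] keyword match failed, using the AI reply');
      return await callAiForResponse(userMessage, e, aiConfig);
    }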
index.js (2 changed lines)
@@ -10,7 +10,7 @@ logger.info(
 );
 
 updater.checkAndUpdate().catch((err) => {
-  logger.err(err);
+  logger.error(err);
 });
 
 await crystelfInit.CSH().then(logger.mark('[crystelf-plugin] crystelf-plugin 完成初始化'));
@@ -99,9 +99,11 @@ class ResponseHandler {
 
   validateMessage(message) {
     if (!message || typeof message !== 'object') {
+      logger.info('[crystelf-ai] ai返回为空或不是对象')
       return false;
     }
     if (!message.type) {
+      logger.info('[crystelf-ai] ai响应未包含type值')
       return false;
     }
     const validTypes = [
@@ -110,8 +112,10 @@ class ResponseHandler {
       'file', 'memory'
     ];
     if (!validTypes.includes(message.type)) {
+      logger.info(`[crystelf-ai] ai返回未知的type类型:${message.type}`)
       return false;
     }
+    /**
     switch (message.type) {
       case 'message':
       case 'code':
@@ -131,7 +135,7 @@ class ResponseHandler {
         return !!(message.data && message.data.name);
       default:
         return true;
-    }
+    }*/return true;
   }
 
   /**
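The last two hunks wrap the per-type switch in a block comment and make validateMessage fall through to return true, so only the null, type, and validTypes checks stay active. A sketch of the stricter variant that the comment disables, reconstructed from the fragments visible above; the validTypes list is truncated in the diff and the message/code payload field is an assumption:

    function validateMessageStrict(message) {
      if (!message || typeof message !== 'object') return false;
      if (!message.type) return false;

      // Only 'file' and 'memory' are visible in the hunk; the full list is longer in the source.
      const validTypes = ['message', 'code', 'file', 'memory'];
      if (!validTypes.includes(message.type)) return false;

      switch (message.type) {
        case 'message':
        case 'code':
          return !!(message.data && message.data.text); // assumed payload field
        case 'file':
        case 'memory':
          return !!(message.data && message.data.name); // matches the fragment in the hunk
        default:
          return true;
      }
    }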
@@ -100,7 +100,7 @@ class SessionManager {
     const session = this.sessions.get(groupId)?.get(userId);
     if (session) {
       session.active = false;
-      logger.debug(`[crystelf-ai] 标记session不活跃: 群${groupId}, 用户${userId}`);
+      logger.info(`[crystelf-ai] 标记session不活跃: 群${groupId}, 用户${userId}`);
     }
   }
 
@@ -25,7 +25,7 @@ class OllamaChat {
   */
   async callAi({ prompt, chatHistory = [], model, temperature }) {
     if (!this.apiUrl || !this.apiKey) {
-      logger.err('ollama未初始化..');
+      logger.error('ollama未初始化..');
       return { success: false };
     }
 
@@ -51,7 +51,7 @@ class OllamaChat {
         aiResponse: aiResponse,
       };
     } catch (err) {
-      logger.err(err);
+      logger.error(err);
       return { success: false };
     }
   }
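Both chat backends report results through the same plain object, { success, aiResponse }, so a caller can branch on success without knowing which provider answered. A small usage sketch of that contract; the consumer function, model name, and fallback text are hypothetical:

    async function askBackend(backend, prompt) {
      const result = await backend.callAi({
        prompt,
        chatHistory: [],
        model: 'llama3',   // placeholder model name
        temperature: 0.7,
      });
      if (!result.success) {
        // On failure only { success: false } comes back, so aiResponse is undefined here.
        return 'The AI backend is currently unavailable.';
      }
      return result.aiResponse;
    }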
@@ -12,7 +12,7 @@ class OpenaiChat {
   init(apiKey, baseUrl) {
     this.openai = new OpenAI({
       apiKey: apiKey,
-      baseUrl: baseUrl,
+      baseURL: baseUrl,
     });
   }
 
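The rename matters because the openai Node SDK constructor only recognizes baseURL (capital URL); a lowercase baseUrl is not a known option, so the custom endpoint was effectively ignored and requests kept using the default OpenAI endpoint. A minimal sketch of the corrected client setup, with placeholder values rather than the plugin's real configuration:

    import OpenAI from 'openai';

    const openai = new OpenAI({
      apiKey: 'sk-example',                   // placeholder key
      baseURL: 'https://api.example.com/v1',  // must be spelled baseURL for custom endpoints
    });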
@@ -26,7 +26,7 @@ class OpenaiChat {
   */
   async callAi({ prompt, chatHistory = [], model, temperature, customPrompt }) {
     if (!this.openai) {
-      logger.err('[crystelf-ai] ai未初始化..');
+      logger.error('[crystelf-ai] ai未初始化..');
       return { success: false };
     }
     let systemMessage = {
let systemMessage = {
|
||||
@ -43,23 +43,29 @@ class OpenaiChat {
|
||||
];
|
||||
|
||||
try {
|
||||
logger.info("[DEBUG] 请求体:", {
|
||||
model: model,
|
||||
messages,
|
||||
});
|
||||
|
||||
const completion = await this.openai.chat.completions.create({
|
||||
messages: messages,
|
||||
model: model,
|
||||
temperature: temperature,
|
||||
frequency_penalty: 0.2,
|
||||
presence_penalty: 0.2,
|
||||
response_format:{"type": "json_object"}
|
||||
response_format:{type: "json_object"},
|
||||
stream:false
|
||||
});
|
||||
|
||||
const aiResponse = completion.choices[0].message.content;
|
||||
|
||||
logger.info(aiResponse);
|
||||
return {
|
||||
success: true,
|
||||
aiResponse: aiResponse,
|
||||
};
|
||||
} catch (err) {
|
||||
logger.err(err);
|
||||
logger.error(err);
|
||||
return { success: false };
|
||||
}
|
||||
}
|
||||
|
||||
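The final hunk switches the request to non-streaming JSON mode (response_format: { type: "json_object" }, stream: false). A minimal sketch of issuing such a request and defensively parsing the reply; the model name and prompts are placeholders, and JSON mode generally expects the word JSON to appear somewhere in the prompt:

    import OpenAI from 'openai';

    const openai = new OpenAI({ apiKey: 'sk-example' }); // placeholder key

    async function callJsonMode(userText) {
      const completion = await openai.chat.completions.create({
        model: 'gpt-4o-mini', // placeholder model
        messages: [
          { role: 'system', content: 'Reply with a JSON object such as {"type":"message","data":{"text":"..."}}.' },
          { role: 'user', content: userText },
        ],
        temperature: 0.7,
        frequency_penalty: 0.2,
        presence_penalty: 0.2,
        response_format: { type: 'json_object' },
        stream: false,
      });

      const raw = completion.choices[0].message.content;
      try {
        // JSON mode constrains the output, but parsing can still fail (e.g. on truncation),
        // so callers should handle a null result.
        return JSON.parse(raw);
      } catch {
        return null;
      }
    }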