mirror of https://github.com/Jerryplusy/crystelf-plugin.git (synced 2025-07-04 14:19:19 +00:00)
Support calling Ollama
parent d37ce5033d · commit 4cc09172ea
61  models/ollama/ollamaChat.js (new file)

@@ -0,0 +1,61 @@
import axios from 'axios';

class OllamaChat {
  constructor() {
    this.apiUrl = null;
    this.apiKey = null;
  }

  /**
   * Initialize the Ollama client
   * @param apiKey API key
   * @param baseUrl Ollama API base URL
   */
  init(apiKey, baseUrl) {
    this.apiKey = apiKey;
    this.apiUrl = baseUrl;
  }

  /**
   * Call the Ollama API and return the generated text
   * @param prompt user command + prompt (merged)
   * @param chatHistory chat history
   * @param model model name
   * @param temperature sampling temperature
   * @returns {Promise<{success: boolean}|{success: boolean, aiResponse: (*|string)}>}
   */
  async callAi({ prompt, chatHistory = [], model, temperature }) {
    if (!this.apiUrl || !this.apiKey) {
      logger.err('Ollama not initialized..');
      return { success: false };
    }

    const requestData = {
      model: model,
      prompt: prompt,
      temperature: temperature,
      history: chatHistory,
    };

    try {
      const response = await axios.post(`${this.apiUrl}/v1/complete`, requestData, {
        headers: {
          Authorization: `Bearer ${this.apiKey}`,
          'Content-Type': 'application/json',
        },
      });

      const aiResponse = response.data?.choices?.[0]?.text || '';

      return {
        success: true,
        aiResponse: aiResponse,
      };
    } catch (err) {
      logger.err(err);
      return { success: false };
    }
  }
}

export default OllamaChat;
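For context, a minimal usage sketch of the new class (not part of the commit). The API key, base URL, model name, and temperature below are placeholder values chosen for illustration; real values would be supplied by the caller.

import OllamaChat from './models/ollama/ollamaChat.js';

const ollama = new OllamaChat();
// Placeholder key and endpoint, assuming an Ollama-compatible server.
ollama.init('sk-placeholder-key', 'http://127.0.0.1:11434');

const result = await ollama.callAi({
  prompt: 'Summarize this message in one sentence.',
  chatHistory: [],
  model: 'llama3', // placeholder model name
  temperature: 0.7,
});

if (result.success) {
  console.log(result.aiResponse);
}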
@@ -1,10 +1,14 @@
 import OpenAI from 'openai';

-class ChatTools {
+class OpenaiChat {
   constructor() {
     this.openai = null;
   }

+  /**
+   * @param apiKey API key
+   * @param baseUrl OpenAI API base URL
+   */
   init(apiKey, baseUrl) {
     this.openai = new OpenAI({
       apiKey: apiKey,
@@ -12,13 +16,31 @@ class ChatTools {
     });
   }

+  /**
+   * @param prompt main content
+   * @param chatHistory chat history
+   * @param model model name
+   * @param temperature sampling temperature
+   * @param customPrompt system prompt
+   * @returns {Promise<{success: boolean, aiResponse: string}|{}>}
+   */
   async callAi({ prompt, chatHistory = [], model, temperature, customPrompt }) {
     if (!this.openai) {
       logger.err('AI not initialized..');
-      return {};
+      return { success: false };
     }
-    let systemMessage = { role: 'system', content: customPrompt || '' };
-    const messages = [systemMessage, ...chatHistory, { role: 'user', content: prompt }];
+    let systemMessage = {
+      role: 'system',
+      content: customPrompt || '',
+    };
+    const messages = [
+      systemMessage,
+      ...chatHistory,
+      {
+        role: 'user',
+        content: prompt,
+      },
+    ];

     try {
       const completion = await this.openai.chat.completions.create({
@@ -37,8 +59,9 @@ class ChatTools {
       };
     } catch (err) {
       logger.err(err);
-      return {};
+      return { success: false };
     }
   }
 }
-export default ChatTools;
+
+export default OpenaiChat;
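A similar sketch for the renamed OpenaiChat class, showing how customPrompt becomes the system message. The module path is not visible in this diff, and the key, base URL, and model name are placeholders.

// Assumes OpenaiChat has been imported from the module shown above.
const chat = new OpenaiChat();
chat.init('sk-placeholder-key', 'https://api.openai.com/v1');

const reply = await chat.callAi({
  prompt: 'Hello!',
  chatHistory: [{ role: 'assistant', content: 'Earlier reply' }],
  model: 'gpt-4o-mini', // placeholder model name
  temperature: 0.7,
  customPrompt: 'You are a helpful assistant.',
});

if (reply.success) {
  console.log(reply.aiResponse);
}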
@@ -16,6 +16,7 @@
   "author": "Jerry",
   "License": "MIT",
   "dependencies": {
+    "axios": "^1.8.4",
     "chalk": "^5.4.1",
     "openai": "^4.89.0"
   },
1101  pnpm-lock.yaml (generated, new file)
File diff suppressed because it is too large