// serviceInterface.js
import path from 'path';
import { pathToFileURL } from 'url';
import fetch from 'node-fetch';
import OpenAI from 'openai';
import MistralClient from '@mistralai/mistralai';

// Load the user's config from the project root. Converting the absolute
// path to a file URL keeps the dynamic import working on Windows as well
// as POSIX systems.
const configPath = path.join(process.cwd(), 'interintel.config.js');
let config;
try {
  const importedModule = await import(pathToFileURL(configPath).href);
  config = importedModule.default;
} catch (error) {
  console.error('Failed to import config:', error);
  process.exit(1); // the clients below cannot be constructed without a config
}

const mistralClient = new MistralClient(config.apiKey);
// The OpenAI constructor takes client options such as `apiKey` only;
// the model is selected per request in chatCompletion() below.
const openai = new OpenAI({
  apiKey: config.apiKey,
});
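
// A sketch of the expected interintel.config.js shape, inferred from the
// usage above (values are illustrative, not from the original source):
//
//   export default {
//     apiKey: 'sk-...',    // key for the chosen AI service
//     aiVersion: 'gpt-4o', // model identifier
//   };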
/**
 * Send a chat completion request to the selected AI service.
 * `aiService` is one of 'openai', 'mistral', or 'ollama'.
 * Returns the provider's response object (or, for Ollama, just the reply
 * text), or null if the request fails.
 */
async function chatCompletion(aiService, messages, model) {
  try {
    if (aiService === 'openai') {
      const response = await openai.chat.completions.create({
        messages,
        model,
        stream: false,
      });
      return response;
    } else if (aiService === 'mistral') {
      const chatResponse = await mistralClient.chat({
        model, // or a specific model you wish to use
        messages,
      });
      return chatResponse;
    } else if (aiService === 'ollama') {
      // Ollama exposes a local chat endpoint; send the same payload shape.
      const fetchResponse = await fetch('http://localhost:11434/api/chat', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({ messages, model, stream: false }),
      });
      const response = await fetchResponse.json();
      // Note: unlike the other branches, this returns only the reply text.
      return response.message.content;
    } else {
      throw new Error(`Invalid AI service: ${aiService}`);
    }
  } catch (error) {
    console.error('Error:', error);
    return null;
  }
}
export { chatCompletion };
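
// --- Usage sketch (not part of the module) ---
// A minimal example of calling chatCompletion, assuming a valid
// interintel.config.js and, for the 'ollama' branch, a local Ollama server
// with the named model pulled. The model names here are illustrative.
//
//   import { chatCompletion } from './serviceInterface.js';
//
//   const messages = [{ role: 'user', content: 'Say hello in one word.' }];
//
//   const openaiRes = await chatCompletion('openai', messages, 'gpt-4o');
//   console.log(openaiRes?.choices[0].message.content);
//
//   const ollamaText = await chatCompletion('ollama', messages, 'llama3');
//   console.log(ollamaText); // the Ollama branch returns the text directly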