// Notes/src/services/llm/providers/ollama_service.ts
import options from '../../options.js';
import { BaseAIService } from '../base_ai_service.js';
import type { Message, ChatCompletionOptions, ChatResponse } from '../ai_interface.js';
import sanitizeHtml from 'sanitize-html';
import { OllamaMessageFormatter } from '../formatters/ollama_formatter.js';
2025-03-02 19:39:10 -08:00
interface OllamaMessage {
role: string;
content: string;
}
interface OllamaResponse {
model: string;
created_at: string;
message: OllamaMessage;
done: boolean;
total_duration: number;
load_duration: number;
prompt_eval_count: number;
prompt_eval_duration: number;
eval_count: number;
eval_duration: number;
}
2025-03-02 19:39:10 -08:00
export class OllamaService extends BaseAIService {
private formatter: OllamaMessageFormatter;
2025-03-02 19:39:10 -08:00
constructor() {
super('Ollama');
this.formatter = new OllamaMessageFormatter();
2025-03-02 19:39:10 -08:00
}
isAvailable(): boolean {
return super.isAvailable() && !!options.getOption('ollamaBaseUrl');
2025-03-02 19:39:10 -08:00
}
async generateChatCompletion(messages: Message[], opts: ChatCompletionOptions = {}): Promise<ChatResponse> {
if (!this.isAvailable()) {
throw new Error('Ollama service is not available. Check API URL in settings.');
2025-03-02 19:39:10 -08:00
}
const apiBase = options.getOption('ollamaBaseUrl');
const model = opts.model || options.getOption('ollamaDefaultModel') || 'llama3';
2025-03-02 19:39:10 -08:00
const temperature = opts.temperature !== undefined
? opts.temperature
: parseFloat(options.getOption('aiTemperature') || '0.7');
const systemPrompt = this.getSystemPrompt(opts.systemPrompt || options.getOption('aiSystemPrompt'));
try {
// Use the formatter to prepare messages
const formattedMessages = this.formatter.formatMessages(messages, systemPrompt);
console.log(`Sending to Ollama with formatted messages:`, JSON.stringify(formattedMessages, null, 2));
const response = await fetch(`${apiBase}/api/chat`, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
model,
messages: formattedMessages,
options: {
temperature
},
stream: false
})
});
if (!response.ok) {
const errorBody = await response.text();
console.error(`Ollama API error: ${response.status} ${response.statusText}`, errorBody);
throw new Error(`Ollama API error: ${response.status} ${response.statusText}`);
}
const data: OllamaResponse = await response.json();
console.log('Raw response from Ollama:', JSON.stringify(data, null, 2));
console.log('Parsed Ollama response:', JSON.stringify(data, null, 2));
return {
text: data.message.content,
model: data.model,
provider: this.getName(),
usage: {
promptTokens: data.prompt_eval_count,
completionTokens: data.eval_count,
totalTokens: data.prompt_eval_count + data.eval_count
}
};
} catch (error) {
console.error('Ollama service error:', error);
throw error;
}
2025-03-02 19:39:10 -08:00
}
}