Compare commits


No commits in common. "452cb0e91f98db92b8bc88d23fece895cf490b1a" and "c62684407de848b3d4c01e29be7e57d3bfc4da2d" have entirely different histories.

2 changed files with 11 additions and 25 deletions

View File

@@ -16,7 +16,6 @@ interface Config {
   API_ENDPOINTS: {
     SEARXNG: string;
     OLLAMA: string;
-    OLLAMA_AUTH_HEADER: string;
   };
 }
@@ -42,8 +41,6 @@ export const getSearxngApiEndpoint = () => loadConfig().API_ENDPOINTS.SEARXNG;
 export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
-export const getOllamaAuthHeader = () => loadConfig().API_ENDPOINTS.OLLAMA_AUTH_HEADER;
 export const updateConfig = (config: RecursivePartial<Config>) => {
   const currentConfig = loadConfig();
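Net effect of the two hunks above: the config module (its file path is not shown in this compare view) loses the OLLAMA_AUTH_HEADER field and its accessor. A minimal sketch of the resulting surface, assuming loadConfig() and RecursivePartial<T> are defined elsewhere in the same module as the context lines suggest:

// Sketch of the post-change config surface; not an exact copy of either commit.
interface Config {
  API_ENDPOINTS: {
    SEARXNG: string;
    OLLAMA: string; // OLLAMA_AUTH_HEADER is gone after this change
  };
}

export const getSearxngApiEndpoint = () => loadConfig().API_ENDPOINTS.SEARXNG;
export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
// getOllamaAuthHeader is removed entirely; any remaining import of it will fail to compile.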

View File

@@ -5,29 +5,14 @@ import { HuggingFaceTransformersEmbeddings } from './huggingfaceTransformer';
 import {
   getGroqApiKey,
   getOllamaApiEndpoint,
-  getOllamaAuthHeader,
   getOpenaiApiKey,
 } from '../config';
 import logger from '../utils/logger';
-function getOllamaHeaders() {
-  const ollamaAuthHeader = getOllamaAuthHeader();
-  let headers;
-  if (typeof ollamaAuthHeader !== undefined) {
-    return {
-      'Content-Type': 'application/json',
-      'Authorization': ollamaAuthHeader
-    };
-  } else {
-    return { 'Content-Type': 'application/json' };
-  }
-}
 export const getAvailableChatModelProviders = async () => {
   const openAIApiKey = getOpenaiApiKey();
   const groqApiKey = getGroqApiKey();
   const ollamaEndpoint = getOllamaApiEndpoint();
-  const ollamaAuthHeader = getOllamaAuthHeader();
   const models = {};
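A note on the deleted getOllamaHeaders helper above: the check typeof ollamaAuthHeader !== undefined compares the string returned by typeof against the undefined value, so the condition is always true and the Authorization header was attached even when no header was configured. Had the helper been kept instead of removed, a corrected sketch (reusing the same getOllamaAuthHeader import) could look like this:

// Hypothetical corrected helper; not part of either commit in this compare.
function getOllamaHeaders(): Record<string, string> {
  const ollamaAuthHeader = getOllamaAuthHeader();
  // typeof always yields a string, so test the value itself instead
  if (ollamaAuthHeader !== undefined && ollamaAuthHeader !== '') {
    return {
      'Content-Type': 'application/json',
      Authorization: ollamaAuthHeader,
    };
  }
  return { 'Content-Type': 'application/json' };
}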
@@ -111,8 +96,11 @@ export const getAvailableChatModelProviders = async () => {
   if (ollamaEndpoint) {
     try {
-      const headers = getOllamaHeaders();
-      const response = await fetch(`${ollamaEndpoint}/api/tags`, { headers });
+      const response = await fetch(`${ollamaEndpoint}/api/tags`, {
+        headers: {
+          'Content-Type': 'application/json',
+        },
+      });
       const { models: ollamaModels } = (await response.json()) as any;
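For context on the fetch both versions share: Ollama's GET /api/tags endpoint returns a JSON object whose models array feeds the reduce below. A typed sketch of that parsing, replacing the as-any cast (the field list is an assumption; only model is actually used by this code):

// Sketch only: a narrow response type instead of `as any`; fields beyond `model` are assumed.
interface OllamaTagsResponse {
  models: Array<{ name: string; model: string }>;
}

const res = await fetch(`${ollamaEndpoint}/api/tags`, {
  headers: { 'Content-Type': 'application/json' },
});
const { models: ollamaModels } = (await res.json()) as OllamaTagsResponse;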
@@ -120,7 +108,6 @@ export const getAvailableChatModelProviders = async () => {
         acc[model.model] = new ChatOllama({
           baseUrl: ollamaEndpoint,
           model: model.model,
-          headers,
           temperature: 0.7,
         });
         return acc;
@@ -138,7 +125,6 @@ export const getAvailableChatModelProviders = async () => {
 export const getAvailableEmbeddingModelProviders = async () => {
   const openAIApiKey = getOpenaiApiKey();
   const ollamaEndpoint = getOllamaApiEndpoint();
-  const ollamaAuthHeader = getOllamaAuthHeader();
   const models = {};
@@ -160,15 +146,18 @@ export const getAvailableEmbeddingModelProviders = async () => {
   }
   if (ollamaEndpoint) {
-    const headers = getOllamaHeaders();
     try {
-      const response = await fetch(`${ollamaEndpoint}/api/tags`, { headers });
+      const response = await fetch(`${ollamaEndpoint}/api/tags`, {
+        headers: {
+          'Content-Type': 'application/json',
+        },
+      });
       const { models: ollamaModels } = (await response.json()) as any;
       models['ollama'] = ollamaModels.reduce((acc, model) => {
         acc[model.model] = new OllamaEmbeddings({
           baseUrl: ollamaEndpoint,
-          headers,
           model: model.model,
         });
         return acc;