Compare commits

...

6 Commits

Author SHA1 Message Date
projectmoon 452cb0e91f pass headers to llm instance 2024-06-29 11:42:18 +02:00
projectmoon 3bbe03cb9c also auth on embeddings 2024-06-29 11:07:11 +02:00
projectmoon 2e17e31c18 fix 2024-06-29 10:49:27 +02:00
projectmoon ee59393dc4 import 2024-06-29 10:47:52 +02:00
projectmoon 7229a6d91f better 2024-06-29 10:45:37 +02:00
projectmoon e34b6f3d8d ghetto ollama auth 2024-06-29 10:41:30 +02:00
2 changed files with 25 additions and 11 deletions

View File

@@ -16,6 +16,7 @@ interface Config {
API_ENDPOINTS: {
SEARXNG: string;
OLLAMA: string;
OLLAMA_AUTH_HEADER: string;
};
}
@@ -41,6 +42,8 @@ export const getSearxngApiEndpoint = () => loadConfig().API_ENDPOINTS.SEARXNG;
export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
export const getOllamaAuthHeader = () => loadConfig().API_ENDPOINTS.OLLAMA_AUTH_HEADER;
export const updateConfig = (config: RecursivePartial<Config>) => {
const currentConfig = loadConfig();

View File

@@ -5,14 +5,29 @@ import { HuggingFaceTransformersEmbeddings } from './huggingfaceTransformer';
import {
getGroqApiKey,
getOllamaApiEndpoint,
getOllamaAuthHeader,
getOpenaiApiKey,
} from '../config';
import logger from '../utils/logger';
/**
 * Builds the HTTP headers used for every Ollama API request.
 *
 * Adds an `Authorization` header only when an auth header value is
 * actually configured; otherwise returns just the JSON content type.
 *
 * Bug fixed: the original condition was
 * `typeof ollamaAuthHeader !== undefined` — `typeof` yields a *string*
 * (e.g. 'undefined'), so comparing it to the undefined *value* is always
 * true, and `Authorization: undefined` was sent even with no auth
 * configured. We now test the value itself, and also treat an empty
 * string as "not configured".
 */
function getOllamaHeaders() {
  const ollamaAuthHeader = getOllamaAuthHeader();

  if (ollamaAuthHeader) {
    return {
      'Content-Type': 'application/json',
      Authorization: ollamaAuthHeader,
    };
  }

  return { 'Content-Type': 'application/json' };
}
export const getAvailableChatModelProviders = async () => {
const openAIApiKey = getOpenaiApiKey();
const groqApiKey = getGroqApiKey();
const ollamaEndpoint = getOllamaApiEndpoint();
const ollamaAuthHeader = getOllamaAuthHeader();
const models = {};
@@ -96,11 +111,8 @@ export const getAvailableChatModelProviders = async () => {
if (ollamaEndpoint) {
try {
const response = await fetch(`${ollamaEndpoint}/api/tags`, {
headers: {
'Content-Type': 'application/json',
},
});
const headers = getOllamaHeaders();
const response = await fetch(`${ollamaEndpoint}/api/tags`, { headers });
const { models: ollamaModels } = (await response.json()) as any;
@@ -108,6 +120,7 @@ export const getAvailableChatModelProviders = async () => {
acc[model.model] = new ChatOllama({
baseUrl: ollamaEndpoint,
model: model.model,
headers,
temperature: 0.7,
});
return acc;
@@ -125,6 +138,7 @@ export const getAvailableChatModelProviders = async () => {
export const getAvailableEmbeddingModelProviders = async () => {
const openAIApiKey = getOpenaiApiKey();
const ollamaEndpoint = getOllamaApiEndpoint();
const ollamaAuthHeader = getOllamaAuthHeader();
const models = {};
@@ -146,18 +160,15 @@ export const getAvailableEmbeddingModelProviders = async () => {
}
if (ollamaEndpoint) {
const headers = getOllamaHeaders();
try {
const response = await fetch(`${ollamaEndpoint}/api/tags`, {
headers: {
'Content-Type': 'application/json',
},
});
const response = await fetch(`${ollamaEndpoint}/api/tags`, { headers });
const { models: ollamaModels } = (await response.json()) as any;
models['ollama'] = ollamaModels.reduce((acc, model) => {
acc[model.model] = new OllamaEmbeddings({
baseUrl: ollamaEndpoint,
headers,
model: model.model,
});
return acc;