Compare commits
6 Commits
c62684407d
...
452cb0e91f
Author | SHA1 | Date |
---|---|---|
projectmoon | 452cb0e91f | |
projectmoon | 3bbe03cb9c | |
projectmoon | 2e17e31c18 | |
projectmoon | ee59393dc4 | |
projectmoon | 7229a6d91f | |
projectmoon | e34b6f3d8d |
|
@ -16,6 +16,7 @@ interface Config {
|
|||
API_ENDPOINTS: {
|
||||
SEARXNG: string;
|
||||
OLLAMA: string;
|
||||
OLLAMA_AUTH_HEADER: string;
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -41,6 +42,8 @@ export const getSearxngApiEndpoint = () => loadConfig().API_ENDPOINTS.SEARXNG;
|
|||
|
||||
/** Returns the Ollama API base URL from the loaded configuration. */
export const getOllamaApiEndpoint = () => {
  const config = loadConfig();
  return config.API_ENDPOINTS.OLLAMA;
};
|
||||
|
||||
/** Returns the configured Ollama `Authorization` header value, if any. */
export const getOllamaAuthHeader = () => {
  const config = loadConfig();
  return config.API_ENDPOINTS.OLLAMA_AUTH_HEADER;
};
|
||||
|
||||
export const updateConfig = (config: RecursivePartial<Config>) => {
|
||||
const currentConfig = loadConfig();
|
||||
|
||||
|
|
|
@ -5,14 +5,29 @@ import { HuggingFaceTransformersEmbeddings } from './huggingfaceTransformer';
|
|||
import {
|
||||
getGroqApiKey,
|
||||
getOllamaApiEndpoint,
|
||||
getOllamaAuthHeader,
|
||||
getOpenaiApiKey,
|
||||
} from '../config';
|
||||
import logger from '../utils/logger';
|
||||
|
||||
function getOllamaHeaders() {
|
||||
const ollamaAuthHeader = getOllamaAuthHeader();
|
||||
let headers;
|
||||
if (typeof ollamaAuthHeader !== undefined) {
|
||||
return {
|
||||
'Content-Type': 'application/json',
|
||||
'Authorization': ollamaAuthHeader
|
||||
};
|
||||
} else {
|
||||
return { 'Content-Type': 'application/json' };
|
||||
}
|
||||
}
|
||||
|
||||
export const getAvailableChatModelProviders = async () => {
|
||||
const openAIApiKey = getOpenaiApiKey();
|
||||
const groqApiKey = getGroqApiKey();
|
||||
const ollamaEndpoint = getOllamaApiEndpoint();
|
||||
const ollamaAuthHeader = getOllamaAuthHeader();
|
||||
|
||||
const models = {};
|
||||
|
||||
|
@ -96,11 +111,8 @@ export const getAvailableChatModelProviders = async () => {
|
|||
|
||||
if (ollamaEndpoint) {
|
||||
try {
|
||||
const response = await fetch(`${ollamaEndpoint}/api/tags`, {
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
const headers = getOllamaHeaders();
|
||||
const response = await fetch(`${ollamaEndpoint}/api/tags`, { headers });
|
||||
|
||||
const { models: ollamaModels } = (await response.json()) as any;
|
||||
|
||||
|
@ -108,6 +120,7 @@ export const getAvailableChatModelProviders = async () => {
|
|||
acc[model.model] = new ChatOllama({
|
||||
baseUrl: ollamaEndpoint,
|
||||
model: model.model,
|
||||
headers,
|
||||
temperature: 0.7,
|
||||
});
|
||||
return acc;
|
||||
|
@ -125,6 +138,7 @@ export const getAvailableChatModelProviders = async () => {
|
|||
export const getAvailableEmbeddingModelProviders = async () => {
|
||||
const openAIApiKey = getOpenaiApiKey();
|
||||
const ollamaEndpoint = getOllamaApiEndpoint();
|
||||
const ollamaAuthHeader = getOllamaAuthHeader();
|
||||
|
||||
const models = {};
|
||||
|
||||
|
@ -146,18 +160,15 @@ export const getAvailableEmbeddingModelProviders = async () => {
|
|||
}
|
||||
|
||||
if (ollamaEndpoint) {
|
||||
const headers = getOllamaHeaders();
|
||||
try {
|
||||
const response = await fetch(`${ollamaEndpoint}/api/tags`, {
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
const response = await fetch(`${ollamaEndpoint}/api/tags`, { headers });
|
||||
const { models: ollamaModels } = (await response.json()) as any;
|
||||
|
||||
models['ollama'] = ollamaModels.reduce((acc, model) => {
|
||||
acc[model.model] = new OllamaEmbeddings({
|
||||
baseUrl: ollamaEndpoint,
|
||||
headers,
|
||||
model: model.model,
|
||||
});
|
||||
return acc;
|
||||
|
|
Loading…
Reference in New Issue