Compare commits
No commits in common. "452cb0e91f98db92b8bc88d23fece895cf490b1a" and "c62684407de848b3d4c01e29be7e57d3bfc4da2d" have entirely different histories.
452cb0e91f
...
c62684407d
|
@ -16,7 +16,6 @@ interface Config {
|
||||||
API_ENDPOINTS: {
|
API_ENDPOINTS: {
|
||||||
SEARXNG: string;
|
SEARXNG: string;
|
||||||
OLLAMA: string;
|
OLLAMA: string;
|
||||||
OLLAMA_AUTH_HEADER: string;
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -42,8 +41,6 @@ export const getSearxngApiEndpoint = () => loadConfig().API_ENDPOINTS.SEARXNG;
|
||||||
|
|
||||||
export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
|
export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;
|
||||||
|
|
||||||
// Returns the optional raw Authorization header value for Ollama requests,
// read from the loaded config's API_ENDPOINTS.OLLAMA_AUTH_HEADER field.
// NOTE(review): despite the `string` type on the Config interface, this is
// presumably undefined when the key is absent from the config file — callers
// must handle that case; verify against loadConfig().
export const getOllamaAuthHeader = () => loadConfig().API_ENDPOINTS.OLLAMA_AUTH_HEADER;
|
|
||||||
|
|
||||||
export const updateConfig = (config: RecursivePartial<Config>) => {
|
export const updateConfig = (config: RecursivePartial<Config>) => {
|
||||||
const currentConfig = loadConfig();
|
const currentConfig = loadConfig();
|
||||||
|
|
||||||
|
|
|
@ -5,29 +5,14 @@ import { HuggingFaceTransformersEmbeddings } from './huggingfaceTransformer';
|
||||||
import {
|
import {
|
||||||
getGroqApiKey,
|
getGroqApiKey,
|
||||||
getOllamaApiEndpoint,
|
getOllamaApiEndpoint,
|
||||||
getOllamaAuthHeader,
|
|
||||||
getOpenaiApiKey,
|
getOpenaiApiKey,
|
||||||
} from '../config';
|
} from '../config';
|
||||||
import logger from '../utils/logger';
|
import logger from '../utils/logger';
|
||||||
|
|
||||||
function getOllamaHeaders() {
|
|
||||||
const ollamaAuthHeader = getOllamaAuthHeader();
|
|
||||||
let headers;
|
|
||||||
if (typeof ollamaAuthHeader !== undefined) {
|
|
||||||
return {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
'Authorization': ollamaAuthHeader
|
|
||||||
};
|
|
||||||
} else {
|
|
||||||
return { 'Content-Type': 'application/json' };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const getAvailableChatModelProviders = async () => {
|
export const getAvailableChatModelProviders = async () => {
|
||||||
const openAIApiKey = getOpenaiApiKey();
|
const openAIApiKey = getOpenaiApiKey();
|
||||||
const groqApiKey = getGroqApiKey();
|
const groqApiKey = getGroqApiKey();
|
||||||
const ollamaEndpoint = getOllamaApiEndpoint();
|
const ollamaEndpoint = getOllamaApiEndpoint();
|
||||||
const ollamaAuthHeader = getOllamaAuthHeader();
|
|
||||||
|
|
||||||
const models = {};
|
const models = {};
|
||||||
|
|
||||||
|
@ -111,8 +96,11 @@ export const getAvailableChatModelProviders = async () => {
|
||||||
|
|
||||||
if (ollamaEndpoint) {
|
if (ollamaEndpoint) {
|
||||||
try {
|
try {
|
||||||
const headers = getOllamaHeaders();
|
const response = await fetch(`${ollamaEndpoint}/api/tags`, {
|
||||||
const response = await fetch(`${ollamaEndpoint}/api/tags`, { headers });
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
const { models: ollamaModels } = (await response.json()) as any;
|
const { models: ollamaModels } = (await response.json()) as any;
|
||||||
|
|
||||||
|
@ -120,7 +108,6 @@ export const getAvailableChatModelProviders = async () => {
|
||||||
acc[model.model] = new ChatOllama({
|
acc[model.model] = new ChatOllama({
|
||||||
baseUrl: ollamaEndpoint,
|
baseUrl: ollamaEndpoint,
|
||||||
model: model.model,
|
model: model.model,
|
||||||
headers,
|
|
||||||
temperature: 0.7,
|
temperature: 0.7,
|
||||||
});
|
});
|
||||||
return acc;
|
return acc;
|
||||||
|
@ -138,7 +125,6 @@ export const getAvailableChatModelProviders = async () => {
|
||||||
export const getAvailableEmbeddingModelProviders = async () => {
|
export const getAvailableEmbeddingModelProviders = async () => {
|
||||||
const openAIApiKey = getOpenaiApiKey();
|
const openAIApiKey = getOpenaiApiKey();
|
||||||
const ollamaEndpoint = getOllamaApiEndpoint();
|
const ollamaEndpoint = getOllamaApiEndpoint();
|
||||||
const ollamaAuthHeader = getOllamaAuthHeader();
|
|
||||||
|
|
||||||
const models = {};
|
const models = {};
|
||||||
|
|
||||||
|
@ -160,15 +146,18 @@ export const getAvailableEmbeddingModelProviders = async () => {
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ollamaEndpoint) {
|
if (ollamaEndpoint) {
|
||||||
const headers = getOllamaHeaders();
|
|
||||||
try {
|
try {
|
||||||
const response = await fetch(`${ollamaEndpoint}/api/tags`, { headers });
|
const response = await fetch(`${ollamaEndpoint}/api/tags`, {
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
const { models: ollamaModels } = (await response.json()) as any;
|
const { models: ollamaModels } = (await response.json()) as any;
|
||||||
|
|
||||||
models['ollama'] = ollamaModels.reduce((acc, model) => {
|
models['ollama'] = ollamaModels.reduce((acc, model) => {
|
||||||
acc[model.model] = new OllamaEmbeddings({
|
acc[model.model] = new OllamaEmbeddings({
|
||||||
baseUrl: ollamaEndpoint,
|
baseUrl: ollamaEndpoint,
|
||||||
headers,
|
|
||||||
model: model.model,
|
model: model.model,
|
||||||
});
|
});
|
||||||
return acc;
|
return acc;
|
||||||
|
|
Loading…
Reference in New Issue