feat(providers): fix loading issues
parent 3b4b8a8b02
commit 8539ce82ad
@@ -5,6 +5,8 @@ import logger from '../../utils/logger';
 export const loadGroqChatModels = async () => {
   const groqApiKey = getGroqApiKey();
 
+  if (!groqApiKey) return {};
+
   try {
     const chatModels = {
       'LLaMA3 8b': new ChatOpenAI(
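For reference, a minimal sketch (not part of the diff) of how the guarded Groq loader reads after this change. Only the early return on a missing key comes from the commit; the config import path, model id, and constructor options are illustrative assumptions.

import { ChatOpenAI } from '@langchain/openai';
import { getGroqApiKey } from '../../config'; // assumed helper path
import logger from '../../utils/logger';

export const loadGroqChatModels = async () => {
  const groqApiKey = getGroqApiKey();

  // New guard from this commit: no API key means the provider contributes no models.
  if (!groqApiKey) return {};

  try {
    // Groq serves an OpenAI-compatible API, so ChatOpenAI is pointed at its base URL.
    const chatModels = {
      'LLaMA3 8b': new ChatOpenAI(
        {
          openAIApiKey: groqApiKey,
          modelName: 'llama3-8b-8192', // illustrative model id
          temperature: 0.7,
        },
        { baseURL: 'https://api.groq.com/openai/v1' },
      ),
    };

    return chatModels;
  } catch (err) {
    logger.error(`Error loading Groq models: ${err}`);
    return {};
  }
};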
@@ -1,7 +1,7 @@
 import { loadGroqChatModels } from './groq';
-import { loadOllamaChatModels } from './ollama';
-import { loadOpenAIChatModels, loadOpenAIEmbeddingsModel } from './openai';
-import { loadTransformersEmbeddingsModel } from './transformers';
+import { loadOllamaChatModels, loadOllamaEmbeddingsModels } from './ollama';
+import { loadOpenAIChatModels, loadOpenAIEmbeddingsModels } from './openai';
+import { loadTransformersEmbeddingsModels } from './transformers';
 
 const chatModelProviders = {
   openai: loadOpenAIChatModels,
@@ -10,16 +10,19 @@ const chatModelProviders = {
 };
 
 const embeddingModelProviders = {
-  openai: loadOpenAIEmbeddingsModel,
-  local: loadTransformersEmbeddingsModel,
-  ollama: loadOllamaChatModels,
+  openai: loadOpenAIEmbeddingsModels,
+  local: loadTransformersEmbeddingsModels,
+  ollama: loadOllamaEmbeddingsModels,
 };
 
 export const getAvailableChatModelProviders = async () => {
   const models = {};
 
   for (const provider in chatModelProviders) {
-    models[provider] = await chatModelProviders[provider]();
+    const providerModels = await chatModelProviders[provider]();
+    if (Object.keys(providerModels).length > 0) {
+      models[provider] = providerModels
+    }
   }
 
   models['custom_openai'] = {}
@@ -31,7 +34,10 @@ export const getAvailableEmbeddingModelProviders = async () => {
   const models = {};
 
   for (const provider in embeddingModelProviders) {
-    models[provider] = await embeddingModelProviders[provider]();
+    const providerModels = await embeddingModelProviders[provider]();
+    if (Object.keys(providerModels).length > 0) {
+      models[provider] = providerModels
+    }
  }
 
   return models;
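A rough usage sketch of the new filtering behaviour (assumed caller code, not from the diff): a provider whose loader returns an empty object, e.g. Groq without an API key, is now left out of the result instead of appearing as an empty entry. Only 'custom_openai' is still added unconditionally, as the diff shows.

import {
  getAvailableChatModelProviders,
  getAvailableEmbeddingModelProviders,
} from './providers'; // assumed import path

const chatProviders = await getAvailableChatModelProviders();
const embeddingProviders = await getAvailableEmbeddingModelProviders();

// Before this commit every provider appeared, configured or not, e.g.
//   { openai: {...}, groq: {}, ollama: {}, custom_openai: {} }
// After it, unconfigured providers are dropped, e.g.
//   { openai: {...}, custom_openai: {} }
console.log(Object.keys(chatProviders), Object.keys(embeddingProviders));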
@@ -6,6 +6,8 @@ import { ChatOllama } from '@langchain/community/chat_models/ollama';
 export const loadOllamaChatModels = async () => {
   const ollamaEndpoint = getOllamaApiEndpoint();
 
+  if (!ollamaEndpoint) return {};
+
   try {
     const response = await fetch(`${ollamaEndpoint}/api/tags`, {
       headers: {
@@ -31,9 +33,11 @@ export const loadOllamaChatModels = async () => {
   }
 };
 
-export const loadOpenAIEmbeddingsModel = async () => {
+export const loadOllamaEmbeddingsModels = async () => {
   const ollamaEndpoint = getOllamaApiEndpoint();
 
+  if (!ollamaEndpoint) return {};
+
   try {
     const response = await fetch(`${ollamaEndpoint}/api/tags`, {
       headers: {
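The renamed Ollama embeddings loader follows the same pattern. A sketch assuming the endpoint serves the standard Ollama /api/tags listing; only the guard and the fetch are visible in the diff, the header value and the mapping into OllamaEmbeddings instances are illustrative.

import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
import { getOllamaApiEndpoint } from '../../config'; // assumed helper path
import logger from '../../utils/logger';

export const loadOllamaEmbeddingsModels = async () => {
  const ollamaEndpoint = getOllamaApiEndpoint();

  // New guard from this commit: no configured endpoint means no models.
  if (!ollamaEndpoint) return {};

  try {
    const response = await fetch(`${ollamaEndpoint}/api/tags`, {
      headers: { 'Content-Type': 'application/json' }, // assumed header
    });

    const { models } = (await response.json()) as { models: { name: string }[] };

    // One embeddings handle per locally pulled model.
    return models.reduce<Record<string, OllamaEmbeddings>>((acc, model) => {
      acc[model.name] = new OllamaEmbeddings({
        baseUrl: ollamaEndpoint,
        model: model.name,
      });
      return acc;
    }, {});
  } catch (err) {
    logger.error(`Error loading Ollama embeddings models: ${err}`);
    return {};
  }
};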
@@ -5,6 +5,8 @@ import logger from '../../utils/logger';
 export const loadOpenAIChatModels = async () => {
   const openAIApiKey = getOpenaiApiKey();
 
+  if (!openAIApiKey) return {};
+
   try {
     const chatModels = {
       'GPT-3.5 turbo': new ChatOpenAI({
@@ -36,9 +38,11 @@ export const loadOpenAIChatModels = async () => {
   }
 };
 
-export const loadOpenAIEmbeddingsModel = async () => {
+export const loadOpenAIEmbeddingsModels = async () => {
   const openAIApiKey = getOpenaiApiKey();
 
+  if (!openAIApiKey) return {};
+
   try {
     const embeddingModels = {
       'Text embedding 3 small': new OpenAIEmbeddings({
@@ -1,7 +1,7 @@
 import logger from '../../utils/logger';
 import { HuggingFaceTransformersEmbeddings } from '../huggingfaceTransformer';
 
-export const loadTransformersEmbeddingsModel = async () => {
+export const loadTransformersEmbeddingsModels = async () => {
   try {
     const embeddingModels = {
       'BGE Small': new HuggingFaceTransformersEmbeddings({
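Likewise, a sketch of the pluralised transformers loader. The wrapper import is the one visible in the diff; the option name and the concrete model id passed to it are assumptions.

import logger from '../../utils/logger';
import { HuggingFaceTransformersEmbeddings } from '../huggingfaceTransformer';

export const loadTransformersEmbeddingsModels = async () => {
  try {
    const embeddingModels = {
      'BGE Small': new HuggingFaceTransformersEmbeddings({
        modelName: 'Xenova/bge-small-en-v1.5', // assumed local ONNX model id
      }),
    };

    return embeddingModels;
  } catch (err) {
    logger.error(`Error loading Transformers embeddings models: ${err}`);
    return {};
  }
};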