feat(providers): add `displayName` property
commit 1589f16d5a
parent 40f551c426
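Provider loaders used to return a flat map from a human-readable label straight to a model instance. After this commit they return one record per model, keyed by the model's API identifier, carrying both a `displayName` for the UI and the instantiated `model`. A minimal sketch of the new shape (the type name is illustrative, not code from this commit):

// Illustrative sketch of what each provider loader now returns.
// 'ProviderModels' is a hypothetical name; the loaders themselves are untyped.
type ProviderModels = Record<
  string, // stable model key, e.g. 'gpt-4o-mini'
  {
    displayName: string; // label shown in the settings UI
    model: unknown; // ChatOpenAI | ChatAnthropic | ChatOllama | an embeddings instance
  }
>;

Every consumer that previously used a map value directly now unwraps `.model`, and the config route exposes `{ name, displayName }` pairs to the frontend instead of bare key arrays.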
@@ -9,26 +9,38 @@ export const loadAnthropicChatModels = async () => {
 
   try {
     const chatModels = {
-      'Claude 3.5 Sonnet': new ChatAnthropic({
-        temperature: 0.7,
-        anthropicApiKey: anthropicApiKey,
-        model: 'claude-3-5-sonnet-20240620',
-      }),
-      'Claude 3 Opus': new ChatAnthropic({
-        temperature: 0.7,
-        anthropicApiKey: anthropicApiKey,
-        model: 'claude-3-opus-20240229',
-      }),
-      'Claude 3 Sonnet': new ChatAnthropic({
-        temperature: 0.7,
-        anthropicApiKey: anthropicApiKey,
-        model: 'claude-3-sonnet-20240229',
-      }),
-      'Claude 3 Haiku': new ChatAnthropic({
-        temperature: 0.7,
-        anthropicApiKey: anthropicApiKey,
-        model: 'claude-3-haiku-20240307',
-      }),
+      'claude-3-5-sonnet-20240620': {
+        displayName: 'Claude 3.5 Sonnet',
+        model: new ChatAnthropic({
+          temperature: 0.7,
+          anthropicApiKey: anthropicApiKey,
+          model: 'claude-3-5-sonnet-20240620',
+        }),
+      },
+      'claude-3-opus-20240229': {
+        displayName: 'Claude 3 Opus',
+        model: new ChatAnthropic({
+          temperature: 0.7,
+          anthropicApiKey: anthropicApiKey,
+          model: 'claude-3-opus-20240229',
+        }),
+      },
+      'claude-3-sonnet-20240229': {
+        displayName: 'Claude 3 Sonnet',
+        model: new ChatAnthropic({
+          temperature: 0.7,
+          anthropicApiKey: anthropicApiKey,
+          model: 'claude-3-sonnet-20240229',
+        }),
+      },
+      'claude-3-haiku-20240307': {
+        displayName: 'Claude 3 Haiku',
+        model: new ChatAnthropic({
+          temperature: 0.7,
+          anthropicApiKey: anthropicApiKey,
+          model: 'claude-3-haiku-20240307',
+        }),
+      },
     };
 
     return chatModels;
@@ -9,76 +9,97 @@ export const loadGroqChatModels = async () => {
 
   try {
     const chatModels = {
-      'Llama 3.1 70B': new ChatOpenAI(
-        {
-          openAIApiKey: groqApiKey,
-          modelName: 'llama-3.1-70b-versatile',
-          temperature: 0.7,
-        },
-        {
-          baseURL: 'https://api.groq.com/openai/v1',
-        },
-      ),
-      'Llama 3.1 8B': new ChatOpenAI(
-        {
-          openAIApiKey: groqApiKey,
-          modelName: 'llama-3.1-8b-instant',
-          temperature: 0.7,
-        },
-        {
-          baseURL: 'https://api.groq.com/openai/v1',
-        },
-      ),
-      'LLaMA3 8b': new ChatOpenAI(
-        {
-          openAIApiKey: groqApiKey,
-          modelName: 'llama3-8b-8192',
-          temperature: 0.7,
-        },
-        {
-          baseURL: 'https://api.groq.com/openai/v1',
-        },
-      ),
-      'LLaMA3 70b': new ChatOpenAI(
-        {
-          openAIApiKey: groqApiKey,
-          modelName: 'llama3-70b-8192',
-          temperature: 0.7,
-        },
-        {
-          baseURL: 'https://api.groq.com/openai/v1',
-        },
-      ),
-      'Mixtral 8x7b': new ChatOpenAI(
-        {
-          openAIApiKey: groqApiKey,
-          modelName: 'mixtral-8x7b-32768',
-          temperature: 0.7,
-        },
-        {
-          baseURL: 'https://api.groq.com/openai/v1',
-        },
-      ),
-      'Gemma 7b': new ChatOpenAI(
-        {
-          openAIApiKey: groqApiKey,
-          modelName: 'gemma-7b-it',
-          temperature: 0.7,
-        },
-        {
-          baseURL: 'https://api.groq.com/openai/v1',
-        },
-      ),
-      'Gemma2 9b': new ChatOpenAI(
-        {
-          openAIApiKey: groqApiKey,
-          modelName: 'gemma2-9b-it',
-          temperature: 0.7,
-        },
-        {
-          baseURL: 'https://api.groq.com/openai/v1',
-        },
-      ),
+      'llama-3.1-70b-versatile': {
+        displayName: 'Llama 3.1 70B',
+        model: new ChatOpenAI(
+          {
+            openAIApiKey: groqApiKey,
+            modelName: 'llama-3.1-70b-versatile',
+            temperature: 0.7,
+          },
+          {
+            baseURL: 'https://api.groq.com/openai/v1',
+          },
+        ),
+      },
+      'llama-3.1-8b-instant': {
+        displayName: 'Llama 3.1 8B',
+        model: new ChatOpenAI(
+          {
+            openAIApiKey: groqApiKey,
+            modelName: 'llama-3.1-8b-instant',
+            temperature: 0.7,
+          },
+          {
+            baseURL: 'https://api.groq.com/openai/v1',
+          },
+        ),
+      },
+      'llama3-8b-8192': {
+        displayName: 'LLaMA3 8B',
+        model: new ChatOpenAI(
+          {
+            openAIApiKey: groqApiKey,
+            modelName: 'llama3-8b-8192',
+            temperature: 0.7,
+          },
+          {
+            baseURL: 'https://api.groq.com/openai/v1',
+          },
+        ),
+      },
+      'llama3-70b-8192': {
+        displayName: 'LLaMA3 70B',
+        model: new ChatOpenAI(
+          {
+            openAIApiKey: groqApiKey,
+            modelName: 'llama3-70b-8192',
+            temperature: 0.7,
+          },
+          {
+            baseURL: 'https://api.groq.com/openai/v1',
+          },
+        ),
+      },
+      'mixtral-8x7b-32768': {
+        displayName: 'Mixtral 8x7B',
+        model: new ChatOpenAI(
+          {
+            openAIApiKey: groqApiKey,
+            modelName: 'mixtral-8x7b-32768',
+            temperature: 0.7,
+          },
+          {
+            baseURL: 'https://api.groq.com/openai/v1',
+          },
+        ),
+      },
+      'gemma-7b-it': {
+        displayName: 'Gemma 7B',
+        model: new ChatOpenAI(
+          {
+            openAIApiKey: groqApiKey,
+            modelName: 'gemma-7b-it',
+            temperature: 0.7,
+          },
+          {
+            baseURL: 'https://api.groq.com/openai/v1',
+          },
+        ),
+      },
+      'gemma2-9b-it': {
+        displayName: 'Gemma2 9B',
+        model: new ChatOpenAI(
+          {
+            openAIApiKey: groqApiKey,
+            modelName: 'gemma2-9b-it',
+            temperature: 0.7,
+          },
+          {
+            baseURL: 'https://api.groq.com/openai/v1',
+          },
+        ),
+      },
     };
 
     return chatModels;
@@ -18,11 +18,15 @@ export const loadOllamaChatModels = async () => {
     const { models: ollamaModels } = (await response.json()) as any;
 
     const chatModels = ollamaModels.reduce((acc, model) => {
-      acc[model.model] = new ChatOllama({
-        baseUrl: ollamaEndpoint,
-        model: model.model,
-        temperature: 0.7,
-      });
+      acc[model.model] = {
+        displayName: model.name,
+        model: new ChatOllama({
+          baseUrl: ollamaEndpoint,
+          model: model.model,
+          temperature: 0.7,
+        }),
+      };
 
       return acc;
     }, {});
 
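The Ollama entries derive `displayName` from `model.name` in the tag listing the loader fetches. Assuming the usual Ollama /api/tags response (an assumption; the request itself is outside this hunk), the two fields used look like:

// Trimmed /api/tags payload (assumed shape; other fields omitted):
// { "models": [{ "name": "llama3:latest", "model": "llama3:latest", ... }] }

so the record stays keyed by `model.model` while `model.name` becomes the label.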
@@ -48,10 +52,14 @@ export const loadOllamaEmbeddingsModels = async () => {
     const { models: ollamaModels } = (await response.json()) as any;
 
     const embeddingsModels = ollamaModels.reduce((acc, model) => {
-      acc[model.model] = new OllamaEmbeddings({
-        baseUrl: ollamaEndpoint,
-        model: model.model,
-      });
+      acc[model.model] = {
+        displayName: model.name,
+        model: new OllamaEmbeddings({
+          baseUrl: ollamaEndpoint,
+          model: model.model,
+        }),
+      };
 
       return acc;
     }, {});
 
@@ -9,31 +9,46 @@ export const loadOpenAIChatModels = async () => {
 
   try {
     const chatModels = {
-      'GPT-3.5 turbo': new ChatOpenAI({
-        openAIApiKey,
-        modelName: 'gpt-3.5-turbo',
-        temperature: 0.7,
-      }),
-      'GPT-4': new ChatOpenAI({
-        openAIApiKey,
-        modelName: 'gpt-4',
-        temperature: 0.7,
-      }),
-      'GPT-4 turbo': new ChatOpenAI({
-        openAIApiKey,
-        modelName: 'gpt-4-turbo',
-        temperature: 0.7,
-      }),
-      'GPT-4 omni': new ChatOpenAI({
-        openAIApiKey,
-        modelName: 'gpt-4o',
-        temperature: 0.7,
-      }),
-      'GPT-4 omni mini': new ChatOpenAI({
-        openAIApiKey,
-        modelName: 'gpt-4o-mini',
-        temperature: 0.7,
-      }),
+      'gpt-3.5-turbo': {
+        displayName: 'GPT-3.5 Turbo',
+        model: new ChatOpenAI({
+          openAIApiKey,
+          modelName: 'gpt-3.5-turbo',
+          temperature: 0.7,
+        }),
+      },
+      'gpt-4': {
+        displayName: 'GPT-4',
+        model: new ChatOpenAI({
+          openAIApiKey,
+          modelName: 'gpt-4',
+          temperature: 0.7,
+        }),
+      },
+      'gpt-4-turbo': {
+        displayName: 'GPT-4 turbo',
+        model: new ChatOpenAI({
+          openAIApiKey,
+          modelName: 'gpt-4-turbo',
+          temperature: 0.7,
+        }),
+      },
+      'gpt-4o': {
+        displayName: 'GPT-4 omni',
+        model: new ChatOpenAI({
+          openAIApiKey,
+          modelName: 'gpt-4o',
+          temperature: 0.7,
+        }),
+      },
+      'gpt-4o-mini': {
+        displayName: 'GPT-4 omni mini',
+        model: new ChatOpenAI({
+          openAIApiKey,
+          modelName: 'gpt-4o-mini',
+          temperature: 0.7,
+        }),
+      },
     };
 
     return chatModels;
@@ -50,14 +65,20 @@ export const loadOpenAIEmbeddingsModels = async () => {
 
   try {
     const embeddingModels = {
-      'Text embedding 3 small': new OpenAIEmbeddings({
-        openAIApiKey,
-        modelName: 'text-embedding-3-small',
-      }),
-      'Text embedding 3 large': new OpenAIEmbeddings({
-        openAIApiKey,
-        modelName: 'text-embedding-3-large',
-      }),
+      'text-embedding-3-small': {
+        displayName: 'Text Embedding 3 Small',
+        model: new OpenAIEmbeddings({
+          openAIApiKey,
+          modelName: 'text-embedding-3-small',
+        }),
+      },
+      'text-embedding-3-large': {
+        displayName: 'Text Embedding 3 Large',
+        model: new OpenAIEmbeddings({
+          openAIApiKey,
+          modelName: 'text-embedding-3-large',
+        }),
+      },
     };
 
     return embeddingModels;
@@ -4,15 +4,24 @@ import { HuggingFaceTransformersEmbeddings } from '../huggingfaceTransformer';
 export const loadTransformersEmbeddingsModels = async () => {
   try {
     const embeddingModels = {
-      'BGE Small': new HuggingFaceTransformersEmbeddings({
-        modelName: 'Xenova/bge-small-en-v1.5',
-      }),
-      'GTE Small': new HuggingFaceTransformersEmbeddings({
-        modelName: 'Xenova/gte-small',
-      }),
-      'Bert Multilingual': new HuggingFaceTransformersEmbeddings({
-        modelName: 'Xenova/bert-base-multilingual-uncased',
-      }),
+      'xenova-bge-small-en-v1.5': {
+        displayName: 'BGE Small',
+        model: new HuggingFaceTransformersEmbeddings({
+          modelName: 'Xenova/bge-small-en-v1.5',
+        }),
+      },
+      'xenova-gte-small': {
+        displayName: 'GTE Small',
+        model: new HuggingFaceTransformersEmbeddings({
+          modelName: 'Xenova/gte-small',
+        }),
+      },
+      'xenova-bert-base-multilingual-uncased': {
+        displayName: 'Bert Multilingual',
+        model: new HuggingFaceTransformersEmbeddings({
+          modelName: 'Xenova/bert-base-multilingual-uncased',
+        }),
+      },
     };
 
     return embeddingModels;
@@ -10,38 +10,54 @@ import {
   getOpenaiApiKey,
   updateConfig,
 } from '../config';
+import logger from '../utils/logger';
 
 const router = express.Router();
 
 router.get('/', async (_, res) => {
-  const config = {};
+  try {
+    const config = {};
 
-  const [chatModelProviders, embeddingModelProviders] = await Promise.all([
-    getAvailableChatModelProviders(),
-    getAvailableEmbeddingModelProviders(),
-  ]);
+    const [chatModelProviders, embeddingModelProviders] = await Promise.all([
+      getAvailableChatModelProviders(),
+      getAvailableEmbeddingModelProviders(),
+    ]);
 
-  config['chatModelProviders'] = {};
-  config['embeddingModelProviders'] = {};
+    config['chatModelProviders'] = {};
+    config['embeddingModelProviders'] = {};
 
-  for (const provider in chatModelProviders) {
-    config['chatModelProviders'][provider] = Object.keys(
-      chatModelProviders[provider],
-    );
-  }
+    for (const provider in chatModelProviders) {
+      config['chatModelProviders'][provider] = Object.keys(
+        chatModelProviders[provider],
+      ).map((model) => {
+        return {
+          name: model,
+          displayName: chatModelProviders[provider][model].displayName,
+        };
+      });
+    }
 
-  for (const provider in embeddingModelProviders) {
-    config['embeddingModelProviders'][provider] = Object.keys(
-      embeddingModelProviders[provider],
-    );
-  }
+    for (const provider in embeddingModelProviders) {
+      config['embeddingModelProviders'][provider] = Object.keys(
+        embeddingModelProviders[provider],
+      ).map((model) => {
+        return {
+          name: model,
+          displayName: embeddingModelProviders[provider][model].displayName,
+        };
+      });
+    }
 
-  config['openaiApiKey'] = getOpenaiApiKey();
-  config['ollamaApiUrl'] = getOllamaApiEndpoint();
-  config['anthropicApiKey'] = getAnthropicApiKey();
-  config['groqApiKey'] = getGroqApiKey();
+    config['openaiApiKey'] = getOpenaiApiKey();
+    config['ollamaApiUrl'] = getOllamaApiEndpoint();
+    config['anthropicApiKey'] = getAnthropicApiKey();
+    config['groqApiKey'] = getGroqApiKey();
 
-  res.status(200).json(config);
+    res.status(200).json(config);
+  } catch (err: any) {
+    res.status(500).json({ message: 'An error has occurred.' });
+    logger.error(`Error getting config: ${err.message}`);
+  }
 });
 
 router.post('/', async (req, res) => {
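With the added `.map()`, each provider in the GET /config response now lists `{ name, displayName }` objects instead of bare key strings. An illustrative payload fragment (model values taken from the OpenAI loader above):

{
  "chatModelProviders": {
    "openai": [
      { "name": "gpt-4o-mini", "displayName": "GPT-4 omni mini" }
    ]
  }
}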
@@ -26,7 +26,7 @@ router.post('/', async (req, res) => {
     let llm: BaseChatModel | undefined;
 
     if (chatModels[provider] && chatModels[provider][chatModel]) {
-      llm = chatModels[provider][chatModel] as BaseChatModel | undefined;
+      llm = chatModels[provider][chatModel].model as BaseChatModel | undefined;
     }
 
     if (!llm) {
@@ -26,7 +26,7 @@ router.post('/', async (req, res) => {
     let llm: BaseChatModel | undefined;
 
     if (chatModels[provider] && chatModels[provider][chatModel]) {
-      llm = chatModels[provider][chatModel] as BaseChatModel | undefined;
+      llm = chatModels[provider][chatModel].model as BaseChatModel | undefined;
     }
 
     if (!llm) {
@@ -26,7 +26,7 @@ router.post('/', async (req, res) => {
     let llm: BaseChatModel | undefined;
 
     if (chatModels[provider] && chatModels[provider][chatModel]) {
-      llm = chatModels[provider][chatModel] as BaseChatModel | undefined;
+      llm = chatModels[provider][chatModel].model as BaseChatModel | undefined;
     }
 
     if (!llm) {
@@ -45,9 +45,8 @@ export const handleConnection = async (
       chatModelProviders[chatModelProvider][chatModel] &&
       chatModelProvider != 'custom_openai'
     ) {
-      llm = chatModelProviders[chatModelProvider][chatModel] as unknown as
-        | BaseChatModel
-        | undefined;
+      llm = chatModelProviders[chatModelProvider][chatModel]
+        .model as unknown as BaseChatModel | undefined;
     } else if (chatModelProvider == 'custom_openai') {
       llm = new ChatOpenAI({
         modelName: chatModel,
@@ -65,7 +64,7 @@ export const handleConnection = async (
     ) {
       embeddings = embeddingModelProviders[embeddingModelProvider][
         embeddingModel
-      ] as Embeddings | undefined;
+      ].model as Embeddings | undefined;
     }
 
     if (!llm || !embeddings) {
@@ -64,7 +64,7 @@ const Searchvideos = ({
 
       const data = await res.json();
 
       const videos = data.videos ?? [];
       setVideos(videos);
       setSlides(
         videos.map((video: Video) => {
@@ -49,10 +49,10 @@ export const Select = ({ className, options, ...restProps }: SelectProps) => {
 
 interface SettingsType {
   chatModelProviders: {
-    [key: string]: string[];
+    [key: string]: [Record<string, any>];
   };
   embeddingModelProviders: {
-    [key: string]: string[];
+    [key: string]: [Record<string, any>];
   };
   openaiApiKey: string;
   groqApiKey: string;
@@ -68,6 +68,10 @@ const SettingsDialog = ({
   setIsOpen: (isOpen: boolean) => void;
 }) => {
   const [config, setConfig] = useState<SettingsType | null>(null);
+  const [chatModels, setChatModels] = useState<Record<string, any>>({});
+  const [embeddingModels, setEmbeddingModels] = useState<Record<string, any>>(
+    {},
+  );
   const [selectedChatModelProvider, setSelectedChatModelProvider] = useState<
     string | null
   >(null);
@@ -118,7 +122,7 @@ const SettingsDialog = ({
       const chatModel =
         localStorage.getItem('chatModel') ||
         (data.chatModelProviders &&
-          data.chatModelProviders[chatModelProvider]?.[0]) ||
+          data.chatModelProviders[chatModelProvider]?.[0].name) ||
         '';
       const embeddingModelProvider =
         localStorage.getItem('embeddingModelProvider') ||
@@ -127,7 +131,7 @@ const SettingsDialog = ({
       const embeddingModel =
         localStorage.getItem('embeddingModel') ||
         (data.embeddingModelProviders &&
-          data.embeddingModelProviders[embeddingModelProvider]?.[0]) ||
+          data.embeddingModelProviders[embeddingModelProvider]?.[0].name) ||
         '';
 
       setSelectedChatModelProvider(chatModelProvider);
@@ -136,6 +140,8 @@ const SettingsDialog = ({
       setSelectedEmbeddingModel(embeddingModel);
       setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey') || '');
       setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseURL') || '');
+      setChatModels(data.chatModelProviders || {});
+      setEmbeddingModels(data.embeddingModelProviders || {});
       setIsLoading(false);
     };
 
@@ -229,7 +235,8 @@ const SettingsDialog = ({
                       setSelectedChatModel('');
                     } else {
                       setSelectedChatModel(
-                        config.chatModelProviders[e.target.value][0],
+                        config.chatModelProviders[e.target.value][0]
+                          .name,
                       );
                     }
                   }}
@@ -264,8 +271,8 @@ const SettingsDialog = ({
                   return chatModelProvider
                     ? chatModelProvider.length > 0
                       ? chatModelProvider.map((model) => ({
-                          value: model,
-                          label: model,
+                          value: model.name,
+                          label: model.displayName,
                         }))
                       : [
                           {
@@ -341,7 +348,8 @@ const SettingsDialog = ({
                 onChange={(e) => {
                   setSelectedEmbeddingModelProvider(e.target.value);
                   setSelectedEmbeddingModel(
-                    config.embeddingModelProviders[e.target.value][0],
+                    config.embeddingModelProviders[e.target.value][0]
+                      .name,
                   );
                 }}
                 options={Object.keys(
@@ -374,8 +382,8 @@ const SettingsDialog = ({
                   return embeddingModelProvider
                     ? embeddingModelProvider.length > 0
                       ? embeddingModelProvider.map((model) => ({
-                          label: model,
-                          value: model,
+                          label: model.displayName,
+                          value: model.name,
                         }))
                       : [
                           {
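Net effect on the frontend: both selects now submit and persist the stable model key while rendering the friendly label, so values already stored in localStorage keep working when display names change. The two map() calls above boil down to the same transform (hypothetical helper, not in the diff):

// Hypothetical helper equivalent to the two option-mapping calls above.
const toOptions = (models: { name: string; displayName: string }[]) =>
  models.map((m) => ({ value: m.name, label: m.displayName }));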