// NOTE(review): This file appears to have been mangled in transit — the source
// was collapsed onto a few huge lines, and everything inside angle brackets
// (JSX element tags AND some TypeScript generic argument lists) looks stripped.
// The executable tokens below are preserved exactly; only comments were added
// and lines re-wrapped. The markup from `return (` onward is unrecoverable
// from this file alone and must be restored from version control.

import { Dialog, Transition } from '@headlessui/react';
import { CloudUpload, RefreshCcw, RefreshCw } from 'lucide-react';
import React, { Fragment, useEffect, useState } from 'react';

// Shape of the configuration payload served by `${NEXT_PUBLIC_API_URL}/config`.
interface SettingsType {
  chatModelProviders: { [key: string]: string[]; };
  embeddingModelProviders: { [key: string]: string[]; };
  openaiApiKey: string;
  groqApiKey: string;
  ollamaApiUrl: string;
}

// Modal settings dialog. On open it fetches the current config from the
// backend and seeds the model selections from localStorage (falling back to
// the first provider/model the API lists). On submit it POSTs the edited
// config back, mirrors the selections into localStorage, closes, and
// hard-reloads the page so the app picks up the new settings.
const SettingsDialog = ({
  isOpen,
  setIsOpen,
}: {
  isOpen: boolean;
  setIsOpen: (isOpen: boolean) => void;
}) => {
  // NOTE(review): untyped useState(null) — under strict TS `config` has type
  // `null`, so `setConfig(data)` below cannot compile. The generic argument
  // (presumably useState<SettingsType | null>) was likely stripped along with
  // the JSX tags — restore it from version control.
  const [config, setConfig] = useState(null);
  const [selectedChatModelProvider, setSelectedChatModelProvider] = useState<
    string | null
  >(null);
  // NOTE(review): same stripped-generic suspicion here and on
  // selectedEmbeddingModelProvider below (probably <string | null>).
  const [selectedChatModel, setSelectedChatModel] = useState(
    null,
  );
  const [selectedEmbeddingModelProvider, setSelectedEmbeddingModelProvider] =
    useState(null);
  const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState<
    string | null
  >(null);
  const [customOpenAIApiKey, setCustomOpenAIApiKey] = useState('');
  const [customOpenAIBaseURL, setCustomOpenAIBaseURL] = useState('');
  const [isLoading, setIsLoading] = useState(false);   // true while fetching config
  const [isUpdating, setIsUpdating] = useState(false); // true while POSTing config

  // Re-fetch the config every time the dialog is opened.
  useEffect(() => {
    if (isOpen) {
      const fetchConfig = async () => {
        setIsLoading(true);
        // NOTE(review): no res.ok check and no try/catch — a failed fetch
        // rejects here and leaves isLoading stuck at true. Confirm intended.
        const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
          headers: {
            'Content-Type': 'application/json',
          },
        });
        // NOTE(review): response body is trusted via `as SettingsType` with no
        // runtime validation.
        const data = (await res.json()) as SettingsType;
        setConfig(data);
        const chatModelProvidersKeys = Object.keys(
          data.chatModelProviders || {},
        );
        const embeddingModelProvidersKeys = Object.keys(
          data.embeddingModelProviders || {},
        );
        // Default to the first provider the API lists, or '' when none exist.
        const defaultChatModelProvider =
          chatModelProvidersKeys.length > 0 ? chatModelProvidersKeys[0] : '';
        const defaultEmbeddingModelProvider =
          embeddingModelProvidersKeys.length > 0 ?
            embeddingModelProvidersKeys[0] : '';
        // Previously stored selection wins; otherwise the API default; the
        // model falls back to the first model of the chosen provider.
        const chatModelProvider =
          localStorage.getItem('chatModelProvider') ||
          defaultChatModelProvider ||
          '';
        const chatModel =
          localStorage.getItem('chatModel') ||
          (data.chatModelProviders &&
            data.chatModelProviders[chatModelProvider]?.[0]) ||
          '';
        const embeddingModelProvider =
          localStorage.getItem('embeddingModelProvider') ||
          defaultEmbeddingModelProvider ||
          '';
        const embeddingModel =
          localStorage.getItem('embeddingModel') ||
          (data.embeddingModelProviders &&
            data.embeddingModelProviders[embeddingModelProvider]?.[0]) ||
          '';
        setSelectedChatModelProvider(chatModelProvider);
        setSelectedChatModel(chatModel);
        setSelectedEmbeddingModelProvider(embeddingModelProvider);
        setSelectedEmbeddingModel(embeddingModel);
        setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey') || '');
        // BUG(review): reads key 'openAIBaseUrl' but handleSubmit below writes
        // 'openAIBaseURL' (capital URL) — the saved base URL is never read
        // back. The two keys must match.
        setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseUrl') || '');
        setIsLoading(false);
      };

      fetchConfig();
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [isOpen]);

  // POST the edited config to the backend and persist the model selections
  // into localStorage, then close the dialog and reload the page.
  const handleSubmit = async () => {
    setIsUpdating(true);
    try {
      // NOTE(review): response status is ignored — a 4xx/5xx still proceeds
      // to the localStorage writes and the reload below.
      await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify(config),
      });

      // NOTE(review): non-null assertions — if any selection is still null,
      // localStorage stores the literal string "null". Confirm the UI
      // guarantees a selection before submit is reachable.
      localStorage.setItem('chatModelProvider', selectedChatModelProvider!);
      localStorage.setItem('chatModel', selectedChatModel!);
      localStorage.setItem(
        'embeddingModelProvider',
        selectedEmbeddingModelProvider!,
      );
      localStorage.setItem('embeddingModel', selectedEmbeddingModel!);
      localStorage.setItem('openAIApiKey', customOpenAIApiKey!);
      // BUG(review): writes 'openAIBaseURL' but the effect above reads
      // 'openAIBaseUrl' — see the matching note in fetchConfig.
      localStorage.setItem('openAIBaseURL', customOpenAIBaseURL!);
    } catch (err) {
      // NOTE(review): error is swallowed after logging; prefer console.error
      // and some user-visible feedback.
      console.log(err);
    } finally {
      // NOTE(review): the reload runs even when the POST failed — confirm
      // this is intended.
      setIsUpdating(false);
      setIsOpen(false);
      window.location.reload();
    }
  };

  return (
    /* NOTE(review): from here to the end of the component every JSX element
       tag has been stripped by the mangling — only text nodes, attribute
       expression remnants (e.g. `…(e.target.value) } className="…" />`) and
       closing braces survive. Judging from the surviving text this was a
       Headless UI Dialog/Transition with: selects for chat provider/model,
       a custom-OpenAI branch (model name / API key / base URL inputs),
       selects for embedding provider/model, inputs for the OpenAI key,
       Ollama URL and GROQ key, a loading state, and a submit button.
       Restore the markup from version control — do NOT hand-reconstruct. */
    setIsOpen(false)} >
Settings {config && !isLoading && (
{config.chatModelProviders && (

Chat model Provider

)} {selectedChatModelProvider && selectedChatModelProvider != 'custom_openai' && (

Chat Model

)} {selectedChatModelProvider && selectedChatModelProvider === 'custom_openai' && ( <>

Model name

setSelectedChatModel(e.target.value) } className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm" />

Custom OpenAI API Key

setCustomOpenAIApiKey(e.target.value) } className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm" />

Custom OpenAI Base URL

setCustomOpenAIBaseURL(e.target.value) } className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm" />
)} {/* Embedding models */} {config.embeddingModelProviders && (

Embedding model Provider

)} {selectedEmbeddingModelProvider && (

Embedding Model

)}

OpenAI API Key

setConfig({ ...config, openaiApiKey: e.target.value, }) } className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm" />

Ollama API URL

setConfig({ ...config, ollamaApiUrl: e.target.value, }) } className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm" />

GROQ API Key

setConfig({ ...config, groqApiKey: e.target.value, }) } className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm" />
)} {isLoading && (
)}

We'll refresh the page after updating the settings.

  );
};

export default SettingsDialog;