import { cn } from '@/lib/utils';
import {
  Dialog,
  DialogPanel,
  DialogTitle,
  Transition,
  TransitionChild,
} from '@headlessui/react';
import { CloudUpload, RefreshCcw, RefreshCw } from 'lucide-react';
import React, {
  Fragment,
  useEffect,
  useState,
  type SelectHTMLAttributes,
} from 'react';
import ThemeSwitcher from './theme/Switcher';

interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {}

// Thin wrapper around a native <input> so every text field in the dialog shares one style.
const Input = ({ className, ...restProps }: InputProps) => {
  return (
    <input
      {...restProps}
      className={cn(
        'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm',
        className,
      )}
    />
  );
};

interface SelectProps extends SelectHTMLAttributes<HTMLSelectElement> {
  options: { value: string; label: string; disabled?: boolean }[];
}

// Styled <select> that renders its options from a plain array.
export const Select = ({ className, options, ...restProps }: SelectProps) => {
  return (
    <select
      {...restProps}
      className={cn(
        'bg-light-secondary dark:bg-dark-secondary px-3 py-2 flex items-center overflow-hidden border border-light-200 dark:border-dark-200 dark:text-white rounded-lg text-sm',
        className,
      )}
    >
      {options.map(({ label, value, disabled }) => (
        <option key={value} value={value} disabled={disabled}>
          {label}
        </option>
      ))}
    </select>
  );
};

// Shape of the config object returned by the backend's /config endpoint.
interface SettingsType {
  chatModelProviders: {
    [key: string]: [Record<string, any>];
  };
  embeddingModelProviders: {
    [key: string]: [Record<string, any>];
  };
  openaiApiKey: string;
  groqApiKey: string;
  anthropicApiKey: string;
  ollamaApiUrl: string;
}

const SettingsDialog = ({
  isOpen,
  setIsOpen,
}: {
  isOpen: boolean;
  setIsOpen: (isOpen: boolean) => void;
}) => {
  const [config, setConfig] = useState<SettingsType | null>(null);
  const [chatModels, setChatModels] = useState<Record<string, any>>({});
  const [embeddingModels, setEmbeddingModels] = useState<Record<string, any>>(
    {},
  );
  const [selectedChatModelProvider, setSelectedChatModelProvider] = useState<
    string | null
  >(null);
  const [selectedChatModel, setSelectedChatModel] = useState<string | null>(
    null,
  );
  const [selectedEmbeddingModelProvider, setSelectedEmbeddingModelProvider] =
    useState<string | null>(null);
  const [selectedEmbeddingModel, setSelectedEmbeddingModel] = useState<
    string | null
  >(null);
  const [customOpenAIApiKey, setCustomOpenAIApiKey] = useState('');
  const [customOpenAIBaseURL, setCustomOpenAIBaseURL] = useState('');
  const [isLoading, setIsLoading] = useState(false);
  const [isUpdating, setIsUpdating] = useState(false);

  // Whenever the dialog opens, pull the current config from the backend and
  // seed the selections from localStorage, falling back to the first
  // provider/model the backend reports.
  useEffect(() => {
    if (isOpen) {
      const fetchConfig = async () => {
        setIsLoading(true);
        const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
          headers: {
            'Content-Type': 'application/json',
          },
        });

        const data = (await res.json()) as SettingsType;
        setConfig(data);

        const chatModelProvidersKeys = Object.keys(
          data.chatModelProviders || {},
        );
        const embeddingModelProvidersKeys = Object.keys(
          data.embeddingModelProviders || {},
        );

        const defaultChatModelProvider =
          chatModelProvidersKeys.length > 0 ? chatModelProvidersKeys[0] : '';
        const defaultEmbeddingModelProvider =
          embeddingModelProvidersKeys.length > 0
            ? embeddingModelProvidersKeys[0]
            : '';

        const chatModelProvider =
          localStorage.getItem('chatModelProvider') ||
          defaultChatModelProvider ||
          '';
        const chatModel =
          localStorage.getItem('chatModel') ||
          (data.chatModelProviders &&
            data.chatModelProviders[chatModelProvider]?.[0].name) ||
          '';
        const embeddingModelProvider =
          localStorage.getItem('embeddingModelProvider') ||
          defaultEmbeddingModelProvider ||
          '';
        const embeddingModel =
          localStorage.getItem('embeddingModel') ||
          (data.embeddingModelProviders &&
            data.embeddingModelProviders[embeddingModelProvider]?.[0].name) ||
          '';

        setSelectedChatModelProvider(chatModelProvider);
        setSelectedChatModel(chatModel);
        setSelectedEmbeddingModelProvider(embeddingModelProvider);
        setSelectedEmbeddingModel(embeddingModel);
        setCustomOpenAIApiKey(localStorage.getItem('openAIApiKey') || '');
        setCustomOpenAIBaseURL(localStorage.getItem('openAIBaseURL') || '');
        setChatModels(data.chatModelProviders || {});
        setEmbeddingModels(data.embeddingModelProviders || {});
        setIsLoading(false);
      };

      fetchConfig();
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [isOpen]);

  // Persist the selections to the backend, mirror them into localStorage for
  // quick client-side access, then reload so the new models take effect.
  const handleSubmit = async () => {
    setIsUpdating(true);

    try {
      await fetch(`${process.env.NEXT_PUBLIC_API_URL}/settings`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          chatModelProvider: selectedChatModelProvider,
          chatModel: selectedChatModel,
          embeddingModelProvider: selectedEmbeddingModelProvider,
          embeddingModel: selectedEmbeddingModel,
          openAIApiKey: customOpenAIApiKey,
          openAIBaseURL: customOpenAIBaseURL,
        }),
      });

      // Still keep localStorage for quick access on the client-side
      localStorage.setItem('chatModelProvider', selectedChatModelProvider!);
      localStorage.setItem('chatModel', selectedChatModel!);
      localStorage.setItem(
        'embeddingModelProvider',
        selectedEmbeddingModelProvider!,
      );
      localStorage.setItem('embeddingModel', selectedEmbeddingModel!);
      localStorage.setItem('openAIApiKey', customOpenAIApiKey!);
      localStorage.setItem('openAIBaseURL', customOpenAIBaseURL!);
    } catch (err) {
      console.error(err);
    } finally {
      setIsUpdating(false);
      setIsOpen(false);

      window.location.reload();
    }
  };

  return (
    <Transition appear show={isOpen} as={Fragment}>
      <Dialog
        as="div"
        className="relative z-50"
        onClose={() => setIsOpen(false)}
      >
        <TransitionChild
          as={Fragment}
          enter="ease-out duration-300"
          enterFrom="opacity-0"
          enterTo="opacity-100"
          leave="ease-in duration-200"
          leaveFrom="opacity-100"
          leaveTo="opacity-0"
        >
          <div className="fixed inset-0 bg-white/50 dark:bg-black/50" />
        </TransitionChild>
        <div className="fixed inset-0 overflow-y-auto">
          <div className="flex min-h-full items-center justify-center p-4 text-center">
            <TransitionChild
              as={Fragment}
              enter="ease-out duration-200"
              enterFrom="opacity-0 scale-95"
              enterTo="opacity-100 scale-100"
              leave="ease-in duration-100"
              leaveFrom="opacity-100 scale-100"
              leaveTo="opacity-0 scale-95"
            >
              <DialogPanel className="w-full max-w-md transform rounded-2xl bg-light-secondary dark:bg-dark-secondary p-6 text-left align-middle shadow-xl transition-all">
                <DialogTitle className="text-xl font-medium leading-6 dark:text-white">
                  Settings
                </DialogTitle>
                {config && !isLoading && (
                  <div className="flex flex-col space-y-4 mt-6">
                    <div className="flex flex-col space-y-1">
                      <p className="text-black/70 dark:text-white/70 text-sm">
                        Theme
                      </p>
                      <ThemeSwitcher />
                    </div>
                    {config.chatModelProviders && (
                      <div className="flex flex-col space-y-1">
                        <p className="text-black/70 dark:text-white/70 text-sm">
                          Chat model Provider
                        </p>
                        <Select
                          value={selectedChatModelProvider ?? undefined}
                          onChange={(e) => {
                            // assumed wiring: picking a provider selects its
                            // first model (custom_openai clears the selection)
                            setSelectedChatModelProvider(e.target.value);
                            if (e.target.value === 'custom_openai') {
                              setSelectedChatModel('');
                            } else {
                              setSelectedChatModel(
                                config.chatModelProviders[e.target.value][0]
                                  .name,
                              );
                            }
                          }}
                          options={Object.keys(config.chatModelProviders).map(
                            (provider) => ({
                              value: provider,
                              label:
                                provider.charAt(0).toUpperCase() +
                                provider.slice(1),
                            }),
                          )}
                        />
                      </div>
                    )}
                    {selectedChatModelProvider &&
                      selectedChatModelProvider != 'custom_openai' && (
                        <div className="flex flex-col space-y-1">
                          <p className="text-black/70 dark:text-white/70 text-sm">
                            Chat Model
                          </p>
                          <Select
                            value={selectedChatModel ?? undefined}
                            onChange={(e) =>
                              setSelectedChatModel(e.target.value)
                            }
                            options={(() => {
                              const chatModelProvider =
                                config.chatModelProviders[
                                  selectedChatModelProvider
                                ];

                              return chatModelProvider
                                ? chatModelProvider.length > 0
                                  ? chatModelProvider.map((model) => ({
                                      value: model.name,
                                      label: model.displayName,
                                    }))
                                  : [
                                      {
                                        value: '',
                                        label: 'No models available',
                                        disabled: true,
                                      },
                                    ]
                                : [
                                    {
                                      value: '',
                                      label:
                                        'Invalid provider, please check backend logs',
                                      disabled: true,
                                    },
                                  ];
                            })()}
                          />
                        </div>
                      )}
                    {selectedChatModelProvider &&
                      selectedChatModelProvider === 'custom_openai' && (
                        <>
                          <div className="flex flex-col space-y-1">
                            <p className="text-black/70 dark:text-white/70 text-sm">
                              Model name
                            </p>
                            <Input
                              type="text"
                              placeholder="Model name"
                              defaultValue={selectedChatModel!}
                              onChange={(e) =>
                                setSelectedChatModel(e.target.value)
                              }
                            />
                          </div>
                          <div className="flex flex-col space-y-1">
                            <p className="text-black/70 dark:text-white/70 text-sm">
                              Custom OpenAI API Key
                            </p>
                            <Input
                              type="text"
                              placeholder="Custom OpenAI API Key"
                              defaultValue={customOpenAIApiKey!}
                              onChange={(e) =>
                                setCustomOpenAIApiKey(e.target.value)
                              }
                            />
                          </div>
                          <div className="flex flex-col space-y-1">
                            <p className="text-black/70 dark:text-white/70 text-sm">
                              Custom OpenAI Base URL
                            </p>
                            <Input
                              type="text"
                              placeholder="Custom OpenAI Base URL"
                              defaultValue={customOpenAIBaseURL!}
                              onChange={(e) =>
                                setCustomOpenAIBaseURL(e.target.value)
                              }
                            />
                          </div>
                        </>
                      )}
                    {/* Embedding models */}
                    {config.embeddingModelProviders && (
                      <div className="flex flex-col space-y-1">
                        <p className="text-black/70 dark:text-white/70 text-sm">
                          Embedding model Provider
                        </p>
                        <Select
                          value={selectedEmbeddingModelProvider ?? undefined}
                          onChange={(e) => {
                            // assumed wiring: picking a provider selects its
                            // first embedding model
                            setSelectedEmbeddingModelProvider(e.target.value);
                            setSelectedEmbeddingModel(
                              config.embeddingModelProviders[e.target.value][0]
                                .name,
                            );
                          }}
                          options={Object.keys(
                            config.embeddingModelProviders,
                          ).map((provider) => ({
                            value: provider,
                            label:
                              provider.charAt(0).toUpperCase() +
                              provider.slice(1),
                          }))}
                        />
                      </div>
                    )}
                    {selectedEmbeddingModelProvider && (
                      <div className="flex flex-col space-y-1">
                        <p className="text-black/70 dark:text-white/70 text-sm">
                          Embedding Model
                        </p>
                        <Select
                          value={selectedEmbeddingModel ?? undefined}
                          onChange={(e) =>
                            setSelectedEmbeddingModel(e.target.value)
                          }
                          options={(() => {
                            const embeddingModelProvider =
                              config.embeddingModelProviders[
                                selectedEmbeddingModelProvider
                              ];

                            return embeddingModelProvider
                              ? embeddingModelProvider.length > 0
                                ? embeddingModelProvider.map((model) => ({
                                    label: model.displayName,
                                    value: model.name,
                                  }))
                                : [
                                    {
                                      label: 'No embedding models available',
                                      value: '',
                                      disabled: true,
                                    },
                                  ]
                              : [
                                  {
                                    label:
                                      'Invalid provider, please check backend logs',
                                    value: '',
                                    disabled: true,
                                  },
                                ];
                          })()}
                        />
                      </div>
                    )}

                    <div className="flex flex-col space-y-1">
                      <p className="text-black/70 dark:text-white/70 text-sm">
                        OpenAI API Key
                      </p>
                      <Input
                        type="text"
                        placeholder="OpenAI API Key"
                        defaultValue={config.openaiApiKey}
                        onChange={(e) =>
                          setConfig({
                            ...config,
                            openaiApiKey: e.target.value,
                          })
                        }
                      />
                    </div>
                    <div className="flex flex-col space-y-1">
                      <p className="text-black/70 dark:text-white/70 text-sm">
                        Ollama API URL
                      </p>
                      <Input
                        type="text"
                        placeholder="Ollama API URL"
                        defaultValue={config.ollamaApiUrl}
                        onChange={(e) =>
                          setConfig({
                            ...config,
                            ollamaApiUrl: e.target.value,
                          })
                        }
                      />
                    </div>
                    <div className="flex flex-col space-y-1">
                      <p className="text-black/70 dark:text-white/70 text-sm">
                        GROQ API Key
                      </p>
                      <Input
                        type="text"
                        placeholder="GROQ API Key"
                        defaultValue={config.groqApiKey}
                        onChange={(e) =>
                          setConfig({
                            ...config,
                            groqApiKey: e.target.value,
                          })
                        }
                      />
                    </div>
                    <div className="flex flex-col space-y-1">
                      <p className="text-black/70 dark:text-white/70 text-sm">
                        Anthropic API Key
                      </p>
                      <Input
                        type="text"
                        placeholder="Anthropic API Key"
                        defaultValue={config.anthropicApiKey}
                        onChange={(e) =>
                          setConfig({
                            ...config,
                            anthropicApiKey: e.target.value,
                          })
                        }
                      />
                    </div>
                  </div>
                )}
                {isLoading && (
                  <div className="w-full flex items-center justify-center mt-6 text-black/70 dark:text-white/70 py-6">
                    <RefreshCcw className="animate-spin" />
                  </div>
                )}

                <div className="w-full mt-6 space-y-2">
                  <p className="text-xs text-black/50 dark:text-white/50">
                    We&apos;ll refresh the page after updating the settings.
                  </p>
                  {/* Save button: shows a spinner while the settings POST is in flight */}
                  <button
                    onClick={handleSubmit}
                    disabled={isLoading || isUpdating}
                    className="bg-[#24A0ED] flex flex-row items-center space-x-2 text-white disabled:text-white/50 hover:bg-opacity-85 transition duration-100 disabled:bg-[#ececec21] rounded-full px-4 py-2"
                  >
                    {isUpdating ? (
                      <RefreshCw className="animate-spin" />
                    ) : (
                      <CloudUpload size={20} />
                    )}
                  </button>
                </div>
              </DialogPanel>
            </TransitionChild>
          </div>
        </div>
      </Dialog>
    </Transition>
  );
};

export default SettingsDialog;
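
/*
 * Usage sketch (an assumption — the parent component is not part of this file):
 * the dialog is fully controlled through `isOpen`/`setIsOpen`, so a parent only
 * needs a boolean state and its setter, e.g. wired to a settings icon.
 *
 *   const [isSettingsOpen, setIsSettingsOpen] = useState(false);
 *
 *   <button onClick={() => setIsSettingsOpen(true)}>Open settings</button>
 *   <SettingsDialog isOpen={isSettingsOpen} setIsOpen={setIsSettingsOpen} />
 */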