added persistent storage for the settings

parent 9bfe56f7ce
commit 5b882f4e13
@@ -17,3 +17,14 @@ export const chats = sqliteTable('chats', {
   createdAt: text('createdAt').notNull(),
   focusMode: text('focusMode').notNull(),
 });
+
+export const settings = sqliteTable('settings', {
+  id: integer('id').primaryKey(),
+  chatModelProvider: text('chatModelProvider'),
+  chatModel: text('chatModel'),
+  embeddingModelProvider: text('embeddingModelProvider'),
+  embeddingModel: text('embeddingModel'),
+  openAIApiKey: text('openAIApiKey'),
+  openAIBaseURL: text('openAIBaseURL'),
+  // TODO: add user auth
+});
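For reference, a row in the new settings table has roughly the following shape; this type is not part of the commit, it just spells out that id is the only required column and every model field is a nullable text column.

// Hypothetical TypeScript shape of a row in the new settings table.
type SettingsRow = {
  id: number;
  chatModelProvider: string | null;
  chatModel: string | null;
  embeddingModelProvider: string | null;
  embeddingModel: string | null;
  openAIApiKey: string | null;
  openAIBaseURL: string | null;
};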
@@ -5,6 +5,7 @@ import configRouter from './config';
 import modelsRouter from './models';
 import suggestionsRouter from './suggestions';
 import chatsRouter from './chats';
+import settingsRouter from './settings';
 
 const router = express.Router();
 
@@ -14,5 +15,6 @@ router.use('/config', configRouter);
 router.use('/models', modelsRouter);
 router.use('/suggestions', suggestionsRouter);
 router.use('/chats', chatsRouter);
+router.use('/settings', settingsRouter);
 
 export default router;
@@ -0,0 +1,38 @@
+import express from 'express';
+import db from '../db';
+import { settings } from '../db/schema';
+
+const router = express.Router();
+
+router.post('/', async (req, res) => {
+  try {
+    const { chatModelProvider, chatModel, embeddingModelProvider, embeddingModel, openAIApiKey, openAIBaseURL } = req.body;
+
+    // TODO: Add user authentication
+
+    await db.insert(settings).values({
+      chatModelProvider,
+      chatModel,
+      embeddingModelProvider,
+      embeddingModel,
+      openAIApiKey,
+      openAIBaseURL,
+    }).execute();
+
+    res.status(200).json({ message: 'Settings saved successfully' });
+  } catch (err) {
+    res.status(500).json({ message: 'An error occurred while saving settings' });
+  }
+});
+
+router.get('/', async (req, res) => {
+  try {
+    // TODO: Add user authentication
+    const userSettings = await db.query.settings.findFirst();
+    res.status(200).json(userSettings);
+  } catch (err) {
+    res.status(500).json({ message: 'An error occurred while fetching settings' });
+  }
+});
+
+export default router;
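Note that the POST handler above inserts a fresh row on every save, while the GET handler returns the first row it finds, so repeated saves accumulate rows. A minimal sketch of an update-in-place alternative, assuming the same Drizzle setup and its eq helper; the upsertSettings helper and the SettingsValues type below are hypothetical and not part of this commit.

import { eq } from 'drizzle-orm';
import db from '../db';
import { settings } from '../db/schema';

// Hypothetical value type matching the optional text columns of the settings table.
type SettingsValues = {
  chatModelProvider?: string;
  chatModel?: string;
  embeddingModelProvider?: string;
  embeddingModel?: string;
  openAIApiKey?: string;
  openAIBaseURL?: string;
};

// Hypothetical helper: reuse the existing settings row if there is one,
// otherwise create the first row.
export const upsertSettings = async (values: SettingsValues) => {
  const existing = await db.query.settings.findFirst();

  if (existing) {
    await db
      .update(settings)
      .set(values)
      .where(eq(settings.id, existing.id))
      .execute();
  } else {
    await db.insert(settings).values(values).execute();
  }
};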
@@ -21,6 +21,25 @@ export type Message = {
   sources?: Document[];
 };
 
+
+const fetchSettings = async () => {
+  try {
+    const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/settings`);
+    const settings = await res.json();
+
+    if (settings) {
+      localStorage.setItem('chatModelProvider', settings.chatModelProvider);
+      localStorage.setItem('chatModel', settings.chatModel);
+      localStorage.setItem('embeddingModelProvider', settings.embeddingModelProvider);
+      localStorage.setItem('embeddingModel', settings.embeddingModel);
+      localStorage.setItem('openAIApiKey', settings.openAIApiKey);
+      localStorage.setItem('openAIBaseURL', settings.openAIBaseURL);
+    }
+  } catch (err) {
+    console.error('Failed to fetch settings:', err);
+  }
+};
+
 const useSocket = (
   url: string,
   setIsWSReady: (ready: boolean) => void,
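One subtlety in fetchSettings above: every model column in the new table is nullable, and localStorage.setItem stringifies null, so an unset value would be stored as the literal string "null". A small guard like the following (hypothetical, not part of this commit) would keep unset values out of localStorage.

// Hypothetical guard: only persist values that are actually set.
const setIfPresent = (key: string, value: string | null | undefined) => {
  if (value !== null && value !== undefined) {
    localStorage.setItem(key, value);
  }
};

// Example usage inside fetchSettings:
// setIfPresent('chatModel', settings.chatModel);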
@@ -31,6 +50,8 @@ const useSocket = (
   useEffect(() => {
     if (!ws) {
       const connectWs = async () => {
+        await fetchSettings();
+
         let chatModel = localStorage.getItem('chatModel');
         let chatModelProvider = localStorage.getItem('chatModelProvider');
         let embeddingModel = localStorage.getItem('embeddingModel');
@@ -306,6 +327,9 @@ const ChatWindow = ({ id }: { id?: string }) => {
 
   const messagesRef = useRef<Message[]>([]);
 
+  useEffect(() => {
+    fetchSettings();
+  }, []);
   useEffect(() => {
     messagesRef.current = messages;
   }, [messages]);
@@ -146,31 +146,35 @@ const SettingsDialog = ({
 
   const handleSubmit = async () => {
     setIsUpdating(true);
 
     try {
-      await fetch(`${process.env.NEXT_PUBLIC_API_URL}/config`, {
+      await fetch(`${process.env.NEXT_PUBLIC_API_URL}/settings`, {
         method: 'POST',
         headers: {
           'Content-Type': 'application/json',
         },
-        body: JSON.stringify(config),
+        body: JSON.stringify({
+          chatModelProvider: selectedChatModelProvider,
+          chatModel: selectedChatModel,
+          embeddingModelProvider: selectedEmbeddingModelProvider,
+          embeddingModel: selectedEmbeddingModel,
+          openAIApiKey: customOpenAIApiKey,
+          openAIBaseURL: customOpenAIBaseURL,
+        }),
       });
-
+      // Still keep localStorage for quick access on the client-side
       localStorage.setItem('chatModelProvider', selectedChatModelProvider!);
       localStorage.setItem('chatModel', selectedChatModel!);
-      localStorage.setItem(
-        'embeddingModelProvider',
-        selectedEmbeddingModelProvider!,
-      );
+      localStorage.setItem('embeddingModelProvider', selectedEmbeddingModelProvider!);
       localStorage.setItem('embeddingModel', selectedEmbeddingModel!);
       localStorage.setItem('openAIApiKey', customOpenAIApiKey!);
       localStorage.setItem('openAIBaseURL', customOpenAIBaseURL!);
     } catch (err) {
-      console.log(err);
+      console.error(err);
     } finally {
       setIsUpdating(false);
       setIsOpen(false);
 
       window.location.reload();
     }
   };
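The handleSubmit change above does not check the response status, so a failed save still falls through to the localStorage writes and the reload. A minimal sketch of a stricter variant; the saveSettings helper below is hypothetical and not part of this commit.

// Hypothetical helper: surface a failed save instead of silently continuing.
const saveSettings = async (body: Record<string, string | undefined>) => {
  const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/settings`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  });

  if (!res.ok) {
    throw new Error(`Failed to save settings: ${res.status}`);
  }
};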