Compare commits


No commits in common. "fb6ec2fc8a2c653322fa4404b57ca23c4d46a0c1" and "c3dac38b6a0bcdbc801a73580953dfd6148c8ccf" have entirely different histories.

1 changed file with 47 additions and 36 deletions


@@ -62,6 +62,12 @@ const useSocket = (
         'embeddingModelProvider',
       );
+      if (
+        !chatModel ||
+        !chatModelProvider ||
+        !embeddingModel ||
+        !embeddingModelProvider
+      ) {
       const providers = await fetch(
         `${process.env.NEXT_PUBLIC_API_URL}/models`,
         {
@@ -71,32 +77,26 @@
         },
       ).then(async (res) => await res.json());
-      if (
-        !chatModel ||
-        !chatModelProvider ||
-        !embeddingModel ||
-        !embeddingModelProvider
-      ) {
-        if (!chatModel || !chatModelProvider) {
         const chatModelProviders = providers.chatModelProviders;
         chatModelProvider = Object.keys(chatModelProviders)[0];
         if (chatModelProvider === 'custom_openai') {
-          toast.error('Seems like you are using the custom OpenAI provider, please open the settings and configure the API key and base URL');
+          toast.error(
+            'Seems like you are using the custom OpenAI provider, please open the settings and configure the API key and base URL',
+          );
           setError(true);
           return;
         } else {
           chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
           if (
             !chatModelProviders ||
             Object.keys(chatModelProviders).length === 0
           )
             return toast.error('No chat models available');
         }
-        }
-        if (!embeddingModel || !embeddingModelProvider) {
         const embeddingModelProviders = providers.embeddingModelProviders;
         if (
@@ -109,7 +109,6 @@
           embeddingModel = Object.keys(
             embeddingModelProviders[embeddingModelProvider],
           )[0];
-        }
         localStorage.setItem('chatModel', chatModel!);
         localStorage.setItem('chatModelProvider', chatModelProvider);
@@ -119,6 +118,15 @@
           embeddingModelProvider,
         );
       } else {
+        const providers = await fetch(
+          `${process.env.NEXT_PUBLIC_API_URL}/models`,
+          {
+            headers: {
+              'Content-Type': 'application/json',
+            },
+          },
+        ).then(async (res) => await res.json());
         const chatModelProviders = providers.chatModelProviders;
         const embeddingModelProviders = providers.embeddingModelProviders;
@@ -187,6 +195,8 @@
       const timeoutId = setTimeout(() => {
         if (ws.readyState !== 1) {
+          ws.close();
+          setError(true);
           toast.error(
             'Failed to connect to the server. Please try again later.',
           );
@@ -198,6 +208,7 @@
       reconnectTimeout.current = 0;
       reconnectAttempts.current = 0;
       clearTimeout(timeoutId);
+      setError(false);
       setIsWSReady(true);
     };
@@ -220,7 +231,7 @@
       if (data.type === 'error') {
         toast.error(data.data);
       }
-    })
+    });
     setWs(ws);
   };