feat(chat-window): lint & beautify

ItzCrazyKns 2024-08-04 18:14:46 +05:30 committed by GitHub
parent c4932c659a
commit 9c1936ec2c
1 changed file with 37 additions and 48 deletions

@@ -38,53 +38,54 @@ const useSocket = (
           'embeddingModelProvider',
         );
+        const providers = await fetch(
+          `${process.env.NEXT_PUBLIC_API_URL}/models`,
+          {
+            headers: {
+              'Content-Type': 'application/json',
+            },
+          },
+        ).then(async (res) => await res.json());
         if (
           !chatModel ||
           !chatModelProvider ||
           !embeddingModel ||
           !embeddingModelProvider
         ) {
-          const providers = await fetch(
-            `${process.env.NEXT_PUBLIC_API_URL}/models`,
-            {
-              headers: {
-                'Content-Type': 'application/json',
-              },
-            },
-          ).then(async (res) => await res.json());
-
-          const chatModelProviders = providers.chatModelProviders;
-
-          chatModelProvider = Object.keys(chatModelProviders)[0];
-
-          if (chatModelProvider === 'custom_openai') {
-            toast.error(
-              'Seems like you are using the custom OpenAI provider, please open the settings and configure the API key and base URL',
-            );
-            setError(true);
-            return;
-          } else {
-            chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
-
-            if (
-              !chatModelProviders ||
-              Object.keys(chatModelProviders).length === 0
-            )
-              return toast.error('No chat models available');
-          }
-
-          const embeddingModelProviders = providers.embeddingModelProviders;
-
-          if (
-            !embeddingModelProviders ||
-            Object.keys(embeddingModelProviders).length === 0
-          )
-            return toast.error('No embedding models available');
-
-          embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
-          embeddingModel = Object.keys(
-            embeddingModelProviders[embeddingModelProvider],
-          )[0];
+          if (!chatModel || !chatModelProvider) {
+            const chatModelProviders = providers.chatModelProviders;
+
+            chatModelProvider = Object.keys(chatModelProviders)[0];
+
+            if (chatModelProvider === 'custom_openai') {
+              toast.error('Seems like you are using the custom OpenAI provider, please open the settings and configure the API key and base URL');
+              setError(true);
+              return;
+            } else {
+              chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
+
+              if (
+                !chatModelProviders ||
+                Object.keys(chatModelProviders).length === 0
+              )
+                return toast.error('No chat models available');
+            }
+          }
+
+          if (!embeddingModel || !embeddingModelProvider) {
+            const embeddingModelProviders = providers.embeddingModelProviders;
+
+            if (
+              !embeddingModelProviders ||
+              Object.keys(embeddingModelProviders).length === 0
+            )
+              return toast.error('No embedding models available');
+
+            embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
+            embeddingModel = Object.keys(
+              embeddingModelProviders[embeddingModelProvider],
+            )[0];
+          }
 
           localStorage.setItem('chatModel', chatModel!);
           localStorage.setItem('chatModelProvider', chatModelProvider);
@@ -94,15 +95,6 @@ const useSocket = (
             embeddingModelProvider,
           );
         } else {
-          const providers = await fetch(
-            `${process.env.NEXT_PUBLIC_API_URL}/models`,
-            {
-              headers: {
-                'Content-Type': 'app lication/json',
-              },
-            },
-          ).then(async (res) => await res.json());
-
           const chatModelProviders = providers.chatModelProviders;
           const embeddingModelProviders = providers.embeddingModelProviders;
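
For readers skimming the diff: the two hunks above hoist the duplicated `/models` fetch out of both branches so it runs exactly once, and the missing chat and embedding defaults are then filled in separately. The sketch below is not code from the commit; it is a hedged, standalone approximation of that selection flow, with illustrative names (`Providers`, `ModelSelection`, `pickDefaultModels`) that do not exist in the repository, and with the `custom_openai` special case omitted.

// Hedged sketch only: Providers, ModelSelection and pickDefaultModels are
// illustrative names, not part of the Perplexica codebase.
type Providers = {
  chatModelProviders: Record<string, Record<string, unknown>>;
  embeddingModelProviders: Record<string, Record<string, unknown>>;
};

type ModelSelection = {
  chatModelProvider?: string;
  chatModel?: string;
  embeddingModelProvider?: string;
  embeddingModel?: string;
};

// Fetch the provider list once, then fill in whichever selections are missing,
// mirroring the "fetch before the branch, then branch" shape of the diff above.
async function pickDefaultModels(
  apiUrl: string,
  current: ModelSelection,
): Promise<ModelSelection | null> {
  const providers: Providers = await fetch(`${apiUrl}/models`, {
    headers: { 'Content-Type': 'application/json' },
  }).then((res) => res.json());

  const next = { ...current };

  if (!next.chatModel || !next.chatModelProvider) {
    const chat = providers.chatModelProviders;
    if (!chat || Object.keys(chat).length === 0) return null; // no chat models available
    next.chatModelProvider = Object.keys(chat)[0];
    next.chatModel = Object.keys(chat[next.chatModelProvider])[0];
  }

  if (!next.embeddingModel || !next.embeddingModelProvider) {
    const embedding = providers.embeddingModelProviders;
    if (!embedding || Object.keys(embedding).length === 0) return null; // no embedding models available
    next.embeddingModelProvider = Object.keys(embedding)[0];
    next.embeddingModel = Object.keys(embedding[next.embeddingModelProvider])[0];
  }

  return next;
}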
@@ -171,8 +163,6 @@ const useSocket = (
         const timeoutId = setTimeout(() => {
           if (ws.readyState !== 1) {
-            ws.close();
-            setError(true);
             toast.error(
               'Failed to connect to the server. Please try again later.',
             );
@@ -182,7 +172,6 @@ const useSocket = (
         ws.onopen = () => {
           console.log('[DEBUG] open');
           clearTimeout(timeoutId);
-          setError(false);
           setIsWSReady(true);
         };
@@ -203,7 +192,7 @@ const useSocket = (
           if (data.type === 'error') {
             toast.error(data.data);
           }
-        });
+        })
         setWs(ws);
       };
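
The remaining hunks trim the WebSocket bootstrap: the connection timeout no longer closes the socket or sets the error flag, and `onopen` no longer clears it. As a hedged illustration of the timeout pattern those handlers implement, not the hook itself, here is a standalone sketch; `connectWithTimeout` and `onFailure` are invented names, and the 10-second duration is an assumption, since the actual value sits outside the quoted hunks.

// Hedged sketch: connectWithTimeout and onFailure are illustrative names only,
// and the 10 s timeout is an assumption, not a value taken from the commit.
function connectWithTimeout(
  url: string,
  onReady: () => void,
  onFailure: (message: string) => void,
): WebSocket {
  const ws = new WebSocket(url);

  // If the socket is still not OPEN (readyState 1) when the timer fires,
  // surface a connection failure to the caller.
  const timeoutId = setTimeout(() => {
    if (ws.readyState !== WebSocket.OPEN) {
      onFailure('Failed to connect to the server. Please try again later.');
    }
  }, 10_000);

  ws.onopen = () => {
    clearTimeout(timeoutId); // connected in time, cancel the failure path
    onReady();
  };

  ws.onmessage = (e) => {
    const data = JSON.parse(e.data);
    if (data.type === 'error') {
      onFailure(data.data);
    }
  };

  return ws;
}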