feat(ws-managers): implement better error handling

ItzCrazyKns committed 2024-05-06 19:59:13 +05:30
parent ed47191d9b · commit e8fe74ae7c
GPG Key ID: 8162927C7CCE3065 (no known key found for this signature in database)
2 changed files with 71 additions and 58 deletions
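
In short: the WebSocket connection setup is now wrapped in a try/catch, so any failure while parsing the request, resolving providers, or constructing models sends a structured error frame to the client and closes the socket instead of leaving the connection in a broken state. A minimal sketch of that pattern, condensed from the diff below (the error-frame shape, ws, and logger come from the diff; setUpModelsAndHandlers is a placeholder for the provider/model setup, not a real function in the repo):

import { WebSocket } from 'ws';
import type { IncomingMessage } from 'http';

// Placeholders standing in for the real setup code and logger shown in the diff.
declare const logger: { error: (err: unknown) => void };
declare function setUpModelsAndHandlers(
  ws: WebSocket,
  request: IncomingMessage,
): Promise<void>;

export const handleConnection = async (
  ws: WebSocket,
  request: IncomingMessage,
) => {
  try {
    // Everything that can throw (URL parsing, provider lookup, model construction)
    // now runs inside the try block.
    await setUpModelsAndHandlers(ws, request);
  } catch (err) {
    // Tell the client what happened in a structured frame, close the socket,
    // and keep the details in the server log.
    ws.send(
      JSON.stringify({
        type: 'error',
        data: 'Internal server error.',
        key: 'INTERNAL_SERVER_ERROR',
      }),
    );
    ws.close();
    logger.error(err);
  }
};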


@@ -14,74 +14,87 @@ export const handleConnection = async (
   ws: WebSocket,
   request: IncomingMessage,
 ) => {
+  try {
     const searchParams = new URL(request.url, `http://${request.headers.host}`)
       .searchParams;
 
     const [chatModelProviders, embeddingModelProviders] = await Promise.all([
       getAvailableChatModelProviders(),
       getAvailableEmbeddingModelProviders(),
     ]);
 
     const chatModelProvider =
-      searchParams.get('chatModelProvider') || Object.keys(chatModelProviders)[0];
+      searchParams.get('chatModelProvider') ||
+      Object.keys(chatModelProviders)[0];
     const chatModel =
       searchParams.get('chatModel') ||
       Object.keys(chatModelProviders[chatModelProvider])[0];
 
     const embeddingModelProvider =
       searchParams.get('embeddingModelProvider') ||
       Object.keys(embeddingModelProviders)[0];
     const embeddingModel =
       searchParams.get('embeddingModel') ||
       Object.keys(embeddingModelProviders[embeddingModelProvider])[0];
 
     let llm: BaseChatModel | undefined;
     let embeddings: Embeddings | undefined;
 
     if (
       chatModelProviders[chatModelProvider] &&
       chatModelProviders[chatModelProvider][chatModel] &&
       chatModelProvider != 'custom_openai'
     ) {
       llm = chatModelProviders[chatModelProvider][chatModel] as
         | BaseChatModel
         | undefined;
     } else if (chatModelProvider == 'custom_openai') {
       llm = new ChatOpenAI({
         modelName: chatModel,
         openAIApiKey: searchParams.get('openAIApiKey'),
         temperature: 0.7,
         configuration: {
           baseURL: searchParams.get('openAIBaseURL'),
         },
       });
     }
 
     if (
       embeddingModelProviders[embeddingModelProvider] &&
       embeddingModelProviders[embeddingModelProvider][embeddingModel]
     ) {
       embeddings = embeddingModelProviders[embeddingModelProvider][
         embeddingModel
       ] as Embeddings | undefined;
     }
 
     if (!llm || !embeddings) {
       ws.send(
         JSON.stringify({
           type: 'error',
           data: 'Invalid LLM or embeddings model selected, please refresh the page and try again.',
           key: 'INVALID_MODEL_SELECTED',
         }),
       );
       ws.close();
     }
 
     ws.on(
       'message',
       async (message) =>
         await handleMessage(message.toString(), ws, llm, embeddings),
     );
 
     ws.on('close', () => logger.debug('Connection closed'));
+  } catch (err) {
+    ws.send(
+      JSON.stringify({
+        type: 'error',
+        data: 'Internal server error.',
+        key: 'INTERNAL_SERVER_ERROR',
+      }),
+    );
+    ws.close();
+    logger.error(err);
+  }
 };
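
For reference, after this change the WebSocket manager can emit two kinds of keyed error frames: INVALID_MODEL_SELECTED (bad model or provider selection) and INTERNAL_SERVER_ERROR (anything thrown inside the handler). A rough TypeScript shape a client could use for them — the type names are illustrative and not defined anywhere in the repo:

// Hypothetical client-side type for the JSON error frames sent over the socket.
type WsErrorKey = 'INVALID_MODEL_SELECTED' | 'INTERNAL_SERVER_ERROR';

interface WsErrorFrame {
  type: 'error';
  data: string;    // human-readable message to show the user
  key: WsErrorKey; // machine-readable reason, useful for branching in the UI
}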


@@ -50,13 +50,13 @@ const useSocket = (url: string) => {
         !chatModelProviders ||
         Object.keys(chatModelProviders).length === 0
       )
-        return console.error('No chat models available');
+        return toast.error('No chat models available');
 
       if (
         !embeddingModelProviders ||
         Object.keys(embeddingModelProviders).length === 0
       )
-        return console.error('No embedding models available');
+        return toast.error('No embedding models available');
 
       chatModelProvider = Object.keys(chatModelProviders)[0];
       chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
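
On the client, the change is analogous: when no chat or embedding models are available, the hook now surfaces the problem to the user with a toast instead of only writing to the console. A rough sketch of that guard, assuming toast.error from whichever toast library the app already imports (e.g. sonner or react-hot-toast); the function and types around it are illustrative:

import { toast } from 'sonner'; // assumption: the app's toast helper exposes toast.error

// Hypothetical shape of the provider maps returned by the backend.
type Providers = Record<string, Record<string, unknown>>;

const pickDefaultModels = (
  chatModelProviders?: Providers,
  embeddingModelProviders?: Providers,
) => {
  // A toast is visible to the user; console.error usually is not.
  if (!chatModelProviders || Object.keys(chatModelProviders).length === 0) {
    toast.error('No chat models available');
    return;
  }
  if (!embeddingModelProviders || Object.keys(embeddingModelProviders).length === 0) {
    toast.error('No embedding models available');
    return;
  }

  const chatModelProvider = Object.keys(chatModelProviders)[0];
  const chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
  return { chatModelProvider, chatModel };
};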