From fd65af53c3b9c4272b158a2bc320749711e09c36 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Sun, 21 Apr 2024 20:52:47 +0530
Subject: [PATCH] feat(providers): add error handling

---
 src/lib/providers.ts | 38 +++++++++++++++++++++-----------------
 1 file changed, 21 insertions(+), 17 deletions(-)

diff --git a/src/lib/providers.ts b/src/lib/providers.ts
index 2dfde58..c730da8 100644
--- a/src/lib/providers.ts
+++ b/src/lib/providers.ts
@@ -10,22 +10,26 @@ export const getAvailableProviders = async () => {
   const models = {};
 
   if (openAIApiKey) {
-    models['openai'] = {
-      'gpt-3.5-turbo': new ChatOpenAI({
-        openAIApiKey,
-        modelName: 'gpt-3.5-turbo',
-        temperature: 0.7,
-      }),
-      'gpt-4': new ChatOpenAI({
-        openAIApiKey,
-        modelName: 'gpt-4',
-        temperature: 0.7,
-      }),
-      embeddings: new OpenAIEmbeddings({
-        openAIApiKey,
-        modelName: 'text-embedding-3-large',
-      }),
-    };
+    try {
+      models['openai'] = {
+        'gpt-3.5-turbo': new ChatOpenAI({
+          openAIApiKey,
+          modelName: 'gpt-3.5-turbo',
+          temperature: 0.7,
+        }),
+        'gpt-4': new ChatOpenAI({
+          openAIApiKey,
+          modelName: 'gpt-4',
+          temperature: 0.7,
+        }),
+        embeddings: new OpenAIEmbeddings({
+          openAIApiKey,
+          modelName: 'text-embedding-3-large',
+        }),
+      };
+    } catch (err) {
+      console.log(`Error loading OpenAI models: ${err}`);
+    }
   }
 
   if (ollamaEndpoint) {
@@ -50,7 +54,7 @@ export const getAvailableProviders = async () => {
         });
       }
     } catch (err) {
-      console.log(err);
+      console.log(`Error loading Ollama models: ${err}`);
     }
   }
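For context, a minimal sketch of how a caller might consume getAvailableProviders after this change: a provider whose models fail to construct now leaves its key out of the returned map instead of throwing, so callers should guard before indexing into it. The import path, the pickChatModel helper, and the fallback logic below are illustrative assumptions, not part of this patch.

// Hypothetical caller, sketched for illustration only.
// Assumes getAvailableProviders returns the `models` object built above.
import { getAvailableProviders } from './lib/providers';

const pickChatModel = async () => {
  const providers: Record<string, any> = await getAvailableProviders();

  // With the new try/catch, 'openai' may be missing if its models failed
  // to load, so check before selecting a model.
  if (providers['openai']) {
    return providers['openai']['gpt-3.5-turbo'];
  }

  // Fall back to the first non-embeddings Ollama model, if any was loaded.
  const ollama = providers['ollama'] ?? {};
  const fallback = Object.keys(ollama).find((key) => key !== 'embeddings');
  return fallback ? ollama[fallback] : undefined;
};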