From 09463999c2d93d9be6799a10a95e8e3a5c737cd4 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Thu, 9 May 2024 20:42:03 +0530
Subject: [PATCH] feat(routes): add suggestions route

---
 src/routes/index.ts       |  2 ++
 src/routes/suggestions.ts | 46 +++++++++++++++++++++++++++++++++++++++
 2 files changed, 48 insertions(+)
 create mode 100644 src/routes/suggestions.ts

diff --git a/src/routes/index.ts b/src/routes/index.ts
index 04390cd..257e677 100644
--- a/src/routes/index.ts
+++ b/src/routes/index.ts
@@ -3,6 +3,7 @@ import imagesRouter from './images';
 import videosRouter from './videos';
 import configRouter from './config';
 import modelsRouter from './models';
+import suggestionsRouter from './suggestions';
 
 const router = express.Router();
 
@@ -10,5 +11,6 @@ router.use('/images', imagesRouter);
 router.use('/videos', videosRouter);
 router.use('/config', configRouter);
 router.use('/models', modelsRouter);
+router.use('/suggestions', suggestionsRouter);
 
 export default router;
diff --git a/src/routes/suggestions.ts b/src/routes/suggestions.ts
new file mode 100644
index 0000000..10e5715
--- /dev/null
+++ b/src/routes/suggestions.ts
@@ -0,0 +1,46 @@
+import express from 'express';
+import generateSuggestions from '../agents/suggestionGeneratorAgent';
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import { getAvailableChatModelProviders } from '../lib/providers';
+import { HumanMessage, AIMessage } from '@langchain/core/messages';
+import logger from '../utils/logger';
+
+const router = express.Router();
+
+router.post('/', async (req, res) => {
+  try {
+    let { chat_history, chat_model, chat_model_provider } = req.body;
+
+    chat_history = chat_history.map((msg: any) => {
+      if (msg.role === 'user') {
+        return new HumanMessage(msg.content);
+      } else if (msg.role === 'assistant') {
+        return new AIMessage(msg.content);
+      }
+    });
+
+    const chatModels = await getAvailableChatModelProviders();
+    const provider = chat_model_provider || Object.keys(chatModels)[0];
+    const chatModel = chat_model || Object.keys(chatModels[provider])[0];
+
+    let llm: BaseChatModel | undefined;
+
+    if (chatModels[provider] && chatModels[provider][chatModel]) {
+      llm = chatModels[provider][chatModel] as BaseChatModel | undefined;
+    }
+
+    if (!llm) {
+      res.status(500).json({ message: 'Invalid LLM model selected' });
+      return;
+    }
+
+    const suggestions = await generateSuggestions({ chat_history }, llm);
+
+    res.status(200).json({ suggestions: suggestions });
+  } catch (err) {
+    res.status(500).json({ message: 'An error has occurred.' });
+    logger.error(`Error in generating suggestions: ${err.message}`);
+  }
+});
+
+export default router;
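
Usage note: a minimal client-side sketch of how the new endpoint might be called, assuming the router from src/routes/index.ts is mounted under an /api prefix (the mount point is not shown in this patch). BACKEND_URL, the example messages, and the provider/model names are placeholders; omitting chat_model_provider and chat_model lets the route fall back to the first provider and model returned by getAvailableChatModelProviders().

// Hypothetical caller of POST /suggestions; adjust BACKEND_URL to the deployment.
const BACKEND_URL = 'http://localhost:3001';

async function fetchSuggestions(): Promise<string[]> {
  const res = await fetch(`${BACKEND_URL}/api/suggestions`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      // chat_history entries use the { role, content } shape the route maps
      // onto HumanMessage / AIMessage instances.
      chat_history: [
        { role: 'user', content: 'What is LangChain?' },
        { role: 'assistant', content: 'LangChain is a framework for building LLM apps.' },
      ],
      // Optional; placeholder values shown here.
      chat_model_provider: 'openai',
      chat_model: 'gpt-3.5-turbo',
    }),
  });

  if (!res.ok) {
    throw new Error(`Suggestions request failed with status ${res.status}`);
  }

  // The route responds with { suggestions } on success.
  const data = await res.json();
  return data.suggestions;
}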