From 0f6986fc9b1f6bb131f686d439afdd1d4fc7d37d Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Thu, 9 May 2024 20:41:43 +0530
Subject: [PATCH] feat(agents): add suggestion generator agent

---
 src/agents/suggestionGeneratorAgent.ts | 62 ++++++++++++++++++++++++++
 1 file changed, 62 insertions(+)
 create mode 100644 src/agents/suggestionGeneratorAgent.ts

diff --git a/src/agents/suggestionGeneratorAgent.ts b/src/agents/suggestionGeneratorAgent.ts
new file mode 100644
index 0000000..59bd9ea
--- /dev/null
+++ b/src/agents/suggestionGeneratorAgent.ts
@@ -0,0 +1,62 @@
+import { RunnableSequence, RunnableMap } from '@langchain/core/runnables';
+import ListLineOutputParser from '../lib/outputParsers/listLineOutputParser';
+import { PromptTemplate } from '@langchain/core/prompts';
+import formatChatHistoryAsString from '../utils/formatHistory';
+import { BaseMessage } from '@langchain/core/messages';
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import { ChatOpenAI } from '@langchain/openai';
+
+// Prompt instructing the model to emit 4-5 follow-up suggestions wrapped in
+// <suggestions></suggestions> tags so ListLineOutputParser can extract them.
+const suggestionGeneratorPrompt = `
+You are an AI suggestion generator for an AI powered search engine. You will be given a conversation below. You need to generate 4-5 suggestions based on the conversation. The suggestion should be relevant to the conversation that can be used by the user to ask the chat model for more information.
+You need to make sure the suggestions are relevant to the conversation and are helpful to the user. Keep a note that the user might use these suggestions to ask a chat model for more information.
+Make sure the suggestions are medium in length and are informative and relevant to the conversation.
+
+Provide these suggestions separated by newlines between the XML tags <suggestions> and </suggestions>. For example:
+
+<suggestions>
+Suggestion 1
+Suggestion 2
+Suggestion 3
+</suggestions>
+
+Conversation:
+{chat_history}
+`;
+
+type SuggestionGeneratorInput = {
+  chat_history: BaseMessage[];
+};
+
+// Parses the newline-separated lines found between the <suggestions> tags.
+const outputParser = new ListLineOutputParser({
+  key: 'suggestions',
+});
+
+// Builds the chain: format history -> fill prompt -> run model -> parse list.
+const createSuggestionGeneratorChain = (llm: BaseChatModel) => {
+  return RunnableSequence.from([
+    RunnableMap.from({
+      chat_history: (input: SuggestionGeneratorInput) =>
+        formatChatHistoryAsString(input.chat_history),
+    }),
+    PromptTemplate.fromTemplate(suggestionGeneratorPrompt),
+    llm,
+    outputParser,
+  ]);
+};
+
+// Generates follow-up suggestions for the given chat history.
+// NOTE(review): this mutates the caller's model instance and assumes a
+// ChatOpenAI-compatible `temperature` field — confirm this is intended.
+const generateSuggestions = (
+  input: SuggestionGeneratorInput,
+  llm: BaseChatModel,
+) => {
+  (llm as ChatOpenAI).temperature = 0;
+  const suggestionGeneratorChain = createSuggestionGeneratorChain(llm);
+  return suggestionGeneratorChain.invoke(input);
+};
+
+export default generateSuggestions;