feat(image-search): Use LLM from config

This commit is contained in:
ItzCrazyKns 2024-04-20 22:12:07 +05:30
parent 23b7feee0c
commit 5924690df2
No known key found for this signature in database
GPG Key ID: 8162927C7CCE3065
1 changed file with 17 additions and 6 deletions

View File

@@ -1,7 +1,8 @@
import express from 'express';
import handleImageSearch from '../agents/imageSearchAgent';
import { ChatOpenAI } from '@langchain/openai';
import { getOpenaiApiKey } from '../config';
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { getAvailableProviders } from '../lib/providers';
import { getChatModel, getChatModelProvider } from '../config';
const router = express.Router();
@@ -9,10 +10,20 @@ router.post('/', async (req, res) => {
try {
const { query, chat_history } = req.body;
const llm = new ChatOpenAI({
temperature: 0.7,
openAIApiKey: getOpenaiApiKey(),
});
const models = await getAvailableProviders();
const provider = getChatModelProvider();
const chatModel = getChatModel();
let llm: BaseChatModel | undefined;
if (models[provider] && models[provider][chatModel]) {
llm = models[provider][chatModel] as BaseChatModel | undefined;
}
if (!llm) {
res.status(500).json({ message: 'Invalid LLM model selected' });
return;
}
const images = await handleImageSearch({ query, chat_history }, llm);