feat(image-search): Use LLM from config
parent 23b7feee0c
commit 5924690df2
@@ -1,7 +1,8 @@
 import express from 'express';
 import handleImageSearch from '../agents/imageSearchAgent';
-import { ChatOpenAI } from '@langchain/openai';
-import { getOpenaiApiKey } from '../config';
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import { getAvailableProviders } from '../lib/providers';
+import { getChatModel, getChatModelProvider } from '../config';
 
 const router = express.Router();
 
@@ -9,10 +10,20 @@ router.post('/', async (req, res) => {
   try {
     const { query, chat_history } = req.body;
 
-    const llm = new ChatOpenAI({
-      temperature: 0.7,
-      openAIApiKey: getOpenaiApiKey(),
-    });
+    const models = await getAvailableProviders();
+    const provider = getChatModelProvider();
+    const chatModel = getChatModel();
+
+    let llm: BaseChatModel | undefined;
+
+    if (models[provider] && models[provider][chatModel]) {
+      llm = models[provider][chatModel] as BaseChatModel | undefined;
+    }
+
+    if (!llm) {
+      res.status(500).json({ message: 'Invalid LLM model selected' });
+      return;
+    }
 
     const images = await handleImageSearch({ query, chat_history }, llm);
 
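For readers of the diff: the new code treats the value resolved by getAvailableProviders() as a two-level map, provider name to model name to an instantiated chat model, with getChatModel()/getChatModelProvider() as config getters returning the selected names. Below is a minimal sketch of that assumed shape and lookup; the ProviderModels type and resolveLlm helper are hypothetical names for illustration, not the repo's actual code.

// A minimal sketch, assuming getAvailableProviders() resolves to a nested
// record keyed by provider name and then model name. ProviderModels and
// resolveLlm are hypothetical, introduced only to illustrate the
// lookup-and-validate pattern in the diff above.
import { BaseChatModel } from '@langchain/core/language_models/chat_models';

// Assumed shape: provider name -> model name -> instantiated chat model.
type ProviderModels = Record<string, Record<string, BaseChatModel>>;

// Mirrors the route's lookup: returns undefined when either the provider
// or the configured model name is missing, so the caller can fail early.
function resolveLlm(
  models: ProviderModels,
  provider: string,
  chatModel: string,
): BaseChatModel | undefined {
  return models[provider]?.[chatModel];
}

The explicit !llm guard in the route means an invalid provider/model pair in the config produces a clear 500 response instead of passing undefined into handleImageSearch.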