From edc40d8fe6f9381803e8eee83b6371b1303533bb Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Wed, 1 May 2024 19:43:06 +0530
Subject: [PATCH] feat(providers): add Groq provider

---
 sample.config.toml               |  3 +-
 src/config.ts                    |  3 ++
 src/lib/providers.ts             | 66 ++++++++++++++++++++++++++++++--
 src/routes/config.ts             |  3 ++
 ui/components/SettingsDialog.tsx | 16 ++++++++
 5 files changed, 86 insertions(+), 5 deletions(-)

diff --git a/sample.config.toml b/sample.config.toml
index 2d09b4b..e283826 100644
--- a/sample.config.toml
+++ b/sample.config.toml
@@ -1,11 +1,12 @@
 [GENERAL]
 PORT = 3001 # Port to run the server on
 SIMILARITY_MEASURE = "cosine" # "cosine" or "dot"
-CHAT_MODEL_PROVIDER = "openai" # "openai" or "ollama"
+CHAT_MODEL_PROVIDER = "openai" # "openai", "ollama" or "groq"
 CHAT_MODEL = "gpt-3.5-turbo" # Name of the model to use
 
 [API_KEYS]
 OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef
+GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
 
 [API_ENDPOINTS]
 SEARXNG = "http://localhost:32768" # SearxNG API URL

diff --git a/src/config.ts b/src/config.ts
index f373847..25dcbf4 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -13,6 +13,7 @@ interface Config {
   };
   API_KEYS: {
     OPENAI: string;
+    GROQ: string;
   };
   API_ENDPOINTS: {
     SEARXNG: string;
@@ -41,6 +42,8 @@ export const getChatModel = () => loadConfig().GENERAL.CHAT_MODEL;
 
 export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI;
 
+export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ;
+
 export const getSearxngApiEndpoint = () => loadConfig().API_ENDPOINTS.SEARXNG;
 
 export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA;

diff --git a/src/lib/providers.ts b/src/lib/providers.ts
index 71ed079..aea69de 100644
--- a/src/lib/providers.ts
+++ b/src/lib/providers.ts
@@ -1,11 +1,16 @@
 import { ChatOpenAI, OpenAIEmbeddings } from '@langchain/openai';
 import { ChatOllama } from '@langchain/community/chat_models/ollama';
 import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
-import { getOllamaApiEndpoint, getOpenaiApiKey } from '../config';
+import {
+  getGroqApiKey,
+  getOllamaApiEndpoint,
+  getOpenaiApiKey,
+} from '../config';
 import logger from '../utils/logger';
 
 export const getAvailableProviders = async () => {
   const openAIApiKey = getOpenaiApiKey();
+  const groqApiKey = getGroqApiKey();
   const ollamaEndpoint = getOllamaApiEndpoint();
 
   const models = {};
@@ -13,17 +18,17 @@ export const getAvailableProviders = async () => {
   if (openAIApiKey) {
     try {
       models['openai'] = {
-        'gpt-3.5-turbo': new ChatOpenAI({
+        'GPT-3.5 turbo': new ChatOpenAI({
           openAIApiKey,
           modelName: 'gpt-3.5-turbo',
           temperature: 0.7,
         }),
-        'gpt-4': new ChatOpenAI({
+        'GPT-4': new ChatOpenAI({
           openAIApiKey,
           modelName: 'gpt-4',
           temperature: 0.7,
         }),
-        'gpt-4-turbo': new ChatOpenAI({
+        'GPT-4 turbo': new ChatOpenAI({
           openAIApiKey,
           modelName: 'gpt-4-turbo',
           temperature: 0.7,
@@ -38,6 +43,59 @@ export const getAvailableProviders = async () => {
     }
   }
 
+  if (groqApiKey) {
+    try {
+      models['groq'] = {
+        'LLaMA3 8b': new ChatOpenAI(
+          {
+            openAIApiKey: groqApiKey,
+            modelName: 'llama3-8b-8192',
+            temperature: 0.7,
+          },
+          {
+            baseURL: 'https://api.groq.com/openai/v1',
+          },
+        ),
+        'LLaMA3 70b': new ChatOpenAI(
+          {
+            openAIApiKey: groqApiKey,
+            modelName: 'llama3-70b-8192',
+            temperature: 0.7,
+          },
+          {
+            baseURL: 'https://api.groq.com/openai/v1',
+          },
+        ),
+        'Mixtral 8x7b': new ChatOpenAI(
+          {
+            openAIApiKey: groqApiKey,
+            modelName: 'mixtral-8x7b-32768',
+            temperature: 0.7,
+          },
+          {
+            baseURL: 'https://api.groq.com/openai/v1',
+          },
+        ),
+        'Gemma 7b': new ChatOpenAI(
+          {
+            openAIApiKey: groqApiKey,
+            modelName: 'gemma-7b-it',
+            temperature: 0.7,
+          },
+          {
+            baseURL: 'https://api.groq.com/openai/v1',
+          },
+        ),
+        embeddings: new OpenAIEmbeddings({
+          openAIApiKey: openAIApiKey,
+          modelName: 'text-embedding-3-large',
+        }),
+      };
+    } catch (err) {
+      logger.error(`Error loading Groq models: ${err}`);
+    }
+  }
+
   if (ollamaEndpoint) {
     try {
       const response = await fetch(`${ollamaEndpoint}/api/tags`);

diff --git a/src/routes/config.ts b/src/routes/config.ts
index ecdec17..4d22ec5 100644
--- a/src/routes/config.ts
+++ b/src/routes/config.ts
@@ -3,6 +3,7 @@ import { getAvailableProviders } from '../lib/providers';
 import {
   getChatModel,
   getChatModelProvider,
+  getGroqApiKey,
   getOllamaApiEndpoint,
   getOpenaiApiKey,
   updateConfig,
@@ -30,6 +31,7 @@ router.get('/', async (_, res) => {
 
   config['openeaiApiKey'] = getOpenaiApiKey();
   config['ollamaApiUrl'] = getOllamaApiEndpoint();
+  config['groqApiKey'] = getGroqApiKey();
 
   res.status(200).json(config);
 });
@@ -44,6 +46,7 @@ router.post('/', async (req, res) => {
     },
     API_KEYS: {
       OPENAI: config.openeaiApiKey,
+      GROQ: config.groqApiKey,
     },
     API_ENDPOINTS: {
       OLLAMA: config.ollamaApiUrl,

diff --git a/ui/components/SettingsDialog.tsx b/ui/components/SettingsDialog.tsx
index c62c967..f005b8c 100644
--- a/ui/components/SettingsDialog.tsx
+++ b/ui/components/SettingsDialog.tsx
@@ -9,6 +9,7 @@ interface SettingsType {
   selectedProvider: string;
   selectedChatModel: string;
   openeaiApiKey: string;
+  groqApiKey: string;
   ollamaApiUrl: string;
 }
 
@@ -194,6 +195,21 @@ const SettingsDialog = ({
                 className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
               />
             </div>
+            <div className="flex flex-col space-y-1">
+              <p className="text-white/70 text-sm">GROQ API Key</p>
+              <input
+                type="text"
+                placeholder="GROQ API Key"
+                defaultValue={config.groqApiKey}
+                onChange={(e) =>
+                  setConfig({
+                    ...config,
+                    groqApiKey: e.target.value,
+                  })
+                }
+                className="bg-[#111111] px-3 py-2 flex items-center overflow-hidden border border-[#1C1C1C] text-white rounded-lg text-sm"
+              />
+            </div>
           </div>
         )}
         {isLoading && (
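
Usage note (illustrative, not part of the diff): with this patch applied, selecting Groq
is a config.toml edit. Assuming CHAT_MODEL is looked up against the display-name keys of
models['groq'] defined above, a working configuration would look like:

    [GENERAL]
    CHAT_MODEL_PROVIDER = "groq"
    CHAT_MODEL = "LLaMA3 70b"  # one of "LLaMA3 8b", "LLaMA3 70b", "Mixtral 8x7b", "Gemma 7b"

    [API_KEYS]
    OPENAI = "sk-..."  # still needed: the groq provider builds its embeddings with the OpenAI key
    GROQ = "gsk_..."

Note that the embeddings entry under models['groq'] is constructed from the OpenAI key
(text-embedding-3-large), since Groq's OpenAI-compatible endpoint does not serve
embeddings; without OPENAI set, the groq provider loads with no embeddings model.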