From 177746235a347e1468ade07a7ef425d4011a0bc2 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns <95534749+ItzCrazyKns@users.noreply.github.com>
Date: Thu, 28 Nov 2024 20:47:18 +0530
Subject: [PATCH] feat(providers): add gemini

---
 package.json                     |  1 +
 sample.config.toml               |  1 +
 src/config.ts                    |  3 ++
 src/lib/providers/gemini.ts      | 69 ++++++++++++++++++++++++++++++++
 src/lib/providers/index.ts       |  3 ++
 src/routes/config.ts             |  5 ++-
 ui/components/SettingsDialog.tsx | 17 ++++++++
 yarn.lock                        | 53 ++++++++++++++++++++++++
 8 files changed, 151 insertions(+), 1 deletion(-)
 create mode 100644 src/lib/providers/gemini.ts

diff --git a/package.json b/package.json
index 6a67746..0368b21 100644
--- a/package.json
+++ b/package.json
@@ -31,6 +31,7 @@
     "@langchain/anthropic": "^0.2.3",
     "@langchain/community": "^0.2.16",
     "@langchain/openai": "^0.0.25",
+    "@langchain/google-genai": "^0.0.23",
     "@xenova/transformers": "^2.17.1",
     "axios": "^1.6.8",
     "better-sqlite3": "^11.0.0",
diff --git a/sample.config.toml b/sample.config.toml
index dddcc03..50ba95d 100644
--- a/sample.config.toml
+++ b/sample.config.toml
@@ -7,6 +7,7 @@ KEEP_ALIVE = "5m" # How long to keep Ollama models loaded into memory. (Instead
 OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef
 GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
 ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef
+GEMINI = "" # Gemini API key - sk-1234567890abcdef1234567890abcdef
 
 [API_ENDPOINTS]
 SEARXNG = "http://localhost:32768" # SearxNG API URL
diff --git a/src/config.ts b/src/config.ts
index 8624e7f..001c259 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -14,6 +14,7 @@ interface Config {
     OPENAI: string;
     GROQ: string;
     ANTHROPIC: string;
+    GEMINI: string;
   };
   API_ENDPOINTS: {
     SEARXNG: string;
@@ -43,6 +44,8 @@ export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ;
 
 export const getAnthropicApiKey = () => loadConfig().API_KEYS.ANTHROPIC;
 
+export const getGeminiApiKey = () => loadConfig().API_KEYS.GEMINI;
+
 export const getSearxngApiEndpoint = () =>
   process.env.SEARXNG_API_URL || loadConfig().API_ENDPOINTS.SEARXNG;
 
diff --git a/src/lib/providers/gemini.ts b/src/lib/providers/gemini.ts
new file mode 100644
index 0000000..95764cf
--- /dev/null
+++ b/src/lib/providers/gemini.ts
@@ -0,0 +1,69 @@
+import {
+  ChatGoogleGenerativeAI,
+  GoogleGenerativeAIEmbeddings,
+} from '@langchain/google-genai';
+import { getGeminiApiKey } from '../../config';
+import logger from '../../utils/logger';
+
+export const loadGeminiChatModels = async () => {
+  const geminiApiKey = getGeminiApiKey();
+
+  if (!geminiApiKey) return {};
+
+  try {
+    const chatModels = {
+      'gemini-1.5-flash': {
+        displayName: 'Gemini 1.5 Flash',
+        model: new ChatGoogleGenerativeAI({
+          modelName: 'gemini-1.5-flash',
+          temperature: 0.7,
+          apiKey: geminiApiKey,
+        }),
+      },
+      'gemini-1.5-flash-8b': {
+        displayName: 'Gemini 1.5 Flash 8B',
+        model: new ChatGoogleGenerativeAI({
+          modelName: 'gemini-1.5-flash-8b',
+          temperature: 0.7,
+          apiKey: geminiApiKey,
+        }),
+      },
+      'gemini-1.5-pro': {
+        displayName: 'Gemini 1.5 Pro',
+        model: new ChatGoogleGenerativeAI({
+          modelName: 'gemini-1.5-pro',
+          temperature: 0.7,
+          apiKey: geminiApiKey,
+        }),
+      },
+    };
+
+    return chatModels;
+  } catch (err) {
+    logger.error(`Error loading Gemini models: ${err}`);
+    return {};
+  }
+};
+
+export const loadGeminiEmbeddingsModels = async () => {
+  const geminiApiKey = getGeminiApiKey();
+
+  if (!geminiApiKey) return {};
+
+  try {
+    const embeddingModels = {
+      'text-embedding-004': {
+        displayName: 'Text Embedding',
+        model: new GoogleGenerativeAIEmbeddings({
+          apiKey: geminiApiKey,
+          modelName: 'text-embedding-004',
+        }),
+      },
+    };
+
+    return embeddingModels;
+  } catch (err) {
+    logger.error(`Error loading Gemini embeddings model: ${err}`);
+    return {};
+  }
+};
diff --git a/src/lib/providers/index.ts b/src/lib/providers/index.ts
index d919fd4..98846e7 100644
--- a/src/lib/providers/index.ts
+++ b/src/lib/providers/index.ts
@@ -3,18 +3,21 @@ import { loadOllamaChatModels, loadOllamaEmbeddingsModels } from './ollama';
 import { loadOpenAIChatModels, loadOpenAIEmbeddingsModels } from './openai';
 import { loadAnthropicChatModels } from './anthropic';
 import { loadTransformersEmbeddingsModels } from './transformers';
+import { loadGeminiChatModels, loadGeminiEmbeddingsModels } from './gemini';
 
 const chatModelProviders = {
   openai: loadOpenAIChatModels,
   groq: loadGroqChatModels,
   ollama: loadOllamaChatModels,
   anthropic: loadAnthropicChatModels,
+  gemini: loadGeminiChatModels,
 };
 
 const embeddingModelProviders = {
   openai: loadOpenAIEmbeddingsModels,
   local: loadTransformersEmbeddingsModels,
   ollama: loadOllamaEmbeddingsModels,
+  gemini: loadGeminiEmbeddingsModels,
 };
 
 export const getAvailableChatModelProviders = async () => {
diff --git a/src/routes/config.ts b/src/routes/config.ts
index f635e4b..38192b7 100644
--- a/src/routes/config.ts
+++ b/src/routes/config.ts
@@ -7,6 +7,7 @@ import {
   getGroqApiKey,
   getOllamaApiEndpoint,
   getAnthropicApiKey,
+  getGeminiApiKey,
   getOpenaiApiKey,
   updateConfig,
 } from '../config';
@@ -52,7 +53,8 @@ router.get('/', async (_, res) => {
     config['ollamaApiUrl'] = getOllamaApiEndpoint();
     config['anthropicApiKey'] = getAnthropicApiKey();
     config['groqApiKey'] = getGroqApiKey();
-
+    config['geminiApiKey'] = getGeminiApiKey();
+
     res.status(200).json(config);
   } catch (err: any) {
     res.status(500).json({ message: 'An error has occurred.' });
@@ -68,6 +70,7 @@ router.post('/', async (req, res) => {
       OPENAI: config.openaiApiKey,
       GROQ: config.groqApiKey,
       ANTHROPIC: config.anthropicApiKey,
+      GEMINI: config.geminiApiKey,
     },
     API_ENDPOINTS: {
       OLLAMA: config.ollamaApiUrl,
diff --git a/ui/components/SettingsDialog.tsx b/ui/components/SettingsDialog.tsx
index 716dd7d..163857b 100644
--- a/ui/components/SettingsDialog.tsx
+++ b/ui/components/SettingsDialog.tsx
@@ -63,6 +63,7 @@ interface SettingsType {
   openaiApiKey: string;
   groqApiKey: string;
   anthropicApiKey: string;
+  geminiApiKey: string;
   ollamaApiUrl: string;
 }
 
@@ -476,6 +477,22 @@ const SettingsDialog = ({
                   }
                 />
               </div>
+              <div className="flex flex-col space-y-1">
+                <p className="text-black/70 dark:text-white/70 text-sm">
+                  Gemini API Key
+                </p>
+                <Input
+                  type="text"
+                  placeholder="Gemini API key"
+                  defaultValue={config.geminiApiKey}
+                  onChange={(e) =>
+                    setConfig({
+                      ...config,
+                      geminiApiKey: e.target.value,
+                    })
+                  }
+                />
+              </div>
             </div>
           )}
           {isLoading && (
diff --git a/yarn.lock b/yarn.lock
index 0ea5916..5764b3c 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -293,6 +293,11 @@
   resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.19.12.tgz#c57c8afbb4054a3ab8317591a0b7320360b444ae"
   integrity sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA==
 
+"@google/generative-ai@^0.7.0":
+  version "0.7.1"
+  resolved "https://registry.yarnpkg.com/@google/generative-ai/-/generative-ai-0.7.1.tgz#eb187c75080c0706245699dbc06816c830d8c6a7"
+  integrity sha512-WTjMLLYL/xfA5BW6xAycRPiAX7FNHKAxrid/ayqC1QMam0KAK0NbMeS9Lubw80gVg5xFMLE+H7pw4wdNzTOlxw==
+
 "@huggingface/jinja@^0.2.2":
   version "0.2.2"
   resolved "https://registry.yarnpkg.com/@huggingface/jinja/-/jinja-0.2.2.tgz#faeb205a9d6995089bef52655ddd8245d3190627"
@@ -380,6 +385,23 @@
     zod "^3.22.4"
     zod-to-json-schema "^3.22.3"
 
+"@langchain/core@>=0.2.16 <0.3.0":
+  version "0.2.36"
+  resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.2.36.tgz#75754c33aa5b9310dcf117047374a1ae011005a4"
+  integrity sha512-qHLvScqERDeH7y2cLuJaSAlMwg3f/3Oc9nayRSXRU2UuaK/SOhI42cxiPLj1FnuHJSmN0rBQFkrLx02gI4mcVg==
+  dependencies:
+    ansi-styles "^5.0.0"
+    camelcase "6"
+    decamelize "1.2.0"
+    js-tiktoken "^1.0.12"
+    langsmith "^0.1.56-rc.1"
+    mustache "^4.2.0"
+    p-queue "^6.6.2"
+    p-retry "4"
+    uuid "^10.0.0"
+    zod "^3.22.4"
+    zod-to-json-schema "^3.22.3"
+
 "@langchain/core@>=0.2.9 <0.3.0":
   version "0.2.15"
   resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.2.15.tgz#1bb99ac4fffe935c7ba37edcaa91abfba3c82219"
@@ -415,6 +437,15 @@
     zod "^3.22.4"
     zod-to-json-schema "^3.22.3"
 
+"@langchain/google-genai@^0.0.23":
+  version "0.0.23"
+  resolved "https://registry.yarnpkg.com/@langchain/google-genai/-/google-genai-0.0.23.tgz#e73af501bc1df4c7642b531759b82dc3eb7ae459"
+  integrity sha512-MTSCJEoKsfU1inz0PWvAjITdNFM4s41uvBCwLpcgx3jWJIEisczFD82x86ahYqJlb2fD6tohYSaCH/4tKAdkXA==
+  dependencies:
+    "@google/generative-ai" "^0.7.0"
+    "@langchain/core" ">=0.2.16 <0.3.0"
+    zod-to-json-schema "^3.22.4"
+
 "@langchain/openai@^0.0.25", "@langchain/openai@~0.0.19":
   version "0.0.25"
   resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-0.0.25.tgz#8332abea1e3acb9b1169f90636e518c0ee90622e"
@@ -712,6 +743,11 @@
   resolved "https://registry.yarnpkg.com/@types/triple-beam/-/triple-beam-1.3.5.tgz#74fef9ffbaa198eb8b588be029f38b00299caa2c"
   integrity sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==
 
+"@types/uuid@^10.0.0":
+  version "10.0.0"
+  resolved "https://registry.yarnpkg.com/@types/uuid/-/uuid-10.0.0.tgz#e9c07fe50da0f53dc24970cca94d619ff03f6f6d"
+  integrity sha512-7gqG38EyHgyP1S+7+xomFtL+ZNHcKv6DwNaCZmJmo1vgMugyF3TCnXVg4t1uk89mLNwnLtnY3TpOpCOyp1/xHQ==
+
 "@types/uuid@^9.0.1":
   version "9.0.8"
   resolved "https://registry.yarnpkg.com/@types/uuid/-/uuid-9.0.8.tgz#7545ba4fc3c003d6c756f651f3bf163d8f0f29ba"
@@ -1900,6 +1936,18 @@ langchainhub@~0.0.8:
   resolved "https://registry.yarnpkg.com/langchainhub/-/langchainhub-0.0.8.tgz#fd4b96dc795e22e36c1a20bad31b61b0c33d3110"
   integrity sha512-Woyb8YDHgqqTOZvWIbm2CaFDGfZ4NTSyXV687AG4vXEfoNo7cGQp7nhl7wL3ehenKWmNEmcxCLgOZzW8jE6lOQ==
 
+langsmith@^0.1.56-rc.1:
+  version "0.1.68"
+  resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.1.68.tgz#848332e822fe5e6734a07f1c36b6530cc1798afb"
+  integrity sha512-otmiysWtVAqzMx3CJ4PrtUBhWRG5Co8Z4o7hSZENPjlit9/j3/vm3TSvbaxpDYakZxtMjhkcJTqrdYFipISEiQ==
+  dependencies:
+    "@types/uuid" "^10.0.0"
+    commander "^10.0.1"
+    p-queue "^6.6.2"
+    p-retry "4"
+    semver "^7.6.3"
+    uuid "^10.0.0"
+
 langsmith@~0.1.1, langsmith@~0.1.7:
   version "0.1.14"
   resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.1.14.tgz#2b889dbcfb49547614df276a4a5a063092a1585d"
@@ -2568,6 +2616,11 @@ semver@^7.3.5, semver@^7.5.3, semver@^7.5.4:
   dependencies:
     lru-cache "^6.0.0"
 
+semver@^7.6.3:
+  version "7.6.3"
+  resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143"
+  integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==
+
 send@0.18.0:
   version "0.18.0"
   resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be"
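
Reviewer note (not part of the patch): the sketch below is a minimal way to exercise the new provider locally, assuming API_KEYS.GEMINI is set in config.toml, the dependencies above are installed, and the script is run from the repo root with ts-node. The file name smoke-gemini.ts and the prompt text are hypothetical; invoke() and embedQuery() are the standard LangChain chat-model and embeddings methods, not anything added by this commit.

// smoke-gemini.ts: hypothetical reviewer script, not included in this commit.
import {
  loadGeminiChatModels,
  loadGeminiEmbeddingsModels,
} from './src/lib/providers/gemini';

const main = async () => {
  // Both loaders return {} when no Gemini key is configured, so guard before use.
  const chatModels = await loadGeminiChatModels();
  const embeddingModels = await loadGeminiEmbeddingsModels();

  const flash = chatModels['gemini-1.5-flash'];
  if (!flash) {
    console.log('GEMINI key missing or models failed to load.');
    return;
  }

  // ChatGoogleGenerativeAI implements the LangChain chat interface, so invoke() works.
  const reply = await flash.model.invoke('Reply with a single word: ready?');
  console.log(flash.displayName, '->', reply.content);

  // GoogleGenerativeAIEmbeddings exposes embedQuery() from the Embeddings base class.
  const vector = await embeddingModels['text-embedding-004'].model.embedQuery('hello world');
  console.log('text-embedding-004 dimensions:', vector.length);
};

main().catch((err) => console.error(err));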