diff --git a/README.md b/README.md index d1388b0..884fad6 100644 --- a/README.md +++ b/README.md @@ -67,7 +67,8 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker. - `OPENAI`: Your OpenAI API key. **You only need to fill this if you wish to use OpenAI's models**. - `OLLAMA`: Your Ollama API URL. You should enter it as `http://host.docker.internal:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://host.docker.internal:11434`. For other ports, adjust accordingly. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**. - - `GROQ`: Your Groq API key. **You only need to fill this if you wish to use Groq's hosted models** + - `GROQ`: Your Groq API key. **You only need to fill this if you wish to use Groq's hosted models**. + - `ANTHROPIC`: Your Anthropic API key. **You only need to fill this if you wish to use Anthropic models**. **Note**: You can change these after starting Perplexica from the settings dialog. diff --git a/package.json b/package.json index 4f2bb32..ccdaa8d 100644 --- a/package.json +++ b/package.json @@ -24,6 +24,7 @@ }, "dependencies": { "@iarna/toml": "^2.2.5", + "@langchain/anthropic": "^0.2.3", "@langchain/community": "^0.2.16", "@langchain/openai": "^0.0.25", "@xenova/transformers": "^2.17.1", diff --git a/sample.config.toml b/sample.config.toml index 8d35666..f6c6943 100644 --- a/sample.config.toml +++ b/sample.config.toml @@ -5,6 +5,7 @@ SIMILARITY_MEASURE = "cosine" # "cosine" or "dot" [API_KEYS] OPENAI = "" # OpenAI API key - sk-1234567890abcdef1234567890abcdef GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef +ANTHROPIC = "" # Anthropic API key - sk-ant-1234567890abcdef1234567890abcdef [API_ENDPOINTS] SEARXNG = "http://localhost:32768" # SearxNG API URL diff --git a/src/config.ts b/src/config.ts index 7c0c7f1..9ebc182 100644 --- a/src/config.ts +++ b/src/config.ts @@ -12,6 +12,7 @@ interface Config { API_KEYS: { OPENAI: string; GROQ: string; + ANTHROPIC: 
string; }; API_ENDPOINTS: { SEARXNG: string; @@ -37,6 +38,8 @@ export const getOpenaiApiKey = () => loadConfig().API_KEYS.OPENAI; export const getGroqApiKey = () => loadConfig().API_KEYS.GROQ; +export const getAnthropicApiKey = () => loadConfig().API_KEYS.ANTHROPIC; + export const getSearxngApiEndpoint = () => loadConfig().API_ENDPOINTS.SEARXNG; export const getOllamaApiEndpoint = () => loadConfig().API_ENDPOINTS.OLLAMA; diff --git a/src/lib/providers/anthropic.ts b/src/lib/providers/anthropic.ts new file mode 100644 index 0000000..58cd164 --- /dev/null +++ b/src/lib/providers/anthropic.ts @@ -0,0 +1,39 @@ +import { ChatAnthropic } from '@langchain/anthropic'; +import { getAnthropicApiKey } from '../../config'; +import logger from '../../utils/logger'; + +export const loadAnthropicChatModels = async () => { + const anthropicApiKey = getAnthropicApiKey(); + + if (!anthropicApiKey) return {}; + + try { + const chatModels = { + 'Claude 3.5 Sonnet': new ChatAnthropic({ + temperature: 0.7, + anthropicApiKey: anthropicApiKey, + model: 'claude-3-5-sonnet-20240620', + }), + 'Claude 3 Opus': new ChatAnthropic({ + temperature: 0.7, + anthropicApiKey: anthropicApiKey, + model: 'claude-3-opus-20240229', + }), + 'Claude 3 Sonnet': new ChatAnthropic({ + temperature: 0.7, + anthropicApiKey: anthropicApiKey, + model: 'claude-3-sonnet-20240229', + }), + 'Claude 3 Haiku': new ChatAnthropic({ + temperature: 0.7, + anthropicApiKey: anthropicApiKey, + model: 'claude-3-haiku-20240307', + }), + }; + + return chatModels; + } catch (err) { + logger.error(`Error loading Anthropic models: ${err}`); + return {}; + } +}; diff --git a/src/lib/providers/index.ts b/src/lib/providers/index.ts index b1d4502..d919fd4 100644 --- a/src/lib/providers/index.ts +++ b/src/lib/providers/index.ts @@ -1,12 +1,14 @@ import { loadGroqChatModels } from './groq'; import { loadOllamaChatModels, loadOllamaEmbeddingsModels } from './ollama'; import { loadOpenAIChatModels, loadOpenAIEmbeddingsModels } from 
'./openai'; +import { loadAnthropicChatModels } from './anthropic'; import { loadTransformersEmbeddingsModels } from './transformers'; const chatModelProviders = { openai: loadOpenAIChatModels, groq: loadGroqChatModels, ollama: loadOllamaChatModels, + anthropic: loadAnthropicChatModels, }; const embeddingModelProviders = { @@ -21,11 +23,11 @@ export const getAvailableChatModelProviders = async () => { for (const provider in chatModelProviders) { const providerModels = await chatModelProviders[provider](); if (Object.keys(providerModels).length > 0) { - models[provider] = providerModels + models[provider] = providerModels; } } - models['custom_openai'] = {} + models['custom_openai'] = {}; return models; }; @@ -36,7 +38,7 @@ export const getAvailableEmbeddingModelProviders = async () => { for (const provider in embeddingModelProviders) { const providerModels = await embeddingModelProviders[provider](); if (Object.keys(providerModels).length > 0) { - models[provider] = providerModels + models[provider] = providerModels; } } diff --git a/src/routes/config.ts b/src/routes/config.ts index bf13b63..f255560 100644 --- a/src/routes/config.ts +++ b/src/routes/config.ts @@ -6,6 +6,7 @@ import { import { getGroqApiKey, getOllamaApiEndpoint, + getAnthropicApiKey, getOpenaiApiKey, updateConfig, } from '../config'; @@ -37,6 +38,7 @@ router.get('/', async (_, res) => { config['openaiApiKey'] = getOpenaiApiKey(); config['ollamaApiUrl'] = getOllamaApiEndpoint(); + config['anthropicApiKey'] = getAnthropicApiKey(); config['groqApiKey'] = getGroqApiKey(); res.status(200).json(config); @@ -49,6 +51,7 @@ router.post('/', async (req, res) => { API_KEYS: { OPENAI: config.openaiApiKey, GROQ: config.groqApiKey, + ANTHROPIC: config.anthropicApiKey, }, API_ENDPOINTS: { OLLAMA: config.ollamaApiUrl, diff --git a/ui/components/SettingsDialog.tsx b/ui/components/SettingsDialog.tsx index d6ee18d..788469b 100644 --- a/ui/components/SettingsDialog.tsx +++ b/ui/components/SettingsDialog.tsx @@ -56,6 
+56,7 @@ interface SettingsType { }; openaiApiKey: string; groqApiKey: string; + anthropicApiKey: string; ollamaApiUrl: string; } @@ -439,6 +440,22 @@ const SettingsDialog = ({ } />
+            <div className="flex flex-col space-y-1">
+              <p className="text-black/70 dark:text-white/70 text-sm">
+                Anthropic API Key
+              </p>
+              <Input
+                type="text"
+                placeholder="Anthropic API key"
+                defaultValue={config.anthropicApiKey}
+                onChange={(e) =>
+                  setConfig({
+                    ...config,
+                    anthropicApiKey: e.target.value,
+                  })
+                }
+              />
+            </div>