diff --git a/docs/API/SEARCH.md b/docs/API/SEARCH.md index a573021..f87e788 100644 --- a/docs/API/SEARCH.md +++ b/docs/API/SEARCH.md @@ -26,6 +26,7 @@ The API accepts a JSON object in the request body, where you define the focus mo "provider": "openai", "model": "text-embedding-3-large" }, + "optimizationMode": "speed", "focusMode": "webSearch", "query": "What is Perplexica", "history": [ @@ -37,7 +38,7 @@ The API accepts a JSON object in the request body, where you define the focus mo ### Request Parameters -- **`chatModel`** (object, optional): Defines the chat model to be used for the query. For model details you can send a GET request at `http://localhost:3001/api/models`. +- **`chatModel`** (object, optional): Defines the chat model to be used for the query. For model details you can send a GET request at `http://localhost:3001/api/models`. Make sure to use the key value (For example "gpt-4o-mini" instead of the display name "GPT 4 omni mini"). - `provider`: Specifies the provider for the chat model (e.g., `openai`, `ollama`). - `model`: The specific model from the chosen provider (e.g., `gpt-4o-mini`). @@ -45,7 +46,7 @@ The API accepts a JSON object in the request body, where you define the focus mo - `customOpenAIBaseURL`: If you’re using a custom OpenAI instance, provide the base URL. - `customOpenAIKey`: The API key for a custom OpenAI instance. -- **`embeddingModel`** (object, optional): Defines the embedding model for similarity-based searching. For model details you can send a GET request at `http://localhost:3001/api/models`. +- **`embeddingModel`** (object, optional): Defines the embedding model for similarity-based searching. For model details you can send a GET request at `http://localhost:3001/api/models`. Make sure to use the key value (For example "text-embedding-3-large" instead of the display name "Text Embedding 3 Large"). - `provider`: The provider for the embedding model (e.g., `openai`). - `model`: The specific embedding model (e.g., `text-embedding-3-large`). @@ -54,9 +55,15 @@ The API accepts a JSON object in the request body, where you define the focus mo - `webSearch`, `academicSearch`, `writingAssistant`, `wolframAlphaSearch`, `youtubeSearch`, `redditSearch`. +- **`optimizationMode`** (string, optional): Specifies the optimization mode to control the balance between performance and quality. Available modes: + + - `speed`: Prioritize speed and return the fastest answer. + - `balanced`: Provide a balanced answer with good speed and reasonable quality. + - **`query`** (string, required): The search query or question. - **`history`** (array, optional): An array of message pairs representing the conversation history. Each pair consists of a role (either 'human' or 'assistant') and the message content. This allows the system to use the context of the conversation to refine results. 
Example: + ```json [ ["human", "What is Perplexica?"], diff --git a/src/agents/academicSearchAgent.ts b/src/agents/academicSearchAgent.ts index a72e3a2..4a10c98 100644 --- a/src/agents/academicSearchAgent.ts +++ b/src/agents/academicSearchAgent.ts @@ -118,7 +118,6 @@ const createBasicAcademicSearchRetrieverChain = (llm: BaseChatModel) => { engines: [ 'arxiv', 'google scholar', - 'internetarchivescholar', 'pubmed', ], }); @@ -143,6 +142,7 @@ const createBasicAcademicSearchRetrieverChain = (llm: BaseChatModel) => { const createBasicAcademicSearchAnsweringChain = ( llm: BaseChatModel, embeddings: Embeddings, + optimizationMode: 'speed' | 'balanced' | 'quality', ) => { const basicAcademicSearchRetrieverChain = createBasicAcademicSearchRetrieverChain(llm); @@ -168,26 +168,33 @@ const createBasicAcademicSearchAnsweringChain = ( (doc) => doc.pageContent && doc.pageContent.length > 0, ); - const [docEmbeddings, queryEmbedding] = await Promise.all([ - embeddings.embedDocuments(docsWithContent.map((doc) => doc.pageContent)), - embeddings.embedQuery(query), - ]); + if (optimizationMode === 'speed') { + return docsWithContent.slice(0, 15); + } else if (optimizationMode === 'balanced') { + console.log('Balanced mode'); + const [docEmbeddings, queryEmbedding] = await Promise.all([ + embeddings.embedDocuments( + docsWithContent.map((doc) => doc.pageContent), + ), + embeddings.embedQuery(query), + ]); - const similarity = docEmbeddings.map((docEmbedding, i) => { - const sim = computeSimilarity(queryEmbedding, docEmbedding); + const similarity = docEmbeddings.map((docEmbedding, i) => { + const sim = computeSimilarity(queryEmbedding, docEmbedding); - return { - index: i, - similarity: sim, - }; - }); + return { + index: i, + similarity: sim, + }; + }); - const sortedDocs = similarity - .sort((a, b) => b.similarity - a.similarity) - .slice(0, 15) - .map((sim) => docsWithContent[sim.index]); + const sortedDocs = similarity + .sort((a, b) => b.similarity - a.similarity) + .slice(0, 15) + .map((sim) => docsWithContent[sim.index]); - return sortedDocs; + return sortedDocs; + } }; return RunnableSequence.from([ @@ -224,12 +231,17 @@ const basicAcademicSearch = ( history: BaseMessage[], llm: BaseChatModel, embeddings: Embeddings, + optimizationMode: 'speed' | 'balanced' | 'quality', ) => { const emitter = new eventEmitter(); try { const basicAcademicSearchAnsweringChain = - createBasicAcademicSearchAnsweringChain(llm, embeddings); + createBasicAcademicSearchAnsweringChain( + llm, + embeddings, + optimizationMode, + ); const stream = basicAcademicSearchAnsweringChain.streamEvents( { @@ -258,8 +270,15 @@ const handleAcademicSearch = ( history: BaseMessage[], llm: BaseChatModel, embeddings: Embeddings, + optimizationMode: 'speed' | 'balanced' | 'quality', ) => { - const emitter = basicAcademicSearch(message, history, llm, embeddings); + const emitter = basicAcademicSearch( + message, + history, + llm, + embeddings, + optimizationMode, + ); return emitter; }; diff --git a/src/agents/redditSearchAgent.ts b/src/agents/redditSearchAgent.ts index 9c2c443..2c44c13 100644 --- a/src/agents/redditSearchAgent.ts +++ b/src/agents/redditSearchAgent.ts @@ -138,6 +138,7 @@ const createBasicRedditSearchRetrieverChain = (llm: BaseChatModel) => { const createBasicRedditSearchAnsweringChain = ( llm: BaseChatModel, embeddings: Embeddings, + optimizationMode: 'speed' | 'balanced' | 'quality', ) => { const basicRedditSearchRetrieverChain = createBasicRedditSearchRetrieverChain(llm); @@ -163,27 +164,33 @@ const 
createBasicRedditSearchAnsweringChain = ( (doc) => doc.pageContent && doc.pageContent.length > 0, ); - const [docEmbeddings, queryEmbedding] = await Promise.all([ - embeddings.embedDocuments(docsWithContent.map((doc) => doc.pageContent)), - embeddings.embedQuery(query), - ]); + if (optimizationMode === 'speed') { + return docsWithContent.slice(0, 15); + } else if (optimizationMode === 'balanced') { + const [docEmbeddings, queryEmbedding] = await Promise.all([ + embeddings.embedDocuments( + docsWithContent.map((doc) => doc.pageContent), + ), + embeddings.embedQuery(query), + ]); - const similarity = docEmbeddings.map((docEmbedding, i) => { - const sim = computeSimilarity(queryEmbedding, docEmbedding); + const similarity = docEmbeddings.map((docEmbedding, i) => { + const sim = computeSimilarity(queryEmbedding, docEmbedding); - return { - index: i, - similarity: sim, - }; - }); + return { + index: i, + similarity: sim, + }; + }); - const sortedDocs = similarity - .filter((sim) => sim.similarity > 0.3) - .sort((a, b) => b.similarity - a.similarity) - .slice(0, 15) - .map((sim) => docsWithContent[sim.index]); + const sortedDocs = similarity + .filter((sim) => sim.similarity > 0.3) + .sort((a, b) => b.similarity - a.similarity) + .slice(0, 15) + .map((sim) => docsWithContent[sim.index]); - return sortedDocs; + return sortedDocs; + } }; return RunnableSequence.from([ @@ -220,12 +227,13 @@ const basicRedditSearch = ( history: BaseMessage[], llm: BaseChatModel, embeddings: Embeddings, + optimizationMode: 'speed' | 'balanced' | 'quality', ) => { const emitter = new eventEmitter(); try { const basicRedditSearchAnsweringChain = - createBasicRedditSearchAnsweringChain(llm, embeddings); + createBasicRedditSearchAnsweringChain(llm, embeddings, optimizationMode); const stream = basicRedditSearchAnsweringChain.streamEvents( { chat_history: history, @@ -253,8 +261,15 @@ const handleRedditSearch = ( history: BaseMessage[], llm: BaseChatModel, embeddings: Embeddings, + optimizationMode: 'speed' | 'balanced' | 'quality', ) => { - const emitter = basicRedditSearch(message, history, llm, embeddings); + const emitter = basicRedditSearch( + message, + history, + llm, + embeddings, + optimizationMode, + ); return emitter; }; diff --git a/src/agents/webSearchAgent.ts b/src/agents/webSearchAgent.ts index 77ec181..51653a0 100644 --- a/src/agents/webSearchAgent.ts +++ b/src/agents/webSearchAgent.ts @@ -216,12 +216,34 @@ const createBasicWebSearchRetrieverChain = (llm: BaseChatModel) => { await Promise.all( docGroups.map(async (doc) => { const res = await llm.invoke(` - You are a text summarizer. You need to summarize the text provided inside the \`text\` XML block. - You need to summarize the text into 1 or 2 sentences capturing the main idea of the text. - You need to make sure that you don't miss any point while summarizing the text. - You will also be given a \`query\` XML block which will contain the query of the user. Try to answer the query in the summary from the text provided. - If the query says Summarize then you just need to summarize the text without answering the query. - Only return the summarized text without any other messages, text or XML block. + You are a web search summarizer, tasked with summarizing a piece of text retrieved from a web search. Your job is to summarize the + text into a detailed, 2-4 paragraph explanation that captures the main ideas and provides a comprehensive answer to the query. + If the query is \"summarize\", you should provide a detailed summary of the text. 
If the query is a specific question, you should answer it in the summary. + + - **Journalistic tone**: The summary should sound professional and journalistic, not too casual or vague. + - **Thorough and detailed**: Ensure that every key point from the text is captured and that the summary directly answers the query. + - **Not too lengthy, but detailed**: The summary should be informative but not excessively long. Focus on providing detailed information in a concise format. + + The text will be shared inside the \`text\` XML tag, and the query inside the \`query\` XML tag. + + + + Docker is a set of platform-as-a-service products that use OS-level virtualization to deliver software in packages called containers. + It was first released in 2013 and is developed by Docker, Inc. Docker is designed to make it easier to create, deploy, and run applications + by using containers. + + + + What is Docker and how does it work? + + + Response: + Docker is a revolutionary platform-as-a-service product developed by Docker, Inc., that uses container technology to make application + deployment more efficient. It allows developers to package their software with all necessary dependencies, making it easier to run in + any environment. Released in 2013, Docker has transformed the way applications are built, deployed, and managed. + + + Everything below is the actual data you will be working with. Good luck! ${question} @@ -273,6 +295,7 @@ const createBasicWebSearchRetrieverChain = (llm: BaseChatModel) => { const createBasicWebSearchAnsweringChain = ( llm: BaseChatModel, embeddings: Embeddings, + optimizationMode: 'speed' | 'balanced' | 'quality', ) => { const basicWebSearchRetrieverChain = createBasicWebSearchRetrieverChain(llm); @@ -301,27 +324,33 @@ const createBasicWebSearchAnsweringChain = ( (doc) => doc.pageContent && doc.pageContent.length > 0, ); - const [docEmbeddings, queryEmbedding] = await Promise.all([ - embeddings.embedDocuments(docsWithContent.map((doc) => doc.pageContent)), - embeddings.embedQuery(query), - ]); + if (optimizationMode === 'speed') { + return docsWithContent.slice(0, 15); + } else if (optimizationMode === 'balanced') { + const [docEmbeddings, queryEmbedding] = await Promise.all([ + embeddings.embedDocuments( + docsWithContent.map((doc) => doc.pageContent), + ), + embeddings.embedQuery(query), + ]); - const similarity = docEmbeddings.map((docEmbedding, i) => { - const sim = computeSimilarity(queryEmbedding, docEmbedding); + const similarity = docEmbeddings.map((docEmbedding, i) => { + const sim = computeSimilarity(queryEmbedding, docEmbedding); - return { - index: i, - similarity: sim, - }; - }); + return { + index: i, + similarity: sim, + }; + }); - const sortedDocs = similarity - .filter((sim) => sim.similarity > 0.3) - .sort((a, b) => b.similarity - a.similarity) - .slice(0, 15) - .map((sim) => docsWithContent[sim.index]); + const sortedDocs = similarity + .filter((sim) => sim.similarity > 0.3) + .sort((a, b) => b.similarity - a.similarity) + .slice(0, 15) + .map((sim) => docsWithContent[sim.index]); - return sortedDocs; + return sortedDocs; + } }; return RunnableSequence.from([ @@ -358,6 +387,7 @@ const basicWebSearch = ( history: BaseMessage[], llm: BaseChatModel, embeddings: Embeddings, + optimizationMode: 'speed' | 'balanced' | 'quality', ) => { const emitter = new eventEmitter(); @@ -365,6 +395,7 @@ const basicWebSearch = ( const basicWebSearchAnsweringChain = createBasicWebSearchAnsweringChain( llm, embeddings, + optimizationMode, ); const stream = 
basicWebSearchAnsweringChain.streamEvents( @@ -394,8 +425,15 @@ const handleWebSearch = ( history: BaseMessage[], llm: BaseChatModel, embeddings: Embeddings, + optimizationMode: 'speed' | 'balanced' | 'quality', ) => { - const emitter = basicWebSearch(message, history, llm, embeddings); + const emitter = basicWebSearch( + message, + history, + llm, + embeddings, + optimizationMode, + ); return emitter; }; diff --git a/src/agents/youtubeSearchAgent.ts b/src/agents/youtubeSearchAgent.ts index e9b6553..2f53bc9 100644 --- a/src/agents/youtubeSearchAgent.ts +++ b/src/agents/youtubeSearchAgent.ts @@ -138,6 +138,7 @@ const createBasicYoutubeSearchRetrieverChain = (llm: BaseChatModel) => { const createBasicYoutubeSearchAnsweringChain = ( llm: BaseChatModel, embeddings: Embeddings, + optimizationMode: 'speed' | 'balanced' | 'quality', ) => { const basicYoutubeSearchRetrieverChain = createBasicYoutubeSearchRetrieverChain(llm); @@ -163,27 +164,33 @@ const createBasicYoutubeSearchAnsweringChain = ( (doc) => doc.pageContent && doc.pageContent.length > 0, ); - const [docEmbeddings, queryEmbedding] = await Promise.all([ - embeddings.embedDocuments(docsWithContent.map((doc) => doc.pageContent)), - embeddings.embedQuery(query), - ]); + if (optimizationMode === 'speed') { + return docsWithContent.slice(0, 15); + } else { + const [docEmbeddings, queryEmbedding] = await Promise.all([ + embeddings.embedDocuments( + docsWithContent.map((doc) => doc.pageContent), + ), + embeddings.embedQuery(query), + ]); - const similarity = docEmbeddings.map((docEmbedding, i) => { - const sim = computeSimilarity(queryEmbedding, docEmbedding); + const similarity = docEmbeddings.map((docEmbedding, i) => { + const sim = computeSimilarity(queryEmbedding, docEmbedding); - return { - index: i, - similarity: sim, - }; - }); + return { + index: i, + similarity: sim, + }; + }); - const sortedDocs = similarity - .filter((sim) => sim.similarity > 0.3) - .sort((a, b) => b.similarity - a.similarity) - .slice(0, 15) - .map((sim) => docsWithContent[sim.index]); + const sortedDocs = similarity + .filter((sim) => sim.similarity > 0.3) + .sort((a, b) => b.similarity - a.similarity) + .slice(0, 15) + .map((sim) => docsWithContent[sim.index]); - return sortedDocs; + return sortedDocs; + } }; return RunnableSequence.from([ @@ -220,12 +227,13 @@ const basicYoutubeSearch = ( history: BaseMessage[], llm: BaseChatModel, embeddings: Embeddings, + optimizationMode: 'speed' | 'balanced' | 'quality', ) => { const emitter = new eventEmitter(); try { const basicYoutubeSearchAnsweringChain = - createBasicYoutubeSearchAnsweringChain(llm, embeddings); + createBasicYoutubeSearchAnsweringChain(llm, embeddings, optimizationMode); const stream = basicYoutubeSearchAnsweringChain.streamEvents( { @@ -254,8 +262,15 @@ const handleYoutubeSearch = ( history: BaseMessage[], llm: BaseChatModel, embeddings: Embeddings, + optimizationMode: 'speed' | 'balanced' | 'quality', ) => { - const emitter = basicYoutubeSearch(message, history, llm, embeddings); + const emitter = basicYoutubeSearch( + message, + history, + llm, + embeddings, + optimizationMode, + ); return emitter; }; diff --git a/src/routes/models.ts b/src/routes/models.ts index 36df25a..c4f5d40 100644 --- a/src/routes/models.ts +++ b/src/routes/models.ts @@ -12,7 +12,19 @@ router.get('/', async (req, res) => { const [chatModelProviders, embeddingModelProviders] = await Promise.all([ getAvailableChatModelProviders(), getAvailableEmbeddingModelProviders(), - ]); + ]); + + 
Object.keys(chatModelProviders).forEach((provider) => { + Object.keys(chatModelProviders[provider]).forEach((model) => { + delete chatModelProviders[provider][model].model; + }); + }); + + Object.keys(embeddingModelProviders).forEach((provider) => { + Object.keys(embeddingModelProviders[provider]).forEach((model) => { + delete embeddingModelProviders[provider][model].model; + }); + }); res.status(200).json({ chatModelProviders, embeddingModelProviders }); } catch (err) { diff --git a/src/routes/search.ts b/src/routes/search.ts index 9eec29f..6684632 100644 --- a/src/routes/search.ts +++ b/src/routes/search.ts @@ -25,6 +25,7 @@ interface embeddingModel { } interface ChatRequestBody { + optimizationMode: 'speed' | 'balanced'; focusMode: string; chatModel?: chatModel; embeddingModel?: embeddingModel; @@ -41,6 +42,7 @@ router.post('/', async (req, res) => { } body.history = body.history || []; + body.optimizationMode = body.optimizationMode || 'balanced'; const history: BaseMessage[] = body.history.map((msg) => { if (msg[0] === 'human') { @@ -119,7 +121,7 @@ router.post('/', async (req, res) => { return res.status(400).json({ message: 'Invalid focus mode' }); } - const emitter = searchHandler(body.query, history, llm, embeddings); + const emitter = searchHandler(body.query, history, llm, embeddings, body.optimizationMode); let message = ''; let sources = []; diff --git a/src/websocket/messageHandler.ts b/src/websocket/messageHandler.ts index 332910c..d230386 100644 --- a/src/websocket/messageHandler.ts +++ b/src/websocket/messageHandler.ts @@ -22,7 +22,7 @@ type Message = { type WSMessage = { message: Message; - copilot: boolean; + optimizationMode: string; type: string; focusMode: string; history: Array<[string, string]>; @@ -138,6 +138,7 @@ export const handleMessage = async ( history, llm, embeddings, + parsedWSMessage.optimizationMode, ); handleEmitterEvents(emitter, ws, id, parsedMessage.chatId); diff --git a/ui/components/ChatWindow.tsx b/ui/components/ChatWindow.tsx index 96fe859..624cdd3 100644 --- a/ui/components/ChatWindow.tsx +++ b/ui/components/ChatWindow.tsx @@ -315,6 +315,7 @@ const ChatWindow = ({ id }: { id?: string }) => { const [messages, setMessages] = useState([]); const [focusMode, setFocusMode] = useState('webSearch'); + const [optimizationMode, setOptimizationMode] = useState('speed'); const [isMessagesLoaded, setIsMessagesLoaded] = useState(false); @@ -386,6 +387,7 @@ const ChatWindow = ({ id }: { id?: string }) => { content: message, }, focusMode: focusMode, + optimizationMode: optimizationMode, history: [...chatHistory, ['human', message]], }), ); @@ -548,6 +550,8 @@ const ChatWindow = ({ id }: { id?: string }) => { sendMessage={sendMessage} focusMode={focusMode} setFocusMode={setFocusMode} + optimizationMode={optimizationMode} + setOptimizationMode={setOptimizationMode} /> )} diff --git a/ui/components/DeleteChat.tsx b/ui/components/DeleteChat.tsx index 165f86e..f981e32 100644 --- a/ui/components/DeleteChat.tsx +++ b/ui/components/DeleteChat.tsx @@ -1,5 +1,13 @@ -import { Delete, Trash } from 'lucide-react'; -import { Dialog, Transition } from '@headlessui/react'; +import { Trash } from 'lucide-react'; +import { + Description, + Dialog, + DialogBackdrop, + DialogPanel, + DialogTitle, + Transition, + TransitionChild, +} from '@headlessui/react'; import { Fragment, useState } from 'react'; import { toast } from 'sonner'; import { Chat } from '@/app/library/page'; @@ -64,10 +72,10 @@ const DeleteChat = ({ } }} > - +
- - - + + Delete Confirmation - - + + Are you sure you want to delete this chat? - +
-
-
+ +
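The `DeleteChat.tsx` hunk above is the visible part of the Headless UI v1 → v2 migration: v2 drops the dot-notation subcomponents (`Dialog.Panel`, `Dialog.Title`, `Dialog.Description`, `Transition.Child`) in favour of standalone named exports, which is why the import list now pulls in `DialogPanel`, `DialogTitle`, `Description`, and `TransitionChild`. A minimal sketch of the v2 confirmation-dialog shape, using only the components imported above — styling, loading state, and the actual delete request are omitted, so this is illustrative rather than the real component:

```tsx
import { Fragment, useState } from 'react';
import {
  Description,
  Dialog,
  DialogPanel,
  DialogTitle,
  Transition,
  TransitionChild,
} from '@headlessui/react';

// Sketch only: the real DeleteChat component also handles loading state,
// the DELETE call, and updating the chat list afterwards.
const DeleteConfirmationSketch = ({ onConfirm }: { onConfirm: () => void }) => {
  const [open, setOpen] = useState(false);

  return (
    <>
      <button onClick={() => setOpen(true)}>Delete</button>
      <Transition appear show={open} as={Fragment}>
        <Dialog as="div" className="relative z-50" onClose={() => setOpen(false)}>
          <TransitionChild as={Fragment}>
            {/* DialogPanel / DialogTitle / Description replace the v1
                Dialog.Panel / Dialog.Title / Dialog.Description */}
            <DialogPanel>
              <DialogTitle>Delete Confirmation</DialogTitle>
              <Description>Are you sure you want to delete this chat?</Description>
              <button onClick={() => setOpen(false)}>Cancel</button>
              <button onClick={onConfirm}>Delete</button>
            </DialogPanel>
          </TransitionChild>
        </Dialog>
      </Transition>
    </>
  );
};

export default DeleteConfirmationSketch;
```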
diff --git a/ui/components/EmptyChat.tsx b/ui/components/EmptyChat.tsx index ea3642b..63f186c 100644 --- a/ui/components/EmptyChat.tsx +++ b/ui/components/EmptyChat.tsx @@ -4,10 +4,14 @@ const EmptyChat = ({ sendMessage, focusMode, setFocusMode, + optimizationMode, + setOptimizationMode, }: { sendMessage: (message: string) => void; focusMode: string; setFocusMode: (mode: string) => void; + optimizationMode: string; + setOptimizationMode: (mode: string) => void; }) => { return (
@@ -19,6 +23,8 @@ const EmptyChat = ({ sendMessage={sendMessage} focusMode={focusMode} setFocusMode={setFocusMode} + optimizationMode={optimizationMode} + setOptimizationMode={setOptimizationMode} />
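The `ChatWindow.tsx` and `EmptyChat.tsx` hunks above thread the new mode from React state down into the message input, and `ChatWindow` now includes it in the WebSocket payload next to `focusMode`, matching the `WSMessage` type updated in `messageHandler.ts`. A rough sketch of the payload the frontend now emits — the `type` value and the `chatId` field are assumptions, the remaining fields come from the hunks above:

```ts
// Shape of the message ChatWindow.tsx sends over the WebSocket
// (mirrors the WSMessage type in src/websocket/messageHandler.ts).
type WSMessage = {
  type: string;
  message: { chatId?: string; content: string };
  focusMode: string;
  optimizationMode: string; // 'speed' (the UI default) or 'balanced'
  history: Array<[string, string]>;
};

const payload: WSMessage = {
  type: 'message', // assumed value; not shown in the diff
  message: { content: 'What is Perplexica?' },
  focusMode: 'webSearch',
  optimizationMode: 'speed',
  history: [['human', 'What is Perplexica?']],
};

// ws.send(JSON.stringify(payload));
```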
diff --git a/ui/components/EmptyChatMessageInput.tsx b/ui/components/EmptyChatMessageInput.tsx index 39d3f16..845bed9 100644 --- a/ui/components/EmptyChatMessageInput.tsx +++ b/ui/components/EmptyChatMessageInput.tsx @@ -3,15 +3,20 @@ import { useEffect, useRef, useState } from 'react'; import TextareaAutosize from 'react-textarea-autosize'; import CopilotToggle from './MessageInputActions/Copilot'; import Focus from './MessageInputActions/Focus'; +import Optimization from './MessageInputActions/Optimization'; const EmptyChatMessageInput = ({ sendMessage, focusMode, setFocusMode, + optimizationMode, + setOptimizationMode, }: { sendMessage: (message: string) => void; focusMode: string; setFocusMode: (mode: string) => void; + optimizationMode: string; + setOptimizationMode: (mode: string) => void; }) => { const [copilotEnabled, setCopilotEnabled] = useState(false); const [message, setMessage] = useState(''); @@ -66,14 +71,13 @@ const EmptyChatMessageInput = ({ placeholder="Ask anything..." />
-
+
- {/* */}
-
- +
- - + +
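Behind the UI, every agent touched above (web, academic, Reddit, YouTube) applies the same branch inside its answering chain: in `speed` mode the first 15 retrieved documents are returned as-is, while `balanced` mode embeds the documents and the query and reranks by similarity before taking the top 15. A condensed sketch of that shared branch — the `rerankDocs` helper name and the import paths are assumptions; `computeSimilarity`, the 0.3 similarity cutoff, and the top-15 limit come straight from the hunks above:

```ts
import type { Document } from '@langchain/core/documents';
import type { Embeddings } from '@langchain/core/embeddings';
// Assumed path; the agents import the same helper relative to src/agents.
import computeSimilarity from '../utils/computeSimilarity';

// Condensed version of the logic duplicated across the four agents:
// skip the embedding round-trip entirely in 'speed' mode, rerank otherwise.
const rerankDocs = async (
  query: string,
  docs: Document[],
  embeddings: Embeddings,
  optimizationMode: 'speed' | 'balanced' | 'quality',
): Promise<Document[]> => {
  const docsWithContent = docs.filter(
    (doc) => doc.pageContent && doc.pageContent.length > 0,
  );

  if (optimizationMode === 'speed') {
    return docsWithContent.slice(0, 15);
  }

  const [docEmbeddings, queryEmbedding] = await Promise.all([
    embeddings.embedDocuments(docsWithContent.map((doc) => doc.pageContent)),
    embeddings.embedQuery(query),
  ]);

  return docEmbeddings
    .map((docEmbedding, i) => ({
      index: i,
      similarity: computeSimilarity(queryEmbedding, docEmbedding),
    }))
    .filter((sim) => sim.similarity > 0.3)
    .sort((a, b) => b.similarity - a.similarity)
    .slice(0, 15)
    .map((sim) => docsWithContent[sim.index]);
};
```

Note that the sketch treats anything other than `speed` as the reranking path, which matches the YouTube agent's plain `else`; in the academic, Reddit, and web agents the diff only branches on `speed` and `balanced`, so a `quality` value currently falls through both branches there.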
diff --git a/ui/package.json b/ui/package.json index 04512b6..e5dc677 100644 --- a/ui/package.json +++ b/ui/package.json @@ -11,7 +11,7 @@ "format:write": "prettier . --write" }, "dependencies": { - "@headlessui/react": "^1.7.18", + "@headlessui/react": "^2.1.9", "@icons-pack/react-simple-icons": "^9.4.0", "@langchain/openai": "^0.0.25", "@tailwindcss/typography": "^0.5.12", diff --git a/ui/yarn.lock b/ui/yarn.lock index d348a8c..fad2596 100644 --- a/ui/yarn.lock +++ b/ui/yarn.lock @@ -66,13 +66,51 @@ resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.57.0.tgz#a5417ae8427873f1dd08b70b3574b453e67b5f7f" integrity sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g== -"@headlessui/react@^1.7.18": - version "1.7.18" - resolved "https://registry.yarnpkg.com/@headlessui/react/-/react-1.7.18.tgz#30af4634d2215b2ca1aa29d07f33d02bea82d9d7" - integrity sha512-4i5DOrzwN4qSgNsL4Si61VMkUcWbcSKueUV7sFhpHzQcSShdlHENE5+QBntMSRvHt8NyoFO2AGG8si9lq+w4zQ== +"@floating-ui/core@^1.6.0": + version "1.6.8" + resolved "https://registry.yarnpkg.com/@floating-ui/core/-/core-1.6.8.tgz#aa43561be075815879305965020f492cdb43da12" + integrity sha512-7XJ9cPU+yI2QeLS+FCSlqNFZJq8arvswefkZrYI1yQBbftw6FyrZOxYSh+9S7z7TpeWlRt9zJ5IhM1WIL334jA== dependencies: - "@tanstack/react-virtual" "^3.0.0-beta.60" - client-only "^0.0.1" + "@floating-ui/utils" "^0.2.8" + +"@floating-ui/dom@^1.0.0": + version "1.6.11" + resolved "https://registry.yarnpkg.com/@floating-ui/dom/-/dom-1.6.11.tgz#8631857838d34ee5712339eb7cbdfb8ad34da723" + integrity sha512-qkMCxSR24v2vGkhYDo/UzxfJN3D4syqSjyuTFz6C7XcpU1pASPRieNI0Kj5VP3/503mOfYiGY891ugBX1GlABQ== + dependencies: + "@floating-ui/core" "^1.6.0" + "@floating-ui/utils" "^0.2.8" + +"@floating-ui/react-dom@^2.1.2": + version "2.1.2" + resolved "https://registry.yarnpkg.com/@floating-ui/react-dom/-/react-dom-2.1.2.tgz#a1349bbf6a0e5cb5ded55d023766f20a4d439a31" + integrity sha512-06okr5cgPzMNBy+Ycse2A6udMi4bqwW/zgBF/rwjcNqWkyr82Mcg8b0vjX8OJpZFy/FKjJmw6wV7t44kK6kW7A== + dependencies: + "@floating-ui/dom" "^1.0.0" + +"@floating-ui/react@^0.26.16": + version "0.26.24" + resolved "https://registry.yarnpkg.com/@floating-ui/react/-/react-0.26.24.tgz#072b9dfeca4e79ef4e3000ef1c28e0ffc86f4ed4" + integrity sha512-2ly0pCkZIGEQUq5H8bBK0XJmc1xIK/RM3tvVzY3GBER7IOD1UgmC2Y2tjj4AuS+TC+vTE1KJv2053290jua0Sw== + dependencies: + "@floating-ui/react-dom" "^2.1.2" + "@floating-ui/utils" "^0.2.8" + tabbable "^6.0.0" + +"@floating-ui/utils@^0.2.8": + version "0.2.8" + resolved "https://registry.yarnpkg.com/@floating-ui/utils/-/utils-0.2.8.tgz#21a907684723bbbaa5f0974cf7730bd797eb8e62" + integrity sha512-kym7SodPp8/wloecOpcmSnWJsK7M0E5Wg8UcFA+uO4B9s5d0ywXOEro/8HM9x0rW+TljRzul/14UYz3TleT3ig== + +"@headlessui/react@^2.1.9": + version "2.1.9" + resolved "https://registry.yarnpkg.com/@headlessui/react/-/react-2.1.9.tgz#d8d3ff64255177a87706cc4f24f42aeac65b1695" + integrity sha512-ckWw7vlKtnoa1fL2X0fx1a3t/Li9MIKDVXn3SgG65YlxvDAsNrY39PPCxVM7sQRA7go2fJsuHSSauKFNaJHH7A== + dependencies: + "@floating-ui/react" "^0.26.16" + "@react-aria/focus" "^3.17.1" + "@react-aria/interactions" "^3.21.3" + "@tanstack/react-virtual" "^3.8.1" "@humanwhocodes/config-array@^0.11.14": version "0.11.14" @@ -278,6 +316,57 @@ resolved "https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33" integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg== +"@react-aria/focus@^3.17.1": + version "3.18.3" + resolved 
"https://registry.yarnpkg.com/@react-aria/focus/-/focus-3.18.3.tgz#4fe32de1e7530beab8da2e7b89f0f17d22a47e5e" + integrity sha512-WKUElg+5zS0D3xlVn8MntNnkzJql2J6MuzAMP8Sv5WTgFDse/XGR842dsxPTIyKKdrWVCRegCuwa4m3n/GzgJw== + dependencies: + "@react-aria/interactions" "^3.22.3" + "@react-aria/utils" "^3.25.3" + "@react-types/shared" "^3.25.0" + "@swc/helpers" "^0.5.0" + clsx "^2.0.0" + +"@react-aria/interactions@^3.21.3", "@react-aria/interactions@^3.22.3": + version "3.22.3" + resolved "https://registry.yarnpkg.com/@react-aria/interactions/-/interactions-3.22.3.tgz#3ba50db12f6ed443ae061eed79e41509eaa3d8e6" + integrity sha512-RRUb/aG+P0IKTIWikY/SylB6bIbLZeztnZY2vbe7RAG5MgVaCgn5HQ45SI15GlTmhsFG8CnF6slJsUFJiNHpbQ== + dependencies: + "@react-aria/ssr" "^3.9.6" + "@react-aria/utils" "^3.25.3" + "@react-types/shared" "^3.25.0" + "@swc/helpers" "^0.5.0" + +"@react-aria/ssr@^3.9.6": + version "3.9.6" + resolved "https://registry.yarnpkg.com/@react-aria/ssr/-/ssr-3.9.6.tgz#a9e8b351acdc8238f2b5215b0ce904636c6ea690" + integrity sha512-iLo82l82ilMiVGy342SELjshuWottlb5+VefO3jOQqQRNYnJBFpUSadswDPbRimSgJUZuFwIEYs6AabkP038fA== + dependencies: + "@swc/helpers" "^0.5.0" + +"@react-aria/utils@^3.25.3": + version "3.25.3" + resolved "https://registry.yarnpkg.com/@react-aria/utils/-/utils-3.25.3.tgz#cad9bffc07b045cdc283df2cb65c18747acbf76d" + integrity sha512-PR5H/2vaD8fSq0H/UB9inNbc8KDcVmW6fYAfSWkkn+OAdhTTMVKqXXrZuZBWyFfSD5Ze7VN6acr4hrOQm2bmrA== + dependencies: + "@react-aria/ssr" "^3.9.6" + "@react-stately/utils" "^3.10.4" + "@react-types/shared" "^3.25.0" + "@swc/helpers" "^0.5.0" + clsx "^2.0.0" + +"@react-stately/utils@^3.10.4": + version "3.10.4" + resolved "https://registry.yarnpkg.com/@react-stately/utils/-/utils-3.10.4.tgz#310663a834b67048d305e1680ed258130092fe51" + integrity sha512-gBEQEIMRh5f60KCm7QKQ2WfvhB2gLUr9b72sqUdIZ2EG+xuPgaIlCBeSicvjmjBvYZwOjoOEnmIkcx2GHp/HWw== + dependencies: + "@swc/helpers" "^0.5.0" + +"@react-types/shared@^3.25.0": + version "3.25.0" + resolved "https://registry.yarnpkg.com/@react-types/shared/-/shared-3.25.0.tgz#7223baf72256e918a3c29081bb1ecc6fad4fbf58" + integrity sha512-OZSyhzU6vTdW3eV/mz5i6hQwQUhkRs7xwY2d1aqPvTdMe0+2cY7Fwp45PAiwYLEj73i9ro2FxF9qC4DvHGSCgQ== + "@rushstack/eslint-patch@^1.3.3": version "1.10.1" resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.10.1.tgz#7ca168b6937818e9a74b47ac4e2112b2e1a024cf" @@ -290,6 +379,13 @@ dependencies: tslib "^2.4.0" +"@swc/helpers@^0.5.0": + version "0.5.13" + resolved "https://registry.yarnpkg.com/@swc/helpers/-/helpers-0.5.13.tgz#33e63ff3cd0cade557672bd7888a39ce7d115a8c" + integrity sha512-UoKGxQ3r5kYI9dALKJapMmuK+1zWM/H17Z1+iwnNmzcJRnfFuevZs375TA5rW31pu4BS4NoSy1fRsexDXfWn5w== + dependencies: + tslib "^2.4.0" + "@tailwindcss/typography@^0.5.12": version "0.5.12" resolved "https://registry.yarnpkg.com/@tailwindcss/typography/-/typography-0.5.12.tgz#c0532fd594427b7f4e8e38eff7bf272c63a1dca4" @@ -300,17 +396,17 @@ lodash.merge "^4.6.2" postcss-selector-parser "6.0.10" -"@tanstack/react-virtual@^3.0.0-beta.60": - version "3.2.0" - resolved "https://registry.yarnpkg.com/@tanstack/react-virtual/-/react-virtual-3.2.0.tgz#fb70f9c6baee753a5a0f7618ac886205d5a02af9" - integrity sha512-OEdMByf2hEfDa6XDbGlZN8qO6bTjlNKqjM3im9JG+u3mCL8jALy0T/67oDI001raUUPh1Bdmfn4ZvPOV5knpcg== +"@tanstack/react-virtual@^3.8.1": + version "3.10.8" + resolved "https://registry.yarnpkg.com/@tanstack/react-virtual/-/react-virtual-3.10.8.tgz#bf4b06f157ed298644a96ab7efc1a2b01ab36e3c" + integrity 
sha512-VbzbVGSsZlQktyLrP5nxE+vE1ZR+U0NFAWPbJLoG2+DKPwd2D7dVICTVIIaYlJqX1ZCEnYDbaOpmMwbsyhBoIA== dependencies: - "@tanstack/virtual-core" "3.2.0" + "@tanstack/virtual-core" "3.10.8" -"@tanstack/virtual-core@3.2.0": - version "3.2.0" - resolved "https://registry.yarnpkg.com/@tanstack/virtual-core/-/virtual-core-3.2.0.tgz#874d36135e4badce2719e7bdc556ce240cbaff14" - integrity sha512-P5XgYoAw/vfW65byBbJQCw+cagdXDT/qH6wmABiLt4v4YBT2q2vqCOhihe+D1Nt325F/S/0Tkv6C5z0Lv+VBQQ== +"@tanstack/virtual-core@3.10.8": + version "3.10.8" + resolved "https://registry.yarnpkg.com/@tanstack/virtual-core/-/virtual-core-3.10.8.tgz#975446a667755222f62884c19e5c3c66d959b8b4" + integrity sha512-PBu00mtt95jbKFi6Llk9aik8bnR3tR/oQP1o3TSi+iG//+Q2RTIzCEgKkHG8BB86kxMNW6O8wku+Lmi+QFR6jA== "@types/json5@^0.0.29": version "0.0.29" @@ -779,11 +875,16 @@ chokidar@^3.5.3: optionalDependencies: fsevents "~2.3.2" -client-only@0.0.1, client-only@^0.0.1: +client-only@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/client-only/-/client-only-0.0.1.tgz#38bba5d403c41ab150bff64a95c85013cf73bca1" integrity sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA== +clsx@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/clsx/-/clsx-2.1.1.tgz#eed397c9fd8bd882bfb18deab7102049a2f32999" + integrity sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA== + clsx@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/clsx/-/clsx-2.1.0.tgz#e851283bcb5c80ee7608db18487433f7b23f77cb" @@ -2995,6 +3096,11 @@ supports-preserve-symlinks-flag@^1.0.0: resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== +tabbable@^6.0.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/tabbable/-/tabbable-6.2.0.tgz#732fb62bc0175cfcec257330be187dcfba1f3b97" + integrity sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew== + tailwind-merge@^2.2.2: version "2.2.2" resolved "https://registry.yarnpkg.com/tailwind-merge/-/tailwind-merge-2.2.2.tgz#87341e7604f0e20499939e152cd2841f41f7a3df"
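End to end, the new parameter is exercised through the updated `/api/search` route. A minimal request sketch following the documented body from `SEARCH.md` above — the port and model keys come from the docs, and `optimizationMode` falls back to `'balanced'` on the server when omitted, per the `search.ts` hunk:

```ts
// Minimal sketch of a request against the updated /api/search endpoint.
const res = await fetch('http://localhost:3001/api/search', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    chatModel: { provider: 'openai', model: 'gpt-4o-mini' },
    embeddingModel: { provider: 'openai', model: 'text-embedding-3-large' },
    optimizationMode: 'speed', // omit to get the server-side 'balanced' default
    focusMode: 'webSearch',
    query: 'What is Perplexica',
    history: [
      ['human', 'Hi, how are you?'],
      ['assistant', 'I am doing well, how can I help you today?'],
    ],
  }),
});

console.log(await res.json());
```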