Compare commits

...

20 Commits

Author SHA1 Message Date
yeet dc9f4b123a Merge remote-tracking branch 'origin/master' into ollama-auth 2024-10-11 10:26:16 +02:00
yeet b048c4b173 Reapply "Merge remote-tracking branch 'origin/master' into ollama-auth"
This reverts commit d75f1c743e.
2024-10-11 10:26:10 +02:00
ItzCrazyKns 0a7167eb04 feat(search-api): add `optimizationMode` 2024-10-11 10:54:08 +05:30
ItzCrazyKns 7cce853618 feat(providers): add optimization modes 2024-10-11 10:35:59 +05:30
ItzCrazyKns 877735b852 feat(package): update `headlessui` 2024-10-11 10:35:33 +05:30
yeet d75f1c743e Revert "Merge remote-tracking branch 'origin/master' into ollama-auth"
This reverts commit 3631e537d4, reversing
changes made to b86cf3c315.
2024-10-07 22:39:06 +02:00
yeet 3631e537d4 Merge remote-tracking branch 'origin/master' into ollama-auth 2024-10-07 22:25:35 +02:00
ItzCrazyKns 1680a1786e feat(image-build): improve build time by caching 2024-10-03 10:41:05 +05:30
ItzCrazyKns 66f1e19ce8 feat(image-build): use Docker buildx, publish multi arch images 2024-10-03 09:37:15 +05:30
ItzCrazyKns ae3fc5f802 feat(docs): modify updating docs 2024-10-02 22:54:16 +05:30
ItzCrazyKns 9f88d16ef1 feat(docker-compose): use env vars from compose 2024-10-02 22:54:00 +05:30
ItzCrazyKns c233362e70 feat(dockerfile): specify default args 2024-10-02 22:53:45 +05:30
ItzCrazyKns 1aaf172246 feat(build-workflow): update head 2024-10-02 22:01:49 +05:30
ItzCrazyKns 4bba674134 feat(build-workflow): update branch 2024-10-02 22:00:46 +05:30
ItzCrazyKns dcfe43ebda trigger build 2024-10-02 22:00:04 +05:30
ItzCrazyKns fc5e35b1b1 feat(docker): add prebuilt images 2024-10-02 21:59:40 +05:30
ItzCrazyKns 425a08432b feat(groq): add Llama 3.2 2024-09-26 21:37:05 +05:30
ItzCrazyKns e3488366c1 Update SEARCH.md 2024-09-25 17:56:19 +05:30
ItzCrazyKns 8902abdcee Update SEARCH.md 2024-09-25 17:54:35 +05:30
ItzCrazyKns 15203c123d feat(docs): update search docs 2024-09-25 17:49:16 +05:30
25 changed files with 640 additions and 163 deletions

70
.github/workflows/docker-build.yaml vendored Normal file
View File

@ -0,0 +1,70 @@
name: Build & Push Docker Images
on:
push:
branches:
- master
release:
types: [published]
jobs:
build-and-push:
runs-on: ubuntu-latest
strategy:
matrix:
service: [backend, app]
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
install: true
- name: Log in to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Extract version from release tag
if: github.event_name == 'release'
id: version
run: echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
- name: Build and push Docker image for ${{ matrix.service }}
if: github.ref == 'refs/heads/master' && github.event_name == 'push'
run: |
docker buildx create --use
if [[ "${{ matrix.service }}" == "backend" ]]; then \
DOCKERFILE=backend.dockerfile; \
IMAGE_NAME=perplexica-backend; \
else \
DOCKERFILE=app.dockerfile; \
IMAGE_NAME=perplexica-frontend; \
fi
docker buildx build --platform linux/amd64,linux/arm64 \
--cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:main \
--cache-to=type=inline \
-f $DOCKERFILE \
-t itzcrazykns1337/${IMAGE_NAME}:main \
--push .
- name: Build and push release Docker image for ${{ matrix.service }}
if: github.event_name == 'release'
run: |
docker buildx create --use
if [[ "${{ matrix.service }}" == "backend" ]]; then \
DOCKERFILE=backend.dockerfile; \
IMAGE_NAME=perplexica-backend; \
else \
DOCKERFILE=app.dockerfile; \
IMAGE_NAME=perplexica-frontend; \
fi
docker buildx build --platform linux/amd64,linux/arm64 \
--cache-from=type=registry,ref=itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }} \
--cache-to=type=inline \
-f $DOCKERFILE \
-t itzcrazykns1337/${IMAGE_NAME}:${{ env.RELEASE_VERSION }} \
--push .

3
.gitignore vendored
View File

@ -35,4 +35,5 @@ logs/
Thumbs.db
# Db
db.sqlite
db.sqlite
/searxng

View File

@ -1,7 +1,7 @@
FROM node:alpine
ARG NEXT_PUBLIC_WS_URL
ARG NEXT_PUBLIC_API_URL
ARG NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001
ARG NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api
ENV NEXT_PUBLIC_WS_URL=${NEXT_PUBLIC_WS_URL}
ENV NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL}
@ -9,7 +9,7 @@ WORKDIR /home/perplexica
COPY ui /home/perplexica/
RUN yarn install
RUN yarn install --frozen-lockfile
RUN yarn build
CMD ["yarn", "start"]

View File

@ -1,20 +1,16 @@
FROM node:slim
ARG SEARXNG_API_URL
ENV SEARXNG_API_URL=${SEARXNG_API_URL}
WORKDIR /home/perplexica
COPY src /home/perplexica/src
COPY tsconfig.json /home/perplexica/
COPY config.toml /home/perplexica/
COPY drizzle.config.ts /home/perplexica/
COPY package.json /home/perplexica/
COPY yarn.lock /home/perplexica/
RUN mkdir /home/perplexica/data
RUN yarn install
RUN yarn install --frozen-lockfile
RUN yarn build
CMD ["yarn", "start"]

View File

@ -13,8 +13,9 @@ services:
build:
context: .
dockerfile: backend.dockerfile
args:
- SEARXNG_API_URL=http://searxng:8080
image: itzcrazykns1337/perplexica-backend:main
environment:
- SEARXNG_API_URL=http://searxng:8080
depends_on:
- searxng
ports:
@ -35,6 +36,7 @@ services:
args:
- NEXT_PUBLIC_API_URL=http://127.0.0.1:3001/api
- NEXT_PUBLIC_WS_URL=ws://127.0.0.1:3001
image: itzcrazykns1337/perplexica-frontend:main
depends_on:
- perplexica-backend
ports:

View File

@ -6,7 +6,9 @@ Perplexica's Search API makes it easy to use our AI-powered search engine. You
## Endpoint
### **POST** `/api/search`
### **POST** `http://localhost:3001/api/search`
**Note**: Replace `3001` with any other port if you've changed the default PORT
### Request
@ -24,15 +26,19 @@ The API accepts a JSON object in the request body, where you define the focus mo
"provider": "openai",
"model": "text-embedding-3-large"
},
"optimizationMode": "speed",
"focusMode": "webSearch",
"query": "What is Perplexica",
"history": []
"history": [
["human", "Hi, how are you?"],
["assistant", "I am doing well, how can I help you today?"]
]
}
```
### Request Parameters
- **`chatModel`** (object, optional): Defines the chat model to be used for the query.
- **`chatModel`** (object, optional): Defines the chat model to be used for the query. For model details, send a GET request to `http://localhost:3001/api/models`. Make sure to use the key value (for example, "gpt-4o-mini" rather than the display name "GPT 4 omni mini").
- `provider`: Specifies the provider for the chat model (e.g., `openai`, `ollama`).
- `model`: The specific model from the chosen provider (e.g., `gpt-4o-mini`).
@ -40,7 +46,7 @@ The API accepts a JSON object in the request body, where you define the focus mo
- `customOpenAIBaseURL`: If you're using a custom OpenAI instance, provide the base URL.
- `customOpenAIKey`: The API key for a custom OpenAI instance.
- **`embeddingModel`** (object, optional): Defines the embedding model for similarity-based searching.
- **`embeddingModel`** (object, optional): Defines the embedding model for similarity-based searching. For model details, send a GET request to `http://localhost:3001/api/models`. Make sure to use the key value (for example, "text-embedding-3-large" rather than the display name "Text Embedding 3 Large").
- `provider`: The provider for the embedding model (e.g., `openai`).
- `model`: The specific embedding model (e.g., `text-embedding-3-large`).
@ -49,9 +55,15 @@ The API accepts a JSON object in the request body, where you define the focus mo
- `webSearch`, `academicSearch`, `writingAssistant`, `wolframAlphaSearch`, `youtubeSearch`, `redditSearch`.
- **`optimizationMode`** (string, optional): Specifies the optimization mode to control the balance between performance and quality. Available modes:
- `speed`: Prioritize speed and return the fastest answer.
- `balanced`: Provide a balanced answer with good speed and reasonable quality.
- **`query`** (string, required): The search query or question.
- **`history`** (array, optional): An array of message pairs representing the conversation history. Each pair consists of a role (either 'human' or 'assistant') and the message content. This allows the system to use the context of the conversation to refine results. Example:
```json
[
["human", "What is Perplexica?"],

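To make the request format above concrete, here is a minimal TypeScript sketch of a client call to the endpoint documented in this file. The field names and example values come from the request body and parameter list shown above; the helper name `search` is illustrative, the response shape is not part of this excerpt (so the parsed JSON is returned as-is), and a runtime with a global `fetch` (e.g. Node 18+) is assumed.

```ts
// Illustrative client for POST /api/search, based on the request body
// documented above. The response type is left as `unknown` because the
// response format is not part of this excerpt.
type HistoryPair = ['human' | 'assistant', string];

interface SearchRequest {
  chatModel?: { provider: string; model: string };
  embeddingModel?: { provider: string; model: string };
  optimizationMode?: 'speed' | 'balanced';
  focusMode: string;
  query: string;
  history?: HistoryPair[];
}

async function search(query: string, history: HistoryPair[] = []): Promise<unknown> {
  const body: SearchRequest = {
    chatModel: { provider: 'openai', model: 'gpt-4o-mini' },
    embeddingModel: { provider: 'openai', model: 'text-embedding-3-large' },
    optimizationMode: 'speed',
    focusMode: 'webSearch',
    query,
    history,
  };

  const res = await fetch('http://localhost:3001/api/search', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  });
  if (!res.ok) {
    throw new Error(`Search request failed with status ${res.status}`);
  }
  return res.json();
}

// Example usage with a short conversation history.
search('What is Perplexica', [
  ['human', 'Hi, how are you?'],
  ['assistant', 'I am doing well, how can I help you today?'],
]).then(console.log, console.error);
```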
View File

@ -10,15 +10,21 @@ To update Perplexica to the latest version, follow these steps:
git clone https://github.com/ItzCrazyKns/Perplexica.git
```
2. Navigate to the Project Directory
2. Navigate to the Project Directory.
3. Update and Rebuild Docker Containers:
3. Pull the latest images from the registry.
```bash
docker compose up -d --build
docker compose pull
```
4. Once the command completes running go to http://localhost:3000 and verify the latest changes.
4. Update and Recreate containers.
```bash
docker compose up -d
```
5. Once the command completes, go to http://localhost:3000 and verify the latest changes.
## For non-Docker users

View File

@ -118,7 +118,6 @@ const createBasicAcademicSearchRetrieverChain = (llm: BaseChatModel) => {
engines: [
'arxiv',
'google scholar',
'internetarchivescholar',
'pubmed',
],
});
@ -143,6 +142,7 @@ const createBasicAcademicSearchRetrieverChain = (llm: BaseChatModel) => {
const createBasicAcademicSearchAnsweringChain = (
llm: BaseChatModel,
embeddings: Embeddings,
optimizationMode: 'speed' | 'balanced' | 'quality',
) => {
const basicAcademicSearchRetrieverChain =
createBasicAcademicSearchRetrieverChain(llm);
@ -168,26 +168,33 @@ const createBasicAcademicSearchAnsweringChain = (
(doc) => doc.pageContent && doc.pageContent.length > 0,
);
const [docEmbeddings, queryEmbedding] = await Promise.all([
embeddings.embedDocuments(docsWithContent.map((doc) => doc.pageContent)),
embeddings.embedQuery(query),
]);
if (optimizationMode === 'speed') {
return docsWithContent.slice(0, 15);
} else if (optimizationMode === 'balanced') {
console.log('Balanced mode');
const [docEmbeddings, queryEmbedding] = await Promise.all([
embeddings.embedDocuments(
docsWithContent.map((doc) => doc.pageContent),
),
embeddings.embedQuery(query),
]);
const similarity = docEmbeddings.map((docEmbedding, i) => {
const sim = computeSimilarity(queryEmbedding, docEmbedding);
const similarity = docEmbeddings.map((docEmbedding, i) => {
const sim = computeSimilarity(queryEmbedding, docEmbedding);
return {
index: i,
similarity: sim,
};
});
return {
index: i,
similarity: sim,
};
});
const sortedDocs = similarity
.sort((a, b) => b.similarity - a.similarity)
.slice(0, 15)
.map((sim) => docsWithContent[sim.index]);
const sortedDocs = similarity
.sort((a, b) => b.similarity - a.similarity)
.slice(0, 15)
.map((sim) => docsWithContent[sim.index]);
return sortedDocs;
return sortedDocs;
}
};
return RunnableSequence.from([
@ -224,12 +231,17 @@ const basicAcademicSearch = (
history: BaseMessage[],
llm: BaseChatModel,
embeddings: Embeddings,
optimizationMode: 'speed' | 'balanced' | 'quality',
) => {
const emitter = new eventEmitter();
try {
const basicAcademicSearchAnsweringChain =
createBasicAcademicSearchAnsweringChain(llm, embeddings);
createBasicAcademicSearchAnsweringChain(
llm,
embeddings,
optimizationMode,
);
const stream = basicAcademicSearchAnsweringChain.streamEvents(
{
@ -258,8 +270,15 @@ const handleAcademicSearch = (
history: BaseMessage[],
llm: BaseChatModel,
embeddings: Embeddings,
optimizationMode: 'speed' | 'balanced' | 'quality',
) => {
const emitter = basicAcademicSearch(message, history, llm, embeddings);
const emitter = basicAcademicSearch(
message,
history,
llm,
embeddings,
optimizationMode,
);
return emitter;
};

View File

@ -138,6 +138,7 @@ const createBasicRedditSearchRetrieverChain = (llm: BaseChatModel) => {
const createBasicRedditSearchAnsweringChain = (
llm: BaseChatModel,
embeddings: Embeddings,
optimizationMode: 'speed' | 'balanced' | 'quality',
) => {
const basicRedditSearchRetrieverChain =
createBasicRedditSearchRetrieverChain(llm);
@ -163,27 +164,33 @@ const createBasicRedditSearchAnsweringChain = (
(doc) => doc.pageContent && doc.pageContent.length > 0,
);
const [docEmbeddings, queryEmbedding] = await Promise.all([
embeddings.embedDocuments(docsWithContent.map((doc) => doc.pageContent)),
embeddings.embedQuery(query),
]);
if (optimizationMode === 'speed') {
return docsWithContent.slice(0, 15);
} else if (optimizationMode === 'balanced') {
const [docEmbeddings, queryEmbedding] = await Promise.all([
embeddings.embedDocuments(
docsWithContent.map((doc) => doc.pageContent),
),
embeddings.embedQuery(query),
]);
const similarity = docEmbeddings.map((docEmbedding, i) => {
const sim = computeSimilarity(queryEmbedding, docEmbedding);
const similarity = docEmbeddings.map((docEmbedding, i) => {
const sim = computeSimilarity(queryEmbedding, docEmbedding);
return {
index: i,
similarity: sim,
};
});
return {
index: i,
similarity: sim,
};
});
const sortedDocs = similarity
.filter((sim) => sim.similarity > 0.3)
.sort((a, b) => b.similarity - a.similarity)
.slice(0, 15)
.map((sim) => docsWithContent[sim.index]);
const sortedDocs = similarity
.filter((sim) => sim.similarity > 0.3)
.sort((a, b) => b.similarity - a.similarity)
.slice(0, 15)
.map((sim) => docsWithContent[sim.index]);
return sortedDocs;
return sortedDocs;
}
};
return RunnableSequence.from([
@ -220,12 +227,13 @@ const basicRedditSearch = (
history: BaseMessage[],
llm: BaseChatModel,
embeddings: Embeddings,
optimizationMode: 'speed' | 'balanced' | 'quality',
) => {
const emitter = new eventEmitter();
try {
const basicRedditSearchAnsweringChain =
createBasicRedditSearchAnsweringChain(llm, embeddings);
createBasicRedditSearchAnsweringChain(llm, embeddings, optimizationMode);
const stream = basicRedditSearchAnsweringChain.streamEvents(
{
chat_history: history,
@ -253,8 +261,15 @@ const handleRedditSearch = (
history: BaseMessage[],
llm: BaseChatModel,
embeddings: Embeddings,
optimizationMode: 'speed' | 'balanced' | 'quality',
) => {
const emitter = basicRedditSearch(message, history, llm, embeddings);
const emitter = basicRedditSearch(
message,
history,
llm,
embeddings,
optimizationMode,
);
return emitter;
};

View File

@ -216,12 +216,34 @@ const createBasicWebSearchRetrieverChain = (llm: BaseChatModel) => {
await Promise.all(
docGroups.map(async (doc) => {
const res = await llm.invoke(`
You are a text summarizer. You need to summarize the text provided inside the \`text\` XML block.
You need to summarize the text into 1 or 2 sentences capturing the main idea of the text.
You need to make sure that you don't miss any point while summarizing the text.
You will also be given a \`query\` XML block which will contain the query of the user. Try to answer the query in the summary from the text provided.
If the query says Summarize then you just need to summarize the text without answering the query.
Only return the summarized text without any other messages, text or XML block.
You are a web search summarizer, tasked with summarizing a piece of text retrieved from a web search. Your job is to summarize the
text into a detailed, 2-4 paragraph explanation that captures the main ideas and provides a comprehensive answer to the query.
If the query is \"summarize\", you should provide a detailed summary of the text. If the query is a specific question, you should answer it in the summary.
- **Journalistic tone**: The summary should sound professional and journalistic, not too casual or vague.
- **Thorough and detailed**: Ensure that every key point from the text is captured and that the summary directly answers the query.
- **Not too lengthy, but detailed**: The summary should be informative but not excessively long. Focus on providing detailed information in a concise format.
The text will be shared inside the \`text\` XML tag, and the query inside the \`query\` XML tag.
<example>
<text>
Docker is a set of platform-as-a-service products that use OS-level virtualization to deliver software in packages called containers.
It was first released in 2013 and is developed by Docker, Inc. Docker is designed to make it easier to create, deploy, and run applications
by using containers.
</text>
<query>
What is Docker and how does it work?
</query>
Response:
Docker is a revolutionary platform-as-a-service product developed by Docker, Inc., that uses container technology to make application
deployment more efficient. It allows developers to package their software with all necessary dependencies, making it easier to run in
any environment. Released in 2013, Docker has transformed the way applications are built, deployed, and managed.
</example>
Everything below is the actual data you will be working with. Good luck!
<query>
${question}
@ -273,6 +295,7 @@ const createBasicWebSearchRetrieverChain = (llm: BaseChatModel) => {
const createBasicWebSearchAnsweringChain = (
llm: BaseChatModel,
embeddings: Embeddings,
optimizationMode: 'speed' | 'balanced' | 'quality',
) => {
const basicWebSearchRetrieverChain = createBasicWebSearchRetrieverChain(llm);
@ -301,27 +324,33 @@ const createBasicWebSearchAnsweringChain = (
(doc) => doc.pageContent && doc.pageContent.length > 0,
);
const [docEmbeddings, queryEmbedding] = await Promise.all([
embeddings.embedDocuments(docsWithContent.map((doc) => doc.pageContent)),
embeddings.embedQuery(query),
]);
if (optimizationMode === 'speed') {
return docsWithContent.slice(0, 15);
} else if (optimizationMode === 'balanced') {
const [docEmbeddings, queryEmbedding] = await Promise.all([
embeddings.embedDocuments(
docsWithContent.map((doc) => doc.pageContent),
),
embeddings.embedQuery(query),
]);
const similarity = docEmbeddings.map((docEmbedding, i) => {
const sim = computeSimilarity(queryEmbedding, docEmbedding);
const similarity = docEmbeddings.map((docEmbedding, i) => {
const sim = computeSimilarity(queryEmbedding, docEmbedding);
return {
index: i,
similarity: sim,
};
});
return {
index: i,
similarity: sim,
};
});
const sortedDocs = similarity
.filter((sim) => sim.similarity > 0.3)
.sort((a, b) => b.similarity - a.similarity)
.slice(0, 15)
.map((sim) => docsWithContent[sim.index]);
const sortedDocs = similarity
.filter((sim) => sim.similarity > 0.3)
.sort((a, b) => b.similarity - a.similarity)
.slice(0, 15)
.map((sim) => docsWithContent[sim.index]);
return sortedDocs;
return sortedDocs;
}
};
return RunnableSequence.from([
@ -358,6 +387,7 @@ const basicWebSearch = (
history: BaseMessage[],
llm: BaseChatModel,
embeddings: Embeddings,
optimizationMode: 'speed' | 'balanced' | 'quality',
) => {
const emitter = new eventEmitter();
@ -365,6 +395,7 @@ const basicWebSearch = (
const basicWebSearchAnsweringChain = createBasicWebSearchAnsweringChain(
llm,
embeddings,
optimizationMode,
);
const stream = basicWebSearchAnsweringChain.streamEvents(
@ -394,8 +425,15 @@ const handleWebSearch = (
history: BaseMessage[],
llm: BaseChatModel,
embeddings: Embeddings,
optimizationMode: 'speed' | 'balanced' | 'quality',
) => {
const emitter = basicWebSearch(message, history, llm, embeddings);
const emitter = basicWebSearch(
message,
history,
llm,
embeddings,
optimizationMode,
);
return emitter;
};

View File

@ -138,6 +138,7 @@ const createBasicYoutubeSearchRetrieverChain = (llm: BaseChatModel) => {
const createBasicYoutubeSearchAnsweringChain = (
llm: BaseChatModel,
embeddings: Embeddings,
optimizationMode: 'speed' | 'balanced' | 'quality',
) => {
const basicYoutubeSearchRetrieverChain =
createBasicYoutubeSearchRetrieverChain(llm);
@ -163,27 +164,33 @@ const createBasicYoutubeSearchAnsweringChain = (
(doc) => doc.pageContent && doc.pageContent.length > 0,
);
const [docEmbeddings, queryEmbedding] = await Promise.all([
embeddings.embedDocuments(docsWithContent.map((doc) => doc.pageContent)),
embeddings.embedQuery(query),
]);
if (optimizationMode === 'speed') {
return docsWithContent.slice(0, 15);
} else {
const [docEmbeddings, queryEmbedding] = await Promise.all([
embeddings.embedDocuments(
docsWithContent.map((doc) => doc.pageContent),
),
embeddings.embedQuery(query),
]);
const similarity = docEmbeddings.map((docEmbedding, i) => {
const sim = computeSimilarity(queryEmbedding, docEmbedding);
const similarity = docEmbeddings.map((docEmbedding, i) => {
const sim = computeSimilarity(queryEmbedding, docEmbedding);
return {
index: i,
similarity: sim,
};
});
return {
index: i,
similarity: sim,
};
});
const sortedDocs = similarity
.filter((sim) => sim.similarity > 0.3)
.sort((a, b) => b.similarity - a.similarity)
.slice(0, 15)
.map((sim) => docsWithContent[sim.index]);
const sortedDocs = similarity
.filter((sim) => sim.similarity > 0.3)
.sort((a, b) => b.similarity - a.similarity)
.slice(0, 15)
.map((sim) => docsWithContent[sim.index]);
return sortedDocs;
return sortedDocs;
}
};
return RunnableSequence.from([
@ -220,12 +227,13 @@ const basicYoutubeSearch = (
history: BaseMessage[],
llm: BaseChatModel,
embeddings: Embeddings,
optimizationMode: 'speed' | 'balanced' | 'quality',
) => {
const emitter = new eventEmitter();
try {
const basicYoutubeSearchAnsweringChain =
createBasicYoutubeSearchAnsweringChain(llm, embeddings);
createBasicYoutubeSearchAnsweringChain(llm, embeddings, optimizationMode);
const stream = basicYoutubeSearchAnsweringChain.streamEvents(
{
@ -254,8 +262,15 @@ const handleYoutubeSearch = (
history: BaseMessage[],
llm: BaseChatModel,
embeddings: Embeddings,
optimizationMode: 'speed' | 'balanced' | 'quality',
) => {
const emitter = basicYoutubeSearch(message, history, llm, embeddings);
const emitter = basicYoutubeSearch(
message,
history,
llm,
embeddings,
optimizationMode,
);
return emitter;
};

View File

@ -9,6 +9,45 @@ export const loadGroqChatModels = async () => {
try {
const chatModels = {
'llama-3.2-3b-preview': {
displayName: 'Llama 3.2 3B',
model: new ChatOpenAI(
{
openAIApiKey: groqApiKey,
modelName: 'llama-3.2-3b-preview',
temperature: 0.7,
},
{
baseURL: 'https://api.groq.com/openai/v1',
},
),
},
'llama-3.2-11b-text-preview': {
displayName: 'Llama 3.2 11B Text',
model: new ChatOpenAI(
{
openAIApiKey: groqApiKey,
modelName: 'llama-3.2-11b-text-preview',
temperature: 0.7,
},
{
baseURL: 'https://api.groq.com/openai/v1',
},
),
},
'llama-3.2-90b-text-preview': {
displayName: 'Llama 3.2 90B Text',
model: new ChatOpenAI(
{
openAIApiKey: groqApiKey,
modelName: 'llama-3.2-90b-text-preview',
temperature: 0.7,
},
{
baseURL: 'https://api.groq.com/openai/v1',
},
),
},
'llama-3.1-70b-versatile': {
displayName: 'Llama 3.1 70B',
model: new ChatOpenAI(

View File

@ -12,7 +12,19 @@ router.get('/', async (req, res) => {
const [chatModelProviders, embeddingModelProviders] = await Promise.all([
getAvailableChatModelProviders(),
getAvailableEmbeddingModelProviders(),
]);
]);
Object.keys(chatModelProviders).forEach((provider) => {
Object.keys(chatModelProviders[provider]).forEach((model) => {
delete chatModelProviders[provider][model].model;
});
});
Object.keys(embeddingModelProviders).forEach((provider) => {
Object.keys(embeddingModelProviders[provider]).forEach((model) => {
delete embeddingModelProviders[provider][model].model;
});
});
res.status(200).json({ chatModelProviders, embeddingModelProviders });
} catch (err) {

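The hunk above strips the runtime `model` instances from the provider maps before they are serialized, so `/api/models` returns plain JSON keyed by provider and model id. As a rough illustration (the two top-level keys come from the route above; the per-model `displayName` field is an assumption based on the provider definitions elsewhere in this diff), a client could enumerate the usable model keys like this:

```ts
// Illustrative client for GET /api/models. The model keys printed here are
// the values expected by /api/search, not the human-readable display names.
type ProviderMap = Record<string, Record<string, { displayName?: string }>>;

interface ModelsResponse {
  chatModelProviders: ProviderMap;
  embeddingModelProviders: ProviderMap;
}

async function listModelKeys(): Promise<void> {
  const res = await fetch('http://localhost:3001/api/models');
  if (!res.ok) {
    throw new Error(`Models request failed with status ${res.status}`);
  }
  const data = (await res.json()) as ModelsResponse;

  for (const [provider, models] of Object.entries(data.chatModelProviders)) {
    console.log(provider, Object.keys(models));
  }
}

listModelKeys().catch(console.error);
```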
View File

@ -25,6 +25,7 @@ interface embeddingModel {
}
interface ChatRequestBody {
optimizationMode: 'speed' | 'balanced';
focusMode: string;
chatModel?: chatModel;
embeddingModel?: embeddingModel;
@ -41,6 +42,7 @@ router.post('/', async (req, res) => {
}
body.history = body.history || [];
body.optimizationMode = body.optimizationMode || 'balanced';
const history: BaseMessage[] = body.history.map((msg) => {
if (msg[0] === 'human') {
@ -119,7 +121,7 @@ router.post('/', async (req, res) => {
return res.status(400).json({ message: 'Invalid focus mode' });
}
const emitter = searchHandler(body.query, history, llm, embeddings);
const emitter = searchHandler(body.query, history, llm, embeddings, body.optimizationMode);
let message = '';
let sources = [];

View File

@ -22,7 +22,7 @@ type Message = {
type WSMessage = {
message: Message;
copilot: boolean;
optimizationMode: string;
type: string;
focusMode: string;
history: Array<[string, string]>;
@ -138,6 +138,7 @@ export const handleMessage = async (
history,
llm,
embeddings,
parsedWSMessage.optimizationMode,
);
handleEmitterEvents(emitter, ws, id, parsedMessage.chatId);

View File

@ -315,6 +315,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
const [messages, setMessages] = useState<Message[]>([]);
const [focusMode, setFocusMode] = useState('webSearch');
const [optimizationMode, setOptimizationMode] = useState('speed');
const [isMessagesLoaded, setIsMessagesLoaded] = useState(false);
@ -386,6 +387,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
content: message,
},
focusMode: focusMode,
optimizationMode: optimizationMode,
history: [...chatHistory, ['human', message]],
}),
);
@ -548,6 +550,8 @@ const ChatWindow = ({ id }: { id?: string }) => {
sendMessage={sendMessage}
focusMode={focusMode}
setFocusMode={setFocusMode}
optimizationMode={optimizationMode}
setOptimizationMode={setOptimizationMode}
/>
)}
</div>

View File

@ -1,5 +1,13 @@
import { Delete, Trash } from 'lucide-react';
import { Dialog, Transition } from '@headlessui/react';
import { Trash } from 'lucide-react';
import {
Description,
Dialog,
DialogBackdrop,
DialogPanel,
DialogTitle,
Transition,
TransitionChild,
} from '@headlessui/react';
import { Fragment, useState } from 'react';
import { toast } from 'sonner';
import { Chat } from '@/app/library/page';
@ -64,10 +72,10 @@ const DeleteChat = ({
}
}}
>
<Dialog.Backdrop className="fixed inset-0 bg-black/30" />
<DialogBackdrop className="fixed inset-0 bg-black/30" />
<div className="fixed inset-0 overflow-y-auto">
<div className="flex min-h-full items-center justify-center p-4 text-center">
<Transition.Child
<TransitionChild
as={Fragment}
enter="ease-out duration-200"
enterFrom="opacity-0 scale-95"
@ -76,13 +84,13 @@ const DeleteChat = ({
leaveFrom="opacity-100 scale-200"
leaveTo="opacity-0 scale-95"
>
<Dialog.Panel className="w-full max-w-md transform rounded-2xl bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 p-6 text-left align-middle shadow-xl transition-all">
<Dialog.Title className="text-lg font-medium leading-6 dark:text-white">
<DialogPanel className="w-full max-w-md transform rounded-2xl bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 p-6 text-left align-middle shadow-xl transition-all">
<DialogTitle className="text-lg font-medium leading-6 dark:text-white">
Delete Confirmation
</Dialog.Title>
<Dialog.Description className="text-sm dark:text-white/70 text-black/70">
</DialogTitle>
<Description className="text-sm dark:text-white/70 text-black/70">
Are you sure you want to delete this chat?
</Dialog.Description>
</Description>
<div className="flex flex-row items-end justify-end space-x-4 mt-6">
<button
onClick={() => {
@ -101,8 +109,8 @@ const DeleteChat = ({
Delete
</button>
</div>
</Dialog.Panel>
</Transition.Child>
</DialogPanel>
</TransitionChild>
</div>
</div>
</Dialog>

View File

@ -4,10 +4,14 @@ const EmptyChat = ({
sendMessage,
focusMode,
setFocusMode,
optimizationMode,
setOptimizationMode,
}: {
sendMessage: (message: string) => void;
focusMode: string;
setFocusMode: (mode: string) => void;
optimizationMode: string;
setOptimizationMode: (mode: string) => void;
}) => {
return (
<div className="relative">
@ -19,6 +23,8 @@ const EmptyChat = ({
sendMessage={sendMessage}
focusMode={focusMode}
setFocusMode={setFocusMode}
optimizationMode={optimizationMode}
setOptimizationMode={setOptimizationMode}
/>
</div>
</div>

View File

@ -3,15 +3,20 @@ import { useEffect, useRef, useState } from 'react';
import TextareaAutosize from 'react-textarea-autosize';
import CopilotToggle from './MessageInputActions/Copilot';
import Focus from './MessageInputActions/Focus';
import Optimization from './MessageInputActions/Optimization';
const EmptyChatMessageInput = ({
sendMessage,
focusMode,
setFocusMode,
optimizationMode,
setOptimizationMode,
}: {
sendMessage: (message: string) => void;
focusMode: string;
setFocusMode: (mode: string) => void;
optimizationMode: string;
setOptimizationMode: (mode: string) => void;
}) => {
const [copilotEnabled, setCopilotEnabled] = useState(false);
const [message, setMessage] = useState('');
@ -66,14 +71,13 @@ const EmptyChatMessageInput = ({
placeholder="Ask anything..."
/>
<div className="flex flex-row items-center justify-between mt-4">
<div className="flex flex-row items-center space-x-1 -mx-2">
<div className="flex flex-row items-center space-x-4">
<Focus focusMode={focusMode} setFocusMode={setFocusMode} />
{/* <Attach /> */}
</div>
<div className="flex flex-row items-center space-x-4 -mx-2">
<CopilotToggle
copilotEnabled={copilotEnabled}
setCopilotEnabled={setCopilotEnabled}
<div className="flex flex-row items-center space-x-1 sm:space-x-4">
<Optimization
optimizationMode={optimizationMode}
setOptimizationMode={setOptimizationMode}
/>
<button
disabled={message.trim().length === 0}

View File

@ -7,7 +7,12 @@ import {
SwatchBook,
} from 'lucide-react';
import { cn } from '@/lib/utils';
import { Popover, Transition } from '@headlessui/react';
import {
Popover,
PopoverButton,
PopoverPanel,
Transition,
} from '@headlessui/react';
import { SiReddit, SiYoutube } from '@icons-pack/react-simple-icons';
import { Fragment } from 'react';
@ -70,10 +75,10 @@ const Focus = ({
setFocusMode: (mode: string) => void;
}) => {
return (
<Popover className="fixed w-full max-w-[15rem] md:max-w-md lg:max-w-lg">
<Popover.Button
<Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg">
<PopoverButton
type="button"
className="p-2 text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
className=" text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
>
{focusMode !== 'webSearch' ? (
<div className="flex flex-row items-center space-x-1">
@ -86,7 +91,7 @@ const Focus = ({
) : (
<ScanEye />
)}
</Popover.Button>
</PopoverButton>
<Transition
as={Fragment}
enter="transition ease-out duration-150"
@ -96,10 +101,10 @@ const Focus = ({
leaveFrom="opacity-100 translate-y-0"
leaveTo="opacity-0 translate-y-1"
>
<Popover.Panel className="absolute z-10 w-full">
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-1 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full p-2 max-h-[200px] md:max-h-none overflow-y-auto">
<PopoverPanel className="absolute z-10 w-64 md:w-[500px] left-0">
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full p-4 max-h-[200px] md:max-h-none overflow-y-auto">
{focusModes.map((mode, i) => (
<Popover.Button
<PopoverButton
onClick={() => setFocusMode(mode.key)}
key={i}
className={cn(
@ -123,10 +128,10 @@ const Focus = ({
<p className="text-black/70 dark:text-white/70 text-xs">
{mode.description}
</p>
</Popover.Button>
</PopoverButton>
))}
</div>
</Popover.Panel>
</PopoverPanel>
</Transition>
</Popover>
);

View File

@ -0,0 +1,104 @@
import { ChevronDown, Sliders, Star, Zap } from 'lucide-react';
import { cn } from '@/lib/utils';
import {
Popover,
PopoverButton,
PopoverPanel,
Transition,
} from '@headlessui/react';
import { Fragment } from 'react';
const OptimizationModes = [
{
key: 'speed',
title: 'Speed',
description: 'Prioritize speed and get the quickest possible answer.',
icon: <Zap size={20} className="text-[#FF9800]" />,
},
{
key: 'balanced',
title: 'Balanced',
description: 'Find the right balance between speed and accuracy',
icon: <Sliders size={20} className="text-[#4CAF50]" />,
},
{
key: 'quality',
title: 'Quality (Soon)',
description: 'Get the most thorough and accurate answer',
icon: (
<Star
size={16}
className="text-[#2196F3] dark:text-[#BBDEFB] fill-[#BBDEFB] dark:fill-[#2196F3]"
/>
),
},
];
const Optimization = ({
optimizationMode,
setOptimizationMode,
}: {
optimizationMode: string;
setOptimizationMode: (mode: string) => void;
}) => {
return (
<Popover className="relative w-full max-w-[15rem] md:max-w-md lg:max-w-lg">
<PopoverButton
type="button"
className="p-2 text-black/50 dark:text-white/50 rounded-xl hover:bg-light-secondary dark:hover:bg-dark-secondary active:scale-95 transition duration-200 hover:text-black dark:hover:text-white"
>
<div className="flex flex-row items-center space-x-1">
{
OptimizationModes.find((mode) => mode.key === optimizationMode)
?.icon
}
<p className="text-xs font-medium">
{
OptimizationModes.find((mode) => mode.key === optimizationMode)
?.title
}
</p>
<ChevronDown size={20} />
</div>
</PopoverButton>
<Transition
as={Fragment}
enter="transition ease-out duration-150"
enterFrom="opacity-0 translate-y-1"
enterTo="opacity-100 translate-y-0"
leave="transition ease-in duration-150"
leaveFrom="opacity-100 translate-y-0"
leaveTo="opacity-0 translate-y-1"
>
<PopoverPanel className="absolute z-10 w-64 md:w-[250px] right-0">
<div className="flex flex-col gap-2 bg-light-primary dark:bg-dark-primary border rounded-lg border-light-200 dark:border-dark-200 w-full p-4 max-h-[200px] md:max-h-none overflow-y-auto">
{OptimizationModes.map((mode, i) => (
<PopoverButton
onClick={() => setOptimizationMode(mode.key)}
key={i}
disabled={mode.key === 'quality'}
className={cn(
'p-2 rounded-lg flex flex-col items-start justify-start text-start space-y-1 duration-200 cursor-pointer transition',
optimizationMode === mode.key
? 'bg-light-secondary dark:bg-dark-secondary'
: 'hover:bg-light-secondary dark:hover:bg-dark-secondary',
mode.key === 'quality' && 'opacity-50 cursor-not-allowed',
)}
>
<div className="flex flex-row items-center space-x-1 text-black dark:text-white">
{mode.icon}
<p className="text-sm font-medium">{mode.title}</p>
</div>
<p className="text-black/70 dark:text-white/70 text-xs">
{mode.description}
</p>
</PopoverButton>
))}
</div>
</PopoverPanel>
</Transition>
</Popover>
);
};
export default Optimization;

View File

@ -1,5 +1,11 @@
/* eslint-disable @next/next/no-img-element */
import { Dialog, Transition } from '@headlessui/react';
import {
Dialog,
DialogPanel,
DialogTitle,
Transition,
TransitionChild,
} from '@headlessui/react';
import { Document } from '@langchain/core/documents';
import { Fragment, useState } from 'react';
@ -74,7 +80,7 @@ const MessageSources = ({ sources }: { sources: Document[] }) => {
<Dialog as="div" className="relative z-50" onClose={closeModal}>
<div className="fixed inset-0 overflow-y-auto">
<div className="flex min-h-full items-center justify-center p-4 text-center">
<Transition.Child
<TransitionChild
as={Fragment}
enter="ease-out duration-200"
enterFrom="opacity-0 scale-95"
@ -83,10 +89,10 @@ const MessageSources = ({ sources }: { sources: Document[] }) => {
leaveFrom="opacity-100 scale-200"
leaveTo="opacity-0 scale-95"
>
<Dialog.Panel className="w-full max-w-md transform rounded-2xl bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 p-6 text-left align-middle shadow-xl transition-all">
<Dialog.Title className="text-lg font-medium leading-6 dark:text-white">
<DialogPanel className="w-full max-w-md transform rounded-2xl bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 p-6 text-left align-middle shadow-xl transition-all">
<DialogTitle className="text-lg font-medium leading-6 dark:text-white">
Sources
</Dialog.Title>
</DialogTitle>
<div className="grid grid-cols-2 gap-2 overflow-auto max-h-[300px] mt-2 pr-2">
{sources.map((source, i) => (
<a
@ -122,8 +128,8 @@ const MessageSources = ({ sources }: { sources: Document[] }) => {
</a>
))}
</div>
</Dialog.Panel>
</Transition.Child>
</DialogPanel>
</TransitionChild>
</div>
</div>
</Dialog>

View File

@ -1,5 +1,11 @@
import { cn } from '@/lib/utils';
import { Dialog, Transition } from '@headlessui/react';
import {
Dialog,
DialogPanel,
DialogTitle,
Transition,
TransitionChild,
} from '@headlessui/react';
import { CloudUpload, RefreshCcw, RefreshCw } from 'lucide-react';
import React, {
Fragment,
@ -192,7 +198,7 @@ const SettingsDialog = ({
className="relative z-50"
onClose={() => setIsOpen(false)}
>
<Transition.Child
<TransitionChild
as={Fragment}
enter="ease-out duration-300"
enterFrom="opacity-0"
@ -202,10 +208,10 @@ const SettingsDialog = ({
leaveTo="opacity-0"
>
<div className="fixed inset-0 bg-white/50 dark:bg-black/50" />
</Transition.Child>
</TransitionChild>
<div className="fixed inset-0 overflow-y-auto">
<div className="flex min-h-full items-center justify-center p-4 text-center">
<Transition.Child
<TransitionChild
as={Fragment}
enter="ease-out duration-200"
enterFrom="opacity-0 scale-95"
@ -214,10 +220,10 @@ const SettingsDialog = ({
leaveFrom="opacity-100 scale-200"
leaveTo="opacity-0 scale-95"
>
<Dialog.Panel className="w-full max-w-md transform rounded-2xl bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 p-6 text-left align-middle shadow-xl transition-all">
<Dialog.Title className="text-xl font-medium leading-6 dark:text-white">
<DialogPanel className="w-full max-w-md transform rounded-2xl bg-light-secondary dark:bg-dark-secondary border border-light-200 dark:border-dark-200 p-6 text-left align-middle shadow-xl transition-all">
<DialogTitle className="text-xl font-medium leading-6 dark:text-white">
Settings
</Dialog.Title>
</DialogTitle>
{config && !isLoading && (
<div className="flex flex-col space-y-4 mt-6">
<div className="flex flex-col space-y-1">
@ -495,8 +501,8 @@ const SettingsDialog = ({
)}
</button>
</div>
</Dialog.Panel>
</Transition.Child>
</DialogPanel>
</TransitionChild>
</div>
</div>
</Dialog>

View File

@ -11,7 +11,7 @@
"format:write": "prettier . --write"
},
"dependencies": {
"@headlessui/react": "^1.7.18",
"@headlessui/react": "^2.1.9",
"@icons-pack/react-simple-icons": "^9.4.0",
"@langchain/openai": "^0.0.25",
"@tailwindcss/typography": "^0.5.12",

View File

@ -66,13 +66,51 @@
resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.57.0.tgz#a5417ae8427873f1dd08b70b3574b453e67b5f7f"
integrity sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==
"@headlessui/react@^1.7.18":
version "1.7.18"
resolved "https://registry.yarnpkg.com/@headlessui/react/-/react-1.7.18.tgz#30af4634d2215b2ca1aa29d07f33d02bea82d9d7"
integrity sha512-4i5DOrzwN4qSgNsL4Si61VMkUcWbcSKueUV7sFhpHzQcSShdlHENE5+QBntMSRvHt8NyoFO2AGG8si9lq+w4zQ==
"@floating-ui/core@^1.6.0":
version "1.6.8"
resolved "https://registry.yarnpkg.com/@floating-ui/core/-/core-1.6.8.tgz#aa43561be075815879305965020f492cdb43da12"
integrity sha512-7XJ9cPU+yI2QeLS+FCSlqNFZJq8arvswefkZrYI1yQBbftw6FyrZOxYSh+9S7z7TpeWlRt9zJ5IhM1WIL334jA==
dependencies:
"@tanstack/react-virtual" "^3.0.0-beta.60"
client-only "^0.0.1"
"@floating-ui/utils" "^0.2.8"
"@floating-ui/dom@^1.0.0":
version "1.6.11"
resolved "https://registry.yarnpkg.com/@floating-ui/dom/-/dom-1.6.11.tgz#8631857838d34ee5712339eb7cbdfb8ad34da723"
integrity sha512-qkMCxSR24v2vGkhYDo/UzxfJN3D4syqSjyuTFz6C7XcpU1pASPRieNI0Kj5VP3/503mOfYiGY891ugBX1GlABQ==
dependencies:
"@floating-ui/core" "^1.6.0"
"@floating-ui/utils" "^0.2.8"
"@floating-ui/react-dom@^2.1.2":
version "2.1.2"
resolved "https://registry.yarnpkg.com/@floating-ui/react-dom/-/react-dom-2.1.2.tgz#a1349bbf6a0e5cb5ded55d023766f20a4d439a31"
integrity sha512-06okr5cgPzMNBy+Ycse2A6udMi4bqwW/zgBF/rwjcNqWkyr82Mcg8b0vjX8OJpZFy/FKjJmw6wV7t44kK6kW7A==
dependencies:
"@floating-ui/dom" "^1.0.0"
"@floating-ui/react@^0.26.16":
version "0.26.24"
resolved "https://registry.yarnpkg.com/@floating-ui/react/-/react-0.26.24.tgz#072b9dfeca4e79ef4e3000ef1c28e0ffc86f4ed4"
integrity sha512-2ly0pCkZIGEQUq5H8bBK0XJmc1xIK/RM3tvVzY3GBER7IOD1UgmC2Y2tjj4AuS+TC+vTE1KJv2053290jua0Sw==
dependencies:
"@floating-ui/react-dom" "^2.1.2"
"@floating-ui/utils" "^0.2.8"
tabbable "^6.0.0"
"@floating-ui/utils@^0.2.8":
version "0.2.8"
resolved "https://registry.yarnpkg.com/@floating-ui/utils/-/utils-0.2.8.tgz#21a907684723bbbaa5f0974cf7730bd797eb8e62"
integrity sha512-kym7SodPp8/wloecOpcmSnWJsK7M0E5Wg8UcFA+uO4B9s5d0ywXOEro/8HM9x0rW+TljRzul/14UYz3TleT3ig==
"@headlessui/react@^2.1.9":
version "2.1.9"
resolved "https://registry.yarnpkg.com/@headlessui/react/-/react-2.1.9.tgz#d8d3ff64255177a87706cc4f24f42aeac65b1695"
integrity sha512-ckWw7vlKtnoa1fL2X0fx1a3t/Li9MIKDVXn3SgG65YlxvDAsNrY39PPCxVM7sQRA7go2fJsuHSSauKFNaJHH7A==
dependencies:
"@floating-ui/react" "^0.26.16"
"@react-aria/focus" "^3.17.1"
"@react-aria/interactions" "^3.21.3"
"@tanstack/react-virtual" "^3.8.1"
"@humanwhocodes/config-array@^0.11.14":
version "0.11.14"
@ -278,6 +316,57 @@
resolved "https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33"
integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==
"@react-aria/focus@^3.17.1":
version "3.18.3"
resolved "https://registry.yarnpkg.com/@react-aria/focus/-/focus-3.18.3.tgz#4fe32de1e7530beab8da2e7b89f0f17d22a47e5e"
integrity sha512-WKUElg+5zS0D3xlVn8MntNnkzJql2J6MuzAMP8Sv5WTgFDse/XGR842dsxPTIyKKdrWVCRegCuwa4m3n/GzgJw==
dependencies:
"@react-aria/interactions" "^3.22.3"
"@react-aria/utils" "^3.25.3"
"@react-types/shared" "^3.25.0"
"@swc/helpers" "^0.5.0"
clsx "^2.0.0"
"@react-aria/interactions@^3.21.3", "@react-aria/interactions@^3.22.3":
version "3.22.3"
resolved "https://registry.yarnpkg.com/@react-aria/interactions/-/interactions-3.22.3.tgz#3ba50db12f6ed443ae061eed79e41509eaa3d8e6"
integrity sha512-RRUb/aG+P0IKTIWikY/SylB6bIbLZeztnZY2vbe7RAG5MgVaCgn5HQ45SI15GlTmhsFG8CnF6slJsUFJiNHpbQ==
dependencies:
"@react-aria/ssr" "^3.9.6"
"@react-aria/utils" "^3.25.3"
"@react-types/shared" "^3.25.0"
"@swc/helpers" "^0.5.0"
"@react-aria/ssr@^3.9.6":
version "3.9.6"
resolved "https://registry.yarnpkg.com/@react-aria/ssr/-/ssr-3.9.6.tgz#a9e8b351acdc8238f2b5215b0ce904636c6ea690"
integrity sha512-iLo82l82ilMiVGy342SELjshuWottlb5+VefO3jOQqQRNYnJBFpUSadswDPbRimSgJUZuFwIEYs6AabkP038fA==
dependencies:
"@swc/helpers" "^0.5.0"
"@react-aria/utils@^3.25.3":
version "3.25.3"
resolved "https://registry.yarnpkg.com/@react-aria/utils/-/utils-3.25.3.tgz#cad9bffc07b045cdc283df2cb65c18747acbf76d"
integrity sha512-PR5H/2vaD8fSq0H/UB9inNbc8KDcVmW6fYAfSWkkn+OAdhTTMVKqXXrZuZBWyFfSD5Ze7VN6acr4hrOQm2bmrA==
dependencies:
"@react-aria/ssr" "^3.9.6"
"@react-stately/utils" "^3.10.4"
"@react-types/shared" "^3.25.0"
"@swc/helpers" "^0.5.0"
clsx "^2.0.0"
"@react-stately/utils@^3.10.4":
version "3.10.4"
resolved "https://registry.yarnpkg.com/@react-stately/utils/-/utils-3.10.4.tgz#310663a834b67048d305e1680ed258130092fe51"
integrity sha512-gBEQEIMRh5f60KCm7QKQ2WfvhB2gLUr9b72sqUdIZ2EG+xuPgaIlCBeSicvjmjBvYZwOjoOEnmIkcx2GHp/HWw==
dependencies:
"@swc/helpers" "^0.5.0"
"@react-types/shared@^3.25.0":
version "3.25.0"
resolved "https://registry.yarnpkg.com/@react-types/shared/-/shared-3.25.0.tgz#7223baf72256e918a3c29081bb1ecc6fad4fbf58"
integrity sha512-OZSyhzU6vTdW3eV/mz5i6hQwQUhkRs7xwY2d1aqPvTdMe0+2cY7Fwp45PAiwYLEj73i9ro2FxF9qC4DvHGSCgQ==
"@rushstack/eslint-patch@^1.3.3":
version "1.10.1"
resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.10.1.tgz#7ca168b6937818e9a74b47ac4e2112b2e1a024cf"
@ -290,6 +379,13 @@
dependencies:
tslib "^2.4.0"
"@swc/helpers@^0.5.0":
version "0.5.13"
resolved "https://registry.yarnpkg.com/@swc/helpers/-/helpers-0.5.13.tgz#33e63ff3cd0cade557672bd7888a39ce7d115a8c"
integrity sha512-UoKGxQ3r5kYI9dALKJapMmuK+1zWM/H17Z1+iwnNmzcJRnfFuevZs375TA5rW31pu4BS4NoSy1fRsexDXfWn5w==
dependencies:
tslib "^2.4.0"
"@tailwindcss/typography@^0.5.12":
version "0.5.12"
resolved "https://registry.yarnpkg.com/@tailwindcss/typography/-/typography-0.5.12.tgz#c0532fd594427b7f4e8e38eff7bf272c63a1dca4"
@ -300,17 +396,17 @@
lodash.merge "^4.6.2"
postcss-selector-parser "6.0.10"
"@tanstack/react-virtual@^3.0.0-beta.60":
version "3.2.0"
resolved "https://registry.yarnpkg.com/@tanstack/react-virtual/-/react-virtual-3.2.0.tgz#fb70f9c6baee753a5a0f7618ac886205d5a02af9"
integrity sha512-OEdMByf2hEfDa6XDbGlZN8qO6bTjlNKqjM3im9JG+u3mCL8jALy0T/67oDI001raUUPh1Bdmfn4ZvPOV5knpcg==
"@tanstack/react-virtual@^3.8.1":
version "3.10.8"
resolved "https://registry.yarnpkg.com/@tanstack/react-virtual/-/react-virtual-3.10.8.tgz#bf4b06f157ed298644a96ab7efc1a2b01ab36e3c"
integrity sha512-VbzbVGSsZlQktyLrP5nxE+vE1ZR+U0NFAWPbJLoG2+DKPwd2D7dVICTVIIaYlJqX1ZCEnYDbaOpmMwbsyhBoIA==
dependencies:
"@tanstack/virtual-core" "3.2.0"
"@tanstack/virtual-core" "3.10.8"
"@tanstack/virtual-core@3.2.0":
version "3.2.0"
resolved "https://registry.yarnpkg.com/@tanstack/virtual-core/-/virtual-core-3.2.0.tgz#874d36135e4badce2719e7bdc556ce240cbaff14"
integrity sha512-P5XgYoAw/vfW65byBbJQCw+cagdXDT/qH6wmABiLt4v4YBT2q2vqCOhihe+D1Nt325F/S/0Tkv6C5z0Lv+VBQQ==
"@tanstack/virtual-core@3.10.8":
version "3.10.8"
resolved "https://registry.yarnpkg.com/@tanstack/virtual-core/-/virtual-core-3.10.8.tgz#975446a667755222f62884c19e5c3c66d959b8b4"
integrity sha512-PBu00mtt95jbKFi6Llk9aik8bnR3tR/oQP1o3TSi+iG//+Q2RTIzCEgKkHG8BB86kxMNW6O8wku+Lmi+QFR6jA==
"@types/json5@^0.0.29":
version "0.0.29"
@ -779,11 +875,16 @@ chokidar@^3.5.3:
optionalDependencies:
fsevents "~2.3.2"
client-only@0.0.1, client-only@^0.0.1:
client-only@0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/client-only/-/client-only-0.0.1.tgz#38bba5d403c41ab150bff64a95c85013cf73bca1"
integrity sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==
clsx@^2.0.0:
version "2.1.1"
resolved "https://registry.yarnpkg.com/clsx/-/clsx-2.1.1.tgz#eed397c9fd8bd882bfb18deab7102049a2f32999"
integrity sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==
clsx@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/clsx/-/clsx-2.1.0.tgz#e851283bcb5c80ee7608db18487433f7b23f77cb"
@ -2995,6 +3096,11 @@ supports-preserve-symlinks-flag@^1.0.0:
resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09"
integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==
tabbable@^6.0.0:
version "6.2.0"
resolved "https://registry.yarnpkg.com/tabbable/-/tabbable-6.2.0.tgz#732fb62bc0175cfcec257330be187dcfba1f3b97"
integrity sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew==
tailwind-merge@^2.2.2:
version "2.2.2"
resolved "https://registry.yarnpkg.com/tailwind-merge/-/tailwind-merge-2.2.2.tgz#87341e7604f0e20499939e152cd2841f41f7a3df"