Compare commits
No commits in common. "c5ad647b77893cf54c61cae04dc59a771a41d334" and "7d5d10b49e7905e17f2220fe084136dd66492520" have entirely different histories.
c5ad647b77 ... 7d5d10b49e
@@ -0,0 +1 @@
+patreon: itzcrazykns

@@ -146,9 +146,9 @@ If you find Perplexica useful, consider giving us a star on GitHub. This helps m
 
 We also accept donations to help sustain our project. If you would like to contribute, you can use the following options to donate. Thank you for your support!
 
-| Ethereum |
-| ----------------------------------------------------- |
-| Address: `0xB025a84b2F269570Eb8D4b05DEdaA41D8525B6DD` |
+| Cards | Ethereum |
+| ----------------------------------- | ----------------------------------------------------- |
+| https://www.patreon.com/itzcrazykns | Address: `0xB025a84b2F269570Eb8D4b05DEdaA41D8525B6DD` |
 
 ## Contribution
 

@@ -17,7 +17,6 @@ To update Perplexica to the latest version, follow these steps:
 ```bash
 docker compose up -d --build
 ```
-
 4. Once the command completes running go to http://localhost:3000 and verify the latest changes.
 
 ## For non Docker users

@@ -52,8 +52,8 @@ const basicRedditSearchResponsePrompt = `
 Place these citations at the end of that particular sentence. You can cite the same sentence multiple times if it is relevant to the user's query like [number1][number2].
 However you do not need to cite it using the same number. You can use different numbers to cite the same sentence multiple times. The number refers to the number of the search result (passed in the context) used to generate that part of the answer.
 
 Anything inside the following \`context\` HTML block provided below is for your knowledge returned by Reddit and is not shared by the user. You have to answer question on the basis of it and cite the relevant information from it but you do not have to
 talk about the context in your response.
 
 <context>
 {context}

@@ -177,9 +177,9 @@ const createBasicRedditSearchAnsweringChain = (
     });
 
     const sortedDocs = similarity
-      .filter((sim) => sim.similarity > 0.3)
       .sort((a, b) => b.similarity - a.similarity)
       .slice(0, 15)
+      .filter((sim) => sim.similarity > 0.3)
       .map((sim) => docsWithContent[sim.index]);
 
     return sortedDocs;

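In both versions of the hunk above, `createBasicRedditSearchAnsweringChain` reranks the retrieved Reddit documents by embedding similarity, keeps at most 15, and drops anything scoring 0.3 or below; because the chain sorts by the same score the filter checks, moving the filter after the slice selects the same documents. A minimal runnable sketch of that pipeline is below. The `Doc` and `Score` shapes and the example data are assumptions for illustration; the variable names come from the hunk.

```ts
// Sketch of the rerank step from createBasicRedditSearchAnsweringChain (assumed types).
type Doc = { pageContent: string };
type Score = { index: number; similarity: number };

// Assumed example data; in the real chain these come from the embedding comparison.
const docsWithContent: Doc[] = [
  { pageContent: 'post A' },
  { pageContent: 'post B' },
  { pageContent: 'post C' },
];
const similarity: Score[] = [
  { index: 0, similarity: 0.82 },
  { index: 1, similarity: 0.12 },
  { index: 2, similarity: 0.45 },
];

// Same shape as the new side of the hunk: rank by similarity, keep the top 15,
// then drop anything at or below the 0.3 threshold.
const sortedDocs = similarity
  .sort((a, b) => b.similarity - a.similarity)
  .slice(0, 15)
  .filter((sim) => sim.similarity > 0.3)
  .map((sim) => docsWithContent[sim.index]);

console.log(sortedDocs); // [ { pageContent: 'post A' }, { pageContent: 'post C' } ]
```
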
@@ -52,8 +52,8 @@ const basicWebSearchResponsePrompt = `
 Place these citations at the end of that particular sentence. You can cite the same sentence multiple times if it is relevant to the user's query like [number1][number2].
 However you do not need to cite it using the same number. You can use different numbers to cite the same sentence multiple times. The number refers to the number of the search result (passed in the context) used to generate that part of the answer.
 
 Anything inside the following \`context\` HTML block provided below is for your knowledge returned by the search engine and is not shared by the user. You have to answer question on the basis of it and cite the relevant information from it but you do not have to
 talk about the context in your response.
 
 <context>
 {context}

@@ -175,8 +175,8 @@ const createBasicWebSearchAnsweringChain = (
     });
 
     const sortedDocs = similarity
-      .filter((sim) => sim.similarity > 0.5)
       .sort((a, b) => b.similarity - a.similarity)
+      .filter((sim) => sim.similarity > 0.5)
       .slice(0, 15)
       .map((sim) => docsWithContent[sim.index]);
 

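The web-search chain applies the same rerank with a stricter 0.5 cutoff, and here the filter stays ahead of `.slice(0, 15)` on both sides, so only the method order changes. The `similarity` scores these chains consume are computed outside this compare view; the sketch below shows one plausible way such `{ index, similarity }` records are produced, using cosine similarity between a query embedding and document embeddings. The helper names and embedding shapes are assumptions, not code from the diff.

```ts
// Hypothetical helper: cosine similarity between a query embedding and each
// document embedding, producing the { index, similarity } records that the
// sort/slice/filter chains above consume. Not taken from the diff.
const cosineSimilarity = (a: number[], b: number[]): number => {
  let dot = 0;
  let normA = 0;
  let normB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  return dot / (Math.sqrt(normA) * Math.sqrt(normB));
};

const scoreDocs = (queryEmbedding: number[], docEmbeddings: number[][]) =>
  docEmbeddings.map((embedding, index) => ({
    index,
    similarity: cosineSimilarity(queryEmbedding, embedding),
  }));

// Example with toy 3-dimensional embeddings.
const similarity = scoreDocs([1, 0, 0], [[0.9, 0.1, 0], [0, 1, 0]]);
console.log(similarity); // [{ index: 0, similarity: ~0.99 }, { index: 1, similarity: 0 }]
```
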
@@ -52,8 +52,8 @@ const basicYoutubeSearchResponsePrompt = `
 Place these citations at the end of that particular sentence. You can cite the same sentence multiple times if it is relevant to the user's query like [number1][number2].
 However you do not need to cite it using the same number. You can use different numbers to cite the same sentence multiple times. The number refers to the number of the search result (passed in the context) used to generate that part of the answer.
 
 Anything inside the following \`context\` HTML block provided below is for your knowledge returned by Youtube and is not shared by the user. You have to answer question on the basis of it and cite the relevant information from it but you do not have to
 talk about the context in your response.
 
 <context>
 {context}

@@ -177,9 +177,9 @@ const createBasicYoutubeSearchAnsweringChain = (
     });
 
     const sortedDocs = similarity
-      .filter((sim) => sim.similarity > 0.3)
       .sort((a, b) => b.similarity - a.similarity)
       .slice(0, 15)
+      .filter((sim) => sim.similarity > 0.3)
       .map((sim) => docsWithContent[sim.index]);
 
     return sortedDocs;

@@ -9,26 +9,6 @@ export const loadGroqChatModels = async () => {
 
   try {
     const chatModels = {
-      'Llama 3.1 70B': new ChatOpenAI(
-        {
-          openAIApiKey: groqApiKey,
-          modelName: 'llama-3.1-70b-versatile',
-          temperature: 0.7,
-        },
-        {
-          baseURL: 'https://api.groq.com/openai/v1',
-        },
-      ),
-      'Llama 3.1 8B': new ChatOpenAI(
-        {
-          openAIApiKey: groqApiKey,
-          modelName: 'llama-3.1-8b-instant',
-          temperature: 0.7,
-        },
-        {
-          baseURL: 'https://api.groq.com/openai/v1',
-        },
-      ),
       'LLaMA3 8b': new ChatOpenAI(
         {
           openAIApiKey: groqApiKey,

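The removed 'Llama 3.1 70B' and 'Llama 3.1 8B' entries follow the same pattern as the 'LLaMA3 8b' entry that remains: LangChain's `ChatOpenAI` client pointed at Groq's OpenAI-compatible endpoint through `baseURL`. A standalone sketch of that pattern is below, assuming `@langchain/openai` is installed and the key comes from an environment variable; in the diff, `groqApiKey` is supplied by surrounding code this hunk does not show.

```ts
import { ChatOpenAI } from '@langchain/openai';

// Assumed: the key is read from an environment variable for this sketch.
const groqApiKey = process.env.GROQ_API_KEY;

// Same pattern as the entries in the hunk: an OpenAI-compatible client pointed
// at Groq's endpoint, one instance per model name.
const chatModels = {
  'Llama 3.1 70B': new ChatOpenAI(
    {
      openAIApiKey: groqApiKey,
      modelName: 'llama-3.1-70b-versatile',
      temperature: 0.7,
    },
    {
      baseURL: 'https://api.groq.com/openai/v1',
    },
  ),
};

// Usage sketch: invoke returns an AIMessage whose content holds the reply.
const demo = async () => {
  const reply = await chatModels['Llama 3.1 70B'].invoke('Say hello in one word.');
  console.log(reply.content);
};

demo();
```
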
@@ -34,7 +34,7 @@ export default function RootLayout({
           unstyled: true,
           classNames: {
             toast:
-              'bg-light-primary dark:bg-dark-secondary dark:text-white/70 text-black-70 rounded-lg p-4 flex flex-row items-center space-x-2',
+              'bg-light-primary dark:bg-dark-primary text-white rounded-lg p-4 flex flex-row items-center space-x-2',
           },
         }}
       />

@@ -54,33 +54,23 @@ const useSocket = (
       ).then(async (res) => await res.json());
 
       const chatModelProviders = providers.chatModelProviders;
-
-      chatModelProvider = Object.keys(chatModelProviders)[0];
-
-      if (chatModelProvider === 'custom_openai') {
-        toast.error(
-          'Seems like you are using the custom OpenAI provider, please open the settings and configure the API key and base URL',
-        );
-        setError(true);
-        return;
-      } else {
-        chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
-
-        if (
-          !chatModelProviders ||
-          Object.keys(chatModelProviders).length === 0
-        )
-          return toast.error('No chat models available');
-      }
-
       const embeddingModelProviders = providers.embeddingModelProviders;
 
+      if (
+        !chatModelProviders ||
+        Object.keys(chatModelProviders).length === 0
+      )
+        return toast.error('No chat models available');
+
       if (
         !embeddingModelProviders ||
         Object.keys(embeddingModelProviders).length === 0
       )
         return toast.error('No embedding models available');
 
+      chatModelProvider = Object.keys(chatModelProviders)[0];
+      chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
+
       embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
       embeddingModel = Object.keys(
         embeddingModelProviders[embeddingModelProvider],

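Stripped of the React state and the toast notifications around it, the new side of this hunk reduces to: reject a `/models` response with no providers, then default to the first chat provider and model and the first embedding provider and model. A small sketch of that selection logic follows; the `Providers` shape, the thrown errors (in place of `toast.error`), and the example data are assumptions for illustration.

```ts
// Assumed shape of the /models response used in this hunk.
type Providers = Record<string, Record<string, unknown>>;

// Mirrors the new side of the hunk: bail out when either list is empty,
// otherwise default to the first provider and its first model.
const pickDefaults = (
  chatModelProviders: Providers,
  embeddingModelProviders: Providers,
) => {
  if (!chatModelProviders || Object.keys(chatModelProviders).length === 0) {
    throw new Error('No chat models available');
  }
  if (
    !embeddingModelProviders ||
    Object.keys(embeddingModelProviders).length === 0
  ) {
    throw new Error('No embedding models available');
  }

  const chatModelProvider = Object.keys(chatModelProviders)[0];
  const chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
  const embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
  const embeddingModel = Object.keys(
    embeddingModelProviders[embeddingModelProvider],
  )[0];

  return { chatModelProvider, chatModel, embeddingModelProvider, embeddingModel };
};

// Example with assumed provider data: picks 'groq' / 'LLaMA3 8b' and
// 'openai' / 'text-embedding-3-small'.
console.log(
  pickDefaults(
    { groq: { 'LLaMA3 8b': {} } },
    { openai: { 'text-embedding-3-small': {} } },
  ),
);
```
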
@@ -98,7 +88,7 @@ const useSocket = (
         `${process.env.NEXT_PUBLIC_API_URL}/models`,
         {
           headers: {
-            'Content-Type': 'app lication/json',
+            'Content-Type': 'application/json',
           },
         },
       ).then(async (res) => await res.json());

@@ -116,7 +106,6 @@ const useSocket = (
 
       if (
         chatModelProvider &&
-        chatModelProvider != 'custom_openai' &&
         !chatModelProviders[chatModelProvider][chatModel]
       ) {
         chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];

@@ -198,13 +187,6 @@ const useSocket = (
         console.log('[DEBUG] closed');
       };
 
-      ws.addEventListener('message', (e) => {
-        const data = JSON.parse(e.data);
-        if (data.type === 'error') {
-          toast.error(data.data);
-        }
-      });
-
       setWs(ws);
     };
 

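The listener removed in this hunk is a small error channel: parse every incoming WebSocket message and surface server-reported errors to the user. A standalone sketch of the same idea is below, with a placeholder URL and `console.error` standing in for the toast used in the component.

```ts
// Sketch of the error listener removed above, outside of React.
// 'ws://localhost:3001' is a placeholder URL, and console.error stands in
// for the toast notification used in the component.
const ws = new WebSocket('ws://localhost:3001');

ws.addEventListener('message', (e: MessageEvent) => {
  const data = JSON.parse(e.data);
  if (data.type === 'error') {
    console.error(data.data);
  }
});
```
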
@@ -225,13 +225,9 @@ const SettingsDialog = ({
                 value={selectedChatModelProvider ?? undefined}
                 onChange={(e) => {
                   setSelectedChatModelProvider(e.target.value);
-                  if (e.target.value === 'custom_openai') {
-                    setSelectedChatModel('');
-                  } else {
-                    setSelectedChatModel(
-                      config.chatModelProviders[e.target.value][0],
-                    );
-                  }
+                  setSelectedChatModel(
+                    config.chatModelProviders[e.target.value][0],
+                  );
                 }}
                 options={Object.keys(config.chatModelProviders).map(
                   (provider) => ({

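The left side of this hunk keeps a `custom_openai` special case in the provider dropdown: a custom OpenAI-compatible provider has no preset model list, so the selected model is cleared instead of being read from `config.chatModelProviders`. A stripped-down sketch of that handler is below, with plain variables in place of the React state setters; the `Config` shape and the example data are assumptions.

```ts
// Assumed config shape: provider name -> list of model names.
type Config = { chatModelProviders: Record<string, string[]> };

let selectedChatModelProvider = '';
let selectedChatModel = '';

// Mirrors the old side of the hunk: custom_openai has no preset models,
// every other provider defaults to its first listed model.
const onProviderChange = (value: string, config: Config) => {
  selectedChatModelProvider = value;
  if (value === 'custom_openai') {
    selectedChatModel = '';
  } else {
    selectedChatModel = config.chatModelProviders[value][0];
  }
};

// Example usage with assumed provider data.
onProviderChange('groq', { chatModelProviders: { groq: ['LLaMA3 8b'] } });
console.log(selectedChatModelProvider, selectedChatModel); // groq LLaMA3 8b
```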