Compare commits


24 Commits

Author SHA1 Message Date
projectmoon 04d49f6faa Merge remote-tracking branch 'origin/master' into ollama-auth 2024-10-31 12:52:53 +01:00
ItzCrazyKns 1e99fe8d69 feat(package): bump version 2024-10-31 11:08:49 +05:30
ItzCrazyKns 012dfa5a74 feat(listLineOutputParser): handle unclosed tags 2024-10-30 10:29:21 +05:30
ItzCrazyKns 65d057a05e feat(suggestions): handle custom OpenAI 2024-10-30 10:29:06 +05:30
ItzCrazyKns 3e7645614f feat(image-search): handle custom OpenAI 2024-10-30 10:28:40 +05:30
ItzCrazyKns 7c6ee2ead1 feat(video-search): handle custom OpenAI 2024-10-30 10:28:31 +05:30
ItzCrazyKns 540f38ae68 feat(empty-chat): add settings for mobile 2024-10-30 09:14:09 +05:30
ItzCrazyKns f1c0b5435b feat(delete-chat): use `window.location` to refresh page 2024-10-30 09:11:48 +05:30
ItzCrazyKns b33e5fefba feat(navbar): remove comments 2024-10-29 20:00:31 +05:30
ItzCrazyKns 03d0ff2ca4 feat(navbar): make delete & plus button work 2024-10-29 19:59:58 +05:30
ItzCrazyKns dfb532e4d3 feat(package): bump version 2024-10-18 18:45:23 +05:30
ItzCrazyKns c8cd959496 feat(dockerfile): update backend image 2024-10-18 17:29:26 +05:30
ItzCrazyKns 4576d3de13 feat(dockerfile): update docker image 2024-10-18 17:26:02 +05:30
ItzCrazyKns 8057f28b20 feat(settings): handle no models 2024-10-18 17:07:09 +05:30
ItzCrazyKns 36bb265e1f feat(dockerfile): revert base image 2024-10-18 12:27:56 +05:30
ItzCrazyKns 71fc19f525 feat(dockerfile): update registry 2024-10-18 12:24:55 +05:30
ItzCrazyKns c7c0ebe5b6 feat(dockerfile): use NPM registry 2024-10-18 12:15:04 +05:30
ItzCrazyKns 8fe1b7c5e3 feat(webSearchAgent): revert prompt 2024-10-18 12:01:56 +05:30
ItzCrazyKns 6e0d3baef6 feat(dockerfile): update docker image 2024-10-18 11:50:56 +05:30
ItzCrazyKns 54e0bb317a feat(groq): update deprecated models 2024-10-18 11:05:57 +05:30
ItzCrazyKns 3e6e57dab0 feat(chat-window): fix rewrite, use messageID 2024-10-17 18:51:11 +05:30
ItzCrazyKns 5aad2febda feat(messageHandler): fix duplicate messageIDs 2024-10-17 18:50:43 +05:30
ItzCrazyKns 24e1919c5e feat(dockerfile): update image to prevent python errors 2024-10-17 10:46:18 +05:30
ItzCrazyKns c7abd96b05 feat(readme): add networking 2024-10-17 10:01:00 +05:30
20 changed files with 288 additions and 86 deletions

View File

@@ -17,6 +17,9 @@ jobs:
      - name: Checkout code
        uses: actions/checkout@v3

+     - name: Set up QEMU
+       uses: docker/setup-qemu-action@v2
+
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
        with:

View File

@@ -13,6 +13,7 @@
 - [Ollama Connection Errors](#ollama-connection-errors)
 - [Using as a Search Engine](#using-as-a-search-engine)
 - [Using Perplexica's API](#using-perplexicas-api)
+- [Expose Perplexica to a network](#expose-perplexica-to-network)
 - [One-Click Deployment](#one-click-deployment)
 - [Upcoming Features](#upcoming-features)
 - [Support Us](#support-us)
@@ -133,6 +134,10 @@ Perplexica also provides an API for developers looking to integrate its powerful
 For more details, check out the full documentation [here](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/API/SEARCH.md).

+## Expose Perplexica to network
+
+You can access Perplexica over your home network by following our networking guide [here](https://github.com/ItzCrazyKns/Perplexica/blob/master/docs/installation/NETWORKING.md).
+
 ## One-Click Deployment

 [![Deploy to RepoCloud](https://d16t0pc4846x52.cloudfront.net/deploylobe.svg)](https://repocloud.io/details/?app_id=267)

View File

@@ -1,4 +1,4 @@
-FROM node:slim
+FROM node:18-slim

 WORKDIR /home/perplexica
@@ -10,7 +10,7 @@ COPY yarn.lock /home/perplexica/
 RUN mkdir /home/perplexica/data

-RUN yarn install --frozen-lockfile
+RUN yarn install --frozen-lockfile --network-timeout 600000
 RUN yarn build

 CMD ["yarn", "start"]

View File

@@ -1,6 +1,6 @@
 {
   "name": "perplexica-backend",
-  "version": "1.9.0",
+  "version": "1.9.2",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {

View File

@@ -23,7 +23,7 @@ class LineListOutputParser extends BaseOutputParser<string[]> {
     const startKeyIndex = text.indexOf(`<${this.key}>`);
     const endKeyIndex = text.indexOf(`</${this.key}>`);

-    if (startKeyIndex === -1 || endKeyIndex === -1) {
+    if (startKeyIndex === -1 && endKeyIndex === -1) {
       return [];
     }
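
The flipped condition (`||` to `&&`) is what lets the parser tolerate a truncated LLM response: it now bails out only when both tags are missing, rather than whenever the closing tag is absent. A minimal standalone sketch of that behaviour, with a hypothetical parseLines helper standing in for the repo's LineListOutputParser class, and assuming the parser falls back to the end of the text when the closing tag is missing:

// Hypothetical stand-in for LineListOutputParser; assumes the parser reads
// to the end of the text when the closing tag was never emitted.
const parseLines = (text: string, key: string): string[] => {
  const startKeyIndex = text.indexOf(`<${key}>`);
  const endKeyIndex = text.indexOf(`</${key}>`);

  // Only give up when *both* tags are absent (the `&&` from the diff).
  if (startKeyIndex === -1 && endKeyIndex === -1) {
    return [];
  }

  const start = startKeyIndex === -1 ? 0 : startKeyIndex + `<${key}>`.length;
  const end = endKeyIndex === -1 ? text.length : endKeyIndex;

  return text
    .slice(start, end)
    .split('\n')
    .map((line) => line.trim())
    .filter((line) => line.length > 0);
};

// A response cut off before `</suggestions>` still yields its lines:
console.log(parseLines('<suggestions>\nfoo\nbar', 'suggestions')); // ['foo', 'bar']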

View File

@@ -22,12 +22,12 @@ export const loadGroqChatModels = async () => {
         },
       ),
     },
-    'llama-3.2-11b-text-preview': {
-      displayName: 'Llama 3.2 11B Text',
+    'llama-3.2-11b-vision-preview': {
+      displayName: 'Llama 3.2 11B Vision',
       model: new ChatOpenAI(
         {
           openAIApiKey: groqApiKey,
-          modelName: 'llama-3.2-11b-text-preview',
+          modelName: 'llama-3.2-11b-vision-preview',
           temperature: 0.7,
         },
         {
@@ -35,12 +35,12 @@ export const loadGroqChatModels = async () => {
         },
       ),
     },
-    'llama-3.2-90b-text-preview': {
-      displayName: 'Llama 3.2 90B Text',
+    'llama-3.2-90b-vision-preview': {
+      displayName: 'Llama 3.2 90B Vision',
       model: new ChatOpenAI(
         {
           openAIApiKey: groqApiKey,
-          modelName: 'llama-3.2-90b-text-preview',
+          modelName: 'llama-3.2-90b-vision-preview',
           temperature: 0.7,
         },
         {

View File

@@ -4,14 +4,28 @@ import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { getAvailableChatModelProviders } from '../lib/providers';
 import { HumanMessage, AIMessage } from '@langchain/core/messages';
 import logger from '../utils/logger';
+import { ChatOpenAI } from '@langchain/openai';

 const router = express.Router();

+interface ChatModel {
+  provider: string;
+  model: string;
+  customOpenAIBaseURL?: string;
+  customOpenAIKey?: string;
+}
+
+interface ImageSearchBody {
+  query: string;
+  chatHistory: any[];
+  chatModel?: ChatModel;
+}
+
 router.post('/', async (req, res) => {
   try {
-    let { query, chat_history, chat_model_provider, chat_model } = req.body;
+    let body: ImageSearchBody = req.body;

-    chat_history = chat_history.map((msg: any) => {
+    const chatHistory = body.chatHistory.map((msg: any) => {
       if (msg.role === 'user') {
         return new HumanMessage(msg.content);
       } else if (msg.role === 'assistant') {
@@ -19,22 +33,50 @@ router.post('/', async (req, res) => {
       }
     });

-    const chatModels = await getAvailableChatModelProviders();
-    const provider = chat_model_provider ?? Object.keys(chatModels)[0];
-    const chatModel = chat_model ?? Object.keys(chatModels[provider])[0];
+    const chatModelProviders = await getAvailableChatModelProviders();
+
+    const chatModelProvider =
+      body.chatModel?.provider || Object.keys(chatModelProviders)[0];
+    const chatModel =
+      body.chatModel?.model ||
+      Object.keys(chatModelProviders[chatModelProvider])[0];

     let llm: BaseChatModel | undefined;

-    if (chatModels[provider] && chatModels[provider][chatModel]) {
-      llm = chatModels[provider][chatModel].model as BaseChatModel | undefined;
+    if (body.chatModel?.provider === 'custom_openai') {
+      if (
+        !body.chatModel?.customOpenAIBaseURL ||
+        !body.chatModel?.customOpenAIKey
+      ) {
+        return res
+          .status(400)
+          .json({ message: 'Missing custom OpenAI base URL or key' });
+      }
+
+      llm = new ChatOpenAI({
+        modelName: body.chatModel.model,
+        openAIApiKey: body.chatModel.customOpenAIKey,
+        temperature: 0.7,
+        configuration: {
+          baseURL: body.chatModel.customOpenAIBaseURL,
+        },
+      }) as unknown as BaseChatModel;
+    } else if (
+      chatModelProviders[chatModelProvider] &&
+      chatModelProviders[chatModelProvider][chatModel]
+    ) {
+      llm = chatModelProviders[chatModelProvider][chatModel]
+        .model as unknown as BaseChatModel | undefined;
     }

     if (!llm) {
-      res.status(500).json({ message: 'Invalid LLM model selected' });
-      return;
+      return res.status(400).json({ message: 'Invalid model selected' });
     }

-    const images = await handleImageSearch({ query, chat_history }, llm);
+    const images = await handleImageSearch(
+      { query: body.query, chat_history: chatHistory },
+      llm,
+    );

     res.status(200).json({ images });
   } catch (err) {
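
With this change the route stops reading snake_case fields and expects a camelCase body carrying a nested chatModel object; the /videos and /suggestions routes below follow the exact same pattern, and the custom_openai branch rejects requests missing either the base URL or the key with a 400. A sketch of a client call against the new shape (the URL, model name, and key are placeholder values, not the project's defaults):

// Placeholder URL and credentials; with provider 'custom_openai' both
// customOpenAIBaseURL and customOpenAIKey must be present or the server
// replies 400. Other providers fall back to the first configured model.
const res = await fetch('http://localhost:3001/api/images', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    query: 'aurora borealis',
    chatHistory: [{ role: 'user', content: 'show me the northern lights' }],
    chatModel: {
      provider: 'custom_openai',
      model: 'gpt-4o-mini',
      customOpenAIBaseURL: 'https://api.openai.com/v1',
      customOpenAIKey: 'sk-...',
    },
  }),
});

const { images } = await res.json();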

View File

@@ -4,14 +4,27 @@ import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { getAvailableChatModelProviders } from '../lib/providers';
 import { HumanMessage, AIMessage } from '@langchain/core/messages';
 import logger from '../utils/logger';
+import { ChatOpenAI } from '@langchain/openai';

 const router = express.Router();

+interface ChatModel {
+  provider: string;
+  model: string;
+  customOpenAIBaseURL?: string;
+  customOpenAIKey?: string;
+}
+
+interface SuggestionsBody {
+  chatHistory: any[];
+  chatModel?: ChatModel;
+}
+
 router.post('/', async (req, res) => {
   try {
-    let { chat_history, chat_model, chat_model_provider } = req.body;
+    let body: SuggestionsBody = req.body;

-    chat_history = chat_history.map((msg: any) => {
+    const chatHistory = body.chatHistory.map((msg: any) => {
       if (msg.role === 'user') {
         return new HumanMessage(msg.content);
       } else if (msg.role === 'assistant') {
@@ -19,22 +32,50 @@ router.post('/', async (req, res) => {
       }
     });

-    const chatModels = await getAvailableChatModelProviders();
-    const provider = chat_model_provider ?? Object.keys(chatModels)[0];
-    const chatModel = chat_model ?? Object.keys(chatModels[provider])[0];
+    const chatModelProviders = await getAvailableChatModelProviders();
+
+    const chatModelProvider =
+      body.chatModel?.provider || Object.keys(chatModelProviders)[0];
+    const chatModel =
+      body.chatModel?.model ||
+      Object.keys(chatModelProviders[chatModelProvider])[0];

     let llm: BaseChatModel | undefined;

-    if (chatModels[provider] && chatModels[provider][chatModel]) {
-      llm = chatModels[provider][chatModel].model as BaseChatModel | undefined;
+    if (body.chatModel?.provider === 'custom_openai') {
+      if (
+        !body.chatModel?.customOpenAIBaseURL ||
+        !body.chatModel?.customOpenAIKey
+      ) {
+        return res
+          .status(400)
+          .json({ message: 'Missing custom OpenAI base URL or key' });
+      }
+
+      llm = new ChatOpenAI({
+        modelName: body.chatModel.model,
+        openAIApiKey: body.chatModel.customOpenAIKey,
+        temperature: 0.7,
+        configuration: {
+          baseURL: body.chatModel.customOpenAIBaseURL,
+        },
+      }) as unknown as BaseChatModel;
+    } else if (
+      chatModelProviders[chatModelProvider] &&
+      chatModelProviders[chatModelProvider][chatModel]
+    ) {
+      llm = chatModelProviders[chatModelProvider][chatModel]
+        .model as unknown as BaseChatModel | undefined;
     }

     if (!llm) {
-      res.status(500).json({ message: 'Invalid LLM model selected' });
-      return;
+      return res.status(400).json({ message: 'Invalid model selected' });
     }

-    const suggestions = await generateSuggestions({ chat_history }, llm);
+    const suggestions = await generateSuggestions(
+      { chat_history: chatHistory },
+      llm,
+    );

     res.status(200).json({ suggestions: suggestions });
   } catch (err) {

View File

@@ -4,14 +4,28 @@ import { getAvailableChatModelProviders } from '../lib/providers';
 import { HumanMessage, AIMessage } from '@langchain/core/messages';
 import logger from '../utils/logger';
 import handleVideoSearch from '../agents/videoSearchAgent';
+import { ChatOpenAI } from '@langchain/openai';

 const router = express.Router();

+interface ChatModel {
+  provider: string;
+  model: string;
+  customOpenAIBaseURL?: string;
+  customOpenAIKey?: string;
+}
+
+interface VideoSearchBody {
+  query: string;
+  chatHistory: any[];
+  chatModel?: ChatModel;
+}
+
 router.post('/', async (req, res) => {
   try {
-    let { query, chat_history, chat_model_provider, chat_model } = req.body;
+    let body: VideoSearchBody = req.body;

-    chat_history = chat_history.map((msg: any) => {
+    const chatHistory = body.chatHistory.map((msg: any) => {
       if (msg.role === 'user') {
         return new HumanMessage(msg.content);
       } else if (msg.role === 'assistant') {
@@ -19,22 +33,50 @@ router.post('/', async (req, res) => {
       }
     });

-    const chatModels = await getAvailableChatModelProviders();
-    const provider = chat_model_provider ?? Object.keys(chatModels)[0];
-    const chatModel = chat_model ?? Object.keys(chatModels[provider])[0];
+    const chatModelProviders = await getAvailableChatModelProviders();
+
+    const chatModelProvider =
+      body.chatModel?.provider || Object.keys(chatModelProviders)[0];
+    const chatModel =
+      body.chatModel?.model ||
+      Object.keys(chatModelProviders[chatModelProvider])[0];

     let llm: BaseChatModel | undefined;

-    if (chatModels[provider] && chatModels[provider][chatModel]) {
-      llm = chatModels[provider][chatModel].model as BaseChatModel | undefined;
+    if (body.chatModel?.provider === 'custom_openai') {
+      if (
+        !body.chatModel?.customOpenAIBaseURL ||
+        !body.chatModel?.customOpenAIKey
+      ) {
+        return res
+          .status(400)
+          .json({ message: 'Missing custom OpenAI base URL or key' });
+      }
+
+      llm = new ChatOpenAI({
+        modelName: body.chatModel.model,
+        openAIApiKey: body.chatModel.customOpenAIKey,
+        temperature: 0.7,
+        configuration: {
+          baseURL: body.chatModel.customOpenAIBaseURL,
+        },
+      }) as unknown as BaseChatModel;
+    } else if (
+      chatModelProviders[chatModelProvider] &&
+      chatModelProviders[chatModelProvider][chatModel]
+    ) {
+      llm = chatModelProviders[chatModelProvider][chatModel]
+        .model as unknown as BaseChatModel | undefined;
     }

     if (!llm) {
-      res.status(500).json({ message: 'Invalid LLM model selected' });
-      return;
+      return res.status(400).json({ message: 'Invalid model selected' });
     }

-    const videos = await handleVideoSearch({ chat_history, query }, llm);
+    const videos = await handleVideoSearch(
+      { chat_history: chatHistory, query: body.query },
+      llm,
+    );

     res.status(200).json({ videos });
   } catch (err) {

View File

@@ -10,8 +10,8 @@ import type { BaseChatModel } from '@langchain/core/language_models/chat_models'
 import type { Embeddings } from '@langchain/core/embeddings';
 import logger from '../utils/logger';
 import db from '../db';
-import { chats, messages } from '../db/schema';
-import { eq } from 'drizzle-orm';
+import { chats, messages as messagesSchema } from '../db/schema';
+import { eq, asc, gt } from 'drizzle-orm';
 import crypto from 'crypto';

 type Message = {
@@ -71,7 +71,7 @@ const handleEmitterEvents = (
   emitter.on('end', () => {
     ws.send(JSON.stringify({ type: 'messageEnd', messageId: messageId }));

-    db.insert(messages)
+    db.insert(messagesSchema)
       .values({
         content: recievedMessage,
         chatId: chatId,
@@ -106,7 +106,9 @@ export const handleMessage = async (
     const parsedWSMessage = JSON.parse(message) as WSMessage;
     const parsedMessage = parsedWSMessage.message;

-    const id = crypto.randomBytes(7).toString('hex');
+    const humanMessageId =
+      parsedMessage.messageId ?? crypto.randomBytes(7).toString('hex');
+    const aiMessageId = crypto.randomBytes(7).toString('hex');

     if (!parsedMessage.content)
       return ws.send(
@@ -141,7 +143,7 @@ export const handleMessage = async (
         parsedWSMessage.optimizationMode,
       );

-      handleEmitterEvents(emitter, ws, id, parsedMessage.chatId);
+      handleEmitterEvents(emitter, ws, aiMessageId, parsedMessage.chatId);

       const chat = await db.query.chats.findFirst({
         where: eq(chats.id, parsedMessage.chatId),
@@ -159,18 +161,29 @@ export const handleMessage = async (
           .execute();
       }

+      const messageExists = await db.query.messages.findFirst({
+        where: eq(messagesSchema.messageId, humanMessageId),
+      });
+
+      if (!messageExists) {
         await db
-          .insert(messages)
+          .insert(messagesSchema)
           .values({
             content: parsedMessage.content,
             chatId: parsedMessage.chatId,
-            messageId: id,
+            messageId: humanMessageId,
             role: 'user',
             metadata: JSON.stringify({
               createdAt: new Date(),
             }),
           })
           .execute();
+      } else {
+        await db
+          .delete(messagesSchema)
+          .where(gt(messagesSchema.id, messageExists.id))
+          .execute();
+      }
     } else {
       ws.send(
         JSON.stringify({

View File

@@ -355,6 +355,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
         console.log('[DEBUG] closed');
       }
     };
+    // eslint-disable-next-line react-hooks/exhaustive-deps
   }, []);

   const messagesRef = useRef<Message[]>([]);
@@ -373,8 +374,9 @@ const ChatWindow = ({ id }: { id?: string }) => {
     }
   }, [isMessagesLoaded, isWSReady]);

-  const sendMessage = async (message: string) => {
+  const sendMessage = async (message: string, messageId?: string) => {
     if (loading) return;
+
     setLoading(true);
     setMessageAppeared(false);
@@ -382,12 +384,13 @@ const ChatWindow = ({ id }: { id?: string }) => {
     let recievedMessage = '';
     let added = false;

-    const messageId = crypto.randomBytes(7).toString('hex');
+    messageId = messageId ?? crypto.randomBytes(7).toString('hex');

     ws?.send(
       JSON.stringify({
         type: 'message',
         message: {
+          messageId: messageId,
           chatId: chatId!,
           content: message,
         },
@@ -514,7 +517,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
       return [...prev.slice(0, messages.length > 2 ? index - 1 : 0)];
     });

-    sendMessage(message.content);
+    sendMessage(message.content, message.messageId);
   };

   useEffect(() => {
@@ -541,7 +544,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
     <div>
       {messages.length > 0 ? (
         <>
-          <Navbar messages={messages} />
+          <Navbar chatId={chatId!} messages={messages} />
           <Chat
             loading={loading}
             messages={messages}
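
Read together with the messageHandler diff above, this closes the duplicate-messageId loop: on rewrite the client replays the original messageId, the server finds the existing row and deletes everything after it instead of inserting a copy. A sketch of the resulting WebSocket frame (IDs and content are illustrative, and fields the handler also reads, such as optimizationMode, are omitted here):

// Illustrative rewrite frame: the client reuses the messageId of the message
// being rewritten, so the server's dedupe branch prunes later rows instead
// of inserting a duplicate. A brand-new send uses fresh 7-byte hex instead.
const rewriteFrame = {
  type: 'message',
  message: {
    messageId: 'a1b2c3d4e5f6a7', // reused from the original human message
    chatId: 'example-chat-id',
    content: 'rewritten question text',
  },
};

// ws.send(JSON.stringify(rewriteFrame));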

View File

@@ -16,10 +16,12 @@ const DeleteChat = ({
   chatId,
   chats,
   setChats,
+  redirect = false,
 }: {
   chatId: string;
   chats: Chat[];
   setChats: (chats: Chat[]) => void;
+  redirect?: boolean;
 }) => {
   const [confirmationDialogOpen, setConfirmationDialogOpen] = useState(false);
   const [loading, setLoading] = useState(false);
@@ -44,6 +46,10 @@ const DeleteChat = ({
       const newChats = chats.filter((chat) => chat.id !== chatId);
       setChats(newChats);
+
+      if (redirect) {
+        window.location.href = '/';
+      }
     } catch (err: any) {
       toast.error(err.message);
     } finally {

View File

@@ -1,4 +1,7 @@
+import { Settings } from 'lucide-react';
 import EmptyChatMessageInput from './EmptyChatMessageInput';
+import SettingsDialog from './SettingsDialog';
+import { useState } from 'react';

 const EmptyChat = ({
   sendMessage,
@@ -13,8 +16,17 @@ const EmptyChat = ({
   optimizationMode: string;
   setOptimizationMode: (mode: string) => void;
 }) => {
+  const [isSettingsOpen, setIsSettingsOpen] = useState(false);
+
   return (
     <div className="relative">
+      <SettingsDialog isOpen={isSettingsOpen} setIsOpen={setIsSettingsOpen} />
+      <div className="absolute w-full flex flex-row items-center justify-end mr-5 mt-5">
+        <Settings
+          className="cursor-pointer lg:hidden"
+          onClick={() => setIsSettingsOpen(true)}
+        />
+      </div>
       <div className="flex flex-col items-center justify-center min-h-screen max-w-screen-sm mx-auto p-2 space-y-8">
         <h2 className="text-black/70 dark:text-white/70 text-3xl font-medium -mt-8">
           Research begins here.

View File

@@ -186,10 +186,10 @@ const MessageBox = ({
           <div className="lg:sticky lg:top-20 flex flex-col items-center space-y-3 w-full lg:w-3/12 z-30 h-full pb-4">
             <SearchImages
               query={history[messageIndex - 1].content}
-              chat_history={history.slice(0, messageIndex - 1)}
+              chatHistory={history.slice(0, messageIndex - 1)}
             />
             <SearchVideos
-              chat_history={history.slice(0, messageIndex - 1)}
+              chatHistory={history.slice(0, messageIndex - 1)}
               query={history[messageIndex - 1].content}
             />
           </div>

View File

@@ -2,8 +2,15 @@ import { Clock, Edit, Share, Trash } from 'lucide-react';
 import { Message } from './ChatWindow';
 import { useEffect, useState } from 'react';
 import { formatTimeDifference } from '@/lib/utils';
+import DeleteChat from './DeleteChat';

-const Navbar = ({ messages }: { messages: Message[] }) => {
+const Navbar = ({
+  chatId,
+  messages,
+}: {
+  messages: Message[];
+  chatId: string;
+}) => {
   const [title, setTitle] = useState<string>('');
   const [timeAgo, setTimeAgo] = useState<string>('');
@@ -39,10 +46,12 @@ const Navbar = ({ messages }: { messages: Message[] }) => {
   return (
     <div className="fixed z-40 top-0 left-0 right-0 px-4 lg:pl-[104px] lg:pr-6 lg:px-8 flex flex-row items-center justify-between w-full py-4 text-sm text-black dark:text-white/70 border-b bg-light-primary dark:bg-dark-primary border-light-100 dark:border-dark-200">
-      <Edit
-        size={17}
-        className="active:scale-95 transition duration-100 cursor-pointer lg:hidden"
-      />
+      <a
+        href="/"
+        className="active:scale-95 transition duration-100 cursor-pointer lg:hidden"
+      >
+        <Edit size={17} />
+      </a>
       <div className="hidden lg:flex flex-row items-center justify-center space-x-2">
         <Clock size={17} />
         <p className="text-xs">{timeAgo} ago</p>
@@ -54,10 +63,7 @@ const Navbar = ({ messages }: { messages: Message[] }) => {
           size={17}
           className="active:scale-95 transition duration-100 cursor-pointer"
         />
-        <Trash
-          size={17}
-          className="text-red-400 active:scale-95 transition duration-100 cursor-pointer"
-        />
+        <DeleteChat redirect chatId={chatId} chats={[]} setChats={() => {}} />
       </div>
     </div>
   );

View File

@@ -13,10 +13,10 @@ type Image = {

 const SearchImages = ({
   query,
-  chat_history,
+  chatHistory,
 }: {
   query: string;
-  chat_history: Message[];
+  chatHistory: Message[];
 }) => {
   const [images, setImages] = useState<Image[] | null>(null);
   const [loading, setLoading] = useState(false);
@@ -33,6 +33,9 @@ const SearchImages = ({
           const chatModelProvider = localStorage.getItem('chatModelProvider');
           const chatModel = localStorage.getItem('chatModel');

+          const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
+          const customOpenAIKey = localStorage.getItem('openAIApiKey');
+
           const res = await fetch(
             `${process.env.NEXT_PUBLIC_API_URL}/images`,
             {
@@ -42,9 +45,15 @@ const SearchImages = ({
               },
               body: JSON.stringify({
                 query: query,
-                chat_history: chat_history,
-                chat_model_provider: chatModelProvider,
-                chat_model: chatModel,
+                chatHistory: chatHistory,
+                chatModel: {
+                  provider: chatModelProvider,
+                  model: chatModel,
+                  ...(chatModelProvider === 'custom_openai' && {
+                    customOpenAIBaseURL: customOpenAIBaseURL,
+                    customOpenAIKey: customOpenAIKey,
+                  }),
+                },
               }),
             },
           );

View File

@@ -26,10 +26,10 @@ declare module 'yet-another-react-lightbox' {

 const Searchvideos = ({
   query,
-  chat_history,
+  chatHistory,
 }: {
   query: string;
-  chat_history: Message[];
+  chatHistory: Message[];
 }) => {
   const [videos, setVideos] = useState<Video[] | null>(null);
   const [loading, setLoading] = useState(false);
@@ -46,6 +46,9 @@ const Searchvideos = ({
           const chatModelProvider = localStorage.getItem('chatModelProvider');
           const chatModel = localStorage.getItem('chatModel');

+          const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
+          const customOpenAIKey = localStorage.getItem('openAIApiKey');
+
           const res = await fetch(
             `${process.env.NEXT_PUBLIC_API_URL}/videos`,
             {
@@ -55,9 +58,15 @@ const Searchvideos = ({
               },
               body: JSON.stringify({
                 query: query,
-                chat_history: chat_history,
-                chat_model_provider: chatModelProvider,
-                chat_model: chatModel,
+                chatHistory: chatHistory,
+                chatModel: {
+                  provider: chatModelProvider,
+                  model: chatModel,
+                  ...(chatModelProvider === 'custom_openai' && {
+                    customOpenAIBaseURL: customOpenAIBaseURL,
+                    customOpenAIKey: customOpenAIKey,
+                  }),
+                },
               }),
             },
           );

View File

@@ -128,7 +128,9 @@ const SettingsDialog = ({
         const chatModel =
           localStorage.getItem('chatModel') ||
           (data.chatModelProviders &&
-            data.chatModelProviders[chatModelProvider]?.[0].name) ||
+          data.chatModelProviders[chatModelProvider]?.length > 0
+            ? data.chatModelProviders[chatModelProvider][0].name
+            : undefined) ||
           '';

         const embeddingModelProvider =
           localStorage.getItem('embeddingModelProvider') ||

View File

@@ -4,15 +4,24 @@ export const getSuggestions = async (chatHisory: Message[]) => {
   const chatModel = localStorage.getItem('chatModel');
   const chatModelProvider = localStorage.getItem('chatModelProvider');

+  const customOpenAIKey = localStorage.getItem('openAIApiKey');
+  const customOpenAIBaseURL = localStorage.getItem('openAIBaseURL');
+
   const res = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/suggestions`, {
     method: 'POST',
     headers: {
       'Content-Type': 'application/json',
     },
     body: JSON.stringify({
-      chat_history: chatHisory,
-      chat_model: chatModel,
-      chat_model_provider: chatModelProvider,
+      chatHistory: chatHisory,
+      chatModel: {
+        provider: chatModelProvider,
+        model: chatModel,
+        ...(chatModelProvider === 'custom_openai' && {
+          customOpenAIKey,
+          customOpenAIBaseURL,
+        }),
+      },
     }),
   });

View File

@@ -1,6 +1,6 @@
 {
   "name": "perplexica-frontend",
-  "version": "1.9.0",
+  "version": "1.9.2",
   "license": "MIT",
   "author": "ItzCrazyKns",
   "scripts": {