From 100872f2d9da03a5a85eb710b212de93f7779461 Mon Sep 17 00:00:00 2001
From: ItzCrazyKns
Date: Sun, 12 May 2024 14:04:05 +0530
Subject: [PATCH] feat(docker-compose): revert network changes

---
 README.md           | 13 ++++++++++++-
 docker-compose.yaml | 14 ++++++++++++--
 sample.config.toml  |  2 +-
 3 files changed, 25 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index 9b81615..9235e4e 100644
--- a/README.md
+++ b/README.md
@@ -10,6 +10,7 @@
 - [Installation](#installation)
   - [Getting Started with Docker (Recommended)](#getting-started-with-docker-recommended)
   - [Non-Docker Installation](#non-docker-installation)
+  - [Ollama connection errors](#ollama-connection-errors)
 - [Using as a Search Engine](#using-as-a-search-engine)
 - [One-Click Deployment](#one-click-deployment)
 - [Upcoming Features](#upcoming-features)
@@ -65,7 +66,7 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
 4. Rename the `sample.config.toml` file to `config.toml`. For Docker setups, you need only fill in the following fields:
 
    - `OPENAI`: Your OpenAI API key. **You only need to fill this if you wish to use OpenAI's models**.
-   - `OLLAMA`: Your Ollama API URL. If Ollama is hosted on the same computer as Perplexica, you should enter it as `http://127.0.0.1:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://127.0.0.1:11434`. For other ports, adjust accordingly. If Ollama is running on some other server use the server's IP with port or domain in place of it. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**.
+   - `OLLAMA`: Your Ollama API URL. You should enter it as `http://host.docker.internal:PORT_NUMBER`. If you installed Ollama on port 11434, use `http://host.docker.internal:11434`. For other ports, adjust accordingly. **You need to fill this if you wish to use Ollama's models instead of OpenAI's**.
    - `GROQ`: Your Groq API key. **You only need to fill this if you wish to use Groq's hosted models**
 
    **Note**: You can change these after starting Perplexica from the settings dialog.
@@ -94,6 +95,16 @@ There are mainly 2 ways of installing Perplexica - With Docker, Without Docker.
 
 See the [installation documentation](https://github.com/ItzCrazyKns/Perplexica/tree/master/docs/installation) for more information like exposing it your network, etc.
 
+### Ollama connection errors
+
+If you're facing an Ollama connection error, it is usually because the backend cannot reach Ollama's API. You can fix it by updating your Ollama API URL in the settings menu to the following:
+
+On Windows: `http://host.docker.internal:11434`
+On Mac: `http://host.docker.internal:11434`
+On Linux: `http://private_ip_of_computer_hosting_ollama:11434`
+
+Adjust the port if Ollama is running on a different one.
+
 ## Using as a Search Engine
 
 If you wish to use Perplexica as an alternative to traditional search engines like Google or Bing, or if you want to add a shortcut for quick access from your browser's search bar, follow these steps:
diff --git a/docker-compose.yaml b/docker-compose.yaml
index e11bf4e..dc55c29 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -5,16 +5,21 @@ services:
       - ./searxng:/etc/searxng:rw
     ports:
       - 31336:8080
+    networks:
+      - perplexica-network
 
   perplexica-backend:
     build:
       context: .
       dockerfile: backend.dockerfile
       args:
-        - SEARXNG_API_URL=http://127.0.0.1:31336
+        - SEARXNG_API_URL=http://searxng:8080
     depends_on:
       - searxng
-    network_mode: host
+    ports:
+      - 31338:31338
+    networks:
+      - perplexica-network
 
   perplexica-frontend:
     build:
@@ -28,3 +33,8 @@
       - perplexica-backend
     ports:
       - 31337:31337
+    networks:
+      - perplexica-network
+
+networks:
+  perplexica-network:
\ No newline at end of file
diff --git a/sample.config.toml b/sample.config.toml
index 5342fe4..3c7f31b 100644
--- a/sample.config.toml
+++ b/sample.config.toml
@@ -9,4 +9,4 @@ GROQ = "" # Groq API key - gsk_1234567890abcdef1234567890abcdef
 
 [API_ENDPOINTS]
 SEARXNG = "http://localhost:32768" # SearxNG API URL
-OLLAMA = "" # Ollama API URL - http://127.0.0.1:11434
+OLLAMA = "" # Ollama API URL - http://host.docker.internal:11434
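
For a Docker setup, the `[API_ENDPOINTS]` section of `config.toml` then ends up roughly as sketched below. This is only an illustration of the guidance added above: the other sections of the file are omitted, and on Linux the host machine's private IP replaces `host.docker.internal`.

```toml
# Rough sketch of config.toml's [API_ENDPOINTS] section for a Docker setup
# (other sections of the file are omitted here).
[API_ENDPOINTS]
SEARXNG = "http://localhost:32768"           # SearxNG API URL
OLLAMA = "http://host.docker.internal:11434" # on Linux: http://<private-ip-of-ollama-host>:11434
```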
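
Likewise, the networking layout that `docker-compose.yaml` ends up with after this patch is roughly the following: every service joins the user-defined `perplexica-network` bridge, the backend reaches SearxNG by service name instead of `127.0.0.1`, and `network_mode: host` gives way to an explicit port mapping. This is only a sketch of the lines touched by the diff; image names and build arguments outside the hunks are left out.

```yaml
# Rough sketch of the post-patch docker-compose.yaml
# (fields not touched by the diff, e.g. image names and frontend build args, are left out).
services:
  searxng:
    volumes:
      - ./searxng:/etc/searxng:rw
    ports:
      - 31336:8080
    networks:
      - perplexica-network

  perplexica-backend:
    build:
      context: .
      dockerfile: backend.dockerfile
      args:
        # service-name DNS on the shared bridge network replaces 127.0.0.1
        - SEARXNG_API_URL=http://searxng:8080
    depends_on:
      - searxng
    ports:
      - 31338:31338 # replaces network_mode: host
    networks:
      - perplexica-network

  perplexica-frontend:
    depends_on:
      - perplexica-backend
    ports:
      - 31337:31337
    networks:
      - perplexica-network

networks:
  perplexica-network:
```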