Mirror of https://github.com/geoffsee/open-gsio.git (synced 2025-09-08 22:56:46 +00:00)
Add Docker Compose setup for Ollama and Open-WebUI services

- Replaced the single Docker command for Ollama with a `docker-compose` setup.
- Updated `start_inference_server.sh` to use `ollama-compose.yml`.
- Updated the README with new usage instructions for Ollama web UI access.
Committed by Geoff Seemueller
Parent: d5393ff964
Commit: 2884baf000
README

@@ -93,8 +93,7 @@ bun run server:dev # Restart server
 
 ~~~bash
 # See https://ollama.com/library for available models
-MODEL_TO_ADD=gemma3
-docker exec -it ollama ollama run ${MODEL_TO_ADD}
+use the ollama web ui @ http://localhost:8080
 ~~~
 
 
@@ -141,3 +140,5 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
 ~~~
 
+
+
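The README change above points users at the Open-WebUI on http://localhost:8080 instead of a `docker exec` model-pull command. Models can still be pulled from the CLI against the composed container when the web UI is not convenient; a minimal sketch, assuming the stack from `ollama-compose.yml` (added below) is already running, and reusing `gemma3` only as the example model name from the removed README lines:

~~~bash
# CLI alternative to the web UI: pull a model inside the running Ollama container.
# The container name "ollama" comes from ollama-compose.yml; "gemma3" is just an example.
docker exec -it ollama ollama pull gemma3

# Otherwise, manage and run models from the browser at http://localhost:8080 (Open-WebUI).
~~~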
packages/scripts/ollama-compose.yml (new file, 38 lines)

@@ -0,0 +1,38 @@
+services:
+  ollama:
+    volumes:
+      - ollama:/root/.ollama
+    container_name: ollama
+    pull_policy: always
+    tty: true
+    restart: unless-stopped
+    image: ollama/ollama:${OLLAMA_DOCKER_TAG-latest}
+    expose:
+      - 11434
+    ports:
+      - "11434:11434"
+
+  open-webui:
+    build:
+      context: .
+      args:
+        OLLAMA_BASE_URL: '/ollama'
+      dockerfile: Dockerfile
+    image: ghcr.io/open-webui/open-webui:${WEBUI_DOCKER_TAG-main}
+    container_name: open-webui
+    volumes:
+      - open-webui:/app/backend/data
+    depends_on:
+      - ollama
+    ports:
+      - ${OPEN_WEBUI_PORT-8080}:8080
+    environment:
+      - 'OLLAMA_BASE_URL=http://ollama:11434'
+      - 'WEBUI_SECRET_KEY='
+    extra_hosts:
+      - host.docker.internal:host-gateway
+    restart: unless-stopped
+
+volumes:
+  ollama: {}
+  open-webui: {}
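For completeness, a minimal sketch of running the compose file directly rather than through the start script; the ports come from the compose file above, and the `curl` check is illustrative rather than part of this commit:

~~~bash
# Bring up Ollama and Open-WebUI in the background
docker-compose -f packages/scripts/ollama-compose.yml up -d

# The Ollama API is published on 11434; a quick liveness check
curl http://localhost:11434/api/version

# Open-WebUI listens on ${OPEN_WEBUI_PORT-8080}, i.e. http://localhost:8080 by default

# Stop the stack; the named volumes "ollama" and "open-webui" keep model data
docker-compose -f packages/scripts/ollama-compose.yml down
~~~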
start_inference_server.sh

@@ -5,8 +5,9 @@ if [ "$1" = "mlx-omni-server" ]; then
   mlx-omni-server --log-level debug
 elif [ "$1" = "ollama" ]; then
   echo "starting ollama"
-  docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
+  SCRIPT_DIR=$(dirname $(realpath "$0"))
+  docker-compose -f "${SCRIPT_DIR}/ollama-compose.yml" up -d
 else
-  printf "Error: First argument must be 'mlx-omni-server'\n"
+  printf "Error: First argument must be 'mlx-omni-server' or 'ollama'\n"
   exit 1
 fi
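Usage of the updated script, as a sketch: the argument handling is exactly what the diff above shows, but the `packages/scripts/` path is inferred from the compose file's location and the script is assumed to be executable:

~~~bash
# Start the Ollama + Open-WebUI stack via the helper script
./packages/scripts/start_inference_server.sh ollama

# The other supported argument starts the MLX-based server instead
./packages/scripts/start_inference_server.sh mlx-omni-server
~~~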