Add Docker Compose setup for Ollama and Open-WebUI services

- Replaced single Docker command for Ollama with a `docker-compose` setup.
- Updated `start_inference_server.sh` to use `ollama-compose.yml`.
- Updated README with new usage instructions for Ollama web UI access.
This commit is contained in:
geoffsee
2025-06-04 14:56:17 -04:00
committed by Geoff Seemueller
parent d5393ff964
commit 2884baf000
3 changed files with 44 additions and 4 deletions

View File

@@ -5,8 +5,9 @@ if [ "$1" = "mlx-omni-server" ]; then
# NOTE(review): this span is a rendered git diff hunk with the +/- markers
# stripped, so pre-change and post-change lines appear interleaved. Per the
# commit message ("Replaced single Docker command for Ollama with a
# docker-compose setup"), the direct `docker run` line and the first printf
# are the REMOVED lines; the SCRIPT_DIR/docker-compose lines and the second
# printf are the ADDED lines. Do not run this span as-is.
mlx-omni-server --log-level debug
elif [ "$1" = "ollama" ]; then
echo "starting ollama"
# removed (-): old single-container launch, pinned name "ollama", port 11434
docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
# added (+): resolve the script's own directory so the compose file is found
# regardless of the caller's cwd
# NOTE(review): inner $(realpath "$0") is unquoted in the outer substitution —
# would break on paths with spaces; presumably fine here, but worth confirming
SCRIPT_DIR=$(dirname $(realpath "$0"))
# added (+): bring up the ollama + open-webui stack defined in the compose file
docker-compose -f "${SCRIPT_DIR}/ollama-compose.yml" up -d
else
# removed (-): old error text that only mentioned 'mlx-omni-server'
printf "Error: First argument must be 'mlx-omni-server'\n"
# added (+): updated error text covering both accepted arguments
printf "Error: First argument must be 'mlx-omni-server' or 'ollama'\n"
exit 1
fi