update
@@ -1,20 +1,3 @@
-# Munich News Daily - Docker Compose Configuration
-#
-# GPU Support:
-# To enable GPU acceleration for Ollama (5-10x faster):
-# 1. Check GPU availability: ./check-gpu.sh
-# 2. Start with GPU: ./start-with-gpu.sh
-#    Or manually: docker-compose -f docker-compose.yml -f docker-compose.gpu.yml up -d
-#
-# Security:
-# - Only Backend API (port 5001) is exposed to host
-# - MongoDB is internal-only (not exposed to host)
-# - Ollama is internal-only (not exposed to host)
-# - Crawler and Sender are internal-only
-# All services communicate via internal Docker network
-#
-# See docs/OLLAMA_SETUP.md for detailed setup instructions
-
 services:
   # Ollama AI Service (Internal only - not exposed to host)
   ollama:
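The security notes deleted in this hunk describe the intended exposure model: only the Backend API publishes port 5001 to the host, while MongoDB, Ollama, Crawler, and Sender stay reachable only over the internal Docker network. A minimal sketch of that pattern, assuming hypothetical service and image names (backend, mongodb), since the rest of the compose file is not shown in this diff:

services:
  backend:
    image: munich-news/backend   # hypothetical image name
    ports:
      - "5001:5001"              # the only port published to the host
    networks:
      - internal
  mongodb:
    image: mongo:7               # assumed MongoDB image/tag
    networks:
      - internal                 # no ports: entry, so it stays internal-only

networks:
  internal:
    driver: bridge

Omitting a ports: mapping is what keeps a service internal-only; containers on the same network can still reach it by service name.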
@@ -29,14 +12,6 @@ services:
     dns:
       - 8.8.8.8
       - 1.1.1.1
-    # GPU support (uncomment if you have NVIDIA GPU)
-    # deploy:
-    #   resources:
-    #     reservations:
-    #       devices:
-    #         - driver: nvidia
-    #           count: all
-    #           capabilities: [gpu]
     healthcheck:
       test: [ "CMD-SHELL", "ollama list || exit 1" ]
       interval: 30s
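The header comments removed in the first hunk refer to a docker-compose.gpu.yml override, and the commented-out deploy block deleted just above shows the GPU reservation it would carry. A plausible sketch of such an override file, reconstructed only from those deleted comments (the project's actual file is not part of this commit):

# docker-compose.gpu.yml - hypothetical override mirroring the deleted comments
services:
  ollama:
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]

It would be applied with the command quoted in the removed header, docker-compose -f docker-compose.yml -f docker-compose.gpu.yml up -d, which additionally requires the NVIDIA Container Toolkit on the host.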