Files
Munich-news/test-ollama-setup.sh
2025-11-11 17:40:29 +01:00

169 lines
5.1 KiB
Bash
Executable File
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
#!/bin/bash
# Comprehensive test script for Ollama setup (CPU and GPU)

# Banner for the test suite.
printf '%s\n' \
  "==========================================" \
  "Ollama Setup Test Suite" \
  "==========================================" \
  ""

# Running tally of failed tests; printed in the summary and used as the exit code.
ERRORS=0
# Test 1: Check if Docker is running
echo "Test 1: Docker availability"
if ! docker info &> /dev/null; then
  # Docker daemon unreachable (or CLI missing) — count as a hard failure.
  echo "✗ Docker is not running"
  ERRORS=$((ERRORS + 1))
else
  echo "✓ Docker is running"
fi
echo ""
# Test 2: Check if docker-compose files are valid
echo "Test 2: Docker Compose configuration"

# Validate one compose configuration and report the result.
# $1: label used in the ✓/✗ message; remaining args: extra flags (-f files).
validate_compose() {
  local label=$1
  shift
  if docker-compose "$@" config --quiet &> /dev/null; then
    echo "✓ $label is valid"
  else
    echo "✗ $label has errors"
    ERRORS=$((ERRORS + 1))
  fi
}

validate_compose "docker-compose.yml"
validate_compose "docker-compose.gpu.yml" -f docker-compose.yml -f docker-compose.gpu.yml
echo ""
# Test 3: Check GPU availability
echo "Test 3: GPU availability"
if ! command -v nvidia-smi &> /dev/null; then
  echo " No NVIDIA GPU detected (CPU mode will be used)"
else
  echo "✓ NVIDIA GPU detected"
  # List each detected GPU name, indented for readability.
  nvidia-smi --query-gpu=name --format=csv,noheader | sed 's/^/ - /'
  # Confirm containers can reach the GPU through the NVIDIA runtime.
  if docker run --rm --gpus all nvidia/cuda:12.0.0-base-ubuntu22.04 nvidia-smi &> /dev/null; then
    echo "✓ Docker can access GPU"
  else
    echo "⚠ Docker cannot access GPU (install nvidia-container-toolkit)"
  fi
fi
echo ""
# Test 4: Check if Ollama service is defined
echo "Test 4: Ollama service configuration"
# Anchor the pattern to "optional indent + ollama:" so similarly named keys
# (e.g. a container name "munich-news-ollama:") cannot produce a false positive.
if docker-compose config | grep -qE '^[[:space:]]*ollama:'; then
  echo "✓ Ollama service is defined"
else
  echo "✗ Ollama service not found in docker-compose.yml"
  ERRORS=$((ERRORS + 1))
fi
echo ""
# Test 5: Check if .env file exists
echo "Test 5: Environment configuration"

# Inspect an env file for the Ollama settings and report findings.
# $1: path to the .env file. Missing file is a warning, not an error.
check_env_file() {
  local env_file=$1
  if [ ! -f "$env_file" ]; then
    echo "⚠ $env_file not found (copy from backend/.env.example)"
    return 0
  fi
  echo "✓ $env_file exists"
  # Anchor at line start so commented-out entries ("#OLLAMA_ENABLED=true")
  # are not mistaken for active configuration.
  if grep -q "^OLLAMA_ENABLED=true" "$env_file"; then
    echo "✓ Ollama is enabled"
  else
    echo "⚠ Ollama is disabled in .env"
  fi
  if grep -q "^OLLAMA_BASE_URL=" "$env_file"; then
    # head -n 1: use the first definition if duplicated.
    # cut -f2-: keep the whole value even if the URL itself contains '='.
    OLLAMA_URL=$(grep "^OLLAMA_BASE_URL=" "$env_file" | head -n 1 | cut -d'=' -f2-)
    echo "✓ Ollama URL configured: $OLLAMA_URL"
  else
    echo "⚠ OLLAMA_BASE_URL not set"
  fi
}

check_env_file "backend/.env"
echo ""
# Test 6: Helper scripts
echo "Test 6: Helper scripts"

# Report whether a helper script exists and is executable, using the same
# ✓/✗ prefixes as the other tests. Returns non-zero when the check fails.
# $1: path to the script.
check_script() {
  local script=$1
  if [ -f "$script" ] && [ -x "$script" ]; then
    echo "✓ $script exists and is executable"
  else
    echo "✗ $script missing or not executable"
    return 1
  fi
}

SCRIPTS=("check-gpu.sh" "start-with-gpu.sh" "configure-ollama.sh")
for script in "${SCRIPTS[@]}"; do
  check_script "$script" || ERRORS=$((ERRORS + 1))
done
echo ""
# Test 7: Documentation
echo "Test 7: Documentation"

# Report whether a documentation file exists, using the same ✓/✗ prefixes
# as the other tests. Returns non-zero when the file is missing.
# $1: path to the documentation file.
check_doc() {
  local doc=$1
  if [ -f "$doc" ]; then
    echo "✓ $doc exists"
  else
    echo "✗ $doc missing"
    return 1
  fi
}

DOCS=("docs/OLLAMA_SETUP.md" "docs/GPU_SETUP.md" "QUICK_START_GPU.md")
for doc in "${DOCS[@]}"; do
  check_doc "$doc" || ERRORS=$((ERRORS + 1))
done
echo ""
# Test 8: Check if Ollama is running (if services are up)
echo "Test 8: Ollama service status"
# Match against the Names column only, so image names or other columns of
# `docker ps` output cannot produce a false positive.
if docker ps --format '{{.Names}}' | grep -q "munich-news-ollama"; then
  echo "✓ Ollama container is running"
  # Check if crawler is running (needed to test Ollama from inside the network)
  if docker ps --format '{{.Names}}' | grep -q "munich-news-crawler"; then
    # Test Ollama API from inside the compose network using Python
    if docker-compose exec -T crawler python -c "import requests; requests.get('http://ollama:11434/api/tags', timeout=5)" &> /dev/null; then
      echo "✓ Ollama API is accessible (internal network)"
      # Check if the phi3 model has been pulled
      if docker-compose exec -T crawler python -c "import requests; r = requests.get('http://ollama:11434/api/tags'); exit(0 if 'phi3' in r.text else 1)" &> /dev/null; then
        echo "✓ phi3 model is available"
      else
        echo "⚠ phi3 model not found (may still be downloading)"
      fi
    else
      echo "⚠ Ollama API not responding from crawler"
    fi
  else
    echo " Crawler not running (needed to test internal Ollama access)"
  fi
  # Verify port is NOT exposed to host. Skip (instead of falsely reporting
  # "internal-only") when nc is not installed.
  if ! command -v nc &> /dev/null; then
    echo "⚠ nc not available, skipping host port exposure check"
  elif nc -z -w 2 localhost 11434 &> /dev/null; then
    echo "⚠ WARNING: Ollama port is exposed to host (should be internal only)"
  else
    echo "✓ Ollama is internal-only (not exposed to host)"
  fi
else
  echo " Ollama container not running (start with: docker-compose up -d)"
fi
echo ""
# Summary: report totals and exit with the number of failed tests so
# callers (CI, other scripts) can branch on the result.
echo "=========================================="
echo "Test Summary"
echo "=========================================="
# Quote $ERRORS so an unexpected empty value yields a clear test error
# instead of a malformed `[ -eq 0 ]` expression.
if [ "$ERRORS" -eq 0 ]; then
  echo "✓ All tests passed!"
  echo ""
  echo "Next steps:"
  echo "1. Start services: ./start-with-gpu.sh"
  echo "2. Test translation: docker-compose exec crawler python crawler_service.py 1"
  echo "3. Monitor GPU: watch -n 1 'docker exec munich-news-ollama nvidia-smi'"
else
  echo "$ERRORS test(s) failed"
  echo ""
  echo "Please fix the errors above before proceeding."
fi
echo ""
exit "$ERRORS"