update
This commit is contained in:
156
test-ollama-setup.sh
Executable file
156
test-ollama-setup.sh
Executable file
@@ -0,0 +1,156 @@
|
||||
#!/bin/bash
# Comprehensive test script for Ollama setup (CPU and GPU)
#
# Runs a series of environment checks (Docker, compose files, GPU access,
# Ollama service definition, env configuration, helper scripts, docs) and
# exits with the number of failed checks.

# Print the suite banner.
printf '%s\n' \
  "==========================================" \
  "Ollama Setup Test Suite" \
  "==========================================" \
  ""

# Running tally of failed checks; each test below increments this on failure.
ERRORS=0
# Test 1: Check if Docker is running
# `docker info` succeeds only when the daemon is reachable.
echo "Test 1: Docker availability"
if ! docker info > /dev/null 2>&1; then
  echo "✗ Docker is not running"
  ERRORS=$((ERRORS + 1))
else
  echo "✓ Docker is running"
fi
echo ""
# Test 2: Check if docker-compose files are valid
# `config --quiet` parses the files without printing the merged config.
echo "Test 2: Docker Compose configuration"
if docker-compose config --quiet > /dev/null 2>&1; then
  echo "✓ docker-compose.yml is valid"
else
  echo "✗ docker-compose.yml has errors"
  ERRORS=$((ERRORS + 1))
fi

# Validate the GPU overlay merged on top of the base compose file.
if docker-compose -f docker-compose.yml -f docker-compose.gpu.yml config --quiet > /dev/null 2>&1; then
  echo "✓ docker-compose.gpu.yml is valid"
else
  echo "✗ docker-compose.gpu.yml has errors"
  ERRORS=$((ERRORS + 1))
fi
echo ""
# Test 3: Check GPU availability
# Informational only: a missing GPU or missing container toolkit does not
# count as an error, since CPU mode is a supported fallback.
echo "Test 3: GPU availability"
if ! command -v nvidia-smi > /dev/null 2>&1; then
  echo "ℹ No NVIDIA GPU detected (CPU mode will be used)"
else
  echo "✓ NVIDIA GPU detected"
  # List detected GPU names, indented as bullet points.
  nvidia-smi --query-gpu=name --format=csv,noheader | sed 's/^/ - /'

  # Test Docker GPU access by running nvidia-smi inside a CUDA base image.
  if docker run --rm --gpus all nvidia/cuda:12.0.0-base-ubuntu22.04 nvidia-smi > /dev/null 2>&1; then
    echo "✓ Docker can access GPU"
  else
    echo "⚠ Docker cannot access GPU (install nvidia-container-toolkit)"
  fi
fi
echo ""
# Test 4: Check if Ollama service is defined
echo "Test 4: Ollama service configuration"
# Capture the merged compose config once, then look for the service key.
merged_config=$(docker-compose config)
if grep -q "ollama:" <<<"$merged_config"; then
  echo "✓ Ollama service is defined"
else
  echo "✗ Ollama service not found in docker-compose.yml"
  ERRORS=$((ERRORS + 1))
fi
echo ""
# Test 5: Check if .env file exists
echo "Test 5: Environment configuration"
if [ -f "backend/.env" ]; then
  echo "✓ backend/.env exists"

  # Check Ollama configuration.
  # Anchor the patterns to the start of the line so commented-out settings
  # (e.g. "#OLLAMA_ENABLED=true") are not mistaken for active ones.
  if grep -q "^OLLAMA_ENABLED=true" backend/.env; then
    echo "✓ Ollama is enabled"
  else
    echo "⚠ Ollama is disabled in .env"
  fi

  if grep -q "^OLLAMA_BASE_URL=" backend/.env; then
    # -m1: take only the first active assignment (duplicates would otherwise
    # yield a multi-line value); -f2-: keep everything after the first '=' so
    # URLs containing '=' (e.g. query strings) are not truncated.
    OLLAMA_URL=$(grep -m1 "^OLLAMA_BASE_URL=" backend/.env | cut -d'=' -f2-)
    echo "✓ Ollama URL configured: $OLLAMA_URL"
  else
    echo "⚠ OLLAMA_BASE_URL not set"
  fi
else
  echo "⚠ backend/.env not found (copy from backend/.env.example)"
fi
echo ""
# Test 6: Helper scripts
# Each helper must both exist and carry the executable bit.
echo "Test 6: Helper scripts"
for script in check-gpu.sh start-with-gpu.sh configure-ollama.sh; do
  if [ -f "$script" ] && [ -x "$script" ]; then
    echo "✓ $script exists and is executable"
  else
    echo "✗ $script missing or not executable"
    ERRORS=$((ERRORS + 1))
  fi
done
echo ""
# Test 7: Check documentation
echo "Test 7: Documentation"
DOCS=("docs/OLLAMA_SETUP.md" "docs/GPU_SETUP.md" "QUICK_START_GPU.md")
for doc in "${DOCS[@]}"; do
  if [ ! -f "$doc" ]; then
    echo "✗ $doc missing"
    ERRORS=$((ERRORS + 1))
  else
    echo "✓ $doc exists"
  fi
done
echo ""
# Test 8: Check if Ollama is running (if services are up)
# Informational: a stopped container is not an error, just a hint to start it.
echo "Test 8: Ollama service status"
if ! docker ps | grep -q "munich-news-ollama"; then
  echo "ℹ Ollama container not running (start with: docker-compose up -d)"
else
  echo "✓ Ollama container is running"

  # Test Ollama API on its default port.
  if ! curl -s http://localhost:11434/api/tags > /dev/null 2>&1; then
    echo "⚠ Ollama API not responding"
  else
    echo "✓ Ollama API is accessible"

    # Check if the expected model has been pulled yet.
    if curl -s http://localhost:11434/api/tags | grep -q "phi3"; then
      echo "✓ phi3 model is available"
    else
      echo "⚠ phi3 model not found (may still be downloading)"
    fi
  fi
fi
echo ""
# Summary: report the overall result and propagate the failure count
# as the script's exit status (0 == all checks passed).
printf '%s\n' \
  "==========================================" \
  "Test Summary" \
  "=========================================="
if [ "$ERRORS" -eq 0 ]; then
  printf '%s\n' \
    "✓ All tests passed!" \
    "" \
    "Next steps:" \
    "1. Start services: ./start-with-gpu.sh" \
    "2. Test translation: docker-compose exec crawler python crawler_service.py 1" \
    "3. Monitor GPU: watch -n 1 'docker exec munich-news-ollama nvidia-smi'"
else
  printf '%s\n' \
    "✗ $ERRORS test(s) failed" \
    "" \
    "Please fix the errors above before proceeding."
fi
printf '\n'

exit $ERRORS
Reference in New Issue
Block a user