cleanup and ui change

This commit is contained in:
2025-11-16 20:59:14 +01:00
parent f0e552b1b1
commit 9f167a6534
15 changed files with 165 additions and 155 deletions

32
scripts/README.md Normal file
View File

@@ -0,0 +1,32 @@
# Utility Scripts
This folder contains utility scripts for testing and managing the Munich News Daily system.
## Available Scripts
### Ollama / GPU
- `setup-ollama-model.sh` - Pull and set up the Ollama model (used by Docker)
- `configure-ollama.sh` - Configure Ollama settings
- `pull-ollama-model.sh` - Manually pull Ollama model
- `start-with-gpu.sh` - Start services with GPU support
- `check-gpu.sh` - Check GPU availability
- `check-gpu-api.sh` - Check GPU via API
- `diagnose-gpu.sh` - Diagnose GPU issues
- `test-ollama-setup.sh` - Test Ollama configuration
### Testing
- `test-mongodb-connectivity.sh` - Test MongoDB connection
- `test-newsletter-api.sh` - Test newsletter API endpoints
- `check-articles.sh` - Check articles in database
## Usage
Make scripts executable:
```bash
chmod +x scripts/*.sh
```
Run a script:
```bash
./scripts/script-name.sh
```

49
scripts/check-articles.sh Normal file
View File

@@ -0,0 +1,49 @@
#!/bin/bash
# Check article counts and freshness in the MongoDB "munich_news" database.
#
# Runs four read-only queries inside the munich-news-mongodb container.
# Credentials default to the docker-compose dev defaults but can be
# overridden via the MONGO_USER / MONGO_PASS environment variables.

MONGO_USER="${MONGO_USER:-admin}"
MONGO_PASS="${MONGO_PASS:-changeme}"

#######################################
# Run a JavaScript snippet against MongoDB inside the container.
# Arguments: $1 - mongosh --eval script body
# Outputs:   query results on stdout
#######################################
run_mongo() {
  docker exec munich-news-mongodb mongosh \
    -u "$MONGO_USER" -p "$MONGO_PASS" \
    --authenticationDatabase admin --quiet --eval "$1"
}

echo "=== Checking Articles by Category ==="
echo ""

echo "1. Articles from TODAY by category:"
run_mongo '
var today = new Date();
today.setHours(0,0,0,0);
db.getSiblingDB("munich_news").articles.aggregate([
{$match: {created_at: {$gte: today}, summary: {$exists: true, $ne: null}}},
{$group: {_id: "$category", count: {$sum: 1}}},
{$sort: {count: -1}}
])
'
echo ""

echo "2. ALL articles by category (any date):"
run_mongo '
db.getSiblingDB("munich_news").articles.aggregate([
{$match: {summary: {$exists: true, $ne: null}}},
{$group: {_id: "$category", count: {$sum: 1}}},
{$sort: {count: -1}}
])
'
echo ""

echo "3. Latest article in each category:"
run_mongo '
db.getSiblingDB("munich_news").articles.aggregate([
{$match: {summary: {$exists: true, $ne: null}}},
{$sort: {created_at: -1}},
{$group: {_id: "$category", latest_article: {$first: "$title"}, latest_date: {$first: "$created_at"}, count: {$sum: 1}}}
])
'
echo ""

echo "4. Science articles from TODAY:"
run_mongo '
var today = new Date();
today.setHours(0,0,0,0);
db.getSiblingDB("munich_news").articles.find(
{category: "science", created_at: {$gte: today}, summary: {$exists: true}},
{title: 1, created_at: 1, category: 1, _id: 0}
).sort({created_at: -1})
'
echo ""
echo "=== Done ==="

46
scripts/check-gpu-api.sh Executable file
View File

@@ -0,0 +1,46 @@
#!/bin/bash
# Check GPU status of the Ollama service via the backend API.
# The backend base URL can be overridden via the BASE_URL environment variable.
BASE_URL="${BASE_URL:-http://localhost:5001}"

echo "=========================================="
echo "Ollama GPU Status Check"
echo "=========================================="
echo ""

# Fetch the gpu-status and performance-test endpoints once and reuse the
# JSON for the summary below (the original fetched each endpoint twice,
# which doubles latency and can show inconsistent results).
GPU_JSON=$(curl -s "$BASE_URL/api/ollama/gpu-status")
TEST_JSON=$(curl -s "$BASE_URL/api/ollama/test")

# Check GPU status
echo "1. GPU Status:"
echo "---"
printf '%s' "$GPU_JSON" | python3 -m json.tool
echo ""
echo ""

# Test performance
echo "2. Performance Test:"
echo "---"
printf '%s' "$TEST_JSON" | python3 -m json.tool
echo ""
echo ""

# List models
echo "3. Available Models:"
echo "---"
curl -s "$BASE_URL/api/ollama/models" | python3 -m json.tool
echo ""
echo ""

echo "=========================================="
echo "Quick Summary:"
echo "=========================================="
# Extract key info from the already-fetched JSON
GPU_STATUS=$(printf '%s' "$GPU_JSON" | python3 -c "import json,sys; data=json.load(sys.stdin); print('GPU Active' if data.get('gpu_in_use') else 'CPU Mode')" 2>/dev/null || echo "Error")
PERF=$(printf '%s' "$TEST_JSON" | python3 -c "import json,sys; data=json.load(sys.stdin); print(f\"{data.get('duration_seconds', 'N/A')}s - {data.get('performance', 'N/A')}\")" 2>/dev/null || echo "Error")
echo "GPU Status: $GPU_STATUS"
echo "Performance: $PERF"
echo ""
if [ "$GPU_STATUS" = "CPU Mode" ]; then
    echo "💡 TIP: Enable GPU for 5-10x faster processing:"
    echo "   docker-compose -f docker-compose.yml -f docker-compose.gpu.yml up -d"
    echo "   See docs/GPU_SETUP.md for details"
fi

54
scripts/check-gpu.sh Executable file
View File

@@ -0,0 +1,54 @@
#!/bin/bash
# Check GPU availability for Ollama: NVIDIA driver, CUDA toolkit, and
# Docker GPU passthrough. Run from the repository root.
echo "GPU Availability Check"
echo "======================"
echo ""

# Check for NVIDIA GPU (nvidia-smi is installed with the driver)
if command -v nvidia-smi &> /dev/null; then
    echo "✓ NVIDIA GPU detected"
    echo ""
    echo "GPU Information:"
    nvidia-smi --query-gpu=index,name,driver_version,memory.total,memory.free --format=csv,noheader | \
        awk -F', ' '{printf "  GPU %s: %s\n    Driver: %s\n    Memory: %s total, %s free\n\n", $1, $2, $3, $4, $5}'

    # Check CUDA version (optional; only present if the toolkit is installed)
    if command -v nvcc &> /dev/null; then
        echo "CUDA Version:"
        nvcc --version | grep "release" | awk '{print "  " $0}'
        echo ""
    fi

    # Check Docker GPU support by running nvidia-smi inside a CUDA container
    echo "Checking Docker GPU support..."
    if docker run --rm --gpus all nvidia/cuda:12.0.0-base-ubuntu22.04 nvidia-smi &> /dev/null; then
        echo "✓ Docker can access GPU"
        echo ""
        echo "Recommendation: Use GPU-accelerated startup"
        # Path fixed: the helper scripts live in scripts/ (see scripts/README.md)
        echo "  ./scripts/start-with-gpu.sh"
    else
        echo "✗ Docker cannot access GPU"
        echo ""
        echo "Install NVIDIA Container Toolkit:"
        echo "  https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/install-guide.html"
        echo ""
        echo "After installation, restart Docker:"
        echo "  sudo systemctl restart docker"
    fi
else
    echo " No NVIDIA GPU detected"
    echo ""
    echo "Running Ollama on CPU is supported but slower."
    echo ""
    echo "Performance comparison:"
    echo "  CPU: ~1-2s per translation, ~8s per summary"
    echo "  GPU: ~0.3s per translation, ~2s per summary"
    echo ""
    echo "Recommendation: Use standard startup"
    echo "  docker-compose up -d"
fi

echo ""
echo "For more information, see: docs/OLLAMA_SETUP.md"

60
scripts/configure-ollama.sh Executable file
View File

@@ -0,0 +1,60 @@
#!/bin/bash
# Configure OLLAMA_BASE_URL in backend/.env for either a Docker Compose
# managed Ollama container or an external Ollama server on the host.

#######################################
# Set OLLAMA_BASE_URL in $ENV_FILE: update in place if the key exists
# (keeping a .bak backup, as sed -i.bak works on both GNU and BSD sed),
# otherwise append it.
# Globals:   ENV_FILE (read)
# Arguments: $1 - URL to write
#######################################
set_ollama_url() {
  local url=$1
  if grep -q "OLLAMA_BASE_URL=" "$ENV_FILE"; then
    sed -i.bak "s|OLLAMA_BASE_URL=.*|OLLAMA_BASE_URL=$url|" "$ENV_FILE"
  else
    echo "OLLAMA_BASE_URL=$url" >> "$ENV_FILE"
  fi
  echo "✓ Updated OLLAMA_BASE_URL to $url"
}

echo "Ollama Configuration Helper"
echo "============================"
echo ""
echo "Choose your Ollama setup:"
echo "1) Docker Compose (Ollama runs in container)"
echo "2) External Server (Ollama runs on host machine)"
echo ""
# -r: do not mangle backslashes in the user's input
read -r -p "Enter choice [1-2]: " choice

ENV_FILE="backend/.env"
if [ ! -f "$ENV_FILE" ]; then
    echo "Error: $ENV_FILE not found!"
    exit 1
fi

case "$choice" in
    1)
        echo "Configuring for Docker Compose..."
        # Use the internal Docker network hostname
        set_ollama_url "http://ollama:11434"
        echo ""
        echo "Next steps:"
        echo "1. Start services: docker-compose up -d"
        echo "2. Wait for model download: docker-compose logs -f ollama-setup"
        echo "3. Test: docker-compose exec crawler python crawler_service.py 1"
        ;;
    2)
        echo "Configuring for external Ollama server..."
        # Use the host machine as seen from inside containers
        set_ollama_url "http://host.docker.internal:11434"
        echo ""
        echo "Next steps:"
        echo "1. Install Ollama: https://ollama.ai/download"
        echo "2. Pull model: ollama pull phi3:latest"
        echo "3. Start Ollama: ollama serve"
        echo "4. Start services: docker-compose up -d"
        ;;
    *)
        echo "Invalid choice!"
        exit 1
        ;;
esac

echo ""
echo "Configuration complete!"

31
scripts/diagnose-gpu.sh Normal file
View File

@@ -0,0 +1,31 @@
#!/bin/bash
# GPU Diagnostic Script for Munich News Ollama.
# Walks through the GPU stack layer by layer: Docker runtime, container
# configuration, in-container driver access, Ollama logs, and a live prompt.
echo "=========================================="
echo "GPU Diagnostic for Munich News Ollama"
echo "=========================================="
echo ""

echo "1. Checking if NVIDIA runtime is available..."
# Use the same CUDA image as check-gpu.sh / start-with-gpu.sh so all
# scripts pull a single image (was nvidia/cuda:11.0-base, a stale tag).
docker run --rm --gpus all nvidia/cuda:12.0.0-base-ubuntu22.04 nvidia-smi 2>&1 | head -20
echo ""

echo "2. Checking Ollama container GPU configuration..."
docker inspect munich-news-ollama | grep -A 10 "DeviceRequests"
echo ""

echo "3. Checking if GPU is accessible inside Ollama container..."
docker exec munich-news-ollama nvidia-smi 2>&1 | head -20
echo ""

echo "4. Checking Ollama logs for GPU messages..."
docker logs munich-news-ollama 2>&1 | grep -i "gpu\|cuda\|nvidia" | tail -10
echo ""

echo "5. Testing Ollama with a simple prompt..."
docker exec munich-news-ollama ollama run phi3:latest "Hello" 2>&1 | head -10
echo ""

echo "=========================================="
echo "Diagnostic complete!"
echo "=========================================="

44
scripts/pull-ollama-model.sh Executable file
View File

@@ -0,0 +1,44 @@
#!/bin/bash
# Pull the Ollama model configured in backend/.env (OLLAMA_MODEL) into the
# running Ollama container. Run from the repository root.
set -e

# Load OLLAMA_MODEL from .env.
# Parse just the one key instead of `export $(grep ... | xargs)`, which
# word-splits and breaks on values containing spaces or quotes.
if [ -f backend/.env ]; then
    OLLAMA_MODEL=$(grep -E '^OLLAMA_MODEL=' backend/.env | tail -n 1 | cut -d '=' -f 2-)
else
    echo "Error: backend/.env file not found"
    exit 1
fi

# Default to phi3:latest if not set
MODEL=${OLLAMA_MODEL:-phi3:latest}

echo "=========================================="
echo "Pulling Ollama Model: $MODEL"
echo "=========================================="
echo ""

# Check if Ollama container is running
if ! docker-compose ps ollama | grep -q "Up"; then
    echo "Error: Ollama container is not running"
    echo "Start it with: docker-compose up -d ollama"
    exit 1
fi

echo "Pulling model via Ollama API..."
echo ""

# Pull the model (-T: no TTY, safe for non-interactive use)
docker-compose exec -T ollama ollama pull "$MODEL"

echo ""
echo "=========================================="
echo "✓ Model $MODEL pulled successfully!"
echo "=========================================="
echo ""
echo "Verify with:"
echo "  docker-compose exec ollama ollama list"
echo ""
echo "Test with:"
echo "  curl http://localhost:5001/api/ollama/test"

46
scripts/start-with-gpu.sh Executable file
View File

@@ -0,0 +1,46 @@
#!/bin/bash
# Start Docker Compose with GPU support if an NVIDIA GPU and the NVIDIA
# Docker runtime are available; otherwise fall back to CPU-only mode.
echo "Munich News - GPU Detection & Startup"
echo "======================================"
echo ""

# Check if nvidia-smi is available (installed with the NVIDIA driver)
if command -v nvidia-smi &> /dev/null; then
    echo "✓ NVIDIA GPU detected!"
    nvidia-smi --query-gpu=name,driver_version,memory.total --format=csv,noheader
    echo ""

    # Check if the NVIDIA Docker runtime is available
    if docker run --rm --gpus all nvidia/cuda:12.0.0-base-ubuntu22.04 nvidia-smi &> /dev/null; then
        echo "✓ NVIDIA Docker runtime is available"
        echo ""
        echo "Starting services with GPU support..."
        # Use docker-compose consistently with the rest of this script and
        # repo (was `docker compose`, which requires the v2 plugin and
        # half-fails on machines that only have standalone docker-compose).
        docker-compose -f docker-compose.yml -f docker-compose.gpu.yml up -d
        echo ""
        echo "✓ Services started with GPU acceleration!"
        echo ""
        echo "To verify GPU is being used by Ollama:"
        echo "  docker exec munich-news-ollama nvidia-smi"
    else
        echo "⚠ NVIDIA Docker runtime not found!"
        echo ""
        echo "To enable GPU support, install nvidia-container-toolkit:"
        echo "  https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/install-guide.html"
        echo ""
        echo "Starting services without GPU support..."
        docker-compose up -d
    fi
else
    echo " No NVIDIA GPU detected"
    echo "Starting services with CPU-only mode..."
    docker-compose up -d
fi

echo ""
echo "Services are starting. Check status with:"
echo "  docker-compose ps"
echo ""
echo "View logs:"
echo "  docker-compose logs -f ollama"

View File

@@ -0,0 +1,55 @@
#!/bin/bash
# Verify MongoDB network isolation and service connectivity:
# the DB must be reachable from the backend/crawler/sender containers
# but NOT from the host, while the backend API stays host-accessible.
echo "=========================================="
echo "MongoDB Connectivity Test"
echo "=========================================="
echo ""

# Test 1: MongoDB not accessible from host.
# Check nc's exit status, not its output: most nc implementations print
# nothing on success without -v, so grepping for "succeeded|open" always
# failed and reported PASS even when the port was exposed.
echo "Test 1: MongoDB port not exposed to host"
if nc -z -w 2 localhost 27017 &> /dev/null; then
    echo "❌ FAIL: Port 27017 is accessible from host"
else
    echo "✅ PASS: Port 27017 is not accessible from host (internal only)"
fi
echo ""

# Test 2: Backend can connect
echo "Test 2: Backend can connect to MongoDB"
if docker-compose exec -T backend python -c "from database import articles_collection; articles_collection.count_documents({})" &> /dev/null; then
    echo "✅ PASS: Backend can connect to MongoDB"
else
    echo "❌ FAIL: Backend cannot connect to MongoDB"
fi
echo ""

# Test 3: Crawler can connect
echo "Test 3: Crawler can connect to MongoDB"
if docker-compose exec -T crawler python -c "from pymongo import MongoClient; from config import Config; MongoClient(Config.MONGODB_URI).server_info()" &> /dev/null; then
    echo "✅ PASS: Crawler can connect to MongoDB"
else
    echo "❌ FAIL: Crawler cannot connect to MongoDB"
fi
echo ""

# Test 4: Sender can connect
echo "Test 4: Sender can connect to MongoDB"
if docker-compose exec -T sender python -c "from pymongo import MongoClient; import os; MongoClient(os.getenv('MONGODB_URI')).server_info()" &> /dev/null; then
    echo "✅ PASS: Sender can connect to MongoDB"
else
    echo "❌ FAIL: Sender cannot connect to MongoDB"
fi
echo ""

# Test 5: Backend API accessible
echo "Test 5: Backend API accessible from host"
if curl -s http://localhost:5001/health | grep -q "healthy"; then
    echo "✅ PASS: Backend API is accessible"
else
    echo "❌ FAIL: Backend API is not accessible"
fi
echo ""

echo "=========================================="
echo "Test Complete"
echo "=========================================="

48
scripts/test-newsletter-api.sh Executable file
View File

@@ -0,0 +1,48 @@
#!/bin/bash
# Interactive test script for the newsletter API endpoints.
# Prompts before sending any real emails.
echo "=========================================="
echo "Newsletter API Test"
echo "=========================================="
echo ""

# Backend base URL (overridable for non-default setups)
BASE_URL="${BASE_URL:-http://localhost:5001}"

# Test 1: Get stats
echo "Test 1: Get system stats"
echo "GET $BASE_URL/api/admin/stats"
curl -s "$BASE_URL/api/admin/stats" | python3 -m json.tool | grep -A 3 "subscribers"
echo ""

# Test 2: Send test email (only if the user supplies an address)
echo "Test 2: Send test email"
read -r -p "Enter your email address for test: " TEST_EMAIL
if [ -n "$TEST_EMAIL" ]; then
    echo "POST $BASE_URL/api/admin/send-test-email"
    curl -s -X POST "$BASE_URL/api/admin/send-test-email" \
        -H "Content-Type: application/json" \
        -d "{\"email\": \"$TEST_EMAIL\", \"max_articles\": 2}" | python3 -m json.tool
    echo ""
else
    echo "Skipped (no email provided)"
    echo ""
fi

# Test 3: Send newsletter to all subscribers (requires explicit confirmation)
echo "Test 3: Send newsletter to all subscribers"
read -r -p "Send newsletter to all active subscribers? (y/N): " CONFIRM
if [ "$CONFIRM" = "y" ] || [ "$CONFIRM" = "Y" ]; then
    echo "POST $BASE_URL/api/admin/send-newsletter"
    curl -s -X POST "$BASE_URL/api/admin/send-newsletter" \
        -H "Content-Type: application/json" \
        -d '{"max_articles": 5}' | python3 -m json.tool
    echo ""
else
    echo "Skipped"
    echo ""
fi

echo "=========================================="
echo "Test Complete"
echo "=========================================="

168
scripts/test-ollama-setup.sh Executable file
View File

@@ -0,0 +1,168 @@
#!/bin/bash
# Comprehensive test script for the Ollama setup (CPU and GPU).
# Run from the repository root; exits with the number of failed checks.
echo "=========================================="
echo "Ollama Setup Test Suite"
echo "=========================================="
echo ""

ERRORS=0

# Test 1: Check if Docker is running
echo "Test 1: Docker availability"
if docker info &> /dev/null; then
    echo "✓ Docker is running"
else
    echo "✗ Docker is not running"
    ERRORS=$((ERRORS + 1))
fi
echo ""

# Test 2: Check if docker-compose files are valid
echo "Test 2: Docker Compose configuration"
if docker-compose config --quiet &> /dev/null; then
    echo "✓ docker-compose.yml is valid"
else
    echo "✗ docker-compose.yml has errors"
    ERRORS=$((ERRORS + 1))
fi
if docker-compose -f docker-compose.yml -f docker-compose.gpu.yml config --quiet &> /dev/null; then
    echo "✓ docker-compose.gpu.yml is valid"
else
    echo "✗ docker-compose.gpu.yml has errors"
    ERRORS=$((ERRORS + 1))
fi
echo ""

# Test 3: Check GPU availability (informational; CPU mode is supported)
echo "Test 3: GPU availability"
if command -v nvidia-smi &> /dev/null; then
    echo "✓ NVIDIA GPU detected"
    nvidia-smi --query-gpu=name --format=csv,noheader | sed 's/^/  - /'
    # Test Docker GPU access
    if docker run --rm --gpus all nvidia/cuda:12.0.0-base-ubuntu22.04 nvidia-smi &> /dev/null; then
        echo "✓ Docker can access GPU"
    else
        echo "⚠ Docker cannot access GPU (install nvidia-container-toolkit)"
    fi
else
    echo " No NVIDIA GPU detected (CPU mode will be used)"
fi
echo ""

# Test 4: Check if Ollama service is defined
echo "Test 4: Ollama service configuration"
if docker-compose config | grep -q "ollama:"; then
    echo "✓ Ollama service is defined"
else
    echo "✗ Ollama service not found in docker-compose.yml"
    ERRORS=$((ERRORS + 1))
fi
echo ""

# Test 5: Check if .env file exists
echo "Test 5: Environment configuration"
if [ -f "backend/.env" ]; then
    echo "✓ backend/.env exists"
    # Check Ollama configuration
    if grep -q "OLLAMA_ENABLED=true" backend/.env; then
        echo "✓ Ollama is enabled"
    else
        echo "⚠ Ollama is disabled in .env"
    fi
    if grep -q "OLLAMA_BASE_URL" backend/.env; then
        OLLAMA_URL=$(grep "OLLAMA_BASE_URL" backend/.env | cut -d'=' -f2)
        echo "✓ Ollama URL configured: $OLLAMA_URL"
    else
        echo "⚠ OLLAMA_BASE_URL not set"
    fi
else
    echo "⚠ backend/.env not found (copy from backend/.env.example)"
fi
echo ""

# Test 6: Check helper scripts.
# The helper scripts live in scripts/ (this suite runs from the repo root,
# like the backend/ and docs/ checks above); the bare filenames previously
# checked here never exist at the repo root, so this test always failed.
echo "Test 6: Helper scripts"
SCRIPTS=("scripts/check-gpu.sh" "scripts/start-with-gpu.sh" "scripts/configure-ollama.sh")
for script in "${SCRIPTS[@]}"; do
    if [ -f "$script" ] && [ -x "$script" ]; then
        echo "$script exists and is executable"
    else
        echo "$script missing or not executable"
        ERRORS=$((ERRORS + 1))
    fi
done
echo ""

# Test 7: Check documentation
echo "Test 7: Documentation"
DOCS=("docs/OLLAMA_SETUP.md" "docs/GPU_SETUP.md" "QUICK_START_GPU.md")
for doc in "${DOCS[@]}"; do
    if [ -f "$doc" ]; then
        echo "$doc exists"
    else
        echo "$doc missing"
        ERRORS=$((ERRORS + 1))
    fi
done
echo ""

# Test 8: Check if Ollama is running (if services are up)
echo "Test 8: Ollama service status"
if docker ps | grep -q "munich-news-ollama"; then
    echo "✓ Ollama container is running"
    # Check if crawler is running (needed to test Ollama from inside the network)
    if docker ps | grep -q "munich-news-crawler"; then
        # Test Ollama API from inside network using Python
        if docker-compose exec -T crawler python -c "import requests; requests.get('http://ollama:11434/api/tags', timeout=5)" &> /dev/null; then
            echo "✓ Ollama API is accessible (internal network)"
            # Check if model is available
            if docker-compose exec -T crawler python -c "import requests; r = requests.get('http://ollama:11434/api/tags'); exit(0 if 'phi3' in r.text else 1)" &> /dev/null; then
                echo "✓ phi3 model is available"
            else
                echo "⚠ phi3 model not found (may still be downloading)"
            fi
        else
            echo "⚠ Ollama API not responding from crawler"
        fi
    else
        echo " Crawler not running (needed to test internal Ollama access)"
    fi
    # Verify port is NOT exposed to host (security: Ollama should be internal-only)
    if nc -z -w 2 localhost 11434 &> /dev/null; then
        echo "⚠ WARNING: Ollama port is exposed to host (should be internal only)"
    else
        echo "✓ Ollama is internal-only (not exposed to host)"
    fi
else
    echo " Ollama container not running (start with: docker-compose up -d)"
fi
echo ""

# Summary
echo "=========================================="
echo "Test Summary"
echo "=========================================="
if [ $ERRORS -eq 0 ]; then
    echo "✓ All tests passed!"
    echo ""
    echo "Next steps:"
    echo "1. Start services: ./scripts/start-with-gpu.sh"
    echo "2. Test translation: docker-compose exec crawler python crawler_service.py 1"
    echo "3. Monitor GPU: watch -n 1 'docker exec munich-news-ollama nvidia-smi'"
else
    echo "$ERRORS test(s) failed"
    echo ""
    echo "Please fix the errors above before proceeding."
fi
echo ""
exit $ERRORS