commit ac5738c29d
2025-11-10 19:13:33 +01:00
64 changed files with 9445 additions and 0 deletions


@@ -0,0 +1,158 @@
from flask import Blueprint, jsonify
from config import Config
from services.ollama_service import call_ollama, list_ollama_models
import os

ollama_bp = Blueprint('ollama', __name__)


@ollama_bp.route('/api/ollama/ping', methods=['GET', 'POST'])
def ping_ollama():
    """Test connection to Ollama server"""
    try:
        # Check if Ollama is enabled
        if not Config.OLLAMA_ENABLED:
            return jsonify({
                'status': 'disabled',
                'message': 'Ollama is not enabled. Set OLLAMA_ENABLED=true in your .env file.',
                'ollama_config': {
                    'base_url': Config.OLLAMA_BASE_URL,
                    'model': Config.OLLAMA_MODEL,
                    'enabled': False
                }
            }), 200

        # Send a simple test prompt
        test_prompt = "Say 'Hello! I am connected and working.' in one sentence."
        system_prompt = "You are a helpful assistant. Respond briefly and concisely."

        response_text, error_message = call_ollama(test_prompt, system_prompt)

        if response_text:
            return jsonify({
                'status': 'success',
                'message': 'Successfully connected to Ollama',
                'response': response_text,
                'ollama_config': {
                    'base_url': Config.OLLAMA_BASE_URL,
                    'model': Config.OLLAMA_MODEL,
                    'enabled': True
                }
            }), 200
        else:
            # Try to get available models for better error message
            available_models, _ = list_ollama_models()

            troubleshooting = {
                'check_server': f'Verify Ollama is running at {Config.OLLAMA_BASE_URL}',
                'check_model': f'Verify model "{Config.OLLAMA_MODEL}" is available (run: ollama list)',
                'test_connection': f'Test manually: curl {Config.OLLAMA_BASE_URL}/api/generate -d \'{{"model":"{Config.OLLAMA_MODEL}","prompt":"test"}}\''
            }
            if available_models:
                troubleshooting['available_models'] = available_models
                troubleshooting['suggestion'] = f'Try setting OLLAMA_MODEL to one of: {", ".join(available_models[:5])}'

            return jsonify({
                'status': 'error',
                'message': error_message or 'Failed to get response from Ollama',
                'error_details': error_message,
                'ollama_config': {
                    'base_url': Config.OLLAMA_BASE_URL,
                    'model': Config.OLLAMA_MODEL,
                    'enabled': True
                },
                'troubleshooting': troubleshooting
            }), 500
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': f'Error connecting to Ollama: {str(e)}',
            'ollama_config': {
                'base_url': Config.OLLAMA_BASE_URL,
                'model': Config.OLLAMA_MODEL,
                'enabled': Config.OLLAMA_ENABLED
            }
        }), 500


@ollama_bp.route('/api/ollama/config', methods=['GET'])
def get_ollama_config():
    """Get current Ollama configuration (for debugging)"""
    try:
        from pathlib import Path
        backend_dir = Path(__file__).parent.parent
        env_path = backend_dir / '.env'

        return jsonify({
            'ollama_config': {
                'base_url': Config.OLLAMA_BASE_URL,
                'model': Config.OLLAMA_MODEL,
                'enabled': Config.OLLAMA_ENABLED,
                'has_api_key': bool(Config.OLLAMA_API_KEY)
            },
            'env_file_path': str(env_path),
            'env_file_exists': env_path.exists(),
            'current_working_directory': os.getcwd()
        }), 200
    except Exception as e:
        return jsonify({
            'error': str(e),
            'ollama_config': {
                'base_url': Config.OLLAMA_BASE_URL,
                'model': Config.OLLAMA_MODEL,
                'enabled': Config.OLLAMA_ENABLED
            }
        }), 500


@ollama_bp.route('/api/ollama/models', methods=['GET'])
def get_ollama_models():
    """List available models on Ollama server"""
    try:
        if not Config.OLLAMA_ENABLED:
            return jsonify({
                'status': 'disabled',
                'message': 'Ollama is not enabled. Set OLLAMA_ENABLED=true in your .env file.',
                'ollama_config': {
                    'base_url': Config.OLLAMA_BASE_URL,
                    'model': Config.OLLAMA_MODEL,
                    'enabled': False
                }
            }), 200

        models, error_message = list_ollama_models()

        if models is not None:
            return jsonify({
                'status': 'success',
                'models': models,
                'current_model': Config.OLLAMA_MODEL,
                'ollama_config': {
                    'base_url': Config.OLLAMA_BASE_URL,
                    'model': Config.OLLAMA_MODEL,
                    'enabled': True
                }
            }), 200
        else:
            return jsonify({
                'status': 'error',
                'message': error_message or 'Failed to list models',
                'ollama_config': {
                    'base_url': Config.OLLAMA_BASE_URL,
                    'model': Config.OLLAMA_MODEL,
                    'enabled': True
                }
            }), 500
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': f'Error listing models: {str(e)}',
            'ollama_config': {
                'base_url': Config.OLLAMA_BASE_URL,
                'model': Config.OLLAMA_MODEL,
                'enabled': Config.OLLAMA_ENABLED
            }
        }), 500
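
These routes depend on services/ollama_service.py, which is not part of this hunk. The following is a minimal sketch of what that module could look like, assuming Ollama's standard HTTP endpoints (/api/generate and /api/tags) and the (result, error_message) tuple contract used by the routes above; the requests library, the timeouts, and the Authorization header are assumptions, not taken from this commit.

    # services/ollama_service.py -- illustrative sketch only; the real module is
    # not shown in this diff. Assumes Ollama's /api/generate and /api/tags API.
    import requests

    from config import Config


    def _headers():
        # Plain Ollama needs no auth; a bearer token is only an assumption for
        # deployments that sit behind an authenticating proxy.
        headers = {'Content-Type': 'application/json'}
        if Config.OLLAMA_API_KEY:
            headers['Authorization'] = f'Bearer {Config.OLLAMA_API_KEY}'
        return headers


    def call_ollama(prompt, system_prompt=None):
        """Send one non-streaming prompt; return (response_text, error_message)."""
        payload = {'model': Config.OLLAMA_MODEL, 'prompt': prompt, 'stream': False}
        if system_prompt:
            payload['system'] = system_prompt
        try:
            resp = requests.post(f'{Config.OLLAMA_BASE_URL}/api/generate',
                                 json=payload, headers=_headers(), timeout=60)
            resp.raise_for_status()
            return resp.json().get('response', ''), None
        except requests.RequestException as e:
            return None, f'Ollama request failed: {e}'


    def list_ollama_models():
        """Return (model_names, error_message) from the /api/tags endpoint."""
        try:
            resp = requests.get(f'{Config.OLLAMA_BASE_URL}/api/tags',
                                headers=_headers(), timeout=10)
            resp.raise_for_status()
            return [m.get('name') for m in resp.json().get('models', [])], None
        except requests.RequestException as e:
            return None, f'Could not list models: {e}'

Once the blueprint is registered on the Flask app, the endpoints can be exercised with, for example, curl http://localhost:5000/api/ollama/ping or curl http://localhost:5000/api/ollama/models; the host and port are assumptions and depend on how the app is run.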