|
|
"""Service pour initialiser le serveur MCP avec FastMCP""" |
|
|
|
|
|
import asyncio
import logging
from typing import Any, Dict, Optional

from fastapi import FastAPI
from mcp.server.fastmcp import FastMCP

from services.chat_service import generate_chat_response
from services.label_model_manager import kpa_model_manager
from services.stance_model_manager import stance_model_manager
from services.stt_service import speech_to_text
from services.tts_service import text_to_speech
|
|
|
|
|
logger = logging.getLogger(__name__) |
|
|
|
|
|
|
|
|
# Single module-level FastMCP instance; every tool/resource below registers on it.
# json_response=True presumably makes responses plain JSON and stateless_http=False
# keeps per-session state across streamable-HTTP requests — confirm against the
# FastMCP documentation for the pinned mcp version.
mcp_server = FastMCP("NLP-Debater-MCP", json_response=True, stateless_http=False)
|
|
|
|
|
|
|
|
@mcp_server.tool()
def detect_stance(topic: str, argument: str) -> Dict[str, Any]:
    """Classify the stance (pro/con) of *argument* with respect to *topic*.

    Returns a dict with the predicted stance, its confidence, and the raw
    pro/con probabilities as produced by the stance model.

    Raises:
        ValueError: if the stance model has not been loaded yet.
    """
    if not stance_model_manager.model_loaded:
        raise ValueError("Modèle stance non chargé")
    prediction = stance_model_manager.predict(topic, argument)
    # Expose only the documented subset of the model output, in a fixed order.
    fields = ("predicted_stance", "confidence", "probability_con", "probability_pro")
    return {field: prediction[field] for field in fields}
|
|
|
|
|
@mcp_server.tool()
def match_keypoint_argument(argument: str, key_point: str) -> Dict[str, Any]:
    """Decide whether *key_point* matches *argument* (key-point analysis).

    Returns a dict with the binary prediction, its human-readable label,
    the confidence, and the class probabilities from the KPA model.

    Raises:
        ValueError: if the KPA model has not been loaded yet.
    """
    if not kpa_model_manager.model_loaded:
        raise ValueError("Modèle KPA non chargé")
    prediction = kpa_model_manager.predict(argument, key_point)
    # Forward only the documented subset of the model output.
    fields = ("prediction", "label", "confidence", "probabilities")
    return {field: prediction[field] for field in fields}
|
|
|
|
|
@mcp_server.tool()
def transcribe_audio(audio_path: str) -> str:
    """Transcribe the audio file at *audio_path* to text (speech-to-text)."""
    transcript = speech_to_text(audio_path)
    return transcript
|
|
|
|
|
@mcp_server.tool()
def generate_speech(text: str, voice: str = "Aaliyah-PlayAI", format: str = "wav") -> str:
    """Synthesize *text* to speech (text-to-speech).

    Args:
        text: text to synthesize.
        voice: voice preset passed through to the TTS service.
        format: audio output format. NOTE: the name shadows the builtin
            ``format`` but is kept unchanged for caller/schema compatibility.
    """
    audio = text_to_speech(text, voice, format)
    return audio
|
|
|
|
|
@mcp_server.tool()
def generate_argument(user_input: str, conversation_id: Optional[str] = None) -> str:
    """Generate a debate argument / chat reply for *user_input*.

    Args:
        user_input: the user's message or debate topic.
        conversation_id: optional id of an existing conversation to continue;
            presumably a new conversation is started when None — TODO confirm
            against ``generate_chat_response``.

    The annotation was ``str = None`` (implicit Optional, disallowed by
    PEP 484); FastMCP derives the tool schema from annotations, so the
    declared parameter type was wrong. Default and behavior are unchanged.
    """
    return generate_chat_response(user_input, conversation_id)
|
|
|
|
|
@mcp_server.resource("debate://prompt")
def get_debate_prompt() -> str:
    """Expose the debate system prompt as an MCP resource."""
    prompt = (
        "Tu es un expert en débat. Génère 3 arguments PRO pour le topic donné. "
        "Sois concis et persuasif."
    )
    return prompt
|
|
|
|
|
|
|
|
@mcp_server.tool()
def health_check() -> Dict[str, Any]:
    """Health check for the MCP server.

    Returns:
        {"status": "healthy", "tools": [<registered tool names>]}; the tool
        list falls back to [] if it cannot be obtained.
    """
    try:
        # BUG FIX: FastMCP.list_tools() is a coroutine. The previous sync call
        # produced a coroutine object; iterating it raised, the exception was
        # swallowed, and the tool list was always []. Sync tools run in a
        # worker thread without a running event loop, so asyncio.run() is safe
        # here; if a loop is somehow already running, we fall back to [] as
        # before — but now the failure is logged instead of silently ignored.
        tools = asyncio.run(mcp_server.list_tools())
        tool_names = [tool.name for tool in tools] if tools else []
    except Exception:
        logger.exception("health_check: unable to list MCP tools")
        tool_names = []
    return {"status": "healthy", "tools": tool_names}
|
|
|
|
|
def init_mcp_server(app: FastAPI) -> None:
    """Build the MCP streamable-HTTP ASGI app and mount it on *app*."""
    streamable_app = mcp_server.streamable_http_app()
    app.mount("/api/v1/mcp", streamable_app)
    logger.info("✓ Serveur MCP monté sur /api/v1/mcp avec tools NLP/STT/TTS")