# Hugging Face Space: French quiz generator (question generation + question answering)
import gradio as gr
from transformers import pipeline, AutoTokenizer, AutoModelForSeq2SeqLM

# ----------------------------
# 1) Model loading
# ----------------------------
# Question generation (multilingual T5; works in French).
# NOTE(review): the "-qg-hl" variant is trained on highlighted-answer input
# ("<hl> answer <hl>" spans) — plain unhighlighted text may produce weaker
# questions; confirm this is the intended model vs. the e2e variant.
qg_model_name = "valhalla/t5-base-qg-hl"
qg_tokenizer = AutoTokenizer.from_pretrained(qg_model_name)
qg_model = AutoModelForSeq2SeqLM.from_pretrained(qg_model_name)

# Extractive question answering (CamemBERT fine-tuned on FQuAD/PIAF, French).
qa_model_name = "etalab-ia/camembert-base-squadFR-fquad-piaf"
qa_pipeline = pipeline("question-answering", model=qa_model_name, tokenizer=qa_model_name)
| # ---------------------------- | |
| # 2) Fonctions principales | |
| # ---------------------------- | |
def generate_questions(text, max_questions=5):
    """Generate French questions from the given source text.

    Args:
        text: Source passage (French) fed to the question-generation model.
        max_questions: Maximum number of distinct questions to return.

    Returns:
        A list of at most ``max_questions`` unique, non-empty questions,
        in generation order. May contain fewer items when the model
        produces duplicates or blank outputs.
    """
    inputs = qg_tokenizer([text], return_tensors="pt", truncation=True, max_length=512)
    outputs = qg_model.generate(
        **inputs,
        max_length=64,
        # num_return_sequences must not exceed num_beams, hence the max(...).
        num_beams=max(5, max_questions),
        num_return_sequences=max_questions,
        do_sample=True,
        top_k=50,
        temperature=0.9,
        early_stopping=True,
    )
    # Strip whitespace and drop blank generations before deduplicating;
    # dict.fromkeys keeps first-seen order (stable dedup).
    decoded = (qg_tokenizer.decode(o, skip_special_tokens=True).strip() for o in outputs)
    questions = [q for q in decoded if q]
    return list(dict.fromkeys(questions))[:max_questions]
def generate_quiz(text, max_questions=5):
    """Build a complete quiz (questions + answers) in French.

    Args:
        text: Source passage used both for question generation and as the
            QA context.
        max_questions: Maximum number of question/answer pairs.

    Returns:
        A single Markdown string with one Q/A entry per question,
        separated by horizontal rules.
    """
    quiz = []
    for question in generate_questions(text, max_questions=max_questions):
        try:
            # Only the pipeline call can raise; keep the try body minimal.
            ans = qa_pipeline(question=question, context=text)
        except Exception:
            # Narrowed from a bare `except:` (which also swallowed
            # KeyboardInterrupt/SystemExit); stays best-effort per question.
            quiz.append(f"**Q : {question}**\n\n➡️ **Réponse :** Non trouvée")
        else:
            quiz.append(
                f"**Q : {question}**\n\n➡️ **Réponse :** {ans['answer']} "
                f"_(confiance : {round(ans['score'], 2)})_"
            )
    return "\n\n---\n\n".join(quiz)
# ----------------------------
# 3) Gradio interface
# ----------------------------
with gr.Blocks(title="🧑🏫 Générateur de Quiz (FR)") as demo:
    # Page header shown above the input/output columns.
    gr.Markdown(
        """
# 🧑🏫 Générateur de Quiz (FR)
Collez un texte (cours, chapitre, article…) et l’IA génère automatiquement des **questions/réponses** pour réviser.
---
"""
    )

    with gr.Row():
        # Left column: source text, question count, and the trigger button.
        with gr.Column(scale=2):
            source_box = gr.Textbox(
                label="Texte source",
                placeholder="Collez ici un texte en français…",
                lines=12,
            )
            question_count = gr.Slider(1, 10, value=5, step=1, label="Nombre de questions")
            run_button = gr.Button("🚀 Générer le Quiz")
        # Right column: rendered quiz output.
        with gr.Column(scale=1):
            quiz_output = gr.Markdown(label="Quiz généré")

    run_button.click(fn=generate_quiz, inputs=[source_box, question_count], outputs=quiz_output)
# ----------------------------
# 4) Launch
# ----------------------------
# Start the Gradio server only when run as a script (HF Spaces entry point).
if __name__ == "__main__":
    demo.launch()