File size: 3,005 Bytes
0805c5b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
"""
Quiz Tools for Manim MCP Server

This module provides tools for generating educational quiz questions based on STEM concepts.
"""

import logging
from typing import Any, Dict, Optional

from mcp.types import CallToolResult, TextContent

from utils.hf_wrapper import HFInferenceWrapper, ModelConfig

logger = logging.getLogger(__name__)


async def generate_quiz(
    hf_wrapper: HFInferenceWrapper, arguments: Dict[str, Any]
) -> CallToolResult:
    """
    Generate quiz questions for a STEM concept.

    Uses a text LLM to create educational quiz questions that assess
    understanding of the animation concept. Questions can be multiple choice,
    true/false, or short answer format.

    Args:
        hf_wrapper: HuggingFace inference wrapper instance
        arguments: Dictionary containing:
            - concept (str): The STEM concept to create quiz questions for
            - difficulty (str): Difficulty level (easy, medium, hard)
            - num_questions (int): Number of questions to generate
            - question_types (list, optional): Types of questions (default: ["multiple_choice"])
            - model (str, optional): Hugging Face model to use

    Returns:
        CallToolResult with the generated quiz questions in JSON format,
        or a CallToolResult with isError=True describing the failure
        (missing arguments or inference errors). This function does not
        raise; all failures are reported through the result object so the
        MCP caller always receives a well-formed tool response.
    """
    # Validate required arguments up front so a malformed request yields a
    # proper error result instead of an unhandled KeyError escaping the tool.
    try:
        concept = arguments["concept"]
        difficulty = arguments["difficulty"]
        num_questions = arguments["num_questions"]
    except KeyError as e:
        logger.error("Quiz generation failed: missing required argument %s", e)
        return CallToolResult(
            content=[
                TextContent(
                    type="text",
                    text=f"Quiz generation failed: missing required argument: {e}",
                )
            ],
            isError=True,
        )

    question_types = arguments.get("question_types", ["multiple_choice"])
    model = arguments.get("model")

    try:
        # Fall back to the first configured text model when none is specified.
        model_config = ModelConfig()
        selected_model = model or model_config.text_models[0]

        prompt = f"""
Generate {num_questions} quiz questions for the following STEM concept:

Concept: {concept}
Difficulty: {difficulty}
Question Types: {", ".join(question_types)}

For each question provide:
1. The question
2. Possible answers (for multiple choice)
3. Correct answer
4. Brief explanation

Format as JSON array of question objects with this structure:
[
  {{
    "question": "question text",
    "options": ["A", "B", "C", "D"],
    "correct_answer": "A",
    "explanation": "why this is correct"
  }}
]

Return only valid JSON without markdown formatting.
"""

        # Moderate temperature: enough variety across generations while
        # keeping the output parseable as the requested JSON structure.
        response = await hf_wrapper.text_generation(
            model=selected_model,
            prompt=prompt,
            max_new_tokens=1024,
            temperature=0.5,
        )

        # Lazy %-style args avoid formatting cost when INFO is disabled.
        logger.info(
            "Successfully generated %s quiz questions for concept: %s",
            num_questions,
            concept,
        )

        return CallToolResult(
            content=[
                TextContent(
                    type="text",
                    text=f"Generated Quiz Questions:\n\n{response}",
                )
            ]
        )

    except Exception as e:
        # logger.exception records the full traceback, not just the message.
        logger.exception("Quiz generation failed: %s", e)
        return CallToolResult(
            content=[
                TextContent(type="text", text=f"Quiz generation failed: {str(e)}")
            ],
            isError=True,
        )