File size: 4,594 Bytes
ed363f8
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
import os
import json
import logging
import random
from langchain_groq import ChatGroq
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import JsonOutputParser

# Configure Logging: everything at INFO and above goes both to test.log and
# to the console, stamped with time and severity.
_log_handlers = [
    logging.FileHandler("test.log"),
    logging.StreamHandler(),
]
logging.basicConfig(
    format="%(asctime)s - %(levelname)s - %(message)s",
    level=logging.INFO,
    handlers=_log_handlers,
)

# API key is read from the environment; the literal fallback is a placeholder
# only and will not authenticate against Groq.
GROQ_API_KEY = os.getenv("GROQ_API_KEY", "your_api_key")

# Groq-hosted Llama model behind LangChain's chat interface.
# temperature=0.7 keeps some sampling randomness so repeated calls vary.
llm = ChatGroq(
    api_key=GROQ_API_KEY,
    model_name="llama-3.3-70b-versatile",
    temperature=0.7,
)

# JSON schema the output parser enforces on the model's reply: exactly four
# answer options plus theme, question, correct answer, and a difficulty drawn
# from a fixed enum. All five fields are required.
_QUESTION_SCHEMA = {
    "type": "object",
    "properties": {
        "theme": {"type": "string"},
        "question": {"type": "string"},
        "options": {
            "type": "array",
            "items": {"type": "string"},
            "minItems": 4,
            "maxItems": 4,
        },
        "correct_answer": {"type": "string"},
        "difficulty": {"type": "string", "enum": ["Easy", "Medium", "Hard"]},
    },
    "required": ["theme", "question", "options", "correct_answer", "difficulty"],
}

parser = JsonOutputParser(pydantic_object=_QUESTION_SCHEMA)

# Prompt template.
# NOTE: in ChatPromptTemplate, single braces ({theme}) mark template
# variables and double braces ({{ }}) produce literal braces in the rendered
# text. The previous version escaped the variables too ({{theme}},
# {{previous_questions}}), so the values passed to chain.invoke() were never
# substituted and the model saw the literal text "{theme}". Fixed here:
# variables use single braces; only the example JSON keeps escaped braces.
prompt = ChatPromptTemplate.from_messages([
    ("system", "You are a quiz generator that ensures each generated question is unique."),
    ("user", """Generate a unique multiple-choice quiz question for the theme: {theme}.

        - Ensure the question is unique and has never been asked before in this session.

        - The question should have 4 answer choices labeled A, B, C, D.

        - Clearly specify the correct answer (one of A, B, C, D).

        - Assign a difficulty level (Easy, Medium, Hard).

        - Do not generate a question similar to the ones listed below.



        Previously generated questions:

        {previous_questions}



        Return the result in this JSON format:

        {{

            "theme": "{theme}",

            "question": "The generated quiz question",

            "options": ["A) Option 1", "B) Option 2", "C) Option 3", "D) Option 4"],

            "correct_answer": "Correct option letter",

            "difficulty": "Easy/Medium/Hard"

        }}

        """)
])

# **πŸ”Ή Create Chain with History to Prevent Duplicates**
# Session-level memory: the text of every question generated so far. It is
# injected into the prompt to steer the model away from repeats, and checked
# client-side for exact duplicates.
chat_history = []  # Stores previous questions
# LCEL pipeline: render prompt -> call Groq LLM -> parse reply as JSON.
chain = prompt | llm | parser

# **List of Fixed Themes**
# Pool of themes the test harness samples from at random.
fixed_themes = ["Science", "History", "Geography", "Technology", "Mathematics"]


def generate_unique_question(theme):
    """

    Generates a quiz question ensuring uniqueness within a session.

    Theme is manually passed as input.

    """
    try:
        previous_questions = "\n".join(chat_history) if chat_history else "None"

        # **πŸ”Ή Ensure Correct Input Variables Are Passed**
        response = chain.invoke({
            "theme": theme,
            "previous_questions": previous_questions
        })
        
        if "theme" not in response or "question" not in response:
            logging.error("Invalid response from LLM: %s", response)
            return None

        # Extract theme and question
        question_text = response["question"]

        # Ensure the question is unique
        if question_text in chat_history:
            logging.warning("Duplicate question detected, regenerating...")
            return generate_unique_question(theme)  # Retry with the same theme
        
        chat_history.append(question_text)  # Store to avoid duplicates

        logging.info("Generated Question:\n%s", json.dumps(response, indent=2))
        return response

    except Exception as e:
        logging.error("Error generating quiz question: %s", e)
        return None


# **πŸ”Ή Smoke test: generate 10 questions over randomly chosen fixed themes.**
if __name__ == "__main__":
    total_questions = 10  # number of questions to attempt

    for number in range(1, total_questions + 1):
        picked_theme = random.choice(fixed_themes)  # sample a theme per round
        print(f"\nπŸ“ Generating Question {number} for Theme: {picked_theme}\n")

        result = generate_unique_question(picked_theme)

        if not result:
            print(f"❌ Failed to generate a question for {picked_theme}.")
        else:
            print(json.dumps(result, indent=2))

    print("\nβœ… Test completed! Check test.log for details.")