# NOTE(review): removed Hugging Face Spaces build-log residue
# ("Spaces: Build error") that had been pasted into the source file.
# app.py - AI SEA Exam Tutor with UI API Key Entry
import io
import json
import os
import re
import tempfile
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Tuple

import gradio as gr
import PyPDF2
from groq import Groq
# -----------------------------
# Configuration
# -----------------------------
# The Groq API key is supplied through the UI at runtime and held in this
# module-level dict (not an environment variable); see update_api_key().
api_key_state = {"value": ""}

# SEA = Secondary Entrance Assessment (Trinidad & Tobago) option lists
# used to populate the Gradio dropdowns/radios below.
SEA_SUBJECTS = ["Mathematics", "English Language Arts"]
SEA_MATH_TOPICS = [
    "Number Theory (Fractions, Decimals, Percentages)",
    "Measurement (Perimeter, Area, Volume)",
    "Geometry", "Algebra Basics",
    "Word Problems", "Data Interpretation"
]
SEA_ENGLISH_TOPICS = [
    "Reading Comprehension",
    "Grammar (Parts of Speech, Tenses)",
    "Vocabulary (Synonyms, Antonyms)",
    "Composition & Writing",
    "Spelling & Punctuation",
    "Listening Comprehension (simulated)"
]
LANG_OPTIONS = ["English"]
LEVEL_OPTIONS = ["Beginner", "Intermediate", "Advanced"]
# JSON sidecar file where processed upload text is cached for retrieval.
UPLOADED_DOCS_FILE = "sea_exam_documents.json"
# -----------------------------
# API Key Management
# -----------------------------
def update_api_key(api_key):
    """Persist the user-supplied Groq API key in module state.

    Returns a short status string suitable for display in the UI.
    """
    cleaned = api_key.strip()
    api_key_state["value"] = cleaned
    return (
        "✅ API key saved (not visible for security)"
        if cleaned
        else "⚠️ API key cleared"
    )
def get_groq_client():
    """Build a Groq client from the UI-provided API key.

    Returns (client, "") on success, or (None, error_message) when the
    key is missing or rejected by the Groq constructor.
    """
    key = api_key_state["value"]
    if not key:
        return None, "❌ No API key provided"
    try:
        return Groq(api_key=key), ""
    except Exception as e:
        return None, f"❌ Invalid API key: {str(e)}"
def generate_with_groq(prompt: str) -> str:
    """Send a single-turn chat completion to Groq and return the text.

    Configuration problems and API failures are reported as
    human-readable error strings rather than raised.
    """
    client, error_msg = get_groq_client()
    if error_msg:
        return error_msg
    try:
        completion = client.chat.completions.create(
            model="llama-3.1-8b-instant",
            messages=[{"role": "user", "content": prompt}],
            temperature=0.7,
            max_tokens=800,
        )
    except Exception as e:
        return f"❌ API error: {e}"
    return completion.choices[0].message.content
# -----------------------------
# Document Processing Functions
# -----------------------------
def extract_text_from_pdf(file_bytes: bytes, filename: str) -> str:
    """Extract the text of a PDF from its raw bytes.

    Parses entirely in memory via io.BytesIO — the original wrote a
    NamedTemporaryFile and leaked it whenever PyPDF2 raised before the
    os.unlink() call. Returns the concatenated text of all pages, each
    prefixed with a "--- Page N ---" marker; on failure, returns an
    error string naming the file instead of raising.
    """
    try:
        pdf_reader = PyPDF2.PdfReader(io.BytesIO(file_bytes))
        parts = []
        for page_num, page in enumerate(pdf_reader.pages, start=1):
            parts.append(f"\n--- Page {page_num} ---\n{page.extract_text()}\n")
        return "".join(parts)
    except Exception as e:
        # BUG FIX: the original hard-coded "(unknown)" here even though
        # the filename is passed in.
        return f"ERROR processing {filename}: {str(e)}"
def process_uploaded_documents(files) -> str:
    """Ingest uploaded SEA exam documents into UPLOADED_DOCS_FILE.

    Accepts the value of a gr.Files component — plain paths,
    (path, name) tuples, or gradio tempfile wrappers exposing .name —
    extracts text from .pdf/.txt/.md files, and writes the results as
    JSON for later retrieval by get_relevant_context(). Unsupported
    extensions are skipped; per-file failures are logged, not fatal.
    """
    if not files:
        return "⚠️ No files uploaded"
    all_documents = []
    for file_info in files:
        if isinstance(file_info, tuple) and len(file_info) >= 2:
            file_path, filename = file_info[0], file_info[1]
        elif hasattr(file_info, "name"):
            # gradio tempfile wrapper: .name is the on-disk path
            file_path = file_info.name
            filename = os.path.basename(file_info.name)
        else:
            file_path = file_info
            filename = os.path.basename(str(file_info))
        try:
            with open(file_path, 'rb') as f:
                file_bytes = f.read()
            lower = filename.lower()
            if lower.endswith('.pdf'):
                text_content = extract_text_from_pdf(file_bytes, filename)
                file_type = "PDF"
            elif lower.endswith(('.txt', '.md')):
                text_content = file_bytes.decode('utf-8', errors='replace')
                file_type = "Text"
            else:
                continue  # unsupported extension
            all_documents.append({
                "filename": filename,
                # Cap stored text at 10k chars to keep the JSON small.
                "content": text_content[:10000],
                "type": file_type,
                # BUG FIX: was gr.utils.datetime.datetime.now(), which
                # raises AttributeError (gradio has no `utils.datetime`)
                # and made every upload fail.
                "upload_time": datetime.now().isoformat(),
            })
        except Exception as e:
            # BUG FIX: the original printed "(unknown)" instead of the
            # actual filename.
            print(f"Failed to process {filename}: {str(e)}")
    try:
        with open(UPLOADED_DOCS_FILE, 'w', encoding='utf-8') as f:
            json.dump(all_documents, f, ensure_ascii=False, indent=2)
        return f"✅ Processed {len(all_documents)} files. Ready for RAG queries."
    except Exception as e:
        return f"❌ Error saving documents: {str(e)}"
def get_relevant_context(subject: str, topic: str, max_context: int = 1500) -> str:
    """Return excerpts from uploaded papers that mention the subject or topic.

    Reads UPLOADED_DOCS_FILE and concatenates up to max_context
    characters of matching 500-char snippets. Returns "" when the store
    is missing, empty, or unreadable — retrieval is best-effort by
    design, so callers fall back to a context-free prompt.
    """
    try:
        if not os.path.exists(UPLOADED_DOCS_FILE):
            return ""
        with open(UPLOADED_DOCS_FILE, 'r', encoding='utf-8') as f:
            documents = json.load(f)
        relevant_parts = []
        for doc in documents:
            content = doc.get('content', '').lower()
            if topic.lower() in content or subject.lower() in content:
                relevant_parts.append(
                    f"\n--- From: {doc['filename']} ---\n{doc['content'][:500]}...\n"
                )
        combined = "\n".join(relevant_parts)
        if len(combined) > max_context:
            combined = combined[:max_context] + "\n...[truncated]..."
        return combined
    except (OSError, json.JSONDecodeError, KeyError, AttributeError, TypeError):
        # BUG FIX: narrowed from a bare `except:` that swallowed
        # everything (including KeyboardInterrupt and genuine bugs).
        # Only I/O and malformed-store errors are treated as "no context".
        return ""
# -----------------------------
# Enhanced Generation with RAG
# -----------------------------
def generate_with_context(prompt: str, subject: str, topic: str, language: str, level: str) -> str:
    """Run a Groq generation, prepending uploaded-paper context when available.

    Looks up excerpts matching subject/topic via get_relevant_context();
    if any are found they are prepended to the task prompt, otherwise a
    plain prompt is sent. NOTE: `language` is accepted but not used in
    the prompt (the UI only offers "English").
    """
    context = get_relevant_context(subject, topic)
    if context:
        # Templates are left-aligned so no stray indentation leaks into
        # the text sent to the model.
        enhanced_prompt = f"""
SEA EXAM CONTEXT FROM UPLOADED PAPERS:
{context}
REQUEST:
Subject: {subject}
Topic: {topic}
Level: {level}
TASK: {prompt}
Create content aligned with Trinidad & Tobago SEA exam standards.
"""
    else:
        enhanced_prompt = f"""
Subject: {subject}
Topic: {topic}
Level: {level}
TASK: {prompt}
Create SEA-aligned content. (No papers uploaded yet)
"""
    return generate_with_groq(enhanced_prompt)
# -----------------------------
# Helper Functions
# -----------------------------
def build_system_context(subject: str, topic: str, language: str, level: str) -> str:
    """One-line context header placed at the top of generation prompts.

    NOTE: `language` is accepted for signature consistency but unused.
    """
    return "SEA Exam - {}: {} ({})".format(subject, topic, level)
def prompt_explanation(subject: str, topic: str, language: str, level: str) -> str:
    """Prompt asking the model for a step-by-step SEA-style explanation."""
    header = build_system_context(subject, topic, language, level)
    return header + "\nWrite a step-by-step SEA exam explanation with examples."
def prompt_quiz(subject: str, topic: str, language: str, level: str) -> str:
    """Build the quiz-generation prompt.

    Requests 3-5 multiple-choice questions and spells out the exact
    JSON shape that on_generate_quiz() later parses. Doubled braces
    ({{ }}) escape literal { } inside the f-string. NOTE: `language`
    is accepted but unused.
    """
    return f"""
Subject: {subject}, Topic: {topic}, Level: {level}
Create 3-5 SEA-style multiple choice questions. Return JSON:
{{
"questions": [
{{
"question": "string",
"options": ["A", "B", "C", "D"],
"answer_index": 0
}}
]
}}
"""
# -----------------------------
# Gradio Callbacks
# -----------------------------
def on_generate_explanation(subject, topic, language, level):
    """Gradio handler: produce a topic explanation as markdown text."""
    return generate_with_context(
        prompt_explanation(subject, topic, language, level),
        subject, topic, language, level,
    )
def on_generate_quiz(subject, topic, language, level):
    """Gradio handler: generate a quiz and map it onto up to 5 radios.

    Returns a 7-tuple: (parsed question dicts, five gr.update()s for
    the question Radio components, status markdown string).
    """
    raw = generate_with_context(
        prompt_quiz(subject, topic, language, level),
        subject, topic, language, level,
    )
    # Extract the first {...} span — models often wrap JSON in prose.
    quiz = []
    match = re.search(r'\{.*\}', raw, re.DOTALL)
    if match:
        try:
            parsed = json.loads(match.group())
            quiz = parsed.get("questions", [])
        except json.JSONDecodeError:
            # BUG FIX: narrowed from a bare `except:` — only malformed
            # model output is expected here; anything else should surface.
            quiz = []
    # Build visibility/label updates for exactly five radio slots.
    vis = [False] * 5
    labels = [("Q", ["A", "B", "C", "D"])] * 5
    for i, q in enumerate(quiz[:5]):
        vis[i] = True
        labels[i] = (f"Q{i+1}. {q.get('question', '')}", q.get('options', []))
    updates = tuple(
        gr.update(visible=vis[i], label=labels[i][0], choices=labels[i][1], value=None)
        for i in range(5)
    )
    status = f"Generated {len(quiz)} questions" if quiz else "No questions generated"
    return (quiz, *updates, status)
# -----------------------------
# Gradio UI
# -----------------------------
CSS = """
.card {background: #f8f9fa; border-radius: 10px; padding: 15px; margin-bottom: 15px;}
.btn-primary button {background: #2563eb; color: white; border: none; border-radius: 6px;}
"""

with gr.Blocks(css=CSS, theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🇹🇹 AI SEA Exam Tutor")

    # API Key Section
    with gr.Group(elem_classes="card"):
        gr.Markdown("### 🔑 API Key Configuration")
        with gr.Row():
            api_key_input = gr.Textbox(
                label="Groq API Key",
                type="password",
                placeholder="gsk_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
                lines=1,
                scale=3
            )
            api_key_btn = gr.Button("Save Key", variant="primary", scale=1)
        api_key_status = gr.Markdown("Enter API key and click Save")
        api_key_btn.click(update_api_key, [api_key_input], [api_key_status])

    # BUG FIX: event-listener inputs must be Gradio components, not raw
    # strings. The original passed the literal "English" to .click(),
    # which fails when the Blocks graph is built. A gr.State carries
    # the constant language value instead.
    language_state = gr.State("English")

    # Main Inputs
    with gr.Row():
        with gr.Column(scale=1):
            with gr.Group(elem_classes="card"):
                gr.Markdown("### Study Parameters")
                subject = gr.Dropdown(SEA_SUBJECTS, value="Mathematics", label="Subject")
                # NOTE(review): topic choices are not refreshed when the
                # subject changes — confirm whether a subject.change()
                # handler is wanted.
                topic = gr.Dropdown(SEA_MATH_TOPICS, value=SEA_MATH_TOPICS[0], label="Topic")
                level = gr.Radio(LEVEL_OPTIONS, value="Intermediate", label="Level")
        with gr.Column(scale=2):
            with gr.Group(elem_classes="card"):
                gr.Markdown("### 📤 Upload SEA Papers")
                uploaded_files = gr.Files(
                    label="Upload PDF/TXT files",
                    file_types=[".pdf", ".txt"],
                    file_count="multiple"
                )
                process_btn = gr.Button("Process Documents", variant="primary")
                upload_status = gr.Markdown("Upload files then click Process")
                process_btn.click(process_uploaded_documents, [uploaded_files], [upload_status])

    # Explanation feature
    with gr.Row():
        with gr.Column():
            with gr.Group(elem_classes="card"):
                gr.Markdown("### Explanation")
                btn_explain = gr.Button("Generate Explanation", variant="primary")
                explanation = gr.Markdown("Explanation will appear here")
                btn_explain.click(
                    on_generate_explanation,
                    [subject, topic, language_state, level],
                    [explanation],
                )

    # Quiz feature
    with gr.Row():
        with gr.Column():
            with gr.Group(elem_classes="card"):
                gr.Markdown("### Quiz")
                btn_quiz = gr.Button("Generate Quiz", variant="primary")
                quiz_info = gr.Markdown("Click to generate quiz")
                quiz_state = gr.State([])
                # BUG FIX: on_generate_quiz emits five radio updates plus
                # a status string, but the original wired only q1-q3 and
                # listed quiz_info three times (duplicate outputs, with
                # radio updates routed to a Markdown). Define all five
                # radios and wire the outputs one-to-one.
                q1 = gr.Radio([], visible=False, label="Q1")
                q2 = gr.Radio([], visible=False, label="Q2")
                q3 = gr.Radio([], visible=False, label="Q3")
                q4 = gr.Radio([], visible=False, label="Q4")
                q5 = gr.Radio([], visible=False, label="Q5")
                btn_quiz.click(
                    on_generate_quiz,
                    [subject, topic, language_state, level],
                    [quiz_state, q1, q2, q3, q4, q5, quiz_info],
                )

if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)