File size: 22,019 Bytes
6644281
 
 
 
 
 
 
 
 
 
f8cb991
 
 
 
 
 
 
 
 
 
 
 
 
6644281
 
 
 
f8cb991
 
 
 
 
6644281
 
f8cb991
 
 
 
6644281
f8cb991
 
 
 
6644281
f8cb991
6644281
 
 
f8cb991
6644281
 
f8cb991
 
 
6644281
 
f8cb991
 
 
 
 
 
 
 
 
 
 
6644281
f8cb991
 
 
 
 
 
6644281
f8cb991
6644281
 
 
f8cb991
6644281
f8cb991
 
 
 
 
 
 
6644281
f8cb991
 
 
 
 
 
 
 
6644281
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f8cb991
 
 
6644281
 
f8cb991
6644281
 
 
f8cb991
 
6644281
 
 
 
 
 
 
 
c834f54
6644281
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c834f54
6644281
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c834f54
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
import gradio as gr
from transformers import pipeline
import PyPDF2
import re
import os
import io
import random
import time
from groq import Groq

# ==================== DEBUGGING ====================
# Startup banner plus a masked dump of credential-looking environment
# variables, so deploy logs show which secrets are present without
# leaking their full values.
print("=" * 50)
print("STARTING APPLICATION - ENVIRONMENT CHECK")
print("=" * 50)

# List ALL environment variables (be careful with this in production!)
print("\nAll Environment Variables:")
for key, value in os.environ.items():
    if any(tag in key for tag in ('KEY', 'TOKEN', 'SECRET')):
        if len(value) > 8:
            masked = value[:4] + "..." + value[-4:]
        else:
            masked = "SET"
        print(f"  {key}: {masked}")

# ==================== IMPORT HANDLING ====================
# The google-genai SDK is optional: if it is missing, bind both names to
# None so the setup code below can detect the absence and skip Gemini.
try:
    from google import genai
    from google.genai import types
except ImportError as exc:
    print(f"\n❌ Failed to import google.genai: {exc}")
    genai = None
    types = None
else:
    print("\nβœ… Imported google.genai successfully")

# ==================== CONFIGURATION ====================
# Try multiple ways to get tokens
hf_token = os.environ.get("HF_TOKEN") or os.getenv("HF_TOKEN")
gemini_key = os.environ.get("GEMINI_API_KEY") or os.getenv("GEMINI_API_KEY")
groq_key = os.environ.get("GROQ_API_KEY") or os.getenv("GROQ_API_KEY")

print(f"\nToken Check:")
print(f"  HF_TOKEN: {'SET (' + hf_token[:4] + '...)' if hf_token else 'NOT SET'}")
print(f"  GEMINI_API_KEY: {'SET (' + gemini_key[:4] + '...)' if gemini_key else 'NOT SET'}")
print(f"  GROQ_API_KEY: {'SET (' + groq_key[:4] + '...)' if groq_key else 'NOT SET'}")

# ==================== GEMINI SETUP ====================
# Try three client constructors in order: the new google-genai SDK's
# top-level Client, its alternate client-submodule path, then the legacy
# google.generativeai module. Whichever succeeds is stored in
# gemini_client; on total failure it stays None and callers fall back.
gemini_client = None
if gemini_key and genai:
    try:
        print(f"\nAttempting Gemini setup...")
        try:
            # Preferred: new-SDK top-level Client.
            gemini_client = genai.Client(api_key=gemini_key)
            print("  βœ… Gemini client initialized with new SDK")
        except Exception as e1:
            print(f"  ⚠️ New SDK failed: {e1}")
            try:
                # Some SDK versions only expose Client under the client submodule.
                gemini_client = genai.client.Client(api_key=gemini_key)
                print("  βœ… Gemini client initialized with alternate method")
            except Exception as e2:
                print(f"  ⚠️ Alternate failed: {e2}")
                # Fallback to legacy
                try:
                    import google.generativeai as old_genai
                    old_genai.configure(api_key=gemini_key)
                    # NOTE: here gemini_client is the legacy MODULE, not a
                    # client instance; downstream code sniffs attributes
                    # (hasattr 'models' vs 'GenerativeModel') to adapt.
                    gemini_client = old_genai
                    print("  βœ… Using legacy google.generativeai")
                except Exception as e3:
                    print(f"  ❌ Legacy also failed: {e3}")
    except Exception as e:
        print(f"  ❌ Gemini setup error: {e}")
else:
    # Report exactly which prerequisite is missing (key, SDK, or both).
    if not gemini_key:
        print("\n⚠️ No GEMINI_API_KEY found")
    if not genai:
        print("⚠️ Google GenAI SDK not imported")

# ==================== GROQ SETUP ====================
# Build the Groq client and fire a tiny smoke-test completion so a bad
# key is detected at startup rather than on the first user request.
groq_client = None
if not groq_key:
    print("\n⚠️ No GROQ_API_KEY found")
else:
    try:
        print("\nAttempting Groq setup...")
        groq_client = Groq(api_key=groq_key)
        # Minimal request purely to validate the credentials; reply is discarded.
        groq_client.chat.completions.create(
            messages=[{"role": "user", "content": "Hi"}],
            model="llama-3.3-70b-versatile",
            max_completion_tokens=5,
        )
        print("  βœ… Groq client initialized and tested successfully")
    except Exception as setup_err:
        print(f"  ❌ Groq setup error: {setup_err}")
        groq_client = None

print("\n" + "=" * 50)
print("SETUP COMPLETE")
print("=" * 50)

# Lazy load summarizer
summarizer = None

def load_summarizer():
    """Create the HF summarization pipeline on first use.

    Returns the cached pipeline, or None if it could not be loaded.
    """
    global summarizer
    if summarizer is not None:
        return summarizer
    try:
        print("Loading summarizer...")
        summarizer = pipeline("summarization", model="sshleifer/distilbart-cnn-12-6", device=-1)
        print("βœ… Summarizer ready")
    except Exception as exc:
        print(f"❌ Summarizer failed: {exc}")
    return summarizer

# ==================== STATUS CHECK ====================
def check_status():
    """Summarize backend readiness as a single ' | '-joined status line."""
    def _describe(client, key, name, env_var):
        # Client built and key present -> ready; key alone -> init failed;
        # neither -> tell the operator which env var to add.
        if client and key:
            return f"βœ… {name} Ready"
        if key:
            return f"⚠️ {name} key present but client failed"
        return f"❌ {name}: Add {env_var}"

    parts = [
        _describe(gemini_client, gemini_key, "Gemini", "GEMINI_API_KEY"),
        _describe(groq_client, groq_key, "Groq", "GROQ_API_KEY"),
    ]
    return " | ".join(parts)

# ==================== APP 1: PDF & ESSAY TOOLS ====================

def _pages_to_text(pdf_reader):
    """Concatenate the extracted text of every page, newline-separated."""
    chunks = []
    for page in pdf_reader.pages:
        page_text = page.extract_text()
        if page_text:
            chunks.append(page_text + "\n")
    return "".join(chunks)

def extract_text_from_pdf(pdf_file):
    """Extract plain text from a PDF given a path, file-like object, or raw bytes.

    Returns (text, None) on success or (None, error_message) on failure.
    The page-reading loop was previously duplicated in both branches; it
    now lives in the _pages_to_text helper.
    """
    if pdf_file is None:
        return None, "Please upload a PDF file."

    try:
        if isinstance(pdf_file, str):
            # A filesystem path.
            with open(pdf_file, 'rb') as f:
                text = _pages_to_text(PyPDF2.PdfReader(f))
        else:
            # A file-like object or raw bytes (Gradio may hand us either).
            if hasattr(pdf_file, 'read'):
                pdf_bytes = pdf_file.read()
                if hasattr(pdf_file, 'seek'):
                    pdf_file.seek(0)  # leave the stream reusable for the caller
            else:
                pdf_bytes = pdf_file

            if isinstance(pdf_bytes, str):
                # Defensive: tolerate a str payload by encoding it.
                pdf_bytes = pdf_bytes.encode()
            text = _pages_to_text(PyPDF2.PdfReader(io.BytesIO(pdf_bytes)))

        # Collapse all whitespace runs to single spaces.
        text = re.sub(r'\s+', ' ', text).strip()

        if len(text) < 50:
            # Almost no text usually means a scanned / image-only PDF.
            return None, "Could not extract text. PDF may be image-based or scanned."

        return text, None

    except Exception as e:
        return None, f"Error reading PDF: {str(e)}"

def summarize_with_gemini(text, max_length, min_length):
    """Summarize *text* via Gemini, trying new-SDK then legacy call styles.

    Returns the summary string, or None when Gemini is unavailable or every
    attempt fails (callers then fall back to the local model).
    """
    if not gemini_client or not gemini_key:
        return None

    try:
        if hasattr(gemini_client, 'models'):
            # New google-genai SDK: client.models.generate_content(...).
            prompt = f"Summarize the following text in {min_length}-{max_length} words. Be concise and clear:\n\n{text[:15000]}"

            try:
                response = gemini_client.models.generate_content(
                    model="gemini-2.5-flash",
                    contents=prompt
                )
                return response.text
            except Exception:  # was a bare except: — also trapped KeyboardInterrupt
                try:
                    # Retry with an explicit config (some SDK versions need it).
                    response = gemini_client.models.generate_content(
                        model="gemini-2.5-flash",
                        contents=prompt,
                        config=types.GenerateContentConfig(
                            max_output_tokens=500
                        ) if types else None
                    )
                    return response.text
                except Exception:
                    pass  # fall through to the legacy-style attempt below

        if hasattr(gemini_client, 'GenerativeModel'):
            # Legacy google.generativeai module interface.
            model = gemini_client.GenerativeModel('gemini-2.5-flash')
            prompt = f"Summarize the following text in {min_length}-{max_length} words:\n\n{text[:15000]}"
            response = model.generate_content(prompt)
            return response.text

    except Exception as e:
        print(f"Gemini summary error: {e}")

    return None

def summarize_pdf(pdf_file, max_length, min_length):
    """Summarize an uploaded PDF: Gemini first, local pipeline as fallback."""
    text, error = extract_text_from_pdf(pdf_file)
    if error:
        return error

    via_gemini = summarize_with_gemini(text, max_length, min_length)
    if via_gemini:
        return via_gemini

    model = load_summarizer()
    if not model:
        return "Error: No summarization available"
    try:
        # Local model has a short context; truncate the input.
        summary = model(text[:3500], max_length=max_length, min_length=min_length, do_sample=False)
        return summary[0]['summary_text']
    except Exception as exc:
        return f"Summarization error: {str(exc)}"

def generate_essay_with_gemini(prompt, essay_type, word_count, tone):
    """Generate an essay with Gemini.

    Returns formatted markdown (heading + essay + footer) or None when the
    client is unavailable or generation fails.
    """
    if not gemini_client or not gemini_key:
        return None

    try:
        full_prompt = f"""You are an expert academic writer. Write a {essay_type} essay in {tone} tone.
Target length: approximately {word_count} words.
Topic: {prompt}
Requirements:
- Engaging introduction with clear thesis statement
- Well-structured body paragraphs with supporting arguments and evidence
- Strong conclusion that summarizes main points
- Use academic vocabulary and formal writing style
Write the essay now:"""

        if hasattr(gemini_client, 'models'):
            try:
                # New SDK call style.
                response = gemini_client.models.generate_content(
                    model="gemini-2.5-flash",
                    contents=full_prompt
                )
                essay = response.text.strip()
            except Exception:  # was a bare except: — also trapped KeyboardInterrupt
                if hasattr(gemini_client, 'GenerativeModel'):
                    model = gemini_client.GenerativeModel('gemini-2.5-flash')
                    response = model.generate_content(full_prompt)
                    essay = response.text.strip()
                else:
                    return None
        else:
            # Legacy module interface.
            model = gemini_client.GenerativeModel('gemini-2.5-flash')
            response = model.generate_content(full_prompt)
            essay = response.text.strip()

        # Strip a leading markdown heading the model sometimes emits.
        essay = re.sub(r'^#+\s*', '', essay)
        word_count_actual = len(essay.split())

        return f"""# {essay_type} Essay: {prompt[:50]}{'...' if len(prompt) > 50 else ''}
{essay}
---
*~{word_count_actual} words | {tone} | Gemini*"""

    except Exception as e:
        print(f"Essay generation error: {e}")
        return None

def generate_essay(prompt, essay_type, word_count, tone):
    """Validate the topic, try Gemini, then fall back to a static template."""
    if not prompt or len(prompt.strip()) < 10:
        return "Please provide a detailed prompt (at least 10 characters)."

    if gemini_client and gemini_key:
        ai_essay = generate_essay_with_gemini(prompt, essay_type, word_count, tone)
        if ai_essay:
            return ai_essay

    # Static fallback when the SDK/key is missing or generation failed.
    return f"""❌ AI generation not available.
Template Essay: {prompt}
Introduction:
{prompt} is an important topic that requires careful consideration. This essay explores its key aspects.
Body:
The significance of {prompt} cannot be overstated. Various perspectives exist on this matter, with experts debating the best approaches. Research continues to reveal new insights.
Conclusion:
In conclusion, {prompt} represents a complex issue that demands attention.
---
*Template fallback - Gemini SDK issue*
Check: 1) GEMINI_API_KEY is set 2) google-genai package is installed"""

def summarize_text(text, max_length, min_length):
    """Summarize pasted text: Gemini first, then the local pipeline."""
    if len(text.strip()) < 100:
        return "Please provide at least 100 characters to summarize."

    via_gemini = summarize_with_gemini(text, max_length, min_length)
    if via_gemini:
        return via_gemini

    model = load_summarizer()
    if not model:
        return "Error: No summarization available"
    try:
        # Local model has a short context; truncate the input.
        summary = model(text[:3500], max_length=max_length, min_length=min_length, do_sample=False)
        return summary[0]['summary_text']
    except Exception as exc:
        return f"Error: {str(exc)}"

# ==================== APP 2: QUIZ GENERATOR ====================

def extract_sentences(text):
    """Split on ./!/? and keep only sentences longer than six words."""
    return [
        fragment.strip()
        for fragment in re.split(r'[.!?]', text)
        if len(fragment.split()) > 6
    ]

def create_quiz(text, num_questions):
    """Build fill-in-the-blank questions from *text*.

    Each question blanks one mid-sentence word; up to three distractors are
    drawn from the rest of the text's vocabulary. Returns a list of dicts
    with keys 'question', 'options', 'answer'.
    """
    sentences = extract_sentences(text)

    # Can't ask for more questions than we have usable sentences.
    if len(sentences) < num_questions:
        num_questions = len(sentences)

    selected = random.sample(sentences, num_questions)

    quiz_data = []

    for sentence in selected:
        words = sentence.split()
        # Avoid blanking the first/last two words so the question reads naturally
        # (extract_sentences guarantees > 6 words, so this slice is non-empty).
        keyword = random.choice(words[2:-2])

        question = sentence.replace(keyword, "_____")

        # BUG FIX: the original sampled distractors from the FULL vocabulary,
        # so the correct answer could appear twice among the options, and
        # random.sample crashed when the text had fewer than 3 distinct words.
        distractor_pool = list(set(text.split()) - {keyword})
        wrong = random.sample(distractor_pool, min(3, len(distractor_pool)))

        options = wrong + [keyword]
        random.shuffle(options)

        quiz_data.append({
            "question": question,
            "options": options,
            "answer": keyword
        })

    return quiz_data

def start_quiz(text, num_questions, timer_minutes):
    """Kick off a quiz: build questions, start the countdown, show question 0."""
    if not text.strip():
        # 8-tuple matches the Gradio outputs wiring: message + cleared widgets/state.
        return "⚠️ Please enter study material.", None, None, None, None, None, None, ""

    quiz = create_quiz(text, num_questions)
    deadline = time.time() + timer_minutes * 60
    return show_question(quiz, 0, 0, deadline)

def show_question(quiz, index, score, end_time):
    """Render question *index*, or finish when time is up / questions run out."""
    if time.time() > end_time or index >= len(quiz):
        return finish_quiz(score, len(quiz))

    current = quiz[index]
    seconds_left = int(end_time - time.time())

    # 8-tuple matching the Gradio outputs: markdown, radio update, score,
    # then the four state slots, then the timer line.
    return (
        f"### Question {index+1}:\n\n{current['question']}",
        gr.update(choices=current["options"], value=None),
        f"Score: {score}",
        quiz,
        index,
        score,
        end_time,
        f"⏳ Time Left: {seconds_left} seconds",
    )

def submit_answer(selected, quiz, index, score, end_time):
    """Grade the selected option and advance to the next question.

    BUG FIX: after a quiz finishes, the quiz/index states are reset to
    None; clicking Submit again used to raise TypeError on quiz[index].
    Guard and re-emit the finished screen instead.
    """
    if not quiz or index is None:
        return finish_quiz(score or 0, 0)

    if selected == quiz[index]["answer"]:
        score += 1

    index += 1
    return show_question(quiz, index, score, end_time)

def finish_quiz(score, total):
    """Return the end-of-quiz UI tuple and clear all quiz state slots."""
    summary = f"## βœ… Quiz Finished!\n\nFinal Score: **{score}/{total}**"
    # Same 8-tuple shape as show_question: the four Nones wipe the
    # quiz/index/score/end_time states, the final "" blanks the timer.
    return (summary, gr.update(choices=[]), "", None, None, None, None, "")

# ==================== APP 3: URDU TRANSLATOR ====================

def translate_to_urdu(text):
    """Translate English text to Urdu using Groq's Llama 3.3 70B"""
    if not text or not text.strip():
        return "Please enter some text to translate."

    if not groq_client:
        return "❌ Groq API not configured. Please add GROQ_API_KEY."

    system_message = {
        "role": "system",
        "content": "You are a professional English to Urdu translator. Translate the given text accurately to Urdu (اردو) using natural, conversational language. Respond ONLY with the translation, no explanations."
    }
    user_message = {"role": "user", "content": f"Translate to Urdu:\n\n{text}"}

    try:
        completion = groq_client.chat.completions.create(
            messages=[system_message, user_message],
            model="llama-3.3-70b-versatile",
            temperature=0.3,  # low temperature for faithful translation
            max_completion_tokens=2048,
        )
        return completion.choices[0].message.content
    except Exception as exc:
        return f"Error: {str(exc)}"

# ==================== CSS ====================

# BUG FIX: CSS ignores @import statements that appear after any other
# rule, so the Urdu webfont @import — originally placed mid-stylesheet —
# never loaded. It must be the very first statement in the sheet.
custom_css = """
@import url('https://fonts.googleapis.com/css2?family=Noto+Nastaliq+Urdu&display=swap');
.header { 
    text-align: center; 
    margin-bottom: 2rem; 
    padding: 2rem; 
    background: linear-gradient(135deg, #059669, #6b7280); 
    border-radius: 12px; 
    color: white; 
}
.header h1 { 
    font-size: 2.5rem; 
    margin-bottom: 0.5rem; 
}
.status-ok { 
    background: #d1fae5; 
    border: 2px solid #059669; 
    padding: 1rem; 
    border-radius: 8px; 
    text-align: center; 
    color: #065f46; 
    font-weight: bold; 
    margin-bottom: 1rem; 
}
.status-warn { 
    background: #fef3c7; 
    border: 2px solid #f59e0b; 
    padding: 1rem; 
    border-radius: 8px; 
    text-align: center; 
    color: #92400e; 
    margin-bottom: 1rem; 
}
.status-error { 
    background: #fee2e2; 
    border: 2px solid #ef4444; 
    padding: 1rem; 
    border-radius: 8px; 
    text-align: center; 
    color: #991b1b; 
    margin-bottom: 1rem; 
}
.urdu-text { 
    font-family: 'Noto Nastaliq Urdu', serif; 
    font-size: 1.5em; 
    line-height: 2; 
    direction: rtl; 
    text-align: right;
}
"""

# ==================== MAIN UI ====================

# BUG FIX: custom_css was never applied — Blocks.launch() has no `css`
# parameter, so the stylesheet must be supplied to the gr.Blocks
# constructor here.
with gr.Blocks(title="Student AI Suite", css=custom_css) as demo:
    
    # Status Display — pick the banner style from the readiness string.
    status = check_status()
    if "βœ…" in status and "❌" not in status:
        gr.HTML(f'<div class="status-ok">{status}</div>')
    elif "❌" in status:
        gr.HTML(f'<div class="status-error">{status}</div>')
    else:
        gr.HTML(f'<div class="status-warn">{status}</div>')
    
    gr.HTML('<div class="header"><h1>πŸŽ“ Student AI Suite</h1><p>Essay Generator β€’ PDF Summarizer β€’ Quiz Generator β€’ Urdu Translator</p></div>')
    
    with gr.Tabs():
        
        # ==================== TAB 1: PDF & ESSAY ====================
        with gr.TabItem("πŸ“„ Essay & PDF Tools"):
            with gr.Tabs():
                with gr.TabItem("πŸ“‘ PDF Summarizer"):
                    pdf_input = gr.File(label="Upload PDF", file_types=[".pdf"], type="binary")
                    with gr.Row():
                        max_len = gr.Slider(50, 500, 200, step=10, label="Max Length")
                        min_len = gr.Slider(20, 200, 50, step=10, label="Min Length")
                    summarize_btn = gr.Button("πŸ“ Summarize PDF", variant="primary")
                    pdf_output = gr.Textbox(label="Summary", lines=12)
                    
                    gr.Markdown("---")
                    # Plain-text alternative sharing the same length sliders.
                    text_input = gr.Textbox(label="Or paste text", lines=5)
                    text_btn = gr.Button("Summarize Text")
                    text_output = gr.Textbox(label="Summary", lines=8)
                
                with gr.TabItem("✍️ Essay Generator"):
                    prompt_input = gr.Textbox(label="Essay Topic", placeholder="Example: 'The impact of AI on education'", lines=3)
                    essay_type = gr.Dropdown(["Argumentative", "Expository", "Descriptive", "Persuasive"], value="Argumentative")
                    tone = gr.Dropdown(["Academic", "Formal", "Neutral"], value="Academic")
                    words = gr.Slider(200, 1000, 500, step=50, label="Word Count")
                    gen_btn = gr.Button("✨ Generate Essay", variant="primary")
                    essay_output = gr.Textbox(label="Generated Essay", lines=25)
        
        # ==================== TAB 2: QUIZ GENERATOR ====================
        with gr.TabItem("🎯 Smart Quiz Generator"):
            gr.Markdown("### Paste your study text β†’ generate quiz β†’ test yourself!")
            
            quiz_text_input = gr.Textbox(lines=6, label="Study Material")
            
            with gr.Row():
                num_questions = gr.Slider(1, 10, value=5, step=1, label="Number of Questions")
                timer_minutes = gr.Slider(1, 10, value=2, step=1, label="Timer (Minutes)")
            
            start_btn = gr.Button("Start Quiz", variant="primary")
            
            question_box = gr.Markdown()
            options_radio = gr.Radio(label="Select Answer")
            submit_btn = gr.Button("Submit Answer", variant="secondary")
            score_display = gr.Markdown()
            timer_display = gr.Markdown()
            
            # Hidden states for quiz (question list, current index, score, deadline)
            quiz_state = gr.State()
            index_state = gr.State()
            score_state = gr.State()
            endtime_state = gr.State()
        
        # ==================== TAB 3: URDU TRANSLATOR ====================
        with gr.TabItem("🌍 English to Urdu"):
            gr.Markdown("### Powered by Groq AI")
            
            with gr.Row():
                with gr.Column():
                    input_text = gr.Textbox(
                        label="English Text",
                        placeholder="Enter text to translate...",
                        lines=4
                    )
                    translate_btn = gr.Button("Translate", variant="primary")
                    
                with gr.Column():
                    output_text = gr.Textbox(
                        label="Urdu Translation (اردو)",
                        lines=4,
                        elem_classes=["urdu-text"],
                        interactive=False
                    )
            
            examples = [
                "Hello, how are you?",
                "I love Pakistan.",
                "The weather is beautiful today."
            ]
            gr.Examples(examples=examples, inputs=input_text)
    
    # ==================== EVENT HANDLERS ====================
    
    # PDF & Essay handlers
    summarize_btn.click(summarize_pdf, [pdf_input, max_len, min_len], pdf_output)
    text_btn.click(summarize_text, [text_input, max_len, min_len], text_output)
    gen_btn.click(generate_essay, [prompt_input, essay_type, words, tone], essay_output)
    
    # Quiz handlers — both events share the same 8-slot output wiring that
    # start_quiz / submit_answer / show_question / finish_quiz all return.
    start_btn.click(
        start_quiz,
        inputs=[quiz_text_input, num_questions, timer_minutes],
        outputs=[
            question_box,
            options_radio,
            score_display,
            quiz_state,
            index_state,
            score_state,
            endtime_state,
            timer_display
        ]
    )
    
    submit_btn.click(
        submit_answer,
        inputs=[options_radio, quiz_state, index_state, score_state, endtime_state],
        outputs=[
            question_box,
            options_radio,
            score_display,
            quiz_state,
            index_state,
            score_state,
            endtime_state,
            timer_display
        ]
    )
    
    # Urdu translator handlers (button click and Enter key both translate)
    translate_btn.click(fn=translate_to_urdu, inputs=input_text, outputs=output_text)
    input_text.submit(fn=translate_to_urdu, inputs=input_text, outputs=output_text)

if __name__ == "__main__":
    # BUG FIX: Blocks.launch() has no `css` parameter — passing it raised
    # TypeError at startup. Custom styling belongs in gr.Blocks(css=...).
    demo.launch(server_name="0.0.0.0", server_port=7860)