Talhaalvi12 committed on
Commit
8e4a246
·
verified ·
1 Parent(s): 048ce45

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -31
app.py CHANGED
@@ -1,51 +1,45 @@
1
import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Load model
# NOTE: the checkpoint is downloaded/loaded at import time, so the first
# start of the app can be slow; FLAN-T5-large is a sizable model for CPU.
tokenizer = AutoTokenizer.from_pretrained("google/flan-t5-large")
model = AutoModelForSeq2SeqLM.from_pretrained("google/flan-t5-large")
 
7
 
8
def generate_notes(topic):
    """Generate detailed, paragraph-style study notes for the given topic."""
    # Guard clause: reject blank or whitespace-only topics up front.
    if not topic.strip():
        return "⚠️ Please enter a topic first."

    prompt = (
        f"Write detailed, well-structured study notes about the topic '{topic}'. "
        f"Include an introduction, key concepts, examples, and a conclusion. "
        f"Make the notes clear, paragraph-based, and easy for a student to understand. "
        f"Use complete sentences and proper formatting."
    )

    # Tokenize the prompt, truncating if it exceeds the model's input window.
    encoded = tokenizer(prompt, return_tensors="pt", truncation=True)

    # Sample a long-form completion; sampling adds variety while the
    # repetition penalty discourages the model from looping on phrases.
    generated = model.generate(
        **encoded,
        do_sample=True,          # enables sampling so temperature/top_p apply
        max_new_tokens=600,      # upper bound on the length of the notes
        temperature=0.8,
        top_p=0.9,
        repetition_penalty=1.1,
    )

    decoded = tokenizer.decode(generated[0], skip_special_tokens=True)
    return decoded.strip()
 
38
- # Gradio UI
39
# Gradio UI
# Wires the note generator into a simple textbox-in / textbox-out interface.
iface = gr.Interface(
    fn=generate_notes,
    inputs=gr.Textbox(lines=2, placeholder="Enter a topic (e.g. Photosynthesis, Machine Learning)"),
    outputs=gr.Textbox(label="Generated Detailed Notes", lines=15),
    title="📘 AI Detailed Note Generator",
    description="Generate detailed, structured notes with multiple paragraphs using FLAN-T5-large (runs free on CPU).",
    # Clickable example topics shown below the input box.
    examples=[
        ["Quantum computing"],
        ["Climate change and its effects"],
        ["Photosynthesis process"]
    ]
)
51
 
 
1
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load TinyLlama (small but smart)
# The model id is kept in one variable so the tokenizer and model stay in
# sync; loading happens at import time, so startup includes the download.
model_name = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
8
 
9
def generate_notes(topic):
    """
    Generate detailed, paragraph-style study notes for a topic.

    Parameters
    ----------
    topic : str
        Subject to write notes about; surrounding whitespace is ignored.

    Returns
    -------
    str
        The generated notes, or a warning message when the input is empty.
    """
    topic = topic.strip()
    if not topic:
        return "⚠️ Please enter a topic first."

    prompt = (
        f"You are an expert teacher. Write detailed, well-structured study notes about '{topic}'. "
        f"Include an introduction, main concepts, key points, and a short summary. "
        f"Keep the tone educational and clear for students."
    )

    inputs = tokenizer(prompt, return_tensors="pt")

    # BUG FIX: without do_sample=True, generate() uses greedy decoding and
    # silently ignores temperature/top_p (transformers warns about this).
    outputs = model.generate(
        **inputs,
        max_new_tokens=600,
        do_sample=True,
        temperature=0.7,
        top_p=0.9,
        repetition_penalty=1.2,
    )

    # Causal LMs echo the prompt at the start of the output. Drop it by
    # slicing off the input tokens instead of str.replace(prompt, ""),
    # which is fragile: decoding may not reproduce the prompt text
    # byte-for-byte, leaving the prompt embedded in the result.
    prompt_len = inputs["input_ids"].shape[1]
    text = tokenizer.decode(outputs[0][prompt_len:], skip_special_tokens=True)
    return text.strip()
31
 
 
32
# Gradio UI: a single textbox in, the generated notes out.
iface = gr.Interface(
    fn=generate_notes,
    inputs=gr.Textbox(lines=2, placeholder="Enter a topic (e.g. Photosynthesis, World Geography)"),
    outputs=gr.Textbox(lines=15, label="Generated Notes"),
    title="📚 AI Note Generator (TinyLlama 1.1B Chat)",
    description="Generate long, educational notes for free using the TinyLlama 1.1B Chat model.",
    # Clickable example topics shown below the input box.
    examples=[
        ["Photosynthesis process"],
        ["Causes of World War II"],
        ["Artificial Intelligence"],
        ["The Water Cycle"]
    ]
)

# BUG FIX: the script built the interface but never served it — without
# launch() the app exits immediately when run directly.
if __name__ == "__main__":
    iface.launch()
45