Scaryscar committed on
Commit
b4dedeb
·
verified ·
1 Parent(s): 519eb55

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +81 -80
app.py CHANGED
@@ -5,7 +5,6 @@ import matplotlib.pyplot as plt
5
  import numpy as np
6
  import re
7
  import sympy as sp
8
- from transformers import pipeline
9
  from io import BytesIO
10
  import base64
11
 
@@ -14,10 +13,15 @@ def solve_math(expression):
14
  """Bulletproof math solver using SymPy"""
15
  try:
16
  expr = sp.sympify(expression)
17
- steps = sp.working(expr)
 
 
 
 
 
18
  return {
19
  'answer': str(expr.evalf()),
20
- 'steps': steps if steps else [f"Direct evaluation: {expression} = {expr}"]
21
  }
22
  except:
23
  return None
@@ -44,7 +48,6 @@ def create_graph(graph_type):
44
 
45
  plt.title(title)
46
  plt.grid(True)
47
-
48
  buf = BytesIO()
49
  plt.savefig(buf, format='png')
50
  plt.close()
@@ -53,78 +56,77 @@ def create_graph(graph_type):
53
  return None
54
 
55
  # ===== AI SYSTEM =====
56
- class UltimateAISystem:
57
  def __init__(self):
58
  self.device = 0 if torch.cuda.is_available() else -1
59
  self.dtype = torch.float16 if self.device == 0 else torch.float32
60
- self.model = pipeline(
61
- "text-generation",
62
- model="mistralai/Mistral-7B-Instruct-v0.1",
63
- device=self.device,
64
- torch_dtype=self.dtype
65
- )
66
-
67
- def generate_answer(self, prompt):
68
- """Handle all question types with guaranteed response"""
69
- start_time = time.time()
70
-
71
- # 1. Check for basic math
72
- math_match = re.match(r"^(?:[Ww]hat is|Calculate|Solve) ([0-9\+\-\*\/\^\(\) ]+)\??$", prompt)
73
- if math_match:
74
- math_result = solve_math(math_match.group(1))
75
- if math_result:
76
- steps = "\n".join([f"• {step}" for step in math_result['steps']])
77
- return {
78
- 'answer': f"Answer: {math_result['answer']}\n\nSteps:\n{steps}",
79
- 'graph': None,
80
- 'time': time.time() - start_time
81
- }
82
-
83
- # 2. Check for graph requests
84
- graph_type = None
85
- if any(kw in prompt.lower() for kw in ["graph", "plot", "chart"]):
86
- if "linear" in prompt.lower():
87
- graph_type = "linear"
88
- elif "quadratic" in prompt.lower():
89
- graph_type = "quadratic"
90
- elif any(kw in prompt.lower() for kw in ["sin", "cos", "tan", "trig"]):
91
- graph_type = "trigonometric"
92
 
93
- # 3. Generate AI response
94
  try:
95
  response = self.model(
96
- f"""Provide a detailed step-by-step explanation. Include mathematical working where applicable.
97
-
98
- Question: {prompt}
99
-
100
- Answer in clear steps:
101
- 1.""",
102
- max_new_tokens=300,
103
- temperature=0.3,
104
- do_sample=True
105
  )[0]['generated_text']
106
-
107
- answer = response.split("Answer in clear steps:")[-1].strip()
108
- if not answer:
109
- answer = "I couldn't generate a response. Please try again."
110
-
111
- # 4. Generate graph if requested
112
- graph = create_graph(graph_type) if graph_type else None
113
-
114
- return {
115
- 'answer': f"Step-by-Step Explanation:\n\n{answer}",
116
- 'graph': graph,
117
- 'time': time.time() - start_time
118
- }
119
  except:
120
- return {
121
- 'answer': "System error. Please try again.",
122
- 'graph': None,
123
- 'time': time.time() - start_time
124
- }
125
 
126
- # Initialize system
127
- ai_system = UltimateAISystem()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
128
 
129
  # ===== GRADIO INTERFACE =====
130
  with gr.Blocks(theme=gr.themes.Soft(), title="🧠 Ultimate AI Assistant") as demo:
@@ -132,8 +134,8 @@ with gr.Blocks(theme=gr.themes.Soft(), title="🧠 Ultimate AI Assistant") as de
132
 
133
  with gr.Row():
134
  question = gr.Textbox(
135
- label="Ask anything",
136
- placeholder="E.g. 'What is 2+2?' or 'Explain quantum physics with a graph'",
137
  lines=3
138
  )
139
 
@@ -157,25 +159,24 @@ with gr.Blocks(theme=gr.themes.Soft(), title="🧠 Ultimate AI Assistant") as de
157
  gr.Examples(
158
  examples=[
159
  "What is (15*3)+(40/2)?",
160
- "Explain the photoelectric effect with a graph",
161
- "Solve - 5x + 6 = 0 step by step",
162
- "Show a trigonometric function plot"
163
  ],
164
  inputs=question
165
  )
166
 
167
- def process_question(prompt):
168
- result = ai_system.generate_answer(prompt)
169
- show_graph = result['graph'] is not None
170
- output_text = f"""{result['answer']}
171
-
172
- ⏱️ Generated in {result['time']:.2f} seconds"""
173
- return output_text, gr.update(visible=show_graph, value=result['graph'])
174
 
175
  submit_btn.click(
176
- fn=process_question,
177
  inputs=question,
178
  outputs=[answer, graph]
 
 
 
 
179
  )
180
 
181
  if __name__ == "__main__":
 
5
  import numpy as np
6
  import re
7
  import sympy as sp
 
8
  from io import BytesIO
9
  import base64
10
 
 
13
def solve_math(expression):
    """Evaluate an arithmetic expression with SymPy and build readable steps.

    Parameters
    ----------
    expression : str
        Plain arithmetic text, e.g. "(15*3)+(40/2)".

    Returns
    -------
    dict | None
        {'answer': str, 'steps': list[str]} on success; None when the
        expression cannot be parsed or evaluated (callers use None as the
        "this wasn't math" signal).
    """
    try:
        expr = sp.sympify(expression)
        value = expr.evalf()  # evaluate once; reused for both answer and steps
        steps = []
        # Describe the top-level operation, joining *all* operands so an
        # n-ary expression like 1+2+3 is reported completely (the previous
        # version printed only args[0] and args[1]).
        if isinstance(expr, sp.Add):
            steps.append(f"Addition: {' + '.join(map(str, expr.args))}")
        elif isinstance(expr, sp.Mul):
            steps.append(f"Multiplication: {' × '.join(map(str, expr.args))}")
        steps.append(f"Final result: {value}")
        return {
            'answer': str(value),
            'steps': steps,
        }
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # no longer swallowed; any parse/eval failure still maps to None.
        return None
 
48
 
49
  plt.title(title)
50
  plt.grid(True)
 
51
  buf = BytesIO()
52
  plt.savefig(buf, format='png')
53
  plt.close()
 
56
  return None
57
 
58
  # ===== AI SYSTEM =====
59
class LocalAISystem:
    """Local text-generation wrapper with graceful degradation.

    If the model (or the transformers library itself) cannot be loaded,
    the instance stays usable: `self.model` is None and
    generate_explanation() returns a placeholder instead of raising.
    """

    def __init__(self):
        # GPU if available (device index 0), else CPU (-1, pipeline convention).
        self.device = 0 if torch.cuda.is_available() else -1
        # fp16 only on GPU; CPU pipelines need full fp32 precision.
        self.dtype = torch.float16 if self.device == 0 else torch.float32
        try:
            # BUG FIX: this commit removed the top-level
            # `from transformers import pipeline` but kept the call below;
            # the bare `except:` then silently turned the NameError into
            # self.model = None on every start.  Importing locally restores
            # the model while still degrading cleanly if transformers is
            # missing or the download fails.
            from transformers import pipeline
            self.model = pipeline(
                "text-generation",
                model="facebook/opt-1.3b",  # Open-access model
                device=self.device,
                torch_dtype=self.dtype,
            )
        except Exception:
            # Any load failure (missing dependency, download error, OOM)
            # leaves the system degraded-but-alive.
            self.model = None

    def generate_explanation(self, prompt):
        """Local generation with fallback; never raises.

        Returns the model's text after the last colon (strips the
        "Explain step-by-step:" prompt echo), or a placeholder string
        when the model is unavailable or generation fails.
        """
        if not self.model:
            return "System is initializing..."
        try:
            response = self.model(
                f"Explain step-by-step: {prompt}",
                max_new_tokens=200,
                temperature=0.5,
            )[0]['generated_text']
            # Keep only the text after the final colon.
            return response.split(":")[-1].strip()
        except Exception:
            return "Could not generate explanation."
 
 
 
 
87
 
88
+ # Initialize systems
89
+ ai_system = LocalAISystem()
90
+
91
+ # ===== MAIN PROCESSING =====
92
def process_query(prompt):
    """Route a user question to the math solver, grapher, or language model.

    Parameters
    ----------
    prompt : str | None
        Raw question text from the UI.

    Returns
    -------
    tuple
        (formatted answer text, graph image or None) — matches the two
        Gradio outputs wired to the submit button.
    """
    start_time = time.time()

    # 1. Handle empty input (also guards against None from the UI, which
    #    previously raised AttributeError on .strip()).
    if not prompt or not prompt.strip():
        return "Please enter a question", None

    # 2. Pure-arithmetic questions get an exact SymPy answer with steps.
    math_match = re.match(r"^(?:[Ww]hat is|Calculate|Solve) ([0-9\+\-\*\/\^\(\) ]+)\??$", prompt)
    if math_match:
        math_result = solve_math(math_match.group(1))
        if math_result:
            steps = "\n".join([f"• {step}" for step in math_result['steps']])
            return f"Answer: {math_result['answer']}\n\nSteps:\n{steps}", None

    # 3. Detect graph requests — lower-case once instead of per keyword test.
    lowered = prompt.lower()
    graph_type = None
    if any(kw in lowered for kw in ["graph", "plot", "chart"]):
        if "linear" in lowered:
            graph_type = "linear"
        elif "quadratic" in lowered:
            graph_type = "quadratic"
        elif any(kw in lowered for kw in ["sin", "cos", "tan", "trig"]):
            graph_type = "trigonometric"

    # 4. Everything else goes to the local language model.
    response = ai_system.generate_explanation(prompt)
    graph = create_graph(graph_type) if graph_type else None

    # Format output with elapsed wall-clock time.
    gen_time = time.time() - start_time
    formatted_response = f"""📝 Step-by-Step Explanation:

{response}

⏱️ Generated in {gen_time:.2f} seconds"""

    return formatted_response, graph
130
 
131
  # ===== GRADIO INTERFACE =====
132
  with gr.Blocks(theme=gr.themes.Soft(), title="🧠 Ultimate AI Assistant") as demo:
 
134
 
135
  with gr.Row():
136
  question = gr.Textbox(
137
+ label="Your Question",
138
+ placeholder="Try: 'What is 2+2?' or 'Show a quadratic graph'",
139
  lines=3
140
  )
141
 
 
159
  gr.Examples(
160
  examples=[
161
  "What is (15*3)+(40/2)?",
162
+ "Explain linear relationships with a graph",
163
+ "Solve 3x + 5 = 20 step by step"
 
164
  ],
165
  inputs=question
166
  )
167
 
168
def update_ui(response, img):
    """Forward the answer text and toggle the graph component.

    The graph output is made visible only when an image was produced;
    otherwise it is hidden (value still passed through unchanged).
    """
    return response, gr.update(visible=(img is not None), value=img)
 
 
 
 
171
 
172
  submit_btn.click(
173
+ fn=process_query,
174
  inputs=question,
175
  outputs=[answer, graph]
176
+ ).then(
177
+ fn=update_ui,
178
+ inputs=[answer, graph],
179
+ outputs=[answer, graph]
180
  )
181
 
182
  if __name__ == "__main__":