Scaryscar committed on
Commit
807936e
·
verified ·
1 Parent(s): ccdf56b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +126 -127
app.py CHANGED
@@ -3,137 +3,137 @@ import torch
3
  import time
4
  import matplotlib.pyplot as plt
5
  import numpy as np
 
 
 
6
  from io import BytesIO
7
  import base64
8
- from transformers import pipeline
9
 
10
# ===== FAILSAFE SYSTEM =====
class RobustAISystem:
    """Text-generation wrapper that degrades gracefully.

    If the model cannot be loaded, ``self.model`` stays ``None`` and
    :meth:`generate` returns a fixed "initializing" message instead of
    raising. Also provides :meth:`create_graph` for small example plots.
    """

    def __init__(self):
        # GPU is pipeline device 0; -1 selects CPU.
        self.device = 0 if torch.cuda.is_available() else -1
        # fp16 only makes sense on GPU; CPU inference needs fp32.
        self.dtype = torch.float16 if self.device == 0 else torch.float32
        self.model = None
        self.load_model()

    def load_model(self):
        """Load the generation pipeline; on any failure leave self.model None."""
        try:
            self.model = pipeline(
                "text-generation",
                model="mistralai/Mistral-7B-v0.1",
                device=self.device,
                torch_dtype=self.dtype,
            )
            # Smoke-test so a broken load is detected here, not on the
            # first user request.
            test = self.generate("Test", simple=True)
            if not test.strip():
                raise RuntimeError("Blank response")
        except Exception as e:
            # Boundary handler: log and fall back to the stub response.
            print(f"Model load failed: {str(e)}")
            self.model = None

    def generate(self, prompt, simple=False):
        """Return generated text; never raises, always returns a string.

        :param prompt: the user's question.
        :param simple: when True, send *prompt* verbatim (used by the
            load-time smoke test); otherwise wrap it in the step-by-step
            instruction template.
        """
        if not self.model:
            return "System is initializing... Please wait"

        try:
            full_prompt = prompt if simple else f"""
Provide a detailed, step-by-step answer. If the question involves data or relationships,
describe what kind of graph would best represent it.

Question: {prompt}

Answer:
1."""
            output = self.model(
                full_prompt,
                max_new_tokens=300,
                temperature=0.7,
                do_sample=True,
                # Silence the "no pad token" warning for GPT-style models.
                pad_token_id=self.model.tokenizer.eos_token_id,
            )[0]['generated_text']

            # Keep only the text after the prompt echo.
            return output.split("Answer:")[-1].strip() or "I couldn't generate a response. Please try again."
        except Exception:
            return "Error generating response. Please rephrase your question."

    def create_graph(self, graph_type):
        """Render an example graph; return base64 PNG bytes or None.

        :param graph_type: "linear", "quadratic", or anything else
            (falls back to a sine curve).
        """
        try:
            plt.figure(figsize=(8, 4))
            x = np.linspace(0, 10, 50)

            if graph_type == "linear":
                y = x
                plt.plot(x, y, 'b-')
                plt.title("Linear Relationship (y = x)")
            elif graph_type == "quadratic":
                y = x**2
                plt.plot(x, y, 'r-')
                plt.title("Quadratic Relationship (y = x²)")
            else:  # Default case
                y = np.sin(x)
                plt.plot(x, y, 'g-')
                plt.title("Periodic Relationship (y = sin(x))")

            plt.xlabel("X-axis")
            plt.ylabel("Y-axis")
            plt.grid(True)

            buf = BytesIO()
            plt.savefig(buf, format='png', dpi=100)
            return base64.b64encode(buf.getvalue()).decode('utf-8')
        except Exception:
            # Graphs are best-effort; callers treat None as "no image".
            return None
        finally:
            # Fix: the original closed the figure only on the success
            # path, leaking a figure whenever plotting/saving raised.
            plt.close()
 
 
 
 
 
91
 
92
# Bring the AI system up, retrying a few times because model download /
# load can fail transiently.
ai_system = None
for attempt in range(3):
    try:
        ai_system = RobustAISystem()
        if ai_system.model:
            break
    except Exception as err:
        print(f"Initialization attempt failed: {str(err)}")
        time.sleep(2)
102
 
103
  # ===== GRADIO INTERFACE =====
104
def process_request(prompt):
    """Answer *prompt*; return (formatted_text, graph_data_uri_or_None)."""
    start_time = time.time()

    if not prompt.strip():
        return "Please enter a question", None

    # Generate the text answer (stub message while the system is loading).
    if ai_system:
        response = ai_system.generate(prompt)
    else:
        response = "System starting up... Try again in 30 seconds"

    # Attach a graph only when the question mentions visualization.
    graph_img = None
    wants_graph = any(
        keyword in prompt.lower()
        for keyword in ("graph", "plot", "chart", "visualize", "diagram")
    )
    if wants_graph:
        kind = "quadratic" if "quadratic" in prompt.lower() else "linear"
        encoded = ai_system.create_graph(kind) if ai_system else None
        if encoded:
            graph_img = f"data:image/png;base64,{encoded}"

    # Append timing information to the response text.
    gen_time = time.time() - start_time
    formatted_response = f"""{response}

⏱️ Generated in {gen_time:.2f} seconds"""

    return formatted_response, graph_img
129
-
130
- with gr.Blocks(theme=gr.themes.Default(), title="🔍 AI Assistant") as demo:
131
- gr.Markdown("""<h1><center>Intelligent Q&A with Visualizations</center></h1>""")
132
 
133
  with gr.Row():
134
  question = gr.Textbox(
135
- label="Your Question",
136
- placeholder="Ask anything... (e.g. 'Explain photosynthesis and show a linear graph')",
137
  lines=3
138
  )
139
 
@@ -142,45 +142,44 @@ with gr.Blocks(theme=gr.themes.Default(), title="🔍 AI Assistant") as demo:
142
 
143
  with gr.Row():
144
  answer = gr.Textbox(
145
- label="Detailed Explanation",
146
  lines=10,
147
  interactive=False
148
  )
149
 
150
  with gr.Row():
151
  graph = gr.Image(
152
- label="Relevant Graph",
153
  visible=False
154
  )
155
 
156
  # Pre-tested examples
157
  gr.Examples(
158
  examples=[
159
- "Explain the relationship between force and acceleration with a graph",
160
- "Show a quadratic graph and explain its applications",
161
- "Describe population growth with a visual diagram"
 
162
  ],
163
  inputs=question
164
  )
165
 
166
- def update_outputs(response, img):
167
- show_graph = img is not None
168
- return response, gr.update(visible=show_graph, value=img)
 
 
 
 
169
 
170
  submit_btn.click(
171
- fn=process_request,
172
  inputs=question,
173
  outputs=[answer, graph]
174
- ).then(
175
- fn=update_outputs,
176
- inputs=[answer, graph],
177
- outputs=[answer, graph]
178
  )
179
 
180
if __name__ == "__main__":
    # 0.0.0.0 so the server is reachable from outside the container.
    demo.launch(show_error=True, server_name="0.0.0.0", server_port=7860)
186
-
 
3
  import time
4
  import matplotlib.pyplot as plt
5
  import numpy as np
6
+ import re
7
+ import sympy as sp
8
+ from transformers import pipeline
9
  from io import BytesIO
10
  import base64
 
11
 
12
# ===== MATH ENGINE =====
def solve_math(expression):
    """Evaluate a plain arithmetic expression with SymPy.

    :param expression: arithmetic text such as "(15*3)+(40/2)".
    :returns: ``{'answer': str, 'steps': list[str]}`` on success, or
        ``None`` when the expression cannot be parsed or evaluated
        (callers fall back to the LLM).
    """
    try:
        # Users write '^' for exponentiation, but sympify parses '^' as
        # XOR; translate before parsing.
        expr = sp.sympify(expression.replace("^", "**"))
        # Fix: the original called sp.working(expr), which does not
        # exist in SymPy — the resulting AttributeError hit the bare
        # except and made this function always return None.
        value = expr.evalf()
        steps = [f"Direct evaluation: {expression} = {expr}"]
        return {
            'answer': str(value),
            'steps': steps,
        }
    except (sp.SympifyError, TypeError, ValueError):
        # Narrowed from a bare except: only parse/evaluation failures
        # mean "not a math question".
        return None
24
+
25
# ===== GRAPH ENGINE =====
def create_graph(graph_type):
    """Render a canned example plot for *graph_type*.

    :param graph_type: "linear", "quadratic", or "trigonometric"; any
        other value falls back to the trigonometric example so a graph
        is always produced.
    :returns: a ``data:image/png;base64,...`` URI string, or ``None``
        when rendering fails.
    """
    try:
        plt.figure(figsize=(8, 4))
        x = np.linspace(-10, 10, 100)

        if graph_type == "linear":
            y = 2 * x + 3
            title = "Linear Graph: y = 2x + 3"
        elif graph_type == "quadratic":
            y = x**2 - 4
            title = "Quadratic Graph: y = x² - 4"
        else:
            # Fix: the original had no default branch, so any other
            # graph_type left `title` unbound and the NameError was
            # silently swallowed, returning None.
            y = np.sin(x)
            title = "Trigonometric Graph: y = sin(x)"

        plt.plot(x, y)
        plt.title(title)
        plt.grid(True)

        buf = BytesIO()
        plt.savefig(buf, format='png')
        return f"data:image/png;base64,{base64.b64encode(buf.getvalue()).decode('utf-8')}"
    except Exception:
        # Narrowed from a bare except; graphs are best-effort and the
        # caller treats None as "no image".
        return None
    finally:
        # Fix: close the figure even when savefig/plotting raises, so a
        # failed render does not leak a matplotlib figure.
        plt.close()
54
+
55
# ===== AI SYSTEM =====
class UltimateAISystem:
    """Route questions to SymPy (arithmetic), the graph engine, and an
    LLM pipeline, always returning an answer/graph/time dict."""

    def __init__(self):
        # GPU is pipeline device 0; -1 selects CPU. fp16 only on GPU.
        self.device = 0 if torch.cuda.is_available() else -1
        self.dtype = torch.float16 if self.device == 0 else torch.float32
        self.model = pipeline(
            "text-generation",
            model="mistralai/Mistral-7B-Instruct-v0.1",
            device=self.device,
            torch_dtype=self.dtype,
        )

    def generate_answer(self, prompt):
        """Handle all question types with a guaranteed response.

        :param prompt: the user's question.
        :returns: ``{'answer': str, 'graph': str | None, 'time': float}``.
        """
        start_time = time.time()
        lowered = prompt.lower()

        # 1. Pure-arithmetic questions go straight to SymPy.
        math_match = re.match(
            r"^(?:[Ww]hat is|Calculate|Solve) ([0-9\+\-\*\/\^\(\) ]+)\??$", prompt
        )
        if math_match:
            math_result = solve_math(math_match.group(1))
            if math_result:
                steps = "\n".join(f"• {step}" for step in math_result['steps'])
                return {
                    'answer': f"Answer: {math_result['answer']}\n\nSteps:\n{steps}",
                    'graph': None,
                    'time': time.time() - start_time,
                }

        # 2. Detect graph requests by keyword.
        graph_type = None
        if any(kw in lowered for kw in ["graph", "plot", "chart"]):
            if "linear" in lowered:
                graph_type = "linear"
            elif "quadratic" in lowered:
                graph_type = "quadratic"
            elif any(kw in lowered for kw in ["sin", "cos", "tan", "trig"]):
                graph_type = "trigonometric"

        # 3. Fall back to the LLM for everything else.
        try:
            response = self.model(
                f"""Provide a detailed step-by-step explanation. Include mathematical working where applicable.

Question: {prompt}

Answer in clear steps:
1.""",
                max_new_tokens=300,
                temperature=0.3,
                do_sample=True,
            )[0]['generated_text']

            # Keep only the text after the prompt echo.
            answer = response.split("Answer in clear steps:")[-1].strip()
            if not answer:
                answer = "I couldn't generate a response. Please try again."

            # 4. Generate a graph if one was requested.
            graph = create_graph(graph_type) if graph_type else None

            return {
                'answer': f"Step-by-Step Explanation:\n\n{answer}",
                'graph': graph,
                'time': time.time() - start_time,
            }
        except Exception:
            # Fix: was a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit. Broad Exception catch is
            # deliberate here — the UI must always receive a reply.
            return {
                'answer': "System error. Please try again.",
                'graph': None,
                'time': time.time() - start_time,
            }
125
 
126
# Initialize the system eagerly at import time so the first request does
# not pay the model-load cost. NOTE(review): a load failure here aborts
# app startup — presumably acceptable for a hosted Space; confirm.
ai_system = UltimateAISystem()
 
 
 
 
 
 
 
 
128
 
129
  # ===== GRADIO INTERFACE =====
130
+ with gr.Blocks(theme=gr.themes.Soft(), title="🧠 Ultimate AI Assistant") as demo:
131
+ gr.Markdown("""<h1><center>Math + Graphs + Explanations</center></h1>""")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
132
 
133
  with gr.Row():
134
  question = gr.Textbox(
135
+ label="Ask anything",
136
+ placeholder="E.g. 'What is 2+2?' or 'Explain quantum physics with a graph'",
137
  lines=3
138
  )
139
 
 
142
 
143
  with gr.Row():
144
  answer = gr.Textbox(
145
+ label="Detailed Answer",
146
  lines=10,
147
  interactive=False
148
  )
149
 
150
  with gr.Row():
151
  graph = gr.Image(
152
+ label="Generated Visualization",
153
  visible=False
154
  )
155
 
156
  # Pre-tested examples
157
  gr.Examples(
158
  examples=[
159
+ "What is (15*3)+(40/2)?",
160
+ "Explain the photoelectric effect with a graph",
161
+ "Solve - 5x + 6 = 0 step by step",
162
+ "Show a trigonometric function plot"
163
  ],
164
  inputs=question
165
  )
166
 
167
+ def process_question(prompt):
168
+ result = ai_system.generate_answer(prompt)
169
+ show_graph = result['graph'] is not None
170
+ output_text = f"""{result['answer']}
171
+
172
+ ⏱️ Generated in {result['time']:.2f} seconds"""
173
+ return output_text, gr.update(visible=show_graph, value=result['graph'])
174
 
175
  submit_btn.click(
176
+ fn=process_question,
177
  inputs=question,
178
  outputs=[answer, graph]
 
 
 
 
179
  )
180
 
181
if __name__ == "__main__":
    # Bind all interfaces on the standard HF Spaces port.
    demo.launch(server_port=7860, server_name="0.0.0.0")