AI Backend Deploy committed on
Commit
3959a6f
·
1 Parent(s): 6e9fffd

Fix: Use gr.Interface instead of Blocks to bypass Gradio schema bug

Browse files
Files changed (1) hide show
  1. app.py +57 -51
app.py CHANGED
@@ -10,6 +10,8 @@ import gradio as gr
10
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
11
  from PIL import Image, ImageDraw
12
  import numpy as np
 
 
13
 
14
  # ===== DEVICE CONFIGURATION =====
15
  device = "cpu"
@@ -119,62 +121,66 @@ def image_fn(prompt, width, height):
119
  b = int((np.sin((x + y) / 100 + seed * 0.7) * 127) + 128)
120
  pixels[x, y] = (r, g, b)
121
 
122
- return img
 
 
 
123
  except Exception as e:
124
- return Image.new('RGB', (256, 256), color=(255, 0, 0))
125
 
126
 
127
  # ===== GRADIO INTERFACE =====
128
 
129
- demo = gr.Blocks()
130
-
131
- with demo:
132
- gr.Markdown("# 🤖 Lightweight AI Backend")
133
- gr.Markdown("Multi-Model API running on FREE CPU tier")
134
-
135
- with gr.Tabs():
136
- with gr.Tab("💬 Chat"):
137
- with gr.Row():
138
- with gr.Column():
139
- chat_input = gr.Textbox(label="Message", lines=3)
140
- chat_tokens = gr.Slider(50, 200, 150, 10, label="Max Tokens")
141
- chat_temp = gr.Slider(0.1, 1.0, 0.7, 0.1, label="Temperature")
142
- chat_btn = gr.Button("Generate")
143
- with gr.Column():
144
- chat_output = gr.Textbox(label="Response", lines=10)
145
- chat_btn.click(chat_fn, [chat_input, chat_tokens, chat_temp], chat_output)
146
-
147
- with gr.Tab("💻 Code"):
148
- with gr.Row():
149
- with gr.Column():
150
- code_input = gr.Textbox(label="Description", lines=3)
151
- code_tokens = gr.Slider(100, 300, 256, 20, label="Max Tokens")
152
- code_temp = gr.Slider(0.1, 1.0, 0.3, 0.1, label="Temperature")
153
- code_btn = gr.Button("Generate")
154
- with gr.Column():
155
- code_output = gr.Textbox(label="Code", lines=10)
156
- code_btn.click(code_fn, [code_input, code_tokens, code_temp], code_output)
157
-
158
- with gr.Tab("📝 Summarize"):
159
- with gr.Row():
160
- with gr.Column():
161
- sum_input = gr.Textbox(label="Text", lines=8)
162
- sum_length = gr.Slider(20, 150, 100, 10, label="Length")
163
- sum_btn = gr.Button("Summarize")
164
- with gr.Column():
165
- sum_output = gr.Textbox(label="Summary", lines=8)
166
- sum_btn.click(summarize_fn, [sum_input, sum_length], sum_output)
167
-
168
- with gr.Tab("🎨 Image"):
169
- with gr.Row():
170
- with gr.Column():
171
- img_input = gr.Textbox(label="Description", lines=3)
172
- img_width = gr.Slider(128, 256, 256, 32, label="Width")
173
- img_height = gr.Slider(128, 256, 256, 32, label="Height")
174
- img_btn = gr.Button("Generate")
175
- with gr.Column():
176
- img_output = gr.Image(label="Image")
177
- img_btn.click(image_fn, [img_input, img_width, img_height], img_output)
 
178
 
179
 
180
  # ===== INITIALIZE AND RUN =====
 
10
  from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
11
  from PIL import Image, ImageDraw
12
  import numpy as np
13
+ import base64
14
+ from io import BytesIO
15
 
16
  # ===== DEVICE CONFIGURATION =====
17
  device = "cpu"
 
121
  b = int((np.sin((x + y) / 100 + seed * 0.7) * 127) + 128)
122
  pixels[x, y] = (r, g, b)
123
 
124
+ buffered = BytesIO()
125
+ img.save(buffered, format="PNG")
126
+ img_str = base64.b64encode(buffered.getvalue()).decode()
127
+ return f"data:image/png;base64,{img_str}"
128
  except Exception as e:
129
+ return f"Error: {str(e)}"
130
 
131
 
132
  # ===== GRADIO INTERFACE =====
133
 
134
# ===== GRADIO INTERFACE =====
# gr.Interface is used instead of gr.Blocks to work around a Gradio
# schema-generation bug (per the commit message). Each task gets its own
# Interface; they are combined into one app via gr.TabbedInterface below.
# NOTE(review): emoji in titles/tab names were mojibake in the scraped
# diff and have been restored to the intended characters.

# Chat tab: free-form text generation with token-count and temperature controls.
chat_demo = gr.Interface(
    fn=chat_fn,
    inputs=[
        gr.Textbox(lines=3, label="Message"),
        gr.Slider(50, 200, 150, step=10, label="Max Tokens"),
        gr.Slider(0.1, 1.0, 0.7, step=0.1, label="Temperature"),
    ],
    outputs=gr.Textbox(lines=10, label="Response"),
    title="💬 Chat",
)

# Code tab: natural-language description -> generated code (lower default
# temperature than chat for more deterministic output).
code_demo = gr.Interface(
    fn=code_fn,
    inputs=[
        gr.Textbox(lines=3, label="Description"),
        gr.Slider(100, 300, 256, step=20, label="Max Tokens"),
        gr.Slider(0.1, 1.0, 0.3, step=0.1, label="Temperature"),
    ],
    outputs=gr.Textbox(lines=10, label="Code"),
    title="💻 Code",
)

# Summarize tab: long text in, length-bounded summary out.
summarize_demo = gr.Interface(
    fn=summarize_fn,
    inputs=[
        gr.Textbox(lines=8, label="Text"),
        gr.Slider(20, 150, 100, step=10, label="Summary Length"),
    ],
    outputs=gr.Textbox(lines=8, label="Summary"),
    title="📝 Summarize",
)

# Image tab: image_fn returns a base64 data-URI string (see the diff's
# image_fn change), so the output component is a Textbox, not gr.Image.
image_demo = gr.Interface(
    fn=image_fn,
    inputs=[
        gr.Textbox(label="Description"),
        gr.Slider(128, 256, 256, step=32, label="Width"),
        gr.Slider(128, 256, 256, step=32, label="Height"),
    ],
    outputs=gr.Textbox(label="Image (Base64)"),
    title="🎨 Image",
)

# Combine the four task interfaces into a single tabbed app.
demo = gr.TabbedInterface(
    [chat_demo, code_demo, summarize_demo, image_demo],
    tab_names=["💬 Chat", "💻 Code", "📝 Summarize", "🎨 Image"],
    title="🤖 Lightweight AI Backend",
)
184
 
185
 
186
  # ===== INITIALIZE AND RUN =====