ArchCoder committed on
Commit
ad762dc
·
verified ·
1 Parent(s): 12db0e1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +110 -32
app.py CHANGED
@@ -8,10 +8,12 @@ import tempfile
8
  import os
9
  import logging
10
  import time
 
11
  from datetime import datetime
12
  from html.parser import HTMLParser
13
  from fastapi import FastAPI, Request, Query
14
  from fastapi.responses import JSONResponse
 
15
  import uvicorn
16
 
17
  logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s')
@@ -32,7 +34,7 @@ logger.info("Models loaded!")
32
 
33
  def search_parallel(query):
34
  """DuckDuckGo search"""
35
- logger.info("[SEARCH] Starting...")
36
  try:
37
  response = requests.get(
38
  'https://html.duckduckgo.com/html/',
@@ -66,15 +68,15 @@ def search_parallel(query):
66
  parser = DDGParser()
67
  parser.feed(response.text)
68
  result = "\n".join([f"• {r}" for r in parser.results[:2]]) if parser.results else "No results"
69
- logger.info("[SEARCH] ✓")
70
  return result, "DuckDuckGo"
71
- except:
72
- pass
73
  return "No search results", "None"
74
 
75
  def generate_answer(text_input):
76
  """Generate answer"""
77
- logger.info(f"[AI] Question: {text_input[:60]}...")
78
 
79
  try:
80
  if not text_input or not text_input.strip():
@@ -84,10 +86,11 @@ def generate_answer(text_input):
84
 
85
  search_start = time.time()
86
  search_results, search_engine = search_parallel(text_input)
87
- logger.info(f"[AI] Search: {time.time()-search_start:.2f}s")
 
88
 
89
  messages = [
90
- {"role": "system", "content": f"Today is {current_date}. Answer briefly using search results (60-80 words)."},
91
  {"role": "user", "content": f"Search:\n{search_results}\n\nQ: {text_input}\nA:"}
92
  ]
93
 
@@ -109,7 +112,9 @@ def generate_answer(text_input):
109
  )
110
 
111
  answer = tokenizer.decode(outputs[0][inputs['input_ids'].shape[1]:], skip_special_tokens=True).strip()
112
- logger.info(f"[AI] Gen: {time.time()-gen_start:.2f}s | ✓")
 
 
113
 
114
  return f"{answer}\n\n**Source:** {search_engine}"
115
 
@@ -120,40 +125,69 @@ def generate_answer(text_input):
120
  # FastAPI app
121
  app = FastAPI()
122
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
123
  @app.post("/api/ai")
124
  async def api_ai_post(request: Request):
125
- """AI endpoint - POST with JSON body"""
126
  try:
127
  body = await request.body()
128
- logger.info(f"[API AI POST] Raw body: {body}")
129
 
130
  if not body:
131
- return JSONResponse({"error": "Empty request body"}, status_code=400)
132
 
133
- try:
134
- data = await request.json()
135
- except Exception as e:
136
- logger.error(f"[API AI POST] JSON parse error: {str(e)}")
137
- return JSONResponse({"error": f"Invalid JSON: {str(e)}"}, status_code=400)
138
-
139
- logger.info(f"[API AI POST] Parsed data: {data}")
140
 
141
  question = data.get("text", "")
142
  if not question:
143
- return JSONResponse({"error": "No 'text' field in JSON"}, status_code=400)
144
 
145
  answer = generate_answer(question)
146
  return JSONResponse({"answer": answer})
147
 
148
  except Exception as e:
149
- logger.error(f"[API AI POST] Error: {str(e)}")
150
  return JSONResponse({"error": str(e)}, status_code=500)
151
 
152
  @app.get("/api/ai")
153
- async def api_ai_get(text: str = Query(..., description="Question text")):
154
- """AI endpoint - GET with query param (Pluely fallback)"""
155
  try:
156
- logger.info(f"[API AI GET] Question: {text}")
157
 
158
  if not text:
159
  return JSONResponse({"error": "No text parameter"}, status_code=400)
@@ -162,40 +196,84 @@ async def api_ai_get(text: str = Query(..., description="Question text")):
162
  return JSONResponse({"answer": answer})
163
 
164
  except Exception as e:
165
- logger.error(f"[API AI GET] Error: {str(e)}")
166
  return JSONResponse({"error": str(e)}, status_code=500)
167
 
168
  @app.get("/health")
169
  async def health():
170
- return {"status": "ok", "model": "SmolLM2-360M"}
171
 
172
  # Gradio UI
173
  with gr.Blocks(title="Fast Q&A") as demo:
174
  gr.Markdown("""
175
- # ⚡ Ultra-Fast Q&A - SmolLM2-360M
176
 
177
  ## 🎯 Pluely Configuration
178
 
179
- ### Option 1: GET with Query Param (EASIEST - Windows Compatible)
 
 
180
  ```
181
  curl https://archcoder-basic-app.hf.space/api/ai?text={{TEXT}}
182
  ```
 
183
  **Response Path:** `answer`
184
 
185
- ### Option 2: POST with JSON (If Option 1 doesn't work)
 
 
 
 
 
 
186
  ```
187
- curl -X POST https://archcoder-basic-app.hf.space/api/ai -H "Content-Type: application/json" --data-binary @- << EOF
188
- {"text":"{{TEXT}}"}
189
- EOF
190
  ```
 
191
  **Response Path:** `answer`
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
192
  """)
193
 
194
  with gr.Tab("Test"):
195
- test_input = gr.Textbox(label="Question")
196
  test_btn = gr.Button("🚀 Test")
197
  test_output = gr.Textbox(label="Answer", lines=8)
198
  test_btn.click(fn=generate_answer, inputs=[test_input], outputs=[test_output])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
199
 
200
  app = gr.mount_gradio_app(app, demo, path="/")
201
 
 
8
  import os
9
  import logging
10
  import time
11
+ import json
12
  from datetime import datetime
13
  from html.parser import HTMLParser
14
  from fastapi import FastAPI, Request, Query
15
  from fastapi.responses import JSONResponse
16
+ from fastapi.middleware.cors import CORSMiddleware
17
  import uvicorn
18
 
19
  logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s')
 
34
 
35
  def search_parallel(query):
36
  """DuckDuckGo search"""
37
+ logger.info(f"[SEARCH] Query: {query}")
38
  try:
39
  response = requests.get(
40
  'https://html.duckduckgo.com/html/',
 
68
  parser = DDGParser()
69
  parser.feed(response.text)
70
  result = "\n".join([f"• {r}" for r in parser.results[:2]]) if parser.results else "No results"
71
+ logger.info(f"[SEARCH] ✓ Found {len(parser.results)} results")
72
  return result, "DuckDuckGo"
73
+ except Exception as e:
74
+ logger.error(f"[SEARCH] Error: {str(e)}")
75
  return "No search results", "None"
76
 
77
  def generate_answer(text_input):
78
  """Generate answer"""
79
+ logger.info(f"[AI] Question: {text_input}")
80
 
81
  try:
82
  if not text_input or not text_input.strip():
 
86
 
87
  search_start = time.time()
88
  search_results, search_engine = search_parallel(text_input)
89
+ search_time = time.time() - search_start
90
+ logger.info(f"[AI] Search: {search_time:.2f}s")
91
 
92
  messages = [
93
+ {"role": "system", "content": f"Today is {current_date}. Answer briefly (60-80 words)."},
94
  {"role": "user", "content": f"Search:\n{search_results}\n\nQ: {text_input}\nA:"}
95
  ]
96
 
 
112
  )
113
 
114
  answer = tokenizer.decode(outputs[0][inputs['input_ids'].shape[1]:], skip_special_tokens=True).strip()
115
+ gen_time = time.time() - gen_start
116
+ logger.info(f"[AI] Gen: {gen_time:.2f}s")
117
+ logger.info(f"[AI] Answer: {answer[:100]}...")
118
 
119
  return f"{answer}\n\n**Source:** {search_engine}"
120
 
 
125
  # FastAPI app
126
  app = FastAPI()
127
 
128
+ # Add CORS
129
+ app.add_middleware(
130
+ CORSMiddleware,
131
+ allow_origins=["*"],
132
+ allow_credentials=True,
133
+ allow_methods=["*"],
134
+ allow_headers=["*"],
135
+ )
136
+
137
+ @app.middleware("http")
138
+ async def log_requests(request: Request, call_next):
139
+ """Log all requests"""
140
+ logger.info("="*80)
141
+ logger.info(f"[REQUEST] Method: {request.method}")
142
+ logger.info(f"[REQUEST] URL: {request.url}")
143
+ logger.info(f"[REQUEST] Headers: {dict(request.headers)}")
144
+ logger.info(f"[REQUEST] Query params: {dict(request.query_params)}")
145
+
146
+ # Read body if POST
147
+ if request.method == "POST":
148
+ body = await request.body()
149
+ logger.info(f"[REQUEST] Raw body ({len(body)} bytes): {body}")
150
+ try:
151
+ body_str = body.decode('utf-8')
152
+ logger.info(f"[REQUEST] Body as string: {body_str}")
153
+ body_json = json.loads(body_str)
154
+ logger.info(f"[REQUEST] Body as JSON: {body_json}")
155
+ except Exception as e:
156
+ logger.error(f"[REQUEST] Body parse error: {str(e)}")
157
+
158
+ response = await call_next(request)
159
+ logger.info(f"[RESPONSE] Status: {response.status_code}")
160
+ logger.info("="*80)
161
+ return response
162
+
163
  @app.post("/api/ai")
164
  async def api_ai_post(request: Request):
165
+ """AI endpoint - POST"""
166
  try:
167
  body = await request.body()
 
168
 
169
  if not body:
170
+ return JSONResponse({"error": "Empty body"}, status_code=400)
171
 
172
+ data = json.loads(body.decode('utf-8'))
173
+ logger.info(f"[API POST] Parsed: {data}")
 
 
 
 
 
174
 
175
  question = data.get("text", "")
176
  if not question:
177
+ return JSONResponse({"error": "No 'text' field"}, status_code=400)
178
 
179
  answer = generate_answer(question)
180
  return JSONResponse({"answer": answer})
181
 
182
  except Exception as e:
183
+ logger.error(f"[API POST] Error: {str(e)}")
184
  return JSONResponse({"error": str(e)}, status_code=500)
185
 
186
  @app.get("/api/ai")
187
+ async def api_ai_get(text: str = Query(default="", description="Question")):
188
+ """AI endpoint - GET"""
189
  try:
190
+ logger.info(f"[API GET] text param: '{text}'")
191
 
192
  if not text:
193
  return JSONResponse({"error": "No text parameter"}, status_code=400)
 
196
  return JSONResponse({"answer": answer})
197
 
198
  except Exception as e:
199
+ logger.error(f"[API GET] Error: {str(e)}")
200
  return JSONResponse({"error": str(e)}, status_code=500)
201
 
202
  @app.get("/health")
203
  async def health():
204
+ return {"status": "ok", "model": "SmolLM2-360M", "endpoints": ["/api/ai (GET/POST)"]}
205
 
206
  # Gradio UI
207
  with gr.Blocks(title="Fast Q&A") as demo:
208
  gr.Markdown("""
209
+ # ⚡ Fast Q&A - SmolLM2-360M
210
 
211
  ## 🎯 Pluely Configuration
212
 
213
+ ### Method 1: GET Request (RECOMMENDED - Works with Pluely)
214
+
215
+ **Curl Command for Pluely:**
216
  ```
217
  curl https://archcoder-basic-app.hf.space/api/ai?text={{TEXT}}
218
  ```
219
+
220
  **Response Path:** `answer`
221
 
222
+ **Streaming:** OFF
223
+
224
+ ---
225
+
226
+ ### Method 2: POST Request (Alternative)
227
+
228
+ **Curl Command for Pluely:**
229
  ```
230
+ curl -X POST https://archcoder-basic-app.hf.space/api/ai -H "Content-Type: application/json" -d {\"text\":\"{{TEXT}}\"}
 
 
231
  ```
232
+
233
  **Response Path:** `answer`
234
+
235
+ **Streaming:** OFF
236
+
237
+ ---
238
+
239
+ ## 🧪 Test Manually
240
+
241
+ **Windows CMD:**
242
+ ```
243
+ curl "https://archcoder-basic-app.hf.space/api/ai?text=Who+is+the+president"
244
+ ```
245
+
246
+ **PowerShell:**
247
+ ```
248
+ Invoke-RestMethod -Uri "https://archcoder-basic-app.hf.space/api/ai?text=Who is the president"
249
+ ```
250
+
251
+ **Browser:**
252
+ ```
253
+ https://archcoder-basic-app.hf.space/api/ai?text=Who is the president
254
+ ```
255
  """)
256
 
257
  with gr.Tab("Test"):
258
+ test_input = gr.Textbox(label="Question", placeholder="Ask anything...")
259
  test_btn = gr.Button("🚀 Test")
260
  test_output = gr.Textbox(label="Answer", lines=8)
261
  test_btn.click(fn=generate_answer, inputs=[test_input], outputs=[test_output])
262
+
263
+ with gr.Tab("Logs"):
264
+ gr.Markdown("""
265
+ ## How to Check Logs
266
+
267
+ 1. Go to your Hugging Face Space
268
+ 2. Click on **"Logs"** tab at the top
269
+ 3. You'll see all requests with:
270
+ - Request method and URL
271
+ - Headers
272
+ - Body content
273
+ - Response
274
+
275
+ This helps debug what Pluely is actually sending!
276
+ """)
277
 
278
  app = gr.mount_gradio_app(app, demo, path="/")
279