aimanathar committed on
Commit
fc210ca
·
verified ·
1 Parent(s): d326036

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -53
app.py CHANGED
@@ -1,64 +1,22 @@
1
- from fastapi import FastAPI, Request
2
  from pydantic import BaseModel
3
- from openai import OpenAI
4
  import requests
5
- import time
6
 
7
  app = FastAPI()
8
 
9
# -----------------------
# API Keys & Config
# -----------------------
# SECURITY: credentials were previously hard-coded in this file. They are now
# read from the environment instead. NOTE(review): the previously committed
# OpenRouter key and Pulse bearer token remain visible in commit history and
# must be revoked/rotated regardless of this change.
import os

OPENROUTER_API_KEY = os.environ.get("OPENROUTER_API_KEY", "")
PULSE_BEARER_TOKEN = os.environ.get("PULSE_BEARER_TOKEN", "")
# Company id for the Pulse survey API; "4" preserved as the default value.
COMPANY_ID = os.environ.get("PULSE_COMPANY_ID", "4")
BASE_URL = "https://pulse-survey.ospreyibs.com/api/v1"

# OpenRouter exposes an OpenAI-compatible endpoint, so the OpenAI client is
# simply pointed at it via base_url.
client = OpenAI(
    base_url="https://openrouter.ai/api/v1",
    api_key=OPENROUTER_API_KEY,
)

# Headers sent with every Pulse survey API request.
headers = {
    "Authorization": f"Bearer {PULSE_BEARER_TOKEN}",
    "Company-Id": COMPANY_ID,
    "Accept": "application/json",
    "Content-Type": "application/json"
}
28
-
29
-
30
class QuestionRequest(BaseModel):
    """Request payload for the feedback endpoint.

    Carries the single survey question to be answered.
    """

    question_text: str
32
 
 
 
 
33
 
34
- @app.post("/generate_feedback/")
35
- async def generate_feedback(request: QuestionRequest):
36
- """
37
- Endpoint to generate answer + recommendation for a question.
38
- """
39
- question = request.question_text
40
-
41
- # Generate Answer
42
- prompt = f"Answer this question positively: {question}"
43
- answer_response = client.chat.completions.create(
44
- model="meta-llama/llama-3.3-70b-instruct",
45
- messages=[
46
- {"role": "system", "content": "You are a helpful AI survey assistant."},
47
- {"role": "user", "content": prompt}
48
- ]
49
- )
50
- answer = answer_response.choices[0].message.content.strip()
51
-
52
- # Generate Recommendation
53
- recommendation_prompt = f"Based on this answer: {answer}, write one professional recommendation or reflection tip."
54
- rec_response = client.chat.completions.create(
55
- model="meta-llama/llama-3.3-70b-instruct",
56
- messages=[
57
- {"role": "user", "content": recommendation_prompt}
58
- ]
59
- )
60
- recommendation = rec_response.choices[0].message.content.strip()
61
-
62
  return {
63
  "answer": answer,
64
  "recommendation": recommendation
 
1
+ from fastapi import FastAPI
2
  from pydantic import BaseModel
 
3
  import requests
 
4
 
5
  app = FastAPI()
6
 
7
class Question(BaseModel):
    """Incoming request body holding the survey question text."""

    question_text: str
9
 
10
@app.get("/")
def home():
    """Liveness endpoint: confirms the Feedback Engine is serving requests."""
    payload = {"message": "🚀 Feedback Engine is running!"}
    return payload
13
 
14
+ @app.post("/generate-response")
15
+ def generate_response(data: Question):
16
+ # Dummy response for now
17
+ answer = f"Llama Response: {data.question_text}"
18
+ recommendation = "Recommendation: Reflect and take one positive step today."
19
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
20
  return {
21
  "answer": answer,
22
  "recommendation": recommendation