# Pulse_survey / app.py
# (Hugging Face Space page residue removed: "aimanathar's picture",
#  "Update app.py", commit 0ac300e — kept here as a comment so the
#  module parses as valid Python.)
import os
import time

import requests
from fastapi import FastAPI, Request
from openai import OpenAI
from pydantic import BaseModel
app = FastAPI()

# -----------------------
# API Keys & Config
# -----------------------
# SECURITY(review): live credentials were previously hard-coded here and are
# therefore already exposed in version control — they should be ROTATED.
# Secrets are now read from the environment; the fallbacks preserve the old
# behavior for existing deployments and should be removed once env vars are
# configured.
OPENROUTER_API_KEY = os.environ.get(
    "OPENROUTER_API_KEY",
    "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",
)
PULSE_BEARER_TOKEN = os.environ.get(
    "PULSE_BEARER_TOKEN",
    "3673|1Cg9jkntwA0827JLsmIoUoR4E2hOj2sLkMwEYF8dcdd9ed59",
)
COMPANY_ID = os.environ.get("PULSE_COMPANY_ID", "4")
BASE_URL = "https://pulse-survey.ospreyibs.com/api/v1"

# OpenRouter exposes an OpenAI-compatible API, so the OpenAI client is
# pointed at the OpenRouter base URL.
client = OpenAI(
    base_url="https://openrouter.ai/api/v1",
    api_key=OPENROUTER_API_KEY,
)

# Default headers for calls to the Pulse survey API (see BASE_URL).
headers = {
    "Authorization": f"Bearer {PULSE_BEARER_TOKEN}",
    "Company-Id": COMPANY_ID,
    "Accept": "application/json",
    "Content-Type": "application/json",
}
class QuestionRequest(BaseModel):
    """Request body for ``POST /generate_feedback/``.

    Attributes are validated by pydantic before the endpoint runs.
    """
    # The survey question the model should answer.
    question_text: str
# Model used for both generation steps; defined once so the two calls
# cannot drift apart.
_LLM_MODEL = "meta-llama/llama-3.3-70b-instruct"


def _chat(messages: list[dict]) -> str:
    """Send *messages* to the chat-completions API and return the reply text.

    Returns the first choice's content with surrounding whitespace stripped.
    """
    response = client.chat.completions.create(
        model=_LLM_MODEL,
        messages=messages,
    )
    return response.choices[0].message.content.strip()


@app.post("/generate_feedback/")
async def generate_feedback(request: QuestionRequest):
    """Generate an answer and a recommendation for a survey question.

    Two sequential LLM calls: the first answers the question positively,
    the second turns that answer into a recommendation/reflection tip.

    Args:
        request: Validated body containing ``question_text``.

    Returns:
        dict with ``answer`` and ``recommendation`` strings.
    """
    question = request.question_text

    # Step 1: answer the question.
    answer = _chat([
        {"role": "system", "content": "You are a helpful AI survey assistant."},
        {"role": "user", "content": f"Answer this question positively: {question}"},
    ])

    # Step 2: derive a recommendation from the generated answer.
    recommendation = _chat([
        {"role": "user", "content": f"Based on this answer: {answer}, write one professional recommendation or reflection tip."},
    ])

    return {
        "answer": answer,
        "recommendation": recommendation,
    }