Spaces:
Sleeping
Sleeping
File size: 2,191 Bytes
429d451 27c3cdb 429d451 925d1cc e185a06 429d451 925d1cc e185a06 429d451 e185a06 429d451 e185a06 429d451 e185a06 429d451 e185a06 429d451 431d9f7 429d451 925d1cc e185a06 429d451 e185a06 429d451 e185a06 429d451 431d9f7 429d451 e185a06 429d451 e185a06 429d451 e185a06 429d451 e185a06 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 |
from fastapi import FastAPI, Request
import os
import requests
from pydantic import BaseModel
from dotenv import load_dotenv
import openai
# Load environment variables. On Hugging Face Spaces, secrets arrive as real
# environment variables; load_dotenv() additionally reads a local .env file,
# which makes local development work the same way.
load_dotenv()
# Create the FastAPI application instance (served by uvicorn at the bottom of the file).
app = FastAPI(title="AI Feedback Engine")
# Read secrets from environment variables. Any of these may be None if the
# corresponding secret is not configured — the endpoint below must cope with that.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
PULSE_API_URL = os.getenv("PULSE_API_URL")
PULSE_API_KEY = os.getenv("PULSE_API_KEY")
# Configure OpenAI.
# NOTE(review): module-level `openai.api_key` is the legacy (<1.0) openai SDK
# style, matched by `openai.ChatCompletion.create` below — confirm the pinned
# SDK version, as this API was removed in openai>=1.0.
openai.api_key = OPENAI_API_KEY
class Message(BaseModel):
    """Request body for the chatbot endpoints: one free-text user message."""

    text: str
@app.get("/")
def home():
    """Health-check root endpoint: confirms the service is up."""
    return {"message": "🚀 AI Feedback Engine is running!"}
@app.post("/auto_feedback")
async def auto_feedback(msg: Message):
    """Generate AI feedback for a user's message and forward it to the Pulse API.

    Steps:
      1. Ask the OpenAI chat model for short feedback + a recommendation
         (the prompt requests JSON text with 'feedback'/'recommendation' keys).
      2. POST the original message and the AI's answer to the Pulse Survey API.
      3. Return a summary payload to the chatbot caller.

    Returns:
        On success: {"status": "success", "user_input", "ai_response",
        "pulse_status": <Pulse HTTP status code>}.
        On failure: {"status": "error", "message": <exception text>} — errors
        are reported in the body (HTTP 200) rather than as error codes,
        preserving the existing contract with the chatbot.
    """
    try:
        user_input = msg.text

        # Fail fast with a clear message when the Space secrets are missing;
        # otherwise requests would be sent to the literal URL "None/...".
        if not OPENAI_API_KEY or not PULSE_API_URL or not PULSE_API_KEY:
            raise RuntimeError(
                "Missing configuration: OPENAI_API_KEY, PULSE_API_URL and "
                "PULSE_API_KEY must all be set."
            )

        # Step 1: generate AI feedback + recommendation.
        ai_text = _generate_ai_feedback(user_input)

        # Step 2: send to the Pulse Survey API (response status is reported
        # back to the caller; a non-2xx Pulse status is not treated as fatal).
        pulse_response = _forward_to_pulse(user_input, ai_text)

        # Step 3: return the structured result to the chatbot.
        return {
            "status": "success",
            "user_input": user_input,
            "ai_response": ai_text,
            "pulse_status": pulse_response.status_code,
        }
    except Exception as e:
        # Top-level boundary: surface the failure to the chatbot instead of
        # letting FastAPI convert it into an opaque 500.
        return {"status": "error", "message": str(e)}


def _generate_ai_feedback(user_input: str) -> str:
    """Ask the OpenAI chat model for feedback on *user_input*; return the raw reply text."""
    ai_prompt = f"""
You are an HR feedback assistant.
A user said: "{user_input}"
Generate:
1. A short, professional feedback (1–2 sentences)
2. A practical recommendation for improvement.
Return as JSON with keys: 'feedback' and 'recommendation'.
"""
    # NOTE(review): legacy openai<1.0 API — kept to match the module-level
    # `openai.api_key` configuration above.
    completion = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "system", "content": ai_prompt}],
    )
    return completion.choices[0].message["content"]


def _forward_to_pulse(question: str, answer: str):
    """POST the question/answer pair to the Pulse Survey API; return the HTTP response."""
    return requests.post(
        f"{PULSE_API_URL}/pulse-survey-answers/store",
        headers={"Authorization": f"Bearer {PULSE_API_KEY}"},
        json={"question": question, "answer": answer},
        timeout=10,
    )
# Local-run entry point (optional): lets the app be started directly with
# `python app.py`; on Hugging Face Spaces uvicorn is launched externally.
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=7860)
|