from fastapi import FastAPI
from pydantic import BaseModel
from openai import OpenAI
import os
import requests

app = FastAPI()

# -----------------------
# API Keys & Config
# -----------------------
OPENROUTER_API_KEY = "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"
PULSE_BEARER_TOKEN = "3673|1Cg9jkntwA0827JLsmIoUoR4E2hOj2sLkMwEYF8dcdd9ed59"
COMPANY_ID = "4"
BASE_URL = "https://pulse-survey.ospreyibs.com/api/v1"

client = OpenAI(
    base_url="https://openrouter.ai/api/v1",
    api_key=OPENROUTER_API_KEY
)

headers = {
    "Authorization": f"Bearer {PULSE_BEARER_TOKEN}",
    "Company-Id": COMPANY_ID,
    "Accept": "application/json",
    "Content-Type": "application/json"
}
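
# `headers` and BASE_URL above are meant for calls to the Pulse survey API.
# A minimal sketch of such a call, assuming a hypothetical /questions
# endpoint (the real Pulse routes are not part of this file):
def fetch_pulse_questions() -> list:
    """Illustrative only: fetch survey questions from the Pulse API."""
    resp = requests.get(f"{BASE_URL}/questions", headers=headers, timeout=10)
    resp.raise_for_status()
    return resp.json()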


class QuestionRequest(BaseModel):
    question_text: str


@app.post("/generate_feedback/")
def generate_feedback(request: QuestionRequest):
    """
    Generate a positively framed answer to the question, followed by a
    professional recommendation derived from that answer.

    Declared sync (not async) so FastAPI runs the blocking OpenAI client
    calls in a threadpool instead of stalling the event loop.
    """
    question = request.question_text

    # Generate Answer
    prompt = f"Answer this question positively: {question}"
    answer_response = client.chat.completions.create(
        model="meta-llama/llama-3.3-70b-instruct",
        messages=[
            {"role": "system", "content": "You are a helpful AI survey assistant."},
            {"role": "user", "content": prompt}
        ]
    )
    answer = answer_response.choices[0].message.content.strip()

    # Generate Recommendation
    recommendation_prompt = f"Based on this answer: {answer}, write one professional recommendation or reflection tip."
    rec_response = client.chat.completions.create(
        model="meta-llama/llama-3.3-70b-instruct",
        messages=[
            {"role": "user", "content": recommendation_prompt}
        ]
    )
    recommendation = rec_response.choices[0].message.content.strip()

    return {
        "answer": answer,
        "recommendation": recommendation
    }
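
# To run the service locally (assuming this module is saved as main.py
# and uvicorn is installed alongside fastapi):
#
#   uvicorn main:app --reload
#
# Example request once the server is up:
#
#   curl -X POST http://localhost:8000/generate_feedback/ \
#        -H "Content-Type: application/json" \
#        -d '{"question_text": "How satisfied are you with your team?"}'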