geeksiddhant committed on
Commit
d1529a9
·
verified ·
1 Parent(s): 0f75bbe

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +79 -0
app.py ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import os
from typing import List, Optional

from fastapi import FastAPI, HTTPException
from groq import Groq
from pydantic import BaseModel
# ASGI application object picked up by uvicorn.
app = FastAPI(title="AI Chat API", description="FastAPI backend for AI chat")

# Groq SDK client.  The key comes from the environment so no secret is
# hard-coded.  NOTE(review): if GROQ_API_KEY is unset this yields
# api_key=None and requests fail at call time — confirm deployment sets it.
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))
class Message(BaseModel):
    """One turn of the conversation.

    Attributes:
        role: Speaker of the turn (presumably "user"/"assistant" — the
            OpenAI-style roles the Groq API expects; not validated here).
        content: Text of the turn.
    """

    role: str
    content: str
class ChatRequest(BaseModel):
    """Request payload for POST /chat.

    Attributes:
        message: The new user message to answer.
        history: Earlier conversation turns, oldest first; defaults to empty.
    """

    message: str
    # A literal [] default is safe on a pydantic model (defaults are
    # copied per instance), unlike a plain-Python mutable default argument.
    history: Optional[List[Message]] = []
class ChatResponse(BaseModel):
    """Response payload for POST /chat.

    Attributes:
        reply: The assistant's answer text.
    """

    reply: str
@app.get("/")
def root():
    """Describe the API and list its available endpoints."""
    endpoints = {
        "/chat": "POST - Send a message and get AI response",
        "/docs": "GET - API documentation",
    }
    return {"message": "Welcome to AI Chat API", "endpoints": endpoints}
@app.post("/chat", response_model=ChatResponse)
def chat(request: ChatRequest):
    """
    Chat endpoint that processes messages and returns AI responses.

    Args:
        request: ChatRequest containing message and conversation history

    Returns:
        ChatResponse with AI reply

    Raises:
        HTTPException: 502 when the upstream Groq API call fails.
    """
    # History first (oldest to newest), then the new user message.
    messages = [
        {"role": turn.role, "content": turn.content}
        for turn in (request.history or [])
    ]
    messages.append({"role": "user", "content": request.message})

    # Call the Groq chat-completions API.  Surface upstream failures as an
    # explicit 502 instead of letting the SDK exception bubble up as an
    # opaque 500 with a traceback.
    try:
        chat_completion = client.chat.completions.create(
            messages=messages,
            model="llama-3.3-70b-versatile",
        )
    except Exception as exc:  # Groq SDK raises its own exception hierarchy
        raise HTTPException(
            status_code=502, detail="Upstream AI service error"
        ) from exc

    # First choice carries the assistant reply (only one is requested).
    ai_reply = chat_completion.choices[0].message.content

    return ChatResponse(reply=ai_reply)
if __name__ == "__main__":
    # Local development entry point; in deployment, uvicorn is typically
    # launched externally against `app`.
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=7860)