|
|
import os |
|
|
from fastapi import FastAPI, HTTPException |
|
|
import google.generativeai as genai |
|
|
from pydantic import BaseModel |
|
|
|
|
|
# Read the Gemini API key from the environment instead of hard-coding it:
# a key committed to source control is leaked and must be rotated.
_api_key = os.environ.get("GEMINI_API_KEY")
if not _api_key:
    raise RuntimeError("GEMINI_API_KEY environment variable is not set")
genai.configure(api_key=_api_key)
|
|
|
|
|
|
|
|
# Load the system prompt for the model; explicit UTF-8 avoids
# locale-dependent decoding differences across deployment hosts.
with open("prompt.txt", "r", encoding="utf-8") as file:
    system_instruction = file.read()
|
|
|
|
|
|
|
|
# Sampling parameters passed to the Gemini model: near-default creative
# settings, capped output length, plain-text responses.
generation_config = dict(
    temperature=1,
    top_p=0.95,
    top_k=40,
    max_output_tokens=8192,
    response_mime_type="text/plain",
)
|
|
|
|
|
# Build the Gemini model with the sampling parameters and system prompt
# defined above.
model = genai.GenerativeModel(
    model_name="gemini-2.0-flash",
    system_instruction=system_instruction,
    generation_config=generation_config,
)

# One module-level chat session, started with an empty history.
# NOTE(review): this session is shared by every request handled by the
# process — confirm a single shared conversation is intended.
chat_session = model.start_chat(history=[])
|
|
class ChatRequest(BaseModel):
    """Request body for the /chat/ endpoint."""

    # The user's message to forward to the chat session.
    user_input: str
|
|
|
|
|
# ASGI application instance; serve with an ASGI server (e.g. uvicorn).
app = FastAPI()
|
|
@app.get("/")
def root():
    """Welcome endpoint for the API root."""
    return {"message": "Welcome to Chatbot API"}
|
|
|
|
|
@app.post("/chat/")
def chat_with_bot(request: ChatRequest):
    """Forward the user's message to the shared Gemini chat session.

    Raises:
        HTTPException 400: input is empty or whitespace-only.
        HTTPException 502: the upstream Gemini call failed.
    """
    # Strip so whitespace-only input is rejected too, not just "".
    user_input = request.user_input.strip()
    if not user_input:
        raise HTTPException(status_code=400, detail="Input cannot be empty")
    try:
        # NOTE(review): chat_session is module-global, so all clients share
        # one conversation history — confirm that is intended.
        response = chat_session.send_message(user_input)
    except Exception as exc:
        # Surface upstream failures as an explicit 502 rather than an
        # unhandled 500 with a traceback.
        raise HTTPException(status_code=502, detail="Upstream model error") from exc
    return {"response": response.text}
|
|
|