Spaces:
Runtime error
Runtime error
File size: 1,517 Bytes
7300c08 3330fa6 7300c08 3330fa6 7300c08 3330fa6 7300c08 3330fa6 7300c08 3330fa6 7300c08 3f80d42 7300c08 3330fa6 7300c08 b508c99 7300c08 3330fa6 7300c08 c109243 7300c08 3330fa6 7300c08 c109243 7300c08 3330fa6 7300c08 3330fa6 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 |
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from huggingface_hub import InferenceClient
import os
import gradio as gr
# ASGI application served by the Space.
app = FastAPI()
# Shared Inference API client for the Zephyr-7B chat model.
# NOTE(review): the env var is lowercase "huggingface_token" — confirm this
# matches the Space's secret name (HF convention is usually HF_TOKEN).
client = InferenceClient(model="HuggingFaceH4/zephyr-7b-beta", token=os.getenv("huggingface_token"))
class Message(BaseModel):
    """Request body for the /chat endpoint."""

    # The user's message text. Named "input" to preserve the existing API
    # contract, even though it shadows the builtin.
    input: str
    # Prior conversation turns. Previously this had no default, making it a
    # REQUIRED field even though the endpoint never forwards it to the model;
    # defaulting to [] lets history-less clients call /chat while remaining
    # backward compatible (pydantic copies mutable defaults per instance).
    history: list = []
@app.get("/")
async def home():
    """Landing endpoint: confirms the API is up and reachable."""
    greeting = {"message": "Welcome to the chatbot API!"}
    return greeting
@app.post("/chat")
async def chat(message: Message):
    """POST /chat: run the user's message through the chatbot and return the reply.

    Body: a Message with the user's `input` text and conversation `history`.
    Returns: {"response": <assistant reply string>}.
    Raises: HTTPException 500 carrying the underlying error message on failure.
    """
    try:
        # BUG FIX: the original called chatbot(input_message) with one argument,
        # but chatbot requires two positional args (input, history), so every
        # request raised TypeError -> HTTP 500. Pass the history through.
        response = chatbot(message.input, message.history)
        return {"response": response}
    except Exception as e:
        # Boundary handler: surface the failure as a 500, preserving the cause.
        raise HTTPException(status_code=500, detail=str(e)) from e
def chatbot(input, history=None):
    """Send one user message to the Zephyr model and return its reply text.

    Parameters:
        input: the user's message text (name kept for caller compatibility,
            though it shadows the builtin).
        history: prior conversation turns. Currently unused by the model call;
            now OPTIONAL (default None) so the one-argument call sites in this
            file (chat, gradio_chat) no longer raise TypeError, while existing
            two-argument callers keep working.

    Returns the assistant's reply string. Any client/network error propagates
    with its original type and traceback — the previous
    `except Exception: raise Exception(str(e))` erased both while adding
    nothing, since the HTTP layer already catches Exception.
    """
    # Single-turn prompt: only the current user message is sent to the model.
    # (History is accepted but not forwarded — presumably future work.)
    messages = [{"role": "user", "content": input}]
    output = client.chat_completion(
        messages=messages,
        max_tokens=256,
        temperature=0.7,
    )
    return output.choices[0].message.content
# Define the Gradio chat interface
def gradio_chat(input):
    """Gradio callback: forward the textbox contents to the chatbot.

    BUG FIX: the original called chatbot(input) with one argument even though
    chatbot requires (input, history), raising TypeError on every submit.
    Pass an explicit empty history (works with the two-parameter signature).
    """
    return chatbot(input, [])
# Define Gradio inputs and outputs
inputs = [gr.Textbox(lines=5, label="Input")]
output = gr.Textbox(label="Response")
# Create Gradio interface
# NOTE(review): launch() blocks at module import time; in a FastAPI Space this
# runs before/instead of the ASGI server starting, so the FastAPI routes above
# may never be served — confirm the intended serving mode (e.g. mounting the
# Gradio app on `app` instead of launching it standalone).
gr.Interface(fn=gradio_chat, inputs=inputs, outputs=output, title="Chatbot").launch()
|