# GenAi-ChatBot_D / app.py
# Author: DarshanaD — "Initial commit2" (ceaf5c5)
import gradio as gr
import boto3
import os
import json
"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
# Chat handler: called by Gradio for each user message; talks to AWS Bedrock.
def respond(
    message,
    history: list[tuple[str, str]],
    system_message
):
    """Generate a reply to *message* using Anthropic Claude 3 Haiku on AWS Bedrock.

    Parameters
    ----------
    message : str
        The latest user message from the chat box.
    history : list[tuple[str, str]]
        Prior (user, assistant) turns supplied by gr.ChatInterface; included
        in the request so the model has conversational context.
    system_message : str
        System prompt from the UI textbox; sent via the Messages API's
        top-level ``system`` field.

    Yields
    ------
    str
        The model's complete reply text (a single yield — no token streaming).
    """
    # 1. Create the Bedrock runtime client. Region and credentials come from
    # environment variables (AWS_REGION, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY).
    bedrock_runtime = boto3.client(
        service_name='bedrock-runtime',
        region_name=os.getenv('AWS_REGION'),
        aws_access_key_id=os.getenv('AWS_ACCESS_KEY_ID'),
        aws_secret_access_key=os.getenv('AWS_SECRET_ACCESS_KEY')
    )

    # 2. Build the Anthropic Messages API payload (history + system prompt,
    # which the original version accepted but silently dropped).
    request_body = _build_request_body(message, history, system_message)

    # 3. Call the model.
    response = bedrock_runtime.invoke_model(
        modelId="anthropic.claude-3-haiku-20240307-v1:0",
        body=json.dumps(request_body)
    )

    # 4. Parse the response; log the full body for debugging.
    response_body = json.loads(response['body'].read())
    print(json.dumps(response_body, indent=2))

    # Yield the final answer text to the chat UI.
    yield response_body["content"][0]["text"]


def _build_request_body(message, history, system_message, max_tokens=1000):
    """Build the Anthropic Messages API request dict for Bedrock.

    Interleaves prior (user, assistant) turns from *history*, appends the
    current *message*, and attaches *system_message* via the top-level
    ``system`` key (the Messages API does not accept a "system" role message).
    """
    messages = []
    for user_turn, assistant_turn in (history or []):
        # Skip empty halves (e.g. the in-progress turn has no assistant reply yet).
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    body = {
        "anthropic_version": "bedrock-2023-05-31",
        "max_tokens": max_tokens,
        "messages": messages,
    }
    if system_message:
        body["system"] = system_message
    return body
"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""
# Wire the chat handler into a Gradio chat UI. The extra textbox below the
# chat feeds the ``system_message`` argument of ``respond``.
demo = gr.ChatInterface(
    fn=respond,
    additional_inputs=[
        gr.Textbox(
            value="You are a friendly Chatbot.",
            label="System message",
        ),
    ],
)

# Start the web server only when executed as a script, not on import.
if __name__ == "__main__":
    demo.launch()