# chatbot1 / app.py
# Author: Afshintm
# Last commit: b534659 ("add exclamation mark")
# Import necessary libraries
import os # Interacting with the operating system (reading/writing files)
from dotenv import load_dotenv # Loading environment variables from a .env file
import json # Parsing and handling JSON data
# LangChain OpenAI imports
from langchain_openai import AzureChatOpenAI # OpenAI embeddings and models
from langchain.prompts import ChatPromptTemplate # Template for chat prompts
# Other utilities
import numpy as np # Numpy for numerical operations
import streamlit as st
from datetime import datetime
# ==================================== SETUP ==================================== #
# Fetch the Azure OpenAI secrets. On Hugging Face Spaces these are injected as
# environment variables from the Space's configured secrets. os.environ[...]
# (rather than os.getenv) is deliberate: it raises KeyError immediately if a
# secret is missing, failing fast instead of later sending requests with
# empty credentials.
api_key = os.environ['AT_4O_AZURE_OPENAI_KEY']             # Azure OpenAI API key
endpoint = os.environ['AT_4O_AZURE_OPENAI_ENDPOINT']       # Azure resource endpoint URL
api_version = os.environ['AT_4O_AZURE_OPENAI_APIVERSION']  # Azure OpenAI REST API version
model_name = os.environ['AT_4O_CHATGPT_MODEL']             # Chat model / deployment name
class MyChatBot:
    """Travel-agent chatbot backed by Azure OpenAI through LangChain.

    Wraps an ``AzureChatOpenAI`` client together with a fixed system prompt
    that frames the model as a travel-industry assistant.
    """

    def __init__(self):
        """Initialize the LLM client and build the chat prompt template."""
        # Azure OpenAI chat client; temperature=0 keeps answers deterministic.
        self.client = AzureChatOpenAI(
            azure_endpoint=endpoint,        # Endpoint URL for Azure OpenAI
            api_key=api_key,                # API key for authentication
            api_version=api_version,        # API version being used
            model_name=model_name,          # Model/deployment to use
            temperature=0,                  # 0 = deterministic responses
        )
        # System prompt that sets the assistant's persona and behavior.
        system_prompt = """You are a Travel agent AI assistant specializing in Travel industry helping users to answer their questions. Your goal is to provide accurate, answer and recommendations to user question.
Guidelines for Interaction:
Maintain a polite, professional, and reassuring tone.
If any detail is unclear or missing, proactively ask for clarification.
"""
        # Prompt template: system instructions, the user's message, and a
        # placeholder slot for intermediate agent reasoning steps.
        self.prompt = ChatPromptTemplate.from_messages([
            ("system", system_prompt),
            ("human", "{input}"),
            ("placeholder", "{agent_scratchpad}"),
        ])

    def get_llm_response(self, user_input):
        """Send the user's prompt to the LLM and return its response.

        Args:
            user_input: The user's question as a plain string.

        Returns:
            The LLM response message (access the text via ``.content``).
        """
        # format_messages() keeps the system/human role separation intact;
        # the original format() flattened everything into a single string,
        # which demoted the system instructions to ordinary user text.
        messages = self.prompt.format_messages(input=user_input, agent_scratchpad=[])
        # invoke() is the supported call path; calling the client directly
        # (client(...)) relies on a deprecated __call__ interface.
        return self.client.invoke(messages)
def simple_chat_bot_streamlit():
    """Streamlit UI for the travel chatbot.

    Renders a title, a chat input box, and the LLM's answer. Typing 'exit'
    shows a farewell message instead of querying the model. Streamlit reruns
    the whole script on every interaction, so no explicit loop is needed.
    """
    st.title("Travel Chatbot")
    st.write("Ask me anything and type 'exit' to end the conversation.")
    st.write("How can I help you?")

    chatbot = MyChatBot()
    user_query = st.chat_input("Type your question here (or 'exit' to end)!")
    if not user_query:
        # No input yet on this rerun — nothing to do.
        return

    if user_query.lower() == "exit":
        # Render the farewell. The original built this message but called
        # st.rerun() first, which restarted the script and discarded it
        # (and made the following return unreachable).
        st.write("Goodbye! Feel free to return if you have more questions.")
        return

    try:
        response = chatbot.get_llm_response(user_query)
        st.write(response.content)
    except Exception as e:
        # Surface the error in the UI; print() only reached the server log,
        # leaving the user with a silently blank page.
        st.error(f"Error: {e}")


if __name__ == '__main__':
    simple_chat_bot_streamlit()