# Zenbot: a Gradio chat application backed by LangChain and Google Gemini,
# with per-session in-memory conversation history.
| # from langchain_ollama import ChatOllama | |
| from langchain_google_genai import ChatGoogleGenerativeAI | |
| from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder | |
| from langchain_core.chat_history import InMemoryChatMessageHistory | |
| from langchain_core.runnables.history import RunnableWithMessageHistory | |
| from dotenv import load_dotenv | |
| import uuid | |
| import gradio as gr | |
load_dotenv()  # Load environment variables (e.g. the Google API key) from .env

# Module-level session store: maps session_id -> InMemoryChatMessageHistory.
# Lives for the lifetime of the process; histories are lost on restart.
store = {}
def get_session_history(session_id):
    """Return the chat history for *session_id*, creating one on first use.

    Backed by the module-level ``store`` dict; swap this implementation out
    to retrieve history from a database or any other storage.
    """
    if session_id in store:
        return store[session_id]
    # First message for this session: start a fresh in-memory history.
    history = InMemoryChatMessageHistory()
    store[session_id] = history
    return history
# LLM backend. A local Ollama llama3.2 model was used previously (kept below
# for reference); the app now uses Google's Gemini via langchain-google-genai.
# Requires GOOGLE_API_KEY in the environment (loaded from .env above).
# llm = ChatOllama(model="llama3.2:latest", temperature=2.0)
llm=ChatGoogleGenerativeAI(
    model="gemini-2.0-flash"
)
# Chat prompt template: system persona, then the running conversation
# (filled in by RunnableWithMessageHistory under the "history" key),
# then the current user turn under the "input" key.
prompt = ChatPromptTemplate.from_messages(
    messages=[
        # ("system", "You are a helpful assistant. Answer the user's questions to the best of your ability."),
        ("system", "You are a friendly and helpful assistant named Zenbot. Start by greeting the user and then answer their questions."),
        MessagesPlaceholder(variable_name="history"),
        ("human", "{input}")
    ]
)
# First version: a stateless prompt|llm chain (kept for reference).
# chain = prompt | llm
# response = chain.invoke({"question": "How to install gradio?"})
# print(response.content)

# Stateful chain: wraps prompt|llm so the model remembers previous turns.
# Per-session history is looked up via get_session_history using the
# "session_id" value passed in the invoke config.
chain = RunnableWithMessageHistory(
    runnable= prompt | llm,
    get_session_history=get_session_history,
    input_messages_key="input",    # where the current user message goes
    history_messages_key="history",  # where past messages are injected
)
| #Function to handle user input and generate a response. 1st version | |
| # def chatbot(question): | |
| # if(question.strip() == ""): | |
| # return "Please enter a valid question." | |
| # response = chain.invoke({"question": question}) | |
| # return response.content | |
def chatbot(user_input, history_state, temprature_slider, session_id=None):
    """Handle one user turn: invoke the chain and return the conversation so far.

    Args:
        user_input: The user's question text.
        history_state: Gradio state list of (question, answer) tuples; may be None.
            NOTE(review): this list is mutated but never returned to Gradio, so
            the UI's State component never actually sees it — confirm intent.
        temprature_slider: Sampling temperature applied to the LLM for this call.
        session_id: Key into the module-level ``store``. A fresh UUID is
            generated when omitted.

    Returns:
        str: A string rendering of the full session history, or a validation
        message when the input is blank.
    """
    if not user_input.strip():
        return "Please enter a valid question."
    # BUG FIX: the original default was session_id=str(uuid.uuid4()), which is
    # evaluated ONCE at definition time — every call that omitted the argument
    # silently shared a single session. Generate a fresh id per call instead.
    if session_id is None:
        session_id = str(uuid.uuid4())
    llm.temperature = temprature_slider  # apply the user-selected temperature
    response = chain.invoke(
        {"input": user_input},
        config={"configurable": {"session_id": session_id}},
    ).content
    if history_state is None:
        history_state = []
    history_state.append((user_input, response))
    # Show the whole accumulated conversation, as the original version did.
    return str(store[session_id])
def clear(session_id):
    """Forget all remembered messages for the given session."""
    print("Clearing conversation history")
    # Replace the history object outright (rather than mutating it) so the
    # next get_session_history() lookup yields a brand-new, empty history.
    store[session_id] = InMemoryChatMessageHistory()
# Gradio UI: one input box, a temperature slider, an output box showing the
# conversation, plus submit/clear buttons. Each browser session gets its own
# session_id via gr.State so histories don't mix between users.
with gr.Blocks() as demo:
    gr.Markdown("Zensar chatbot")
    # NOTE(review): history_state is passed to chatbot() but chatbot never
    # returns it, so this State is never updated — confirm it is still needed.
    history_state = gr.State(value=None)  # To keep track of the conversation history
    session_id = gr.State(value=str(uuid.uuid4()))  # Unique session ID for each user
    input_box = gr.Textbox(label="Ask a question", placeholder="Type your question here...")
    temprature_slider = gr.Slider(
        label="Temperature",
        minimum=0.0,
        maximum=1.0,
        value=0.5,
        step=0.1,
        interactive=True,
    )
    output_box = gr.Textbox(label="Answer", interactive=False)
    submit_button = gr.Button("Submit")
    # ClearButton resets the listed components; the click handler below also
    # wipes the server-side history for this session.
    clear_button = gr.ClearButton(components=[input_box, output_box, temprature_slider])
    submit_button.click(
        fn=chatbot,
        inputs=[input_box, history_state, temprature_slider, session_id],
        outputs=output_box
    )
    clear_button.click(
        fn=clear,
        inputs=[session_id],
    )

# Launch the Gradio app (blocks until the server is stopped).
demo.launch()