# Source header (hosting-page scrape artifact, preserved as a comment so the file parses):
# fmmkii's picture
# Updated API logic
# c29412e
import os
import streamlit as st
from langchain_community.llms import Replicate
from langchain.chains import ConversationChain
from langchain.memory import ConversationSummaryMemory
# Page configuration and one-time session-state bootstrap.
st.set_page_config(page_title="Chat GPT Clone", page_icon="🤖")
st.markdown("<h1 style='text-align: center;'>How can I assist you? </h1>", unsafe_allow_html=True)

# Streamlit re-runs the whole script on every interaction, so each default is
# written only when its key is missing from session state.
_defaults = {
    "messages": [{"role": "assistant", "content": "Hey there! Feel free to ask me anything. What's on your mind?"}],
    "conversation": None,
    "REPLICATE_API_TOKEN": "",
}
for _key, _value in _defaults.items():
    if _key not in st.session_state:
        st.session_state[_key] = _value
# Sidebar: Replicate API token entry/validation plus conversation-summary tools.
with st.sidebar:
    # A token typed into the sidebar takes precedence over Streamlit secrets.
    replicate_api = st.secrets.get('REPLICATE_API_TOKEN', '')
    input_token = st.text_input('Enter your Replicate API token:', type='password')
    if input_token:
        replicate_api = input_token

    # Validate the token format (Replicate tokens look like "r8_" + 37 chars = 40 total).
    if not replicate_api:
        st.warning('No API Key provided! You may get one by signing up at Replicate.')
    elif not (replicate_api.startswith('r8_') and len(replicate_api) == 40):
        st.error('Invalid API Key! Please double-check your token.')
    else:
        st.success('Replicate API token is valid!')
        # Bug fix: persist/export the token only once it passes validation.
        # Previously a malformed token was still written to os.environ and
        # session state, so the rest of the app would try to chat with a
        # key that was just reported as invalid.
        os.environ['REPLICATE_API_TOKEN'] = replicate_api
        st.session_state['REPLICATE_API_TOKEN'] = replicate_api

    # Summarise button: surface the rolling ConversationSummaryMemory buffer.
    summarise_button = st.button("Summarise the conversation", key="summarise_button")
    if summarise_button:
        if st.session_state.get('conversation') and st.session_state['conversation'].memory:
            summary = st.session_state['conversation'].memory.buffer
            st.session_state['summary'] = summary
            st.write("Summary:\n\n" + summary)
        else:
            st.write("No conversation to summarise.")

    # Offer the most recent summary as a downloadable text file.
    if 'summary' in st.session_state:
        st.download_button(
            label="Download Summary",
            data=st.session_state['summary'],
            file_name="conversation_summary.txt",
            mime="text/plain",
        )
# Build the LLM-backed conversation chain once per session, but only after a
# token has been stored in session state.
token = st.session_state['REPLICATE_API_TOKEN']
if not token:
    st.warning("Please provide a valid Replicate API token to start the conversation.")
else:
    try:
        if st.session_state.get('conversation') is None:
            # Pass the token explicitly rather than relying on the env var alone.
            llm = Replicate(
                model="meta/meta-llama-3-8b-instruct",
                model_kwargs={
                    "temperature": 0.01,
                    "top_p": 0.9,
                    "max_length": 128,
                },
                replicate_api_token=token,
            )
            # The same LLM drives both the replies and the summary memory.
            st.session_state['conversation'] = ConversationChain(
                llm=llm,
                verbose=True,
                memory=ConversationSummaryMemory(llm=llm),
            )
    except Exception as e:
        st.error(f"Failed to initialize the model: {e}")
# Function to get the response from the LLM
def get_response(user_input):
    """Send *user_input* through the session's conversation chain and return the reply.

    Raises:
        RuntimeError: when the chain was never initialized (e.g. model setup
            failed earlier in the script). Previously this surfaced as an
            opaque ``AttributeError: 'NoneType' object has no attribute
            'invoke'``; the explicit error is still caught by the caller's
            try/except and shown to the user.
    """
    conversation = st.session_state.get('conversation')
    if conversation is None:
        raise RuntimeError("Conversation is not initialized; check your Replicate API token.")
    response_dict = conversation.invoke(input=user_input)
    return response_dict.get("response", "No response generated.")
# Function to get user input
def get_text():
    """Render the chat input widget and return the submitted message (or None)."""
    user_message = st.chat_input("Say something!")
    return user_message
# Replay the stored chat history (including the seeded greeting). Iterating an
# empty list is a no-op, so no emptiness guard is needed.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
# Handle user input and conversation flow (active only once a token is stored).
if st.session_state['REPLICATE_API_TOKEN']:
    user_input = get_text()
    if user_input:
        # Echo and record the user's message first so it persists even if the
        # model call below fails.
        st.session_state.messages.append({"role": "user", "content": user_input})
        with st.chat_message("user"):
            st.markdown(user_input)
        try:
            # Keep the try block narrow: only the model call can fail here.
            response = get_response(user_input)
        except Exception as e:
            st.error(f"An error occurred: {e}")
        else:
            # Bug fix: store/render replies under the standard "assistant"
            # role — the seeded greeting at the top of the script uses it —
            # so replayed history renders consistently with the built-in
            # assistant avatar instead of the nonstandard "LLM" role.
            st.session_state.messages.append({"role": "assistant", "content": response})
            with st.chat_message("assistant"):
                st.markdown(response)