# Source: genaitiwari — commit "teachable agent" (4583e4d)
import streamlit as st
from src.cag.main import CAGLLM
from configfile import Config
from src.streamlitui.loadui import LoadStreamlitUI
from src.usecases.multiagentschat import MultiAgentChat
from src.usecases.multiagentcodeexecution import MultiAgentCodeExecution
from src.usecases.withllamaIndex import WithLlamaIndexMultiAgentChat
from src.usecases.agentchatsqlspider import AgentChatSqlSpider
from src.LLMS.groqllm import GroqLLM
from src.usecases.multiagentragchat import MultiAgentRAGChat
from src.usecases.basicexample import BasicExample
from src.usecases.cag_chat import CAGLLMChat
from src.usecases.teachableagent import TeachableAgent
# MAIN Function START
if __name__ == "__main__":
    # Entry point for the Streamlit multi-agent demo app.
    # Flow: load config -> render UI and collect user controls -> configure the
    # Groq LLM -> wait for a chat prompt -> dispatch to the selected use case.

    # Application-wide configuration.
    obj_config = Config()

    # Render the Streamlit UI and collect the user's control selections
    # (including the 'selected_usecase' key used for dispatch below).
    ui = LoadStreamlitUI()
    user_input = ui.load_streamlit_ui()

    # Configure the Groq LLM from the UI controls. groq_llm_config() is
    # expected to store the resulting config in st.session_state['llm_config']
    # — TODO confirm it always does, otherwise this lookup raises KeyError.
    obj_llm_config = GroqLLM(user_controls_input=user_input)
    obj_llm_config.groq_llm_config()
    llm_config = st.session_state['llm_config']

    # chat_input returns None until the user submits a prompt; the script
    # reruns on each interaction, so we only dispatch once a prompt exists.
    problem = st.chat_input("Start Chat ")
    if problem:
        # Hoist the repeated dict lookup so each branch reads cleanly.
        usecase = user_input['selected_usecase']

        if usecase == "MultiAgent Code Execution":
            # Two assistants (Assistant + Product_Manager) collaborate with a
            # user proxy that can execute generated code.
            obj_usecases_multichatexec = MultiAgentCodeExecution(
                assistant_name=['Assistant', "Product_Manager"],
                user_proxy_name='Userproxy',
                llm_config=llm_config,
                problem=problem,
            )
            obj_usecases_multichatexec.run()
        elif usecase == "MultiAgent Chat":
            obj_usecases_multichat = MultiAgentChat(
                assistant_name='Assistant',
                user_proxy_name='Userproxy',
                llm_config=llm_config,
                problem=problem,
            )
            obj_usecases_multichat.run()
        elif usecase == "RAG Chat":
            obj_usecases_rag_multichat = MultiAgentRAGChat(
                assistant_name='Assistant',
                user_proxy_name='Userproxy',
                llm_config=llm_config,
                problem=problem,
            )
            obj_usecases_rag_multichat.run()
        elif usecase == "With LLamaIndex Tool":
            # This use case additionally receives the full user_input controls.
            obj_usecases_with_llamaIndex_multichat = WithLlamaIndexMultiAgentChat(
                assistant_name='Assistant',
                user_proxy_name='Userproxy',
                llm_config=llm_config,
                problem=problem,
                user_input=user_input,
            )
            obj_usecases_with_llamaIndex_multichat.run()
        # NOTE: the "AgentChat Sql Spider" use case (AgentChatSqlSpider) is
        # currently disabled; re-add a branch here to enable it.
        elif usecase == "Basic Example":
            obj_basic_example = BasicExample(
                assistant_name="Assistant",
                user_proxy_name='Userproxy',
                llm_config=llm_config,
                problem=problem,
            )
            obj_basic_example.run()
        elif usecase == "Chat with CAG":
            # Two-stage pipeline: get a chat response, then post-process it
            # through the CAG LLM.
            obj_chat = CAGLLMChat(llm_config=llm_config, problem=problem)
            response = obj_chat.start_chat()
            obj_cag_llm = CAGLLM(problem, response)
            obj_cag_llm.process_cag_llm()
        elif usecase == "Teachable Agent":
            obj_chat = TeachableAgent(llm_config=llm_config, problem=problem)
            response = obj_chat.start_chat()
            # Unlike the other use cases (which render inside run()), the
            # teachable agent's transcript is rendered here from the result.
            with st.chat_message("user"):
                st.write(problem)
            with st.chat_message("ai"):
                st.markdown(response.summary)