Commit · 10231de
Parent(s): 1963e36
Autogen 1st usecases
Files changed:
- app.py +31 -0
- configfile.ini +6 -0
- configfile.py +20 -0
- prompts.toml +0 -0
- requirements.txt +3 -0
- src/LLMS/hfllm.py +0 -0
- src/agents/assistantagent.py +9 -0
- src/agents/userproxyagent.py +9 -0
- src/streamlitui/loadui.py +33 -0
- src/usecases/multiagentschat.py +30 -0
app.py
ADDED
@@ -0,0 +1,31 @@
import streamlit as st

from configfile import Config
from src.streamlitui.loadui import LoadStreamlitUI
from src.usecases.multiagentschat import MultiAgentChat
from src.LLMS.groqllm import GroqLLM


# MAIN Function START


if __name__ == "__main__":
    # config
    obj_config = Config()
    # load ui
    ui = LoadStreamlitUI()
    user_input = ui.load_streamlit_ui()

    # Configure LLM
    obj_llm_config = GroqLLM(user_controls_input=user_input)
    obj_llm_config.groq_llm_config()
    llm_config = st.session_state['llm_config']

    # user input
    problem = st.chat_input("Start Chat ")

    if problem:
        # start multi-agent chat
        obj_usecases = MultiAgentChat(assistant_name='Assistant', user_proxy_name='Userproxy', llm_config=llm_config,
                                      problem=problem)
        obj_usecases.run()
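Note: app.py imports GroqLLM from src/LLMS/groqllm.py, but no such file is added in this commit (only the empty src/LLMS/hfllm.py), so the app cannot run as committed. Below is a minimal sketch of what groqllm.py plausibly contains, inferred purely from how app.py calls it; the class body, the llm_config shape, and the base_url are assumptions, not part of this commit:

# Hypothetical src/LLMS/groqllm.py -- NOT part of this commit. The class name,
# constructor argument, and method name come from app.py's usage; everything
# else is an assumption based on pyautogen's OpenAI-style config_list format.
import streamlit as st


class GroqLLM:
    def __init__(self, user_controls_input):
        self.user_controls_input = user_controls_input

    def groq_llm_config(self):
        # Build an autogen-style llm_config from the sidebar selections and
        # stash it where app.py reads it: st.session_state['llm_config'].
        llm_config = {
            "config_list": [
                {
                    "model": self.user_controls_input["selected_groq_model"],
                    "api_key": self.user_controls_input["GROQ_API_KEY"],
                    # Groq's OpenAI-compatible endpoint (assumed wiring).
                    "base_url": "https://api.groq.com/openai/v1",
                }
            ]
        }
        st.session_state['llm_config'] = llm_config
        return llm_config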
configfile.ini
ADDED
@@ -0,0 +1,6 @@
[DEFAULT]
PAGE_TITLE = AUTOGEN IN ACTION
LLM_OPTIONS = Groq, Huggingface
USECASE_OPTIONS = MultiAgent Chat, RAG Chat, With LLamaIndex Tool, Teachable Agent, With Langchain
GROQ_MODEL_OPTIONS = Mixtral 8x7b, llama3-8b-8192, llama3-70b-8192, gemma-7b-i
configfile.py
ADDED
@@ -0,0 +1,20 @@
from configparser import ConfigParser


class Config:
    def __init__(self, config_file="configfile.ini"):
        self.config = ConfigParser()
        self.config.read(config_file)

    def get_llm_options(self):
        return self.config["DEFAULT"].get("LLM_OPTIONS").split(", ")

    def get_usecase_options(self):
        return self.config["DEFAULT"].get("USECASE_OPTIONS").split(", ")

    def get_groq_model_options(self):
        return self.config["DEFAULT"].get("GROQ_MODEL_OPTIONS").split(", ")

    def get_page_title(self):
        return self.config["DEFAULT"].get("PAGE_TITLE")
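Config is a thin wrapper over ConfigParser whose getters split the comma-separated option strings from configfile.ini. A quick illustration of the parsing behavior, assuming the configfile.ini above is on disk:

from configfile import Config

cfg = Config()
print(cfg.get_page_title())   # AUTOGEN IN ACTION
print(cfg.get_llm_options())  # ['Groq', 'Huggingface']
print(cfg.get_groq_model_options())
# ['Mixtral 8x7b', 'llama3-8b-8192', 'llama3-70b-8192', 'gemma-7b-i']

Because the getters rely on a plain split(", "), entries must be separated by exactly a comma and one space, and a missing key raises AttributeError (None has no .split) rather than falling back to a default.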
prompts.toml
ADDED
File without changes
requirements.txt
ADDED
@@ -0,0 +1,3 @@
streamlit
pyautogen
groq
src/LLMS/hfllm.py
ADDED
File without changes
src/agents/assistantagent.py
ADDED
@@ -0,0 +1,9 @@
from autogen import AssistantAgent
import streamlit as st


class TrackableAssistantAgent(AssistantAgent):
    def _process_received_message(self, message, sender, silent):
        with st.chat_message(sender.name):
            st.write(message)
        return super()._process_received_message(message, sender, silent)
src/agents/userproxyagent.py
ADDED
@@ -0,0 +1,9 @@
from autogen import UserProxyAgent
import streamlit as st


class TrackableUserProxyAgent(UserProxyAgent):
    def _process_received_message(self, message, sender, silent):
        with st.chat_message("user"):
            st.write(message)
        return super()._process_received_message(message, sender, silent)
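Both agent files apply the same pattern: subclass an autogen agent and override its internal _process_received_message hook so every incoming message is mirrored into the Streamlit chat area before normal processing continues. Since this is a private pyautogen method, it may change between versions. A self-contained sketch of the pattern with a stand-in base class (everything below is hypothetical):

class BaseAgent:
    """Stand-in for the autogen agent base class (hypothetical)."""
    def _process_received_message(self, message, sender, silent):
        return message  # normal handling would happen here


class TrackableAgent(BaseAgent):
    """Adds a UI side effect, then defers to the parent, like the Trackable* classes above."""
    def _process_received_message(self, message, sender, silent):
        print(f"[chat ui] {sender}: {message}")  # the real code calls st.chat_message/st.write
        return super()._process_received_message(message, sender, silent)


TrackableAgent()._process_received_message("hello", sender="Userproxy", silent=False)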
src/streamlitui/loadui.py
ADDED
@@ -0,0 +1,33 @@
import streamlit as st
from configfile import Config  # Import the Config class


class LoadStreamlitUI:
    def __init__(self):
        self.config = Config()  # Create a Config instance
        self.user_controls = {}

    def load_streamlit_ui(self):
        st.set_page_config(page_title="🤖 " + self.config.get_page_title(), layout="wide")
        st.header("🤖 " + self.config.get_page_title())

        with st.sidebar:
            # Get options from config
            llm_options = self.config.get_llm_options()
            usecase_options = self.config.get_usecase_options()

            # LLM selection
            self.user_controls["selected_llm"] = st.selectbox("Select LLM", llm_options)

            if self.user_controls["selected_llm"] == 'Groq':
                # Model selection
                model_options = self.config.get_groq_model_options()
                self.user_controls["selected_groq_model"] = st.selectbox("Select Model", model_options)
                # API key input
                self.user_controls["GROQ_API_KEY"] = st.session_state["GROQ_API_KEY"] = st.text_input("API Key",
                                                                                                       type="password")
            # Use case selection
            self.user_controls["selected_usecase"] = st.selectbox("Select Usecases", usecase_options)
            st.session_state["chat_with_history"] = st.sidebar.toggle("Chat With History")

        return self.user_controls
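load_streamlit_ui returns the sidebar selections as a plain dict, which app.py forwards to GroqLLM. When "Groq" is selected, the returned structure looks roughly like this (all values below are illustrative):

user_input = {
    "selected_llm": "Groq",
    "selected_groq_model": "llama3-8b-8192",
    "GROQ_API_KEY": "<key typed in the sidebar>",  # also mirrored into st.session_state
    "selected_usecase": "MultiAgent Chat",
}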
src/usecases/multiagentschat.py
ADDED
@@ -0,0 +1,30 @@
import asyncio
from src.agents.assistantagent import TrackableAssistantAgent
from src.agents.userproxyagent import TrackableUserProxyAgent
import streamlit as st


class MultiAgentChat:
    def __init__(self, assistant_name, user_proxy_name, llm_config, problem):
        self.assistant = TrackableAssistantAgent(name=assistant_name,
                                                 system_message="""You are a helpful assistant. Reply "TERMINATE"
                                                 at the end when everything is done.""",
                                                 human_input_mode="NEVER",
                                                 llm_config=llm_config,
                                                 )
        self.user_proxy = TrackableUserProxyAgent(name=user_proxy_name,
                                                  system_message="You are Admin",
                                                  human_input_mode="NEVER",
                                                  llm_config=llm_config,
                                                  code_execution_config=False,
                                                  is_termination_msg=lambda x: x.get("content", "").strip().endswith(
                                                      "TERMINATE"))
        self.problem = problem
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

    async def initiate_chat(self):
        # clear_history is inverted relative to the toggle: keep history when "Chat With History" is on
        await self.user_proxy.a_initiate_chat(self.assistant, message=self.problem, clear_history=not st.session_state["chat_with_history"])

    def run(self):
        self.loop.run_until_complete(self.initiate_chat())
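MultiAgentChat creates its own asyncio event loop in the constructor so that the synchronous run(), called from Streamlit's script thread, can drive the async a_initiate_chat to completion. A standalone sketch of that sync-over-async pattern, independent of autogen (names are illustrative):

import asyncio


class SyncRunner:
    """Owns a dedicated event loop and drives coroutines to completion
    from synchronous code, mirroring MultiAgentChat's run()."""

    def __init__(self):
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

    def run(self, coro):
        return self.loop.run_until_complete(coro)


async def demo():
    await asyncio.sleep(0)  # stand-in for user_proxy.a_initiate_chat(...)
    return "chat finished"


print(SyncRunner().run(demo()))  # -> chat finished

Creating a fresh loop per instance avoids "no current event loop" errors in a non-main thread, though note that the loop is never closed here.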