Spaces:
Sleeping
Sleeping
Commit Β·
06520ee
1
Parent(s): 3626119
CrewAI
Browse files- .vscode/launch.json +16 -0
- app.py +62 -0
- configfile.ini +6 -0
- configfile.py +20 -0
- requirements.txt +4 -0
- src/agents/agents.py +56 -0
- src/crew/crewaiprocess.py +0 -0
- src/llmconfig/groqllm.py +19 -0
- src/streamlitUI/loadui.py +35 -0
- src/task/tasks.py +25 -0
- src/utility/callbacks.py +24 -0
.vscode/launch.json
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
    "version": "0.2.0",
    "configurations": [
        {
            "name": "debug streamlit",
            "type": "debugpy",
            // Launch streamlit as a module instead of pointing "program" at a
            // hard-coded ./.venv/Lib/site-packages path: the old path only
            // worked for one Windows venv layout and broke on any other
            // machine. "module" resolves streamlit from the selected
            // interpreter's environment.
            "module": "streamlit",
            "args": [
                "run",
                "app.py"
            ],
            "justMyCode": false
        }
    ]
}
|
app.py
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Streamlit entry point: multi-agent (CrewAI) coding assistant.

Flow: load config -> render UI -> build LLM -> chat loop. On each user
prompt, a hierarchical crew (project manager + coder) is kicked off and
its final answer is appended to the chat history.
"""

from src.agents.agents import CrewAIAgents, CustomHandler
from src.llmconfig.groqllm import GroqLLM
from src.streamlitUI.loadui import LoadStreamlitUI
from src.task.tasks import CrewAITasks
from configfile import Config

import streamlit as st
from crewai import Crew, Process


# MAIN Function START
if __name__ == "__main__":
    # Application configuration (backed by configfile.ini).
    obj_config = Config()

    # Render the header/sidebar and collect the user's control choices
    # (selected LLM, model, API key, usecase).
    ui = LoadStreamlitUI()
    user_input = ui.load_streamlit_ui()

    # Configure the LLM from the user's selections.
    obj_llm_config = GroqLLM(user_controls_input=user_input)
    llm = obj_llm_config.groq_llm_config()

    # Initialize the message log in session state if not already present.
    if "messages" not in st.session_state:
        st.session_state["messages"] = [
            {"role": "assistant", "content": "What code do you want us to write?"}
        ]

    # Replay the conversation so far.
    for msg in st.session_state.messages:
        st.chat_message(msg["role"]).write(msg["content"])

    # Handle user input. The crew must only run once a prompt exists;
    # kicking off with an empty/None prompt would produce a meaningless task.
    if prompt := st.chat_input():
        st.session_state.messages.append({"role": "user", "content": prompt})
        st.chat_message("user").write(prompt)

        # Agents that will collaborate on the request.
        obj_crewai_agents = CrewAIAgents(llm=llm)
        lst_agents = obj_crewai_agents.crewai_agents()

        # Define tasks for each agent, derived from the user's prompt.
        obj_crewai_tasks = CrewAITasks(llm=llm, prompt=prompt, lst_agents=lst_agents)
        lst_tasks = obj_crewai_tasks.create_tasks()

        # Set up the crew and process tasks hierarchically; the manager LLM
        # delegates between the agents and its callbacks mirror progress
        # into the chat UI.
        project_crew = Crew(
            tasks=lst_tasks,
            agents=lst_agents,
            process=Process.hierarchical,
            manager_llm=llm,
            manager_callbacks=[CustomHandler("Crew Manager")],
        )
        final = project_crew.kickoff()

        # Display the final result.
        result = f"## Here is the Final Result \n\n {final}"
        st.session_state.messages.append({"role": "assistant", "content": result})
        st.chat_message("assistant").write(result)
|
configfile.ini
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[DEFAULT]
|
| 2 |
+
PAGE_TITLE = CrewAI Studio
|
| 3 |
+
LLM_OPTIONS = Groq, Huggingface
|
| 4 |
+
USECASE_OPTIONS = MultiAgent Coder
|
| 5 |
+
GROQ_MODEL_OPTIONS = mixtral-8x7b-32768, llama3-8b-8192, llama3-70b-8192, gemma-7b-it
|
| 6 |
+
|
configfile.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from configparser import ConfigParser
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
class Config:
    """Typed accessors over the [DEFAULT] section of configfile.ini."""

    def __init__(self, config_file="configfile.ini"):
        # NOTE: ConfigParser.read() silently ignores a missing file, so a bad
        # path only surfaces later as empty option lists / None title.
        self.config = ConfigParser()
        self.config.read(config_file)

    def _get_list(self, option):
        """Return a comma-separated option as a list of stripped items.

        Tolerates uneven spacing around commas and a missing option
        (returns []), which the previous split(", ") parsing did not —
        it crashed with AttributeError when the key was absent and kept
        stray whitespace when spacing varied.
        """
        raw = self.config["DEFAULT"].get(option, "")
        return [item.strip() for item in raw.split(",") if item.strip()]

    def get_llm_options(self):
        """Available LLM providers (e.g. Groq, Huggingface)."""
        return self._get_list("LLM_OPTIONS")

    def get_usecase_options(self):
        """Available use cases for the sidebar selector."""
        return self._get_list("USECASE_OPTIONS")

    def get_groq_model_options(self):
        """Groq model names offered when the Groq provider is selected."""
        return self._get_list("GROQ_MODEL_OPTIONS")

    def get_page_title(self):
        """Configured page title, or None when not set."""
        return self.config["DEFAULT"].get("PAGE_TITLE")
|
requirements.txt
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
streamlit
|
| 2 |
+
langchain-core
|
| 3 |
+
langchain-groq
|
| 4 |
+
crewai
|
src/agents/agents.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from langchain_core.callbacks import BaseCallbackHandler
|
| 2 |
+
from crewai import Agent
|
| 3 |
+
#from llm import groq
|
| 4 |
+
|
| 5 |
+
class CustomHandler(BaseCallbackHandler):
    """A custom handler for logging interactions within the process chain.

    Mirrors LangChain callback events into the Streamlit chat UI so the
    user can watch agent activity live.
    """

    def __init__(self, agent_name: str) -> None:
        super().__init__()
        # Name used as the chat "role" when this agent's final output is
        # rendered (see on_chain_end).
        self.agent_name = agent_name

    def on_chain_start(self, serialized, outputs, **kwargs) -> None:
        """Log the start of a chain with user input."""
        # NOTE(review): despite the name, `outputs` here carries the chain's
        # *input* payload (its 'input' key is displayed) — the LangChain
        # on_chain_start signature is (serialized, inputs, ...); confirm
        # before renaming, since the dispatcher passes these positionally.
        # streamlit is imported lazily so this module can be imported
        # outside a running Streamlit script context.
        from streamlit import session_state, chat_message
        session_state.messages.append({"role": "assistant", "content": outputs['input']})
        chat_message("assistant").write(outputs['input'])

    def on_agent_action(self, serialized, inputs, **kwargs) -> None:
        """Log the action taken by an agent during a chain run."""
        # NOTE(review): BaseCallbackHandler.on_agent_action normally receives
        # an AgentAction as its first payload argument — this indexes
        # inputs['input'], so verify what the caller actually passes here.
        from streamlit import session_state, chat_message
        session_state.messages.append({"role": "assistant", "content": inputs['input']})
        chat_message("assistant").write(inputs['input'])

    def on_chain_end(self, outputs, **kwargs) -> None:
        """Log the end of a chain with the output generated by an agent."""
        from streamlit import session_state, chat_message
        # The agent's name is used as the message role so each agent's
        # output is visually attributed to it in the chat transcript.
        session_state.messages.append({"role": self.agent_name, "content": outputs['output']})
        chat_message(self.agent_name).write(outputs['output'])
|
| 29 |
+
|
| 30 |
+
class CrewAIAgents:
    """Factory for the two CrewAI agents (project manager + coder) used by the app."""

    def __init__(self, llm):
        # Single LLM instance shared by every agent this factory builds.
        self.llm = llm

    def crewai_agents(self):
        """Build and return the agent list: [project manager, python coder]."""
        manager_agent = Agent(
            role='Project Manager',
            backstory='''You are the project manager.
            You consider the task and break it down into smaller tasks to be performed by the team.
            You do not write code.''',
            goal='Generate actionable steps for task completion.',
            llm=self.llm,
            callbacks=[CustomHandler("Project Manager")],
        )

        developer_agent = Agent(
            role='Python Coder',
            backstory='''You are a Senior Python Developer responsible for writing clean, efficient and robust Python code
            that is easy to read and understand.
            You write code using object oriented programming principles and follow best practices.
            You produce functional, feature complete code.''',
            goal='Develop high-quality, well-structured Python code.',
            llm=self.llm,
            callbacks=[CustomHandler("Coder")],
        )

        # Order matters downstream: CrewAITasks assigns the planning task to
        # index 0 and the coding task to index 1.
        agents = [manager_agent, developer_agent]
        return agents
|
src/crew/crewaiprocess.py
ADDED
|
File without changes
|
src/llmconfig/groqllm.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import streamlit as st
|
| 3 |
+
# Initialize the Groq model for use with agents
|
| 4 |
+
from langchain_core.prompts import ChatPromptTemplate
|
| 5 |
+
from langchain_groq import ChatGroq
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class GroqLLM:
    """Builds a ChatGroq chat model from the choices collected in the UI."""

    def __init__(self, user_controls_input):
        # Mapping returned by LoadStreamlitUI.load_streamlit_ui(); relevant
        # keys (set in loadui.py): "selected_groq_model", "GROQ_API_KEY".
        self.user_controls_input = user_controls_input

    def groq_llm_config(self):
        """Return a configured ChatGroq instance.

        Fix: the previous implementation ignored user_controls_input
        entirely and hard-coded the model. Now the user's sidebar
        selections are honoured, with the old model as a backward-
        compatible fallback.
        """
        controls = self.user_controls_input or {}
        # Model picked in the sidebar; fall back to the previous default.
        model = controls.get("selected_groq_model") or "llama3-70b-8192"
        # Prefer the key typed into the UI, then the environment variable;
        # when neither is set, let ChatGroq resolve it itself (old behavior).
        api_key = controls.get("GROQ_API_KEY") or os.environ.get("GROQ_API_KEY")
        kwargs = {"temperature": 0.5, "model": model}
        if api_key:
            kwargs["api_key"] = api_key
        return ChatGroq(**kwargs)
|
src/streamlitUI/loadui.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import streamlit as st
|
| 2 |
+
from crewai import Crew, Process
|
| 3 |
+
import streamlit as st
|
| 4 |
+
from configfile import Config # Import the Config class
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class LoadStreamlitUI:
    """Renders the page header and sidebar controls and collects the user's choices."""

    def __init__(self):
        # Config wraps configfile.ini (page title, LLM/model/usecase options).
        self.config = Config()  # Create a Config instance
        # Sidebar selections collected by load_streamlit_ui() and returned
        # to the caller (app.py feeds this dict to GroqLLM).
        self.user_controls = {}

    def load_streamlit_ui(self):
        """Render the UI and return the selections dict.

        Returned keys: "selected_llm", "selected_usecase", and — when Groq
        is the chosen provider — "selected_groq_model" and "GROQ_API_KEY".
        """
        st.set_page_config(page_title= "π£π»ββοΈ " + self.config.get_page_title(), layout="wide")
        st.header("π£π»ββοΈ " + self.config.get_page_title())

        with st.sidebar:
            # Get options from config
            llm_options = self.config.get_llm_options()
            usecase_options = self.config.get_usecase_options()

            # LLM selection
            self.user_controls["selected_llm"] = st.selectbox("Select LLM", llm_options)

            # Groq-specific controls are only shown for the Groq provider.
            if self.user_controls["selected_llm"] == 'Groq':
                # Model selection
                model_options = self.config.get_groq_model_options()
                self.user_controls["selected_groq_model"] = st.selectbox("Select Model", model_options)
                # API key input; also mirrored into session_state so other
                # parts of the app can read it without the controls dict.
                self.user_controls["GROQ_API_KEY"] = st.session_state["GROQ_API_KEY"] = st.text_input("API Key",
                                                                                                      type="password")
            # Use case selection
            self.user_controls["selected_usecase"] = st.selectbox("Select Usecases", usecase_options)
            # Toggle lives in session_state only, not in the returned dict.
            st.session_state["chat_with_history"] = st.sidebar.toggle("Chat With History")

        return self.user_controls
|
src/task/tasks.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from crewai import Task
|
| 2 |
+
|
| 3 |
+
class CrewAITasks:
    """Builds the two CrewAI tasks (plan, then code) for a user prompt."""

    def __init__(self, llm, prompt, lst_agents) -> None:
        # lst_agents[0] is the planning (manager) agent, lst_agents[1] the coder.
        self.prompt = prompt
        self.llm = llm
        self.lst_agents = lst_agents

    def create_tasks(self):
        """Return [planning task, coding task] bound to the matching agents."""
        planning_task = Task(
            description=f"""Consider how you would go about the task, '{self.prompt}'.
            Create a plan to complete the task.
            The final step should always require delivering feature complete code""",
            agent=self.lst_agents[0],
            expected_output="A detailed plan for the team to complete the task.",
        )

        coding_task = Task(
            description="Write feature complete code that is simple, efficient and adheres to object oriented principles.",
            agent=self.lst_agents[1],
            expected_output="Well-written and structured code that is feature complete, simple, efficient and adheres to object oriented principles.",
        )

        return [planning_task, coding_task]
|
src/utility/callbacks.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from langchain_core.callbacks import BaseCallbackHandler
|
| 2 |
+
import streamlit as st
|
| 3 |
+
|
| 4 |
+
class CustomHandler(BaseCallbackHandler):
    """A custom handler for logging interactions within the process chain.

    Mirrors LangChain callback events into the Streamlit chat UI.
    Duplicate of the handler in src/agents/agents.py, except streamlit is
    imported at module level here.
    """

    def __init__(self, agent_name: str) -> None:
        super().__init__()
        # Name used as the chat "role" for this agent's final output.
        self.agent_name = agent_name

    def on_chain_start(self, serialized, outputs, **kwargs) -> None:
        """Log the start of a chain with user input."""
        # NOTE(review): despite the name, `outputs` here carries the chain's
        # *input* payload (its 'input' key is displayed) — the LangChain
        # on_chain_start signature is (serialized, inputs, ...); confirm
        # before renaming, since the dispatcher passes these positionally.
        st.session_state.messages.append({"role": "assistant", "content": outputs['input']})
        st.chat_message("assistant").write(outputs['input'])

    def on_agent_action(self, serialized, inputs, **kwargs) -> None:
        """Log the action taken by an agent during a chain run."""
        # NOTE(review): BaseCallbackHandler.on_agent_action normally receives
        # an AgentAction as its first payload argument — this indexes
        # inputs['input'], so verify what the caller actually passes here.
        st.session_state.messages.append({"role": "assistant", "content": inputs['input']})
        st.chat_message("assistant").write(inputs['input'])

    def on_chain_end(self, outputs, **kwargs) -> None:
        """Log the end of a chain with the output generated by an agent."""
        # Agent name as role attributes each agent's output in the chat UI.
        st.session_state.messages.append({"role": self.agent_name, "content": outputs['output']})
        st.chat_message(self.agent_name).write(outputs['output'])
|