Navya-Sree's picture
Update app.py
a3f36ed verified
raw
history blame
8.87 kB
import os
import glob
import streamlit as st
from urllib.parse import urlparse, parse_qs
from macg.llm_openai import OpenAIResponsesLLM
from macg.agents.coder import CoderAgent
from macg.agents.reviewer import ReviewerAgent
from macg.agents.tester import TesterAgent
from macg.orchestrator import Orchestrator
# Streamlit page chrome for the OpenAI-backed variant of the app.
# NOTE: st.set_page_config must be the first Streamlit command executed in a script.
st.set_page_config(page_title="Multi-Agent Codegen (OpenAI)", layout="wide")
st.title("πŸ€– Multi-Agent Codegen + Review + Testing (OpenAI)")
st.caption("Coder β†’ Reviewer β†’ Tester loop with pytest verification.")
# -----------------------------
# Helpers
# -----------------------------
def parse_openai_uri(uri: str) -> tuple[str, str, str]:
    """Extract OpenAI connection settings from a user-supplied string.

    Supported inputs:
      1) raw key: sk-... (or rk-...)
      2) openai://<API_KEY>@api.openai.com?model=gpt-5
      3) https://api.openai.com/v1?api_key=sk-...&model=gpt-5

    Returns:
        (api_key, base_url, model); missing pieces fall back to defaults.
    """
    DEFAULT_BASE = "https://api.openai.com/v1"
    DEFAULT_MODEL = "gpt-5"

    text = (uri or "").strip()
    if not text:
        return "", DEFAULT_BASE, DEFAULT_MODEL

    # A bare key carries no scheme; any long scheme-less string is treated as one.
    looks_like_bare_key = text.startswith(("sk-", "rk-")) or (
        "://" not in text and len(text) > 20
    )
    if looks_like_bare_key:
        return text, DEFAULT_BASE, DEFAULT_MODEL

    parts = urlparse(text)
    params = parse_qs(parts.query)

    # Only http(s) URIs override the base URL; other schemes keep the default.
    if parts.scheme in ("http", "https"):
        base_url = f"{parts.scheme}://{parts.netloc}{parts.path}".rstrip("/")
    else:
        base_url = DEFAULT_BASE

    # The key may arrive via query string ("api_key" wins over "key")
    # or via the userinfo portion of the URI.
    api_key = ""
    for param_name in ("api_key", "key"):
        if param_name in params:
            api_key = params[param_name][0]
            break
    else:
        if parts.username:
            api_key = parts.username

    model = params.get("model", [DEFAULT_MODEL])[0]
    return api_key, base_url, model
def build_orchestrator(api_key: str, base_url: str, model: str, temperature: float) -> Orchestrator:
    """Wire up the Coder/Reviewer/Tester pipeline around one shared LLM client.

    Args:
        api_key: OpenAI API key (required).
        base_url: API endpoint base URL.
        model: Model identifier to use for all agents.
        temperature: Sampling temperature (coerced to float).

    Returns:
        An Orchestrator whose three agents share a single OpenAIResponsesLLM.

    Raises:
        RuntimeError: if api_key is empty.
    """
    if not api_key:
        raise RuntimeError("OpenAI key missing. Paste it in the URI field in the sidebar.")
    shared_llm = OpenAIResponsesLLM(
        api_key=api_key,
        base_url=base_url,
        model=model,
        temperature=float(temperature),
        max_output_tokens=900,
    )
    return Orchestrator(
        coder=CoderAgent(shared_llm),
        reviewer=ReviewerAgent(shared_llm),
        tester=TesterAgent(shared_llm),
    )
# -----------------------------
# Sidebar controls
# -----------------------------
with st.sidebar:
    st.header("OpenAI Connection")
    # One pasted URI supplies key, base URL and (optionally) the model.
    uri = st.text_input(
        "OpenAI URI (paste key here)",
        type="password",
        value="",
        help=(
            "Paste either:\n"
            "β€’ Just the key: sk-...\n"
            "β€’ openai://sk-XXX@api.openai.com?model=gpt-5\n"
            "β€’ https://api.openai.com/v1?api_key=sk-XXX&model=gpt-5"
        ),
    )
    api_key, base_url, model_from_uri = parse_openai_uri(uri)
    # The model parsed from the URI only seeds this field; the user may override it.
    model = st.text_input("Model", value=model_from_uri)
    temperature = st.slider("Temperature", 0.0, 1.0, 0.2, 0.05)
    max_iters = st.slider("Max iterations", 1, 6, 3)
    st.divider()
    st.caption("Debug (optional)")
    # Deployment diagnostics; /app/src paths suggest a Hugging Face Space
    # container layout — TODO confirm against the Space's Dockerfile.
    if st.checkbox("Show import paths / files"):
        st.write("PYTHONPATH =", os.getenv("PYTHONPATH"))
        st.write("sys.path =", __import__("sys").path)
        st.write("/app/src exists?", os.path.exists("/app/src"))
        st.write("/app/src/macg exists?", os.path.exists("/app/src/macg"))
        st.write("Files in /app/src/macg:", glob.glob("/app/src/macg/*"))
    if not api_key:
        st.warning("Paste your OpenAI key in the URI field to run.")
# -----------------------------
# Main UI
# -----------------------------
# Default task shown in the editor; the user can replace it with any spec.
default_task = (
    "Implement a function fizzbuzz(n: int) -> list[str] that returns strings for 1..n.\n"
    "- Multiples of 3 -> 'Fizz'\n"
    "- Multiples of 5 -> 'Buzz'\n"
    "- Multiples of both -> 'FizzBuzz'\n"
    "Return the list of length n.\n"
    "Edge cases: n <= 0 should return an empty list."
)
task = st.text_area("Task", value=default_task, height=180)
colA, colB = st.columns([1, 1])
run_btn = colA.button("Run Agents", type="primary", use_container_width=True)
clear_btn = colB.button("Clear Output", use_container_width=True)
if clear_btn:
    # Drop any previous run so the results panel below disappears.
    st.session_state.pop("result", None)
if run_btn:
    try:
        orch = build_orchestrator(api_key=api_key, base_url=base_url, model=model, temperature=temperature)
        with st.spinner("Running Coder β†’ Reviewer β†’ Tester..."):
            result = orch.run(task=task, max_iters=int(max_iters))
        # Persist in session_state so the output survives Streamlit reruns.
        st.session_state["result"] = result
    except Exception as e:
        # Surface any failure (missing key, network, agent error) in the UI.
        st.error(str(e))
result = st.session_state.get("result")
if result:
    # Summary metrics; fields come from Orchestrator.run's result object
    # (passed, iteration, module_name, code, review_notes, tests, test_report).
    top1, top2, top3 = st.columns([1, 1, 1])
    top1.metric("Passed", "βœ… Yes" if result.passed else "❌ No")
    top2.metric("Iterations", str(result.iteration))
    top3.metric("Module", result.module_name)
    st.divider()
    left, right = st.columns([1, 1])
    with left:
        st.subheader("Generated Code")
        st.code(result.code or "", language="python")
        st.subheader("Review Notes")
        st.text(result.review_notes or "")
    with right:
        st.subheader("Generated Tests")
        st.code(result.tests or "", language="python")
        st.subheader("Test Report")
        st.text(result.test_report or "")
# NOTE(review): everything from here down looks like a second, older app
# variant pasted after the first one — it re-imports streamlit, redefines
# variables/widgets and calls st.set_page_config again. This whole section
# presumably should be removed or moved to its own file; verify with the author.
import streamlit as st
from macg.llm_openai import OpenAIResponsesLLM
# This variant reads the key from the environment instead of the sidebar URI.
openai_key = os.getenv("OPENAI_API_KEY")
if not openai_key:
    st.error("OPENAI_API_KEY not found. Add it in HF Space Settings β†’ Variables and secrets.")
    st.stop()
# NOTE(review): this llm instance is never referenced again below (the second
# build_orchestrator constructs its own client) — appears to be dead code.
llm = OpenAIResponsesLLM(
    model=st.sidebar.text_input("OpenAI model", value="gpt-5"),
    api_key=openai_key,
    temperature=st.sidebar.slider("Temperature", 0.0, 1.0, 0.2, 0.05),
    max_output_tokens=900,
)
# NOTE(review): second st.set_page_config call in the same script — Streamlit
# raises StreamlitAPIException when this runs after other st.* commands, as here.
st.set_page_config(page_title="Multi-Agent Codegen (HF)", layout="wide")
st.title("πŸ€– Multi-Agent Codegen + Review + Testing (Hugging Face)")
st.caption("Coder β†’ Reviewer β†’ Tester loop with pytest verification.")
# --- Secrets / token handling (Streamlit Cloud uses st.secrets) ---
hf_token = None
if "HF_TOKEN" in st.secrets:
    hf_token = st.secrets["HF_TOKEN"]
else:
    hf_token = os.getenv("HF_TOKEN")
with st.sidebar:
    st.header("Settings")
    model = st.text_input(
        "HF model (Inference API)",
        value="Qwen/Qwen2.5-Coder-7B-Instruct",
        help="You can change to another hosted model if you want."
    )
    # NOTE(review): these rebind max_iters/temperature from the first sidebar
    # above and reuse identical widget labels, which triggers Streamlit's
    # DuplicateWidgetID error when both halves execute in one script.
    max_iters = st.slider("Max iterations", 1, 6, 3)
    temperature = st.slider("Temperature", 0.0, 1.0, 0.2, 0.05)
    st.divider()
    st.subheader("HF Token")
    if hf_token:
        st.success("HF_TOKEN found (env or secrets).")
    else:
        st.warning("HF_TOKEN not found. Add it in Streamlit Secrets or environment.")
        st.info("Streamlit Cloud: Settings β†’ Secrets β†’ add HF_TOKEN='...'")
# NOTE(review): duplicate of the task editor and buttons defined earlier in the
# file — identical widget labels would collide at runtime (DuplicateWidgetID).
default_task = (
    "Implement a function fizzbuzz(n: int) -> list[str] that returns strings for 1..n.\n"
    "- Multiples of 3 -> 'Fizz'\n"
    "- Multiples of 5 -> 'Buzz'\n"
    "- Multiples of both -> 'FizzBuzz'\n"
    "Return the list of length n.\n"
    "Edge cases: n <= 0 should return an empty list."
)
task = st.text_area("Task", value=default_task, height=180)
colA, colB = st.columns([1, 1])
run_btn = colA.button("Run Agents", type="primary", use_container_width=True)
clear_btn = colB.button("Clear Output", use_container_width=True)
if clear_btn:
    # Clears the shared "result" session key used by both halves of this file.
    st.session_state.pop("result", None)
def build_orchestrator() -> Orchestrator:
    """Build the agent pipeline backed by the Hugging Face Inference API.

    Reads hf_token, model and temperature from module-level state rather than
    taking parameters.

    Returns:
        Orchestrator wired with Coder/Reviewer/Tester agents sharing one LLM.

    Raises:
        RuntimeError: when no HF token is available.

    NOTE(review): this shadows the earlier build_orchestrator(api_key=...) and
    references HuggingFaceInferenceLLM, which is never imported anywhere in
    this file — calling this function as-is raises NameError.
    """
    if not hf_token:
        raise RuntimeError("HF_TOKEN missing. Add it to environment or Streamlit secrets.")
    llm = HuggingFaceInferenceLLM(
        model=model,
        token=hf_token,
        temperature=float(temperature),
        max_new_tokens=900,
    )
    coder = CoderAgent(llm)
    reviewer = ReviewerAgent(llm)
    tester = TesterAgent(llm)
    return Orchestrator(coder=coder, reviewer=reviewer, tester=tester)
# NOTE(review): second copy of the run/display section; mirrors the OpenAI
# variant above and reuses the same "result" session_state key.
if run_btn:
    try:
        # Calls the zero-argument HF build_orchestrator defined just above.
        orch = build_orchestrator()
        with st.spinner("Running Coder β†’ Reviewer β†’ Tester..."):
            result = orch.run(task=task, max_iters=int(max_iters))
        # Persist so the output survives Streamlit reruns.
        st.session_state["result"] = result
    except Exception as e:
        st.error(str(e))
result = st.session_state.get("result")
if result:
    # Summary metrics from the orchestrator's result object.
    top1, top2, top3 = st.columns([1, 1, 1])
    top1.metric("Passed", "βœ… Yes" if result.passed else "❌ No")
    top2.metric("Iterations", str(result.iteration))
    top3.metric("Module", result.module_name)
    st.divider()
    left, right = st.columns([1, 1])
    with left:
        st.subheader("Generated Code")
        st.code(result.code or "", language="python")
        st.subheader("Review Notes")
        st.text(result.review_notes or "")
    with right:
        st.subheader("Generated Tests")
        st.code(result.tests or "", language="python")
        st.subheader("Test Report")
        st.text(result.test_report or "")