# qcode-agent / app.py
# (Hugging Face Space page header preserved as comments so the file parses:
#  uploaded by xeeshanajmal — "Update app.py" — commit d862af5, verified)
import streamlit as st
import requests
import os
from openai import OpenAI
# --- Page setup -------------------------------------------------------------
st.set_page_config(page_title="Qcode Agent: Qiskit Bug Fixer", layout="centered")
st.title("🤖 Qcode Agent: LLM-Powered Quantum Code Repair")
st.markdown("Paste your Qiskit code below and let an LLM find and fix bugs.")

# Sidebar: collect the API keys required by the selectable backends.
with st.sidebar:
    st.header("API Configuration")
    openai_api_key = st.text_input("OpenAI API Key", type="password", help="Required for GPT-5 Mini")
    hf_api_key = st.text_input("HuggingFace API Key", type="password", help="Required for HF models")

# Code input: the Qiskit source the user wants analyzed and repaired.
code_input = st.text_area("Qiskit Code Input", height=300, placeholder="Paste your Qiskit code here...")

# Backend selection: labels here must match the routing in query_llm().
model_choice = st.selectbox("Choose LLM Backend", ["GPT-5 Mini (OpenAI)", "Granite-8B-Qiskit (HF)", "Code Llama (HF)"])
def query_gpt5_mini(code, api_key):
    """Query OpenAI GPT-5 Mini to find and fix bugs in Qiskit code.

    GPT-5 Mini (gpt-5-mini-2025-08-07) is a compact reasoning model:
    - 400K token context window
    - does NOT support the 'temperature' parameter (reasoning models
      run at the default temperature=1)
    - uses 'max_completion_tokens' instead of 'max_tokens'

    Args:
        code: Qiskit source code to analyze.
        api_key: OpenAI API key.

    Returns:
        The model's response text, or an error string on failure.
    """
    try:
        client = OpenAI(api_key=api_key)
        # Prompt body is kept flush-left so the triple-quoted string
        # contains no stray leading whitespace.
        prompt = f"""You are an expert in quantum computing and Qiskit.
Analyze the following Qiskit code for bugs and provide a fixed version.
Explain what bugs you found and how you fixed them.
Code to analyze:
```python
{code}
```
Provide your response in this format:
BUGS FOUND:
[List the bugs]
FIXED CODE:
```python
[Your fixed code here]
```
EXPLANATION:
[Explain the fixes]
"""
        response = client.chat.completions.create(
            model="gpt-5-mini-2025-08-07",
            messages=[
                {"role": "system", "content": "You are a Qiskit debugging expert."},
                {"role": "user", "content": prompt},
            ],
            # Reasoning models reject 'max_tokens'; this is the supported cap.
            max_completion_tokens=2000,
        )
        return response.choices[0].message.content
    except Exception as e:
        # Surface the failure as text so the Streamlit UI does not crash.
        return f"Error querying GPT-5 Mini: {str(e)}"
def query_huggingface(code, api_key, model_name):
    """Query a HuggingFace Inference API model to find and fix Qiskit bugs.

    Args:
        code: Qiskit source code to analyze.
        api_key: HuggingFace API token (sent as a Bearer header).
        model_name: UI label of the chosen backend, mapped to an HF model ID.

    Returns:
        The generated text on success, or an error string on failure.
    """
    try:
        # Map the UI label to an HF model ID; unrecognized labels fall back
        # to the smaller CodeLlama-7b instruct model.
        model_map = {
            "Granite-8B-Qiskit (HF)": "ibm-granite/granite-8b-code-instruct",
            "Code Llama (HF)": "codellama/CodeLlama-13b-Instruct-hf",
        }
        model_id = model_map.get(model_name, "codellama/CodeLlama-7b-Instruct-hf")
        headers = {
            "Authorization": f"Bearer {api_key}",
            "Content-Type": "application/json",
        }
        # Llama-style [INST] prompt format; body kept flush-left so the
        # string contains no stray leading whitespace.
        prompt = f"""<s>[INST] You are an expert in quantum computing and Qiskit.
Analyze this Qiskit code for bugs and provide a fixed version:
{code}
Identify bugs and provide corrected code. [/INST]"""
        payload = {
            "inputs": prompt,
            "parameters": {
                "max_new_tokens": 1000,
                "temperature": 0.3,
                "top_p": 0.9,
                "return_full_text": False,
            },
        }
        api_url = f"https://api-inference.huggingface.co/models/{model_id}"
        response = requests.post(api_url, headers=headers, json=payload, timeout=60)
        if response.status_code == 200:
            result = response.json()
            # The text-generation endpoint returns a list of dicts on success.
            if isinstance(result, list) and len(result) > 0:
                return result[0].get('generated_text', 'No response generated')
            return str(result)
        return f"Error: {response.status_code} - {response.text}"
    except Exception as e:
        # Surface the failure as text so the Streamlit UI does not crash.
        return f"Error querying HuggingFace: {str(e)}"
def query_llm(model, code, openai_key=None, hf_key=None):
    """Route a code-repair request to the selected LLM backend.

    Args:
        model: Backend label chosen in the UI selectbox.
        code: Qiskit source code to analyze.
        openai_key: OpenAI API key (required for the OpenAI backend).
        hf_key: HuggingFace API key (required for the HF backends).

    Returns:
        The backend's response text, or an error message when the
        required API key is missing or the model label is unknown.
    """
    if model == "GPT-5 Mini (OpenAI)":
        if not openai_key:
            return "❌ Error: OpenAI API key required. Please enter it in the sidebar."
        return query_gpt5_mini(code, openai_key)
    if model in ("Granite-8B-Qiskit (HF)", "Code Llama (HF)"):
        if not hf_key:
            return "❌ Error: HuggingFace API key required. Please enter it in the sidebar."
        return query_huggingface(code, hf_key, model)
    return "No fix found."
# --- Repair trigger ---------------------------------------------------------
if st.button("🔧 Fix My Code", type="primary"):
    if not code_input.strip():
        st.warning("⚠️ Please paste some Qiskit code to analyze.")
    else:
        # Spinner covers only the (potentially slow) network round-trip.
        with st.spinner(f"🤔 Querying {model_choice}..."):
            fixed_code = query_llm(
                model_choice,
                code_input,
                openai_api_key,
                hf_api_key,
            )
        st.success("✅ Analysis complete!")
        # Display results in an expandable section, open by default.
        with st.expander("📝 LLM Response", expanded=True):
            st.markdown(fixed_code)
# --- Example code section ---------------------------------------------------
with st.expander("💡 Try an Example"):
    # Deliberately buggy circuit the user can paste into the input above.
    example_code = """from qiskit import QuantumCircuit, QuantumRegister
from qiskit_aer import Aer
from qiskit.visualization import plot_histogram
# Create a quantum circuit with a bug
q = QuantumRegister(2, 'q')
qc = QuantumCircuit(q)
qc.h(q[0])
qc.cx(q[0], q[0]) # BUG: Control and target are the same qubit!
qc.measure_all()"""
    st.code(example_code, language="python")
    if st.button("Load Example"):
        # NOTE(review): st.rerun() alone does not copy example_code into the
        # text area — the text_area would need a session_state key to be
        # prefilled. Confirm intended behavior before wiring that up.
        st.rerun()

st.markdown("---")
st.caption("Built by your Quantum AI copilot 🧠⚛️")
# --- Sidebar usage instructions ----------------------------------------------
with st.sidebar:
    st.markdown("---")
    # Markdown body kept flush-left so the string renders without
    # unintended leading whitespace.
    st.markdown("""
### 📖 How to Use
1. Enter your API key(s) above
2. Paste Qiskit code in the main area
3. Select your preferred LLM
4. Click "Fix My Code"
### 🔑 Getting API Keys
- **OpenAI**: https://platform.openai.com/api-keys
- **HuggingFace**: https://huggingface.co/settings/tokens
""")