xeeshanajmal commited on
Commit
388e6ed
·
verified ·
1 Parent(s): 99c7f88

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +157 -13
app.py CHANGED
@@ -1,35 +1,179 @@
1
  import streamlit as st
2
  import requests
 
 
3
 
4
  st.set_page_config(page_title="Qcode Agent: Qiskit Bug Fixer", layout="centered")
5
  st.title("πŸ€– Qcode Agent: LLM-Powered Quantum Code Repair")
6
  st.markdown("Paste your Qiskit code below and let an LLM find and fix bugs.")
7
 
 
 
 
 
 
 
8
  # Code input
9
  code_input = st.text_area("Qiskit Code Input", height=300, placeholder="Paste your Qiskit code here...")
10
 
11
  # Backend selection
12
  model_choice = st.selectbox("Choose LLM Backend", ["GPT-4 (OpenAI)", "Granite-8B-Qiskit (HF)", "Code Llama (HF)"])
13
 
14
- # API setup (mocked)
15
- def query_llm(model, code):
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
  if model == "GPT-4 (OpenAI)":
17
- return "# Fixed by GPT-4\n" + code.replace("cx(q[0], q[0])", "cx(q[0], q[1])")
18
- elif model == "Granite-8B-Qiskit (HF)":
19
- return "# Granite-8B fix suggestion:\n" + code.replace("cx(q[0], q[0])", "cx(q[0], q[1])")
20
- elif model == "Code Llama (HF)":
21
- return "# Code Llama fix suggestion:\n" + code.replace("cx(q[0], q[0])", "cx(q[0], q[1])")
 
 
 
 
22
  return "No fix found."
23
 
24
  # Button to trigger repair
25
- if st.button("Fix My Code"):
26
  if not code_input.strip():
27
- st.warning("Please paste some Qiskit code to analyze.")
28
  else:
29
- with st.spinner("Querying the selected LLM..."):
30
- fixed_code = query_llm(model_choice, code_input)
31
- st.success("Patch generated successfully!")
32
- st.code(fixed_code, language="python")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33
 
34
  st.markdown("---")
35
  st.caption("Built by your Quantum AI copilot πŸ§ βš›οΈ")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import streamlit as st
2
  import requests
3
+ import os
4
+ from openai import OpenAI
5
 
6
# --- Page setup ---
# NOTE: st.set_page_config must be the first Streamlit call in the script.
st.set_page_config(page_title="Qcode Agent: Qiskit Bug Fixer", layout="centered")
st.title("πŸ€– Qcode Agent: LLM-Powered Quantum Code Repair")
st.markdown("Paste your Qiskit code below and let an LLM find and fix bugs.")

# Sidebar for API keys
# The keys live only in widget state for this session and are passed
# straight through to query_llm() when the user clicks the fix button.
with st.sidebar:
    st.header("API Configuration")
    openai_api_key = st.text_input("OpenAI API Key", type="password", help="Required for GPT-4")
    hf_api_key = st.text_input("HuggingFace API Key", type="password", help="Required for HF models")

# Code input
# Raw user-pasted Qiskit source; emptiness is checked at submit time.
code_input = st.text_area("Qiskit Code Input", height=300, placeholder="Paste your Qiskit code here...")

# Backend selection
# The selected label is routed to the matching backend by query_llm().
model_choice = st.selectbox("Choose LLM Backend", ["GPT-4 (OpenAI)", "Granite-8B-Qiskit (HF)", "Code Llama (HF)"])
21
 
22
def query_gpt4(code, api_key):
    """Send *code* to OpenAI GPT-4 and return its bug analysis as text.

    Args:
        code: Qiskit source code to analyze.
        api_key: OpenAI API key used to authenticate the client.

    Returns:
        The model's full response text on success, or an error string
        ("Error querying GPT-4: ...") if anything raises.
    """
    try:
        api_client = OpenAI(api_key=api_key)

        user_prompt = f"""You are an expert in quantum computing and Qiskit.
Analyze the following Qiskit code for bugs and provide a fixed version.
Explain what bugs you found and how you fixed them.

Code to analyze:
```python
{code}
```

Provide your response in this format:
BUGS FOUND:
[List the bugs]

FIXED CODE:
```python
[Your fixed code here]
```

EXPLANATION:
[Explain the fixes]
"""

        # Low temperature: we want deterministic, conservative repairs.
        chat_messages = [
            {"role": "system", "content": "You are a Qiskit debugging expert."},
            {"role": "user", "content": user_prompt},
        ]
        completion = api_client.chat.completions.create(
            model="gpt-4",
            messages=chat_messages,
            temperature=0.3,
            max_tokens=2000,
        )

        return completion.choices[0].message.content

    except Exception as e:
        return f"Error querying GPT-4: {str(e)}"
63
+
64
def query_huggingface(code, api_key, model_name):
    """Ask a HuggingFace Inference API model to debug Qiskit *code*.

    Args:
        code: Qiskit source code to analyze.
        api_key: HuggingFace API token, sent as a Bearer header.
        model_name: UI label of the backend; mapped to a HF model id and
            falling back to CodeLlama-7b for unknown labels.

    Returns:
        The generated text on success, otherwise an error string (either
        "Error: <status> - <body>" or "Error querying HuggingFace: ...").
    """
    try:
        # Map model choice to HF model ID
        hf_model_ids = {
            "Granite-8B-Qiskit (HF)": "ibm-granite/granite-8b-code-instruct",
            "Code Llama (HF)": "codellama/CodeLlama-13b-Instruct-hf",
        }
        resolved_id = hf_model_ids.get(model_name, "codellama/CodeLlama-7b-Instruct-hf")

        request_headers = {
            "Authorization": f"Bearer {api_key}",
            "Content-Type": "application/json",
        }

        # Llama-style [INST] instruction wrapper used by both backends.
        instruct_prompt = f"""<s>[INST] You are an expert in quantum computing and Qiskit.
Analyze this Qiskit code for bugs and provide a fixed version:

{code}

Identify bugs and provide corrected code. [/INST]"""

        request_body = {
            "inputs": instruct_prompt,
            "parameters": {
                "max_new_tokens": 1000,
                "temperature": 0.3,
                "top_p": 0.9,
                "return_full_text": False,
            },
        }

        endpoint = f"https://api-inference.huggingface.co/models/{resolved_id}"
        api_response = requests.post(endpoint, headers=request_headers, json=request_body, timeout=60)

        # Guard clause: surface non-200 responses verbatim for debugging.
        if api_response.status_code != 200:
            return f"Error: {api_response.status_code} - {api_response.text}"

        parsed = api_response.json()
        if isinstance(parsed, list) and len(parsed) > 0:
            return parsed[0].get('generated_text', 'No response generated')
        return str(parsed)

    except Exception as e:
        return f"Error querying HuggingFace: {str(e)}"
110
+
111
def query_llm(model, code, openai_key=None, hf_key=None):
    """Main router: dispatch *code* to the backend named by *model*.

    Args:
        model: UI label of the selected backend.
        code: Qiskit source code to analyze.
        openai_key: OpenAI key (required only for the GPT-4 backend).
        hf_key: HuggingFace token (required only for the HF backends).

    Returns:
        The backend's response text, a user-facing error string when the
        required key is missing, or "No fix found." for unknown labels.
    """
    if model == "GPT-4 (OpenAI)":
        if openai_key:
            return query_gpt4(code, openai_key)
        return "❌ Error: OpenAI API key required. Please enter it in the sidebar."

    if model in ("Granite-8B-Qiskit (HF)", "Code Llama (HF)"):
        if hf_key:
            return query_huggingface(code, hf_key, model)
        return "❌ Error: HuggingFace API key required. Please enter it in the sidebar."

    return "No fix found."
124
 
125
# Button to trigger repair
if st.button("πŸ”§ Fix My Code", type="primary"):
    # Guard: nothing to analyze unless the user actually pasted code.
    if code_input.strip():
        with st.spinner(f"πŸ€” Querying {model_choice}..."):
            llm_answer = query_llm(
                model_choice,
                code_input,
                openai_api_key,
                hf_api_key
            )

        st.success("βœ… Analysis complete!")

        # Display results in an expandable section
        with st.expander("πŸ“ LLM Response", expanded=True):
            st.markdown(llm_answer)
    else:
        st.warning("⚠️ Please paste some Qiskit code to analyze.")
143
+
144
# Example code section
# Deliberately buggy sample circuit: a CNOT whose control and target are
# the same qubit, which Qiskit rejects at execution time.
with st.expander("πŸ’‘ Try an Example"):
    example_code = """from qiskit import QuantumCircuit, QuantumRegister
from qiskit_aer import Aer
from qiskit.visualization import plot_histogram

# Create a quantum circuit with a bug
q = QuantumRegister(2, 'q')
qc = QuantumCircuit(q)

qc.h(q[0])
qc.cx(q[0], q[0]) # BUG: Control and target are the same qubit!

qc.measure_all()"""

    st.code(example_code, language="python")
    # NOTE(review): st.rerun() by itself cannot copy example_code into the
    # "Qiskit Code Input" text_area above — that widget has no key and no
    # session_state wiring — so this button currently has no visible
    # effect. Confirm intended UX; fixing it requires a keyed text_area.
    if st.button("Load Example"):
        st.rerun()
162
 
163
# --- Footer ---
st.markdown("---")
st.caption("Built by your Quantum AI copilot πŸ§ βš›οΈ")

# Instructions
# Appended to the sidebar below the API-key inputs defined near the top of
# the script (Streamlit merges repeated `with st.sidebar:` blocks in order).
with st.sidebar:
    st.markdown("---")
    st.markdown("""
### πŸ“– How to Use
1. Enter your API key(s) above
2. Paste Qiskit code in the main area
3. Select your preferred LLM
4. Click "Fix My Code"

### πŸ”‘ Getting API Keys
- **OpenAI**: https://platform.openai.com/api-keys
- **HuggingFace**: https://huggingface.co/settings/tokens
""")