xeeshanajmal commited on
Commit
d862af5
·
verified ·
1 Parent(s): 0ddeceb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -9
app.py CHANGED
@@ -10,17 +10,25 @@ st.markdown("Paste your Qiskit code below and let an LLM find and fix bugs.")
10
  # Sidebar for API keys
11
  with st.sidebar:
12
  st.header("API Configuration")
13
- openai_api_key = st.text_input("OpenAI API Key", type="password", help="Required for GPT-4")
14
  hf_api_key = st.text_input("HuggingFace API Key", type="password", help="Required for HF models")
15
 
16
  # Code input
17
  code_input = st.text_area("Qiskit Code Input", height=300, placeholder="Paste your Qiskit code here...")
18
 
19
  # Backend selection
20
- model_choice = st.selectbox("Choose LLM Backend", ["GPT-4 (OpenAI)", "Granite-8B-Qiskit (HF)", "Code Llama (HF)"])
21
 
22
- def query_gpt4(code, api_key):
23
- """Query OpenAI GPT-4 for code fixing"""
 
 
 
 
 
 
 
 
24
  try:
25
  client = OpenAI(api_key=api_key)
26
 
@@ -52,14 +60,13 @@ EXPLANATION:
52
  {"role": "system", "content": "You are a Qiskit debugging expert."},
53
  {"role": "user", "content": prompt}
54
  ],
55
- temperature=0.3,
56
- max_tokens=2000
57
  )
58
 
59
  return response.choices[0].message.content
60
 
61
  except Exception as e:
62
- return f"Error querying GPT-4: {str(e)}"
63
 
64
  def query_huggingface(code, api_key, model_name):
65
  """Query HuggingFace models for code fixing"""
@@ -110,10 +117,10 @@ Identify bugs and provide corrected code. [/INST]"""
110
 
111
  def query_llm(model, code, openai_key=None, hf_key=None):
112
  """Main router function for LLM queries"""
113
- if model == "GPT-4 (OpenAI)":
114
  if not openai_key:
115
  return "❌ Error: OpenAI API key required. Please enter it in the sidebar."
116
- return query_gpt4(code, openai_key)
117
 
118
  elif model in ["Granite-8B-Qiskit (HF)", "Code Llama (HF)"]:
119
  if not hf_key:
 
10
  # Sidebar for API keys
11
  with st.sidebar:
12
  st.header("API Configuration")
13
+ openai_api_key = st.text_input("OpenAI API Key", type="password", help="Required for GPT-5 Mini")
14
  hf_api_key = st.text_input("HuggingFace API Key", type="password", help="Required for HF models")
15
 
16
  # Code input
17
  code_input = st.text_area("Qiskit Code Input", height=300, placeholder="Paste your Qiskit code here...")
18
 
19
  # Backend selection
20
+ model_choice = st.selectbox("Choose LLM Backend", ["GPT-5 Mini (OpenAI)", "Granite-8B-Qiskit (HF)", "Code Llama (HF)"])
21
 
22
+ def query_gpt5_mini(code, api_key):
23
+ """
24
+ Query OpenAI GPT-5 Mini for code fixing
25
+
26
+ GPT-5 Mini (gpt-5-mini-2025-08-07) is a compact reasoning model with:
27
+ - 400K token context window
28
+ - Does NOT support 'temperature' parameter (reasoning models use default temperature=1)
29
+ - Uses 'max_completion_tokens' instead of 'max_tokens'
30
+ - Token limit here is set via max_completion_tokens (note: max_output_tokens is a Responses API parameter and does not apply to Chat Completions)
31
+ """
32
  try:
33
  client = OpenAI(api_key=api_key)
34
 
 
60
  {"role": "system", "content": "You are a Qiskit debugging expert."},
61
  {"role": "user", "content": prompt}
62
  ],
63
+ max_completion_tokens=2000
 
64
  )
65
 
66
  return response.choices[0].message.content
67
 
68
  except Exception as e:
69
+ return f"Error querying GPT-5 Mini: {str(e)}"
70
 
71
  def query_huggingface(code, api_key, model_name):
72
  """Query HuggingFace models for code fixing"""
 
117
 
118
  def query_llm(model, code, openai_key=None, hf_key=None):
119
  """Main router function for LLM queries"""
120
+ if model == "GPT-5 Mini (OpenAI)":
121
  if not openai_key:
122
  return "❌ Error: OpenAI API key required. Please enter it in the sidebar."
123
+ return query_gpt5_mini(code, openai_key)
124
 
125
  elif model in ["Granite-8B-Qiskit (HF)", "Code Llama (HF)"]:
126
  if not hf_key: