SHAH-MEER committed on
Commit
92f4773
·
verified ·
1 Parent(s): 3309e5e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +69 -34
app.py CHANGED
@@ -10,7 +10,7 @@ import subprocess
10
 
11
  load_dotenv()
12
 
13
- # Set API keys HF secrets
14
  os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_API_KEY')
15
  os.environ['ANTHROPIC_API_KEY'] = os.environ.get('ANTHROPIC_API_KEY')
16
  deepseek_api_key = os.environ.get('DEEPSEEK_API_KEY')
@@ -25,43 +25,52 @@ DEEPSEEK_MODEL = 'deepseek-coder'
25
 
26
  system_message = (
27
  "You are an assistant that reimplements Python code in high performance C++ for an M1 Mac. "
28
- "Respond only with C++ code; use comments sparingly and do not provide any explanation other than occasional comments. "
29
  "The C++ response needs to produce an identical output in the fastest possible time."
30
  )
31
 
32
- def user_prompt_for(python):
 
 
33
  return (
34
- "Rewrite this Python code in C++ with the fastest possible implementation that produces identical output in the least time. "
35
- "Respond only with C++ code; do not explain your work other than a few comments. "
36
- "Pay attention to number types to ensure no int overflows. Remember to #include all necessary C++ packages such as iomanip.\n\n"
37
- + python
38
  )
39
 
40
- def messages_for(python):
41
  return [
42
  {"role": "system", "content": system_message},
43
- {"role": "user", "content": user_prompt_for(python)}
44
  ]
45
 
46
- def write_output(cpp):
47
- code = cpp.replace("```cpp", "").replace("```", "")
 
48
  with open("optimized.cpp", "w") as f:
49
  f.write(code)
50
 
51
- def stream_gpt(python):
52
- stream = openai.chat.completions.create(model=OPENAI_MODEL, messages=messages_for(python), stream=True)
 
 
 
 
 
 
53
  reply = ""
54
  for chunk in stream:
55
  fragment = chunk.choices[0].delta.content or ""
56
  reply += fragment
57
  yield reply.replace('```cpp\n', '').replace('```', '')
58
 
59
- def stream_claude(python):
 
60
  result = claude.messages.stream(
61
  model=CLAUDE_MODEL,
62
  max_tokens=2000,
63
  system=system_message,
64
- messages=[{"role": "user", "content": user_prompt_for(python)}],
65
  )
66
  reply = ""
67
  with result as stream:
@@ -69,10 +78,11 @@ def stream_claude(python):
69
  reply += text
70
  yield reply.replace('```cpp\n', '').replace('```', '')
71
 
72
- def stream_deepseek(python):
 
73
  stream = deepseek.chat.completions.create(
74
  model=DEEPSEEK_MODEL,
75
- messages=messages_for(python),
76
  stream=True
77
  )
78
  reply = ""
@@ -81,47 +91,72 @@ def stream_deepseek(python):
81
  reply += fragment
82
  yield reply.replace('```cpp\n', '').replace('```', '')
83
 
84
- def optimize(python, model):
 
85
  if model == "GPT":
86
- result = stream_gpt(python)
87
  elif model == "Claude":
88
- result = stream_claude(python)
89
  elif model == "DeepSeek":
90
- result = stream_deepseek(python)
91
  else:
92
- raise ValueError("Unknown model")
93
- for stream_so_far in result:
94
- yield stream_so_far
95
 
 
96
  def execute_python(code):
 
 
97
  try:
98
- output = io.StringIO()
99
  sys.stdout = output
100
  local_scope = {}
101
  exec(code, local_scope, local_scope)
102
  except Exception as e:
103
  return f"Python error:\n{e}"
104
  finally:
105
- sys.stdout = sys.__stdout__
106
  return output.getvalue()
107
 
 
108
  def execute_cpp(code):
109
  write_output(code)
 
 
110
  try:
111
- compile_cmd = ["g++", "-Ofast", "-std=c++17", "-o", "optimized", "optimized.cpp"]
112
- subprocess.run(compile_cmd, check=True, text=True, capture_output=True)
113
- run_cmd = ["./optimized"]
114
- run_result = subprocess.run(run_cmd, check=True, text=True, capture_output=True)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
115
  return run_result.stdout
116
- except subprocess.CalledProcessError as e:
117
- return f"C++ error:\n{e.stderr}"
118
 
 
119
  css = """
120
  .python {background-color: #306998;}
121
  .cpp {background-color: #050;}
122
  """
123
 
124
- example_code = """
125
  import time
126
 
127
  def calculate(iterations, param1, param2):
@@ -142,7 +177,7 @@ print(f"Execution Time: {(end_time - start_time):.6f} seconds")
142
  """
143
 
144
  with gr.Blocks(css=css) as ui:
145
- gr.Markdown("## 🔁 Python To C++ Converter")
146
  with gr.Row():
147
  python = gr.Textbox(label="Python code:", value=example_code, lines=12)
148
  cpp = gr.Textbox(label="C++ code:", lines=12)
 
10
 
11
load_dotenv()

# Re-export API keys into the process environment (HF Spaces secrets or .env).
# Bug fix: the original assigned os.environ.get(...) straight back into
# os.environ, which raises TypeError when a key is missing — os.environ
# values must be str, never None. Guard each assignment instead so a
# missing key simply leaves the environment untouched.
for _key in ('OPENAI_API_KEY', 'ANTHROPIC_API_KEY'):
    _value = os.environ.get(_key)
    if _value is not None:
        os.environ[_key] = _value

# DeepSeek's key is passed to its client explicitly, so keep it in a variable
# instead of re-exporting it. May be None if unset — TODO confirm the client
# construction below tolerates that.
deepseek_api_key = os.environ.get('DEEPSEEK_API_KEY')
 
25
 
26
# System prompt shared by all three backends (GPT, Claude, DeepSeek): pins the
# task to Python->C++ translation with code-only replies tuned for an M1 Mac.
system_message = (
    "You are an assistant that reimplements Python code in high performance C++ for an M1 Mac. "
    "Respond only with C++ code; use comments sparingly. "
    "The C++ response needs to produce an identical output in the fastest possible time."
)
31
 
32
# Build the per-request user prompt.
def user_prompt_for(python_code):
    """Return the user prompt asking for a fast, equivalent C++ rewrite of *python_code*."""
    instructions = (
        "Rewrite this Python code in C++ with the fastest possible implementation that produces identical output. "
        "Respond only with C++ code; do not explain your work other than minimal comments. "
        "Pay attention to number types to prevent overflow and #include all necessary headers.\n\n"
    )
    return instructions + python_code
40
 
41
def messages_for(python_code):
    """Assemble the system + user chat message list for the OpenAI-style APIs."""
    system_msg = {"role": "system", "content": system_message}
    user_msg = {"role": "user", "content": user_prompt_for(python_code)}
    return [system_msg, user_msg]
46
 
47
# Persist the model's reply to optimized.cpp, stripping markdown code fences.
def write_output(cpp_code):
    """Remove ``` fences from *cpp_code* and write the result to optimized.cpp."""
    for fence in ("```cpp", "```"):
        cpp_code = cpp_code.replace(fence, "")
    with open("optimized.cpp", "w") as f:
        f.write(cpp_code)
52
 
53
# Streaming from models

def stream_gpt(python_code):
    """Stream a C++ rewrite from the OpenAI API, yielding the growing fence-stripped reply."""
    response = openai.chat.completions.create(
        model=OPENAI_MODEL,
        messages=messages_for(python_code),
        stream=True,
    )
    accumulated = ""
    for chunk in response:
        delta = chunk.choices[0].delta.content
        if delta:
            accumulated += delta
        # Yield the full reply so far on every chunk, fences removed.
        yield accumulated.replace('```cpp\n', '').replace('```', '')
66
 
67
+
68
+ def stream_claude(python_code):
69
  result = claude.messages.stream(
70
  model=CLAUDE_MODEL,
71
  max_tokens=2000,
72
  system=system_message,
73
+ messages=[{"role": "user", "content": user_prompt_for(python_code)}],
74
  )
75
  reply = ""
76
  with result as stream:
 
78
  reply += text
79
  yield reply.replace('```cpp\n', '').replace('```', '')
80
 
81
+
82
+ def stream_deepseek(python_code):
83
  stream = deepseek.chat.completions.create(
84
  model=DEEPSEEK_MODEL,
85
+ messages=messages_for(python_code),
86
  stream=True
87
  )
88
  reply = ""
 
91
  reply += fragment
92
  yield reply.replace('```cpp\n', '').replace('```', '')
93
 
94
# Main dispatcher: route the request to the selected model backend.
def optimize(python_code, model):
    """Yield progressively longer C++ translations of *python_code* from *model*.

    Raises ValueError (on first iteration) if *model* is not one of
    "GPT", "Claude", or "DeepSeek".
    """
    streamers = {
        "GPT": stream_gpt,
        "Claude": stream_claude,
        "DeepSeek": stream_deepseek,
    }
    if model not in streamers:
        raise ValueError(f"Unknown model: {model}")
    yield from streamers[model](python_code)
106
 
107
# Capture-and-run helper for the "execute Python" button.
def execute_python(code):
    """Execute *code* and return everything it printed to stdout.

    Returns the captured stdout on success, or a "Python error:" message
    when the code raises.

    NOTE(security): exec() runs arbitrary code with full interpreter
    privileges — acceptable only because this is a self-service demo UI.
    """
    import contextlib  # local import so the helper stays self-contained

    output = io.StringIO()
    local_scope = {}
    try:
        # redirect_stdout restores sys.stdout even if exec raises, replacing
        # the manual save/assign/finally-restore dance of the original.
        with contextlib.redirect_stdout(output):
            # Same dict for globals and locals so top-level defs can see
            # each other, matching normal module semantics.
            exec(code, local_scope, local_scope)
    except Exception as e:
        return f"Python error:\n{e}"
    return output.getvalue()
120
 
121
# Compile optimized.cpp with g++ and run the binary, returning its stdout or a
# descriptive error string for whichever stage fails.
def execute_cpp(code):
    """Write *code* to disk, compile it with g++ -Ofast -std=c++17, run it.

    Returns the program's stdout on success; otherwise a string describing
    the compilation failure, runtime failure, or the exception raised while
    invoking the toolchain.
    """
    write_output(code)

    compiler = ["g++", "-Ofast", "-std=c++17", "-o", "optimized", "optimized.cpp"]
    try:
        compiled = subprocess.run(compiler, text=True, capture_output=True, check=False)
    except Exception as e:
        # e.g. g++ not installed (FileNotFoundError)
        return f"C++ compile error:\n{e}"
    if compiled.returncode != 0:
        return f"C++ compilation failed:\n{compiled.stderr}"

    try:
        executed = subprocess.run(["./optimized"], text=True, capture_output=True, check=False)
    except Exception as e:
        return f"C++ execution exception:\n{e}"
    if executed.returncode != 0:
        return f"C++ runtime error (code {executed.returncode}):\n{executed.stderr}"
    return executed.stdout
152
 
153
# UI setup: Gradio CSS classes giving the output panes branded backgrounds
# (Python-logo blue, C++ green).
css = """
.python {background-color: #306998;}
.cpp {background-color: #050;}
"""
158
 
159
+ example_code = """
160
  import time
161
 
162
  def calculate(iterations, param1, param2):
 
177
  """
178
 
179
  with gr.Blocks(css=css) as ui:
180
+ gr.Markdown("## 🔁 Python to High Performance C++ Converter")
181
  with gr.Row():
182
  python = gr.Textbox(label="Python code:", value=example_code, lines=12)
183
  cpp = gr.Textbox(label="C++ code:", lines=12)