SHAH-MEER committed on
Commit
ae75f56
·
verified ·
1 Parent(s): e6bffd0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +54 -62
app.py CHANGED
@@ -8,55 +8,53 @@ import anthropic
8
  import gradio as gr
9
  import subprocess
10
 
 
11
 
 
12
  os.environ['OPENAI_API_KEY'] = os.environ.get('OPENAI_API_KEY')
13
  os.environ['ANTHROPIC_API_KEY'] = os.environ.get('ANTHROPIC_API_KEY')
14
  deepseek_api_key = os.environ.get('DEEPSEEK_API_KEY')
15
 
16
-
17
-
18
  openai = OpenAI()
19
  claude = anthropic.Anthropic()
20
- deepseek = OpenAI(
21
- api_key=deepseek_api_key,
22
- base_url="https://api.deepseek.com/v1"
23
- )
24
 
25
  OPENAI_MODEL = "gpt-4o"
26
  CLAUDE_MODEL = "claude-3-5-sonnet-20240620"
27
  DEEPSEEK_MODEL = 'deepseek-coder'
28
 
29
- system_message = "You are an assistant that reimplements Python code in high performance C++ for an M1 Mac. "
30
- system_message += "Respond only with C++ code; use comments sparingly and do not provide any explanation other than occasional comments. "
31
- system_message += "The C++ response needs to produce an identical output in the fastest possible time."
32
-
 
33
 
34
  def user_prompt_for(python):
35
- user_prompt = "Rewrite this Python code in C++ with the fastest possible implementation that produces identical output in the least time. "
36
- user_prompt += "Respond only with C++ code; do not explain your work other than a few comments. "
37
- user_prompt += "Pay attention to number types to ensure no int overflows. Remember to #include all necessary C++ packages such as iomanip.\n\n"
38
- user_prompt += python
39
- return user_prompt
 
40
 
41
  def messages_for(python):
42
  return [
43
  {"role": "system", "content": system_message},
44
  {"role": "user", "content": user_prompt_for(python)}
45
  ]
46
- # write to a file called optimized.cpp
47
 
48
  def write_output(cpp):
49
- code = cpp.replace("```cpp","").replace("```","")
50
  with open("optimized.cpp", "w") as f:
51
  f.write(code)
52
 
53
- def stream_gpt(python):
54
  stream = openai.chat.completions.create(model=OPENAI_MODEL, messages=messages_for(python), stream=True)
55
  reply = ""
56
  for chunk in stream:
57
  fragment = chunk.choices[0].delta.content or ""
58
  reply += fragment
59
- yield reply.replace('```cpp\n','').replace('```','')
60
 
61
  def stream_claude(python):
62
  result = claude.messages.stream(
@@ -69,44 +67,61 @@ def stream_claude(python):
69
  with result as stream:
70
  for text in stream.text_stream:
71
  reply += text
72
- yield reply.replace('```cpp\n','').replace('```','')
73
 
74
  def stream_deepseek(python):
75
  stream = deepseek.chat.completions.create(
76
- model = DEEPSEEK_MODEL,
77
- messages = messages_for(python),
78
  stream=True
79
  )
80
  reply = ""
81
  for chunk in stream:
82
  fragment = chunk.choices[0].delta.content or ""
83
  reply += fragment
84
- yield reply.replace('```cpp\n','').replace('```','')
85
 
86
  def optimize(python, model):
87
- if model=="GPT":
88
  result = stream_gpt(python)
89
- elif model=="Claude":
90
  result = stream_claude(python)
91
  elif model == "DeepSeek":
92
  result = stream_deepseek(python)
93
  else:
94
  raise ValueError("Unknown model")
95
  for stream_so_far in result:
96
- yield stream_so_far
97
 
98
  def execute_python(code):
99
  try:
100
  output = io.StringIO()
101
  sys.stdout = output
102
- exec(code)
 
 
 
103
  finally:
104
  sys.stdout = sys.__stdout__
105
  return output.getvalue()
106
 
107
- #example python code
108
- python_hard = """# Be careful to support large number sizes
 
 
 
 
 
 
 
 
 
 
 
 
 
109
 
 
110
  def lcg(seed, a=1664525, c=1013904223, m=2**32):
111
  value = seed
112
  while True:
@@ -133,47 +148,24 @@ def total_max_subarray_sum(n, initial_seed, min_val, max_val):
133
  total_sum += max_subarray_sum(n, seed, min_val, max_val)
134
  return total_sum
135
 
136
- # Parameters
137
- n = 10000 # Number of random numbers
138
- initial_seed = 42 # Initial seed for the LCG
139
- min_val = -10 # Minimum value of random numbers
140
- max_val = 10 # Maximum value of random numbers
141
-
142
- # Timing the function
143
  import time
 
 
 
 
144
  start_time = time.time()
145
  result = total_max_subarray_sum(n, initial_seed, min_val, max_val)
146
  end_time = time.time()
147
-
148
  print("Total Maximum Subarray Sum (20 runs):", result)
149
- print("Execution Time: {:.6f} seconds".format(end_time - start_time))
150
- """
151
-
152
-
153
- def execute_cpp(code):
154
- write_output(code)
155
- try:
156
- # Modified compiler flags to be more generic and compatible with Linux environments
157
- compile_cmd = ["g++", "-Ofast", "-std=c++17", "-march=native", "-o", "optimized", "optimized.cpp"]
158
- compile_result = subprocess.run(compile_cmd, check=True, text=True, capture_output=True)
159
- run_cmd = ["./optimized"]
160
- run_result = subprocess.run(run_cmd, check=True, text=True, capture_output=True)
161
- return run_result.stdout
162
- except subprocess.CalledProcessError as e:
163
- return f"An error occurred:\n{e.stderr}"
164
-
165
- css = """
166
- .python {background-color: #306998;}
167
- .cpp {background-color: #050;}
168
- """
169
 
170
  with gr.Blocks(css=css) as ui:
171
- gr.Markdown("## Convert code from Python to C++")
172
  with gr.Row():
173
- python = gr.Textbox(label="Python code:", value=python_hard, lines=10)
174
- cpp = gr.Textbox(label="C++ code:", lines=10)
175
  with gr.Row():
176
- model = gr.Dropdown(["GPT", "Claude","DeepSeek"], label="Select model", value="Claude")
177
  with gr.Row():
178
  convert = gr.Button("Convert code")
179
  with gr.Row():
@@ -187,4 +179,4 @@ with gr.Blocks(css=css) as ui:
187
  python_run.click(execute_python, inputs=[python], outputs=[python_out])
188
  cpp_run.click(execute_cpp, inputs=[cpp], outputs=[cpp_out])
189
 
190
- ui.launch(inbrowser=True)
 
8
  import gradio as gr
9
  import subprocess
10
 
11
load_dotenv()

# API keys come from .env (locally) or HF Space secrets.  The OpenAI and
# Anthropic clients read OPENAI_API_KEY / ANTHROPIC_API_KEY from the
# environment themselves, so the keys only need to be present.
# NOTE(review): the previous `os.environ[k] = os.environ.get(k)` pattern
# raised TypeError when a key was unset, because os.environ values must be
# strings, never None — reading without re-assigning avoids the crash.
deepseek_api_key = os.environ.get('DEEPSEEK_API_KEY')
17
 
 
 
18
# Module-level API clients.  OpenAI() and Anthropic() pick up their keys
# from OPENAI_API_KEY / ANTHROPIC_API_KEY in the environment; DeepSeek
# exposes an OpenAI-compatible endpoint, so it reuses the OpenAI client
# with a custom base_url.
openai = OpenAI()
claude = anthropic.Anthropic()
deepseek = OpenAI(api_key=deepseek_api_key, base_url="https://api.deepseek.com/v1")
 
 
 
21
 
22
# Model identifier used for each provider.
OPENAI_MODEL = "gpt-4o"
CLAUDE_MODEL = "claude-3-5-sonnet-20240620"
DEEPSEEK_MODEL = 'deepseek-coder'

# System prompt shared by all three models: C++-only output, minimal comments.
system_message = (
    "You are an assistant that reimplements Python code in high performance C++ for an M1 Mac. "
    "Respond only with C++ code; use comments sparingly and do not provide any explanation other than occasional comments. "
    "The C++ response needs to produce an identical output in the fastest possible time."
)
31
 
32
def user_prompt_for(python):
    """Return the user prompt: rewrite instructions followed by the Python source."""
    parts = [
        "Rewrite this Python code in C++ with the fastest possible implementation that produces identical output in the least time. ",
        "Respond only with C++ code; do not explain your work other than a few comments. ",
        "Pay attention to number types to ensure no int overflows. Remember to #include all necessary C++ packages such as iomanip.\n\n",
        python,
    ]
    return "".join(parts)
39
 
40
def messages_for(python):
    """Chat payload shared by all providers: system prompt plus the user request."""
    system_turn = {"role": "system", "content": system_message}
    user_turn = {"role": "user", "content": user_prompt_for(python)}
    return [system_turn, user_turn]
 
45
 
46
def write_output(cpp):
    """Strip markdown code fences from *cpp* and save the result to optimized.cpp."""
    stripped = cpp.replace("```cpp", "").replace("```", "")
    with open("optimized.cpp", "w") as out:
        out.write(stripped)
50
 
51
def stream_gpt(python):
    """Stream a GPT C++ conversion, yielding the fence-stripped reply so far."""
    response = openai.chat.completions.create(
        model=OPENAI_MODEL,
        messages=messages_for(python),
        stream=True,
    )
    so_far = ""
    for chunk in response:
        so_far += chunk.choices[0].delta.content or ""
        yield so_far.replace('```cpp\n', '').replace('```', '')
58
 
59
  def stream_claude(python):
60
  result = claude.messages.stream(
 
67
  with result as stream:
68
  for text in stream.text_stream:
69
  reply += text
70
+ yield reply.replace('```cpp\n', '').replace('```', '')
71
 
72
def stream_deepseek(python):
    """Stream a DeepSeek C++ conversion, yielding the fence-stripped reply so far."""
    response = deepseek.chat.completions.create(
        model=DEEPSEEK_MODEL,
        messages=messages_for(python),
        stream=True,
    )
    so_far = ""
    for chunk in response:
        so_far += chunk.choices[0].delta.content or ""
        yield so_far.replace('```cpp\n', '').replace('```', '')
83
 
84
def optimize(python, model):
    """Dispatch *python* to the chosen model and stream back the growing C++ text.

    Args:
        python: Python source code to convert.
        model: One of "GPT", "Claude" or "DeepSeek".

    Yields:
        The fence-stripped reply accumulated so far (for live UI updates).

    Raises:
        ValueError: if *model* is not a known name (raised on first next()).
    """
    if model == "GPT":
        result = stream_gpt(python)
    elif model == "Claude":
        result = stream_claude(python)
    elif model == "DeepSeek":
        result = stream_deepseek(python)
    else:
        raise ValueError("Unknown model")
    # yield from replaces the manual re-yield loop; behavior is identical.
    yield from result
95
 
96
def execute_python(code):
    """Execute *code* and return everything it printed to stdout.

    On any exception, an error message is returned instead of the output.
    SECURITY NOTE: exec() runs arbitrary code with no sandboxing — only
    use on input the operator trusts.
    """
    output = io.StringIO()
    saved_stdout = sys.stdout  # restore whatever was active, not sys.__stdout__
    sys.stdout = output
    try:
        scope = {}
        exec(code, scope, scope)
    except Exception as e:
        # BUG FIX: the previous version put `return output.getvalue()` inside
        # `finally`, which overrode this return and silently dropped errors.
        return f"Python error:\n{e}"
    finally:
        sys.stdout = saved_stdout
    return output.getvalue()
107
 
108
def execute_cpp(code):
    """Compile *code* with g++ and return the program's stdout.

    Returns a "C++ error:" message on compile/runtime failure, or when the
    compiler itself is missing from the environment.
    """
    write_output(code)
    compile_cmd = ["g++", "-Ofast", "-std=c++17", "-o", "optimized", "optimized.cpp"]
    try:
        subprocess.run(compile_cmd, check=True, text=True, capture_output=True)
        run_result = subprocess.run(["./optimized"], check=True, text=True, capture_output=True)
        return run_result.stdout
    except subprocess.CalledProcessError as e:
        return f"C++ error:\n{e.stderr}"
    except FileNotFoundError as e:
        # g++ (or the produced binary) is not installed/present; previously
        # this propagated as an unhandled exception into the UI.
        return f"C++ error:\n{e}"
118
+
119
# Gradio CSS: colour-code the two editors (Python blue, C++ dark green).
css = """
.python {background-color: #306998;}
.cpp {background-color: #050;}
"""
123
 
124
+ example_code = '''# Be careful to support large number sizes
125
  def lcg(seed, a=1664525, c=1013904223, m=2**32):
126
  value = seed
127
  while True:
 
148
  total_sum += max_subarray_sum(n, seed, min_val, max_val)
149
  return total_sum
150
 
 
 
 
 
 
 
 
151
  import time
152
+ n = 10000
153
+ initial_seed = 42
154
+ min_val = -10
155
+ max_val = 10
156
  start_time = time.time()
157
  result = total_max_subarray_sum(n, initial_seed, min_val, max_val)
158
  end_time = time.time()
 
159
  print("Total Maximum Subarray Sum (20 runs):", result)
160
+ print("Execution Time: {:.6f} seconds".format(end_time - start_time))'''
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
161
 
162
  with gr.Blocks(css=css) as ui:
163
+ gr.Markdown("## 🔁 Python To C++ Converter")
164
  with gr.Row():
165
+ python = gr.Textbox(label="Python code:", value=example_code, lines=12)
166
+ cpp = gr.Textbox(label="C++ code:", lines=12)
167
  with gr.Row():
168
+ model = gr.Dropdown(["GPT", "Claude", "DeepSeek"], label="Select model", value="Claude")
169
  with gr.Row():
170
  convert = gr.Button("Convert code")
171
  with gr.Row():
 
179
  python_run.click(execute_python, inputs=[python], outputs=[python_out])
180
  cpp_run.click(execute_cpp, inputs=[cpp], outputs=[cpp_out])
181
 
182
+ ui.launch(share=True)