SHAH-MEER commited on
Commit
a7d5afc
·
verified ·
1 Parent(s): bba0ccf

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +145 -0
app.py ADDED
@@ -0,0 +1,145 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import io
3
+ import sys
4
+ from dotenv import load_dotenv
5
+ from openai import OpenAI
6
+ import google.generativeai
7
+ import anthropic
8
+ from IPython.display import Markdown, display, update_display
9
+ import gradio as gr
10
+ import subprocess
11
+
12
+
13
# Load API keys from a .env file (if present) into the environment.
# The original code assigned os.environ['X'] = os.environ.get('X'), which is a
# no-op when the key exists and a TypeError crash when it doesn't, and it also
# contained a stray ')' that made the file a syntax error.
load_dotenv()

deepseek_api_key = os.environ.get('DEEPSEEK_API_KEY')

# OpenAI() and Anthropic() read their API keys from the environment directly.
openai = OpenAI()
claude = anthropic.Anthropic()
# DeepSeek exposes an OpenAI-compatible endpoint, so we reuse the OpenAI client.
deepseek = OpenAI(
    api_key=deepseek_api_key,
    base_url="https://api.deepseek.com/v1"
)

OPENAI_MODEL = "gpt-4o"
CLAUDE_MODEL = "claude-3-5-sonnet-20240620"
DEEPSEEK_MODEL = 'deepseek-coder'

# Shared system prompt for all three models.
system_message = "You are an assistant that reimplements Python code in high performance C++ for an M1 Mac. "
system_message += "Respond only with C++ code; use comments sparingly and do not provide any explanation other than occasional comments. "
system_message += "The C++ response needs to produce an identical output in the fastest possible time."
33
+
34
+
35
def user_prompt_for(python):
    """Build the user prompt asking for a fast, identical-output C++ port of *python*."""
    instructions = (
        "Rewrite this Python code in C++ with the fastest possible implementation that produces identical output in the least time. "
        "Respond only with C++ code; do not explain your work other than a few comments. "
        "Pay attention to number types to ensure no int overflows. Remember to #include all necessary C++ packages such as iomanip.\n\n"
    )
    return instructions + python
41
+
42
def messages_for(python):
    """Assemble the two-message chat payload: system instructions, then the user request."""
    system = {"role": "system", "content": system_message}
    user = {"role": "user", "content": user_prompt_for(python)}
    return [system, user]
47
def write_output(cpp):
    """Strip Markdown code fences from *cpp* and save the result to optimized.cpp."""
    cleaned = cpp.replace("```cpp", "").replace("```", "")
    with open("optimized.cpp", "w") as out:
        out.write(cleaned)
53
+
54
def stream_gpt(python):
    """Stream a C++ translation of *python* from GPT, yielding the growing, fence-stripped reply."""
    reply = ""
    response = openai.chat.completions.create(
        model=OPENAI_MODEL,
        messages=messages_for(python),
        stream=True,
    )
    for chunk in response:
        reply += chunk.choices[0].delta.content or ""
        yield reply.replace('```cpp\n', '').replace('```', '')
61
+
62
def stream_claude(python):
    """Stream a C++ translation of *python* from Claude, yielding the growing, fence-stripped reply."""
    reply = ""
    streamer = claude.messages.stream(
        model=CLAUDE_MODEL,
        max_tokens=2000,
        system=system_message,
        messages=[{"role": "user", "content": user_prompt_for(python)}],
    )
    with streamer as stream:
        for text in stream.text_stream:
            reply += text
            yield reply.replace('```cpp\n', '').replace('```', '')
74
+
75
def stream_deepseek(python):
    """Stream a C++ translation of *python* from DeepSeek, yielding the growing, fence-stripped reply."""
    reply = ""
    for chunk in deepseek.chat.completions.create(
        model=DEEPSEEK_MODEL,
        messages=messages_for(python),
        stream=True,
    ):
        reply += chunk.choices[0].delta.content or ""
        yield reply.replace('```cpp\n', '').replace('```', '')
86
+
87
def optimize(python, model):
    """Yield progressively longer C++ translations of *python* from the chosen model.

    Raises ValueError (on first iteration, since this is a generator) for an
    unrecognized model name.
    """
    if model == "GPT":
        gen = stream_gpt(python)
    elif model == "Claude":
        gen = stream_claude(python)
    elif model == "DeepSeek":
        gen = stream_deepseek(python)
    else:
        raise ValueError("Unknown model")
    yield from gen
98
+
99
def execute_python(code):
    """Run *code* with exec() and return everything it printed to stdout.

    Exceptions raised by the executed code propagate to the caller (Gradio
    surfaces them in the UI). WARNING: exec() on user-supplied code is
    arbitrary code execution; acceptable only because this app is an explicit
    "run my code" playground.
    """
    from contextlib import redirect_stdout
    buffer = io.StringIO()
    # redirect_stdout restores sys.stdout even if exec raises, replacing the
    # fragile manual sys.stdout swap of the original implementation.
    with redirect_stdout(buffer):
        exec(code)
    return buffer.getvalue()
107
+
108
def execute_cpp(code):
    """Compile *code* as optimized.cpp with g++, run it, and return its stdout.

    On a compile or runtime failure, returns the captured stderr as an error
    message instead of raising.
    """
    write_output(code)
    # Generic flags chosen to work in Linux environments as well as on macOS.
    build = ["g++", "-Ofast", "-std=c++17", "-march=native", "-o", "optimized", "optimized.cpp"]
    try:
        subprocess.run(build, check=True, text=True, capture_output=True)
        result = subprocess.run(["./optimized"], check=True, text=True, capture_output=True)
        return result.stdout
    except subprocess.CalledProcessError as e:
        return f"An error occurred:\n{e.stderr}"
119
+
120
css = """
.python {background-color: #306998;}
.cpp {background-color: #050;}
"""

# Default snippet shown in the Python box. The original referenced an
# undefined name `python_hard`, which crashed the app at startup (NameError).
sample_python = """import time

start = time.time()
total = sum(i * i for i in range(1_000_000))
print(f"Result: {total}, time: {time.time() - start:.3f}s")
"""

# Gradio UI: convert Python to C++ with a chosen model, and run either side.
with gr.Blocks(css=css) as ui:
    gr.Markdown("## Convert code from Python to C++")
    with gr.Row():
        python = gr.Textbox(label="Python code:", value=sample_python, lines=10)
        cpp = gr.Textbox(label="C++ code:", lines=10)
    with gr.Row():
        model = gr.Dropdown(["GPT", "Claude", "DeepSeek"], label="Select model", value="Claude")
    with gr.Row():
        convert = gr.Button("Convert code")
    with gr.Row():
        python_run = gr.Button("Run Python")
        cpp_run = gr.Button("Run C++")
    with gr.Row():
        python_out = gr.TextArea(label="Python result:", elem_classes=["python"])
        cpp_out = gr.TextArea(label="C++ result:", elem_classes=["cpp"])

    # optimize is a generator, so the C++ box streams as tokens arrive.
    convert.click(optimize, inputs=[python, model], outputs=[cpp])
    python_run.click(execute_python, inputs=[python], outputs=[python_out])
    cpp_run.click(execute_cpp, inputs=[cpp], outputs=[cpp_out])

ui.launch(inbrowser=True)