File size: 6,208 Bytes
a7d5afc
 
 
 
 
 
 
 
 
 
ae75f56
a7d5afc
92f4773
a7d5afc
 
 
45f5e0e
a7d5afc
 
ae75f56
a7d5afc
 
 
 
 
ae75f56
 
92f4773
ae75f56
 
a7d5afc
92f4773
 
 
ae75f56
92f4773
 
 
ae75f56
a7d5afc
92f4773
a7d5afc
 
92f4773
a7d5afc
 
92f4773
 
 
a7d5afc
 
 
92f4773
 
 
 
 
 
 
 
a7d5afc
 
 
 
ae75f56
a7d5afc
92f4773
 
a7d5afc
 
 
 
92f4773
a7d5afc
 
 
 
 
ae75f56
a7d5afc
92f4773
 
a7d5afc
ae75f56
92f4773
a7d5afc
 
 
 
 
 
ae75f56
a7d5afc
92f4773
 
ae75f56
92f4773
ae75f56
92f4773
a7d5afc
92f4773
a7d5afc
92f4773
 
 
a7d5afc
92f4773
a7d5afc
92f4773
 
a7d5afc
 
ae75f56
 
 
 
a7d5afc
92f4773
a7d5afc
 
92f4773
ae75f56
 
92f4773
 
ae75f56
92f4773
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ae75f56
92f4773
 
ae75f56
92f4773
e6bffd0
92f4773
34c079c
 
 
 
 
e6bffd0
3309e5e
 
 
 
 
 
 
 
 
 
e6bffd0
3309e5e
e6bffd0
3309e5e
 
 
 
a7d5afc
714021c
cba3888
a7d5afc
714021c
 
a7d5afc
ae75f56
a7d5afc
 
 
 
 
 
 
 
 
 
 
 
 
ae75f56
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
import contextlib
import io
import os
import subprocess
import sys

import anthropic
import google.generativeai
import gradio as gr
from dotenv import load_dotenv
from openai import OpenAI

load_dotenv()

# Propagate API keys from the environment (HF Spaces Secrets or .env).
# Only write back keys that are actually present: the previous code did
# os.environ['K'] = os.environ.get('K'), which raises TypeError at startup
# when a key is missing (os.environ values must be strings, not None).
for _key in ("OPENAI_API_KEY", "ANTHROPIC_API_KEY"):
    _value = os.environ.get(_key)
    if _value:
        os.environ[_key] = _value

deepseek_api_key = os.environ.get("DEEPSEEK_API_KEY")

# Pre-built API clients. OpenAI and Anthropic read their keys from the
# environment; DeepSeek reuses the OpenAI client against its own
# OpenAI-compatible endpoint with an explicit key.
openai = OpenAI()
claude = anthropic.Anthropic()
deepseek = OpenAI(api_key=deepseek_api_key, base_url="https://api.deepseek.com/v1")

OPENAI_MODEL = "gpt-4o"
CLAUDE_MODEL = "claude-3-5-sonnet-20240620"
DEEPSEEK_MODEL = "deepseek-coder"

# Shared system prompt for all three backends.
system_message = (
    "You are an assistant that reimplements Python code in high performance C++ for an M1 Mac. "
    "Respond only with C++ code; use comments sparingly. "
    "The C++ response needs to produce an identical output in the fastest possible time."
)

# Build prompt

def user_prompt_for(python_code):
    """Build the user-turn prompt asking for a fast, output-identical C++ port."""
    instructions = (
        "Rewrite this Python code in C++ with the fastest possible implementation that produces identical output. "
        "Respond only with C++ code; do not explain your work other than minimal comments. "
        "Pay attention to number types to prevent overflow and #include all necessary headers.\n\n"
    )
    return instructions + python_code

def messages_for(python_code):
    """Assemble the two-message chat payload (system + user) for an OpenAI-style API."""
    system_turn = {"role": "system", "content": system_message}
    user_turn = {"role": "user", "content": user_prompt_for(python_code)}
    return [system_turn, user_turn]

# Write C++ code to disk
def write_output(cpp_code):
    """Strip Markdown code fences from the model reply and save it to optimized.cpp."""
    stripped = cpp_code.replace("```cpp", "").replace("```", "")
    with open("optimized.cpp", "w") as cpp_file:
        cpp_file.write(stripped)

# Streaming from models

def stream_gpt(python_code):
    """Yield the growing GPT reply, Markdown fences stripped, as chunks arrive."""
    response = openai.chat.completions.create(
        model=OPENAI_MODEL,
        messages=messages_for(python_code),
        stream=True
    )
    accumulated = ""
    for chunk in response:
        accumulated += chunk.choices[0].delta.content or ""
        # Re-emit the full text each time so the UI shows a live preview.
        yield accumulated.replace('```cpp\n', '').replace('```', '')


def stream_claude(python_code):
    """Yield the growing Claude reply, Markdown fences stripped, as text streams in."""
    accumulated = ""
    with claude.messages.stream(
        model=CLAUDE_MODEL,
        max_tokens=2000,
        system=system_message,
        messages=[{"role": "user", "content": user_prompt_for(python_code)}],
    ) as stream:
        for piece in stream.text_stream:
            accumulated += piece
            # Re-emit the full text each time so the UI shows a live preview.
            yield accumulated.replace('```cpp\n', '').replace('```', '')


def stream_deepseek(python_code):
    """Yield the growing DeepSeek reply, Markdown fences stripped, as chunks arrive."""
    response = deepseek.chat.completions.create(
        model=DEEPSEEK_MODEL,
        messages=messages_for(python_code),
        stream=True
    )
    accumulated = ""
    for chunk in response:
        accumulated += chunk.choices[0].delta.content or ""
        # Re-emit the full text each time so the UI shows a live preview.
        yield accumulated.replace('```cpp\n', '').replace('```', '')

# Main dispatcher
def optimize(python_code, model):
    """Dispatch to the streaming generator for the selected model and relay its chunks.

    Raises ValueError (on first iteration) if *model* is not one of the
    supported backends.
    """
    streamers = {
        "GPT": stream_gpt,
        "Claude": stream_claude,
        "DeepSeek": stream_deepseek,
    }
    streamer = streamers.get(model)
    if streamer is None:
        raise ValueError(f"Unknown model: {model}")
    yield from streamer(python_code)

# Safe Python execution
def execute_python(code):
    """Execute submitted Python source and return its captured stdout.

    On any exception the error message is returned instead of the output.

    WARNING: exec() gives the submitted code full interpreter access; only
    expose this behind a sandboxed/containerized deployment, never to
    untrusted users on a shared host.
    """
    buffer = io.StringIO()
    try:
        # redirect_stdout restores sys.stdout even if exec raises, replacing
        # the previous manual save/swap/finally dance.
        with contextlib.redirect_stdout(buffer):
            local_scope = {}
            exec(code, local_scope, local_scope)
    except Exception as e:
        return f"Python error:\n{e}"
    return buffer.getvalue()

# Improved C++ compile/run with full error capture
def execute_cpp(code):
    """Compile the C++ source with g++ -Ofast and run it, returning its stdout.

    Returns a descriptive error string if compilation or execution fails.
    Both subprocess calls now carry a timeout: previously a non-terminating
    generated program (or a pathological compile) would hang the UI forever.
    """
    write_output(code)

    # Compile step.
    compile_cmd = ["g++", "-Ofast", "-std=c++17", "-o", "optimized", "optimized.cpp"]
    try:
        compile_result = subprocess.run(
            compile_cmd,
            text=True,
            capture_output=True,
            check=False,
            timeout=60  # generous: -Ofast builds can be slow
        )
        if compile_result.returncode != 0:
            return f"C++ compilation failed:\n{compile_result.stderr}"
    except subprocess.TimeoutExpired:
        return "C++ compile error:\ncompilation timed out after 60 seconds"
    except Exception as e:
        return f"C++ compile error:\n{e}"

    # Run step.
    run_cmd = ["./optimized"]
    try:
        run_result = subprocess.run(
            run_cmd,
            text=True,
            capture_output=True,
            check=False,
            timeout=60  # stop runaway generated programs from hanging the UI
        )
        if run_result.returncode != 0:
            return f"C++ runtime error (code {run_result.returncode}):\n{run_result.stderr}"
        return run_result.stdout
    except subprocess.TimeoutExpired:
        return "C++ execution exception:\nprogram timed out after 60 seconds"
    except Exception as e:
        return f"C++ execution exception:\n{e}"

# UI setup

# Default Python snippet preloaded in the UI: a CPU-bound numeric loop with
# timing output, chosen so the C++ port's speedup is easy to demonstrate.
# The text below is a runtime string literal shown to the user, not code.
example_code = """
######-----------------------------------------------#######
# This is just for a preview you can change it as you like #
######-------------------------------------------------#####


import time

def calculate(iterations, param1, param2):
    result = 1.0
    for i in range(1, iterations+1):
        j = i * param1 - param2
        result -= (1/j)
        j = i * param1 + param2
        result += (1/j)
    return result

start_time = time.time()
result = calculate(100_000_000, 4, 1) * 4
end_time = time.time()

print(f"Result: {result:.12f}")
print(f"Execution Time: {(end_time - start_time):.6f} seconds")
"""

# Gradio UI: two code panes (Python in, C++ out), a model picker, and
# buttons wiring the convert/run actions to the functions above.
with gr.Blocks(title="Code Converter",theme=gr.themes.Soft()) as ui:
    gr.Markdown("## 🔁 Python to C++ Converter")
    with gr.Row():
        python = gr.Code(label="Python code:",language='python', value=example_code, lines=12)
        cpp = gr.Code(label="C++ code:",language='cpp', lines=12)
    with gr.Row():
        model = gr.Dropdown(["GPT", "Claude", "DeepSeek"], label="Select model", value="Claude")
    with gr.Row():
        convert = gr.Button("Convert code")
    with gr.Row():
        python_run = gr.Button("Run Python")
        cpp_run = gr.Button("Run C++")
    with gr.Row():
        python_out = gr.TextArea(label="Python result:", elem_classes=["python"])
        cpp_out = gr.TextArea(label="C++ result:", elem_classes=["cpp"])

    # optimize is a generator, so the C++ pane streams as chunks arrive.
    convert.click(optimize, inputs=[python, model], outputs=[cpp])
    python_run.click(execute_python, inputs=[python], outputs=[python_out])
    cpp_run.click(execute_cpp, inputs=[cpp], outputs=[cpp_out])

# share=True creates a public Gradio link — intentional for HF Spaces demos.
ui.launch(share=True)