# python_to_Cpp_ / app.py
# (Hugging Face Spaces page residue — "SHAH-MEER's picture / Update app.py /
# 714021c verified" — commented out so the file parses as valid Python.)
import contextlib
import io
import os
import subprocess
import sys

import anthropic
import google.generativeai
import gradio as gr
from dotenv import load_dotenv
from openai import OpenAI
load_dotenv()

# API keys come from the environment (HF Spaces Secrets or the .env loaded
# above). The previous code did `os.environ['X'] = os.environ.get('X')`,
# which is a no-op when the key exists and raises TypeError when it is
# missing (environment values must be str, not None). The OpenAI and
# Anthropic clients read their keys from the environment themselves, so we
# only need to fetch the DeepSeek key explicitly.
deepseek_api_key = os.environ.get('DEEPSEEK_API_KEY')

openai = OpenAI()
claude = anthropic.Anthropic()
# DeepSeek exposes an OpenAI-compatible endpoint, so we reuse the OpenAI SDK.
deepseek = OpenAI(api_key=deepseek_api_key, base_url="https://api.deepseek.com/v1")

# Model identifiers used by the streaming functions below.
OPENAI_MODEL = "gpt-4o"
CLAUDE_MODEL = "claude-3-5-sonnet-20240620"
DEEPSEEK_MODEL = 'deepseek-coder'
# Shared system prompt: demand pure C++ output that reproduces the Python
# program's behavior exactly, tuned for an M1 Mac.
system_message = " ".join([
    "You are an assistant that reimplements Python code in high performance C++ for an M1 Mac.",
    "Respond only with C++ code; use comments sparingly.",
    "The C++ response needs to produce an identical output in the fastest possible time.",
])
# Build prompt
def user_prompt_for(python_code):
    """Build the user-role prompt: conversion instructions followed by the code."""
    instructions = (
        "Rewrite this Python code in C++ with the fastest possible implementation that produces identical output. "
        "Respond only with C++ code; do not explain your work other than minimal comments. "
        "Pay attention to number types to prevent overflow and #include all necessary headers.\n\n"
    )
    return instructions + python_code
def messages_for(python_code):
    """Assemble the two-message chat payload (system prompt + user prompt)."""
    return [
        dict(role="system", content=system_message),
        dict(role="user", content=user_prompt_for(python_code)),
    ]
# Write C++ code to disk
def write_output(cpp_code):
    """Strip Markdown code fences from `cpp_code` and save it as optimized.cpp."""
    cleaned = cpp_code.replace("```cpp", "")
    cleaned = cleaned.replace("```", "")
    with open("optimized.cpp", "w") as outfile:
        outfile.write(cleaned)
# Streaming from models
def stream_gpt(python_code):
    """Stream a C++ translation from the OpenAI API, yielding the accumulated
    reply (with Markdown fences removed) after every chunk."""
    accumulated = ""
    response_stream = openai.chat.completions.create(
        model=OPENAI_MODEL,
        messages=messages_for(python_code),
        stream=True,
    )
    for chunk in response_stream:
        delta = chunk.choices[0].delta.content
        if delta:
            accumulated += delta
        yield accumulated.replace('```cpp\n', '').replace('```', '')
def stream_claude(python_code):
    """Stream a C++ translation from Anthropic's Claude, yielding the
    accumulated reply (with Markdown fences removed) as text arrives."""
    accumulated = ""
    with claude.messages.stream(
        model=CLAUDE_MODEL,
        max_tokens=2000,
        system=system_message,
        messages=[{"role": "user", "content": user_prompt_for(python_code)}],
    ) as active_stream:
        for piece in active_stream.text_stream:
            accumulated += piece
            yield accumulated.replace('```cpp\n', '').replace('```', '')
def stream_deepseek(python_code):
    """Stream a C++ translation from DeepSeek (OpenAI-compatible API),
    yielding the accumulated reply with Markdown fences removed."""
    accumulated = ""
    response_stream = deepseek.chat.completions.create(
        model=DEEPSEEK_MODEL,
        messages=messages_for(python_code),
        stream=True,
    )
    for chunk in response_stream:
        delta = chunk.choices[0].delta.content
        if delta:
            accumulated += delta
        yield accumulated.replace('```cpp\n', '').replace('```', '')
# Main dispatcher
def optimize(python_code, model):
    """Delegate to the streaming generator for the selected model.

    Yields progressively longer C++ strings. Raises ValueError (on first
    iteration, since this is a generator) for an unrecognized model name.
    """
    if model == "GPT":
        yield from stream_gpt(python_code)
    elif model == "Claude":
        yield from stream_claude(python_code)
    elif model == "DeepSeek":
        yield from stream_deepseek(python_code)
    else:
        raise ValueError(f"Unknown model: {model}")
# Safe Python execution
def execute_python(code):
    """Execute `code` and return whatever it printed to stdout.

    On any exception, returns a "Python error:" message instead. Uses
    contextlib.redirect_stdout rather than manually swapping sys.stdout so
    the real stdout is restored even on unexpected exits, without mutating
    global state by hand.

    SECURITY NOTE: exec() of user-supplied code is inherently unsafe; this
    is acceptable only because the app deliberately runs user code.
    """
    output = io.StringIO()
    try:
        with contextlib.redirect_stdout(output):
            local_scope = {}
            exec(code, local_scope, local_scope)
    except Exception as e:
        return f"Python error:\n{e}"
    return output.getvalue()
# Improved C++ compile/run with full error capture
def execute_cpp(code):
    """Compile `code` with g++ and run the binary, returning its stdout.

    Any failure (compile error, non-zero exit, timeout, missing compiler)
    is returned as a human-readable error string rather than raised.

    Fix: the original subprocess.run calls had no timeout, so a
    model-generated infinite loop (or a hung compile) would block the
    Gradio handler forever. Both steps are now bounded.
    """
    write_output(code)
    # Compile step (timeout: generous bound for large translation units).
    compile_cmd = ["g++", "-Ofast", "-std=c++17", "-o", "optimized", "optimized.cpp"]
    try:
        compile_result = subprocess.run(
            compile_cmd,
            text=True,
            capture_output=True,
            check=False,
            timeout=120,
        )
        if compile_result.returncode != 0:
            return f"C++ compilation failed:\n{compile_result.stderr}"
    except subprocess.TimeoutExpired:
        return "C++ compilation timed out after 120 seconds."
    except Exception as e:
        return f"C++ compile error:\n{e}"
    # Run step (timeout: stop runaway generated programs).
    run_cmd = ["./optimized"]
    try:
        run_result = subprocess.run(
            run_cmd,
            text=True,
            capture_output=True,
            check=False,
            timeout=60,
        )
        if run_result.returncode != 0:
            return f"C++ runtime error (code {run_result.returncode}):\n{run_result.stderr}"
        return run_result.stdout
    except subprocess.TimeoutExpired:
        return "C++ execution timed out after 60 seconds."
    except Exception as e:
        return f"C++ execution exception:\n{e}"
# UI setup
example_code = """
######-----------------------------------------------#######
# This is just for a preview you can change it as you like #
######-------------------------------------------------#####
import time
def calculate(iterations, param1, param2):
result = 1.0
for i in range(1, iterations+1):
j = i * param1 - param2
result -= (1/j)
j = i * param1 + param2
result += (1/j)
return result
start_time = time.time()
result = calculate(100_000_000, 4, 1) * 4
end_time = time.time()
print(f"Result: {result:.12f}")
print(f"Execution Time: {(end_time - start_time):.6f} seconds")
"""
# ---- Gradio UI: side-by-side code panes, model picker, and run buttons ----
# Statement order inside gr.Blocks defines the on-screen layout.
with gr.Blocks(title="Code Converter", theme=gr.themes.Soft()) as ui:
    gr.Markdown("## 🔁 Python to C++ Converter")
    with gr.Row():
        # Left pane: editable Python source; right pane: generated C++.
        python = gr.Code(label="Python code:", language='python', value=example_code, lines=12)
        cpp = gr.Code(label="C++ code:", language='cpp', lines=12)
    with gr.Row():
        model = gr.Dropdown(["GPT", "Claude", "DeepSeek"], label="Select model", value="Claude")
    with gr.Row():
        convert = gr.Button("Convert code")
    with gr.Row():
        python_run = gr.Button("Run Python")
        cpp_run = gr.Button("Run C++")
    with gr.Row():
        python_out = gr.TextArea(label="Python result:", elem_classes=["python"])
        cpp_out = gr.TextArea(label="C++ result:", elem_classes=["cpp"])

    # optimize is a generator, so the C++ pane updates as tokens stream in.
    convert.click(optimize, inputs=[python, model], outputs=[cpp])
    python_run.click(execute_python, inputs=[python], outputs=[python_out])
    cpp_run.click(execute_cpp, inputs=[cpp], outputs=[cpp_out])

# share=True publishes a temporary public link (standard for HF Spaces demos).
ui.launch(share=True)