# NOTE(review): removed a copy/paste artifact that preceded this file's code —
# a file-viewer banner, repeated git blob hashes (5caf9d8 / b2ec955), and a
# bare line-number gutter (1–81). None of it was Python; it made the file
# unparseable. The actual program begins below.
import gradio as gr
import os
import requests

def convert_to_cpp(python_code):
    """Convert a Python snippet into modern C++ using the Groq chat API.

    Args:
        python_code: The Python source code to translate.

    Returns:
        The generated C++ code as a string, or a ``//``-prefixed error
        message (so failures render harmlessly in the C++ output pane).
    """
    # 1. Get the API Key securely
    api_key = os.getenv("GROQ_API_KEY")

    if not api_key:
        return "// Error: API Key is missing. Please check your Settings!"

    if not python_code.strip():
        return "// Error: Please enter some Python code first."

    # 2. Setup the "Senior Developer" Persona
    url = "https://api.groq.com/openai/v1/chat/completions"
    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json"
    }

    # We instruct the AI to ONLY return code, no chatting.
    system_prompt = """You are an expert C++20 developer. 
    Your task is to convert Python code into highly efficient, modern C++.
    - Use standard libraries (std::vector, std::string) where possible.
    - Include necessary headers (#include <iostream>, etc).
    - Add brief comments explaining complex translations.
    - Do NOT output markdown ticks (```cpp). Just output the raw code.
    """

    data = {
        # --- THE FIX IS HERE: UPDATED MODEL NAME ---
        "model": "llama-3.1-8b-instant", 
        # -------------------------------------------
        "messages": [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": python_code}
        ],
        # Low temperature keeps the translation deterministic and faithful.
        "temperature": 0.2 
    }

    try:
        # BUGFIX: requests has NO default timeout — without one, a stalled
        # API call would hang the Gradio worker indefinitely.
        response = requests.post(url, json=data, headers=headers, timeout=60)

        if response.status_code != 200:
            return f"// API Error: {response.text}"

        # Any malformed/missing field here raises and is caught below.
        payload = response.json()
        return payload['choices'][0]['message']['content']

    except Exception as e:
        # Boundary handler: surface network/parse failures in the output
        # pane instead of crashing the UI callback.
        return f"// Connection Error: {str(e)}"

# 3. Assemble the two-pane converter interface.
with gr.Blocks(theme=gr.themes.Soft()) as app:
    gr.Markdown("# 🐍 Python to ⚡ C++ Converter")
    gr.Markdown("Transform slow Python scripts into high-performance C++ code using Llama 3.1.")

    with gr.Row():
        # Input pane: Python source editor plus the trigger button.
        with gr.Column():
            python_editor = gr.Code(
                label="Paste Python Code Here",
                language="python",
                lines=15,
            )
            run_button = gr.Button("Convert to C++ 🚀", variant="primary")

        # Output pane: read-only C++ result (copyable, not editable).
        with gr.Column():
            cpp_viewer = gr.Code(
                label="C++ Result",
                language="cpp",
                lines=15,
                interactive=False,
            )

    # Wire the button to the converter function.
    run_button.click(fn=convert_to_cpp, inputs=python_editor, outputs=cpp_viewer)

# Start the Gradio server only when executed as a script (not on import).
if __name__ == "__main__":
    app.launch()