| import os |
| import gradio as gr |
| from huggingface_hub import hf_hub_download |
| import importlib.util |
|
|
| |
# --- Configuration (all via environment variables) -----------------------
PRIVATE_DATASET_ID = os.getenv("PRIVATE_DATASET_ID")  # HF dataset repo that hosts deepv_core.py
HF_TOKEN = os.getenv("HF_TOKEN")  # access token for the private dataset repo
INDEX_SUBDIR = os.getenv("INDEX_SUBDIR", ".")  # NOTE(review): not referenced in this file — presumably consumed by deepv_core; confirm
|
|
| |
# Download the private core module from the Hugging Face Hub and import it
# dynamically. On any failure (missing env vars, network error, bad repo id)
# bind a stub `run_generation` with the same output arity so the Gradio
# wiring below still works and the error is surfaced in the UI instead of
# the script dying with a NameError at event-binding time.
try:
    AGENT_CODE_PATH = hf_hub_download(
        repo_id=PRIVATE_DATASET_ID,
        filename="deepv_core.py",
        repo_type="dataset",
        token=HF_TOKEN,
    )
    # Standard importlib recipe for importing a module from an explicit path.
    # Named `module_spec` (not `spec`) so the design-spec Textbox defined
    # later in the UI does not shadow it.
    module_spec = importlib.util.spec_from_file_location("deepv_core_module", AGENT_CODE_PATH)
    agent_module = importlib.util.module_from_spec(module_spec)
    module_spec.loader.exec_module(agent_module)

    run_generation = agent_module.run_generation

except Exception as e:
    # The `as e` binding is unbound once this handler exits (PEP 3110), so
    # keep a module-level reference for later reporting.
    LOAD_ERROR = e

    def run_generation(*args):
        """Fallback handler: report the load failure in the UI.

        Returns the same (code, retriever summary, retrieved docs) triple
        expected by the output components.
        """
        return f"// ERROR: Failed to load core agent code. Details: {LOAD_ERROR}", "", []
|
|
| |
# --- UI definition -------------------------------------------------------
with gr.Blocks(title="DeepV for RTL (Model-Agnostic)") as demo:
    gr.Markdown("## DeepV for RTL Code Generation β Model-Agnostic (Bring Your Own API Key)")

    with gr.Row():
        # Left column: model/API-key selection, spec input, generation knobs.
        with gr.Column(scale=2):
            with gr.Row():
                model_choice = gr.Dropdown(
                    choices=["gpt-4o", "gpt-4o-mini", "gpt-4.1", "gpt-5-chat-latest"],
                    value="gpt-4o",
                    label="Model"
                )
                api_key = gr.Textbox(label="OpenAI API Key", type="password", placeholder="sk-...")

            gr.Markdown(
                """
                **Note:** Your API key is used for the current session only and is not saved or stored.
                """
            )
            # Renamed from `spec` so it cannot shadow the importlib module
            # spec used during the dynamic import above.
            spec_input = gr.Textbox(
                label="Design Specification (natural language or I/O contract)",
                placeholder="e.g., 8-bit UART transmitter with baud rate generator ...",
                lines=10
            )
            with gr.Row():
                use_rag = gr.Checkbox(value=True, label="Use Retrieval (RAG)")
                top_k = gr.Slider(1, 10, value=3, step=1, label="Top-K retrieved examples")

            with gr.Accordion("Generation Settings", open=False):
                temperature = gr.Slider(0.0, 1.5, value=0.2, step=0.05, label="Temperature")
                top_p = gr.Slider(0.1, 1.0, value=0.9, step=0.05, label="Top-p")
                max_new_tokens = gr.Slider(128, 4096, value=768, step=64, label="Max tokens")

            run_btn = gr.Button("Generate Verilog", variant="primary")

        # Right column: generated Verilog plus retrieval diagnostics.
        with gr.Column(scale=3):
            gr.Markdown("**Output**")
            with gr.Group():
                out_code = gr.Textbox(
                    label="Generated Verilog",
                    lines=28,
                    interactive=False,
                    placeholder="// Your Verilog code will appear here",
                    elem_id="verilog-output"
                )
                copy_button = gr.Button("π", variant="secondary", elem_id="copy-button")

            with gr.Tab("Retrieved Items (names + scores)"):
                retrieved_list = gr.Textbox(
                    label="Retriever summary",
                    lines=8,
                    interactive=False
                )
            with gr.Tab("Preview of Retrieved Context (raw)"):
                retrieved_raw = gr.HighlightedText(label="(first K documents)", combine_adjacent=True)

    # Main event: run_generation must return (code, summary, retrieved docs).
    run_btn.click(
        fn=run_generation,
        inputs=[spec_input, use_rag, top_k, model_choice, api_key, temperature, top_p, max_new_tokens],
        outputs=[out_code, retrieved_list, retrieved_raw]
    )

    # Clipboard copy is purely client-side: with fn=None only the JS runs,
    # receiving the current textbox value. The original routed the value
    # through a Python no-op whose return had no output component, which
    # triggers a Gradio warning and adds a pointless server round-trip.
    copy_button.click(
        fn=None,
        inputs=[out_code],
        outputs=[],
        js="""
        (text) => {
            const el = document.createElement('textarea');
            el.value = text;
            document.body.appendChild(el);
            el.select();
            document.execCommand('copy');
            document.body.removeChild(el);
        }
        """
    )
|
|
# Overlay the copy button on top of the Verilog output textbox.
# NOTE(review): assigning `demo.css` after construction may be ignored by
# newer Gradio releases, which expect `css=` in the gr.Blocks(...) constructor
# — confirm against the pinned Gradio version.
demo.css = """
#verilog-output > label > .label-wrap {
    position: relative;
}

#copy-button {
    position: absolute;
    top: 95px;
    right: 30px;
    z-index: 10;
}
"""
|
|
if __name__ == "__main__":
    # `agent_module` exists in the module namespace only when the dynamic
    # import succeeded (module-level locals() and globals() are the same
    # dict; globals() states the intent).
    if "agent_module" in globals():
        demo.launch()
    else:
        # The original read `e` here, but an `except ... as e` name is
        # unbound once the handler exits (PEP 3110), so this path always
        # raised NameError. Read a recorded error defensively instead.
        load_error = globals().get("LOAD_ERROR", "Unknown initialization error")
        with gr.Blocks() as error_demo:
            gr.Markdown("# Initialization Error")
            gr.Markdown("An error occurred while loading the application code. Please check your configuration.")
            gr.Textbox(label="Error Details", value=str(load_error), lines=5)
        error_demo.launch()