| import subprocess |
| import gradio as gr |
|
|
| |
# Paths to the local llama.cpp CLI binary and the fine-tuned GGUF model it loads.
BINARY_PATH = "./bin/llama-cli"
MODEL_PATH = "./qwen0.5-finetuned.gguf"


# Prompt template sent to the model; the `{}` placeholder is filled with the
# raw git diff via str.format() in generate_commit().
commit_prompt = """Generate a meaningful commit message explaining all the changes in the provided Git diff.

### Git Diff:
{}

### Commit Message:
"""
|
|
| |
# Example diff pre-filled into the Gradio textbox so users can try the app
# without pasting their own `git diff` output first.
git_diff_example = """
diff --git a/index.html b/index.html
index 89abcde..f123456 100644
--- a/index.html
+++ b/index.html
@@ -5,16 +5,6 @@ <body>
  <h1>Welcome to My Page</h1>

- <table border="1">
- <tr>
- <th>Name</th>
- <th>Age</th>
- </tr>
- <tr>
- <td>John Doe</td>
- <td>30</td>
- </tr>
- </table>

+ <p>This is a newly added paragraph replacing the table.</p>
</body>
</html>
"""
|
|
def generate_commit(git_diff: str, max_tokens: int = 64, timeout: float = 120.0) -> str:
    """Generate a commit message for *git_diff* via the llama-cli binary.

    Args:
        git_diff: Raw ``git diff`` output to summarize.
        max_tokens: Maximum number of tokens to generate (passed as ``-n``).
            Gradio sliders may deliver floats, so the value is coerced to int.
        timeout: Seconds to wait for the binary before aborting; without this
            a wedged model run would hang the UI indefinitely.

    Returns:
        The model's stdout (stripped) on success, otherwise a human-readable
        error string (the UI displays return values rather than exceptions).
    """
    if not git_diff.strip():
        return "Please provide a git diff to summarize."

    prompt_text = commit_prompt.format(git_diff)

    cmd = [
        BINARY_PATH,
        "-m", MODEL_PATH,
        "-p", prompt_text,
        # int() guards against a float slider value: llama-cli rejects "-n 64.0".
        "-n", str(int(max_tokens)),
    ]

    try:
        # List argv with shell=False (the default) keeps the diff text from
        # being shell-interpreted.
        result = subprocess.run(
            cmd, capture_output=True, text=True, check=True, timeout=timeout
        )
    except FileNotFoundError:
        # Missing/unbuilt binary previously crashed instead of reporting.
        return f"Error: llama-cli binary not found at {BINARY_PATH}"
    except subprocess.TimeoutExpired:
        return f"Error: generation timed out after {timeout} seconds."
    except subprocess.CalledProcessError as e:
        return f"Error running binary: {e}\n{e.stderr}"
    return result.stdout.strip()
|
|
if __name__ == "__main__":
    # Build the web UI: a diff textbox plus a token-budget slider in,
    # a single commit-message textbox out.
    diff_input = gr.Textbox(lines=30, label="Git Diff", value=git_diff_example)
    token_slider = gr.Slider(1, 2048, value=64, step=1, label="max_tokens")
    commit_output = gr.Textbox(label="Commit Message", lines=8)

    app = gr.Interface(
        fn=generate_commit,
        inputs=[diff_input, token_slider],
        outputs=commit_output,
        title="Commit Message Generator",
        description="Paste a git diff and generate a concise commit message using the GGUF model via llama-cli binary.",
        allow_flagging="never",
    )
    # Local-only serving; set share=True to expose a public tunnel.
    app.launch(share=False)
|
|