# Hugging Face Spaces page header (status: Sleeping) — scrape residue kept as a comment.
# Module setup: imports and logging configuration for the decompilation demo.
import spaces  # Hugging Face Spaces helper (provides the @spaces.GPU decorator)
from transformers import pipeline
import gradio as gr
import torch
import logging

logging.basicConfig(level=logging.INFO)
# Record GPU availability at startup to aid debugging on Spaces hardware.
# Lazy %-style args avoid formatting work when the level is disabled.
logging.info(
    "CUDA available: %s, CUDA version: %s",
    torch.cuda.is_available(),
    torch.version.cuda,
)
def decomp_create_prompt(input_data: str) -> str:
    """Wrap decompiled code in the prompt template expected by the decaf model.

    Args:
        input_data: Decompiled code; surrounding whitespace is stripped.

    Returns:
        The full prompt: a header comment line, the stripped code, and the
        "What is the source code?" question line.
    """
    before = "# This is the decompiled code:\n"
    after = "\n# What is the source code?\n"
    return before + input_data.strip() + after
# Load the 4-bit quantized decompilation model once at startup.
# return_full_text=False makes the pipeline return only newly generated text,
# not the prompt that was fed in.
pipe = pipeline(model="ejschwartz/decaf-v1-22b-4bit", return_full_text=False)
# NOTE(review): `spaces` is imported but never used. On a ZeroGPU Space, GPU
# work normally happens inside a @spaces.GPU-decorated function; moving the
# model to CUDA at import time assumes a persistent GPU is attached — confirm
# against the Space's hardware configuration.
pipe.model.to("cuda")
def generate(text):
    """Run the decompilation model on user-supplied decompiled code.

    Args:
        text: Decompiled code pasted by the user.

    Returns:
        The model's generated source code only (the prompt is excluded
        because the pipeline was built with return_full_text=False).
    """
    # Use the module's configured logging rather than a bare print,
    # consistent with the startup CUDA log line.
    logging.info("Generating text... %s", text)
    prompt = decomp_create_prompt(text)
    return pipe(prompt, max_new_tokens=2000)[0]['generated_text']
# Wire the generator into a minimal text-in/text-out Gradio UI and serve it.
demo = gr.Interface(fn=generate, inputs="text", outputs="text")
demo.launch()