import spaces
from transformers import pipeline
import gradio as gr
import torch
import logging
# Configure root logging for the Space; log CUDA status once at startup
# so the Space logs show whether a GPU was actually attached.
logging.basicConfig(level=logging.INFO)
logging.info(f"CUDA available: {torch.cuda.is_available()}, CUDA version: {torch.version.cuda}")
def decomp_create_prompt(input_data: str) -> str:
    """Wrap stripped decompiled code in the DECAF question/answer prompt template."""
    return (
        "# This is the decompiled code:\n"
        f"{input_data.strip()}"
        "\n# What is the source code?\n"
    )
# Load the DECAF decompilation model; return_full_text=False so only the
# generated continuation (not the prompt) comes back from the pipeline.
pipe = pipeline(model="ejschwartz/decaf-v1-22b-4bit", return_full_text=False)
# NOTE(review): moving the model to CUDA at module import time — on ZeroGPU
# Spaces the GPU is normally only attached inside @spaces.GPU calls; confirm
# this works outside the decorated function.
pipe.model.to("cuda")
@spaces.GPU(duration=180)
def generate(text):
    """Decompile-to-source generation: wrap `text` in the DECAF prompt and
    return the model's generated source code.

    Args:
        text: decompiled code pasted by the user.

    Returns:
        The generated source-code string (prompt excluded, since the
        pipeline was built with return_full_text=False).
    """
    # Use the module's logging setup (configured at startup) instead of
    # print, with lazy %-args so formatting is skipped if INFO is disabled.
    logging.info("Generating text... %s", text)
    prompt = decomp_create_prompt(text)
    return pipe(prompt, max_new_tokens=2000)[0]['generated_text']
# Minimal Gradio UI: one text box in (decompiled code), one text box out
# (generated source). launch() blocks and serves the app.
demo = gr.Interface(fn=generate, inputs="text", outputs="text")
demo.launch()
|