# Hugging Face Space demo app (page-status text "Spaces: Sleeping" removed from paste)
import gradio as gr
from transformers import pipeline
def load_model():
    """Build and return a CPU-bound text-generation pipeline.

    Uses bigcode/santacoder because it is small enough to run on free
    CPU hardware (chosen over larger code models for that reason).
    """
    generator = pipeline(
        task="text-generation",
        model="bigcode/santacoder",  # Smaller than DeepSeek for CPU
        device="cpu",
    )
    return generator
# Load once at import time so every request reuses the same pipeline.
model = load_model()
def generate_code(prompt):
    """Generate a code completion for *prompt* and return it as a string.

    Fix: santacoder is a plain code-completion model with no chat
    template, so the previous ``<|user|>...</s><|assistant|>`` markers
    were fed to the model as literal text and then stripped back out
    with a fragile ``str.split``. The prompt is now passed through
    unchanged and ``return_full_text=False`` asks the pipeline to strip
    the echoed prompt from the output itself.

    Returns:
        The generated text, or an ``"Error: ..."`` string so the Gradio
        UI displays failures instead of crashing the request.
    """
    try:
        response = model(
            prompt,
            max_new_tokens=100,
            temperature=0.7,
            do_sample=True,
            return_full_text=False,  # pipeline strips the prompt for us
        )
        return response[0]['generated_text']
    except Exception as e:
        # UI boundary: surface any failure (download, OOM, bad input)
        # as text rather than a 500 in the Gradio frontend.
        return f"Error: {str(e)}"
# Wire the generator into a single-textbox web UI.
demo = gr.Interface(
    fn=generate_code,
    inputs=gr.Textbox(lines=5, placeholder="Ask a coding question..."),
    outputs=gr.Code(language="python"),
    title="CPU-Friendly Coding Assistant",
    description="This simplified version works on free CPU hardware",
)

# Bind to all interfaces on 7860, the port HF Spaces expects.
demo.launch(server_name="0.0.0.0", server_port=7860)