Spaces:
Sleeping
Sleeping
spaces zero gpu
Browse files
app.py
CHANGED
|
@@ -2,6 +2,7 @@ import gradio as gr
|
|
| 2 |
from huggingface_hub import InferenceClient
|
| 3 |
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
|
| 4 |
import torch
|
|
|
|
| 5 |
|
| 6 |
# ─── set this to the exact name of your HF repo
|
| 7 |
HF_MODEL_ID = "rieon/DeepCoder-14B-Preview-Suger"
|
|
@@ -49,6 +50,7 @@ model.eval()
|
|
| 49 |
# generated += chunk.generated_text
|
| 50 |
# yield generated
|
| 51 |
|
|
|
|
| 52 |
def respond(
|
| 53 |
message: str,
|
| 54 |
history: list[dict],
|
|
|
|
| 2 |
from huggingface_hub import InferenceClient
|
| 3 |
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
|
| 4 |
import torch
|
| 5 |
+
import spaces
|
| 6 |
|
| 7 |
# ─── set this to the exact name of your HF repo
|
| 8 |
HF_MODEL_ID = "rieon/DeepCoder-14B-Preview-Suger"
|
|
|
|
| 50 |
# generated += chunk.generated_text
|
| 51 |
# yield generated
|
| 52 |
|
| 53 |
+
@spaces.GPU
|
| 54 |
def respond(
|
| 55 |
message: str,
|
| 56 |
history: list[dict],
|