Spaces:
Running
on
Zero
Running
on
Zero
Lord-Raven
committed on
Commit
·
1fa57db
1
Parent(s):
a05e5e2
Messing with configuration.
Browse files
app.py
CHANGED
|
@@ -31,16 +31,16 @@ print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
|
|
| 31 |
# model_name = "MoritzLaurer/deberta-v3-base-zeroshot-v2.0"
|
| 32 |
# file_name = "onnx/model.onnx"
|
| 33 |
# tokenizer_name = "MoritzLaurer/deberta-v3-base-zeroshot-v2.0"
|
| 34 |
-
|
| 35 |
-
|
| 36 |
|
| 37 |
-
model = ORTModelForSequenceClassification.from_pretrained(
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
)
|
| 42 |
|
| 43 |
-
tokenizer = AutoTokenizer.from_pretrained("
|
| 44 |
|
| 45 |
classifier = pipeline(task="zero-shot-classification", model=model, tokenizer=tokenizer, device="cuda:0")
|
| 46 |
|
|
@@ -54,6 +54,7 @@ def classify(data_string, request: gradio.Request):
|
|
| 54 |
# else:
|
| 55 |
return zero_shot_classification(data)
|
| 56 |
|
|
|
|
| 57 |
def zero_shot_classification(data):
|
| 58 |
results = classifier(data['sequence'], candidate_labels=data['candidate_labels'], hypothesis_template=data['hypothesis_template'], multi_label=data['multi_label'])
|
| 59 |
response_string = json.dumps(results)
|
|
|
|
| 31 |
# model_name = "MoritzLaurer/deberta-v3-base-zeroshot-v2.0"
|
| 32 |
# file_name = "onnx/model.onnx"
|
| 33 |
# tokenizer_name = "MoritzLaurer/deberta-v3-base-zeroshot-v2.0"
|
| 34 |
+
model = ORTModelForSequenceClassification.from_pretrained(model_name, file_name=file_name, export=True, provider="CUDAExecutionProvider")
|
| 35 |
+
tokenizer = AutoTokenizer.from_pretrained(tokenizer_name, model_max_length=512)
|
| 36 |
|
| 37 |
+
# model = ORTModelForSequenceClassification.from_pretrained(
|
| 38 |
+
# "distilbert-base-uncased-finetuned-sst-2-english",
|
| 39 |
+
# export=True,
|
| 40 |
+
# provider="CUDAExecutionProvider",
|
| 41 |
+
# )
|
| 42 |
|
| 43 |
+
# tokenizer = AutoTokenizer.from_pretrained("distilbert-base-uncased-finetuned-sst-2-english")
|
| 44 |
|
| 45 |
classifier = pipeline(task="zero-shot-classification", model=model, tokenizer=tokenizer, device="cuda:0")
|
| 46 |
|
|
|
|
| 54 |
# else:
|
| 55 |
return zero_shot_classification(data)
|
| 56 |
|
| 57 |
+
@spaces.GPU()
|
| 58 |
def zero_shot_classification(data):
|
| 59 |
results = classifier(data['sequence'], candidate_labels=data['candidate_labels'], hypothesis_template=data['hypothesis_template'], multi_label=data['multi_label'])
|
| 60 |
response_string = json.dumps(results)
|