Spaces:
Paused
Paused
Update app.py
Browse files
app.py
CHANGED
|
@@ -6,16 +6,14 @@ import torch
|
|
| 6 |
import gradio as gr

# Define the Gradio interface: title and markdown description shown in the UI.
title = "Welcome to Tonic's 🐋🐳Orca-2-13B!"
description = "You can use [🐋🐳microsoft/Orca-2-13b](https://huggingface.co/microsoft/Orca-2-13b) Or clone this space to use it locally or on huggingface! [Join me on Discord to build together](https://discord.gg/VqTxc76K3u)."

# Load the model and tokenizer.
# NOTE(review): AutoModelForCausalLM / AutoTokenizer are presumably imported
# from `transformers` above line 6 (outside this view) — confirm.
model_name = "microsoft/Orca-2-13b"
# FIX: the original call was missing its closing parenthesis (SyntaxError).
model = AutoModelForCausalLM.from_pretrained(model_name)
# use_fast=False selects the slow (Python) tokenizer implementation.
tokenizer = AutoTokenizer.from_pretrained(model_name, use_fast=False)
|
| 18 |
-
|
| 19 |
|
| 20 |
class OrcaChatBot:
|
| 21 |
def __init__(self, model, tokenizer, system_message="You are Orca, an AI language model created by Microsoft. You are a cautious assistant. You carefully follow instructions. You are helpful and harmless and you follow ethical guidelines and promote positive behavior."):
|
|
|
|
| 6 |
import gradio as gr

# UI text for the Gradio interface.
title = "Welcome to Tonic's 🐋🐳Orca-2-13B!"
description = "You can use [🐋🐳microsoft/Orca-2-13b](https://huggingface.co/microsoft/Orca-2-13b) Or clone this space to use it locally or on huggingface! [Join me on Discord to build together](https://discord.gg/VqTxc76K3u)."

# Select the first GPU when available, otherwise fall back to CPU.
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

# Load the model and tokenizer, then move the model weights to the device.
# NOTE(review): AutoModelForCausalLM / AutoTokenizer are presumably imported
# from `transformers` above line 6 (outside this view) — confirm.
model_name = "microsoft/Orca-2-13b"
model = AutoModelForCausalLM.from_pretrained(model_name)
# use_fast=False selects the slow (Python) tokenizer implementation.
tokenizer = AutoTokenizer.from_pretrained(model_name, use_fast=False)
model.to(device)
|
| 17 |
|
| 18 |
class OrcaChatBot:
|
| 19 |
def __init__(self, model, tokenizer, system_message="You are Orca, an AI language model created by Microsoft. You are a cautious assistant. You carefully follow instructions. You are helpful and harmless and you follow ethical guidelines and promote positive behavior."):
|