chuckfinca committed on
Commit
09434f4
·
1 Parent(s): 72e7a06

Adds device_map to force GPU

Browse files
Files changed (1) hide show
  1. handler.py +1 -1
handler.py CHANGED
@@ -5,7 +5,7 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
5
  class EndpointHandler():
6
  def __init__(self, path=""):
7
  torch.set_default_device("cuda")
8
- self.model = AutoModelForCausalLM.from_pretrained("chuckfinca/arithmephi", torch_dtype="auto", trust_remote_code=True)
9
  self.tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-2", trust_remote_code=True)
10
 
11
  def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
 
5
  class EndpointHandler():
6
  def __init__(self, path=""):
7
  torch.set_default_device("cuda")
8
+ self.model = AutoModelForCausalLM.from_pretrained("chuckfinca/arithmephi", torch_dtype="auto", trust_remote_code=True, device_map = 'cuda')
9
  self.tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-2", trust_remote_code=True)
10
 
11
  def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]: