chuckfinca committed on
Commit
72e7a06
·
1 Parent(s): 7a1199f

harmonizing with template on huggingface to solve errors

Browse files
Files changed (1) hide show
  1. handler.py +3 -3
handler.py CHANGED
@@ -1,4 +1,4 @@
1
- from typing import Dict
2
  import torch
3
  from transformers import AutoModelForCausalLM, AutoTokenizer
4
 
@@ -8,8 +8,8 @@ class EndpointHandler():
8
  self.model = AutoModelForCausalLM.from_pretrained("chuckfinca/arithmephi", torch_dtype="auto", trust_remote_code=True)
9
  self.tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-2", trust_remote_code=True)
10
 
11
- def __call__(self, data: Dict[str, str]) -> str:
12
- input = data.get("input", data)
13
  inputs = self.tokenizer(input, return_tensors="pt", return_attention_mask=False)
14
  outputs = self.model.generate(**inputs, max_length=len(inputs[0]) + 8, pad_token_id=self.tokenizer.eos_token_id)
15
  text = self.tokenizer.batch_decode(outputs)[0]
 
1
+ from typing import Dict, List, Any
2
  import torch
3
  from transformers import AutoModelForCausalLM, AutoTokenizer
4
 
 
8
  self.model = AutoModelForCausalLM.from_pretrained("chuckfinca/arithmephi", torch_dtype="auto", trust_remote_code=True)
9
  self.tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-2", trust_remote_code=True)
10
 
11
+ def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
12
+ input = data.get("inputs", data)
13
  inputs = self.tokenizer(input, return_tensors="pt", return_attention_mask=False)
14
  outputs = self.model.generate(**inputs, max_length=len(inputs[0]) + 8, pad_token_id=self.tokenizer.eos_token_id)
15
  text = self.tokenizer.batch_decode(outputs)[0]