DisgustingOzil committed on
Commit
844d72c
·
verified ·
1 Parent(s): 14fa938

update handler.py

Browse files
Files changed (1) hide show
  1. handler.py +12 -16
handler.py CHANGED
@@ -1,24 +1,20 @@
1
  from typing import Dict, List, Any
2
  from transformers import AutoTokenizer, AutoModelForCausalLM
3
- from accelerate import Accelerator
4
 
5
-
6
-
7
class EndpointHandler:
    """Inference-endpoint wrapper around the Academic-MCQ-Generator causal LM.

    Loads the tokenizer and model once at construction time and serves
    text-generation requests through ``__call__``.
    """

    def __init__(self, path=""):
        """Load the tokenizer and (4-bit quantized) model from the Hub.

        Args:
            path: Unused; kept for compatibility with the hosting runtime,
                which passes the local model directory.
        """
        model_id = "DisgustingOzil/Academic-MCQ-Generator"
        load_in_4bit = True
        # BUG FIX: these were plain locals in the original, so __call__
        # crashed with AttributeError — they must be stored on the instance.
        # (The unused ``max_seq_length`` local was dropped.)
        self.tokenizer = AutoTokenizer.from_pretrained(model_id)
        self.model = AutoModelForCausalLM.from_pretrained(
            model_id, load_in_4bit=load_in_4bit
        )

    def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Generate text for one request payload.

        Args:
            data: Request dict; the prompt is read from ``data["input_text"]``.
                When the key is absent the whole payload is passed through
                unchanged (NOTE(review): falling back to the dict itself looks
                dubious — confirm against the caller's payload schema).

        Returns:
            A one-element list ``[{"generated_text": ...}]``.
            BUG FIX: the original returned a bare ``str``, contradicting the
            declared ``List[Dict[str, Any]]`` return annotation.
        """
        input_text = data.pop("input_text", data)
        inputs = self.tokenizer(input_text, return_tensors="pt")
        outputs = self.model.generate(
            **inputs,
            max_length=1000,
            num_return_sequences=1,
        )
        output_text = self.tokenizer.decode(outputs[0], skip_special_tokens=True)
        return [{"generated_text": output_text}]
 
1
  from typing import Dict, List, Any
2
  from transformers import AutoTokenizer, AutoModelForCausalLM
 
3
 
4
class EndpointHandler:
    """Serves the Academic-MCQ-Generator model behind an inference endpoint.

    The tokenizer/model pair is loaded once in ``__init__``; each call then
    tokenizes the prompt, runs generation, and decodes the first sequence.
    """

    def __init__(self, path=""):
        # The hosting runtime supplies ``path``; this handler pins a Hub repo
        # id instead and loads the weights 4-bit quantized.
        repo = "DisgustingOzil/Academic-MCQ-Generator"
        self.tokenizer = AutoTokenizer.from_pretrained(repo)
        self.model = AutoModelForCausalLM.from_pretrained(repo, load_in_4bit=True)

    def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
        # Pull the prompt out of the payload; fall back to the payload itself
        # when the key is missing (behavior preserved from the original —
        # NOTE(review): confirm the fallback is intentional).
        prompt = data.pop("input_text", data)
        encoded = self.tokenizer(prompt, return_tensors="pt")
        generated = self.model.generate(
            **encoded,
            max_length=1000,
            num_return_sequences=1,
        )
        text = self.tokenizer.decode(generated[0], skip_special_tokens=True)
        return [{"generated_text": text}]