Commit ·
cca2aed
1
Parent(s): c2db7f1
added handler.py
Browse files- handler.py +23 -0
handler.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Dict, List, Any
|
| 2 |
+
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
class PhiHandler():
    """Inference handler wrapping a Phi-3 causal-LM text-generation pipeline.

    Loads the model weights from ``path`` and exposes ``__call__`` for
    request payloads of the form
    ``{"messages": ..., "generation_args": {...}}``.
    """

    def __init__(self, path=""):
        # NOTE(review): device_map="cuda" assumes a CUDA device is present —
        # confirm this handler only runs on GPU hosts.
        self.model = AutoModelForCausalLM.from_pretrained(
            path, device_map="cuda", torch_dtype="auto", trust_remote_code=True
        )
        # Tokenizer is pinned to the hub repo rather than loaded from `path`;
        # presumably `path` contains the same Phi-3 model — verify.
        self.tokenizer = AutoTokenizer.from_pretrained(
            "microsoft/Phi-3-mini-128k-instruct"
        )
        self.pipe = pipeline(
            "text-generation", model=self.model, tokenizer=self.tokenizer
        )

    def __call__(self, data: Dict[str, Any]):
        """Run one generation request.

        Args:
            data: request payload. ``"messages"`` is forwarded directly to the
                pipeline; optional ``"generation_args"`` supplies kwargs for
                the pipeline call. Both keys are popped, mutating ``data``.

        Returns:
            The ``'generated_text'`` field of the first pipeline result.
        """
        messages = data.pop("messages", None)
        generation_args = data.pop("generation_args", None)

        # Fix: identity check (`is None`) instead of `== None` (PEP 8).
        if generation_args is None:
            # Deterministic defaults: do_sample=False, so temperature is inert.
            generation_args = {
                "max_new_tokens": 500,
                "return_full_text": False,
                "temperature": 0.0,
                "do_sample": False,
            }

        output = self.pipe(messages, **generation_args)
        return output[0]['generated_text']
|