Commit ·
3dfb844
1
Parent(s): cca2aed
fix: handler.py
Browse files — handler.py (+2 lines, −1 line)
handler.py
CHANGED
|
@@ -2,11 +2,12 @@ from typing import Dict, List, Any
|
|
| 2 |
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
|
| 3 |
|
| 4 |
|
| 5 |
-
class EndpointHandler():
    """Inference-endpoint handler wrapping a causal LM in a text-generation pipeline.

    Fix: the original `class` statement had no identifier, which is a
    SyntaxError; the name is restored to `EndpointHandler`, the name the
    endpoint runtime looks up by convention.
    """

    def __init__(self, path=""):
        """Load the model from *path* and build the generation pipeline.

        Parameters
        ----------
        path : str
            Local directory (or hub id) of the model checkpoint.
        """
        # NOTE(review): trust_remote_code=True executes code shipped with the
        # checkpoint — *path* must point to a trusted repository.
        self.model = AutoModelForCausalLM.from_pretrained(path, device_map="cuda", torch_dtype="auto", trust_remote_code=True)
        # Tokenizer is pinned to Phi-3-mini-128k-instruct; presumably *path*
        # holds a Phi-3 checkpoint — TODO confirm they stay in sync.
        self.tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-128k-instruct")
        self.pipe = pipeline("text-generation", model=self.model, tokenizer=self.tokenizer)
|
|
|
|
| 10 |
def __call__(self, data:Dict[str, Any]) :
|
| 11 |
messages = data.pop("messages", None)
|
| 12 |
generation_args = data.pop("generation_args", None)
|
|
|
|
| 2 |
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
|
| 3 |
|
| 4 |
|
| 5 |
+
class EndpointHandler():
    """Serves a causal language model through a `transformers` text-generation pipeline."""

    def __init__(self, path=""):
        """Load the checkpoint at *path* and assemble the pipeline used for inference."""
        # Build model and tokenizer first, then wire both into the pipeline.
        model = AutoModelForCausalLM.from_pretrained(
            path,
            device_map="cuda",
            torch_dtype="auto",
            trust_remote_code=True,
        )
        tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-128k-instruct")
        self.model = model
        self.tokenizer = tokenizer
        self.pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
|
| 10 |
+
|
| 11 |
def __call__(self, data:Dict[str, Any]) :
|
| 12 |
messages = data.pop("messages", None)
|
| 13 |
generation_args = data.pop("generation_args", None)
|