import torch
import numpy as np
from pytriton.model_config import ModelConfig, Tensor
from pytriton.triton import Triton
# Connect the inference callback with the Triton Inference Server.
# Triton() is a context manager: entering it starts the server resources,
# exiting tears them down cleanly.
with Triton() as triton:
    # Load the model into the Triton Inference Server.
    # NOTE(review): the original source had a bare `triton` expression here —
    # a no-op. This is almost certainly a truncated `triton.bind(...)` call
    # (model_name=..., infer_func=..., inputs=[Tensor(...)],
    # outputs=[Tensor(...)], config=ModelConfig(...)). Restore the full
    # bind() call before this example can actually serve a model — confirm
    # against the upstream PyTriton example this snippet was taken from.
    triton
    # Block and serve inference requests until interrupted.
    triton.serve()