Spaces:
Sleeping
Sleeping
def get_set_up():
    """Print torch/CUDA version info and GPU availability for diagnostics."""
    import torch

    torch_version = ".".join(torch.__version__.split(".")[:2])
    # NOTE: a CPU-only torch build has no "+cuXXX" suffix, so split("+")[-1]
    # falls back to the full version string — acceptable for a debug print.
    cuda_version = torch.__version__.split("+")[-1]
    print("torch: ", torch_version, "; cuda: ", cuda_version)
    print(f'GPU available: {torch.cuda.is_available()}')
    # Bug fix: get_device_capability() raises RuntimeError when no CUDA
    # device is present, so only query it when a GPU is actually available.
    if torch.cuda.is_available():
        print(torch.cuda.get_device_capability())
    # print("detectron2:", detectron2.__version__)
def load_model():
    """Build and return a detectron2 ``DefaultPredictor``.

    Reads the model configuration from ``./configs/test_model_config.yaml``
    and selects CUDA when a GPU is available, otherwise falls back to CPU
    so the app still runs on GPU-less hardware.

    Returns:
        detectron2.engine.DefaultPredictor: predictor ready for inference.
    """
    # Imports kept function-local so merely importing this module does not
    # require detectron2 to be installed.
    import torch
    from detectron2.config import get_cfg
    from detectron2.engine import DefaultPredictor

    cfg = get_cfg()
    # Architecture, weights path, and thresholds all come from the YAML file.
    cfg.merge_from_file("./configs/test_model_config.yaml")
    cfg.MODEL.DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
    return DefaultPredictor(cfg)
if __name__ == '__main__':
    # get_set_up()  # uncomment for environment diagnostics
    load_model()