Spaces:
Runtime error
Runtime error
TYH71 committed on
Commit ·
2449b1f
1
Parent(s): 2d64026
qol: linting
Browse files- src/interface/clip.py +2 -1
- src/model/clip.py +14 -12
src/interface/clip.py
CHANGED
|
@@ -1,7 +1,8 @@
|
|
| 1 |
"""CLIP interface module"""
|
| 2 |
|
| 3 |
# libraries
|
| 4 |
-
from typing import
|
|
|
|
| 5 |
from PIL import Image
|
| 6 |
|
| 7 |
# modules
|
|
|
|
| 1 |
"""CLIP interface module"""
|
| 2 |
|
| 3 |
# libraries
|
| 4 |
+
from typing import Dict, List, Union
|
| 5 |
+
|
| 6 |
from PIL import Image
|
| 7 |
|
| 8 |
# modules
|
src/model/clip.py
CHANGED
|
@@ -1,13 +1,15 @@
|
|
| 1 |
"""CLIP model for zero-shot classification; running on CPU machine"""
|
| 2 |
-
from typing import
|
| 3 |
-
|
| 4 |
-
import torch
|
| 5 |
import open_clip
|
|
|
|
| 6 |
from open_clip import tokenizer
|
|
|
|
|
|
|
|
|
|
| 7 |
|
| 8 |
# modules
|
| 9 |
from src.core.singleton import SingletonMeta
|
| 10 |
-
from src.core.logger import logger
|
| 11 |
|
| 12 |
|
| 13 |
class ClipModel(metaclass=SingletonMeta):
|
|
@@ -24,14 +26,14 @@ class ClipModel(metaclass=SingletonMeta):
|
|
| 24 |
jit: bool = False,
|
| 25 |
):
|
| 26 |
logger.debug("creating CLIP Model Object")
|
| 27 |
-
self.config =
|
| 28 |
-
model_name
|
| 29 |
-
pretrained
|
| 30 |
-
precision
|
| 31 |
-
device
|
| 32 |
-
jit
|
| 33 |
-
cache_dir
|
| 34 |
-
|
| 35 |
self.model, self.preprocess = open_clip.create_model_from_pretrained(
|
| 36 |
**self.config
|
| 37 |
)
|
|
|
|
| 1 |
"""CLIP model for zero-shot classification; running on CPU machine"""
|
| 2 |
+
from typing import Dict, List
|
| 3 |
+
|
|
|
|
| 4 |
import open_clip
|
| 5 |
+
import torch
|
| 6 |
from open_clip import tokenizer
|
| 7 |
+
from PIL import Image
|
| 8 |
+
|
| 9 |
+
from src.core.logger import logger
|
| 10 |
|
| 11 |
# modules
|
| 12 |
from src.core.singleton import SingletonMeta
|
|
|
|
| 13 |
|
| 14 |
|
| 15 |
class ClipModel(metaclass=SingletonMeta):
|
|
|
|
| 26 |
jit: bool = False,
|
| 27 |
):
|
| 28 |
logger.debug("creating CLIP Model Object")
|
| 29 |
+
self.config = {
|
| 30 |
+
"model_name": model_name,
|
| 31 |
+
"pretrained": pretrained,
|
| 32 |
+
"precision": "bf16",
|
| 33 |
+
"device": "cpu",
|
| 34 |
+
"jit": jit,
|
| 35 |
+
"cache_dir": "model_dir/",
|
| 36 |
+
}
|
| 37 |
self.model, self.preprocess = open_clip.create_model_from_pretrained(
|
| 38 |
**self.config
|
| 39 |
)
|