Spaces runtime status: Runtime error
Commit message: "style"
Browse files — demo/model.py (+2 −2 lines changed)
demo/model.py
demo/model.py
CHANGED
|
@@ -177,8 +177,8 @@ class Model_all:
|
|
| 177 |
# style part
|
| 178 |
self.model_style = StyleAdapter(width=1024, context_dim=768, num_head=8, n_layes=3, num_token=8).to(device)
|
| 179 |
self.model_style.load_state_dict(torch.load("models/t2iadapter_style_sd14v1.pth", map_location=device))
|
| 180 |
-
self.clip_processor = CLIPProcessor.from_pretrained('openai/clip-vit-large-patch14')
|
| 181 |
-
self.clip_vision_model = CLIPVisionModel.from_pretrained('openai/clip-vit-large-patch14').to(device)
|
| 182 |
|
| 183 |
device = 'cpu'
|
| 184 |
## mmpose
|
|
|
|
| 177 |
# style part
|
| 178 |
self.model_style = StyleAdapter(width=1024, context_dim=768, num_head=8, n_layes=3, num_token=8).to(device)
|
| 179 |
self.model_style.load_state_dict(torch.load("models/t2iadapter_style_sd14v1.pth", map_location=device))
|
| 180 |
+
self.clip_processor = CLIPProcessor.from_pretrained('openai/clip-vit-large-patch14')
|
| 181 |
+
self.clip_vision_model = CLIPVisionModel.from_pretrained('openai/clip-vit-large-patch14').to(device)
|
| 182 |
|
| 183 |
device = 'cpu'
|
| 184 |
## mmpose
|