Update src/pipeline.py
Browse files — src/pipeline.py (+3 −3)
src/pipeline.py
CHANGED
|
@@ -145,10 +145,10 @@ def load_pipeline() -> Pipeline:
|
|
| 145 |
|
| 146 |
torch.backends.cudnn.benchmark = True
|
| 147 |
torch.backends.cudnn.deterministic = False
|
| 148 |
-
torch.set_deterministic_debug_mode(0)
|
| 149 |
torch.backends.cuda.matmul.allow_tf32 = True
|
| 150 |
-
torch.backends.cuda.matmul.allow_fp16_reduced_precision_reduction = True
|
| 151 |
-
|
| 152 |
torch.cuda.set_per_process_memory_fraction(0.99)
|
| 153 |
pipeline.text_encoder.to(memory_format=torch.channels_last)
|
| 154 |
pipeline.transformer.to(memory_format=torch.channels_last)
|
|
|
|
| 145 |
|
| 146 |
torch.backends.cudnn.benchmark = True
|
| 147 |
torch.backends.cudnn.deterministic = False
|
| 148 |
+
# torch.set_deterministic_debug_mode(0)
|
| 149 |
torch.backends.cuda.matmul.allow_tf32 = True
|
| 150 |
+
# torch.backends.cuda.matmul.allow_fp16_reduced_precision_reduction = True
|
| 151 |
+
torch.cuda.set_memory_growth(True)  # NOTE(review): BUG — torch.cuda has no set_memory_growth(); that is TensorFlow's tf.config.experimental.set_memory_growth. This line will raise AttributeError at runtime. PyTorch caps CUDA memory via set_per_process_memory_fraction (already called on the next line), so this line should be removed.
|
| 152 |
torch.cuda.set_per_process_memory_fraction(0.99)
|
| 153 |
pipeline.text_encoder.to(memory_format=torch.channels_last)
|
| 154 |
pipeline.transformer.to(memory_format=torch.channels_last)
|