File size: 146 Bytes
e14f899
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
import torch

# Maps precision flag strings to their corresponding torch dtypes.
PRECISION_TO_TYPE = dict(
    fp32=torch.float32,
    fp16=torch.float16,
    bf16=torch.bfloat16,
)

# Output directory for "null" data; NOTE(review): purpose of this path is not
# visible in this file — confirm against the code that reads/writes it.
NULL_DIR = "temp_data/null"