# bliss_efcamdat_cn / .gitattributes
# yg422's picture
# Add english_trained_2/model_6450k_tokens
# fc99b8d verified
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
a2/completion/tokenizer.json filter=lfs diff=lfs merge=lfs -text
a2/completion/checkpoint-1356/tokenizer.json filter=lfs diff=lfs merge=lfs -text
a2/completion/checkpoint-2712/tokenizer.json filter=lfs diff=lfs merge=lfs -text
other_c1/contrastive_learnertoken/tokenizer.json filter=lfs diff=lfs merge=lfs -text
other_c1/contrastive_margin_sweep_m2.0/tokenizer.json filter=lfs diff=lfs merge=lfs -text
other_c1/contrastive_margin_sweep_m0.5/tokenizer.json filter=lfs diff=lfs merge=lfs -text
other_c1/contrastive_learnertoken_generic/tokenizer.json filter=lfs diff=lfs merge=lfs -text
other_c1/contrastive_margin_sweep/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_6350k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_2000k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_1550k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_5600k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_2100k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_6250k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_5700k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_1450k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_2200k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_6150k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_4950k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_5400k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_1750k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_6050k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_2300k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_1650k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_5500k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_4850k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_1150k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_5200k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_6750k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_2400k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_950k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_3950k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_5300k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_1050k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_3850k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_850k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_2500k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_6650k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_5000k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_1350k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_8850k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_2600k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_6550k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_7800k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_1250k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_5100k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_8950k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_7900k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text
english_trained_2/model_6450k_tokens/tokenizer.json filter=lfs diff=lfs merge=lfs -text