tianleliphoebe committed on
Commit
b4162ea
·
verified ·
1 Parent(s): ca6f126

Upload config

Browse files
Files changed (1) hide show
  1. config.json +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "checkpoints/BREEN",
3
  "add_binary_mask": true,
4
  "add_learnable_query": true,
5
  "aggregate_mask": true,
@@ -57,7 +57,7 @@
57
  "shared": false,
58
  "sliding_window": null,
59
  "tie_word_embeddings": false,
60
- "torch_dtype": "float32",
61
  "transformers_version": "4.45.1",
62
  "tune_mm_mlp_adapter": false,
63
  "use_cache": false,
 
1
  {
2
+ "_name_or_path": "/apdcephfs_jn/share_302244400/phoebetlli/EVE_moe/checkpoints/eve-qwen25-7B-fitu-instruct-imgexp-autoclip-add-cos-multi-concatre34-pretext-mask-addtext-agg-linear-sft4m-4n/checkpoint-16200",
3
  "add_binary_mask": true,
4
  "add_learnable_query": true,
5
  "aggregate_mask": true,
 
57
  "shared": false,
58
  "sliding_window": null,
59
  "tie_word_embeddings": false,
60
+ "torch_dtype": "bfloat16",
61
  "transformers_version": "4.45.1",
62
  "tune_mm_mlp_adapter": false,
63
  "use_cache": false,