RichardErkhov committed on
Commit
d953281
·
verified ·
1 Parent(s): 2625f56

uploaded model

Browse files
Files changed (1) hide show
  1. config.json +0 -4
config.json CHANGED
@@ -4,10 +4,6 @@
4
  "PhiForCausalLM"
5
  ],
6
  "attention_dropout": 0.0,
7
- "auto_map": {
8
- "AutoConfig": "configuration_phi.PhiConfig",
9
- "AutoModelForCausalLM": "modeling_phi.PhiForCausalLM"
10
- },
11
  "bos_token_id": 50256,
12
  "embd_pdrop": 0.0,
13
  "eos_token_id": 50256,
 
4
  "PhiForCausalLM"
5
  ],
6
  "attention_dropout": 0.0,
 
 
 
 
7
  "bos_token_id": 50256,
8
  "embd_pdrop": 0.0,
9
  "eos_token_id": 50256,