LisaSchunke committed
Commit 08815e5 · verified · 1 Parent(s): ad51e9c

Upload Phi3ForCausalLM

config.json CHANGED
@@ -1,13 +1,13 @@
 {
-  "_name_or_path": "microsoft/Phi-3-mini-128k-instruct",
+  "_name_or_path": "microsoft/Phi-3-mini-4k-instruct",
   "architectures": [
     "Phi3ForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
   "auto_map": {
-    "AutoConfig": "microsoft/Phi-3-mini-128k-instruct--configuration_phi3.Phi3Config",
-    "AutoModelForCausalLM": "microsoft/Phi-3-mini-128k-instruct--modeling_phi3.Phi3ForCausalLM"
+    "AutoConfig": "microsoft/Phi-3-mini-4k-instruct--configuration_phi3.Phi3Config",
+    "AutoModelForCausalLM": "microsoft/Phi-3-mini-4k-instruct--modeling_phi3.Phi3ForCausalLM"
   },
   "bos_token_id": 1,
   "embd_pdrop": 0.0,
@@ -16,7 +16,7 @@
   "hidden_size": 3072,
   "initializer_range": 0.02,
   "intermediate_size": 8192,
-  "max_position_embeddings": 131072,
+  "max_position_embeddings": 4096,
   "model_type": "phi3",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
@@ -25,111 +25,9 @@
   "pad_token_id": 32000,
   "resid_pdrop": 0.0,
   "rms_norm_eps": 1e-05,
-  "rope_scaling": {
-    "long_factor": [
-      1.0299999713897705,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0799999237060547,
-      1.2299998998641968,
-      1.2299998998641968,
-      1.2999999523162842,
-      1.4499999284744263,
-      1.5999999046325684,
-      1.6499998569488525,
-      1.8999998569488525,
-      2.859999895095825,
-      3.68999981880188,
-      5.419999599456787,
-      5.489999771118164,
-      5.489999771118164,
-      9.09000015258789,
-      11.579999923706055,
-      15.65999984741211,
-      15.769999504089355,
-      15.789999961853027,
-      18.360000610351562,
-      21.989999771118164,
-      23.079999923706055,
-      30.009998321533203,
-      32.35000228881836,
-      32.590003967285156,
-      35.56000518798828,
-      39.95000457763672,
-      53.840003967285156,
-      56.20000457763672,
-      57.95000457763672,
-      59.29000473022461,
-      59.77000427246094,
-      59.920005798339844,
-      61.190006256103516,
-      61.96000671386719,
-      62.50000762939453,
-      63.3700065612793,
-      63.48000717163086,
-      63.48000717163086,
-      63.66000747680664,
-      63.850006103515625,
-      64.08000946044922,
-      64.760009765625,
-      64.80001068115234,
-      64.81001281738281,
-      64.81001281738281
-    ],
-    "short_factor": [
-      1.05,
-      1.05,
-      1.05,
-      1.1,
-      1.1,
-      1.1500000000000001,
-      1.2000000000000002,
-      1.2500000000000002,
-      1.3000000000000003,
-      1.3500000000000003,
-      1.5000000000000004,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.0500000000000007,
-      2.0500000000000007,
-      2.0500000000000007,
-      2.1000000000000005,
-      2.1000000000000005,
-      2.1000000000000005,
-      2.1500000000000004,
-      2.1500000000000004,
-      2.3499999999999996,
-      2.549999999999999,
-      2.5999999999999988,
-      2.5999999999999988,
-      2.7499999999999982,
-      2.849999999999998,
-      2.849999999999998,
-      2.9499999999999975
-    ],
-    "type": "su"
-  },
+  "rope_scaling": null,
   "rope_theta": 10000.0,
-  "sliding_window": 262144,
+  "sliding_window": 2047,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
   "transformers_version": "4.42.0.dev0",
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9b7e5197fb75ade8d1259335f6f216a3ff8866d2cd54757bab32e6eec14b49b0
+oid sha256:17bfb1d345e6fa461f70451c6ed5d0e0e24880382257b295a5f49e179b75b35a
 size 4972489200
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f6cd9d761ececce4a98e44454901ab116e4f5404727f03dd07373291e31dde72
+oid sha256:9ed8f2c0249121eea0d9dd6b1dd4f3def74eb2f08b6a230dcf51273dad8fec05
 size 2669692488
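Both safetensors shards were re-uploaded with new content hashes while their byte sizes stayed the same. A minimal sketch for checking locally downloaded shards against the new LFS oids from this commit, assuming the files sit in the working directory (the paths are assumptions):

# Minimal sketch: verify downloaded shards against the new LFS pointer oids.
# The expected hashes are copied from the '+' lines in the pointers above.
import hashlib

expected = {
    "model-00001-of-00002.safetensors": "17bfb1d345e6fa461f70451c6ed5d0e0e24880382257b295a5f49e179b75b35a",
    "model-00002-of-00002.safetensors": "9ed8f2c0249121eea0d9dd6b1dd4f3def74eb2f08b6a230dcf51273dad8fec05",
}

for name, oid in expected.items():
    h = hashlib.sha256()
    with open(name, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
            h.update(chunk)
    print(name, "OK" if h.hexdigest() == oid else "MISMATCH")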