darwinkernelpanic committed on
Commit
8334c7d
·
verified ·
1 Parent(s): 00115f9

Upload config.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. config.json +34 -23
config.json CHANGED
@@ -1,26 +1,37 @@
1
  {
2
- "activation": "gelu",
3
- "architectures": [
4
- "DistilBertModel"
5
  ],
6
- "attention_dropout": 0.1,
7
- "bos_token_id": null,
8
- "dim": 768,
9
- "dropout": 0.1,
10
- "dtype": "float32",
11
- "eos_token_id": null,
12
- "hidden_dim": 3072,
13
- "initializer_range": 0.02,
14
- "max_position_embeddings": 512,
15
- "model_type": "distilbert",
16
- "n_heads": 12,
17
- "n_layers": 6,
18
- "pad_token_id": 0,
19
- "qa_dropout": 0.1,
20
- "seq_classif_dropout": 0.2,
21
- "sinusoidal_pos_embds": false,
22
- "tie_weights_": true,
23
- "tie_word_embeddings": true,
24
- "transformers_version": "5.2.0",
25
- "vocab_size": 30522
 
 
 
 
 
 
 
 
 
 
 
26
  }
 
1
  {
2
+ "_name_or_path": "prajjwal1/bert-tiny",
3
+ "architectures": [
4
+ "BertForSequenceClassification"
5
  ],
6
+ "attention_probs_dropout_prob": 0.1,
7
+ "classifier_dropout": null,
8
+ "gradient_checkpointing": false,
9
+ "hidden_act": "gelu",
10
+ "hidden_dropout_prob": 0.1,
11
+ "hidden_size": 128,
12
+ "id2label": {
13
+ "0": "human",
14
+ "1": "ai"
15
+ },
16
+ "initializer_range": 0.02,
17
+ "intermediate_size": 512,
18
+ "label2id": {
19
+ "ai": 1,
20
+ "human": 0
21
+ },
22
+ "layer_norm_eps": 1e-12,
23
+ "max_position_embeddings": 512,
24
+ "model_type": "bert",
25
+ "num_attention_heads": 2,
26
+ "num_hidden_layers": 2,
27
+ "num_labels": 2,
28
+ "output_attentions": false,
29
+ "output_hidden_states": false,
30
+ "pad_token_id": 0,
31
+ "position_embedding_type": "absolute",
32
+ "torch_dtype": "float32",
33
+ "transformers_version": "4.41.0",
34
+ "type_vocab_size": 2,
35
+ "use_cache": true,
36
+ "vocab_size": 30522
37
  }