samkeet committed on
Commit
71dfa12
·
verified ·
1 Parent(s): 4f13243

Upload model

Browse files
Files changed (1) hide show
  1. config.json +17 -33
config.json CHANGED
@@ -1,33 +1,17 @@
1
- {
2
- "architectures": [
3
- "GPTModelForTextGeneration"
4
- ],
5
- "auto_map": {
6
- "AutoConfig": "configuration_gpt.GPTConfig",
7
- "AutoModelForCausalLM": "modeling_gpt.GPTModelForTextGeneration",
8
- "AutoTokenizer": "GPT2Tokenizer",
9
- },
10
- "block_size": 1024,
11
- "custom_pipelines": {
12
- "text-generation": {
13
- "default": {
14
- "model": {
15
- "pt": "samkeet/GPT_124M"
16
- }
17
- },
18
- "impl": "pipeline_gpt.GPT124MTextGenerationPipeline",
19
- "pt": [
20
- "AutoModelForCausalLM"
21
- ],
22
- "tf": [],
23
- "type": "text"
24
- }
25
- },
26
- "model_type": "custom_gpt",
27
- "n_embd": 768,
28
- "n_head": 12,
29
- "n_layer": 12,
30
- "torch_dtype": "float32",
31
- "transformers_version": "4.48.0",
32
- "vocab_size": 50304
33
- }
 
1
+ {
2
+ "architectures": [
3
+ "GPTModelForTextGeneration"
4
+ ],
5
+ "auto_map": {
6
+ "AutoConfig": "configuration_gpt.GPTConfig",
7
+ "AutoModelForCausalLM": "modeling_gpt.GPTModelForTextGeneration"
8
+ },
9
+ "block_size": 1024,
10
+ "model_type": "custom_gpt",
11
+ "n_embd": 768,
12
+ "n_head": 12,
13
+ "n_layer": 12,
14
+ "torch_dtype": "float32",
15
+ "transformers_version": "4.48.0",
16
+ "vocab_size": 50304
17
+ }