dineth554 committed on
Commit
b106aa6
·
verified ·
1 Parent(s): 74605de

Upload README.yaml with huggingface_hub

Browse files
Files changed (1) hide show
  1. README.yaml +87 -89
README.yaml CHANGED
@@ -1,4 +1,3 @@
1
- ---
2
  # Model Card for Legion Coder 8M
3
  # YAML Front Matter for Hugging Face Hub
4
 
@@ -7,126 +6,125 @@ library_name: transformers
7
  license: mit
8
  pipeline_tag: text-generation
9
  language:
10
- - en
11
- - code
12
  tags:
13
- - transformers
14
- - pytorch
15
- - safetensors
16
- - text-generation
17
- - code-generation
18
- - python
19
- - javascript
20
- - coding
21
- - programming
22
- - sagemaker
23
- - amazon-sagemaker
24
- - cpu
25
- - compact
26
- - efficient
27
- - nvdya-kit
28
- - death-legion
29
 
30
  datasets:
31
- - the-stack-v2
32
 
33
  metrics:
34
- - perplexity
35
- - accuracy
36
 
37
  model-index:
38
- - name: Legion Coder 8M
39
- results: []
40
 
41
  inference:
42
- parameters:
43
- temperature: 0.8
44
- top_p: 0.95
45
- top_k: 50
46
- max_new_tokens: 200
47
 
48
  sagemaker:
49
- sdk_version: "2.200.0"
50
- instance_type: "ml.m5.large"
51
- instance_count: 1
52
- container_image: "huggingface-pytorch-inference:2.0.0-transformers4.28.1-cpu-py310-ubuntu20.04-v1.0"
53
 
54
  # Model Details
55
  model_details:
56
- name: Legion Coder 8M
57
- version: 1.0.0
58
- description: A compact yet powerful 44M parameter transformer model optimized for coding tasks
59
- developer: DEATH LEGION
60
- powered_by: nvdya-kit
61
- architecture: GPT-style Transformer
62
- parameters: 44,341,632
63
- model_size: 170MB
64
- hidden_size: 576
65
- num_layers: 13
66
- num_heads: 16
67
- context_length: 1024
68
- vocabulary_size: 16000
69
- format: Safetensors
70
- precision: float32
71
 
72
  # Training Details
73
  training_details:
74
- optimizer: AdamW
75
- learning_rate: 5e-4
76
- lr_schedule: cosine_decay
77
- batch_size: 4
78
- gradient_accumulation: true
79
- training_steps: 10000
80
- precision: float32
81
 
82
  # Intended Use
83
  intended_use:
84
- primary_use_cases:
85
- - Code completion and generation
86
- - Function generation from descriptions
87
- - Debugging assistance
88
- - Code explanation and documentation
89
- - Programming concept explanations
90
- - Code scaffolding and prototyping
91
- target_users:
92
- - Software developers
93
- - Students learning to code
94
- - Data scientists
95
- - DevOps engineers
96
- - Technical writers
97
 
98
  # Limitations
99
  limitations:
100
- - Limited to 1,024 token context window
101
- - Trained primarily on Python code
102
- - May generate code that requires review before production use
103
- - Not suitable for non-coding tasks
104
 
105
  # Ethical Considerations
106
  ethical_considerations:
107
- - Generated code should be reviewed before deployment
108
- - May reproduce patterns from training data
109
- - Not a replacement for human code review
110
- - Users are responsible for compliance with licenses of generated code
111
 
112
  # Citation
113
  citation: |
114
- @misc{legioncoder2024,
115
- title={Legion Coder 8M: A Compact Transformer for Code Generation},
116
- author={DEATH LEGION},
117
- year={2024},
118
- howpublished={\url{https://huggingface.co/dineth554/legion-coder-8m}}
119
- }
120
 
121
  # Contact
122
  contact:
123
- developer: DEATH LEGION
124
- powered_by: nvdya-kit
125
- repository: https://huggingface.co/dineth554/legion-coder-8m
126
 
127
  # Branding
128
  branding:
129
- tagline: MADE WITH ❤️ BY DEATH LEGION
130
- powered_by: nvdya-kit
131
- copyright: © 2024 DEATH LEGION. All rights reserved.
132
- ---
 
 
1
  # Model Card for Legion Coder 8M
2
  # YAML Front Matter for Hugging Face Hub
3
 
 
6
  license: mit
7
  pipeline_tag: text-generation
8
  language:
9
+ - en
10
+ - code
11
  tags:
12
+ - transformers
13
+ - pytorch
14
+ - safetensors
15
+ - text-generation
16
+ - code-generation
17
+ - python
18
+ - javascript
19
+ - coding
20
+ - programming
21
+ - sagemaker
22
+ - amazon-sagemaker
23
+ - cpu
24
+ - compact
25
+ - efficient
26
+ - nvdya-kit
27
+ - death-legion
28
 
29
  datasets:
30
+ - the-stack-v2
31
 
32
  metrics:
33
+ - perplexity
34
+ - accuracy
35
 
36
  model-index:
37
+ - name: Legion Coder 8M
38
+ results: []
39
 
40
  inference:
41
+ parameters:
42
+ temperature: 0.8
43
+ top_p: 0.95
44
+ top_k: 50
45
+ max_new_tokens: 200
46
 
47
  sagemaker:
48
+ sdk_version: "2.200.0"
49
+ instance_type: "ml.m5.large"
50
+ instance_count: 1
51
+ container_image: "huggingface-pytorch-inference:2.0.0-transformers4.28.1-cpu-py310-ubuntu20.04-v1.0"
52
 
53
  # Model Details
54
  model_details:
55
+ name: Legion Coder 8M
56
+ version: 1.0.0
57
+ description: A compact yet powerful 44M parameter transformer model optimized for coding tasks
58
+ developer: DEATH LEGION
59
+ powered_by: nvdya-kit
60
+ architecture: GPT-style Transformer
61
+ parameters: 44,341,632
62
+ model_size: 170MB
63
+ hidden_size: 576
64
+ num_layers: 13
65
+ num_heads: 16
66
+ context_length: 1024
67
+ vocabulary_size: 16000
68
+ format: Safetensors
69
+ precision: float32
70
 
71
  # Training Details
72
  training_details:
73
+ optimizer: AdamW
74
+ learning_rate: 5e-4
75
+ lr_schedule: cosine_decay
76
+ batch_size: 4
77
+ gradient_accumulation: true
78
+ training_steps: 10000
79
+ precision: float32
80
 
81
  # Intended Use
82
  intended_use:
83
+ primary_use_cases:
84
+ - Code completion and generation
85
+ - Function generation from descriptions
86
+ - Debugging assistance
87
+ - Code explanation and documentation
88
+ - Programming concept explanations
89
+ - Code scaffolding and prototyping
90
+ target_users:
91
+ - Software developers
92
+ - Students learning to code
93
+ - Data scientists
94
+ - DevOps engineers
95
+ - Technical writers
96
 
97
  # Limitations
98
  limitations:
99
+ - Limited to 1,024 token context window
100
+ - Trained primarily on Python code
101
+ - May generate code that requires review before production use
102
+ - Not suitable for non-coding tasks
103
 
104
  # Ethical Considerations
105
  ethical_considerations:
106
+ - Generated code should be reviewed before deployment
107
+ - May reproduce patterns from training data
108
+ - Not a replacement for human code review
109
+ - Users are responsible for compliance with licenses of generated code
110
 
111
  # Citation
112
  citation: |
113
+ @misc{legioncoder2026,
114
+ title={Legion Coder 8M: A Compact Transformer for Code Generation},
115
+ author={DEATH LEGION},
116
+ year={2026},
117
+ howpublished={\url{https://huggingface.co/dineth554/legion-coder-8m}}
118
+ }
119
 
120
  # Contact
121
  contact:
122
+ developer: DEATH LEGION
123
+ powered_by: nvdya-kit
124
+ repository: https://huggingface.co/dineth554/legion-coder-8m
125
 
126
  # Branding
127
  branding:
128
+ tagline: MADE WITH ❤️ BY DEATH LEGION
129
+ powered_by: nvdya-kit
130
+ copyright: © 2026 DEATH LEGION. All rights reserved.