{
  "model_type": "gpt2",
  "architectures": [
    "GPT2LMHeadModel"
  ],
  "vocab_size": 16000,
  "d_model": 640,
  "num_layers": 14,
  "num_heads": 16,
  "d_ff": 1280,
  "max_seq_len": 1024,
  "dropout": 0.1,
  "pad_token_id": 0,
  "eos_token_id": 1,
  "unk_token_id": 2,
  "torch_dtype": "float32",
  "transformers_version": "4.36.0",
  "base_model": null,
  "task": "text-generation",
  "tags": [
    "transformers",
    "pytorch",
    "safetensors",
    "text-generation",
    "code-generation",
    "python",
    "javascript",
    "coding",
    "programming",
    "sagemaker",
    "amazon-sagemaker",
    "cpu",
    "compact",
    "efficient"
  ],
  "pipeline_tag": "text-generation",
  "library_name": "transformers",
  "license": "mit",
  "language": [
    "en",
    "code"
  ],
  "datasets": [
    "the-stack-v2"
  ],
  "metrics": [
    "perplexity",
    "accuracy"
  ],
  "inference": {
    "parameters": {
      "temperature": 0.8,
      "top_p": 0.95,
      "top_k": 50,
      "max_new_tokens": 200
    }
  },
  "sagemaker": {
    "sdk_version": "2.200.0",
    "instance_type": "ml.m5.large",
    "instance_count": 1,
    "container_image": "huggingface-pytorch-inference:2.0.0-transformers4.28.1-cpu-py310-ubuntu20.04-v1.0"
  }
}