{
  "_name_or_path": "hibikaze/finetune-llava-v1.5-japanese-gpt2-small_test-checkpoint-1200",
  "activation_function": "gelu_new",
  "architectures": [
    "LlavaGpt2ForCausalLM"
  ],
  "attn_pdrop": 0.1,
  "bos_token_id": 1,
  "embd_pdrop": 0.1,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "image_aspect_ratio": "square",
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
  "mm_hidden_size": 2304,
  "mm_projector_type": "mlp2x_gelu",
  "mm_vision_select_feature": "patch",
  "mm_vision_select_layer": -2,
  "mm_vision_tower": "google/siglip-so400m-patch14-384",
  "model_type": "llava-jp",
  "n_ctx": 1024,
  "n_embd": 768,
  "n_head": 12,
  "n_inner": 3072,
  "n_layer": 12,
  "n_positions": 1024,
  "reorder_and_upcast_attn": false,
  "resid_pdrop": 0.1,
  "scale_attn_by_inverse_layer_idx": false,
  "scale_attn_weights": true,
  "scales": [
    1.0,
    0.5
  ],
  "summary_activation": null,
  "summary_first_dropout": 0.1,
  "summary_proj_to_labels": true,
  "summary_type": "cls_index",
  "summary_use_proj": true,
  "task_specific_params": {
    "text-generation": {
      "do_sample": true,
      "max_length": 50
    }
  },
  "tokenizer_model_max_length": 1024,
  "tokenizer_padding_side": "right",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.42.4",
  "tune_mm_mlp_adapter": false,
  "use_cache": false,
  "use_mm_proj": true,
  "vocab_size": 32000
}
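
For reference, here is a minimal sketch of loading a checkpoint with this config through Hugging Face transformers. It assumes the repository ships the custom LlavaGpt2ForCausalLM implementation so that trust_remote_code=True can resolve the "llava-jp" model_type; the repo id is taken from _name_or_path above, and the project may expose its own loading entry point instead.

```python
# Minimal sketch: load the config and model described by the config.json above.
# Assumption: the repo registers LlavaGpt2ForCausalLM for model_type "llava-jp"
# via trust_remote_code; if not, import the class from the project's own code
# and call .from_pretrained() on it directly.
import torch
from transformers import AutoConfig, AutoModelForCausalLM

repo = "hibikaze/finetune-llava-v1.5-japanese-gpt2-small_test-checkpoint-1200"

# Inspect the multimodal fields recorded in config.json.
config = AutoConfig.from_pretrained(repo, trust_remote_code=True)
print(config.mm_vision_tower)    # google/siglip-so400m-patch14-384
print(config.mm_projector_type)  # mlp2x_gelu

# Instantiate in the dtype the checkpoint was saved with ("torch_dtype": "bfloat16").
model = AutoModelForCausalLM.from_pretrained(
    repo,
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
)
```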