```json
{
  "_name_or_path": "ybelkada/blip2-opt-2.7b-fp16-sharded",
  "architectures": [
    "Blip2ForConditionalGeneration"
  ],
  "initializer_factor": 1.0,
  "initializer_range": 0.02,
  "model_type": "blip-2",
  "num_query_tokens": 32,
  "qformer_config": {
    "classifier_dropout": null,
    "model_type": "blip_2_qformer"
  },
  "quantization_config": {
    "bnb_4bit_compute_dtype": "float32",
    "bnb_4bit_quant_type": "fp4",
    "bnb_4bit_use_double_quant": false,
    "llm_int8_enable_fp32_cpu_offload": false,
    "llm_int8_has_fp16_weight": false,
    "llm_int8_skip_modules": null,
    "llm_int8_threshold": 6.0,
    "load_in_4bit": false,
    "load_in_8bit": true,
    "quant_method": "bitsandbytes"
  },
  "text_config": {
    "_name_or_path": "facebook/opt-2.7b",
    "activation_dropout": 0.0,
    "architectures": [
      "OPTForCausalLM"
    ],
    "eos_token_id": 50118,
    "ffn_dim": 10240,
    "hidden_size": 2560,
    "model_type": "opt",
    "num_attention_heads": 32,
    "num_hidden_layers": 32,
    "prefix": "</s>",
    "torch_dtype": "float16",
    "word_embed_proj_dim": 2560
  },
  "torch_dtype": "float16",
  "transformers_version": "4.34.1",
  "use_decoder_only_language_model": true,
  "vision_config": {
    "dropout": 0.0,
    "initializer_factor": 1.0,
    "layer_norm_eps": 1e-05,
    "model_type": "blip_2_vision_model",
    "num_channels": 3,
    "projection_dim": 512
  }
}
```
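A `quantization_config` block like the one above is written into `config.json` when the checkpoint is loaded or saved in 8-bit via bitsandbytes. The following is a minimal loading sketch, not the canonical recipe: it assumes `transformers` (>= 4.34), `accelerate`, and `bitsandbytes` are installed, a CUDA GPU is available, and that the repo ships the processor files (otherwise the processor can be loaded from `Salesforce/blip2-opt-2.7b`); `example.jpg` is a hypothetical image path.

```python
import torch
from PIL import Image
from transformers import AutoProcessor, BitsAndBytesConfig, Blip2ForConditionalGeneration

# load_in_8bit=True mirrors "load_in_8bit": true in quantization_config. The
# llm_int8_* values shown above (threshold 6.0, no fp32 CPU offload) are the
# bitsandbytes defaults, and the bnb_4bit_* fields are carried along but
# inactive because "load_in_4bit" is false.
bnb_config = BitsAndBytesConfig(load_in_8bit=True)

model = Blip2ForConditionalGeneration.from_pretrained(
    "ybelkada/blip2-opt-2.7b-fp16-sharded",  # from "_name_or_path" above
    quantization_config=bnb_config,
    device_map="auto",          # let accelerate place the quantized weights
    torch_dtype=torch.float16,  # matches the top-level "torch_dtype"
)
processor = AutoProcessor.from_pretrained("ybelkada/blip2-opt-2.7b-fp16-sharded")

# Example captioning call ("example.jpg" is a placeholder).
image = Image.open("example.jpg")
inputs = processor(images=image, return_tensors="pt").to(model.device, torch.float16)
generated_ids = model.generate(**inputs, max_new_tokens=20)
print(processor.decode(generated_ids[0], skip_special_tokens=True))
```

Casting the pixel values to `torch.float16` keeps the inputs consistent with the fp16 non-quantized layers of the 8-bit model, and `device_map="auto"` is needed so the quantized weights are dispatched to the GPU at load time.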