{
"model": {
"model_type": "qwen2_5_vl",
"architectures": [
"Qwen2_5_VLForConditionalGeneration"
]
},
"task": "text-generation",
"framework": "pytorch",
"finetuned_from": ""
}