[
{
"shortDescription" : "CLIP ViT-B\/32 model trained with DataComp-1B (Image Encoder Model)",
"metadataOutputVersion" : "3.0",
"outputSchema" : [
{
"hasShapeFlexibility" : "0",
"isOptional" : "0",
"dataType" : "Float32",
"formattedType" : "MultiArray (Float32 1 × 512)",
"shortDescription" : "",
"shape" : "[1, 512]",
"name" : "var_1240",
"type" : "MultiArray"
}
],
"version" : "1.0.0",
"modelParameters" : [
],
"author" : "InspiratioNULL 2026",
"specificationVersion" : 6,
"storagePrecision" : "Float16",
"license" : "MIT",
"mlProgramOperationTypeHistogram" : {
"Concat" : 1,
"Linear" : 49,
"SliceByIndex" : 37,
"LayerNorm" : 26,
"Transpose" : 85,
"Matmul" : 24,
"Gelu" : 12,
"Softmax" : 12,
"Mul" : 13,
"Cast" : 2,
"Reshape" : 109,
"Add" : 26,
"ExpandDims" : 12,
"Squeeze" : 12,
"Conv" : 1
},
"computePrecision" : "Mixed (Float16, Float32, Int32)",
"stateSchema" : [
],
"isUpdatable" : "0",
"availability" : {
"macOS" : "12.0",
"tvOS" : "15.0",
"visionOS" : "1.0",
"watchOS" : "8.0",
"iOS" : "15.0",
"macCatalyst" : "15.0"
},
"modelType" : {
"name" : "MLModelType_mlProgram"
},
"inputSchema" : [
{
"height" : "224",
"colorspace" : "RGB",
"isOptional" : "0",
"width" : "224",
"isColor" : "1",
"formattedType" : "Image (Color 224 × 224)",
"hasSizeFlexibility" : "0",
"type" : "Image",
"shortDescription" : "",
"name" : "image"
}
],
"userDefinedMetadata" : {
"com.github.apple.coremltools.conversion_date" : "2026-01-15",
"com.github.apple.coremltools.source" : "torch==2.9.1",
"com.github.apple.coremltools.version" : "9.0",
"com.github.apple.coremltools.source_dialect" : "TorchScript"
},
"generatedClassName" : "CLIP_ImageEncoder",
"method" : "predict"
}
]
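
The schema above is enough to call the model from Swift: the input is a fixed-size 224 × 224 RGB image named "image" (hasSizeFlexibility is 0, so no other size is accepted), and the output is a 1 × 512 Float32 MLMultiArray named "var_1240". Below is a minimal sketch using the generic MLModel API rather than the Xcode-generated CLIP_ImageEncoder class; the function name embed, the bundled resource name, and the caller-supplied pixel buffer are assumptions for illustration.

import CoreML
import CoreVideo

/// Sketch: compute a 512-dimensional CLIP image embedding with this model.
/// Assumes the compiled model is bundled as CLIP_ImageEncoder.mlmodelc and
/// that `pixelBuffer` is already a 224 x 224 RGB CVPixelBuffer.
func embed(_ pixelBuffer: CVPixelBuffer) throws -> MLMultiArray {
    // Load the compiled model from the app bundle (hypothetical resource name).
    let url = Bundle.main.url(forResource: "CLIP_ImageEncoder",
                              withExtension: "mlmodelc")!
    let model = try MLModel(contentsOf: url)

    // Input feature name "image" comes from the inputSchema above.
    let input = try MLDictionaryFeatureProvider(
        dictionary: ["image": MLFeatureValue(pixelBuffer: pixelBuffer)]
    )
    let output = try model.prediction(from: input)

    // Output feature name "var_1240" and its Float32 [1, 512] shape come
    // from the outputSchema above.
    return output.featureValue(for: "var_1240")!.multiArrayValue!
}

Note that although the weights are stored as Float16 (storagePrecision) and execution uses mixed precision, the returned embedding is Float32 as declared in the output schema. Per the availability block, this requires iOS 15.0 / macOS 12.0 or later.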