johnkillington commited on
Commit
ef1cefb
·
verified ·
1 Parent(s): 807853f

Upload CoreML model

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. README.md +33 -0
  2. Resources/SafetyChecker.mlmodelc/analytics/coremldata.bin +3 -0
  3. Resources/SafetyChecker.mlmodelc/coremldata.bin +3 -0
  4. Resources/SafetyChecker.mlmodelc/metadata.json +129 -0
  5. Resources/SafetyChecker.mlmodelc/model.mil +0 -0
  6. Resources/SafetyChecker.mlmodelc/weights/weight.bin +3 -0
  7. Resources/TextEncoder.mlmodelc/analytics/coremldata.bin +3 -0
  8. Resources/TextEncoder.mlmodelc/coremldata.bin +3 -0
  9. Resources/TextEncoder.mlmodelc/metadata.json +88 -0
  10. Resources/TextEncoder.mlmodelc/model.mil +0 -0
  11. Resources/TextEncoder.mlmodelc/weights/weight.bin +3 -0
  12. Resources/Unet.mlmodelc/analytics/coremldata.bin +3 -0
  13. Resources/Unet.mlmodelc/coremldata.bin +3 -0
  14. Resources/Unet.mlmodelc/metadata.json +109 -0
  15. Resources/Unet.mlmodelc/model.mil +0 -0
  16. Resources/Unet.mlmodelc/weights/weight.bin +3 -0
  17. Resources/UnetChunk1.mlmodelc/analytics/coremldata.bin +3 -0
  18. Resources/UnetChunk1.mlmodelc/coremldata.bin +3 -0
  19. Resources/UnetChunk1.mlmodelc/metadata.json +220 -0
  20. Resources/UnetChunk1.mlmodelc/model.mil +0 -0
  21. Resources/UnetChunk1.mlmodelc/weights/weight.bin +3 -0
  22. Resources/UnetChunk2.mlmodelc/analytics/coremldata.bin +3 -0
  23. Resources/UnetChunk2.mlmodelc/coremldata.bin +3 -0
  24. Resources/UnetChunk2.mlmodelc/metadata.json +208 -0
  25. Resources/UnetChunk2.mlmodelc/model.mil +0 -0
  26. Resources/UnetChunk2.mlmodelc/weights/weight.bin +3 -0
  27. Resources/VAEDecoder.mlmodelc/analytics/coremldata.bin +3 -0
  28. Resources/VAEDecoder.mlmodelc/coremldata.bin +3 -0
  29. Resources/VAEDecoder.mlmodelc/metadata.json +81 -0
  30. Resources/VAEDecoder.mlmodelc/model.mil +0 -0
  31. Resources/VAEDecoder.mlmodelc/weights/weight.bin +3 -0
  32. Resources/VAEEncoder.mlmodelc/analytics/coremldata.bin +3 -0
  33. Resources/VAEEncoder.mlmodelc/coremldata.bin +3 -0
  34. Resources/VAEEncoder.mlmodelc/metadata.json +81 -0
  35. Resources/VAEEncoder.mlmodelc/model.mil +0 -0
  36. Resources/VAEEncoder.mlmodelc/weights/weight.bin +3 -0
  37. Resources/merges.txt +0 -0
  38. Resources/vocab.json +0 -0
  39. Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_safety_checker.mlpackage/Data/com.apple.CoreML/model.mlmodel +3 -0
  40. Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_safety_checker.mlpackage/Data/com.apple.CoreML/weights/weight.bin +3 -0
  41. Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_safety_checker.mlpackage/Manifest.json +18 -0
  42. Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_text_encoder.mlpackage/Data/com.apple.CoreML/model.mlmodel +3 -0
  43. Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_text_encoder.mlpackage/Data/com.apple.CoreML/weights/weight.bin +3 -0
  44. Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_text_encoder.mlpackage/Manifest.json +18 -0
  45. Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_unet.mlpackage/Data/com.apple.CoreML/model.mlmodel +3 -0
  46. Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_unet.mlpackage/Data/com.apple.CoreML/weights/weight.bin +3 -0
  47. Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_unet.mlpackage/Manifest.json +18 -0
  48. Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_unet_chunk1.mlpackage/Data/com.apple.CoreML/model.mlmodel +3 -0
  49. Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_unet_chunk1.mlpackage/Data/com.apple.CoreML/weights/weight.bin +3 -0
  50. Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_unet_chunk1.mlpackage/Manifest.json +18 -0
README.md ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ tags:
3
+ - coreml
4
+ - stable-diffusion
5
+ - text-to-image
6
+ ---
7
+
8
+ # bravoChildrens_v10-CoreML
9
+
10
+ Converted CoreML Stable Diffusion model.
11
+
12
+ ## Conversion Settings
13
+ - Compute Unit: ALL
14
+ - Attention: SPLIT_EINSUM_V2
15
+ - Quantization: None
16
+
17
+ ## Usage
18
+
19
+ This model can be used with Apple's Core ML Stable Diffusion implementation on macOS, iOS, and iPadOS.
20
+
21
+ ```python
22
+ # Example usage (requires python_coreml_stable_diffusion)
23
+ from python_coreml_stable_diffusion import pipeline
24
+
25
+ pipe = pipeline.StableDiffusionCoreMLPipeline(
26
+ coreml_model_path="coreml_model",
27
+ compute_unit="ALL"
28
+ )
29
+
30
+ image = pipe(prompt="your prompt here")
31
+ ```
32
+
33
+ Converted using [Conversion Space](https://github.com/apple/ml-stable-diffusion).
Resources/SafetyChecker.mlmodelc/analytics/coremldata.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a92052e16429aa4ea6f79e8fefebff70a4f11919e78160f1ed284de8d370f930
3
+ size 243
Resources/SafetyChecker.mlmodelc/coremldata.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a46166f707afca51fbe47294e8b7dbc83620bca8e8d9324dfb8d935a458270f8
3
+ size 1609
Resources/SafetyChecker.mlmodelc/metadata.json ADDED
@@ -0,0 +1,129 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "shortDescription" : "Stable Diffusion generates images conditioned on text and\/or other images as input through the diffusion process. Please refer to https:\/\/arxiv.org\/abs\/2112.10752 for details.",
4
+ "metadataOutputVersion" : "3.0",
5
+ "outputSchema" : [
6
+ {
7
+ "hasShapeFlexibility" : "0",
8
+ "isOptional" : "0",
9
+ "dataType" : "Float32",
10
+ "formattedType" : "MultiArray (Float32 1 × 512 × 512 × 3)",
11
+ "shortDescription" : "Identical to the input `images`. If safety checker detected any sensitive content, the corresponding image is replaced with a blank image (zeros)",
12
+ "shape" : "[1, 512, 512, 3]",
13
+ "name" : "filtered_images",
14
+ "type" : "MultiArray"
15
+ },
16
+ {
17
+ "hasShapeFlexibility" : "0",
18
+ "isOptional" : "0",
19
+ "dataType" : "Float32",
20
+ "formattedType" : "MultiArray (Float32 1 × 1 × 1 × 1)",
21
+ "shortDescription" : "Indicates whether the safety checker model found any sensitive content in the given image",
22
+ "shape" : "[1, 1, 1, 1]",
23
+ "name" : "has_nsfw_concepts",
24
+ "type" : "MultiArray"
25
+ },
26
+ {
27
+ "hasShapeFlexibility" : "0",
28
+ "isOptional" : "0",
29
+ "dataType" : "Float32",
30
+ "formattedType" : "MultiArray (Float32 1 × 17)",
31
+ "shortDescription" : "Concept scores are the scores before thresholding at zero yields the `has_nsfw_concepts` output. These scores can be used to tune the `adjustment` input",
32
+ "shape" : "[1, 17]",
33
+ "name" : "concept_scores",
34
+ "type" : "MultiArray"
35
+ }
36
+ ],
37
+ "version" : "\/Users\/metal\/Models\/converted\/bravoChildrens_v10-Diffusers",
38
+ "modelParameters" : [
39
+
40
+ ],
41
+ "author" : "Please refer to the Model Card available at huggingface.co\/\/Users\/metal\/Models\/converted\/bravoChildrens_v10-Diffusers",
42
+ "specificationVersion" : 7,
43
+ "storagePrecision" : "Float16",
44
+ "license" : "OpenRAIL (https:\/\/huggingface.co\/spaces\/CompVis\/stable-diffusion-license)",
45
+ "mlProgramOperationTypeHistogram" : {
46
+ "Transpose" : 97,
47
+ "Ios16.scatterNd" : 1,
48
+ "Ios16.softmax" : 24,
49
+ "Ios16.linear" : 147,
50
+ "Ios16.add" : 51,
51
+ "Concat" : 2,
52
+ "Ios16.realDiv" : 1,
53
+ "Ios16.sigmoid" : 24,
54
+ "Tile" : 4,
55
+ "Ios16.reduceSum" : 2,
56
+ "Select" : 1,
57
+ "Ios16.greater" : 4,
58
+ "Shape" : 1,
59
+ "ExpandDims" : 4,
60
+ "Ios16.cast" : 7,
61
+ "Ios16.conv" : 1,
62
+ "Ios16.matmul" : 48,
63
+ "Ios16.reshape" : 97,
64
+ "Ios16.layerNorm" : 50,
65
+ "SliceByIndex" : 2,
66
+ "Ios16.maximum" : 1,
67
+ "Ios16.equal" : 1,
68
+ "Ios16.mul" : 73,
69
+ "NonZero" : 1,
70
+ "Ios16.reduceL2Norm" : 1
71
+ },
72
+ "computePrecision" : "Mixed (Float16, Float32, Int32)",
73
+ "stateSchema" : [
74
+
75
+ ],
76
+ "isUpdatable" : "0",
77
+ "availability" : {
78
+ "macOS" : "13.0",
79
+ "tvOS" : "16.0",
80
+ "visionOS" : "1.0",
81
+ "watchOS" : "9.0",
82
+ "iOS" : "16.0",
83
+ "macCatalyst" : "16.0"
84
+ },
85
+ "modelType" : {
86
+ "name" : "MLModelType_mlProgram"
87
+ },
88
+ "inputSchema" : [
89
+ {
90
+ "hasShapeFlexibility" : "0",
91
+ "isOptional" : "0",
92
+ "dataType" : "Float16",
93
+ "formattedType" : "MultiArray (Float16 1 × 3 × 224 × 224)",
94
+ "shortDescription" : "The normalized image input tensor resized to (224x224) in channels-first (BCHW) format",
95
+ "shape" : "[1, 3, 224, 224]",
96
+ "name" : "clip_input",
97
+ "type" : "MultiArray"
98
+ },
99
+ {
100
+ "hasShapeFlexibility" : "0",
101
+ "isOptional" : "0",
102
+ "dataType" : "Float16",
103
+ "formattedType" : "MultiArray (Float16 1 × 512 × 512 × 3)",
104
+ "shortDescription" : "Output of the vae_decoder (512x512) in channels-last (BHWC) format",
105
+ "shape" : "[1, 512, 512, 3]",
106
+ "name" : "images",
107
+ "type" : "MultiArray"
108
+ },
109
+ {
110
+ "hasShapeFlexibility" : "0",
111
+ "isOptional" : "0",
112
+ "dataType" : "Float16",
113
+ "formattedType" : "MultiArray (Float16 1)",
114
+ "shortDescription" : "Bias added to the concept scores to trade off increased recall for reduce precision in the safety checker classifier",
115
+ "shape" : "[1]",
116
+ "name" : "adjustment",
117
+ "type" : "MultiArray"
118
+ }
119
+ ],
120
+ "userDefinedMetadata" : {
121
+ "com.github.apple.coremltools.conversion_date" : "2025-12-13",
122
+ "com.github.apple.coremltools.source" : "torch==2.9.1",
123
+ "com.github.apple.coremltools.version" : "9.0",
124
+ "com.github.apple.coremltools.source_dialect" : "TorchScript"
125
+ },
126
+ "generatedClassName" : "Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10_Diffusers_safety_checker",
127
+ "method" : "predict"
128
+ }
129
+ ]
Resources/SafetyChecker.mlmodelc/model.mil ADDED
The diff for this file is too large to render. See raw diff
 
Resources/SafetyChecker.mlmodelc/weights/weight.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ffdfb1a4774feb816dd2ccc3e304d109013bc9591e188dcab840789583fdc1b4
3
+ size 607990114
Resources/TextEncoder.mlmodelc/analytics/coremldata.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3c8207e8e9d05b798b2f13342a20f7bdd22321e797e9838f693d9f5b3b7345b6
3
+ size 243
Resources/TextEncoder.mlmodelc/coremldata.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d24207fa092fcc7de7d3cc869a3a400c3e405ef056bae849d5413a020fe04956
3
+ size 1012
Resources/TextEncoder.mlmodelc/metadata.json ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "shortDescription" : "Stable Diffusion generates images conditioned on text and\/or other images as input through the diffusion process. Please refer to https:\/\/arxiv.org\/abs\/2112.10752 for details.",
4
+ "metadataOutputVersion" : "3.0",
5
+ "outputSchema" : [
6
+ {
7
+ "hasShapeFlexibility" : "0",
8
+ "isOptional" : "0",
9
+ "dataType" : "Float32",
10
+ "formattedType" : "MultiArray (Float32 1 × 77 × 768)",
11
+ "shortDescription" : "The token embeddings as encoded by the Transformer model",
12
+ "shape" : "[1, 77, 768]",
13
+ "name" : "last_hidden_state",
14
+ "type" : "MultiArray"
15
+ },
16
+ {
17
+ "hasShapeFlexibility" : "0",
18
+ "isOptional" : "0",
19
+ "dataType" : "Float32",
20
+ "formattedType" : "MultiArray (Float32 1 × 768)",
21
+ "shortDescription" : "The version of the `last_hidden_state` output after pooling",
22
+ "shape" : "[1, 768]",
23
+ "name" : "pooled_outputs",
24
+ "type" : "MultiArray"
25
+ }
26
+ ],
27
+ "version" : "\/Users\/metal\/Models\/converted\/bravoChildrens_v10-Diffusers",
28
+ "modelParameters" : [
29
+
30
+ ],
31
+ "author" : "Please refer to the Model Card available at huggingface.co\/\/Users\/metal\/Models\/converted\/bravoChildrens_v10-Diffusers",
32
+ "specificationVersion" : 7,
33
+ "storagePrecision" : "Float16",
34
+ "license" : "OpenRAIL (https:\/\/huggingface.co\/spaces\/CompVis\/stable-diffusion-license)",
35
+ "mlProgramOperationTypeHistogram" : {
36
+ "Ios16.cast" : 3,
37
+ "Ios16.mul" : 36,
38
+ "Ios16.layerNorm" : 25,
39
+ "Transpose" : 48,
40
+ "Stack" : 1,
41
+ "Ios16.sigmoid" : 12,
42
+ "Ios16.linear" : 72,
43
+ "Ios16.add" : 37,
44
+ "Ios16.softmax" : 12,
45
+ "Ios16.matmul" : 24,
46
+ "Ios16.gatherNd" : 1,
47
+ "Ios16.gather" : 1,
48
+ "Ios16.reshape" : 48,
49
+ "Ios16.reduceArgmax" : 1
50
+ },
51
+ "computePrecision" : "Mixed (Float16, Float32, Int32)",
52
+ "stateSchema" : [
53
+
54
+ ],
55
+ "isUpdatable" : "0",
56
+ "availability" : {
57
+ "macOS" : "13.0",
58
+ "tvOS" : "16.0",
59
+ "visionOS" : "1.0",
60
+ "watchOS" : "9.0",
61
+ "iOS" : "16.0",
62
+ "macCatalyst" : "16.0"
63
+ },
64
+ "modelType" : {
65
+ "name" : "MLModelType_mlProgram"
66
+ },
67
+ "inputSchema" : [
68
+ {
69
+ "hasShapeFlexibility" : "0",
70
+ "isOptional" : "0",
71
+ "dataType" : "Float32",
72
+ "formattedType" : "MultiArray (Float32 1 × 77)",
73
+ "shortDescription" : "The token ids that represent the input text",
74
+ "shape" : "[1, 77]",
75
+ "name" : "input_ids",
76
+ "type" : "MultiArray"
77
+ }
78
+ ],
79
+ "userDefinedMetadata" : {
80
+ "com.github.apple.coremltools.conversion_date" : "2025-12-13",
81
+ "com.github.apple.coremltools.source" : "torch==2.9.1",
82
+ "com.github.apple.coremltools.version" : "9.0",
83
+ "com.github.apple.coremltools.source_dialect" : "TorchScript"
84
+ },
85
+ "generatedClassName" : "Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10_Diffusers_text_encoder",
86
+ "method" : "predict"
87
+ }
88
+ ]
Resources/TextEncoder.mlmodelc/model.mil ADDED
The diff for this file is too large to render. See raw diff
 
Resources/TextEncoder.mlmodelc/weights/weight.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4f2994578d2b34e306784ca82ae6730fc4604ce758d580e3af976cb7e58cf324
3
+ size 246145536
Resources/Unet.mlmodelc/analytics/coremldata.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b298fff91b2ea143d210daa6121e5c9b1f9c5ae1042d9613e31acfa16eae51d2
3
+ size 243
Resources/Unet.mlmodelc/coremldata.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3375fbaa722b7a2bd90c963f1df287f1cfd7fb9977481de48f59736b9373455d
3
+ size 1445
Resources/Unet.mlmodelc/metadata.json ADDED
@@ -0,0 +1,109 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "shortDescription" : "Stable Diffusion generates images conditioned on text or other images as input through the diffusion process. Please refer to https:\/\/arxiv.org\/abs\/2112.10752 for details.",
4
+ "metadataOutputVersion" : "3.0",
5
+ "outputSchema" : [
6
+ {
7
+ "hasShapeFlexibility" : "0",
8
+ "isOptional" : "0",
9
+ "dataType" : "Float32",
10
+ "formattedType" : "MultiArray (Float32 2 × 4 × 64 × 64)",
11
+ "shortDescription" : "Same shape and dtype as the `sample` input. The predicted noise to facilitate the reverse diffusion (denoising) process",
12
+ "shape" : "[2, 4, 64, 64]",
13
+ "name" : "noise_pred",
14
+ "type" : "MultiArray"
15
+ }
16
+ ],
17
+ "version" : "\/Users\/metal\/Models\/converted\/bravoChildrens_v10-Diffusers",
18
+ "modelParameters" : [
19
+
20
+ ],
21
+ "author" : "Please refer to the Model Card available at huggingface.co\/\/Users\/metal\/Models\/converted\/bravoChildrens_v10-Diffusers",
22
+ "specificationVersion" : 7,
23
+ "storagePrecision" : "Float16",
24
+ "license" : "OpenRAIL (https:\/\/huggingface.co\/spaces\/CompVis\/stable-diffusion-license)",
25
+ "mlProgramOperationTypeHistogram" : {
26
+ "Transpose" : 32,
27
+ "UpsampleNearestNeighbor" : 3,
28
+ "Ios16.reduceMean" : 122,
29
+ "Ios16.sin" : 1,
30
+ "Ios16.softmax" : 896,
31
+ "Split" : 16,
32
+ "Ios16.add" : 169,
33
+ "Concat" : 206,
34
+ "Ios16.realDiv" : 61,
35
+ "Ios16.square" : 61,
36
+ "ExpandDims" : 3,
37
+ "Ios16.sub" : 61,
38
+ "Ios16.sqrt" : 61,
39
+ "Ios16.einsum" : 1792,
40
+ "Ios16.conv" : 282,
41
+ "Ios16.reshape" : 154,
42
+ "Ios16.layerNorm" : 48,
43
+ "SliceByIndex" : 1570,
44
+ "Ios16.batchNorm" : 61,
45
+ "Ios16.silu" : 47,
46
+ "Ios16.gelu" : 16,
47
+ "Ios16.cast" : 1,
48
+ "Ios16.mul" : 913,
49
+ "Ios16.cos" : 1
50
+ },
51
+ "computePrecision" : "Mixed (Float16, Float32, Int32)",
52
+ "stateSchema" : [
53
+
54
+ ],
55
+ "isUpdatable" : "0",
56
+ "availability" : {
57
+ "macOS" : "13.0",
58
+ "tvOS" : "16.0",
59
+ "visionOS" : "1.0",
60
+ "watchOS" : "9.0",
61
+ "iOS" : "16.0",
62
+ "macCatalyst" : "16.0"
63
+ },
64
+ "modelType" : {
65
+ "name" : "MLModelType_mlProgram"
66
+ },
67
+ "inputSchema" : [
68
+ {
69
+ "hasShapeFlexibility" : "0",
70
+ "isOptional" : "0",
71
+ "dataType" : "Float16",
72
+ "formattedType" : "MultiArray (Float16 2 × 4 × 64 × 64)",
73
+ "shortDescription" : "The low resolution latent feature maps being denoised through reverse diffusion",
74
+ "shape" : "[2, 4, 64, 64]",
75
+ "name" : "sample",
76
+ "type" : "MultiArray"
77
+ },
78
+ {
79
+ "hasShapeFlexibility" : "0",
80
+ "isOptional" : "0",
81
+ "dataType" : "Float16",
82
+ "formattedType" : "MultiArray (Float16 2)",
83
+ "shortDescription" : "A value emitted by the associated scheduler object to condition the model on a given noise schedule",
84
+ "shape" : "[2]",
85
+ "name" : "timestep",
86
+ "type" : "MultiArray"
87
+ },
88
+ {
89
+ "hasShapeFlexibility" : "0",
90
+ "isOptional" : "0",
91
+ "dataType" : "Float16",
92
+ "formattedType" : "MultiArray (Float16 2 × 768 × 1 × 77)",
93
+ "shortDescription" : "Output embeddings from the associated text_encoder model to condition to generated image on text. A maximum of 77 tokens (~40 words) are allowed. Longer text is truncated. Shorter text does not reduce computation.",
94
+ "shape" : "[2, 768, 1, 77]",
95
+ "name" : "encoder_hidden_states",
96
+ "type" : "MultiArray"
97
+ }
98
+ ],
99
+ "userDefinedMetadata" : {
100
+ "com.github.apple.coremltools.conversion_date" : "2025-12-13",
101
+ "com.github.apple.ml-stable-diffusion.version" : "1.1.0",
102
+ "com.github.apple.coremltools.source" : "torch==2.9.1",
103
+ "com.github.apple.coremltools.version" : "9.0",
104
+ "com.github.apple.coremltools.source_dialect" : "TorchScript"
105
+ },
106
+ "generatedClassName" : "Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10_Diffusers_unet",
107
+ "method" : "predict"
108
+ }
109
+ ]
Resources/Unet.mlmodelc/model.mil ADDED
The diff for this file is too large to render. See raw diff
 
Resources/Unet.mlmodelc/weights/weight.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2c25ad5e0061b703dba7fc93d60f689bef11b39f053f0201dc9fe220252c181e
3
+ size 1719117696
Resources/UnetChunk1.mlmodelc/analytics/coremldata.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:dbb35d2b1e7f729fe68b98a049e99792c3739370dc75ab827e34bf338d81d038
3
+ size 243
Resources/UnetChunk1.mlmodelc/coremldata.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8b1d37605c9655686f9a84b92e6835fc25aa63574d539f22b097c478fa95f38c
3
+ size 684
Resources/UnetChunk1.mlmodelc/metadata.json ADDED
@@ -0,0 +1,220 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "metadataOutputVersion" : "3.0",
4
+ "storagePrecision" : "Float16",
5
+ "outputSchema" : [
6
+ {
7
+ "hasShapeFlexibility" : "0",
8
+ "isOptional" : "0",
9
+ "dataType" : "Float32",
10
+ "formattedType" : "MultiArray (Float32 2 × 640 × 32 × 32)",
11
+ "shortDescription" : "",
12
+ "shape" : "[2, 640, 32, 32]",
13
+ "name" : "input_89_cast_fp16",
14
+ "type" : "MultiArray"
15
+ },
16
+ {
17
+ "hasShapeFlexibility" : "0",
18
+ "isOptional" : "0",
19
+ "dataType" : "Float32",
20
+ "formattedType" : "MultiArray (Float32 2 × 320 × 64 × 64)",
21
+ "shortDescription" : "",
22
+ "shape" : "[2, 320, 64, 64]",
23
+ "name" : "input_7_cast_fp16",
24
+ "type" : "MultiArray"
25
+ },
26
+ {
27
+ "hasShapeFlexibility" : "0",
28
+ "isOptional" : "0",
29
+ "dataType" : "Float32",
30
+ "formattedType" : "MultiArray (Float32 2 × 320 × 32 × 32)",
31
+ "shortDescription" : "",
32
+ "shape" : "[2, 320, 32, 32]",
33
+ "name" : "input_63_cast_fp16",
34
+ "type" : "MultiArray"
35
+ },
36
+ {
37
+ "hasShapeFlexibility" : "0",
38
+ "isOptional" : "0",
39
+ "dataType" : "Float32",
40
+ "formattedType" : "MultiArray (Float32 2 × 640 × 16 × 16)",
41
+ "shortDescription" : "",
42
+ "shape" : "[2, 640, 16, 16]",
43
+ "name" : "input_117_cast_fp16",
44
+ "type" : "MultiArray"
45
+ },
46
+ {
47
+ "hasShapeFlexibility" : "0",
48
+ "isOptional" : "0",
49
+ "dataType" : "Float32",
50
+ "formattedType" : "MultiArray (Float32 2 × 1280 × 8 × 8)",
51
+ "shortDescription" : "",
52
+ "shape" : "[2, 1280, 8, 8]",
53
+ "name" : "input_171_cast_fp16",
54
+ "type" : "MultiArray"
55
+ },
56
+ {
57
+ "hasShapeFlexibility" : "0",
58
+ "isOptional" : "0",
59
+ "dataType" : "Float32",
60
+ "formattedType" : "MultiArray (Float32 2 × 320 × 64 × 64)",
61
+ "shortDescription" : "",
62
+ "shape" : "[2, 320, 64, 64]",
63
+ "name" : "input_61_cast_fp16",
64
+ "type" : "MultiArray"
65
+ },
66
+ {
67
+ "hasShapeFlexibility" : "0",
68
+ "isOptional" : "0",
69
+ "dataType" : "Float32",
70
+ "formattedType" : "MultiArray (Float32 2 × 1280 × 1 × 1)",
71
+ "shortDescription" : "",
72
+ "shape" : "[2, 1280, 1, 1]",
73
+ "name" : "input_15_cast_fp16",
74
+ "type" : "MultiArray"
75
+ },
76
+ {
77
+ "hasShapeFlexibility" : "0",
78
+ "isOptional" : "0",
79
+ "dataType" : "Float32",
80
+ "formattedType" : "MultiArray (Float32 2 × 640 × 32 × 32)",
81
+ "shortDescription" : "",
82
+ "shape" : "[2, 640, 32, 32]",
83
+ "name" : "input_115_cast_fp16",
84
+ "type" : "MultiArray"
85
+ },
86
+ {
87
+ "hasShapeFlexibility" : "0",
88
+ "isOptional" : "0",
89
+ "dataType" : "Float32",
90
+ "formattedType" : "MultiArray (Float32 2 × 1280 × 16 × 16)",
91
+ "shortDescription" : "",
92
+ "shape" : "[2, 1280, 16, 16]",
93
+ "name" : "input_169_cast_fp16",
94
+ "type" : "MultiArray"
95
+ },
96
+ {
97
+ "hasShapeFlexibility" : "0",
98
+ "isOptional" : "0",
99
+ "dataType" : "Float32",
100
+ "formattedType" : "MultiArray (Float32 2 × 320 × 64 × 64)",
101
+ "shortDescription" : "",
102
+ "shape" : "[2, 320, 64, 64]",
103
+ "name" : "input_35_cast_fp16",
104
+ "type" : "MultiArray"
105
+ },
106
+ {
107
+ "hasShapeFlexibility" : "0",
108
+ "isOptional" : "0",
109
+ "dataType" : "Float32",
110
+ "formattedType" : "MultiArray (Float32 2 × 1280 × 16 × 16)",
111
+ "shortDescription" : "",
112
+ "shape" : "[2, 1280, 16, 16]",
113
+ "name" : "input_143_cast_fp16",
114
+ "type" : "MultiArray"
115
+ },
116
+ {
117
+ "hasShapeFlexibility" : "0",
118
+ "isOptional" : "0",
119
+ "dataType" : "Float32",
120
+ "formattedType" : "MultiArray (Float32 2 × 1280 × 8 × 8)",
121
+ "shortDescription" : "",
122
+ "shape" : "[2, 1280, 8, 8]",
123
+ "name" : "hidden_states_149_cast_fp16",
124
+ "type" : "MultiArray"
125
+ },
126
+ {
127
+ "hasShapeFlexibility" : "0",
128
+ "isOptional" : "0",
129
+ "dataType" : "Float32",
130
+ "formattedType" : "MultiArray (Float32 2 × 2560 × 8 × 8)",
131
+ "shortDescription" : "",
132
+ "shape" : "[2, 2560, 8, 8]",
133
+ "name" : "input_253_cast_fp16",
134
+ "type" : "MultiArray"
135
+ }
136
+ ],
137
+ "modelParameters" : [
138
+
139
+ ],
140
+ "specificationVersion" : 7,
141
+ "mlProgramOperationTypeHistogram" : {
142
+ "Transpose" : 14,
143
+ "Ios16.reduceMean" : 62,
144
+ "Ios16.sin" : 1,
145
+ "Ios16.softmax" : 368,
146
+ "Split" : 7,
147
+ "Ios16.add" : 82,
148
+ "Concat" : 82,
149
+ "Ios16.realDiv" : 31,
150
+ "Ios16.square" : 31,
151
+ "ExpandDims" : 3,
152
+ "Ios16.sub" : 31,
153
+ "Ios16.sqrt" : 31,
154
+ "Ios16.einsum" : 736,
155
+ "Ios16.conv" : 129,
156
+ "Ios16.reshape" : 76,
157
+ "Ios16.layerNorm" : 21,
158
+ "SliceByIndex" : 658,
159
+ "Ios16.batchNorm" : 31,
160
+ "Ios16.silu" : 26,
161
+ "Ios16.gelu" : 7,
162
+ "Ios16.cast" : 13,
163
+ "Ios16.mul" : 376,
164
+ "Ios16.cos" : 1
165
+ },
166
+ "computePrecision" : "Mixed (Float16, Float32, Int32)",
167
+ "isUpdatable" : "0",
168
+ "stateSchema" : [
169
+
170
+ ],
171
+ "availability" : {
172
+ "macOS" : "13.0",
173
+ "tvOS" : "16.0",
174
+ "visionOS" : "1.0",
175
+ "watchOS" : "9.0",
176
+ "iOS" : "16.0",
177
+ "macCatalyst" : "16.0"
178
+ },
179
+ "modelType" : {
180
+ "name" : "MLModelType_mlProgram"
181
+ },
182
+ "userDefinedMetadata" : {
183
+
184
+ },
185
+ "inputSchema" : [
186
+ {
187
+ "hasShapeFlexibility" : "0",
188
+ "isOptional" : "0",
189
+ "dataType" : "Float16",
190
+ "formattedType" : "MultiArray (Float16 2 × 4 × 64 × 64)",
191
+ "shortDescription" : "",
192
+ "shape" : "[2, 4, 64, 64]",
193
+ "name" : "sample",
194
+ "type" : "MultiArray"
195
+ },
196
+ {
197
+ "hasShapeFlexibility" : "0",
198
+ "isOptional" : "0",
199
+ "dataType" : "Float16",
200
+ "formattedType" : "MultiArray (Float16 2)",
201
+ "shortDescription" : "",
202
+ "shape" : "[2]",
203
+ "name" : "timestep",
204
+ "type" : "MultiArray"
205
+ },
206
+ {
207
+ "hasShapeFlexibility" : "0",
208
+ "isOptional" : "0",
209
+ "dataType" : "Float16",
210
+ "formattedType" : "MultiArray (Float16 2 × 768 × 1 × 77)",
211
+ "shortDescription" : "",
212
+ "shape" : "[2, 768, 1, 77]",
213
+ "name" : "encoder_hidden_states",
214
+ "type" : "MultiArray"
215
+ }
216
+ ],
217
+ "generatedClassName" : "Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10_Diffusers_unet_chunk1",
218
+ "method" : "predict"
219
+ }
220
+ ]
Resources/UnetChunk1.mlmodelc/model.mil ADDED
The diff for this file is too large to render. See raw diff
 
Resources/UnetChunk1.mlmodelc/weights/weight.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7b2ea88013f47ba9f67ce5f6c9c2eb8f64ea5956bef5c797ffd64968292b3004
3
+ size 887569600
Resources/UnetChunk2.mlmodelc/analytics/coremldata.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:27ecec7ebf7b306f04dbfb2ea45e3f4b32d1f2521641cd454a9eaa57315a548a
3
+ size 243
Resources/UnetChunk2.mlmodelc/coremldata.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:57a3f6d64bdce0bb0a7fd4ff75e835ee6531104c3e061bc9c1d05d75179e725f
3
+ size 665
Resources/UnetChunk2.mlmodelc/metadata.json ADDED
@@ -0,0 +1,208 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "metadataOutputVersion" : "3.0",
4
+ "storagePrecision" : "Float16",
5
+ "outputSchema" : [
6
+ {
7
+ "hasShapeFlexibility" : "0",
8
+ "isOptional" : "0",
9
+ "dataType" : "Float32",
10
+ "formattedType" : "MultiArray (Float32 2 × 4 × 64 × 64)",
11
+ "shortDescription" : "",
12
+ "shape" : "[2, 4, 64, 64]",
13
+ "name" : "noise_pred",
14
+ "type" : "MultiArray"
15
+ }
16
+ ],
17
+ "modelParameters" : [
18
+
19
+ ],
20
+ "specificationVersion" : 7,
21
+ "mlProgramOperationTypeHistogram" : {
22
+ "Transpose" : 18,
23
+ "UpsampleNearestNeighbor" : 3,
24
+ "Ios16.reduceMean" : 60,
25
+ "Ios16.softmax" : 528,
26
+ "Split" : 9,
27
+ "Ios16.add" : 87,
28
+ "Concat" : 124,
29
+ "Ios16.realDiv" : 30,
30
+ "Ios16.square" : 30,
31
+ "Ios16.sub" : 30,
32
+ "Ios16.cast" : 14,
33
+ "Ios16.sqrt" : 30,
34
+ "Ios16.einsum" : 1056,
35
+ "Ios16.conv" : 153,
36
+ "Ios16.layerNorm" : 27,
37
+ "SliceByIndex" : 912,
38
+ "Ios16.batchNorm" : 30,
39
+ "Ios16.reshape" : 78,
40
+ "Ios16.silu" : 21,
41
+ "Ios16.gelu" : 9,
42
+ "Ios16.mul" : 537
43
+ },
44
+ "computePrecision" : "Mixed (Float16, Float32, Int32)",
45
+ "isUpdatable" : "0",
46
+ "stateSchema" : [
47
+
48
+ ],
49
+ "availability" : {
50
+ "macOS" : "13.0",
51
+ "tvOS" : "16.0",
52
+ "visionOS" : "1.0",
53
+ "watchOS" : "9.0",
54
+ "iOS" : "16.0",
55
+ "macCatalyst" : "16.0"
56
+ },
57
+ "modelType" : {
58
+ "name" : "MLModelType_mlProgram"
59
+ },
60
+ "userDefinedMetadata" : {
61
+
62
+ },
63
+ "inputSchema" : [
64
+ {
65
+ "hasShapeFlexibility" : "0",
66
+ "isOptional" : "0",
67
+ "dataType" : "Float16",
68
+ "formattedType" : "MultiArray (Float16 2 × 768 × 1 × 77)",
69
+ "shortDescription" : "",
70
+ "shape" : "[2, 768, 1, 77]",
71
+ "name" : "encoder_hidden_states",
72
+ "type" : "MultiArray"
73
+ },
74
+ {
75
+ "hasShapeFlexibility" : "0",
76
+ "isOptional" : "0",
77
+ "dataType" : "Float32",
78
+ "formattedType" : "MultiArray (Float32 2 × 1280 × 1 × 1)",
79
+ "shortDescription" : "",
80
+ "shape" : "[2, 1280, 1, 1]",
81
+ "name" : "input_15_cast_fp16",
82
+ "type" : "MultiArray"
83
+ },
84
+ {
85
+ "hasShapeFlexibility" : "0",
86
+ "isOptional" : "0",
87
+ "dataType" : "Float32",
88
+ "formattedType" : "MultiArray (Float32 2 × 320 × 64 × 64)",
89
+ "shortDescription" : "",
90
+ "shape" : "[2, 320, 64, 64]",
91
+ "name" : "input_61_cast_fp16",
92
+ "type" : "MultiArray"
93
+ },
94
+ {
95
+ "hasShapeFlexibility" : "0",
96
+ "isOptional" : "0",
97
+ "dataType" : "Float32",
98
+ "formattedType" : "MultiArray (Float32 2 × 320 × 64 × 64)",
99
+ "shortDescription" : "",
100
+ "shape" : "[2, 320, 64, 64]",
101
+ "name" : "input_7_cast_fp16",
102
+ "type" : "MultiArray"
103
+ },
104
+ {
105
+ "hasShapeFlexibility" : "0",
106
+ "isOptional" : "0",
107
+ "dataType" : "Float32",
108
+ "formattedType" : "MultiArray (Float32 2 × 640 × 32 × 32)",
109
+ "shortDescription" : "",
110
+ "shape" : "[2, 640, 32, 32]",
111
+ "name" : "input_115_cast_fp16",
112
+ "type" : "MultiArray"
113
+ },
114
+ {
115
+ "hasShapeFlexibility" : "0",
116
+ "isOptional" : "0",
117
+ "dataType" : "Float32",
118
+ "formattedType" : "MultiArray (Float32 2 × 640 × 16 × 16)",
119
+ "shortDescription" : "",
120
+ "shape" : "[2, 640, 16, 16]",
121
+ "name" : "input_117_cast_fp16",
122
+ "type" : "MultiArray"
123
+ },
124
+ {
125
+ "hasShapeFlexibility" : "0",
126
+ "isOptional" : "0",
127
+ "dataType" : "Float32",
128
+ "formattedType" : "MultiArray (Float32 2 × 1280 × 16 × 16)",
129
+ "shortDescription" : "",
130
+ "shape" : "[2, 1280, 16, 16]",
131
+ "name" : "input_169_cast_fp16",
132
+ "type" : "MultiArray"
133
+ },
134
+ {
135
+ "hasShapeFlexibility" : "0",
136
+ "isOptional" : "0",
137
+ "dataType" : "Float32",
138
+ "formattedType" : "MultiArray (Float32 2 × 320 × 64 × 64)",
139
+ "shortDescription" : "",
140
+ "shape" : "[2, 320, 64, 64]",
141
+ "name" : "input_35_cast_fp16",
142
+ "type" : "MultiArray"
143
+ },
144
+ {
145
+ "hasShapeFlexibility" : "0",
146
+ "isOptional" : "0",
147
+ "dataType" : "Float32",
148
+ "formattedType" : "MultiArray (Float32 2 × 1280 × 16 × 16)",
149
+ "shortDescription" : "",
150
+ "shape" : "[2, 1280, 16, 16]",
151
+ "name" : "input_143_cast_fp16",
152
+ "type" : "MultiArray"
153
+ },
154
+ {
155
+ "hasShapeFlexibility" : "0",
156
+ "isOptional" : "0",
157
+ "dataType" : "Float32",
158
+ "formattedType" : "MultiArray (Float32 2 × 640 × 32 × 32)",
159
+ "shortDescription" : "",
160
+ "shape" : "[2, 640, 32, 32]",
161
+ "name" : "input_89_cast_fp16",
162
+ "type" : "MultiArray"
163
+ },
164
+ {
165
+ "hasShapeFlexibility" : "0",
166
+ "isOptional" : "0",
167
+ "dataType" : "Float32",
168
+ "formattedType" : "MultiArray (Float32 2 × 1280 × 8 × 8)",
169
+ "shortDescription" : "",
170
+ "shape" : "[2, 1280, 8, 8]",
171
+ "name" : "hidden_states_149_cast_fp16",
172
+ "type" : "MultiArray"
173
+ },
174
+ {
175
+ "hasShapeFlexibility" : "0",
176
+ "isOptional" : "0",
177
+ "dataType" : "Float32",
178
+ "formattedType" : "MultiArray (Float32 2 × 2560 × 8 × 8)",
179
+ "shortDescription" : "",
180
+ "shape" : "[2, 2560, 8, 8]",
181
+ "name" : "input_253_cast_fp16",
182
+ "type" : "MultiArray"
183
+ },
184
+ {
185
+ "hasShapeFlexibility" : "0",
186
+ "isOptional" : "0",
187
+ "dataType" : "Float32",
188
+ "formattedType" : "MultiArray (Float32 2 × 1280 × 8 × 8)",
189
+ "shortDescription" : "",
190
+ "shape" : "[2, 1280, 8, 8]",
191
+ "name" : "input_171_cast_fp16",
192
+ "type" : "MultiArray"
193
+ },
194
+ {
195
+ "hasShapeFlexibility" : "0",
196
+ "isOptional" : "0",
197
+ "dataType" : "Float32",
198
+ "formattedType" : "MultiArray (Float32 2 × 320 × 32 × 32)",
199
+ "shortDescription" : "",
200
+ "shape" : "[2, 320, 32, 32]",
201
+ "name" : "input_63_cast_fp16",
202
+ "type" : "MultiArray"
203
+ }
204
+ ],
205
+ "generatedClassName" : "Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10_Diffusers_unet_chunk2",
206
+ "method" : "predict"
207
+ }
208
+ ]
Resources/UnetChunk2.mlmodelc/model.mil ADDED
The diff for this file is too large to render. See raw diff
 
Resources/UnetChunk2.mlmodelc/weights/weight.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6e64219ba339d275e9810976371d9320c65114d2850642a82d544d996257dad7
3
+ size 831567872
Resources/VAEDecoder.mlmodelc/analytics/coremldata.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2f12547cb19e753c01cb2722803b062f6c715cc3dbbd14386eb4df4c78f204ed
3
+ size 243
Resources/VAEDecoder.mlmodelc/coremldata.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:08998c0252cd67059ce0c4408c72ab0c3add3f3909556f6ab62250e00ff85a9e
3
+ size 939
Resources/VAEDecoder.mlmodelc/metadata.json ADDED
@@ -0,0 +1,81 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "shortDescription" : "Stable Diffusion generates images conditioned on text and\/or other images as input through the diffusion process. Please refer to https:\/\/arxiv.org\/abs\/2112.10752 for details.",
4
+ "metadataOutputVersion" : "3.0",
5
+ "outputSchema" : [
6
+ {
7
+ "hasShapeFlexibility" : "0",
8
+ "isOptional" : "0",
9
+ "dataType" : "Float32",
10
+ "formattedType" : "MultiArray (Float32 1 × 3 × 512 × 512)",
11
+ "shortDescription" : "Generated image normalized to range [-1, 1]",
12
+ "shape" : "[1, 3, 512, 512]",
13
+ "name" : "image",
14
+ "type" : "MultiArray"
15
+ }
16
+ ],
17
+ "version" : "\/Users\/metal\/Models\/converted\/bravoChildrens_v10-Diffusers",
18
+ "modelParameters" : [
19
+
20
+ ],
21
+ "author" : "Please refer to the Model Card available at huggingface.co\/\/Users\/metal\/Models\/converted\/bravoChildrens_v10-Diffusers",
22
+ "specificationVersion" : 7,
23
+ "storagePrecision" : "Float16",
24
+ "license" : "OpenRAIL (https:\/\/huggingface.co\/spaces\/CompVis\/stable-diffusion-license)",
25
+ "mlProgramOperationTypeHistogram" : {
26
+ "Ios16.cast" : 1,
27
+ "Ios16.mul" : 2,
28
+ "Ios16.sub" : 30,
29
+ "Transpose" : 6,
30
+ "Ios16.sqrt" : 30,
31
+ "UpsampleNearestNeighbor" : 3,
32
+ "Ios16.square" : 30,
33
+ "Ios16.add" : 46,
34
+ "Ios16.reduceMean" : 60,
35
+ "Ios16.realDiv" : 30,
36
+ "Ios16.conv" : 36,
37
+ "Ios16.linear" : 4,
38
+ "Ios16.matmul" : 2,
39
+ "Ios16.batchNorm" : 29,
40
+ "Ios16.softmax" : 1,
41
+ "Ios16.reshape" : 65,
42
+ "Ios16.silu" : 29
43
+ },
44
+ "computePrecision" : "Mixed (Float16, Float32, Int32)",
45
+ "stateSchema" : [
46
+
47
+ ],
48
+ "isUpdatable" : "0",
49
+ "availability" : {
50
+ "macOS" : "13.0",
51
+ "tvOS" : "16.0",
52
+ "visionOS" : "1.0",
53
+ "watchOS" : "9.0",
54
+ "iOS" : "16.0",
55
+ "macCatalyst" : "16.0"
56
+ },
57
+ "modelType" : {
58
+ "name" : "MLModelType_mlProgram"
59
+ },
60
+ "inputSchema" : [
61
+ {
62
+ "hasShapeFlexibility" : "0",
63
+ "isOptional" : "0",
64
+ "dataType" : "Float16",
65
+ "formattedType" : "MultiArray (Float16 1 × 4 × 64 × 64)",
66
+ "shortDescription" : "The denoised latent embeddings from the unet model after the last step of reverse diffusion",
67
+ "shape" : "[1, 4, 64, 64]",
68
+ "name" : "z",
69
+ "type" : "MultiArray"
70
+ }
71
+ ],
72
+ "userDefinedMetadata" : {
73
+ "com.github.apple.coremltools.conversion_date" : "2025-12-13",
74
+ "com.github.apple.coremltools.source" : "torch==2.9.1",
75
+ "com.github.apple.coremltools.version" : "9.0",
76
+ "com.github.apple.coremltools.source_dialect" : "TorchScript"
77
+ },
78
+ "generatedClassName" : "Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10_Diffusers_vae_decoder",
79
+ "method" : "predict"
80
+ }
81
+ ]
Resources/VAEDecoder.mlmodelc/model.mil ADDED
The diff for this file is too large to render. See raw diff
 
Resources/VAEDecoder.mlmodelc/weights/weight.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8c8dd69a25877524be57cbe9ebebef5ccbcc73cf3186718f526c8dc25e90af36
3
+ size 98993280
Resources/VAEEncoder.mlmodelc/analytics/coremldata.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:887bcbe2e4870c1966618492c316d67373db00126e5421ff4b9b92985a8c27d9
3
+ size 243
Resources/VAEEncoder.mlmodelc/coremldata.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:41b3007085dec7edff5ccecc52bd2efdae1de12bf30e31435ce371ff0fa0a549
3
+ size 943
Resources/VAEEncoder.mlmodelc/metadata.json ADDED
@@ -0,0 +1,81 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "shortDescription" : "Stable Diffusion generates images conditioned on text and\/or other images as input through the diffusion process. Please refer to https:\/\/arxiv.org\/abs\/2112.10752 for details.",
4
+ "metadataOutputVersion" : "3.0",
5
+ "outputSchema" : [
6
+ {
7
+ "hasShapeFlexibility" : "0",
8
+ "isOptional" : "0",
9
+ "dataType" : "Float32",
10
+ "formattedType" : "MultiArray (Float32 1 × 8 × 64 × 64)",
11
+ "shortDescription" : "The latent embeddings from the unet model from the input image.",
12
+ "shape" : "[1, 8, 64, 64]",
13
+ "name" : "latent",
14
+ "type" : "MultiArray"
15
+ }
16
+ ],
17
+ "version" : "\/Users\/metal\/Models\/converted\/bravoChildrens_v10-Diffusers",
18
+ "modelParameters" : [
19
+
20
+ ],
21
+ "author" : "Please refer to the Model Card available at huggingface.co\/\/Users\/metal\/Models\/converted\/bravoChildrens_v10-Diffusers",
22
+ "specificationVersion" : 7,
23
+ "storagePrecision" : "Float16",
24
+ "license" : "OpenRAIL (https:\/\/huggingface.co\/spaces\/CompVis\/stable-diffusion-license)",
25
+ "mlProgramOperationTypeHistogram" : {
26
+ "Pad" : 3,
27
+ "Ios16.cast" : 1,
28
+ "Ios16.mul" : 2,
29
+ "Ios16.sub" : 22,
30
+ "Ios16.sqrt" : 22,
31
+ "Transpose" : 6,
32
+ "Ios16.square" : 22,
33
+ "Ios16.add" : 34,
34
+ "Ios16.reduceMean" : 44,
35
+ "Ios16.realDiv" : 22,
36
+ "Ios16.conv" : 28,
37
+ "Ios16.linear" : 4,
38
+ "Ios16.matmul" : 2,
39
+ "Ios16.batchNorm" : 21,
40
+ "Ios16.softmax" : 1,
41
+ "Ios16.reshape" : 49,
42
+ "Ios16.silu" : 21
43
+ },
44
+ "computePrecision" : "Mixed (Float16, Float32, Int32)",
45
+ "stateSchema" : [
46
+
47
+ ],
48
+ "isUpdatable" : "0",
49
+ "availability" : {
50
+ "macOS" : "13.0",
51
+ "tvOS" : "16.0",
52
+ "visionOS" : "1.0",
53
+ "watchOS" : "9.0",
54
+ "iOS" : "16.0",
55
+ "macCatalyst" : "16.0"
56
+ },
57
+ "modelType" : {
58
+ "name" : "MLModelType_mlProgram"
59
+ },
60
+ "inputSchema" : [
61
+ {
62
+ "hasShapeFlexibility" : "0",
63
+ "isOptional" : "0",
64
+ "dataType" : "Float16",
65
+ "formattedType" : "MultiArray (Float16 1 × 3 × 512 × 512)",
66
+ "shortDescription" : "The input image to base the initial latents on normalized to range [-1, 1]",
67
+ "shape" : "[1, 3, 512, 512]",
68
+ "name" : "x",
69
+ "type" : "MultiArray"
70
+ }
71
+ ],
72
+ "userDefinedMetadata" : {
73
+ "com.github.apple.coremltools.conversion_date" : "2025-12-13",
74
+ "com.github.apple.coremltools.source" : "torch==2.9.1",
75
+ "com.github.apple.coremltools.version" : "9.0",
76
+ "com.github.apple.coremltools.source_dialect" : "TorchScript"
77
+ },
78
+ "generatedClassName" : "Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10_Diffusers_vae_encoder",
79
+ "method" : "predict"
80
+ }
81
+ ]
Resources/VAEEncoder.mlmodelc/model.mil ADDED
The diff for this file is too large to render. See raw diff
 
Resources/VAEEncoder.mlmodelc/weights/weight.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e550383c8edd530d7d576219d9107546b027916e9d3d7f553fb50e9b37ef13cb
3
+ size 68338112
Resources/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
Resources/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_safety_checker.mlpackage/Data/com.apple.CoreML/model.mlmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cec38bd778bff55a417e74f46f8b1d5a061e0ed52fdf69f102d8c346fc8f94f7
3
+ size 298988
Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_safety_checker.mlpackage/Data/com.apple.CoreML/weights/weight.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ffdfb1a4774feb816dd2ccc3e304d109013bc9591e188dcab840789583fdc1b4
3
+ size 607990114
Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_safety_checker.mlpackage/Manifest.json ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "fileFormatVersion": "1.0.0",
3
+ "itemInfoEntries": {
4
+ "1D121421-67D7-4B82-910D-85A752E67FD9": {
5
+ "author": "com.apple.CoreML",
6
+ "description": "CoreML Model Specification",
7
+ "name": "model.mlmodel",
8
+ "path": "com.apple.CoreML/model.mlmodel"
9
+ },
10
+ "D0F626D9-7CCA-4B76-A457-58631AA43F1E": {
11
+ "author": "com.apple.CoreML",
12
+ "description": "CoreML Model Weights",
13
+ "name": "weights",
14
+ "path": "com.apple.CoreML/weights"
15
+ }
16
+ },
17
+ "rootModelIdentifier": "1D121421-67D7-4B82-910D-85A752E67FD9"
18
+ }
Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_text_encoder.mlpackage/Data/com.apple.CoreML/model.mlmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c5e0c7c1002b071ae6511c81c43c4576fcc5f93e5774f1fdb24ffccb1f22b75a
3
+ size 145060
Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_text_encoder.mlpackage/Data/com.apple.CoreML/weights/weight.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4f2994578d2b34e306784ca82ae6730fc4604ce758d580e3af976cb7e58cf324
3
+ size 246145536
Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_text_encoder.mlpackage/Manifest.json ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "fileFormatVersion": "1.0.0",
3
+ "itemInfoEntries": {
4
+ "8944DA1F-7714-40D5-AE2C-B44CB5D472C9": {
5
+ "author": "com.apple.CoreML",
6
+ "description": "CoreML Model Weights",
7
+ "name": "weights",
8
+ "path": "com.apple.CoreML/weights"
9
+ },
10
+ "BBC287CF-2105-4827-A218-17AC28CBFA0C": {
11
+ "author": "com.apple.CoreML",
12
+ "description": "CoreML Model Specification",
13
+ "name": "model.mlmodel",
14
+ "path": "com.apple.CoreML/model.mlmodel"
15
+ }
16
+ },
17
+ "rootModelIdentifier": "BBC287CF-2105-4827-A218-17AC28CBFA0C"
18
+ }
Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_unet.mlpackage/Data/com.apple.CoreML/model.mlmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d3d1f698c6b1b7c56114bb5f1235a22380b0d3a028ece42d696dee963c11345e
3
+ size 2693333
Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_unet.mlpackage/Data/com.apple.CoreML/weights/weight.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2c25ad5e0061b703dba7fc93d60f689bef11b39f053f0201dc9fe220252c181e
3
+ size 1719117696
Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_unet.mlpackage/Manifest.json ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "fileFormatVersion": "1.0.0",
3
+ "itemInfoEntries": {
4
+ "2ED3E775-6206-4188-A2F7-3022281FAAFB": {
5
+ "author": "com.apple.CoreML",
6
+ "description": "CoreML Model Specification",
7
+ "name": "model.mlmodel",
8
+ "path": "com.apple.CoreML/model.mlmodel"
9
+ },
10
+ "5C75E264-C890-4A93-A691-A78E04531D73": {
11
+ "author": "com.apple.CoreML",
12
+ "description": "CoreML Model Weights",
13
+ "name": "weights",
14
+ "path": "com.apple.CoreML/weights"
15
+ }
16
+ },
17
+ "rootModelIdentifier": "2ED3E775-6206-4188-A2F7-3022281FAAFB"
18
+ }
Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_unet_chunk1.mlpackage/Data/com.apple.CoreML/model.mlmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:dd52ea7842ceda25eed8608ea3b5d87076244685aab827a7d8dde1480cb8a3b3
3
+ size 1150272
Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_unet_chunk1.mlpackage/Data/com.apple.CoreML/weights/weight.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7b2ea88013f47ba9f67ce5f6c9c2eb8f64ea5956bef5c797ffd64968292b3004
3
+ size 887569600
Stable_Diffusion_version__Users_metal_Models_converted_bravoChildrens_v10-Diffusers_unet_chunk1.mlpackage/Manifest.json ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "fileFormatVersion": "1.0.0",
3
+ "itemInfoEntries": {
4
+ "891D96D5-53F3-4AE3-B6B4-A892B915587D": {
5
+ "author": "com.apple.CoreML",
6
+ "description": "CoreML Model Weights",
7
+ "name": "weights",
8
+ "path": "com.apple.CoreML/weights"
9
+ },
10
+ "F842D8FD-C36B-49E8-8470-7811B2048414": {
11
+ "author": "com.apple.CoreML",
12
+ "description": "CoreML Model Specification",
13
+ "name": "model.mlmodel",
14
+ "path": "com.apple.CoreML/model.mlmodel"
15
+ }
16
+ },
17
+ "rootModelIdentifier": "F842D8FD-C36B-49E8-8470-7811B2048414"
18
+ }