add AIBOM
#8
by
RiccardoDav
- opened
- HuggingFaceTB_cosmo-1b.json +135 -0
HuggingFaceTB_cosmo-1b.json
ADDED
|
@@ -0,0 +1,135 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
{
  "bomFormat": "CycloneDX",
  "specVersion": "1.6",
  "serialNumber": "urn:uuid:51169965-d572-420d-9e77-c1c8883b800a",
  "version": 1,
  "metadata": {
    "timestamp": "2025-06-05T09:42:04.548481+00:00",
    "component": {
      "type": "machine-learning-model",
      "bom-ref": "HuggingFaceTB/cosmo-1b-3ae0d358-fdbe-5afc-845f-95d5e33a1462",
      "name": "HuggingFaceTB/cosmo-1b",
      "externalReferences": [
        {
          "url": "https://huggingface.co/HuggingFaceTB/cosmo-1b",
          "type": "documentation"
        }
      ],
      "modelCard": {
        "modelParameters": {
          "task": "text-generation",
          "architectureFamily": "llama",
          "modelArchitecture": "LlamaForCausalLM",
          "datasets": [
            {
              "ref": "HuggingFaceTB/cosmopedia-eaa52c56-2a38-5af1-a072-510ea8ee58be"
            }
          ]
        },
        "properties": [
          {
            "name": "library_name",
            "value": "transformers"
          }
        ]
      },
      "authors": [
        {
          "name": "HuggingFaceTB"
        }
      ],
      "licenses": [
        {
          "license": {
            "id": "Apache-2.0",
            "url": "https://spdx.org/licenses/Apache-2.0.html"
          }
        }
      ],
      "description": "This is a 1.8B model trained on [Cosmopedia](https://huggingface.co/datasets/HuggingFaceTB/cosmopedia) synthetic dataset.",
      "tags": [
        "transformers",
        "safetensors",
        "llama",
        "text-generation",
        "en",
        "dataset:HuggingFaceTB/cosmopedia",
        "license:apache-2.0",
        "autotrain_compatible",
        "text-generation-inference",
        "endpoints_compatible",
        "region:us"
      ]
    }
  },
  "components": [
    {
      "type": "data",
      "bom-ref": "HuggingFaceTB/cosmopedia-eaa52c56-2a38-5af1-a072-510ea8ee58be",
      "name": "HuggingFaceTB/cosmopedia",
      "data": [
        {
          "type": "dataset",
          "bom-ref": "HuggingFaceTB/cosmopedia-eaa52c56-2a38-5af1-a072-510ea8ee58be",
          "name": "HuggingFaceTB/cosmopedia",
          "contents": {
            "url": "https://huggingface.co/datasets/HuggingFaceTB/cosmopedia",
            "properties": [
              {
                "name": "language",
                "value": "en"
              },
              {
                "name": "configs",
                "value": "Name of the dataset subset: auto_math_text {\"split\": \"train\", \"path\": \"data/auto_math_text/train-*\"}"
              },
              {
                "name": "configs",
                "value": "Name of the dataset subset: khanacademy {\"split\": \"train\", \"path\": \"data/khanacademy/train-*\"}"
              },
              {
                "name": "configs",
                "value": "Name of the dataset subset: openstax {\"split\": \"train\", \"path\": \"data/openstax/train-*\"}"
              },
              {
                "name": "configs",
                "value": "Name of the dataset subset: stanford {\"split\": \"train\", \"path\": \"data/stanford/train-*\"}"
              },
              {
                "name": "configs",
                "value": "Name of the dataset subset: stories {\"split\": \"train\", \"path\": \"data/stories/train-*\"}"
              },
              {
                "name": "configs",
                "value": "Name of the dataset subset: web_samples_v1 {\"split\": \"train\", \"path\": \"data/web_samples_v1/train-*\"}"
              },
              {
                "name": "configs",
                "value": "Name of the dataset subset: web_samples_v2 {\"split\": \"train\", \"path\": \"data/web_samples_v2/train-*\"}"
              },
              {
                "name": "configs",
                "value": "Name of the dataset subset: wikihow {\"split\": \"train\", \"path\": \"data/wikihow/train-*\"}"
              },
              {
                "name": "license",
                "value": "apache-2.0"
              }
            ]
          },
          "governance": {
            "owners": [
              {
                "organization": {
                  "name": "HuggingFaceTB",
                  "url": "https://huggingface.co/HuggingFaceTB"
                }
              }
            ]
          },
          "description": "\n\t\n\t\t\n\t\tCosmopedia v0.1\n\t\n\n\n \n Image generated by DALL-E, the prompt was generated by Mixtral-8x7B-Instruct-v0.1\n\n\nNote: Cosmopedia v0.2 is available at smollm-corpus\nUser: What do you think \"Cosmopedia\" could mean? Hint: in our case it's not related to cosmology.\n\nMixtral-8x7B-Instruct-v0.1: A possible meaning for \"Cosmopedia\" could be an encyclopedia or collection of information about\ndifferent cultures, societies, and topics from around the world, emphasizing diversity and global\u2026 See the full description on the dataset page: https://huggingface.co/datasets/HuggingFaceTB/cosmopedia."
        }
      ]
    }
  ]
}