add AIBOM #55
by sabato-nocera · opened
microsoft_Phi-3.5-MoE-instruct.json
ADDED
@@ -0,0 +1,64 @@
+{
+  "bomFormat": "CycloneDX",
+  "specVersion": "1.6",
+  "serialNumber": "urn:uuid:126ceb7e-84e7-4a76-bca4-17482de3dcfa",
+  "version": 1,
+  "metadata": {
+    "timestamp": "2025-06-05T09:39:07.677126+00:00",
+    "component": {
+      "type": "machine-learning-model",
+      "bom-ref": "microsoft/Phi-3.5-MoE-instruct-1ac47922-1ada-5e6b-a387-81986c67b4b0",
+      "name": "microsoft/Phi-3.5-MoE-instruct",
+      "externalReferences": [
+        {
+          "url": "https://huggingface.co/microsoft/Phi-3.5-MoE-instruct",
+          "type": "documentation"
+        }
+      ],
+      "modelCard": {
+        "modelParameters": {
+          "task": "text-generation",
+          "architectureFamily": "phimoe",
+          "modelArchitecture": "PhiMoEForCausalLM"
+        },
+        "properties": [
+          {
+            "name": "library_name",
+            "value": "transformers"
+          }
+        ]
+      },
+      "authors": [
+        {
+          "name": "microsoft"
+        }
+      ],
+      "licenses": [
+        {
+          "license": {
+            "id": "MIT",
+            "url": "https://spdx.org/licenses/MIT.html"
+          }
+        }
+      ],
+      "description": "Phi-3.5-MoE is a lightweight, state-of-the-art open model built upon datasets used for Phi-3 - synthetic data and filtered publicly available documents - with a focus on very high-quality, reasoning dense data. The model supports multilingual and comes with 128K context length (in tokens). The model underwent a rigorous enhancement process, incorporating supervised fine-tuning, proximal policy optimization, and direct preference optimization to ensure precise instruction adherence and robust safety measures.\ud83c\udfe1 [Phi-3 Portal](https://azure.microsoft.com/en-us/products/phi-3) <br>\ud83d\udcf0 [Phi-3 Microsoft Blog](https://aka.ms/phi3.5-techblog) <br>\ud83d\udcd6 [Phi-3 Technical Report](https://arxiv.org/abs/2404.14219) <br>\ud83d\udc69\u200d\ud83c\udf73 [Phi-3 Cookbook](https://github.com/microsoft/Phi-3CookBook) <br>\ud83d\udda5\ufe0f [Try It](https://aka.ms/try-phi3.5moe) <br>MoE references:\ud83d\udcdc[Phi-3.5-MoE Blog](https://techcommunity.microsoft.com/t5/ai-azure-ai-services-blog/announcing-the-availability-of-phi-3-5-moe-in-azure-ai-studio/ba-p/4256278) | \ud83d\ude01[GRIN MoE](https://huggingface.co/microsoft/GRIN-MoE)**Phi-3.5**: [[mini-instruct]](https://huggingface.co/microsoft/Phi-3.5-mini-instruct); [[MoE-instruct]](https://huggingface.co/microsoft/Phi-3.5-MoE-instruct) ; [[vision-instruct]](https://huggingface.co/microsoft/Phi-3.5-vision-instruct)",
+      "tags": [
+        "transformers",
+        "safetensors",
+        "phimoe",
+        "text-generation",
+        "nlp",
+        "code",
+        "conversational",
+        "custom_code",
+        "multilingual",
+        "arxiv:2404.14219",
+        "arxiv:2407.13833",
+        "arxiv:2403.06412",
+        "license:mit",
+        "autotrain_compatible",
+        "region:us"
+      ]
+    }
+  }
+}
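
For anyone reviewing this file locally, a minimal Python sketch for sanity-checking the added AIBOM could look like the following. It is not part of this PR; the relative path is an assumption, and the field checks simply mirror what the diff above introduces.

```python
import json

# Sketch only (not part of this PR): load the CycloneDX AIBOM added above and
# sanity-check the fields the diff introduces. Assumes the file is in the
# current working directory under the name used in this PR.
with open("microsoft_Phi-3.5-MoE-instruct.json") as f:
    bom = json.load(f)

# Top-level CycloneDX framing.
assert bom["bomFormat"] == "CycloneDX"
assert bom["specVersion"] == "1.6"

# The model itself is recorded as the metadata component.
component = bom["metadata"]["component"]
assert component["type"] == "machine-learning-model"
assert component["name"] == "microsoft/Phi-3.5-MoE-instruct"

# Model-card details captured in the BOM.
params = component["modelCard"]["modelParameters"]
print(f'{component["name"]}: task={params["task"]}, '
      f'architecture={params["modelArchitecture"]}')
```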