{
"bomFormat": "CycloneDX",
"specVersion": "1.6",
"serialNumber": "urn:uuid:126ceb7e-84e7-4a76-bca4-17482de3dcfa",
"version": 1,
"metadata": {
"timestamp": "2025-06-05T09:39:07.677126+00:00",
"component": {
"type": "machine-learning-model",
"bom-ref": "microsoft/Phi-3.5-MoE-instruct-1ac47922-1ada-5e6b-a387-81986c67b4b0",
"name": "microsoft/Phi-3.5-MoE-instruct",
"externalReferences": [
{
"url": "https://huggingface.co/microsoft/Phi-3.5-MoE-instruct",
"type": "documentation"
}
],
"modelCard": {
"modelParameters": {
"task": "text-generation",
"architectureFamily": "phimoe",
"modelArchitecture": "PhiMoEForCausalLM"
},
"properties": [
{
"name": "library_name",
"value": "transformers"
}
]
},
"authors": [
{
"name": "microsoft"
}
],
"licenses": [
{
"license": {
"id": "MIT",
"url": "https://spdx.org/licenses/MIT.html"
}
}
],
"description": "Phi-3.5-MoE is a lightweight, state-of-the-art open model built upon datasets used for Phi-3 - synthetic data and filtered publicly available documents - with a focus on very high-quality, reasoning dense data. The model supports multilingual and comes with 128K context length (in tokens). The model underwent a rigorous enhancement process, incorporating supervised fine-tuning, proximal policy optimization, and direct preference optimization to ensure precise instruction adherence and robust safety measures.\ud83c\udfe1 [Phi-3 Portal](https://azure.microsoft.com/en-us/products/phi-3) <br>\ud83d\udcf0 [Phi-3 Microsoft Blog](https://aka.ms/phi3.5-techblog) <br>\ud83d\udcd6 [Phi-3 Technical Report](https://arxiv.org/abs/2404.14219) <br>\ud83d\udc69\u200d\ud83c\udf73 [Phi-3 Cookbook](https://github.com/microsoft/Phi-3CookBook) <br>\ud83d\udda5\ufe0f [Try It](https://aka.ms/try-phi3.5moe) <br>MoE references:\ud83d\udcdc[Phi-3.5-MoE Blog](https://techcommunity.microsoft.com/t5/ai-azure-ai-services-blog/announcing-the-availability-of-phi-3-5-moe-in-azure-ai-studio/ba-p/4256278) | \ud83d\ude01[GRIN MoE](https://huggingface.co/microsoft/GRIN-MoE)**Phi-3.5**: [[mini-instruct]](https://huggingface.co/microsoft/Phi-3.5-mini-instruct); [[MoE-instruct]](https://huggingface.co/microsoft/Phi-3.5-MoE-instruct) ; [[vision-instruct]](https://huggingface.co/microsoft/Phi-3.5-vision-instruct)",
"tags": [
"transformers",
"safetensors",
"phimoe",
"text-generation",
"nlp",
"code",
"conversational",
"custom_code",
"multilingual",
"arxiv:2404.14219",
"arxiv:2407.13833",
"arxiv:2403.06412",
"license:mit",
"autotrain_compatible",
"region:us"
]
}
}
}