 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
{
    "bomFormat": "CycloneDX",
    "specVersion": "1.6",
    "serialNumber": "urn:uuid:c5591f19-4765-4021-8910-fb920fd82e23",
    "version": 1,
    "metadata": {
        "timestamp": "2025-06-05T09:39:20.822963+00:00",
        "component": {
            "type": "machine-learning-model",
            "bom-ref": "amazon/MistralLite-6b4abd3f-0fd3-55f2-9c2e-0c04831e915a",
            "name": "amazon/MistralLite",
            "externalReferences": [
                {
                    "url": "https://huggingface.co/amazon/MistralLite",
                    "type": "documentation"
                }
            ],
            "modelCard": {
                "modelParameters": {
                    "task": "text-generation",
                    "architectureFamily": "mistral",
                    "modelArchitecture": "MistralForCausalLM"
                },
                "properties": [
                    {
                        "name": "library_name",
                        "value": "transformers"
                    }
                ]
            },
            "authors": [
                {
                    "name": "amazon"
                }
            ],
            "licenses": [
                {
                    "license": {
                        "id": "Apache-2.0",
                        "url": "https://spdx.org/licenses/Apache-2.0.html"
                    }
                }
            ],
            "description": "- **Developed by:** [AWS Contributors](https://github.com/orgs/aws-samples/teams/aws-prototype-ml-apac)- **Model type:** [Mistral-7B-v0.1](https://huggingface.co/mistralai/Mistral-7B-v0.1)- **Language:** English- **Finetuned from weights:** [Mistral-7B-v0.1](https://huggingface.co/mistralai/Mistral-7B-v0.1)- **Finetuned on data:**- [SLidingEncoder and Decoder (SLED)](https://huggingface.co/datasets/tau/sled)- [(Long) Natural Questions (NQ)](https://huggingface.co/datasets/togethercomputer/Long-Data-Collections#multi-passage-qa-from-natural-questions)- [OpenAssistant Conversations Dataset (OASST1)](https://huggingface.co/datasets/OpenAssistant/oasst1)- **Supported Serving Framework:**- [Text-Generation-Inference 1.1.0](https://github.com/huggingface/text-generation-inference/tree/v1.1.0)- [vLLM](https://github.com/vllm-project/vllm)- [HuggingFace transformers](https://huggingface.co/docs/transformers/index)- [HuggingFace Text Generation Inference (TGI) container on SageMaker](https://github.com/awslabs/llm-hosting-container)- **Model License:** Apache 2.0- **Contact:** [GitHub issues](https://github.com/awslabs/extending-the-context-length-of-open-source-llms/issues)- **Inference Code** [Github Repo](https://github.com/awslabs/extending-the-context-length-of-open-source-llms/blob/main/MistralLite/)",
            "tags": [
                "transformers",
                "pytorch",
                "mistral",
                "text-generation",
                "license:apache-2.0",
                "autotrain_compatible",
                "text-generation-inference",
                "region:us"
            ]
        }
    }
}