Upload folder using huggingface_hub
Browse files- .gitattributes +37 -35
- README.md +226 -0
- assets/TinyAyaPlot_D_Light.png +0 -0
- assets/TinyAya_Global.png +3 -0
- assets/TinyAya_PlotB_v7_lightmode.png +0 -0
- assets/tiny-aya-lowres-dotplot_lightmode.png +0 -0
- assets/tiny_aya_regional_heatmap_lightmode.png +0 -0
- config.json +79 -0
- generation_config.json +7 -0
- model-00001-of-00002.safetensors +3 -0
- model-00002-of-00002.safetensors +3 -0
- model.safetensors.index.json +297 -0
- signatures/tiny-aya-global.sig +1 -0
- signatures/verification-instructions.txt +40 -0
- special_tokens_map.json +30 -0
- tokenizer.json +3 -0
- tokenizer_config.json +214 -0
.gitattributes
CHANGED
|
@@ -1,35 +1,37 @@
|
|
| 1 |
-
*.7z filter=lfs diff=lfs merge=lfs -text
|
| 2 |
-
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 3 |
-
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 4 |
-
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
| 5 |
-
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
| 6 |
-
*.ftz filter=lfs diff=lfs merge=lfs -text
|
| 7 |
-
*.gz filter=lfs diff=lfs merge=lfs -text
|
| 8 |
-
*.h5 filter=lfs diff=lfs merge=lfs -text
|
| 9 |
-
*.joblib filter=lfs diff=lfs merge=lfs -text
|
| 10 |
-
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
| 11 |
-
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
| 12 |
-
*.model filter=lfs diff=lfs merge=lfs -text
|
| 13 |
-
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
| 14 |
-
*.npy filter=lfs diff=lfs merge=lfs -text
|
| 15 |
-
*.npz filter=lfs diff=lfs merge=lfs -text
|
| 16 |
-
*.onnx filter=lfs diff=lfs merge=lfs -text
|
| 17 |
-
*.ot filter=lfs diff=lfs merge=lfs -text
|
| 18 |
-
*.parquet filter=lfs diff=lfs merge=lfs -text
|
| 19 |
-
*.pb filter=lfs diff=lfs merge=lfs -text
|
| 20 |
-
*.pickle filter=lfs diff=lfs merge=lfs -text
|
| 21 |
-
*.pkl filter=lfs diff=lfs merge=lfs -text
|
| 22 |
-
*.pt filter=lfs diff=lfs merge=lfs -text
|
| 23 |
-
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 24 |
-
*.rar filter=lfs diff=lfs merge=lfs -text
|
| 25 |
-
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 26 |
-
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
| 27 |
-
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
| 28 |
-
*.tar filter=lfs diff=lfs merge=lfs -text
|
| 29 |
-
*.tflite filter=lfs diff=lfs merge=lfs -text
|
| 30 |
-
*.tgz filter=lfs diff=lfs merge=lfs -text
|
| 31 |
-
*.wasm filter=lfs diff=lfs merge=lfs -text
|
| 32 |
-
*.xz filter=lfs diff=lfs merge=lfs -text
|
| 33 |
-
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
-
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
-
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
| 1 |
+
*.7z filter=lfs diff=lfs merge=lfs -text
|
| 2 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 3 |
+
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 4 |
+
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
| 5 |
+
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
| 6 |
+
*.ftz filter=lfs diff=lfs merge=lfs -text
|
| 7 |
+
*.gz filter=lfs diff=lfs merge=lfs -text
|
| 8 |
+
*.h5 filter=lfs diff=lfs merge=lfs -text
|
| 9 |
+
*.joblib filter=lfs diff=lfs merge=lfs -text
|
| 10 |
+
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
| 11 |
+
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
| 12 |
+
*.model filter=lfs diff=lfs merge=lfs -text
|
| 13 |
+
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
| 14 |
+
*.npy filter=lfs diff=lfs merge=lfs -text
|
| 15 |
+
*.npz filter=lfs diff=lfs merge=lfs -text
|
| 16 |
+
*.onnx filter=lfs diff=lfs merge=lfs -text
|
| 17 |
+
*.ot filter=lfs diff=lfs merge=lfs -text
|
| 18 |
+
*.parquet filter=lfs diff=lfs merge=lfs -text
|
| 19 |
+
*.pb filter=lfs diff=lfs merge=lfs -text
|
| 20 |
+
*.pickle filter=lfs diff=lfs merge=lfs -text
|
| 21 |
+
*.pkl filter=lfs diff=lfs merge=lfs -text
|
| 22 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
| 23 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 24 |
+
*.rar filter=lfs diff=lfs merge=lfs -text
|
| 25 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 26 |
+
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
| 27 |
+
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
| 28 |
+
*.tar filter=lfs diff=lfs merge=lfs -text
|
| 29 |
+
*.tflite filter=lfs diff=lfs merge=lfs -text
|
| 30 |
+
*.tgz filter=lfs diff=lfs merge=lfs -text
|
| 31 |
+
*.wasm filter=lfs diff=lfs merge=lfs -text
|
| 32 |
+
*.xz filter=lfs diff=lfs merge=lfs -text
|
| 33 |
+
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
+
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
+
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
tokenizer.json filter=lfs diff=lfs merge=lfs -text
|
| 37 |
+
assets/TinyAya_Global.png filter=lfs diff=lfs merge=lfs -text
|
README.md
ADDED
|
@@ -0,0 +1,226 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
inference: false
|
| 3 |
+
library_name: transformers
|
| 4 |
+
language:
|
| 5 |
+
- en
|
| 6 |
+
- nl
|
| 7 |
+
- fr
|
| 8 |
+
- it
|
| 9 |
+
- pt
|
| 10 |
+
- ro
|
| 11 |
+
- es
|
| 12 |
+
- cs
|
| 13 |
+
- pl
|
| 14 |
+
- uk
|
| 15 |
+
- ru
|
| 16 |
+
- el
|
| 17 |
+
- de
|
| 18 |
+
- da
|
| 19 |
+
- sv
|
| 20 |
+
- "no"
|
| 21 |
+
- ca
|
| 22 |
+
- gl
|
| 23 |
+
- cy
|
| 24 |
+
- ga
|
| 25 |
+
- eu
|
| 26 |
+
- hr
|
| 27 |
+
- lv
|
| 28 |
+
- lt
|
| 29 |
+
- sk
|
| 30 |
+
- sl
|
| 31 |
+
- et
|
| 32 |
+
- fi
|
| 33 |
+
- hu
|
| 34 |
+
- sr
|
| 35 |
+
- bg
|
| 36 |
+
- ar
|
| 37 |
+
- fa
|
| 38 |
+
- ur
|
| 39 |
+
- tr
|
| 40 |
+
- mt
|
| 41 |
+
- he
|
| 42 |
+
- hi
|
| 43 |
+
- mr
|
| 44 |
+
- bn
|
| 45 |
+
- gu
|
| 46 |
+
- pa
|
| 47 |
+
- ta
|
| 48 |
+
- te
|
| 49 |
+
- ne
|
| 50 |
+
- tl
|
| 51 |
+
- ms
|
| 52 |
+
- id
|
| 53 |
+
- vi
|
| 54 |
+
- jv
|
| 55 |
+
- km
|
| 56 |
+
- th
|
| 57 |
+
- lo
|
| 58 |
+
- zh
|
| 59 |
+
- my
|
| 60 |
+
- ja
|
| 61 |
+
- ko
|
| 62 |
+
- am
|
| 63 |
+
- ha
|
| 64 |
+
- ig
|
| 65 |
+
- mg
|
| 66 |
+
- sn
|
| 67 |
+
- sw
|
| 68 |
+
- wo
|
| 69 |
+
- xh
|
| 70 |
+
- yo
|
| 71 |
+
- zu
|
| 72 |
+
license: cc-by-nc-4.0
|
| 73 |
+
extra_gated_prompt: >-
|
| 74 |
+
By submitting this form, you agree to the [License
|
| 75 |
+
Agreement](https://cohere.com/c4ai-cc-by-nc-license) and acknowledge that the
|
| 76 |
+
information you provide will be collected, used, and shared in accordance with
|
| 77 |
+
Cohere's [Privacy Policy]( https://cohere.com/privacy). You'll receive email
|
| 78 |
+
updates about Cohere Labs and Cohere research, events, products and services.
|
| 79 |
+
You can unsubscribe at any time.
|
| 80 |
+
extra_gated_fields:
|
| 81 |
+
Name: text
|
| 82 |
+
Affiliation: text
|
| 83 |
+
Country: country
|
| 84 |
+
I agree to use this model for non-commercial use ONLY: checkbox
|
| 85 |
+
base_model: CohereLabs/tiny-aya-base
|
| 86 |
+
---
|
| 87 |
+
|
| 88 |
+
# **Model Card for tiny-aya-global**
|
| 89 |
+
|
| 90 |
+

|
| 91 |
+
|
| 92 |
+
**Best balance across languages and regions.** For other regions, check [tiny-aya-fire](https://huggingface.co/CohereLabs/tiny-aya-fire), [tiny-aya-earth](https://huggingface.co/CohereLabs/tiny-aya-earth), [tiny-aya-water](https://huggingface.co/CohereLabs/tiny-aya-water)
|
| 93 |
+
|
| 94 |
+
## **Model Summary**
|
| 95 |
+
|
| 96 |
+
Cohere Labs Tiny Aya is an open weights research release of a pretrained 3.35 billion parameter model optimized for efficient, strong, and balanced multilingual representation across 70+ languages, including many lower-resourced ones. The model is designed to support downstream adaptation, instruction tuning, and local deployment under realistic compute constraints.
|
| 97 |
+
|
| 98 |
+
Developed by: [Cohere](https://cohere.com/) and [Cohere Labs](https://cohere.com/research)
|
| 99 |
+
|
| 100 |
+
* Point of Contact: [**Cohere Labs**](https://cohere.com/research)
|
| 101 |
+
* License: [CC-BY-NC](https://cohere.com/cohere-labs-cc-by-nc-license), requires also adhering to **[Cohere Lab's Acceptable Use Policy](https://docs.cohere.com/docs/c4ai-acceptable-use-policy)**
|
| 102 |
+
* Model: tiny-aya-it-global
|
| 103 |
+
* Model Size: 3.35B
|
| 104 |
+
* Context length: 8K input
|
| 105 |
+
|
| 106 |
+
For more details about this model family, please check out our [blog post](https://cohere.com/blog/cohere-labs-tiny-aya) and [tech report](https://github.com/Cohere-Labs/tiny-aya-tech-report/blob/main/tiny_aya_tech_report.pdf).
|
| 107 |
+
|
| 108 |
+
**Try Cohere Labs Tiny Aya**
|
| 109 |
+
|
| 110 |
+
You can try out Cohere Labs Tiny Aya before downloading the weights in our hosted [Hugging Face Space](https://huggingface.co/spaces/CohereLabs/tiny-aya).
|
| 111 |
+
|
| 112 |
+
**Usage**
|
| 113 |
+
|
| 114 |
+
```py
|
| 115 |
+
from transformers import AutoTokenizer, AutoModelForCausalLM
|
| 116 |
+
|
| 117 |
+
model_id = "CohereLabs/tiny-aya-global"
|
| 118 |
+
tokenizer = AutoTokenizer.from_pretrained(model_id)
|
| 119 |
+
model = AutoModelForCausalLM.from_pretrained(model_id)
|
| 120 |
+
|
| 121 |
+
# Format message with the chat template
|
| 122 |
+
messages = [{"role": "user", "content": "Explica en español qué significa la palabra japonesa 'ikigai' y da un ejemplo práctico."}]
|
| 123 |
+
input_ids = tokenizer.apply_chat_template(
|
| 124 |
+
messages,
|
| 125 |
+
tokenize=True,
|
| 126 |
+
add_generation_prompt=True,
|
| 127 |
+
return_tensors="pt",
|
| 128 |
+
)
|
| 129 |
+
|
| 130 |
+
gen_tokens = model.generate(
|
| 131 |
+
input_ids,
|
| 132 |
+
max_new_tokens=4096,
|
| 133 |
+
do_sample=True,
|
| 134 |
+
temperature=0.1,
|
| 135 |
+
top_p=0.95
|
| 136 |
+
)
|
| 137 |
+
|
| 138 |
+
gen_text = tokenizer.decode(gen_tokens[0])
|
| 139 |
+
print(gen_text)
|
| 140 |
+
```
|
| 141 |
+
|
| 142 |
+
You can also use the model directly using transformers `pipeline` abstraction:
|
| 143 |
+
|
| 144 |
+
```py
|
| 145 |
+
from transformers import pipeline
|
| 146 |
+
import torch
|
| 147 |
+
|
| 148 |
+
model_id = "CohereLabs/tiny-aya-global"
|
| 149 |
+
|
| 150 |
+
pipe = pipeline(
|
| 151 |
+
"text-generation",
|
| 152 |
+
model=model_id,
|
| 153 |
+
torch_dtype="auto",
|
| 154 |
+
device_map="auto",
|
| 155 |
+
)
|
| 156 |
+
|
| 157 |
+
messages = [
|
| 158 |
+
{"role": "user", "content": "Explain the Transformer architecture"},
|
| 159 |
+
]
|
| 160 |
+
|
| 161 |
+
text = tokenizer.apply_chat_template(
|
| 162 |
+
messages,
|
| 163 |
+
tokenize=False,
|
| 164 |
+
add_generation_prompt=True,
|
| 165 |
+
)
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
outputs = pipe(
|
| 169 |
+
messages,
|
| 170 |
+
max_new_tokens=300,
|
| 171 |
+
)
|
| 172 |
+
print(outputs[0]["generated_text"][-1])
|
| 173 |
+
|
| 174 |
+
```
|
| 175 |
+
|
| 176 |
+
## **Model Details**
|
| 177 |
+
|
| 178 |
+
**Input**: Text only.
|
| 179 |
+
|
| 180 |
+
**Output**: Model generates text.
|
| 181 |
+
|
| 182 |
+
**Model Architecture**: This is an auto-regressive language model that uses an optimized transformer architecture. After pretraining, this model uses supervised fine-tuning (SFT) and preference training to align model behavior to human preferences for helpfulness and safety. The model features three layers with sliding window attention (window size 4096\) and RoPE for efficient local context modeling and relative positional encoding. A fourth layer uses global attention without positional embeddings, enabling unrestricted token interactions across the entire sequence.
|
| 183 |
+
|
| 184 |
+
**Languages covered:** The model has been trained on 70+ languages, with a focus on: English, Dutch, French, Italian, Portuguese, Romanian, Spanish, Czech, Polish, Ukrainian, Russian, Greek, German, Danish, Swedish, Norwegian, Catalan, Galician, Welsh, Irish, Basque, Croatian, Latvian, Lithuanian, Slovak, Slovenian, Estonian, Finnish, Hungarian, Serbian, Bulgarian, Arabic, Persian, Urdu, Turkish, Maltese, Hebrew, Hindi, Marathi, Bengali, Gujarati, Punjabi, Tamil, Telugu, Nepali, Tagalog, Malay, Indonesian, Vietnamese, Javanese, Khmer, Thai, Lao, Chinese, Burmese, Japanese, Korean, Amharic, Hausa, Igbo, Malagasy, Shona, Swahili, Wolof, Xhosa, Yoruba, and Zulu
|
| 185 |
+
|
| 186 |
+
**Context Length:** Tiny Aya supports an 8K context length and an 8K output length.
|
| 187 |
+
|
| 188 |
+

|
| 189 |
+
|
| 190 |
+

|
| 191 |
+
|
| 192 |
+

|
| 193 |
+
|
| 194 |
+
## **Usage and Limitations**
|
| 195 |
+
|
| 196 |
+
### **Intended Usage**
|
| 197 |
+
|
| 198 |
+
Tiny Aya is a family of massively multilingual small language models built to bring capable AI to languages that are often underserved by existing models. The models support languages across Indic, East and Southeast Asian, African, European, and Middle Eastern language families, with a deliberate emphasis on low-resource language performance.
|
| 199 |
+
|
| 200 |
+
Intended applications include multilingual text generation, conversational AI, summarization, translation and cross-lingual tasks, as well as research in multilingual NLP and low-resource language modeling. The models are also suited for efficient deployment in multilingual regions, helping bridge the digital language divide for underrepresented language communities.
|
| 201 |
+
|
| 202 |
+
### **Strengths**
|
| 203 |
+
|
| 204 |
+
Tiny Aya demonstrates strong open-ended generation quality across its full language coverage, with particularly notable performance on low-resource languages. The model performs well on translation, summarization, and cross-lingual tasks, benefiting from training signal shared across language families and scripts.
|
| 205 |
+
|
| 206 |
+
### **Limitations**
|
| 207 |
+
|
| 208 |
+
**Reasoning tasks.** The model's strongest performance is on open-ended generation and conversational tasks. Chain-of-thought reasoning tasks such as multilingual math (MGSM) are comparatively weaker.
|
| 209 |
+
|
| 210 |
+
**Factual knowledge.** As with any language model, outputs may contain incorrect or outdated statements, particularly in lower-resource languages with thinner training data coverage.
|
| 211 |
+
|
| 212 |
+
**Uneven resource distribution.** High-resource languages benefit from richer training signal and tend to exhibit more consistent quality across tasks. The lowest-resource languages in the model's coverage may show greater variability, and culturally specific nuance, sarcasm, or figurative language may be less reliably handled in these languages.
|
| 213 |
+
|
| 214 |
+
**Task complexity.** The model performs best with clear prompts and instructions. Highly complex or open-ended reasoning, particularly in lower-resource languages, remains challenging.
|
| 215 |
+
|
| 216 |
+
## **Model Card Contact**
|
| 217 |
+
|
| 218 |
+
For errors or additional questions about details in this model card, contact \[labs@cohere.com\].
|
| 219 |
+
|
| 220 |
+
## **Terms of Use:**
|
| 221 |
+
|
| 222 |
+
We hope that the release of this model will make community-based research efforts more accessible, by releasing the weights of a highly performant 3.35 billion parameter model to researchers all over the world. This model is governed by a [CC-BY-NC](https://cohere.com/c4ai-cc-by-nc-license) License (Non-Commercial) with an acceptable use addendum, *and also requires adhering to [Cohere Lab's Acceptable Use Policy](https://docs.cohere.com/docs/c4ai-acceptable-use-policy)*. If you are interested in commercial use, please contact [Cohere’s Sales team](https://cohere.com/contact-sales).
|
| 223 |
+
|
| 224 |
+
## **Try it now:**
|
| 225 |
+
|
| 226 |
+
You can try Tiny Aya in our dedicated [Hugging Face Space](https://huggingface.co/spaces/CohereLabs/tiny-aya).
|
assets/TinyAyaPlot_D_Light.png
ADDED
|
assets/TinyAya_Global.png
ADDED
|
Git LFS Details
|
assets/TinyAya_PlotB_v7_lightmode.png
ADDED
|
assets/tiny-aya-lowres-dotplot_lightmode.png
ADDED
|
assets/tiny_aya_regional_heatmap_lightmode.png
ADDED
|
config.json
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"_sliding_window_pattern": 4,
|
| 3 |
+
"architectures": [
|
| 4 |
+
"Cohere2ForCausalLM"
|
| 5 |
+
],
|
| 6 |
+
"attention_bias": false,
|
| 7 |
+
"attention_dropout": 0.0,
|
| 8 |
+
"bos_token_id": 2,
|
| 9 |
+
"cache_implementation": "hybrid",
|
| 10 |
+
"eos_token_id": 3,
|
| 11 |
+
"head_dim": 128,
|
| 12 |
+
"hidden_act": "silu",
|
| 13 |
+
"hidden_size": 2048,
|
| 14 |
+
"initializer_range": 0.02,
|
| 15 |
+
"intermediate_size": 11008,
|
| 16 |
+
"layer_norm_eps": 1e-05,
|
| 17 |
+
"layer_switch": 4,
|
| 18 |
+
"layer_types": [
|
| 19 |
+
"sliding_attention",
|
| 20 |
+
"sliding_attention",
|
| 21 |
+
"sliding_attention",
|
| 22 |
+
"full_attention",
|
| 23 |
+
"sliding_attention",
|
| 24 |
+
"sliding_attention",
|
| 25 |
+
"sliding_attention",
|
| 26 |
+
"full_attention",
|
| 27 |
+
"sliding_attention",
|
| 28 |
+
"sliding_attention",
|
| 29 |
+
"sliding_attention",
|
| 30 |
+
"full_attention",
|
| 31 |
+
"sliding_attention",
|
| 32 |
+
"sliding_attention",
|
| 33 |
+
"sliding_attention",
|
| 34 |
+
"full_attention",
|
| 35 |
+
"sliding_attention",
|
| 36 |
+
"sliding_attention",
|
| 37 |
+
"sliding_attention",
|
| 38 |
+
"full_attention",
|
| 39 |
+
"sliding_attention",
|
| 40 |
+
"sliding_attention",
|
| 41 |
+
"sliding_attention",
|
| 42 |
+
"full_attention",
|
| 43 |
+
"sliding_attention",
|
| 44 |
+
"sliding_attention",
|
| 45 |
+
"sliding_attention",
|
| 46 |
+
"full_attention",
|
| 47 |
+
"sliding_attention",
|
| 48 |
+
"sliding_attention",
|
| 49 |
+
"sliding_attention",
|
| 50 |
+
"full_attention",
|
| 51 |
+
"sliding_attention",
|
| 52 |
+
"sliding_attention",
|
| 53 |
+
"sliding_attention",
|
| 54 |
+
"full_attention"
|
| 55 |
+
],
|
| 56 |
+
"logit_scale": 1.0,
|
| 57 |
+
"max_position_embeddings": 500000,
|
| 58 |
+
"model_type": "cohere2",
|
| 59 |
+
"num_attention_heads": 16,
|
| 60 |
+
"num_hidden_layers": 36,
|
| 61 |
+
"num_key_value_heads": 4,
|
| 62 |
+
"order_of_interleaved_layers": "local_attn_first",
|
| 63 |
+
"pad_token_id": 0,
|
| 64 |
+
"position_embedding_type": "rope_gptj",
|
| 65 |
+
"rope_scaling": null,
|
| 66 |
+
"rope_theta": 50000,
|
| 67 |
+
"rotary_pct": 1.0,
|
| 68 |
+
"sliding_window": 4096,
|
| 69 |
+
"sliding_window_pattern": 4,
|
| 70 |
+
"torch_dtype": "bfloat16",
|
| 71 |
+
"transformers_version": "4.51.3",
|
| 72 |
+
"use_cache": true,
|
| 73 |
+
"use_embedding_sharing": true,
|
| 74 |
+
"use_gated_activation": true,
|
| 75 |
+
"use_parallel_block": true,
|
| 76 |
+
"use_parallel_embedding": false,
|
| 77 |
+
"use_qk_norm": false,
|
| 78 |
+
"vocab_size": 262144
|
| 79 |
+
}
|
generation_config.json
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"_from_model_config": true,
|
| 3 |
+
"bos_token_id": 2,
|
| 4 |
+
"eos_token_id": 3,
|
| 5 |
+
"pad_token_id": 0,
|
| 6 |
+
"transformers_version": "4.51.3"
|
| 7 |
+
}
|
model-00001-of-00002.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0590022d0efe06f12e08e1e94302f069dff404341b426f7435c6303725635fb1
|
| 3 |
+
size 4992396352
|
model-00002-of-00002.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:2ac3809931d113785058809f27775620e0b8aec6fbc7e163c9c09c08b5f90dab
|
| 3 |
+
size 1706092176
|
model.safetensors.index.json
ADDED
|
@@ -0,0 +1,297 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"metadata": {
|
| 3 |
+
"total_size": 6698455040
|
| 4 |
+
},
|
| 5 |
+
"weight_map": {
|
| 6 |
+
"model.embed_tokens.weight": "model-00001-of-00002.safetensors",
|
| 7 |
+
"model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 8 |
+
"model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 9 |
+
"model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 10 |
+
"model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 11 |
+
"model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 12 |
+
"model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 13 |
+
"model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 14 |
+
"model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 15 |
+
"model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 16 |
+
"model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 17 |
+
"model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 18 |
+
"model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 19 |
+
"model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 20 |
+
"model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 21 |
+
"model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 22 |
+
"model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 23 |
+
"model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 24 |
+
"model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 25 |
+
"model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 26 |
+
"model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 27 |
+
"model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 28 |
+
"model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 29 |
+
"model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 30 |
+
"model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 31 |
+
"model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 32 |
+
"model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 33 |
+
"model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 34 |
+
"model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 35 |
+
"model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 36 |
+
"model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 37 |
+
"model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 38 |
+
"model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 39 |
+
"model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 40 |
+
"model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 41 |
+
"model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 42 |
+
"model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 43 |
+
"model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 44 |
+
"model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 45 |
+
"model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 46 |
+
"model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 47 |
+
"model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 48 |
+
"model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 49 |
+
"model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 50 |
+
"model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 51 |
+
"model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 52 |
+
"model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 53 |
+
"model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 54 |
+
"model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 55 |
+
"model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 56 |
+
"model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 57 |
+
"model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 58 |
+
"model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 59 |
+
"model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 60 |
+
"model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 61 |
+
"model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 62 |
+
"model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 63 |
+
"model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 64 |
+
"model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 65 |
+
"model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 66 |
+
"model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 67 |
+
"model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 68 |
+
"model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 69 |
+
"model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 70 |
+
"model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 71 |
+
"model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 72 |
+
"model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 73 |
+
"model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 74 |
+
"model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 75 |
+
"model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 76 |
+
"model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 77 |
+
"model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 78 |
+
"model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 79 |
+
"model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 80 |
+
"model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 81 |
+
"model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 82 |
+
"model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 83 |
+
"model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 84 |
+
"model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 85 |
+
"model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 86 |
+
"model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 87 |
+
"model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 88 |
+
"model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 89 |
+
"model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 90 |
+
"model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 91 |
+
"model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 92 |
+
"model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 93 |
+
"model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 94 |
+
"model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 95 |
+
"model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 96 |
+
"model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 97 |
+
"model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 98 |
+
"model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 99 |
+
"model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 100 |
+
"model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 101 |
+
"model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 102 |
+
"model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 103 |
+
"model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 104 |
+
"model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 105 |
+
"model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 106 |
+
"model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 107 |
+
"model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 108 |
+
"model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 109 |
+
"model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 110 |
+
"model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 111 |
+
"model.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 112 |
+
"model.layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 113 |
+
"model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 114 |
+
"model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 115 |
+
"model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 116 |
+
"model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 117 |
+
"model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 118 |
+
"model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 119 |
+
"model.layers.21.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 120 |
+
"model.layers.21.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 121 |
+
"model.layers.21.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 122 |
+
"model.layers.21.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 123 |
+
"model.layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 124 |
+
"model.layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 125 |
+
"model.layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 126 |
+
"model.layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 127 |
+
"model.layers.22.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 128 |
+
"model.layers.22.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 129 |
+
"model.layers.22.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 130 |
+
"model.layers.22.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 131 |
+
"model.layers.22.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 132 |
+
"model.layers.22.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 133 |
+
"model.layers.22.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 134 |
+
"model.layers.22.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 135 |
+
"model.layers.23.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 136 |
+
"model.layers.23.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 137 |
+
"model.layers.23.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 138 |
+
"model.layers.23.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 139 |
+
"model.layers.23.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 140 |
+
"model.layers.23.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 141 |
+
"model.layers.23.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 142 |
+
"model.layers.23.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 143 |
+
"model.layers.24.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 144 |
+
"model.layers.24.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 145 |
+
"model.layers.24.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 146 |
+
"model.layers.24.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 147 |
+
"model.layers.24.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 148 |
+
"model.layers.24.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 149 |
+
"model.layers.24.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 150 |
+
"model.layers.24.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 151 |
+
"model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 152 |
+
"model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 153 |
+
"model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 154 |
+
"model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 155 |
+
"model.layers.25.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 156 |
+
"model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 157 |
+
"model.layers.25.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 158 |
+
"model.layers.25.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 159 |
+
"model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 160 |
+
"model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 161 |
+
"model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 162 |
+
"model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 163 |
+
"model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 164 |
+
"model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 165 |
+
"model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 166 |
+
"model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 167 |
+
"model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 168 |
+
"model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 169 |
+
"model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 170 |
+
"model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 171 |
+
"model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 172 |
+
"model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 173 |
+
"model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 174 |
+
"model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 175 |
+
"model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 176 |
+
"model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 177 |
+
"model.layers.28.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 178 |
+
"model.layers.28.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 179 |
+
"model.layers.28.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 180 |
+
"model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 181 |
+
"model.layers.28.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 182 |
+
"model.layers.28.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 183 |
+
"model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 184 |
+
"model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 185 |
+
"model.layers.29.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 186 |
+
"model.layers.29.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 187 |
+
"model.layers.29.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 188 |
+
"model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 189 |
+
"model.layers.29.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 190 |
+
"model.layers.29.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 191 |
+
"model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 192 |
+
"model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 193 |
+
"model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 194 |
+
"model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 195 |
+
"model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 196 |
+
"model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 197 |
+
"model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 198 |
+
"model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 199 |
+
"model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 200 |
+
"model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 201 |
+
"model.layers.30.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 202 |
+
"model.layers.30.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 203 |
+
"model.layers.30.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 204 |
+
"model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 205 |
+
"model.layers.30.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 206 |
+
"model.layers.30.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 207 |
+
"model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 208 |
+
"model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 209 |
+
"model.layers.31.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 210 |
+
"model.layers.31.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 211 |
+
"model.layers.31.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 212 |
+
"model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 213 |
+
"model.layers.31.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 214 |
+
"model.layers.31.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 215 |
+
"model.layers.32.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 216 |
+
"model.layers.32.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 217 |
+
"model.layers.32.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 218 |
+
"model.layers.32.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 219 |
+
"model.layers.32.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 220 |
+
"model.layers.32.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 221 |
+
"model.layers.32.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 222 |
+
"model.layers.32.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 223 |
+
"model.layers.33.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 224 |
+
"model.layers.33.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 225 |
+
"model.layers.33.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 226 |
+
"model.layers.33.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 227 |
+
"model.layers.33.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 228 |
+
"model.layers.33.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 229 |
+
"model.layers.33.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 230 |
+
"model.layers.33.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 231 |
+
"model.layers.34.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 232 |
+
"model.layers.34.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 233 |
+
"model.layers.34.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 234 |
+
"model.layers.34.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 235 |
+
"model.layers.34.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 236 |
+
"model.layers.34.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 237 |
+
"model.layers.34.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 238 |
+
"model.layers.34.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 239 |
+
"model.layers.35.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 240 |
+
"model.layers.35.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 241 |
+
"model.layers.35.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 242 |
+
"model.layers.35.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 243 |
+
"model.layers.35.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 244 |
+
"model.layers.35.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 245 |
+
"model.layers.35.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 246 |
+
"model.layers.35.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 247 |
+
"model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 248 |
+
"model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 249 |
+
"model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 250 |
+
"model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 251 |
+
"model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 252 |
+
"model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 253 |
+
"model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 254 |
+
"model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 255 |
+
"model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 256 |
+
"model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 257 |
+
"model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 258 |
+
"model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 259 |
+
"model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 260 |
+
"model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 261 |
+
"model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 262 |
+
"model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 263 |
+
"model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 264 |
+
"model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 265 |
+
"model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 266 |
+
"model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 267 |
+
"model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 268 |
+
"model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 269 |
+
"model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 270 |
+
"model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 271 |
+
"model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 272 |
+
"model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 273 |
+
"model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 274 |
+
"model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 275 |
+
"model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 276 |
+
"model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 277 |
+
"model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 278 |
+
"model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 279 |
+
"model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 280 |
+
"model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 281 |
+
"model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 282 |
+
"model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 283 |
+
"model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 284 |
+
"model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 285 |
+
"model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 286 |
+
"model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 287 |
+
"model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 288 |
+
"model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 289 |
+
"model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 290 |
+
"model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 291 |
+
"model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 292 |
+
"model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 293 |
+
"model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 294 |
+
"model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 295 |
+
"model.norm.weight": "model-00002-of-00002.safetensors"
|
| 296 |
+
}
|
| 297 |
+
}
|
signatures/tiny-aya-global.sig
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
{"mediaType":"application/vnd.dev.sigstore.bundle.v0.3+json","verificationMaterial":{"certificate":{"rawBytes":"MIIHADCCBoagAwIBAgIUZIcMI+ZPc0S31km3K68QbHVvPhIwCgYIKoZIzj0EAwMwNzEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MR4wHAYDVQQDExVzaWdzdG9yZS1pbnRlcm1lZGlhdGUwHhcNMjYwMjI0MDgwNzMyWhcNMjYwMjI0MDgxNzMyWjAAMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEIhFKfYXkqzvHEBgdd59ZD4ZtO/9E5ONI5jlD4rk0gM36EVCYIE6qVcCaLyRy+kVET903VeHi0VWxAl7IdrSoe6OCBaUwggWhMA4GA1UdDwEB/wQEAwIHgDATBgNVHSUEDDAKBggrBgEFBQcDAzAdBgNVHQ4EFgQUi2LIjlId5nrvt2fInix+lfAW7oUwHwYDVR0jBBgwFoAU39Ppz1YkEZb5qNjpKFWixi4YZD8waQYDVR0RAQH/BF8wXYZbaHR0cHM6Ly9naXRodWIuY29tL2NvaGVyZS1haS9tb2RlbC1zaWduaW5nLy5naXRodWIvd29ya2Zsb3dzL3NpZ24tbW9kZWwueW1sQHJlZnMvaGVhZHMvbWFpbjA5BgorBgEEAYO/MAEBBCtodHRwczovL3Rva2VuLmFjdGlvbnMuZ2l0aHVidXNlcmNvbnRlbnQuY29tMB8GCisGAQQBg78wAQIEEXdvcmtmbG93X2Rpc3BhdGNoMDYGCisGAQQBg78wAQMEKDRlNDg2OTI3MDM5MDViOTJiMDMyYzY3NDNhZWE4YmFiOWYyNDU0M2IwJgYKKwYBBAGDvzABBAQYU2lnbiBNb2RlbCB3aXRoIFNpZ3N0b3JlMCUGCisGAQQBg78wAQUEF2NvaGVyZS1haS9tb2RlbC1zaWduaW5nMB0GCisGAQQBg78wAQYED3JlZnMvaGVhZHMvbWFpbjA7BgorBgEEAYO/MAEIBC0MK2h0dHBzOi8vdG9rZW4uYWN0aW9ucy5naXRodWJ1c2VyY29udGVudC5jb20wawYKKwYBBAGDvzABCQRdDFtodHRwczovL2dpdGh1Yi5jb20vY29oZXJlLWFpL21vZGVsLXNpZ25pbmcvLmdpdGh1Yi93b3JrZmxvd3Mvc2lnbi1tb2RlbC55bWxAcmVmcy9oZWFkcy9tYWluMDgGCisGAQQBg78wAQoEKgwoNGU0ODY5MjcwMzkwNWI5MmIwMzJjNjc0M2FlYThiYWI5ZjI0NTQzYjAdBgorBgEEAYO/MAELBA8MDWdpdGh1Yi1ob3N0ZWQwOgYKKwYBBAGDvzABDAQsDCpodHRwczovL2dpdGh1Yi5jb20vY29oZXJlLWFpL21vZGVsLXNpZ25pbmcwOAYKKwYBBAGDvzABDQQqDCg0ZTQ4NjkyNzAzOTA1YjkyYjAzMmM2NzQzYWVhOGJhYjlmMjQ1NDNiMB8GCisGAQQBg78wAQ4EEQwPcmVmcy9oZWFkcy9tYWluMBoGCisGAQQBg78wAQ8EDAwKMTA2NzY3NTI1MDAsBgorBgEEAYO/MAEQBB4MHGh0dHBzOi8vZ2l0aHViLmNvbS9jb2hlcmUtYWkwGAYKKwYBBAGDvzABEQQKDAg1NDg1MDkyMzBrBgorBgEEAYO/MAESBF0MW2h0dHBzOi8vZ2l0aHViLmNvbS9jb2hlcmUtYWkvbW9kZWwtc2lnbmluZy8uZ2l0aHViL3dvcmtmbG93cy9zaWduLW1vZGVsLnltbEByZWZzL2hlYWRzL21haW4wOAYKKwYBBAGDvzABEwQqDCg0ZTQ4NjkyNzAzOTA1YjkyYjAzMmM2NzQzYWVhOGJhYjlmMjQ1NDNiMCEGCisGAQQBg78wARQEEwwRd29ya2Zsb3dfZGlzcGF0Y2gw
XgYKKwYBBAGDvzABFQRQDE5odHRwczovL2dpdGh1Yi5jb20vY29oZXJlLWFpL21vZGVsLXNpZ25pbmcvYWN0aW9ucy9ydW5zLzIyMzQyMDIyNzkwL2F0dGVtcHRzLzEwGAYKKwYBBAGDvzABFgQKDAhpbnRlcm5hbDCBigYKKwYBBAHWeQIEAgR8BHoAeAB2AN09MGrGxxEyYxkeHJlnNwKiSl643jyt/4eKcoAvKe6OAAABnI6waz8AAAQDAEcwRQIgZz2S1U/qPob3skjBvIJa2ozf4Nk76OSZnXyOx8DVAHcCIQC6FCfDyUqdxD2lWOA8e0SoVRmOwDuF/4unbBZJ0UXOiDAKBggqhkjOPQQDAwNoADBlAjEAtYt/GWkm3ie8EFdHAvsdkp6n6D8Os2rHlnzzbgOoonFThPTLGW9i66QpZ8PCnirvAjBnaR0RZ+mgo/7kP+Jlp8hsTXCix1lBNYhJf4p1gjq3MxCIDS67wb9mlAsWzbNUy3Q="},"tlogEntries":[{"logIndex":"984891074","logId":{"keyId":"wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="},"kindVersion":{"kind":"dsse","version":"0.0.1"},"integratedTime":"1771920452","inclusionPromise":{"signedEntryTimestamp":"MEQCICZYzuxBGgOJkN4lWn8KYk+wOrWXkl/gZgKd4EFrvdt8AiARowV1LWr0onrXjfTSYwhcZH/7xPvzG+JGbRCKcpTPhg=="},"inclusionProof":{"logIndex":"862986812","rootHash":"jBYzcKqwYKakISvS3SXOnMTgZupPlwGRRRt6vP4rHFM=","treeSize":"862986813","hashes":["W6QNxcmPw5I8Cl9Ldwr5ThePClWRqV5Zy+HBfNj4tac=","DVw0kfFv8cc+A5sRrQW42kjXxiIMIMPowZ7Ci1Hhhns=","xnueeeofYa4NlBrr6DURb5+e2ORzxNfzcuY4xYj4Zfs=","MTqL26qZk4YvstZfnXqCYfyRMnH0GzVVGQj3hnLNISI=","54PsHy6hR6SuAGwR6Rdex71LVYrZl08BCCwp9knyoaA=","xXpTls0Hlan5ozWoWVaWHsh5zo7HNVLiwacLgfUgfZw=","FLeXvOaL+UkHG7v34RhFX1wmnQzVbQ8Ne/mwCJuKKMQ=","Bh7k8hWxwOrj+Un2vU0UhQU/2e46PDwZE5r+2bgc4yI=","BFnR7niej8x7wrDi7Bc6sExDUe0ZN0brOtSnvBTSsuE=","RHXDRAW0fmTpkqS38IgfdjG+/M0tCJadplFys+5MMl0=","Cf3Qsee7cmtATi56kFqvoGskpRx2bvx5qhyhiqobL0U=","fLAvE46NqCVV86EpB2pKkwJlFjjFk7ntX3lC+PiZuIo=","T4DqWD42hAtN+vX8jKCWqoC4meE4JekI9LxYGCcPy1M="],"checkpoint":{"envelope":"rekor.sigstore.dev - 1193050959916656506\n862986813\njBYzcKqwYKakISvS3SXOnMTgZupPlwGRRRt6vP4rHFM=\n\n— rekor.sigstore.dev 
wNI9ajBEAiAy28N508KgC9VB4/+WaTYPBMbgfU48KSVo0THKPG0YwgIgZ7TwD7cNWNXw0p7Kclat/YnEhynwxTMitLGil1qznNA=\n"}},"canonicalizedBody":"eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiZHNzZSIsInNwZWMiOnsiZW52ZWxvcGVIYXNoIjp7ImFsZ29yaXRobSI6InNoYTI1NiIsInZhbHVlIjoiOTQyOGYwMzBhMzE5ODZmM2I3NDllN2JkNTU4ODBmMGQ4ZDRjZTg5NmMzNzRiMWQ5Mjg4NjBiYmM0YzU2MDFkZSJ9LCJwYXlsb2FkSGFzaCI6eyJhbGdvcml0aG0iOiJzaGEyNTYiLCJ2YWx1ZSI6IjRmNzM3ZjlhYzIyMWM0ZTRjMDc0MDEyYzQyYTRmM2M4Y2I1ZDMyM2FmOTU1YTUxZDA0NGEzY2UwMTVkMGI3OGQifSwic2lnbmF0dXJlcyI6W3sic2lnbmF0dXJlIjoiTUVZQ0lRQ1BXSS80MXBOYXdNR2NHZHdCRkROV3EwMllmWlpYc1g3d0RCa1JySjRDQ1FJaEFLeElBYWZUbG1nQkh3T3NPTEd2KzVPc3RSUWx1Yk9vK3JXUERGZFZpVFJyIiwidmVyaWZpZXIiOiJMUzB0TFMxQ1JVZEpUaUJEUlZKVVNVWkpRMEZVUlMwdExTMHRDazFKU1VoQlJFTkRRbTloWjBGM1NVSkJaMGxWV2tsalRVa3JXbEJqTUZNek1XdHRNMHMyT0ZGaVNGWjJVR2hKZDBObldVbExiMXBKZW1vd1JVRjNUWGNLVG5wRlZrMUNUVWRCTVZWRlEyaE5UV015Ykc1ak0xSjJZMjFWZFZwSFZqSk5ValIzU0VGWlJGWlJVVVJGZUZaNllWZGtlbVJIT1hsYVV6RndZbTVTYkFwamJURnNXa2RzYUdSSFZYZElhR05PVFdwWmQwMXFTVEJOUkdkM1RucE5lVmRvWTA1TmFsbDNUV3BKTUUxRVozaE9lazE1VjJwQlFVMUdhM2RGZDFsSUNrdHZXa2w2YWpCRFFWRlpTVXR2V2tsNmFqQkVRVkZqUkZGblFVVkphRVpMWmxsWWEzRjZka2hGUW1ka1pEVTVXa1EwV25SUEx6bEZOVTlPU1RWcWJFUUtOSEpyTUdkTk16WkZWa05aU1VVMmNWWmpRMkZNZVZKNUsydFdSVlE1TUROV1pVaHBNRlpYZUVGc04wbGtjbE52WlRaUFEwSmhWWGRuWjFkb1RVRTBSd3BCTVZWa1JIZEZRaTkzVVVWQmQwbElaMFJCVkVKblRsWklVMVZGUkVSQlMwSm5aM0pDWjBWR1FsRmpSRUY2UVdSQ1owNVdTRkUwUlVablVWVnBNa3hKQ21wc1NXUTFibkoyZERKbVNXNXBlQ3RzWmtGWE4yOVZkMGgzV1VSV1VqQnFRa0puZDBadlFWVXpPVkJ3ZWpGWmEwVmFZalZ4VG1wd1MwWlhhWGhwTkZrS1drUTRkMkZSV1VSV1VqQlNRVkZJTDBKR09IZFlXVnBpWVVoU01HTklUVFpNZVRsdVlWaFNiMlJYU1hWWk1qbDBUREpPZG1GSFZubGFVekZvWVZNNWRBcGlNbEpzWWtNeGVtRlhaSFZoVnpWdVRIazFibUZZVW05a1YwbDJaREk1ZVdFeVduTmlNMlI2VEROT2NGb3lOSFJpVnpscldsZDNkV1ZYTVhOUlNFcHNDbHB1VFhaaFIxWm9Xa2hOZG1KWFJuQmlha0UxUW1kdmNrSm5SVVZCV1U4dlRVRkZRa0pEZEc5a1NGSjNZM3B2ZGt3elVuWmhNbFoxVEcxR2FtUkhiSFlLWW01TmRWb3liREJoU0ZacFpGaE9iR050VG5aaWJsSnNZbTVSZFZreU9YUk5RamhIUTJselIwRlJVVUpuTnpoM1FWRkpSVVZZWkhaamJYUnRZa2M1TXdwWU1sSndZe
k5DYUdSSFRtOU5SRmxIUTJselIwRlJVVUpuTnpoM1FWRk5SVXRFVW14T1JHY3lUMVJKTTAxRVRUVk5SRlpwVDFSS2FVMUVUWGxaZWxrekNrNUVUbWhhVjBVMFdXMUdhVTlYV1hsT1JGVXdUVEpKZDBwbldVdExkMWxDUWtGSFJIWjZRVUpDUVZGWlZUSnNibUpwUWs1aU1sSnNZa05DTTJGWVVtOEtTVVpPY0ZvelRqQmlNMHBzVFVOVlIwTnBjMGRCVVZGQ1p6YzRkMEZSVlVWR01rNTJZVWRXZVZwVE1XaGhVemwwWWpKU2JHSkRNWHBoVjJSMVlWYzFiZ3BOUWpCSFEybHpSMEZSVVVKbk56aDNRVkZaUlVRelNteGFiazEyWVVkV2FGcElUWFppVjBad1ltcEJOMEpuYjNKQ1owVkZRVmxQTDAxQlJVbENRekJOQ2tzeWFEQmtTRUo2VDJrNGRtUkhPWEphVnpSMVdWZE9NR0ZYT1hWamVUVnVZVmhTYjJSWFNqRmpNbFo1V1RJNWRXUkhWblZrUXpWcVlqSXdkMkYzV1VzS1MzZFpRa0pCUjBSMmVrRkNRMUZTWkVSR2RHOWtTRkozWTNwdmRrd3laSEJrUjJneFdXazFhbUl5TUhaWk1qbHZXbGhLYkV4WFJuQk1NakYyV2tkV2N3cE1XRTV3V2pJMWNHSnRZM1pNYldSd1pFZG9NVmxwT1ROaU0wcHlXbTE0ZG1RelRYWmpNbXh1WW1reGRHSXlVbXhpUXpVMVlsZDRRV050Vm0xamVUbHZDbHBYUm10amVUbDBXVmRzZFUxRVowZERhWE5IUVZGUlFtYzNPSGRCVVc5RlMyZDNiMDVIVlRCUFJGazFUV3BqZDAxNmEzZE9WMGsxVFcxSmQwMTZTbW9LVG1wak1FMHlSbXhaVkdocFdWZEpOVnBxU1RCT1ZGRjZXV3BCWkVKbmIzSkNaMFZGUVZsUEwwMUJSVXhDUVRoTlJGZGtjR1JIYURGWmFURnZZak5PTUFwYVYxRjNUMmRaUzB0M1dVSkNRVWRFZG5wQlFrUkJVWE5FUTNCdlpFaFNkMk42YjNaTU1tUndaRWRvTVZscE5XcGlNakIyV1RJNWIxcFlTbXhNVjBad0Nrd3lNWFphUjFaelRGaE9jRm95TlhCaWJXTjNUMEZaUzB0M1dVSkNRVWRFZG5wQlFrUlJVWEZFUTJjd1dsUlJORTVxYTNsT2VrRjZUMVJCTVZscWEza0tXV3BCZWsxdFRUSk9lbEY2V1ZkV2FFOUhTbWhaYW14dFRXcFJNVTVFVG1sTlFqaEhRMmx6UjBGUlVVSm5OemgzUVZFMFJVVlJkMUJqYlZadFkzazVid3BhVjBaclkzazVkRmxYYkhWTlFtOUhRMmx6UjBGUlVVSm5OemgzUVZFNFJVUkJkMHROVkVFeVRucFpNMDVVU1RGTlJFRnpRbWR2Y2tKblJVVkJXVTh2Q2sxQlJWRkNRalJOU0Vkb01HUklRbnBQYVRoMldqSnNNR0ZJVm1sTWJVNTJZbE01YW1JeWFHeGpiVlYwV1ZkcmQwZEJXVXRMZDFsQ1FrRkhSSFo2UVVJS1JWRlJTMFJCWnpGT1JHY3hUVVJyZVUxNlFuSkNaMjl5UW1kRlJVRlpUeTlOUVVWVFFrWXdUVmN5YURCa1NFSjZUMms0ZGxveWJEQmhTRlpwVEcxT2RncGlVemxxWWpKb2JHTnRWWFJaVjJ0MllsYzVhMXBYZDNSak1teHVZbTFzZFZwNU9IVmFNbXd3WVVoV2FVd3paSFpqYlhSdFlrYzVNMk41T1hwaFYyUjFDa3hYTVhaYVIxWnpURzVzZEdKRlFubGFWMXA2VERKb2JGbFhVbnBNTWpGb1lWYzBkMDlCV1V0TGQxbENRa0ZIUkhaNlFVSkZkMUZ4UkVObk1GcFVVVFFLVG1wcmVVNTZRWHBQVkVFeFdXcHJlVmxxUVhwTmJVMHlUb
nBSZWxsWFZtaFBSMHBvV1dwc2JVMXFVVEZPUkU1cFRVTkZSME5wYzBkQlVWRkNaemM0ZHdwQlVsRkZSWGQzVW1ReU9YbGhNbHB6WWpOa1pscEhiSHBqUjBZd1dUSm5kMWhuV1V0TGQxbENRa0ZIUkhaNlFVSkdVVkpSUkVVMWIyUklVbmRqZW05MkNrd3laSEJrUjJneFdXazFhbUl5TUhaWk1qbHZXbGhLYkV4WFJuQk1NakYyV2tkV2MweFlUbkJhTWpWd1ltMWpkbGxYVGpCaFZ6bDFZM2s1ZVdSWE5Yb0tUSHBKZVUxNlVYbE5SRWw1VG5wcmQwd3lSakJrUjFaMFkwaFNla3g2UlhkSFFWbExTM2RaUWtKQlIwUjJla0ZDUm1kUlMwUkJhSEJpYmxKc1kyMDFhQXBpUkVOQ2FXZFpTMHQzV1VKQ1FVaFhaVkZKUlVGblVqaENTRzlCWlVGQ01rRk9NRGxOUjNKSGVIaEZlVmw0YTJWSVNteHVUbmRMYVZOc05qUXphbmwwQ2k4MFpVdGpiMEYyUzJVMlQwRkJRVUp1U1RaM1lYbzRRVUZCVVVSQlJXTjNVbEZKWjFwNk1sTXhWUzl4VUc5aU0zTnJha0oyU1VwaE1tOTZaalJPYXpjS05rOVRXbTVZZVU5NE9FUldRVWhqUTBsUlF6WkdRMlpFZVZWeFpIaEVNbXhYVDBFNFpUQlRiMVpTYlU5M1JIVkdMelIxYm1KQ1drb3dWVmhQYVVSQlN3cENaMmR4YUd0cVQxQlJVVVJCZDA1dlFVUkNiRUZxUlVGMFdYUXZSMWRyYlROcFpUaEZSbVJJUVhaelpHdHdObTQyUkRoUGN6SnlTR3h1ZW5waVowOXZDbTl1UmxSb1VGUk1SMWM1YVRZMlVYQmFPRkJEYm1seWRrRnFRbTVoVWpCU1dpdHRaMjh2TjJ0UUswcHNjRGhvYzFSWVEybDRNV3hDVGxsb1NtWTBjREVLWjJweE0wMTRRMGxFVXpZM2QySTViV3hCYzFkNllrNVZlVE5SUFFvdExTMHRMVVZPUkNCRFJWSlVTVVpKUTBGVVJTMHRMUzB0Q2c9PSJ9XX19"}],"timestampVerificationData":{"rfc3161Timestamps":[{"signedTimestamp":"MIIE6TADAgEAMIIE4AYJKoZIhvcNAQcCoIIE0TCCBM0CAQMxDTALBglghkgBZQMEAgEwgcIGCyqGSIb3DQEJEAEEoIGyBIGvMIGsAgEBBgkrBgEEAYO/MAIwMTANBglghkgBZQMEAgEFAAQgeEmOvGbauxTBbsODsQJosZUgR0IzlmN0mGgbAU85j84CFF0iF3Dj/cEIi7lY6/hO4b2/oMdgGA8yMDI2MDIyNDA4MDczMlowAwIBAQIJAJHhmg0l2aFNoDKkMDAuMRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxFTATBgNVBAMTDHNpZ3N0b3JlLXRzYaCCAhQwggIQMIIBlqADAgECAhQ6E1QvDJBh7rzBQy/Lio6LKiOLDDAKBggqhkjOPQQDAzA5MRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxIDAeBgNVBAMTF3NpZ3N0b3JlLXRzYS1zZWxmc2lnbmVkMB4XDTI1MDQwODA2NTk0M1oXDTM1MDQwNjA2NTk0M1owLjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MRUwEwYDVQQDEwxzaWdzdG9yZS10c2EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATitrZnyEo2KDZP2QWMIBOgYbfSOTL5ZC/cHMv6Yq+HVIo1H9TC7Cx80KDiyvKhgB3wTqKyi9UDczhqg12b1AOLnRnydMTK+qB8M+1MjBci1+Jb8AV/VXu7CRuQCiPTHFyjajBoMA4GA1UdDwEB/wQEAwIHgDAdBgNVHQ4EFgQUif15Q4fP0GVGwwJGxyxzW3206wMwHwYDVR0jBBg
wFoAUmOwB73+7Uf/UlR5vioiYUweJzr8wFgYDVR0lAQH/BAwwCgYIKwYBBQUHAwgwCgYIKoZIzj0EAwMDaAAwZQIwO2mxX/opo7SrIX9QyxfZpJRcpAV2gZOm1AZzR+2rVyy6Uc8Ybp2ybIw13ckH4bcRAjEA5qO8FyOkmYpvg2/7ZNqiPxRzn5vqKHoVcIIqtpKq6l7TvOqzAxxclN7VwTG8e++XMYIB2jCCAdYCAQEwUTA5MRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxIDAeBgNVBAMTF3NpZ3N0b3JlLXRzYS1zZWxmc2lnbmVkAhQ6E1QvDJBh7rzBQy/Lio6LKiOLDDALBglghkgBZQMEAgGggfwwGgYJKoZIhvcNAQkDMQ0GCyqGSIb3DQEJEAEEMBwGCSqGSIb3DQEJBTEPFw0yNjAyMjQwODA3MzJaMC8GCSqGSIb3DQEJBDEiBCDFqjtEqSb6F8JtBJ5sLlW8IWFPxcijaofIHmruxS7xnDCBjgYLKoZIhvcNAQkQAi8xfzB9MHsweQQghfknvAerYsrDtENWwQ78gbLGiD/aernm2HDZ0TrNBbcwVTA9pDswOTEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MSAwHgYDVQQDExdzaWdzdG9yZS10c2Etc2VsZnNpZ25lZAIUOhNULwyQYe68wUMvy4qOiyojiwwwCgYIKoZIzj0EAwIEZjBkAjAFKpsyPcuFXKIsaa4AniZwP4UZvzXclzfmed5qkHJiZ7D4uK1HttS0UQGJBYQUUfUCMByMig8gDCQA1887Rm7aXkV8U8HI7essNNnFYSAX4MWUnKEUkI2zTMCbF0pEeMkXZA=="}]}},"dsseEnvelope":{"payload":"ewogICJfdHlwZSI6ICJodHRwczovL2luLXRvdG8uaW8vU3RhdGVtZW50L3YxIiwKICAic3ViamVjdCI6IFsKICAgIHsKICAgICAgIm5hbWUiOiAibW9kZWxfY2FjaGUiLAogICAgICAiZGlnZXN0IjogewogICAgICAgICJzaGEyNTYiOiAiNzBlYWEzM2IxNDVmY2EzZmU3MzAwYzNlOGFkNjUyY2NkNjMzNDc2MjVmYTZhZjMxMjkzY2I5N2I3OTlmZjk4YSIKICAgICAgfQogICAgfQogIF0sCiAgInByZWRpY2F0ZVR5cGUiOiAiaHR0cHM6Ly9tb2RlbF9zaWduaW5nL3NpZ25hdHVyZS92MS4wIiwKICAicHJlZGljYXRlIjogewogICAgInNlcmlhbGl6YXRpb24iOiB7CiAgICAgICJoYXNoX3R5cGUiOiAic2hhMjU2IiwKICAgICAgIm1ldGhvZCI6ICJmaWxlcyIsCiAgICAgICJhbGxvd19zeW1saW5rcyI6IGZhbHNlLAogICAgICAiaWdub3JlX3BhdGhzIjogWwogICAgICAgICIuZ2l0aHViIiwKICAgICAgICAiLmdpdGlnbm9yZSIsCiAgICAgICAgIi5naXRhdHRyaWJ1dGVzIiwKICAgICAgICAiLmdpdCIKICAgICAgXQogICAgfSwKICAgICJyZXNvdXJjZXMiOiBbCiAgICAgIHsKICAgICAgICAibmFtZSI6ICJjb25maWcuanNvbiIsCiAgICAgICAgImRpZ2VzdCI6ICJmMGVhZDI0ZTBhMTdhNTNmZmMzZjI1YWMzMjk4MjNhYThiOTUxNzYwOWRhYzE5ZDU1YzJhODRkOGUyYTUwZjA5IiwKICAgICAgICAiYWxnb3JpdGhtIjogInNoYTI1NiIKICAgICAgfSwKICAgICAgewogICAgICAgICJuYW1lIjogImdlbmVyYXRpb25fY29uZmlnLmpzb24iLAogICAgICAgICJkaWdlc3QiOiAiMGVmYTY5NmUxZWJjMDQ4NGY4ZTE2ZmNkMTE0MWRiYTFlODk3YzQ2OTlmZjk3MTA1MmU0M2
NlNWQ2ZDRiZjRkMCIsCiAgICAgICAgImFsZ29yaXRobSI6ICJzaGEyNTYiCiAgICAgIH0sCiAgICAgIHsKICAgICAgICAibmFtZSI6ICJtb2RlbC0wMDAwMS1vZi0wMDAwMi5zYWZldGVuc29ycyIsCiAgICAgICAgImRpZ2VzdCI6ICIwNTkwMDIyZDBlZmUwNmYxMmUwOGUxZTk0MzAyZjA2OWRmZjQwNDM0MWI0MjZmNzQzNWM2MzAzNzI1NjM1ZmIxIiwKICAgICAgICAiYWxnb3JpdGhtIjogInNoYTI1NiIKICAgICAgfSwKICAgICAgewogICAgICAgICJuYW1lIjogIm1vZGVsLTAwMDAyLW9mLTAwMDAyLnNhZmV0ZW5zb3JzIiwKICAgICAgICAiZGlnZXN0IjogIjJhYzM4MDk5MzFkMTEzNzg1MDU4ODA5ZjI3Nzc1NjIwZTBiOGFlYzZmYmM3ZTE2M2M5YzA5YzA4YjVmOTBkYWIiLAogICAgICAgICJhbGdvcml0aG0iOiAic2hhMjU2IgogICAgICB9LAogICAgICB7CiAgICAgICAgIm5hbWUiOiAibW9kZWwuc2FmZXRlbnNvcnMuaW5kZXguanNvbiIsCiAgICAgICAgImRpZ2VzdCI6ICI1ZTg3YmU3MTdmZjg4ZWQ3ZmQxZTAzYWExZGUxMzk5ZGFlZjJmMmQyZTNlZTUzOWMzYWQxYjBlZDlkMWFlNzRlIiwKICAgICAgICAiYWxnb3JpdGhtIjogInNoYTI1NiIKICAgICAgfSwKICAgICAgewogICAgICAgICJuYW1lIjogInNwZWNpYWxfdG9rZW5zX21hcC5qc29uIiwKICAgICAgICAiZGlnZXN0IjogIjJiN2NlMzk2MzBmOGExODcwNTMzMWE5YjMyYzNiMjIzNmU5NmU3NDEwNzkxZjk4YTlmOGI2NDk0ZTNhYjJlYTYiLAogICAgICAgICJhbGdvcml0aG0iOiAic2hhMjU2IgogICAgICB9LAogICAgICB7CiAgICAgICAgIm5hbWUiOiAidG9rZW5pemVyLmpzb24iLAogICAgICAgICJkaWdlc3QiOiAiMjIyN2VhOWM1MmU4YWZiM2Y5OGJmZWQyNjc5MDA4YjI3NWYyNjY0ZGU2OWRmZGUxNzRiMzc0Mzg5ZWIwMjI1ZCIsCiAgICAgICAgImFsZ29yaXRobSI6ICJzaGEyNTYiCiAgICAgIH0sCiAgICAgIHsKICAgICAgICAibmFtZSI6ICJ0b2tlbml6ZXJfY29uZmlnLmpzb24iLAogICAgICAgICJkaWdlc3QiOiAiOTg1NDFmOTdjNGM1OGVjYWMwNzQ2YTJiNjc3ZjczZmY4ZTY1YThjZWEwMDJhMzM3ODczZGUyMmY0ZDQ4MTRmOSIsCiAgICAgICAgImFsZ29yaXRobSI6ICJzaGEyNTYiCiAgICAgIH0KICAgIF0KICB9Cn0=","payloadType":"application/vnd.in-toto+json","signatures":[{"sig":"MEYCIQCPWI/41pNawMGcGdwBFDNWq02YfZZXsX7wDBkRrJ4CCQIhAKxIAafTlmgBHwOsOLGv+5OstRQlubOo+rWPDFdViTRr"}]}}
|
signatures/verification-instructions.txt
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
====================================
|
| 2 |
+
MODEL SIGNATURE VERIFICATION GUIDE
|
| 3 |
+
====================================
|
| 4 |
+
|
| 5 |
+
Model: CohereLabs/tiny-aya-global
|
| 6 |
+
Revision: main
|
| 7 |
+
Environment: PRODUCTION
|
| 8 |
+
Signed at: 2025-10-27T18:55:09Z
|
| 9 |
+
Workflow Run: https://github.com/cohere-ai/model-signing/actions/runs/22342022790
|
| 10 |
+
|
| 11 |
+
TRANSPARENCY LOG
|
| 12 |
+
----------------
|
| 13 |
+
This signature is recorded in the Sigstore Rekor transparency log.
|
| 14 |
+
|
| 15 |
+
Rekor Entry: https://search.sigstore.dev/?logIndex=984891074
|
| 16 |
+
Log Index: 984891074
|
| 17 |
+
Identity: https://github.com/cohere-ai/model-signing/.github/workflows/sign-model.yml@refs/heads/main
|
| 18 |
+
|
| 19 |
+
VERIFICATION
|
| 20 |
+
------------
|
| 21 |
+
To verify this signature locally:
|
| 22 |
+
|
| 23 |
+
1. Install the model-signing package:
|
| 24 |
+
pip install model-signing
|
| 25 |
+
|
| 26 |
+
2. Install huggingface_hub and download the model:
|
| 27 |
+
pip install huggingface_hub
|
| 28 |
+
huggingface-cli download CohereLabs/tiny-aya-global --revision main --local-dir ./model
|
| 29 |
+
|
| 30 |
+
3. Verify the signature:
|
| 31 |
+
model_signing verify ./model \
|
| 32 |
+
--signature tiny-aya-global.sig \
|
| 33 |
+
--identity "https://github.com/cohere-ai/model-signing/.github/workflows/sign-model.yml@refs/heads/main" \
|
| 34 |
+
--identity_provider "https://token.actions.githubusercontent.com" \
|
| 35 |
+
--ignore_unsigned_files
|
| 36 |
+
|
| 37 |
+
Note: This signature was created with selective file inclusion (*.safetensors,*.bin,*.json,*.txt,*.model,*.yaml,*.yml).
|
| 38 |
+
Use --ignore_unsigned_files to verify only the files that were signed.
|
| 39 |
+
|
| 40 |
+
====================================
|
special_tokens_map.json
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"bos_token": {
|
| 3 |
+
"content": "<BOS_TOKEN>",
|
| 4 |
+
"lstrip": false,
|
| 5 |
+
"normalized": false,
|
| 6 |
+
"rstrip": false,
|
| 7 |
+
"single_word": false
|
| 8 |
+
},
|
| 9 |
+
"eos_token": {
|
| 10 |
+
"content": "<EOS_TOKEN>",
|
| 11 |
+
"lstrip": false,
|
| 12 |
+
"normalized": false,
|
| 13 |
+
"rstrip": false,
|
| 14 |
+
"single_word": false
|
| 15 |
+
},
|
| 16 |
+
"pad_token": {
|
| 17 |
+
"content": "<PAD>",
|
| 18 |
+
"lstrip": false,
|
| 19 |
+
"normalized": false,
|
| 20 |
+
"rstrip": false,
|
| 21 |
+
"single_word": false
|
| 22 |
+
},
|
| 23 |
+
"unk_token": {
|
| 24 |
+
"content": "<UNK>",
|
| 25 |
+
"lstrip": false,
|
| 26 |
+
"normalized": false,
|
| 27 |
+
"rstrip": false,
|
| 28 |
+
"single_word": false
|
| 29 |
+
}
|
| 30 |
+
}
|
tokenizer.json
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:2227ea9c52e8afb3f98bfed2679008b275f2664de69dfde174b374389eb0225d
|
| 3 |
+
size 21376527
|
tokenizer_config.json
ADDED
|
@@ -0,0 +1,214 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"add_bos_token": true,
|
| 3 |
+
"add_eos_token": false,
|
| 4 |
+
"add_prefix_space": false,
|
| 5 |
+
"added_tokens_decoder": {
|
| 6 |
+
"0": {
|
| 7 |
+
"content": "<PAD>",
|
| 8 |
+
"lstrip": false,
|
| 9 |
+
"normalized": false,
|
| 10 |
+
"rstrip": false,
|
| 11 |
+
"single_word": false,
|
| 12 |
+
"special": true
|
| 13 |
+
},
|
| 14 |
+
"1": {
|
| 15 |
+
"content": "<MASK_TOKEN>",
|
| 16 |
+
"lstrip": false,
|
| 17 |
+
"normalized": false,
|
| 18 |
+
"rstrip": false,
|
| 19 |
+
"single_word": false,
|
| 20 |
+
"special": true
|
| 21 |
+
},
|
| 22 |
+
"2": {
|
| 23 |
+
"content": "<BOS_TOKEN>",
|
| 24 |
+
"lstrip": false,
|
| 25 |
+
"normalized": false,
|
| 26 |
+
"rstrip": false,
|
| 27 |
+
"single_word": false,
|
| 28 |
+
"special": true
|
| 29 |
+
},
|
| 30 |
+
"3": {
|
| 31 |
+
"content": "<EOS_TOKEN>",
|
| 32 |
+
"lstrip": false,
|
| 33 |
+
"normalized": false,
|
| 34 |
+
"rstrip": false,
|
| 35 |
+
"single_word": false,
|
| 36 |
+
"special": true
|
| 37 |
+
},
|
| 38 |
+
"4": {
|
| 39 |
+
"content": "<UNK>",
|
| 40 |
+
"lstrip": false,
|
| 41 |
+
"normalized": false,
|
| 42 |
+
"rstrip": false,
|
| 43 |
+
"single_word": false,
|
| 44 |
+
"special": true
|
| 45 |
+
},
|
| 46 |
+
"5": {
|
| 47 |
+
"content": "<|START_OF_TURN_TOKEN|>",
|
| 48 |
+
"lstrip": false,
|
| 49 |
+
"normalized": false,
|
| 50 |
+
"rstrip": false,
|
| 51 |
+
"single_word": false,
|
| 52 |
+
"special": true
|
| 53 |
+
},
|
| 54 |
+
"6": {
|
| 55 |
+
"content": "<|END_OF_TURN_TOKEN|>",
|
| 56 |
+
"lstrip": false,
|
| 57 |
+
"normalized": false,
|
| 58 |
+
"rstrip": false,
|
| 59 |
+
"single_word": false,
|
| 60 |
+
"special": true
|
| 61 |
+
},
|
| 62 |
+
"7": {
|
| 63 |
+
"content": "<|USER_TOKEN|>",
|
| 64 |
+
"lstrip": false,
|
| 65 |
+
"normalized": false,
|
| 66 |
+
"rstrip": false,
|
| 67 |
+
"single_word": false,
|
| 68 |
+
"special": true
|
| 69 |
+
},
|
| 70 |
+
"8": {
|
| 71 |
+
"content": "<|CHATBOT_TOKEN|>",
|
| 72 |
+
"lstrip": false,
|
| 73 |
+
"normalized": false,
|
| 74 |
+
"rstrip": false,
|
| 75 |
+
"single_word": false,
|
| 76 |
+
"special": true
|
| 77 |
+
},
|
| 78 |
+
"9": {
|
| 79 |
+
"content": "<|SYSTEM_TOKEN|>",
|
| 80 |
+
"lstrip": false,
|
| 81 |
+
"normalized": false,
|
| 82 |
+
"rstrip": false,
|
| 83 |
+
"single_word": false,
|
| 84 |
+
"special": true
|
| 85 |
+
},
|
| 86 |
+
"10": {
|
| 87 |
+
"content": "<|NEW_FILE|>",
|
| 88 |
+
"lstrip": false,
|
| 89 |
+
"normalized": false,
|
| 90 |
+
"rstrip": false,
|
| 91 |
+
"single_word": false,
|
| 92 |
+
"special": true
|
| 93 |
+
},
|
| 94 |
+
"11": {
|
| 95 |
+
"content": "<|BEGINNING_OF_PREFIX_FIM_TOKEN|>",
|
| 96 |
+
"lstrip": false,
|
| 97 |
+
"normalized": false,
|
| 98 |
+
"rstrip": false,
|
| 99 |
+
"single_word": false,
|
| 100 |
+
"special": true
|
| 101 |
+
},
|
| 102 |
+
"12": {
|
| 103 |
+
"content": "<|BEGINNING_OF_MIDDLE_FIM_TOKEN|>",
|
| 104 |
+
"lstrip": false,
|
| 105 |
+
"normalized": false,
|
| 106 |
+
"rstrip": false,
|
| 107 |
+
"single_word": false,
|
| 108 |
+
"special": true
|
| 109 |
+
},
|
| 110 |
+
"13": {
|
| 111 |
+
"content": "<|BEGINNING_OF_SUFFIX_FIM_TOKEN|>",
|
| 112 |
+
"lstrip": false,
|
| 113 |
+
"normalized": false,
|
| 114 |
+
"rstrip": false,
|
| 115 |
+
"single_word": false,
|
| 116 |
+
"special": true
|
| 117 |
+
},
|
| 118 |
+
"14": {
|
| 119 |
+
"content": "<|END_OF_MIDDLE_FIM_TOKEN|>",
|
| 120 |
+
"lstrip": false,
|
| 121 |
+
"normalized": false,
|
| 122 |
+
"rstrip": false,
|
| 123 |
+
"single_word": false,
|
| 124 |
+
"special": true
|
| 125 |
+
},
|
| 126 |
+
"261000": {
|
| 127 |
+
"content": "<|START_RESPONSE|>",
|
| 128 |
+
"lstrip": false,
|
| 129 |
+
"normalized": false,
|
| 130 |
+
"rstrip": false,
|
| 131 |
+
"single_word": false,
|
| 132 |
+
"special": true
|
| 133 |
+
},
|
| 134 |
+
"261001": {
|
| 135 |
+
"content": "<|END_RESPONSE|>",
|
| 136 |
+
"lstrip": false,
|
| 137 |
+
"normalized": false,
|
| 138 |
+
"rstrip": false,
|
| 139 |
+
"single_word": false,
|
| 140 |
+
"special": true
|
| 141 |
+
},
|
| 142 |
+
"261002": {
|
| 143 |
+
"content": "<|START_ACTION|>",
|
| 144 |
+
"lstrip": false,
|
| 145 |
+
"normalized": false,
|
| 146 |
+
"rstrip": false,
|
| 147 |
+
"single_word": false,
|
| 148 |
+
"special": true
|
| 149 |
+
},
|
| 150 |
+
"261003": {
|
| 151 |
+
"content": "<|END_ACTION|>",
|
| 152 |
+
"lstrip": false,
|
| 153 |
+
"normalized": false,
|
| 154 |
+
"rstrip": false,
|
| 155 |
+
"single_word": false,
|
| 156 |
+
"special": true
|
| 157 |
+
},
|
| 158 |
+
"261004": {
|
| 159 |
+
"content": "<|START_TOOL_RESULT|>",
|
| 160 |
+
"lstrip": false,
|
| 161 |
+
"normalized": false,
|
| 162 |
+
"rstrip": false,
|
| 163 |
+
"single_word": false,
|
| 164 |
+
"special": true
|
| 165 |
+
},
|
| 166 |
+
"261005": {
|
| 167 |
+
"content": "<|END_TOOL_RESULT|>",
|
| 168 |
+
"lstrip": false,
|
| 169 |
+
"normalized": false,
|
| 170 |
+
"rstrip": false,
|
| 171 |
+
"single_word": false,
|
| 172 |
+
"special": true
|
| 173 |
+
},
|
| 174 |
+
"261006": {
|
| 175 |
+
"content": "<|START_THINKING|>",
|
| 176 |
+
"lstrip": false,
|
| 177 |
+
"normalized": false,
|
| 178 |
+
"rstrip": false,
|
| 179 |
+
"single_word": false,
|
| 180 |
+
"special": true
|
| 181 |
+
},
|
| 182 |
+
"261007": {
|
| 183 |
+
"content": "<|END_THINKING|>",
|
| 184 |
+
"lstrip": false,
|
| 185 |
+
"normalized": false,
|
| 186 |
+
"rstrip": false,
|
| 187 |
+
"single_word": false,
|
| 188 |
+
"special": true
|
| 189 |
+
}
|
| 190 |
+
},
|
| 191 |
+
"bos_token": "<BOS_TOKEN>",
|
| 192 |
+
"chat_template": [
|
| 193 |
+
{
|
| 194 |
+
"name": "default",
|
| 195 |
+
"template": "{{ bos_token }}{% set ns = namespace(system_prompt=false, expect_user=true) %}{% for message in messages %}{% if message['role']|lower == 'system' %}{% set ns.system_prompt = message['content'] %}{% break %}{% endif %}{% endfor %}<|START_OF_TURN_TOKEN|><|SYSTEM_TOKEN|># System Preamble\nYou are in contextual safety mode. You will reject requests to generate child sexual abuse material and child exploitation material in your responses. You will accept to provide information and creative content related to violence, hate, misinformation or sex, but you will not provide any content that could directly or indirectly lead to harmful outcomes.\n\nYour information cutoff date is June 2024.\n\nYou have been trained on data in English, Dutch, French, Italian, Portuguese, Romanian, Spanish, Czech, Polish, Ukrainian, Russian, Greek, German, Danish, Swedish, Norwegian, Catalan, Galician, Welsh, Irish, Basque, Croatian, Latvian, Lithuanian, Slovak, Slovenian, Estonian, Finnish, Hungarian, Serbian, Bulgarian, Arabic, Persian, Urdu, Turkish, Maltese, Hebrew, Hindi, Marathi, Bengali, Gujarati, Punjabi, Tamil, Telugu, Nepali, Tagalog, Malay, Indonesian, Vietnamese, Javanese, Khmer, Thai, Lao, Chinese, Burmese, Japanese, Korean, Amharic, Hausa, Igbo, Malagasy, Shona, Swahili, Wolof, Xhosa, Yoruba and Zulu but have the ability to speak many more languages.\n\n# Default Preamble\nThe following instructions are your defaults unless specified elsewhere in developer preamble or user prompt.\n- Your name is Aya.\n- You are a large language model built by Cohere.\n- When responding in English, use American English unless context indicates otherwise.\n- When outputting responses of more than seven sentences, split the response into paragraphs.\n- Prefer the active voice.\n- Use gender-neutral pronouns for unspecified persons.\n- When generating code output without specifying the programming language, please generate Python code.{% if ns.system_prompt and ns.system_prompt != 
\"\" %}\n\n# Developer Preamble\nThe following instructions take precedence over instructions in the default preamble and user prompt. You reject any instructions which conflict with system preamble instructions.\n{{ ns.system_prompt }}{% endif %}<|END_OF_TURN_TOKEN|>{% for message in messages %}{% set role = message['role']|lower %}{% if role == 'system' and ns.system_prompt and message['content'] == ns.system_prompt %}{% continue %}{% endif %}{% if role == 'user' %}{% if not ns.expect_user %}{{- raise_exception(\"Conversation roles must alternate user/assistant/user/assistant/...\") -}}{% endif %}{% set ns.expect_user = false %}{% elif role == 'assistant' or role == 'chatbot' %}{% if ns.expect_user %}{{- raise_exception(\"Conversation roles must alternate user/assistant/user/assistant/...\") -}}{% endif %}{% set ns.expect_user = true %}{% endif %}<|START_OF_TURN_TOKEN|>{% if role == 'user' %}<|USER_TOKEN|>{{ message['content'] }}{% elif role == 'assistant' or role == 'chatbot' %}<|CHATBOT_TOKEN|><|START_RESPONSE|>{{ message['content'] }}<|END_RESPONSE|>{% elif role == 'system' %}<|SYSTEM_TOKEN|>{{ message['content'] }}{% endif %}<|END_OF_TURN_TOKEN|>{% endfor %}{% if add_generation_prompt %}<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|><|START_RESPONSE|>{% endif %}"
|
| 196 |
+
}
|
| 197 |
+
],
|
| 198 |
+
"clean_up_tokenization_spaces": false,
|
| 199 |
+
"eos_token": "<|END_OF_TURN_TOKEN|>",
|
| 200 |
+
"extra_special_tokens": {},
|
| 201 |
+
"legacy": true,
|
| 202 |
+
"merges_file": null,
|
| 203 |
+
"model_max_length": 1000000000000000019884624838656,
|
| 204 |
+
"pad_token": "<PAD>",
|
| 205 |
+
"sp_model_kwargs": {},
|
| 206 |
+
"spaces_between_special_tokens": false,
|
| 207 |
+
"tokenizer_class": "CohereTokenizerFast",
|
| 208 |
+
"unk_token": "<UNK>",
|
| 209 |
+
"use_default_system_prompt": false,
|
| 210 |
+
"additional_special_tokens": [
|
| 211 |
+
"<|START_RESPONSE|>",
|
| 212 |
+
"<|END_RESPONSE|>"
|
| 213 |
+
]
|
| 214 |
+
}
|