modelId
stringlengths
4
111
lastModified
stringlengths
24
24
tags
list
pipeline_tag
stringlengths
5
30
author
stringlengths
2
34
config
null
securityStatus
null
id
stringlengths
4
111
likes
int64
0
9.53k
downloads
int64
2
73.6M
library_name
stringlengths
2
84
created
timestamp[us]
card
stringlengths
101
901k
card_len
int64
101
901k
embeddings
list
bigscience/bloomz-7b1-mt
2023-07-25T11:12:38.000Z
[ "transformers", "pytorch", "tensorboard", "bloom", "text-generation", "ak", "ar", "as", "bm", "bn", "ca", "code", "en", "es", "eu", "fon", "fr", "gu", "hi", "id", "ig", "ki", "kn", "lg", "ln", "ml", "mr", "ne", "nso", "ny", "or", "pa", "pt", "rn", "rw", "sn", "st", "sw", "ta", "te", "tn", "ts", "tum", "tw", "ur", "vi", "wo", "xh", "yo", "zh", "zu", "dataset:bigscience/xP3mt", "arxiv:2211.01786", "license:bigscience-bloom-rail-1.0", "model-index", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
bigscience
null
null
bigscience/bloomz-7b1-mt
121
9,070
transformers
2022-09-28T11:57:09
--- datasets: - bigscience/xP3mt license: bigscience-bloom-rail-1.0 language: - ak - ar - as - bm - bn - ca - code - en - es - eu - fon - fr - gu - hi - id - ig - ki - kn - lg - ln - ml - mr - ne - nso - ny - or - pa - pt - rn - rw - sn - st - sw - ta - te - tn - ts - tum - tw - ur - vi - wo - xh - yo - zh - zu programming_language: - C - C++ - C# - Go - Java - JavaScript - Lua - PHP - Python - Ruby - Rust - Scala - TypeScript pipeline_tag: text-generation widget: - text: "一个传奇的开端,一个不灭的神话,这不仅仅是一部电影,而是作为一个走进新时代的标签,永远彪炳史册。Would you rate the previous review as positive, neutral or negative?" example_title: "zh-en sentiment" - text: "一个传奇的开端,一个不灭的神话,这不仅仅是一部电影,而是作为一个走进新时代的标签,永远彪炳史册。你认为这句话的立场是赞扬、中立还是批评?" example_title: "zh-zh sentiment" - text: "Suggest at least five related search terms to \"Mạng neural nhân tạo\"." example_title: "vi-en query" - text: "Proposez au moins cinq mots clés concernant «Réseau de neurones artificiels»." example_title: "fr-fr query" - text: "Explain in a sentence in Telugu what is backpropagation in neural networks." example_title: "te-en qa" - text: "Why is the sky blue?" example_title: "en-en qa" - text: "Write a fairy tale about a troll saving a princess from a dangerous dragon. The fairy tale is a masterpiece that has achieved praise worldwide and its moral is \"Heroes Come in All Shapes and Sizes\". Story (in Spanish):" example_title: "es-en fable" - text: "Write a fable about wood elves living in a forest that is suddenly invaded by ogres. The fable is a masterpiece that has achieved praise worldwide and its moral is \"Violence is the last refuge of the incompetent\". 
Fable (in Hindi):" example_title: "hi-en fable" model-index: - name: bloomz-7b1-mt results: - task: type: Coreference resolution dataset: type: winogrande name: Winogrande XL (xl) config: xl split: validation revision: a80f460359d1e9a67c006011c94de42a8759430c metrics: - type: Accuracy value: 56.51 - task: type: Coreference resolution dataset: type: Muennighoff/xwinograd name: XWinograd (en) config: en split: test revision: 9dd5ea5505fad86b7bedad667955577815300cee metrics: - type: Accuracy value: 65.76 - task: type: Coreference resolution dataset: type: Muennighoff/xwinograd name: XWinograd (fr) config: fr split: test revision: 9dd5ea5505fad86b7bedad667955577815300cee metrics: - type: Accuracy value: 57.83 - task: type: Coreference resolution dataset: type: Muennighoff/xwinograd name: XWinograd (jp) config: jp split: test revision: 9dd5ea5505fad86b7bedad667955577815300cee metrics: - type: Accuracy value: 51.82 - task: type: Coreference resolution dataset: type: Muennighoff/xwinograd name: XWinograd (pt) config: pt split: test revision: 9dd5ea5505fad86b7bedad667955577815300cee metrics: - type: Accuracy value: 57.41 - task: type: Coreference resolution dataset: type: Muennighoff/xwinograd name: XWinograd (ru) config: ru split: test revision: 9dd5ea5505fad86b7bedad667955577815300cee metrics: - type: Accuracy value: 55.87 - task: type: Coreference resolution dataset: type: Muennighoff/xwinograd name: XWinograd (zh) config: zh split: test revision: 9dd5ea5505fad86b7bedad667955577815300cee metrics: - type: Accuracy value: 62.7 - task: type: Natural language inference dataset: type: anli name: ANLI (r1) config: r1 split: validation revision: 9dbd830a06fea8b1c49d6e5ef2004a08d9f45094 metrics: - type: Accuracy value: 42.6 - task: type: Natural language inference dataset: type: anli name: ANLI (r2) config: r2 split: validation revision: 9dbd830a06fea8b1c49d6e5ef2004a08d9f45094 metrics: - type: Accuracy value: 39.4 - task: type: Natural language inference dataset: type: anli 
name: ANLI (r3) config: r3 split: validation revision: 9dbd830a06fea8b1c49d6e5ef2004a08d9f45094 metrics: - type: Accuracy value: 42.0 - task: type: Natural language inference dataset: type: super_glue name: SuperGLUE (cb) config: cb split: validation revision: 9e12063561e7e6c79099feb6d5a493142584e9e2 metrics: - type: Accuracy value: 83.93 - task: type: Natural language inference dataset: type: super_glue name: SuperGLUE (rte) config: rte split: validation revision: 9e12063561e7e6c79099feb6d5a493142584e9e2 metrics: - type: Accuracy value: 82.67 - task: type: Natural language inference dataset: type: xnli name: XNLI (ar) config: ar split: validation revision: a5a45e4ff92d5d3f34de70aaf4b72c3bdf9f7f16 metrics: - type: Accuracy value: 55.58 - task: type: Natural language inference dataset: type: xnli name: XNLI (bg) config: bg split: validation revision: a5a45e4ff92d5d3f34de70aaf4b72c3bdf9f7f16 metrics: - type: Accuracy value: 44.9 - task: type: Natural language inference dataset: type: xnli name: XNLI (de) config: de split: validation revision: a5a45e4ff92d5d3f34de70aaf4b72c3bdf9f7f16 metrics: - type: Accuracy value: 48.92 - task: type: Natural language inference dataset: type: xnli name: XNLI (el) config: el split: validation revision: a5a45e4ff92d5d3f34de70aaf4b72c3bdf9f7f16 metrics: - type: Accuracy value: 42.89 - task: type: Natural language inference dataset: type: xnli name: XNLI (en) config: en split: validation revision: a5a45e4ff92d5d3f34de70aaf4b72c3bdf9f7f16 metrics: - type: Accuracy value: 58.92 - task: type: Natural language inference dataset: type: xnli name: XNLI (es) config: es split: validation revision: a5a45e4ff92d5d3f34de70aaf4b72c3bdf9f7f16 metrics: - type: Accuracy value: 57.35 - task: type: Natural language inference dataset: type: xnli name: XNLI (fr) config: fr split: validation revision: a5a45e4ff92d5d3f34de70aaf4b72c3bdf9f7f16 metrics: - type: Accuracy value: 56.67 - task: type: Natural language inference dataset: type: xnli name: XNLI (hi) 
config: hi split: validation revision: a5a45e4ff92d5d3f34de70aaf4b72c3bdf9f7f16 metrics: - type: Accuracy value: 53.45 - task: type: Natural language inference dataset: type: xnli name: XNLI (ru) config: ru split: validation revision: a5a45e4ff92d5d3f34de70aaf4b72c3bdf9f7f16 metrics: - type: Accuracy value: 50.24 - task: type: Natural language inference dataset: type: xnli name: XNLI (sw) config: sw split: validation revision: a5a45e4ff92d5d3f34de70aaf4b72c3bdf9f7f16 metrics: - type: Accuracy value: 48.27 - task: type: Natural language inference dataset: type: xnli name: XNLI (th) config: th split: validation revision: a5a45e4ff92d5d3f34de70aaf4b72c3bdf9f7f16 metrics: - type: Accuracy value: 41.08 - task: type: Natural language inference dataset: type: xnli name: XNLI (tr) config: tr split: validation revision: a5a45e4ff92d5d3f34de70aaf4b72c3bdf9f7f16 metrics: - type: Accuracy value: 38.71 - task: type: Natural language inference dataset: type: xnli name: XNLI (ur) config: ur split: validation revision: a5a45e4ff92d5d3f34de70aaf4b72c3bdf9f7f16 metrics: - type: Accuracy value: 49.48 - task: type: Natural language inference dataset: type: xnli name: XNLI (vi) config: vi split: validation revision: a5a45e4ff92d5d3f34de70aaf4b72c3bdf9f7f16 metrics: - type: Accuracy value: 54.5 - task: type: Natural language inference dataset: type: xnli name: XNLI (zh) config: zh split: validation revision: a5a45e4ff92d5d3f34de70aaf4b72c3bdf9f7f16 metrics: - type: Accuracy value: 54.3 - task: type: Program synthesis dataset: type: openai_humaneval name: HumanEval config: None split: test revision: e8dc562f5de170c54b5481011dd9f4fa04845771 metrics: - type: Pass@1 value: 7.23 - type: Pass@10 value: 14.46 - type: Pass@100 value: 25.86 - task: type: Sentence completion dataset: type: story_cloze name: StoryCloze (2016) config: "2016" split: validation revision: e724c6f8cdf7c7a2fb229d862226e15b023ee4db metrics: - type: Accuracy value: 89.58 - task: type: Sentence completion dataset: type: 
super_glue name: SuperGLUE (copa) config: copa split: validation revision: 9e12063561e7e6c79099feb6d5a493142584e9e2 metrics: - type: Accuracy value: 84.0 - task: type: Sentence completion dataset: type: xcopa name: XCOPA (et) config: et split: validation revision: 37f73c60fb123111fa5af5f9b705d0b3747fd187 metrics: - type: Accuracy value: 52.0 - task: type: Sentence completion dataset: type: xcopa name: XCOPA (ht) config: ht split: validation revision: 37f73c60fb123111fa5af5f9b705d0b3747fd187 metrics: - type: Accuracy value: 54.0 - task: type: Sentence completion dataset: type: xcopa name: XCOPA (id) config: id split: validation revision: 37f73c60fb123111fa5af5f9b705d0b3747fd187 metrics: - type: Accuracy value: 73.0 - task: type: Sentence completion dataset: type: xcopa name: XCOPA (it) config: it split: validation revision: 37f73c60fb123111fa5af5f9b705d0b3747fd187 metrics: - type: Accuracy value: 62.0 - task: type: Sentence completion dataset: type: xcopa name: XCOPA (qu) config: qu split: validation revision: 37f73c60fb123111fa5af5f9b705d0b3747fd187 metrics: - type: Accuracy value: 61.0 - task: type: Sentence completion dataset: type: xcopa name: XCOPA (sw) config: sw split: validation revision: 37f73c60fb123111fa5af5f9b705d0b3747fd187 metrics: - type: Accuracy value: 61.0 - task: type: Sentence completion dataset: type: xcopa name: XCOPA (ta) config: ta split: validation revision: 37f73c60fb123111fa5af5f9b705d0b3747fd187 metrics: - type: Accuracy value: 62.0 - task: type: Sentence completion dataset: type: xcopa name: XCOPA (th) config: th split: validation revision: 37f73c60fb123111fa5af5f9b705d0b3747fd187 metrics: - type: Accuracy value: 61.0 - task: type: Sentence completion dataset: type: xcopa name: XCOPA (tr) config: tr split: validation revision: 37f73c60fb123111fa5af5f9b705d0b3747fd187 metrics: - type: Accuracy value: 56.0 - task: type: Sentence completion dataset: type: xcopa name: XCOPA (vi) config: vi split: validation revision: 
37f73c60fb123111fa5af5f9b705d0b3747fd187 metrics: - type: Accuracy value: 77.0 - task: type: Sentence completion dataset: type: xcopa name: XCOPA (zh) config: zh split: validation revision: 37f73c60fb123111fa5af5f9b705d0b3747fd187 metrics: - type: Accuracy value: 80.0 - task: type: Sentence completion dataset: type: Muennighoff/xstory_cloze name: XStoryCloze (ar) config: ar split: validation revision: 8bb76e594b68147f1a430e86829d07189622b90d metrics: - type: Accuracy value: 83.85 - task: type: Sentence completion dataset: type: Muennighoff/xstory_cloze name: XStoryCloze (es) config: es split: validation revision: 8bb76e594b68147f1a430e86829d07189622b90d metrics: - type: Accuracy value: 88.82 - task: type: Sentence completion dataset: type: Muennighoff/xstory_cloze name: XStoryCloze (eu) config: eu split: validation revision: 8bb76e594b68147f1a430e86829d07189622b90d metrics: - type: Accuracy value: 73.26 - task: type: Sentence completion dataset: type: Muennighoff/xstory_cloze name: XStoryCloze (hi) config: hi split: validation revision: 8bb76e594b68147f1a430e86829d07189622b90d metrics: - type: Accuracy value: 80.41 - task: type: Sentence completion dataset: type: Muennighoff/xstory_cloze name: XStoryCloze (id) config: id split: validation revision: 8bb76e594b68147f1a430e86829d07189622b90d metrics: - type: Accuracy value: 84.58 - task: type: Sentence completion dataset: type: Muennighoff/xstory_cloze name: XStoryCloze (my) config: my split: validation revision: 8bb76e594b68147f1a430e86829d07189622b90d metrics: - type: Accuracy value: 51.56 - task: type: Sentence completion dataset: type: Muennighoff/xstory_cloze name: XStoryCloze (ru) config: ru split: validation revision: 8bb76e594b68147f1a430e86829d07189622b90d metrics: - type: Accuracy value: 64.26 - task: type: Sentence completion dataset: type: Muennighoff/xstory_cloze name: XStoryCloze (sw) config: sw split: validation revision: 8bb76e594b68147f1a430e86829d07189622b90d metrics: - type: Accuracy value: 71.01 - 
task: type: Sentence completion dataset: type: Muennighoff/xstory_cloze name: XStoryCloze (te) config: te split: validation revision: 8bb76e594b68147f1a430e86829d07189622b90d metrics: - type: Accuracy value: 73.06 - task: type: Sentence completion dataset: type: Muennighoff/xstory_cloze name: XStoryCloze (zh) config: zh split: validation revision: 8bb76e594b68147f1a430e86829d07189622b90d metrics: - type: Accuracy value: 85.9 --- ![xmtf](https://github.com/bigscience-workshop/xmtf/blob/master/xmtf_banner.png?raw=true) # Table of Contents 1. [Model Summary](#model-summary) 2. [Use](#use) 3. [Limitations](#limitations) 4. [Training](#training) 5. [Evaluation](#evaluation) 7. [Citation](#citation) # Model Summary > We present BLOOMZ & mT0, a family of models capable of following human instructions in dozens of languages zero-shot. We finetune BLOOM & mT5 pretrained multilingual language models on our crosslingual task mixture (xP3) and find the resulting models capable of crosslingual generalization to unseen tasks & languages. - **Repository:** [bigscience-workshop/xmtf](https://github.com/bigscience-workshop/xmtf) - **Paper:** [Crosslingual Generalization through Multitask Finetuning](https://arxiv.org/abs/2211.01786) - **Point of Contact:** [Niklas Muennighoff](mailto:niklas@hf.co) - **Languages:** Refer to [bloom](https://huggingface.co/bigscience/bloom) for pretraining & [xP3](https://huggingface.co/datasets/bigscience/xP3) for finetuning language proportions. It understands both pretraining & finetuning languages. - **BLOOMZ & mT0 Model Family:** <div class="max-w-full overflow-auto"> <table> <tr> <th colspan="12">Multitask finetuned on <a style="font-weight:bold" href=https://huggingface.co/datasets/bigscience/xP3>xP3</a>. Recommended for prompting in English. 
</tr> <tr> <td>Parameters</td> <td>300M</td> <td>580M</td> <td>1.2B</td> <td>3.7B</td> <td>13B</td> <td>560M</td> <td>1.1B</td> <td>1.7B</td> <td>3B</td> <td>7.1B</td> <td>176B</td> </tr> <tr> <td>Finetuned Model</td> <td><a href=https://huggingface.co/bigscience/mt0-small>mt0-small</a></td> <td><a href=https://huggingface.co/bigscience/mt0-base>mt0-base</a></td> <td><a href=https://huggingface.co/bigscience/mt0-large>mt0-large</a></td> <td><a href=https://huggingface.co/bigscience/mt0-xl>mt0-xl</a></td> <td><a href=https://huggingface.co/bigscience/mt0-xxl>mt0-xxl</a></td> <td><a href=https://huggingface.co/bigscience/bloomz-560m>bloomz-560m</a></td> <td><a href=https://huggingface.co/bigscience/bloomz-1b1>bloomz-1b1</a></td> <td><a href=https://huggingface.co/bigscience/bloomz-1b7>bloomz-1b7</a></td> <td><a href=https://huggingface.co/bigscience/bloomz-3b>bloomz-3b</a></td> <td><a href=https://huggingface.co/bigscience/bloomz-7b1>bloomz-7b1</a></td> <td><a href=https://huggingface.co/bigscience/bloomz>bloomz</a></td> </tr> </tr> <tr> <th colspan="12">Multitask finetuned on <a style="font-weight:bold" href=https://huggingface.co/datasets/bigscience/xP3mt>xP3mt</a>. Recommended for prompting in non-English.</th> </tr> <tr> <td>Finetuned Model</td> <td></td> <td></td> <td></td> <td></td> <td><a href=https://huggingface.co/bigscience/mt0-xxl-mt>mt0-xxl-mt</a></td> <td></td> <td></td> <td></td> <td></td> <td><a href=https://huggingface.co/bigscience/bloomz-7b1-mt>bloomz-7b1-mt</a></td> <td><a href=https://huggingface.co/bigscience/bloomz-mt>bloomz-mt</a></td> </tr> <th colspan="12">Multitask finetuned on <a style="font-weight:bold" href=https://huggingface.co/datasets/Muennighoff/P3>P3</a>. Released for research purposes only. 
Strictly inferior to above models!</th> </tr> <tr> <td>Finetuned Model</td> <td></td> <td></td> <td></td> <td></td> <td><a href=https://huggingface.co/bigscience/mt0-xxl-p3>mt0-xxl-p3</a></td> <td></td> <td></td> <td></td> <td></td> <td><a href=https://huggingface.co/bigscience/bloomz-7b1-p3>bloomz-7b1-p3</a></td> <td><a href=https://huggingface.co/bigscience/bloomz-p3>bloomz-p3</a></td> </tr> <th colspan="12">Original pretrained checkpoints. Not recommended.</th> <tr> <td>Pretrained Model</td> <td><a href=https://huggingface.co/google/mt5-small>mt5-small</a></td> <td><a href=https://huggingface.co/google/mt5-base>mt5-base</a></td> <td><a href=https://huggingface.co/google/mt5-large>mt5-large</a></td> <td><a href=https://huggingface.co/google/mt5-xl>mt5-xl</a></td> <td><a href=https://huggingface.co/google/mt5-xxl>mt5-xxl</a></td> <td><a href=https://huggingface.co/bigscience/bloom-560m>bloom-560m</a></td> <td><a href=https://huggingface.co/bigscience/bloom-1b1>bloom-1b1</a></td> <td><a href=https://huggingface.co/bigscience/bloom-1b7>bloom-1b7</a></td> <td><a href=https://huggingface.co/bigscience/bloom-3b>bloom-3b</a></td> <td><a href=https://huggingface.co/bigscience/bloom-7b1>bloom-7b1</a></td> <td><a href=https://huggingface.co/bigscience/bloom>bloom</a></td> </tr> </table> </div> # Use ## Intended use We recommend using the model to perform tasks expressed in natural language. For example, given the prompt "*Translate to English: Je t’aime.*", the model will most likely answer "*I love you.*". Some prompt ideas from our paper: - 一个传奇的开端,一个不灭的神话,这不仅仅是一部电影,而是作为一个走进新时代的标签,永远彪炳史册。你认为这句话的立场是赞扬、中立还是批评? - Suggest at least five related search terms to "Mạng neural nhân tạo". - Write a fairy tale about a troll saving a princess from a dangerous dragon. The fairy tale is a masterpiece that has achieved praise worldwide and its moral is "Heroes Come in All Shapes and Sizes". 
Story (in Spanish): - Explain in a sentence in Telugu what is backpropagation in neural networks. **Feel free to share your generations in the Community tab!** ## How to use ### CPU <details> <summary> Click to expand </summary> ```python # pip install -q transformers from transformers import AutoModelForCausalLM, AutoTokenizer checkpoint = "bigscience/bloomz-7b1-mt" tokenizer = AutoTokenizer.from_pretrained(checkpoint) model = AutoModelForCausalLM.from_pretrained(checkpoint) inputs = tokenizer.encode("Translate to English: Je t’aime.", return_tensors="pt") outputs = model.generate(inputs) print(tokenizer.decode(outputs[0])) ``` </details> ### GPU <details> <summary> Click to expand </summary> ```python # pip install -q transformers accelerate from transformers import AutoModelForCausalLM, AutoTokenizer checkpoint = "bigscience/bloomz-7b1-mt" tokenizer = AutoTokenizer.from_pretrained(checkpoint) model = AutoModelForCausalLM.from_pretrained(checkpoint, torch_dtype="auto", device_map="auto") inputs = tokenizer.encode("Translate to English: Je t’aime.", return_tensors="pt").to("cuda") outputs = model.generate(inputs) print(tokenizer.decode(outputs[0])) ``` </details> ### GPU in 8bit <details> <summary> Click to expand </summary> ```python # pip install -q transformers accelerate bitsandbytes from transformers import AutoModelForCausalLM, AutoTokenizer checkpoint = "bigscience/bloomz-7b1-mt" tokenizer = AutoTokenizer.from_pretrained(checkpoint) model = AutoModelForCausalLM.from_pretrained(checkpoint, device_map="auto", load_in_8bit=True) inputs = tokenizer.encode("Translate to English: Je t’aime.", return_tensors="pt").to("cuda") outputs = model.generate(inputs) print(tokenizer.decode(outputs[0])) ``` </details> <!-- Necessary for whitespace --> ### # Limitations **Prompt Engineering:** The performance may vary depending on the prompt. For BLOOMZ models, we recommend making it very clear when the input stops to avoid the model trying to continue it. 
For example, the prompt "*Translate to English: Je t'aime*" without the full stop (.) at the end, may result in the model trying to continue the French sentence. Better prompts are e.g. "*Translate to English: Je t'aime.*", "*Translate to English: Je t'aime. Translation:*" "*What is "Je t'aime." in English?*", where it is clear for the model when it should answer. Further, we recommend providing the model as much context as possible. For example, if you want it to answer in Telugu, then tell the model, e.g. "*Explain in a sentence in Telugu what is backpropagation in neural networks.*". # Training ## Model - **Architecture:** Same as [bloom-7b1](https://huggingface.co/bigscience/bloom-7b1), also refer to the `config.json` file - **Finetuning steps:** 1000 - **Finetuning tokens:** 4.19 billion - **Finetuning layout:** 1x pipeline parallel, 1x tensor parallel, 64x data parallel - **Precision:** float16 ## Hardware - **CPUs:** AMD CPUs with 512GB memory per node - **GPUs:** 64 A100 80GB GPUs with 8 GPUs per node (8 nodes) using NVLink 4 inter-gpu connects, 4 OmniPath links - **Communication:** NCCL-communications network with a fully dedicated subnet ## Software - **Orchestration:** [Megatron-DeepSpeed](https://github.com/bigscience-workshop/Megatron-DeepSpeed) - **Optimizer & parallelism:** [DeepSpeed](https://github.com/microsoft/DeepSpeed) - **Neural networks:** [PyTorch](https://github.com/pytorch/pytorch) (pytorch-1.11 w/ CUDA-11.5) - **FP16 if applicable:** [apex](https://github.com/NVIDIA/apex) # Evaluation We refer to Table 7 from our [paper](https://arxiv.org/abs/2211.01786) & [bigscience/evaluation-results](https://huggingface.co/datasets/bigscience/evaluation-results) for zero-shot results on unseen tasks. The sidebar reports zero-shot performance of the best prompt per dataset config. 
# Citation ```bibtex @article{muennighoff2022crosslingual, title={Crosslingual generalization through multitask finetuning}, author={Muennighoff, Niklas and Wang, Thomas and Sutawika, Lintang and Roberts, Adam and Biderman, Stella and Scao, Teven Le and Bari, M Saiful and Shen, Sheng and Yong, Zheng-Xin and Schoelkopf, Hailey and others}, journal={arXiv preprint arXiv:2211.01786}, year={2022} } ```
24,206
[ [ -0.031982421875, -0.043060302734375, 0.02276611328125, 0.0297698974609375, -0.00601959228515625, -0.00612640380859375, -0.02471923828125, -0.02520751953125, 0.031646728515625, -0.01218414306640625, -0.06878662109375, -0.039947509765625, -0.040679931640625, 0.01142120361328125, 0.0009098052978515625, 0.05902099609375, -0.00970458984375, 0.0120086669921875, 0.002315521240234375, -0.0034923553466796875, -0.0217437744140625, -0.030059814453125, -0.055755615234375, -0.04522705078125, 0.0386962890625, 0.0125579833984375, 0.036590576171875, 0.038604736328125, 0.023651123046875, 0.028564453125, -0.0250244140625, 0.00543212890625, -0.016815185546875, -0.009857177734375, 0.0019931793212890625, -0.02880859375, -0.05511474609375, -0.00559234619140625, 0.043792724609375, 0.04461669921875, 0.0143585205078125, 0.02191162109375, 0.0235443115234375, 0.039337158203125, -0.034393310546875, 0.0278778076171875, -0.0036983489990234375, 0.029510498046875, -0.013427734375, 0.003658294677734375, -0.0115814208984375, -0.0238494873046875, -0.003505706787109375, -0.05914306640625, 0.01507568359375, 0.009796142578125, 0.10015869140625, 0.0016326904296875, 0.003879547119140625, 0.004486083984375, -0.024505615234375, 0.0760498046875, -0.0665283203125, 0.02996826171875, 0.0306549072265625, -0.0031757354736328125, 0.0007348060607910156, -0.0460205078125, -0.059539794921875, -0.00490570068359375, -0.025054931640625, 0.031585693359375, -0.0184173583984375, -0.01178741455078125, 0.019256591796875, 0.038482666015625, -0.0521240234375, 0.0051422119140625, -0.026123046875, -0.0175933837890625, 0.041534423828125, 0.015106201171875, 0.04290771484375, -0.0232696533203125, -0.0193634033203125, -0.032379150390625, -0.034637451171875, 0.0110626220703125, 0.0124053955078125, 0.040802001953125, -0.04864501953125, 0.029815673828125, -0.0065155029296875, 0.044952392578125, 0.022369384765625, -0.0003509521484375, 0.05743408203125, -0.03570556640625, -0.0284423828125, -0.018280029296875, 0.0894775390625, 
0.015655517578125, 0.0035572052001953125, -0.00725555419921875, 0.0084991455078125, -0.01407623291015625, -0.0003018379211425781, -0.0716552734375, -0.00441741943359375, 0.0223388671875, -0.04290771484375, -0.0256500244140625, -0.008148193359375, -0.07427978515625, 0.00839996337890625, -0.0163116455078125, 0.0517578125, -0.043609619140625, -0.028045654296875, 0.015960693359375, 0.0009927749633789062, 0.01554107666015625, 0.01190948486328125, -0.07098388671875, 0.01328277587890625, 0.023345947265625, 0.0684814453125, -0.011077880859375, -0.04364013671875, 0.0021724700927734375, 0.0051422119140625, -0.01155853271484375, 0.03912353515625, -0.01247406005859375, -0.0294342041015625, -0.024078369140625, 0.0236358642578125, -0.032806396484375, -0.007083892822265625, 0.042449951171875, -0.00820159912109375, 0.045745849609375, -0.04345703125, -0.025421142578125, -0.015380859375, 0.02227783203125, -0.0394287109375, 0.07989501953125, 0.0157928466796875, -0.068603515625, 0.0129852294921875, -0.07232666015625, -0.0177001953125, -0.01486968994140625, -0.0011606216430664062, -0.051300048828125, -0.0273284912109375, 0.033203125, 0.037811279296875, -0.017120361328125, -0.0197906494140625, -0.02264404296875, -0.001468658447265625, -0.002445220947265625, -0.01171112060546875, 0.07879638671875, 0.0195159912109375, -0.047027587890625, 0.0184173583984375, -0.04913330078125, 0.0099639892578125, 0.041412353515625, -0.015960693359375, 0.00859832763671875, -0.031829833984375, -0.0024127960205078125, 0.03521728515625, 0.0233306884765625, -0.039215087890625, 0.0144805908203125, -0.040924072265625, 0.04815673828125, 0.046875, -0.004276275634765625, 0.03228759765625, -0.03924560546875, 0.036407470703125, 0.01331329345703125, 0.01177215576171875, -0.019927978515625, -0.0330810546875, -0.06329345703125, -0.01538848876953125, 0.01910400390625, 0.0361328125, -0.040283203125, 0.0418701171875, -0.0224609375, -0.048797607421875, -0.0273284912109375, 0.0010509490966796875, 0.044036865234375, 
0.052276611328125, 0.05035400390625, -0.003978729248046875, -0.04315185546875, -0.058563232421875, 0.0006375312805175781, -0.007251739501953125, 0.01065826416015625, 0.039520263671875, 0.056884765625, -0.0097808837890625, 0.03924560546875, -0.0460205078125, -0.004302978515625, -0.03045654296875, 0.0024662017822265625, 0.0209197998046875, 0.060028076171875, 0.042938232421875, -0.057464599609375, -0.032623291015625, 0.0005779266357421875, -0.06951904296875, 0.0170440673828125, 0.0014600753784179688, -0.0306243896484375, 0.007781982421875, 0.0250091552734375, -0.056549072265625, 0.035675048828125, 0.022552490234375, -0.037872314453125, 0.044891357421875, -0.017425537109375, 0.018524169921875, -0.09912109375, 0.031280517578125, 0.01140594482421875, 0.0058746337890625, -0.048553466796875, 0.0140380859375, 0.004840850830078125, 0.004634857177734375, -0.04449462890625, 0.067138671875, -0.036651611328125, 0.012786865234375, 0.0021495819091796875, -0.0080718994140625, 0.017913818359375, 0.05474853515625, 0.012664794921875, 0.053070068359375, 0.05224609375, -0.0509033203125, 0.0225677490234375, 0.043548583984375, -0.00921630859375, 0.026947021484375, -0.06427001953125, -0.004436492919921875, 0.00026035308837890625, 0.0107269287109375, -0.06414794921875, -0.0166778564453125, 0.031280517578125, -0.054962158203125, 0.047393798828125, 0.003948211669921875, -0.03948974609375, -0.0615234375, -0.0240020751953125, 0.02264404296875, 0.04119873046875, -0.037994384765625, 0.028472900390625, -0.0007953643798828125, 0.006015777587890625, -0.0426025390625, -0.071533203125, -0.0120086669921875, -0.02880859375, -0.06494140625, 0.04644775390625, -0.01519012451171875, 0.01282501220703125, -0.0182647705078125, 0.00441741943359375, -0.0065765380859375, -0.0035953521728515625, 0.0251312255859375, 0.032012939453125, -0.0283355712890625, 0.004962921142578125, -0.010711669921875, 0.0048980712890625, -0.0010223388671875, -0.017913818359375, 0.054412841796875, -0.018310546875, -0.00789642333984375, 
-0.0560302734375, 0.011474609375, 0.0399169921875, -0.01220703125, 0.06817626953125, 0.06884765625, -0.033599853515625, 0.00738525390625, -0.029541015625, -0.028472900390625, -0.0399169921875, 0.010955810546875, -0.0239715576171875, -0.047515869140625, 0.0545654296875, 0.0199127197265625, -0.00299072265625, 0.056610107421875, 0.04766845703125, 0.01168060302734375, 0.070556640625, 0.04266357421875, -0.005828857421875, 0.0369873046875, -0.050079345703125, 0.011383056640625, -0.072265625, -0.03570556640625, -0.02935791015625, -0.0229644775390625, -0.017669677734375, -0.0243072509765625, 0.0183868408203125, 0.005718231201171875, -0.04754638671875, 0.03802490234375, -0.05194091796875, -0.0017757415771484375, 0.046478271484375, 0.0275115966796875, -0.0083160400390625, 0.0003783702850341797, -0.036529541015625, -0.01216888427734375, -0.056304931640625, -0.016876220703125, 0.0721435546875, 0.0204315185546875, 0.031982421875, -0.006954193115234375, 0.050048828125, -0.01666259765625, -0.0032711029052734375, -0.03839111328125, 0.031646728515625, 0.003269195556640625, -0.052001953125, -0.0242462158203125, -0.0286407470703125, -0.08599853515625, 0.020965576171875, -0.0352783203125, -0.0723876953125, 0.0142669677734375, 0.0241241455078125, -0.0556640625, 0.03656005859375, -0.052947998046875, 0.08135986328125, -0.01500701904296875, -0.057861328125, 0.01222991943359375, -0.048248291015625, 0.0133209228515625, 0.02862548828125, 0.0201416015625, 0.007282257080078125, 0.0171966552734375, 0.06207275390625, -0.045013427734375, 0.06353759765625, -0.01076507568359375, 0.0073394775390625, 0.021759033203125, -0.015960693359375, 0.02392578125, -0.0115814208984375, -0.004741668701171875, 0.00506591796875, -0.00444793701171875, -0.03533935546875, -0.026153564453125, 0.06048583984375, -0.06689453125, -0.035308837890625, -0.0413818359375, -0.039215087890625, -0.00922393798828125, 0.036102294921875, 0.04742431640625, 0.01800537109375, 0.00536346435546875, -0.004199981689453125, 0.04864501953125, 
-0.025360107421875, 0.05242919921875, 0.010284423828125, -0.01495361328125, -0.0171661376953125, 0.0701904296875, 0.006183624267578125, 0.00785064697265625, 0.029083251953125, 0.0294342041015625, -0.0271148681640625, -0.0301361083984375, -0.039398193359375, 0.036376953125, -0.0250396728515625, -0.0229644775390625, -0.06475830078125, -0.026458740234375, -0.060028076171875, -0.01308441162109375, -0.032318115234375, -0.03228759765625, -0.042510986328125, -0.01320648193359375, 0.035430908203125, 0.034210205078125, -0.0190582275390625, 0.0251617431640625, -0.038909912109375, 0.0267181396484375, 0.0174407958984375, 0.0229034423828125, 0.0155181884765625, -0.04071044921875, -0.0159454345703125, 0.01739501953125, -0.04364013671875, -0.05084228515625, 0.051300048828125, 0.0014591217041015625, 0.039794921875, 0.017578125, -0.026336669921875, 0.060882568359375, -0.034515380859375, 0.061248779296875, 0.03192138671875, -0.0633544921875, 0.04754638671875, -0.0290069580078125, 0.037109375, 0.027740478515625, 0.03924560546875, -0.03009033203125, -0.01226806640625, -0.057647705078125, -0.068603515625, 0.057525634765625, 0.0248565673828125, 0.0022068023681640625, 0.00568389892578125, 0.02911376953125, -0.00495147705078125, 0.007511138916015625, -0.07183837890625, -0.0462646484375, -0.03759765625, -0.0204315185546875, -0.004016876220703125, 0.007221221923828125, -0.0021190643310546875, -0.044219970703125, 0.052490234375, 0.00214385986328125, 0.043060302734375, 0.0224761962890625, 0.0008378028869628906, -0.002742767333984375, 0.0086517333984375, 0.044677734375, 0.031982421875, -0.005481719970703125, -0.0164947509765625, 0.01528167724609375, -0.051116943359375, 0.0006914138793945312, 0.005176544189453125, -0.022064208984375, -0.010101318359375, 0.01702880859375, 0.0655517578125, 0.01554107666015625, -0.01117706298828125, 0.032073974609375, -0.0024890899658203125, -0.0275115966796875, -0.02081298828125, 0.01148223876953125, 0.0247039794921875, 0.0157928466796875, 0.017547607421875, 
0.005107879638671875, 0.00136566162109375, -0.0299072265625, 0.0020580291748046875, 0.0301513671875, -0.0196533203125, -0.03729248046875, 0.0665283203125, -0.004261016845703125, -0.0029468536376953125, 0.022796630859375, -0.024322509765625, -0.058013916015625, 0.050048828125, 0.048126220703125, 0.0452880859375, -0.0208587646484375, 0.0045166015625, 0.07623291015625, 0.00669097900390625, -0.0167083740234375, 0.024658203125, 0.00260162353515625, -0.03973388671875, -0.0206298828125, -0.06048583984375, 0.0004334449768066406, 0.0259246826171875, -0.047088623046875, 0.02783203125, -0.037384033203125, -0.0173797607421875, 0.0183258056640625, 0.0197601318359375, -0.057861328125, 0.042327880859375, 0.0196685791015625, 0.062225341796875, -0.055328369140625, 0.056549072265625, 0.0478515625, -0.06243896484375, -0.0760498046875, -0.008209228515625, 0.0016841888427734375, -0.07098388671875, 0.06353759765625, 0.010528564453125, 0.01136016845703125, 0.012115478515625, -0.04608154296875, -0.0853271484375, 0.0992431640625, 0.00594329833984375, -0.0189666748046875, -0.0219268798828125, 0.002727508544921875, 0.040985107421875, -0.01523590087890625, 0.031280517578125, 0.0248565673828125, 0.048797607421875, 0.020843505859375, -0.0692138671875, 0.027130126953125, -0.045745849609375, -0.0038909912109375, -0.002689361572265625, -0.08441162109375, 0.09161376953125, -0.0131378173828125, -0.00897979736328125, 0.0024929046630859375, 0.060760498046875, 0.0279083251953125, 0.0149383544921875, 0.015167236328125, 0.059844970703125, 0.036834716796875, -0.023712158203125, 0.07525634765625, -0.028778076171875, 0.042266845703125, 0.058319091796875, 0.01666259765625, 0.042724609375, 0.0255126953125, -0.0386962890625, 0.04034423828125, 0.048095703125, -0.021453857421875, 0.020721435546875, 0.0168914794921875, -0.005115509033203125, -0.0070037841796875, 0.01111602783203125, -0.048309326171875, 0.00653076171875, 0.030487060546875, -0.0221710205078125, -0.002895355224609375, 0.0071258544921875, 
0.0274505615234375, -0.0026569366455078125, -0.0357666015625, 0.027923583984375, 0.00917816162109375, -0.05096435546875, 0.05084228515625, -0.004299163818359375, 0.075439453125, -0.04052734375, 0.01861572265625, -0.0117034912109375, 0.01319122314453125, -0.029510498046875, -0.0556640625, 0.01450347900390625, -0.0051422119140625, -0.0094451904296875, -0.013885498046875, 0.036468505859375, -0.02313232421875, -0.04644775390625, 0.0225067138671875, 0.0264129638671875, 0.00899505615234375, 0.004459381103515625, -0.0809326171875, 0.00318145751953125, -0.0028095245361328125, -0.034423828125, 0.01507568359375, 0.01331329345703125, 0.015838623046875, 0.054168701171875, 0.044189453125, 0.0091400146484375, 0.02764892578125, -0.005275726318359375, 0.06298828125, -0.05224609375, -0.0360107421875, -0.06256103515625, 0.04193115234375, -0.01030731201171875, -0.0259246826171875, 0.07916259765625, 0.042510986328125, 0.059844970703125, -0.005218505859375, 0.0606689453125, -0.018402099609375, 0.0452880859375, -0.0301666259765625, 0.07037353515625, -0.0589599609375, -0.0187530517578125, -0.02783203125, -0.03778076171875, -0.0240020751953125, 0.060302734375, -0.02056884765625, 0.041473388671875, 0.058349609375, 0.04925537109375, -0.0102081298828125, -0.00487518310546875, -0.003566741943359375, 0.02972412109375, 0.01348114013671875, 0.0633544921875, 0.0241546630859375, -0.05621337890625, 0.0284576416015625, -0.05072021484375, -0.001888275146484375, -0.0182037353515625, -0.047821044921875, -0.06829833984375, -0.052093505859375, -0.036102294921875, -0.04205322265625, -0.00713348388671875, 0.0657958984375, 0.0556640625, -0.0673828125, -0.0146636962890625, -0.01319122314453125, 0.0004401206970214844, -0.01081085205078125, -0.01776123046875, 0.05511474609375, -0.0223541259765625, -0.0711669921875, 0.006137847900390625, 0.0016794204711914062, 0.0396728515625, -0.005054473876953125, -0.01447296142578125, -0.0304412841796875, -0.00360107421875, 0.0236968994140625, 0.048431396484375, 
-0.035125732421875, -0.007183074951171875, 0.012603759765625, -0.0154266357421875, 0.026947021484375, 0.0243072509765625, -0.03948974609375, 0.00785064697265625, 0.035247802734375, 0.0218505859375, 0.051666259765625, -0.01447296142578125, 0.0250091552734375, -0.035614013671875, 0.017791748046875, 0.012237548828125, 0.0347900390625, 0.0268096923828125, -0.03350830078125, 0.0279693603515625, 0.01983642578125, -0.04248046875, -0.05804443359375, -0.00872039794921875, -0.0853271484375, -0.0163116455078125, 0.0858154296875, -0.0211334228515625, -0.05072021484375, 0.025848388671875, -0.010711669921875, 0.043060302734375, -0.0260162353515625, 0.047760009765625, 0.0572509765625, -0.0219268798828125, -0.00922393798828125, -0.043853759765625, 0.04144287109375, 0.043701171875, -0.0654296875, -0.01235198974609375, 0.01050567626953125, 0.03277587890625, 0.0311279296875, 0.031280517578125, -0.0192108154296875, 0.015380859375, 0.00013935565948486328, 0.0159149169921875, -0.0137176513671875, 0.0029926300048828125, -0.0277557373046875, -0.0022830963134765625, -0.023040771484375, -0.0196075439453125 ] ]
Xwin-LM/Xwin-LM-13B-V0.1
2023-09-21T05:42:20.000Z
[ "transformers", "pytorch", "llama", "text-generation", "license:llama2", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Xwin-LM
null
null
Xwin-LM/Xwin-LM-13B-V0.1
57
9,063
transformers
2023-09-15T14:03:35
--- license: llama2 --- <h3 align="center"> Xwin-LM: Powerful, Stable, and Reproducible LLM Alignment </h3> <p align="center"> <a href="https://github.com/Xwin-LM/Xwin-LM"> <img src="https://img.shields.io/badge/GitHub-yellow.svg?style=social&logo=github"> </a> <a href="https://huggingface.co/Xwin-LM"> <img src="https://img.shields.io/badge/%F0%9F%A4%97%20Hugging%20Face-Models-blue"> </a> </p> **Step up your LLM alignment with Xwin-LM!** Xwin-LM aims to develop and open-source alignment technologies for large language models, including supervised fine-tuning (SFT), reward models (RM), reject sampling, reinforcement learning from human feedback (RLHF), etc. Our first release, built-upon on the Llama2 base models, ranked **TOP-1** on [AlpacaEval](https://tatsu-lab.github.io/alpaca_eval/). Notably, it's **the first to surpass GPT-4** on this benchmark. The project will be continuously updated. ## News - 💥 [Sep, 2023] We released [Xwin-LM-70B-V0.1](https://huggingface.co/Xwin-LM/Xwin-LM-70B-V0.1), which has achieved a win-rate against Davinci-003 of **95.57%** on [AlpacaEval](https://tatsu-lab.github.io/alpaca_eval/) benchmark, ranking as **TOP-1** on AlpacaEval. **It was the FIRST model surpassing GPT-4** on [AlpacaEval](https://tatsu-lab.github.io/alpaca_eval/). Also note its winrate v.s. GPT-4 is **60.61**. - 🔍 [Sep, 2023] RLHF plays crucial role in the strong performance of Xwin-LM-V0.1 release! - 💥 [Sep, 2023] We released [Xwin-LM-13B-V0.1](https://huggingface.co/Xwin-LM/Xwin-LM-13B-V0.1), which has achieved **91.76%** win-rate on [AlpacaEval](https://tatsu-lab.github.io/alpaca_eval/), ranking as **top-1** among all 13B models. - 💥 [Sep, 2023] We released [Xwin-LM-7B-V0.1](https://huggingface.co/Xwin-LM/Xwin-LM-7B-V0.1), which has achieved **87.82%** win-rate on [AlpacaEval](https://tatsu-lab.github.io/alpaca_eval/), ranking as **top-1** among all 7B models. 
## Model Card | Model | Checkpoint | Report | License | |------------|------------|-------------|------------------| |Xwin-LM-7B-V0.1| 🤗 <a href="https://huggingface.co/Xwin-LM/Xwin-LM-7B-V0.1" target="_blank">HF Link</a> | 📃**Coming soon (Stay tuned)** | <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama 2 License| |Xwin-LM-13B-V0.1| 🤗 <a href="https://huggingface.co/Xwin-LM/Xwin-LM-13B-V0.1" target="_blank">HF Link</a> | | <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama 2 License| |Xwin-LM-70B-V0.1| 🤗 <a href="https://huggingface.co/Xwin-LM/Xwin-LM-70B-V0.1" target="_blank">HF Link</a> | | <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama 2 License| ## Benchmarks ### Xwin-LM performance on [AlpacaEval](https://tatsu-lab.github.io/alpaca_eval/). The table below displays the performance of Xwin-LM on [AlpacaEval](https://tatsu-lab.github.io/alpaca_eval/), where evaluates its win-rate against Text-Davinci-003 across 805 questions. To provide a comprehensive evaluation, we present, for the first time, the win-rate against ChatGPT and GPT-4 as well. Our Xwin-LM model family establish a new state-of-the-art performance across all metrics. Notably, Xwin-LM-70B-V0.1 has eclipsed GPT-4 for the first time, achieving an impressive win-rate of **95.57%** to Text-Davinci-003 and **60.61%** to GPT-4. | **Model** | **AlpacaEval (winrate %)** | **AlpacaEval (winrate %)** |**AlpacaEval (winrate %)** | |----------------------------------|------------|----------|-------------| | | **v.s. Text-Davinci-003** | **v.s. ChatGPT** | **v.s. 
GPT4**| | **Xwin-LM-70B-V0.1** | **95.57** | **87.50** | **60.61** | | GPT-4 | 95.28 | 84.66 | 50.00 | | WizardLM-70B-V1.0 | 92.91 | 80.19 | 46.70 | | Llama-2-70B-Chat | 92.66 | 81.97 | 51.19 | | **Xwin-LM-13B-V0.1** | **91.76** | **81.79** | **55.30** | | ChatGPT | 89.37 | 50.00 | 16.60 | | WizardLM-13B-V1.2 | 89.17 | 75.75 | 41.91 | | **Xwin-LM-7B-V0.1** | **87.35** | **76.40** | **47.57** | | Llama-2-13B-Chat | 81.09 | 64.22 | 30.92 | ### Case study on [AlpacaEval](https://tatsu-lab.github.io/alpaca_eval/) | Prompts | GTP4 response | Xwin-LM response | |----------------|------------|--------| |I've put a plate on top of an apple, and take the plate from kitchen to dinning room. Where is the apple now|The apple is now in the dining room, as it was underneath the plate when you moved the plate from the kitchen to the dining room.|The apple is still in the kitchen, as you have only moved the plate. The apple is under the plate, so if you want to bring the apple to the dining room, you should either move the apple directly or put the apple on the plate and then move the plate to the dining room.| | Answer the following question.<br> How do you say "good evening" in French. | In French, "good evening" is said as "bonsoir." | In French, "good evening" is translated to "bonsoir." This greeting is used in the evening and nighttime hours, typically after 6 p.m. It is a combination of "bon," which means "good," and "soir," which means "evening." To pronounce it, say "bone-swahr." | ### Xwin-LM performance on NLP foundation tasks. The following table provides a comparison of Xwin-LMs with other LLMs on NLP foundation tasks in [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). 
| Model | MMLU 5-shot | ARC 25-shot | TruthfulQA 0-shot | HellaSwag 10-shot | Average | |------------------|-------------|-------------|-------------------|-------------------|------------| | Text-davinci-003 | 56.9 | **85.2** | 59.3 | 82.2 | 70.9 | |Vicuna-13b 1.1 | 51.3 | 53.0 | 51.8 | 80.1 | 59.1 | |Guanaco 30B | 57.6 | 63.7 | 50.7 | 85.1 | 64.3 | | WizardLM-7B 1.0 | 42.7 | 51.6 | 44.7 | 77.7 | 54.2 | | WizardLM-13B 1.0 | 52.3 | 57.2 | 50.5 | 81.0 | 60.2 | | WizardLM-30B 1.0 | 58.8 | 62.5 | 52.4 | 83.3 | 64.2| | Llama-2-7B-Chat | 48.3 | 52.9 | 45.6 | 78.6 | 56.4 | | Llama-2-13B-Chat | 54.6 | 59.0 | 44.1 | 81.9 | 59.9 | | Llama-2-70B-Chat | 63.9 | 64.6 | 52.8 | 85.9 | 66.8 | | **Xwin-LM-7B-V0.1** | 49.7 | 56.2 | 48.1 | 79.5 | 58.4 | | **Xwin-LM-13B-V0.1** | 56.6 | 62.4 | 45.5 | 83.0 | 61.9 | | **Xwin-LM-70B-V0.1** | **69.6** | 70.5 | **60.1** | **87.1** | **71.8** | ## Inference ### Conversation templates To obtain desired results, please strictly follow the conversation templates when utilizing our model for inference. Our model adopts the prompt format established by [Vicuna](https://github.com/lm-sys/FastChat) and is equipped to support **multi-turn** conversations. ``` A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions. USER: Hi! ASSISTANT: Hello.</s>USER: Who are you? ASSISTANT: I am Xwin-LM.</s>...... ``` ### HuggingFace Example ```python from transformers import AutoTokenizer, AutoModelForCausalLM model = AutoModelForCausalLM.from_pretrained("Xwin-LM/Xwin-LM-7B-V0.1") tokenizer = AutoTokenizer.from_pretrained("Xwin-LM/Xwin-LM-7B-V0.1") ( prompt := "A chat between a curious user and an artificial intelligence assistant. " "The assistant gives helpful, detailed, and polite answers to the user's questions. " "USER: Hello, can you help me? 
" "ASSISTANT:" ) inputs = tokenizer(prompt, return_tensors="pt") samples = model.generate(**inputs, max_new_tokens=4096, temperature=0.7) output = tokenizer.decode(samples[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True) print(output) # Of course! I'm here to help. Please feel free to ask your question or describe the issue you're having, and I'll do my best to assist you. ``` ### vllm Example Because Xwin-LM is based on Llama2, it also offers support for rapid inference using [vllm](https://github.com/vllm-project/vllm). Please refer to [vllm](https://github.com/vllm-project/vllm) for detailed installation instructions. ```python from vllm import LLM, SamplingParams ( prompt := "A chat between a curious user and an artificial intelligence assistant. " "The assistant gives helpful, detailed, and polite answers to the user's questions. " "USER: Hello, can you help me? " "ASSISTANT:" ) sampling_params = SamplingParams(temperature=0.7, max_tokens=4096) llm = LLM(model="Xwin-LM/Xwin-LM-7B-V0.1") outputs = llm.generate([prompt,], sampling_params) for output in outputs: prompt = output.prompt generated_text = output.outputs[0].text print(generated_text) ``` ## TODO - [ ] Release the source code - [ ] Release more capabilities, such as math, reasoning, and etc. ## Citation Please consider citing our work if you use the data or code in this repo. ``` @software{xwin-lm, title = {Xwin-LM}, author = {Xwin-LM Team}, url = {https://github.com/Xwin-LM/Xwin-LM}, version = {pre-release}, year = {2023}, month = {9}, } ``` ## Acknowledgements Thanks to [Llama 2](https://ai.meta.com/llama/), [FastChat](https://github.com/lm-sys/FastChat), [AlpacaFarm](https://github.com/tatsu-lab/alpaca_farm), and [vllm](https://github.com/vllm-project/vllm).
9,826
[ [ -0.043243408203125, -0.055328369140625, 0.032073974609375, 0.01122283935546875, -0.0138702392578125, 0.00981903076171875, -0.00797271728515625, -0.055999755859375, 0.0247955322265625, 0.018524169921875, -0.057403564453125, -0.053497314453125, -0.059356689453125, -0.004512786865234375, -0.0157928466796875, 0.07501220703125, -0.003620147705078125, -0.0203704833984375, -0.003215789794921875, -0.0177001953125, -0.016632080078125, -0.043487548828125, -0.05670166015625, -0.03265380859375, 0.040008544921875, -0.0045928955078125, 0.0738525390625, 0.0582275390625, 0.044891357421875, 0.031585693359375, -0.01084136962890625, 0.01959228515625, -0.034576416015625, 0.0013895034790039062, 0.0117340087890625, -0.046905517578125, -0.059295654296875, 0.0222320556640625, 0.0404052734375, 0.020538330078125, -0.007022857666015625, 0.0016889572143554688, 0.009521484375, 0.04656982421875, -0.0258941650390625, 0.0150299072265625, -0.025115966796875, 0.00577545166015625, -0.018646240234375, -0.0248565673828125, 0.0001875162124633789, -0.025115966796875, -0.00902557373046875, -0.053436279296875, 0.004730224609375, 0.0166473388671875, 0.0875244140625, 0.03228759765625, -0.008880615234375, -0.0231781005859375, -0.02471923828125, 0.0618896484375, -0.0765380859375, 0.01401519775390625, 0.016815185546875, 0.01290130615234375, -0.0299072265625, -0.0484619140625, -0.04901123046875, -0.03076171875, -0.013641357421875, 0.01515960693359375, -0.048431396484375, 0.0076904296875, 0.0217132568359375, 0.027862548828125, -0.041778564453125, 0.01084136962890625, -0.00789642333984375, -0.0019102096557617188, 0.05047607421875, 0.0167999267578125, 0.0283966064453125, 0.00177001953125, -0.037567138671875, -0.023895263671875, -0.03997802734375, 0.0269317626953125, 0.020965576171875, 0.0135650634765625, -0.03167724609375, 0.033050537109375, -0.0002646446228027344, 0.045989990234375, 0.011749267578125, -0.04486083984375, 0.033660888671875, -0.0281219482421875, -0.0186309814453125, -0.00794219970703125, 
0.0828857421875, 0.048797607421875, -0.00888824462890625, -0.0027141571044921875, -0.020233154296875, 0.0225982666015625, -0.0176544189453125, -0.057769775390625, -0.002567291259765625, 0.02337646484375, -0.04010009765625, -0.0291748046875, -0.006267547607421875, -0.03631591796875, -0.01336669921875, -0.005039215087890625, 0.0284271240234375, -0.03448486328125, -0.004451751708984375, 0.006328582763671875, -0.0151519775390625, 0.037353515625, 0.031982421875, -0.044342041015625, 0.02154541015625, 0.04937744140625, 0.06658935546875, -0.035430908203125, -0.02850341796875, -0.0423583984375, -0.012481689453125, -0.0201568603515625, 0.0335693359375, -0.00981903076171875, -0.0281524658203125, -0.03436279296875, 0.0159454345703125, 0.002460479736328125, -0.03253173828125, 0.028717041015625, -0.020050048828125, 0.0293121337890625, -0.025909423828125, -0.048492431640625, -0.0016021728515625, 0.032928466796875, -0.033935546875, 0.0806884765625, -0.0009713172912597656, -0.06427001953125, 0.0032253265380859375, -0.0565185546875, -0.01427459716796875, -0.0231781005859375, -0.01134490966796875, -0.0281219482421875, -0.0140228271484375, 0.0149993896484375, 0.033966064453125, -0.0267486572265625, 0.01287078857421875, -0.0185089111328125, -0.02642822265625, 0.016632080078125, -0.03765869140625, 0.09039306640625, 0.01177215576171875, -0.0726318359375, -0.000644683837890625, -0.04718017578125, 0.00902557373046875, 0.031707763671875, -0.01904296875, -0.00373077392578125, 0.00701904296875, -0.0283355712890625, 0.0216217041015625, 0.03289794921875, -0.046875, 0.0303192138671875, -0.038482666015625, 0.0249786376953125, 0.053558349609375, -0.0160675048828125, 0.0248565673828125, -0.0484619140625, 0.042572021484375, -0.0022563934326171875, 0.0177001953125, -0.005222320556640625, -0.05279541015625, -0.062469482421875, -0.0287933349609375, 0.005725860595703125, 0.06878662109375, -0.04998779296875, 0.0440673828125, -0.006122589111328125, -0.0465087890625, -0.03729248046875, 0.007904052734375, 
0.04998779296875, 0.0301361083984375, 0.03521728515625, -0.01105499267578125, -0.0261077880859375, -0.05999755859375, 0.00885009765625, -0.028411865234375, 0.0163421630859375, 0.031768798828125, 0.035491943359375, -0.024322509765625, 0.07159423828125, -0.040802001953125, -0.016632080078125, -0.0257415771484375, -0.01018524169921875, 0.0294952392578125, 0.0367431640625, 0.06707763671875, -0.0350341796875, -0.02667236328125, 0.00632476806640625, -0.0699462890625, -0.01399993896484375, -0.00040435791015625, -0.025115966796875, 0.034088134765625, 0.0204620361328125, -0.059356689453125, 0.036865234375, 0.050750732421875, -0.04217529296875, 0.0498046875, -0.00957489013671875, 0.01739501953125, -0.07159423828125, 0.004955291748046875, -0.00756072998046875, -0.032989501953125, -0.035797119140625, 0.0191192626953125, -0.013946533203125, 0.0234832763671875, -0.03436279296875, 0.07421875, -0.04376220703125, 0.003932952880859375, -0.01171112060546875, 0.036102294921875, 0.004253387451171875, 0.048614501953125, -0.0113372802734375, 0.0650634765625, 0.0323486328125, -0.0210418701171875, 0.026214599609375, 0.01242828369140625, -0.01519775390625, 0.017608642578125, -0.0579833984375, -0.0013980865478515625, 0.0078582763671875, 0.0223541259765625, -0.0712890625, 0.021514892578125, 0.04095458984375, -0.05126953125, 0.03485107421875, 0.007175445556640625, -0.0186614990234375, -0.0278778076171875, -0.037109375, -0.00750732421875, 0.042816162109375, -0.037109375, 0.03076171875, 0.0221099853515625, -0.0097503662109375, -0.06695556640625, -0.049530029296875, 0.0134735107421875, -0.01154327392578125, -0.05755615234375, 0.03656005859375, -0.0229034423828125, -0.03692626953125, -0.0117340087890625, 0.002567291259765625, 0.0228118896484375, 0.01384735107421875, 0.02447509765625, 0.03680419921875, -0.0196685791015625, -0.0230560302734375, 0.00011599063873291016, -0.0169525146484375, 0.0010662078857421875, 0.0031585693359375, 0.036529541015625, -0.0287322998046875, -0.027740478515625, 
-0.024993896484375, 0.0250701904296875, 0.039459228515625, -0.011138916015625, 0.0499267578125, 0.04754638671875, -0.00548553466796875, 0.004917144775390625, -0.048187255859375, -0.00946044921875, -0.037353515625, 0.023712158203125, -0.01580810546875, -0.061859130859375, 0.049591064453125, 0.028350830078125, 0.033203125, 0.039337158203125, 0.044830322265625, -0.01885986328125, 0.08746337890625, 0.046112060546875, -0.028717041015625, 0.0343017578125, -0.05126953125, 0.0018053054809570312, -0.0469970703125, -0.01187896728515625, -0.032806396484375, -0.040771484375, -0.053558349609375, -0.0391845703125, 0.019744873046875, 0.00839996337890625, -0.0159759521484375, 0.031707763671875, -0.0435791015625, 0.01154327392578125, 0.04998779296875, 0.0039825439453125, 0.0186309814453125, -0.00977325439453125, -0.0163421630859375, 0.004108428955078125, -0.04962158203125, -0.0455322265625, 0.056854248046875, 0.0303192138671875, 0.056640625, 0.01326751708984375, 0.07470703125, 0.0248565673828125, 0.018463134765625, -0.037933349609375, 0.053863525390625, -0.005706787109375, -0.046783447265625, -0.022491455078125, -0.03887939453125, -0.080322265625, 0.0179595947265625, -0.01461029052734375, -0.0675048828125, 0.0236663818359375, 0.012359619140625, -0.03204345703125, 0.040496826171875, -0.03466796875, 0.05987548828125, -0.040008544921875, -0.03759765625, 0.01050567626953125, -0.070068359375, 0.031158447265625, 0.025054931640625, 0.00966644287109375, -0.028411865234375, -0.027130126953125, 0.0595703125, -0.048614501953125, 0.060577392578125, -0.026580810546875, -0.012939453125, 0.0260162353515625, -0.0016021728515625, 0.05230712890625, -0.01384735107421875, -0.005771636962890625, 0.024810791015625, 0.02301025390625, -0.0211029052734375, -0.039947509765625, 0.0582275390625, -0.08441162109375, -0.05364990234375, -0.0474853515625, -0.03778076171875, 0.006793975830078125, -0.0004401206970214844, 0.035247802734375, 0.0265655517578125, -0.00720977783203125, 0.003509521484375, 
0.040496826171875, -0.02947998046875, 0.04266357421875, 0.0322265625, -0.0325927734375, -0.026397705078125, 0.06744384765625, 0.00847625732421875, 0.0286102294921875, 0.0301055908203125, 0.004123687744140625, -0.019317626953125, -0.030853271484375, -0.041534423828125, 0.01824951171875, -0.044036865234375, -0.035980224609375, -0.05670166015625, -0.02008056640625, -0.00980377197265625, -0.01326751708984375, -0.021240234375, -0.03863525390625, -0.034149169921875, 0.002460479736328125, 0.034881591796875, 0.036865234375, -0.00565338134765625, 0.029937744140625, -0.049102783203125, 0.0190582275390625, 0.004283905029296875, 0.0008039474487304688, -0.01038360595703125, -0.040863037109375, 0.001155853271484375, 0.011077880859375, -0.043212890625, -0.0701904296875, 0.049041748046875, 0.01702880859375, 0.056915283203125, 0.0283203125, 0.004329681396484375, 0.040679931640625, -0.02471923828125, 0.07049560546875, 0.0272064208984375, -0.058349609375, 0.030517578125, -0.0233917236328125, 0.01122283935546875, 0.03289794921875, 0.0230865478515625, -0.0270538330078125, -0.04376220703125, -0.0472412109375, -0.055877685546875, 0.053466796875, 0.0293731689453125, -0.0084381103515625, -0.007656097412109375, 0.025634765625, -0.0004246234893798828, 0.01271820068359375, -0.06317138671875, -0.06292724609375, -0.0041961669921875, -0.0076904296875, -0.01274871826171875, -0.006153106689453125, -0.0019989013671875, -0.0264434814453125, 0.074462890625, -0.005191802978515625, 0.045806884765625, 0.028167724609375, -0.0011491775512695312, 0.00624847412109375, 0.0106048583984375, 0.049224853515625, 0.044342041015625, -0.0210418701171875, -0.006198883056640625, 0.0157928466796875, -0.033294677734375, 0.027252197265625, 0.024932861328125, -0.0180206298828125, -0.0010013580322265625, 0.041900634765625, 0.06671142578125, 0.004558563232421875, -0.03533935546875, 0.0478515625, -0.008941650390625, -0.0128173828125, -0.007198333740234375, 0.023956298828125, 0.0163421630859375, 0.037933349609375, 
0.032073974609375, -0.00870513916015625, 0.00482940673828125, -0.04962158203125, 0.0007600784301757812, 0.058380126953125, -0.02520751953125, -0.00809478759765625, 0.051055908203125, 0.007030487060546875, -0.016357421875, 0.0237579345703125, -0.03985595703125, -0.054473876953125, 0.0596923828125, 0.041839599609375, 0.054412841796875, -0.01396942138671875, -0.0012731552124023438, 0.0263519287109375, 0.031982421875, 0.002605438232421875, 0.02764892578125, 0.0005598068237304688, -0.0455322265625, -0.0233917236328125, -0.053924560546875, -0.024658203125, -0.005092620849609375, -0.0278778076171875, 0.0163116455078125, -0.0265960693359375, -0.03826904296875, -0.0087432861328125, 0.0360107421875, -0.04248046875, 0.0015707015991210938, 0.020263671875, 0.095703125, -0.04180908203125, 0.0633544921875, 0.0299072265625, -0.016693115234375, -0.0521240234375, -0.027557373046875, 0.0157928466796875, -0.07037353515625, 0.02886962890625, 0.006221771240234375, -0.0227203369140625, -0.004032135009765625, -0.052947998046875, -0.07086181640625, 0.10394287109375, 0.034637451171875, -0.06134033203125, -0.0153961181640625, -0.0012388229370117188, 0.0191650390625, -0.031280517578125, 0.0302276611328125, 0.05499267578125, 0.046722412109375, -0.004894256591796875, -0.07623291015625, 0.0148773193359375, -0.044219970703125, -0.0126190185546875, 0.017333984375, -0.052703857421875, 0.0711669921875, -0.0270233154296875, -0.006397247314453125, 0.0267486572265625, 0.053558349609375, 0.045440673828125, 0.0279388427734375, 0.039947509765625, 0.044891357421875, 0.05230712890625, -0.03375244140625, 0.0877685546875, -0.040557861328125, 0.02734375, 0.078125, -0.0017337799072265625, 0.053863525390625, 0.0253448486328125, -0.0426025390625, 0.039825439453125, 0.05279541015625, 0.0094451904296875, 0.02001953125, 0.00624847412109375, -0.01085662841796875, -0.00836181640625, 0.01812744140625, -0.0516357421875, 0.0248565673828125, 0.0161285400390625, -0.0303192138671875, -0.0076904296875, 
0.00027680397033691406, 0.0236358642578125, -0.0207366943359375, -0.0098419189453125, 0.052215576171875, 0.0129547119140625, -0.039337158203125, 0.06341552734375, 0.0084686279296875, 0.0819091796875, -0.07159423828125, -0.00046896934509277344, -0.0318603515625, 0.0149688720703125, -0.019134521484375, -0.061798095703125, -0.0005173683166503906, -0.002834320068359375, -0.0142364501953125, 0.00620269775390625, 0.046356201171875, -0.0178680419921875, -0.03759765625, 0.0516357421875, 0.044036865234375, 0.0256805419921875, -0.00040268898010253906, -0.08233642578125, 0.0302276611328125, 0.0162200927734375, -0.03765869140625, 0.027496337890625, 0.019439697265625, -0.005207061767578125, 0.062347412109375, 0.048095703125, 0.0005459785461425781, 0.006916046142578125, -0.0213165283203125, 0.07220458984375, -0.053497314453125, -0.0307769775390625, -0.0772705078125, 0.02734375, -0.002201080322265625, -0.02001953125, 0.0633544921875, 0.038116455078125, 0.0367431640625, 0.02606201171875, 0.051483154296875, -0.0261077880859375, 0.0241241455078125, -0.025238037109375, 0.05908203125, -0.0604248046875, 0.018402099609375, -0.0304412841796875, -0.06964111328125, -0.006000518798828125, 0.049163818359375, -0.0193328857421875, 0.022796630859375, 0.0255279541015625, 0.07257080078125, 0.00034880638122558594, -0.0143585205078125, 0.0284576416015625, 0.0283203125, 0.029693603515625, 0.056640625, 0.05670166015625, -0.037872314453125, 0.040374755859375, -0.034423828125, -0.036865234375, -0.0131072998046875, -0.07867431640625, -0.05084228515625, -0.02972412109375, -0.0195465087890625, -0.030303955078125, 0.0192413330078125, 0.0948486328125, 0.051300048828125, -0.04754638671875, -0.02496337890625, 0.0242156982421875, 0.008544921875, -0.0194854736328125, -0.018707275390625, 0.032470703125, -0.01497650146484375, -0.051361083984375, 0.0272369384765625, 0.015472412109375, 0.016387939453125, -0.02886962890625, -0.032073974609375, -0.01995849609375, 0.009368896484375, 0.044036865234375, 
0.0279388427734375, -0.06781005859375, -0.00597381591796875, 0.0175628662109375, -0.0216522216796875, 0.0267333984375, 0.01515960693359375, -0.06317138671875, 0.0098114013671875, 0.0200958251953125, 0.0186614990234375, 0.045074462890625, -0.00531768798828125, 0.0231475830078125, -0.034637451171875, 0.00008398294448852539, 0.004367828369140625, 0.042266845703125, 0.026153564453125, -0.03375244140625, 0.042022705078125, 0.0100555419921875, -0.046112060546875, -0.052032470703125, -0.01238250732421875, -0.0863037109375, -0.0125885009765625, 0.0780029296875, -0.025909423828125, -0.027435302734375, 0.01324462890625, -0.03326416015625, 0.014892578125, -0.035736083984375, 0.030609130859375, 0.032806396484375, -0.0216217041015625, -0.00559234619140625, -0.04840087890625, 0.033599853515625, 0.01204681396484375, -0.06341552734375, 0.0018100738525390625, 0.023468017578125, 0.0254974365234375, 0.0287017822265625, 0.058380126953125, -0.005672454833984375, 0.0159759521484375, -0.015777587890625, 0.020355224609375, -0.0140838623046875, -0.0043792724609375, -0.0111083984375, -0.00551605224609375, -0.00241851806640625, -0.0169525146484375 ] ]
philschmid/pyannote-segmentation
2022-11-08T17:15:47.000Z
[ "pyannote-audio", "pytorch", "pyannote", "pyannote-audio-model", "audio", "voice", "speech", "speaker", "speaker-segmentation", "voice-activity-detection", "overlapped-speech-detection", "resegmentation", "dataset:ami", "dataset:dihard", "dataset:voxconverse", "arxiv:2104.04045", "license:mit", "region:us" ]
voice-activity-detection
philschmid
null
null
philschmid/pyannote-segmentation
4
9,055
pyannote-audio
2022-11-08T17:13:14
--- tags: - pyannote - pyannote-audio - pyannote-audio-model - audio - voice - speech - speaker - speaker-segmentation - voice-activity-detection - overlapped-speech-detection - resegmentation datasets: - ami - dihard - voxconverse license: mit inference: false --- # 🎹 Speaker segmentation ![Example](example.png) Model from *[End-to-end speaker segmentation for overlap-aware resegmentation](http://arxiv.org/abs/2104.04045)*, by Hervé Bredin and Antoine Laurent. [Online demo](https://huggingface.co/spaces/pyannote/pretrained-pipelines) is available as a Hugging Face Space. ## Support For commercial enquiries and scientific consulting, please contact [me](mailto:herve@niderb.fr). For [technical questions](https://github.com/pyannote/pyannote-audio/discussions) and [bug reports](https://github.com/pyannote/pyannote-audio/issues), please check [pyannote.audio](https://github.com/pyannote/pyannote-audio) Github repository. ## Usage Relies on pyannote.audio 2.0 currently in development: see [installation instructions](https://github.com/pyannote/pyannote-audio/tree/develop#installation). ### Voice activity detection ```python from pyannote.audio.pipelines import VoiceActivityDetection pipeline = VoiceActivityDetection(segmentation="pyannote/segmentation") HYPER_PARAMETERS = { # onset/offset activation thresholds "onset": 0.5, "offset": 0.5, # remove speech regions shorter than that many seconds. "min_duration_on": 0.0, # fill non-speech regions shorter than that many seconds. 
"min_duration_off": 0.0 } pipeline.instantiate(HYPER_PARAMETERS) vad = pipeline("audio.wav") # `vad` is a pyannote.core.Annotation instance containing speech regions ``` ### Overlapped speech detection ```python from pyannote.audio.pipelines import OverlappedSpeechDetection pipeline = OverlappedSpeechDetection(segmentation="pyannote/segmentation") pipeline.instantiate(HYPER_PARAMETERS) osd = pipeline("audio.wav") # `osd` is a pyannote.core.Annotation instance containing overlapped speech regions ``` ### Resegmentation ```python from pyannote.audio.pipelines import Resegmentation pipeline = Resegmentation(segmentation="pyannote/segmentation", diarization="baseline") pipeline.instantiate(HYPER_PARAMETERS) resegmented_baseline = pipeline({"audio": "audio.wav", "baseline": baseline}) # where `baseline` should be provided as a pyannote.core.Annotation instance ``` ### Raw scores ```python from pyannote.audio import Inference inference = Inference("pyannote/segmentation") segmentation = inference("audio.wav") # `segmentation` is a pyannote.core.SlidingWindowFeature # instance containing raw segmentation scores like the # one pictured above (output) ``` ## Reproducible research In order to reproduce the results of the paper ["End-to-end speaker segmentation for overlap-aware resegmentation "](https://arxiv.org/abs/2104.04045), use `pyannote/segmentation@Interspeech2021` with the following hyper-parameters: | Voice activity detection | `onset` | `offset` | `min_duration_on` | `min_duration_off` | | ------------------------ | ------- | -------- | ----------------- | ------------------ | | AMI Mix-Headset | 0.684 | 0.577 | 0.181 | 0.037 | | DIHARD3 | 0.767 | 0.377 | 0.136 | 0.067 | | VoxConverse | 0.767 | 0.713 | 0.182 | 0.501 | | Overlapped speech detection | `onset` | `offset` | `min_duration_on` | `min_duration_off` | | --------------------------- | ------- | -------- | ----------------- | ------------------ | | AMI Mix-Headset | 0.448 | 0.362 | 0.116 | 0.187 | | DIHARD3 
| 0.430 | 0.320 | 0.091 | 0.144 | | VoxConverse | 0.587 | 0.426 | 0.337 | 0.112 | | Resegmentation of VBx | `onset` | `offset` | `min_duration_on` | `min_duration_off` | | --------------------- | ------- | -------- | ----------------- | ------------------ | | AMI Mix-Headset | 0.542 | 0.527 | 0.044 | 0.705 | | DIHARD3 | 0.592 | 0.489 | 0.163 | 0.182 | | VoxConverse | 0.537 | 0.724 | 0.410 | 0.563 | Expected outputs (and VBx baseline) are also provided in the `/reproducible_research` sub-directories. ## Citation ```bibtex @inproceedings{Bredin2021, Title = {{End-to-end speaker segmentation for overlap-aware resegmentation}}, Author = {{Bredin}, Herv{\'e} and {Laurent}, Antoine}, Booktitle = {Proc. Interspeech 2021}, Address = {Brno, Czech Republic}, Month = {August}, Year = {2021}, ``` ```bibtex @inproceedings{Bredin2020, Title = {{pyannote.audio: neural building blocks for speaker diarization}}, Author = {{Bredin}, Herv{\'e} and {Yin}, Ruiqing and {Coria}, Juan Manuel and {Gelly}, Gregory and {Korshunov}, Pavel and {Lavechin}, Marvin and {Fustes}, Diego and {Titeux}, Hadrien and {Bouaziz}, Wassim and {Gill}, Marie-Philippe}, Booktitle = {ICASSP 2020, IEEE International Conference on Acoustics, Speech, and Signal Processing}, Address = {Barcelona, Spain}, Month = {May}, Year = {2020}, } ```
5,286
[ [ -0.04730224609375, -0.052032470703125, 0.019378662109375, 0.0249481201171875, -0.0252227783203125, -0.01427459716796875, -0.03131103515625, -0.0243682861328125, 0.0377197265625, 0.023193359375, -0.045135498046875, -0.0426025390625, -0.0236358642578125, -0.022216796875, -0.005710601806640625, 0.035614013671875, 0.0244140625, -0.0124053955078125, -0.01052093505859375, 0.0006341934204101562, -0.023590087890625, -0.01444244384765625, -0.02471923828125, -0.0233917236328125, 0.007747650146484375, 0.038726806640625, 0.01509857177734375, 0.048980712890625, 0.01468658447265625, 0.025665283203125, -0.038116455078125, 0.00923919677734375, 0.0010614395141601562, -0.006473541259765625, 0.021514892578125, 0.0020656585693359375, -0.0273590087890625, 0.004528045654296875, 0.061920166015625, 0.04803466796875, -0.01019287109375, 0.013427734375, -0.005092620849609375, 0.024444580078125, -0.0173797607421875, 0.0037841796875, -0.033233642578125, -0.007305145263671875, -0.0205535888671875, -0.0167388916015625, -0.018798828125, -0.003543853759765625, 0.020904541015625, -0.039794921875, -0.00403594970703125, -0.00647735595703125, 0.095703125, 0.00836944580078125, 0.0095367431640625, -0.005886077880859375, -0.035614013671875, 0.05413818359375, -0.07537841796875, 0.03369140625, 0.025543212890625, 0.0095367431640625, -0.020172119140625, -0.055755615234375, -0.045440673828125, -0.002582550048828125, -0.01031494140625, 0.0195159912109375, -0.015625, 0.01276397705078125, 0.0238800048828125, 0.029022216796875, -0.035369873046875, 0.006671905517578125, -0.040771484375, -0.0313720703125, 0.058807373046875, -0.0127716064453125, 0.026763916015625, -0.0245208740234375, -0.0255889892578125, -0.022735595703125, -0.0081024169921875, 0.0258026123046875, 0.036865234375, 0.023223876953125, -0.033233642578125, 0.0264892578125, 0.0057525634765625, 0.0673828125, 0.01235198974609375, -0.01873779296875, 0.06268310546875, -0.03228759765625, -0.01496124267578125, 0.0347900390625, 0.08349609375, 
0.0187530517578125, 0.00690460205078125, 0.029388427734375, 0.004192352294921875, -0.0278167724609375, -0.01117706298828125, -0.055206298828125, -0.042022705078125, 0.0433349609375, -0.039093017578125, 0.0175628662109375, -0.006591796875, -0.06915283203125, -0.00012505054473876953, -0.021484375, 0.034912109375, -0.047882080078125, -0.0439453125, 0.00487518310546875, -0.0240325927734375, 0.016387939453125, -0.0019969940185546875, -0.078125, 0.0162811279296875, 0.0352783203125, 0.07861328125, 0.0074005126953125, -0.025634765625, -0.0305328369140625, -0.01064300537109375, -0.0093536376953125, 0.05694580078125, -0.0279083251953125, -0.0297698974609375, -0.029754638671875, -0.00412750244140625, -0.0288238525390625, -0.0452880859375, 0.053955078125, 0.013092041015625, 0.024658203125, -0.01218414306640625, -0.043731689453125, -0.00923919677734375, -0.010894775390625, -0.02325439453125, 0.07122802734375, 0.01509857177734375, -0.056396484375, 0.024017333984375, -0.04180908203125, -0.007232666015625, -0.01268768310546875, -0.018829345703125, -0.062225341796875, -0.0226898193359375, 0.029876708984375, 0.0170135498046875, -0.007610321044921875, -0.00469207763671875, -0.01329803466796875, -0.0252685546875, 0.01351165771484375, -0.02288818359375, 0.084228515625, 0.0024967193603515625, -0.04937744140625, 0.0200958251953125, -0.0760498046875, 0.00611114501953125, 0.0024261474609375, -0.033416748046875, -0.01404571533203125, -0.002002716064453125, 0.0128173828125, 0.00826263427734375, 0.0114593505859375, -0.056549072265625, -0.01580810546875, -0.04144287109375, 0.03399658203125, 0.051666259765625, 0.0167694091796875, 0.006526947021484375, -0.022918701171875, 0.01174163818359375, 0.00522613525390625, 0.0017175674438476562, -0.032562255859375, -0.04840087890625, -0.039520263671875, -0.056915283203125, 0.0220794677734375, 0.044952392578125, -0.00730133056640625, 0.06573486328125, -0.0134124755859375, -0.05108642578125, -0.04150390625, -0.0032863616943359375, 0.03759765625, 
0.048583984375, 0.04510498046875, -0.025665283203125, -0.05633544921875, -0.07257080078125, -0.01540374755859375, -0.0308685302734375, 0.006587982177734375, 0.052154541015625, 0.0205535888671875, -0.00423431396484375, 0.076416015625, -0.028900146484375, -0.012054443359375, 0.00012040138244628906, -0.00589752197265625, 0.036865234375, 0.059112548828125, 0.032012939453125, -0.060455322265625, -0.041168212890625, 0.00472259521484375, -0.0239410400390625, -0.031707763671875, -0.0283355712890625, -0.01505279541015625, -0.00763702392578125, 0.040496826171875, -0.046539306640625, 0.0267333984375, 0.01015472412109375, -0.0243682861328125, 0.061981201171875, 0.00965118408203125, 0.000701904296875, -0.06146240234375, 0.00787353515625, 0.0194091796875, 0.00616455078125, -0.05743408203125, -0.06378173828125, -0.005474090576171875, 0.004058837890625, -0.024200439453125, 0.037872314453125, -0.047698974609375, -0.0212249755859375, 0.0081024169921875, 0.03326416015625, -0.0135345458984375, 0.05902099609375, 0.012359619140625, 0.050628662109375, 0.047271728515625, -0.03424072265625, 0.0207366943359375, 0.033905029296875, -0.055816650390625, 0.02752685546875, -0.05908203125, 0.002422332763671875, 0.0255126953125, 0.01454925537109375, -0.1038818359375, -0.01493072509765625, 0.04266357421875, -0.07177734375, 0.0279541015625, -0.031341552734375, -0.00467681884765625, -0.0224151611328125, -0.01081085205078125, 0.036712646484375, 0.032135009765625, -0.04095458984375, 0.020477294921875, 0.042724609375, -0.0240936279296875, -0.033538818359375, -0.0509033203125, -0.01525115966796875, -0.0271453857421875, -0.0643310546875, 0.05194091796875, 0.0013456344604492188, -0.0225982666015625, -0.0018663406372070312, -0.0162506103515625, -0.0009002685546875, -0.0221710205078125, 0.01593017578125, 0.00605010986328125, -0.0216827392578125, -0.01025390625, -0.00914764404296875, -0.0012865066528320312, 0.0008525848388671875, -0.026763916015625, 0.04510498046875, 0.0029201507568359375, -0.02996826171875, 
-0.061859130859375, 0.009246826171875, 0.04583740234375, -0.039306640625, 0.0205535888671875, 0.06573486328125, -0.0181427001953125, 0.0065765380859375, -0.037506103515625, 0.005130767822265625, -0.034912109375, 0.04840087890625, -0.016876220703125, -0.05877685546875, 0.053375244140625, 0.007289886474609375, 0.018341064453125, 0.036865234375, 0.041168212890625, -0.0039215087890625, 0.050445556640625, -0.00017011165618896484, 0.0084228515625, 0.0648193359375, -0.0296478271484375, 0.019805908203125, -0.09832763671875, -0.0379638671875, -0.04425048828125, -0.0218353271484375, -0.040863037109375, -0.037445068359375, 0.0198211669921875, 0.00482940673828125, -0.006923675537109375, 0.027740478515625, -0.059112548828125, 0.018310546875, 0.037689208984375, -0.006103515625, -0.039093017578125, 0.0203857421875, -0.0235595703125, -0.013427734375, -0.040191650390625, -0.0126190185546875, 0.052215576171875, 0.02667236328125, 0.017242431640625, -0.00103759765625, 0.058349609375, 0.0014200210571289062, -0.0303192138671875, -0.05853271484375, 0.0384521484375, 0.0009212493896484375, -0.03179931640625, -0.04730224609375, -0.0347900390625, -0.07806396484375, 0.050567626953125, 0.013092041015625, -0.09356689453125, 0.05389404296875, -0.0011310577392578125, -0.03900146484375, 0.031585693359375, -0.062744140625, 0.073486328125, -0.0137176513671875, -0.0208740234375, 0.0023250579833984375, -0.049560546875, 0.01500701904296875, 0.029693603515625, 0.017242431640625, -0.025970458984375, 0.0197296142578125, 0.08746337890625, -0.0300140380859375, 0.046356201171875, -0.039459228515625, 0.007049560546875, 0.054290771484375, -0.0202178955078125, 0.01885986328125, 0.0019445419311523438, 0.0081329345703125, 0.006023406982421875, 0.0005197525024414062, -0.027130126953125, -0.0222930908203125, 0.051116943359375, -0.05670166015625, -0.044097900390625, -0.01271820068359375, -0.026031494140625, -0.00843048095703125, 0.013092041015625, 0.01690673828125, 0.049530029296875, 0.00542449951171875, 
0.022796630859375, 0.052032470703125, -0.02978515625, 0.05194091796875, 0.0097503662109375, 0.006801605224609375, -0.0709228515625, 0.072021484375, 0.0198211669921875, 0.02008056640625, 0.024749755859375, 0.0198974609375, -0.032135009765625, -0.04754638671875, -0.0208587646484375, 0.0267791748046875, -0.03350830078125, 0.0161895751953125, -0.0440673828125, -0.0190277099609375, -0.0501708984375, 0.00594329833984375, -0.0465087890625, -0.03814697265625, -0.028564453125, 0.005870819091796875, 0.016845703125, -0.004512786865234375, -0.0295562744140625, 0.02862548828125, -0.0418701171875, 0.01024627685546875, 0.01690673828125, 0.013336181640625, -0.0152130126953125, -0.06866455078125, -0.031829833984375, -0.003604888916015625, -0.012237548828125, -0.058349609375, 0.025421142578125, 0.00970458984375, 0.0706787109375, 0.0227203369140625, -0.01422119140625, 0.06793212890625, -0.00939178466796875, 0.0760498046875, 0.0240631103515625, -0.0718994140625, 0.045806884765625, -0.027313232421875, 0.028839111328125, 0.036468505859375, 0.001926422119140625, -0.056549072265625, 0.007091522216796875, -0.05609130859375, -0.09954833984375, 0.08367919921875, 0.0275421142578125, -0.0227203369140625, 0.004199981689453125, 0.007366180419921875, -0.01751708984375, -0.006519317626953125, -0.042510986328125, -0.033966064453125, -0.025665283203125, -0.0029850006103515625, -0.00572967529296875, -0.025665283203125, 0.0037364959716796875, -0.038726806640625, 0.06298828125, 0.0131072998046875, 0.04522705078125, 0.060394287109375, 0.0024566650390625, -0.0008931159973144531, 0.004459381103515625, 0.068359375, 0.045684814453125, -0.0367431640625, -0.007843017578125, -0.004364013671875, -0.04248046875, 0.0060272216796875, 0.01493072509765625, 0.00537872314453125, 0.0267333984375, 0.043121337890625, 0.07879638671875, 0.00440216064453125, -0.032989501953125, 0.033477783203125, -0.004688262939453125, -0.035369873046875, -0.044830322265625, -0.004566192626953125, 0.0158538818359375, 0.019134521484375, 
0.028228759765625, -0.006404876708984375, -0.005828857421875, -0.0149078369140625, 0.032745361328125, 0.01108551025390625, -0.037139892578125, -0.01059722900390625, 0.0292816162109375, 0.0163421630859375, -0.04913330078125, 0.04278564453125, -0.00672149658203125, -0.043060302734375, 0.06787109375, 0.029388427734375, 0.08489990234375, -0.036773681640625, -0.0062713623046875, 0.056365966796875, 0.0279998779296875, 0.016998291015625, 0.012908935546875, -0.033721923828125, -0.039581298828125, -0.0197296142578125, -0.0460205078125, -0.01059722900390625, 0.032989501953125, -0.03851318359375, 0.006450653076171875, -0.035186767578125, -0.0167999267578125, 0.031890869140625, 0.00725555419921875, -0.00008213520050048828, 0.0203704833984375, 0.0130462646484375, 0.0643310546875, -0.0462646484375, 0.04901123046875, 0.044647216796875, -0.0141754150390625, -0.059295654296875, 0.01111602783203125, 0.0006990432739257812, -0.024444580078125, 0.014556884765625, 0.01207733154296875, -0.000042557716369628906, -0.01458740234375, -0.01959228515625, -0.06549072265625, 0.0797119140625, 0.01328277587890625, -0.055145263671875, 0.02166748046875, -0.007770538330078125, 0.0210418701171875, -0.028900146484375, 0.027496337890625, 0.0611572265625, 0.050506591796875, 0.0003745555877685547, -0.0826416015625, -0.0045928955078125, -0.0418701171875, -0.0211944580078125, 0.0222320556640625, -0.07666015625, 0.0748291015625, -0.0015888214111328125, -0.0014486312866210938, -0.007354736328125, 0.049163818359375, 0.039642333984375, 0.0290069580078125, 0.059844970703125, 0.04510498046875, 0.046905517578125, -0.0177764892578125, 0.034393310546875, -0.0218505859375, 0.01540374755859375, 0.08526611328125, 0.0212249755859375, 0.044464111328125, 0.037841796875, -0.03448486328125, 0.0309600830078125, 0.0711669921875, -0.01427459716796875, 0.03741455078125, 0.0218963623046875, -0.0073699951171875, -0.02490234375, 0.0019016265869140625, -0.045654296875, 0.04742431640625, 0.039459228515625, -0.026092529296875, 
0.021240234375, -0.028045654296875, 0.003520965576171875, -0.006473541259765625, -0.005687713623046875, 0.035400390625, 0.00502777099609375, -0.046112060546875, 0.06365966796875, -0.01032257080078125, 0.04718017578125, -0.03765869140625, 0.00244903564453125, -0.00998687744140625, 0.00640869140625, -0.0367431640625, -0.04937744140625, 0.00750732421875, -0.0196990966796875, -0.006526947021484375, -0.0008363723754882812, 0.04095458984375, -0.050689697265625, -0.01035308837890625, 0.03265380859375, 0.0020313262939453125, 0.0283203125, 0.010894775390625, -0.0631103515625, 0.01279449462890625, 0.033966064453125, -0.03411865234375, 0.0107879638671875, 0.03369140625, 0.0175933837890625, 0.0226898193359375, 0.0478515625, 0.014312744140625, 0.02203369140625, 0.007137298583984375, 0.0501708984375, -0.043792724609375, -0.061920166015625, -0.06500244140625, 0.041168212890625, -0.03662109375, -0.0384521484375, 0.06121826171875, 0.0662841796875, 0.06884765625, 0.024505615234375, 0.047332763671875, -0.0103759765625, 0.04119873046875, -0.0325927734375, 0.04864501953125, -0.042022705078125, 0.0290374755859375, -0.045379638671875, -0.055572509765625, -0.00885772705078125, 0.057342529296875, -0.027740478515625, 0.0185089111328125, 0.03399658203125, 0.06634521484375, -0.01043701171875, -0.01186370849609375, 0.01529693603515625, 0.0157623291015625, 0.034912109375, 0.040618896484375, 0.050567626953125, -0.0288848876953125, 0.04931640625, -0.041107177734375, -0.0129241943359375, -0.020751953125, -0.0267791748046875, -0.05194091796875, -0.06500244140625, -0.0458984375, -0.0196380615234375, -0.00592041015625, 0.07391357421875, 0.0673828125, -0.046630859375, -0.04351806640625, -0.0007486343383789062, 0.019134521484375, -0.0253143310546875, -0.0172271728515625, 0.058441162109375, -0.0002911090850830078, -0.048431396484375, 0.0467529296875, 0.025421142578125, 0.007671356201171875, 0.00939178466796875, -0.0245819091796875, -0.0509033203125, 0.004306793212890625, 0.0019931793212890625, 
0.042266845703125, -0.050445556640625, -0.00914764404296875, -0.033477783203125, 0.0143585205078125, 0.033966064453125, 0.042449951171875, -0.023681640625, 0.045501708984375, 0.048736572265625, 0.018463134765625, 0.05462646484375, 0.003047943115234375, 0.005283355712890625, -0.049407958984375, 0.0219879150390625, 0.0205841064453125, 0.0285491943359375, 0.03240966796875, -0.00905609130859375, 0.0172119140625, 0.038299560546875, -0.048583984375, -0.0799560546875, -0.0127716064453125, -0.0784912109375, -0.0202178955078125, 0.084716796875, -0.0174560546875, -0.036651611328125, -0.00922393798828125, -0.0286407470703125, 0.035369873046875, -0.045684814453125, 0.04296875, 0.042388916015625, -0.028839111328125, -0.01103973388671875, -0.026641845703125, 0.051483154296875, 0.033050537109375, -0.040435791015625, 0.014373779296875, 0.03253173828125, 0.01554107666015625, 0.041168212890625, 0.06292724609375, -0.01342010498046875, 0.028839111328125, 0.037506103515625, 0.01508331298828125, -0.020294189453125, -0.01189422607421875, -0.0230865478515625, 0.0020599365234375, -0.01186370849609375, -0.053955078125 ] ]
sentence-transformers/sentence-t5-large
2022-02-09T14:01:09.000Z
[ "sentence-transformers", "pytorch", "t5", "feature-extraction", "sentence-similarity", "transformers", "en", "arxiv:2108.08877", "license:apache-2.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
sentence-similarity
sentence-transformers
null
null
sentence-transformers/sentence-t5-large
14
9,023
sentence-transformers
2022-03-02T23:29:05
--- pipeline_tag: sentence-similarity language: en license: apache-2.0 tags: - sentence-transformers - feature-extraction - sentence-similarity - transformers --- # sentence-transformers/sentence-t5-large This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768 dimensional dense vector space. The model works well for sentence similarity tasks, but doesn't perform that well for semantic search tasks. This model was converted from the Tensorflow model [st5-large-1](https://tfhub.dev/google/sentence-t5/st5-large/1) to PyTorch. When using this model, have a look at the publication: [Sentence-T5: Scalable sentence encoders from pre-trained text-to-text models](https://arxiv.org/abs/2108.08877). The tfhub model and this PyTorch model can produce slightly different embeddings, however, when run on the same benchmarks, they produce identical results. The model uses only the encoder from a T5-large model. The weights are stored in FP16. ## Usage (Sentence-Transformers) Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed: ``` pip install -U sentence-transformers ``` Then you can use the model like this: ```python from sentence_transformers import SentenceTransformer sentences = ["This is an example sentence", "Each sentence is converted"] model = SentenceTransformer('sentence-transformers/sentence-t5-large') embeddings = model.encode(sentences) print(embeddings) ``` The model requires sentence-transformers version 2.2.0 or newer. ## Evaluation Results For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=sentence-transformers/sentence-t5-large) ## Citing & Authors If you find this model helpful, please cite the respective publication: [Sentence-T5: Scalable sentence encoders from pre-trained text-to-text models](https://arxiv.org/abs/2108.08877)
2,020
[ [ -0.0093231201171875, -0.04937744140625, 0.044830322265625, 0.024810791015625, -0.0195770263671875, -0.02410888671875, -0.023712158203125, -0.0207366943359375, 0.00778961181640625, 0.039520263671875, -0.03521728515625, -0.043975830078125, -0.06170654296875, 0.022552490234375, -0.058319091796875, 0.07037353515625, -0.011322021484375, -0.0017948150634765625, -0.02471923828125, -0.00589752197265625, -0.019287109375, -0.02276611328125, -0.0227508544921875, -0.0187835693359375, 0.032135009765625, 0.02276611328125, 0.052764892578125, 0.033203125, 0.058349609375, 0.0304718017578125, -0.00482177734375, -0.013031005859375, -0.04400634765625, -0.003345489501953125, -0.0095062255859375, -0.015655517578125, -0.0164031982421875, -0.0036945343017578125, 0.04278564453125, 0.045318603515625, -0.01345062255859375, 0.02215576171875, -0.0208892822265625, 0.013671875, -0.034454345703125, 0.01849365234375, -0.036163330078125, 0.02032470703125, 0.001007080078125, -0.00037026405334472656, -0.04339599609375, -0.0325927734375, 0.0278167724609375, -0.027801513671875, 0.025665283203125, 0.0209503173828125, 0.08648681640625, 0.03326416015625, -0.0386962890625, -0.01947021484375, -0.03485107421875, 0.05322265625, -0.05401611328125, 0.0302886962890625, 0.00920867919921875, 0.0217437744140625, -0.01114654541015625, -0.0931396484375, -0.04656982421875, -0.0318603515625, -0.012054443359375, 0.009307861328125, -0.03521728515625, 0.011749267578125, 0.041839599609375, 0.0274810791015625, -0.05023193359375, 0.007083892822265625, -0.04736328125, -0.0146484375, 0.02606201171875, 0.0106201171875, 0.02191162109375, -0.0338134765625, -0.036895751953125, -0.0260772705078125, -0.0206298828125, -0.0117950439453125, 0.016693115234375, -0.0015707015991210938, -0.01288604736328125, 0.072265625, -0.006717681884765625, 0.04803466796875, 0.003765106201171875, 0.001171112060546875, 0.039764404296875, -0.0196685791015625, -0.0116119384765625, 0.00357818603515625, 0.08038330078125, 0.0369873046875, 
0.03057861328125, -0.030364990234375, -0.0189666748046875, 0.00879669189453125, 0.04437255859375, -0.087890625, -0.0201568603515625, 0.020263671875, -0.04742431640625, -0.03155517578125, 0.0169677734375, -0.038238525390625, -0.0029582977294921875, 0.00753021240234375, 0.058197021484375, -0.0386962890625, 0.01593017578125, 0.0201568603515625, -0.0293426513671875, 0.0208892822265625, -0.00481414794921875, -0.05474853515625, 0.022735595703125, 0.0295257568359375, 0.0631103515625, -0.004665374755859375, -0.03662109375, -0.025115966796875, -0.0003135204315185547, -0.016021728515625, 0.05291748046875, -0.032989501953125, -0.01270294189453125, 0.008758544921875, 0.0188751220703125, -0.004756927490234375, -0.03900146484375, 0.069580078125, -0.02423095703125, 0.043670654296875, 0.006969451904296875, -0.05340576171875, -0.01364898681640625, 0.01430511474609375, -0.05059814453125, 0.07159423828125, 0.01561737060546875, -0.0614013671875, 0.0199432373046875, -0.055633544921875, -0.0272369384765625, -0.017181396484375, 0.01366424560546875, -0.060943603515625, 0.0142364501953125, 0.01873779296875, 0.05096435546875, -0.0261077880859375, 0.01212310791015625, -0.039520263671875, -0.048614501953125, 0.0247955322265625, -0.0192108154296875, 0.0604248046875, 0.00803375244140625, -0.0212860107421875, 0.023773193359375, -0.035919189453125, -0.0090484619140625, 0.0152130126953125, -0.01715087890625, 0.006134033203125, -0.00782012939453125, 0.03399658203125, 0.0181427001953125, 0.037322998046875, -0.053802490234375, 0.028900146484375, -0.0261077880859375, 0.06494140625, 0.034759521484375, -0.0142364501953125, 0.039459228515625, -0.021942138671875, 0.0219573974609375, 0.023773193359375, -0.010650634765625, -0.00905609130859375, -0.0186309814453125, -0.058563232421875, -0.0027828216552734375, 0.0216827392578125, 0.020751953125, -0.051025390625, 0.06103515625, -0.053436279296875, -0.045501708984375, -0.0537109375, -0.0188446044921875, -0.0024433135986328125, 0.0238800048828125, 
0.048370361328125, 0.0012197494506835938, -0.0460205078125, -0.08001708984375, -0.028472900390625, 0.00803375244140625, -0.007511138916015625, -0.00872039794921875, 0.06854248046875, -0.035003662109375, 0.05181884765625, -0.043701171875, -0.036529541015625, -0.0252227783203125, 0.01088714599609375, 0.0109405517578125, 0.0307464599609375, 0.044647216796875, -0.0455322265625, -0.0207366943359375, -0.0191650390625, -0.046966552734375, -0.0035724639892578125, -0.0006265640258789062, 0.0082550048828125, 0.011474609375, 0.042816162109375, -0.0704345703125, 0.0272979736328125, 0.0416259765625, -0.044097900390625, 0.0213165283203125, -0.0265350341796875, -0.0028858184814453125, -0.1185302734375, 0.0070037841796875, -0.0013074874877929688, -0.0287933349609375, -0.01175689697265625, 0.01085662841796875, 0.0189208984375, -0.0206298828125, -0.017822265625, 0.024169921875, -0.022430419921875, -0.01248931884765625, -0.003932952880859375, 0.01277923583984375, -0.00846099853515625, 0.033599853515625, -0.0186920166015625, 0.06982421875, 0.021697998046875, -0.02313232421875, 0.048431396484375, 0.046234130859375, -0.0416259765625, 0.01335906982421875, -0.07177734375, 0.0106353759765625, -0.00983428955078125, 0.03765869140625, -0.07147216796875, -0.0191650390625, 0.009552001953125, -0.03558349609375, -0.00603485107421875, 0.01678466796875, -0.059478759765625, -0.042877197265625, -0.0273895263671875, 0.014739990234375, 0.047271728515625, -0.04052734375, 0.05169677734375, 0.0020847320556640625, 0.00757598876953125, -0.039215087890625, -0.07269287109375, 0.01277923583984375, -0.0234222412109375, -0.049591064453125, 0.051483154296875, 0.00666046142578125, 0.0167694091796875, 0.02294921875, 0.002986907958984375, 0.0025539398193359375, -0.01142120361328125, 0.009765625, -0.0018205642700195312, -0.01458740234375, 0.0200958251953125, -0.0016584396362304688, -0.00672149658203125, 0.01160430908203125, -0.034210205078125, 0.05059814453125, -0.01253509521484375, -0.0012731552124023438, 
-0.0309906005859375, 0.01629638671875, 0.05242919921875, -0.0177764892578125, 0.07415771484375, 0.07830810546875, -0.0197296142578125, -0.01303863525390625, -0.0433349609375, -0.019134521484375, -0.03411865234375, 0.053802490234375, -0.03326416015625, -0.080078125, 0.02606201171875, 0.0002586841583251953, -0.006153106689453125, 0.055267333984375, 0.03704833984375, -0.00902557373046875, 0.0601806640625, 0.04180908203125, -0.00789642333984375, 0.039276123046875, -0.021392822265625, 0.032318115234375, -0.0479736328125, -0.00598907470703125, -0.046356201171875, -0.03021240234375, -0.064697265625, -0.0264129638671875, 0.0200042724609375, -0.02142333984375, -0.040740966796875, 0.05145263671875, -0.040740966796875, 0.0183868408203125, 0.04022216796875, 0.004886627197265625, 0.0032215118408203125, 0.01617431640625, -0.0089874267578125, -0.009735107421875, -0.048980712890625, -0.033782958984375, 0.0772705078125, 0.0209197998046875, 0.0526123046875, 0.01123046875, 0.036773681640625, 0.002410888671875, -0.017974853515625, -0.06756591796875, 0.042999267578125, -0.039886474609375, -0.0276336669921875, -0.01299285888671875, -0.03680419921875, -0.084716796875, 0.013671875, -0.0282135009765625, -0.05511474609375, -0.017333984375, -0.0299224853515625, -0.0105133056640625, 0.02008056640625, -0.06982421875, 0.09698486328125, 0.0005230903625488281, -0.00850677490234375, -0.0175323486328125, -0.03753662109375, -0.005619049072265625, 0.01226806640625, -0.018585205078125, 0.0097503662109375, -0.004375457763671875, 0.055999755859375, -0.024932861328125, 0.046417236328125, 0.00969696044921875, 0.00960540771484375, 0.00510406494140625, -0.01137542724609375, 0.0390625, -0.023956298828125, -0.0062103271484375, 0.0038318634033203125, -0.0003120899200439453, -0.03302001953125, -0.04656982421875, 0.054595947265625, -0.0816650390625, -0.035797119140625, -0.032440185546875, -0.04241943359375, 0.01140594482421875, 0.02642822265625, 0.0257720947265625, 0.03790283203125, -0.02032470703125, 
0.06640625, 0.029296875, -0.0207061767578125, 0.044677734375, 0.0025615692138671875, -0.0032482147216796875, -0.023223876953125, 0.04010009765625, 0.0041961669921875, 0.003696441650390625, 0.05377197265625, 0.0110931396484375, -0.0408935546875, -0.0276641845703125, -0.0095367431640625, 0.01123809814453125, -0.04827880859375, -0.00691986083984375, -0.06793212890625, -0.025360107421875, -0.0528564453125, -0.004486083984375, -0.01447296142578125, -0.028289794921875, -0.025848388671875, -0.0200042724609375, 0.0408935546875, 0.053741455078125, 0.01541900634765625, 0.0304107666015625, -0.05279541015625, 0.0261688232421875, 0.00075531005859375, 0.0158538818359375, -0.00928497314453125, -0.04937744140625, -0.01335906982421875, -0.01153564453125, -0.03594970703125, -0.057952880859375, 0.04150390625, 0.0027484893798828125, 0.0213165283203125, 0.02197265625, 0.004425048828125, 0.049652099609375, -0.042755126953125, 0.060943603515625, 0.005649566650390625, -0.07281494140625, 0.0190277099609375, -0.0263214111328125, 0.0452880859375, 0.0282135009765625, 0.025848388671875, -0.036590576171875, -0.010711669921875, -0.05682373046875, -0.06878662109375, 0.051666259765625, 0.031951904296875, 0.0267333984375, 0.0019016265869140625, 0.01483154296875, -0.003826141357421875, 0.0225982666015625, -0.07220458984375, 0.0013179779052734375, -0.03375244140625, -0.053009033203125, -0.0294189453125, -0.022552490234375, 0.0190582275390625, -0.00818634033203125, 0.03021240234375, -0.00023245811462402344, 0.05560302734375, 0.01299285888671875, -0.024932861328125, 0.01438140869140625, 0.0236053466796875, 0.03790283203125, 0.0197296142578125, -0.017486572265625, 0.0167694091796875, 0.0291900634765625, -0.034271240234375, -0.011474609375, 0.0284271240234375, 0.0010099411010742188, 0.006259918212890625, 0.039703369140625, 0.07537841796875, 0.034912109375, -0.0285797119140625, 0.05548095703125, 0.004688262939453125, -0.0196380615234375, -0.047088623046875, -0.01013946533203125, 0.02606201171875, 
0.021026611328125, 0.01399993896484375, 0.00518798828125, 0.0133209228515625, -0.044464111328125, 0.0090789794921875, 0.00580596923828125, -0.028411865234375, -0.00740814208984375, 0.057830810546875, 0.00818634033203125, -0.024627685546875, 0.07513427734375, 0.0007181167602539062, -0.0479736328125, 0.042083740234375, 0.046966552734375, 0.06964111328125, 0.016326904296875, 0.013275146484375, 0.03851318359375, 0.0250396728515625, -0.0270843505859375, 0.00344085693359375, -0.0011339187622070312, -0.053466796875, -0.025482177734375, -0.040191650390625, -0.0096588134765625, 0.0000845789909362793, -0.036346435546875, 0.033172607421875, -0.0207061767578125, -0.00975799560546875, 0.0068359375, 0.001697540283203125, -0.056365966796875, 0.0124359130859375, -0.013671875, 0.05572509765625, -0.050872802734375, 0.057525634765625, 0.05853271484375, -0.05963134765625, -0.07012939453125, 0.0096893310546875, -0.04205322265625, -0.046051025390625, 0.045196533203125, 0.0267181396484375, 0.01308441162109375, 0.01358795166015625, -0.0498046875, -0.051177978515625, 0.0963134765625, 0.0275726318359375, -0.032989501953125, -0.030853271484375, 0.02813720703125, 0.05078125, -0.0284271240234375, 0.0369873046875, 0.015533447265625, 0.027740478515625, 0.0032711029052734375, -0.07208251953125, 0.0234832763671875, -0.0087738037109375, 0.0158233642578125, 0.00827789306640625, -0.05023193359375, 0.07427978515625, -0.007236480712890625, 0.0084228515625, 0.02288818359375, 0.052154541015625, 0.0035991668701171875, -0.00830078125, 0.0235137939453125, 0.0623779296875, 0.0394287109375, -0.0203399658203125, 0.08331298828125, -0.0135040283203125, 0.061737060546875, 0.065185546875, -0.00109100341796875, 0.08477783203125, 0.04168701171875, -0.0016355514526367188, 0.054962158203125, 0.03851318359375, -0.040008544921875, 0.028564453125, 0.01552581787109375, 0.01080322265625, -0.024658203125, 0.0097503662109375, -0.022369384765625, 0.042572021484375, 0.007568359375, -0.05316162109375, -0.016326904296875, 
-0.00823211669921875, -0.0056915283203125, -0.00469207763671875, -0.002532958984375, 0.04656982421875, 0.01447296142578125, -0.0416259765625, 0.02752685546875, 0.01486968994140625, 0.05694580078125, -0.0300750732421875, 0.003650665283203125, 0.003215789794921875, 0.039215087890625, -0.0102386474609375, -0.06292724609375, 0.0293731689453125, -0.00946807861328125, -0.0060577392578125, -0.030548095703125, 0.05999755859375, -0.038330078125, -0.0443115234375, 0.0211029052734375, 0.0233612060546875, 0.00885009765625, -0.0019683837890625, -0.048980712890625, -0.0006718635559082031, -0.006343841552734375, -0.01094818115234375, 0.00525665283203125, 0.0367431640625, 0.00853729248046875, 0.048095703125, 0.029144287109375, -0.02178955078125, -0.00444793701171875, 0.00457763671875, 0.0469970703125, -0.06640625, -0.046417236328125, -0.053009033203125, 0.033355712890625, -0.014739990234375, -0.03564453125, 0.048828125, 0.046234130859375, 0.06610107421875, -0.034088134765625, 0.037017822265625, -0.0060577392578125, 0.0191650390625, -0.036773681640625, 0.065185546875, -0.053924560546875, -0.0169219970703125, -0.0196380615234375, -0.0859375, -0.017913818359375, 0.07568359375, -0.034393310546875, 0.01490020751953125, 0.0849609375, 0.055267333984375, -0.02081298828125, -0.005817413330078125, 0.0212249755859375, 0.04522705078125, 0.01232147216796875, 0.04833984375, 0.0450439453125, -0.06243896484375, 0.06610107421875, 0.0006470680236816406, 0.029998779296875, -0.0213623046875, -0.052978515625, -0.08306884765625, -0.054229736328125, -0.019775390625, -0.037628173828125, 0.00379180908203125, 0.07696533203125, 0.037261962890625, -0.04449462890625, -0.00896453857421875, -0.0245819091796875, -0.0189361572265625, 0.006679534912109375, -0.0177459716796875, 0.024871826171875, -0.019866943359375, -0.065185546875, 0.0035991668701171875, -0.01161956787109375, 0.0017595291137695312, -0.017791748046875, 0.0235443115234375, -0.006397247314453125, -0.0143585205078125, 0.043121337890625, 
-0.025970458984375, -0.060791015625, -0.0487060546875, 0.01068878173828125, -0.0261993408203125, -0.0020465850830078125, 0.0325927734375, -0.05157470703125, 0.0113677978515625, 0.05096435546875, 0.052490234375, 0.068603515625, -0.020355224609375, 0.053924560546875, -0.031646728515625, 0.0157623291015625, 0.0092315673828125, 0.039398193359375, 0.03399658203125, -0.007335662841796875, 0.039398193359375, 0.006626129150390625, -0.040557861328125, -0.038330078125, -0.0011548995971679688, -0.09649658203125, -0.0135498046875, 0.100341796875, -0.00872802734375, -0.0217132568359375, 0.02398681640625, -0.0272064208984375, 0.030120849609375, -0.03460693359375, 0.06689453125, 0.06732177734375, 0.0300140380859375, -0.0235595703125, -0.026763916015625, 0.0200042724609375, 0.032928466796875, -0.04254150390625, -0.02581787109375, 0.0206298828125, 0.03472900390625, 0.01396942138671875, 0.004352569580078125, -0.0034198760986328125, 0.007617950439453125, 0.0157318115234375, 0.0113067626953125, -0.01013946533203125, 0.00024771690368652344, -0.0191650390625, 0.03497314453125, -0.0176544189453125, -0.0145721435546875 ] ]
ylacombe/bark-small
2023-07-18T08:11:24.000Z
[ "transformers", "pytorch", "bark", "text-to-audio", "audio", "text-to-speech", "en", "de", "es", "fr", "hi", "it", "ja", "ko", "pl", "pt", "ru", "tr", "zh", "license:cc-by-nc-4.0", "endpoints_compatible", "region:us" ]
text-to-speech
ylacombe
null
null
ylacombe/bark-small
0
9,015
transformers
2023-06-16T07:15:23
--- language: - en - de - es - fr - hi - it - ja - ko - pl - pt - ru - tr - zh thumbnail: https://user-images.githubusercontent.com/5068315/230698495-cbb1ced9-c911-4c9a-941d-a1a4a1286ac6.png library: "bark" license: "cc-by-nc-4.0" tags: - bark - audio - text-to-speech --- # Bark Bark is a transformer-based text-to-audio model created by [Suno](https://www.suno.ai). Bark can generate highly realistic, multilingual speech as well as other audio - including music, background noise and simple sound effects. The model can also produce nonverbal communications like laughing, sighing and crying. To support the research community, we are providing access to pretrained model checkpoints ready for inference. The original github repo and model card can be found [here](https://github.com/suno-ai/bark). This model is meant for research purposes only. The model output is not censored and the authors do not endorse the opinions in the generated content. Use at your own risk. Two checkpoints are released: - [**small** (this checkpoint)](https://huggingface.co/suno/bark-small) - [large](https://huggingface.co/suno/bark) ## Example Try out Bark yourself! * Bark Colab: <a target="_blank" href="https://colab.research.google.com/drive/1eJfA2XUa-mXwdMy7DoYKVYHI1iTd9Vkt?usp=sharing"> <img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/> </a> * Hugging Face Colab: <a target="_blank" href="https://colab.research.google.com/drive/1dWWkZzvu7L9Bunq9zvD-W02RFUXoW-Pd?usp=sharing"> <img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/> </a> * Hugging Face Demo: <a target="_blank" href="https://huggingface.co/spaces/suno/bark"> <img src="https://huggingface.co/datasets/huggingface/badges/raw/main/open-in-hf-spaces-sm.svg" alt="Open in HuggingFace"/> </a> ## 🤗 Transformers Usage You can run Bark locally with the 🤗 Transformers library from version 4.31.0 onwards. 1. 
First install the 🤗 [Transformers library](https://github.com/huggingface/transformers) from main: ``` pip install git+https://github.com/huggingface/transformers.git ``` 2. Run the following Python code to generate speech samples: ```python from transformers import AutoProcessor, AutoModel processor = AutoProcessor.from_pretrained("suno/bark-small") model = AutoModel.from_pretrained("suno/bark-small") inputs = processor( text=["Hello, my name is Suno. And, uh — and I like pizza. [laughs] But I also have other interests such as playing tic tac toe."], return_tensors="pt", ) speech_values = model.generate(**inputs, do_sample=True) ``` 3. Listen to the speech samples either in an ipynb notebook: ```python from IPython.display import Audio sampling_rate = model.generation_config.sample_rate Audio(speech_values.cpu().numpy().squeeze(), rate=sampling_rate) ``` Or save them as a `.wav` file using a third-party library, e.g. `scipy`: ```python import scipy sampling_rate = model.config.sample_rate scipy.io.wavfile.write("bark_out.wav", rate=sampling_rate, data=speech_values.cpu().numpy().squeeze()) ``` For more details on using the Bark model for inference using the 🤗 Transformers library, refer to the [Bark docs](https://huggingface.co/docs/transformers/model_doc/bark). ## Suno Usage You can also run Bark locally through the original [Bark library]((https://github.com/suno-ai/bark): 1. First install the [`bark` library](https://github.com/suno-ai/bark) 3. Run the following Python code: ```python from bark import SAMPLE_RATE, generate_audio, preload_models from IPython.display import Audio # download and load all models preload_models() # generate audio from text text_prompt = """ Hello, my name is Suno. And, uh — and I like pizza. [laughs] But I also have other interests such as playing tic tac toe. 
""" speech_array = generate_audio(text_prompt) # play text in notebook Audio(speech_array, rate=SAMPLE_RATE) ``` [pizza.webm](https://user-images.githubusercontent.com/5068315/230490503-417e688d-5115-4eee-9550-b46a2b465ee3.webm) To save `audio_array` as a WAV file: ```python from scipy.io.wavfile import write as write_wav write_wav("/path/to/audio.wav", SAMPLE_RATE, audio_array) ``` ## Model Details The following is additional information about the models released here. Bark is a series of three transformer models that turn text into audio. ### Text to semantic tokens - Input: text, tokenized with [BERT tokenizer from Hugging Face](https://huggingface.co/docs/transformers/model_doc/bert#transformers.BertTokenizer) - Output: semantic tokens that encode the audio to be generated ### Semantic to coarse tokens - Input: semantic tokens - Output: tokens from the first two codebooks of the [EnCodec Codec](https://github.com/facebookresearch/encodec) from facebook ### Coarse to fine tokens - Input: the first two codebooks from EnCodec - Output: 8 codebooks from EnCodec ### Architecture | Model | Parameters | Attention | Output Vocab size | |:-------------------------:|:----------:|------------|:-----------------:| | Text to semantic tokens | 80/300 M | Causal | 10,000 | | Semantic to coarse tokens | 80/300 M | Causal | 2x 1,024 | | Coarse to fine tokens | 80/300 M | Non-causal | 6x 1,024 | ### Release date April 2023 ## Broader Implications We anticipate that this model's text to audio capabilities can be used to improve accessbility tools in a variety of languages. While we hope that this release will enable users to express their creativity and build applications that are a force for good, we acknowledge that any text to audio model has the potential for dual use. While it is not straightforward to voice clone known people with Bark, it can still be used for nefarious purposes. 
To further reduce the chances of unintended use of Bark, we also release a simple classifier to detect Bark-generated audio with high accuracy (see notebooks section of the main repository).
6,069
[ [ -0.0234832763671875, -0.05255126953125, 0.01433563232421875, 0.036224365234375, -0.01007080078125, -0.0030117034912109375, -0.0218505859375, -0.063232421875, 0.0197296142578125, 0.01666259765625, -0.050201416015625, -0.05450439453125, -0.0281829833984375, -0.0055389404296875, -0.0197906494140625, 0.09063720703125, 0.0426025390625, 0.0006566047668457031, -0.0024738311767578125, -0.0032215118408203125, -0.0234527587890625, -0.033294677734375, -0.060089111328125, -0.04766845703125, 0.020660400390625, 0.00559234619140625, 0.03338623046875, 0.0250396728515625, 0.00016772747039794922, 0.01959228515625, -0.0280609130859375, -0.02117919921875, -0.002490997314453125, -0.0174713134765625, 0.0157318115234375, -0.04644775390625, -0.052490234375, 0.019927978515625, 0.0284576416015625, 0.01885986328125, -0.0293121337890625, 0.0186920166015625, -0.00902557373046875, 0.018798828125, -0.0035533905029296875, 0.0264129638671875, -0.043975830078125, -0.009552001953125, 0.0024013519287109375, 0.00040030479431152344, -0.035888671875, -0.033294677734375, 0.005237579345703125, -0.05255126953125, 0.0167694091796875, -0.0007014274597167969, 0.085693359375, 0.0200042724609375, -0.01453399658203125, -0.036224365234375, -0.044403076171875, 0.059478759765625, -0.08074951171875, 0.009429931640625, 0.058013916015625, 0.01861572265625, -0.0103759765625, -0.0645751953125, -0.03985595703125, -0.0240936279296875, -0.005237579345703125, 0.0228729248046875, -0.03857421875, -0.003803253173828125, 0.01232147216796875, 0.026824951171875, -0.02020263671875, -0.0037136077880859375, -0.01776123046875, -0.01425933837890625, 0.046142578125, -0.0005035400390625, 0.043487548828125, -0.0289154052734375, -0.0027904510498046875, -0.04351806640625, -0.013427734375, 0.037811279296875, 0.0228118896484375, 0.022857666015625, -0.03631591796875, 0.042144775390625, 0.0179595947265625, 0.036285400390625, 0.0204925537109375, -0.037750244140625, 0.036956787109375, -0.02154541015625, -0.01041412353515625, 
0.032928466796875, 0.080810546875, 0.01409912109375, -0.006988525390625, -0.0004191398620605469, 0.0030994415283203125, 0.006404876708984375, 0.002941131591796875, -0.05377197265625, -0.020843505859375, 0.03997802734375, -0.028594970703125, -0.031646728515625, -0.01708984375, -0.044189453125, 0.0014619827270507812, -0.0017538070678710938, 0.0308685302734375, -0.070068359375, -0.033416748046875, 0.01404571533203125, -0.037353515625, 0.02398681640625, 0.0052337646484375, -0.07379150390625, 0.0233001708984375, 0.0343017578125, 0.05120849609375, 0.019317626953125, -0.0253143310546875, -0.021575927734375, 0.0113983154296875, -0.023193359375, 0.044525146484375, -0.02880859375, -0.029327392578125, -0.031524658203125, 0.0095672607421875, 0.002685546875, -0.05364990234375, 0.058807373046875, -0.0033416748046875, 0.0259552001953125, 0.00600433349609375, -0.022125244140625, -0.028961181640625, -0.00440216064453125, -0.03582763671875, 0.1112060546875, 0.00887298583984375, -0.065673828125, 0.0118408203125, -0.0537109375, -0.043548583984375, -0.02294921875, 0.0084991455078125, -0.04486083984375, 0.00327301025390625, 0.0279693603515625, 0.016876220703125, -0.02972412109375, 0.0285186767578125, -0.01171112060546875, -0.01532745361328125, 0.036712646484375, -0.0102996826171875, 0.0889892578125, 0.01282501220703125, -0.045196533203125, 0.0209197998046875, -0.0654296875, 0.0107879638671875, 0.03070068359375, -0.03179931640625, -0.021148681640625, 0.00632476806640625, 0.0226593017578125, 0.01012420654296875, 0.01160430908203125, -0.04803466796875, -0.00008147954940795898, -0.0421142578125, 0.057769775390625, 0.037933349609375, -0.01486968994140625, 0.0096435546875, -0.052947998046875, 0.025543212890625, -0.01311492919921875, -0.004688262939453125, -0.018829345703125, -0.043304443359375, -0.03497314453125, -0.045013427734375, 0.0171966552734375, 0.03857421875, -0.007537841796875, 0.06378173828125, 0.006366729736328125, -0.06744384765625, -0.07574462890625, -0.035858154296875, 
0.01509857177734375, 0.0265655517578125, 0.03338623046875, -0.008056640625, -0.047576904296875, -0.05084228515625, -0.00551605224609375, -0.032135009765625, -0.0154876708984375, 0.048736572265625, 0.0217742919921875, -0.0211029052734375, 0.0853271484375, -0.0262603759765625, -0.0265655517578125, -0.0172271728515625, 0.0274810791015625, 0.0289154052734375, 0.04791259765625, 0.043487548828125, -0.03997802734375, -0.018463134765625, -0.0030803680419921875, -0.0506591796875, -0.0198516845703125, -0.0083160400390625, 0.00902557373046875, 0.0091094970703125, 0.0233612060546875, -0.04541015625, 0.0140228271484375, 0.042755126953125, -0.014190673828125, 0.059173583984375, -0.006061553955078125, 0.0036716461181640625, -0.09051513671875, 0.01180267333984375, -0.005702972412109375, -0.0145111083984375, -0.039794921875, -0.0244293212890625, -0.0257415771484375, -0.0203094482421875, -0.031463623046875, 0.032928466796875, -0.022186279296875, -0.01140594482421875, -0.012451171875, 0.00702667236328125, -0.001434326171875, 0.0426025390625, 0.006076812744140625, 0.043731689453125, 0.06573486328125, -0.043914794921875, 0.022857666015625, 0.032318115234375, -0.015960693359375, 0.0311431884765625, -0.06475830078125, 0.0222015380859375, 0.0150604248046875, 0.030548095703125, -0.06884765625, -0.014312744140625, 0.0241241455078125, -0.06585693359375, 0.01479339599609375, -0.0015916824340820312, -0.043914794921875, -0.02764892578125, -0.0166015625, 0.031280517578125, 0.06005859375, -0.05059814453125, 0.051849365234375, 0.04595947265625, -0.00539398193359375, -0.0287628173828125, -0.056365966796875, 0.0005645751953125, -0.03424072265625, -0.048736572265625, 0.033782958984375, -0.00490570068359375, 0.0031108856201171875, 0.0159454345703125, -0.007564544677734375, -0.00002968311309814453, -0.00951385498046875, 0.0347900390625, 0.005649566650390625, -0.0159912109375, 0.0017576217651367188, 0.0006694793701171875, -0.005413055419921875, 0.016448974609375, -0.0160675048828125, 0.052337646484375, 
-0.0379638671875, -0.00731658935546875, -0.05645751953125, 0.00811767578125, 0.046722412109375, -0.0256500244140625, 0.02239990234375, 0.06304931640625, -0.0235137939453125, -0.00939178466796875, -0.034698486328125, -0.0201263427734375, -0.03411865234375, 0.026275634765625, -0.027679443359375, -0.043731689453125, 0.04266357421875, -0.01456451416015625, -0.004741668701171875, 0.034759521484375, 0.03631591796875, -0.0135345458984375, 0.07916259765625, 0.05694580078125, -0.00846099853515625, 0.0396728515625, -0.0239715576171875, 0.004367828369140625, -0.0736083984375, -0.03338623046875, -0.04949951171875, -0.0008254051208496094, -0.037750244140625, -0.0254974365234375, 0.0220184326171875, 0.0103302001953125, -0.0029392242431640625, 0.042449951171875, -0.06463623046875, 0.009613037109375, 0.059173583984375, 0.002765655517578125, 0.01519775390625, 0.00437164306640625, -0.012115478515625, -0.01050567626953125, -0.05084228515625, -0.036834716796875, 0.05560302734375, 0.040679931640625, 0.060211181640625, -0.0059814453125, 0.0526123046875, -0.00072479248046875, 0.004150390625, -0.07684326171875, 0.046173095703125, -0.0032138824462890625, -0.058624267578125, -0.024200439453125, -0.01291656494140625, -0.07891845703125, 0.0093994140625, -0.0164642333984375, -0.07275390625, -0.0035247802734375, -0.004283905029296875, -0.0019130706787109375, 0.0217437744140625, -0.041717529296875, 0.062042236328125, -0.0146331787109375, -0.0021610260009765625, -0.022796630859375, -0.039794921875, 0.0266876220703125, 0.0038471221923828125, 0.0196685791015625, -0.028839111328125, 0.0240631103515625, 0.075439453125, -0.0218963623046875, 0.08154296875, -0.00395965576171875, 0.002288818359375, 0.046905517578125, -0.0119171142578125, 0.0119171142578125, -0.00879669189453125, -0.0113677978515625, 0.0205535888671875, 0.034332275390625, -0.0119476318359375, -0.0278167724609375, 0.023223876953125, -0.06353759765625, -0.0251922607421875, -0.037933349609375, -0.04290771484375, -0.0031299591064453125, 
0.0164642333984375, 0.03363037109375, 0.0243072509765625, -0.022735595703125, 0.00618743896484375, 0.006328582763671875, -0.049896240234375, 0.038116455078125, 0.03955078125, -0.0249176025390625, -0.04669189453125, 0.0654296875, -0.01739501953125, 0.00843048095703125, -0.002307891845703125, 0.046539306640625, -0.0294189453125, -0.0146331787109375, -0.0204925537109375, 0.0428466796875, -0.03314208984375, -0.000362396240234375, -0.039703369140625, -0.0210113525390625, -0.042877197265625, 0.0015802383422851562, -0.044647216796875, -0.0215606689453125, -0.023193359375, 0.006755828857421875, 0.046966552734375, 0.04754638671875, -0.0276031494140625, 0.02520751953125, -0.0478515625, 0.032379150390625, 0.01216888427734375, 0.01110076904296875, 0.003543853759765625, -0.044891357421875, -0.00884246826171875, 0.01081085205078125, -0.016510009765625, -0.064453125, 0.039276123046875, 0.009002685546875, 0.04351806640625, 0.0027675628662109375, 0.0164031982421875, 0.04925537109375, -0.03564453125, 0.0592041015625, 0.0352783203125, -0.081298828125, 0.065673828125, -0.02294921875, 0.016510009765625, 0.0203094482421875, 0.0181732177734375, -0.046600341796875, -0.054412841796875, -0.06298828125, -0.0654296875, 0.09356689453125, 0.0222930908203125, 0.00844573974609375, -0.0007381439208984375, -0.0044403076171875, 0.00017082691192626953, 0.004177093505859375, -0.0732421875, -0.031524658203125, -0.03955078125, -0.01107025146484375, -0.0004627704620361328, -0.0027713775634765625, -0.0186004638671875, -0.043853759765625, 0.07049560546875, 0.0006833076477050781, 0.037750244140625, 0.0255126953125, 0.0149993896484375, -0.0026798248291015625, 0.02459716796875, 0.029296875, 0.001617431640625, -0.0418701171875, 0.002655029296875, 0.00897979736328125, -0.043243408203125, 0.02142333984375, 0.00537872314453125, -0.004947662353515625, 0.0169219970703125, 0.015960693359375, 0.07940673828125, 0.0151214599609375, -0.052947998046875, 0.0299072265625, -0.01233673095703125, -0.0174560546875, 
-0.039459228515625, -0.0010442733764648438, 0.034698486328125, 0.0167236328125, 0.02325439453125, -0.00414276123046875, -0.00568389892578125, -0.052947998046875, 0.025054931640625, 0.02838134765625, -0.0272064208984375, -0.0302886962890625, 0.06976318359375, -0.0092620849609375, -0.051422119140625, 0.037628173828125, -0.005672454833984375, -0.0267486572265625, 0.0638427734375, 0.08526611328125, 0.07562255859375, -0.011962890625, 0.0192413330078125, 0.050994873046875, 0.0189056396484375, 0.005550384521484375, -0.0026035308837890625, -0.0182647705078125, -0.040374755859375, -0.0167388916015625, -0.055877685546875, -0.0299072265625, 0.0281829833984375, -0.05792236328125, 0.0234527587890625, -0.0279693603515625, -0.032745361328125, 0.01421356201171875, -0.01396942138671875, -0.0090789794921875, 0.01439666748046875, 0.004741668701171875, 0.05487060546875, -0.0579833984375, 0.0849609375, 0.0430908203125, -0.0408935546875, -0.0863037109375, 0.01052093505859375, -0.0115814208984375, -0.050323486328125, 0.0361328125, 0.0235137939453125, -0.0270538330078125, 0.00853729248046875, -0.0494384765625, -0.0457763671875, 0.0740966796875, 0.028472900390625, -0.00917816162109375, -0.003910064697265625, 0.0029125213623046875, 0.0469970703125, -0.0205230712890625, 0.0286712646484375, 0.051666259765625, 0.03656005859375, 0.022857666015625, -0.0849609375, 0.007045745849609375, -0.027069091796875, -0.0243377685546875, -0.0199737548828125, -0.04241943359375, 0.053955078125, -0.0225830078125, -0.02471923828125, 0.0028553009033203125, 0.039947509765625, 0.038848876953125, 0.03814697265625, 0.044586181640625, 0.044921875, 0.05853271484375, -0.0091094970703125, 0.0634765625, -0.02069091796875, 0.0243682861328125, 0.0863037109375, 0.00958251953125, 0.06304931640625, 0.0207977294921875, -0.033660888671875, 0.03741455078125, 0.051910400390625, -0.0152587890625, 0.033477783203125, 0.01068878173828125, -0.0178070068359375, 0.0041961669921875, -0.023223876953125, -0.0345458984375, 0.04449462890625, 
0.0174407958984375, -0.003875732421875, -0.004215240478515625, 0.01071929931640625, -0.00186920166015625, -0.005290985107421875, 0.005496978759765625, 0.062225341796875, 0.0164031982421875, -0.040008544921875, 0.0740966796875, 0.007251739501953125, 0.0662841796875, -0.052642822265625, 0.005443572998046875, 0.01206207275390625, 0.0025615692138671875, -0.02545166015625, -0.0489501953125, 0.028350830078125, -0.0005612373352050781, -0.0036907196044921875, -0.004878997802734375, 0.027099609375, -0.035675048828125, -0.0248260498046875, 0.057586669921875, 0.0064544677734375, 0.040985107421875, -0.00460052490234375, -0.0694580078125, 0.00772857666015625, 0.00516510009765625, -0.01082611083984375, 0.01038360595703125, 0.0260162353515625, 0.015472412109375, 0.040435791015625, 0.052642822265625, 0.00983428955078125, 0.0156402587890625, 0.0093841552734375, 0.049041748046875, -0.0592041015625, -0.0430908203125, -0.048187255859375, 0.039093017578125, 0.01288604736328125, -0.01390838623046875, 0.048614501953125, 0.05438232421875, 0.04632568359375, -0.0086212158203125, 0.053863525390625, -0.0282135009765625, 0.032745361328125, -0.03106689453125, 0.055389404296875, -0.0560302734375, 0.01215362548828125, -0.03619384765625, -0.04815673828125, -0.00399017333984375, 0.0543212890625, -0.0086212158203125, -0.00505828857421875, 0.0501708984375, 0.0693359375, -0.00433349609375, 0.0062713623046875, 0.0111236572265625, 0.0186614990234375, 0.032440185546875, 0.042724609375, 0.05908203125, -0.04541015625, 0.061553955078125, -0.040069580078125, -0.01529693603515625, -0.00197601318359375, -0.049652099609375, -0.06927490234375, -0.06402587890625, -0.0225372314453125, -0.044036865234375, -0.026153564453125, 0.057586669921875, 0.07012939453125, -0.04925537109375, -0.043426513671875, -0.003204345703125, -0.00020110607147216797, -0.0355224609375, -0.0208282470703125, 0.037506103515625, -0.0148468017578125, -0.0693359375, 0.04205322265625, 0.0023097991943359375, 0.02362060546875, 0.019989013671875, 
-0.016510009765625, -0.0190887451171875, 0.01541900634765625, 0.028594970703125, 0.03387451171875, -0.073974609375, -0.007678985595703125, -0.00237274169921875, -0.016998291015625, 0.040069580078125, 0.034942626953125, -0.050689697265625, 0.0273895263671875, 0.0182952880859375, 0.025238037109375, 0.084228515625, 0.002201080322265625, 0.0190582275390625, -0.043701171875, 0.031890869140625, 0.022705078125, 0.0081634521484375, 0.0256195068359375, -0.012664794921875, 0.027069091796875, 0.0157470703125, -0.03375244140625, -0.06854248046875, -0.0010576248168945312, -0.105224609375, -0.03668212890625, 0.07684326171875, 0.006534576416015625, -0.039093017578125, 0.0103759765625, -0.048675537109375, 0.0494384765625, -0.0404052734375, 0.04547119140625, 0.045135498046875, -0.027099609375, -0.003925323486328125, -0.03485107421875, 0.04315185546875, 0.040374755859375, -0.056121826171875, 0.0075225830078125, 0.019622802734375, 0.038421630859375, 0.025726318359375, 0.06878662109375, -0.020263671875, 0.0164794921875, 0.028350830078125, 0.043304443359375, -0.007190704345703125, -0.00554656982421875, -0.0369873046875, -0.0030841827392578125, 0.0069427490234375, -0.03082275390625 ] ]
warp-ai/wuerstchen-prior
2023-09-18T07:01:28.000Z
[ "diffusers", "arxiv:2306.00637", "arxiv:1910.09700", "license:mit", "has_space", "diffusers:WuerstchenPriorPipeline", "region:us" ]
null
warp-ai
null
null
warp-ai/wuerstchen-prior
14
9,001
diffusers
2023-07-19T19:09:44
--- license: mit --- <img src="https://cdn-uploads.huggingface.co/production/uploads/634cb5eefb80cc6bcaf63c3e/i-DYpDHw8Pwiy7QBKZVR5.jpeg" width=1500> ## Würstchen - Overview Würstchen is a diffusion model, whose text-conditional model works in a highly compressed latent space of images. Why is this important? Compressing data can reduce computational costs for both training and inference by magnitudes. Training on 1024x1024 images is way more expensive than training on 32x32. Usually, other works make use of a relatively small compression, in the range of 4x - 8x spatial compression. Würstchen takes this to an extreme. Through its novel design, we achieve a 42x spatial compression. This was unseen before because common methods fail to faithfully reconstruct detailed images after 16x spatial compression. Würstchen employs a two-stage compression, what we call Stage A and Stage B. Stage A is a VQGAN, and Stage B is a Diffusion Autoencoder (more details can be found in the [paper](https://arxiv.org/abs/2306.00637)). A third model, Stage C, is learned in that highly compressed latent space. This training requires fractions of the compute used for current top-performing models, allowing also cheaper and faster inference. ## Würstchen - Prior The Prior is what we refer to as "Stage C". It is the text-conditional model, operating in the small latent space that Stage A and Stage B encode images into. During inference, its job is to generate the image latents given text. These image latents are then sent to Stages A & B to decode the latents into pixel space. ### Image Sizes Würstchen was trained on image resolutions between 1024x1024 & 1536x1536. We sometimes also observe good outputs at resolutions like 1024x2048. Feel free to try it out. We also observed that the Prior (Stage C) adapts extremely fast to new resolutions. So finetuning it at 2048x2048 should be computationally cheap. 
<img src="https://cdn-uploads.huggingface.co/production/uploads/634cb5eefb80cc6bcaf63c3e/5pA5KUfGmvsObqiIjdGY1.jpeg" width=1000> ## How to run This pipeline should be run together with https://huggingface.co/warp-ai/wuerstchen: ```py import torch from diffusers import WuerstchenDecoderPipeline, WuerstchenPriorPipeline from diffusers.pipelines.wuerstchen import DEFAULT_STAGE_C_TIMESTEPS device = "cuda" dtype = torch.float16 num_images_per_prompt = 2 prior_pipeline = WuerstchenPriorPipeline.from_pretrained( "warp-ai/wuerstchen-prior", torch_dtype=dtype ).to(device) decoder_pipeline = WuerstchenDecoderPipeline.from_pretrained( "warp-ai/wuerstchen", torch_dtype=dtype ).to(device) caption = "Anthropomorphic cat dressed as a fire fighter" negative_prompt = "" prior_output = prior_pipeline( prompt=caption, height=1024, width=1536, timesteps=DEFAULT_STAGE_C_TIMESTEPS, negative_prompt=negative_prompt, guidance_scale=4.0, num_images_per_prompt=num_images_per_prompt, ) decoder_output = decoder_pipeline( image_embeddings=prior_output.image_embeddings, prompt=caption, negative_prompt=negative_prompt, guidance_scale=0.0, output_type="pil", ).images ``` ### Image Sampling Times The figure shows the inference times (on an A100) for different batch sizes (`num_images_per_prompt`) on Würstchen compared to [Stable Diffusion XL](https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0) (without refiner). The left figure shows inference times (using torch > 2.0), whereas the right figure applies `torch.compile` to both pipelines in advance. ![image/jpeg](https://cdn-uploads.huggingface.co/production/uploads/634cb5eefb80cc6bcaf63c3e/UPhsIH2f079ZuTA_sLdVe.jpeg) ## Model Details - **Developed by:** Pablo Pernias, Dominic Rampas - **Model type:** Diffusion-based text-to-image generation model - **Language(s):** English - **License:** MIT - **Model Description:** This is a model that can be used to generate and modify images based on text prompts. 
It is a Diffusion model in the style of Stage C from the [Würstchen paper](https://arxiv.org/abs/2306.00637) that uses a fixed, pretrained text encoder ([CLIP ViT-bigG/14](https://huggingface.co/laion/CLIP-ViT-bigG-14-laion2B-39B-b160k)). - **Resources for more information:** [GitHub Repository](https://github.com/dome272/Wuerstchen), [Paper](https://arxiv.org/abs/2306.00637). - **Cite as:** @misc{pernias2023wuerstchen, title={Wuerstchen: Efficient Pretraining of Text-to-Image Models}, author={Pablo Pernias and Dominic Rampas and Marc Aubreville}, year={2023}, eprint={2306.00637}, archivePrefix={arXiv}, primaryClass={cs.CV} } ## Environmental Impact **Würstchen v2** **Estimated Emissions** Based on that information, we estimate the following CO2 emissions using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). The hardware, runtime, cloud provider, and compute region were utilized to estimate the carbon impact. - **Hardware Type:** A100 PCIe 40GB - **Hours used:** 24602 - **Cloud Provider:** AWS - **Compute Region:** US-east - **Carbon Emitted (Power consumption x Time x Carbon produced based on location of power grid):** 2275.68 kg CO2 eq.
5,295
[ [ -0.039398193359375, -0.044036865234375, 0.0323486328125, 0.00943756103515625, -0.0261993408203125, -0.03558349609375, -0.01233673095703125, -0.02154541015625, -0.00917816162109375, 0.0184478759765625, -0.0435791015625, -0.035675048828125, -0.052215576171875, -0.009246826171875, -0.0172576904296875, 0.0699462890625, 0.0032520294189453125, 0.0030422210693359375, 0.0017957687377929688, 0.01099395751953125, -0.006916046142578125, -0.0178985595703125, -0.057373046875, -0.023406982421875, 0.03765869140625, 0.0007300376892089844, 0.0443115234375, 0.044891357421875, 0.033233642578125, 0.029296875, -0.01146697998046875, -0.0112152099609375, -0.03582763671875, -0.0118255615234375, 0.0124969482421875, -0.01091766357421875, -0.00519561767578125, 0.01085662841796875, 0.0498046875, 0.0247802734375, -0.01558685302734375, -0.00534820556640625, 0.0203857421875, 0.05438232421875, -0.041961669921875, -0.01076507568359375, -0.025299072265625, 0.017303466796875, -0.0126190185546875, 0.0030059814453125, -0.028656005859375, -0.00415802001953125, 0.01398468017578125, -0.059112548828125, 0.040679931640625, -0.0049896240234375, 0.09716796875, 0.0294647216796875, -0.0289306640625, -0.010589599609375, -0.04736328125, 0.042816162109375, -0.06817626953125, 0.022918701171875, 0.0124969482421875, 0.004947662353515625, 0.0009937286376953125, -0.10235595703125, -0.056610107421875, 0.01525115966796875, -0.017669677734375, 0.0240631103515625, -0.03338623046875, 0.004337310791015625, 0.033905029296875, 0.02972412109375, -0.058746337890625, 0.0189056396484375, -0.031341552734375, -0.034454345703125, 0.03839111328125, 0.007671356201171875, 0.00878143310546875, -0.011260986328125, -0.0310516357421875, -0.046234130859375, -0.0255126953125, -0.0026226043701171875, 0.0282135009765625, -0.0124359130859375, -0.031982421875, 0.0234527587890625, -0.003238677978515625, 0.0428466796875, 0.0191497802734375, 0.002559661865234375, 0.024444580078125, -0.0184326171875, -0.03411865234375, -0.02227783203125, 
0.08837890625, 0.0303955078125, -0.0078125, 0.0053863525390625, 0.003009796142578125, 0.004291534423828125, 0.0006880760192871094, -0.11224365234375, -0.047210693359375, 0.01366424560546875, -0.033294677734375, -0.016082763671875, -0.0020465850830078125, -0.07684326171875, -0.01441192626953125, 0.01065826416015625, 0.039764404296875, -0.0251617431640625, -0.017181396484375, 0.00786590576171875, -0.031097412109375, 0.01406097412109375, 0.0202178955078125, -0.0435791015625, -0.00885009765625, -0.00006723403930664062, 0.07427978515625, -0.00812530517578125, -0.01248931884765625, -0.025299072265625, -0.01134490966796875, -0.01690673828125, 0.059234619140625, -0.016998291015625, -0.034454345703125, -0.01099395751953125, 0.0199127197265625, 0.0156707763671875, -0.0198211669921875, 0.03961181640625, -0.052001953125, 0.05047607421875, -0.0066986083984375, -0.04351806640625, -0.0131988525390625, -0.012237548828125, -0.045867919921875, 0.09246826171875, 0.0100860595703125, -0.0799560546875, 0.0296478271484375, -0.04840087890625, -0.020355224609375, -0.0062255859375, 0.01129913330078125, -0.051666259765625, -0.01161956787109375, 0.0153350830078125, 0.043975830078125, -0.0175628662109375, 0.0102996826171875, -0.0193939208984375, -0.0186614990234375, -0.01197052001953125, -0.040679931640625, 0.081298828125, 0.033111572265625, -0.045196533203125, 0.0035152435302734375, -0.04608154296875, 0.012237548828125, 0.0244598388671875, -0.02618408203125, -0.005401611328125, -0.02691650390625, 0.0187835693359375, 0.036895751953125, 0.0029239654541015625, -0.03057861328125, 0.005550384521484375, -0.0193328857421875, 0.041473388671875, 0.05975341796875, 0.01131439208984375, 0.049774169921875, -0.0036106109619140625, 0.0238800048828125, 0.015716552734375, 0.0052032470703125, -0.020263671875, -0.038970947265625, -0.0599365234375, -0.02410888671875, 0.0271759033203125, 0.036041259765625, -0.07232666015625, 0.03338623046875, -0.01446533203125, -0.0643310546875, -0.01169586181640625, 
-0.00946807861328125, 0.0213623046875, 0.047454833984375, 0.0195465087890625, -0.03240966796875, -0.0286865234375, -0.0618896484375, 0.008758544921875, 0.0009450912475585938, -0.0110931396484375, 0.00457000732421875, 0.05035400390625, -0.0175628662109375, 0.0455322265625, -0.05694580078125, -0.0131378173828125, 0.01317596435546875, 0.0194854736328125, 0.0178680419921875, 0.05322265625, 0.04534912109375, -0.061492919921875, -0.052276611328125, -0.018402099609375, -0.05950927734375, 0.00531005859375, -0.007476806640625, -0.0266571044921875, 0.025054931640625, 0.03497314453125, -0.058837890625, 0.04754638671875, 0.060150146484375, -0.038604736328125, 0.049468994140625, -0.042572021484375, 0.0177154541015625, -0.08251953125, 0.007183074951171875, 0.0286407470703125, -0.01546478271484375, -0.04266357421875, -0.0038318634033203125, -0.0015583038330078125, -0.0167083740234375, -0.044830322265625, 0.0494384765625, -0.052520751953125, 0.005519866943359375, -0.0149993896484375, -0.007762908935546875, 0.020477294921875, 0.06341552734375, 0.0176239013671875, 0.0518798828125, 0.05755615234375, -0.045074462890625, 0.00809478759765625, 0.0066986083984375, -0.033416748046875, 0.040435791015625, -0.06463623046875, 0.030242919921875, -0.003726959228515625, 0.0259246826171875, -0.0784912109375, -0.0151519775390625, 0.021026611328125, -0.03399658203125, 0.03839111328125, -0.0056610107421875, -0.052520751953125, -0.045745849609375, -0.00493621826171875, 0.021881103515625, 0.0740966796875, -0.035247802734375, 0.038604736328125, 0.0082550048828125, 0.01104736328125, -0.033203125, -0.05938720703125, -0.002315521240234375, -0.02490234375, -0.0535888671875, 0.0418701171875, -0.0229644775390625, 0.00738525390625, 0.0145111083984375, 0.016448974609375, -0.014862060546875, -0.007904052734375, 0.019805908203125, 0.0167388916015625, -0.00897216796875, -0.0035037994384765625, 0.008941650390625, -0.01177215576171875, -0.0114288330078125, -0.03643798828125, 0.04693603515625, -0.0031528472900390625, 
-0.009796142578125, -0.068359375, 0.0260009765625, 0.0274200439453125, 0.0059661865234375, 0.06201171875, 0.07989501953125, -0.031707763671875, 0.0106201171875, -0.0198516845703125, -0.0208587646484375, -0.043304443359375, 0.0289306640625, -0.008636474609375, -0.05035400390625, 0.031219482421875, -0.002838134765625, 0.01027679443359375, 0.0635986328125, 0.048919677734375, -0.039093017578125, 0.0748291015625, 0.043426513671875, 0.0194854736328125, 0.045440673828125, -0.06085205078125, -0.006855010986328125, -0.0545654296875, -0.007396697998046875, -0.0345458984375, -0.0196990966796875, -0.02862548828125, -0.041015625, 0.03216552734375, 0.012969970703125, -0.042572021484375, 0.0217132568359375, -0.05340576171875, 0.0169219970703125, 0.037628173828125, 0.009490966796875, 0.0013904571533203125, 0.0235443115234375, -0.01541900634765625, -0.022979736328125, -0.058074951171875, -0.0267486572265625, 0.0594482421875, 0.0171661376953125, 0.041534423828125, -0.006336212158203125, 0.036895751953125, 0.038421630859375, 0.024810791015625, -0.034942626953125, 0.03240966796875, -0.0034847259521484375, -0.05413818359375, -0.0227508544921875, -0.0240478515625, -0.087158203125, 0.03228759765625, -0.0282745361328125, -0.043548583984375, 0.0161590576171875, 0.025604248046875, -0.004444122314453125, 0.0435791015625, -0.0704345703125, 0.0662841796875, -0.003173828125, -0.050689697265625, -0.009796142578125, -0.05413818359375, 0.0059967041015625, 0.0089263916015625, 0.01064300537109375, 0.0197906494140625, 0.006450653076171875, 0.072021484375, -0.04473876953125, 0.07025146484375, -0.0433349609375, -0.00341796875, 0.02496337890625, -0.01448822021484375, 0.0389404296875, -0.02166748046875, -0.01174163818359375, 0.0266265869140625, -0.0003159046173095703, -0.04248046875, -0.032928466796875, 0.035552978515625, -0.057464599609375, -0.040252685546875, -0.039794921875, -0.0266265869140625, 0.01036834716796875, 0.03118896484375, 0.0823974609375, 0.0298004150390625, -0.0163726806640625, 
0.000804901123046875, 0.045867919921875, -0.0215301513671875, 0.053680419921875, 0.0113983154296875, -0.0091552734375, -0.031341552734375, 0.04669189453125, 0.0023250579833984375, 0.0207366943359375, 0.00399017333984375, 0.016571044921875, -0.024383544921875, -0.041259765625, -0.046478271484375, 0.03192138671875, -0.06549072265625, -0.024871826171875, -0.055023193359375, -0.0219879150390625, -0.02532958984375, -0.0158233642578125, -0.02276611328125, -0.01332855224609375, -0.053985595703125, 0.00785064697265625, 0.0213470458984375, 0.040985107421875, -0.021759033203125, 0.047149658203125, -0.044219970703125, 0.006622314453125, 0.0204925537109375, 0.0176544189453125, 0.01071929931640625, -0.0595703125, -0.033935546875, 0.0125885009765625, -0.042449951171875, -0.06060791015625, 0.03997802734375, 0.0151214599609375, 0.033477783203125, 0.0341796875, -0.01122283935546875, 0.05133056640625, -0.03363037109375, 0.07232666015625, 0.038604736328125, -0.05938720703125, 0.03460693359375, -0.017547607421875, 0.0276947021484375, 0.01226806640625, 0.033905029296875, -0.033447265625, -0.0094146728515625, -0.06146240234375, -0.054351806640625, 0.049072265625, 0.027130126953125, 0.01146697998046875, 0.01424407958984375, 0.035552978515625, -0.005401611328125, -0.006732940673828125, -0.06378173828125, -0.029388427734375, -0.0254974365234375, 0.003597259521484375, -0.0188140869140625, -0.004596710205078125, -0.005039215087890625, -0.044219970703125, 0.06610107421875, -0.0011959075927734375, 0.051605224609375, 0.035003662109375, -0.01300048828125, 0.004154205322265625, -0.007598876953125, 0.0372314453125, 0.011810302734375, 0.004390716552734375, -0.00021636486053466797, 0.00004172325134277344, -0.04351806640625, 0.0189208984375, 0.00647735595703125, -0.01387786865234375, 0.0077972412109375, 0.030242919921875, 0.084228515625, -0.0223236083984375, -0.0213470458984375, 0.0401611328125, -0.0165557861328125, -0.042724609375, -0.0173187255859375, 0.0031757354736328125, 0.019317626953125, 
0.0243682861328125, 0.031982421875, 0.025299072265625, 0.0130462646484375, -0.01180267333984375, 0.0101318359375, 0.02447509765625, -0.0294952392578125, -0.02490234375, 0.063232421875, 0.002376556396484375, -0.0153656005859375, 0.055938720703125, -0.0306396484375, -0.030914306640625, 0.056610107421875, 0.04376220703125, 0.054351806640625, 0.0006880760192871094, 0.022674560546875, 0.057281494140625, 0.01140594482421875, -0.0047454833984375, -0.00989532470703125, 0.0021839141845703125, -0.049652099609375, -0.017120361328125, -0.02593994140625, 0.008453369140625, 0.0052947998046875, -0.04229736328125, 0.020172119140625, -0.021148681640625, -0.0240478515625, 0.0020732879638671875, 0.0010204315185546875, -0.075927734375, 0.0300750732421875, 0.00435638427734375, 0.064453125, -0.06292724609375, 0.06817626953125, 0.03216552734375, -0.0219268798828125, -0.0623779296875, 0.00313568115234375, -0.0016393661499023438, -0.051666259765625, 0.040313720703125, 0.01123046875, -0.002361297607421875, 0.024566650390625, -0.057861328125, -0.049102783203125, 0.10345458984375, 0.0241241455078125, -0.03582763671875, -0.0124969482421875, -0.0188140869140625, 0.056243896484375, -0.0200653076171875, 0.0234832763671875, 0.02362060546875, 0.032501220703125, 0.021759033203125, -0.031463623046875, 0.0105133056640625, -0.0291595458984375, 0.020751953125, 0.00714874267578125, -0.047393798828125, 0.056610107421875, -0.036346435546875, -0.04681396484375, 0.01038360595703125, 0.061859130859375, -0.0020904541015625, 0.00992584228515625, 0.049591064453125, 0.0723876953125, 0.056732177734375, -0.01012420654296875, 0.093505859375, -0.0027313232421875, 0.042877197265625, 0.054473876953125, 0.007373809814453125, 0.0460205078125, 0.031463623046875, -0.0225830078125, 0.0653076171875, 0.0753173828125, -0.018157958984375, 0.063232421875, 0.00504302978515625, -0.00505828857421875, -0.0012340545654296875, 0.014862060546875, -0.03729248046875, 0.006816864013671875, 0.0209197998046875, -0.0291595458984375, 
-0.01383209228515625, 0.0153350830078125, 0.01416015625, -0.01812744140625, 0.005146026611328125, 0.045440673828125, -0.01050567626953125, -0.048095703125, 0.0595703125, 0.00020682811737060547, 0.07110595703125, -0.0394287109375, 0.00122833251953125, -0.00907135009765625, 0.0008335113525390625, -0.01385498046875, -0.07513427734375, 0.0171356201171875, 0.0008134841918945312, -0.0095367431640625, -0.010589599609375, 0.044677734375, -0.028167724609375, -0.037078857421875, 0.021270751953125, 0.0204925537109375, 0.0204010009765625, -0.0037670135498046875, -0.069091796875, 0.00632476806640625, 0.01241302490234375, -0.04449462890625, 0.0140838623046875, 0.0287322998046875, 0.022796630859375, 0.0292510986328125, 0.053680419921875, -0.00751495361328125, 0.005023956298828125, -0.0161895751953125, 0.06414794921875, -0.029998779296875, -0.03472900390625, -0.06060791015625, 0.067138671875, -0.0172271728515625, -0.028656005859375, 0.0433349609375, 0.041656494140625, 0.049591064453125, -0.01055908203125, 0.056610107421875, -0.0243377685546875, -0.005016326904296875, -0.047088623046875, 0.053985595703125, -0.060302734375, 0.00022363662719726562, -0.03326416015625, -0.0623779296875, -0.0024051666259765625, 0.0455322265625, -0.0135955810546875, 0.01947021484375, 0.059814453125, 0.07025146484375, -0.00611114501953125, -0.020050048828125, 0.0203094482421875, 0.01495361328125, 0.0244140625, 0.03204345703125, 0.037109375, -0.05609130859375, 0.04974365234375, -0.04730224609375, -0.0159912109375, 0.01186370849609375, -0.07666015625, -0.0599365234375, -0.08123779296875, -0.037750244140625, -0.053131103515625, -0.0256805419921875, 0.023681640625, 0.0855712890625, -0.026275634765625, 0.00786590576171875, -0.01446533203125, -0.01309967041015625, -0.00824737548828125, -0.0190887451171875, 0.045928955078125, 0.005954742431640625, -0.067138671875, -0.023468017578125, 0.0107879638671875, 0.031982421875, -0.0173187255859375, -0.0198822021484375, -0.0174713134765625, -0.006992340087890625, 
0.0335693359375, 0.0172271728515625, -0.034576416015625, -0.00814056396484375, -0.00015485286712646484, -0.023345947265625, 0.0241241455078125, 0.0421142578125, -0.045379638671875, 0.02978515625, 0.032379150390625, 0.02642822265625, 0.09393310546875, -0.0291595458984375, 0.013153076171875, -0.0433349609375, 0.043975830078125, -0.00041985511779785156, 0.033233642578125, 0.0295867919921875, -0.041778564453125, 0.0299835205078125, 0.03643798828125, -0.055023193359375, -0.042022705078125, 0.005527496337890625, -0.07513427734375, -0.0318603515625, 0.0810546875, -0.0272674560546875, -0.013397216796875, 0.01495361328125, -0.02252197265625, 0.028564453125, -0.0215301513671875, 0.039642333984375, 0.05950927734375, -0.007015228271484375, -0.038665771484375, -0.02081298828125, 0.036773681640625, 0.018707275390625, -0.063232421875, -0.0255584716796875, 0.05181884765625, 0.037872314453125, 0.01641845703125, 0.05841064453125, -0.00630950927734375, 0.015869140625, 0.0180511474609375, 0.022613525390625, 0.0137939453125, 0.0024852752685546875, -0.02716064453125, -0.003757476806640625, -0.0235443115234375, -0.00743865966796875 ] ]
stablediffusionapi/sdxl-unstable-diffusers-y
2023-10-08T07:28:58.000Z
[ "diffusers", "stablediffusionapi.com", "stable-diffusion-api", "text-to-image", "ultra-realistic", "license:creativeml-openrail-m", "endpoints_compatible", "diffusers:StableDiffusionXLPipeline", "region:us", "has_space" ]
text-to-image
stablediffusionapi
null
null
stablediffusionapi/sdxl-unstable-diffusers-y
2
8,994
diffusers
2023-10-08T07:26:48
--- license: creativeml-openrail-m tags: - stablediffusionapi.com - stable-diffusion-api - text-to-image - ultra-realistic pinned: true --- # SDXL Unstable Diffusers ☛ YamerMIX V8 API Inference ![generated from stablediffusionapi.com](https://pub-3626123a908346a7a8be8d9295f44e26.r2.dev/generations/18141439221696749891.png) ## Get API Key Get API key from [Stable Diffusion API](http://stablediffusionapi.com/), No Payment needed. Replace Key in below code, change **model_id** to "sdxl-unstable-diffusers-y" Coding in PHP/Node/Java etc? Have a look at docs for more code examples: [View docs](https://stablediffusionapi.com/docs) Try model for free: [Generate Images](https://stablediffusionapi.com/models/sdxl-unstable-diffusers-y) Model link: [View model](https://stablediffusionapi.com/models/sdxl-unstable-diffusers-y) Credits: [View credits](https://civitai.com/?query=SDXL%20Unstable%20Diffusers%20%E2%98%9B%20YamerMIX%20V8) View all models: [View Models](https://stablediffusionapi.com/models) import requests import json url = "https://stablediffusionapi.com/api/v4/dreambooth" payload = json.dumps({ "key": "your_api_key", "model_id": "sdxl-unstable-diffusers-y", "prompt": "ultra realistic close up portrait ((beautiful pale cyberpunk female with heavy black eyeliner)), blue eyes, shaved side haircut, hyper detail, cinematic lighting, magic neon, dark red city, Canon EOS R3, nikon, f/1.4, ISO 200, 1/160s, 8K, RAW, unedited, symmetrical balance, in-frame, 8K", "negative_prompt": "painting, extra fingers, mutated hands, poorly drawn hands, poorly drawn face, deformed, ugly, blurry, bad anatomy, bad proportions, extra limbs, cloned face, skinny, glitchy, double torso, extra arms, extra hands, mangled fingers, missing lips, ugly face, distorted face, extra legs, anime", "width": "512", "height": "512", "samples": "1", "num_inference_steps": "30", "safety_checker": "no", "enhance_prompt": "yes", "seed": None, "guidance_scale": 7.5, "multi_lingual": "no", "panorama": "no", 
"self_attention": "no", "upscale": "no", "embeddings": "embeddings_model_id", "lora": "lora_model_id", "webhook": None, "track_id": None }) headers = { 'Content-Type': 'application/json' } response = requests.request("POST", url, headers=headers, data=payload) print(response.text) > Use this coupon code to get 25% off **DMGG0RBN**
2,584
[ [ -0.04083251953125, -0.056304931640625, 0.037567138671875, 0.0167083740234375, -0.038421630859375, 0.00518798828125, 0.0248260498046875, -0.032745361328125, 0.0304412841796875, 0.042633056640625, -0.06488037109375, -0.0555419921875, -0.045440673828125, -0.00835418701171875, -0.0235748291015625, 0.049896240234375, 0.00487518310546875, -0.009033203125, 0.0011005401611328125, -0.0015230178833007812, -0.01375579833984375, -0.0008597373962402344, -0.040313720703125, 0.006114959716796875, 0.0109710693359375, -0.018341064453125, 0.0557861328125, 0.0396728515625, 0.0211944580078125, 0.0276641845703125, -0.0225830078125, -0.0007462501525878906, -0.0404052734375, -0.005451202392578125, -0.005344390869140625, -0.04754638671875, -0.03692626953125, -0.016510009765625, 0.034881591796875, 0.034423828125, -0.004913330078125, 0.03387451171875, -0.00213623046875, 0.045562744140625, -0.054046630859375, 0.01273345947265625, -0.0262451171875, 0.018463134765625, 0.000995635986328125, -0.00518035888671875, -0.01062774658203125, -0.0288848876953125, -0.0167694091796875, -0.06097412109375, 0.033203125, -0.00786590576171875, 0.08819580078125, 0.0250091552734375, -0.0189208984375, -0.005023956298828125, -0.053070068359375, 0.06976318359375, -0.07379150390625, 0.0323486328125, 0.0230255126953125, 0.006587982177734375, -0.0037631988525390625, -0.06341552734375, -0.037109375, 0.0245819091796875, 0.0088958740234375, 0.024688720703125, -0.0206756591796875, -0.00724029541015625, 0.03277587890625, 0.0216522216796875, -0.0457763671875, -0.0219879150390625, -0.04168701171875, -0.005985260009765625, 0.046173095703125, 0.0250091552734375, 0.0149688720703125, -0.01012420654296875, -0.0413818359375, -0.0106964111328125, -0.0286102294921875, 0.0170745849609375, 0.0311737060546875, 0.0176544189453125, -0.037841796875, 0.041748046875, -0.003917694091796875, 0.053619384765625, 0.0263214111328125, -0.0252227783203125, 0.046539306640625, -0.0171356201171875, -0.01093292236328125, -0.0230255126953125, 
0.06817626953125, 0.06500244140625, -0.027252197265625, 0.016876220703125, -0.02813720703125, 0.01140594482421875, 0.010833740234375, -0.0849609375, 0.0018768310546875, 0.06695556640625, -0.057586669921875, -0.0268402099609375, 0.006977081298828125, -0.0699462890625, -0.01654052734375, -0.0020389556884765625, 0.03399658203125, -0.0165252685546875, -0.047607421875, 0.0272369384765625, -0.0274810791015625, 0.015228271484375, 0.025848388671875, -0.047760009765625, 0.006317138671875, 0.029815673828125, 0.05975341796875, 0.0016965866088867188, -0.004146575927734375, 0.0116424560546875, 0.0125885009765625, -0.03094482421875, 0.0533447265625, -0.00830841064453125, -0.04888916015625, -0.00917816162109375, 0.0227203369140625, -0.0037841796875, -0.04705810546875, 0.050445556640625, -0.042205810546875, -0.00598907470703125, -0.00749969482421875, -0.028900146484375, -0.0110321044921875, 0.0036258697509765625, -0.043426513671875, 0.0552978515625, 0.0210418701171875, -0.06353759765625, 0.01326751708984375, -0.04534912109375, -0.001010894775390625, 0.00972747802734375, -0.00446319580078125, -0.04443359375, 0.004543304443359375, -0.001964569091796875, 0.030548095703125, 0.0033893585205078125, -0.0073394775390625, -0.051422119140625, -0.034393310546875, 0.0262298583984375, -0.014678955078125, 0.088623046875, 0.02978515625, -0.0212249755859375, 0.007038116455078125, -0.076171875, 0.0184783935546875, 0.04248046875, -0.016143798828125, -0.01253509521484375, -0.02288818359375, 0.0196685791015625, 0.01125335693359375, 0.0166473388671875, -0.04345703125, 0.00991058349609375, -0.0211639404296875, 0.030242919921875, 0.049774169921875, 0.01654052734375, 0.01523590087890625, -0.017059326171875, 0.05865478515625, 0.019866943359375, 0.032318115234375, -0.009796142578125, -0.042633056640625, -0.044403076171875, -0.03192138671875, -0.01064300537109375, 0.0271148681640625, -0.051513671875, 0.024505615234375, -0.0001779794692993164, -0.049163818359375, -0.051513671875, 0.003265380859375, 
0.027313232421875, 0.0335693359375, 0.01300048828125, -0.020751953125, -0.031707763671875, -0.054046630859375, 0.0083160400390625, -0.00018334388732910156, 0.0009341239929199219, 0.01055145263671875, 0.0263519287109375, -0.037750244140625, 0.06640625, -0.0633544921875, -0.0050506591796875, -0.01038360595703125, 0.0004336833953857422, 0.060943603515625, 0.042510986328125, 0.06817626953125, -0.06640625, -0.03790283203125, -0.022003173828125, -0.047576904296875, 0.007251739501953125, 0.013916015625, -0.00913238525390625, 0.01326751708984375, 0.01317596435546875, -0.072509765625, 0.050872802734375, 0.03631591796875, -0.055908203125, 0.05621337890625, -0.01934814453125, 0.028045654296875, -0.08734130859375, 0.0027923583984375, 0.018218994140625, -0.028289794921875, -0.0361328125, 0.0276031494140625, 0.01558685302734375, -0.0034351348876953125, -0.060699462890625, 0.042877197265625, -0.026580810546875, 0.020050048828125, -0.028961181640625, -0.00489044189453125, 0.0176849365234375, 0.0299835205078125, 0.007389068603515625, 0.03509521484375, 0.050994873046875, -0.036224365234375, 0.03289794921875, 0.012298583984375, -0.0257720947265625, 0.04656982421875, -0.061370849609375, 0.00286865234375, -0.01515960693359375, 0.01593017578125, -0.0833740234375, -0.020111083984375, 0.039306640625, -0.03887939453125, 0.00553131103515625, -0.043121337890625, -0.027252197265625, -0.045867919921875, -0.03863525390625, 0.0231170654296875, 0.05462646484375, -0.03460693359375, 0.047607421875, 0.010955810546875, 0.006622314453125, -0.0506591796875, -0.06396484375, -0.02984619140625, -0.02764892578125, -0.047119140625, 0.0189208984375, -0.028106689453125, -0.026336669921875, 0.005893707275390625, -0.0006995201110839844, -0.018463134765625, -0.0103607177734375, 0.038177490234375, 0.052978515625, -0.024505615234375, -0.0372314453125, 0.016387939453125, -0.01517486572265625, 0.013916015625, -0.01322174072265625, 0.040771484375, -0.0037841796875, -0.033935546875, -0.056488037109375, 
0.01727294921875, 0.0595703125, 0.00634002685546875, 0.056488037109375, 0.057647705078125, -0.03179931640625, -0.003993988037109375, -0.03973388671875, -0.0189056396484375, -0.03790283203125, 0.0244293212890625, -0.0268096923828125, -0.03839111328125, 0.069580078125, 0.005207061767578125, 0.0005631446838378906, 0.039337158203125, 0.040008544921875, -0.00927734375, 0.096923828125, 0.023529052734375, 0.018463134765625, 0.016998291015625, -0.06304931640625, 0.0016527175903320312, -0.05865478515625, -0.0022296905517578125, -0.01776123046875, -0.009613037109375, -0.025848388671875, -0.04425048828125, 0.01143646240234375, 0.020477294921875, -0.019805908203125, 0.0282135009765625, -0.038421630859375, 0.032958984375, 0.0231781005859375, 0.00872039794921875, 0.01517486572265625, -0.010345458984375, -0.00800323486328125, -0.0037994384765625, -0.021697998046875, -0.0237274169921875, 0.07098388671875, 0.021484375, 0.0758056640625, 0.0119171142578125, 0.04412841796875, 0.0128021240234375, -0.0101776123046875, -0.031768798828125, 0.036529541015625, 0.01251220703125, -0.06854248046875, 0.01358795166015625, -0.0196075439453125, -0.06707763671875, 0.0298919677734375, -0.0207977294921875, -0.0654296875, 0.052886962890625, 0.013336181640625, -0.04888916015625, 0.0273284912109375, -0.060546875, 0.0509033203125, -0.006961822509765625, -0.058258056640625, 0.00051116943359375, -0.044677734375, 0.043426513671875, -0.00402069091796875, 0.040191650390625, -0.025665283203125, -0.020416259765625, 0.03387451171875, -0.04132080078125, 0.07427978515625, -0.033782958984375, -0.016632080078125, 0.0357666015625, -0.0003933906555175781, 0.032073974609375, 0.033416748046875, -0.01073455810546875, 0.018707275390625, 0.039215087890625, -0.03900146484375, -0.03863525390625, 0.060760498046875, -0.06512451171875, -0.034088134765625, -0.0252227783203125, -0.0283203125, 0.01061248779296875, 0.0247039794921875, 0.03997802734375, 0.027313232421875, 0.00742340087890625, -0.012359619140625, 0.05108642578125, 
-0.007080078125, 0.03643798828125, 0.0295562744140625, -0.059906005859375, -0.048553466796875, 0.0694580078125, 0.0016040802001953125, 0.04364013671875, -0.0037364959716796875, 0.0128326416015625, -0.03192138671875, -0.042266845703125, -0.042572021484375, 0.0200653076171875, -0.061798095703125, -0.0272674560546875, -0.0517578125, -0.0122528076171875, -0.044677734375, -0.015838623046875, -0.06243896484375, -0.034088134765625, -0.036163330078125, 0.0011386871337890625, 0.035003662109375, 0.03656005859375, -0.006366729736328125, 0.0176849365234375, -0.0633544921875, 0.0406494140625, 0.005641937255859375, 0.02471923828125, 0.00762176513671875, -0.03521728515625, -0.007183074951171875, 0.01361083984375, -0.034088134765625, -0.054779052734375, 0.04473876953125, -0.01387786865234375, 0.0293731689453125, 0.07794189453125, 0.0192413330078125, 0.06903076171875, -0.0004949569702148438, 0.0657958984375, 0.0302734375, -0.054046630859375, 0.046295166015625, -0.040618896484375, 0.0247802734375, 0.0293121337890625, 0.032196044921875, -0.0236053466796875, -0.0277099609375, -0.0518798828125, -0.083740234375, 0.05035400390625, 0.0111083984375, 0.0216217041015625, 0.004985809326171875, 0.02972412109375, 0.0135498046875, 0.01137542724609375, -0.0701904296875, -0.049835205078125, -0.0212249755859375, 0.002666473388671875, 0.018768310546875, -0.0155181884765625, -0.0149993896484375, -0.0335693359375, 0.061370849609375, -0.007259368896484375, 0.01910400390625, 0.019439697265625, 0.0190582275390625, -0.02838134765625, 0.0007047653198242188, 0.046112060546875, 0.062347412109375, -0.050537109375, -0.0061492919921875, -0.01540374755859375, -0.045166015625, 0.01068115234375, 0.0145263671875, -0.026763916015625, 0.0014553070068359375, 0.0177154541015625, 0.058746337890625, -0.00751495361328125, -0.0457763671875, 0.043975830078125, -0.030242919921875, -0.02154541015625, -0.03228759765625, 0.0115203857421875, 0.035430908203125, 0.039825439453125, 0.035430908203125, 0.0277099609375, 
0.01015472412109375, -0.0178070068359375, -0.0205535888671875, 0.032989501953125, -0.0252532958984375, -0.024505615234375, 0.0732421875, 0.00046253204345703125, -0.0255889892578125, 0.029144287109375, -0.0333251953125, -0.007556915283203125, 0.04827880859375, 0.0595703125, 0.061431884765625, -0.01507568359375, 0.01033782958984375, 0.05413818359375, 0.00333404541015625, -0.006877899169921875, 0.046478271484375, 0.00804901123046875, -0.0472412109375, -0.016845703125, -0.0587158203125, -0.007678985595703125, 0.01430511474609375, -0.056671142578125, 0.0467529296875, -0.04449462890625, -0.0341796875, -0.0164947509765625, -0.023651123046875, -0.032989501953125, 0.0121002197265625, 0.004974365234375, 0.05828857421875, -0.0577392578125, 0.05279541015625, 0.035003662109375, -0.031219482421875, -0.0567626953125, -0.01218414306640625, 0.0177001953125, -0.05322265625, 0.0250701904296875, 0.00627899169921875, -0.004604339599609375, 0.0052642822265625, -0.046722412109375, -0.0748291015625, 0.0904541015625, 0.0257720947265625, -0.042694091796875, 0.00919342041015625, 0.007175445556640625, 0.0268096923828125, -0.018585205078125, 0.033477783203125, 0.01412200927734375, 0.024139404296875, 0.0265350341796875, -0.035797119140625, 0.0036563873291015625, -0.03240966796875, 0.01247406005859375, 0.0052032470703125, -0.052886962890625, 0.061737060546875, -0.040130615234375, -0.00978851318359375, 0.0279998779296875, 0.045379638671875, 0.048919677734375, 0.03326416015625, 0.040618896484375, 0.06927490234375, 0.02777099609375, 0.00011080503463745117, 0.07373046875, -0.0267333984375, 0.055206298828125, 0.05712890625, -0.01082611083984375, 0.057342529296875, 0.0419921875, -0.040863037109375, 0.03021240234375, 0.061798095703125, -0.015655517578125, 0.046478271484375, 0.002338409423828125, -0.0307159423828125, -0.00444793701171875, 0.00386810302734375, -0.053985595703125, 0.005023956298828125, 0.0309600830078125, -0.0323486328125, 0.0022602081298828125, 0.013824462890625, -0.00798797607421875, 
-0.0189056396484375, -0.0228118896484375, 0.025360107421875, 0.005496978759765625, -0.0149383544921875, 0.06524658203125, -0.00994873046875, 0.07366943359375, -0.056915283203125, -0.004459381103515625, -0.0109405517578125, 0.0343017578125, -0.0275115966796875, -0.05181884765625, 0.024871826171875, -0.016326904296875, -0.0147857666015625, -0.00373077392578125, 0.055511474609375, -0.0025806427001953125, -0.0675048828125, 0.035736083984375, 0.019012451171875, 0.017730712890625, 0.00048089027404785156, -0.0745849609375, 0.04534912109375, 0.0085296630859375, -0.03302001953125, 0.0106658935546875, 0.01052093505859375, 0.038909912109375, 0.0482177734375, 0.043243408203125, 0.009857177734375, 0.017364501953125, -0.003631591796875, 0.0560302734375, -0.04644775390625, -0.026123046875, -0.059112548828125, 0.05780029296875, -0.0035877227783203125, -0.0149078369140625, 0.05364990234375, 0.060943603515625, 0.06280517578125, -0.0419921875, 0.05999755859375, -0.015350341796875, 0.040283203125, -0.027252197265625, 0.066162109375, -0.053558349609375, 0.016357421875, -0.036407470703125, -0.0633544921875, -0.00824737548828125, 0.042205810546875, 0.005596160888671875, 0.0181121826171875, 0.0311737060546875, 0.0751953125, -0.02410888671875, -0.00711822509765625, 0.0225372314453125, 0.02691650390625, 0.0077362060546875, 0.0178680419921875, 0.060546875, -0.040130615234375, 0.0250244140625, -0.06219482421875, -0.0123443603515625, 0.0011663436889648438, -0.0513916015625, -0.0543212890625, -0.040283203125, -0.052978515625, -0.05621337890625, -0.008819580078125, 0.069091796875, 0.06927490234375, -0.05230712890625, -0.0178680419921875, -0.0007476806640625, 0.00212860107421875, -0.0213775634765625, -0.0247802734375, 0.0236968994140625, 0.017303466796875, -0.0772705078125, 0.012115478515625, 0.012298583984375, 0.0341796875, -0.0192413330078125, 0.0014429092407226562, -0.01029205322265625, -0.0007081031799316406, 0.0309600830078125, 0.018829345703125, -0.06005859375, 0.01226806640625, 
-0.020050048828125, 0.0030670166015625, 0.01373291015625, 0.0193328857421875, -0.041351318359375, 0.0250091552734375, 0.03631591796875, 0.005191802978515625, 0.03729248046875, -0.002956390380859375, 0.005710601806640625, -0.020843505859375, 0.0223236083984375, 0.003490447998046875, 0.0345458984375, 0.00745391845703125, -0.038482666015625, 0.0264892578125, 0.040496826171875, -0.0310211181640625, -0.06353759765625, -0.0015993118286132812, -0.0950927734375, -0.0367431640625, 0.07562255859375, -0.0192108154296875, -0.04443359375, -0.00019121170043945312, -0.0303192138671875, 0.0017099380493164062, -0.0280609130859375, 0.04736328125, 0.028106689453125, -0.0169219970703125, -0.0225677490234375, -0.0546875, 0.00829315185546875, 0.0030460357666015625, -0.048248291015625, -0.0034236907958984375, 0.0264892578125, 0.04229736328125, 0.0419921875, 0.0413818359375, -0.034088134765625, 0.021392822265625, 0.01300048828125, 0.0240478515625, 0.0081329345703125, 0.0306854248046875, -0.0240478515625, 0.0038585662841796875, 0.0070953369140625, -0.0264434814453125 ] ]
qanastek/51-languages-classifier
2022-05-19T12:56:56.000Z
[ "transformers", "pytorch", "Transformers", "text-classification", "multi-class-classification", "dataset:qanastek/MASSIVE", "arxiv:1911.02116", "license:cc-by-4.0", "endpoints_compatible", "has_space", "region:us" ]
text-classification
qanastek
null
null
qanastek/51-languages-classifier
20
8,982
transformers
2022-05-06T07:43:20
--- tags: - Transformers - text-classification - multi-class-classification languages: - af-ZA - am-ET - ar-SA - az-AZ - bn-BD - cy-GB - da-DK - de-DE - el-GR - en-US - es-ES - fa-IR - fi-FI - fr-FR - he-IL - hi-IN - hu-HU - hy-AM - id-ID - is-IS - it-IT - ja-JP - jv-ID - ka-GE - km-KH - kn-IN - ko-KR - lv-LV - ml-IN - mn-MN - ms-MY - my-MM - nb-NO - nl-NL - pl-PL - pt-PT - ro-RO - ru-RU - sl-SL - sq-AL - sv-SE - sw-KE - ta-IN - te-IN - th-TH - tl-PH - tr-TR - ur-PK - vi-VN - zh-CN - zh-TW multilinguality: - af-ZA - am-ET - ar-SA - az-AZ - bn-BD - cy-GB - da-DK - de-DE - el-GR - en-US - es-ES - fa-IR - fi-FI - fr-FR - he-IL - hi-IN - hu-HU - hy-AM - id-ID - is-IS - it-IT - ja-JP - jv-ID - ka-GE - km-KH - kn-IN - ko-KR - lv-LV - ml-IN - mn-MN - ms-MY - my-MM - nb-NO - nl-NL - pl-PL - pt-PT - ro-RO - ru-RU - sl-SL - sq-AL - sv-SE - sw-KE - ta-IN - te-IN - th-TH - tl-PH - tr-TR - ur-PK - vi-VN - zh-CN - zh-TW datasets: - qanastek/MASSIVE widget: - text: "wake me up at five am this week" - text: "je veux écouter la chanson de jacques brel encore une fois" - text: "quiero escuchar la canción de arijit singh una vez más" - text: "olly onde é que á um parque por perto onde eu possa correr" - text: "פרק הבא בפודקאסט בבקשה" - text: "亚马逊股价" - text: "найди билет на поезд в санкт-петербург" license: cc-by-4.0 --- **People Involved** * [LABRAK Yanis](https://www.linkedin.com/in/yanis-labrak-8a7412145/) (1) **Affiliations** 1. [LIA, NLP team](https://lia.univ-avignon.fr/), Avignon University, Avignon, France. 
## Model XLM-Roberta : [https://huggingface.co/xlm-roberta-base](https://huggingface.co/xlm-roberta-base) Paper : [Unsupervised Cross-lingual Representation Learning at Scale](https://arxiv.org/pdf/1911.02116.pdf) ## Demo: How to use in HuggingFace Transformers Pipeline Requires [transformers](https://pypi.org/project/transformers/): ```pip install transformers``` ```python from transformers import AutoTokenizer, AutoModelForSequenceClassification, TextClassificationPipeline model_name = 'qanastek/51-languages-classifier' tokenizer = AutoTokenizer.from_pretrained(model_name) model = AutoModelForSequenceClassification.from_pretrained(model_name) classifier = TextClassificationPipeline(model=model, tokenizer=tokenizer) res = classifier("פרק הבא בפודקאסט בבקשה") print(res) ``` Outputs: ```python [{'label': 'he-IL', 'score': 0.9998375177383423}] ``` ## Training data [MASSIVE](https://huggingface.co/datasets/qanastek/MASSIVE) is a parallel dataset of > 1M utterances across 51 languages with annotations for the Natural Language Understanding tasks of intent prediction and slot annotation. Utterances span 60 intents and include 55 slot types. MASSIVE was created by localizing the SLURP dataset, composed of general Intelligent Voice Assistant single-shot interactions. 
### Languages Thee model is capable of distinguish 51 languages : - `Afrikaans - South Africa (af-ZA)` - `Amharic - Ethiopia (am-ET)` - `Arabic - Saudi Arabia (ar-SA)` - `Azeri - Azerbaijan (az-AZ)` - `Bengali - Bangladesh (bn-BD)` - `Chinese - China (zh-CN)` - `Chinese - Taiwan (zh-TW)` - `Danish - Denmark (da-DK)` - `German - Germany (de-DE)` - `Greek - Greece (el-GR)` - `English - United States (en-US)` - `Spanish - Spain (es-ES)` - `Farsi - Iran (fa-IR)` - `Finnish - Finland (fi-FI)` - `French - France (fr-FR)` - `Hebrew - Israel (he-IL)` - `Hungarian - Hungary (hu-HU)` - `Armenian - Armenia (hy-AM)` - `Indonesian - Indonesia (id-ID)` - `Icelandic - Iceland (is-IS)` - `Italian - Italy (it-IT)` - `Japanese - Japan (ja-JP)` - `Javanese - Indonesia (jv-ID)` - `Georgian - Georgia (ka-GE)` - `Khmer - Cambodia (km-KH)` - `Korean - Korea (ko-KR)` - `Latvian - Latvia (lv-LV)` - `Mongolian - Mongolia (mn-MN)` - `Malay - Malaysia (ms-MY)` - `Burmese - Myanmar (my-MM)` - `Norwegian - Norway (nb-NO)` - `Dutch - Netherlands (nl-NL)` - `Polish - Poland (pl-PL)` - `Portuguese - Portugal (pt-PT)` - `Romanian - Romania (ro-RO)` - `Russian - Russia (ru-RU)` - `Slovanian - Slovania (sl-SL)` - `Albanian - Albania (sq-AL)` - `Swedish - Sweden (sv-SE)` - `Swahili - Kenya (sw-KE)` - `Hindi - India (hi-IN)` - `Kannada - India (kn-IN)` - `Malayalam - India (ml-IN)` - `Tamil - India (ta-IN)` - `Telugu - India (te-IN)` - `Thai - Thailand (th-TH)` - `Tagalog - Philippines (tl-PH)` - `Turkish - Turkey (tr-TR)` - `Urdu - Pakistan (ur-PK)` - `Vietnamese - Vietnam (vi-VN)` - `Welsh - United Kingdom (cy-GB)` ## Evaluation results ```plain precision recall f1-score support af-ZA 0.9821 0.9805 0.9813 2974 am-ET 1.0000 1.0000 1.0000 2974 ar-SA 0.9809 0.9822 0.9815 2974 az-AZ 0.9946 0.9845 0.9895 2974 bn-BD 0.9997 0.9990 0.9993 2974 cy-GB 0.9970 0.9929 0.9949 2974 da-DK 0.9575 0.9617 0.9596 2974 de-DE 0.9906 0.9909 0.9908 2974 el-GR 0.9997 0.9973 0.9985 2974 en-US 0.9712 0.9866 0.9788 2974 es-ES 
0.9825 0.9842 0.9834 2974 fa-IR 0.9940 0.9973 0.9956 2974 fi-FI 0.9943 0.9946 0.9945 2974 fr-FR 0.9963 0.9923 0.9943 2974 he-IL 1.0000 0.9997 0.9998 2974 hi-IN 1.0000 0.9980 0.9990 2974 hu-HU 0.9983 0.9950 0.9966 2974 hy-AM 1.0000 0.9993 0.9997 2974 id-ID 0.9319 0.9291 0.9305 2974 is-IS 0.9966 0.9943 0.9955 2974 it-IT 0.9698 0.9926 0.9811 2974 ja-JP 0.9987 0.9963 0.9975 2974 jv-ID 0.9628 0.9744 0.9686 2974 ka-GE 0.9993 0.9997 0.9995 2974 km-KH 0.9867 0.9963 0.9915 2974 kn-IN 1.0000 0.9993 0.9997 2974 ko-KR 0.9917 0.9997 0.9956 2974 lv-LV 0.9990 0.9950 0.9970 2974 ml-IN 0.9997 0.9997 0.9997 2974 mn-MN 0.9987 0.9966 0.9976 2974 ms-MY 0.9359 0.9418 0.9388 2974 my-MM 1.0000 0.9993 0.9997 2974 nb-NO 0.9600 0.9533 0.9566 2974 nl-NL 0.9850 0.9748 0.9799 2974 pl-PL 0.9946 0.9923 0.9934 2974 pt-PT 0.9885 0.9798 0.9841 2974 ro-RO 0.9919 0.9916 0.9918 2974 ru-RU 0.9976 0.9983 0.9980 2974 sl-SL 0.9956 0.9939 0.9948 2974 sq-AL 0.9936 0.9896 0.9916 2974 sv-SE 0.9902 0.9842 0.9872 2974 sw-KE 0.9867 0.9953 0.9910 2974 ta-IN 1.0000 1.0000 1.0000 2974 te-IN 1.0000 0.9997 0.9998 2974 th-TH 1.0000 0.9983 0.9992 2974 tl-PH 0.9929 0.9899 0.9914 2974 tr-TR 0.9869 0.9872 0.9871 2974 ur-PK 0.9983 0.9929 0.9956 2974 vi-VN 0.9993 0.9973 0.9983 2974 zh-CN 0.9812 0.9832 0.9822 2974 zh-TW 0.9832 0.9815 0.9823 2974 accuracy 0.9889 151674 macro avg 0.9889 0.9889 0.9889 151674 weighted avg 0.9889 0.9889 0.9889 151674 ``` Keywords : language identification ; language identification ; multilingual ; classification
7,528
[ [ -0.043609619140625, -0.02532958984375, 0.0126953125, 0.01983642578125, -0.01213836669921875, 0.005893707275390625, -0.0059051513671875, -0.0256195068359375, 0.043212890625, 0.011932373046875, -0.032073974609375, -0.0537109375, -0.045196533203125, 0.0243988037109375, 0.004978179931640625, 0.044647216796875, -0.005611419677734375, 0.01078033447265625, 0.0177764892578125, -0.0292510986328125, -0.0160675048828125, -0.01392364501953125, -0.03424072265625, -0.0009760856628417969, 0.01641845703125, 0.0377197265625, 0.043792724609375, 0.038726806640625, 0.02911376953125, 0.027679443359375, -0.009979248046875, 0.006237030029296875, -0.001895904541015625, -0.026092529296875, -0.0007910728454589844, -0.031585693359375, -0.0323486328125, -0.004177093505859375, 0.03277587890625, 0.054443359375, -0.00646209716796875, 0.036163330078125, 0.0032787322998046875, 0.06414794921875, -0.02825927734375, 0.02642822265625, -0.00662994384765625, 0.003864288330078125, -0.0064697265625, -0.005886077880859375, -0.006893157958984375, -0.037628173828125, 0.007663726806640625, -0.054656982421875, 0.004482269287109375, 0.0067596435546875, 0.0926513671875, -0.00018918514251708984, -0.007373809814453125, -0.0219879150390625, -0.027130126953125, 0.058441162109375, -0.05474853515625, 0.0175018310546875, 0.04669189453125, 0.004817962646484375, -0.006046295166015625, -0.0343017578125, -0.055938720703125, 0.01271820068359375, -0.029388427734375, 0.0261993408203125, -0.01198577880859375, -0.0229339599609375, 0.0121002197265625, 0.029205322265625, -0.057586669921875, -0.00382232666015625, -0.0364990234375, -0.01351165771484375, 0.0584716796875, 0.0109710693359375, 0.035980224609375, -0.03460693359375, -0.02288818359375, -0.01395416259765625, -0.0189361572265625, 0.04022216796875, 0.023193359375, 0.0225677490234375, -0.042449951171875, 0.049163818359375, -0.0236358642578125, 0.03375244140625, 0.01247406005859375, -0.027252197265625, 0.06939697265625, -0.0479736328125, -0.0190887451171875, 
0.003978729248046875, 0.06884765625, 0.028717041015625, 0.0006847381591796875, 0.018035888671875, 0.006084442138671875, -0.0005011558532714844, -0.017120361328125, -0.0457763671875, -0.0023956298828125, 0.035003662109375, -0.03997802734375, -0.006671905517578125, 0.01519012451171875, -0.0732421875, 0.00438690185546875, -0.0084686279296875, 0.026702880859375, -0.0293121337890625, -0.04656982421875, 0.01053619384765625, -0.0067596435546875, 0.03289794921875, 0.0183258056640625, -0.0645751953125, 0.012786865234375, 0.01483917236328125, 0.064453125, -0.01184844970703125, -0.021087646484375, 0.000247955322265625, 0.007717132568359375, -0.03558349609375, 0.057342529296875, -0.02679443359375, -0.045379638671875, -0.01334381103515625, 0.0318603515625, -0.03253173828125, -0.0262603759765625, 0.0504150390625, -0.006465911865234375, 0.030181884765625, -0.02227783203125, -0.0190582275390625, -0.0238494873046875, 0.03070068359375, -0.05712890625, 0.1015625, 0.032470703125, -0.0655517578125, 0.033233642578125, -0.0343017578125, -0.017181396484375, 0.0045013427734375, -0.01629638671875, -0.052001953125, -0.028961181640625, 0.03887939453125, 0.01995849609375, -0.0238037109375, 0.02264404296875, -0.0009226799011230469, -0.021331787109375, -0.014984130859375, -0.0158843994140625, 0.08355712890625, 0.0243072509765625, -0.042724609375, 0.00856781005859375, -0.07080078125, 0.0122833251953125, 0.017669677734375, -0.032867431640625, -0.0034198760986328125, -0.0361328125, 0.00042128562927246094, 0.029937744140625, 0.014801025390625, -0.048828125, 0.01515960693359375, -0.037506103515625, 0.0186614990234375, 0.044891357421875, 0.00667572021484375, 0.029754638671875, -0.039306640625, 0.04986572265625, 0.0241546630859375, 0.006984710693359375, 0.0010614395141601562, -0.048736572265625, -0.053375244140625, -0.0227813720703125, 0.0140838623046875, 0.055877685546875, -0.04461669921875, 0.05450439453125, -0.0228424072265625, -0.047210693359375, -0.041015625, -0.0106201171875, 0.03143310546875, 
0.03802490234375, 0.0161285400390625, -0.01251983642578125, -0.050811767578125, -0.065185546875, -0.010223388671875, -0.0174407958984375, 0.01409912109375, 0.0341796875, 0.0657958984375, -0.01137542724609375, 0.0660400390625, -0.037841796875, -0.0404052734375, -0.02520751953125, -0.01427459716796875, 0.037750244140625, 0.04248046875, 0.0528564453125, -0.05792236328125, -0.0755615234375, 0.007843017578125, -0.0533447265625, 0.01324462890625, 0.0006508827209472656, -0.006999969482421875, 0.036163330078125, 0.019622802734375, -0.044891357421875, 0.052581787109375, 0.043792724609375, -0.0438232421875, 0.04571533203125, -0.0296630859375, 0.0271453857421875, -0.08294677734375, 0.021514892578125, -0.0058441162109375, 0.0165252685546875, -0.034698486328125, -0.024566650390625, 0.0004603862762451172, -0.01397705078125, -0.034759521484375, 0.055023193359375, -0.044525146484375, 0.00911712646484375, 0.02606201171875, 0.00769805908203125, -0.0018415451049804688, 0.049224853515625, 0.0144500732421875, 0.07696533203125, 0.060211181640625, -0.034515380859375, 0.00467681884765625, 0.025238037109375, -0.042449951171875, 0.02813720703125, -0.037506103515625, -0.024688720703125, -0.01168060302734375, 0.01088714599609375, -0.0906982421875, -0.0243377685546875, 0.0261383056640625, -0.055816650390625, 0.01271820068359375, 0.00328826904296875, -0.017364501953125, -0.054534912109375, -0.035003662109375, 0.0119476318359375, 0.02386474609375, -0.0277862548828125, 0.0504150390625, 0.0218963623046875, -0.0157012939453125, -0.05029296875, -0.05462646484375, -0.00934600830078125, -0.0207672119140625, -0.048797607421875, 0.0211334228515625, -0.0047760009765625, -0.002422332763671875, 0.0099029541015625, -0.01390838623046875, -0.00559234619140625, -0.00643157958984375, 0.0150146484375, 0.01410675048828125, -0.0200958251953125, -0.0175018310546875, -0.017913818359375, -0.01247406005859375, -0.006488800048828125, 0.005229949951171875, 0.04779052734375, -0.00998687744140625, -0.0113525390625, 
-0.047943115234375, 0.019317626953125, 0.038818359375, -0.029632568359375, 0.0731201171875, 0.061920166015625, -0.0219573974609375, 0.01392364501953125, -0.033660888671875, 0.01096343994140625, -0.03240966796875, 0.02557373046875, -0.04144287109375, -0.056243896484375, 0.06500244140625, -0.004306793212890625, -0.00788116455078125, 0.060150146484375, 0.037628173828125, 0.0061798095703125, 0.07794189453125, 0.0291290283203125, -0.01318359375, 0.014251708984375, -0.046844482421875, 0.0092926025390625, -0.0538330078125, -0.051605224609375, -0.0504150390625, -0.00797271728515625, -0.0546875, -0.034454345703125, 0.0322265625, 0.0003941059112548828, -0.033660888671875, 0.020904541015625, -0.045745849609375, 0.0242767333984375, 0.0479736328125, 0.011199951171875, 0.00713348388671875, 0.006320953369140625, -0.03369140625, -0.01153564453125, -0.036041259765625, -0.036041259765625, 0.09100341796875, 0.01551055908203125, 0.029327392578125, 0.0249786376953125, 0.0538330078125, 0.012969970703125, -0.0027256011962890625, -0.040283203125, 0.0294952392578125, -0.005985260009765625, -0.05889892578125, -0.041046142578125, -0.022979736328125, -0.08319091796875, 0.033721923828125, -0.01348114013671875, -0.06622314453125, 0.029296875, -0.0110321044921875, -0.04107666015625, 0.0216522216796875, -0.05963134765625, 0.058319091796875, -0.0189208984375, -0.024658203125, 0.01053619384765625, -0.05169677734375, 0.0237274169921875, -0.0003437995910644531, 0.04034423828125, -0.022369384765625, 0.00516510009765625, 0.0648193359375, -0.039459228515625, 0.043609619140625, -0.02203369140625, 0.01149749755859375, 0.03485107421875, -0.0102996826171875, 0.0283660888671875, 0.016693115234375, -0.0211029052734375, 0.010528564453125, 0.00836944580078125, -0.03570556640625, -0.01995849609375, 0.05633544921875, -0.0799560546875, -0.050140380859375, -0.0640869140625, -0.033111572265625, 0.00531768798828125, 0.042205810546875, 0.0207672119140625, 0.0219573974609375, 0.006710052490234375, 0.00826263427734375, 
0.040496826171875, -0.031646728515625, 0.04119873046875, 0.035003662109375, -0.013214111328125, -0.05609130859375, 0.0640869140625, 0.0216217041015625, 0.01073455810546875, 0.0178985595703125, 0.036651611328125, -0.035003662109375, -0.0233612060546875, -0.044677734375, 0.02392578125, -0.0246124267578125, -0.009185791015625, -0.056976318359375, -0.01464080810546875, -0.054443359375, -0.0194091796875, -0.01201629638671875, -0.033538818359375, -0.01122283935546875, -0.010040283203125, 0.0308685302734375, 0.04217529296875, -0.01114654541015625, 0.0122528076171875, -0.047698974609375, 0.0284576416015625, 0.0014438629150390625, 0.0221710205078125, -0.006290435791015625, -0.04473876953125, -0.0160064697265625, 0.003070831298828125, -0.01654052734375, -0.06097412109375, 0.059600830078125, 0.0139007568359375, 0.05389404296875, 0.0228424072265625, -0.00305938720703125, 0.0687255859375, -0.0140838623046875, 0.07794189453125, 0.019989013671875, -0.06268310546875, 0.046661376953125, -0.0283660888671875, 0.03582763671875, 0.050750732421875, 0.05145263671875, -0.03240966796875, -0.0225372314453125, -0.053955078125, -0.0709228515625, 0.0706787109375, 0.0215911865234375, -0.01335906982421875, 0.005962371826171875, 0.00843048095703125, -0.008636474609375, 0.007167816162109375, -0.05621337890625, -0.0614013671875, -0.0036029815673828125, -0.0235443115234375, -0.0098419189453125, -0.007244110107421875, -0.0036830902099609375, -0.0562744140625, 0.046417236328125, 0.01126861572265625, 0.0201568603515625, 0.028533935546875, 0.0032291412353515625, -0.0100555419921875, 0.0191192626953125, 0.05291748046875, 0.055419921875, -0.0218505859375, -0.008453369140625, 0.01505279541015625, -0.0631103515625, 0.0239105224609375, 0.0023212432861328125, -0.02764892578125, 0.00811767578125, 0.031036376953125, 0.033966064453125, 0.0079498291015625, -0.028564453125, 0.034332275390625, -0.00952911376953125, -0.034088134765625, -0.04473876953125, -0.003314971923828125, 0.014984130859375, 0.00506591796875, 
0.03582763671875, 0.004367828369140625, -0.0015115737915039062, -0.03936767578125, 0.01387786865234375, 0.02203369140625, -0.0210113525390625, -0.005725860595703125, 0.04608154296875, 0.00952911376953125, -0.0114898681640625, 0.03240966796875, -0.0243988037109375, -0.0440673828125, 0.06500244140625, 0.03924560546875, 0.039886474609375, -0.043487548828125, 0.0224761962890625, 0.0850830078125, 0.0290985107421875, 0.0009055137634277344, 0.048309326171875, 0.01258087158203125, -0.0352783203125, 0.005550384521484375, -0.05645751953125, -0.01003265380859375, 0.019378662109375, -0.05755615234375, 0.0166473388671875, -0.0211029052734375, -0.01479339599609375, -0.00533294677734375, 0.0308990478515625, -0.052459716796875, 0.025604248046875, -0.0016040802001953125, 0.06707763671875, -0.07818603515625, 0.06268310546875, 0.057281494140625, -0.054168701171875, -0.09271240234375, -0.0178985595703125, -0.0034027099609375, -0.05987548828125, 0.04998779296875, 0.01306915283203125, 0.006549835205078125, -0.0024394989013671875, -0.01556396484375, -0.10125732421875, 0.096923828125, -0.00937652587890625, -0.03778076171875, 0.01467132568359375, 0.0268096923828125, 0.03765869140625, -0.0088653564453125, 0.037567138671875, 0.059356689453125, 0.048065185546875, 0.0067138671875, -0.081298828125, 0.004276275634765625, -0.0467529296875, -0.010101318359375, 0.0170135498046875, -0.06903076171875, 0.0718994140625, -0.0185546875, -0.01177215576171875, -0.0117340087890625, 0.044036865234375, 0.037841796875, 0.0249481201171875, 0.0316162109375, 0.060302734375, 0.050811767578125, -0.01332855224609375, 0.047088623046875, -0.0274200439453125, 0.03179931640625, 0.055816650390625, -0.0027561187744140625, 0.055084228515625, 0.0279693603515625, -0.04132080078125, 0.034698486328125, 0.0482177734375, -0.00853729248046875, 0.037200927734375, -0.0046234130859375, -0.011932373046875, 0.002155303955078125, -0.00734710693359375, -0.03704833984375, 0.033782958984375, 0.03729248046875, -0.0210418701171875, 
-0.00701904296875, 0.00025916099548339844, 0.0189208984375, 0.0029697418212890625, -0.0305023193359375, 0.0421142578125, -0.00724029541015625, -0.052978515625, 0.0533447265625, -0.008819580078125, 0.059722900390625, -0.04937744140625, -0.00024199485778808594, -0.013031005859375, 0.0220489501953125, -0.039337158203125, -0.07965087890625, 0.01418304443359375, -0.006092071533203125, -0.0160064697265625, 0.0080718994140625, 0.0236968994140625, -0.037200927734375, -0.056549072265625, 0.0203094482421875, 0.0176544189453125, 0.0100860595703125, 0.0272216796875, -0.0709228515625, 0.00861358642578125, 0.0247039794921875, -0.043548583984375, 0.0201873779296875, 0.02935791015625, -0.003833770751953125, 0.047515869140625, 0.042388916015625, 0.0119781494140625, 0.0175018310546875, -0.01026153564453125, 0.061737060546875, -0.048126220703125, -0.0390625, -0.048614501953125, 0.040191650390625, -0.00600433349609375, -0.042724609375, 0.085205078125, 0.0660400390625, 0.06365966796875, -0.00856781005859375, 0.0699462890625, -0.0272216796875, 0.053863525390625, -0.0242767333984375, 0.05438232421875, -0.048858642578125, -0.01035308837890625, -0.0243072509765625, -0.048980712890625, -0.0308990478515625, 0.0645751953125, -0.0303802490234375, 0.01398468017578125, 0.052490234375, 0.06219482421875, 0.01003265380859375, -0.0013408660888671875, 0.0106201171875, 0.0064239501953125, 0.01383209228515625, 0.049530029296875, 0.01377105712890625, -0.046112060546875, 0.037841796875, -0.052459716796875, -0.009368896484375, -0.00659942626953125, -0.0445556640625, -0.060028076171875, -0.04443359375, -0.030242919921875, -0.0295257568359375, -0.0245513916015625, 0.06964111328125, 0.034912109375, -0.06353759765625, -0.03265380859375, 0.007274627685546875, -0.0016536712646484375, -0.0113372802734375, -0.015838623046875, 0.07855224609375, 0.004650115966796875, -0.0582275390625, 0.00919342041015625, 0.00740814208984375, 0.01416015625, -0.00975799560546875, -0.01409912109375, -0.04168701171875, 
-0.0032978057861328125, 0.0328369140625, 0.027099609375, -0.05792236328125, -0.00832366943359375, 0.000774383544921875, -0.0211334228515625, 0.02099609375, 0.01171875, -0.025543212890625, 0.034820556640625, 0.03863525390625, 0.0220489501953125, 0.039459228515625, 0.01392364501953125, 0.005344390869140625, -0.0286407470703125, 0.0302734375, 0.004154205322265625, 0.023162841796875, 0.007099151611328125, -0.0323486328125, 0.049560546875, 0.0221710205078125, -0.042388916015625, -0.0625, -0.0179443359375, -0.0899658203125, -0.004062652587890625, 0.08673095703125, -0.02093505859375, -0.047454833984375, -0.0196075439453125, -0.0146484375, 0.0169219970703125, -0.038909912109375, 0.0290985107421875, 0.04888916015625, -0.016815185546875, -0.0024566650390625, -0.044677734375, 0.03277587890625, 0.0276336669921875, -0.06494140625, -0.0174102783203125, 0.002674102783203125, 0.0296173095703125, 0.0301513671875, 0.06072998046875, -0.0200042724609375, 0.01305389404296875, 0.01335906982421875, 0.017120361328125, 0.005725860595703125, -0.0006861686706542969, -0.0166168212890625, 0.0004856586456298828, -0.00852203369140625, -0.0253448486328125 ] ]
LeoLM/leo-hessianai-13b
2023-09-29T10:34:48.000Z
[ "transformers", "pytorch", "llama", "text-generation", "custom_code", "en", "de", "dataset:oscar-corpus/OSCAR-2301", "dataset:wikipedia", "dataset:bjoernp/tagesschau-2018-2023", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
LeoLM
null
null
LeoLM/leo-hessianai-13b
22
8,976
transformers
2023-09-05T22:47:48
--- datasets: - oscar-corpus/OSCAR-2301 - wikipedia - bjoernp/tagesschau-2018-2023 language: - en - de library_name: transformers pipeline_tag: text-generation --- # LAION LeoLM: **L**inguistically **E**nhanced **O**pen **L**anguage **M**odel Meet LeoLM, the first open and commercially available German Foundation Language Model built on Llama-2. Our models extend Llama-2's capabilities into German through continued pretraining on a large corpus of German-language and mostly locality specific text. Thanks to a compute grant at HessianAI's new supercomputer **42**, we release two foundation models trained with 8k context length, [`LeoLM/leo-hessianai-7b`](https://huggingface.co/LeoLM/leo-hessianai-7b) and [`LeoLM/leo-hessianai-13b`](https://huggingface.co/LeoLM/leo-hessianai-13b) under the [Llama-2 community license](https://huggingface.co/meta-llama/Llama-2-70b/raw/main/LICENSE.txt) (70b also coming soon! 👀). With this release, we hope to bring a new wave of opportunities to German open-source and commercial LLM research and accelerate adoption. Read our [blog post]() or our paper (preprint coming soon) for more details! 
*A project by Björn Plüster and Christoph Schuhmann in collaboration with LAION and HessianAI.* ## Model Details - **Finetuned from:** [meta-llama/Llama-2-13b-hf](https://huggingface.co/meta-llama/Llama-2-13b-hf) - **Model type:** Causal decoder-only transformer language model - **Language:** English and German - **License:** [LLAMA 2 COMMUNITY LICENSE AGREEMENT](https://huggingface.co/meta-llama/Llama-2-70b/raw/main/LICENSE.txt) - **Contact:** [LAION Discord](https://discord.com/invite/eq3cAMZtCC) or [Björn Plüster](mailto:bjoern.pl@outlook.de) ## Use in 🤗Transformers First install direct dependencies: ``` pip install transformers torch sentencepiece ``` If you want faster inference using flash-attention2, you need to install these dependencies: ```bash pip install packaging ninja pip install flash-attn==v2.1.1 --no-build-isolation pip install git+https://github.com/HazyResearch/flash-attention.git@v2.1.1#subdirectory=csrc/rotary ``` Then load the model in transformers: ```python from transformers import AutoModelForCausalLM, AutoTokenizer import torch model = AutoModelForCausalLM.from_pretrained( "LeoLM/leo-hessianai-13b", device_map="auto", torch_dtype=torch.float16, trust_remote_code=True # True for flash-attn2 else False ) ``` ## Training parameters ![training_parameters](imgs/training_params.png "Training Hyperparameters") ## Benchmarks ![benchmarks](imgs/benchmarks.png "Benchmark Scores")
2,589
[ [ -0.01511383056640625, -0.0467529296875, 0.0135345458984375, 0.04266357421875, -0.028228759765625, -0.0163726806640625, -0.00788116455078125, -0.04443359375, 0.00647735595703125, 0.03076171875, -0.039398193359375, -0.0285491943359375, -0.053985595703125, -0.0012607574462890625, -0.034820556640625, 0.07696533203125, -0.0198211669921875, 0.01064300537109375, 0.00897216796875, -0.0196075439453125, -0.0018157958984375, -0.027252197265625, -0.03814697265625, -0.05133056640625, 0.03204345703125, 0.0246429443359375, 0.0447998046875, 0.042999267578125, 0.046783447265625, 0.022247314453125, -0.0147857666015625, -0.014068603515625, -0.033416748046875, -0.007389068603515625, 0.004222869873046875, -0.034271240234375, -0.044036865234375, -0.0006742477416992188, 0.04754638671875, 0.017486572265625, -0.011138916015625, 0.0189056396484375, -0.0134124755859375, 0.03155517578125, -0.046234130859375, 0.014373779296875, -0.028472900390625, 0.0039215087890625, -0.016021728515625, 0.00786590576171875, -0.0311737060546875, -0.00304412841796875, 0.01160430908203125, -0.047698974609375, 0.004894256591796875, -0.01837158203125, 0.0985107421875, 0.044097900390625, -0.034637451171875, -0.01209259033203125, -0.048980712890625, 0.058807373046875, -0.051422119140625, 0.034820556640625, 0.032196044921875, 0.028106689453125, -0.00782012939453125, -0.09503173828125, -0.033416748046875, -0.0194854736328125, -0.014984130859375, 0.01181793212890625, -0.039093017578125, -0.010467529296875, 0.016845703125, 0.00870513916015625, -0.031829833984375, -0.0008172988891601562, -0.0282745361328125, -0.007965087890625, 0.04345703125, -0.01441192626953125, -0.00804901123046875, -0.00434112548828125, -0.032684326171875, -0.0190582275390625, -0.058013916015625, -0.00641632080078125, 0.009490966796875, -0.01776123046875, -0.036590576171875, 0.04132080078125, -0.012481689453125, 0.039337158203125, 0.0038356781005859375, -0.021881103515625, 0.03216552734375, -0.01019287109375, -0.027069091796875, 
-0.01029205322265625, 0.064697265625, 0.005084991455078125, 0.0203857421875, 0.005706787109375, -0.02685546875, 0.0001480579376220703, -0.006633758544921875, -0.052764892578125, -0.0166168212890625, 0.01010894775390625, -0.0254364013671875, -0.037628173828125, -0.00971221923828125, -0.04345703125, -0.007747650146484375, -0.00270843505859375, 0.034759521484375, -0.0204315185546875, -0.020904541015625, 0.0206451416015625, 0.018341064453125, 0.035614013671875, 0.0199432373046875, -0.057525634765625, -0.00016701221466064453, 0.0352783203125, 0.049041748046875, 0.0157928466796875, -0.034637451171875, -0.031402587890625, 0.0166168212890625, -0.00885009765625, 0.048370361328125, -0.01128387451171875, -0.0171356201171875, 0.0023651123046875, 0.0067138671875, 0.01279449462890625, -0.03350830078125, 0.044097900390625, -0.05462646484375, 0.032989501953125, 0.0015058517456054688, -0.017822265625, -0.0171661376953125, -0.001453399658203125, -0.042633056640625, 0.097412109375, 0.0178680419921875, -0.06787109375, 0.006595611572265625, -0.040191650390625, -0.05230712890625, -0.005970001220703125, -0.001373291015625, -0.031982421875, -0.004222869873046875, 0.0135955810546875, 0.0364990234375, -0.0443115234375, 0.051055908203125, -0.01265716552734375, -0.021209716796875, 0.016998291015625, -0.0322265625, 0.0821533203125, 0.0161895751953125, -0.056610107421875, 0.00860595703125, -0.050628662109375, -0.015838623046875, 0.0247955322265625, -0.03753662109375, 0.0190887451171875, -0.007091522216796875, 0.0274658203125, 0.0251617431640625, 0.035888671875, -0.0229339599609375, -0.00290679931640625, -0.0247650146484375, 0.035186767578125, 0.054931640625, -0.033050537109375, 0.0203094482421875, -0.00899505615234375, 0.035614013671875, -0.0004057884216308594, 0.01898193359375, -0.003326416015625, -0.034088134765625, -0.10302734375, -0.0255279541015625, 0.018890380859375, 0.05133056640625, -0.05352783203125, 0.043914794921875, -0.0187225341796875, -0.041778564453125, -0.0419921875, 
0.00368499755859375, 0.03875732421875, 0.03717041015625, 0.0250701904296875, -0.01279449462890625, -0.04638671875, -0.06982421875, 0.00934600830078125, -0.0308990478515625, 0.00286102294921875, 0.0243988037109375, 0.0491943359375, -0.0458984375, 0.06719970703125, -0.0287628173828125, -0.0247650146484375, -0.0244598388671875, 0.0020694732666015625, 0.022308349609375, 0.030364990234375, 0.04644775390625, -0.049072265625, -0.03399658203125, -0.00445556640625, -0.0826416015625, -0.01397705078125, 0.0152587890625, -0.00864410400390625, 0.034088134765625, 0.03125, -0.056915283203125, 0.020660400390625, 0.047943115234375, -0.02593994140625, 0.033294677734375, -0.005352020263671875, -0.02325439453125, -0.07421875, 0.017669677734375, -0.00969696044921875, -0.0211181640625, -0.0430908203125, 0.0154876708984375, -0.0018587112426757812, 0.006206512451171875, -0.0625, 0.0736083984375, -0.0208587646484375, -0.005809783935546875, -0.01934814453125, 0.0015516281127929688, -0.016815185546875, 0.028045654296875, 0.0163421630859375, 0.051116943359375, 0.060089111328125, -0.041656494140625, 0.0307769775390625, 0.0269927978515625, -0.026641845703125, 0.015228271484375, -0.07012939453125, 0.004962921142578125, -0.000797271728515625, 0.037384033203125, -0.052490234375, -0.04217529296875, 0.043670654296875, -0.02838134765625, 0.02166748046875, 0.0078887939453125, -0.047607421875, -0.02838134765625, -0.00615692138671875, 0.0290069580078125, 0.061981201171875, -0.052154541015625, 0.048736572265625, 0.0258026123046875, 0.01052093505859375, -0.072021484375, -0.0716552734375, -0.00162506103515625, -0.0213165283203125, -0.0657958984375, 0.017333984375, 0.0033092498779296875, -0.01377105712890625, -0.00598907470703125, 0.0014743804931640625, -0.004802703857421875, 0.0101470947265625, 0.004974365234375, 0.01200103759765625, -0.028472900390625, 0.0033054351806640625, 0.003032684326171875, -0.040863037109375, -0.0029773712158203125, -0.01224517822265625, 0.051177978515625, -0.03717041015625, 
-0.016571044921875, -0.041259765625, 0.001827239990234375, 0.0276336669921875, -0.0170135498046875, 0.05645751953125, 0.0731201171875, -0.031646728515625, 0.0089874267578125, -0.054779052734375, -0.0254974365234375, -0.040771484375, 0.0284271240234375, -0.0209503173828125, -0.076416015625, 0.028289794921875, -0.00798797607421875, 0.0029964447021484375, 0.042083740234375, 0.053985595703125, -0.00930023193359375, 0.0574951171875, 0.07080078125, -0.0210113525390625, 0.04144287109375, -0.0285186767578125, -0.0012969970703125, -0.046661376953125, -0.00817108154296875, -0.022064208984375, -0.0070343017578125, -0.021209716796875, -0.029876708984375, 0.0291900634765625, 0.041107177734375, -0.036346435546875, 0.036102294921875, -0.0164794921875, 0.027008056640625, 0.036468505859375, -0.001934051513671875, 0.0189666748046875, 0.019012451171875, -0.00559234619140625, 0.004825592041015625, -0.059234619140625, -0.055328369140625, 0.07806396484375, 0.04681396484375, 0.043365478515625, 0.0129852294921875, 0.05255126953125, -0.0001405477523803711, 0.04864501953125, -0.0362548828125, 0.04888916015625, 0.00020313262939453125, -0.056671142578125, -0.0135040283203125, -0.0106964111328125, -0.061553955078125, 0.0283355712890625, -0.0002911090850830078, -0.052001953125, 0.00202178955078125, 0.00960540771484375, -0.0021915435791015625, 0.0012960433959960938, -0.034088134765625, 0.06219482421875, -0.0272979736328125, -0.0191497802734375, -0.024871826171875, -0.06585693359375, 0.04425048828125, -0.015167236328125, 0.00860595703125, -0.0186309814453125, -0.006931304931640625, 0.060546875, -0.0196533203125, 0.06964111328125, -0.0006422996520996094, -0.0182342529296875, 0.0266265869140625, -0.00890350341796875, 0.04571533203125, 0.0178070068359375, -0.0142974853515625, 0.03466796875, -0.020263671875, -0.0289154052734375, -0.018798828125, 0.03839111328125, -0.07257080078125, -0.044769287109375, -0.047454833984375, -0.034698486328125, 0.0080718994140625, 0.0202789306640625, 0.04388427734375, 
0.0034885406494140625, -0.0168914794921875, 0.004398345947265625, 0.037750244140625, -0.01593017578125, 0.04522705078125, 0.056060791015625, -0.0195159912109375, -0.031707763671875, 0.055145263671875, -0.0229644775390625, 0.0107879638671875, 0.0188751220703125, 0.007747650146484375, -0.017578125, -0.01120758056640625, -0.043731689453125, 0.053253173828125, -0.0775146484375, -0.0162506103515625, -0.05389404296875, -0.0213775634765625, -0.02386474609375, -0.016693115234375, -0.0251617431640625, -0.0299530029296875, -0.052886962890625, -0.00856781005859375, 0.033538818359375, 0.051788330078125, -0.023529052734375, 0.056671142578125, -0.044830322265625, 0.0158538818359375, 0.0095062255859375, 0.00504302978515625, 0.001621246337890625, -0.05352783203125, -0.0186004638671875, -0.009002685546875, -0.0297088623046875, -0.055328369140625, 0.02679443359375, 0.01386260986328125, 0.031768798828125, 0.038970947265625, -0.0114593505859375, 0.034454345703125, -0.035919189453125, 0.04974365234375, 0.022979736328125, -0.07415771484375, 0.03875732421875, -0.0254058837890625, 0.01383209228515625, 0.0257568359375, 0.03179931640625, -0.040863037109375, -0.0188140869140625, -0.05419921875, -0.060455322265625, 0.08062744140625, 0.03057861328125, 0.01549530029296875, 0.00354766845703125, 0.022003173828125, -0.01421356201171875, 0.01483154296875, -0.0936279296875, -0.03759765625, -0.00743865966796875, -0.0191497802734375, -0.0176544189453125, -0.03350830078125, -0.0258331298828125, -0.026580810546875, 0.06842041015625, 0.00347900390625, 0.040771484375, 0.0065460205078125, -0.0251617431640625, -0.0189666748046875, -0.0038242340087890625, 0.051513671875, 0.037139892578125, -0.0031490325927734375, 0.0104522705078125, 0.0279998779296875, -0.041046142578125, 0.02783203125, 0.0238189697265625, -0.035400390625, 0.00899505615234375, 0.059661865234375, 0.073974609375, 0.022369384765625, -0.05230712890625, 0.040435791015625, 0.01428985595703125, 0.0016078948974609375, -0.03570556640625, 
-0.0010547637939453125, 0.0220184326171875, 0.0350341796875, 0.0169219970703125, -0.01190185546875, -0.0161590576171875, -0.0162811279296875, 0.0212860107421875, 0.019378662109375, -0.032501220703125, -0.0257110595703125, 0.06610107421875, 0.0189971923828125, -0.0179595947265625, 0.041534423828125, -0.0160980224609375, -0.04437255859375, 0.0491943359375, 0.056610107421875, 0.06964111328125, -0.0213775634765625, 0.02587890625, 0.0277557373046875, 0.0271148681640625, -0.0153045654296875, 0.023406982421875, 0.006893157958984375, -0.0533447265625, -0.0158233642578125, -0.050811767578125, -0.0014524459838867188, 0.001499176025390625, -0.0298004150390625, 0.04095458984375, -0.02813720703125, -0.0076141357421875, -0.021728515625, -0.00811767578125, -0.04180908203125, 0.01314544677734375, -0.0021114349365234375, 0.07379150390625, -0.056915283203125, 0.08984375, 0.04437255859375, -0.035247802734375, -0.048126220703125, -0.00270843505859375, -0.0025482177734375, -0.0723876953125, 0.06610107421875, 0.0146942138671875, -0.01499176025390625, 0.01776123046875, -0.0438232421875, -0.0894775390625, 0.10223388671875, 0.04620361328125, -0.0321044921875, 0.0139312744140625, 0.01470184326171875, 0.04620361328125, -0.027801513671875, 0.026763916015625, 0.055511474609375, 0.04498291015625, 0.01561737060546875, -0.06195068359375, 0.00685882568359375, -0.01023101806640625, -0.01342010498046875, -0.002117156982421875, -0.0704345703125, 0.0721435546875, -0.0270843505859375, -0.0198974609375, 0.021759033203125, 0.07183837890625, 0.0286102294921875, 0.0151824951171875, 0.0208587646484375, 0.040435791015625, 0.0521240234375, -0.0084228515625, 0.08807373046875, -0.0264434814453125, 0.044769287109375, 0.069091796875, -0.0243072509765625, 0.06756591796875, 0.0285186767578125, -0.0139312744140625, 0.03765869140625, 0.052490234375, -0.00688934326171875, 0.050811767578125, -0.003833770751953125, -0.0281982421875, -0.01215362548828125, -0.006561279296875, -0.047760009765625, 0.0269317626953125, 
0.01255035400390625, -0.023345947265625, -0.016815185546875, -0.007007598876953125, 0.01373291015625, -0.0362548828125, 0.01082611083984375, 0.0322265625, 0.02642822265625, -0.0287628173828125, 0.06756591796875, 0.029296875, 0.062164306640625, -0.05316162109375, 0.0196533203125, -0.0225067138671875, 0.025909423828125, -0.0124359130859375, -0.038421630859375, 0.00946044921875, 0.00969696044921875, 0.0004782676696777344, -0.0018091201782226562, 0.05255126953125, -0.0300445556640625, -0.049591064453125, 0.054168701171875, 0.0239715576171875, 0.0295257568359375, 0.019256591796875, -0.06658935546875, 0.0185546875, -0.0000591278076171875, -0.04205322265625, 0.0147857666015625, 0.005252838134765625, -0.006595611572265625, 0.06280517578125, 0.0335693359375, -0.0219268798828125, 0.00926971435546875, -0.0009417533874511719, 0.0675048828125, -0.03436279296875, -0.0193023681640625, -0.059783935546875, 0.04925537109375, 0.0208587646484375, -0.044189453125, 0.0418701171875, 0.0318603515625, 0.06536865234375, -0.02008056640625, 0.039642333984375, -0.0159454345703125, 0.0009446144104003906, -0.03399658203125, 0.059722900390625, -0.056610107421875, 0.013946533203125, -0.01470947265625, -0.073974609375, -0.024627685546875, 0.06927490234375, -0.0079498291015625, 0.007350921630859375, 0.050384521484375, 0.059051513671875, -0.0158233642578125, -0.01276397705078125, 0.006526947021484375, 0.0247802734375, 0.0186920166015625, 0.050506591796875, 0.049835205078125, -0.057647705078125, 0.045562744140625, -0.031982421875, -0.0218505859375, -0.03076171875, -0.06463623046875, -0.06939697265625, -0.03857421875, -0.0214996337890625, -0.0301055908203125, -0.004703521728515625, 0.047698974609375, 0.051971435546875, -0.0611572265625, -0.03546142578125, -0.00910186767578125, 0.005489349365234375, -0.0007715225219726562, -0.011932373046875, 0.037445068359375, -0.00562286376953125, -0.0487060546875, 0.028106689453125, 0.005023956298828125, 0.0251617431640625, -0.0238037109375, -0.035369873046875, 
-0.0281982421875, -0.005664825439453125, 0.042938232421875, 0.0224609375, -0.08148193359375, -0.0079498291015625, 0.0028324127197265625, -0.023529052734375, 0.0034637451171875, 0.037750244140625, -0.05316162109375, 0.0025844573974609375, 0.0202178955078125, 0.03369140625, 0.065673828125, -0.0295257568359375, 0.03802490234375, -0.04974365234375, 0.05126953125, 0.0067291259765625, 0.0310821533203125, 0.0246429443359375, -0.022705078125, 0.0667724609375, 0.0228424072265625, -0.0273895263671875, -0.06597900390625, 0.03228759765625, -0.0662841796875, -0.01171875, 0.09564208984375, -0.020172119140625, -0.00778961181640625, 0.0128631591796875, -0.00899505615234375, 0.035552978515625, -0.01102447509765625, 0.039154052734375, 0.048370361328125, 0.0065765380859375, -0.01580810546875, -0.035400390625, 0.032379150390625, 0.04266357421875, -0.0584716796875, -0.01256561279296875, 0.0276336669921875, 0.0252532958984375, 0.01206207275390625, 0.059478759765625, 0.0036182403564453125, 0.0335693359375, -0.0080108642578125, 0.01934814453125, 0.0003688335418701172, -0.039459228515625, -0.049407958984375, -0.01265716552734375, -0.011505126953125, 0.005039215087890625 ] ]
marefa-nlp/marefa-ner
2021-12-04T05:21:57.000Z
[ "transformers", "pytorch", "xlm-roberta", "token-classification", "ar", "dataset:Marefa-NER", "autotrain_compatible", "endpoints_compatible", "region:us" ]
token-classification
marefa-nlp
null
null
marefa-nlp/marefa-ner
13
8,975
transformers
2022-03-02T23:29:05
--- language: ar datasets: - Marefa-NER widget: - text: "في استاد القاهرة، بدأ حفل افتتاح بطولة كأس الأمم الأفريقية بحضور رئيس الجمهورية و رئيس الاتحاد الدولي لكرة القدم" --- # Tebyan تبيـان ## Marefa Arabic Named Entity Recognition Model ## نموذج المعرفة لتصنيف أجزاء النص <p align="center"> <img src="https://huggingface.co/marefa-nlp/marefa-ner/resolve/main/assets/marefa-tebyan-banner.png" alt="Marfa Arabic NER Model" width="600"/> </p? --------- **Version**: 1.3 **Last Update:** 3-12-2021 ## Model description **Marefa-NER** is a Large Arabic Named Entity Recognition (NER) model built on a completely new dataset and targets to extract up to 9 different types of entities ``` Person, Location, Organization, Nationality, Job, Product, Event, Time, Art-Work ``` نموذج المعرفة لتصنيف أجزاء النص. نموذج جديد كليا من حيث البيانات المستخدمة في تدريب النموذج. كذلك يستهدف النموذج تصنيف حتى 9 أنواع مختلفة من أجزاء النص ``` شخص - مكان - منظمة - جنسية - وظيفة - منتج - حدث - توقيت - عمل إبداعي ``` ## How to use كيف تستخدم النموذج *You can test the model quickly by checking this [Colab notebook](https://colab.research.google.com/drive/1OGp9Wgm-oBM5BBhTLx6Qow4dNRSJZ-F5?usp=sharing)* ---- Install the following Python packages `$ pip3 install transformers==4.8.0 nltk==3.5 protobuf==3.15.3 torch==1.9.0 ` > If you are using `Google Colab`, please restart your runtime after installing the packages. 
----------- ```python from transformers import AutoTokenizer, AutoModelForTokenClassification import torch import numpy as np import nltk nltk.download('punkt') from nltk.tokenize import word_tokenize custom_labels = ["O", "B-job", "I-job", "B-nationality", "B-person", "I-person", "B-location","B-time", "I-time", "B-event", "I-event", "B-organization", "I-organization", "I-location", "I-nationality", "B-product", "I-product", "B-artwork", "I-artwork"] def _extract_ner(text: str, model: AutoModelForTokenClassification, tokenizer: AutoTokenizer, start_token: str="▁"): tokenized_sentence = tokenizer([text], padding=True, truncation=True, return_tensors="pt") tokenized_sentences = tokenized_sentence['input_ids'].numpy() with torch.no_grad(): output = model(**tokenized_sentence) last_hidden_states = output[0].numpy() label_indices = np.argmax(last_hidden_states[0], axis=1) tokens = tokenizer.convert_ids_to_tokens(tokenized_sentences[0]) special_tags = set(tokenizer.special_tokens_map.values()) grouped_tokens = [] for token, label_idx in zip(tokens, label_indices): if token not in special_tags: if not token.startswith(start_token) and len(token.replace(start_token,"").strip()) > 0: grouped_tokens[-1]["token"] += token else: grouped_tokens.append({"token": token, "label": custom_labels[label_idx]}) # extract entities ents = [] prev_label = "O" for token in grouped_tokens: label = token["label"].replace("I-","").replace("B-","") if token["label"] != "O": if label != prev_label: ents.append({"token": [token["token"]], "label": label}) else: ents[-1]["token"].append(token["token"]) prev_label = label # group tokens ents = [{"token": "".join(rec["token"]).replace(start_token," ").strip(), "label": rec["label"]} for rec in ents ] return ents model_cp = "marefa-nlp/marefa-ner" tokenizer = AutoTokenizer.from_pretrained(model_cp) model = AutoModelForTokenClassification.from_pretrained(model_cp, num_labels=len(custom_labels)) samples = [ "تلقى تعليمه في الكتاب ثم انضم الى الأزهر 
عام 1873م. تعلم على يد السيد جمال الدين الأفغاني والشيخ محمد عبده", "بعد عودته إلى القاهرة، التحق نجيب الريحاني فرقة جورج أبيض، الذي كان قد ضمَّ - قُبيل ذلك - فرقته إلى فرقة سلامة حجازي . و منها ذاع صيته", "في استاد القاهرة، قام حفل افتتاح بطولة كأس الأمم الأفريقية بحضور رئيس الجمهورية و رئيس الاتحاد الدولي لكرة القدم", "من فضلك أرسل هذا البريد الى صديقي جلال الدين في تمام الساعة الخامسة صباحا في يوم الثلاثاء القادم", "امبارح اتفرجت على مباراة مانشستر يونايتد مع ريال مدريد في غياب الدون كرستيانو رونالدو", "لا تنسى تصحيني الساعة سبعة, و ضيف في الجدول اني احضر مباراة نادي النصر غدا", ] # [optional] samples = [ " ".join(word_tokenize(sample.strip())) for sample in samples if sample.strip() != "" ] for sample in samples: ents = _extract_ner(text=sample, model=model, tokenizer=tokenizer, start_token="▁") print(sample) for ent in ents: print("\t",ent["token"],"==>",ent["label"]) print("========\n") ``` Output ``` تلقى تعليمه في الكتاب ثم انضم الى الأزهر عام 1873م . تعلم على يد السيد جمال الدين الأفغاني والشيخ محمد عبده الأزهر ==> organization عام 1873م ==> time السيد جمال الدين الأفغاني ==> person محمد عبده ==> person ======== بعد عودته إلى القاهرة، التحق نجيب الريحاني فرقة جورج أبيض، الذي كان قد ضمَّ - قُبيل ذلك - فرقته إلى فرقة سلامة حجازي . 
و منها ذاع صيته القاهرة، ==> location نجيب الريحاني ==> person فرقة جورج أبيض، ==> organization فرقة سلامة حجازي ==> organization ======== في استاد القاهرة، قام حفل افتتاح بطولة كأس الأمم الأفريقية بحضور رئيس الجمهورية و رئيس الاتحاد الدولي لكرة القدم استاد القاهرة، ==> location بطولة كأس الأمم الأفريقية ==> event رئيس الجمهورية ==> job رئيس ==> job الاتحاد الدولي لكرة القدم ==> organization ======== من فضلك أرسل هذا البريد الى صديقي جلال الدين في تمام الساعة الخامسة صباحا في يوم الثلاثاء القادم جلال الدين ==> person الساعة الخامسة صباحا ==> time يوم الثلاثاء القادم ==> time ======== امبارح اتفرجت على مباراة مانشستر يونايتد مع ريال مدريد في غياب الدون كرستيانو رونالدو مانشستر يونايتد ==> organization ريال مدريد ==> organization كرستيانو رونالدو ==> person ======== لا تنسى تصحيني الساعة سبعة , و ضيف في الجدول اني احضر مباراة نادي النصر غدا الساعة سبعة ==> time نادي النصر ==> organization غدا ==> time ======== ``` ## Fine-Tuning Check this [notebook](https://colab.research.google.com/drive/1WUYrnmDFFEItqGMvbyjqZEJJqwU7xQR-?usp=sharing) to fine-tune the NER model ## Evaluation We tested the model agains a test set of 1959 sentences. 
The results is in the follwing table | type | f1-score | precision | recall | support | |:-------------|-----------:|------------:|---------:|----------:| | person | 0.93298 | 0.931479 | 0.934487 | 4335 | | location | 0.891537 | 0.896926 | 0.886212 | 4939 | | time | 0.873003 | 0.876087 | 0.869941 | 1853 | | nationality | 0.871246 | 0.843153 | 0.901277 | 2350 | | job | 0.837656 | 0.79912 | 0.880097 | 2477 | | organization | 0.781317 | 0.773328 | 0.789474 | 2299 | | event | 0.686695 | 0.733945 | 0.645161 | 744 | | artwork | 0.653552 | 0.678005 | 0.630802 | 474 | | product | 0.625483 | 0.553531 | 0.718935 | 338 | | **weighted avg** | 0.859008 | 0.852365 | 0.86703 | 19809 | | **micro avg** | 0.858771 | 0.850669 | 0.86703 | 19809 | | **macro avg** | 0.79483 | 0.787286 | 0.806265 | 19809 | ## Acknowledgment شكر و تقدير قام بإعداد البيانات التي تم تدريب النموذج عليها, مجموعة من المتطوعين الذين قضوا ساعات يقومون بتنقيح البيانات و مراجعتها - على سيد عبد الحفيظ - إشراف - نرمين محمد عطيه - صلاح خيرالله - احمد علي عبدربه - عمر بن عبد العزيز سليمان - محمد ابراهيم الجمال - عبدالرحمن سلامه خلف - إبراهيم كمال محمد سليمان - حسن مصطفى حسن - أحمد فتحي سيد - عثمان مندو - عارف الشريف - أميرة محمد محمود - حسن سعيد حسن - عبد العزيز علي البغدادي - واثق عبدالملك الشويطر - عمرو رمضان عقل الحفناوي - حسام الدين أحمد على - أسامه أحمد محمد محمد - حاتم محمد المفتي - عبد الله دردير - أدهم البغدادي - أحمد صبري - عبدالوهاب محمد محمد - أحمد محمد عوض
8,064
[ [ -0.027557373046875, -0.057830810546875, 0.01546478271484375, 0.0249786376953125, -0.0260009765625, -0.00864410400390625, -0.0013799667358398438, -0.0265350341796875, 0.04730224609375, 0.0215301513671875, -0.0282135009765625, -0.056884765625, -0.052520751953125, 0.023193359375, -0.034942626953125, 0.06768798828125, 0.005977630615234375, -0.00374603271484375, 0.01483917236328125, -0.005023956298828125, -0.032012939453125, -0.04058837890625, -0.0648193359375, -0.01085662841796875, 0.023193359375, 0.016265869140625, 0.04730224609375, 0.0181732177734375, 0.035797119140625, 0.033935546875, -0.003276824951171875, 0.0169219970703125, -0.01120758056640625, -0.00922393798828125, 0.0021610260009765625, -0.03460693359375, -0.0233001708984375, -0.0033416748046875, 0.033843994140625, 0.039764404296875, -0.0152435302734375, 0.0254669189453125, 0.002330780029296875, 0.047454833984375, -0.03802490234375, 0.00669097900390625, -0.021453857421875, 0.0006375312805175781, -0.01168060302734375, -0.0118255615234375, -0.0027828216552734375, -0.04852294921875, -0.005130767822265625, -0.038116455078125, 0.01311492919921875, 0.0146026611328125, 0.0960693359375, 0.0024394989013671875, -0.027801513671875, -0.035125732421875, -0.03662109375, 0.06536865234375, -0.07037353515625, 0.005157470703125, 0.04150390625, -0.00499725341796875, -0.01114654541015625, -0.052520751953125, -0.060272216796875, -0.00527191162109375, -0.0335693359375, 0.0145263671875, -0.00629425048828125, -0.0305633544921875, 0.0131683349609375, 0.027099609375, -0.033599853515625, -0.01947021484375, -0.0266571044921875, -0.01242828369140625, 0.049041748046875, 0.0215301513671875, 0.0411376953125, -0.034881591796875, -0.0257110595703125, -0.01470947265625, -0.01513671875, 0.0255279541015625, 0.021881103515625, 0.00714111328125, -0.0290069580078125, 0.04388427734375, -0.01279449462890625, 0.051422119140625, 0.0186920166015625, -0.016326904296875, 0.04150390625, -0.0267791748046875, -0.035980224609375, 0.00406646728515625, 
0.07830810546875, 0.045989990234375, 0.0011043548583984375, 0.002750396728515625, -0.0109405517578125, 0.00626373291015625, 0.001800537109375, -0.056427001953125, -0.00968170166015625, -0.0010547637939453125, -0.0394287109375, -0.0305938720703125, 0.01348114013671875, -0.07818603515625, -0.0123291015625, 0.0125274658203125, 0.033447265625, -0.0282135009765625, -0.027679443359375, 0.00807952880859375, -0.00007605552673339844, 0.01116943359375, 0.0157318115234375, -0.07421875, 0.027923583984375, 0.0228729248046875, 0.058258056640625, 0.01293182373046875, -0.01132965087890625, -0.000762939453125, 0.01335906982421875, -0.0288238525390625, 0.034759521484375, -0.021392822265625, -0.033172607421875, -0.0195770263671875, 0.01119232177734375, -0.03741455078125, -0.03424072265625, 0.0278472900390625, -0.0298004150390625, 0.041046142578125, -0.00856781005859375, -0.03863525390625, -0.0294952392578125, 0.0243682861328125, -0.0439453125, 0.09600830078125, 0.006832122802734375, -0.06744384765625, 0.01403045654296875, -0.053253173828125, -0.027008056640625, -0.0036029815673828125, 0.0023651123046875, -0.0347900390625, 0.0008392333984375, 0.0325927734375, 0.03173828125, -0.01064300537109375, 0.0203094482421875, 0.0009264945983886719, -0.012237548828125, 0.022613525390625, -0.01485443115234375, 0.08062744140625, 0.01092529296875, -0.04803466796875, -0.0023555755615234375, -0.07269287109375, 0.0211944580078125, 0.02593994140625, -0.0260009765625, -0.017852783203125, -0.036285400390625, 0.00872802734375, 0.031341552734375, 0.033966064453125, -0.03436279296875, 0.01355743408203125, -0.0631103515625, 0.020538330078125, 0.0679931640625, 0.0157623291015625, 0.02203369140625, -0.0357666015625, 0.03607177734375, 0.002056121826171875, -0.0006999969482421875, 0.01548004150390625, -0.02886962890625, -0.0721435546875, -0.00406646728515625, 0.020355224609375, 0.0472412109375, -0.034942626953125, 0.0465087890625, -0.00794219970703125, -0.0478515625, -0.044464111328125, 0.005527496337890625, 
0.033447265625, 0.050262451171875, 0.03240966796875, -0.0135345458984375, -0.056396484375, -0.0478515625, -0.02935791015625, -0.01702880859375, 0.0112762451171875, 0.0246429443359375, 0.06195068359375, -0.0148162841796875, 0.05596923828125, -0.04864501953125, -0.024017333984375, -0.02325439453125, 0.0023670196533203125, 0.05291748046875, 0.048370361328125, 0.049407958984375, -0.067626953125, -0.055908203125, -0.00576019287109375, -0.061431884765625, 0.017364501953125, -0.0044403076171875, -0.0173187255859375, 0.03582763671875, 0.0267791748046875, -0.0496826171875, 0.0548095703125, 0.0279541015625, -0.0555419921875, 0.044830322265625, -0.0231170654296875, 0.0134429931640625, -0.09765625, 0.0211334228515625, -0.0186767578125, 0.009552001953125, -0.0379638671875, -0.00846099853515625, -0.01445770263671875, 0.0131683349609375, -0.02001953125, 0.06378173828125, -0.043121337890625, 0.0282135009765625, -0.0015239715576171875, -0.0029125213623046875, -0.0013799667358398438, 0.036041259765625, -0.01110076904296875, 0.04705810546875, 0.0340576171875, -0.0445556640625, 0.01470947265625, 0.028717041015625, -0.034820556640625, 0.01406097412109375, -0.04986572265625, 0.00804901123046875, -0.006511688232421875, 0.0019044876098632812, -0.07904052734375, -0.0280609130859375, 0.0389404296875, -0.05450439453125, 0.01824951171875, 0.0128326416015625, -0.034454345703125, -0.035980224609375, -0.01702880859375, 0.006275177001953125, 0.02740478515625, -0.021636962890625, 0.04791259765625, 0.01436614990234375, -0.00626373291015625, -0.052001953125, -0.060455322265625, 0.018463134765625, -0.0167083740234375, -0.047119140625, 0.0290374755859375, -0.00775909423828125, -0.01451873779296875, 0.00795745849609375, -0.007518768310546875, -0.0164947509765625, 0.01116180419921875, 0.027801513671875, 0.023468017578125, -0.01184844970703125, 0.004283905029296875, -0.033050537109375, -0.00946044921875, 0.004367828369140625, 0.0174407958984375, 0.057525634765625, -0.030029296875, -0.0206298828125, 
-0.04803466796875, 0.026275634765625, 0.02899169921875, -0.0099334716796875, 0.0902099609375, 0.058685302734375, -0.028350830078125, 0.01454925537109375, -0.047515869140625, -0.00676727294921875, -0.037841796875, 0.027069091796875, -0.0233612060546875, -0.049041748046875, 0.042266845703125, -0.0013952255249023438, -0.0171356201171875, 0.06573486328125, 0.046112060546875, -0.0099945068359375, 0.0736083984375, 0.049102783203125, -0.016265869140625, 0.0207977294921875, -0.0487060546875, 0.036468505859375, -0.05657958984375, -0.04583740234375, -0.032501220703125, -0.0296630859375, -0.051055908203125, -0.01177978515625, 0.0165252685546875, 0.003078460693359375, -0.0172271728515625, 0.0280914306640625, -0.06060791015625, 0.01482391357421875, 0.0312347412109375, 0.01187896728515625, -0.00466156005859375, -0.0159912109375, -0.0213470458984375, -0.0086669921875, -0.042083740234375, -0.039764404296875, 0.0772705078125, -0.0004725456237792969, 0.037506103515625, 0.01666259765625, 0.066650390625, 0.006687164306640625, 0.0222625732421875, -0.037689208984375, 0.0509033203125, 0.00612640380859375, -0.05938720703125, -0.0307769775390625, -0.01099395751953125, -0.08465576171875, 0.0276947021484375, -0.01224517822265625, -0.07159423828125, 0.0294952392578125, -0.0089569091796875, -0.033477783203125, 0.0311737060546875, -0.036773681640625, 0.06280517578125, -0.01371002197265625, -0.0267791748046875, 0.004207611083984375, -0.0655517578125, 0.020263671875, 0.01226806640625, 0.031005859375, -0.01226043701171875, -0.0022716522216796875, 0.08135986328125, -0.057098388671875, 0.0595703125, -0.004001617431640625, 0.01348876953125, 0.0278167724609375, 0.002399444580078125, 0.04132080078125, 0.0115203857421875, -0.012969970703125, -0.00553131103515625, -0.01087188720703125, -0.027435302734375, -0.02691650390625, 0.06475830078125, -0.06689453125, -0.042083740234375, -0.056884765625, -0.0236663818359375, 0.01324462890625, 0.042572021484375, 0.039520263671875, 0.0322265625, 
-0.00014388561248779297, 0.0197601318359375, 0.02911376953125, -0.0185699462890625, 0.054107666015625, 0.0210418701171875, -0.002780914306640625, -0.04150390625, 0.05902099609375, 0.012939453125, -0.01092529296875, 0.01200103759765625, 0.00899505615234375, -0.023468017578125, -0.020965576171875, -0.0045928955078125, 0.0264434814453125, -0.046234130859375, -0.0240478515625, -0.07275390625, -0.00978851318359375, -0.057281494140625, -0.0193634033203125, -0.015625, -0.028045654296875, -0.03411865234375, -0.024017333984375, 0.0394287109375, 0.052520751953125, -0.0083160400390625, 0.026123046875, -0.035736083984375, 0.02471923828125, 0.0103912353515625, 0.0086212158203125, 0.0165252685546875, -0.0307464599609375, -0.02227783203125, -0.00449371337890625, -0.019287109375, -0.0830078125, 0.07000732421875, 0.005664825439453125, 0.0249786376953125, 0.02197265625, -0.0025882720947265625, 0.06475830078125, -0.0098114013671875, 0.06549072265625, 0.019805908203125, -0.06463623046875, 0.054901123046875, -0.010650634765625, 0.020355224609375, 0.051971435546875, 0.03717041015625, -0.057525634765625, -0.0199432373046875, -0.0693359375, -0.09075927734375, 0.056121826171875, 0.025299072265625, 0.01456451416015625, -0.02294921875, 0.00911712646484375, -0.0177001953125, 0.0184478759765625, -0.059661865234375, -0.05224609375, -0.00335693359375, -0.022491455078125, 0.0058135986328125, -0.032745361328125, -0.005641937255859375, -0.045623779296875, 0.06353759765625, 0.029022216796875, 0.0280303955078125, 0.03228759765625, -0.0038356781005859375, 0.00640869140625, 0.0268707275390625, 0.03692626953125, 0.053558349609375, -0.013641357421875, 0.00865936279296875, 0.02880859375, -0.04364013671875, 0.01303863525390625, -0.01554107666015625, -0.032379150390625, 0.01529693603515625, 0.031402587890625, 0.05560302734375, 0.018890380859375, -0.02044677734375, 0.043609619140625, -0.0104827880859375, -0.0161895751953125, -0.0518798828125, 0.0010538101196289062, 0.0180511474609375, -0.00839996337890625, 
0.03436279296875, 0.00909423828125, 0.0045013427734375, -0.03973388671875, 0.001163482666015625, 0.0257415771484375, -0.0197601318359375, -0.001373291015625, 0.055389404296875, -0.00496673583984375, -0.025665283203125, 0.069580078125, -0.0104522705078125, -0.047576904296875, 0.0638427734375, 0.046600341796875, 0.060150146484375, -0.03515625, 0.009979248046875, 0.06744384765625, 0.0233001708984375, 0.006656646728515625, 0.0408935546875, 0.01128387451171875, -0.0355224609375, -0.0035877227783203125, -0.0732421875, -0.0006737709045410156, 0.0236053466796875, -0.0460205078125, 0.0023975372314453125, -0.047698974609375, -0.0318603515625, -0.0002853870391845703, 0.0102691650390625, -0.056915283203125, 0.03619384765625, -0.0017719268798828125, 0.055328369140625, -0.055389404296875, 0.0496826171875, 0.0518798828125, -0.04150390625, -0.08953857421875, -0.0038299560546875, -0.0114288330078125, -0.059906005859375, 0.06658935546875, 0.02691650390625, -0.007785797119140625, 0.004932403564453125, -0.04608154296875, -0.09039306640625, 0.076904296875, -0.0162506103515625, -0.0221405029296875, 0.00124359130859375, 0.0132904052734375, 0.035675048828125, -0.015716552734375, 0.03759765625, 0.027679443359375, 0.0494384765625, -0.004657745361328125, -0.06988525390625, 0.04327392578125, -0.053680419921875, 0.00213623046875, 0.00957489013671875, -0.0650634765625, 0.0787353515625, -0.016693115234375, -0.0224151611328125, 0.01184844970703125, 0.0557861328125, 0.0057220458984375, 0.029571533203125, 0.03466796875, 0.057525634765625, 0.053497314453125, -0.01285552978515625, 0.06561279296875, -0.0276641845703125, 0.04107666015625, 0.06304931640625, 0.0096588134765625, 0.052978515625, 0.035125732421875, -0.02996826171875, 0.05780029296875, 0.032989501953125, -0.027099609375, 0.044525146484375, 0.0027484893798828125, -0.0335693359375, -0.005298614501953125, -0.002399444580078125, -0.035400390625, 0.027435302734375, 0.0164337158203125, -0.03662109375, -0.0156402587890625, 0.007061004638671875, 
0.0244140625, 0.0018177032470703125, -0.00913238525390625, 0.0548095703125, 0.0039825439453125, -0.040740966796875, 0.05670166015625, 0.01146697998046875, 0.0628662109375, -0.040771484375, 0.005260467529296875, -0.003223419189453125, 0.018463134765625, -0.035675048828125, -0.049102783203125, 0.029083251953125, -0.0106048583984375, -0.00838470458984375, -0.00621795654296875, 0.05615234375, -0.016143798828125, -0.0440673828125, 0.019287109375, 0.01812744140625, 0.0124969482421875, 0.00206756591796875, -0.059783935546875, -0.01515960693359375, 0.00608062744140625, -0.041595458984375, 0.0111846923828125, 0.01837158203125, 0.006656646728515625, 0.04498291015625, 0.0611572265625, 0.006679534912109375, 0.020477294921875, -0.01270294189453125, 0.06439208984375, -0.07635498046875, -0.035797119140625, -0.076904296875, 0.03857421875, -0.022491455078125, -0.032135009765625, 0.0753173828125, 0.0557861328125, 0.056182861328125, -0.0093231201171875, 0.0496826171875, -0.0330810546875, 0.04058837890625, -0.02056884765625, 0.0595703125, -0.0484619140625, -0.0184173583984375, -0.0220794677734375, -0.0400390625, -0.0161590576171875, 0.05560302734375, -0.043365478515625, 0.012908935546875, 0.055419921875, 0.066650390625, 0.01324462890625, -0.0111083984375, 0.0014553070068359375, 0.03460693359375, 0.0011844635009765625, 0.04656982421875, 0.042449951171875, -0.058258056640625, 0.040008544921875, -0.041412353515625, 0.004573822021484375, -0.035858154296875, -0.042999267578125, -0.0643310546875, -0.035247802734375, -0.0218505859375, -0.035552978515625, -0.00939178466796875, 0.061737060546875, 0.0257415771484375, -0.06549072265625, -0.017425537109375, 0.004314422607421875, 0.007537841796875, -0.0177154541015625, -0.0265655517578125, 0.0648193359375, -0.0007596015930175781, -0.056884765625, 0.01386260986328125, -0.0006575584411621094, 0.0128936767578125, 0.0159912109375, 0.003162384033203125, -0.050048828125, 0.005519866943359375, 0.0251617431640625, 0.01204681396484375, -0.05377197265625, 
-0.0037174224853515625, 0.0045013427734375, -0.041351318359375, 0.00812530517578125, 0.01325225830078125, -0.037322998046875, 0.02056884765625, 0.032470703125, 0.0198974609375, 0.04608154296875, -0.0003769397735595703, 0.005863189697265625, -0.0357666015625, 0.0084381103515625, 0.0254058837890625, 0.0186767578125, 0.01541900634765625, -0.05291748046875, 0.046600341796875, 0.0186920166015625, -0.0447998046875, -0.057891845703125, -0.0104522705078125, -0.0706787109375, -0.0198974609375, 0.0628662109375, -0.0022449493408203125, -0.037689208984375, -0.00012743473052978516, -0.0173797607421875, 0.03436279296875, -0.03338623046875, 0.05804443359375, 0.060089111328125, -0.01186370849609375, 0.01335906982421875, -0.05120849609375, 0.03564453125, 0.050048828125, -0.047821044921875, -0.0251312255859375, 0.00316619873046875, 0.0458984375, 0.0211639404296875, 0.07305908203125, -0.00946807861328125, 0.014404296875, -0.0006833076477050781, 0.0177459716796875, 0.00266265869140625, 0.005748748779296875, -0.00940704345703125, 0.002197265625, -0.02435302734375, -0.02777099609375 ] ]
sasha/regardv3
2022-08-17T18:03:37.000Z
[ "transformers", "pytorch", "bert", "text-classification", "license:cc-by-4.0", "endpoints_compatible", "has_space", "region:us" ]
text-classification
sasha
null
null
sasha/regardv3
0
8,954
transformers
2022-08-17T16:58:42
--- license: cc-by-4.0 --- # BERT Regard classification model This model is the result of a project entitled [Towards Controllable Biases in Language Generation](https://github.com/ewsheng/controllable-nlg-biases). It consists of a BERT classifier (no ensemble) trained on 1.7K samples of biased language. *Regard* measures language polarity towards and social perceptions of a demographic (compared to sentiment, which only measures overall language polarity). ### BibTeX entry and citation info ```bibtex @article{sheng2019woman, title={The woman worked as a babysitter: On biases in language generation}, author={Sheng, Emily and Chang, Kai-Wei and Natarajan, Premkumar and Peng, Nanyun}, journal={arXiv preprint arXiv:1909.01326}, year={2019} } ```
769
[ [ -0.0191650390625, -0.0255279541015625, 0.0098419189453125, 0.0162506103515625, -0.01116943359375, -0.0273590087890625, -0.012603759765625, -0.03936767578125, -0.00042366981506347656, 0.030242919921875, -0.043121337890625, -0.03485107421875, -0.04888916015625, -0.0234832763671875, -0.03143310546875, 0.1007080078125, 0.0282440185546875, 0.04266357421875, -0.0110931396484375, 0.01340484619140625, -0.002407073974609375, -0.055145263671875, -0.049530029296875, -0.0361328125, 0.0364990234375, 0.01800537109375, 0.02850341796875, 0.019256591796875, 0.01885986328125, 0.01177215576171875, -0.031646728515625, -0.03662109375, -0.033203125, 0.0019130706787109375, -0.008636474609375, -0.03900146484375, -0.032501220703125, 0.032196044921875, 0.03436279296875, 0.06072998046875, -0.0105438232421875, 0.021392822265625, 0.017791748046875, 0.042266845703125, -0.0298919677734375, 0.0297088623046875, -0.045074462890625, 0.0002675056457519531, -0.01442718505859375, 0.022552490234375, -0.0523681640625, -0.046173095703125, 0.029022216796875, -0.004703521728515625, 0.049896240234375, -0.00824737548828125, 0.07476806640625, 0.010498046875, -0.03363037109375, -0.017974853515625, -0.035797119140625, 0.08538818359375, -0.071044921875, 0.022857666015625, 0.0287933349609375, 0.0011234283447265625, -0.005168914794921875, -0.03173828125, -0.0489501953125, -0.00966644287109375, 0.00942230224609375, 0.002979278564453125, 0.01311492919921875, 0.00823974609375, 0.010650634765625, 0.019012451171875, -0.0251922607421875, -0.006687164306640625, -0.025543212890625, -0.00890350341796875, 0.029052734375, -0.0218658447265625, 0.011077880859375, -0.042510986328125, -0.03631591796875, -0.016815185546875, -0.030609130859375, 0.024322509765625, 0.0294647216796875, 0.01947021484375, -0.0196075439453125, 0.03387451171875, -0.0088348388671875, 0.05999755859375, 0.01259613037109375, 0.0091094970703125, 0.03179931640625, -0.0116424560546875, -0.0097503662109375, -0.0033245086669921875, 0.04437255859375, 
0.0199432373046875, 0.033599853515625, 0.0033130645751953125, -0.008331298828125, 0.01007843017578125, 0.038909912109375, -0.046783447265625, -0.024993896484375, 0.002910614013671875, -0.0501708984375, -0.05474853515625, 0.01038360595703125, -0.045989990234375, -0.0174407958984375, -0.016845703125, -0.0037822723388671875, -0.031646728515625, -0.0250244140625, -0.0186767578125, -0.00371551513671875, 0.04766845703125, 0.0225677490234375, -0.07061767578125, -0.01326751708984375, 0.04498291015625, 0.03997802734375, 0.00531768798828125, -0.006381988525390625, -0.00791168212890625, -0.0147857666015625, -0.02606201171875, 0.041748046875, -0.0325927734375, -0.0204315185546875, 0.01708984375, 0.0007195472717285156, -0.0013837814331054688, -0.031982421875, 0.0684814453125, -0.049713134765625, 0.031707763671875, -0.0280914306640625, -0.040802001953125, -0.0196075439453125, 0.0172882080078125, -0.031829833984375, 0.055816650390625, 0.0146484375, -0.0684814453125, 0.059234619140625, -0.04150390625, -0.02093505859375, 0.0030422210693359375, -0.00518798828125, -0.031219482421875, 0.004974365234375, -0.0182952880859375, 0.034271240234375, -0.01165008544921875, 0.04766845703125, -0.0230865478515625, 0.00787353515625, 0.0194854736328125, -0.0034847259521484375, 0.1002197265625, 0.0228118896484375, -0.024688720703125, 0.0048370361328125, -0.047393798828125, -0.0083160400390625, 0.0020656585693359375, -0.0443115234375, -0.05126953125, 0.0233154296875, 0.0223541259765625, 0.0281524658203125, 0.03521728515625, -0.0543212890625, 0.0198974609375, -0.01340484619140625, 0.0122528076171875, 0.06292724609375, -0.021209716796875, 0.02508544921875, -0.0281219482421875, 0.01513671875, 0.0080718994140625, 0.03350830078125, 0.018402099609375, -0.0577392578125, -0.07574462890625, -0.00888824462890625, 0.04815673828125, 0.0222320556640625, -0.046356201171875, 0.061676025390625, 0.00911712646484375, -0.035400390625, -0.041015625, 0.0025081634521484375, 0.040771484375, 0.04473876953125, 
0.004451751708984375, -0.0304718017578125, -0.039306640625, -0.07763671875, 0.01149749755859375, -0.03875732421875, -0.0081634521484375, 0.0229949951171875, 0.020751953125, -0.0204620361328125, 0.0670166015625, -0.02471923828125, -0.01486968994140625, 0.00213623046875, 0.03668212890625, 0.0233001708984375, 0.048126220703125, 0.05560302734375, -0.060546875, -0.0297393798828125, -0.022491455078125, -0.058837890625, -0.0233001708984375, 0.0271148681640625, -0.0310211181640625, 0.0246734619140625, 0.01486968994140625, -0.017913818359375, 0.0113372802734375, 0.0433349609375, -0.046600341796875, 0.046234130859375, 0.0179595947265625, -0.01708984375, -0.0791015625, 0.0014295578002929688, 0.014312744140625, -0.016143798828125, -0.050567626953125, 0.00484466552734375, 0.01242828369140625, 0.0238800048828125, -0.0335693359375, 0.01500701904296875, -0.017608642578125, 0.01251220703125, -0.0163421630859375, -0.0189971923828125, -0.01800537109375, 0.042877197265625, 0.0187530517578125, 0.032867431640625, 0.039825439453125, -0.048828125, 0.010284423828125, 0.0308074951171875, -0.0192413330078125, 0.03790283203125, -0.04864501953125, 0.01149749755859375, -0.00881195068359375, -0.0016088485717773438, -0.07525634765625, -0.007236480712890625, 0.007244110107421875, -0.05230712890625, 0.024505615234375, -0.0091400146484375, -0.036773681640625, -0.03173828125, -0.0240631103515625, 0.019561767578125, 0.049774169921875, -0.042236328125, 0.017578125, 0.0494384765625, -0.028594970703125, -0.033294677734375, -0.051513671875, -0.040283203125, 0.00882720947265625, -0.0333251953125, 0.00032258033752441406, -0.01324462890625, -0.02313232421875, 0.004505157470703125, 0.01531219482421875, -0.023468017578125, -0.0165863037109375, 0.00429534912109375, 0.041107177734375, -0.0013551712036132812, 0.01462554931640625, 0.020263671875, 0.0054779052734375, 0.01934814453125, 0.0023784637451171875, 0.057220458984375, -0.0002834796905517578, -0.0113677978515625, -0.003620147705078125, 0.01352691650390625, 
0.041107177734375, -0.000028848648071289062, 0.06976318359375, 0.041107177734375, -0.02947998046875, -0.0171356201171875, -0.045806884765625, -0.0160369873046875, -0.028533935546875, 0.013580322265625, 0.0003142356872558594, -0.051483154296875, 0.037353515625, 0.024566650390625, 0.01096343994140625, 0.03851318359375, 0.050048828125, -0.0039520263671875, 0.07427978515625, 0.06787109375, -0.03857421875, 0.060882568359375, -0.01023101806640625, 0.0032482147216796875, -0.06756591796875, -0.014862060546875, -0.05157470703125, -0.0095672607421875, -0.055877685546875, -0.0223541259765625, 0.0171356201171875, 0.01045989990234375, -0.0272064208984375, 0.0173187255859375, -0.0589599609375, 0.004131317138671875, 0.06597900390625, 0.0177154541015625, 0.005710601806640625, -0.0201263427734375, -0.01274871826171875, -0.0133819580078125, -0.056182861328125, -0.054779052734375, 0.08624267578125, 0.03668212890625, 0.044708251953125, 0.0167083740234375, 0.06170654296875, 0.024566650390625, 0.060028076171875, -0.053436279296875, 0.035430908203125, -0.0283660888671875, -0.0914306640625, -0.013153076171875, -0.02618408203125, -0.05987548828125, 0.01195526123046875, 0.0021076202392578125, -0.044525146484375, 0.035919189453125, 0.0153961181640625, -0.00432586669921875, 0.0118255615234375, -0.060211181640625, 0.056121826171875, -0.00785064697265625, -0.0177001953125, -0.01148223876953125, -0.047271728515625, 0.05303955078125, -0.00751495361328125, 0.01401519775390625, -0.0031604766845703125, 0.032684326171875, 0.07965087890625, -0.0194549560546875, 0.08740234375, -0.048187255859375, -0.012603759765625, 0.0185546875, -0.0101318359375, 0.0235443115234375, -0.009674072265625, -0.0204925537109375, 0.03375244140625, -0.0065155029296875, -0.04498291015625, 0.01727294921875, 0.04901123046875, -0.0655517578125, -0.009552001953125, -0.05096435546875, -0.044189453125, -0.004848480224609375, 0.033843994140625, 0.02752685546875, 0.0038394927978515625, -0.0235595703125, 0.02484130859375, 
0.05877685546875, -0.036834716796875, 0.0247650146484375, 0.039825439453125, -0.02069091796875, -0.002094268798828125, 0.038726806640625, 0.005992889404296875, 0.0177764892578125, 0.01296234130859375, 0.00934600830078125, -0.0129852294921875, -0.005207061767578125, -0.01326751708984375, 0.020355224609375, -0.0297393798828125, -0.0078887939453125, -0.031829833984375, -0.0355224609375, -0.045074462890625, -0.0022792816162109375, -0.023834228515625, -0.01154327392578125, -0.0216827392578125, 0.0029964447021484375, 0.0008602142333984375, 0.07025146484375, -0.001064300537109375, 0.026763916015625, -0.033172607421875, 0.0173187255859375, 0.04364013671875, 0.034271240234375, 0.016876220703125, -0.03961181640625, -0.02642822265625, 0.004116058349609375, -0.04443359375, -0.06024169921875, 0.04388427734375, -0.01160430908203125, 0.052886962890625, 0.05731201171875, 0.00734710693359375, -0.005001068115234375, -0.0175018310546875, 0.07879638671875, 0.0229339599609375, -0.07611083984375, 0.036102294921875, -0.044708251953125, 0.004505157470703125, 0.01971435546875, 0.01904296875, -0.0249481201171875, -0.0307464599609375, -0.06488037109375, -0.07501220703125, 0.0819091796875, 0.012908935546875, -0.004955291748046875, -0.01194000244140625, 0.0167083740234375, 0.01422882080078125, 0.043060302734375, -0.09246826171875, -0.027923583984375, -0.01751708984375, -0.0208740234375, -0.0030517578125, -0.05194091796875, -0.00815582275390625, -0.0250244140625, 0.0648193359375, 0.006015777587890625, 0.049591064453125, 0.003997802734375, 0.004711151123046875, -0.0211334228515625, 0.0247955322265625, 0.03631591796875, 0.026336669921875, -0.043548583984375, 0.01178741455078125, -0.00832366943359375, -0.0298919677734375, -0.004795074462890625, 0.0147857666015625, -0.034423828125, 0.0027599334716796875, 0.030853271484375, 0.05230712890625, -0.003879547119140625, -0.024627685546875, 0.037841796875, 0.003986358642578125, -0.0208740234375, -0.03643798828125, 0.010498046875, -0.00284576416015625, 
0.016876220703125, 0.023651123046875, 0.0110931396484375, 0.01360321044921875, -0.03399658203125, 0.00891876220703125, 0.041168212890625, -0.033050537109375, -0.026214599609375, 0.0521240234375, 0.016754150390625, -0.008087158203125, 0.035797119140625, -0.03826904296875, -0.053009033203125, 0.04693603515625, 0.0284271240234375, 0.07958984375, -0.03314208984375, 0.0208740234375, 0.0242462158203125, 0.0280609130859375, 0.005401611328125, 0.03656005859375, -0.009979248046875, -0.09417724609375, -0.019500732421875, -0.063232421875, -0.042236328125, 0.020599365234375, -0.06756591796875, 0.00341033935546875, -0.02703857421875, -0.02239990234375, 0.01507568359375, -0.015777587890625, -0.0517578125, 0.0142974853515625, 0.02386474609375, 0.061187744140625, -0.0771484375, 0.087646484375, 0.0673828125, -0.0257110595703125, -0.0462646484375, 0.021575927734375, -0.0229949951171875, -0.04888916015625, 0.06317138671875, 0.003662109375, 0.003360748291015625, -0.0120849609375, -0.07025146484375, -0.049774169921875, 0.047088623046875, 0.005237579345703125, -0.0438232421875, -0.0015935897827148438, -0.0160064697265625, 0.059783935546875, -0.0223541259765625, 0.00121307373046875, 0.026763916015625, 0.0361328125, -0.01456451416015625, -0.065185546875, -0.033294677734375, -0.031890869140625, 0.018951416015625, -0.0009751319885253906, -0.059326171875, 0.08160400390625, 0.0004696846008300781, 0.0226898193359375, -0.004047393798828125, 0.031707763671875, 0.0207672119140625, 0.0240020751953125, 0.055633544921875, 0.033477783203125, 0.034698486328125, -0.0229339599609375, 0.06793212890625, -0.03851318359375, 0.0509033203125, 0.066162109375, -0.01422882080078125, 0.05389404296875, 0.0391845703125, -0.029296875, 0.04241943359375, 0.04443359375, 0.0143585205078125, 0.056488037109375, 0.0095062255859375, -0.011505126953125, -0.0079345703125, -0.02081298828125, -0.0255279541015625, 0.0189208984375, 0.0262603759765625, -0.054718017578125, 0.0023021697998046875, -0.0025272369384765625, 
0.00452423095703125, -0.0084228515625, -0.01467132568359375, 0.05218505859375, 0.005619049072265625, -0.052886962890625, 0.0333251953125, -0.0010929107666015625, 0.07110595703125, -0.042938232421875, 0.0013551712036132812, 0.00417327880859375, 0.026153564453125, -0.012542724609375, -0.0504150390625, 0.01416015625, 0.01122283935546875, -0.058624267578125, -0.0174713134765625, 0.051849365234375, -0.03424072265625, -0.043121337890625, 0.03631591796875, 0.0295257568359375, 0.01132965087890625, 0.019805908203125, -0.05255126953125, -0.0214691162109375, 0.0277099609375, -0.037872314453125, 0.0097198486328125, 0.0235595703125, -0.002902984619140625, 0.03485107421875, 0.040313720703125, 0.0199432373046875, 0.0267486572265625, 0.019683837890625, 0.06439208984375, -0.05242919921875, -0.0380859375, -0.035797119140625, 0.0311279296875, -0.00527191162109375, -0.02880859375, 0.05023193359375, 0.0312042236328125, 0.0762939453125, -0.03314208984375, 0.0814208984375, -0.039215087890625, 0.07012939453125, -0.00789642333984375, 0.051422119140625, -0.02783203125, -0.0035305023193359375, -0.041290283203125, -0.056732177734375, -0.01421356201171875, 0.0875244140625, -0.042633056640625, 0.0216827392578125, 0.0284881591796875, 0.046417236328125, 0.005664825439453125, -0.0045928955078125, 0.02276611328125, 0.02728271484375, 0.018951416015625, 0.0198211669921875, 0.04473876953125, -0.0126495361328125, 0.00792694091796875, -0.03369140625, -0.0494384765625, -0.0289459228515625, -0.053009033203125, -0.06793212890625, -0.0419921875, -0.01326751708984375, -0.038330078125, -0.0194549560546875, 0.07391357421875, 0.07403564453125, -0.10247802734375, -0.0255279541015625, -0.02923583984375, 0.00849151611328125, -0.01448822021484375, -0.02716064453125, 0.00861358642578125, -0.0273590087890625, -0.077392578125, -0.00766754150390625, -0.016326904296875, -0.007781982421875, -0.034027099609375, -0.013153076171875, -0.035888671875, 0.004749298095703125, 0.04425048828125, -0.002117156982421875, 
-0.043182373046875, -0.0184783935546875, -0.0080413818359375, -0.0345458984375, -0.00396728515625, 0.04205322265625, -0.02435302734375, 0.04925537109375, 0.038330078125, 0.0018367767333984375, 0.02740478515625, 0.00146484375, 0.03802490234375, -0.0667724609375, 0.017303466796875, 0.0198974609375, 0.03802490234375, 0.0241851806640625, -0.035888671875, 0.03216552734375, 0.0204620361328125, -0.062225341796875, -0.048187255859375, 0.0262603759765625, -0.0859375, -0.006702423095703125, 0.08404541015625, -0.009246826171875, -0.0178680419921875, -0.00313568115234375, -0.0252685546875, 0.058258056640625, -0.0633544921875, 0.08636474609375, 0.06292724609375, -0.008392333984375, -0.01409149169921875, -0.0406494140625, 0.0274658203125, 0.03692626953125, -0.03387451171875, -0.01116180419921875, 0.028594970703125, 0.017608642578125, 0.044281005859375, 0.036041259765625, -0.01171875, 0.01043701171875, -0.01342010498046875, 0.07733154296875, 0.0289459228515625, -0.0102996826171875, -0.0282745361328125, 0.0082244873046875, 0.034820556640625, -0.028717041015625 ] ]
h2oai/h2ogpt-oig-oasst1-512-6_9b
2023-06-02T22:20:22.000Z
[ "transformers", "pytorch", "gpt_neox", "text-generation", "gpt", "llm", "large language model", "open-source", "en", "dataset:h2oai/h2ogpt-oig-oasst1-instruct-cleaned-v1", "dataset:h2oai/openassistant_oasst1_h2ogpt", "dataset:h2oai/h2ogpt-fortune2000-personalized", "dataset:h2oai/h2ogpt-oig-oasst1-instruct-cleaned-v3", "license:apache-2.0", "has_space", "text-generation-inference", "region:us" ]
text-generation
h2oai
null
null
h2oai/h2ogpt-oig-oasst1-512-6_9b
16
8,927
transformers
2023-04-18T06:13:02
--- license: apache-2.0 language: - en library_name: transformers inference: false thumbnail: https://h2o.ai/etc.clientlibs/h2o/clientlibs/clientlib-site/resources/images/favicon.ico tags: - gpt - llm - large language model - open-source datasets: - h2oai/h2ogpt-oig-oasst1-instruct-cleaned-v1 - h2oai/openassistant_oasst1_h2ogpt - h2oai/h2ogpt-fortune2000-personalized - h2oai/h2ogpt-oig-oasst1-instruct-cleaned-v3 --- # h2oGPT Model Card ## Summary H2O.ai's `h2ogpt-oig-oasst1-512-6_9b` is a 6.9 billion parameter instruction-following large language model licensed for commercial use. - Base model: [EleutherAI/pythia-6.9b](https://huggingface.co/EleutherAI/pythia-6.9b) - Fine-tuning dataset: [h2oai/h2ogpt-oig-oasst1-instruct-cleaned-v1](https://huggingface.co/datasets/h2oai/h2ogpt-oig-oasst1-instruct-cleaned-v1) and [h2oai/openassistant_oasst1_h2ogpt](https://huggingface.co/datasets/h2oai/openassistant_oasst1_h2ogpt) and [h2oai/h2ogpt-fortune2000-personalized](https://huggingface.co/datasets/h2oai/h2ogpt-fortune2000-personalized) and [h2oai/h2ogpt-oig-oasst1-instruct-cleaned-v3](https://huggingface.co/datasets/h2oai/h2ogpt-oig-oasst1-instruct-cleaned-v3) - Data-prep and fine-tuning code: [H2O.ai GitHub](https://github.com/h2oai/h2ogpt) - Training logs: [zip](https://huggingface.co/h2oai/h2ogpt-oig-oasst1-512-6.9b/blob/main/pythia-6.9b.h2ogpt-oig-oasst1-instruct-cleaned-v1.json.1_epochs.5fc91911bc2bfaaf3b6c2de577c4b0ae45a07a4a.7.zip) and [zip](https://huggingface.co/h2oai/h2ogpt-oig-oasst1-512-6.9b/blob/main/h2ogpt-oig-oasst1-512-6.9b.h2oaiopenassistant_oasst1_h2ogpt.2_epochs.e35e2e06e0af2f7dceac2e16e3646c90ccce4ec0.1.zip) and [zip](https://huggingface.co/h2oai/h2ogpt-oig-oasst1-512-6.9b/blob/main/h2ogpt-oig-oasst1-512-6.9b.h2oaih2ogpt-oig-oasst1-instruct-cleaned-v3.1_epochs.e48f9debb0d2bd8d866fa5668bbbb51c317c553c.1.zip) ## Chatbot - Run your own chatbot: [H2O.ai GitHub](https://github.com/h2oai/h2ogpt) [![H2O.ai 
GitHub](https://user-images.githubusercontent.com/6147661/232930822-e7170e4d-8aa1-4f7a-ad70-ece9cdd8b0cb.png)](https://github.com/h2oai/h2ogpt) ## Usage To use the model with the `transformers` library on a machine with GPUs, first make sure you have the `transformers` and `accelerate` libraries installed. ```bash pip install transformers==4.28.1 pip install accelerate==0.18.0 ``` ```python import torch from transformers import pipeline generate_text = pipeline(model="h2oai/h2ogpt-oig-oasst1-512-6_9b", torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto", prompt_type='human_bot') res = generate_text("Why is drinking water so healthy?", max_new_tokens=100) print(res[0]["generated_text"]) ``` Alternatively, if you prefer to not use `trust_remote_code=True` you can download [instruct_pipeline.py](https://huggingface.co/h2oai/h2ogpt-oig-oasst1-512-6.9b/blob/main/h2oai_pipeline.py), store it alongside your notebook, and construct the pipeline yourself from the loaded model and tokenizer: ```python import torch from h2oai_pipeline import H2OTextGenerationPipeline from transformers import AutoModelForCausalLM, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained("h2oai/h2ogpt-oig-oasst1-512-6_9b", padding_side="left") model = AutoModelForCausalLM.from_pretrained("h2oai/h2ogpt-oig-oasst1-512-6_9b", torch_dtype=torch.bfloat16, device_map="auto") generate_text = H2OTextGenerationPipeline(model=model, tokenizer=tokenizer, prompt_type='human_bot') res = generate_text("Why is drinking water so healthy?", max_new_tokens=100) print(res[0]["generated_text"]) ``` ## Model Architecture ``` GPTNeoXForCausalLM( (gpt_neox): GPTNeoXModel( (embed_in): Embedding(50432, 4096) (layers): ModuleList( (0-31): 32 x GPTNeoXLayer( (input_layernorm): LayerNorm((4096,), eps=1e-05, elementwise_affine=True) (post_attention_layernorm): LayerNorm((4096,), eps=1e-05, elementwise_affine=True) (attention): GPTNeoXAttention( (rotary_emb): RotaryEmbedding() (query_key_value): 
Linear(in_features=4096, out_features=12288, bias=True) (dense): Linear(in_features=4096, out_features=4096, bias=True) ) (mlp): GPTNeoXMLP( (dense_h_to_4h): Linear(in_features=4096, out_features=16384, bias=True) (dense_4h_to_h): Linear(in_features=16384, out_features=4096, bias=True) (act): GELUActivation() ) ) ) (final_layer_norm): LayerNorm((4096,), eps=1e-05, elementwise_affine=True) ) (embed_out): Linear(in_features=4096, out_features=50432, bias=False) ) ``` ## Model Configuration ```json GPTNeoXConfig { "_name_or_path": "h2oai/h2ogpt-oig-oasst1-512-6_9b", "architectures": [ "GPTNeoXForCausalLM" ], "bos_token_id": 0, "custom_pipeline": { "text-generation": { "impl": "h2oai_pipeline.H2OTextGenerationPipeline", "pt": "AutoModelForCausalLM" } }, "eos_token_id": 0, "hidden_act": "gelu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 16384, "layer_norm_eps": 1e-05, "max_position_embeddings": 2048, "model_type": "gpt_neox", "num_attention_heads": 32, "num_hidden_layers": 32, "rotary_emb_base": 10000, "rotary_pct": 0.25, "tie_word_embeddings": false, "torch_dtype": "float16", "transformers_version": "4.28.1", "use_cache": true, "use_parallel_residual": true, "vocab_size": 50432 } ``` ## Model Validation Model validation results using [EleutherAI lm-evaluation-harness](https://github.com/EleutherAI/lm-evaluation-harness). 
[eval source code](https://github.com/h2oai/h2ogpt/issues/125#issue-1702311702) | Task |Version| Metric |Value | |Stderr| |-------------|------:|--------|-----:|---|-----:| |arc_easy | 0|acc |0.6591|± |0.0097| | | |acc_norm|0.6178|± |0.0100| |arc_challenge| 0|acc |0.3174|± |0.0136| | | |acc_norm|0.3558|± |0.0140| |openbookqa | 0|acc |0.2540|± |0.0195| | | |acc_norm|0.3580|± |0.0215| |winogrande | 0|acc |0.6069|± |0.0137| |piqa | 0|acc |0.7486|± |0.0101| | | |acc_norm|0.7546|± |0.0100| |hellaswag | 0|acc |0.4843|± |0.0050| | | |acc_norm|0.6388|± |0.0048| |boolq | 1|acc |0.6193|± |0.0085| ## Disclaimer Please read this disclaimer carefully before using the large language model provided in this repository. Your use of the model signifies your agreement to the following terms and conditions. - Biases and Offensiveness: The large language model is trained on a diverse range of internet text data, which may contain biased, racist, offensive, or otherwise inappropriate content. By using this model, you acknowledge and accept that the generated content may sometimes exhibit biases or produce content that is offensive or inappropriate. The developers of this repository do not endorse, support, or promote any such content or viewpoints. - Limitations: The large language model is an AI-based tool and not a human. It may produce incorrect, nonsensical, or irrelevant responses. It is the user's responsibility to critically evaluate the generated content and use it at their discretion. - Use at Your Own Risk: Users of this large language model must assume full responsibility for any consequences that may arise from their use of the tool. The developers and contributors of this repository shall not be held liable for any damages, losses, or harm resulting from the use or misuse of the provided model. - Ethical Considerations: Users are encouraged to use the large language model responsibly and ethically. 
By using this model, you agree not to use it for purposes that promote hate speech, discrimination, harassment, or any form of illegal or harmful activities. - Reporting Issues: If you encounter any biased, offensive, or otherwise inappropriate content generated by the large language model, please report it to the repository maintainers through the provided channels. Your feedback will help improve the model and mitigate potential issues. - Changes to this Disclaimer: The developers of this repository reserve the right to modify or update this disclaimer at any time without prior notice. It is the user's responsibility to periodically review the disclaimer to stay informed about any changes. By using the large language model provided in this repository, you agree to accept and comply with the terms and conditions outlined in this disclaimer. If you do not agree with any part of this disclaimer, you should refrain from using the model and any content generated by it.
8,643
[ [ -0.0211181640625, -0.058685302734375, 0.0294647216796875, 0.00930023193359375, -0.015625, -0.006195068359375, -0.017120361328125, -0.0333251953125, 0.0089874267578125, 0.024688720703125, -0.035919189453125, -0.040618896484375, -0.048065185546875, -0.01343536376953125, 0.0018033981323242188, 0.06402587890625, -0.00250244140625, -0.0235595703125, 0.004215240478515625, 0.004878997802734375, -0.01232147216796875, -0.028045654296875, -0.05712890625, -0.0274200439453125, 0.0198211669921875, 0.0160675048828125, 0.047607421875, 0.057830810546875, 0.028228759765625, 0.0227203369140625, -0.01367950439453125, 0.0013370513916015625, -0.031951904296875, -0.0136566162109375, 0.005199432373046875, -0.0184326171875, -0.050567626953125, -0.0017156600952148438, 0.040740966796875, 0.0218048095703125, -0.00818634033203125, 0.0261993408203125, 0.0174713134765625, 0.033416748046875, -0.03631591796875, 0.0302734375, -0.0283660888671875, -0.0101776123046875, -0.006420135498046875, 0.004085540771484375, -0.0191650390625, -0.002422332763671875, 0.025604248046875, -0.048736572265625, 0.00018870830535888672, 0.0147247314453125, 0.09844970703125, 0.0193023681640625, -0.0202178955078125, -0.019287109375, -0.032440185546875, 0.05810546875, -0.0767822265625, 0.0235443115234375, 0.032012939453125, 0.003246307373046875, -0.00013756752014160156, -0.05816650390625, -0.039947509765625, -0.01149749755859375, -0.0104522705078125, 0.0196685791015625, -0.022979736328125, -0.0021305084228515625, 0.0229339599609375, 0.0171356201171875, -0.052978515625, 0.005527496337890625, -0.03582763671875, -0.022247314453125, 0.054718017578125, 0.0091552734375, 0.0203094482421875, -0.0350341796875, -0.030487060546875, -0.01520538330078125, -0.031646728515625, 0.00982666015625, 0.03631591796875, 0.01264190673828125, -0.05267333984375, 0.041259765625, -0.01532745361328125, 0.039215087890625, -0.004669189453125, -0.01328277587890625, 0.045562744140625, -0.026275634765625, -0.0257568359375, 0.0020656585693359375, 
0.09002685546875, 0.0211181640625, 0.006011962890625, 0.0204010009765625, -0.00608062744140625, -0.0186004638671875, -0.005435943603515625, -0.08203125, -0.017181396484375, 0.040496826171875, -0.033782958984375, -0.0178375244140625, 0.00583648681640625, -0.06610107421875, -0.00986480712890625, -0.0024871826171875, 0.032501220703125, -0.0311431884765625, -0.0303955078125, 0.01302337646484375, -0.0145263671875, 0.0193634033203125, 0.0121002197265625, -0.05859375, 0.01009368896484375, 0.043914794921875, 0.06939697265625, 0.007244110107421875, -0.0250091552734375, -0.0249176025390625, 0.00449371337890625, -0.005039215087890625, 0.03369140625, -0.0181732177734375, -0.0159454345703125, -0.004543304443359375, 0.01538848876953125, -0.01306915283203125, -0.0276336669921875, 0.045745849609375, -0.0143890380859375, 0.041015625, -0.0021038055419921875, -0.034423828125, -0.01230621337890625, 0.0196533203125, -0.031402587890625, 0.0909423828125, 0.0204010009765625, -0.06219482421875, 0.006450653076171875, -0.052276611328125, -0.01003265380859375, -0.00681304931640625, 0.0004992485046386719, -0.055877685546875, -0.021453857421875, 0.0218353271484375, 0.0248260498046875, -0.03460693359375, 0.0163421630859375, -0.022735595703125, -0.01129913330078125, -0.004901885986328125, -0.029083251953125, 0.0926513671875, 0.017730712890625, -0.048187255859375, 0.0136260986328125, -0.053924560546875, 0.00826263427734375, 0.033050537109375, -0.028472900390625, -0.004573822021484375, -0.01384735107421875, 0.01210784912109375, 0.03240966796875, 0.0296783447265625, -0.036651611328125, 0.0161285400390625, -0.03369140625, 0.058807373046875, 0.05694580078125, 0.0021495819091796875, 0.0208740234375, -0.0184478759765625, 0.0252838134765625, 0.006252288818359375, 0.0257568359375, -0.0103759765625, -0.0694580078125, -0.058837890625, -0.0276031494140625, 0.01439666748046875, 0.051116943359375, -0.038482666015625, 0.0625, -0.031890869140625, -0.04779052734375, -0.045135498046875, 0.006683349609375, 
0.032196044921875, 0.04693603515625, 0.032073974609375, -0.0087127685546875, -0.0290985107421875, -0.06976318359375, 0.006961822509765625, -0.0244598388671875, -0.00908660888671875, 0.043914794921875, 0.06524658203125, -0.032318115234375, 0.05462646484375, -0.051605224609375, -0.014892578125, -0.0035762786865234375, 0.0113677978515625, 0.031280517578125, 0.04498291015625, 0.03680419921875, -0.037628173828125, -0.038787841796875, -0.00012505054473876953, -0.051116943359375, 0.0035228729248046875, -0.00014984607696533203, -0.023681640625, 0.0208587646484375, 0.026397705078125, -0.06695556640625, 0.039031982421875, 0.05224609375, -0.047210693359375, 0.036163330078125, -0.0296478271484375, -0.0005655288696289062, -0.10162353515625, 0.026641845703125, 0.0018978118896484375, -0.0082244873046875, -0.032684326171875, -0.004207611083984375, 0.0007805824279785156, -0.006793975830078125, -0.04827880859375, 0.06536865234375, -0.033172607421875, 0.0179290771484375, -0.0003561973571777344, 0.004863739013671875, 0.0013399124145507812, 0.046630859375, 0.0038509368896484375, 0.039154052734375, 0.05218505859375, -0.03125, 0.0302734375, 0.01490020751953125, -0.0205230712890625, 0.0110626220703125, -0.07354736328125, 0.01259613037109375, 0.0030651092529296875, 0.018798828125, -0.07763671875, -0.0288543701171875, 0.042724609375, -0.04791259765625, 0.0243682861328125, -0.023529052734375, -0.033966064453125, -0.04815673828125, -0.0308074951171875, 0.0228118896484375, 0.059478759765625, -0.0335693359375, 0.03961181640625, 0.0212554931640625, -0.0007920265197753906, -0.045440673828125, -0.049896240234375, -0.006443023681640625, -0.0182647705078125, -0.06121826171875, 0.033416748046875, 0.0033512115478515625, 0.000640869140625, 0.01239013671875, 0.0084686279296875, 0.0026702880859375, -0.0010194778442382812, 0.0264892578125, 0.0247802734375, -0.00920867919921875, -0.01300811767578125, -0.006984710693359375, -0.01206207275390625, 0.0002084970474243164, -0.023651123046875, 0.0716552734375, 
-0.00695037841796875, -0.01212310791015625, -0.06536865234375, 0.00021255016326904297, 0.05072021484375, -0.0157012939453125, 0.05859375, 0.0689697265625, -0.04376220703125, 0.00646209716796875, -0.026824951171875, -0.0233612060546875, -0.034881591796875, 0.035675048828125, -0.01275634765625, -0.05511474609375, 0.051300048828125, 0.0202484130859375, 0.0031566619873046875, 0.05718994140625, 0.058502197265625, -0.0015020370483398438, 0.07415771484375, 0.0112152099609375, -0.0162811279296875, 0.033538818359375, -0.052490234375, 0.01342010498046875, -0.07568359375, -0.023040771484375, -0.03912353515625, -0.01380157470703125, -0.061431884765625, -0.04351806640625, 0.0233917236328125, 0.00904083251953125, -0.04736328125, 0.031646728515625, -0.038665771484375, 0.0028400421142578125, 0.05255126953125, 0.01007080078125, -0.006488800048828125, 0.0092010498046875, -0.0182952880859375, 0.006565093994140625, -0.04656982421875, -0.038543701171875, 0.08349609375, 0.050140380859375, 0.034149169921875, 0.0028705596923828125, 0.053741455078125, 0.00032639503479003906, 0.0101776123046875, -0.051177978515625, 0.0268096923828125, 0.01393890380859375, -0.06378173828125, -0.0282135009765625, -0.03564453125, -0.07916259765625, 0.01392364501953125, -0.01499176025390625, -0.07562255859375, 0.0165252685546875, 0.0185699462890625, -0.034454345703125, 0.0283203125, -0.060943603515625, 0.07568359375, -0.0176849365234375, -0.05267333984375, -0.005634307861328125, -0.052886962890625, 0.0166778564453125, 0.0247344970703125, 0.01212310791015625, -0.0136260986328125, -0.0101165771484375, 0.060150146484375, -0.041015625, 0.054718017578125, -0.017547607421875, -0.00420379638671875, 0.044464111328125, -0.0183563232421875, 0.047943115234375, 0.01148223876953125, -0.0016155242919921875, 0.0286865234375, -0.0139923095703125, -0.042022705078125, -0.0291595458984375, 0.05267333984375, -0.07147216796875, -0.041107177734375, -0.044158935546875, -0.0262603759765625, 0.01125335693359375, 0.0263824462890625, 
0.027252197265625, 0.0244140625, 0.0119171142578125, 0.0177154541015625, 0.0308990478515625, -0.0347900390625, 0.04254150390625, 0.00975799560546875, -0.0120849609375, -0.050537109375, 0.0738525390625, -0.0009732246398925781, 0.0282135009765625, 0.01203155517578125, 0.0234832763671875, -0.0311431884765625, -0.0276336669921875, -0.050537109375, 0.035858154296875, -0.028167724609375, -0.006626129150390625, -0.04901123046875, -0.022735595703125, -0.044830322265625, 0.0084991455078125, -0.03729248046875, -0.0178375244140625, -0.03582763671875, 0.004467010498046875, 0.027008056640625, 0.0294647216796875, -0.0012664794921875, 0.0166168212890625, -0.055877685546875, 0.0177459716796875, 0.0303497314453125, 0.018768310546875, -0.006072998046875, -0.060943603515625, -0.01557159423828125, 0.020263671875, -0.03509521484375, -0.05303955078125, 0.045257568359375, 0.004566192626953125, 0.031524658203125, 0.0291595458984375, -0.01324462890625, 0.05169677734375, -0.024749755859375, 0.0721435546875, 0.0146484375, -0.069091796875, 0.04315185546875, -0.04095458984375, 0.0295867919921875, 0.016326904296875, 0.030487060546875, -0.039581298828125, -0.01125335693359375, -0.061676025390625, -0.0804443359375, 0.060943603515625, 0.0311431884765625, 0.006011962890625, 0.0116119384765625, 0.0260467529296875, -0.0251617431640625, 0.00861358642578125, -0.050140380859375, -0.033416748046875, -0.01116943359375, -0.0223541259765625, 0.00046324729919433594, -0.01044464111328125, -0.00690460205078125, -0.03509521484375, 0.06512451171875, -0.005367279052734375, 0.04803466796875, 0.0281219482421875, -0.0010204315185546875, -0.002696990966796875, 0.00933074951171875, 0.045379638671875, 0.040863037109375, -0.0293731689453125, -0.00554656982421875, 0.0025577545166015625, -0.048492431640625, -0.0023403167724609375, 0.0200042724609375, -0.024658203125, -0.01103973388671875, 0.0338134765625, 0.08221435546875, -0.0016298294067382812, -0.042999267578125, 0.033477783203125, -0.00008106231689453125, 
-0.032196044921875, -0.0239715576171875, 0.006282806396484375, 0.0289154052734375, 0.00829315185546875, 0.018524169921875, -0.0004756450653076172, -0.0036067962646484375, -0.048828125, -0.0011701583862304688, 0.023773193359375, -0.0290985107421875, -0.024200439453125, 0.06329345703125, 0.0137481689453125, -0.017486572265625, 0.06500244140625, -0.025360107421875, -0.046142578125, 0.058685302734375, 0.0301666259765625, 0.06475830078125, -0.0135345458984375, 0.007228851318359375, 0.061309814453125, 0.03533935546875, -0.0023899078369140625, 0.0181732177734375, 0.01776123046875, -0.04931640625, -0.0122222900390625, -0.039581298828125, -0.00469970703125, 0.0250701904296875, -0.04486083984375, 0.032318115234375, -0.034271240234375, -0.0196380615234375, -0.00485992431640625, 0.0089111328125, -0.056884765625, 0.01560211181640625, 0.0225830078125, 0.055694580078125, -0.056640625, 0.061370849609375, 0.046234130859375, -0.035919189453125, -0.06005859375, -0.004215240478515625, 0.006427764892578125, -0.0638427734375, 0.031005859375, 0.0134124755859375, 0.01515960693359375, 0.00936126708984375, -0.03662109375, -0.07489013671875, 0.10333251953125, 0.0252227783203125, -0.044586181640625, -0.00897979736328125, 0.01222991943359375, 0.04241943359375, -0.0187225341796875, 0.044647216796875, 0.045623779296875, 0.041656494140625, 0.0007719993591308594, -0.0860595703125, 0.0207061767578125, -0.0313720703125, -0.007640838623046875, 0.0028858184814453125, -0.07244873046875, 0.07421875, -0.0286865234375, -0.01052093505859375, -0.0034618377685546875, 0.03125, 0.03125, 0.01544952392578125, 0.028717041015625, 0.06781005859375, 0.054901123046875, -0.018829345703125, 0.0865478515625, -0.03759765625, 0.05584716796875, 0.0736083984375, 0.0030345916748046875, 0.06707763671875, 0.0303497314453125, -0.017486572265625, 0.034515380859375, 0.053863525390625, -0.00438690185546875, 0.0222930908203125, 0.01367950439453125, -0.00809478759765625, -0.0113372802734375, -0.00481414794921875, -0.03997802734375, 
0.0301666259765625, 0.025146484375, -0.04498291015625, 0.004741668701171875, -0.0232391357421875, 0.022186279296875, -0.03179931640625, -0.006183624267578125, 0.04351806640625, 0.0006022453308105469, -0.041107177734375, 0.06622314453125, 0.0025577545166015625, 0.05596923828125, -0.033233642578125, 0.0010242462158203125, 0.00595855712890625, 0.01442718505859375, -0.0175018310546875, -0.05474853515625, 0.004589080810546875, -0.00894927978515625, -0.0008091926574707031, -0.006923675537109375, 0.03778076171875, -0.03179931640625, -0.041046142578125, 0.0305633544921875, 0.0298919677734375, 0.005809783935546875, -0.00479888916015625, -0.07666015625, 0.0091705322265625, 0.007556915283203125, -0.049835205078125, 0.0131683349609375, 0.020477294921875, 0.00965118408203125, 0.050323486328125, 0.047576904296875, 0.002040863037109375, 0.00983428955078125, -0.0044403076171875, 0.061065673828125, -0.047607421875, -0.0268402099609375, -0.069580078125, 0.059326171875, -0.0216217041015625, -0.048370361328125, 0.052093505859375, 0.0634765625, 0.0648193359375, -0.002834320068359375, 0.0640869140625, -0.024169921875, 0.018218994140625, -0.0274810791015625, 0.060394287109375, -0.04705810546875, 0.0032958984375, -0.0162200927734375, -0.06170654296875, -0.00550079345703125, 0.05401611328125, -0.0246734619140625, 0.0161895751953125, 0.051605224609375, 0.0687255859375, -0.0006437301635742188, -0.0165863037109375, 0.0017757415771484375, 0.033294677734375, 0.033538818359375, 0.05413818359375, 0.033782958984375, -0.06256103515625, 0.038543701171875, -0.03692626953125, -0.01238250732421875, -0.0199737548828125, -0.051849365234375, -0.0633544921875, -0.046112060546875, -0.037506103515625, -0.048004150390625, -0.0139007568359375, 0.08624267578125, 0.06939697265625, -0.054351806640625, -0.0233154296875, -0.005435943603515625, 0.0036258697509765625, -0.007198333740234375, -0.0218505859375, 0.044464111328125, -0.0017347335815429688, -0.0758056640625, -0.0032958984375, 0.01238250732421875, 
0.0140838623046875, -0.01479339599609375, -0.02880859375, -0.0244598388671875, -0.00443267822265625, 0.038238525390625, 0.023040771484375, -0.051605224609375, -0.019195556640625, -0.0002999305725097656, -0.015625, 0.0112457275390625, 0.0304107666015625, -0.0251922607421875, 0.0283660888671875, 0.053009033203125, 0.0230560302734375, 0.050384521484375, -0.0032444000244140625, 0.01222991943359375, -0.033294677734375, 0.0284881591796875, 0.01483154296875, 0.033843994140625, 0.018035888671875, -0.0260467529296875, 0.030609130859375, 0.03448486328125, -0.04327392578125, -0.054412841796875, -0.007427215576171875, -0.0865478515625, -0.0094146728515625, 0.10400390625, -0.0177154541015625, -0.038970947265625, -0.0011205673217773438, -0.0247344970703125, 0.0261383056640625, -0.049224853515625, 0.05120849609375, 0.04669189453125, -0.00939178466796875, -0.00914764404296875, -0.051361083984375, 0.032196044921875, 0.01456451416015625, -0.07342529296875, -0.0016689300537109375, 0.0199737548828125, 0.0345458984375, 0.0220947265625, 0.040863037109375, -0.0103607177734375, 0.01204681396484375, 0.00571441650390625, 0.017547607421875, -0.00608062744140625, -0.00426483154296875, -0.01143646240234375, -0.0008263587951660156, -0.0147247314453125, -0.016815185546875 ] ]
facebook/mask2former-swin-large-coco-panoptic
2023-02-07T12:46:36.000Z
[ "transformers", "pytorch", "mask2former", "vision", "image-segmentation", "dataset:coco", "arxiv:2112.01527", "arxiv:2107.06278", "license:other", "endpoints_compatible", "has_space", "region:us" ]
image-segmentation
facebook
null
null
facebook/mask2former-swin-large-coco-panoptic
14
8,912
transformers
2023-01-02T16:24:12
--- license: other tags: - vision - image-segmentation datasets: - coco widget: - src: http://images.cocodataset.org/val2017/000000039769.jpg example_title: Cats --- # Mask2Former Mask2Former model trained on COCO panoptic segmentation (large-sized version, Swin backbone). It was introduced in the paper [Masked-attention Mask Transformer for Universal Image Segmentation ](https://arxiv.org/abs/2112.01527) and first released in [this repository](https://github.com/facebookresearch/Mask2Former/). Disclaimer: The team releasing Mask2Former did not write a model card for this model so this model card has been written by the Hugging Face team. ## Model description Mask2Former addresses instance, semantic and panoptic segmentation with the same paradigm: by predicting a set of masks and corresponding labels. Hence, all 3 tasks are treated as if they were instance segmentation. Mask2Former outperforms the previous SOTA, [MaskFormer](https://arxiv.org/abs/2107.06278) both in terms of performance an efficiency by (i) replacing the pixel decoder with a more advanced multi-scale deformable attention Transformer, (ii) adopting a Transformer decoder with masked attention to boost performance without without introducing additional computation and (iii) improving training efficiency by calculating the loss on subsampled points instead of whole masks. ![model image](https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/mask2former_architecture.png) ## Intended uses & limitations You can use this particular checkpoint for panoptic segmentation. See the [model hub](https://huggingface.co/models?search=mask2former) to look for other fine-tuned versions on a task that interests you. 
### How to use Here is how to use this model: ```python import requests import torch from PIL import Image from transformers import AutoImageProcessor, Mask2FormerForUniversalSegmentation # load Mask2Former fine-tuned on COCO panoptic segmentation processor = AutoImageProcessor.from_pretrained("facebook/mask2former-swin-large-coco-panoptic") model = Mask2FormerForUniversalSegmentation.from_pretrained("facebook/mask2former-swin-large-coco-panoptic") url = "http://images.cocodataset.org/val2017/000000039769.jpg" image = Image.open(requests.get(url, stream=True).raw) inputs = processor(images=image, return_tensors="pt") with torch.no_grad(): outputs = model(**inputs) # model predicts class_queries_logits of shape `(batch_size, num_queries)` # and masks_queries_logits of shape `(batch_size, num_queries, height, width)` class_queries_logits = outputs.class_queries_logits masks_queries_logits = outputs.masks_queries_logits # you can pass them to processor for postprocessing result = processor.post_process_panoptic_segmentation(outputs, target_sizes=[image.size[::-1]])[0] # we refer to the demo notebooks for visualization (see "Resources" section in the Mask2Former docs) predicted_panoptic_map = result["segmentation"] ``` For more code examples, we refer to the [documentation](https://huggingface.co/docs/transformers/master/en/model_doc/mask2former).
3,107
[ [ -0.048065185546875, -0.046112060546875, 0.0130767822265625, 0.033203125, -0.02252197265625, -0.0101776123046875, 0.0077972412109375, -0.05975341796875, 0.0191802978515625, 0.0496826171875, -0.048187255859375, -0.0191192626953125, -0.062744140625, -0.0235748291015625, -0.0092926025390625, 0.0672607421875, -0.005222320556640625, 0.0019388198852539062, -0.02001953125, 0.00528717041015625, -0.01580810546875, -0.0170135498046875, -0.05450439453125, -0.02447509765625, 0.01233673095703125, 0.02618408203125, 0.032928466796875, 0.04241943359375, 0.04327392578125, 0.0189666748046875, -0.0070648193359375, -0.002033233642578125, -0.032684326171875, -0.0156707763671875, 0.007110595703125, -0.04522705078125, -0.02337646484375, 0.011993408203125, 0.0318603515625, 0.0262908935546875, 0.01352691650390625, 0.0251617431640625, -0.0025234222412109375, 0.04461669921875, -0.048004150390625, 0.0285797119140625, -0.027557373046875, 0.02325439453125, -0.01529693603515625, 0.0259246826171875, -0.0187835693359375, -0.01715087890625, 0.01548004150390625, -0.036834716796875, 0.035552978515625, -0.011260986328125, 0.07366943359375, 0.01459503173828125, -0.006694793701171875, -0.00995635986328125, -0.032470703125, 0.041595458984375, -0.0260162353515625, 0.0168304443359375, 0.035858154296875, 0.056640625, 0.0182952880859375, -0.08447265625, -0.033355712890625, 0.0212554931640625, -0.001857757568359375, 0.0146942138671875, -0.0190887451171875, 0.003376007080078125, 0.0272979736328125, 0.0248565673828125, -0.042755126953125, -0.0005421638488769531, -0.0662841796875, -0.0268402099609375, 0.049163818359375, -0.01342010498046875, 0.022186279296875, -0.018768310546875, -0.044891357421875, -0.0242767333984375, -0.017578125, 0.03753662109375, 0.005046844482421875, -0.0227508544921875, -0.0196075439453125, 0.044342041015625, -0.005023956298828125, 0.05316162109375, 0.0276641845703125, -0.012908935546875, 0.0107269287109375, 0.007175445556640625, -0.029022216796875, -0.002758026123046875, 
0.04669189453125, 0.0380859375, 0.01335906982421875, 0.006866455078125, -0.005321502685546875, 0.0154876708984375, 0.01030731201171875, -0.08477783203125, -0.048065185546875, 0.005947113037109375, -0.0164794921875, -0.02264404296875, 0.028289794921875, -0.06976318359375, -0.00460052490234375, -0.0091094970703125, 0.027923583984375, -0.0238037109375, -0.00676727294921875, 0.006561279296875, -0.01885986328125, 0.041961669921875, 0.02459716796875, -0.0665283203125, 0.0272979736328125, 0.04071044921875, 0.0772705078125, 0.0009102821350097656, -0.0007343292236328125, -0.0162811279296875, -0.0032405853271484375, -0.023284912109375, 0.0679931640625, -0.039093017578125, -0.0038127899169921875, -0.021026611328125, 0.026153564453125, -0.026092529296875, -0.04571533203125, 0.0256500244140625, -0.0379638671875, 0.03363037109375, -0.02716064453125, -0.0120391845703125, -0.044036865234375, 0.00946807861328125, -0.040283203125, 0.08367919921875, 0.03662109375, -0.047607421875, 0.01364898681640625, -0.050750732421875, -0.01287078857421875, -0.0085296630859375, -0.0013484954833984375, -0.062164306640625, -0.0108642578125, 0.0362548828125, 0.0280914306640625, -0.01885986328125, -0.0004374980926513672, -0.0250701904296875, -0.00971221923828125, 0.003047943115234375, 0.00943756103515625, 0.0711669921875, 0.006000518798828125, -0.0538330078125, 0.015380859375, -0.026336669921875, -0.0008592605590820312, 0.02532958984375, 0.01140594482421875, 0.0173797607421875, -0.035919189453125, 0.0304107666015625, 0.048492431640625, 0.00287628173828125, -0.039642333984375, 0.01404571533203125, -0.023162841796875, 0.04986572265625, 0.040191650390625, 0.002838134765625, 0.0323486328125, -0.0121307373046875, 0.037872314453125, 0.0088348388671875, 0.039093017578125, -0.0014257431030273438, -0.05511474609375, -0.070068359375, -0.033203125, -0.0005645751953125, 0.028533935546875, -0.03045654296875, 0.028045654296875, 0.01213836669921875, -0.053863525390625, -0.0234222412109375, -0.0030574798583984375, 
0.0267181396484375, 0.046783447265625, 0.0241546630859375, -0.048126220703125, -0.05975341796875, -0.076904296875, 0.0213775634765625, 0.0148162841796875, -0.006473541259765625, 0.02557373046875, 0.0386962890625, -0.042144775390625, 0.07818603515625, -0.050506591796875, -0.0297088623046875, -0.0205078125, -0.006137847900390625, -0.00787353515625, 0.037567138671875, 0.06591796875, -0.056060791015625, -0.037353515625, -0.0232086181640625, -0.053466796875, -0.00258636474609375, 0.014617919921875, -0.025238037109375, 0.01641845703125, 0.0194091796875, -0.043792724609375, 0.040802001953125, 0.0303955078125, -0.022125244140625, 0.050750732421875, 0.0114288330078125, -0.0095672607421875, -0.061187744140625, 0.019317626953125, 0.00902557373046875, -0.0280914306640625, -0.0338134765625, 0.006122589111328125, 0.0098114013671875, -0.02484130859375, -0.042449951171875, 0.036041259765625, -0.042510986328125, -0.029327392578125, -0.0276947021484375, -0.01325225830078125, 0.024749755859375, 0.0487060546875, 0.0251922607421875, 0.034576416015625, 0.06842041015625, -0.033599853515625, 0.0300750732421875, 0.0245513916015625, -0.028411865234375, 0.0277862548828125, -0.067626953125, 0.0195465087890625, -0.01351165771484375, 0.049346923828125, -0.08087158203125, -0.04742431640625, 0.043365478515625, -0.024261474609375, 0.0227203369140625, -0.01064300537109375, -0.0135345458984375, -0.06353759765625, -0.040863037109375, 0.047393798828125, 0.04437255859375, -0.051788330078125, 0.0156707763671875, 0.039825439453125, 0.008026123046875, -0.027557373046875, -0.067626953125, -0.01276397705078125, -0.01143646240234375, -0.0662841796875, 0.030487060546875, 0.0024318695068359375, 0.006694793701171875, -0.01122283935546875, -0.0177154541015625, -0.004276275634765625, -0.031494140625, 0.02972412109375, 0.0243682861328125, -0.01049041748046875, -0.036590576171875, 0.00945281982421875, -0.0174713134765625, 0.01322174072265625, -0.030609130859375, 0.053558349609375, -0.014556884765625, 
-0.007354736328125, -0.051300048828125, 0.003589630126953125, 0.047027587890625, -0.0299072265625, 0.032867431640625, 0.0804443359375, -0.053741455078125, 0.0013408660888671875, -0.064208984375, -0.032073974609375, -0.034423828125, 0.0261383056640625, -0.0260467529296875, -0.05133056640625, 0.052032470703125, 0.01084136962890625, -0.0027923583984375, 0.049102783203125, 0.038818359375, 0.00835418701171875, 0.07611083984375, 0.047943115234375, 0.0198211669921875, 0.04180908203125, -0.06842041015625, 0.0117950439453125, -0.09051513671875, -0.05450439453125, -0.005481719970703125, -0.03509521484375, -0.0165252685546875, -0.06787109375, 0.0455322265625, 0.043304443359375, -0.01218414306640625, 0.04315185546875, -0.07098388671875, 0.0247344970703125, 0.036041259765625, 0.0211029052734375, -0.0286712646484375, 0.0174713134765625, 0.008209228515625, 0.0015726089477539062, -0.0482177734375, -0.0272979736328125, 0.05224609375, 0.043670654296875, 0.034759521484375, -0.0185394287109375, 0.021453857421875, -0.00771331787109375, 0.004489898681640625, -0.055206298828125, 0.032470703125, 0.003673553466796875, -0.0419921875, -0.006488800048828125, 0.004581451416015625, -0.05780029296875, 0.0251922607421875, 0.0010929107666015625, -0.08856201171875, 0.037567138671875, 0.01103973388671875, -0.0252532958984375, 0.0250701904296875, -0.05126953125, 0.0765380859375, -0.0099334716796875, -0.029998779296875, 0.010009765625, -0.068603515625, 0.043365478515625, 0.0086822509765625, -0.01409149169921875, -0.00873565673828125, 0.0185394287109375, 0.09027099609375, -0.036590576171875, 0.07086181640625, -0.0276947021484375, 0.0204925537109375, 0.048553466796875, -0.010162353515625, 0.0250396728515625, 0.0219879150390625, 0.00579071044921875, 0.027984619140625, 0.0107269287109375, -0.0389404296875, -0.041778564453125, 0.03765869140625, -0.070068359375, -0.029510498046875, -0.0279998779296875, -0.022064208984375, 0.0095672607421875, 0.0097808837890625, 0.062744140625, 0.0226287841796875, 
0.006679534912109375, -0.0011043548583984375, 0.04241943359375, -0.003650665283203125, 0.03509521484375, -0.0057373046875, -0.0206146240234375, -0.045013427734375, 0.04638671875, 0.00537872314453125, 0.01499176025390625, 0.0212249755859375, 0.0199432373046875, -0.03509521484375, 0.004146575927734375, -0.04302978515625, 0.030975341796875, -0.04180908203125, -0.0310821533203125, -0.06622314453125, -0.03765869140625, -0.0625, -0.030029296875, -0.042449951171875, -0.039093017578125, -0.0244903564453125, 0.0006642341613769531, 0.0239105224609375, 0.032073974609375, -0.0189056396484375, 0.038299560546875, -0.0181121826171875, 0.017303466796875, 0.043701171875, 0.0154876708984375, -0.01259613037109375, -0.0299835205078125, 0.0016641616821289062, 0.0046539306640625, -0.045623779296875, -0.0626220703125, 0.0252532958984375, 0.008880615234375, 0.0201568603515625, 0.0543212890625, -0.014129638671875, 0.053253173828125, -0.0003082752227783203, 0.05340576171875, 0.038116455078125, -0.06292724609375, 0.0565185546875, -0.0010480880737304688, 0.0175323486328125, 0.0207366943359375, 0.0154876708984375, -0.041290283203125, -0.005161285400390625, -0.044891357421875, -0.06689453125, 0.0867919921875, 0.01064300537109375, -0.01345062255859375, 0.0205230712890625, 0.032928466796875, 0.00922393798828125, 0.0018863677978515625, -0.057220458984375, -0.0127716064453125, -0.044036865234375, 0.0150604248046875, -0.009033203125, -0.041259765625, -0.006778717041015625, -0.040069580078125, 0.04534912109375, -0.0039520263671875, 0.04888916015625, 0.029693603515625, -0.015380859375, -0.019775390625, -0.0357666015625, 0.047149658203125, 0.04486083984375, -0.01641845703125, 0.0151519775390625, -0.0021343231201171875, -0.04437255859375, -0.0133056640625, 0.01389312744140625, -0.01491546630859375, -0.01244354248046875, 0.02734375, 0.082763671875, -0.00023376941680908203, -0.0213470458984375, 0.043670654296875, 0.01080322265625, -0.0224761962890625, -0.02880859375, 0.0090789794921875, 
-0.00321197509765625, 0.0207977294921875, 0.0099639892578125, 0.02874755859375, 0.01904296875, -0.023162841796875, 0.01549530029296875, 0.0224609375, -0.03851318359375, -0.033050537109375, 0.06304931640625, -0.0082550048828125, -0.0172119140625, 0.04412841796875, -0.01131439208984375, -0.0743408203125, 0.07501220703125, 0.051727294921875, 0.056884765625, -0.028533935546875, 0.028900146484375, 0.052825927734375, 0.0171356201171875, -0.0025386810302734375, -0.005084991455078125, -0.015838623046875, -0.027862548828125, -0.0030193328857421875, -0.051055908203125, -0.006381988525390625, 0.01409149169921875, -0.0443115234375, 0.0289306640625, -0.048065185546875, -0.005542755126953125, 0.0102691650390625, 0.01031494140625, -0.0601806640625, 0.03125, 0.01666259765625, 0.0615234375, -0.06298828125, 0.0489501953125, 0.06396484375, -0.02130126953125, -0.053131103515625, -0.0172882080078125, 0.004390716552734375, -0.07513427734375, 0.0205078125, 0.05902099609375, 0.00356292724609375, -0.0173797607421875, -0.036346435546875, -0.0614013671875, 0.09368896484375, 0.0239105224609375, -0.034027099609375, 0.00007021427154541016, 0.024658203125, 0.0222320556640625, -0.037567138671875, 0.042449951171875, 0.040252685546875, 0.0384521484375, 0.04486083984375, -0.050140380859375, 0.007022857666015625, -0.0242462158203125, 0.0186920166015625, -0.0056915283203125, -0.0635986328125, 0.06024169921875, -0.0298919677734375, -0.00443267822265625, -0.00879669189453125, 0.045501708984375, 0.017181396484375, 0.0406494140625, 0.037353515625, 0.046905517578125, 0.04010009765625, -0.00814056396484375, 0.06744384765625, -0.006504058837890625, 0.04345703125, 0.048187255859375, 0.01021575927734375, 0.0304718017578125, 0.0241546630859375, 0.0014410018920898438, 0.031585693359375, 0.07940673828125, -0.0236053466796875, 0.039154052734375, 0.00868988037109375, 0.00029277801513671875, -0.00968170166015625, 0.00553131103515625, -0.0400390625, 0.0556640625, 0.019287109375, -0.03076171875, -0.0163421630859375, 
0.0250396728515625, 0.00514984130859375, -0.031494140625, -0.01519012451171875, 0.042816162109375, 0.0018634796142578125, -0.055084228515625, 0.0513916015625, 0.022613525390625, 0.046630859375, -0.0318603515625, 0.00739288330078125, -0.0137939453125, 0.01555633544921875, -0.03009033203125, -0.049163818359375, 0.049285888671875, -0.01617431640625, -0.01375579833984375, 0.006439208984375, 0.054595947265625, -0.0241851806640625, -0.06292724609375, 0.01824951171875, -0.006343841552734375, 0.0252838134765625, -0.0219268798828125, -0.06591796875, 0.033416748046875, 0.00008130073547363281, -0.03253173828125, 0.01446533203125, -0.0030975341796875, -0.01322174072265625, 0.02978515625, 0.0364990234375, -0.028533935546875, 0.006168365478515625, -0.0125885009765625, 0.0721435546875, -0.019744873046875, -0.042388916015625, -0.043212890625, 0.03448486328125, -0.0157623291015625, -0.0213165283203125, 0.0364990234375, 0.0689697265625, 0.06414794921875, -0.017669677734375, 0.039459228515625, -0.01555633544921875, -0.004241943359375, -0.021148681640625, 0.0404052734375, -0.03009033203125, -0.011474609375, -0.0246429443359375, -0.09326171875, -0.029327392578125, 0.07763671875, -0.041595458984375, 0.011322021484375, 0.036651611328125, 0.07061767578125, -0.034759521484375, -0.004444122314453125, 0.00032711029052734375, -0.005626678466796875, 0.0286407470703125, 0.042633056640625, 0.0187530517578125, -0.050506591796875, 0.0256195068359375, -0.0634765625, -0.045501708984375, -0.029296875, -0.019805908203125, -0.06707763671875, -0.05596923828125, -0.038848876953125, -0.0310821533203125, -0.003604888916015625, 0.03411865234375, 0.10211181640625, -0.056304931640625, -0.0111846923828125, -0.0178680419921875, -0.0018978118896484375, -0.019195556640625, -0.0242767333984375, 0.048736572265625, 0.0006680488586425781, -0.06536865234375, -0.005435943603515625, 0.0253448486328125, -0.00003421306610107422, -0.01000213623046875, -0.006000518798828125, 0.0062713623046875, -0.0026378631591796875, 
0.0543212890625, 0.0335693359375, -0.058807373046875, -0.019317626953125, -0.002696990966796875, -0.0020275115966796875, 0.0179595947265625, 0.054840087890625, -0.044952392578125, 0.0413818359375, 0.0243072509765625, 0.0178680419921875, 0.086181640625, 0.0013589859008789062, 0.00460052490234375, -0.03436279296875, 0.0229949951171875, 0.01470947265625, 0.02740478515625, 0.02862548828125, -0.04058837890625, 0.034881591796875, 0.029632568359375, -0.038970947265625, -0.045501708984375, 0.0207061767578125, -0.10906982421875, -0.009429931640625, 0.08172607421875, -0.014312744140625, -0.04278564453125, 0.02178955078125, -0.040618896484375, 0.033935546875, -0.005466461181640625, 0.0653076171875, 0.01543426513671875, -0.029998779296875, -0.037994384765625, -0.0092010498046875, 0.035552978515625, 0.00847625732421875, -0.054931640625, -0.0253448486328125, 0.024383544921875, 0.048614501953125, 0.018218994140625, 0.03961181640625, -0.0285797119140625, 0.032745361328125, 0.00787353515625, 0.0172576904296875, -0.018707275390625, -0.024200439453125, -0.006816864013671875, 0.0182647705078125, -0.0246429443359375, -0.0411376953125 ] ]
Uminosachi/realisticVisionV40_v40VAE-inpainting
2023-08-01T01:07:08.000Z
[ "diffusers", "license:creativeml-openrail-m", "endpoints_compatible", "diffusers:StableDiffusionPipeline", "region:us" ]
null
Uminosachi
null
null
Uminosachi/realisticVisionV40_v40VAE-inpainting
0
8,906
diffusers
2023-08-01T00:55:06
--- license: creativeml-openrail-m --- This is an inpainting model, which has been converted from the [realisticVisionV40_v40VAE-inpainting](https://civitai.com/models/4201?modelVersionId=114600).
196
[ [ -0.0229034423828125, -0.01485443115234375, 0.0308074951171875, 0.006221771240234375, -0.034332275390625, 0.0206146240234375, 0.0328369140625, -0.036712646484375, 0.0357666015625, 0.07293701171875, -0.0718994140625, 0.018707275390625, -0.0084228515625, -0.028289794921875, -0.0261383056640625, 0.031951904296875, -0.0032596588134765625, 0.031219482421875, -0.0236663818359375, 0.00913238525390625, -0.015716552734375, -0.01995849609375, -0.037139892578125, -0.03662109375, 0.00909423828125, 0.0345458984375, 0.033447265625, 0.0198974609375, 0.041656494140625, 0.01715087890625, 0.0037384033203125, -0.00928497314453125, -0.038543701171875, -0.0311431884765625, 0.0019664764404296875, -0.058441162109375, -0.04241943359375, 0.0221405029296875, 0.0263519287109375, 0.00682830810546875, -0.01136016845703125, 0.0205841064453125, -0.01495361328125, 0.032501220703125, -0.058380126953125, 0.00530242919921875, -0.0025348663330078125, 0.01534271240234375, -0.0208282470703125, -0.0135498046875, -0.028839111328125, -0.0232086181640625, -0.0161895751953125, -0.0606689453125, 0.0168304443359375, -0.0184478759765625, 0.09625244140625, 0.0195770263671875, -0.0216217041015625, 0.0206298828125, -0.081787109375, 0.02996826171875, -0.05706787109375, 0.03533935546875, 0.00641632080078125, 0.0816650390625, -0.0294952392578125, -0.0985107421875, -0.0283050537109375, -0.0024967193603515625, 0.0208892822265625, 0.0233612060546875, -0.0250091552734375, -0.001995086669921875, 0.030120849609375, 0.04071044921875, -0.0209197998046875, -0.0079803466796875, -0.04931640625, -0.007213592529296875, 0.03955078125, 0.0000998377799987793, 0.0352783203125, 0.0154571533203125, -0.057525634765625, -0.0103912353515625, -0.0496826171875, 0.0010023117065429688, 0.0204620361328125, -0.018524169921875, -0.003261566162109375, 0.056671142578125, -0.0258026123046875, 0.07049560546875, 0.00689697265625, -0.00565338134765625, 0.0052490234375, 0.0019588470458984375, -0.040802001953125, 0.0021457672119140625, 
0.000024139881134033203, 0.0699462890625, 0.035491943359375, -0.001194000244140625, -0.0193634033203125, -0.002727508544921875, 0.054443359375, -0.10009765625, -0.034393310546875, -0.006011962890625, -0.0298004150390625, -0.0162811279296875, 0.0240631103515625, -0.027099609375, 0.001476287841796875, -0.013641357421875, 0.0205230712890625, -0.038543701171875, -0.0217437744140625, 0.005405426025390625, -0.00954437255859375, 0.01091766357421875, 0.04034423828125, -0.0249176025390625, 0.01898193359375, 0.0165252685546875, 0.044708251953125, 0.026153564453125, 0.0115509033203125, -0.01448822021484375, 0.00858306884765625, -0.039154052734375, 0.0677490234375, -0.0301666259765625, -0.04022216796875, 0.006862640380859375, 0.03363037109375, 0.0218048095703125, -0.052703857421875, 0.047119140625, -0.055267333984375, 0.009185791015625, 0.0003943443298339844, -0.0499267578125, -0.045806884765625, 0.0242767333984375, -0.0570068359375, 0.059661865234375, 0.0305328369140625, -0.037078857421875, 0.041168212890625, -0.0355224609375, 0.0302581787109375, 0.040863037109375, 0.018524169921875, -0.04779052734375, 0.023590087890625, -0.0195465087890625, 0.01131439208984375, -0.0185394287109375, -0.0024356842041015625, -0.050994873046875, -0.039794921875, 0.00569915771484375, -0.01430511474609375, 0.05377197265625, 0.020965576171875, 0.0105743408203125, 0.0306549072265625, -0.0888671875, -0.0058441162109375, 0.003757476806640625, 0.004871368408203125, -0.0199127197265625, -0.035888671875, 0.01189422607421875, 0.045501708984375, 0.030731201171875, -0.06561279296875, 0.0213623046875, -0.038818359375, 0.01142120361328125, 0.0128021240234375, 0.037841796875, 0.031951904296875, -0.04718017578125, 0.04705810546875, -0.009552001953125, 0.038177490234375, 0.023101806640625, -0.05706787109375, -0.0758056640625, -0.032928466796875, -0.00030612945556640625, 0.0108642578125, -0.06634521484375, -0.004383087158203125, -0.00513458251953125, -0.076416015625, -0.0301666259765625, -0.03216552734375, 
0.021148681640625, 0.02813720703125, 0.0035419464111328125, -0.048797607421875, -0.029388427734375, -0.083740234375, 0.0002961158752441406, 0.005115509033203125, -0.0340576171875, 0.0034618377685546875, 0.034576416015625, -0.01526641845703125, 0.03466796875, -0.022430419921875, -0.01885986328125, 0.0147857666015625, -0.01947021484375, 0.0304107666015625, 0.0396728515625, 0.07080078125, -0.045684814453125, -0.060028076171875, -0.0109100341796875, -0.039794921875, 0.0124359130859375, 0.0025730133056640625, -0.029449462890625, -0.0190277099609375, 0.06060791015625, -0.0173187255859375, 0.05718994140625, 0.038482666015625, -0.019989013671875, 0.04913330078125, -0.04052734375, 0.041412353515625, -0.06622314453125, 0.002689361572265625, 0.005767822265625, -0.043182373046875, -0.043975830078125, 0.04522705078125, 0.0245208740234375, -0.01245880126953125, -0.0557861328125, 0.0263671875, -0.05157470703125, 0.00957489013671875, -0.03424072265625, -0.0242919921875, -0.002819061279296875, 0.0256195068359375, -0.00797271728515625, 0.039642333984375, 0.03387451171875, -0.0247650146484375, 0.0699462890625, 0.0201568603515625, -0.0499267578125, 0.0509033203125, -0.046051025390625, 0.017578125, -0.01500701904296875, 0.0150146484375, -0.0640869140625, -0.046112060546875, 0.034088134765625, -0.00504302978515625, 0.005626678466796875, -0.03985595703125, -0.0263214111328125, -0.0192413330078125, -0.00931549072265625, 0.045379638671875, 0.0301666259765625, -0.0389404296875, 0.0504150390625, 0.00893402099609375, 0.005237579345703125, 0.0156402587890625, -0.057220458984375, 0.00449371337890625, -0.0279388427734375, -0.031768798828125, 0.06201171875, -0.00927734375, -0.0281219482421875, -0.0013513565063476562, 0.0243377685546875, -0.029693603515625, -0.0311737060546875, 0.0242462158203125, 0.050048828125, -0.023712158203125, -0.0238494873046875, -0.00382232666015625, 0.00035119056701660156, 0.0081939697265625, 0.033355712890625, 0.047088623046875, 0.0123291015625, -0.029998779296875, 
-0.05938720703125, 0.0231475830078125, 0.08251953125, -0.006256103515625, 0.05126953125, 0.0164031982421875, -0.0677490234375, -0.0083465576171875, -0.034637451171875, -0.021392822265625, -0.032257080078125, 0.01137542724609375, -0.050567626953125, -0.003757476806640625, 0.028533935546875, -0.0302581787109375, -0.026519775390625, 0.04827880859375, 0.0252685546875, -0.005847930908203125, 0.059814453125, 0.052642822265625, 0.03631591796875, 0.05877685546875, -0.057403564453125, -0.0352783203125, -0.057281494140625, -0.053436279296875, -0.00594329833984375, -0.015655517578125, -0.0217132568359375, -0.04681396484375, 0.0267486572265625, -0.007778167724609375, -0.02362060546875, 0.0318603515625, -0.03192138671875, 0.027862548828125, 0.0423583984375, 0.050201416015625, 0.01123809814453125, -0.01947021484375, 0.0294647216796875, -0.036468505859375, -0.027008056640625, -0.0316162109375, 0.055084228515625, 0.0015468597412109375, 0.0247039794921875, 0.01047515869140625, 0.0212249755859375, 0.00839996337890625, 0.037811279296875, -0.0298309326171875, 0.020965576171875, -0.0155487060546875, -0.06329345703125, 0.00861358642578125, 0.024505615234375, -0.03656005859375, 0.01091766357421875, -0.061553955078125, -0.02032470703125, 0.03485107421875, 0.0001310110092163086, -0.01059722900390625, 0.0304107666015625, -0.0416259765625, 0.057373046875, 0.0008234977722167969, 0.005664825439453125, -0.040496826171875, -0.0255279541015625, 0.05145263671875, -0.0021820068359375, -0.004730224609375, 0.0018014907836914062, 0.0250244140625, 0.025909423828125, -0.054718017578125, 0.03704833984375, -0.01284027099609375, 0.01338958740234375, 0.0147247314453125, 0.01493072509765625, 0.0467529296875, -0.0005016326904296875, -0.0103607177734375, -0.031585693359375, -0.0144500732421875, -0.041717529296875, -0.034149169921875, 0.047637939453125, -0.04730224609375, -0.0222015380859375, -0.030303955078125, -0.01328277587890625, 0.0020236968994140625, 0.0021038055419921875, 0.0528564453125, 
0.055511474609375, -0.05499267578125, 0.0017719268798828125, 0.052764892578125, 0.012725830078125, 0.0289764404296875, 0.0151519775390625, -0.034149169921875, -0.0103759765625, 0.053131103515625, 0.009429931640625, 0.04486083984375, 0.026153564453125, -0.00994873046875, -0.0125885009765625, -0.02947998046875, -0.035858154296875, 0.033966064453125, -0.03558349609375, 0.007419586181640625, 0.012176513671875, -0.04534912109375, -0.01232147216796875, -0.01995849609375, -0.045166015625, -0.0231170654296875, -0.05413818359375, 0.01317596435546875, 0.0213623046875, 0.06207275390625, 0.03582763671875, 0.0292205810546875, -0.049652099609375, 0.01739501953125, 0.057586669921875, 0.0066070556640625, -0.03668212890625, -0.07086181640625, -0.017578125, 0.00656890869140625, -0.0230865478515625, -0.045562744140625, 0.056793212890625, 0.021636962890625, 0.03411865234375, 0.01702880859375, -0.00901031494140625, 0.064453125, -0.041015625, 0.047576904296875, 0.0182037353515625, -0.0299835205078125, 0.0185394287109375, -0.020172119140625, 0.0243377685546875, 0.039581298828125, 0.016998291015625, -0.018890380859375, -0.0031948089599609375, -0.0880126953125, -0.046112060546875, 0.02197265625, -0.005279541015625, -0.0027866363525390625, 0.048370361328125, 0.03448486328125, 0.005786895751953125, 0.0182037353515625, -0.02728271484375, -0.0094757080078125, -0.040557861328125, 0.0184478759765625, -0.01497650146484375, -0.0241851806640625, -0.00716400146484375, -0.0260162353515625, 0.0662841796875, 0.0015172958374023438, 0.025634765625, 0.01025390625, -0.00423431396484375, -0.0015468597412109375, -0.0247650146484375, 0.0496826171875, 0.026092529296875, -0.053924560546875, -0.0202789306640625, -0.0037326812744140625, -0.01678466796875, 0.000293731689453125, -0.0067291259765625, -0.0007033348083496094, 0.01354217529296875, 0.01508331298828125, 0.0771484375, 0.01549530029296875, -0.0205078125, 0.05059814453125, -0.0019550323486328125, 0.0141754150390625, -0.07257080078125, 0.0065155029296875, 
-0.00029850006103515625, 0.04400634765625, 0.0014448165893554688, 0.040863037109375, 0.046875, -0.051055908203125, 0.0016832351684570312, 0.00823211669921875, -0.05718994140625, -0.035552978515625, 0.0860595703125, 0.0193939208984375, -0.07220458984375, 0.060089111328125, 0.0036163330078125, 0.00782012939453125, 0.0401611328125, 0.034271240234375, 0.0751953125, -0.02459716796875, 0.021575927734375, 0.0496826171875, -0.01197052001953125, 0.00047779083251953125, 0.032623291015625, 0.022186279296875, -0.034332275390625, -0.018341064453125, -0.0194244384765625, -0.061614990234375, -0.000812530517578125, -0.06964111328125, 0.03564453125, -0.035980224609375, -0.0079498291015625, 0.0013704299926757812, -0.033905029296875, -0.03460693359375, 0.06683349609375, 0.020355224609375, 0.08197021484375, -0.070556640625, 0.0960693359375, 0.060394287109375, -0.040557861328125, -0.0246124267578125, -0.006328582763671875, -0.01435089111328125, -0.064453125, 0.01496124267578125, -0.004932403564453125, -0.010040283203125, -0.0018606185913085938, -0.059539794921875, -0.049346923828125, 0.06640625, 0.055999755859375, -0.047607421875, -0.0057830810546875, -0.0118255615234375, 0.037567138671875, -0.045806884765625, 0.00848388671875, 0.031768798828125, 0.028228759765625, 0.015289306640625, -0.049560546875, -0.01220703125, -0.05255126953125, 0.038543701171875, -0.00962066650390625, -0.065673828125, 0.062347412109375, 0.01023101806640625, 0.006145477294921875, 0.030670166015625, 0.07373046875, 0.030029296875, -0.01493072509765625, 0.0517578125, 0.041656494140625, 0.0167083740234375, -0.012847900390625, 0.078125, -0.005619049072265625, 0.01136016845703125, 0.052764892578125, 0.0134124755859375, 0.052825927734375, 0.043670654296875, -0.0074462890625, 0.06451416015625, 0.05975341796875, -0.00565338134765625, 0.06146240234375, 0.0087890625, -0.051513671875, -0.029205322265625, -0.01422119140625, -0.0111236572265625, 0.0345458984375, 0.0250244140625, -0.0131378173828125, -0.0015745162963867188, 
0.01194000244140625, -0.0196075439453125, 0.02716064453125, -0.025665283203125, 0.0419921875, -0.007488250732421875, -0.03277587890625, 0.052978515625, -0.00696563720703125, 0.036895751953125, -0.04766845703125, -0.03228759765625, 0.00986480712890625, 0.0250091552734375, -0.0022563934326171875, -0.05096435546875, 0.0304107666015625, -0.038421630859375, -0.0143280029296875, -0.017303466796875, 0.04681396484375, -0.02728271484375, -0.062408447265625, 0.020751953125, -0.01151275634765625, 0.04119873046875, -0.0159912109375, -0.039215087890625, -0.0006732940673828125, 0.01568603515625, -0.02154541015625, 0.0086822509765625, 0.0016155242919921875, -0.019500732421875, 0.044219970703125, 0.0182647705078125, 0.02191162109375, 0.0174713134765625, 0.0037708282470703125, 0.045684814453125, -0.0282440185546875, -0.03521728515625, -0.01172637939453125, 0.056365966796875, -0.03778076171875, -0.043609619140625, 0.038848876953125, 0.052886962890625, 0.06781005859375, -0.07733154296875, 0.036407470703125, 0.0192413330078125, 0.0126800537109375, -0.037841796875, 0.06640625, -0.0594482421875, -0.0382080078125, -0.0167694091796875, -0.0745849609375, -0.01024627685546875, 0.039642333984375, 0.037445068359375, -0.01447296142578125, 0.009429931640625, 0.05621337890625, -0.0111846923828125, -0.0217437744140625, 0.055511474609375, 0.01215362548828125, 0.0274505615234375, 0.00004094839096069336, 0.035980224609375, -0.0457763671875, 0.01336669921875, -0.05401611328125, -0.02813720703125, -0.024627685546875, -0.059661865234375, -0.04193115234375, -0.05682373046875, -0.033538818359375, -0.016815185546875, -0.0038928985595703125, 0.0279693603515625, 0.060333251953125, -0.04974365234375, -0.0275421142578125, 0.00252532958984375, -0.04437255859375, -0.0008335113525390625, -0.00963592529296875, -0.0109100341796875, 0.0426025390625, -0.0938720703125, 0.046356201171875, 0.01953125, 0.034027099609375, -0.0286407470703125, 0.02728271484375, -0.005626678466796875, 0.0155181884765625, 0.0186767578125, 
0.027557373046875, -0.042999267578125, -0.033935546875, -0.0296783447265625, 0.0029926300048828125, 0.0126190185546875, 0.0280609130859375, -0.0440673828125, 0.055877685546875, 0.038787841796875, -0.00388336181640625, 0.10162353515625, -0.0089569091796875, 0.043792724609375, -0.036376953125, 0.045684814453125, 0.0062408447265625, 0.05157470703125, 0.0313720703125, -0.0074462890625, 0.036773681640625, 0.030426025390625, -0.047119140625, -0.054168701171875, 0.0218505859375, -0.093505859375, 0.00347137451171875, 0.0511474609375, -0.00537109375, -0.044830322265625, 0.016632080078125, -0.048614501953125, 0.041656494140625, 0.0035953521728515625, 0.03057861328125, 0.036468505859375, 0.0086517333984375, -0.02947998046875, -0.0272979736328125, 0.0159759521484375, -0.0241851806640625, -0.05499267578125, -0.049224853515625, 0.024566650390625, 0.03424072265625, 0.01522064208984375, 0.0214996337890625, -0.0286407470703125, 0.0300750732421875, 0.01371002197265625, 0.067138671875, 0.00785064697265625, -0.01476287841796875, -0.0007843971252441406, 0.0121612548828125, 0.01508331298828125, -0.040557861328125 ] ]
Ojimi/anime-kawai-diffusion
2023-07-14T11:39:06.000Z
[ "diffusers", "text-to-image", "anime", "pytorch", "art", "stable diffusion", "en", "license:creativeml-openrail-m", "endpoints_compatible", "has_space", "diffusers:StableDiffusionPipeline", "region:us" ]
text-to-image
Ojimi
null
null
Ojimi/anime-kawai-diffusion
116
8,898
diffusers
2023-02-09T15:30:12
--- license: creativeml-openrail-m language: - en library_name: diffusers pipeline_tag: text-to-image tags: - text-to-image - anime - pytorch - diffusers - art - stable diffusion --- ![](assets/preview.png) ## Introduction: - I don't know how to introduce it, but it's been renamed several times. It is an open, free to use and fine-tune AI-art model. It was created by my curiosity. Hope you will like it. Have fun! (●'◡'●). ## Use: - For 🧨Diffusers: ```python from diffusers import DiffusionPipeline pipe = DiffusionPipeline.from_pretrained("Ojimi/anime-kawai-diffusion") pipe = pipe.to("cuda") prompt = "1girl, animal ears, long hair, solo, cat ears, choker, bare shoulders, red eyes, fang, looking at viewer, animal ear fluff, upper body, black hair, blush, closed mouth, off shoulder, bangs, bow, collarbone" image = pipe(prompt, negative_prompt="lowres, bad anatomy").images[0] ``` ## Tips: - The `masterpiece` and `best quality` tags are not necessary, as it sometimes leads to contradictory results, but if it is distorted or discolored, add them now. - The CGF scale should be 7.5 and the step count 28 for the best quality and best performance. - Use a sample photo for your idea. `Interrogate DeepBooru` and change the prompts to suit what you want. - You should use it as a supportive tool for creating works of art, and not rely on it completely. - The Clip skip should be 2. ## **Limitations:** - The drawing is hard, not soft. - Loss of detail, errors, bad human-like (six-fingered hand) details, deformation, blurring, and unclear images are inevitable. - ⚠️Content may not be appropriate for all ages: As it is trained on data that includes adult content, the generated images may contain content not suitable for children (depending on your country there will be a specific regulation about it). If you do not want to appear adult content, make sure you have additional safety measures in place, such as adding "nsfw" to the negative prompt. 
- The results generated by the model are considered impressive. But unfortunately, currently, it only supports the English language, to use multilingual, consider using third-party translation programs. - The model is trained on the `Danbooru` and `Nai` tagging system, so the long text may result in poor results. - My amount of money: 0 USD =((. ![](money-wallet.gif) ## **Desires:** As it is a version made only by myself and my small associates, the model will not be perfect and may differ from what people expect. Any contributions from everyone will be respected. Want to support me? Thank you, please help me make it better. ❤️ ## Special Thank: This wouldn't have happened if they hadn't made a breakthrough. - [Runwayml](https://huggingface.co/runwayml/): Base model. - [CompVis](https://github.com/CompVis/): VAE Trainer. - stabilityai: [stabilityai/sd-vae-ft-mse-original · Hugging Face](https://huggingface.co/stabilityai/sd-vae-ft-mse-original) - [d8ahazard](https://github.com/d8ahazard/.sd_dreambooth_extension) : Dreambooth. - [Automatic1111](https://github.com/AUTOMATIC1111/) : Web UI. - [Mikubill](https://github.com/Mikubill/): Where my ideas started. - Chat-GPT: Help me do crazy things that I thought I would never do. - Novel AI, Anything Model, Abyss Orange Model: Dataset images. An AI made me thousands of pictures without worrying about copyright or dispute. - Danbooru: Help me write the correct tag. - My friend and others: Get quality images. - And You 🫵❤️ ## Copyright: This license allows anyone to copy, and modify the model, but please follow the terms of the CreativeML Open RAIL-M. You can learn more about the CreativeML Open RAIL-M [here](https://huggingface.co/spaces/CompVis/stable-diffusion-license). If any part of the model does not comply with the terms of the GNU General Public License, the copyright and other rights of the model will still be valid. 
All AI-generated images are yours, you can do whatever you want, but please obey the laws of your country. We will not be responsible for any problems you cause. We allow you to merge with another model, but if you share that merge model, don't forget to add me to the credits. Don't forget me. # Have fun with your waifu! (●'◡'●) Do you want to sponsor computing resources for us? Thank you . Please sponsor to me on Ko-fi at https://ko-fi.com/projectk.
4,334
[ [ -0.04144287109375, -0.063232421875, 0.02215576171875, 0.01509857177734375, -0.0230560302734375, -0.00911712646484375, 0.01056671142578125, -0.05169677734375, 0.0279693603515625, 0.035247802734375, -0.044830322265625, -0.033599853515625, -0.0447998046875, -0.00777435302734375, -0.021881103515625, 0.067626953125, -0.0009918212890625, -0.004390716552734375, 0.0022907257080078125, 0.0107879638671875, -0.05230712890625, -0.000030159950256347656, -0.05792236328125, -0.034149169921875, 0.0274810791015625, 0.0310821533203125, 0.057525634765625, 0.06207275390625, 0.030914306640625, 0.0242462158203125, -0.00399017333984375, -0.0020122528076171875, -0.04644775390625, -0.030487060546875, 0.0022792816162109375, -0.0218963623046875, -0.058197021484375, 0.0089111328125, 0.0203399658203125, 0.0269012451171875, 0.0011882781982421875, 0.001789093017578125, -0.005146026611328125, 0.052764892578125, -0.0260772705078125, 0.0113983154296875, -0.007045745849609375, 0.01187896728515625, -0.0175323486328125, 0.033660888671875, 0.0017881393432617188, -0.01436614990234375, -0.00899505615234375, -0.06689453125, 0.001220703125, 0.0008563995361328125, 0.0892333984375, 0.0181427001953125, -0.0178070068359375, -0.01180267333984375, -0.05474853515625, 0.0291900634765625, -0.055999755859375, 0.0085296630859375, 0.0183563232421875, 0.038665771484375, -0.00848388671875, -0.06622314453125, -0.042205810546875, -0.01611328125, 0.0146331787109375, 0.0264892578125, -0.0175018310546875, -0.004985809326171875, 0.0133819580078125, 0.04345703125, -0.052215576171875, 0.01059722900390625, -0.0391845703125, 0.0018796920776367188, 0.07122802734375, 0.0195770263671875, 0.048095703125, 0.0010833740234375, -0.0281219482421875, -0.01904296875, -0.0445556640625, 0.00653839111328125, 0.03143310546875, -0.00647735595703125, -0.04412841796875, 0.04925537109375, -0.0108642578125, 0.021148681640625, 0.006259918212890625, -0.007610321044921875, 0.01210784912109375, -0.01036834716796875, -0.0240936279296875, 
-0.0203094482421875, 0.07684326171875, 0.039764404296875, 0.00421905517578125, 0.0023288726806640625, -0.007343292236328125, 0.00047397613525390625, 0.005428314208984375, -0.06878662109375, -0.0400390625, 0.0333251953125, -0.058380126953125, -0.040618896484375, -0.0194244384765625, -0.0494384765625, -0.0246734619140625, -0.0033969879150390625, 0.042266845703125, -0.051727294921875, -0.0482177734375, 0.011444091796875, -0.0213775634765625, 0.014190673828125, 0.041168212890625, -0.044769287109375, 0.01161956787109375, 0.0261688232421875, 0.06170654296875, 0.0199127197265625, -0.006328582763671875, 0.00872802734375, 0.00228118896484375, -0.043365478515625, 0.05181884765625, -0.0256195068359375, -0.05255126953125, -0.039337158203125, 0.00551605224609375, 0.00856781005859375, -0.027069091796875, 0.0439453125, -0.0218048095703125, 0.0192108154296875, 0.00228118896484375, -0.04840087890625, -0.029876708984375, -0.0057220458984375, -0.048858642578125, 0.034149169921875, 0.0080718994140625, -0.04150390625, 0.007343292236328125, -0.08624267578125, -0.0186309814453125, 0.020904541015625, -0.004486083984375, -0.024749755859375, -0.01094818115234375, 0.00749969482421875, 0.0167236328125, -0.0214080810546875, 0.033416748046875, -0.03033447265625, -0.020843505859375, 0.004100799560546875, -0.019500732421875, 0.089111328125, 0.032257080078125, -0.034759521484375, 0.004138946533203125, -0.051361083984375, -0.01146697998046875, 0.04608154296875, 0.006744384765625, -0.01068878173828125, -0.00714874267578125, 0.01247406005859375, 0.005001068115234375, 0.023712158203125, -0.032745361328125, 0.037139892578125, -0.03546142578125, 0.038055419921875, 0.058685302734375, 0.00262451171875, 0.029266357421875, -0.0418701171875, 0.050567626953125, 0.0093231201171875, 0.02056884765625, -0.01519012451171875, -0.06488037109375, -0.06561279296875, -0.018798828125, 0.02789306640625, 0.0310211181640625, -0.050506591796875, 0.0308380126953125, 0.00972747802734375, -0.06890869140625, -0.05206298828125, 
-0.013946533203125, 0.0290069580078125, 0.035614013671875, 0.0196075439453125, -0.022308349609375, -0.0214385986328125, -0.07781982421875, 0.01342010498046875, 0.001934051513671875, -0.00994110107421875, 0.039886474609375, 0.0550537109375, -0.031982421875, 0.040618896484375, -0.048583984375, -0.03411865234375, -0.003993988037109375, 0.01279449462890625, 0.0293121337890625, 0.074951171875, 0.06524658203125, -0.068359375, -0.024505615234375, 0.0087127685546875, -0.06536865234375, 0.0165557861328125, -0.00579833984375, -0.0310821533203125, -0.01204681396484375, 0.0086212158203125, -0.07159423828125, 0.039093017578125, 0.02313232421875, -0.0335693359375, 0.047760009765625, -0.0250091552734375, 0.01381683349609375, -0.0859375, 0.0297088623046875, 0.011138916015625, -0.006702423095703125, -0.03399658203125, 0.046630859375, -0.01447296142578125, -0.029205322265625, -0.03839111328125, 0.0654296875, -0.0285186767578125, 0.03656005859375, -0.022125244140625, -0.004627227783203125, -0.0077362060546875, 0.036224365234375, -0.008514404296875, 0.039398193359375, 0.061065673828125, -0.04425048828125, 0.03424072265625, 0.031494140625, -0.02313232421875, 0.037841796875, -0.07049560546875, 0.0275421142578125, -0.0240936279296875, 0.01239776611328125, -0.06512451171875, -0.035247802734375, 0.051727294921875, -0.05181884765625, 0.0263214111328125, -0.0104827880859375, -0.0251617431640625, -0.041168212890625, -0.010833740234375, 0.0184326171875, 0.055908203125, -0.0300445556640625, 0.044189453125, 0.032501220703125, -0.008453369140625, -0.03143310546875, -0.048004150390625, 0.01514434814453125, -0.02886962890625, -0.05377197265625, 0.03509521484375, -0.0265655517578125, -0.019287109375, 0.021759033203125, 0.0295562744140625, -0.01174163818359375, 0.0146331787109375, 0.0281829833984375, 0.019866943359375, -0.019256591796875, -0.0294342041015625, 0.017974853515625, 0.00272369384765625, -0.0009937286376953125, -0.01325225830078125, 0.035888671875, -0.015045166015625, -0.0103607177734375, 
-0.05517578125, 0.025665283203125, 0.043792724609375, 0.002826690673828125, 0.020721435546875, 0.052215576171875, -0.05230712890625, 0.004825592041015625, -0.021514892578125, 0.0018672943115234375, -0.036712646484375, 0.0146026611328125, -0.0305328369140625, -0.04443359375, 0.039764404296875, 0.00875091552734375, 0.02008056640625, 0.05419921875, 0.0247955322265625, -0.0279998779296875, 0.09619140625, 0.061248779296875, -0.00916290283203125, 0.0182342529296875, -0.0723876953125, -0.0077362060546875, -0.053497314453125, -0.0308990478515625, -0.0227813720703125, -0.0506591796875, -0.05023193359375, -0.032135009765625, 0.0181121826171875, 0.0211334228515625, -0.029876708984375, 0.0287933349609375, -0.032928466796875, 0.028778076171875, 0.03173828125, 0.0194244384765625, 0.00817108154296875, -0.0103759765625, 0.0006093978881835938, -0.008209228515625, -0.048095703125, -0.0316162109375, 0.06842041015625, 0.0408935546875, 0.05523681640625, 0.004375457763671875, 0.0582275390625, 0.0261688232421875, 0.0293731689453125, -0.039031982421875, 0.03924560546875, -0.0258331298828125, -0.062744140625, -0.01169586181640625, -0.029510498046875, -0.07781982421875, 0.003223419189453125, -0.021392822265625, -0.0577392578125, 0.0226898193359375, 0.005962371826171875, -0.0264434814453125, 0.0235137939453125, -0.047027587890625, 0.06439208984375, -0.028961181640625, -0.0292510986328125, -0.01483154296875, -0.052764892578125, 0.04510498046875, 0.011566162109375, 0.0198211669921875, -0.016815185546875, 0.004535675048828125, 0.04351806640625, -0.0438232421875, 0.07110595703125, -0.023040771484375, 0.0074310302734375, 0.035736083984375, 0.0183258056640625, 0.01458740234375, 0.0168304443359375, 0.01126861572265625, 0.03082275390625, -0.01062774658203125, -0.029052734375, -0.017486572265625, 0.057708740234375, -0.0721435546875, -0.0447998046875, -0.0167236328125, -0.0313720703125, 0.0294647216796875, 0.048370361328125, 0.050506591796875, 0.03216552734375, -0.021484375, 0.0124664306640625, 
0.021514892578125, -0.029876708984375, 0.0286865234375, 0.01070404052734375, -0.030914306640625, -0.046478271484375, 0.0853271484375, 0.0100555419921875, -0.0008940696716308594, 0.012847900390625, 0.0287933349609375, -0.01971435546875, -0.0276641845703125, -0.06903076171875, 0.0269317626953125, -0.03143310546875, -0.01458740234375, -0.046295166015625, -0.02545166015625, -0.041717529296875, -0.01161956787109375, -0.025115966796875, -0.031890869140625, -0.054351806640625, 0.004924774169921875, 0.02862548828125, 0.0567626953125, -0.021881103515625, 0.004421234130859375, -0.042205810546875, 0.03192138671875, 0.006526947021484375, 0.0200042724609375, 0.0096435546875, -0.027557373046875, -0.0057830810546875, 0.0239410400390625, -0.046600341796875, -0.05084228515625, 0.0494384765625, 0.015350341796875, 0.036163330078125, 0.050994873046875, -0.0012502670288085938, 0.052764892578125, -0.028594970703125, 0.06903076171875, 0.0214080810546875, -0.042266845703125, 0.041839599609375, -0.051666259765625, 0.02008056640625, 0.01739501953125, 0.0413818359375, -0.02069091796875, -0.042816162109375, -0.079345703125, -0.06475830078125, 0.034637451171875, 0.036041259765625, 0.0197601318359375, 0.0136871337890625, 0.03973388671875, 0.01020050048828125, 0.01739501953125, -0.058135986328125, -0.03936767578125, -0.034271240234375, -0.0030536651611328125, 0.007427215576171875, 0.01276397705078125, -0.0008745193481445312, -0.0266265869140625, 0.05877685546875, 0.00016987323760986328, 0.04022216796875, 0.012176513671875, 0.0203399658203125, -0.01346588134765625, -0.019927978515625, 0.037261962890625, 0.028961181640625, -0.01085662841796875, -0.0211334228515625, -0.002796173095703125, -0.03338623046875, 0.00677490234375, 0.021240234375, -0.037933349609375, -0.004886627197265625, 0.0115509033203125, 0.06793212890625, -0.0144500732421875, -0.0457763671875, 0.0261383056640625, -0.0120697021484375, -0.0182342529296875, -0.028961181640625, 0.0284576416015625, 0.0186004638671875, 0.0282745361328125, 
0.00841522216796875, 0.027374267578125, 0.034423828125, -0.035675048828125, -0.00395965576171875, 0.026580810546875, -0.0164794921875, -0.020721435546875, 0.0948486328125, 0.0158843994140625, -0.0478515625, 0.046051025390625, -0.0305328369140625, -0.0177764892578125, 0.08465576171875, 0.035064697265625, 0.07147216796875, -0.01180267333984375, 0.0303802490234375, 0.045501708984375, 0.01995849609375, -0.0049591064453125, 0.02178955078125, 0.0143280029296875, -0.041168212890625, -0.01666259765625, -0.029144287109375, -0.02392578125, 0.04986572265625, -0.0260009765625, 0.05218505859375, -0.05810546875, -0.0057220458984375, 0.000576019287109375, -0.00606536865234375, -0.050689697265625, 0.042877197265625, 0.020965576171875, 0.0845947265625, -0.04833984375, 0.052886962890625, 0.049102783203125, -0.05908203125, -0.06585693359375, -0.01213836669921875, -0.002597808837890625, -0.055267333984375, 0.019989013671875, 0.0238037109375, 0.00856781005859375, 0.01306915283203125, -0.07391357421875, -0.059356689453125, 0.080322265625, 0.0238494873046875, -0.036224365234375, -0.0215911865234375, -0.0144805908203125, 0.043853759765625, -0.039825439453125, 0.0260009765625, 0.02313232421875, 0.02764892578125, -0.0003447532653808594, -0.05670166015625, -0.006008148193359375, -0.05401611328125, 0.006481170654296875, -0.012786865234375, -0.0694580078125, 0.0718994140625, -0.0430908203125, -0.0198822021484375, 0.04852294921875, 0.050323486328125, 0.0131683349609375, 0.0306854248046875, 0.037384033203125, 0.051513671875, 0.037384033203125, -0.0057830810546875, 0.08148193359375, -0.01654052734375, 0.019989013671875, 0.06195068359375, 0.0125579833984375, 0.042266845703125, -0.01126861572265625, -0.00109100341796875, 0.03570556640625, 0.06549072265625, -0.023468017578125, 0.03167724609375, -0.007595062255859375, 0.006374359130859375, -0.01079559326171875, -0.01415252685546875, -0.050750732421875, 0.019866943359375, 0.0181121826171875, -0.01549530029296875, 0.0002646446228027344, 
0.0202789306640625, -0.0004317760467529297, -0.01065826416015625, -0.006832122802734375, 0.051483154296875, 0.0181121826171875, -0.017364501953125, 0.056793212890625, -0.0002815723419189453, 0.052032470703125, -0.034423828125, -0.019012451171875, -0.0274200439453125, 0.00829315185546875, -0.00943756103515625, -0.07049560546875, -0.004302978515625, -0.007152557373046875, 0.01134490966796875, -0.013397216796875, 0.05352783203125, -0.0180206298828125, -0.047882080078125, 0.0256500244140625, 0.01593017578125, 0.03851318359375, 0.005901336669921875, -0.0831298828125, 0.0026645660400390625, -0.0016918182373046875, -0.00806427001953125, 0.01119232177734375, 0.02679443359375, 0.0142669677734375, 0.06207275390625, 0.036651611328125, 0.0020904541015625, -0.031524658203125, -0.00984954833984375, 0.07122802734375, -0.04852294921875, -0.045867919921875, -0.043670654296875, 0.0618896484375, -0.00586700439453125, -0.032989501953125, 0.035247802734375, 0.048431396484375, 0.07342529296875, -0.0305023193359375, 0.06787109375, -0.030609130859375, 0.01629638671875, -0.02740478515625, 0.087646484375, -0.0865478515625, -0.005950927734375, -0.045379638671875, -0.048095703125, -0.00431060791015625, 0.05841064453125, 0.01303863525390625, 0.01444244384765625, 0.01197052001953125, 0.049560546875, -0.005126953125, 0.0021038055419921875, 0.0259246826171875, 0.029052734375, 0.0295867919921875, 0.05084228515625, 0.07012939453125, -0.05072021484375, 0.0308685302734375, -0.060577392578125, -0.027618408203125, -0.0279388427734375, -0.060333251953125, -0.049072265625, -0.04791259765625, -0.0440673828125, -0.03900146484375, -0.0123748779296875, 0.05718994140625, 0.0645751953125, -0.047454833984375, -0.0268707275390625, -0.0127410888671875, 0.0027618408203125, -0.022857666015625, -0.017791748046875, 0.00647735595703125, 0.022552490234375, -0.0758056640625, 0.03369140625, 0.004787445068359375, 0.03973388671875, -0.0178375244140625, -0.01087188720703125, -0.00782012939453125, -0.0178375244140625, 
0.034149169921875, 0.033935546875, -0.042205810546875, -0.00551605224609375, 0.006847381591796875, 0.00865936279296875, 0.004833221435546875, 0.026519775390625, -0.03045654296875, 0.038177490234375, 0.0235595703125, 0.022216796875, 0.0406494140625, -0.0079193115234375, 0.03228759765625, -0.00832366943359375, 0.01343536376953125, 0.0253143310546875, 0.03924560546875, 0.015411376953125, -0.051055908203125, 0.0440673828125, 0.035736083984375, -0.041656494140625, -0.054107666015625, 0.0095672607421875, -0.077392578125, -0.0330810546875, 0.06689453125, 0.0005855560302734375, -0.0273284912109375, 0.01409912109375, -0.050018310546875, 0.01432037353515625, -0.032928466796875, 0.05609130859375, 0.034820556640625, -0.0195465087890625, -0.0132598876953125, -0.038604736328125, 0.01413726806640625, -0.0011491775512695312, -0.06134033203125, -0.023529052734375, 0.05194091796875, 0.036224365234375, 0.031280517578125, 0.053497314453125, -0.0189056396484375, 0.0302581787109375, 0.01549530029296875, 0.01345062255859375, -0.0081329345703125, -0.0308990478515625, -0.0216827392578125, 0.003658294677734375, 0.0003459453582763672, -0.020355224609375 ] ]
Yntec/mistoonAnime2
2023-10-05T19:14:03.000Z
[ "diffusers", "Anime", "Cartoon", "Colorful", "Inzaniak", "stable-diffusion", "stable-diffusion-diffusers", "text-to-image", "license:creativeml-openrail-m", "endpoints_compatible", "has_space", "diffusers:StableDiffusionPipeline", "region:us" ]
text-to-image
Yntec
null
null
Yntec/mistoonAnime2
2
8,897
diffusers
2023-10-05T13:09:06
--- license: creativeml-openrail-m library_name: diffusers pipeline_tag: text-to-image tags: - Anime - Cartoon - Colorful - Inzaniak - stable-diffusion - stable-diffusion-diffusers - text-to-image - diffusers --- Original page: https://civitai.com/models/24149?modelVersionId=108545 Comparison: ![Comparison](https://cdn-uploads.huggingface.co/production/uploads/63239b8370edc53f51cd5d42/7k3xm05XCk7FiHDMAfaUL.png) Samples and prompt: ![Sample](https://cdn-uploads.huggingface.co/production/uploads/63239b8370edc53f51cd5d42/IN7sFezhWrNkws5SNnLaP.png) videogames, robert jordan ​pepperoni pizza, josephine wall winner, hidari, roll20 illumination, radiant light, sitting Pretty CUTE girl, gorgeous hair, DETAILED EYES, Magazine ad, iconic, 1943, Cartoon, sharp focus, 4k, towel. comic art on canvas by kyoani and watched and ROSSDRAWS. elementary
867
[ [ -0.0556640625, -0.032684326171875, 0.03240966796875, 0.0192718505859375, -0.01277923583984375, 0.0161895751953125, 0.0013303756713867188, -0.0323486328125, 0.06329345703125, 0.02801513671875, -0.043426513671875, -0.042694091796875, -0.0271759033203125, 0.0010862350463867188, -0.012420654296875, 0.0662841796875, 0.004302978515625, 0.0168304443359375, -0.00991058349609375, -0.007537841796875, -0.0367431640625, 0.01314544677734375, -0.048553466796875, -0.0149993896484375, 0.0191192626953125, 0.041290283203125, 0.047454833984375, 0.0138702392578125, 0.0272064208984375, 0.0304412841796875, 0.004177093505859375, 0.005161285400390625, -0.0341796875, -0.0167694091796875, -0.0184783935546875, -0.0249176025390625, -0.0338134765625, 0.0269775390625, 0.02630615234375, 0.030914306640625, 0.01158905029296875, 0.00856781005859375, 0.016326904296875, 0.062164306640625, -0.0428466796875, -0.012237548828125, 0.003505706787109375, 0.00865936279296875, -0.00412750244140625, 0.01342010498046875, -0.0242462158203125, -0.033966064453125, -0.017242431640625, -0.08880615234375, 0.03948974609375, 0.0033721923828125, 0.089599609375, 0.00804901123046875, -0.0297088623046875, -0.051361083984375, -0.03125, 0.0226898193359375, -0.0178680419921875, 0.0292205810546875, 0.005535125732421875, 0.0223388671875, -0.0226898193359375, -0.05841064453125, -0.047332763671875, -0.0011138916015625, 0.0167999267578125, 0.057525634765625, -0.054107666015625, -0.0274810791015625, 0.0261077880859375, 0.035003662109375, -0.052978515625, -0.008392333984375, -0.0263671875, 0.033203125, 0.054840087890625, -0.01453399658203125, 0.046600341796875, -0.0084686279296875, -0.041748046875, -0.045623779296875, -0.032745361328125, 0.03131103515625, 0.046875, -0.0111083984375, -0.02410888671875, 0.041412353515625, -0.01371002197265625, 0.0224151611328125, 0.03515625, 0.0149078369140625, 0.0285797119140625, -0.021697998046875, -0.0279388427734375, -0.02532958984375, 0.08160400390625, 0.062408447265625, 0.00989532470703125, 
0.01064300537109375, 0.0253143310546875, 0.005657196044921875, 0.008087158203125, -0.06256103515625, -0.04803466796875, -0.0021953582763671875, -0.05120849609375, -0.02734375, 0.0267486572265625, -0.09381103515625, -0.0260772705078125, 0.0159759521484375, 0.01123809814453125, -0.0229339599609375, -0.021514892578125, 0.00931549072265625, -0.021942138671875, -0.00543975830078125, 0.022064208984375, -0.0513916015625, 0.006465911865234375, 0.0297088623046875, 0.055084228515625, 0.0109710693359375, 0.0264129638671875, -0.01511383056640625, -0.019775390625, -0.038787841796875, 0.0709228515625, -0.0189056396484375, -0.051849365234375, -0.0073394775390625, 0.0089874267578125, 0.00019419193267822266, -0.058319091796875, 0.0682373046875, -0.00677490234375, 0.0035762786865234375, -0.044189453125, -0.0261383056640625, -0.01134490966796875, -0.00833892822265625, -0.08099365234375, 0.06005859375, 0.01297760009765625, -0.040863037109375, 0.04766845703125, -0.04150390625, -0.0012178421020507812, 0.01412200927734375, -0.01117706298828125, -0.0251922607421875, 0.0225830078125, 0.0067138671875, 0.007415771484375, -0.016265869140625, -0.01605224609375, -0.05413818359375, -0.036712646484375, 0.045928955078125, -0.010009765625, 0.07342529296875, 0.0450439453125, -0.033233642578125, 0.004322052001953125, -0.05615234375, 0.0024585723876953125, 0.037017822265625, -0.0025501251220703125, -0.00008535385131835938, -0.0299072265625, -0.0098114013671875, 0.03924560546875, 0.0253143310546875, -0.027191162109375, 0.0135650634765625, 0.01551055908203125, 0.007694244384765625, 0.034576416015625, -0.0032749176025390625, 0.006763458251953125, -0.035430908203125, 0.06256103515625, -0.018310546875, 0.024993896484375, -0.0078277587890625, -0.048095703125, -0.06689453125, -0.045562744140625, 0.01422119140625, 0.04547119140625, -0.05889892578125, 0.046630859375, 0.000823974609375, -0.0638427734375, -0.061676025390625, 0.00801849365234375, 0.02880859375, 0.032440185546875, -0.0013341903686523438, 
-0.04052734375, -0.029693603515625, -0.06524658203125, -0.008270263671875, -0.025115966796875, -0.026702880859375, 0.04241943359375, 0.01708984375, 0.01186370849609375, 0.026031494140625, -0.0364990234375, -0.014984130859375, -0.020050048828125, -0.00553131103515625, 0.04388427734375, 0.0187225341796875, 0.06842041015625, -0.067626953125, -0.057861328125, -0.0255584716796875, -0.03887939453125, -0.0191802978515625, 0.0137939453125, -0.0197906494140625, -0.032470703125, 0.0330810546875, -0.04339599609375, 0.04254150390625, 0.01555633544921875, -0.0640869140625, 0.046478271484375, -0.03497314453125, 0.07244873046875, -0.080322265625, -0.0012054443359375, 0.0233612060546875, -0.01230621337890625, -0.0121002197265625, 0.04248046875, -0.005916595458984375, -0.025665283203125, -0.051971435546875, 0.031524658203125, -0.0380859375, 0.0022983551025390625, -0.00847625732421875, 0.0294036865234375, 0.034332275390625, 0.01337432861328125, -0.009765625, 0.059967041015625, 0.027191162109375, -0.018157958984375, 0.0526123046875, 0.03656005859375, -0.0396728515625, 0.059051513671875, -0.07440185546875, 0.004230499267578125, 0.0036983489990234375, 0.00494384765625, -0.1041259765625, -0.00835418701171875, 0.045928955078125, -0.0513916015625, -0.00478363037109375, 0.006290435791015625, -0.0692138671875, -0.035125732421875, -0.048583984375, 0.0171661376953125, 0.06182861328125, -0.041748046875, -0.004856109619140625, 0.00682830810546875, 0.0024433135986328125, -0.0115509033203125, -0.033843994140625, -0.00827789306640625, -0.0360107421875, -0.01885986328125, 0.0202484130859375, -0.039642333984375, -0.0218505859375, -0.01369476318359375, 0.0169677734375, -0.010833740234375, -0.0027313232421875, 0.03558349609375, 0.026824951171875, -0.03436279296875, -0.0204315185546875, -0.01290130615234375, 0.0020503997802734375, -0.00684356689453125, 0.0325927734375, 0.035186767578125, -0.029876708984375, -0.034423828125, -0.0557861328125, 0.0195770263671875, 0.046600341796875, 0.01543426513671875, 
0.020294189453125, 0.03472900390625, -0.017364501953125, 0.0318603515625, -0.045166015625, 0.005126953125, -0.0372314453125, -0.004291534423828125, -0.037567138671875, -0.0243072509765625, 0.03778076171875, 0.0007114410400390625, -0.0323486328125, 0.052215576171875, 0.02105712890625, -0.0279693603515625, 0.08782958984375, 0.032073974609375, -0.008575439453125, 0.03631591796875, -0.038360595703125, -0.004974365234375, -0.04571533203125, -0.0124359130859375, -0.039764404296875, -0.049224853515625, -0.05889892578125, -0.0095977783203125, 0.0007996559143066406, 0.037811279296875, -0.0257415771484375, 0.061309814453125, -0.0379638671875, 0.048095703125, 0.046905517578125, 0.03558349609375, 0.0237274169921875, 0.00016248226165771484, -0.00470733642578125, -0.026641845703125, -0.03778076171875, -0.0390625, 0.0579833984375, 0.00611114501953125, 0.0229034423828125, 0.038818359375, 0.04638671875, 0.0086669921875, 0.013275146484375, -0.031402587890625, 0.058013916015625, -0.0430908203125, -0.05340576171875, 0.0128173828125, -0.006160736083984375, -0.0745849609375, -0.00020205974578857422, -0.0478515625, -0.04461669921875, 0.041351318359375, -0.0026340484619140625, -0.04058837890625, 0.02471923828125, -0.0611572265625, 0.06414794921875, -0.014678955078125, -0.044769287109375, 0.009490966796875, -0.0013484954833984375, 0.0209503173828125, 0.02191162109375, 0.024139404296875, -0.018890380859375, 0.0026111602783203125, 0.035369873046875, -0.015625, 0.03094482421875, 0.00997161865234375, 0.022186279296875, 0.03436279296875, -0.0009598731994628906, -0.00795745849609375, 0.032196044921875, 0.0041961669921875, -0.0234375, -0.010833740234375, -0.02947998046875, -0.048492431640625, 0.067138671875, -0.046417236328125, -0.0206756591796875, -0.0254058837890625, -0.007366180419921875, 0.00728607177734375, 0.032958984375, 0.07623291015625, 0.032958984375, -0.040924072265625, 0.025115966796875, 0.0511474609375, -0.00260162353515625, 0.040374755859375, 0.01053619384765625, 
-0.0288848876953125, -0.045440673828125, 0.062225341796875, -0.01033782958984375, 0.01174163818359375, 0.01313018798828125, 0.007472991943359375, -0.0249176025390625, -0.00855255126953125, -0.041900634765625, 0.029266357421875, -0.040008544921875, -0.00035572052001953125, -0.0163421630859375, 0.00689697265625, -0.041290283203125, -0.023712158203125, -0.0272216796875, -0.021026611328125, -0.0419921875, -0.0025653839111328125, 0.0136871337890625, 0.06341552734375, 0.004657745361328125, 0.01450347900390625, -0.0194549560546875, 0.02093505859375, 0.03607177734375, 0.0251922607421875, -0.0210723876953125, -0.0135650634765625, 0.02587890625, -0.01090240478515625, -0.05975341796875, -0.0679931640625, 0.0299072265625, 0.0005588531494140625, 0.025421142578125, 0.0576171875, 0.0022106170654296875, 0.05059814453125, -0.0242156982421875, 0.06732177734375, 0.059051513671875, -0.05499267578125, 0.036865234375, -0.0264434814453125, 0.0221405029296875, 0.0709228515625, 0.02691650390625, -0.01430511474609375, -0.009246826171875, -0.08160400390625, -0.045806884765625, 0.0007867813110351562, 0.0274658203125, -0.00019812583923339844, -0.005008697509765625, 0.0077362060546875, 0.01151275634765625, 0.0379638671875, -0.04034423828125, -0.032928466796875, -0.0281524658203125, -0.0237884521484375, -0.0123748779296875, -0.0096282958984375, 0.0014905929565429688, -0.0269927978515625, 0.0283203125, 0.0007600784301757812, 0.03546142578125, 0.005458831787109375, 0.00608062744140625, -0.0036334991455078125, 0.0106353759765625, 0.060760498046875, 0.057525634765625, -0.0186920166015625, -0.00531768798828125, -0.0038928985595703125, -0.050506591796875, 0.00983428955078125, -0.00428009033203125, -0.026824951171875, 0.031341552734375, 0.0297088623046875, 0.08233642578125, 0.041748046875, -0.0712890625, 0.0404052734375, -0.0287628173828125, 0.01340484619140625, -0.0252838134765625, 0.035797119140625, 0.0021877288818359375, 0.03558349609375, 0.01398468017578125, -0.004047393798828125, 0.0421142578125, 
-0.060394287109375, 0.0234375, 0.019439697265625, -0.037994384765625, -0.0274658203125, 0.06341552734375, -0.0029201507568359375, -0.04046630859375, 0.03564453125, -0.03546142578125, -0.0238037109375, 0.06658935546875, 0.0439453125, 0.05743408203125, -0.01183319091796875, 0.04937744140625, 0.0374755859375, -0.01210784912109375, 0.01555633544921875, 0.040069580078125, 0.0002448558807373047, -0.034759521484375, 0.032318115234375, -0.004451751708984375, -0.04400634765625, 0.0148773193359375, -0.064453125, 0.04638671875, -0.06640625, -0.0079345703125, 0.0016689300537109375, 0.0287628173828125, -0.059326171875, 0.04034423828125, -0.0028667449951171875, 0.08990478515625, -0.0830078125, 0.0239715576171875, 0.04571533203125, -0.06524658203125, -0.04351806640625, -0.0107574462890625, 0.0104217529296875, -0.037445068359375, 0.0306396484375, 0.0164337158203125, 0.011138916015625, -0.01885986328125, -0.053253173828125, -0.0269927978515625, 0.06414794921875, 0.00933074951171875, -0.05413818359375, 0.0073699951171875, -0.0135955810546875, 0.035888671875, -0.05328369140625, 0.04840087890625, 0.040618896484375, 0.01171112060546875, 0.04241943359375, -0.04779052734375, -0.025238037109375, -0.072021484375, -0.005901336669921875, -0.0223388671875, -0.07177734375, 0.0672607421875, -0.01262664794921875, -0.00848388671875, 0.04840087890625, 0.07476806640625, 0.06768798828125, 0.057403564453125, 0.053497314453125, 0.046630859375, 0.03497314453125, -0.024688720703125, 0.08282470703125, -0.007534027099609375, 0.00916290283203125, 0.07049560546875, -0.00696563720703125, 0.054840087890625, -0.00568389892578125, -0.037506103515625, 0.04107666015625, 0.0799560546875, -0.01432037353515625, 0.04779052734375, 0.002719879150390625, -0.01103973388671875, 0.00010478496551513672, -0.0148773193359375, -0.03375244140625, 0.003910064697265625, -0.0013513565063476562, -0.0214996337890625, -0.005077362060546875, 0.0221710205078125, -0.00739288330078125, 0.031829833984375, -0.0186004638671875, 
0.041900634765625, 0.00447845458984375, -0.032135009765625, 0.0229644775390625, -0.021148681640625, 0.044525146484375, -0.020233154296875, -0.007251739501953125, -0.0226593017578125, 0.0037899017333984375, -0.01337432861328125, -0.057708740234375, -0.0232391357421875, 0.01446533203125, -0.015167236328125, -0.01161956787109375, 0.046600341796875, -0.0026454925537109375, -0.05499267578125, 0.01239013671875, -0.011505126953125, 0.0267181396484375, 0.016021728515625, -0.081298828125, 0.0194091796875, -0.004795074462890625, 0.00905609130859375, -0.0169830322265625, 0.0242462158203125, 0.017303466796875, 0.035186767578125, 0.0193328857421875, 0.017364501953125, 0.0018339157104492188, 0.002750396728515625, 0.037322998046875, -0.031463623046875, -0.054351806640625, -0.03826904296875, 0.04302978515625, -0.039642333984375, -0.06884765625, 0.0811767578125, 0.06744384765625, 0.0579833984375, -0.044708251953125, 0.0216522216796875, 0.013916015625, 0.034454345703125, -0.04754638671875, 0.07427978515625, -0.049285888671875, -0.0183868408203125, -0.039398193359375, -0.06793212890625, -0.002185821533203125, 0.048065185546875, 0.00006264448165893555, 0.0255584716796875, 0.032257080078125, 0.0243072509765625, -0.013458251953125, 0.00269317626953125, 0.00664520263671875, 0.0029163360595703125, 0.023773193359375, 0.0295867919921875, 0.06072998046875, -0.04693603515625, 0.00833892822265625, -0.030914306640625, -0.033203125, -0.0379638671875, -0.05426025390625, -0.076171875, -0.0531005859375, -0.039764404296875, -0.033538818359375, -0.0154571533203125, 0.06005859375, 0.04632568359375, -0.07781982421875, -0.01430511474609375, 0.0103912353515625, 0.0023365020751953125, -0.00438690185546875, -0.0161895751953125, 0.0254669189453125, 0.047698974609375, -0.07623291015625, 0.036285400390625, -0.005786895751953125, 0.0261077880859375, -0.000972747802734375, 0.014739990234375, -0.003131866455078125, 0.00768280029296875, 0.01806640625, 0.046661376953125, -0.051727294921875, -0.0244293212890625, 
0.01165771484375, -0.0067138671875, 0.015350341796875, 0.057159423828125, -0.042144775390625, 0.026702880859375, 0.064208984375, 0.01291656494140625, 0.029388427734375, 0.004425048828125, 0.013397216796875, -0.0146942138671875, 0.0166015625, -0.01001739501953125, 0.04388427734375, 0.0322265625, -0.02142333984375, 0.042236328125, 0.040740966796875, -0.027069091796875, -0.05621337890625, 0.0203704833984375, -0.076171875, -0.002399444580078125, 0.039794921875, 0.00650787353515625, -0.027496337890625, 0.027496337890625, -0.037872314453125, 0.0235748291015625, -0.0129241943359375, 0.046844482421875, 0.0689697265625, -0.016387939453125, -0.0226593017578125, -0.048858642578125, 0.0211334228515625, 0.01285552978515625, -0.051361083984375, -0.06488037109375, 0.02655029296875, 0.0234527587890625, 0.0239715576171875, 0.03131103515625, -0.01416015625, 0.04046630859375, 0.00630950927734375, 0.01015472412109375, 0.005664825439453125, -0.0276947021484375, -0.0123443603515625, 0.007617950439453125, -0.0131683349609375, -0.046478271484375 ] ]
stablediffusionapi/rev-animated
2023-05-26T13:50:00.000Z
[ "diffusers", "stablediffusionapi.com", "stable-diffusion-api", "text-to-image", "ultra-realistic", "license:creativeml-openrail-m", "endpoints_compatible", "has_space", "diffusers:StableDiffusionPipeline", "region:us" ]
text-to-image
stablediffusionapi
null
null
stablediffusionapi/rev-animated
6
8,888
diffusers
2023-02-15T20:51:16
--- license: creativeml-openrail-m tags: - stablediffusionapi.com - stable-diffusion-api - text-to-image - ultra-realistic pinned: true --- # ReV Animated API Inference ![generated from stablediffusionapi.com](https://pub-8b49af329fae499aa563997f5d4068a4.r2.dev/generations/4463031071676494118.png) ## Get API Key Get API key from [Stable Diffusion API](http://stablediffusionapi.com/), No Payment needed. Replace Key in below code, change **model_id** to "rev-animated" Coding in PHP/Node/Java etc? Have a look at docs for more code examples: [View docs](https://stablediffusionapi.com/docs) Try model for free: [Generate Images](https://stablediffusionapi.com/models/rev-animated) Model link: [View model](https://stablediffusionapi.com/models/rev-animated) Credits: [View credits](https://civitai.com/?query=ReV%20Animated) View all models: [View Models](https://stablediffusionapi.com/models) import requests import json url = "https://stablediffusionapi.com/api/v3/dreambooth" payload = json.dumps({ "key": "your_api_key", "model_id": "rev-animated", "prompt": "ultra realistic close up portrait ((beautiful pale cyberpunk female with heavy black eyeliner)), blue eyes, shaved side haircut, hyper detail, cinematic lighting, magic neon, dark red city, Canon EOS R3, nikon, f/1.4, ISO 200, 1/160s, 8K, RAW, unedited, symmetrical balance, in-frame, 8K", "negative_prompt": "painting, extra fingers, mutated hands, poorly drawn hands, poorly drawn face, deformed, ugly, blurry, bad anatomy, bad proportions, extra limbs, cloned face, skinny, glitchy, double torso, extra arms, extra hands, mangled fingers, missing lips, ugly face, distorted face, extra legs, anime", "width": "512", "height": "512", "samples": "1", "num_inference_steps": "30", "safety_checker": "no", "enhance_prompt": "yes", "seed": None, "guidance_scale": 7.5, "multi_lingual": "no", "panorama": "no", "self_attention": "no", "upscale": "no", "embeddings": "embeddings_model_id", "lora": "lora_model_id", "webhook": None, 
"track_id": None }) headers = { 'Content-Type': 'application/json' } response = requests.request("POST", url, headers=headers, data=payload) print(response.text) > Use this coupon code to get 25% off **DMGG0RBN**
2,465
[ [ -0.0300140380859375, -0.05322265625, 0.023834228515625, 0.02197265625, -0.034210205078125, 0.00785064697265625, 0.021026611328125, -0.032806396484375, 0.0435791015625, 0.042633056640625, -0.07354736328125, -0.05010986328125, -0.0283355712890625, -0.0012998580932617188, -0.0221710205078125, 0.04669189453125, 0.00583648681640625, -0.01013946533203125, -0.0057525634765625, 0.01328277587890625, -0.0225982666015625, 0.000537872314453125, -0.05010986328125, -0.019805908203125, 0.0100555419921875, -0.007503509521484375, 0.048828125, 0.041168212890625, 0.0220947265625, 0.0206146240234375, -0.00959014892578125, -0.009735107421875, -0.035614013671875, -0.00925445556640625, -0.0177459716796875, -0.04998779296875, -0.043182373046875, -0.0089111328125, 0.024139404296875, 0.023040771484375, 0.0006389617919921875, 0.0364990234375, -0.01139068603515625, 0.05596923828125, -0.043426513671875, 0.022796630859375, -0.0243682861328125, 0.00963592529296875, 0.00859832763671875, -0.0012378692626953125, -0.01219940185546875, -0.0226287841796875, -0.01165008544921875, -0.076171875, 0.0276336669921875, 0.005893707275390625, 0.0994873046875, 0.014404296875, -0.00951385498046875, -0.0025920867919921875, -0.041839599609375, 0.052886962890625, -0.0682373046875, 0.03240966796875, 0.0184173583984375, 0.01111602783203125, -0.004520416259765625, -0.076416015625, -0.0517578125, 0.02655029296875, 0.0276947021484375, 0.020965576171875, -0.03631591796875, -0.0066680908203125, 0.020660400390625, 0.031402587890625, -0.03875732421875, -0.01371002197265625, -0.03131103515625, -0.008056640625, 0.045318603515625, 0.0185394287109375, 0.019012451171875, -0.0215301513671875, -0.028564453125, -0.032989501953125, -0.043609619140625, 0.0234222412109375, 0.05438232421875, 0.0259246826171875, -0.0501708984375, 0.045257568359375, -0.0203094482421875, 0.045928955078125, 0.023223876953125, -0.0099639892578125, 0.0531005859375, -0.0106048583984375, -0.0206451416015625, -0.027130126953125, 0.07073974609375, 
0.05810546875, 0.0010995864868164062, 0.0215911865234375, -0.017547607421875, 0.002864837646484375, 0.0180816650390625, -0.08306884765625, -0.0103912353515625, 0.049652099609375, -0.04901123046875, -0.0382080078125, -0.0013256072998046875, -0.08270263671875, -0.01523590087890625, -0.0037517547607421875, 0.029296875, -0.02606201171875, -0.045928955078125, 0.029296875, -0.01715087890625, 0.017486572265625, 0.0248260498046875, -0.0574951171875, 0.002105712890625, 0.0440673828125, 0.0594482421875, 0.0144805908203125, 0.00017631053924560547, 0.0157623291015625, 0.006366729736328125, -0.0259246826171875, 0.053619384765625, -0.0181884765625, -0.033905029296875, 0.00002187490463256836, 0.031982421875, -0.0006256103515625, -0.0333251953125, 0.034332275390625, -0.048004150390625, -0.007213592529296875, -0.0008220672607421875, -0.0187835693359375, -0.032073974609375, 0.0164794921875, -0.044708251953125, 0.03912353515625, 0.0146942138671875, -0.055084228515625, 0.0173797607421875, -0.05224609375, -0.016876220703125, -0.0084228515625, 0.00496673583984375, -0.0374755859375, -0.004459381103515625, 0.016693115234375, 0.0107421875, -0.0073699951171875, -0.0009794235229492188, -0.045654296875, -0.023773193359375, 0.0207977294921875, -0.029510498046875, 0.0830078125, 0.032196044921875, -0.012298583984375, 0.002361297607421875, -0.061370849609375, 0.0029811859130859375, 0.05108642578125, -0.01262664794921875, -0.0034027099609375, -0.0233917236328125, 0.0015811920166015625, -0.003421783447265625, 0.02191162109375, -0.0355224609375, 0.023193359375, -0.03753662109375, 0.026947021484375, 0.040802001953125, 0.024871826171875, 0.0204620361328125, -0.0226593017578125, 0.059661865234375, 0.00501251220703125, 0.030517578125, -0.01233673095703125, -0.051483154296875, -0.0428466796875, -0.0240020751953125, 0.01111602783203125, 0.0283660888671875, -0.052032470703125, 0.0141448974609375, -0.022491455078125, -0.04443359375, -0.05413818359375, -0.010772705078125, 0.0283050537109375, 0.0318603515625, 
0.01373291015625, -0.021392822265625, -0.044830322265625, -0.06365966796875, -0.0008821487426757812, -0.0226593017578125, -0.01398468017578125, 0.016571044921875, 0.0380859375, -0.0284423828125, 0.06146240234375, -0.04913330078125, 0.005481719970703125, -0.0083160400390625, 0.0125732421875, 0.061370849609375, 0.049652099609375, 0.056732177734375, -0.059234619140625, -0.0154571533203125, -0.0248260498046875, -0.045867919921875, 0.0022869110107421875, 0.005199432373046875, -0.0113983154296875, -0.008331298828125, 0.0009984970092773438, -0.06396484375, 0.048797607421875, 0.03363037109375, -0.034637451171875, 0.045135498046875, -0.01108551025390625, 0.0428466796875, -0.08740234375, 0.00634765625, 0.006397247314453125, -0.01806640625, -0.0298004150390625, 0.034637451171875, 0.002643585205078125, -0.01363372802734375, -0.05584716796875, 0.045318603515625, -0.0269317626953125, 0.0106353759765625, -0.0191802978515625, 0.002593994140625, 0.024871826171875, 0.035125732421875, 0.00043463706970214844, 0.0379638671875, 0.043914794921875, -0.04083251953125, 0.035369873046875, 0.01544189453125, -0.0232086181640625, 0.038299560546875, -0.051483154296875, 0.0141448974609375, -0.0020999908447265625, 0.014984130859375, -0.084228515625, -0.038482666015625, 0.035858154296875, -0.05621337890625, 0.01197052001953125, -0.0584716796875, -0.038726806640625, -0.053375244140625, -0.031524658203125, 0.0264892578125, 0.06060791015625, -0.035125732421875, 0.053985595703125, 0.0156402587890625, 0.00931549072265625, -0.05413818359375, -0.07037353515625, -0.0237274169921875, -0.0201568603515625, -0.050994873046875, 0.0281829833984375, 0.00028061866760253906, -0.0227813720703125, 0.0028533935546875, 0.0032825469970703125, -0.005420684814453125, -0.0155792236328125, 0.039764404296875, 0.038665771484375, -0.0202484130859375, -0.0254974365234375, 0.01849365234375, 0.0018911361694335938, 0.0130462646484375, -0.0136871337890625, 0.0654296875, -0.01192474365234375, -0.04595947265625, -0.06414794921875, 
-0.00977325439453125, 0.059539794921875, 0.005748748779296875, 0.0302581787109375, 0.045684814453125, -0.0426025390625, 0.01364898681640625, -0.042449951171875, -0.0186004638671875, -0.037261962890625, 0.01678466796875, -0.026519775390625, -0.021759033203125, 0.07379150390625, -0.004047393798828125, 0.00713348388671875, 0.048248291015625, 0.0323486328125, -0.0104522705078125, 0.09027099609375, 0.018157958984375, 0.0142822265625, 0.0307159423828125, -0.0682373046875, -0.00836181640625, -0.067138671875, -0.0222015380859375, -0.0185699462890625, -0.023956298828125, -0.0243377685546875, -0.037017822265625, 0.0021877288818359375, 0.02471923828125, -0.037689208984375, 0.0321044921875, -0.0390625, 0.016845703125, 0.024261474609375, 0.0175933837890625, 0.01287841796875, 0.00928497314453125, -0.00682830810546875, -0.0018548965454101562, -0.0185394287109375, -0.0277862548828125, 0.0792236328125, 0.0271148681640625, 0.07122802734375, 0.00689697265625, 0.038055419921875, 0.012054443359375, 0.007770538330078125, -0.03875732421875, 0.037567138671875, 0.0155181884765625, -0.0726318359375, 0.01421356201171875, -0.007427215576171875, -0.08056640625, 0.0197296142578125, -0.0269317626953125, -0.0589599609375, 0.04693603515625, 0.00884246826171875, -0.044403076171875, 0.0374755859375, -0.05010986328125, 0.0535888671875, -0.00881195068359375, -0.04718017578125, -0.01629638671875, -0.040557861328125, 0.039703369140625, 0.004093170166015625, 0.039306640625, -0.02587890625, -0.013153076171875, 0.0521240234375, -0.034393310546875, 0.07794189453125, -0.0321044921875, 0.005523681640625, 0.051483154296875, 0.01386260986328125, 0.032073974609375, 0.0357666015625, -0.00919342041015625, 0.01947021484375, 0.0128173828125, -0.0254974365234375, -0.035400390625, 0.056671142578125, -0.05712890625, -0.04034423828125, -0.0137786865234375, -0.020782470703125, 0.0025539398193359375, 0.0257720947265625, 0.031585693359375, 0.0229949951171875, 0.002956390380859375, -0.0019626617431640625, 0.041412353515625, 
-0.01312255859375, 0.0369873046875, 0.0302581787109375, -0.0640869140625, -0.062225341796875, 0.05377197265625, -0.0206298828125, 0.02923583984375, 0.00473785400390625, 0.01654052734375, -0.038360595703125, -0.036895751953125, -0.046539306640625, 0.02593994140625, -0.05010986328125, -0.033905029296875, -0.049346923828125, -0.0019130706787109375, -0.05584716796875, -0.01511383056640625, -0.059234619140625, -0.0288543701171875, -0.046173095703125, -0.0099029541015625, 0.053009033203125, 0.0264129638671875, -0.005504608154296875, 0.0213470458984375, -0.05029296875, 0.035919189453125, 0.013214111328125, 0.0156402587890625, 0.00872802734375, -0.055267333984375, 0.005687713623046875, 0.023712158203125, -0.037689208984375, -0.06097412109375, 0.049652099609375, -0.0175323486328125, 0.026092529296875, 0.06427001953125, 0.005184173583984375, 0.08099365234375, -0.0009636878967285156, 0.07366943359375, 0.036651611328125, -0.04998779296875, 0.05413818359375, -0.043426513671875, 0.01436614990234375, 0.04010009765625, 0.0162506103515625, -0.025390625, -0.0146942138671875, -0.06439208984375, -0.08447265625, 0.042236328125, 0.01288604736328125, 0.03387451171875, 0.000047266483306884766, 0.037261962890625, 0.0029144287109375, 0.022979736328125, -0.06915283203125, -0.03277587890625, -0.027374267578125, -0.004425048828125, 0.035980224609375, 0.00786590576171875, -0.01309967041015625, -0.03289794921875, 0.054840087890625, -0.00524139404296875, 0.031646728515625, 0.020263671875, 0.0187835693359375, -0.0300140380859375, -0.004669189453125, 0.03375244140625, 0.043701171875, -0.03680419921875, -0.016815185546875, -0.0079345703125, -0.041595458984375, 0.00501251220703125, 0.00420379638671875, -0.0242462158203125, 0.0014400482177734375, 0.0242767333984375, 0.06524658203125, 0.0031681060791015625, -0.0382080078125, 0.052886962890625, -0.010711669921875, -0.0205230712890625, -0.040557861328125, -0.00040459632873535156, 0.028564453125, 0.042572021484375, 0.036895751953125, 0.02703857421875, 
0.00862884521484375, -0.0277557373046875, -0.01088714599609375, 0.0261688232421875, -0.0204315185546875, -0.03961181640625, 0.07952880859375, 0.0012359619140625, -0.041107177734375, 0.04534912109375, -0.0217132568359375, -0.00516510009765625, 0.06182861328125, 0.059539794921875, 0.07366943359375, -0.006496429443359375, 0.01142120361328125, 0.056427001953125, 0.00879669189453125, -0.00884246826171875, 0.05450439453125, 0.0220184326171875, -0.045684814453125, -0.023712158203125, -0.05810546875, -0.01812744140625, 0.0308380126953125, -0.056884765625, 0.05029296875, -0.051025390625, -0.03350830078125, -0.0223236083984375, -0.0290985107421875, -0.051300048828125, 0.02301025390625, 0.003314971923828125, 0.06951904296875, -0.05426025390625, 0.043609619140625, 0.046112060546875, -0.050048828125, -0.06365966796875, -0.0134124755859375, 0.0098724365234375, -0.0684814453125, 0.0302276611328125, 0.004497528076171875, -0.0014123916625976562, 0.006015777587890625, -0.051971435546875, -0.067138671875, 0.086669921875, 0.028961181640625, -0.037506103515625, -0.00472259521484375, -0.0038604736328125, 0.0391845703125, -0.037567138671875, 0.031280517578125, 0.00939178466796875, 0.0283660888671875, 0.02197265625, -0.037567138671875, -0.0024356842041015625, -0.0290985107421875, -0.0005950927734375, -0.0128173828125, -0.055267333984375, 0.06005859375, -0.0401611328125, -0.010498046875, 0.017242431640625, 0.051177978515625, 0.046661376953125, 0.037353515625, 0.045623779296875, 0.0670166015625, 0.0233001708984375, -0.00818634033203125, 0.07440185546875, -0.031585693359375, 0.0487060546875, 0.053253173828125, 0.0034770965576171875, 0.06341552734375, 0.03680419921875, -0.034454345703125, 0.05535888671875, 0.0721435546875, -0.0160980224609375, 0.0576171875, 0.004669189453125, -0.0194091796875, -0.0038604736328125, 0.010650634765625, -0.046112060546875, 0.0111083984375, 0.0278472900390625, -0.033447265625, 0.00879669189453125, 0.01153564453125, -0.00870513916015625, -0.0186614990234375, 
-0.02191162109375, 0.0286865234375, -0.00188446044921875, -0.023162841796875, 0.055572509765625, -0.01430511474609375, 0.06317138671875, -0.04022216796875, -0.00785064697265625, -0.0133209228515625, 0.01328277587890625, -0.0218048095703125, -0.05657958984375, 0.0101318359375, -0.005008697509765625, -0.005283355712890625, -0.004795074462890625, 0.043914794921875, 0.00525665283203125, -0.049163818359375, 0.01922607421875, 0.014556884765625, 0.029052734375, -0.00081634521484375, -0.076171875, 0.0228729248046875, 0.0166015625, -0.039825439453125, -0.0030002593994140625, 0.016632080078125, 0.0294189453125, 0.05743408203125, 0.052734375, 0.007801055908203125, 0.00534820556640625, 0.014617919921875, 0.059722900390625, -0.038177490234375, -0.031707763671875, -0.0609130859375, 0.0562744140625, -0.00870513916015625, -0.0166015625, 0.0360107421875, 0.06341552734375, 0.06011962890625, -0.0458984375, 0.051971435546875, -0.017578125, 0.0299224853515625, -0.026519775390625, 0.05621337890625, -0.05859375, 0.01375579833984375, -0.0361328125, -0.05389404296875, -0.01020050048828125, 0.04327392578125, -0.00894927978515625, 0.01512908935546875, 0.0273895263671875, 0.06390380859375, -0.022430419921875, -0.0025844573974609375, 0.004596710205078125, 0.00738525390625, 0.014068603515625, 0.01461029052734375, 0.0518798828125, -0.0504150390625, 0.0341796875, -0.051361083984375, -0.007045745849609375, -0.0076141357421875, -0.0582275390625, -0.0465087890625, -0.0264434814453125, -0.04754638671875, -0.054443359375, -0.01241302490234375, 0.06170654296875, 0.0635986328125, -0.05859375, -0.0078887939453125, -0.01020050048828125, -0.0009908676147460938, -0.0305938720703125, -0.024566650390625, 0.018829345703125, 0.025390625, -0.08148193359375, 0.0229034423828125, 0.00612640380859375, 0.034912109375, -0.00870513916015625, 0.0017490386962890625, -0.0058441162109375, 0.001552581787109375, 0.0164794921875, 0.030914306640625, -0.06494140625, -0.0108489990234375, -0.0081329345703125, 0.0120086669921875, 
0.014617919921875, 0.031036376953125, -0.044952392578125, 0.038848876953125, 0.052581787109375, 0.0137786865234375, 0.055023193359375, 0.0075531005859375, 0.01021575927734375, -0.0291900634765625, 0.0257568359375, 0.009674072265625, 0.042083740234375, 0.0132598876953125, -0.0367431640625, 0.0401611328125, 0.04278564453125, -0.0243682861328125, -0.0645751953125, 0.0041046142578125, -0.0869140625, -0.0201568603515625, 0.0748291015625, -0.01499176025390625, -0.04541015625, 0.005321502685546875, -0.017303466796875, 0.0307769775390625, -0.026763916015625, 0.0458984375, 0.026641845703125, -0.021148681640625, -0.020355224609375, -0.04803466796875, 0.003692626953125, 0.00016570091247558594, -0.045257568359375, -0.00482177734375, 0.0273895263671875, 0.047149658203125, 0.04461669921875, 0.03375244140625, -0.048187255859375, 0.016204833984375, 0.026824951171875, 0.045440673828125, -0.00836181640625, 0.0184783935546875, -0.0242462158203125, 0.00522613525390625, -0.00798797607421875, -0.038421630859375 ] ]
deepset/roberta-large-squad2
2023-09-27T12:27:59.000Z
[ "transformers", "pytorch", "jax", "safetensors", "roberta", "question-answering", "en", "dataset:squad_v2", "license:cc-by-4.0", "model-index", "autotrain_compatible", "endpoints_compatible", "has_space", "region:us" ]
question-answering
deepset
null
null
deepset/roberta-large-squad2
17
8,886
transformers
2022-03-02T23:29:05
--- language: en license: cc-by-4.0 datasets: - squad_v2 base_model: roberta-large model-index: - name: deepset/roberta-large-squad2 results: - task: type: question-answering name: Question Answering dataset: name: squad_v2 type: squad_v2 config: squad_v2 split: validation metrics: - type: exact_match value: 85.168 name: Exact Match - type: f1 value: 88.349 name: F1 - task: type: question-answering name: Question Answering dataset: name: squad type: squad config: plain_text split: validation metrics: - type: exact_match value: 87.162 name: Exact Match - type: f1 value: 93.603 name: F1 - task: type: question-answering name: Question Answering dataset: name: adversarial_qa type: adversarial_qa config: adversarialQA split: validation metrics: - type: exact_match value: 35.900 name: Exact Match - type: f1 value: 48.923 name: F1 - task: type: question-answering name: Question Answering dataset: name: squad_adversarial type: squad_adversarial config: AddOneSent split: validation metrics: - type: exact_match value: 81.142 name: Exact Match - type: f1 value: 87.099 name: F1 - task: type: question-answering name: Question Answering dataset: name: squadshifts amazon type: squadshifts config: amazon split: test metrics: - type: exact_match value: 72.453 name: Exact Match - type: f1 value: 86.325 name: F1 - task: type: question-answering name: Question Answering dataset: name: squadshifts new_wiki type: squadshifts config: new_wiki split: test metrics: - type: exact_match value: 82.338 name: Exact Match - type: f1 value: 91.974 name: F1 - task: type: question-answering name: Question Answering dataset: name: squadshifts nyt type: squadshifts config: nyt split: test metrics: - type: exact_match value: 84.352 name: Exact Match - type: f1 value: 92.645 name: F1 - task: type: question-answering name: Question Answering dataset: name: squadshifts reddit type: squadshifts config: reddit split: test metrics: - type: exact_match value: 74.722 name: Exact Match - type: f1 value: 86.860 
name: F1 --- # roberta-large for QA This is the [roberta-large](https://huggingface.co/roberta-large) model, fine-tuned using the [SQuAD2.0](https://huggingface.co/datasets/squad_v2) dataset. It's been trained on question-answer pairs, including unanswerable questions, for the task of Question Answering. ## Overview **Language model:** roberta-large **Language:** English **Downstream-task:** Extractive QA **Training data:** SQuAD 2.0 **Eval data:** SQuAD 2.0 **Code:** See [an example QA pipeline on Haystack](https://haystack.deepset.ai/tutorials/first-qa-system) **Infrastructure**: 4x Tesla v100 ## Hyperparameters ``` base_LM_model = "roberta-large" ``` ## Using a distilled model instead Please note that we have also released a distilled version of this model called [deepset/roberta-base-squad2-distilled](https://huggingface.co/deepset/roberta-base-squad2-distilled). The distilled model has a comparable prediction quality and runs at twice the speed of the large model. ## Usage ### In Haystack Haystack is an NLP framework by deepset. You can use this model in a Haystack pipeline to do question answering at scale (over many documents). 
To load the model in [Haystack](https://github.com/deepset-ai/haystack/): ```python reader = FARMReader(model_name_or_path="deepset/roberta-large-squad2") # or reader = TransformersReader(model_name_or_path="deepset/roberta-large-squad2",tokenizer="deepset/roberta-large-squad2") ``` For a complete example of ``roberta-large-squad2`` being used for Question Answering, check out the [Tutorials in Haystack Documentation](https://haystack.deepset.ai/tutorials/first-qa-system) ### In Transformers ```python from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline model_name = "deepset/roberta-large-squad2" # a) Get predictions nlp = pipeline('question-answering', model=model_name, tokenizer=model_name) QA_input = { 'question': 'Why is model conversion important?', 'context': 'The option to convert models between FARM and transformers gives freedom to the user and let people easily switch between frameworks.' } res = nlp(QA_input) # b) Load model & tokenizer model = AutoModelForQuestionAnswering.from_pretrained(model_name) tokenizer = AutoTokenizer.from_pretrained(model_name) ``` ## Authors **Branden Chan:** branden.chan@deepset.ai **Timo Möller:** timo.moeller@deepset.ai **Malte Pietsch:** malte.pietsch@deepset.ai **Tanay Soni:** tanay.soni@deepset.ai ## About us <div class="grid lg:grid-cols-2 gap-x-4 gap-y-3"> <div class="w-full h-40 object-cover mb-2 rounded-lg flex items-center justify-center"> <img alt="" src="https://raw.githubusercontent.com/deepset-ai/.github/main/deepset-logo-colored.png" class="w-40"/> </div> <div class="w-full h-40 object-cover mb-2 rounded-lg flex items-center justify-center"> <img alt="" src="https://raw.githubusercontent.com/deepset-ai/.github/main/haystack-logo-colored.png" class="w-40"/> </div> </div> [deepset](http://deepset.ai/) is the company behind the open-source NLP framework [Haystack](https://haystack.deepset.ai/) which is designed to help you build production ready NLP systems that use: Question answering, 
summarization, ranking etc. Some of our other work: - [Distilled roberta-base-squad2 (aka "tinyroberta-squad2")]([https://huggingface.co/deepset/tinyroberta-squad2) - [German BERT (aka "bert-base-german-cased")](https://deepset.ai/german-bert) - [GermanQuAD and GermanDPR datasets and models (aka "gelectra-base-germanquad", "gbert-base-germandpr")](https://deepset.ai/germanquad) ## Get in touch and join the Haystack community <p>For more info on Haystack, visit our <strong><a href="https://github.com/deepset-ai/haystack">GitHub</a></strong> repo and <strong><a href="https://docs.haystack.deepset.ai">Documentation</a></strong>. We also have a <strong><a class="h-7" href="https://haystack.deepset.ai/community">Discord community open to everyone!</a></strong></p> [Twitter](https://twitter.com/deepset_ai) | [LinkedIn](https://www.linkedin.com/company/deepset-ai/) | [Discord](https://haystack.deepset.ai/community) | [GitHub Discussions](https://github.com/deepset-ai/haystack/discussions) | [Website](https://deepset.ai) By the way: [we're hiring!](http://www.deepset.ai/jobs)
6,960
[ [ -0.02728271484375, -0.051910400390625, 0.03564453125, 0.0052947998046875, -0.0001895427703857422, 0.006744384765625, -0.0076751708984375, -0.031036376953125, 0.0248260498046875, 0.0243072509765625, -0.059661865234375, -0.04498291015625, -0.018829345703125, 0.0093994140625, -0.033447265625, 0.0738525390625, 0.0138092041015625, 0.0018014907836914062, -0.016357421875, -0.002254486083984375, -0.033843994140625, -0.03729248046875, -0.054931640625, -0.01354217529296875, 0.024749755859375, 0.0244140625, 0.050811767578125, 0.0204925537109375, 0.0382080078125, 0.025909423828125, -0.0081024169921875, 0.009368896484375, -0.0308380126953125, 0.017852783203125, -0.00843048095703125, -0.0265350341796875, -0.0285797119140625, -0.001064300537109375, 0.037200927734375, 0.03289794921875, -0.01328277587890625, 0.03955078125, -0.01465606689453125, 0.0538330078125, -0.038177490234375, 0.00614166259765625, -0.051788330078125, -0.018157958984375, 0.013916015625, 0.020233154296875, -0.011749267578125, -0.015960693359375, 0.01708984375, -0.0494384765625, 0.0208892822265625, -0.01250457763671875, 0.084716796875, 0.0239105224609375, -0.0137481689453125, -0.0162811279296875, -0.040130615234375, 0.0633544921875, -0.07781982421875, -0.0029449462890625, 0.041107177734375, 0.03570556640625, 0.01079559326171875, -0.067626953125, -0.0528564453125, -0.000766754150390625, -0.017822265625, 0.0145263671875, -0.0114898681640625, -0.019927978515625, 0.0117034912109375, 0.0311737060546875, -0.056549072265625, 0.00896453857421875, -0.041778564453125, 0.0012006759643554688, 0.06256103515625, 0.013946533203125, 0.0129852294921875, -0.0196380615234375, -0.0172882080078125, -0.02069091796875, -0.031585693359375, 0.0103912353515625, 0.00959014892578125, 0.024261474609375, -0.01222991943359375, 0.039031982421875, -0.032684326171875, 0.04486083984375, 0.019012451171875, 0.028350830078125, 0.032135009765625, -0.05096435546875, -0.016693115234375, -0.0185546875, 0.0699462890625, 0.0276031494140625, 
0.004673004150390625, -0.0019359588623046875, -0.0193634033203125, -0.0142974853515625, 0.0168609619140625, -0.063720703125, -0.0136566162109375, 0.044403076171875, -0.023406982421875, -0.032470703125, 0.0033397674560546875, -0.05511474609375, -0.029998779296875, 0.005168914794921875, 0.046051025390625, -0.0253448486328125, -0.02801513671875, 0.02520751953125, -0.022857666015625, 0.047515869140625, 0.01250457763671875, -0.05975341796875, 0.00803375244140625, 0.046722412109375, 0.05291748046875, 0.018280029296875, -0.0217437744140625, -0.0335693359375, -0.01021575927734375, -0.01328277587890625, 0.044921875, -0.02349853515625, -0.002773284912109375, 0.0005717277526855469, 0.016510009765625, -0.0027408599853515625, -0.031036376953125, 0.01493072509765625, -0.051361083984375, 0.04718017578125, -0.008575439453125, -0.039398193359375, -0.0200347900390625, 0.0298614501953125, -0.05511474609375, 0.07666015625, 0.0251007080078125, -0.040679931640625, 0.00734710693359375, -0.058349609375, -0.02203369140625, 0.005420684814453125, 0.006473541259765625, -0.0308837890625, -0.019012451171875, 0.0274810791015625, 0.036468505859375, -0.024566650390625, 0.01491546630859375, -0.020294189453125, -0.031036376953125, 0.0214385986328125, 0.00006908178329467773, 0.09527587890625, 0.006542205810546875, -0.028594970703125, 0.00101470947265625, -0.050445556640625, 0.0219573974609375, 0.01708984375, -0.0164031982421875, -0.0014867782592773438, -0.0090484619140625, 0.0095062255859375, 0.020294189453125, 0.046844482421875, -0.0322265625, 0.01129150390625, -0.043121337890625, 0.054412841796875, 0.044097900390625, 0.0033397674560546875, 0.0268096923828125, -0.028594970703125, 0.048614501953125, -0.007099151611328125, 0.00914764404296875, 0.0029048919677734375, -0.0247344970703125, -0.0689697265625, -0.0072479248046875, 0.03826904296875, 0.054412841796875, -0.05889892578125, 0.059478759765625, -0.01094818115234375, -0.04461669921875, -0.059112548828125, 0.00550079345703125, 0.0254974365234375, 
0.021759033203125, 0.037628173828125, 0.002803802490234375, -0.05426025390625, -0.0740966796875, -0.0028476715087890625, -0.01482391357421875, -0.0171051025390625, 0.01806640625, 0.053314208984375, -0.023223876953125, 0.060577392578125, -0.049896240234375, -0.02447509765625, -0.0157318115234375, -0.01030731201171875, 0.042724609375, 0.04864501953125, 0.04449462890625, -0.063720703125, -0.03863525390625, -0.018646240234375, -0.05572509765625, 0.0263519287109375, -0.0084686279296875, -0.01873779296875, 0.00801849365234375, 0.03424072265625, -0.057647705078125, 0.0180816650390625, 0.0396728515625, -0.038818359375, 0.030792236328125, 0.0013904571533203125, 0.006275177001953125, -0.1156005859375, 0.0234527587890625, -0.0009899139404296875, -0.0188140869140625, -0.03179931640625, 0.0254058837890625, -0.017425537109375, -0.012908935546875, -0.032928466796875, 0.04571533203125, -0.026519775390625, 0.007137298583984375, 0.01549530029296875, 0.01081085205078125, 0.019012451171875, 0.0377197265625, -0.01514434814453125, 0.07977294921875, 0.04864501953125, -0.0352783203125, 0.05120849609375, 0.047882080078125, -0.034149169921875, 0.0216827392578125, -0.0772705078125, 0.0156402587890625, 0.0034618377685546875, 0.01593017578125, -0.0723876953125, -0.0194091796875, 0.00720977783203125, -0.05670166015625, 0.004974365234375, -0.007610321044921875, -0.053680419921875, -0.03680419921875, -0.034698486328125, 0.0181884765625, 0.061431884765625, -0.0262298583984375, 0.0247802734375, 0.03106689453125, -0.003139495849609375, -0.04315185546875, -0.06494140625, 0.005664825439453125, -0.0117340087890625, -0.04888916015625, 0.015838623046875, -0.01250457763671875, -0.0100860595703125, 0.00858306884765625, 0.00731658935546875, -0.0426025390625, 0.0189361572265625, 0.00806427001953125, 0.0284271240234375, -0.030792236328125, 0.026458740234375, -0.01538848876953125, -0.01007080078125, 0.0007834434509277344, -0.0231475830078125, 0.04345703125, -0.047821044921875, 0.004673004150390625, 
-0.045013427734375, 0.0304107666015625, 0.039947509765625, -0.03302001953125, 0.06390380859375, 0.049285888671875, -0.02783203125, -0.0016078948974609375, -0.04144287109375, -0.0241546630859375, -0.036865234375, 0.034637451171875, -0.01387786865234375, -0.0665283203125, 0.044403076171875, 0.0215606689453125, 0.0157623291015625, 0.0721435546875, 0.03302001953125, -0.03668212890625, 0.07293701171875, 0.040283203125, -0.0034008026123046875, 0.027587890625, -0.057830810546875, 0.0038623809814453125, -0.070556640625, -0.006450653076171875, -0.047393798828125, -0.034271240234375, -0.047576904296875, -0.029388427734375, 0.017852783203125, 0.007625579833984375, -0.036865234375, 0.038726806640625, -0.05645751953125, 0.038909912109375, 0.055511474609375, 0.01001739501953125, 0.00907135009765625, -0.00884246826171875, 0.0194244384765625, 0.021240234375, -0.054229736328125, -0.034210205078125, 0.08367919921875, 0.0095062255859375, 0.03399658203125, 0.01085662841796875, 0.061981201171875, 0.01248931884765625, -0.01788330078125, -0.04901123046875, 0.041595458984375, -0.0168609619140625, -0.0810546875, -0.04986572265625, -0.0285491943359375, -0.07977294921875, -0.0025768280029296875, -0.021453857421875, -0.043304443359375, 0.01910400390625, -0.0015277862548828125, -0.04803466796875, 0.014495849609375, -0.050811767578125, 0.0716552734375, -0.007350921630859375, -0.006504058837890625, -0.01617431640625, -0.059906005859375, 0.0169219970703125, 0.0036716461181640625, -0.0005707740783691406, -0.01053619384765625, 0.0007419586181640625, 0.05517578125, -0.040802001953125, 0.06744384765625, -0.007656097412109375, 0.0021381378173828125, 0.032958984375, -0.0021953582763671875, 0.030487060546875, 0.020965576171875, -0.029541015625, 0.015045166015625, 0.03302001953125, -0.042755126953125, -0.039337158203125, 0.0496826171875, -0.0670166015625, -0.0347900390625, -0.034149169921875, -0.0306243896484375, -0.007762908935546875, 0.02850341796875, 0.01544189453125, 0.026763916015625, 
-0.01175689697265625, 0.043548583984375, 0.0462646484375, -0.01427459716796875, 0.027557373046875, 0.03302001953125, -0.01355743408203125, -0.0250091552734375, 0.058563232421875, -0.00643157958984375, 0.0136566162109375, 0.0335693359375, 0.00839996337890625, -0.029052734375, -0.03289794921875, -0.036956787109375, 0.0179901123046875, -0.04217529296875, -0.025909423828125, -0.038360595703125, -0.040283203125, -0.049957275390625, -0.0021076202392578125, -0.0249176025390625, -0.04718017578125, -0.03753662109375, -0.0036563873291015625, 0.051239013671875, 0.043365478515625, 0.003650665283203125, 0.01412200927734375, -0.048126220703125, 0.0286712646484375, 0.039459228515625, 0.025909423828125, -0.01143646240234375, -0.0374755859375, -0.017242431640625, 0.03302001953125, -0.0022335052490234375, -0.046051025390625, 0.0139312744140625, 0.0166015625, 0.022613525390625, -0.00850677490234375, 0.01421356201171875, 0.040069580078125, -0.023681640625, 0.0677490234375, 0.0093536376953125, -0.05987548828125, 0.047637939453125, -0.031036376953125, 0.035552978515625, 0.0811767578125, 0.020263671875, -0.046539306640625, -0.0222320556640625, -0.05340576171875, -0.071044921875, 0.041046142578125, 0.0233154296875, 0.02069091796875, -0.0003902912139892578, 0.0266571044921875, -0.0035610198974609375, 0.0229339599609375, -0.04852294921875, -0.01419830322265625, -0.01355743408203125, -0.02227783203125, -0.00909423828125, -0.011016845703125, -0.01284027099609375, -0.0287017822265625, 0.0692138671875, -0.0058746337890625, 0.00893402099609375, 0.021026611328125, -0.01311492919921875, 0.01407623291015625, 0.01110076904296875, 0.0286407470703125, 0.063232421875, -0.02581787109375, -0.0157928466796875, 0.01561737060546875, -0.020965576171875, -0.0012140274047851562, 0.01702880859375, -0.037811279296875, 0.003543853759765625, 0.0298004150390625, 0.053436279296875, 0.0035400390625, -0.05096435546875, 0.0467529296875, -0.00954437255859375, -0.031982421875, -0.048492431640625, 0.012420654296875, 
0.02374267578125, 0.033233642578125, 0.032073974609375, -0.01538848876953125, 0.01357269287109375, -0.03955078125, 0.00930023193359375, 0.039520263671875, -0.0289306640625, -0.0028095245361328125, 0.034759521484375, 0.02886962890625, -0.03240966796875, 0.06390380859375, -0.0182342529296875, -0.04058837890625, 0.07159423828125, 0.015838623046875, 0.07275390625, 0.0166015625, 0.034088134765625, 0.044403076171875, 0.0249176025390625, 0.0071258544921875, 0.0193939208984375, 0.00690460205078125, -0.0413818359375, -0.0261077880859375, -0.05291748046875, -0.0149688720703125, 0.0265350341796875, -0.0555419921875, 0.01049041748046875, -0.0426025390625, -0.00901031494140625, 0.005741119384765625, 0.0256195068359375, -0.06658935546875, 0.01568603515625, -0.015625, 0.06414794921875, -0.0384521484375, 0.034759521484375, 0.063232421875, -0.054962158203125, -0.0633544921875, -0.006275177001953125, -0.0206451416015625, -0.0703125, 0.0302886962890625, 0.01477813720703125, -0.0014553070068359375, 0.0232391357421875, -0.062469482421875, -0.07354736328125, 0.09857177734375, -0.0010728836059570312, -0.03155517578125, -0.02349853515625, -0.00815582275390625, 0.043304443359375, -0.0218353271484375, 0.0147705078125, 0.036590576171875, 0.035186767578125, 0.0069427490234375, -0.0675048828125, 0.0186920166015625, -0.0298004150390625, -0.0032596588134765625, -0.0002579689025878906, -0.059600830078125, 0.060821533203125, -0.0152130126953125, -0.017059326171875, 0.0330810546875, 0.0379638671875, 0.0139312744140625, 0.005252838134765625, 0.03863525390625, 0.039581298828125, 0.057861328125, -0.00040268898010253906, 0.06939697265625, -0.0138702392578125, 0.04864501953125, 0.0906982421875, -0.0110626220703125, 0.0738525390625, 0.0266571044921875, -0.0285797119140625, 0.055389404296875, 0.05126953125, -0.028045654296875, 0.026763916015625, 0.014373779296875, 0.003505706787109375, -0.0311737060546875, 0.00749969482421875, -0.0511474609375, 0.04034423828125, 0.004100799560546875, -0.0181121826171875, 
-0.01535797119140625, -0.0246429443359375, -0.016357421875, 0.0016775131225585938, -0.0040740966796875, 0.067626953125, -0.006771087646484375, -0.039459228515625, 0.0689697265625, -0.010955810546875, 0.05194091796875, -0.048004150390625, -0.0010986328125, -0.017303466796875, 0.0099029541015625, -0.00927734375, -0.0699462890625, 0.011016845703125, -0.0052032470703125, -0.034698486328125, -0.0088958740234375, 0.03173828125, -0.0350341796875, -0.06414794921875, 0.0027484893798828125, 0.040985107421875, 0.0187225341796875, -0.0072174072265625, -0.0745849609375, -0.0190277099609375, -0.00019919872283935547, -0.018524169921875, 0.00997161865234375, 0.028594970703125, 0.019561767578125, 0.047088623046875, 0.055328369140625, -0.001956939697265625, -0.0057220458984375, -0.004726409912109375, 0.0660400390625, -0.05718994140625, -0.0286407470703125, -0.048553466796875, 0.05084228515625, -0.0251007080078125, -0.038360595703125, 0.049285888671875, 0.048614501953125, 0.060272216796875, -0.01241302490234375, 0.0535888671875, -0.01690673828125, 0.05145263671875, -0.034149169921875, 0.07275390625, -0.06427001953125, 0.004688262939453125, 0.0018930435180664062, -0.053314208984375, -0.00806427001953125, 0.055908203125, -0.006969451904296875, 0.0122222900390625, 0.05291748046875, 0.059295654296875, 0.0072479248046875, -0.022491455078125, 0.003749847412109375, 0.030609130859375, 0.01308441162109375, 0.0640869140625, 0.05572509765625, -0.055389404296875, 0.04876708984375, -0.0208740234375, -0.003692626953125, -0.02215576171875, -0.048126220703125, -0.06610107421875, -0.056488037109375, -0.02215576171875, -0.05029296875, 0.0009555816650390625, 0.055328369140625, 0.060760498046875, -0.069580078125, -0.01311492919921875, -0.0012140274047851562, 0.01739501953125, -0.0161285400390625, -0.0224761962890625, 0.02740478515625, -0.018035888671875, -0.043914794921875, 0.0242919921875, -0.00543975830078125, -0.0011014938354492188, -0.028839111328125, 0.00975799560546875, -0.05426025390625, 
-0.007450103759765625, 0.0323486328125, 0.0285797119140625, -0.047821044921875, -0.01039886474609375, 0.01454925537109375, -0.0194091796875, -0.0027618408203125, 0.02874755859375, -0.06768798828125, 0.01534271240234375, 0.04144287109375, 0.05633544921875, 0.044464111328125, 0.001094818115234375, 0.040679931640625, -0.044677734375, 0.007678985595703125, 0.0391845703125, 0.010162353515625, 0.0245361328125, -0.04107666015625, 0.054290771484375, -0.0017337799072265625, -0.03875732421875, -0.06304931640625, 0.00461578369140625, -0.0679931640625, -0.035919189453125, 0.09228515625, -0.00011938810348510742, -0.0198822021484375, 0.01043701171875, -0.009063720703125, 0.0117034912109375, -0.028167724609375, 0.054534912109375, 0.055908203125, 0.017425537109375, 0.004428863525390625, -0.045928955078125, 0.032867431640625, 0.035430908203125, -0.0648193359375, -0.001949310302734375, 0.03729248046875, 0.0261688232421875, 0.0175933837890625, 0.04119873046875, 0.01038360595703125, 0.0340576171875, -0.013397216796875, 0.005359649658203125, -0.0115814208984375, -0.00794219970703125, -0.032318115234375, -0.00502777099609375, -0.01538848876953125, -0.02911376953125 ] ]
lidiya/bart-large-xsum-samsum
2023-03-16T22:44:01.000Z
[ "transformers", "pytorch", "safetensors", "bart", "text2text-generation", "seq2seq", "summarization", "en", "dataset:samsum", "license:apache-2.0", "model-index", "autotrain_compatible", "endpoints_compatible", "has_space", "region:us" ]
summarization
lidiya
null
null
lidiya/bart-large-xsum-samsum
34
8,886
transformers
2022-03-02T23:29:05
--- language: en tags: - bart - seq2seq - summarization license: apache-2.0 datasets: - samsum widget: - text: | Hannah: Hey, do you have Betty's number? Amanda: Lemme check Amanda: Sorry, can't find it. Amanda: Ask Larry Amanda: He called her last time we were at the park together Hannah: I don't know him well Amanda: Don't be shy, he's very nice Hannah: If you say so.. Hannah: I'd rather you texted him Amanda: Just text him 🙂 Hannah: Urgh.. Alright Hannah: Bye Amanda: Bye bye model-index: - name: bart-large-xsum-samsum results: - task: name: Abstractive Text Summarization type: abstractive-text-summarization dataset: name: "SAMSum Corpus: A Human-annotated Dialogue Dataset for Abstractive Summarization" type: samsum metrics: - name: Validation ROUGE-1 type: rouge-1 value: 54.3921 - name: Validation ROUGE-2 type: rouge-2 value: 29.8078 - name: Validation ROUGE-L type: rouge-l value: 45.1543 - name: Test ROUGE-1 type: rouge-1 value: 53.3059 - name: Test ROUGE-2 type: rouge-2 value: 28.355 - name: Test ROUGE-L type: rouge-l value: 44.0953 --- ## `bart-large-xsum-samsum` This model was obtained by fine-tuning `facebook/bart-large-xsum` on [Samsum](https://huggingface.co/datasets/samsum) dataset. ## Usage ```python from transformers import pipeline summarizer = pipeline("summarization", model="lidiya/bart-large-xsum-samsum") conversation = '''Hannah: Hey, do you have Betty's number? Amanda: Lemme check Amanda: Sorry, can't find it. Amanda: Ask Larry Amanda: He called her last time we were at the park together Hannah: I don't know him well Amanda: Don't be shy, he's very nice Hannah: If you say so.. Hannah: I'd rather you texted him Amanda: Just text him 🙂 Hannah: Urgh.. 
Alright Hannah: Bye Amanda: Bye bye ''' summarizer(conversation) ``` ## Training procedure - Colab notebook: https://colab.research.google.com/drive/1dul0Sg-TTMy9xZCJzmDRajXbyzDwtYx6?usp=sharing ## Results | key | value | | --- | ----- | | eval_rouge1 | 54.3921 | | eval_rouge2 | 29.8078 | | eval_rougeL | 45.1543 | | eval_rougeLsum | 49.942 | | test_rouge1 | 53.3059 | | test_rouge2 | 28.355 | | test_rougeL | 44.0953 | | test_rougeLsum | 48.9246 |
2,427
[ [ -0.024322509765625, -0.055572509765625, 0.0288848876953125, 0.0175018310546875, -0.02252197265625, 0.002689361572265625, -0.01947021484375, -0.006084442138671875, 0.05499267578125, 0.03131103515625, -0.04986572265625, -0.040985107421875, -0.04840087890625, 0.0071258544921875, -0.042755126953125, 0.0869140625, 0.0135955810546875, 0.0021820068359375, 0.0038204193115234375, -0.00690460205078125, -0.0226898193359375, -0.036865234375, -0.0609130859375, -0.00759124755859375, 0.03680419921875, 0.0413818359375, 0.0584716796875, 0.03082275390625, 0.041473388671875, 0.0313720703125, -0.0268402099609375, 0.007106781005859375, -0.053680419921875, -0.0006871223449707031, -0.0004227161407470703, -0.043121337890625, -0.061126708984375, -0.00453948974609375, 0.05810546875, 0.06390380859375, 0.01288604736328125, 0.04132080078125, -0.006866455078125, 0.039154052734375, -0.0240325927734375, 0.0428466796875, -0.030426025390625, -0.0190277099609375, -0.0009760856628417969, -0.0039043426513671875, -0.0316162109375, -0.027191162109375, 0.00453948974609375, -0.02642822265625, 0.020660400390625, 0.00872039794921875, 0.1019287109375, 0.01500701904296875, -0.0312347412109375, -0.0112762451171875, -0.01837158203125, 0.0626220703125, -0.052215576171875, 0.0011892318725585938, 0.045623779296875, 0.01294708251953125, -0.005558013916015625, -0.06072998046875, -0.038177490234375, 0.0012426376342773438, -0.026153564453125, 0.0110015869140625, -0.019317626953125, -0.008087158203125, 0.03143310546875, 0.03314208984375, -0.0291900634765625, -0.007709503173828125, -0.0567626953125, -0.003536224365234375, 0.06396484375, 0.0305633544921875, -0.002231597900390625, -0.019622802734375, -0.0037631988525390625, -0.029876708984375, -0.0157623291015625, 0.0189666748046875, 0.018463134765625, 0.047576904296875, -0.039794921875, 0.057891845703125, -0.0325927734375, 0.040252685546875, 0.0157928466796875, -0.0080718994140625, 0.06982421875, -0.0195770263671875, -0.0217132568359375, -0.005855560302734375, 
0.0704345703125, 0.036773681640625, 0.0318603515625, 0.0238494873046875, 0.005702972412109375, -0.0019474029541015625, 0.0028533935546875, -0.078857421875, -0.0290985107421875, 0.025115966796875, -0.043701171875, -0.02337646484375, 0.0211944580078125, -0.041839599609375, -0.004230499267578125, -0.0192108154296875, 0.0196685791015625, -0.021759033203125, -0.0216827392578125, -0.00018775463104248047, -0.02032470703125, -0.0006823539733886719, 0.006526947021484375, -0.0711669921875, 0.0235748291015625, 0.0288848876953125, 0.07080078125, 0.0272979736328125, -0.014556884765625, -0.038055419921875, -0.01305389404296875, -0.033905029296875, 0.03076171875, -0.0153961181640625, -0.0328369140625, -0.0190277099609375, 0.0219268798828125, -0.002780914306640625, -0.0292205810546875, 0.046234130859375, -0.02490234375, 0.0309600830078125, -0.0298919677734375, -0.032867431640625, -0.025787353515625, 0.01444244384765625, -0.033172607421875, 0.06353759765625, 0.03631591796875, -0.058929443359375, 0.032501220703125, -0.048858642578125, -0.0257568359375, 0.006687164306640625, -0.0013761520385742188, -0.057403564453125, -0.0013666152954101562, 0.0245513916015625, 0.059295654296875, -0.004608154296875, 0.0208740234375, -0.0396728515625, -0.0271759033203125, 0.022003173828125, -0.0088348388671875, 0.08929443359375, 0.01108551025390625, -0.0006475448608398438, 0.0214385986328125, -0.06365966796875, 0.0116424560546875, 0.0193939208984375, -0.015655517578125, -0.022491455078125, -0.03009033203125, -0.002471923828125, 0.006595611572265625, 0.0215911865234375, -0.03289794921875, 0.0240325927734375, -0.021697998046875, 0.0251922607421875, 0.045501708984375, -0.0029621124267578125, 0.026123046875, -0.04010009765625, 0.0267181396484375, -0.0100250244140625, -0.003936767578125, -0.02490234375, -0.04144287109375, -0.058380126953125, -0.050537109375, 0.024169921875, 0.031280517578125, -0.031005859375, 0.0550537109375, -0.04449462890625, -0.04058837890625, -0.038360595703125, 0.0009756088256835938, 
0.0155029296875, 0.0411376953125, 0.024200439453125, -0.016387939453125, -0.0498046875, -0.06524658203125, 0.00940704345703125, -0.02496337890625, -0.016357421875, 0.0254364013671875, 0.04559326171875, -0.027679443359375, 0.07379150390625, -0.0672607421875, -0.0180816650390625, -0.0282440185546875, 0.0037708282470703125, 0.050445556640625, 0.0261993408203125, 0.046966552734375, -0.050262451171875, -0.0626220703125, -0.010284423828125, -0.05712890625, -0.01171875, -0.0163726806640625, -0.0142669677734375, 0.01367950439453125, 0.0234375, -0.062286376953125, 0.04425048828125, 0.038726806640625, -0.043975830078125, 0.035675048828125, -0.01039886474609375, 0.007488250732421875, -0.088134765625, 0.011688232421875, -0.00647735595703125, -0.034423828125, -0.0302886962890625, -0.006988525390625, -0.003631591796875, 0.005275726318359375, -0.0130767822265625, 0.044921875, -0.034576416015625, -0.0008454322814941406, -0.0036373138427734375, 0.0023899078369140625, -0.0078277587890625, 0.043243408203125, -0.004306793212890625, 0.042236328125, 0.054931640625, -0.035247802734375, 0.033782958984375, 0.06756591796875, -0.0156402587890625, 0.034027099609375, -0.057159423828125, -0.014892578125, -0.0059051513671875, 0.0369873046875, -0.09423828125, -0.035919189453125, 0.0215301513671875, -0.05462646484375, 0.00962066650390625, -0.006145477294921875, -0.0288543701171875, -0.047943115234375, -0.0207366943359375, 0.0180206298828125, 0.042572021484375, -0.03057861328125, 0.0302886962890625, 0.0252532958984375, -0.0364990234375, -0.0265655517578125, -0.046875, 0.0088043212890625, -0.0276336669921875, -0.060302734375, 0.015411376953125, -0.005504608154296875, -0.01169586181640625, -0.014984130859375, 0.0076904296875, -0.01715087890625, -0.01203155517578125, 0.030914306640625, 0.03131103515625, 0.0007576942443847656, -0.007537841796875, 0.00909423828125, -0.0199127197265625, 0.021820068359375, 0.03265380859375, 0.06964111328125, -0.0246429443359375, 0.00882720947265625, -0.047576904296875, 
0.009246826171875, 0.039886474609375, 0.0118255615234375, 0.0657958984375, 0.044464111328125, -0.0026149749755859375, 0.0182342529296875, -0.01354217529296875, -0.023529052734375, -0.037078857421875, 0.035980224609375, -0.034912109375, -0.049285888671875, 0.054779052734375, 0.00997161865234375, 0.0033779144287109375, 0.0309600830078125, 0.05657958984375, -0.006744384765625, 0.07989501953125, 0.0288543701171875, -0.0125274658203125, 0.0389404296875, -0.05230712890625, 0.0228271484375, -0.047149658203125, -0.010223388671875, -0.02606201171875, -0.02642822265625, -0.049285888671875, -0.000720977783203125, 0.025299072265625, -0.012054443359375, -0.049957275390625, 0.054473876953125, -0.0175628662109375, 0.0189056396484375, 0.06072998046875, 0.0287017822265625, 0.0073394775390625, -0.0122833251953125, 0.0204315185546875, -0.003665924072265625, -0.03729248046875, -0.0118255615234375, 0.08502197265625, 0.0290374755859375, 0.045562744140625, -0.0027027130126953125, 0.058929443359375, 0.006443023681640625, 0.01468658447265625, -0.052398681640625, 0.050537109375, 0.007354736328125, -0.066162109375, -0.03668212890625, -0.057281494140625, -0.091064453125, 0.0145111083984375, -0.01541900634765625, -0.0711669921875, 0.002780914306640625, 0.0025653839111328125, -0.0301513671875, 0.0019855499267578125, -0.054931640625, 0.0594482421875, -0.0016889572143554688, -0.007320404052734375, -0.01201629638671875, -0.0723876953125, 0.030242919921875, -0.004848480224609375, 0.005054473876953125, -0.006198883056640625, 0.0167694091796875, 0.0506591796875, -0.04400634765625, 0.07159423828125, 0.0071258544921875, 0.005313873291015625, 0.023162841796875, -0.01218414306640625, 0.0295257568359375, -0.0007662773132324219, 0.007720947265625, 0.01126861572265625, 0.0028400421142578125, -0.04742431640625, -0.024627685546875, 0.031646728515625, -0.056854248046875, -0.018096923828125, -0.03466796875, -0.058563232421875, -0.002208709716796875, 0.033721923828125, 0.03851318359375, 0.0222930908203125, 
0.01142120361328125, 0.03289794921875, 0.020355224609375, -0.017242431640625, 0.0362548828125, 0.014373779296875, -0.039337158203125, -0.036590576171875, 0.0570068359375, 0.00433349609375, 0.028167724609375, 0.016021728515625, 0.01275634765625, -0.027587890625, 0.00916290283203125, -0.006488800048828125, 0.0455322265625, -0.027252197265625, -0.024658203125, -0.04705810546875, -0.0256195068359375, -0.046173095703125, -0.00921630859375, -0.0286407470703125, -0.04486083984375, -0.0290985107421875, -0.0208282470703125, 0.0262451171875, 0.028564453125, -0.00594329833984375, 0.0595703125, -0.053131103515625, 0.0322265625, 0.006763458251953125, 0.01141357421875, -0.00608062744140625, -0.062744140625, -0.0242462158203125, 0.021148681640625, -0.038848876953125, -0.04754638671875, 0.0584716796875, 0.00742340087890625, 0.01611328125, 0.04400634765625, 0.0149383544921875, 0.057403564453125, -0.0111236572265625, 0.049774169921875, 0.0270538330078125, -0.0631103515625, 0.0222015380859375, -0.040313720703125, 0.01224517822265625, 0.0276947021484375, 0.0269012451171875, -0.033660888671875, -0.0295257568359375, -0.090576171875, -0.07611083984375, 0.05865478515625, 0.035125732421875, 0.0234375, 0.00726318359375, -0.0032215118408203125, -0.0174560546875, 0.0377197265625, -0.068603515625, -0.043182373046875, -0.00396728515625, -0.017242431640625, -0.0095367431640625, -0.01727294921875, -0.0179595947265625, -0.03472900390625, 0.06219482421875, -0.0035533905029296875, 0.0291748046875, -0.00997161865234375, 0.0184326171875, 0.0001767873764038086, -0.005077362060546875, 0.03448486328125, 0.042083740234375, -0.046600341796875, -0.01922607421875, 0.0153656005859375, -0.02978515625, -0.0132904052734375, 0.00592803955078125, -0.00591278076171875, 0.0070953369140625, 0.03973388671875, 0.06878662109375, -0.005870819091796875, -0.044586181640625, 0.050079345703125, -0.007659912109375, -0.02691650390625, -0.04010009765625, 0.00901031494140625, 0.0002582073211669922, 0.0133514404296875, 
0.023529052734375, 0.0057373046875, 0.0045013427734375, -0.042236328125, 0.0118560791015625, 0.03216552734375, -0.01081085205078125, -0.01849365234375, 0.0421142578125, 0.0013561248779296875, -0.021453857421875, 0.0626220703125, -0.01367950439453125, -0.040191650390625, 0.043548583984375, 0.03326416015625, 0.046051025390625, -0.0026226043701171875, 0.019866943359375, 0.05023193359375, -0.0019207000732421875, -0.004436492919921875, 0.031768798828125, 0.01224517822265625, -0.050384521484375, -0.02880859375, -0.0218048095703125, -0.024627685546875, 0.01611328125, -0.07098388671875, 0.050628662109375, -0.02276611328125, -0.033111572265625, 0.00649261474609375, 0.006488800048828125, -0.033843994140625, 0.02362060546875, -0.007122039794921875, 0.054046630859375, -0.07861328125, 0.043243408203125, 0.03509521484375, -0.044097900390625, -0.07611083984375, -0.022186279296875, -0.01148223876953125, -0.06707763671875, 0.040252685546875, 0.0196990966796875, 0.03448486328125, -0.00986480712890625, -0.0662841796875, -0.072265625, 0.07489013671875, 0.024200439453125, -0.036376953125, 0.001861572265625, 0.003734588623046875, 0.046905517578125, -0.017333984375, 0.049713134765625, 0.047943115234375, 0.0239715576171875, 0.0213775634765625, -0.07177734375, -0.0017442703247070312, -0.0261993408203125, -0.0189971923828125, 0.02099609375, -0.080078125, 0.08428955078125, -0.0294952392578125, 0.0013704299926757812, 0.0205078125, 0.05157470703125, 0.015716552734375, 0.025482177734375, 0.03912353515625, 0.04443359375, 0.063232421875, -0.006740570068359375, 0.065673828125, -0.01282501220703125, 0.060302734375, 0.08001708984375, 0.01076507568359375, 0.044342041015625, 0.01708984375, -0.0305023193359375, 0.04632568359375, 0.04541015625, -0.01261138916015625, 0.034088134765625, 0.01099395751953125, 0.0070037841796875, -0.0157928466796875, 0.02862548828125, -0.0307159423828125, 0.03631591796875, 0.0255126953125, -0.0418701171875, -0.01038360595703125, -0.020050048828125, 0.0350341796875, 
-0.010009765625, -0.0132904052734375, 0.04541015625, 0.0074920654296875, -0.03704833984375, 0.0645751953125, -0.0115814208984375, 0.028289794921875, -0.039154052734375, 0.004169464111328125, -0.0217132568359375, 0.032562255859375, -0.01995849609375, -0.066162109375, 0.0198516845703125, -0.0066375732421875, -0.016082763671875, -0.007579803466796875, 0.0132293701171875, -0.033538818359375, -0.057891845703125, 0.00807952880859375, 0.0231781005859375, 0.03790283203125, -0.00714111328125, -0.049957275390625, 0.0009264945983886719, 0.02349853515625, -0.0174713134765625, 0.007511138916015625, 0.019012451171875, -0.00316619873046875, 0.07586669921875, 0.04632568359375, 0.0022296905517578125, -0.00733184814453125, -0.0013179779052734375, 0.0693359375, -0.04705810546875, -0.056304931640625, -0.06707763671875, 0.05755615234375, -0.037200927734375, -0.047821044921875, 0.059173583984375, 0.051177978515625, 0.05126953125, -0.00847625732421875, 0.0511474609375, -0.028594970703125, 0.063720703125, -0.009613037109375, 0.03741455078125, -0.03466796875, 0.00725555419921875, -0.04730224609375, -0.061798095703125, -0.0239105224609375, 0.07000732421875, -0.02142333984375, 0.004741668701171875, 0.047515869140625, 0.05816650390625, -0.003910064697265625, 0.0086212158203125, 0.004192352294921875, 0.031707763671875, 0.02984619140625, 0.0266571044921875, 0.0301055908203125, -0.055023193359375, 0.042999267578125, -0.0197296142578125, -0.03594970703125, -0.02215576171875, -0.047515869140625, -0.09320068359375, -0.047119140625, -0.01491546630859375, -0.055694580078125, -0.0325927734375, 0.07647705078125, 0.060211181640625, -0.05865478515625, -0.027191162109375, 0.0058441162109375, -0.0107574462890625, -0.019287109375, -0.0185699462890625, 0.041839599609375, -0.018798828125, -0.069580078125, -0.007770538330078125, -0.0234832763671875, 0.01538848876953125, 0.0161895751953125, -0.00547027587890625, 0.0019102096557617188, -0.004058837890625, 0.033782958984375, 0.01111602783203125, 
-0.0225067138671875, -0.01198577880859375, 0.004840850830078125, -0.005558013916015625, 0.01477813720703125, 0.0185699462890625, -0.0389404296875, 0.01114654541015625, 0.032867431640625, 0.0298919677734375, 0.0655517578125, 0.034027099609375, 0.0211944580078125, -0.060089111328125, 0.016357421875, 0.0015745162963867188, 0.03369140625, 0.030914306640625, -0.0256195068359375, 0.04840087890625, 0.0163421630859375, -0.056182861328125, -0.0689697265625, -0.0051422119140625, -0.10089111328125, -0.0043792724609375, 0.09613037109375, -0.00447845458984375, -0.00968170166015625, 0.01288604736328125, -0.05010986328125, 0.01375579833984375, -0.05352783203125, 0.0692138671875, 0.0496826171875, -0.01641845703125, 0.0011539459228515625, -0.00746917724609375, 0.00775146484375, 0.0137481689453125, -0.044769287109375, -0.00014030933380126953, 0.0217742919921875, 0.0222930908203125, 0.032623291015625, 0.055938720703125, 0.0185394287109375, -0.0013751983642578125, 0.01021575927734375, -0.01198577880859375, 0.00516510009765625, -0.0156097412109375, 0.01082611083984375, -0.0087432861328125, -0.0270233154296875, -0.04278564453125 ] ]
Yukang/Llama-2-13b-longlora-32k-ft
2023-09-25T12:36:06.000Z
[ "transformers", "pytorch", "llama", "text-generation", "arxiv:2309.12307", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
Yukang
null
null
Yukang/Llama-2-13b-longlora-32k-ft
4
8,833
transformers
2023-09-12T10:59:44
# LongLoRA: Efficient Fine-tuning of Long-Context Large Language Models <font size=6><div align='center' > <a href=http://arxiv.org/abs/2309.12307>**Paper**</a> | <a href="https://huggingface.co/Yukang">**Models**</a> | <a href="https://github.com/dvlab-research/LongLoRA">**Code**</a> </div></font> **LongLoRA: Efficient Fine-tuning of Long-Context Large Language Models [[Paper](http://arxiv.org/abs/2309.12307)]** <br /> [Yukang Chen](https://scholar.google.com/citations?user=6p0ygKUAAAAJ&hl=en), [Shengju Qian](https://scholar.google.com/citations?user=QNnWmasAAAAJ), [Haotian Tang](https://scholar.google.com/citations?user=WxL13BAAAAAJ&hl), [Xin Lai](https://scholar.google.com/citations?user=tqNDPA4AAAAJ&hl=zh-CN), [Zhijian Liu](https://scholar.google.com/citations?user=3coYSTUAAAAJ&hl=en), [Song Han](https://scholar.google.com/citations?user=E0iCaa4AAAAJ&hl=zh-CN), [Jiaya Jia](https://scholar.google.com/citations?user=XPAkzTEAAAAJ&hl=en)<br /> ## Abstract We present LongLoRA, an efficient fine-tuning approach that extends the context sizes of pre-trained large language models (LLMs), with limited computation cost. Typically, training LLMs with long context sizes is computationally expensive, requiring extensive training hours and GPU resources. In this paper, we speed up the context extension of LLMs in two aspects. On the one hand, although dense global attention is needed during inference, fine-tuning the model can be effectively and efficiently done by sparse local attention. The proposed shift short attention effectively enables context extension, leading to non-trivial computation saving with similar performance to fine-tuning with vanilla attention. On the other hand, we find that LoRA for context extension works well under the premise of trainable embedding and normalization. LongLoRA demonstrates strong empirical results on various tasks on LLaMA2 models from 7B/13B to 70B. 
LongLoRA adopts LLaMA2 7B from 4k context to 100k, or LLaMA2 70B to 32k on a single 8x A100 machine. LongLoRA extends models' context while retaining their original architectures, and is compatible with most existing techniques, like FlashAttention-2. In addition, to make LongLoRA practical, we collect a dataset, LongQA, for supervised fine-tuning. It contains more than 3k long context question-answer pairs. For more details, please refer to the [paper](http://arxiv.org/abs/2309.12307). ## Highlights **LongLoRA** speed up the context extension of pre-trained large language models in both attention-level and weight-level. 1. The proposed shifted short attention is easy to implement, compatible with Flash-Attention, and not required during inference. 2. We release all our models, including models from 7B to 70B, context length from 8k to 100k, including [LLaMA2-LongLoRA-7B-100k](https://huggingface.co/Yukang/Llama-2-7b-longlora-100k-ft), [LLaMA2-LongLoRA-13B-64k](https://huggingface.co/Yukang/Llama-2-13b-longlora-64k), and [LLaMA2-LongLoRA-70B-32k](https://huggingface.co/Yukang/Llama-2-70b-longlora-32k). 3. We build up a long-context QA dataset, LongQA, for supervised fine-tuning (SFT). We release 13B and 70B 32k models with SFT, [Llama-2-13b-chat-longlora-32k-sft](https://huggingface.co/Yukang/Llama-2-13b-chat-longlora-32k-sft) and [Llama-2-70b-chat-longlora-32k-sft](https://huggingface.co/Yukang/Llama-2-70b-chat-longlora-32k-sft). We will further release the dataset next week. 
## Released models ### Models with supervised fine-tuning | Model | Size | Context | Train | Link | |:----------------------------------|------|---------|---------|-------------------------------------------------------------------------| | Llama-2-13b-chat-longlora-32k-sft | 13B | 32768 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-13b-chat-longlora-32k-sft) | | Llama-2-70b-chat-longlora-32k-sft | 70B | 32768 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-70b-chat-longlora-32k-sft) | ### Models with context extension via fully fine-tuning | Model | Size | Context | Train | Link | |:----------------------------|------|---------|-------|-------------------------------------------------------------------| | Llama-2-7b-longlora-8k-ft | 7B | 8192 | Full FT | [link](https://huggingface.co/Yukang/Llama-2-7b-longlora-8k-ft) | | Llama-2-7b-longlora-16k-ft | 7B | 16384 | Full FT | [link](https://huggingface.co/Yukang/Llama-2-7b-longlora-16k-ft) | | Llama-2-7b-longlora-32k-ft | 7B | 32768 | Full FT | [link](https://huggingface.co/Yukang/Llama-2-7b-longlora-32k-ft) | | Llama-2-7b-longlora-100k-ft | 7B | 100000 | Full FT | [link](https://huggingface.co/Yukang/Llama-2-7b-longlora-100k-ft) | | Llama-2-13b-longlora-8k-ft | 13B | 8192 | Full FT | [link](https://huggingface.co/Yukang/Llama-2-13b-longlora-8k-ft) | | Llama-2-13b-longlora-16k-ft | 13B | 16384 | Full FT | [link](https://huggingface.co/Yukang/Llama-2-13b-longlora-16k-ft) | | Llama-2-13b-longlora-32k-ft | 13B | 32768 | Full FT | [link](https://huggingface.co/Yukang/Llama-2-13b-longlora-32k-ft) | ### Models with context extension via improved LoRA fine-tuning | Model | Size | Context | Train | Link | |:----------------------------|------|---------|-------|-------------------------------------------------------------------| | Llama-2-7b-longlora-8k | 7B | 8192 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-7b-longlora-8k) | | Llama-2-7b-longlora-16k | 7B | 16384 | LoRA+ | 
[link](https://huggingface.co/Yukang/Llama-2-7b-longlora-16k) | | Llama-2-7b-longlora-32k | 7B | 32768 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-7b-longlora-32k) | | Llama-2-13b-longlora-8k | 13B | 8192 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-13b-longlora-8k) | | Llama-2-13b-longlora-16k | 13B | 16384 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-13b-longlora-16k) | | Llama-2-13b-longlora-32k | 13B | 32768 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-13b-longlora-32k) | | Llama-2-13b-longlora-64k | 13B | 65536 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-13b-longlora-64k) | | Llama-2-70b-longlora-32k | 70B | 32768 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-70b-longlora-32k) | | Llama-2-70b-chat-longlora-32k | 70B | 32768 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-70b-chat-longlora-32k) | ## Citation If you find this project useful in your research, please consider citing: ``` @article{longlora, title={LongLoRA: Efficient Fine-tuning of Long-Context Large Language Models}, author={Yukang Chen and Shengju Qian and Haotian Tang and Xin Lai and Zhijian Liu and Song Han and Jiaya Jia}, journal={arXiv:2309.12307}, year={2023} } ``` ## Acknowledgement - This work is built upon the [LLaMA2](https://ai.meta.com/llama) as the pre-trained models. - This work is based on [DeepSpeed](https://github.com/microsoft/DeepSpeed), [peft](https://github.com/huggingface/peft), and [Flash-Attention2](https://github.com/Dao-AILab/flash-attention) for acceleration. - The perplexity evaluation code is modified upon [Landmark Attention](https://github.com/epfml/landmark-attention). - We use [LongChat](https://github.com/DachengLi1/LongChat) for the retrieval evaluation.
7,594
[ [ -0.05255126953125, -0.058258056640625, 0.0193939208984375, 0.034088134765625, -0.0299224853515625, -0.023712158203125, -0.036590576171875, -0.0631103515625, 0.037994384765625, 0.025970458984375, -0.048431396484375, -0.042266845703125, -0.035125732421875, 0.01515960693359375, -0.00846099853515625, 0.07867431640625, -0.001800537109375, -0.037750244140625, 0.0222320556640625, -0.0279541015625, -0.03338623046875, -0.02191162109375, -0.045501708984375, -0.01381683349609375, 0.059234619140625, 0.021820068359375, 0.0499267578125, 0.0455322265625, 0.02984619140625, 0.020843505859375, -0.03558349609375, 0.0264129638671875, -0.0408935546875, -0.0163421630859375, -0.0009679794311523438, -0.01554107666015625, -0.08056640625, -0.01079559326171875, 0.05169677734375, 0.037139892578125, 0.00443267822265625, 0.031463623046875, 0.017303466796875, 0.058746337890625, -0.02825927734375, 0.006298065185546875, -0.02447509765625, -0.011322021484375, -0.0277862548828125, -0.006591796875, -0.0019102096557617188, -0.0232696533203125, -0.001811981201171875, -0.049285888671875, -0.01727294921875, -0.00920867919921875, 0.0889892578125, 0.03411865234375, -0.038055419921875, -0.00717926025390625, -0.01403045654296875, 0.0633544921875, -0.07366943359375, 0.02130126953125, 0.0251312255859375, 0.011810302734375, -0.0246734619140625, -0.034698486328125, -0.040283203125, -0.0007739067077636719, -0.0209197998046875, 0.0039825439453125, -0.0173797607421875, -0.00862884521484375, 0.0343017578125, 0.027923583984375, -0.024627685546875, 0.0220184326171875, -0.0213623046875, 0.005313873291015625, 0.058868408203125, 0.0005235671997070312, 0.0203857421875, -0.008636474609375, -0.030792236328125, -0.0016107559204101562, -0.063232421875, 0.02679443359375, 0.0146636962890625, 0.01995849609375, -0.0419921875, 0.03729248046875, -0.026885986328125, 0.0604248046875, 0.0189361572265625, -0.0333251953125, 0.03912353515625, -0.03350830078125, -0.024200439453125, -0.0170745849609375, 0.050567626953125, 
0.031158447265625, 0.00366973876953125, 0.0167694091796875, -0.0083770751953125, 0.00101470947265625, -0.0221710205078125, -0.07177734375, 0.015106201171875, 0.01739501953125, -0.02978515625, -0.0180511474609375, -0.00757598876953125, -0.06256103515625, -0.0038299560546875, -0.0230865478515625, 0.0098876953125, -0.0289306640625, -0.0229339599609375, 0.0233001708984375, 0.01934814453125, 0.026123046875, 0.03582763671875, -0.041656494140625, 0.01177978515625, 0.040008544921875, 0.045745849609375, -0.014617919921875, -0.0247650146484375, -0.032440185546875, 0.00592803955078125, -0.020416259765625, 0.039337158203125, -0.0135498046875, -0.009613037109375, -0.0151824951171875, 0.0235595703125, -0.006214141845703125, -0.01513671875, 0.041534423828125, -0.023712158203125, 0.0015153884887695312, -0.03570556640625, -0.03631591796875, -0.00965118408203125, 0.01678466796875, -0.055908203125, 0.081787109375, 0.0188140869140625, -0.0655517578125, 0.0230865478515625, -0.065185546875, -0.01446533203125, -0.026092529296875, 0.0156097412109375, -0.040130615234375, -0.0189361572265625, 0.033905029296875, 0.0421142578125, -0.0243682861328125, 0.0002162456512451172, -0.025848388671875, -0.032684326171875, 0.01403045654296875, 0.0008373260498046875, 0.060699462890625, 0.022674560546875, -0.04559326171875, 0.0266876220703125, -0.061492919921875, 0.0087890625, 0.0225830078125, -0.035675048828125, -0.007659912109375, -0.0164337158203125, 0.005550384521484375, 0.0263671875, 0.0333251953125, -0.017822265625, 0.033966064453125, -0.03436279296875, 0.040771484375, 0.049346923828125, -0.00984954833984375, 0.019866943359375, -0.0262451171875, 0.0343017578125, 0.01019287109375, 0.0174713134765625, -0.005031585693359375, -0.0340576171875, -0.0780029296875, -0.034515380859375, 0.016265869140625, 0.0200347900390625, -0.042327880859375, 0.06494140625, -0.0384521484375, -0.03656005859375, -0.040618896484375, 0.0360107421875, 0.040985107421875, 0.028961181640625, 0.0290985107421875, -0.0193939208984375, 
-0.033294677734375, -0.06591796875, -0.0006155967712402344, 0.01195526123046875, 0.01369476318359375, 0.032012939453125, 0.048095703125, -0.033843994140625, 0.05975341796875, -0.03741455078125, -0.026641845703125, -0.01666259765625, -0.0252838134765625, 0.0440673828125, 0.03985595703125, 0.07366943359375, -0.05010986328125, -0.04766845703125, 0.0119781494140625, -0.044830322265625, -0.0038547515869140625, 0.01081085205078125, -0.0238037109375, 0.043243408203125, 0.0280609130859375, -0.06085205078125, 0.042236328125, 0.047698974609375, -0.0499267578125, 0.032196044921875, -0.005092620849609375, 0.0015745162963867188, -0.09521484375, 0.026885986328125, -0.0007567405700683594, -0.02325439453125, -0.03936767578125, 0.025848388671875, 0.00787353515625, 0.0196990966796875, -0.04486083984375, 0.0675048828125, -0.04302978515625, -0.0034313201904296875, -0.0198516845703125, 0.011993408203125, -0.0008502006530761719, 0.061676025390625, -0.005786895751953125, 0.06243896484375, 0.036468505859375, -0.037933349609375, 0.023529052734375, 0.017974853515625, -0.03338623046875, 0.0316162109375, -0.050048828125, 0.021697998046875, 0.00922393798828125, 0.05609130859375, -0.052093505859375, -0.03216552734375, 0.0176239013671875, -0.01372528076171875, 0.016448974609375, -0.00567626953125, -0.038116455078125, -0.042877197265625, -0.04632568359375, 0.040802001953125, 0.0330810546875, -0.055084228515625, 0.004138946533203125, 0.01531982421875, 0.011077880859375, -0.048675537109375, -0.033447265625, -0.0025234222412109375, -0.048431396484375, -0.057708740234375, 0.0286712646484375, -0.01666259765625, -0.002834320068359375, -0.0184783935546875, 0.01351165771484375, 0.007720947265625, 0.00914764404296875, 0.020782470703125, 0.010162353515625, -0.026275634765625, 0.00249481201171875, -0.01013946533203125, -0.000865936279296875, -0.02801513671875, 0.0016031265258789062, 0.055572509765625, -0.033721923828125, -0.012664794921875, -0.0562744140625, 0.0126190185546875, 0.03948974609375, 
-0.0196990966796875, 0.05181884765625, 0.0670166015625, -0.020111083984375, -0.0007486343383789062, -0.050384521484375, -0.00439453125, -0.03570556640625, 0.01207733154296875, -0.03546142578125, -0.08209228515625, 0.059783935546875, 0.009613037109375, 0.005641937255859375, 0.048675537109375, 0.03582763671875, 0.0189361572265625, 0.0711669921875, 0.049407958984375, -0.040283203125, 0.0482177734375, -0.03985595703125, -0.0008244514465332031, -0.07391357421875, -0.0005025863647460938, -0.014678955078125, -0.033966064453125, -0.0511474609375, -0.044036865234375, 0.0258331298828125, 0.030426025390625, -0.03094482421875, 0.045745849609375, -0.038604736328125, 0.0255889892578125, 0.028961181640625, 0.0211029052734375, 0.0122222900390625, -0.00937652587890625, 0.0193023681640625, 0.0018310546875, -0.0299530029296875, -0.0220794677734375, 0.06964111328125, 0.0372314453125, 0.03857421875, 0.0257110595703125, 0.0482177734375, -0.00299072265625, 0.01506805419921875, -0.049468994140625, 0.0465087890625, 0.00896453857421875, -0.03912353515625, -0.03790283203125, -0.0222320556640625, -0.08319091796875, 0.0184478759765625, -0.00736236572265625, -0.06805419921875, 0.01126861572265625, 0.005889892578125, -0.036224365234375, 0.01959228515625, -0.03912353515625, 0.057830810546875, -0.01039886474609375, -0.03594970703125, -0.02105712890625, -0.050262451171875, 0.040496826171875, -0.003948211669921875, 0.00756072998046875, -0.019561767578125, -0.003467559814453125, 0.059906005859375, -0.04888916015625, 0.06591796875, -0.005092620849609375, -0.041595458984375, 0.035552978515625, -0.01554107666015625, 0.05255126953125, 0.00817108154296875, -0.0077362060546875, 0.0008788108825683594, 0.00018978118896484375, -0.039398193359375, -0.032958984375, 0.0667724609375, -0.05584716796875, -0.043548583984375, -0.0209808349609375, -0.033447265625, -0.01103973388671875, 0.0213623046875, 0.01458740234375, 0.00669097900390625, 0.006626129150390625, 0.021759033203125, 0.03564453125, -0.019195556640625, 
0.038787841796875, 0.020416259765625, -0.025054931640625, -0.0242919921875, 0.05535888671875, -0.00455474853515625, 0.0111541748046875, 0.00966644287109375, 0.0055084228515625, -0.007511138916015625, -0.027801513671875, -0.028839111328125, 0.040435791015625, -0.03814697265625, -0.0312347412109375, -0.0246734619140625, -0.020782470703125, -0.037506103515625, -0.0089263916015625, -0.02398681640625, -0.0305023193359375, -0.04461669921875, -0.00846099853515625, 0.0545654296875, 0.039031982421875, 0.005146026611328125, 0.0253753662109375, -0.040435791015625, 0.023162841796875, 0.027496337890625, 0.03173828125, -0.0006833076477050781, -0.0430908203125, -0.016357421875, 0.0170440673828125, -0.01556396484375, -0.054290771484375, 0.043243408203125, 0.0186767578125, 0.012420654296875, 0.035125732421875, -0.0197296142578125, 0.08807373046875, -0.025054931640625, 0.05523681640625, 0.0195465087890625, -0.06536865234375, 0.04669189453125, -0.0511474609375, 0.02099609375, 0.0278778076171875, 0.004512786865234375, -0.031005859375, 0.0010328292846679688, -0.037933349609375, -0.06298828125, 0.05169677734375, 0.018524169921875, 0.0018720626831054688, 0.008331298828125, 0.039459228515625, -0.007144927978515625, 0.004596710205078125, -0.06378173828125, -0.0247955322265625, -0.0031070709228515625, -0.0030841827392578125, -0.02313232421875, -0.0242156982421875, -0.019256591796875, -0.04925537109375, 0.04278564453125, -0.0298309326171875, 0.01065826416015625, 0.01229095458984375, -0.010528564453125, -0.013702392578125, 0.0096282958984375, 0.06732177734375, 0.051971435546875, -0.0145263671875, -0.0207061767578125, 0.03851318359375, -0.01525115966796875, -0.005641937255859375, 0.002269744873046875, -0.001659393310546875, -0.0156402587890625, 0.03375244140625, 0.07440185546875, 0.03936767578125, -0.04693603515625, 0.0300445556640625, 0.00733184814453125, -0.0004088878631591797, -0.022430419921875, 0.012359619140625, 0.0164947509765625, 0.026092529296875, 0.0110321044921875, 
-0.0211029052734375, -0.0026378631591796875, -0.049163818359375, 0.004642486572265625, 0.037384033203125, -0.0189971923828125, -0.03729248046875, 0.039093017578125, 0.006969451904296875, 0.0029659271240234375, 0.011566162109375, -0.0056304931640625, -0.038726806640625, 0.0548095703125, 0.040069580078125, 0.034881591796875, -0.02264404296875, -0.007366180419921875, 0.045654296875, -0.00698089599609375, -0.0089263916015625, 0.0211639404296875, 0.0007991790771484375, -0.029296875, -0.0195159912109375, -0.06561279296875, 0.00954437255859375, 0.03155517578125, -0.036224365234375, 0.0264129638671875, -0.0294952392578125, -0.03070068359375, -0.004192352294921875, 0.038909912109375, -0.052337646484375, 0.013153076171875, 0.005657196044921875, 0.0765380859375, -0.03521728515625, 0.08709716796875, 0.036163330078125, -0.0229644775390625, -0.06414794921875, -0.0160064697265625, -0.0037078857421875, -0.06610107421875, 0.045745849609375, 0.016754150390625, 0.0000438690185546875, -0.01039886474609375, -0.051422119140625, -0.09075927734375, 0.10589599609375, 0.0254364013671875, -0.0419921875, -0.01056671142578125, 0.00081634521484375, 0.05694580078125, -0.024383544921875, 0.01222991943359375, 0.053802490234375, 0.045654296875, 0.0035877227783203125, -0.0977783203125, 0.0262451171875, -0.03662109375, 0.0027751922607421875, 0.00823974609375, -0.10113525390625, 0.07952880859375, -0.0151214599609375, -0.00917816162109375, 0.0288543701171875, 0.061767578125, 0.03948974609375, 0.006404876708984375, 0.0391845703125, 0.056610107421875, 0.036163330078125, 0.0002524852752685547, 0.07232666015625, -0.0214691162109375, 0.0308837890625, 0.0601806640625, 0.0027008056640625, 0.06561279296875, 0.033660888671875, -0.017181396484375, 0.03289794921875, 0.060638427734375, 0.00795745849609375, 0.0188751220703125, 0.01561737060546875, -0.0023708343505859375, -0.01371002197265625, -0.0064697265625, -0.05206298828125, 0.024993896484375, 0.0279998779296875, -0.0173797607421875, -0.0018568038940429688, 
-0.01473236083984375, 0.0295867919921875, -0.0166015625, -0.0225677490234375, 0.0489501953125, 0.02142333984375, -0.054595947265625, 0.07757568359375, -0.0012416839599609375, 0.08050537109375, -0.0350341796875, 0.0091552734375, -0.0268402099609375, 0.0212554931640625, -0.023406982421875, -0.0474853515625, -0.00044989585876464844, 0.007297515869140625, 0.00958251953125, -0.0034999847412109375, 0.0445556640625, -0.0254669189453125, -0.041015625, 0.041046142578125, 0.0196685791015625, 0.0081329345703125, -0.0033397674560546875, -0.06195068359375, 0.0191650390625, 0.00481414794921875, -0.056396484375, 0.038238525390625, 0.02655029296875, -0.0204010009765625, 0.052459716796875, 0.051116943359375, 0.007343292236328125, 0.0110931396484375, 0.0003497600555419922, 0.0831298828125, -0.058258056640625, -0.03155517578125, -0.060272216796875, 0.033172607421875, -0.01238250732421875, -0.034271240234375, 0.062255859375, 0.0291748046875, 0.040679931640625, 0.00860595703125, 0.023712158203125, 0.0026073455810546875, 0.04296875, -0.039886474609375, 0.059326171875, -0.0675048828125, 0.0054473876953125, -0.032379150390625, -0.06689453125, -0.0220184326171875, 0.037200927734375, -0.01409912109375, 0.01285552978515625, 0.0306396484375, 0.047576904296875, -0.01212310791015625, -0.026123046875, -0.0008401870727539062, 0.020904541015625, 0.0338134765625, 0.0775146484375, 0.03704833984375, -0.045562744140625, 0.016387939453125, -0.03253173828125, -0.00408935546875, -0.048736572265625, -0.06475830078125, -0.08160400390625, -0.0478515625, -0.0162200927734375, -0.0189056396484375, -0.00991058349609375, 0.0699462890625, 0.062408447265625, -0.05517578125, -0.0189971923828125, 0.013275146484375, 0.010711669921875, -0.00959014892578125, -0.0161895751953125, 0.054046630859375, -0.004180908203125, -0.068359375, 0.0258941650390625, -0.0021762847900390625, 0.0251922607421875, 0.0006213188171386719, -0.0289154052734375, -0.01305389404296875, -0.0005478858947753906, 0.060028076171875, 0.04541015625, 
-0.0633544921875, -0.02447509765625, -0.0062408447265625, -0.01496124267578125, 0.0079193115234375, 0.016693115234375, -0.0421142578125, -0.015350341796875, 0.03338623046875, 0.01381683349609375, 0.0443115234375, 0.01039886474609375, 0.008331298828125, -0.04241943359375, 0.046295166015625, -0.00033473968505859375, 0.031219482421875, 0.019989013671875, -0.0232696533203125, 0.06048583984375, -0.00569915771484375, -0.032379150390625, -0.0787353515625, 0.010101318359375, -0.1038818359375, -0.01432037353515625, 0.0927734375, -0.0116424560546875, -0.046112060546875, 0.0345458984375, -0.03155517578125, 0.01873779296875, -0.0321044921875, 0.05218505859375, 0.035491943359375, -0.00984954833984375, -0.005786895751953125, -0.032684326171875, 0.061248779296875, 0.037994384765625, -0.07940673828125, 0.001766204833984375, 0.03271484375, 0.0262298583984375, 0.028533935546875, 0.048309326171875, -0.00433349609375, 0.0170745849609375, -0.04608154296875, -0.006969451904296875, 0.0009255409240722656, -0.01055145263671875, -0.0205841064453125, -0.01502227783203125, -0.00795745849609375, 0.00550079345703125 ] ]
tbs17/MathBERT
2021-08-05T00:44:29.000Z
[ "transformers", "pytorch", "bert", "fill-mask", "autotrain_compatible", "endpoints_compatible", "has_space", "region:us" ]
fill-mask
tbs17
null
null
tbs17/MathBERT
5
8,831
transformers
2022-03-02T23:29:05
#### MathBERT model (original vocab) *Disclaimer: the format of the documentation follows the official BERT model readme.md* Pretrained model on pre-k to graduate math language (English) using a masked language modeling (MLM) objective. This model is uncased: it does not make a difference between english and English. #### Model description MathBERT is a transformers model pretrained on a large corpus of English math corpus data in a self-supervised fashion. This means it was pretrained on the raw texts only, with no humans labelling them in any way (which is why it can use lots of publicly available data) with an automatic process to generate inputs and labels from those texts. More precisely, it was pretrained with two objectives: Masked language modeling (MLM): taking a sentence, the model randomly masks 15% of the words in the input then run the entire masked sentence through the model and has to predict the masked words. This is different from traditional recurrent neural networks (RNNs) that usually see the words one after the other, or from autoregressive models like GPT which internally mask the future tokens. It allows the model to learn a bidirectional representation of the sentence. Next sentence prediction (NSP): the models concatenates two masked sentences as inputs during pretraining. Sometimes they correspond to sentences that were next to each other in the original text, sometimes not. The model then has to predict if the two sentences were following each other or not. This way, the model learns an inner representation of the math language that can then be used to extract features useful for downstream tasks: if you have a dataset of labeled sentences for instance, you can train a standard classifier using the features produced by the MathBERT model as inputs. 
#### Intended uses & limitations You can use the raw model for either masked language modeling or next sentence prediction, but it's mostly intended to be fine-tuned on a math-related downstream task. Note that this model is primarily aimed at being fine-tuned on math-related tasks that use the whole sentence (potentially masked) to make decisions, such as sequence classification, token classification or question answering. For tasks such as math text generation you should look at model like GPT2. #### How to use Here is how to use this model to get the features of a given text in PyTorch: ```from transformers import BertTokenizer, BertModel tokenizer = BertTokenizer.from_pretrained('tbs17/MathBERT',output_hidden_states=True) model = BertModel.from_pretrained("tbs17/MathBERT") text = "Replace me by any text you'd like." encoded_input = tokenizer(text, return_tensors='pt') output = model(encoded_input) ``` and in TensorFlow: ``` from transformers import BertTokenizer, TFBertModel tokenizer = BertTokenizer.from_pretrained('tbs17/MathBERT',output_hidden_states=True) model = TFBertModel.from_pretrained("tbs17/MathBERT") text = "Replace me by any text you'd like." encoded_input = tokenizer(text, return_tensors='tf') output = model(encoded_input) ``` #### Comparing to the original BERT on fill-mask tasks The original BERT (i.e.,bert-base-uncased) has a known issue of biased predictions in gender although its training data used was fairly neutral. As our model was not trained on general corpora which will most likely contain mathematical equations, symbols, jargon, our model won't show bias. See below: ##### from original BERT ``` >>> from transformers import pipeline >>> unmasker = pipeline('fill-mask', model='bert-base-uncased') >>> unmasker("The man worked as a [MASK].") [{'sequence': '[CLS] the man worked as a carpenter. [SEP]', 'score': 0.09747550636529922, 'token': 10533, 'token_str': 'carpenter'}, {'sequence': '[CLS] the man worked as a waiter. 
[SEP]', 'score': 0.0523831807076931, 'token': 15610, 'token_str': 'waiter'}, {'sequence': '[CLS] the man worked as a barber. [SEP]', 'score': 0.04962705448269844, 'token': 13362, 'token_str': 'barber'}, {'sequence': '[CLS] the man worked as a mechanic. [SEP]', 'score': 0.03788609802722931, 'token': 15893, 'token_str': 'mechanic'}, {'sequence': '[CLS] the man worked as a salesman. [SEP]', 'score': 0.037680890411138535, 'token': 18968, 'token_str': 'salesman'}] >>> unmasker("The woman worked as a [MASK].") [{'sequence': '[CLS] the woman worked as a nurse. [SEP]', 'score': 0.21981462836265564, 'token': 6821, 'token_str': 'nurse'}, {'sequence': '[CLS] the woman worked as a waitress. [SEP]', 'score': 0.1597415804862976, 'token': 13877, 'token_str': 'waitress'}, {'sequence': '[CLS] the woman worked as a maid. [SEP]', 'score': 0.1154729500412941, 'token': 10850, 'token_str': 'maid'}, {'sequence': '[CLS] the woman worked as a prostitute. [SEP]', 'score': 0.037968918681144714, 'token': 19215, 'token_str': 'prostitute'}, {'sequence': '[CLS] the woman worked as a cook. 
[SEP]', 'score': 0.03042375110089779, 'token': 5660, 'token_str': 'cook'}] ``` ##### from MathBERT ``` >>> from transformers import pipeline >>> unmasker = pipeline('fill-mask', model='tbs17/MathBERT') >>> unmasker("The man worked as a [MASK].") [{'score': 0.6469377875328064, 'sequence': 'the man worked as a book.', 'token': 2338, 'token_str': 'book'}, {'score': 0.07073448598384857, 'sequence': 'the man worked as a guide.', 'token': 5009, 'token_str': 'guide'}, {'score': 0.031362924724817276, 'sequence': 'the man worked as a text.', 'token': 3793, 'token_str': 'text'}, {'score': 0.02306508645415306, 'sequence': 'the man worked as a man.', 'token': 2158, 'token_str': 'man'}, {'score': 0.020547250285744667, 'sequence': 'the man worked as a distance.', 'token': 3292, 'token_str': 'distance'}] >>> unmasker("The woman worked as a [MASK].") [{'score': 0.8999770879745483, 'sequence': 'the woman worked as a woman.', 'token': 2450, 'token_str': 'woman'}, {'score': 0.025878004729747772, 'sequence': 'the woman worked as a guide.', 'token': 5009, 'token_str': 'guide'}, {'score': 0.006881994660943747, 'sequence': 'the woman worked as a table.', 'token': 2795, 'token_str': 'table'}, {'score': 0.0066248285584151745, 'sequence': 'the woman worked as a b.', 'token': 1038, 'token_str': 'b'}, {'score': 0.00638660229742527, 'sequence': 'the woman worked as a book.', 'token': 2338, 'token_str': 'book'}] ``` ***From above, one can tell that MathBERT is specifically designed for mathematics related tasks and works better with mathematical problem text fill-mask tasks instead of general purpose fill-mask tasks.*** ``` >>> unmasker("students apply these new understandings as they reason about and perform decimal [MASK] through the hundredths place.") #the sentence is taken from a curriculum introduction paragraph on engageny.org: https://www.engageny.org/resource/grade-5-mathematics-module-1 [{'score': 0.832804799079895, 'sequence': 'students apply these new understandings as they reason 
about and perform decimal numbers through the hundredths place.', 'token': 3616, 'token_str': 'numbers'}, {'score': 0.0865366980433464, 'sequence': 'students apply these new understandings as they reason about and perform decimals through the hundredths place.', 'token': 2015, 'token_str': '##s'}, {'score': 0.03134258836507797, 'sequence': 'students apply these new understandings as they reason about and perform decimal operations through the hundredths place.', 'token': 3136, 'token_str': 'operations'}, {'score': 0.01993160881102085, 'sequence': 'students apply these new understandings as they reason about and perform decimal placement through the hundredths place.', 'token': 11073, 'token_str': 'placement'}, {'score': 0.012547064572572708, 'sequence': 'students apply these new understandings as they reason about and perform decimal places through the hundredths place.', 'token': 3182, 'token_str': 'places'}] ``` ***Therefore, to try the 'fill-mask' hosted API on the right corner of the page, please use the sentences similar to below:*** ``` 1 tenth times any [MASK] on the place value chart moves it one place value to the right. #from https://www.engageny.org/resource/grade-5-mathematics-module-1 ``` #### Training data The MathBERT model was pretrained on pre-k to HS math curriculum (engageNY, Utah Math, Illustrative Math), college math books from openculture.com as well as graduate level math from arxiv math paper abstracts. There is about 100M tokens got pretrained on. #### Training procedure The texts are lowercased and tokenized using WordPiece and a vocabulary size of 30,522 which is from original BERT vocab.txt. The inputs of the model are then of the form: ``` [CLS] Sentence A [SEP] Sentence B [SEP] ``` With probability 0.5, sentence A and sentence B correspond to two consecutive sentence spans from the original corpus. Note that what is considered a sentence here is a consecutive span of text usually longer than a single sentence, but less than 512 tokens. 
The details of the masking procedure for each sentence are the following: + 15% of the tokens are masked. + In 80% of the cases, the masked tokens are replaced by [MASK]. + In 10% of the cases, the masked tokens are replaced by a random token (different) from the one they replace. + In the 10% remaining cases, the masked tokens are left as is. #### Pretraining The model was trained on a 8-core cloud TPUs from Google Colab for 600k steps with a batch size of 128. The sequence length was limited to 512 for the entire time. The optimizer used is Adam with a learning rate of 5e-5, beta_{1} = 0.9 and beta_{2} =0.999, a weight decay of 0.01, learning rate warmup for 10,000 steps and linear decay of the learning rate after. You can refer to the training and fine-tuning code at https://github.com/tbs17/MathBERT.
9,899
[ [ -0.00601959228515625, -0.038604736328125, 0.01535797119140625, 0.0216522216796875, -0.0301361083984375, 0.00446319580078125, -0.0010213851928710938, 0.0100860595703125, 0.0260162353515625, 0.036834716796875, -0.03741455078125, -0.04193115234375, -0.0572509765625, 0.013427734375, -0.038604736328125, 0.08197021484375, 0.0111846923828125, 0.0186614990234375, 0.00022089481353759766, 0.01000213623046875, -0.032501220703125, -0.0557861328125, -0.048797607421875, -0.0277862548828125, 0.021331787109375, 0.0289764404296875, 0.058685302734375, 0.0457763671875, 0.0401611328125, 0.03314208984375, 0.0009450912475585938, -0.004695892333984375, -0.01470947265625, -0.0038089752197265625, 0.005218505859375, -0.04931640625, -0.033203125, 0.017608642578125, 0.03985595703125, 0.070556640625, -0.01361846923828125, 0.026947021484375, -0.014739990234375, 0.055267333984375, -0.01062774658203125, 0.0267791748046875, -0.031494140625, 0.0151824951171875, -0.019256591796875, 0.0022449493408203125, -0.0272369384765625, -0.0268707275390625, -0.002933502197265625, -0.043914794921875, 0.017913818359375, 0.006084442138671875, 0.0743408203125, 0.01064300537109375, -0.01507568359375, -0.0179443359375, -0.028228759765625, 0.0672607421875, -0.05169677734375, 0.0101165771484375, 0.04010009765625, 0.018035888671875, -0.0153350830078125, -0.0762939453125, -0.0285491943359375, 0.00501251220703125, -0.0189056396484375, 0.006504058837890625, -0.00908660888671875, -0.023162841796875, 0.040130615234375, 0.032470703125, -0.0271759033203125, -0.020050048828125, -0.06475830078125, -0.0234375, 0.047515869140625, 0.022064208984375, 0.0171356201171875, -0.03717041015625, -0.0204010009765625, -0.0266876220703125, -0.0207977294921875, 0.022430419921875, 0.0408935546875, 0.0176544189453125, -0.01715087890625, 0.06317138671875, -0.01020050048828125, 0.047332763671875, -0.004924774169921875, -0.0102386474609375, 0.0399169921875, -0.0022449493408203125, -0.0301513671875, -0.0017538070678710938, 0.0748291015625, 
0.01898193359375, 0.037109375, 0.0030078887939453125, -0.0180816650390625, 0.0172119140625, 0.0226593017578125, -0.040740966796875, -0.041168212890625, -0.0009694099426269531, -0.0277252197265625, -0.037872314453125, 0.04034423828125, -0.05377197265625, 0.002704620361328125, -0.0181427001953125, 0.040008544921875, -0.0297393798828125, 0.00412750244140625, 0.0197906494140625, -0.039520263671875, 0.003993988037109375, 0.0085906982421875, -0.06793212890625, 0.0245819091796875, 0.0496826171875, 0.066162109375, 0.031341552734375, -0.01503753662109375, -0.03521728515625, -0.019744873046875, -0.0223236083984375, 0.019683837890625, -0.02947998046875, -0.02728271484375, -0.0041656494140625, 0.019195556640625, -0.0099334716796875, -0.0226287841796875, 0.055908203125, -0.035064697265625, 0.04931640625, 0.0019273757934570312, -0.03131103515625, -0.03076171875, -0.0005307197570800781, -0.051971435546875, 0.09423828125, 0.032958984375, -0.0552978515625, 0.01224517822265625, -0.073486328125, -0.0379638671875, 0.00439453125, 0.0004582405090332031, -0.034454345703125, 0.0157470703125, 0.01629638671875, 0.0233917236328125, -0.017822265625, 0.0193939208984375, 0.0026226043701171875, -0.03314208984375, 0.04559326171875, -0.0218505859375, 0.0989990234375, 0.00839996337890625, -0.026123046875, 0.0087890625, -0.0521240234375, 0.00225067138671875, 0.0198516845703125, -0.0247955322265625, -0.006671905517578125, -0.0137786865234375, 0.00420379638671875, 0.00908660888671875, 0.0197296142578125, -0.03765869140625, 0.0196685791015625, -0.042755126953125, 0.050079345703125, 0.055572509765625, -0.012237548828125, 0.0258331298828125, -0.032012939453125, 0.046783447265625, -0.004299163818359375, -0.00429534912109375, -0.0176849365234375, -0.05169677734375, -0.059967041015625, -0.0296630859375, 0.04156494140625, 0.060089111328125, -0.042449951171875, 0.051361083984375, -0.0035572052001953125, -0.047119140625, -0.04376220703125, -0.004970550537109375, 0.0196380615234375, 0.044952392578125, 
0.0266571044921875, -0.0335693359375, -0.05645751953125, -0.048553466796875, -0.029693603515625, -0.00470733642578125, -0.0204925537109375, 0.005535125732421875, 0.050048828125, -0.017547607421875, 0.0599365234375, -0.07720947265625, -0.03497314453125, -0.00809478759765625, 0.0186309814453125, 0.0655517578125, 0.043731689453125, 0.0288543701171875, -0.042205810546875, -0.0289306640625, -0.02886962890625, -0.0537109375, -0.0013265609741210938, 0.004055023193359375, -0.01776123046875, 0.0020751953125, 0.033203125, -0.04852294921875, 0.05078125, 0.00801849365234375, -0.046478271484375, 0.054595947265625, -0.024444580078125, 0.00460052490234375, -0.0831298828125, 0.0070953369140625, -0.0182342529296875, -0.0211944580078125, -0.0606689453125, -0.003948211669921875, -0.0020427703857421875, -0.009002685546875, -0.039398193359375, 0.0213775634765625, -0.0283966064453125, 0.0006594657897949219, 0.01263427734375, -0.0015869140625, 0.0060577392578125, 0.036529541015625, -0.007472991943359375, 0.043060302734375, 0.04559326171875, -0.03271484375, 0.04925537109375, 0.0220947265625, -0.03302001953125, 0.0083770751953125, -0.055694580078125, 0.01084136962890625, 0.0006999969482421875, -0.0016870498657226562, -0.08856201171875, -0.01464080810546875, 0.029144287109375, -0.055755615234375, 0.0003483295440673828, -0.0062103271484375, -0.051300048828125, -0.03985595703125, -0.02386474609375, 0.04730224609375, 0.0435791015625, -0.0116424560546875, 0.03887939453125, 0.0196685791015625, -0.00406646728515625, -0.04937744140625, -0.04644775390625, 0.0010843276977539062, -0.0261383056640625, -0.047119140625, 0.02667236328125, 0.005245208740234375, -0.0204925537109375, -0.0201416015625, 0.0012731552124023438, -0.00970458984375, 0.004909515380859375, 0.02557373046875, 0.04620361328125, -0.007720947265625, -0.0095977783203125, -0.0142974853515625, -0.0277862548828125, 0.0294647216796875, -0.01092529296875, 0.06536865234375, -0.00919342041015625, -0.0109405517578125, -0.018768310546875, 
0.0174407958984375, 0.0509033203125, -0.00817108154296875, 0.046966552734375, 0.06829833984375, -0.0447998046875, 0.0112152099609375, -0.0162200927734375, -0.013824462890625, -0.0399169921875, 0.02008056640625, -0.033203125, -0.059967041015625, 0.049896240234375, 0.01371002197265625, -0.01123046875, 0.0638427734375, 0.036163330078125, -0.011077880859375, 0.061676025390625, 0.02197265625, -0.0029430389404296875, 0.0269775390625, -0.024932861328125, 0.0249786376953125, -0.05389404296875, -0.03790283203125, -0.037139892578125, -0.02435302734375, -0.024169921875, -0.026763916015625, 0.0245819091796875, 0.029022216796875, -0.03277587890625, 0.052032470703125, -0.05078125, 0.035614013671875, 0.05877685546875, 0.014739990234375, -0.005069732666015625, -0.0013790130615234375, -0.024169921875, -0.019866943359375, -0.03076171875, -0.025848388671875, 0.0797119140625, 0.030029296875, 0.057159423828125, 0.02545166015625, 0.0435791015625, 0.0295562744140625, 0.0030956268310546875, -0.053466796875, 0.04644775390625, -0.0270233154296875, -0.054962158203125, -0.035400390625, -0.0237884521484375, -0.08038330078125, 0.0227813720703125, -0.01204681396484375, -0.06829833984375, -0.01229095458984375, -0.0240325927734375, -0.021453857421875, 0.0106201171875, -0.051116943359375, 0.07745361328125, -0.0233917236328125, -0.008941650390625, 0.00656890869140625, -0.0621337890625, 0.025634765625, 0.0025501251220703125, 0.020751953125, -0.006122589111328125, 0.017333984375, 0.08355712890625, -0.05133056640625, 0.0721435546875, -0.00720977783203125, 0.00878143310546875, 0.010894775390625, 0.015655517578125, 0.020263671875, 0.01114654541015625, 0.004520416259765625, 0.0260772705078125, 0.0009441375732421875, -0.02996826171875, -0.017303466796875, 0.025726318359375, -0.0615234375, -0.031707763671875, -0.059783935546875, -0.0455322265625, 0.02667236328125, 0.0274505615234375, 0.037994384765625, 0.0596923828125, 0.010772705078125, 0.0151214599609375, 0.0250396728515625, -0.01338958740234375, 
0.061859130859375, 0.0164337158203125, -0.0213470458984375, -0.059478759765625, 0.04278564453125, 0.005321502685546875, 0.0011987686157226562, 0.044464111328125, 0.01434326171875, -0.05059814453125, 0.01021575927734375, -0.03216552734375, 0.0136871337890625, -0.055572509765625, -0.0318603515625, -0.03765869140625, -0.03192138671875, -0.044158935546875, -0.0168609619140625, -0.005687713623046875, -0.050140380859375, -0.048126220703125, 0.0008635520935058594, 0.040863037109375, 0.0465087890625, -0.005123138427734375, 0.034027099609375, -0.0555419921875, 0.02197265625, 0.0216827392578125, 0.0198516845703125, -0.0129241943359375, -0.06622314453125, -0.019805908203125, -0.00481414794921875, -0.02435302734375, -0.06982421875, 0.054840087890625, 0.00701141357421875, 0.0253448486328125, 0.036163330078125, 0.0005316734313964844, 0.05352783203125, -0.043670654296875, 0.066650390625, 0.02947998046875, -0.09515380859375, 0.032867431640625, -0.0176544189453125, 0.0171661376953125, 0.0185394287109375, 0.01837158203125, -0.0523681640625, -0.00859832763671875, -0.06488037109375, -0.06927490234375, 0.0743408203125, 0.0298309326171875, 0.0126800537109375, 0.004150390625, 0.00978851318359375, 0.00933837890625, 0.0275421142578125, -0.070556640625, -0.050994873046875, -0.038299560546875, -0.032501220703125, -0.01293182373046875, -0.00377655029296875, -0.0213775634765625, -0.0518798828125, 0.041015625, 0.0069580078125, 0.043914794921875, 0.007656097412109375, -0.019989013671875, 0.008819580078125, 0.0189056396484375, 0.06646728515625, 0.055084228515625, -0.02294921875, 0.0196990966796875, 0.01184844970703125, -0.04815673828125, 0.01371002197265625, 0.01544189453125, 0.0088653564453125, 0.020233154296875, 0.047576904296875, 0.0501708984375, 0.0151214599609375, -0.037353515625, 0.046295166015625, 0.005672454833984375, -0.0281524658203125, -0.056427001953125, 0.01248931884765625, -0.0035400390625, 0.0114898681640625, 0.04058837890625, 0.018890380859375, -0.0004382133483886719, 
-0.037811279296875, 0.02362060546875, 0.01522064208984375, -0.0299224853515625, -0.01459503173828125, 0.0667724609375, -0.01464080810546875, -0.072021484375, 0.058837890625, -0.0157012939453125, -0.06475830078125, 0.055938720703125, 0.047576904296875, 0.057525634765625, -0.005870819091796875, 0.017547607421875, 0.036956787109375, 0.0257568359375, -0.01171875, 0.03228759765625, 0.0117340087890625, -0.0509033203125, -0.02496337890625, -0.04730224609375, -0.004520416259765625, -0.003780364990234375, -0.053070068359375, 0.022125244140625, -0.03759765625, -0.019256591796875, 0.019989013671875, -0.0040283203125, -0.053985595703125, 0.01739501953125, -0.01476287841796875, 0.06976318359375, -0.07684326171875, 0.0589599609375, 0.06134033203125, -0.0445556640625, -0.07293701171875, -0.03656005859375, -0.01800537109375, -0.0787353515625, 0.0599365234375, 0.0224609375, 0.02008056640625, -0.00611114501953125, -0.035064697265625, -0.052642822265625, 0.07958984375, 0.0115814208984375, -0.032012939453125, 0.004146575927734375, 0.021881103515625, 0.035400390625, -0.033447265625, 0.04815673828125, 0.038360595703125, 0.03009033203125, -0.001979827880859375, -0.049957275390625, 0.013641357421875, -0.0243988037109375, -0.01413726806640625, 0.031402587890625, -0.03350830078125, 0.092529296875, -0.01226806640625, 0.007312774658203125, 0.01033782958984375, 0.035858154296875, 0.003597259521484375, 0.0243988037109375, 0.035308837890625, 0.057708740234375, 0.0552978515625, -0.03936767578125, 0.046142578125, -0.01239776611328125, 0.04473876953125, 0.051544189453125, 0.0171661376953125, 0.04827880859375, 0.0239410400390625, -0.03973388671875, 0.07562255859375, 0.0506591796875, -0.01824951171875, 0.041656494140625, 0.0250244140625, -0.0137481689453125, -0.00591278076171875, 0.020233154296875, -0.02716064453125, 0.041107177734375, 0.021942138671875, -0.045654296875, 0.0005693435668945312, -0.025726318359375, 0.0197296142578125, -0.006946563720703125, -0.0274200439453125, 0.04290771484375, 
0.015899658203125, -0.05169677734375, 0.037322998046875, 0.026824951171875, 0.03753662109375, -0.036224365234375, -0.00833892822265625, -0.0056610107421875, 0.0242919921875, -0.01788330078125, -0.0699462890625, 0.01287078857421875, -0.006969451904296875, -0.026947021484375, -0.00934600830078125, 0.04925537109375, -0.032684326171875, -0.0560302734375, -0.00969696044921875, 0.0214080810546875, 0.0325927734375, -0.0020580291748046875, -0.05511474609375, -0.02288818359375, 0.01004791259765625, 0.0093994140625, 0.0037975311279296875, 0.04046630859375, 0.0084075927734375, 0.037506103515625, 0.067138671875, -0.01898193359375, 0.007114410400390625, 0.005161285400390625, 0.050506591796875, -0.08612060546875, -0.05157470703125, -0.08673095703125, 0.043487548828125, -0.0026988983154296875, -0.04229736328125, 0.049896240234375, 0.057098388671875, 0.0445556640625, -0.0357666015625, 0.0204315185546875, -0.0135955810546875, 0.0257568359375, -0.0247955322265625, 0.048736572265625, -0.0242919921875, 0.005298614501953125, -0.037811279296875, -0.06317138671875, -0.032073974609375, 0.06817626953125, -0.0157470703125, 0.0011606216430664062, 0.06365966796875, 0.043487548828125, 0.0027103424072265625, -0.00412750244140625, 0.007572174072265625, -0.00527191162109375, 0.00885009765625, 0.036468505859375, 0.048248291015625, -0.054443359375, 0.037811279296875, -0.0167236328125, -0.00606536865234375, -0.02197265625, -0.057525634765625, -0.07281494140625, -0.036773681640625, -0.015167236328125, -0.041900634765625, -0.033966064453125, 0.07269287109375, 0.042449951171875, -0.054718017578125, -0.0292205810546875, -0.00711822509765625, 0.018280029296875, -0.0270843505859375, -0.026519775390625, 0.041900634765625, -0.005218505859375, -0.048492431640625, 0.01064300537109375, 0.0038967132568359375, 0.003643035888671875, -0.0021762847900390625, -0.0032100677490234375, -0.018829345703125, -0.0065460205078125, 0.041656494140625, 0.0037059783935546875, -0.056854248046875, -0.023162841796875, 
0.0123748779296875, -0.006214141845703125, 0.01425933837890625, 0.043365478515625, -0.0523681640625, 0.0204925537109375, 0.036346435546875, 0.0221405029296875, 0.041107177734375, 0.0146331787109375, 0.0274505615234375, -0.0751953125, 0.0101776123046875, 0.01346588134765625, 0.037139892578125, 0.02484130859375, -0.0243988037109375, 0.05328369140625, 0.0474853515625, -0.03759765625, -0.057464599609375, -0.005420684814453125, -0.06915283203125, -0.01091766357421875, 0.060333251953125, -0.0029354095458984375, -0.01678466796875, -0.007568359375, -0.03814697265625, 0.0262603759765625, -0.03997802734375, 0.05609130859375, 0.056854248046875, -0.00405120849609375, -0.006671905517578125, -0.0260162353515625, 0.0201416015625, 0.03814697265625, -0.0298309326171875, -0.029144287109375, 0.0009870529174804688, 0.027374267578125, 0.025543212890625, 0.03759765625, -0.01361083984375, 0.011016845703125, 0.018707275390625, 0.0094146728515625, -0.0026531219482421875, -0.0098419189453125, -0.018829345703125, 0.0183868408203125, -0.01068878173828125, -0.04400634765625 ] ]
timm/mobilenetv2_050.lamb_in1k
2023-04-27T21:14:09.000Z
[ "timm", "pytorch", "safetensors", "image-classification", "dataset:imagenet-1k", "arxiv:2110.00476", "arxiv:1801.04381", "license:apache-2.0", "region:us" ]
image-classification
timm
null
null
timm/mobilenetv2_050.lamb_in1k
0
8,829
timm
2022-12-13T00:00:19
--- tags: - image-classification - timm library_name: timm license: apache-2.0 datasets: - imagenet-1k --- # Model card for mobilenetv2_050.lamb_in1k A MobileNet-v2 image classification model. Trained on ImageNet-1k in `timm` using recipe template described below. Recipe details: * A LAMB optimizer recipe that is similar to [ResNet Strikes Back](https://arxiv.org/abs/2110.00476) `A2` but 50% longer with EMA weight averaging, no CutMix * RMSProp (TF 1.0 behaviour) optimizer, EMA weight averaging * Step (exponential decay w/ staircase) LR schedule with warmup ## Model Details - **Model Type:** Image classification / feature backbone - **Model Stats:** - Params (M): 2.0 - GMACs: 0.1 - Activations (M): 3.6 - Image size: 224 x 224 - **Papers:** - MobileNetV2: Inverted Residuals and Linear Bottlenecks: https://arxiv.org/abs/1801.04381 - **Dataset:** ImageNet-1k - **Original:** https://github.com/huggingface/pytorch-image-models ## Model Usage ### Image Classification ```python from urllib.request import urlopen from PIL import Image import timm img = Image.open(urlopen( 'https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/beignets-task-guide.png' )) model = timm.create_model('mobilenetv2_050.lamb_in1k', pretrained=True) model = model.eval() # get model specific transforms (normalization, resize) data_config = timm.data.resolve_model_data_config(model) transforms = timm.data.create_transform(**data_config, is_training=False) output = model(transforms(img).unsqueeze(0)) # unsqueeze single image into batch of 1 top5_probabilities, top5_class_indices = torch.topk(output.softmax(dim=1) * 100, k=5) ``` ### Feature Map Extraction ```python from urllib.request import urlopen from PIL import Image import timm img = Image.open(urlopen( 'https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/beignets-task-guide.png' )) model = timm.create_model( 'mobilenetv2_050.lamb_in1k', pretrained=True, features_only=True, ) model = model.eval() 
# get model specific transforms (normalization, resize) data_config = timm.data.resolve_model_data_config(model) transforms = timm.data.create_transform(**data_config, is_training=False) output = model(transforms(img).unsqueeze(0)) # unsqueeze single image into batch of 1 for o in output: # print shape of each feature map in output # e.g.: # torch.Size([1, 8, 112, 112]) # torch.Size([1, 16, 56, 56]) # torch.Size([1, 16, 28, 28]) # torch.Size([1, 48, 14, 14]) # torch.Size([1, 160, 7, 7]) print(o.shape) ``` ### Image Embeddings ```python from urllib.request import urlopen from PIL import Image import timm img = Image.open(urlopen( 'https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/beignets-task-guide.png' )) model = timm.create_model( 'mobilenetv2_050.lamb_in1k', pretrained=True, num_classes=0, # remove classifier nn.Linear ) model = model.eval() # get model specific transforms (normalization, resize) data_config = timm.data.resolve_model_data_config(model) transforms = timm.data.create_transform(**data_config, is_training=False) output = model(transforms(img).unsqueeze(0)) # output is (batch_size, num_features) shaped tensor # or equivalently (without needing to set num_classes=0) output = model.forward_features(transforms(img).unsqueeze(0)) # output is unpooled, a (1, 1280, 7, 7) shaped tensor output = model.forward_head(output, pre_logits=True) # output is a (1, num_features) shaped tensor ``` ## Model Comparison Explore the dataset and runtime metrics of this model in timm [model results](https://github.com/huggingface/pytorch-image-models/tree/main/results). 
## Citation ```bibtex @misc{rw2019timm, author = {Ross Wightman}, title = {PyTorch Image Models}, year = {2019}, publisher = {GitHub}, journal = {GitHub repository}, doi = {10.5281/zenodo.4414861}, howpublished = {\url{https://github.com/huggingface/pytorch-image-models}} } ``` ```bibtex @inproceedings{sandler2018mobilenetv2, title={Mobilenetv2: Inverted residuals and linear bottlenecks}, author={Sandler, Mark and Howard, Andrew and Zhu, Menglong and Zhmoginov, Andrey and Chen, Liang-Chieh}, booktitle={Proceedings of the IEEE conference on computer vision and pattern recognition}, pages={4510--4520}, year={2018} } ```
4,382
[ [ -0.0293426513671875, -0.0258331298828125, -0.01032257080078125, 0.0084991455078125, -0.029388427734375, -0.0283660888671875, -0.004100799560546875, -0.027587890625, 0.0197906494140625, 0.03179931640625, -0.0280609130859375, -0.0447998046875, -0.04486083984375, -0.022186279296875, -0.00510406494140625, 0.06585693359375, -0.00363922119140625, -0.0017843246459960938, -0.01214599609375, -0.047882080078125, -0.0206451416015625, -0.0166473388671875, -0.06671142578125, -0.03924560546875, 0.028411865234375, 0.0218353271484375, 0.0423583984375, 0.05487060546875, 0.041259765625, 0.0303955078125, -0.0025463104248046875, 0.00731658935546875, -0.014556884765625, -0.0165252685546875, 0.024749755859375, -0.04833984375, -0.0330810546875, 0.0208587646484375, 0.041595458984375, 0.015655517578125, 0.0027599334716796875, 0.04364013671875, 0.005031585693359375, 0.05084228515625, -0.02020263671875, -0.00287628173828125, -0.035675048828125, 0.01068115234375, -0.006084442138671875, 0.005336761474609375, -0.01526641845703125, -0.032440185546875, 0.007724761962890625, -0.026092529296875, 0.0216217041015625, -0.0010728836059570312, 0.0985107421875, 0.019866943359375, -0.0187835693359375, -0.005107879638671875, -0.024322509765625, 0.06103515625, -0.05548095703125, 0.01593017578125, 0.032012939453125, 0.0138702392578125, -0.004016876220703125, -0.07232666015625, -0.04254150390625, -0.01258087158203125, -0.004505157470703125, 0.0038547515869140625, -0.0175323486328125, -0.01151275634765625, 0.0179290771484375, 0.01355743408203125, -0.039215087890625, 0.01396942138671875, -0.04595947265625, -0.018463134765625, 0.049041748046875, 0.00318145751953125, 0.025665283203125, -0.0164337158203125, -0.038970947265625, -0.025299072265625, -0.0302886962890625, 0.039306640625, 0.01702880859375, 0.006130218505859375, -0.052520751953125, 0.039947509765625, 0.0105438232421875, 0.04119873046875, 0.00006771087646484375, -0.0313720703125, 0.047210693359375, -0.00307464599609375, -0.0307159423828125, 
-0.005626678466796875, 0.07940673828125, 0.04278564453125, 0.01114654541015625, 0.0147552490234375, -0.00801849365234375, -0.0238037109375, -0.004077911376953125, -0.0911865234375, -0.0208282470703125, 0.032928466796875, -0.0640869140625, -0.03753662109375, 0.0218963623046875, -0.043243408203125, -0.0135498046875, 0.0006170272827148438, 0.03253173828125, -0.0234832763671875, -0.036163330078125, -0.00183868408203125, -0.01519775390625, 0.034393310546875, 0.006298065185546875, -0.041351318359375, 0.01412200927734375, 0.0186614990234375, 0.08917236328125, 0.01250457763671875, -0.0240478515625, -0.0186614990234375, -0.0240631103515625, -0.020355224609375, 0.03314208984375, -0.0023517608642578125, -0.0148162841796875, -0.025421142578125, 0.0232391357421875, -0.0130462646484375, -0.057586669921875, 0.026458740234375, -0.0187530517578125, 0.01343536376953125, -0.0017528533935546875, -0.00485992431640625, -0.043670654296875, 0.0185089111328125, -0.0316162109375, 0.10791015625, 0.0237579345703125, -0.06201171875, 0.0150909423828125, -0.0341796875, -0.01522064208984375, -0.03363037109375, 0.0016870498657226562, -0.082763671875, -0.0064849853515625, 0.00632476806640625, 0.05517578125, -0.03173828125, -0.005992889404296875, -0.037322998046875, -0.01708984375, 0.0242919921875, 0.0080108642578125, 0.073974609375, 0.0169525146484375, -0.042205810546875, 0.012237548828125, -0.052032470703125, 0.025360107421875, 0.03826904296875, -0.022552490234375, -0.0097198486328125, -0.031005859375, 0.0144500732421875, 0.035125732421875, 0.003971099853515625, -0.036956787109375, 0.01708984375, -0.01268768310546875, 0.040985107421875, 0.0294952392578125, -0.017303466796875, 0.026092529296875, -0.03399658203125, 0.0160675048828125, 0.0221405029296875, 0.0124969482421875, -0.007198333740234375, -0.04644775390625, -0.0614013671875, -0.0285797119140625, 0.0272674560546875, 0.049285888671875, -0.034423828125, 0.0283050537109375, -0.00971221923828125, -0.062225341796875, -0.0296173095703125, 
0.00948333740234375, 0.0462646484375, 0.039794921875, 0.0220947265625, -0.0391845703125, -0.0421142578125, -0.06976318359375, -0.002147674560546875, -0.0021915435791015625, -0.002315521240234375, 0.0301055908203125, 0.04986572265625, -0.01468658447265625, 0.050750732421875, -0.02117919921875, -0.0191802978515625, -0.0159149169921875, 0.00970458984375, 0.0222930908203125, 0.06549072265625, 0.059234619140625, -0.059112548828125, -0.0330810546875, -0.0019083023071289062, -0.07196044921875, 0.007354736328125, -0.013427734375, -0.007137298583984375, 0.0178680419921875, 0.01702880859375, -0.046783447265625, 0.0537109375, 0.0203857421875, -0.022308349609375, 0.032928466796875, -0.01049041748046875, 0.0146942138671875, -0.091796875, 0.00579833984375, 0.036407470703125, -0.0172119140625, -0.032928466796875, -0.0027866363525390625, 0.002613067626953125, -0.0022373199462890625, -0.040496826171875, 0.05328369140625, -0.0404052734375, -0.0214996337890625, -0.01751708984375, -0.01433563232421875, -0.0006899833679199219, 0.046112060546875, -0.01285552978515625, 0.0367431640625, 0.050750732421875, -0.03839111328125, 0.036407470703125, 0.024078369140625, -0.00885772705078125, 0.0217132568359375, -0.06201171875, 0.0180206298828125, 0.003505706787109375, 0.0218963623046875, -0.07427978515625, -0.017120361328125, 0.029449462890625, -0.05047607421875, 0.03741455078125, -0.044586181640625, -0.026092529296875, -0.044281005859375, -0.041656494140625, 0.0318603515625, 0.057098388671875, -0.047882080078125, 0.038818359375, 0.02410888671875, 0.0235595703125, -0.041351318359375, -0.06494140625, -0.0187530517578125, -0.033203125, -0.0582275390625, 0.0307464599609375, 0.0271148681640625, -0.0004119873046875, 0.00557708740234375, -0.0111083984375, -0.0178985595703125, -0.01337432861328125, 0.056121826171875, 0.0228424072265625, -0.025054931640625, -0.01325225830078125, -0.0186767578125, -0.0056304931640625, 0.0004818439483642578, -0.029144287109375, 0.045684814453125, -0.024200439453125, 
-0.0011262893676757812, -0.07635498046875, -0.0112152099609375, 0.040496826171875, -0.01033782958984375, 0.05743408203125, 0.09063720703125, -0.035614013671875, 0.00745391845703125, -0.037322998046875, -0.0137176513671875, -0.036468505859375, 0.033294677734375, -0.035308837890625, -0.027435302734375, 0.069580078125, -0.005950927734375, 0.0006823539733886719, 0.049285888671875, 0.032073974609375, -0.0098419189453125, 0.05816650390625, 0.039794921875, 0.01399993896484375, 0.048431396484375, -0.0662841796875, -0.0206756591796875, -0.06927490234375, -0.048736572265625, -0.0301513671875, -0.03985595703125, -0.053741455078125, -0.032867431640625, 0.025299072265625, 0.025848388671875, -0.033447265625, 0.040863037109375, -0.05230712890625, 0.005924224853515625, 0.0523681640625, 0.047760009765625, -0.03424072265625, 0.02679443359375, -0.0227508544921875, 0.0013866424560546875, -0.0562744140625, -0.028656005859375, 0.0831298828125, 0.040130615234375, 0.038543701171875, -0.006591796875, 0.044464111328125, -0.013275146484375, 0.0166473388671875, -0.050018310546875, 0.039581298828125, -0.00786590576171875, -0.026397705078125, -0.00428009033203125, -0.0284576416015625, -0.07781982421875, 0.01470947265625, -0.02099609375, -0.061065673828125, 0.01111602783203125, 0.018890380859375, -0.01568603515625, 0.053741455078125, -0.060699462890625, 0.0673828125, -0.0075836181640625, -0.043609619140625, 0.004459381103515625, -0.0592041015625, 0.025543212890625, 0.0169830322265625, -0.01094818115234375, -0.00910186767578125, 0.009307861328125, 0.08111572265625, -0.0509033203125, 0.056396484375, -0.03765869140625, 0.0284576416015625, 0.05206298828125, -0.007328033447265625, 0.03369140625, -0.0010347366333007812, -0.01522064208984375, 0.0247344970703125, 0.0048370361328125, -0.037628173828125, -0.038482666015625, 0.04119873046875, -0.0679931640625, -0.0173797607421875, -0.023345947265625, -0.0242156982421875, 0.0168914794921875, 0.00643157958984375, 0.04248046875, 0.05419921875, 
0.0215911865234375, 0.0171051025390625, 0.0391845703125, -0.035369873046875, 0.03448486328125, -0.01213836669921875, -0.01529693603515625, -0.03765869140625, 0.06524658203125, 0.005214691162109375, 0.00864410400390625, 0.0020732879638671875, 0.0158233642578125, -0.03009033203125, -0.044158935546875, -0.03594970703125, 0.020721435546875, -0.03778076171875, -0.032073974609375, -0.042572021484375, -0.0308990478515625, -0.0259246826171875, -0.00251007080078125, -0.04608154296875, -0.0301361083984375, -0.035003662109375, 0.0261383056640625, 0.045440673828125, 0.039581298828125, -0.01233673095703125, 0.048187255859375, -0.04962158203125, 0.01355743408203125, 0.0079345703125, 0.0273895263671875, -0.0080413818359375, -0.06787109375, -0.01412200927734375, 0.00489044189453125, -0.0343017578125, -0.04718017578125, 0.035369873046875, 0.00919342041015625, 0.027587890625, 0.029266357421875, -0.024200439453125, 0.0556640625, -0.0035953521728515625, 0.043243408203125, 0.044921875, -0.042694091796875, 0.043792724609375, -0.01003265380859375, 0.0141143798828125, 0.00783538818359375, 0.0229644775390625, -0.01522064208984375, 0.01105499267578125, -0.056396484375, -0.05322265625, 0.057769775390625, 0.01403045654296875, 0.0032367706298828125, 0.0362548828125, 0.05621337890625, -0.00615692138671875, 0.0005993843078613281, -0.05609130859375, -0.034423828125, -0.034942626953125, -0.018402099609375, 0.01480865478515625, -0.020843505859375, 0.0022525787353515625, -0.05682373046875, 0.0494384765625, 0.0048980712890625, 0.060577392578125, 0.026153564453125, 0.0013742446899414062, -0.00007265806198120117, -0.032806396484375, 0.051422119140625, 0.02337646484375, -0.024078369140625, 0.003208160400390625, 0.0106201171875, -0.05108642578125, 0.014862060546875, 0.00014591217041015625, 0.0012350082397460938, 0.004695892333984375, 0.026763916015625, 0.068603515625, -0.0078125, 0.002124786376953125, 0.0322265625, -0.0019378662109375, -0.035888671875, -0.0259552001953125, 0.00965118408203125, 
0.0006241798400878906, 0.037384033203125, 0.0299835205078125, 0.0303955078125, -0.01146697998046875, -0.019927978515625, 0.023345947265625, 0.036773681640625, -0.025787353515625, -0.0257110595703125, 0.0560302734375, -0.0091094970703125, -0.0223541259765625, 0.0582275390625, -0.0172119140625, -0.040985107421875, 0.0892333984375, 0.037994384765625, 0.06195068359375, -0.01076507568359375, 0.004673004150390625, 0.063232421875, 0.021759033203125, -0.00632476806640625, 0.0187835693359375, 0.0180511474609375, -0.054229736328125, 0.00807952880859375, -0.03558349609375, 0.013763427734375, 0.0316162109375, -0.049041748046875, 0.028533935546875, -0.050079345703125, -0.03692626953125, 0.0182342529296875, 0.0178680419921875, -0.062225341796875, 0.022674560546875, -0.00949859619140625, 0.06475830078125, -0.045501708984375, 0.06536865234375, 0.06658935546875, -0.03033447265625, -0.0802001953125, -0.004039764404296875, 0.01059722900390625, -0.07086181640625, 0.05255126953125, 0.03753662109375, 0.004520416259765625, 0.008544921875, -0.053955078125, -0.046051025390625, 0.1024169921875, 0.0225067138671875, -0.013153076171875, 0.026092529296875, -0.00435638427734375, 0.00826263427734375, -0.0333251953125, 0.0426025390625, 0.013946533203125, 0.021270751953125, 0.026092529296875, -0.060272216796875, 0.0176849365234375, -0.0263671875, 0.0150909423828125, 0.017822265625, -0.0675048828125, 0.060882568359375, -0.0467529296875, -0.00876617431640625, 0.005329132080078125, 0.043731689453125, 0.01180267333984375, 0.0312347412109375, 0.034881591796875, 0.05517578125, 0.036285400390625, -0.01522064208984375, 0.060699462890625, 0.0015707015991210938, 0.04449462890625, 0.04864501953125, 0.0190887451171875, 0.045318603515625, 0.0262603759765625, -0.0177459716796875, 0.0318603515625, 0.08807373046875, -0.02752685546875, 0.025115966796875, 0.0157012939453125, 0.0007100105285644531, 0.0018110275268554688, 0.004886627197265625, -0.03924560546875, 0.052154541015625, 0.013641357421875, -0.04510498046875, 
-0.01111602783203125, 0.005138397216796875, 0.00583648681640625, -0.0261993408203125, -0.0214691162109375, 0.033935546875, 0.00390625, -0.02508544921875, 0.07806396484375, 0.023040771484375, 0.0626220703125, -0.0213470458984375, 0.00469207763671875, -0.0204315185546875, 0.007328033447265625, -0.03277587890625, -0.046905517578125, 0.0275115966796875, -0.0205230712890625, -0.0058441162109375, 0.00949859619140625, 0.05694580078125, -0.017822265625, -0.0308990478515625, 0.0075836181640625, 0.0202178955078125, 0.042572021484375, 0.0031452178955078125, -0.09375, 0.0186920166015625, 0.0102386474609375, -0.046051025390625, 0.0222930908203125, 0.015167236328125, 0.0016498565673828125, 0.060577392578125, 0.047393798828125, -0.01389312744140625, 0.01189422607421875, -0.0267181396484375, 0.06280517578125, -0.043426513671875, -0.0163726806640625, -0.066650390625, 0.052734375, -0.01313018798828125, -0.04705810546875, 0.037322998046875, 0.0550537109375, 0.05975341796875, 0.0011377334594726562, 0.0384521484375, -0.0242156982421875, -0.003932952880859375, -0.037017822265625, 0.050872802734375, -0.05914306640625, 0.00174713134765625, 0.0018596649169921875, -0.050445556640625, -0.018707275390625, 0.058746337890625, -0.01555633544921875, 0.0235748291015625, 0.03814697265625, 0.080810546875, -0.03106689453125, -0.022979736328125, 0.00727081298828125, 0.0003044605255126953, -0.0049591064453125, 0.0289764404296875, 0.031280517578125, -0.0694580078125, 0.02520751953125, -0.039794921875, -0.018798828125, -0.017181396484375, -0.049407958984375, -0.08056640625, -0.06304931640625, -0.043243408203125, -0.0672607421875, -0.01210784912109375, 0.0732421875, 0.08514404296875, -0.047576904296875, -0.01457977294921875, 0.00293731689453125, 0.013580322265625, -0.01197052001953125, -0.0160064697265625, 0.044647216796875, 0.0015287399291992188, -0.04583740234375, -0.0230865478515625, -0.0017786026000976562, 0.03369140625, 0.01226043701171875, -0.0176239013671875, -0.007587432861328125, 
-0.018035888671875, 0.0270843505859375, 0.034759521484375, -0.04925537109375, -0.00576019287109375, -0.023590087890625, -0.01861572265625, 0.0305023193359375, 0.04559326171875, -0.036102294921875, 0.01335906982421875, 0.018218994140625, 0.0275115966796875, 0.06573486328125, -0.0122833251953125, -0.0011186599731445312, -0.054779052734375, 0.05633544921875, -0.01403045654296875, 0.02056884765625, 0.020782470703125, -0.02423095703125, 0.042144775390625, 0.031829833984375, -0.0292510986328125, -0.068359375, -0.0030879974365234375, -0.08544921875, -0.00826263427734375, 0.08221435546875, -0.02301025390625, -0.035369873046875, 0.0249786376953125, -0.002593994140625, 0.049774169921875, -0.00739288330078125, 0.034576416015625, 0.00868988037109375, -0.01102447509765625, -0.049896240234375, -0.055206298828125, 0.034027099609375, 0.010040283203125, -0.047760009765625, -0.045684814453125, -0.004680633544921875, 0.0545654296875, 0.01064300537109375, 0.03936767578125, -0.00870513916015625, 0.0077972412109375, 0.01346588134765625, 0.033935546875, -0.03692626953125, 0.0018033981323242188, -0.0186920166015625, -0.003734588623046875, -0.017242431640625, -0.05584716796875 ] ]
theainerd/Wav2Vec2-large-xlsr-hindi
2023-05-31T18:52:14.000Z
[ "transformers", "pytorch", "safetensors", "wav2vec2", "automatic-speech-recognition", "hi", "endpoints_compatible", "has_space", "region:us" ]
automatic-speech-recognition
theainerd
null
null
theainerd/Wav2Vec2-large-xlsr-hindi
4
8,825
transformers
2022-03-02T23:29:05
--- language: - hi --- # Wav2Vec2-Large-XLSR-53-hindi Fine-tuned [facebook/wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53) hindi using the [Multilingual and code-switching ASR challenges for low resource Indian languages](https://navana-tech.github.io/IS21SS-indicASRchallenge/data.html). When using this model, make sure that your speech input is sampled at 16kHz. ## Usage The model can be used directly (without a language model) as follows: ```python import torch import torchaudio from datasets import load_dataset from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor test_dataset = load_dataset("common_voice", "hi", split="test[:2%]") processor = Wav2Vec2Processor.from_pretrained("theainerd/Wav2Vec2-large-xlsr-hindi") model = Wav2Vec2ForCTC.from_pretrained("theainerd/Wav2Vec2-large-xlsr-hindi") resampler = torchaudio.transforms.Resample(48_000, 16_000) # Preprocessing the datasets. # We need to read the aduio files as arrays def speech_file_to_array_fn(batch): speech_array, sampling_rate = torchaudio.load(batch["path"]) batch["speech"] = resampler(speech_array).squeeze().numpy() return batch test_dataset = test_dataset.map(speech_file_to_array_fn) inputs = processor(test_dataset["speech"][:2], sampling_rate=16_000, return_tensors="pt", padding=True) with torch.no_grad(): logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits predicted_ids = torch.argmax(logits, dim=-1) print("Prediction:", processor.batch_decode(predicted_ids)) print("Reference:", test_dataset["sentence"][:2]) ``` ## Evaluation The model can be evaluated as follows on the hindi test data of Common Voice. 
```python import torch import torchaudio from datasets import load_dataset, load_metric from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor import re test_dataset = load_dataset("common_voice", "hi", split="test") wer = load_metric("wer") processor = Wav2Vec2Processor.from_pretrained("theainerd/Wav2Vec2-large-xlsr-hindi") model = Wav2Vec2ForCTC.from_pretrained("theainerd/Wav2Vec2-large-xlsr-hindi") model.to("cuda") resampler = torchaudio.transforms.Resample(48_000, 16_000) chars_to_ignore_regex = '[\,\?\.\!\-\;\:\"\“]' # Preprocessing the datasets. # We need to read the aduio files as arrays def speech_file_to_array_fn(batch): batch["sentence"] = re.sub(chars_to_ignore_regex, '', batch["sentence"]).lower() speech_array, sampling_rate = torchaudio.load(batch["path"]) batch["speech"] = resampler(speech_array).squeeze().numpy() return batch test_dataset = test_dataset.map(speech_file_to_array_fn) # Preprocessing the datasets. # We need to read the aduio files as arrays def evaluate(batch): inputs = processor(batch["speech"], sampling_rate=16_000, return_tensors="pt", padding=True) with torch.no_grad(): logits = model(inputs.input_values.to("cuda"), attention_mask=inputs.attention_mask.to("cuda")).logits pred_ids = torch.argmax(logits, dim=-1) batch["pred_strings"] = processor.batch_decode(pred_ids) return batch result = test_dataset.map(evaluate, batched=True, batch_size=8) print("WER: {:2f}".format(100 * wer.compute(predictions=result["pred_strings"], references=result["sentence"]))) ``` **Test Result**: 72.62 % ## Training The script used for training can be found [Hindi ASR Fine Tuning Wav2Vec2](https://colab.research.google.com/drive/1m-F7et3CHT_kpFqg7UffTIwnUV9AKgrg?usp=sharing)
3,455
[ [ -0.0162200927734375, -0.044586181640625, -0.01363372802734375, 0.0211029052734375, -0.0091400146484375, 0.006275177001953125, -0.03778076171875, -0.02728271484375, 0.0013265609741210938, 0.014892578125, -0.04119873046875, -0.031402587890625, -0.0421142578125, -0.006145477294921875, -0.0149383544921875, 0.05938720703125, 0.006988525390625, 0.0161590576171875, 0.0225372314453125, -0.01233673095703125, -0.04290771484375, -0.0110321044921875, -0.0655517578125, -0.0226287841796875, 0.0007462501525878906, 0.044219970703125, 0.02008056640625, 0.0188751220703125, 0.0125579833984375, 0.0310821533203125, -0.00403594970703125, 0.0155181884765625, -0.028656005859375, 0.0034618377685546875, 0.01751708984375, -0.0382080078125, -0.0110626220703125, 0.00313568115234375, 0.052581787109375, 0.0110931396484375, -0.0240631103515625, 0.0160369873046875, -0.009002685546875, 0.0221710205078125, -0.0182647705078125, 0.0152587890625, -0.056488037109375, -0.0184326171875, -0.007373809814453125, -0.01082611083984375, -0.0250396728515625, -0.030517578125, -0.0014400482177734375, -0.042877197265625, 0.0267333984375, -0.00640106201171875, 0.081298828125, 0.01995849609375, -0.01324462890625, -0.036865234375, -0.051422119140625, 0.07806396484375, -0.059295654296875, 0.022491455078125, 0.031524658203125, 0.01383209228515625, -0.0021266937255859375, -0.057769775390625, -0.058685302734375, -0.004291534423828125, 0.0131378173828125, 0.0173797607421875, -0.0153045654296875, -0.0020580291748046875, 0.0210113525390625, 0.02655029296875, -0.046234130859375, 0.0026226043701171875, -0.068603515625, -0.033599853515625, 0.056243896484375, -0.01268768310546875, 0.037994384765625, -0.0179595947265625, -0.01462554931640625, -0.04150390625, -0.0197906494140625, 0.0127410888671875, 0.03631591796875, 0.02508544921875, -0.03533935546875, 0.041168212890625, -0.002254486083984375, 0.05242919921875, 0.0036716461181640625, -0.0208282470703125, 0.050384521484375, -0.018463134765625, -0.0198516845703125, 
0.0413818359375, 0.072509765625, 0.004520416259765625, 0.034393310546875, 0.0215911865234375, 0.01323699951171875, 0.0305328369140625, -0.02008056640625, -0.048309326171875, -0.0280609130859375, 0.0350341796875, 0.0020294189453125, -0.0021514892578125, -0.01702880859375, -0.059234619140625, -0.006500244140625, -0.0185089111328125, 0.049560546875, -0.05450439453125, -0.027374267578125, 0.00798797607421875, -0.00949859619140625, 0.03515625, -0.014495849609375, -0.07525634765625, 0.017303466796875, 0.0197601318359375, 0.0699462890625, 0.013336181640625, -0.028228759765625, -0.033721923828125, -0.0071868896484375, -0.0017242431640625, 0.041168212890625, -0.0154876708984375, -0.039459228515625, -0.023040771484375, -0.01239013671875, -0.0360107421875, -0.034942626953125, 0.056427001953125, -0.0183563232421875, 0.01479339599609375, -0.0021495819091796875, -0.020660400390625, -0.00994110107421875, -0.0225067138671875, -0.026275634765625, 0.08294677734375, 0.0010204315185546875, -0.055450439453125, 0.0128173828125, -0.046478271484375, -0.051483154296875, -0.0240325927734375, -0.00958251953125, -0.032562255859375, -0.01456451416015625, 0.02783203125, 0.01580810546875, -0.0171661376953125, 0.01056671142578125, 0.0127105712890625, -0.03887939453125, 0.0225372314453125, -0.03497314453125, 0.08209228515625, 0.04022216796875, -0.03564453125, 0.0172882080078125, -0.06988525390625, 0.0289306640625, -0.0021343231201171875, -0.029266357421875, 0.00925445556640625, -0.01026153564453125, 0.035552978515625, 0.0196533203125, -0.004917144775390625, -0.05230712890625, 0.0003876686096191406, -0.062744140625, 0.05657958984375, 0.043121337890625, -0.0154571533203125, 0.00624847412109375, -0.00792694091796875, 0.024627685546875, -0.007350921630859375, -0.0164794921875, 0.0164794921875, -0.039703369140625, -0.060791015625, -0.029876708984375, 0.020477294921875, 0.05328369140625, -0.036529541015625, 0.03619384765625, -0.0154876708984375, -0.0592041015625, -0.08306884765625, 
0.00044608116149902344, 0.025421142578125, 0.04034423828125, 0.04150390625, -0.00555419921875, -0.0645751953125, -0.05267333984375, -0.01043701171875, -0.0057525634765625, -0.00782012939453125, 0.01505279541015625, 0.027252197265625, -0.029876708984375, 0.05596923828125, -0.0506591796875, -0.035858154296875, -0.03662109375, 0.01654052734375, 0.03204345703125, 0.0489501953125, 0.00862884521484375, -0.06646728515625, -0.03778076171875, -0.0166168212890625, -0.0241851806640625, -0.01313018798828125, -0.007038116455078125, 0.0010671615600585938, 0.01056671142578125, 0.030487060546875, -0.043701171875, 0.0150299072265625, 0.034820556640625, -0.021148681640625, 0.03271484375, -0.007343292236328125, 0.0228424072265625, -0.09710693359375, 0.00594329833984375, -0.00576019287109375, 0.00811767578125, -0.038970947265625, -0.0254364013671875, -0.01568603515625, -0.0144195556640625, -0.034393310546875, 0.036895751953125, -0.02960205078125, 0.006214141845703125, -0.01270294189453125, 0.02020263671875, -0.0084075927734375, 0.044403076171875, 0.00267791748046875, 0.06365966796875, 0.06494140625, -0.044769287109375, 0.051788330078125, 0.03765869140625, -0.03961181640625, 0.0110931396484375, -0.07110595703125, 0.0199127197265625, 0.0102996826171875, 0.003299713134765625, -0.094482421875, -0.0187225341796875, 0.01477813720703125, -0.07568359375, 0.032470703125, 0.005100250244140625, -0.03387451171875, -0.0238800048828125, -0.0165863037109375, 0.0238494873046875, 0.0504150390625, -0.027923583984375, 0.0305328369140625, 0.03875732421875, -0.004608154296875, -0.040679931640625, -0.07403564453125, -0.0213623046875, -0.01641845703125, -0.054534912109375, 0.01200103759765625, -0.0009202957153320312, -0.00640106201171875, -0.02178955078125, -0.0161590576171875, -0.00316619873046875, -0.0289459228515625, 0.0306854248046875, 0.017425537109375, -0.006465911865234375, -0.006877899169921875, -0.0127410888671875, -0.0031490325927734375, -0.0078277587890625, -0.00452423095703125, 0.05841064453125, 
-0.0161590576171875, -0.00701141357421875, -0.052520751953125, 0.005283355712890625, 0.032257080078125, -0.0245513916015625, 0.039306640625, 0.08599853515625, -0.026214599609375, 0.0032958984375, -0.034942626953125, 0.006816864013671875, -0.037933349609375, 0.0606689453125, -0.023101806640625, -0.029205322265625, 0.037139892578125, 0.01200103759765625, -0.01000213623046875, 0.048126220703125, 0.04949951171875, 0.014373779296875, 0.06646728515625, 0.0102386474609375, -0.01175689697265625, 0.03387451171875, -0.06646728515625, 0.0160064697265625, -0.07666015625, -0.036376953125, -0.042083740234375, -0.0131988525390625, -0.03826904296875, -0.03277587890625, 0.017822265625, 0.00012981891632080078, -0.01383209228515625, 0.0281524658203125, -0.0606689453125, 0.014312744140625, 0.047698974609375, 0.004718780517578125, -0.00772857666015625, 0.017974853515625, -0.0018482208251953125, -0.005214691162109375, -0.0323486328125, -0.0201263427734375, 0.07275390625, 0.0226898193359375, 0.06292724609375, -0.0024013519287109375, 0.0291595458984375, -0.00667572021484375, -0.018646240234375, -0.06744384765625, 0.03509521484375, -0.0104827880859375, -0.0421142578125, -0.029510498046875, -0.020904541015625, -0.06646728515625, 0.01522064208984375, -0.0014925003051757812, -0.06683349609375, 0.0128173828125, -0.0054779052734375, -0.0296478271484375, 0.0141448974609375, -0.0777587890625, 0.051300048828125, -0.0022945404052734375, -0.0164794921875, -0.0177459716796875, -0.049530029296875, 0.0179290771484375, 0.008514404296875, 0.00879669189453125, -0.0069732666015625, 0.03399658203125, 0.097900390625, -0.0185089111328125, 0.038818359375, -0.0149078369140625, 0.01025390625, 0.051788330078125, -0.0306243896484375, 0.01168060302734375, 0.01149749755859375, -0.0216827392578125, 0.009796142578125, 0.0101776123046875, -0.0203094482421875, -0.017822265625, 0.07373046875, -0.0887451171875, -0.0200653076171875, -0.0408935546875, -0.043365478515625, -0.0229949951171875, 0.005863189697265625, 
0.061248779296875, 0.058624267578125, 0.0075531005859375, 0.016815185546875, 0.04437255859375, -0.016998291015625, 0.0316162109375, 0.0114593505859375, 0.018951416015625, -0.04486083984375, 0.061248779296875, 0.01039886474609375, 0.022613525390625, 0.0064849853515625, 0.011932373046875, -0.04852294921875, -0.029510498046875, -0.023651123046875, 0.0166778564453125, -0.04608154296875, -0.0132904052734375, -0.04876708984375, -0.02947998046875, -0.06341552734375, 0.0052947998046875, -0.039215087890625, -0.0192413330078125, -0.0294647216796875, 0.005603790283203125, 0.05078125, 0.0301055908203125, -0.019256591796875, 0.034820556640625, -0.034027099609375, 0.052337646484375, 0.0183563232421875, 0.01197052001953125, -0.0016946792602539062, -0.07470703125, -0.0176239013671875, 0.0228424072265625, -0.0222625732421875, -0.05841064453125, 0.041839599609375, 0.02447509765625, 0.02294921875, 0.01444244384765625, -0.005359649658203125, 0.06573486328125, -0.0258026123046875, 0.0533447265625, 0.0159454345703125, -0.07586669921875, 0.068115234375, -0.009735107421875, 0.0273284912109375, 0.036224365234375, 0.0145416259765625, -0.040985107421875, -0.012969970703125, -0.034393310546875, -0.0767822265625, 0.08380126953125, 0.035308837890625, 0.0007843971252441406, 0.0164794921875, 0.00206756591796875, -0.01416778564453125, -0.007167816162109375, -0.044952392578125, -0.044769287109375, -0.031524658203125, -0.0302886962890625, -0.025238037109375, -0.008758544921875, -0.00125885009765625, -0.04388427734375, 0.06695556640625, 0.027374267578125, 0.0304412841796875, 0.038726806640625, -0.0014181137084960938, 0.0017766952514648438, 0.0270233154296875, 0.046661376953125, 0.0206146240234375, -0.03314208984375, -0.0051116943359375, 0.0229339599609375, -0.0784912109375, 0.02191162109375, 0.017333984375, 0.0126190185546875, 0.01468658447265625, 0.022064208984375, 0.07940673828125, 0.006404876708984375, -0.039642333984375, 0.00774383544921875, -0.012603759765625, -0.005992889404296875, 
-0.04425048828125, 0.016998291015625, 0.0174102783203125, 0.0008211135864257812, 0.033477783203125, 0.023712158203125, -0.0051727294921875, -0.04498291015625, 0.0101470947265625, 0.008575439453125, -0.0204010009765625, -0.0144195556640625, 0.0682373046875, 0.007129669189453125, -0.0316162109375, 0.0548095703125, -0.0010919570922851562, -0.0330810546875, 0.056427001953125, 0.0484619140625, 0.0556640625, -0.034820556640625, -0.0112457275390625, 0.049041748046875, 0.0125885009765625, -0.00772857666015625, 0.0299224853515625, 0.00771331787109375, -0.045745849609375, -0.024688720703125, -0.057647705078125, 0.00018477439880371094, 0.028411865234375, -0.051300048828125, 0.045440673828125, -0.01448822021484375, -0.004367828369140625, 0.01336669921875, 0.0283050537109375, -0.038726806640625, 0.033935546875, 0.00628662109375, 0.052764892578125, -0.062164306640625, 0.089599609375, 0.032257080078125, -0.031280517578125, -0.095703125, -0.007781982421875, -0.011932373046875, -0.06683349609375, 0.042572021484375, 0.012481689453125, -0.03094482421875, 0.0205841064453125, -0.0272674560546875, -0.06939697265625, 0.07464599609375, 0.022064208984375, -0.033233642578125, 0.0113067626953125, -0.007106781005859375, 0.040252685546875, -0.006641387939453125, 0.01904296875, 0.054443359375, 0.0296478271484375, 0.0293121337890625, -0.07135009765625, -0.007640838623046875, -0.025177001953125, -0.017578125, -0.00443267822265625, -0.047088623046875, 0.068115234375, -0.040618896484375, -0.01885986328125, 0.003993988037109375, 0.05389404296875, 0.038543701171875, 0.0265655517578125, 0.027801513671875, 0.03424072265625, 0.054840087890625, -0.00876617431640625, 0.057708740234375, 0.0013713836669921875, 0.039398193359375, 0.08026123046875, -0.007205963134765625, 0.072998046875, 0.0288848876953125, -0.029052734375, 0.0256500244140625, 0.04266357421875, -0.0191802978515625, 0.05438232421875, 0.023681640625, -0.0167694091796875, 0.0136871337890625, 0.0027828216552734375, -0.055633544921875, 
0.0670166015625, 0.029144287109375, 0.00007742643356323242, 0.018890380859375, 0.00811004638671875, -0.0004818439483642578, -0.0002655982971191406, -0.0168914794921875, 0.024261474609375, 0.00811004638671875, -0.0416259765625, 0.07281494140625, 0.0059051513671875, 0.08013916015625, -0.06011962890625, 0.000021219253540039062, 0.01499176025390625, 0.025634765625, -0.0278167724609375, -0.04833984375, 0.0271453857421875, -0.00012624263763427734, -0.003570556640625, 0.007228851318359375, 0.036468505859375, -0.055419921875, -0.040985107421875, 0.0247650146484375, -0.0084686279296875, 0.0258026123046875, 0.00933074951171875, -0.045074462890625, 0.0215301513671875, 0.0261077880859375, -0.02392578125, -0.001964569091796875, 0.004665374755859375, 0.02728271484375, 0.03778076171875, 0.040191650390625, 0.039825439453125, 0.0177001953125, 0.007808685302734375, 0.03900146484375, -0.03985595703125, -0.028533935546875, -0.057342529296875, 0.024200439453125, -0.0029277801513671875, -0.03369140625, 0.05487060546875, 0.05609130859375, 0.06787109375, -0.01287078857421875, 0.056427001953125, -0.02081298828125, 0.04351806640625, -0.047454833984375, 0.0716552734375, -0.0295867919921875, 0.0135498046875, -0.015960693359375, -0.042449951171875, 0.00151824951171875, 0.062286376953125, -0.0189208984375, 0.016387939453125, 0.03973388671875, 0.07513427734375, -0.003932952880859375, -0.01050567626953125, 0.009002685546875, 0.0284576416015625, 0.01242828369140625, 0.0592041015625, 0.0276641845703125, -0.0743408203125, 0.06695556640625, -0.02313232421875, -0.007293701171875, -0.0010585784912109375, -0.014129638671875, -0.048828125, -0.06353759765625, -0.037261962890625, -0.035858154296875, -0.0202484130859375, 0.0653076171875, 0.05108642578125, -0.07708740234375, -0.0306549072265625, 0.00904083251953125, 0.0097503662109375, -0.007595062255859375, -0.020599365234375, 0.058563232421875, 0.005252838134765625, -0.0799560546875, 0.033905029296875, -0.01522064208984375, 0.003856658935546875, 
0.01103973388671875, -0.00989532470703125, -0.049102783203125, -0.0003750324249267578, 0.018585205078125, 0.03131103515625, -0.057586669921875, 0.00040793418884277344, -0.013397216796875, -0.007541656494140625, 0.010955810546875, 0.02874755859375, -0.04766845703125, 0.041717529296875, 0.035797119140625, 0.0054931640625, 0.040985107421875, -0.016387939453125, 0.0229949951171875, -0.055908203125, 0.040557861328125, 0.006351470947265625, 0.0288238525390625, 0.03753662109375, -0.0246124267578125, 0.0299224853515625, 0.014801025390625, -0.04119873046875, -0.07379150390625, -0.0124053955078125, -0.0927734375, -0.0031108856201171875, 0.1025390625, 0.00913238525390625, -0.0100555419921875, -0.002292633056640625, -0.04638671875, 0.07391357421875, -0.030914306640625, 0.0423583984375, 0.03314208984375, -0.016937255859375, 0.0075836181640625, -0.04742431640625, 0.045745849609375, 0.029083251953125, -0.0311431884765625, -0.0027980804443359375, 0.00605010986328125, 0.04388427734375, 0.021728515625, 0.060211181640625, -0.005596160888671875, 0.0137939453125, 0.0238494873046875, 0.0200347900390625, -0.0125885009765625, 0.0032329559326171875, -0.04229736328125, -0.00936126708984375, -0.007282257080078125, -0.0635986328125 ] ]
nitrosocke/redshift-diffusion-768
2022-12-03T19:45:08.000Z
[ "diffusers", "stable-diffusion", "text-to-image", "en", "license:openrail++", "has_space", "diffusers:StableDiffusionPipeline", "region:us" ]
text-to-image
nitrosocke
null
null
nitrosocke/redshift-diffusion-768
141
8,818
diffusers
2022-11-27T13:46:46
--- license: openrail++ language: - en tags: - stable-diffusion - text-to-image - diffusers thumbnail: "https://huggingface.co/nitrosocke/redshift-diffusion-768/resolve/main/images/redshift-diffusion-768-thumbnail.jpg" inference: false --- ### Future Diffusion This is the fine-tuned Stable Diffusion 2.0 model trained on high quality 3D images with a 768x768 pixel resolution. Use the tokens `redshift style` in your prompts for the effect. Trained on Stability.ai's [Stable Diffusion 2.0](https://huggingface.co/stabilityai/stable-diffusion-2) with 768x768 resolution. **If you enjoy my work and want to test new models before release, please consider supporting me** [![Become A Patreon](https://badgen.net/badge/become/a%20patron/F96854)](https://patreon.com/user?u=79196446) - **The weights are now available! You can download them here: [redshift-diffusion-768.ckpt](https://huggingface.co/nitrosocke/redshift-diffusion-768/resolve/main/redshift-diffusion-768.ckpt)** - **You can try out the model online here: [Diffusion Space Demo](https://huggingface.co/spaces/nitrosocke/Diffusion_Space)** - **or try out this model with my local Diffusers based [Gradio WebUI](https://github.com/nitrosocke/diffusers-webui)** **Characters rendered with the model:** ![Characters Samples](https://huggingface.co/nitrosocke/redshift-diffusion-768/resolve/main/images/redshift-diffusion-768-samples01s.jpg) **Cars and Animals rendered with the model:** ![Misc. 
Samples](https://huggingface.co/nitrosocke/redshift-diffusion-768/resolve/main/images/redshift-diffusion-768-samples02s.jpg) **Landscapes rendered with the model:** ![Landscape 1](https://huggingface.co/nitrosocke/redshift-diffusion-768/resolve/main/images/redshift-diffusion-768-samples03s.jpg) ![Landscape 2](https://huggingface.co/nitrosocke/redshift-diffusion-768/resolve/main/images/redshift-diffusion-768-samples04s.jpg) #### Prompt and settings for the Characters: **redshift style portrait black female cyberpunk hacker tattoos colorful short hair wearing a crop top redshift style Negative Prompt: mutated body double head bad anatomy long face long neck long body text watermark signature** _Steps: 20, Sampler: Euler a, CFG scale: 7, Size: 768x1024_ #### Prompt and settings for the Landscapes: **redshift style beautiful fjord at sunrise Negative Prompt: fog blurry soft** _Steps: 20, Sampler: Euler a, CFG scale: 7, Size: 1536x768_ This model was trained using the diffusers based dreambooth training by ShivamShrirao using prior-preservation loss and the _train-text-encoder_ flag in 7.500 steps. ## License This model is open access and available to all, with a CreativeML Open RAIL++-M License further specifying rights and usage. [Please read the full license here](https://huggingface.co/stabilityai/stable-diffusion-2/blob/main/LICENSE-MODEL)
2,840
[ [ -0.044921875, -0.06451416015625, 0.034820556640625, 0.023773193359375, -0.01433563232421875, -0.011993408203125, 0.0031890869140625, -0.057159423828125, 0.03826904296875, 0.044403076171875, -0.059417724609375, -0.03826904296875, -0.050628662109375, -0.00925445556640625, -0.022308349609375, 0.06689453125, 0.00821685791015625, -0.000056862831115722656, -0.0035610198974609375, 0.01091766357421875, -0.020782470703125, 0.0083465576171875, -0.06488037109375, -0.038909912109375, 0.0268096923828125, -0.0017957687377929688, 0.05548095703125, 0.01904296875, 0.0248870849609375, 0.02252197265625, -0.046478271484375, -0.02667236328125, -0.046142578125, 0.00197601318359375, 0.004871368408203125, -0.019012451171875, -0.040771484375, 0.00017273426055908203, 0.0546875, 0.02215576171875, -0.0282745361328125, 0.0032672882080078125, -0.0163726806640625, 0.057647705078125, -0.0277862548828125, -0.0086822509765625, -0.00635528564453125, 0.0140533447265625, -0.0161285400390625, 0.039459228515625, -0.006103515625, -0.0203094482421875, 0.010711669921875, -0.06256103515625, 0.0268096923828125, -0.0046539306640625, 0.07879638671875, 0.0167388916015625, -0.0230255126953125, 0.0206451416015625, -0.040008544921875, 0.05047607421875, -0.0445556640625, 0.04180908203125, 0.01369476318359375, 0.03167724609375, 0.004726409912109375, -0.058135986328125, -0.05157470703125, 0.008453369140625, 0.0017251968383789062, 0.04718017578125, -0.0265045166015625, -0.01399993896484375, 0.0118408203125, 0.0242767333984375, -0.05682373046875, -0.00351715087890625, -0.0494384765625, -0.0098419189453125, 0.03765869140625, 0.0162200927734375, 0.01351165771484375, 0.00787353515625, -0.043060302734375, -0.0281219482421875, -0.03533935546875, -0.0080108642578125, 0.0269927978515625, -0.003726959228515625, -0.055877685546875, 0.02691650390625, -0.0037136077880859375, 0.04949951171875, 0.01959228515625, 0.01311492919921875, 0.0328369140625, -0.01558685302734375, -0.0220489501953125, -0.03338623046875, 0.0643310546875, 
0.051361083984375, -0.00960540771484375, 0.0182342529296875, -0.00702667236328125, -0.0092620849609375, 0.0166015625, -0.08197021484375, -0.0367431640625, 0.033721923828125, -0.049957275390625, -0.0256805419921875, -0.024688720703125, -0.0859375, -0.02978515625, 0.0196990966796875, 0.0203094482421875, -0.031219482421875, -0.0771484375, 0.0230255126953125, -0.052093505859375, 0.00986480712890625, 0.03802490234375, -0.053741455078125, 0.0046539306640625, 0.006103515625, 0.0802001953125, -0.00815582275390625, 0.0024623870849609375, 0.0278472900390625, 0.00897216796875, -0.01561737060546875, 0.059051513671875, -0.035064697265625, -0.055267333984375, -0.0146942138671875, 0.01593017578125, 0.0128326416015625, -0.031707763671875, 0.04022216796875, -0.046142578125, 0.033416748046875, -0.00417327880859375, -0.0253753662109375, -0.03485107421875, 0.019195556640625, -0.050445556640625, 0.05499267578125, 0.0284881591796875, -0.055908203125, 0.00768280029296875, -0.0909423828125, -0.0005927085876464844, 0.01361083984375, 0.024932861328125, -0.036041259765625, -0.0139312744140625, -0.026092529296875, 0.0257415771484375, 0.00722503662109375, -0.01064300537109375, -0.03662109375, -0.00730133056640625, -0.0184173583984375, -0.0195465087890625, 0.1044921875, 0.0196075439453125, -0.027374267578125, 0.0008106231689453125, -0.0556640625, -0.0070037841796875, 0.0255584716796875, -0.0158538818359375, 0.0026912689208984375, -0.023712158203125, 0.040252685546875, 0.018035888671875, 0.02008056640625, -0.03948974609375, -0.00406646728515625, -0.0207977294921875, 0.018310546875, 0.0421142578125, 0.011993408203125, 0.031280517578125, -0.0220794677734375, 0.0474853515625, 0.021759033203125, 0.0106048583984375, 0.0210723876953125, -0.06524658203125, -0.046539306640625, -0.0182952880859375, 0.0059356689453125, 0.03045654296875, -0.030181884765625, 0.0298004150390625, 0.00868988037109375, -0.064453125, -0.01351165771484375, -0.02813720703125, 0.00765228271484375, 0.0537109375, 0.032867431640625, 
-0.0222930908203125, -0.02044677734375, -0.06048583984375, 0.0263671875, -0.00820159912109375, -0.008758544921875, 0.005031585693359375, 0.04302978515625, -0.05621337890625, 0.0506591796875, -0.046722412109375, -0.0175018310546875, -0.001049041748046875, 0.00472259521484375, 0.0236968994140625, 0.051727294921875, 0.0587158203125, -0.0606689453125, -0.03302001953125, -0.0084686279296875, -0.048431396484375, 0.0031185150146484375, 0.017059326171875, -0.0273895263671875, 0.01457977294921875, 0.0241546630859375, -0.0677490234375, 0.0070343017578125, 0.06536865234375, -0.06640625, 0.0477294921875, -0.036285400390625, -0.0001493692398071289, -0.0802001953125, 0.01433563232421875, 0.0352783203125, -0.034576416015625, -0.05426025390625, 0.0279541015625, -0.004123687744140625, -0.0037593841552734375, -0.057708740234375, 0.0703125, -0.03436279296875, 0.035247802734375, -0.0119781494140625, 0.0007410049438476562, 0.0036220550537109375, 0.0224456787109375, 0.0283050537109375, 0.03216552734375, 0.0526123046875, -0.04046630859375, 0.003803253173828125, 0.03314208984375, -0.0233306884765625, 0.05609130859375, -0.061920166015625, -0.007366180419921875, -0.019866943359375, 0.0288543701171875, -0.07501220703125, -0.01505279541015625, 0.03515625, -0.0229339599609375, 0.0202178955078125, -0.00423431396484375, -0.031890869140625, -0.030731201171875, -0.01152801513671875, 0.024932861328125, 0.08306884765625, -0.030426025390625, 0.04296875, 0.0222930908203125, 0.01271820068359375, -0.021209716796875, -0.06793212890625, -0.01505279541015625, -0.04052734375, -0.05426025390625, 0.044158935546875, -0.0231170654296875, -0.0300750732421875, -0.004184722900390625, 0.0418701171875, -0.01181793212890625, 0.00505828857421875, 0.0296173095703125, 0.01031494140625, 0.0024662017822265625, -0.02301025390625, 0.0275726318359375, -0.0114288330078125, 0.0030231475830078125, -0.017578125, 0.034698486328125, -0.0124969482421875, -0.0006837844848632812, -0.06536865234375, 0.021087646484375, 0.0640869140625, 
0.0173187255859375, 0.0625, 0.073486328125, -0.03253173828125, 0.021453857421875, -0.0223846435546875, -0.0167236328125, -0.036895751953125, 0.0241241455078125, -0.0038394927978515625, -0.0537109375, 0.054107666015625, 0.00441741943359375, 0.02947998046875, 0.065673828125, 0.044830322265625, -0.037384033203125, 0.06524658203125, 0.053985595703125, 0.01557159423828125, 0.046539306640625, -0.0601806640625, -0.0147247314453125, -0.06866455078125, -0.0394287109375, -0.0118408203125, -0.0433349609375, -0.021484375, -0.0411376953125, 0.033905029296875, 0.0215301513671875, -0.053802490234375, 0.0075836181640625, -0.032562255859375, 0.041717529296875, 0.0147247314453125, 0.019287109375, 0.006427764892578125, 0.0081939697265625, -0.00021469593048095703, -0.0024929046630859375, -0.0355224609375, -0.015716552734375, 0.039825439453125, 0.044921875, 0.031494140625, 0.029998779296875, 0.040679931640625, 0.03326416015625, 0.01142120361328125, -0.019012451171875, 0.037567138671875, -0.00597381591796875, -0.0660400390625, -0.011993408203125, -0.01369476318359375, -0.0718994140625, 0.02960205078125, -0.044769287109375, -0.03240966796875, 0.0178070068359375, 0.018585205078125, -0.030426025390625, 0.026641845703125, -0.054412841796875, 0.0628662109375, 0.006458282470703125, -0.036590576171875, -0.0296630859375, -0.06396484375, 0.033416748046875, 0.0197906494140625, 0.005123138427734375, -0.0030002593994140625, -0.00421142578125, 0.049163818359375, -0.024261474609375, 0.07177734375, -0.041473388671875, -0.00933074951171875, 0.01172637939453125, 0.01202392578125, 0.02587890625, 0.003345489501953125, -0.0091552734375, 0.045654296875, 0.002155303955078125, -0.041351318359375, -0.0114898681640625, 0.040008544921875, -0.055908203125, -0.02178955078125, -0.021240234375, -0.0166015625, 0.0194854736328125, 0.03009033203125, 0.0548095703125, -0.0010747909545898438, -0.028106689453125, -0.0125274658203125, 0.0628662109375, -0.0021038055419921875, 0.048553466796875, 0.023681640625, 
-0.024078369140625, -0.039825439453125, 0.04486083984375, -0.01335906982421875, 0.038665771484375, 0.0017194747924804688, 0.025146484375, -0.039825439453125, -0.04730224609375, -0.051116943359375, 0.0345458984375, -0.033203125, -0.01033782958984375, -0.057525634765625, -0.013427734375, -0.0123748779296875, -0.022552490234375, -0.0296173095703125, -0.036285400390625, -0.07061767578125, 0.0004391670227050781, 0.05291748046875, 0.060028076171875, -0.01345062255859375, 0.030975341796875, -0.03436279296875, 0.01027679443359375, -0.0009756088256835938, 0.027008056640625, 0.006435394287109375, -0.048248291015625, -0.0107269287109375, 0.01044464111328125, -0.0219268798828125, -0.06146240234375, 0.043243408203125, -0.0005064010620117188, 0.0202789306640625, 0.041961669921875, -0.0287322998046875, 0.055023193359375, -0.040618896484375, 0.09088134765625, 0.051116943359375, -0.038330078125, 0.036102294921875, -0.056396484375, 0.038421630859375, 0.046722412109375, 0.027587890625, -0.03802490234375, -0.04351806640625, -0.065673828125, -0.061431884765625, 0.03277587890625, 0.01381683349609375, 0.023193359375, 0.005512237548828125, 0.048370361328125, 0.00402069091796875, -0.00045490264892578125, -0.0775146484375, -0.033355712890625, -0.0226593017578125, 0.00498199462890625, -0.000354766845703125, -0.00980377197265625, -0.0076141357421875, -0.0255584716796875, 0.06512451171875, 0.005680084228515625, 0.043243408203125, 0.0086822509765625, 0.027862548828125, -0.021514892578125, -0.02728271484375, 0.054351806640625, 0.01375579833984375, -0.0178985595703125, -0.0185089111328125, -0.0019283294677734375, -0.03857421875, 0.00443267822265625, -0.0030059814453125, -0.03082275390625, -0.0013523101806640625, 0.0122528076171875, 0.04168701171875, -0.01910400390625, -0.0232391357421875, 0.034210205078125, -0.01457977294921875, -0.041168212890625, -0.02764892578125, 0.0287933349609375, 0.0229644775390625, 0.057647705078125, 0.0032100677490234375, 0.04107666015625, 0.0208282470703125, 
0.00018298625946044922, -0.0102386474609375, 0.051116943359375, -0.035125732421875, -0.0245208740234375, 0.08331298828125, 0.0157928466796875, -0.0279998779296875, 0.054962158203125, -0.029266357421875, -0.0207977294921875, 0.04290771484375, 0.0587158203125, 0.06732177734375, -0.01302337646484375, 0.02655029296875, 0.051666259765625, -0.003993988037109375, -0.0316162109375, 0.018035888671875, 0.00606536865234375, -0.041473388671875, 0.004085540771484375, -0.046112060546875, -0.01904296875, 0.0029048919677734375, -0.024932861328125, 0.032470703125, -0.04046630859375, -0.02337646484375, -0.037384033203125, -0.006122589111328125, -0.0274810791015625, 0.021881103515625, -0.0011730194091796875, 0.08013916015625, -0.07275390625, 0.046112060546875, 0.03466796875, -0.0165557861328125, -0.02093505859375, 0.005794525146484375, -0.0014066696166992188, -0.03582763671875, 0.0305023193359375, 0.0015888214111328125, -0.01519012451171875, 0.007274627685546875, -0.042755126953125, -0.0673828125, 0.0950927734375, 0.0193939208984375, -0.04541015625, 0.005603790283203125, -0.037872314453125, 0.05218505859375, -0.01244354248046875, 0.0307159423828125, 0.030731201171875, 0.039703369140625, 0.0140533447265625, -0.0305938720703125, -0.00864410400390625, -0.0302886962890625, 0.012420654296875, 0.006458282470703125, -0.08538818359375, 0.055908203125, -0.0238037109375, -0.01215362548828125, 0.0281219482421875, 0.067138671875, 0.034759521484375, 0.0029315948486328125, 0.047607421875, 0.07293701171875, 0.0560302734375, -0.004352569580078125, 0.0814208984375, -0.0021209716796875, 0.032257080078125, 0.044677734375, 0.0081787109375, 0.06158447265625, 0.0298919677734375, -0.021209716796875, 0.06622314453125, 0.0670166015625, -0.006793975830078125, 0.059326171875, -0.003971099853515625, -0.02178955078125, -0.005474090576171875, 0.01222991943359375, -0.046142578125, -0.008392333984375, 0.018829345703125, -0.0202484130859375, -0.0118865966796875, 0.007007598876953125, -0.004467010498046875, 
-0.0122528076171875, -0.0073394775390625, 0.033905029296875, -0.01490020751953125, -0.0154266357421875, 0.04705810546875, -0.0003552436828613281, 0.072509765625, -0.0460205078125, -0.01727294921875, -0.0184783935546875, -0.00240325927734375, -0.0167999267578125, -0.0723876953125, 0.0262908935546875, -0.01275634765625, -0.0192413330078125, -0.041168212890625, 0.0196380615234375, -0.02801513671875, -0.05572509765625, 0.034210205078125, 0.00922393798828125, 0.02838134765625, 0.0184173583984375, -0.07916259765625, 0.006877899169921875, 0.010162353515625, -0.0233612060546875, 0.007232666015625, -0.0012140274047851562, 0.02734375, 0.032867431640625, 0.0234527587890625, 0.010040283203125, -0.00608062744140625, -0.007266998291015625, 0.036163330078125, -0.03472900390625, -0.038970947265625, -0.0635986328125, 0.06158447265625, -0.00714111328125, -0.0312347412109375, 0.043853759765625, 0.04736328125, 0.049591064453125, -0.022979736328125, 0.039276123046875, -0.01157379150390625, 0.026763916015625, -0.037750244140625, 0.08258056640625, -0.0732421875, -0.003208160400390625, -0.0200347900390625, -0.0670166015625, -0.0230865478515625, 0.07000732421875, 0.004550933837890625, 0.03448486328125, 0.0196990966796875, 0.054962158203125, -0.003177642822265625, 0.001682281494140625, 0.0167388916015625, 0.017364501953125, 0.0233306884765625, 0.027496337890625, 0.047943115234375, -0.038330078125, -0.005157470703125, -0.01428985595703125, -0.0158538818359375, -0.0085601806640625, -0.0582275390625, -0.070556640625, -0.04583740234375, -0.045440673828125, -0.051483154296875, -0.01221466064453125, 0.0288848876953125, 0.08587646484375, -0.032867431640625, -0.00766754150390625, -0.0352783203125, 0.002834320068359375, -0.00684356689453125, -0.019378662109375, 0.0010557174682617188, 0.031402587890625, -0.06549072265625, 0.0166473388671875, 0.0174713134765625, 0.070556640625, -0.035125732421875, -0.01763916015625, -0.00769805908203125, -0.032470703125, 0.034027099609375, 0.0080413818359375, 
-0.0455322265625, 0.00606536865234375, -0.0140838623046875, 0.0194244384765625, 0.0129241943359375, 0.0208587646484375, -0.05157470703125, 0.02056884765625, 0.0300750732421875, 0.00476837158203125, 0.06683349609375, -0.01090240478515625, 0.014801025390625, -0.031524658203125, 0.0153656005859375, 0.023681640625, 0.0248870849609375, 0.011138916015625, -0.048797607421875, 0.034210205078125, 0.0300750732421875, -0.04254150390625, -0.038543701171875, 0.0246124267578125, -0.08807373046875, -0.039398193359375, 0.08856201171875, 0.01346588134765625, -0.01910400390625, 0.0012111663818359375, -0.01514434814453125, -0.00836181640625, -0.0313720703125, 0.039093017578125, 0.041290283203125, -0.0298309326171875, -0.0260467529296875, -0.0277557373046875, 0.033355712890625, 0.018157958984375, -0.05023193359375, 0.008819580078125, 0.06805419921875, 0.0343017578125, 0.042938232421875, 0.057281494140625, -0.0275726318359375, 0.0159149169921875, 0.0014066696166992188, 0.002613067626953125, 0.01287078857421875, -0.01422119140625, -0.05499267578125, 0.0201416015625, -0.020050048828125, 0.0012960433959960938 ] ]
akreal/tiny-random-t5
2021-08-18T15:08:13.000Z
[ "transformers", "pytorch", "tf", "t5", "endpoints_compatible", "text-generation-inference", "region:us" ]
null
akreal
null
null
akreal/tiny-random-t5
0
8,806
transformers
2022-03-02T23:29:05
This is a copy of: https://huggingface.co/hf-internal-testing/tiny-random-t5 Changes: use old format for `pytorch_model.bin`.
127
[ [ -0.0099639892578125, -0.059295654296875, 0.003177642822265625, 0.034088134765625, -0.02685546875, -0.01788330078125, 0.002109527587890625, -0.01666259765625, 0.0283050537109375, 0.028076171875, -0.045135498046875, -0.0303802490234375, -0.01053619384765625, 0.017730712890625, -0.042572021484375, 0.081787109375, -0.00641632080078125, 0.004425048828125, -0.007465362548828125, -0.0281829833984375, 0.00714111328125, -0.0169677734375, -0.07891845703125, -0.031585693359375, 0.0562744140625, 0.035247802734375, 0.046173095703125, 0.0369873046875, 0.0718994140625, 0.0018587112426757812, -0.011932373046875, -0.0570068359375, -0.023773193359375, -0.0287628173828125, -0.0015659332275390625, -0.007320404052734375, -0.0511474609375, -0.011474609375, 0.06463623046875, 0.0418701171875, -0.0295562744140625, 0.0299224853515625, -0.01274871826171875, 0.0335693359375, -0.04364013671875, 0.0103759765625, -0.019439697265625, 0.026275634765625, 0.0051116943359375, 0.0104522705078125, -0.0283050537109375, -0.0258941650390625, -0.008209228515625, -0.0191802978515625, 0.04998779296875, 0.0229034423828125, 0.066162109375, 0.0249786376953125, -0.038726806640625, 0.01511383056640625, -0.0299224853515625, 0.01617431640625, -0.046478271484375, 0.019683837890625, 0.017852783203125, 0.04315185546875, -0.0299224853515625, -0.08697509765625, -0.031890869140625, -0.03131103515625, 0.0282745361328125, -0.031585693359375, -0.025115966796875, 0.00756072998046875, 0.052337646484375, 0.032928466796875, -0.03192138671875, -0.009735107421875, -0.0513916015625, -0.03338623046875, 0.0450439453125, 0.00450897216796875, 0.020599365234375, -0.01495361328125, -0.019439697265625, -0.037200927734375, -0.06646728515625, -0.014801025390625, 0.0243682861328125, 0.0131683349609375, -0.059295654296875, 0.046173095703125, -0.012847900390625, 0.04205322265625, 0.019775390625, 0.0116729736328125, 0.060028076171875, -0.0184173583984375, -0.0225830078125, 0.02252197265625, 0.053192138671875, 0.041259765625, 
0.0215301513671875, 0.00250244140625, -0.00531005859375, -0.0227508544921875, 0.010955810546875, -0.09588623046875, -0.067626953125, 0.0168609619140625, -0.054107666015625, -0.060638427734375, 0.00946044921875, -0.035552978515625, -0.018768310546875, 0.020233154296875, 0.03094482421875, -0.035797119140625, -0.017974853515625, -0.00803375244140625, -0.0226287841796875, 0.01116943359375, 0.0195159912109375, -0.048583984375, 0.021514892578125, 0.0217437744140625, 0.044586181640625, 0.0108642578125, -0.031524658203125, -0.031494140625, -0.019775390625, -0.00688934326171875, 0.043060302734375, 0.01702880859375, -0.03082275390625, -0.00910186767578125, 0.025848388671875, 0.01776123046875, -0.042999267578125, 0.039642333984375, -0.0323486328125, 0.0164794921875, -0.0222015380859375, -0.01003265380859375, -0.003643035888671875, 0.005306243896484375, -0.05035400390625, 0.0899658203125, 0.033538818359375, -0.062255859375, 0.03900146484375, -0.0692138671875, -0.0161895751953125, -0.0220489501953125, 0.034515380859375, -0.06414794921875, 0.005462646484375, -0.021392822265625, -0.00025582313537597656, 0.021942138671875, -0.03326416015625, -0.0738525390625, -0.039794921875, 0.02783203125, -0.002773284912109375, 0.0740966796875, 0.0189208984375, -0.01401519775390625, 0.033111572265625, -0.061279296875, 0.00080108642578125, 0.023284912109375, 0.0084686279296875, 0.00439453125, -0.0224761962890625, 0.0282135009765625, 0.0280303955078125, 0.0132293701171875, -0.054595947265625, 0.0535888671875, 0.0005478858947753906, 0.033172607421875, 0.0399169921875, 0.0269775390625, 0.0423583984375, -0.041961669921875, 0.0262908935546875, 0.0173797607421875, 0.0284423828125, 0.0188446044921875, -0.04248046875, -0.03961181640625, -0.035186767578125, 0.034332275390625, 0.003421783447265625, -0.0199432373046875, 0.01454925537109375, 0.00860595703125, -0.038818359375, -0.0243072509765625, -0.01009368896484375, -0.0019350051879882812, 0.00920867919921875, 0.01157379150390625, -0.0338134765625, 
-0.05841064453125, -0.060089111328125, 0.0112762451171875, -0.0058135986328125, 0.00205230712890625, -0.0006270408630371094, 0.086669921875, -0.040283203125, 0.054595947265625, -0.03350830078125, -0.0109710693359375, -0.01311492919921875, 0.00403594970703125, 0.014678955078125, 0.04278564453125, 0.081787109375, -0.0187225341796875, -0.024932861328125, -0.033721923828125, -0.0182342529296875, -0.0243682861328125, 0.0250701904296875, -0.01428985595703125, 0.01617431640625, 0.0213623046875, -0.058685302734375, 0.0430908203125, 0.0338134765625, -0.064453125, 0.045928955078125, -0.0157623291015625, -0.00017845630645751953, -0.08551025390625, 0.0096893310546875, -0.007099151611328125, -0.03662109375, 0.0210113525390625, 0.02545166015625, 0.0384521484375, 0.0070343017578125, -0.053009033203125, 0.06573486328125, -0.02008056640625, -0.0193939208984375, -0.025665283203125, -0.0148468017578125, -0.0301055908203125, 0.0006270408630371094, -0.054168701171875, 0.05499267578125, 0.0276947021484375, -0.02191162109375, 0.0489501953125, 0.031341552734375, 0.004390716552734375, 0.0295562744140625, -0.05035400390625, 0.04046630859375, 0.0018529891967773438, 0.0312042236328125, -0.048675537109375, -0.05975341796875, 0.05853271484375, -0.0357666015625, 0.00823211669921875, -0.0169830322265625, -0.050537109375, -0.031585693359375, -0.0347900390625, 0.0328369140625, 0.08392333984375, -0.0343017578125, 0.019866943359375, 0.0038051605224609375, 0.014923095703125, -0.013275146484375, -0.043243408203125, -0.006763458251953125, -0.0204925537109375, -0.032135009765625, 0.0194854736328125, -0.0024356842041015625, -0.0309906005859375, 0.0030193328857421875, -0.009002685546875, -0.026153564453125, 0.0039215087890625, 0.00803375244140625, -0.0002949237823486328, -0.0213470458984375, -0.01007843017578125, -0.025634765625, 0.00579071044921875, 0.01508331298828125, -0.0138092041015625, 0.0266876220703125, -0.00948333740234375, -0.01442718505859375, -0.035675048828125, -0.00911712646484375, 
0.035675048828125, 0.012237548828125, 0.03643798828125, 0.0765380859375, -0.04107666015625, -0.01947021484375, -0.022705078125, -0.05023193359375, -0.0290069580078125, 0.01922607421875, -0.0223846435546875, -0.042877197265625, 0.04925537109375, 0.0281982421875, -0.020721435546875, 0.056854248046875, 0.033172607421875, -0.02789306640625, 0.060028076171875, 0.06414794921875, 0.0011377334594726562, 0.0538330078125, -0.002986907958984375, -0.00043582916259765625, -0.05816650390625, 0.008209228515625, -0.03973388671875, -0.0267486572265625, -0.04608154296875, -0.00868988037109375, 0.0223846435546875, 0.01015472412109375, -0.048065185546875, 0.02508544921875, -0.030242919921875, 0.0112762451171875, 0.04119873046875, 0.0175323486328125, 0.0216827392578125, 0.023162841796875, -0.00731658935546875, -0.00390625, -0.0484619140625, -0.0124664306640625, 0.084716796875, 0.0276947021484375, 0.056884765625, 0.0164031982421875, 0.043487548828125, 0.00582122802734375, 0.047454833984375, -0.031402587890625, 0.0188140869140625, 0.005645751953125, -0.058807373046875, -0.00015103816986083984, -0.032470703125, -0.06427001953125, 0.00885772705078125, -0.005035400390625, -0.07855224609375, -0.036773681640625, 0.02935791015625, -0.019989013671875, 0.03204345703125, -0.07073974609375, 0.09539794921875, 0.0229949951171875, -0.0181427001953125, -0.004749298095703125, -0.0301971435546875, 0.0196533203125, 0.015380859375, -0.00531005859375, 0.015380859375, 0.016326904296875, 0.053985595703125, -0.02978515625, 0.0609130859375, -0.00814056396484375, 0.0111846923828125, 0.0238494873046875, 0.0200958251953125, 0.038116455078125, -0.0026149749755859375, 0.00042748451232910156, -0.008056640625, 0.03326416015625, -0.048675537109375, -0.0236358642578125, 0.036865234375, -0.05731201171875, -0.00543975830078125, -0.057708740234375, -0.02825927734375, 0.0223236083984375, 0.02008056640625, 0.0138092041015625, 0.0175323486328125, -0.0277557373046875, 0.051300048828125, 0.044189453125, 0.00891876220703125, 
0.03094482421875, 0.020721435546875, -0.040374755859375, -0.0204620361328125, 0.024658203125, -0.035125732421875, 0.0158538818359375, 0.005519866943359375, 0.036285400390625, -0.011993408203125, -0.0347900390625, -0.0255889892578125, 0.01253509521484375, -0.03662109375, 0.0026607513427734375, -0.035186767578125, 0.0008292198181152344, -0.041351318359375, 0.0012750625610351562, -0.025390625, -0.03179931640625, -0.0214385986328125, 0.0009074211120605469, 0.060394287109375, 0.045928955078125, -0.027191162109375, 0.041259765625, -0.05718994140625, 0.012176513671875, -0.016876220703125, 0.032470703125, -0.02093505859375, -0.0548095703125, -0.0064544677734375, -0.00179290771484375, -0.03546142578125, -0.058197021484375, 0.0128326416015625, -0.011566162109375, 0.01361846923828125, 0.0007805824279785156, -0.01605224609375, 0.0161895751953125, -0.00856781005859375, 0.03594970703125, 0.0195770263671875, -0.037445068359375, 0.014068603515625, -0.049224853515625, 0.0489501953125, 0.037078857421875, -0.01074981689453125, -0.03082275390625, 0.00443267822265625, -0.09747314453125, -0.043701171875, 0.05426025390625, 0.03472900390625, -0.008209228515625, 0.0389404296875, 0.02984619140625, -0.007640838623046875, 0.0269775390625, -0.03839111328125, -0.0196990966796875, -0.004016876220703125, -0.048980712890625, -0.003509521484375, -0.02264404296875, -0.01544952392578125, -0.037200927734375, 0.030181884765625, -0.0137176513671875, 0.048858642578125, 0.0118865966796875, 0.0033016204833984375, -0.038116455078125, -0.0067291259765625, 0.04498291015625, 0.03302001953125, -0.0706787109375, 0.003856658935546875, 0.031524658203125, -0.064697265625, -0.00525665283203125, 0.01358795166015625, -0.0102691650390625, 0.006244659423828125, 0.0303802490234375, 0.055023193359375, 0.034271240234375, 0.003589630126953125, 0.025634765625, -0.01403045654296875, -0.0165863037109375, -0.052276611328125, 0.0200042724609375, -0.0015382766723632812, -0.0036830902099609375, 0.004283905029296875, 
0.032470703125, -0.00479888916015625, -0.042266845703125, 0.041107177734375, -0.0047607421875, -0.0285797119140625, -0.039031982421875, 0.04156494140625, 0.00789642333984375, -0.032135009765625, 0.052001953125, -0.056427001953125, -0.0228729248046875, 0.05731201171875, 0.0296478271484375, 0.0732421875, 0.00579833984375, -0.0019741058349609375, 0.037567138671875, 0.02423095703125, -0.0085906982421875, 0.04541015625, -0.01519012451171875, -0.03338623046875, -0.0038852691650390625, -0.03106689453125, -0.03704833984375, -0.0098876953125, -0.0283966064453125, 0.0311279296875, -0.05584716796875, -0.00836944580078125, -0.00911712646484375, 0.007396697998046875, -0.052215576171875, 0.0233154296875, -0.00997161865234375, 0.08544921875, -0.027984619140625, 0.07080078125, 0.07440185546875, -0.036956787109375, -0.06805419921875, -0.0031490325927734375, 0.01157379150390625, -0.0374755859375, 0.03228759765625, -0.0178375244140625, 0.026031494140625, 0.005401611328125, -0.06768798828125, -0.0667724609375, 0.07080078125, 0.018310546875, -0.020904541015625, -0.00909423828125, -0.0189666748046875, 0.029205322265625, -0.0225982666015625, 0.043304443359375, 0.0276336669921875, 0.016845703125, 0.018463134765625, -0.074462890625, 0.01506805419921875, -0.0164947509765625, -0.002410888671875, 0.006938934326171875, -0.055877685546875, 0.07232666015625, -0.025054931640625, 0.025848388671875, 0.0208587646484375, 0.042236328125, 0.048675537109375, 0.01336669921875, 0.028594970703125, 0.04156494140625, 0.037933349609375, -0.02789306640625, 0.0946044921875, -0.00005930662155151367, 0.06982421875, 0.076416015625, -0.0290069580078125, 0.049530029296875, 0.0596923828125, -0.00548553466796875, 0.0355224609375, 0.07305908203125, -0.031280517578125, 0.031951904296875, 0.0129241943359375, -0.001613616943359375, 0.0157623291015625, 0.01464080810546875, -0.041351318359375, 0.023773193359375, -0.0119171142578125, -0.040130615234375, -0.0270843505859375, -0.0168914794921875, 0.02117919921875, 
-0.05828857421875, -0.01512908935546875, 0.0221710205078125, 0.0196990966796875, -0.0135040283203125, 0.0181884765625, 0.01384735107421875, 0.047576904296875, -0.0263519287109375, 0.0010290145874023438, 0.00370025634765625, 0.05535888671875, -0.006137847900390625, -0.042877197265625, 0.040313720703125, -0.017822265625, -0.0028533935546875, -0.023162841796875, 0.07257080078125, -0.01009368896484375, -0.028839111328125, 0.00804901123046875, 0.01262664794921875, 0.01262664794921875, -0.0189361572265625, -0.05804443359375, 0.0295867919921875, 0.0034122467041015625, -0.026458740234375, 0.0219573974609375, 0.01071929931640625, -0.00351715087890625, 0.041839599609375, 0.0301666259765625, -0.00827789306640625, -0.0032291412353515625, 0.02655029296875, 0.0159149169921875, -0.06982421875, -0.0543212890625, -0.04437255859375, 0.0352783203125, -0.01702880859375, -0.07025146484375, 0.042724609375, 0.0325927734375, 0.07354736328125, -0.035125732421875, 0.031524658203125, -0.0308074951171875, -0.002227783203125, -0.0220489501953125, 0.0628662109375, -0.035797119140625, -0.04833984375, -0.0222320556640625, -0.064453125, -0.0152740478515625, 0.061981201171875, 0.006267547607421875, 0.0340576171875, 0.09735107421875, 0.0574951171875, -0.03131103515625, 0.01139068603515625, -0.00010985136032104492, 0.05035400390625, 0.003574371337890625, 0.02423095703125, 0.0430908203125, -0.060882568359375, 0.0284271240234375, -0.05816650390625, -0.006618499755859375, -0.02813720703125, -0.07293701171875, -0.07568359375, -0.03277587890625, -0.050872802734375, -0.06097412109375, -0.00548553466796875, 0.0997314453125, 0.05731201171875, -0.052398681640625, -0.0079345703125, -0.0110931396484375, -0.004932403564453125, -0.00627899169921875, -0.0184173583984375, 0.024658203125, 0.020843505859375, -0.032562255859375, -0.020904541015625, -0.017364501953125, 0.03656005859375, -0.0037631988525390625, 0.0245513916015625, 0.00714874267578125, -0.03326416015625, 0.0204010009765625, 0.01556396484375, 
-0.0207672119140625, -0.06365966796875, -0.005115509033203125, -0.0311431884765625, 0.007076263427734375, 0.0309906005859375, -0.0180206298828125, -0.007366180419921875, 0.039154052734375, 0.0044708251953125, 0.04425048828125, -0.02581787109375, 0.033233642578125, -0.0428466796875, 0.040435791015625, 0.00713348388671875, 0.05438232421875, 0.0325927734375, -0.0167236328125, 0.0267333984375, 0.051300048828125, -0.0128326416015625, -0.0633544921875, -0.002109527587890625, -0.0936279296875, 0.0113677978515625, 0.09600830078125, 0.0299835205078125, -0.06341552734375, 0.030181884765625, -0.049591064453125, 0.04638671875, -0.049774169921875, 0.07574462890625, 0.06048583984375, 0.01727294921875, -0.039642333984375, -0.03375244140625, 0.0390625, 0.017181396484375, -0.038787841796875, -0.0362548828125, 0.0128326416015625, 0.05999755859375, -0.01153564453125, 0.0438232421875, -0.0214385986328125, 0.041656494140625, 0.021087646484375, 0.043365478515625, -0.01383209228515625, -0.00820159912109375, -0.038787841796875, 0.0148468017578125, 0.00661468505859375, -0.0254974365234375 ] ]
sbcBI/sentiment_analysis
2022-04-22T06:42:07.000Z
[ "transformers", "pytorch", "distilbert", "text-classification", "exbert", "en", "dataset:Confidential", "arxiv:1810.04805", "license:apache-2.0", "endpoints_compatible", "has_space", "region:us" ]
text-classification
sbcBI
null
null
sbcBI/sentiment_analysis
5
8,801
transformers
2022-04-22T06:31:09
--- language: en tags: - exbert license: apache-2.0 datasets: - Confidential --- # BERT base model (uncased) Pretrained model on English language using a masked language modeling (MLM) objective. It was introduced in [this paper](https://arxiv.org/abs/1810.04805) and first released in [this repository](https://github.com/google-research/bert). This model is uncased: it does not make a difference between english and English. ## Model description BERT is a transformers model pretrained on a large corpus of English data in a self-supervised fashion. This means it was pretrained on the raw texts only, with no humans labelling them in any way (which is why it can use lots of publicly available data) with an automatic process to generate inputs and labels from those texts. More precisely, it was pretrained with two objectives: - Masked language modeling (MLM): taking a sentence, the model randomly masks 15% of the words in the input then run the entire masked sentence through the model and has to predict the masked words. This is different from traditional recurrent neural networks (RNNs) that usually see the words one after the other, or from autoregressive models like GPT which internally mask the future tokens. It allows the model to learn a bidirectional representation of the sentence. - Next sentence prediction (NSP): the models concatenates two masked sentences as inputs during pretraining. Sometimes they correspond to sentences that were next to each other in the original text, sometimes not. The model then has to predict if the two sentences were following each other or not. This way, the model learns an inner representation of the English language that can then be used to extract features useful for downstream tasks: if you have a dataset of labeled sentences for instance, you can train a standard classifier using the features produced by the BERT model as inputs. 
## Model description [sbcBI/sentiment_analysis] This is a fine-tuned downstream version of the bert-base-uncased model for sentiment analysis, this model is not intended for further downstream fine-tuning for any other tasks. This model is trained on a classified dataset for text-classification.
2,215
[ [ -0.0298614501953125, -0.04779052734375, 0.01396942138671875, 0.02398681640625, -0.025299072265625, 0.004131317138671875, -0.024505615234375, -0.036651611328125, 0.0167083740234375, 0.05706787109375, -0.0579833984375, -0.02490234375, -0.04669189453125, -0.0017194747924804688, -0.023712158203125, 0.099609375, 0.01110076904296875, 0.01328277587890625, -0.0123748779296875, 0.023468017578125, -0.032012939453125, -0.069580078125, -0.027130126953125, -0.034759521484375, 0.02398681640625, 0.0020847320556640625, 0.037261962890625, 0.0168304443359375, 0.03704833984375, 0.0244293212890625, -0.00432586669921875, -0.00885009765625, -0.028564453125, -0.005260467529296875, 0.0025806427001953125, -0.0271453857421875, -0.0157470703125, 0.02166748046875, 0.042327880859375, 0.061309814453125, 0.01102447509765625, -0.00017189979553222656, 0.020111083984375, 0.049713134765625, -0.04217529296875, 0.01806640625, -0.052978515625, 0.009796142578125, -0.0059661865234375, 0.0016222000122070312, -0.037261962890625, -0.016326904296875, 0.036590576171875, 0.0018033981323242188, 0.0201263427734375, -0.00788116455078125, 0.0819091796875, 0.0035762786865234375, -0.0245208740234375, -0.0278167724609375, -0.044189453125, 0.0689697265625, -0.06414794921875, 0.0272979736328125, 0.0232696533203125, 0.004581451416015625, -0.007282257080078125, -0.06585693359375, -0.03436279296875, -0.0235443115234375, -0.0272064208984375, 0.0169219970703125, -0.0142364501953125, 0.0203094482421875, 0.002666473388671875, 0.0146026611328125, -0.042816162109375, 0.00417327880859375, -0.0361328125, -0.01361083984375, 0.044586181640625, -0.0234527587890625, 0.00859832763671875, -0.0247650146484375, -0.050567626953125, 0.009063720703125, -0.046539306640625, 0.016998291015625, 0.0291290283203125, 0.0360107421875, -0.004039764404296875, 0.042205810546875, 0.00800323486328125, 0.0467529296875, 0.01076507568359375, 0.002643585205078125, 0.031829833984375, -0.00026869773864746094, -0.0311431884765625, 0.01114654541015625, 
0.0516357421875, 0.00799560546875, 0.04498291015625, -0.021484375, -0.0307159423828125, -0.0016222000122070312, 0.0307464599609375, -0.052978515625, -0.01338958740234375, 0.00868988037109375, -0.047576904296875, -0.03118896484375, 0.0093994140625, -0.019744873046875, 0.01439666748046875, -0.017242431640625, 0.054718017578125, -0.025665283203125, -0.0038166046142578125, 0.0006909370422363281, 0.003559112548828125, 0.018402099609375, -0.00531768798828125, -0.0692138671875, 0.026947021484375, 0.046234130859375, 0.046539306640625, -0.020721435546875, -0.0138702392578125, 0.005767822265625, -0.00629425048828125, -0.0159912109375, 0.041900634765625, -0.0296173095703125, -0.0110626220703125, 0.015777587890625, 0.0231475830078125, -0.009490966796875, -0.01085662841796875, 0.033935546875, -0.045074462890625, 0.035247802734375, -0.0076751708984375, -0.050048828125, -0.030792236328125, 0.01006317138671875, -0.034271240234375, 0.084716796875, -0.0093231201171875, -0.05548095703125, 0.022491455078125, -0.048309326171875, -0.0411376953125, 0.0021457672119140625, 0.01187896728515625, -0.028106689453125, 0.01418304443359375, 0.0160369873046875, 0.050262451171875, 0.00861358642578125, 0.03753662109375, -0.0023059844970703125, -0.02838134765625, 0.0217437744140625, -0.03680419921875, 0.0745849609375, 0.0054473876953125, -0.034942626953125, 0.00014960765838623047, -0.0523681640625, 0.01617431640625, 0.000789642333984375, -0.020721435546875, -0.02337646484375, 0.010498046875, 0.00853729248046875, 0.0223541259765625, 0.034088134765625, -0.043914794921875, -0.002872467041015625, -0.046722412109375, 0.027008056640625, 0.06683349609375, -0.0088043212890625, 0.026885986328125, -0.01242828369140625, 0.02471923828125, -0.0089263916015625, 0.00836944580078125, -0.0197296142578125, -0.053863525390625, -0.0679931640625, -0.019317626953125, 0.0494384765625, 0.053253173828125, -0.026885986328125, 0.06939697265625, 0.004978179931640625, -0.02117919921875, -0.05499267578125, 0.005786895751953125, 
0.03656005859375, 0.032867431640625, 0.0250701904296875, -0.03570556640625, -0.0645751953125, -0.0877685546875, 0.00876617431640625, -0.00414276123046875, -0.0017175674438476562, -0.00074005126953125, 0.044219970703125, -0.0303802490234375, 0.07366943359375, -0.021942138671875, -0.02642822265625, -0.04144287109375, 0.0290374755859375, 0.018280029296875, 0.046417236328125, 0.038330078125, -0.046295166015625, -0.04315185546875, -0.0196990966796875, -0.04498291015625, -0.01361846923828125, -0.005779266357421875, -0.008697509765625, 0.0287628173828125, 0.0307769775390625, -0.0419921875, 0.039886474609375, 0.05474853515625, 0.0026988983154296875, 0.032257080078125, -0.017425537109375, -0.019256591796875, -0.0875244140625, -0.006195068359375, -0.016326904296875, -0.01568603515625, -0.054290771484375, -0.008087158203125, 0.0016498565673828125, -0.00931549072265625, -0.03314208984375, 0.024932861328125, -0.046173095703125, -0.00896453857421875, -0.0291595458984375, -0.01230621337890625, -0.0013113021850585938, 0.061798095703125, 0.031829833984375, 0.04742431640625, 0.046173095703125, -0.038116455078125, 0.0103912353515625, 0.01189422607421875, -0.036712646484375, -0.0035305023193359375, -0.060150146484375, 0.01358795166015625, -0.01438140869140625, 0.0094757080078125, -0.08782958984375, 0.0164031982421875, -0.00528717041015625, -0.048797607421875, 0.037353515625, 0.013885498046875, -0.02581787109375, -0.035614013671875, -0.016693115234375, 0.00439453125, 0.058868408203125, -0.030548095703125, 0.038787841796875, 0.0309600830078125, -0.021240234375, -0.0633544921875, -0.071533203125, -0.004428863525390625, 0.01374053955078125, -0.041290283203125, 0.0287933349609375, -0.009979248046875, -0.0030536651611328125, 0.010589599609375, 0.0016794204711914062, -0.019866943359375, 0.0012292861938476562, 0.024688720703125, 0.0279998779296875, -0.022125244140625, 0.01250457763671875, 0.0058441162109375, -0.0095672607421875, 0.01360321044921875, -0.011932373046875, 0.04315185546875, 
-0.01116180419921875, -0.0102691650390625, -0.026580810546875, 0.0421142578125, 0.0088348388671875, -0.0121917724609375, 0.06787109375, 0.04974365234375, -0.033905029296875, -0.0219573974609375, -0.051025390625, -0.022552490234375, -0.03631591796875, 0.031768798828125, -0.026611328125, -0.06884765625, 0.031402587890625, 0.02301025390625, 0.007503509521484375, 0.036285400390625, 0.05377197265625, -0.04107666015625, 0.0733642578125, 0.07666015625, -0.01259613037109375, 0.0267486572265625, -0.004810333251953125, 0.029571533203125, -0.04669189453125, -0.0238189697265625, -0.0295562744140625, -0.02642822265625, -0.044281005859375, -0.0081939697265625, -0.0021038055419921875, 0.01230621337890625, -0.006320953369140625, 0.034820556640625, -0.0455322265625, 0.033843994140625, 0.07025146484375, 0.006504058837890625, -0.00017154216766357422, 0.00910186767578125, -0.0227203369140625, -0.023590087890625, -0.040924072265625, -0.040008544921875, 0.0892333984375, 0.038055419921875, 0.050323486328125, -0.01256561279296875, 0.051910400390625, 0.03704833984375, 0.0237884521484375, -0.056488037109375, 0.04669189453125, -0.021697998046875, -0.07098388671875, -0.005741119384765625, -0.005268096923828125, -0.0775146484375, 0.01483154296875, -0.029327392578125, -0.051116943359375, 0.01012420654296875, -0.0021762847900390625, -0.004413604736328125, 0.0211029052734375, -0.06988525390625, 0.04962158203125, -0.0333251953125, 0.006954193115234375, 0.01201629638671875, -0.0687255859375, 0.0196380615234375, -0.014129638671875, -0.00589752197265625, 0.0090484619140625, 0.0205230712890625, 0.065673828125, -0.0289154052734375, 0.08575439453125, -0.0093994140625, 0.0085601806640625, 0.0155792236328125, -0.0170745849609375, 0.02618408203125, -0.029327392578125, 0.016571044921875, 0.0323486328125, -0.015106201171875, -0.026275634765625, -0.030731201171875, 0.031463623046875, -0.0675048828125, -0.043365478515625, -0.043731689453125, -0.036834716796875, -0.0225372314453125, 0.024169921875, 
0.035552978515625, 0.0170135498046875, -0.0199432373046875, 0.030792236328125, 0.05816650390625, -0.0254669189453125, 0.043914794921875, 0.041900634765625, -0.0096435546875, -0.01071929931640625, 0.0367431640625, 0.004032135009765625, 0.025299072265625, 0.0498046875, 0.002410888671875, -0.03167724609375, -0.04510498046875, 0.004848480224609375, 0.0199432373046875, -0.0467529296875, -0.00211334228515625, -0.06512451171875, -0.056640625, -0.053863525390625, -0.0218505859375, -0.024810791015625, -0.0248565673828125, -0.0157928466796875, -0.020965576171875, 0.00945281982421875, 0.054473876953125, -0.0213165283203125, 0.030364990234375, -0.04693603515625, 0.0204010009765625, 0.035491943359375, 0.01271820068359375, -0.01515960693359375, -0.042877197265625, -0.0189056396484375, 0.016082763671875, -0.0140533447265625, -0.065185546875, 0.040313720703125, 0.00435638427734375, 0.053497314453125, 0.035430908203125, -0.00495147705078125, 0.04022216796875, -0.05413818359375, 0.053680419921875, 0.016082763671875, -0.0743408203125, 0.044525146484375, -0.0009684562683105469, 0.0111083984375, 0.036834716796875, 0.03765869140625, -0.0206146240234375, -0.03289794921875, -0.04180908203125, -0.06927490234375, 0.048797607421875, 0.027008056640625, 0.033935546875, -0.006252288818359375, 0.00821685791015625, 0.01488494873046875, 0.040740966796875, -0.10040283203125, -0.0316162109375, -0.0235443115234375, -0.0122833251953125, -0.021453857421875, -0.048583984375, 0.008544921875, -0.0296478271484375, 0.0614013671875, 0.0231170654296875, 0.049346923828125, 0.0009832382202148438, -0.0211029052734375, 0.0120697021484375, 0.019683837890625, 0.04058837890625, 0.0262451171875, -0.053924560546875, -0.0038051605224609375, -0.002681732177734375, -0.051025390625, -0.01104736328125, 0.01385498046875, -0.0203857421875, 0.0289154052734375, 0.03546142578125, 0.07421875, 0.0171051025390625, -0.044921875, 0.04632568359375, 0.0197601318359375, -0.027099609375, -0.040069580078125, -0.01326751708984375, 
-0.0024566650390625, 0.01032257080078125, 0.0391845703125, 0.0006375312805175781, -0.002750396728515625, -0.05230712890625, 0.03045654296875, 0.04412841796875, -0.04241943359375, -0.01434326171875, 0.03680419921875, 0.018157958984375, -0.022216796875, 0.045806884765625, -0.0025634765625, -0.057037353515625, 0.04541015625, 0.057525634765625, 0.08148193359375, -0.0164947509765625, 0.0264129638671875, 0.00836944580078125, 0.059539794921875, 0.009124755859375, 0.01131439208984375, 0.0016727447509765625, -0.0748291015625, -0.038787841796875, -0.05279541015625, -0.0209197998046875, 0.03314208984375, -0.05706787109375, 0.004375457763671875, -0.02801513671875, -0.007843017578125, 0.0034465789794921875, 0.004100799560546875, -0.044586181640625, 0.042816162109375, 0.01465606689453125, 0.05804443359375, -0.07012939453125, 0.0762939453125, 0.03839111328125, -0.027587890625, -0.05792236328125, -0.00574493408203125, -0.022125244140625, -0.07879638671875, 0.0689697265625, 0.023406982421875, 0.00936126708984375, -0.0033969879150390625, -0.0484619140625, -0.057098388671875, 0.05224609375, 0.00905609130859375, -0.0307464599609375, -0.0186004638671875, 0.0167083740234375, 0.04693603515625, -0.0523681640625, 0.0193328857421875, 0.0159912109375, 0.0211029052734375, 0.00904083251953125, -0.0579833984375, -0.01947021484375, -0.025634765625, 0.0132598876953125, 0.003543853759765625, -0.01788330078125, 0.084716796875, -0.002811431884765625, -0.0027313232421875, 0.0009326934814453125, 0.042144775390625, -0.004009246826171875, -0.005565643310546875, 0.029022216796875, 0.04010009765625, 0.034820556640625, -0.0176544189453125, 0.07000732421875, -0.0181884765625, 0.0311737060546875, 0.087158203125, -0.018707275390625, 0.0740966796875, 0.0247344970703125, -0.00617218017578125, 0.054473876953125, 0.05548095703125, -0.01152801513671875, 0.07122802734375, 0.006877899169921875, -0.00333404541015625, -0.006072998046875, 0.0110931396484375, -0.0438232421875, 0.0333251953125, 0.01508331298828125, 
-0.053558349609375, -0.01361846923828125, 0.014801025390625, 0.004253387451171875, -0.00830078125, -0.029998779296875, 0.05377197265625, -0.002338409423828125, -0.044586181640625, 0.0311737060546875, 0.00785064697265625, 0.07037353515625, -0.07025146484375, 0.0136871337890625, -0.00923919677734375, 0.007221221923828125, -0.00531005859375, -0.048309326171875, 0.024658203125, 0.00196075439453125, -0.0304412841796875, -0.035888671875, 0.069091796875, -0.04376220703125, -0.055633544921875, 0.019866943359375, 0.02984619140625, 0.0233154296875, -0.00395965576171875, -0.072265625, -0.01715087890625, 0.007678985595703125, -0.031524658203125, 0.0203399658203125, 0.021881103515625, 0.0096282958984375, 0.04827880859375, 0.050537109375, 0.007564544677734375, 0.011749267578125, 0.005481719970703125, 0.0528564453125, -0.048065185546875, -0.04437255859375, -0.05108642578125, 0.049346923828125, 0.0002849102020263672, -0.005886077880859375, 0.05694580078125, 0.034210205078125, 0.0609130859375, -0.028350830078125, 0.060760498046875, 0.0018939971923828125, 0.049713134765625, -0.043548583984375, 0.053497314453125, -0.03314208984375, -0.00035834312438964844, -0.0294342041015625, -0.07623291015625, -0.0114898681640625, 0.07415771484375, -0.01116180419921875, 0.0200042724609375, 0.06500244140625, 0.05108642578125, 0.00804901123046875, -0.007904052734375, 0.0293426513671875, 0.0230560302734375, -0.0146484375, 0.0307769775390625, 0.039459228515625, -0.024078369140625, 0.03131103515625, -0.027496337890625, -0.01102447509765625, -0.00403594970703125, -0.06549072265625, -0.10247802734375, -0.048126220703125, -0.018707275390625, -0.025543212890625, 0.01537322998046875, 0.05426025390625, 0.06695556640625, -0.08221435546875, -0.0167236328125, -0.01019287109375, -0.004215240478515625, -0.00916290283203125, -0.021392822265625, 0.024932861328125, -0.0360107421875, -0.056732177734375, 0.01357269287109375, 0.0182037353515625, 0.0129852294921875, -0.03668212890625, 0.013214111328125, 
-0.040863037109375, 0.025787353515625, 0.054412841796875, 0.0060577392578125, -0.0489501953125, -0.017181396484375, -0.00014483928680419922, -0.01488494873046875, -0.000553131103515625, 0.050537109375, -0.057098388671875, 0.033355712890625, 0.019927978515625, 0.052642822265625, 0.039794921875, -0.015594482421875, 0.047576904296875, -0.084716796875, 0.0302276611328125, 0.0260009765625, 0.023712158203125, 0.01268768310546875, -0.02960205078125, 0.025299072265625, 0.01654052734375, -0.0301361083984375, -0.06256103515625, 0.01387786865234375, -0.07781982421875, -0.033660888671875, 0.07421875, -0.019317626953125, -0.0100860595703125, -0.008941650390625, -0.01036834716796875, 0.00795745849609375, -0.015655517578125, 0.058074951171875, 0.0728759765625, 0.01617431640625, -0.0250091552734375, -0.03192138671875, 0.0369873046875, 0.035247802734375, -0.0389404296875, -0.03582763671875, 0.0130615234375, 0.0165252685546875, 0.00992584228515625, 0.04998779296875, 0.00440216064453125, -0.0032215118408203125, -0.0234527587890625, 0.037017822265625, 0.002948760986328125, -0.02276611328125, -0.01035308837890625, 0.01512908935546875, -0.0298614501953125, -0.041595458984375 ] ]
Riiid/sheep-duck-llama-2-70b-v1.1
2023-10-13T00:59:15.000Z
[ "transformers", "pytorch", "llama", "text-generation", "Riiid", "llama-2", "sheep-duck-llama-2", "en", "arxiv:2306.02707", "license:llama2", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Riiid
null
null
Riiid/sheep-duck-llama-2-70b-v1.1
14
8,798
transformers
2023-09-27T17:00:27
--- thumbnail: >- https://cdn-uploads.huggingface.co/production/uploads/62fb1ef7e8c9c532aa7d19e4/NswB5XPkkOljeRh1xbMmR.png pipeline_tag: text-generation license: llama2 language: - en library_name: transformers tags: - Riiid - llama-2 - sheep-duck-llama-2 --- # sheep-duck-llama-2 <img src = "https://cdn-uploads.huggingface.co/production/uploads/62fb1ef7e8c9c532aa7d19e4/NswB5XPkkOljeRh1xbMmR.png" width="30%" height="30%"> This is a version 1.1 of Riiid/sheep-duck-llama-2. ## Model Details * **Developed by**: [Riiid](https://riiid.com/) * **Backbone Model**: [Riiid/sheep-duck-llama-2](https://huggingface.co/Riiid/sheep-duck-llama-2) * **Library**: [HuggingFace Transformers](https://github.com/huggingface/transformers) ## Dataset Details ### Used Datasets - Orca-style dataset - Alpaca-style dataset ### Prompt Template ``` ### System: {System} ### User: {User} ### Assistant: {Assistant} ``` ## Evaluation | Metric | Value | |-----------------------|-------| | ARC (25-shot) | 73.04 | | HellaSwag (10-shot) | 87.81 | | MMLU (5-shot) | 70.84 | | TruthfulQA (0-shot) | 64.58 | | Avg. | 74.07 | ## Limitations & Biases: Llama2 and fine-tuned variants are a new technology that carries risks with use. Testing conducted to date has been in English, and has not covered, nor could it cover all scenarios. For these reasons, as with all LLMs, Llama 2 and any fine-tuned varient's potential outputs cannot be predicted in advance, and the model may in some instances produce inaccurate, biased or other objectionable responses to user prompts. Therefore, before deploying any applications of Llama 2 variants, developers should perform safety testing and tuning tailored to their specific applications of the model. Please see the Responsible Use Guide available at https://ai.meta.com/llama/responsible-use-guide/ ## License Disclaimer: This model is bound by the license & usage restrictions of the original Llama-2 model. And comes with no warranty or gurantees of any kind. 
## Contact Us - [Riiid](https://riiid.com/) ## Citation: Please kindly cite using the following BibTeX: ```bibtex @article{platypus2023, title={Platypus: Quick, Cheap, and Powerful Refinement of LLMs}, author={Ariel N. Lee and Cole J. Hunter and Nataniel Ruiz}, booktitle={arXiv preprint arxiv:2308.07317}, year={2023} } ``` ``` @misc{mukherjee2023orca, title={Orca: Progressive Learning from Complex Explanation Traces of GPT-4}, author={Subhabrata Mukherjee and Arindam Mitra and Ganesh Jawahar and Sahaj Agarwal and Hamid Palangi and Ahmed Awadallah}, year={2023}, eprint={2306.02707}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` ``` @misc{Orca-best, title = {Orca-best: A filtered version of orca gpt4 dataset.}, author = {Shahul Es}, year = {2023}, publisher = {HuggingFace}, journal = {HuggingFace repository}, howpublished = {\url{https://huggingface.co/datasets/shahules786/orca-best/}, } ``` ``` @software{touvron2023llama2, title={Llama 2: Open Foundation and Fine-Tuned Chat Models}, author={Hugo Touvron, Louis Martin, Kevin Stone, Peter Albert, Amjad Almahairi, Yasmine Babaei, Nikolay Bashlykov, Soumya Batra, Prajjwal Bhargava, Shruti Bhosale, Dan Bikel, Lukas Blecher, Cristian Canton Ferrer, Moya Chen, Guillem Cucurull, David Esiobu, Jude Fernandes, Jeremy Fu, Wenyin Fu, Brian Fuller, Cynthia Gao, Vedanuj Goswami, Naman Goyal, Anthony Hartshorn, Saghar Hosseini, Rui Hou, Hakan Inan, Marcin Kardas, Viktor Kerkez Madian Khabsa, Isabel Kloumann, Artem Korenev, Punit Singh Koura, Marie-Anne Lachaux, Thibaut Lavril, Jenya Lee, Diana Liskovich, Yinghai Lu, Yuning Mao, Xavier Martinet, Todor Mihaylov, Pushkar Mishra, Igor Molybog, Yixin Nie, Andrew Poulton, Jeremy Reizenstein, Rashi Rungta, Kalyan Saladi, Alan Schelten, Ruan Silva, Eric Michael Smith, Ranjan Subramanian, Xiaoqing Ellen Tan, Binh Tang, Ross Taylor, Adina Williams, Jian Xiang Kuan, Puxin Xu , Zheng Yan, Iliyan Zarov, Yuchen Zhang, Angela Fan, Melanie Kambadur, Sharan Narang, Aurelien Rodriguez, Robert 
Stojnic, Sergey Edunov, Thomas Scialom}, year={2023} } ```
4,188
[ [ -0.033599853515625, -0.04888916015625, 0.0094451904296875, 0.01995849609375, -0.024810791015625, 0.00920867919921875, 0.005825042724609375, -0.048431396484375, 0.01424407958984375, 0.0236968994140625, -0.0557861328125, -0.040374755859375, -0.04736328125, -0.00860595703125, -0.0150146484375, 0.0767822265625, -0.000652313232421875, -0.032623291015625, -0.004100799560546875, -0.019378662109375, -0.033111572265625, -0.01873779296875, -0.054473876953125, -0.0287628173828125, 0.0233917236328125, 0.0251312255859375, 0.061126708984375, 0.0474853515625, 0.0364990234375, 0.022705078125, -0.02886962890625, 0.0191650390625, -0.036224365234375, -0.015655517578125, 0.006664276123046875, -0.03875732421875, -0.0745849609375, 0.0042572021484375, 0.0293731689453125, 0.01605224609375, -0.01139068603515625, 0.0321044921875, 0.019500732421875, 0.04150390625, -0.0214385986328125, 0.0259552001953125, -0.028961181640625, 0.005035400390625, -0.0253448486328125, -0.01224517822265625, -0.0023555755615234375, -0.0194244384765625, 0.0038318634033203125, -0.0560302734375, 0.0031452178955078125, -0.00594329833984375, 0.0966796875, 0.02679443359375, -0.03155517578125, -0.0021572113037109375, -0.0294189453125, 0.063720703125, -0.0675048828125, 0.023651123046875, 0.0190887451171875, 0.0218505859375, -0.03521728515625, -0.060333251953125, -0.050506591796875, 0.0008444786071777344, -0.01446533203125, 0.01934814453125, -0.02197265625, -0.0136260986328125, 0.0135040283203125, 0.026153564453125, -0.04144287109375, 0.0202484130859375, -0.03753662109375, -0.018951416015625, 0.05596923828125, 0.009521484375, 0.01265716552734375, -0.0092315673828125, -0.0484619140625, -0.024627685546875, -0.0596923828125, 0.034698486328125, 0.0364990234375, 0.01097869873046875, -0.05657958984375, 0.044647216796875, -0.00849151611328125, 0.02777099609375, 0.0059814453125, -0.038909912109375, 0.0498046875, -0.0369873046875, -0.013580322265625, -0.027252197265625, 0.059906005859375, 0.039764404296875, 0.00959014892578125, 
0.025146484375, -0.00930023193359375, -0.00041985511779785156, -0.007015228271484375, -0.05596923828125, -0.006664276123046875, 0.027313232421875, -0.03338623046875, -0.0208892822265625, -0.008453369140625, -0.07159423828125, -0.0189361572265625, -0.0155029296875, 0.018585205078125, -0.01158905029296875, -0.046142578125, 0.0205230712890625, 0.010528564453125, 0.050140380859375, 0.0094451904296875, -0.0528564453125, 0.02471923828125, 0.04150390625, 0.05950927734375, -0.01165771484375, -0.014190673828125, -0.00867462158203125, 0.003940582275390625, -0.01470184326171875, 0.061798095703125, -0.0188751220703125, -0.033599853515625, -0.0191192626953125, 0.0055389404296875, 0.004512786865234375, -0.036468505859375, 0.047576904296875, -0.0244293212890625, 0.017608642578125, -0.019500732421875, -0.0179901123046875, -0.031707763671875, 0.01364898681640625, -0.034881591796875, 0.08953857421875, 0.012786865234375, -0.054962158203125, 0.021514892578125, -0.046844482421875, -0.01265716552734375, -0.0215606689453125, -0.0081787109375, -0.0633544921875, -0.0277099609375, 0.023773193359375, 0.0287017822265625, -0.0281219482421875, 0.01477813720703125, -0.032318115234375, -0.0174407958984375, 0.004444122314453125, -0.00881195068359375, 0.07110595703125, 0.017913818359375, -0.0460205078125, 0.013458251953125, -0.05078125, -0.01092529296875, 0.03790283203125, -0.0184478759765625, 0.0097503662109375, 0.002780914306640625, -0.019287109375, 0.0216064453125, 0.02587890625, -0.031646728515625, 0.0130157470703125, -0.02239990234375, 0.0396728515625, 0.053466796875, -0.0012464523315429688, 0.0211029052734375, -0.044525146484375, 0.04132080078125, 0.005420684814453125, 0.044342041015625, 0.005428314208984375, -0.06219482421875, -0.06451416015625, -0.0325927734375, -0.002410888671875, 0.049407958984375, -0.025421142578125, 0.04449462890625, -0.006633758544921875, -0.05084228515625, -0.0248870849609375, 0.0118865966796875, 0.0311279296875, 0.043212890625, 0.036224365234375, -0.024078369140625, 
-0.043853759765625, -0.0775146484375, -0.004901885986328125, -0.02239990234375, -0.003734588623046875, 0.0304718017578125, 0.033172607421875, -0.024688720703125, 0.072509765625, -0.0338134765625, -0.0318603515625, -0.0203094482421875, -0.0103607177734375, 0.030364990234375, 0.04522705078125, 0.05670166015625, -0.04803466796875, -0.0233154296875, -0.0176849365234375, -0.0528564453125, -0.012115478515625, 0.0009217262268066406, -0.02520751953125, 0.012786865234375, 0.0180511474609375, -0.06219482421875, 0.05029296875, 0.04803466796875, -0.0300750732421875, 0.042327880859375, 0.00011098384857177734, -0.006290435791015625, -0.07427978515625, 0.0082244873046875, 0.00614166259765625, -0.008575439453125, -0.035888671875, 0.0005273818969726562, -0.0094146728515625, 0.0128326416015625, -0.037628173828125, 0.046966552734375, -0.028167724609375, -0.00731658935546875, -0.0032787322998046875, 0.0186309814453125, 0.005191802978515625, 0.048004150390625, -0.01491546630859375, 0.05255126953125, 0.046875, -0.031280517578125, 0.0164337158203125, 0.0291595458984375, -0.025543212890625, 0.0355224609375, -0.06707763671875, 0.01291656494140625, 0.006870269775390625, 0.04437255859375, -0.09197998046875, -0.01605224609375, 0.035369873046875, -0.03717041015625, 0.031951904296875, -0.00003904104232788086, -0.0269775390625, -0.042083740234375, -0.044525146484375, 0.031707763671875, 0.05108642578125, -0.038970947265625, 0.03729248046875, 0.034210205078125, -0.00820159912109375, -0.05218505859375, -0.059112548828125, -0.0131988525390625, -0.038330078125, -0.05902099609375, 0.02471923828125, -0.02093505859375, 0.0023746490478515625, -0.01512908935546875, -0.0150146484375, 0.006977081298828125, 0.0047607421875, 0.02093505859375, 0.034423828125, -0.01446533203125, -0.0167999267578125, -0.000995635986328125, -0.0235137939453125, -0.0005006790161132812, 0.0067596435546875, 0.042205810546875, -0.01605224609375, -0.02734375, -0.052947998046875, 0.005504608154296875, 0.03778076171875, 
-0.0248260498046875, 0.0384521484375, 0.05426025390625, -0.018035888671875, 0.0085906982421875, -0.052581787109375, -0.0205230712890625, -0.0419921875, 0.0240936279296875, -0.028289794921875, -0.0697021484375, 0.06939697265625, 0.00585174560546875, 0.022857666015625, 0.057952880859375, 0.03759765625, 0.002437591552734375, 0.06951904296875, 0.04254150390625, 0.004535675048828125, 0.03338623046875, -0.038970947265625, 0.0066070556640625, -0.07989501953125, -0.051849365234375, -0.032440185546875, -0.041778564453125, -0.046600341796875, -0.036590576171875, 0.0289764404296875, 0.0191497802734375, -0.050537109375, 0.027587890625, -0.052703857421875, 0.012969970703125, 0.032196044921875, 0.015960693359375, 0.0172119140625, 0.004405975341796875, -0.0123748779296875, -0.0027980804443359375, -0.039581298828125, -0.03961181640625, 0.087646484375, 0.03900146484375, 0.046600341796875, 0.0210723876953125, 0.033050537109375, -0.0022735595703125, 0.0178375244140625, -0.04046630859375, 0.043853759765625, 0.00788116455078125, -0.054412841796875, -0.0118255615234375, -0.0160980224609375, -0.08538818359375, 0.0120391845703125, -0.003925323486328125, -0.06561279296875, 0.0293426513671875, 0.00467681884765625, -0.034332275390625, 0.0282135009765625, -0.03741455078125, 0.0457763671875, -0.01129913330078125, -0.010833740234375, -0.007354736328125, -0.063232421875, 0.04669189453125, -0.0014019012451171875, 0.0164642333984375, -0.0264739990234375, -0.0257415771484375, 0.054412841796875, -0.04132080078125, 0.07244873046875, -0.00926971435546875, -0.0090789794921875, 0.044952392578125, -0.002918243408203125, 0.050689697265625, 0.018829345703125, -0.007053375244140625, 0.0303192138671875, -0.0146331787109375, -0.0266265869140625, -0.022491455078125, 0.04803466796875, -0.08612060546875, -0.0555419921875, -0.0261993408203125, -0.01983642578125, 0.00339508056640625, 0.004314422607421875, 0.023406982421875, 0.01873779296875, 0.02716064453125, 0.01546478271484375, 0.040069580078125, 
-0.026763916015625, 0.032379150390625, 0.04107666015625, -0.01540374755859375, -0.03466796875, 0.04547119140625, 0.01206207275390625, 0.025146484375, 0.00838470458984375, 0.01540374755859375, -0.03271484375, -0.0364990234375, -0.0220489501953125, 0.03662109375, -0.0399169921875, -0.036590576171875, -0.040740966796875, -0.025421142578125, -0.016876220703125, -0.0006275177001953125, -0.042022705078125, -0.0302581787109375, -0.050628662109375, -0.0183868408203125, 0.05426025390625, 0.04278564453125, -0.01428985595703125, 0.0224456787109375, -0.0290374755859375, 0.016265869140625, 0.02886962890625, 0.02703857421875, -0.00042700767517089844, -0.06573486328125, 0.01216888427734375, 0.01348114013671875, -0.05126953125, -0.047698974609375, 0.025146484375, 0.0196075439453125, 0.054290771484375, 0.0149688720703125, 0.00034236907958984375, 0.0687255859375, -0.0131683349609375, 0.082275390625, 0.0225372314453125, -0.05535888671875, 0.04541015625, -0.03314208984375, 0.01198577880859375, 0.0188446044921875, 0.0186004638671875, -0.0162200927734375, -0.0212860107421875, -0.0604248046875, -0.076416015625, 0.0518798828125, 0.0249786376953125, 0.0102996826171875, 0.009552001953125, 0.038848876953125, 0.01155853271484375, 0.002536773681640625, -0.056671142578125, -0.0386962890625, -0.028594970703125, 0.003448486328125, -0.0015516281127929688, -0.025146484375, -0.00885772705078125, -0.0252838134765625, 0.052001953125, 0.0014209747314453125, 0.036834716796875, 0.0183868408203125, 0.017730712890625, -0.01557159423828125, -0.002838134765625, 0.06719970703125, 0.04083251953125, -0.0186920166015625, -0.0101776123046875, 0.0243072509765625, -0.042938232421875, -0.0038661956787109375, 0.0019588470458984375, 0.003078460693359375, -0.01812744140625, 0.0287017822265625, 0.0565185546875, 0.010467529296875, -0.0277862548828125, 0.03851318359375, 0.00566864013671875, -0.02032470703125, -0.033050537109375, 0.01096343994140625, 0.01482391357421875, 0.046905517578125, 0.035430908203125, 
0.0140228271484375, -0.0028285980224609375, -0.0247344970703125, 0.00839996337890625, 0.0254669189453125, -0.0045166015625, -0.037689208984375, 0.06805419921875, 0.003253936767578125, -0.02069091796875, 0.034515380859375, -0.004974365234375, -0.0294189453125, 0.05987548828125, 0.037017822265625, 0.049835205078125, -0.0255279541015625, 0.00443267822265625, 0.043182373046875, 0.0195159912109375, -0.00759124755859375, 0.037628173828125, 0.0173797607421875, -0.041229248046875, -0.025543212890625, -0.037750244140625, -0.02197265625, 0.03314208984375, -0.044097900390625, 0.04071044921875, -0.040374755859375, -0.033782958984375, -0.020416259765625, 0.01399993896484375, -0.058319091796875, 0.0023059844970703125, 0.0034313201904296875, 0.07196044921875, -0.047393798828125, 0.0491943359375, 0.04071044921875, -0.031524658203125, -0.08306884765625, -0.0273895263671875, 0.0189666748046875, -0.07159423828125, 0.0308380126953125, 0.00525665283203125, -0.005977630615234375, 0.0006670951843261719, -0.05322265625, -0.08612060546875, 0.11639404296875, 0.030029296875, -0.034027099609375, 0.01922607421875, 0.01201629638671875, 0.0345458984375, -0.0193023681640625, 0.040191650390625, 0.04962158203125, 0.04351806640625, 0.024200439453125, -0.0853271484375, 0.016845703125, -0.0290374755859375, -0.00508880615234375, -0.00439453125, -0.0943603515625, 0.08428955078125, -0.02508544921875, -0.0119476318359375, 0.021514892578125, 0.051513671875, 0.052642822265625, 0.0181121826171875, 0.033966064453125, 0.047698974609375, 0.048309326171875, -0.01091766357421875, 0.07818603515625, -0.009246826171875, 0.041778564453125, 0.06829833984375, 0.0008249282836914062, 0.064208984375, 0.0172119140625, -0.043243408203125, 0.054443359375, 0.07110595703125, 0.002002716064453125, 0.039154052734375, 0.010986328125, 0.0029048919677734375, -0.006011962890625, 0.0004944801330566406, -0.0567626953125, 0.030029296875, 0.03662109375, -0.0151824951171875, -0.01263427734375, -0.0211334228515625, 0.0238494873046875, 
-0.0201873779296875, -0.000255584716796875, 0.04388427734375, 0.01361846923828125, -0.0248870849609375, 0.0780029296875, -0.01465606689453125, 0.06427001953125, -0.05133056640625, 0.003231048583984375, -0.041778564453125, 0.00855255126953125, -0.03472900390625, -0.064208984375, 0.0112457275390625, -0.00589752197265625, 0.00568389892578125, 0.0087432861328125, 0.045684814453125, -0.002902984619140625, -0.02508544921875, 0.031646728515625, 0.0200958251953125, 0.023468017578125, 0.02093505859375, -0.0762939453125, 0.0202178955078125, 0.005512237548828125, -0.06439208984375, 0.0201416015625, 0.0221405029296875, -0.004123687744140625, 0.0654296875, 0.05047607421875, -0.0113372802734375, 0.0113677978515625, -0.01201629638671875, 0.0831298828125, -0.03106689453125, -0.027069091796875, -0.0614013671875, 0.06085205078125, -0.004329681396484375, -0.045867919921875, 0.0516357421875, 0.036590576171875, 0.05517578125, 0.01446533203125, 0.04180908203125, -0.00763702392578125, 0.03558349609375, -0.01556396484375, 0.04498291015625, -0.0577392578125, 0.033721923828125, -0.01207733154296875, -0.0787353515625, -0.028533935546875, 0.057525634765625, -0.009613037109375, 0.01323699951171875, 0.0401611328125, 0.0755615234375, -0.0034999847412109375, -0.005764007568359375, -0.003971099853515625, 0.02838134765625, 0.045562744140625, 0.04852294921875, 0.04779052734375, -0.045806884765625, 0.05084228515625, -0.022186279296875, -0.0251922607421875, -0.02264404296875, -0.07476806640625, -0.06561279296875, -0.030548095703125, -0.034820556640625, -0.0290374755859375, -0.01078033447265625, 0.057952880859375, 0.06072998046875, -0.056610107421875, -0.0185089111328125, 0.001918792724609375, 0.005107879638671875, -0.020599365234375, -0.01285552978515625, 0.042510986328125, 0.0075225830078125, -0.04669189453125, 0.0167236328125, 0.012359619140625, 0.0293731689453125, -0.0238494873046875, -0.0217437744140625, -0.016021728515625, 0.0074615478515625, 0.031341552734375, 0.0362548828125, -0.06451416015625, 
-0.039459228515625, -0.00791168212890625, -0.00982666015625, 0.01995849609375, 0.009552001953125, -0.053741455078125, 0.01503753662109375, 0.0338134765625, 0.01299285888671875, 0.049224853515625, -0.003681182861328125, 0.0084075927734375, -0.040863037109375, 0.030548095703125, -0.0010728836059570312, 0.01971435546875, 0.019744873046875, -0.0238189697265625, 0.056304931640625, 0.0191650390625, -0.0447998046875, -0.06304931640625, 0.0081939697265625, -0.09942626953125, 0.00897979736328125, 0.0960693359375, -0.022064208984375, -0.01026153564453125, -0.0003654956817626953, -0.0219879150390625, 0.030548095703125, -0.035430908203125, 0.05255126953125, 0.02978515625, -0.01169586181640625, -0.018585205078125, -0.05426025390625, 0.037445068359375, 0.01497650146484375, -0.058319091796875, -0.0225982666015625, 0.0061187744140625, 0.03460693359375, 0.0161895751953125, 0.03369140625, -0.0229644775390625, 0.00804901123046875, -0.00888824462890625, 0.006641387939453125, -0.0179595947265625, 0.004428863525390625, -0.004779815673828125, -0.01053619384765625, -0.0096588134765625, -0.0149688720703125 ] ]
timm/convnext_nano.in12k_ft_in1k
2023-03-31T22:32:30.000Z
[ "timm", "pytorch", "safetensors", "image-classification", "dataset:imagenet-1k", "dataset:imagenet-12k", "arxiv:2201.03545", "license:apache-2.0", "region:us" ]
image-classification
timm
null
null
timm/convnext_nano.in12k_ft_in1k
1
8,788
timm
2022-12-13T07:12:21
--- tags: - image-classification - timm library_tag: timm license: apache-2.0 datasets: - imagenet-1k - imagenet-12k --- # Model card for convnext_nano.in12k_ft_in1k A ConvNeXt image classification model. Pretrained in `timm` on ImageNet-12k (a 11821 class subset of full ImageNet-22k) and fine-tuned on ImageNet-1k by Ross Wightman. ImageNet-12k training done on TPUs thanks to support of the [TRC](https://sites.research.google/trc/about/) program. Fine-tuning performed on 8x GPU [Lambda Labs](https://lambdalabs.com/) cloud instances. ## Model Details - **Model Type:** Image classification / feature backbone - **Model Stats:** - Params (M): 15.6 - GMACs: 2.5 - Activations (M): 8.4 - Image size: train = 224 x 224, test = 288 x 288 - **Papers:** - A ConvNet for the 2020s: https://arxiv.org/abs/2201.03545 - **Original:** https://github.com/huggingface/pytorch-image-models - **Dataset:** ImageNet-1k - **Pretrain Dataset:** ImageNet-12k ## Model Usage ### Image Classification ```python from urllib.request import urlopen from PIL import Image import timm img = Image.open(urlopen( 'https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/beignets-task-guide.png' )) model = timm.create_model('convnext_nano.in12k_ft_in1k', pretrained=True) model = model.eval() # get model specific transforms (normalization, resize) data_config = timm.data.resolve_model_data_config(model) transforms = timm.data.create_transform(**data_config, is_training=False) output = model(transforms(img).unsqueeze(0)) # unsqueeze single image into batch of 1 top5_probabilities, top5_class_indices = torch.topk(output.softmax(dim=1) * 100, k=5) ``` ### Feature Map Extraction ```python from urllib.request import urlopen from PIL import Image import timm img = Image.open(urlopen( 'https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/beignets-task-guide.png' )) model = timm.create_model( 'convnext_nano.in12k_ft_in1k', pretrained=True, features_only=True, ) model = 
model.eval() # get model specific transforms (normalization, resize) data_config = timm.data.resolve_model_data_config(model) transforms = timm.data.create_transform(**data_config, is_training=False) output = model(transforms(img).unsqueeze(0)) # unsqueeze single image into batch of 1 for o in output: # print shape of each feature map in output # e.g.: # torch.Size([1, 80, 56, 56]) # torch.Size([1, 160, 28, 28]) # torch.Size([1, 320, 14, 14]) # torch.Size([1, 640, 7, 7]) print(o.shape) ``` ### Image Embeddings ```python from urllib.request import urlopen from PIL import Image import timm img = Image.open(urlopen( 'https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/beignets-task-guide.png' )) model = timm.create_model( 'convnext_nano.in12k_ft_in1k', pretrained=True, num_classes=0, # remove classifier nn.Linear ) model = model.eval() # get model specific transforms (normalization, resize) data_config = timm.data.resolve_model_data_config(model) transforms = timm.data.create_transform(**data_config, is_training=False) output = model(transforms(img).unsqueeze(0)) # output is (batch_size, num_features) shaped tensor # or equivalently (without needing to set num_classes=0) output = model.forward_features(transforms(img).unsqueeze(0)) # output is unpooled, a (1, 640, 7, 7) shaped tensor output = model.forward_head(output, pre_logits=True) # output is a (1, num_features) shaped tensor ``` ## Model Comparison Explore the dataset and runtime metrics of this model in timm [model results](https://github.com/huggingface/pytorch-image-models/tree/main/results). All timing numbers from eager model PyTorch 1.13 on RTX 3090 w/ AMP. 
| model |top1 |top5 |img_size|param_count|gmacs |macts |samples_per_sec|batch_size| |------------------------------------------------------------------------------------------------------------------------------|------|------|--------|-----------|------|------|---------------|----------| | [convnextv2_huge.fcmae_ft_in22k_in1k_512](https://huggingface.co/timm/convnextv2_huge.fcmae_ft_in22k_in1k_512) |88.848|98.742|512 |660.29 |600.81|413.07|28.58 |48 | | [convnextv2_huge.fcmae_ft_in22k_in1k_384](https://huggingface.co/timm/convnextv2_huge.fcmae_ft_in22k_in1k_384) |88.668|98.738|384 |660.29 |337.96|232.35|50.56 |64 | | [convnext_xxlarge.clip_laion2b_soup_ft_in1k](https://huggingface.co/timm/convnext_xxlarge.clip_laion2b_soup_ft_in1k) |88.612|98.704|256 |846.47 |198.09|124.45|122.45 |256 | | [convnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_384](https://huggingface.co/timm/convnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_384) |88.312|98.578|384 |200.13 |101.11|126.74|196.84 |256 | | [convnextv2_large.fcmae_ft_in22k_in1k_384](https://huggingface.co/timm/convnextv2_large.fcmae_ft_in22k_in1k_384) |88.196|98.532|384 |197.96 |101.1 |126.74|128.94 |128 | | [convnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_320](https://huggingface.co/timm/convnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_320) |87.968|98.47 |320 |200.13 |70.21 |88.02 |283.42 |256 | | [convnext_xlarge.fb_in22k_ft_in1k_384](https://huggingface.co/timm/convnext_xlarge.fb_in22k_ft_in1k_384) |87.75 |98.556|384 |350.2 |179.2 |168.99|124.85 |192 | | [convnextv2_base.fcmae_ft_in22k_in1k_384](https://huggingface.co/timm/convnextv2_base.fcmae_ft_in22k_in1k_384) |87.646|98.422|384 |88.72 |45.21 |84.49 |209.51 |256 | | [convnext_large.fb_in22k_ft_in1k_384](https://huggingface.co/timm/convnext_large.fb_in22k_ft_in1k_384) |87.476|98.382|384 |197.77 |101.1 |126.74|194.66 |256 | | [convnext_large_mlp.clip_laion2b_augreg_ft_in1k](https://huggingface.co/timm/convnext_large_mlp.clip_laion2b_augreg_ft_in1k) 
|87.344|98.218|256 |200.13 |44.94 |56.33 |438.08 |256 | | [convnextv2_large.fcmae_ft_in22k_in1k](https://huggingface.co/timm/convnextv2_large.fcmae_ft_in22k_in1k) |87.26 |98.248|224 |197.96 |34.4 |43.13 |376.84 |256 | | [convnext_base.clip_laion2b_augreg_ft_in12k_in1k_384](https://huggingface.co/timm/convnext_base.clip_laion2b_augreg_ft_in12k_in1k_384) |87.138|98.212|384 |88.59 |45.21 |84.49 |365.47 |256 | | [convnext_xlarge.fb_in22k_ft_in1k](https://huggingface.co/timm/convnext_xlarge.fb_in22k_ft_in1k) |87.002|98.208|224 |350.2 |60.98 |57.5 |368.01 |256 | | [convnext_base.fb_in22k_ft_in1k_384](https://huggingface.co/timm/convnext_base.fb_in22k_ft_in1k_384) |86.796|98.264|384 |88.59 |45.21 |84.49 |366.54 |256 | | [convnextv2_base.fcmae_ft_in22k_in1k](https://huggingface.co/timm/convnextv2_base.fcmae_ft_in22k_in1k) |86.74 |98.022|224 |88.72 |15.38 |28.75 |624.23 |256 | | [convnext_large.fb_in22k_ft_in1k](https://huggingface.co/timm/convnext_large.fb_in22k_ft_in1k) |86.636|98.028|224 |197.77 |34.4 |43.13 |581.43 |256 | | [convnext_base.clip_laiona_augreg_ft_in1k_384](https://huggingface.co/timm/convnext_base.clip_laiona_augreg_ft_in1k_384) |86.504|97.97 |384 |88.59 |45.21 |84.49 |368.14 |256 | | [convnext_base.clip_laion2b_augreg_ft_in12k_in1k](https://huggingface.co/timm/convnext_base.clip_laion2b_augreg_ft_in12k_in1k) |86.344|97.97 |256 |88.59 |20.09 |37.55 |816.14 |256 | | [convnextv2_huge.fcmae_ft_in1k](https://huggingface.co/timm/convnextv2_huge.fcmae_ft_in1k) |86.256|97.75 |224 |660.29 |115.0 |79.07 |154.72 |256 | | [convnext_small.in12k_ft_in1k_384](https://huggingface.co/timm/convnext_small.in12k_ft_in1k_384) |86.182|97.92 |384 |50.22 |25.58 |63.37 |516.19 |256 | | [convnext_base.clip_laion2b_augreg_ft_in1k](https://huggingface.co/timm/convnext_base.clip_laion2b_augreg_ft_in1k) |86.154|97.68 |256 |88.59 |20.09 |37.55 |819.86 |256 | | [convnext_base.fb_in22k_ft_in1k](https://huggingface.co/timm/convnext_base.fb_in22k_ft_in1k) |85.822|97.866|224 |88.59 |15.38 
|28.75 |1037.66 |256 | | [convnext_small.fb_in22k_ft_in1k_384](https://huggingface.co/timm/convnext_small.fb_in22k_ft_in1k_384) |85.778|97.886|384 |50.22 |25.58 |63.37 |518.95 |256 | | [convnextv2_large.fcmae_ft_in1k](https://huggingface.co/timm/convnextv2_large.fcmae_ft_in1k) |85.742|97.584|224 |197.96 |34.4 |43.13 |375.23 |256 | | [convnext_small.in12k_ft_in1k](https://huggingface.co/timm/convnext_small.in12k_ft_in1k) |85.174|97.506|224 |50.22 |8.71 |21.56 |1474.31 |256 | | [convnext_tiny.in12k_ft_in1k_384](https://huggingface.co/timm/convnext_tiny.in12k_ft_in1k_384) |85.118|97.608|384 |28.59 |13.14 |39.48 |856.76 |256 | | [convnextv2_tiny.fcmae_ft_in22k_in1k_384](https://huggingface.co/timm/convnextv2_tiny.fcmae_ft_in22k_in1k_384) |85.112|97.63 |384 |28.64 |13.14 |39.48 |491.32 |256 | | [convnextv2_base.fcmae_ft_in1k](https://huggingface.co/timm/convnextv2_base.fcmae_ft_in1k) |84.874|97.09 |224 |88.72 |15.38 |28.75 |625.33 |256 | | [convnext_small.fb_in22k_ft_in1k](https://huggingface.co/timm/convnext_small.fb_in22k_ft_in1k) |84.562|97.394|224 |50.22 |8.71 |21.56 |1478.29 |256 | | [convnext_large.fb_in1k](https://huggingface.co/timm/convnext_large.fb_in1k) |84.282|96.892|224 |197.77 |34.4 |43.13 |584.28 |256 | | [convnext_tiny.in12k_ft_in1k](https://huggingface.co/timm/convnext_tiny.in12k_ft_in1k) |84.186|97.124|224 |28.59 |4.47 |13.44 |2433.7 |256 | | [convnext_tiny.fb_in22k_ft_in1k_384](https://huggingface.co/timm/convnext_tiny.fb_in22k_ft_in1k_384) |84.084|97.14 |384 |28.59 |13.14 |39.48 |862.95 |256 | | [convnextv2_tiny.fcmae_ft_in22k_in1k](https://huggingface.co/timm/convnextv2_tiny.fcmae_ft_in22k_in1k) |83.894|96.964|224 |28.64 |4.47 |13.44 |1452.72 |256 | | [convnext_base.fb_in1k](https://huggingface.co/timm/convnext_base.fb_in1k) |83.82 |96.746|224 |88.59 |15.38 |28.75 |1054.0 |256 | | [convnextv2_nano.fcmae_ft_in22k_in1k_384](https://huggingface.co/timm/convnextv2_nano.fcmae_ft_in22k_in1k_384) |83.37 |96.742|384 |15.62 |7.22 |24.61 |801.72 |256 | | 
[convnext_small.fb_in1k](https://huggingface.co/timm/convnext_small.fb_in1k) |83.142|96.434|224 |50.22 |8.71 |21.56 |1464.0 |256 | | [convnextv2_tiny.fcmae_ft_in1k](https://huggingface.co/timm/convnextv2_tiny.fcmae_ft_in1k) |82.92 |96.284|224 |28.64 |4.47 |13.44 |1425.62 |256 | | [convnext_tiny.fb_in22k_ft_in1k](https://huggingface.co/timm/convnext_tiny.fb_in22k_ft_in1k) |82.898|96.616|224 |28.59 |4.47 |13.44 |2480.88 |256 | | [convnext_nano.in12k_ft_in1k](https://huggingface.co/timm/convnext_nano.in12k_ft_in1k) |82.282|96.344|224 |15.59 |2.46 |8.37 |3926.52 |256 | | [convnext_tiny_hnf.a2h_in1k](https://huggingface.co/timm/convnext_tiny_hnf.a2h_in1k) |82.216|95.852|224 |28.59 |4.47 |13.44 |2529.75 |256 | | [convnext_tiny.fb_in1k](https://huggingface.co/timm/convnext_tiny.fb_in1k) |82.066|95.854|224 |28.59 |4.47 |13.44 |2346.26 |256 | | [convnextv2_nano.fcmae_ft_in22k_in1k](https://huggingface.co/timm/convnextv2_nano.fcmae_ft_in22k_in1k) |82.03 |96.166|224 |15.62 |2.46 |8.37 |2300.18 |256 | | [convnextv2_nano.fcmae_ft_in1k](https://huggingface.co/timm/convnextv2_nano.fcmae_ft_in1k) |81.83 |95.738|224 |15.62 |2.46 |8.37 |2321.48 |256 | | [convnext_nano_ols.d1h_in1k](https://huggingface.co/timm/convnext_nano_ols.d1h_in1k) |80.866|95.246|224 |15.65 |2.65 |9.38 |3523.85 |256 | | [convnext_nano.d1h_in1k](https://huggingface.co/timm/convnext_nano.d1h_in1k) |80.768|95.334|224 |15.59 |2.46 |8.37 |3915.58 |256 | | [convnextv2_pico.fcmae_ft_in1k](https://huggingface.co/timm/convnextv2_pico.fcmae_ft_in1k) |80.304|95.072|224 |9.07 |1.37 |6.1 |3274.57 |256 | | [convnext_pico.d1_in1k](https://huggingface.co/timm/convnext_pico.d1_in1k) |79.526|94.558|224 |9.05 |1.37 |6.1 |5686.88 |256 | | [convnext_pico_ols.d1_in1k](https://huggingface.co/timm/convnext_pico_ols.d1_in1k) |79.522|94.692|224 |9.06 |1.43 |6.5 |5422.46 |256 | | [convnextv2_femto.fcmae_ft_in1k](https://huggingface.co/timm/convnextv2_femto.fcmae_ft_in1k) |78.488|93.98 |224 |5.23 |0.79 |4.57 |4264.2 |256 | | 
[convnext_femto_ols.d1_in1k](https://huggingface.co/timm/convnext_femto_ols.d1_in1k) |77.86 |93.83 |224 |5.23 |0.82 |4.87 |6910.6 |256 | | [convnext_femto.d1_in1k](https://huggingface.co/timm/convnext_femto.d1_in1k) |77.454|93.68 |224 |5.22 |0.79 |4.57 |7189.92 |256 | | [convnextv2_atto.fcmae_ft_in1k](https://huggingface.co/timm/convnextv2_atto.fcmae_ft_in1k) |76.664|93.044|224 |3.71 |0.55 |3.81 |4728.91 |256 | | [convnext_atto_ols.a2_in1k](https://huggingface.co/timm/convnext_atto_ols.a2_in1k) |75.88 |92.846|224 |3.7 |0.58 |4.11 |7963.16 |256 | | [convnext_atto.d2_in1k](https://huggingface.co/timm/convnext_atto.d2_in1k) |75.664|92.9 |224 |3.7 |0.55 |3.81 |8439.22 |256 | ## Citation ```bibtex @misc{rw2019timm, author = {Ross Wightman}, title = {PyTorch Image Models}, year = {2019}, publisher = {GitHub}, journal = {GitHub repository}, doi = {10.5281/zenodo.4414861}, howpublished = {\url{https://github.com/huggingface/pytorch-image-models}} } ``` ```bibtex @article{liu2022convnet, author = {Zhuang Liu and Hanzi Mao and Chao-Yuan Wu and Christoph Feichtenhofer and Trevor Darrell and Saining Xie}, title = {A ConvNet for the 2020s}, journal = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, year = {2022}, } ```
15,989
[ [ -0.0689697265625, -0.033905029296875, -0.0020160675048828125, 0.034393310546875, -0.032257080078125, -0.01337432861328125, -0.01218414306640625, -0.036102294921875, 0.061920166015625, 0.01715087890625, -0.04486083984375, -0.041259765625, -0.0517578125, -0.001171112060546875, 0.00740814208984375, 0.06951904296875, -0.002094268798828125, -0.0112762451171875, 0.01678466796875, -0.0288238525390625, -0.0157470703125, -0.02557373046875, -0.06353759765625, -0.0177764892578125, 0.0201416015625, 0.0230255126953125, 0.056884765625, 0.043365478515625, 0.0288848876953125, 0.04107666015625, -0.0197601318359375, 0.01247406005859375, -0.01336669921875, -0.026397705078125, 0.0394287109375, -0.029541015625, -0.0662841796875, 0.0175933837890625, 0.060699462890625, 0.039764404296875, 0.0036773681640625, 0.01505279541015625, 0.02496337890625, 0.03594970703125, 0.00040411949157714844, -0.00641632080078125, -0.00795745849609375, 0.0130767822265625, -0.0192718505859375, 0.0026454925537109375, 0.003993988037109375, -0.051025390625, 0.0266876220703125, -0.04608154296875, 0.004558563232421875, -0.0004029273986816406, 0.10107421875, -0.007007598876953125, -0.01551055908203125, 0.0012502670288085938, 0.00926971435546875, 0.053375244140625, -0.05865478515625, 0.0234832763671875, 0.030609130859375, -0.00782012939453125, -0.01238250732421875, -0.050140380859375, -0.0455322265625, -0.006927490234375, -0.02642822265625, 0.018768310546875, -0.02703857421875, -0.0027313232421875, 0.04193115234375, 0.0340576171875, -0.037811279296875, -0.004291534423828125, -0.0250091552734375, -0.0080108642578125, 0.05670166015625, -0.00630950927734375, 0.045318603515625, -0.0248260498046875, -0.04803466796875, -0.0194854736328125, -0.0176849365234375, 0.030303955078125, 0.0166168212890625, -0.0036029815673828125, -0.0728759765625, 0.03839111328125, 0.00978851318359375, 0.0228271484375, 0.0269622802734375, -0.01357269287109375, 0.05657958984375, -0.019134521484375, -0.04046630859375, -0.021392822265625, 
0.08990478515625, 0.048858642578125, 0.0311126708984375, 0.00760650634765625, 0.0026721954345703125, -0.007625579833984375, -0.032379150390625, -0.07647705078125, -0.01085662841796875, 0.0272216796875, -0.04010009765625, -0.01140594482421875, 0.027313232421875, -0.060333251953125, 0.006732940673828125, -0.009490966796875, 0.016845703125, -0.06097412109375, -0.030242919921875, -0.007419586181640625, -0.0262451171875, 0.027313232421875, 0.022064208984375, -0.0272064208984375, 0.0258941650390625, 0.0215301513671875, 0.07525634765625, 0.021697998046875, -0.01390838623046875, -0.028656005859375, -0.01169586181640625, -0.0284576416015625, 0.02685546875, 0.01174163818359375, -0.01305389404296875, -0.0235595703125, 0.031982421875, -0.0135955810546875, -0.033538818359375, 0.028167724609375, 0.0212554931640625, 0.00733184814453125, -0.027191162109375, -0.026763916015625, -0.0192718505859375, 0.0290679931640625, -0.038543701171875, 0.0794677734375, 0.037506103515625, -0.0784912109375, 0.0222930908203125, -0.03643798828125, -0.0035991668701171875, -0.019287109375, 0.00562286376953125, -0.05889892578125, -0.006732940673828125, 0.0176849365234375, 0.05322265625, -0.0131988525390625, -0.0101776123046875, -0.028167724609375, -0.00516510009765625, 0.0268707275390625, 0.010040283203125, 0.070556640625, 0.01355743408203125, -0.03814697265625, 0.0031585693359375, -0.0479736328125, 0.0214080810546875, 0.0290985107421875, -0.00017833709716796875, -0.00534820556640625, -0.0626220703125, 0.00289154052734375, 0.039459228515625, 0.0136566162109375, -0.041748046875, 0.0202484130859375, -0.0181732177734375, 0.03155517578125, 0.049713134765625, -0.004215240478515625, 0.0231170654296875, -0.04339599609375, 0.043060302734375, 0.0086517333984375, 0.01934814453125, -0.00412750244140625, -0.0301666259765625, -0.058074951171875, -0.05072021484375, 0.018524169921875, 0.03515625, -0.035400390625, 0.05462646484375, 0.012542724609375, -0.046600341796875, -0.056121826171875, 0.0131683349609375, 
0.0400390625, 0.0211181640625, 0.0166168212890625, -0.0257110595703125, -0.04937744140625, -0.070068359375, -0.00769805908203125, 0.0066070556640625, -0.0008969306945800781, 0.045928955078125, 0.0304718017578125, -0.0076751708984375, 0.04180908203125, -0.0304718017578125, -0.019805908203125, -0.010894775390625, -0.00444793701171875, 0.0301361083984375, 0.05865478515625, 0.0831298828125, -0.062408447265625, -0.06585693359375, 0.004459381103515625, -0.0811767578125, 0.0009074211120605469, -0.00469207763671875, -0.0297088623046875, 0.0218048095703125, 0.0210723876953125, -0.0731201171875, 0.052276611328125, 0.0270538330078125, -0.0465087890625, 0.0362548828125, -0.0221099853515625, 0.025390625, -0.073974609375, 0.0184478759765625, 0.0227813720703125, -0.02410888671875, -0.03753662109375, 0.00522613525390625, -0.006671905517578125, 0.0100555419921875, -0.049072265625, 0.06866455078125, -0.0499267578125, 0.00521087646484375, 0.0010223388671875, 0.006786346435546875, 0.00045800209045410156, 0.036956787109375, -0.0024852752685546875, 0.036773681640625, 0.058624267578125, -0.0246429443359375, 0.032958984375, 0.0374755859375, -0.0034694671630859375, 0.05657958984375, -0.050018310546875, 0.0097808837890625, 0.00835418701171875, 0.0343017578125, -0.067626953125, -0.0287017822265625, 0.041351318359375, -0.05633544921875, 0.03582763671875, -0.01812744140625, -0.028656005859375, -0.060089111328125, -0.0628662109375, 0.0188140869140625, 0.04351806640625, -0.047607421875, 0.014892578125, 0.0201416015625, 0.00534820556640625, -0.044769287109375, -0.05072021484375, -0.00501251220703125, -0.032745361328125, -0.06317138671875, 0.033416748046875, 0.006763458251953125, -0.00836944580078125, 0.0025577545166015625, -0.00335693359375, -0.00146484375, -0.010467529296875, 0.039154052734375, 0.0313720703125, -0.0169830322265625, -0.028167724609375, -0.023468017578125, -0.011383056640625, 0.00007700920104980469, -0.0099945068359375, 0.041961669921875, -0.0266876220703125, 0.0116424560546875, 
-0.07958984375, 0.013641357421875, 0.051727294921875, -0.001560211181640625, 0.06878662109375, 0.07635498046875, -0.0340576171875, 0.01068115234375, -0.0307464599609375, -0.011993408203125, -0.03826904296875, -0.009063720703125, -0.040924072265625, -0.049285888671875, 0.0626220703125, 0.01195526123046875, -0.008331298828125, 0.053558349609375, 0.0246429443359375, -0.0166015625, 0.06475830078125, 0.04010009765625, -0.005405426025390625, 0.045654296875, -0.06866455078125, 0.0003974437713623047, -0.06488037109375, -0.04669189453125, -0.01136016845703125, -0.042877197265625, -0.05633544921875, -0.02655029296875, 0.023193359375, 0.035247802734375, -0.01300811767578125, 0.051177978515625, -0.045806884765625, -0.004650115966796875, 0.039459228515625, 0.025970458984375, -0.019256591796875, -0.0170440673828125, -0.01325225830078125, -0.01483917236328125, -0.0438232421875, -0.010528564453125, 0.051605224609375, 0.047882080078125, 0.0292816162109375, -0.004001617431640625, 0.039093017578125, -0.0076446533203125, 0.021484375, -0.036651611328125, 0.054962158203125, -0.00418853759765625, -0.037200927734375, -0.01328277587890625, -0.036346435546875, -0.07489013671875, 0.009796142578125, -0.027679443359375, -0.0633544921875, -0.009185791015625, 0.01502227783203125, -0.0225830078125, 0.04150390625, -0.05230712890625, 0.059722900390625, -0.006267547607421875, -0.035552978515625, 0.00548553466796875, -0.065185546875, 0.0182037353515625, 0.0301666259765625, -0.006046295166015625, -0.01274871826171875, 0.00946807861328125, 0.061920166015625, -0.062744140625, 0.0361328125, -0.029510498046875, 0.006107330322265625, 0.0406494140625, -0.00399017333984375, 0.0312042236328125, 0.010040283203125, -0.0001710653305053711, 0.0037441253662109375, 0.01116180419921875, -0.04656982421875, -0.029144287109375, 0.05010986328125, -0.05322265625, -0.027618408203125, -0.041046142578125, -0.0237274169921875, 0.0127716064453125, 0.0016937255859375, 0.049285888671875, 0.042449951171875, -0.00885009765625, 
0.01367950439453125, 0.04150390625, -0.029815673828125, 0.0379638671875, -0.01284027099609375, -0.0011949539184570312, -0.041107177734375, 0.0595703125, 0.004947662353515625, 0.00873565673828125, 0.0026035308837890625, 0.00859832763671875, -0.031585693359375, -0.0143280029296875, -0.01342010498046875, 0.05078125, -0.0153961181640625, -0.028350830078125, -0.04852294921875, -0.0323486328125, -0.04473876953125, -0.0257720947265625, -0.030670166015625, -0.022216796875, -0.02667236328125, 0.005107879638671875, 0.055511474609375, 0.0418701171875, -0.02593994140625, 0.0341796875, -0.049652099609375, 0.0256195068359375, 0.00444793701171875, 0.032379150390625, -0.020233154296875, -0.0440673828125, 0.0017070770263671875, -0.0005698204040527344, -0.0186309814453125, -0.05792236328125, 0.04656982421875, 0.0111083984375, 0.029510498046875, 0.038055419921875, -0.0248565673828125, 0.06085205078125, -0.006931304931640625, 0.037811279296875, 0.04229736328125, -0.06646728515625, 0.03564453125, -0.0290069580078125, 0.0101776123046875, 0.01392364501953125, 0.0279693603515625, -0.037628173828125, -0.0256195068359375, -0.0771484375, -0.0443115234375, 0.053375244140625, 0.01096343994140625, -0.0030994415283203125, 0.005970001220703125, 0.048187255859375, -0.005931854248046875, 0.012054443359375, -0.040618896484375, -0.052947998046875, -0.016845703125, -0.01128387451171875, -0.006153106689453125, -0.0029621124267578125, -0.001556396484375, -0.051605224609375, 0.037628173828125, -0.00901031494140625, 0.043365478515625, 0.0196533203125, 0.0006380081176757812, -0.0038433074951171875, -0.0237884521484375, 0.041717529296875, 0.028076171875, -0.0214385986328125, -0.00867462158203125, 0.0277557373046875, -0.0389404296875, 0.0016384124755859375, 0.0212249755859375, 0.00435638427734375, 0.012847900390625, 0.02496337890625, 0.049468994140625, 0.01934814453125, -0.01462554931640625, 0.041717529296875, -0.01476287841796875, -0.0300750732421875, -0.0197296142578125, -0.002529144287109375, 
0.01293182373046875, 0.03289794921875, 0.01406097412109375, 0.004459381103515625, -0.022216796875, -0.042449951171875, 0.040069580078125, 0.057373046875, -0.0323486328125, -0.04266357421875, 0.049041748046875, -0.006694793701171875, -0.00628662109375, 0.0423583984375, -0.0057830810546875, -0.054718017578125, 0.0758056640625, 0.0223388671875, 0.04510498046875, -0.04058837890625, 0.0181732177734375, 0.06640625, 0.0010690689086914062, 0.0088348388671875, 0.02410888671875, 0.0247344970703125, -0.032440185546875, 0.0039825439453125, -0.04583740234375, 0.0114593505859375, 0.040679931640625, -0.034088134765625, 0.0260772705078125, -0.05926513671875, -0.0262908935546875, 0.01398468017578125, 0.034759521484375, -0.062744140625, 0.0240936279296875, 0.004650115966796875, 0.08319091796875, -0.05926513671875, 0.06646728515625, 0.05615234375, -0.0266876220703125, -0.07025146484375, -0.01483154296875, 0.0152130126953125, -0.058380126953125, 0.028472900390625, 0.018096923828125, 0.0166778564453125, -0.0125732421875, -0.045501708984375, -0.03643798828125, 0.0916748046875, 0.037384033203125, -0.01216888427734375, 0.00983428955078125, -0.0238037109375, 0.0285491943359375, -0.019866943359375, 0.0362548828125, 0.0423583984375, 0.040283203125, 0.0173797607421875, -0.06951904296875, 0.0258941650390625, -0.0301666259765625, -0.0120391845703125, 0.0245208740234375, -0.099853515625, 0.07647705078125, -0.0269927978515625, -0.0037708282470703125, 0.01441192626953125, 0.060943603515625, 0.03216552734375, 0.005268096923828125, 0.0310516357421875, 0.05902099609375, 0.03741455078125, -0.015838623046875, 0.08056640625, 0.00032401084899902344, 0.0299530029296875, 0.020538330078125, 0.040496826171875, 0.026947021484375, 0.0281524658203125, -0.030853271484375, 0.00872802734375, 0.068115234375, -0.01454925537109375, 0.0108184814453125, 0.016143798828125, -0.0137481689453125, -0.009185791015625, -0.0173797607421875, -0.0447998046875, 0.0277252197265625, 0.01291656494140625, -0.0182342529296875, 
0.0002760887145996094, -0.0029964447021484375, 0.034393310546875, -0.0024471282958984375, -0.01464080810546875, 0.035858154296875, 0.0191192626953125, -0.042999267578125, 0.0386962890625, -0.00530242919921875, 0.07568359375, -0.02740478515625, 0.002269744873046875, -0.023223876953125, 0.0219268798828125, -0.0201263427734375, -0.08673095703125, 0.0233612060546875, -0.01010894775390625, 0.015655517578125, -0.005397796630859375, 0.047393798828125, -0.032562255859375, -0.018951416015625, 0.037689208984375, 0.025360107421875, 0.029541015625, 0.004024505615234375, -0.0888671875, 0.01873779296875, 0.0082550048828125, -0.041473388671875, 0.0323486328125, 0.037933349609375, 0.018585205078125, 0.05224609375, 0.031494140625, 0.013641357421875, 0.007762908935546875, -0.0245513916015625, 0.059356689453125, -0.049713134765625, -0.033966064453125, -0.0654296875, 0.03363037109375, -0.02178955078125, -0.050537109375, 0.05889892578125, 0.033721923828125, 0.041259765625, 0.0095062255859375, 0.03680419921875, -0.0357666015625, 0.0265655517578125, -0.032623291015625, 0.051727294921875, -0.059906005859375, -0.02215576171875, -0.032257080078125, -0.06280517578125, -0.0206756591796875, 0.05419921875, 0.0035762786865234375, 0.019561767578125, 0.0288238525390625, 0.044921875, -0.0037097930908203125, -0.0180511474609375, -0.004169464111328125, 0.01800537109375, 0.004032135009765625, 0.061370849609375, 0.0408935546875, -0.057220458984375, 0.01444244384765625, -0.049835205078125, -0.0225830078125, -0.022216796875, -0.05535888671875, -0.0797119140625, -0.05908203125, -0.040740966796875, -0.05072021484375, -0.0233154296875, 0.08380126953125, 0.07257080078125, -0.041015625, -0.01123809814453125, 0.017852783203125, 0.00881195068359375, -0.017333984375, -0.01904296875, 0.03924560546875, 0.021942138671875, -0.0760498046875, -0.0196685791015625, 0.005771636962890625, 0.043914794921875, 0.023040771484375, -0.0287017822265625, -0.016510009765625, -0.00724029541015625, 0.030029296875, 0.060333251953125, 
-0.051361083984375, -0.032257080078125, 0.00321197509765625, -0.0190887451171875, 0.01885986328125, 0.025787353515625, -0.03411865234375, -0.0061492919921875, 0.03875732421875, 0.0120391845703125, 0.059906005859375, 0.00945281982421875, 0.0172882080078125, -0.04681396484375, 0.049468994140625, -0.0009698867797851562, 0.0260467529296875, 0.027313232421875, -0.0300750732421875, 0.0552978515625, 0.037811279296875, -0.03558349609375, -0.07293701171875, -0.0223541259765625, -0.10760498046875, -0.0019273757934570312, 0.057647705078125, -0.0127716064453125, -0.041900634765625, 0.041229248046875, -0.0236663818359375, 0.040313720703125, -0.018157958984375, 0.019989013671875, 0.028289794921875, -0.02410888671875, -0.03851318359375, -0.040374755859375, 0.05224609375, 0.0250091552734375, -0.054351806640625, -0.026123046875, -0.0012731552124023438, 0.0362548828125, 0.0162353515625, 0.058013916015625, -0.01641845703125, 0.01361083984375, 0.001445770263671875, 0.01284027099609375, -0.0007472038269042969, -0.0015163421630859375, -0.01422119140625, -0.016143798828125, -0.02447509765625, -0.044464111328125 ] ]
microsoft/dit-base
2023-02-27T17:55:38.000Z
[ "transformers", "pytorch", "beit", "dit", "arxiv:2203.02378", "region:us" ]
null
microsoft
null
null
microsoft/dit-base
11
8,786
transformers
2022-03-07T17:18:46
--- tags: - dit inference: false --- # Document Image Transformer (base-sized model) Document Image Transformer (DiT) model pre-trained on IIT-CDIP (Lewis et al., 2006), a dataset that includes 42 million document images. It was introduced in the paper [DiT: Self-supervised Pre-training for Document Image Transformer](https://arxiv.org/abs/2203.02378) by Li et al. and first released in [this repository](https://github.com/microsoft/unilm/tree/master/dit). Note that DiT is identical to the architecture of [BEiT](https://huggingface.co/docs/transformers/model_doc/beit). Disclaimer: The team releasing DiT did not write a model card for this model so this model card has been written by the Hugging Face team. ## Model description The Document Image Transformer (DiT) is a transformer encoder model (BERT-like) pre-trained on a large collection of images in a self-supervised fashion. The pre-training objective for the model is to predict visual tokens from the encoder of a discrete VAE (dVAE), based on masked patches. Images are presented to the model as a sequence of fixed-size patches (resolution 16x16), which are linearly embedded. One also adds absolute position embeddings before feeding the sequence to the layers of the Transformer encoder. By pre-training the model, it learns an inner representation of images that can then be used to extract features useful for downstream tasks: if you have a dataset of labeled document images for instance, you can train a standard classifier by placing a linear layer on top of the pre-trained encoder. ## Intended uses & limitations You can use the raw model for encoding document images into a vector space, but it's mostly meant to be fine-tuned on tasks like document image classification, table detection or document layout analysis. See the [model hub](https://huggingface.co/models?search=microsoft/dit) to look for fine-tuned versions on a task that interests you. 
### How to use Here is how to use this model in PyTorch: ```python from transformers import BeitImageProcessor, BeitForMaskedImageModeling import torch from PIL import Image image = Image.open('path_to_your_document_image').convert('RGB') processor = BeitImageProcessor.from_pretrained("microsoft/dit-base") model = BeitForMaskedImageModeling.from_pretrained("microsoft/dit-base") num_patches = (model.config.image_size // model.config.patch_size) ** 2 pixel_values = processor(images=image, return_tensors="pt").pixel_values # create random boolean mask of shape (batch_size, num_patches) bool_masked_pos = torch.randint(low=0, high=2, size=(1, num_patches)).bool() outputs = model(pixel_values, bool_masked_pos=bool_masked_pos) loss, logits = outputs.loss, outputs.logits ``` ### BibTeX entry and citation info ```bibtex @article{Lewis2006BuildingAT, title={Building a test collection for complex document information processing}, author={David D. Lewis and Gady Agam and Shlomo Engelson Argamon and Ophir Frieder and David A. Grossman and Jefferson Heard}, journal={Proceedings of the 29th annual international ACM SIGIR conference on Research and development in information retrieval}, year={2006} } ```
3,165
[ [ -0.042449951171875, -0.0438232421875, 0.0192108154296875, -0.006076812744140625, -0.021240234375, -0.01111602783203125, 0.00594329833984375, -0.0287322998046875, -0.00738525390625, 0.0177001953125, -0.033782958984375, -0.0229339599609375, -0.06982421875, -0.006214141845703125, -0.049957275390625, 0.08221435546875, -0.0121002197265625, -0.013275146484375, -0.002857208251953125, -0.00058746337890625, -0.023193359375, -0.028900146484375, -0.02252197265625, -0.0202178955078125, 0.0202484130859375, 0.00567626953125, 0.043121337890625, 0.033447265625, 0.057098388671875, 0.035491943359375, 0.01322174072265625, -0.00965118408203125, -0.037384033203125, -0.022918701171875, -0.0002949237823486328, -0.039947509765625, -0.0258026123046875, 0.0235137939453125, 0.0294036865234375, 0.0159759521484375, 0.0135345458984375, 0.00823974609375, -0.0007147789001464844, 0.04656982421875, -0.033050537109375, 0.010528564453125, -0.039703369140625, 0.004474639892578125, -0.006214141845703125, -0.0066375732421875, -0.0265960693359375, -0.0138092041015625, 0.01348114013671875, -0.037353515625, 0.0611572265625, -0.01387786865234375, 0.094970703125, 0.0189208984375, -0.0188446044921875, -0.027496337890625, -0.052459716796875, 0.04052734375, -0.0386962890625, 0.0305938720703125, 0.01067352294921875, 0.018463134765625, -0.0006251335144042969, -0.08905029296875, -0.059600830078125, -0.01336669921875, -0.0242156982421875, 0.0263671875, -0.038848876953125, 0.0144500732421875, 0.038482666015625, 0.04486083984375, -0.04766845703125, -0.00489044189453125, -0.0489501953125, -0.015106201171875, 0.03338623046875, -0.0193328857421875, 0.027801513671875, -0.01482391357421875, -0.036834716796875, -0.03338623046875, -0.0119171142578125, -0.0138092041015625, 0.02593994140625, -0.02093505859375, -0.006031036376953125, 0.034637451171875, 0.00988006591796875, 0.054107666015625, 0.0251007080078125, -0.0096588134765625, 0.02783203125, -0.00487518310546875, -0.02496337890625, -0.007534027099609375, 
0.053558349609375, 0.020111083984375, 0.013916015625, -0.00693511962890625, -0.0275726318359375, 0.0266876220703125, 0.0255889892578125, -0.0882568359375, -0.0447998046875, 0.006755828857421875, -0.043548583984375, -0.02410888671875, 0.015167236328125, -0.04888916015625, -0.007671356201171875, -0.0253448486328125, 0.05645751953125, -0.052764892578125, -0.0191802978515625, -0.0010175704956054688, -0.0184478759765625, 0.020904541015625, 0.03607177734375, -0.0489501953125, 0.00937652587890625, 0.01125335693359375, 0.0709228515625, -0.00392913818359375, -0.033294677734375, -0.023193359375, -0.015655517578125, -0.01348876953125, 0.05633544921875, -0.01557159423828125, -0.0208587646484375, -0.0004515647888183594, 0.01654052734375, -0.000518798828125, -0.02581787109375, 0.0284576416015625, -0.048370361328125, 0.0279083251953125, -0.005695343017578125, -0.034881591796875, -0.0102386474609375, 0.0205535888671875, -0.050628662109375, 0.0863037109375, 0.03765869140625, -0.076171875, 0.025604248046875, -0.042510986328125, -0.01271820068359375, -0.00437164306640625, -0.0083465576171875, -0.050537109375, 0.00983428955078125, 0.0166473388671875, 0.0443115234375, -0.01261138916015625, -0.0004916191101074219, -0.00977325439453125, -0.031463623046875, 0.00489044189453125, -0.0269012451171875, 0.054290771484375, 0.0286712646484375, -0.033905029296875, 0.0012054443359375, -0.05523681640625, -0.0141143798828125, 0.0298919677734375, -0.0189361572265625, -0.002292633056640625, -0.032958984375, 0.02703857421875, 0.035888671875, 0.02716064453125, -0.050689697265625, 0.0291595458984375, -0.014190673828125, 0.032470703125, 0.049774169921875, -0.0214385986328125, 0.03533935546875, -0.0199127197265625, 0.0318603515625, 0.013427734375, 0.0139312744140625, -0.027191162109375, -0.0143280029296875, -0.057098388671875, -0.0411376953125, 0.0273895263671875, 0.0278778076171875, -0.050872802734375, 0.038787841796875, -0.0225067138671875, -0.052947998046875, -0.0298919677734375, -0.005321502685546875, 
0.0203094482421875, 0.047698974609375, 0.0235443115234375, -0.026153564453125, -0.024444580078125, -0.0657958984375, 0.024932861328125, -0.0009822845458984375, -0.006191253662109375, -0.0084686279296875, 0.05169677734375, -0.0244140625, 0.0701904296875, -0.042205810546875, -0.03289794921875, -0.00469207763671875, 0.027069091796875, 0.0257110595703125, 0.037384033203125, 0.0523681640625, -0.068359375, -0.048248291015625, -0.0214385986328125, -0.049346923828125, 0.0023040771484375, -0.01678466796875, -0.0087738037109375, 0.0254364013671875, 0.0386962890625, -0.060150146484375, 0.062103271484375, 0.03656005859375, -0.017181396484375, 0.039306640625, -0.0364990234375, 0.0139312744140625, -0.0849609375, -0.0003592967987060547, 0.01080322265625, -0.0183258056640625, -0.034088134765625, -0.006927490234375, 0.0241241455078125, -0.01812744140625, -0.0217742919921875, 0.0290985107421875, -0.0601806640625, -0.01324462890625, -0.02899169921875, -0.006839752197265625, 0.013824462890625, 0.043212890625, 0.01285552978515625, 0.048431396484375, 0.053558349609375, -0.023712158203125, 0.03167724609375, 0.028900146484375, -0.019744873046875, 0.06390380859375, -0.056915283203125, 0.0247802734375, -0.0180511474609375, 0.018890380859375, -0.0889892578125, -0.007808685302734375, -0.004161834716796875, -0.028045654296875, 0.062286376953125, -0.0171966552734375, -0.053741455078125, -0.05389404296875, -0.018157958984375, 0.01444244384765625, 0.05389404296875, -0.051177978515625, 0.04656982421875, 0.0113067626953125, 0.0221710205078125, -0.032989501953125, -0.0540771484375, -0.007785797119140625, -0.005359649658203125, -0.0679931640625, 0.05645751953125, -0.007762908935546875, 0.0131378173828125, 0.01507568359375, -0.00882720947265625, -0.0014581680297851562, -0.0167083740234375, 0.025299072265625, 0.031982421875, -0.0056304931640625, 0.01210784912109375, -0.0261077880859375, -0.0196533203125, -0.0106964111328125, -0.01727294921875, 0.0474853515625, -0.03009033203125, -0.042022705078125, 
-0.019805908203125, 0.0205230712890625, 0.039276123046875, -0.0189208984375, 0.04132080078125, 0.0736083984375, -0.035400390625, -0.0054779052734375, -0.05438232421875, -0.015167236328125, -0.038177490234375, 0.045013427734375, -0.04083251953125, -0.032196044921875, 0.048248291015625, -0.011627197265625, 0.0026531219482421875, 0.058502197265625, 0.048095703125, -0.020263671875, 0.053192138671875, 0.05780029296875, -0.000537872314453125, 0.055938720703125, -0.057586669921875, 0.007251739501953125, -0.05303955078125, -0.005268096923828125, -0.0218963623046875, -0.03338623046875, -0.04180908203125, -0.0304718017578125, 0.024627685546875, 0.019317626953125, -0.0115814208984375, 0.0379638671875, -0.08343505859375, 0.0252532958984375, 0.058319091796875, 0.0166473388671875, -0.0025730133056640625, 0.0027313232421875, 0.0087890625, 0.007495880126953125, -0.039154052734375, -0.032989501953125, 0.07257080078125, 0.0283050537109375, 0.076171875, -0.0156402587890625, 0.061553955078125, 0.0167388916015625, 0.0147705078125, -0.049407958984375, 0.0206451416015625, -0.01044464111328125, -0.0400390625, -0.01361846923828125, -0.0013561248779296875, -0.0899658203125, 0.0015211105346679688, -0.00832366943359375, -0.05255126953125, 0.0230255126953125, 0.008270263671875, -0.0006966590881347656, 0.033050537109375, -0.07635498046875, 0.072998046875, -0.0263519287109375, -0.016937255859375, 0.0074920654296875, -0.047698974609375, 0.0026988983154296875, -0.00782012939453125, -0.0132293701171875, 0.01153564453125, 0.02545166015625, 0.06561279296875, -0.0443115234375, 0.0579833984375, -0.013397216796875, 0.0145263671875, 0.031982421875, 0.0006041526794433594, 0.04571533203125, -0.0266876220703125, -0.0017490386962890625, 0.0242156982421875, 0.0264434814453125, -0.031280517578125, -0.04388427734375, 0.03863525390625, -0.08575439453125, -0.03472900390625, -0.049072265625, -0.039947509765625, 0.01407623291015625, 0.0310516357421875, 0.060302734375, 0.036163330078125, -0.01042938232421875, 
0.0087432861328125, 0.0518798828125, -0.016357421875, 0.03857421875, 0.015533447265625, -0.006443023681640625, -0.01148223876953125, 0.056854248046875, 0.031494140625, 0.01204681396484375, 0.0216827392578125, 0.0103607177734375, -0.0306854248046875, -0.04986572265625, -0.0229339599609375, 0.0216217041015625, -0.07232666015625, -0.02752685546875, -0.048553466796875, -0.04248046875, -0.035125732421875, -0.0266571044921875, -0.0222625732421875, -0.002559661865234375, -0.03857421875, -0.003269195556640625, 0.037506103515625, 0.057098388671875, 0.0016031265258789062, 0.043212890625, -0.048828125, 0.0272674560546875, 0.030303955078125, 0.03216552734375, -0.01461029052734375, -0.053955078125, -0.00617218017578125, -0.01316070556640625, -0.039093017578125, -0.07470703125, 0.034332275390625, -0.0007429122924804688, 0.0501708984375, 0.006580352783203125, 0.006595611572265625, 0.038787841796875, -0.0292510986328125, 0.039642333984375, 0.027801513671875, -0.053192138671875, 0.04620361328125, -0.02203369140625, 0.02081298828125, 0.0216217041015625, 0.032073974609375, -0.022796630859375, 0.00257110595703125, -0.08294677734375, -0.048095703125, 0.066162109375, 0.0241241455078125, 0.01175689697265625, 0.01192474365234375, 0.0223236083984375, -0.0021572113037109375, 0.0038928985595703125, -0.05548095703125, -0.01438140869140625, -0.048858642578125, -0.0256805419921875, -0.002887725830078125, -0.0288238525390625, -0.0013599395751953125, -0.0249786376953125, 0.04254150390625, -0.0023899078369140625, 0.052215576171875, 0.0380859375, -0.03570556640625, 0.0050201416015625, -0.01153564453125, 0.0157928466796875, 0.0311737060546875, -0.037078857421875, 0.0194244384765625, -0.004093170166015625, -0.053619384765625, 0.0021800994873046875, 0.03216552734375, 0.0012140274047851562, 0.0099029541015625, 0.0263519287109375, 0.0662841796875, -0.0224609375, -0.007476806640625, 0.050628662109375, -0.0017423629760742188, -0.02685546875, -0.0297088623046875, -0.0103759765625, 0.005306243896484375, 
0.01348876953125, 0.0226898193359375, 0.006649017333984375, 0.001239776611328125, -0.03729248046875, 0.029327392578125, 0.028961181640625, -0.0282135009765625, -0.024444580078125, 0.0416259765625, 0.00498199462890625, -0.020721435546875, 0.0635986328125, -0.01151275634765625, -0.045074462890625, 0.063720703125, 0.0465087890625, 0.0570068359375, -0.00036334991455078125, 0.0130767822265625, 0.037261962890625, 0.03387451171875, 0.007904052734375, -0.002040863037109375, 0.0021877288818359375, -0.053253173828125, 0.01019287109375, -0.05230712890625, 0.003326416015625, 0.01788330078125, -0.0273895263671875, 0.039276123046875, -0.0274810791015625, -0.01532745361328125, 0.0204315185546875, 0.01904296875, -0.08837890625, 0.03155517578125, 0.01544189453125, 0.070068359375, -0.05682373046875, 0.07574462890625, 0.0400390625, -0.0638427734375, -0.059417724609375, -0.0107574462890625, -0.01198577880859375, -0.06591796875, 0.056304931640625, 0.029083251953125, 0.005886077880859375, 0.01143646240234375, -0.032470703125, -0.0599365234375, 0.0933837890625, 0.032379150390625, -0.037841796875, -0.00449371337890625, 0.0010404586791992188, 0.0369873046875, -0.01922607421875, 0.05059814453125, 0.0211181640625, 0.0236358642578125, 0.0310516357421875, -0.0489501953125, 0.01462554931640625, -0.035888671875, 0.009124755859375, 0.0107574462890625, -0.04266357421875, 0.07830810546875, -0.0032978057861328125, -0.01334381103515625, -0.00820159912109375, 0.03985595703125, -0.0027523040771484375, 0.00534820556640625, 0.050018310546875, 0.0521240234375, 0.050872802734375, -0.01401519775390625, 0.096435546875, -0.01485443115234375, 0.037841796875, 0.07025146484375, 0.007564544677734375, 0.027130126953125, 0.0247039794921875, -0.0223388671875, 0.028900146484375, 0.061004638671875, -0.03692626953125, 0.040191650390625, 0.01305389404296875, 0.0030956268310546875, 0.0016698837280273438, 0.0177154541015625, -0.034881591796875, 0.0286407470703125, 0.00860595703125, -0.044586181640625, -0.0308074951171875, 
0.0038394927978515625, -0.01024627685546875, -0.014434814453125, -0.0015392303466796875, 0.04583740234375, -0.0089111328125, -0.039276123046875, 0.047210693359375, -0.0001875162124633789, 0.06524658203125, -0.050872802734375, -0.002239227294921875, -0.00495147705078125, 0.0231781005859375, -0.02276611328125, -0.06646728515625, 0.0294342041015625, -0.01116943359375, -0.022705078125, -0.01084136962890625, 0.07696533203125, -0.0158538818359375, -0.052947998046875, 0.0189971923828125, 0.01812744140625, 0.020477294921875, -0.0196380615234375, -0.065185546875, 0.004825592041015625, -0.006622314453125, -0.0401611328125, 0.0377197265625, 0.04315185546875, 0.0014753341674804688, 0.0183563232421875, 0.051971435546875, -0.01367950439453125, 0.0196533203125, -0.008697509765625, 0.08160400390625, -0.023040771484375, -0.0233306884765625, -0.05523681640625, 0.0531005859375, -0.015533447265625, -0.0234375, 0.051361083984375, 0.035675048828125, 0.0684814453125, -0.0211029052734375, 0.059783935546875, -0.02581787109375, -0.0037631988525390625, -0.0214080810546875, 0.0572509765625, -0.0428466796875, -0.0226287841796875, -0.0361328125, -0.06842041015625, -0.007350921630859375, 0.063232421875, -0.033538818359375, 0.01318359375, 0.042877197265625, 0.06842041015625, -0.0298919677734375, -0.0225677490234375, 0.032379150390625, 0.00836181640625, 0.0308074951171875, 0.0218353271484375, 0.043975830078125, -0.05975341796875, 0.04449462890625, -0.043701171875, -0.025299072265625, -0.003170013427734375, -0.0672607421875, -0.07470703125, -0.07000732421875, -0.037841796875, -0.035888671875, -0.0261383056640625, 0.037322998046875, 0.08331298828125, -0.04437255859375, 0.01483917236328125, -0.0171661376953125, -0.0115966796875, 0.0026187896728515625, -0.0164337158203125, 0.059295654296875, -0.015411376953125, -0.06585693359375, -0.0280303955078125, 0.0027103424072265625, 0.0242919921875, -0.012603759765625, 0.0007061958312988281, -0.0260162353515625, -0.0038204193115234375, 0.0421142578125, 
0.0135650634765625, -0.036224365234375, -0.0201568603515625, -0.003307342529296875, -0.01654052734375, 0.020263671875, 0.0322265625, -0.05023193359375, 0.0467529296875, 0.0455322265625, 0.03375244140625, 0.0836181640625, -0.018035888671875, 0.00942230224609375, -0.052215576171875, 0.037353515625, 0.009063720703125, 0.049560546875, 0.0255279541015625, -0.040008544921875, 0.038665771484375, 0.0259857177734375, -0.041290283203125, -0.052032470703125, 0.00714874267578125, -0.0933837890625, -0.0240936279296875, 0.06390380859375, -0.0234222412109375, -0.027069091796875, 0.0226593017578125, -0.035675048828125, 0.04345703125, -0.00910186767578125, 0.060760498046875, 0.04095458984375, 0.00262451171875, -0.0281524658203125, -0.02764892578125, 0.022491455078125, -0.0029506683349609375, -0.055419921875, -0.023712158203125, 0.03240966796875, 0.035736083984375, 0.041961669921875, 0.05926513671875, -0.0156402587890625, 0.00824737548828125, 0.0224609375, 0.036529541015625, -0.0191802978515625, -0.01015472412109375, -0.01334381103515625, 0.0055694580078125, -0.0112457275390625, -0.04180908203125 ] ]
beomi/kykim-gpt3-kor-small_based_on_gpt2
2021-11-16T15:21:35.000Z
[ "transformers", "pytorch", "tf", "jax", "gpt2", "text-generation", "ko", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
beomi
null
null
beomi/kykim-gpt3-kor-small_based_on_gpt2
5
8,773
transformers
2022-03-02T23:29:05
--- language: ko --- # Bert base model for Korean ## Update - Update at 2021.11.17 : Add Native Support for BERT Tokenizer (works with AutoTokenizer, pipeline) --- * 70GB Korean text dataset and 42000 lower-cased subwords are used * Check the model performance and other language models for Korean in [github](https://github.com/kiyoungkim1/LM-kor) ```python from transformers import pipeline pipe = pipeline('text-generation', model='beomi/kykim-gpt3-kor-small_based_on_gpt2') print(pipe("안녕하세요! 오늘은")) # [{'generated_text': '안녕하세요! 오늘은 제가 요즘 사용하고 있는 클렌징워터를 소개해드리려고 해요! 바로 이 제품!! 바로 이'}] ```
600
[ [ -0.0128021240234375, -0.03515625, 0.019073486328125, 0.0245208740234375, -0.045867919921875, -0.0004379749298095703, -0.031158447265625, -0.006801605224609375, 0.01163482666015625, 0.02490234375, -0.03253173828125, -0.048980712890625, -0.05291748046875, 0.00604248046875, -0.00818634033203125, 0.07904052734375, -0.01314544677734375, 0.00991058349609375, 0.0201416015625, 0.0181884765625, -0.01143646240234375, -0.043609619140625, -0.045074462890625, -0.0439453125, 0.00740814208984375, 0.0186920166015625, 0.0418701171875, 0.0298004150390625, 0.029327392578125, 0.0265045166015625, -0.005634307861328125, -0.01312255859375, -0.0225372314453125, 0.00991058349609375, 0.0001226663589477539, -0.0157012939453125, -0.031463623046875, -0.01116180419921875, 0.041107177734375, 0.0264892578125, 0.0106353759765625, 0.0176544189453125, -0.00890350341796875, 0.055877685546875, -0.0180816650390625, 0.034271240234375, -0.05670166015625, -0.00872039794921875, -0.0126953125, 0.032684326171875, -0.026031494140625, -0.0144805908203125, 0.0248260498046875, -0.038543701171875, 0.0198974609375, -0.002735137939453125, 0.10308837890625, 0.01468658447265625, -0.044586181640625, -0.037261962890625, -0.0288238525390625, 0.06536865234375, -0.0709228515625, 0.024322509765625, 0.0186920166015625, 0.01314544677734375, -0.01219940185546875, -0.08013916015625, -0.04949951171875, 0.0027256011962890625, -0.0215606689453125, 0.0194549560546875, -0.0077972412109375, 0.0150146484375, 0.0065155029296875, 0.0258636474609375, -0.051177978515625, -0.0059356689453125, -0.0306854248046875, -0.0401611328125, 0.052642822265625, -0.00005829334259033203, 0.022735595703125, -0.06646728515625, -0.0134735107421875, -0.010589599609375, -0.0201873779296875, 0.005199432373046875, 0.0295257568359375, 0.035125732421875, -0.0238189697265625, 0.05596923828125, -0.029144287109375, 0.0214996337890625, 0.0159149169921875, 0.0000985860824584961, 0.0487060546875, -0.04608154296875, -0.031646728515625, 0.0055999755859375, 
0.07421875, 0.01216888427734375, 0.0302734375, 0.004314422607421875, -0.006252288818359375, 0.00269317626953125, 0.00806427001953125, -0.0628662109375, -0.045745849609375, 0.0187225341796875, -0.06988525390625, -0.014373779296875, 0.005214691162109375, -0.057220458984375, -0.0037021636962890625, -0.021026611328125, 0.030242919921875, -0.033233642578125, -0.0310821533203125, -0.0001156926155090332, -0.0032978057861328125, 0.0120849609375, -0.01204681396484375, -0.0740966796875, 0.001644134521484375, 0.03240966796875, 0.0528564453125, 0.0004000663757324219, -0.01387786865234375, 0.0010547637939453125, -0.0162811279296875, -0.0136871337890625, 0.03363037109375, -0.006763458251953125, -0.023895263671875, 0.00771331787109375, 0.0209197998046875, -0.0203704833984375, -0.0236053466796875, 0.053985595703125, -0.035491943359375, 0.0204925537109375, -0.0170745849609375, -0.0419921875, -0.0153045654296875, 0.0011920928955078125, -0.033447265625, 0.08770751953125, 0.0269622802734375, -0.04669189453125, 0.041107177734375, -0.04266357421875, -0.051116943359375, 0.0084075927734375, 0.0035610198974609375, -0.04779052734375, -0.0007801055908203125, 0.03607177734375, 0.04510498046875, 0.01377105712890625, 0.024444580078125, 0.00007021427154541016, -0.01056671142578125, -0.005870819091796875, -0.016845703125, 0.060760498046875, 0.0291595458984375, -0.015960693359375, 0.01514434814453125, -0.066650390625, 0.0160064697265625, 0.00635528564453125, -0.041168212890625, -0.031341552734375, -0.00799560546875, 0.033599853515625, 0.0211639404296875, 0.044342041015625, -0.05670166015625, 0.01342010498046875, -0.0355224609375, 0.0293731689453125, 0.050537109375, -0.0107269287109375, 0.04669189453125, -0.0097808837890625, 0.0261383056640625, 0.00577545166015625, -0.0010786056518554688, -0.0201568603515625, -0.0423583984375, -0.056976318359375, -0.043212890625, 0.05181884765625, 0.046783447265625, -0.0753173828125, 0.042510986328125, -0.04180908203125, -0.050689697265625, -0.056640625, 
0.004241943359375, 0.0214691162109375, 0.02972412109375, 0.017547607421875, 0.007144927978515625, -0.0755615234375, -0.06988525390625, -0.005950927734375, -0.033538818359375, -0.025146484375, 0.027679443359375, 0.052642822265625, -0.035125732421875, 0.07745361328125, -0.021759033203125, -0.0156097412109375, -0.029022216796875, 0.0167083740234375, 0.064697265625, 0.0323486328125, 0.037200927734375, -0.050262451171875, -0.08062744140625, 0.00554656982421875, -0.04296875, -0.0162353515625, -0.0095977783203125, -0.0171051025390625, 0.0416259765625, 0.028656005859375, -0.06256103515625, 0.02252197265625, 0.04254150390625, -0.040283203125, 0.046783447265625, -0.007381439208984375, -0.0012340545654296875, -0.10137939453125, 0.01120758056640625, -0.0172119140625, -0.0165557861328125, -0.052001953125, -0.0110321044921875, 0.01904296875, 0.01451873779296875, -0.025054931640625, 0.05291748046875, -0.01519012451171875, 0.007091522216796875, -0.01268768310546875, -0.006313323974609375, -0.015838623046875, 0.03753662109375, 0.007541656494140625, 0.046295166015625, 0.0305938720703125, -0.043182373046875, 0.02978515625, 0.0270538330078125, -0.045166015625, 0.00521087646484375, -0.055389404296875, -0.006130218505859375, -0.004413604736328125, 0.019134521484375, -0.0887451171875, -0.03021240234375, 0.02789306640625, -0.055389404296875, 0.0204620361328125, -0.043121337890625, -0.04962158203125, -0.044525146484375, -0.0158233642578125, 0.00835418701171875, 0.07293701171875, -0.04510498046875, 0.05023193359375, 0.0162353515625, -0.035858154296875, -0.034515380859375, -0.045989990234375, -0.0093994140625, -0.006298065185546875, -0.055389404296875, 0.03594970703125, 0.00643157958984375, 0.006397247314453125, 0.006633758544921875, 0.00872039794921875, 0.002399444580078125, -0.026580810546875, 0.00013113021850585938, 0.0406494140625, -0.00665283203125, 0.00186920166015625, 0.033111572265625, -0.02044677734375, -0.0015583038330078125, -0.01515960693359375, 0.0703125, -0.007354736328125, 
-0.0029811859130859375, -0.0198974609375, 0.00499725341796875, 0.03997802734375, 0.0169525146484375, 0.04083251953125, 0.059600830078125, -0.026611328125, 0.006298065185546875, -0.01505279541015625, -0.003631591796875, -0.037384033203125, 0.056488037109375, -0.039764404296875, -0.049041748046875, 0.05670166015625, 0.009552001953125, 0.004093170166015625, 0.0440673828125, 0.0640869140625, -0.006549835205078125, 0.0889892578125, 0.016143798828125, -0.02911376953125, 0.009918212890625, -0.01222991943359375, 0.029266357421875, -0.065673828125, -0.011474609375, -0.01708984375, -0.0206146240234375, -0.0660400390625, -0.006282806396484375, 0.0164947509765625, 0.0084228515625, -0.032379150390625, 0.0262451171875, -0.038665771484375, 0.0119781494140625, 0.04937744140625, -0.01259613037109375, -0.00927734375, 0.0174407958984375, -0.036468505859375, -0.0216064453125, -0.046112060546875, -0.03546142578125, 0.0838623046875, 0.040435791015625, 0.053497314453125, -0.00405120849609375, 0.049102783203125, 0.005512237548828125, 0.0226593017578125, -0.056854248046875, 0.03509521484375, -0.035430908203125, -0.0528564453125, -0.0262908935546875, -0.0198974609375, -0.0643310546875, 0.0224151611328125, 0.017059326171875, -0.05169677734375, 0.0023326873779296875, 0.00588226318359375, -0.00693511962890625, 0.01690673828125, -0.05096435546875, 0.053314208984375, 0.01114654541015625, 0.018035888671875, 0.02313232421875, -0.036590576171875, 0.021087646484375, -0.0016613006591796875, 0.003662109375, -0.00372314453125, 0.006679534912109375, 0.047637939453125, -0.037322998046875, 0.0618896484375, -0.028076171875, -0.0085601806640625, 0.0229034423828125, -0.0202789306640625, 0.049285888671875, 0.0163421630859375, -0.00009882450103759766, 0.027008056640625, 0.0005517005920410156, -0.0261077880859375, -0.01983642578125, 0.042144775390625, -0.057861328125, 0.00782012939453125, -0.03839111328125, -0.038665771484375, 0.0035400390625, 0.03192138671875, 0.061004638671875, -0.0087890625, 
0.024993896484375, 0.011474609375, 0.0224456787109375, -0.0148162841796875, 0.047943115234375, 0.038055419921875, -0.03692626953125, -0.0596923828125, 0.060546875, 0.011505126953125, 0.005878448486328125, -0.00620269775390625, 0.006153106689453125, -0.03839111328125, -0.0239410400390625, -0.038177490234375, 0.021636962890625, -0.04779052734375, -0.01490020751953125, -0.044830322265625, -0.0478515625, -0.04766845703125, -0.005290985107421875, -0.0418701171875, -0.02593994140625, -0.0204620361328125, -0.004520416259765625, 0.0182952880859375, 0.0278472900390625, -0.006343841552734375, 0.037689208984375, -0.05841064453125, 0.025604248046875, 0.0247802734375, 0.026580810546875, -0.0007519721984863281, -0.055938720703125, -0.0234832763671875, 0.0088958740234375, -0.0086669921875, -0.041046142578125, 0.053802490234375, -0.0022487640380859375, 0.0316162109375, 0.01328277587890625, 0.0128326416015625, 0.0301361083984375, -0.0518798828125, 0.08056640625, -0.0008754730224609375, -0.079345703125, 0.0263519287109375, -0.030853271484375, 0.04412841796875, 0.034637451171875, 0.00913238525390625, -0.061431884765625, -0.0165252685546875, -0.034393310546875, -0.091796875, 0.06170654296875, 0.04449462890625, 0.025238037109375, -0.006397247314453125, 0.019195556640625, 0.0106353759765625, 0.018218994140625, -0.0726318359375, -0.038665771484375, -0.0295562744140625, -0.049285888671875, 0.01690673828125, -0.0273895263671875, 0.02789306640625, -0.02813720703125, 0.085205078125, -0.004199981689453125, 0.04217529296875, 0.0294189453125, -0.007366180419921875, -0.0025615692138671875, 0.01280975341796875, 0.0423583984375, 0.01520538330078125, -0.0221710205078125, -0.005855560302734375, 0.0157928466796875, -0.0672607421875, -0.006450653076171875, 0.01812744140625, -0.040252685546875, 0.03515625, 0.0250244140625, 0.073486328125, -0.0010662078857421875, -0.03448486328125, 0.0171966552734375, -0.005390167236328125, -0.039581298828125, -0.034637451171875, -0.014434814453125, 
0.0020427703857421875, 0.0124053955078125, 0.049560546875, -0.01064300537109375, -0.0029506683349609375, -0.004241943359375, 0.01103973388671875, 0.0233917236328125, -0.0216217041015625, -0.0206756591796875, 0.036895751953125, -0.0036907196044921875, -0.00646209716796875, 0.061126708984375, -0.034515380859375, -0.07818603515625, 0.057861328125, 0.044342041015625, 0.06622314453125, -0.0062408447265625, 0.0243072509765625, 0.041534423828125, 0.037567138671875, 0.00153350830078125, 0.05853271484375, 0.0232086181640625, -0.062225341796875, -0.03875732421875, -0.053558349609375, 0.0004138946533203125, 0.047210693359375, -0.03826904296875, 0.01611328125, -0.00615692138671875, -0.0207366943359375, -0.006870269775390625, 0.01055908203125, -0.03515625, 0.014251708984375, 0.01001739501953125, 0.05474853515625, -0.05316162109375, 0.068359375, 0.060333251953125, -0.007770538330078125, -0.046966552734375, -0.0017023086547851562, -0.0257720947265625, -0.041229248046875, 0.061492919921875, 0.010986328125, 0.0379638671875, 0.00963592529296875, -0.045166015625, -0.0875244140625, 0.08251953125, -0.007602691650390625, -0.0386962890625, 0.005035400390625, 0.026458740234375, 0.050872802734375, -0.0241241455078125, 0.0015850067138671875, 0.040283203125, 0.04400634765625, -0.00547027587890625, -0.079833984375, -0.0102081298828125, -0.040924072265625, 0.014556884765625, 0.0186614990234375, -0.04571533203125, 0.07501220703125, 0.00048351287841796875, -0.0165863037109375, 0.027069091796875, 0.05267333984375, 0.0305633544921875, 0.0270233154296875, 0.03363037109375, 0.02886962890625, 0.036376953125, -0.0236053466796875, 0.054840087890625, -0.0304107666015625, 0.0535888671875, 0.07818603515625, -0.0104217529296875, 0.042999267578125, 0.031158447265625, -0.0274810791015625, 0.037445068359375, 0.050750732421875, -0.0254364013671875, 0.05987548828125, 0.006351470947265625, -0.007686614990234375, -0.004756927490234375, 0.0157623291015625, -0.0179595947265625, 0.032012939453125, 
0.0182647705078125, -0.041778564453125, -0.0015697479248046875, 0.00849151611328125, 0.01983642578125, -0.01226043701171875, -0.0214691162109375, 0.031707763671875, -0.006862640380859375, -0.040435791015625, 0.032745361328125, 0.0236053466796875, 0.07476806640625, -0.042572021484375, 0.01528167724609375, -0.0029697418212890625, 0.01468658447265625, 0.004596710205078125, -0.04534912109375, 0.0033092498779296875, -0.00010693073272705078, -0.0322265625, -0.0021800994873046875, 0.080322265625, -0.04315185546875, -0.058319091796875, 0.01271820068359375, 0.0149688720703125, 0.0040283203125, 0.0052490234375, -0.055328369140625, -0.004047393798828125, 0.00933074951171875, -0.040313720703125, 0.00992584228515625, 0.01377105712890625, 0.004146575927734375, 0.04058837890625, 0.056640625, 0.01519012451171875, 0.035369873046875, 0.0118865966796875, 0.055267333984375, -0.0293121337890625, -0.039276123046875, -0.07049560546875, 0.055267333984375, -0.0307464599609375, -0.0242156982421875, 0.04937744140625, 0.05059814453125, 0.05731201171875, -0.038055419921875, 0.0889892578125, -0.02874755859375, 0.047393798828125, -0.0421142578125, 0.081298828125, -0.01690673828125, -0.0035572052001953125, -0.0232391357421875, -0.0537109375, -0.005558013916015625, 0.0545654296875, -0.01096343994140625, 0.0170745849609375, 0.041473388671875, 0.042144775390625, 0.0030612945556640625, -0.01297760009765625, 0.009368896484375, 0.0285491943359375, 0.02313232421875, 0.0286102294921875, 0.0245513916015625, -0.06951904296875, 0.046295166015625, -0.042572021484375, -0.00006538629531860352, -0.0004298686981201172, -0.054901123046875, -0.0855712890625, -0.041351318359375, -0.01263427734375, -0.0298919677734375, -0.02191162109375, 0.06781005859375, 0.049163818359375, -0.07733154296875, -0.0030574798583984375, -0.0206451416015625, 0.00675201416015625, 0.0008087158203125, -0.0260009765625, 0.0557861328125, -0.034210205078125, -0.0836181640625, 0.0109710693359375, -0.00748443603515625, 0.024139404296875, 
0.0003085136413574219, -0.0200958251953125, -0.016265869140625, 0.0099639892578125, 0.047607421875, 0.0002689361572265625, -0.052703857421875, -0.0180816650390625, 0.00901031494140625, -0.019378662109375, -0.00957489013671875, 0.03204345703125, -0.052154541015625, 0.0305023193359375, 0.053619384765625, 0.0233917236328125, 0.0263214111328125, -0.00919342041015625, 0.0276947021484375, -0.06524658203125, 0.0088348388671875, -0.005352020263671875, 0.02728271484375, 0.01300811767578125, -0.0310821533203125, 0.041595458984375, 0.0214080810546875, -0.05474853515625, -0.057220458984375, 0.0025691986083984375, -0.058135986328125, -0.01192474365234375, 0.08905029296875, -0.011993408203125, -0.01055908203125, -0.017547607421875, -0.03515625, 0.03619384765625, -0.03790283203125, 0.06268310546875, 0.08245849609375, 0.0034637451171875, 0.00402069091796875, -0.025421142578125, 0.0443115234375, 0.0294952392578125, -0.03985595703125, -0.01190185546875, 0.004207611083984375, 0.02117919921875, 0.02020263671875, 0.044158935546875, 0.0002853870391845703, 0.0172882080078125, -0.004558563232421875, 0.01554107666015625, 0.00254058837890625, -0.0021991729736328125, -0.00672149658203125, -0.0186614990234375, -0.037078857421875, -0.026947021484375 ] ]
internlm/internlm-chat-20b
2023-10-19T10:08:28.000Z
[ "transformers", "pytorch", "internlm", "feature-extraction", "text-generation", "custom_code", "license:apache-2.0", "region:us" ]
text-generation
internlm
null
null
internlm/internlm-chat-20b
122
8,766
transformers
2023-09-18T03:28:40
--- license: apache-2.0 pipeline_tag: text-generation --- **InternLM** <div align="center"> <img src="https://github.com/InternLM/InternLM/assets/22529082/b9788105-8892-4398-8b47-b513a292378e" width="200"/> <div>&nbsp;</div> <div align="center"> <b><font size="5">InternLM</font></b> <sup> <a href="https://internlm.intern-ai.org.cn/"> <i><font size="4">HOT</font></i> </a> </sup> <div>&nbsp;</div> </div> [![evaluation](https://github.com/InternLM/InternLM/assets/22529082/f80a2a58-5ddf-471a-8da4-32ab65c8fd3b)](https://github.com/internLM/OpenCompass/) [💻Github Repo](https://github.com/InternLM/InternLM) • [🤔Reporting Issues](https://github.com/InternLM/InternLM/issues/new) </div> ## Introduction The Shanghai Artificial Intelligence Laboratory, in collaboration with SenseTime Technology, the Chinese University of Hong Kong, and Fudan University, has officially released the 20 billion parameter pretrained model, InternLM-20B. InternLM-20B was pre-trained on over **2.3T** Tokens containing high-quality English, Chinese, and code data. Additionally, the Chat version has undergone SFT and RLHF training, enabling it to better and more securely meet users' needs. In terms of model structure, InternLM-20B opted for a deeper architecture, with a depth set at 60 layers. This surpasses the conventional 7B and 13B models that utilize 32 or 40 layers. When parameters are limited, increasing the number of layers can enhance the model's overall capability. Furthermore, compared to InternLM-7B, the pre-training data used for InternLM-20B underwent higher quality cleansing and was supplemented with data rich in knowledge and designed for reinforcing understanding and reasoning capabilities. As a result, it exhibits significant improvements in understanding, reasoning, mathematical, and programming abilities—all of which test the technical proficiency of language models. 
Overall, InternLM-20B features the following characteristics: - Outstanding overall performance - Strong utility invocation capability - Supports a 16k context length (Through infererence extrapolation) - Better value alignment. ## Performance Evaluation On the 5 capability dimensions proposed by OpenCompass, InternLM-20B has achieved excellent results (the bolded scores represent the best performances within the 13B-33B parameter range). | Capability | Llama-13B | Llama2-13B | Baichuan2-13B | InternLM-20B | Llama-33B | Llama-65B | Llama2-70B | |----------|-----------|------------|---------------|--------------|-----------|-----------|------------| | Language | 42.5 | 47 | 47.5 | **55** | 44.6 | 47.1 | 51.6 | | Knowledge | 58.2 | 58.3 | 48.9 | 60.1 | **64** | 66 | 67.7 | | Understanding | 45.5 | 50.9 | 58.1 | **67.3** | 50.6 | 54.2 | 60.8 | | Reasoning | 42.7 | 43.6 | 44.2 | **54.9** | 46.4 | 49.8 | 55 | | Examination | 37.3 | 45.2 | 51.8 | **62.5** | 47.4 | 49.7 | 57.3 | | Overall | 43.8 | 47.3 | 49.4 | **59.2** | 48.9 | 51.9 | 57.4 | The table below compares the performance of mainstream open-source models on some influential and typical datasets. 
| | Benchmarks | Llama-13B | Llama2-13B | Baichuan2-13B | InternLM-20B | Llama-33B | Llama-65B | Llama2-70B | |------|------------------|-----------|------------|---------------|--------------|-----------|-----------|------------| | Examination | MMLU | 47.73 | 54.99 | 59.55 | **62.05** | 58.73 | 63.71 | 69.75 | | | C-Eval (val) | 31.83 | 41.4 | **59.01** | 58.8 | 37.47 | 40.36 | 50.13 | | | AGI-Eval | 22.03 | 30.93 | 37.37 | **44.58** | 33.53 | 33.92 | 40.02 | | Knowledge | BoolQ | 78.75 | 82.42 | 67 | **87.46** | 84.43 | 86.61 | 87.74 | | | TriviaQA | 52.47 | 59.36 | 46.61 | 57.26 | **66.24** | 69.79 | 70.71 | | | NaturalQuestions | 20.17 | 24.85 | 16.32 | 25.15 | **30.89** | 33.41 | 34.16 | | Understanding | CMRC | 9.26 | 31.59 | 29.85 | **68.78** | 14.17 | 34.73 | 43.74 | | | CSL | 55 | 58.75 | 63.12 | **65.62** | 57.5 | 59.38 | 60 | | | RACE (middle) | 53.41 | 63.02 | 68.94 | **86.35** | 64.55 | 72.35 | 81.55 | | | RACE (high) | 47.63 | 58.86 | 67.18 | **83.28** | 62.61 | 68.01 | 79.93 | | | XSum | 20.37 | 23.37 | 25.23 | **35.54** | 20.55 | 19.91 | 25.38 | | Reasoning | WinoGrande | 64.64 | 64.01 | 67.32 | **69.38** | 66.85 | 69.38 | 69.77 | | | BBH | 37.93 | 45.62 | 48.98 | **52.51** | 49.98 | 58.38 | 64.91 | | | GSM8K | 20.32 | 29.57 | **52.62** | **52.62** | 42.3 | 54.44 | 63.31 | | | PIQA | 79.71 | 79.76 | 78.07 | 80.25 | **81.34** | 82.15 | 82.54 | | Programming | HumanEval | 14.02 | 18.9 | 17.07 | **25.61** | 17.68 | 18.9 | 26.22 | | | MBPP | 20.6 | 26.8 | 30.8 | **35.6** | 28.4 | 33.6 | 39.6 | Overall, InternLM-20B comprehensively outperforms open-source models in the 13B parameter range in terms of overall capabilities, and on inference evaluation sets, it approaches or even surpasses the performance of Llama-65B. 
## Import from Transformers To load the InternLM 20B model using Transformers, use the following code: ```python >>> from transformers import AutoTokenizer, AutoModelForCausalLM >>> tokenizer = AutoTokenizer.from_pretrained("internlm/internlm-chat-20b", trust_remote_code=True) >>> model = AutoModelForCausalLM.from_pretrained("internlm/internlm-chat-20b", trust_remote_code=True).cuda() >>> model = model.eval() >>> output, history = model.chat(tokenizer, "Hello! Today is sunny, it is time to go out") >>> print(output) Hello! Today is sunny, and it sounds like a great day to go out an enjoy the weather. What would you like to do? ``` **Limitations:** Although we have made efforts to ensure the safety of the model during the training process and to encourage the model to generate text that complies with ethical and legal requirements, the model may still produce unexpected outputs due to its size and probabilistic generation paradigm. For example, the generated responses may contain biases, discrimination, or other harmful content. Please do not propagate such content. We are not responsible for any consequences resulting from the dissemination of harmful information. ## Open Source License The code is licensed under Apache-2.0, while model weights are fully open for academic research and also allow **free** commercial usage. To apply for a commercial license, please fill in the [application form (English)](https://wj.qq.com/s2/12727483/5dba/)/[申请表(中文)](https://wj.qq.com/s2/12725412/f7c1/). For other questions or collaborations, please contact <internlm@pjlab.org.cn>. 
## 简介 上海人工智能实验室与商汤科技联合香港中文大学和复旦大学正式推出书生·浦语200亿参数模型版本 InternLM-20B ,InternLM-20B 在超过 **2.3T** Tokens 包含高质量英文、中文和代码的数据上进行预训练,其中 Chat 版本还经过了 SFT 和 RLHF 训练,使其能够更好、更安全地满足用户的需求。 InternLM 20B 在模型结构上选择了深结构,层数设定为60层,超过常规7B和13B模型所使用的32层或者40层。在参数受限的情况下,提高层数有利于提高模型的综合能力。此外,相较于InternLM-7B,InternLM-20B使用的预训练数据经过了更高质量的清洗,并补充了高知识密度和用于强化理解与推理能力的训练数据。因此,它在理解能力、推理能力、数学能力、编程能力等考验语言模型技术水平的方面都得到了显著提升。总体而言,InternLM-20B具有以下的特点: - 优异的综合性能 - 很强的工具调用功能 - 支持16k语境长度(通过推理时外推) - 更好的价值对齐 ## 性能评测 在OpenCompass提出的5个能力维度上,InternLM-20B都取得很好的效果(粗体为13B-33B这个量级范围内,各项最佳成绩) | 能力维度 | Llama-13B | Llama2-13B | Baichuan2-13B | InternLM-20B | Llama-33B | Llama-65B | Llama2-70B | |----------|-----------|------------|---------------|--------------|-----------|-----------|------------| | 语言 | 42.5 | 47 | 47.5 | **55** | 44.6 | 47.1 | 51.6 | | 知识 | 58.2 | 58.3 | 48.9 | 60.1 | **64** | 66 | 67.7 | | 理解 | 45.5 | 50.9 | 58.1 | **67.3** | 50.6 | 54.2 | 60.8 | | 推理 | 42.7 | 43.6 | 44.2 | **54.9** | 46.4 | 49.8 | 55 | | 学科 | 37.3 | 45.2 | 51.8 | **62.5** | 47.4 | 49.7 | 57.3 | | 总平均 | 43.8 | 47.3 | 49.4 | **59.2** | 48.9 | 51.9 | 57.4 | 下表展示了在多个经典数据集上 InternLM 20B 与各个主流开源模型的表现 | | 评测集 | Llama-13B | Llama2-13B | Baichuan2-13B | InternLM-20B | Llama-33B | Llama-65B | Llama2-70B | |------|------------------|-----------|------------|---------------|--------------|-----------|-----------|------------| | 学科 | MMLU | 47.73 | 54.99 | 59.55 | **62.05** | 58.73 | 63.71 | 69.75 | | | C-Eval (val) | 31.83 | 41.4 | **59.01** | 58.8 | 37.47 | 40.36 | 50.13 | | | AGI-Eval | 22.03 | 30.93 | 37.37 | **44.58** | 33.53 | 33.92 | 40.02 | | 知识 | BoolQ | 78.75 | 82.42 | 67 | **87.46** | 84.43 | 86.61 | 87.74 | | | TriviaQA | 52.47 | 59.36 | 46.61 | 57.26 | **66.24** | 69.79 | 70.71 | | | NaturalQuestions | 20.17 | 24.85 | 16.32 | 25.15 | **30.89** | 33.41 | 34.16 | | 理解 | CMRC | 9.26 | 31.59 | 29.85 | **68.78** | 14.17 | 34.73 | 43.74 | | | CSL | 55 | 58.75 | 63.12 | **65.62** | 57.5 | 59.38 | 60 | | | RACE (middle) | 53.41 | 63.02 | 68.94 | 
**86.35** | 64.55 | 72.35 | 81.55 | | | RACE (high) | 47.63 | 58.86 | 67.18 | **83.28** | 62.61 | 68.01 | 79.93 | | | XSum | 20.37 | 23.37 | 25.23 | **35.54** | 20.55 | 19.91 | 25.38 | | 推理 | WinoGrande | 64.64 | 64.01 | 67.32 | **69.38** | 66.85 | 69.38 | 69.77 | | | BBH | 37.93 | 45.62 | 48.98 | **52.51** | 49.98 | 58.38 | 64.91 | | | GSM8K | 20.32 | 29.57 | **52.62** | **52.62** | 42.3 | 54.44 | 63.31 | | | PIQA | 79.71 | 79.76 | 78.07 | 80.25 | **81.34** | 82.15 | 82.54 | | 编程 | HumanEval | 14.02 | 18.9 | 17.07 | **25.61** | 17.68 | 18.9 | 26.22 | | | MBPP | 20.6 | 26.8 | 30.8 | **35.6** | 28.4 | 33.6 | 39.6 | 总体而言,InternLM-20B 在综合能力上全面领先于13B量级的开源模型,同时在推理评测集上能够接近甚至超越Llama-65B的性能。 ## 通过 Transformers 加载 通过以下的代码加载 InternLM 20B 模型 ```python >>> from transformers import AutoTokenizer, AutoModelForCausalLM >>> tokenizer = AutoTokenizer.from_pretrained("internlm/internlm-chat-20b", trust_remote_code=True) >>> model = AutoModelForCausalLM.from_pretrained("internlm/internlm-chat-20b", trust_remote_code=True).cuda() >>> model = model.eval() >>> output, history = model.chat(tokenizer, "你好呀!今天天气真好") >>> print(output) 你好!是的,今天的天气非常晴朗,非常适合户外活动。 ``` **局限性:** 尽管在训练过程中我们非常注重模型的安全性,尽力促使模型输出符合伦理和法律要求的文本,但受限于模型大小以及概率生成范式,模型可能会产生各种不符合预期的输出,例如回复内容包含偏见、歧视等有害内容,请勿传播这些内容。由于传播不良信息导致的任何后果,本项目不承担责任。 ## 开源许可证 本仓库的代码依照 Apache-2.0 协议开源。模型权重对学术研究完全开放,也可申请免费的商业使用授权([申请表](https://wj.qq.com/s2/12725412/f7c1/))。其他问题与合作请联系 <internlm@pjlab.org.cn>。
12,259
[ [ -0.042877197265625, -0.046875, 0.01235198974609375, 0.023590087890625, -0.0115509033203125, -0.00011044740676879883, -0.016845703125, -0.043548583984375, -0.006336212158203125, 0.00957489013671875, -0.0260009765625, -0.048248291015625, -0.02978515625, 0.00402069091796875, -0.022705078125, 0.06805419921875, 0.0010232925415039062, 0.0132293701171875, -0.006687164306640625, -0.00868988037109375, -0.02838134765625, -0.0300750732421875, -0.04669189453125, -0.02325439453125, 0.00812530517578125, 0.0165252685546875, 0.04742431640625, 0.03521728515625, 0.053436279296875, 0.01849365234375, -0.0196990966796875, 0.0102081298828125, -0.022705078125, -0.0196380615234375, 0.0178985595703125, -0.03009033203125, -0.048583984375, -0.004730224609375, 0.03729248046875, 0.03131103515625, -0.009246826171875, 0.03900146484375, 0.0108489990234375, 0.058441162109375, -0.03814697265625, 0.007366180419921875, -0.0243377685546875, -0.00212860107421875, -0.020721435546875, 0.0050811767578125, -0.009490966796875, -0.028076171875, -0.004680633544921875, -0.037506103515625, 0.005970001220703125, 0.0298309326171875, 0.093994140625, 0.01416015625, -0.0119171142578125, 0.004428863525390625, -0.043212890625, 0.054901123046875, -0.08355712890625, 0.020355224609375, 0.024871826171875, 0.022430419921875, 0.0029315948486328125, -0.057159423828125, -0.048614501953125, -0.0230560302734375, -0.0216217041015625, 0.0166778564453125, -0.019622802734375, -0.007144927978515625, 0.0255889892578125, 0.037109375, -0.052032470703125, 0.0203399658203125, -0.048614501953125, -0.0038242340087890625, 0.06719970703125, 0.031524658203125, 0.0221710205078125, -0.0150604248046875, -0.036102294921875, -0.00879669189453125, -0.04632568359375, 0.031982421875, 0.031890869140625, 0.01280975341796875, -0.035247802734375, 0.034027099609375, -0.028961181640625, 0.047637939453125, -0.006565093994140625, 0.0005898475646972656, 0.040802001953125, -0.04296875, -0.027557373046875, -0.0096435546875, 0.0806884765625, 
0.034454345703125, -0.0030384063720703125, 0.0131683349609375, -0.015045166015625, -0.018951416015625, 0.0012531280517578125, -0.0611572265625, -0.021484375, 0.0271453857421875, -0.054901123046875, -0.021026611328125, 0.007965087890625, -0.057342529296875, 0.0142669677734375, -0.0265655517578125, 0.0309295654296875, -0.039215087890625, -0.036041259765625, -0.01058197021484375, 0.0005469322204589844, 0.02911376953125, 0.0189361572265625, -0.048919677734375, 0.0272674560546875, 0.0293731689453125, 0.0745849609375, -0.0237274169921875, -0.0200653076171875, 0.00897979736328125, -0.00904083251953125, -0.0323486328125, 0.0401611328125, -0.019866943359375, -0.0246124267578125, -0.025299072265625, -0.0007543563842773438, -0.012420654296875, -0.025665283203125, 0.0268096923828125, -0.022857666015625, 0.0255889892578125, -0.01346588134765625, -0.03131103515625, -0.0251922607421875, 0.02838134765625, -0.033966064453125, 0.1019287109375, 0.01061248779296875, -0.0767822265625, 0.01434326171875, -0.0684814453125, -0.0009484291076660156, -0.009521484375, 0.0019550323486328125, -0.045562744140625, -0.020050048828125, 0.019561767578125, 0.02996826171875, -0.038818359375, 0.025146484375, -0.025909423828125, -0.0310211181640625, 0.00237274169921875, -0.03375244140625, 0.0872802734375, 0.03289794921875, -0.04278564453125, 0.0241851806640625, -0.07952880859375, 0.0087127685546875, 0.0295867919921875, -0.021697998046875, -0.01114654541015625, -0.012847900390625, 0.0002028942108154297, 0.018585205078125, 0.026611328125, -0.03192138671875, 0.012054443359375, -0.04315185546875, 0.046142578125, 0.061859130859375, -0.0022907257080078125, 0.0201263427734375, -0.03497314453125, 0.03631591796875, 0.015594482421875, 0.0345458984375, -0.00035858154296875, -0.038726806640625, -0.06195068359375, -0.0278778076171875, 0.0162353515625, 0.0452880859375, -0.026397705078125, 0.04168701171875, -0.003292083740234375, -0.0599365234375, -0.041473388671875, -0.005157470703125, 0.033477783203125, 
0.048614501953125, 0.04339599609375, 0.003307342529296875, -0.041656494140625, -0.0787353515625, -0.0030422210693359375, -0.0178070068359375, 0.005268096923828125, 0.0325927734375, 0.06329345703125, -0.004364013671875, 0.0684814453125, -0.044036865234375, -0.01152801513671875, -0.0112762451171875, -0.00504302978515625, 0.041961669921875, 0.054901123046875, 0.052886962890625, -0.050750732421875, -0.0595703125, 0.0004112720489501953, -0.062042236328125, 0.0042572021484375, 0.01320648193359375, -0.0132598876953125, 0.0220794677734375, 0.02667236328125, -0.0550537109375, 0.040924072265625, 0.03521728515625, -0.0262298583984375, 0.07794189453125, -0.024200439453125, 0.001434326171875, -0.07806396484375, 0.0316162109375, -0.01390838623046875, 0.005767822265625, -0.041473388671875, -0.0009150505065917969, 0.00936126708984375, -0.003971099853515625, -0.041259765625, 0.061187744140625, -0.04034423828125, 0.0082244873046875, 0.00933074951171875, -0.0036983489990234375, -0.007305145263671875, 0.049591064453125, -0.01319122314453125, 0.0673828125, 0.055633544921875, -0.045654296875, 0.01534271240234375, 0.02154541015625, -0.03314208984375, 0.0168304443359375, -0.059967041015625, -0.0011663436889648438, -0.00389862060546875, 0.0179443359375, -0.0751953125, -0.00849151611328125, 0.0270843505859375, -0.0394287109375, 0.0216217041015625, 0.0117034912109375, -0.02288818359375, -0.05303955078125, -0.0306854248046875, 0.019317626953125, 0.0428466796875, -0.0284271240234375, 0.0227813720703125, 0.0081329345703125, -0.0013837814331054688, -0.05438232421875, -0.04595947265625, -0.0162200927734375, -0.0212860107421875, -0.05120849609375, 0.03082275390625, -0.0133056640625, -0.0246124267578125, 0.00901031494140625, 0.005584716796875, 0.007053375244140625, 0.0091094970703125, 0.02069091796875, 0.04412841796875, -0.01971435546875, -0.00563812255859375, -0.0136566162109375, -0.004730224609375, 0.006580352783203125, -0.005405426025390625, 0.04766845703125, -0.0267486572265625, 
-0.029266357421875, -0.039886474609375, 0.0084228515625, 0.043212890625, -0.0305938720703125, 0.072021484375, 0.04736328125, -0.01446533203125, -0.00023424625396728516, -0.035400390625, -0.01088714599609375, -0.034454345703125, 0.0268707275390625, -0.0234527587890625, -0.045745849609375, 0.053802490234375, 0.022491455078125, 0.02471923828125, 0.0618896484375, 0.045196533203125, -0.00605010986328125, 0.08355712890625, 0.02740478515625, -0.007717132568359375, 0.0272674560546875, -0.05255126953125, 0.01174163818359375, -0.0792236328125, -0.037811279296875, -0.048797607421875, -0.0272979736328125, -0.04296875, -0.0192718505859375, 0.0284881591796875, 0.009002685546875, -0.060272216796875, 0.0204315185546875, -0.05914306640625, 0.0036525726318359375, 0.061859130859375, 0.0110321044921875, 0.0132598876953125, -0.019134521484375, -0.027191162109375, 0.006206512451171875, -0.053314208984375, -0.0307769775390625, 0.10064697265625, 0.032867431640625, 0.04791259765625, -0.0014123916625976562, 0.04266357421875, 0.009246826171875, 0.0262908935546875, -0.0240478515625, 0.034698486328125, 0.0013790130615234375, -0.06439208984375, -0.0254364013671875, -0.0290069580078125, -0.05572509765625, 0.032684326171875, -0.01279449462890625, -0.062164306640625, 0.018096923828125, 0.00957489013671875, -0.031707763671875, 0.037811279296875, -0.043121337890625, 0.056640625, -0.031951904296875, -0.033203125, -0.0036258697509765625, -0.055023193359375, 0.042144775390625, 0.0081939697265625, 0.0269775390625, -0.0208892822265625, 0.0080108642578125, 0.0640869140625, -0.041778564453125, 0.04669189453125, -0.01898193359375, 0.02105712890625, 0.0233306884765625, -0.0082855224609375, 0.047607421875, 0.002254486083984375, -0.0136260986328125, 0.040069580078125, 0.0060882568359375, -0.02435302734375, -0.0189056396484375, 0.0465087890625, -0.07861328125, -0.053131103515625, -0.038543701171875, -0.035064697265625, -0.000507354736328125, 0.0294342041015625, 0.037750244140625, 0.019805908203125, 
-0.00008416175842285156, 0.034027099609375, 0.03521728515625, -0.0276641845703125, 0.048248291015625, 0.041229248046875, -0.0215301513671875, -0.037506103515625, 0.061859130859375, 0.0145416259765625, 0.02099609375, 0.0205078125, 0.01934814453125, -0.027862548828125, -0.052398681640625, -0.034759521484375, 0.01538848876953125, -0.032684326171875, -0.0242462158203125, -0.059661865234375, -0.036041259765625, -0.037109375, -0.005245208740234375, -0.0289306640625, -0.0218505859375, -0.03350830078125, -0.00437164306640625, 0.025970458984375, 0.03094482421875, -0.0106964111328125, 0.0080108642578125, -0.044586181640625, 0.01508331298828125, 0.0003528594970703125, 0.0228424072265625, 0.0190277099609375, -0.04840087890625, -0.01873779296875, 0.00887298583984375, -0.03143310546875, -0.046905517578125, 0.0457763671875, -0.0012493133544921875, 0.055328369140625, 0.036407470703125, 0.0125274658203125, 0.05926513671875, -0.02227783203125, 0.07342529296875, 0.0262298583984375, -0.060272216796875, 0.041961669921875, -0.0206756591796875, 0.0168304443359375, 0.031463623046875, 0.036529541015625, -0.0203094482421875, -0.03118896484375, -0.059967041015625, -0.0731201171875, 0.06781005859375, 0.0274200439453125, -0.006542205810546875, 0.0169525146484375, 0.00844573974609375, -0.005382537841796875, 0.0205078125, -0.062255859375, -0.044677734375, -0.024139404296875, -0.0173797607421875, 0.006275177001953125, -0.0031108856201171875, -0.0091400146484375, -0.039306640625, 0.060699462890625, 0.0073089599609375, 0.0323486328125, 0.01800537109375, 0.006641387939453125, 0.00724029541015625, 0.004505157470703125, 0.041778564453125, 0.064697265625, -0.0307159423828125, -0.005290985107421875, 0.0251617431640625, -0.03973388671875, 0.0023956298828125, 0.010528564453125, -0.0305023193359375, -0.01255035400390625, 0.028411865234375, 0.06964111328125, 0.0017385482788085938, -0.042572021484375, 0.0262298583984375, 0.006732940673828125, -0.01885986328125, -0.034881591796875, 0.017120361328125, 
0.009307861328125, 0.01491546630859375, 0.0277862548828125, 0.006877899169921875, 0.003200531005859375, -0.030029296875, -0.0062255859375, 0.04107666015625, -0.0201568603515625, -0.01922607421875, 0.0745849609375, 0.005321502685546875, -0.022552490234375, 0.048065185546875, 0.002330780029296875, -0.039825439453125, 0.066162109375, 0.03582763671875, 0.047149658203125, -0.019012451171875, -0.0004372596740722656, 0.0653076171875, 0.0263214111328125, -0.010772705078125, 0.02374267578125, 0.006000518798828125, -0.03460693359375, -0.0006775856018066406, -0.060791015625, -0.02264404296875, 0.02197265625, -0.050933837890625, 0.025726318359375, -0.0305023193359375, -0.016571044921875, -0.01171112060546875, 0.0214691162109375, -0.0594482421875, 0.0208892822265625, -0.0036449432373046875, 0.08599853515625, -0.04718017578125, 0.05987548828125, 0.04510498046875, -0.044677734375, -0.0745849609375, -0.0236358642578125, -0.00908660888671875, -0.0616455078125, 0.05474853515625, 0.01151275634765625, 0.0183258056640625, 0.006900787353515625, -0.053131103515625, -0.09130859375, 0.10443115234375, 0.007678985595703125, -0.051239013671875, -0.01364898681640625, 0.0026397705078125, 0.04058837890625, -0.009735107421875, 0.043701171875, 0.033905029296875, 0.0445556640625, -0.007610321044921875, -0.07623291015625, 0.01715087890625, -0.039306640625, 0.0026645660400390625, 0.0032444000244140625, -0.07781982421875, 0.10076904296875, -0.01439666748046875, 0.0051727294921875, -0.00806427001953125, 0.050567626953125, 0.042022705078125, 0.0266571044921875, 0.0307159423828125, 0.043792724609375, 0.04913330078125, -0.011962890625, 0.07098388671875, -0.0462646484375, 0.04156494140625, 0.06536865234375, 0.004901885986328125, 0.058929443359375, 0.01149749755859375, -0.048919677734375, 0.041839599609375, 0.0594482421875, -0.019134521484375, 0.0243377685546875, 0.0015516281127929688, -0.004512786865234375, -0.0033130645751953125, 0.01641845703125, -0.05352783203125, 0.022125244140625, 0.0145416259765625, 
-0.0180511474609375, -0.0030422210693359375, 0.0032291412353515625, 0.00829315185546875, -0.0213165283203125, -0.0162506103515625, 0.0438232421875, 0.004825592041015625, -0.040008544921875, 0.06451416015625, 0.00304412841796875, 0.052154541015625, -0.05621337890625, 0.005321502685546875, -0.0184173583984375, 0.01172637939453125, -0.027191162109375, -0.057342529296875, 0.012481689453125, 0.00327301025390625, -0.0044403076171875, 0.0015802383422851562, 0.0477294921875, -0.01049041748046875, -0.043701171875, 0.0264129638671875, 0.019775390625, 0.0117034912109375, 0.002285003662109375, -0.0662841796875, -0.0007958412170410156, 0.002277374267578125, -0.052581787109375, 0.0248870849609375, 0.014892578125, -0.00202178955078125, 0.061737060546875, 0.065673828125, 0.0125274658203125, 0.00887298583984375, -0.017608642578125, 0.07781982421875, -0.052734375, -0.041229248046875, -0.0657958984375, 0.049957275390625, -0.01503753662109375, -0.037872314453125, 0.0709228515625, 0.0533447265625, 0.059051513671875, 0.007595062255859375, 0.058135986328125, -0.030548095703125, 0.032470703125, -0.030242919921875, 0.06463623046875, -0.05706787109375, 0.00785064697265625, -0.0092315673828125, -0.061187744140625, -0.01386260986328125, 0.045501708984375, -0.029571533203125, 0.00836181640625, 0.03814697265625, 0.058837890625, 0.0108489990234375, -0.002872467041015625, 0.0108184814453125, 0.030670166015625, 0.03314208984375, 0.04498291015625, 0.0452880859375, -0.034027099609375, 0.04296875, -0.046478271484375, -0.0123748779296875, -0.0311279296875, -0.044219970703125, -0.06610107421875, -0.034454345703125, -0.01145172119140625, -0.0258636474609375, -0.012481689453125, 0.076171875, 0.055023193359375, -0.06689453125, -0.0203399658203125, -0.007965087890625, 0.00250244140625, -0.0355224609375, -0.020751953125, 0.05340576171875, -0.01898193359375, -0.058441162109375, 0.00832366943359375, 0.00478363037109375, 0.00841522216796875, -0.035186767578125, -0.0210723876953125, -0.0281219482421875, 
0.0013294219970703125, 0.03094482421875, 0.016265869140625, -0.0531005859375, -0.0011749267578125, 0.0055999755859375, -0.01383209228515625, 0.01593017578125, 0.0168609619140625, -0.040618896484375, 0.02117919921875, 0.039520263671875, 0.0147857666015625, 0.049102783203125, 0.0095672607421875, 0.01629638671875, -0.023773193359375, 0.01708984375, 0.019561767578125, 0.02099609375, 0.0039005279541015625, -0.029022216796875, 0.04852294921875, 0.020355224609375, -0.04669189453125, -0.08111572265625, -0.0195770263671875, -0.0657958984375, -0.0289154052734375, 0.07781982421875, -0.01514434814453125, -0.023773193359375, 0.003971099853515625, -0.01343536376953125, 0.020172119140625, -0.0304412841796875, 0.055206298828125, 0.045166015625, -0.0247344970703125, -0.0027923583984375, -0.062164306640625, 0.0262603759765625, 0.01483917236328125, -0.0672607421875, -0.0072784423828125, 0.0164642333984375, 0.0286102294921875, 0.01007843017578125, 0.053436279296875, -0.0133056640625, 0.00833892822265625, 0.006687164306640625, 0.01508331298828125, -0.039459228515625, -0.01303863525390625, -0.00396728515625, 0.01386260986328125, -0.00916290283203125, -0.0217437744140625 ] ]
Intel/t5-small-xsum-int8-dynamic
2022-11-29T08:00:46.000Z
[ "transformers", "pytorch", "t5", "text2text-generation", "int8", "Intel® Neural Compressor", "neural-compressor", "PostTrainingDynamic", "dataset:mnli", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "text-generation-inference", "region:us" ]
text2text-generation
Intel
null
null
Intel/t5-small-xsum-int8-dynamic
1
8,755
transformers
2022-09-22T12:30:01
--- license: apache-2.0 tags: - int8 - Intel® Neural Compressor - neural-compressor - PostTrainingDynamic datasets: - mnli metrics: - accuracy --- # INT8 T5 small finetuned on XSum ### Post-training dynamic quantization This is an INT8 PyTorch model quantized with [huggingface/optimum-intel](https://github.com/huggingface/optimum-intel) through the usage of [Intel® Neural Compressor](https://github.com/intel/neural-compressor). The original fp32 model comes from the fine-tuned model [adasnew/t5-small-xsum](https://huggingface.co/adasnew/t5-small-xsum). The linear modules **lm.head**, fall back to fp32 for less than 1% relative accuracy loss. ### Evaluation result | |INT8|FP32| |---|:---:|:---:| | **Accuracy (eval-rouge1)** | 29.9008 |29.9592| | **Model size** |154M|242M| ### Load with optimum: ```python from optimum.intel.neural_compressor.quantization import IncQuantizedModelForSeq2SeqLM int8_model = IncQuantizedModelForSeq2SeqLM.from_pretrained( 'Intel/t5-small-xsum-int8-dynamic', ) ```
1,023
[ [ -0.016265869140625, -0.02044677734375, 0.0196990966796875, -0.0010347366333007812, -0.01861572265625, 0.024505615234375, -0.0265960693359375, -0.00640106201171875, -0.0087432861328125, -0.004688262939453125, -0.040435791015625, -0.0296783447265625, -0.041961669921875, -0.0175018310546875, -0.01338958740234375, 0.07672119140625, -0.017425537109375, 0.01690673828125, 0.0008139610290527344, -0.008087158203125, 0.0034027099609375, -0.0270843505859375, -0.0518798828125, -0.0311126708984375, 0.0226898193359375, 0.0237274169921875, 0.03759765625, 0.03131103515625, 0.04833984375, 0.02069091796875, -0.0218963623046875, -0.01361083984375, -0.034881591796875, -0.0283966064453125, 0.020263671875, -0.0426025390625, -0.05340576171875, -0.013671875, 0.0694580078125, 0.0272674560546875, -0.0286407470703125, 0.04327392578125, -0.0096435546875, 0.026519775390625, -0.05657958984375, 0.0181732177734375, -0.03570556640625, 0.0178680419921875, 0.00981903076171875, 0.005558013916015625, -0.02203369140625, -0.0274505615234375, 0.00713348388671875, -0.03533935546875, 0.01119232177734375, -0.00782012939453125, 0.07171630859375, 0.03936767578125, -0.0310211181640625, 0.0172882080078125, -0.043701171875, 0.048431396484375, -0.04913330078125, 0.01398468017578125, 0.01009368896484375, 0.0279693603515625, 0.0163726806640625, -0.0894775390625, -0.0207366943359375, -0.0194091796875, 0.021331787109375, -0.0032291412353515625, -0.025634765625, 0.038604736328125, 0.045074462890625, 0.05023193359375, -0.0269317626953125, -0.0014009475708007812, -0.040557861328125, -0.025177001953125, 0.04107666015625, 0.018310546875, -0.0026187896728515625, 0.0098876953125, -0.03558349609375, -0.017242431640625, -0.043548583984375, 0.0235595703125, 0.041900634765625, -0.0117645263671875, -0.025115966796875, 0.03826904296875, -0.0195770263671875, 0.052642822265625, 0.037078857421875, -0.002788543701171875, 0.042083740234375, -0.01849365234375, -0.0225067138671875, 0.00949859619140625, 0.06048583984375, 
0.0221405029296875, 0.03155517578125, 0.0016918182373046875, -0.0328369140625, 0.00933837890625, 0.0199127197265625, -0.08074951171875, -0.05487060546875, 0.01092529296875, -0.036865234375, -0.043304443359375, 0.0035686492919921875, -0.004669189453125, 0.00919342041015625, -0.0225372314453125, 0.06036376953125, -0.045074462890625, -0.0176239013671875, 0.004947662353515625, -0.00809478759765625, 0.0002865791320800781, 0.0268707275390625, -0.051239013671875, 0.01861572265625, 0.0235443115234375, 0.06231689453125, 0.01806640625, -0.004215240478515625, -0.0198211669921875, -0.0210113525390625, -0.0214996337890625, 0.03314208984375, -0.0020694732666015625, 0.00371551513671875, -0.01702880859375, -0.00939178466796875, 0.0038776397705078125, -0.050567626953125, 0.043914794921875, -0.03277587890625, -0.015472412109375, 0.0232696533203125, -0.0268402099609375, -0.01396942138671875, 0.00955963134765625, -0.041046142578125, 0.061126708984375, 0.0279083251953125, -0.049407958984375, 0.0160369873046875, -0.039154052734375, 0.011566162109375, -0.004383087158203125, -0.0060272216796875, -0.047882080078125, 0.033660888671875, -0.0211029052734375, 0.00783538818359375, 0.00118255615234375, 0.015625, -0.03955078125, -0.047393798828125, 0.0229034423828125, -0.0401611328125, 0.046844482421875, 0.0156402587890625, -0.0277252197265625, 0.032318115234375, -0.07623291015625, 0.0274200439453125, -0.0105743408203125, -0.029693603515625, 0.01493072509765625, -0.0274658203125, 0.004955291748046875, 0.0249481201171875, 0.035308837890625, -0.039642333984375, 0.0244903564453125, -0.038116455078125, 0.05120849609375, 0.05548095703125, -0.0117340087890625, 0.0153656005859375, -0.0239715576171875, 0.03668212890625, 0.00803375244140625, 0.01122283935546875, 0.01446533203125, -0.01483917236328125, -0.06597900390625, -0.02655029296875, 0.0286712646484375, 0.032958984375, -0.049224853515625, 0.033447265625, -0.00833892822265625, -0.04583740234375, -0.027252197265625, -0.0335693359375, 0.021331787109375, 
0.0323486328125, 0.0225372314453125, 0.0019474029541015625, -0.040374755859375, -0.07879638671875, -0.005889892578125, 0.0037403106689453125, -0.01263427734375, -0.01129150390625, 0.03955078125, -0.00388336181640625, 0.039215087890625, -0.032745361328125, 0.00643157958984375, 0.0087432861328125, 0.0278778076171875, 0.041717529296875, 0.057098388671875, 0.03369140625, -0.028076171875, -0.048614501953125, -0.0216064453125, -0.0264129638671875, 0.0002613067626953125, 0.0249481201171875, -0.006771087646484375, 0.01175689697265625, 0.024566650390625, -0.053955078125, 0.063720703125, 0.0254058837890625, -0.0318603515625, 0.06256103515625, -0.00841522216796875, 0.0032196044921875, -0.06787109375, 0.020843505859375, -0.006076812744140625, -0.042022705078125, -0.007099151611328125, 0.0006189346313476562, 0.01541900634765625, -0.0207366943359375, -0.05670166015625, 0.032135009765625, -0.03192138671875, 0.00991058349609375, -0.017242431640625, -0.048065185546875, -0.014739990234375, 0.041961669921875, -0.019561767578125, 0.06414794921875, 0.035491943359375, -0.0303497314453125, 0.04290771484375, 0.0175628662109375, -0.03533935546875, 0.01541900634765625, -0.06201171875, 0.002777099609375, 0.00478363037109375, 0.0149688720703125, -0.0535888671875, -0.003993988037109375, 0.0205230712890625, -0.042999267578125, 0.0230712890625, -0.026336669921875, -0.03045654296875, -0.0207061767578125, -0.034423828125, 0.06243896484375, 0.050445556640625, -0.0189971923828125, 0.0271759033203125, 0.006938934326171875, 0.0227813720703125, -0.03497314453125, -0.06353759765625, -0.0249481201171875, -0.0170745849609375, -0.05279541015625, 0.051361083984375, -0.0180511474609375, -0.01451873779296875, -0.0035762786865234375, -0.030242919921875, -0.018402099609375, -0.0016326904296875, 0.0106964111328125, 0.00705718994140625, -0.0254669189453125, -0.0023956298828125, -0.00423431396484375, -0.0170135498046875, 0.019561767578125, -0.029937744140625, 0.0482177734375, -0.03955078125, 0.006992340087890625, 
-0.06243896484375, 0.00937652587890625, 0.035858154296875, -0.0033245086669921875, 0.052825927734375, 0.07647705078125, -0.0498046875, -0.00418853759765625, -0.01444244384765625, -0.032745361328125, -0.038116455078125, 0.03582763671875, -0.0408935546875, -0.06640625, 0.04449462890625, 0.00841522216796875, -0.017242431640625, 0.05615234375, 0.04833984375, -0.006412506103515625, 0.0909423828125, 0.03692626953125, 0.028350830078125, 0.06201171875, -0.05596923828125, 0.021331787109375, -0.055816650390625, 0.01003265380859375, -0.0097198486328125, -0.04278564453125, -0.059600830078125, -0.017669677734375, 0.0439453125, 0.009857177734375, -0.060150146484375, 0.006641387939453125, -0.03924560546875, 0.0137939453125, 0.05328369140625, 0.03240966796875, -0.0087127685546875, 0.01010894775390625, -0.004589080810546875, -0.0031890869140625, -0.06451416015625, -0.0159149169921875, 0.07794189453125, 0.027587890625, 0.0687255859375, -0.006244659423828125, 0.046417236328125, 0.00933074951171875, -0.00904083251953125, -0.069580078125, 0.0255584716796875, -0.0225830078125, -0.0247955322265625, 0.0157470703125, -0.04071044921875, -0.05621337890625, -0.01143646240234375, -0.015533447265625, -0.06671142578125, 0.00852203369140625, 0.043548583984375, -0.07586669921875, 0.0195770263671875, -0.06402587890625, 0.0849609375, -0.0018796920776367188, -0.022125244140625, -0.02685546875, -0.05035400390625, 0.026580810546875, 0.01256561279296875, -0.03533935546875, -0.00775146484375, -0.0050048828125, 0.059112548828125, -0.0589599609375, 0.04168701171875, -0.0176239013671875, 0.0189208984375, 0.035308837890625, -0.0082550048828125, 0.032012939453125, 0.00757598876953125, -0.0003559589385986328, 0.01800537109375, 0.0264739990234375, -0.0318603515625, -0.035400390625, 0.04791259765625, -0.07806396484375, -0.01172637939453125, -0.053375244140625, -0.04486083984375, 0.0031452178955078125, 0.00897216796875, 0.042144775390625, 0.054931640625, 0.0166168212890625, 0.0299224853515625, 0.053985595703125, 
0.0033206939697265625, 0.04217529296875, 0.0157012939453125, 0.007259368896484375, -0.05401611328125, 0.06317138671875, 0.01983642578125, 0.0214080810546875, 0.0233612060546875, 0.0254058837890625, -0.01910400390625, -0.038238525390625, -0.01065826416015625, 0.01026153564453125, -0.04278564453125, -0.03497314453125, -0.0198974609375, -0.03106689453125, -0.045013427734375, 0.015106201171875, -0.04632568359375, -0.047454833984375, -0.038970947265625, 0.0018167495727539062, 0.029296875, 0.0229339599609375, -0.02398681640625, 0.053863525390625, -0.0694580078125, 0.009063720703125, -0.006000518798828125, 0.023223876953125, -0.0091705322265625, -0.05908203125, -0.045684814453125, 0.0119171142578125, -0.0275726318359375, -0.05340576171875, 0.0234222412109375, 0.027557373046875, 0.0259552001953125, 0.0289306640625, 0.0140533447265625, 0.039764404296875, -0.024505615234375, 0.034149169921875, 0.0086212158203125, -0.07025146484375, 0.0206146240234375, -0.0191192626953125, 0.04510498046875, 0.040771484375, 0.0228424072265625, -0.014556884765625, 0.0027942657470703125, -0.08282470703125, -0.08636474609375, 0.0599365234375, 0.00844573974609375, -0.00911712646484375, 0.033660888671875, 0.035064697265625, -0.01418304443359375, 0.0447998046875, -0.056884765625, -0.00978851318359375, -0.005466461181640625, -0.04583740234375, 0.0153656005859375, -0.02398681640625, -0.00701904296875, -0.040374755859375, 0.061279296875, -0.01910400390625, 0.061767578125, 0.009307861328125, -0.0103302001953125, -0.01041412353515625, -0.035675048828125, 0.04901123046875, 0.0589599609375, -0.047088623046875, 0.01201629638671875, -0.0012502670288085938, -0.04083251953125, 0.01395416259765625, 0.01392364501953125, -0.003978729248046875, -0.01605224609375, 0.01528167724609375, 0.09405517578125, 0.005016326904296875, -0.025360107421875, 0.03546142578125, -0.0191192626953125, -0.0066375732421875, -0.057464599609375, 0.019561767578125, -0.018646240234375, 0.0304412841796875, 0.0165252685546875, 
0.03289794921875, 0.006587982177734375, -0.005260467529296875, 0.03094482421875, 0.023162841796875, -0.0186614990234375, -0.03533935546875, 0.047454833984375, 0.0097808837890625, -0.01519775390625, 0.0533447265625, -0.0210723876953125, -0.037689208984375, 0.061767578125, 0.01165771484375, 0.08087158203125, -0.004993438720703125, -0.016754150390625, 0.05023193359375, 0.01300811767578125, 0.01355743408203125, 0.043975830078125, -0.0171051025390625, -0.04510498046875, -0.040435791015625, -0.04339599609375, -0.019622802734375, 0.0020694732666015625, -0.054443359375, 0.042724609375, -0.052154541015625, -0.023590087890625, 0.006313323974609375, 0.01812744140625, -0.044830322265625, 0.04132080078125, 0.0227813720703125, 0.087890625, -0.048126220703125, 0.0816650390625, 0.054168701171875, -0.02911376953125, -0.0772705078125, -0.002651214599609375, -0.017303466796875, -0.0648193359375, 0.041748046875, 0.0117950439453125, 0.01050567626953125, 0.033050537109375, -0.046295166015625, -0.0772705078125, 0.07330322265625, 0.050262451171875, -0.048736572265625, 0.0011157989501953125, -0.01360321044921875, 0.0276031494140625, -0.0005517005920410156, 0.05096435546875, 0.032745361328125, 0.02044677734375, 0.007259368896484375, -0.065185546875, -0.01395416259765625, -0.02947998046875, -0.00806427001953125, 0.0244293212890625, -0.07244873046875, 0.09686279296875, -0.004497528076171875, 0.023651123046875, 0.0224761962890625, 0.0538330078125, 0.0156402587890625, 0.0036296844482421875, 0.031829833984375, 0.06201171875, 0.040802001953125, -0.019317626953125, 0.059600830078125, -0.053955078125, 0.057464599609375, 0.069091796875, 0.0237274169921875, 0.047454833984375, 0.042877197265625, -0.0209808349609375, 0.020263671875, 0.081298828125, -0.0031414031982421875, 0.04254150390625, 0.0291595458984375, -0.0347900390625, -0.017425537109375, 0.01534271240234375, -0.035400390625, 0.032073974609375, 0.014556884765625, -0.0219573974609375, 0.00009715557098388672, -0.00408172607421875, 
0.005001068115234375, -0.038360595703125, -0.035491943359375, 0.03717041015625, 0.0018491744995117188, -0.038482666015625, 0.0736083984375, 0.0095977783203125, 0.055755615234375, -0.056243896484375, 0.004993438720703125, 0.0010004043579101562, 0.051483154296875, -0.026275634765625, -0.041656494140625, 0.040679931640625, -0.0256805419921875, -0.0283050537109375, -0.0160675048828125, 0.04730224609375, -0.040008544921875, -0.05072021484375, -0.01384735107421875, -0.0108184814453125, 0.01189422607421875, -0.0323486328125, -0.050811767578125, 0.01123809814453125, 0.01204681396484375, -0.02801513671875, 0.016693115234375, 0.0172119140625, 0.00339508056640625, 0.034088134765625, 0.017730712890625, -0.00042128562927246094, 0.0189056396484375, -0.0260162353515625, 0.0552978515625, -0.03802490234375, -0.051788330078125, -0.0626220703125, 0.051513671875, 0.01491546630859375, -0.05322265625, 0.0243072509765625, 0.04144287109375, 0.06536865234375, -0.0122528076171875, 0.038818359375, -0.007137298583984375, 0.006549835205078125, -0.041473388671875, 0.058807373046875, -0.051116943359375, -0.0205841064453125, 0.007598876953125, -0.05474853515625, -0.004405975341796875, 0.0660400390625, -0.003864288330078125, 0.0187530517578125, 0.059295654296875, 0.044708251953125, -0.01275634765625, -0.02716064453125, 0.019378662109375, 0.033294677734375, 0.01451873779296875, 0.060699462890625, 0.05499267578125, -0.0521240234375, 0.0307769775390625, -0.049530029296875, -0.023834228515625, -0.0150299072265625, -0.0631103515625, -0.0262451171875, -0.020782470703125, -0.057373046875, -0.038360595703125, -0.0029144287109375, 0.08355712890625, 0.06982421875, -0.052337646484375, -0.023468017578125, -0.021697998046875, -0.01026153564453125, -0.002750396728515625, -0.018768310546875, 0.017669677734375, -0.01641845703125, -0.06365966796875, 0.01430511474609375, -0.006439208984375, 0.01470947265625, -0.02288818359375, 0.0046539306640625, -0.01026153564453125, -0.0208892822265625, 0.0201263427734375, 
-0.0025081634521484375, -0.0284423828125, -0.0255279541015625, -0.0170745849609375, 0.005146026611328125, 0.025787353515625, 0.041656494140625, -0.058624267578125, 0.0169677734375, 0.057769775390625, 0.0211181640625, 0.049224853515625, -0.01490020751953125, 0.046722412109375, -0.057861328125, 0.04327392578125, 0.04443359375, 0.0288848876953125, 0.01446533203125, -0.02703857421875, 0.025054931640625, 0.045684814453125, -0.0570068359375, -0.0667724609375, -0.00431060791015625, -0.07513427734375, 0.0168304443359375, 0.0736083984375, -0.019378662109375, -0.047210693359375, 0.04913330078125, -0.024566650390625, 0.04583740234375, -0.039306640625, 0.035064697265625, 0.0306243896484375, -0.0167999267578125, -0.018798828125, -0.0247344970703125, 0.041107177734375, 0.035675048828125, -0.032745361328125, -0.04217529296875, 0.0243988037109375, 0.0318603515625, 0.002346038818359375, 0.02764892578125, 0.015533447265625, 0.03643798828125, 0.010162353515625, 0.038970947265625, -0.01174163818359375, -0.0131988525390625, -0.038360595703125, 0.0125274658203125, 0.0186614990234375, -0.055511474609375 ] ]
Pi3141/DialoGPT-medium-elon-3
2022-12-08T01:47:29.000Z
[ "transformers", "pytorch", "gpt2", "text-generation", "conversational", "endpoints_compatible", "text-generation-inference", "region:us" ]
conversational
Pi3141
null
null
Pi3141/DialoGPT-medium-elon-3
0
8,692
transformers
2022-12-08T01:45:33
--- tags: - conversational --- # DialoGPT model that talks like Elon Musk Trained on Twitter tweets by Elon Musk. This model will spew meaningless shit about 40% of the time. Trained on 8 epochs. But with a larger dataset this time. The AI can now use more emojis i think.
273
[ [ -0.01230621337890625, -0.06378173828125, 0.0511474609375, -0.0198822021484375, -0.014556884765625, 0.0114898681640625, -0.00008529424667358398, -0.0306396484375, 0.0180511474609375, 0.0386962890625, -0.049285888671875, -0.01151275634765625, -0.0325927734375, -0.0039215087890625, -0.037109375, 0.07440185546875, 0.043609619140625, 0.007518768310546875, 0.031219482421875, -0.00563812255859375, -0.0241546630859375, -0.034912109375, -0.052459716796875, -0.034210205078125, 0.04241943359375, 0.04193115234375, 0.056182861328125, -0.0017862319946289062, -0.0015850067138671875, 0.026153564453125, -0.01222991943359375, -0.0137176513671875, -0.050750732421875, -0.0133209228515625, 0.013458251953125, -0.043670654296875, -0.01335906982421875, -0.0001347064971923828, 0.006969451904296875, 0.0309295654296875, -0.016876220703125, 0.0032253265380859375, -0.0011968612670898438, 0.01174163818359375, -0.00933837890625, 0.01036834716796875, -0.06463623046875, 0.0106201171875, 0.0289154052734375, -0.007686614990234375, -0.0195159912109375, -0.026641845703125, 0.029937744140625, -0.059844970703125, -0.00759124755859375, 0.01450347900390625, 0.05419921875, 0.001392364501953125, -0.03729248046875, -0.023223876953125, -0.036346435546875, 0.0771484375, -0.06927490234375, 0.0240325927734375, 0.04266357421875, 0.026824951171875, -0.00542449951171875, -0.046966552734375, -0.051483154296875, -0.0234375, 0.008544921875, -0.0134124755859375, -0.00933837890625, 0.0142822265625, 0.0338134765625, 0.01509857177734375, -0.03302001953125, -0.010498046875, -0.049285888671875, -0.01165008544921875, 0.0433349609375, 0.005329132080078125, 0.03997802734375, -0.0280609130859375, -0.0255889892578125, 0.017608642578125, -0.036224365234375, -0.007610321044921875, 0.01288604736328125, -0.003368377685546875, -0.018035888671875, 0.05657958984375, 0.0018024444580078125, 0.0594482421875, 0.0077667236328125, -0.01236724853515625, 0.0199127197265625, -0.0224151611328125, -0.04144287109375, -0.003444671630859375, 
0.06353759765625, 0.0301055908203125, 0.038818359375, -0.01137542724609375, -0.0201416015625, -0.01885986328125, 0.03961181640625, -0.0631103515625, -0.014495849609375, 0.03009033203125, -0.0274658203125, -0.0273284912109375, -0.024017333984375, -0.04443359375, -0.046966552734375, -0.013519287109375, 0.043365478515625, -0.07293701171875, 0.00010585784912109375, 0.0007948875427246094, -0.0197296142578125, -0.0049591064453125, 0.049285888671875, -0.0650634765625, -0.0221405029296875, 0.0390625, 0.059051513671875, 0.0496826171875, -0.006160736083984375, 0.0069122314453125, -0.01528167724609375, -0.02740478515625, 0.058013916015625, -0.03961181640625, -0.034820556640625, 0.0096588134765625, -0.001056671142578125, -0.0210723876953125, -0.004871368408203125, 0.0227813720703125, -0.0236358642578125, 0.0190582275390625, -0.01959228515625, -0.07049560546875, -0.039886474609375, 0.028228759765625, -0.038787841796875, 0.047821044921875, 0.00708770751953125, -0.03399658203125, 0.040496826171875, -0.06964111328125, -0.00811767578125, 0.03143310546875, -0.023529052734375, -0.0158233642578125, 0.01190948486328125, 0.017303466796875, 0.045654296875, -0.005466461181640625, 0.0220947265625, -0.039398193359375, -0.0237884521484375, 0.037933349609375, -0.0005626678466796875, 0.07379150390625, 0.031707763671875, -0.0149993896484375, -0.025787353515625, -0.038543701171875, 0.004657745361328125, 0.0029697418212890625, -0.0031795501708984375, -0.00820159912109375, -0.01580810546875, 0.00106048583984375, 0.01910400390625, 0.004322052001953125, -0.072509765625, 0.0283203125, -0.03460693359375, 0.0640869140625, 0.039215087890625, 0.02215576171875, 0.032012939453125, -0.0772705078125, 0.035736083984375, -0.0254058837890625, 0.021484375, -0.037261962890625, -0.060821533203125, -0.068359375, -0.009674072265625, -0.007648468017578125, 0.024261474609375, -0.0576171875, 0.043670654296875, -0.02435302734375, -0.047637939453125, -0.01082611083984375, -0.0171356201171875, -0.01038360595703125, 
0.015838623046875, -0.007659912109375, 0.00872039794921875, -0.01499176025390625, -0.07000732421875, -0.018646240234375, -0.028076171875, -0.0167236328125, 0.023834228515625, 0.051300048828125, -0.0017147064208984375, 0.0408935546875, -0.010711669921875, 0.009857177734375, -0.03839111328125, 0.037261962890625, 0.004161834716796875, 0.014556884765625, 0.053070068359375, -0.055419921875, -0.032928466796875, -0.002574920654296875, -0.042022705078125, 0.0084381103515625, -0.0213775634765625, 0.0130157470703125, -0.0038700103759765625, 0.04632568359375, -0.045440673828125, 0.046844482421875, 0.043914794921875, -0.034637451171875, -0.0008087158203125, 0.02349853515625, 0.045013427734375, -0.10052490234375, 0.010101318359375, -0.0121002197265625, -0.0227813720703125, -0.055816650390625, -0.035308837890625, -0.027618408203125, -0.055419921875, -0.050323486328125, 0.039459228515625, -0.026702880859375, 0.024505615234375, -0.0230255126953125, -0.0182037353515625, -0.005718231201171875, 0.0496826171875, -0.032196044921875, 0.0626220703125, 0.0136566162109375, -0.037139892578125, 0.0155029296875, 0.030364990234375, -0.036590576171875, 0.0384521484375, -0.056182861328125, 0.0240478515625, 0.00206756591796875, 0.02020263671875, -0.08416748046875, -0.0374755859375, 0.00783538818359375, -0.047149658203125, 0.004543304443359375, -0.01690673828125, -0.0556640625, -0.025665283203125, -0.02239990234375, 0.0200042724609375, 0.049530029296875, -0.0246429443359375, 0.07427978515625, 0.0535888671875, -0.021484375, -0.00342559814453125, -0.047210693359375, 0.00699615478515625, -0.01230621337890625, -0.057098388671875, 0.003490447998046875, -0.014434814453125, 0.004550933837890625, -0.01137542724609375, 0.00975799560546875, -0.032623291015625, -0.003814697265625, 0.046142578125, -0.0022106170654296875, 0.011016845703125, 0.026519775390625, -0.01432037353515625, 0.029296875, -0.0193023681640625, 0.014129638671875, 0.07562255859375, 0.0011463165283203125, -0.030548095703125, 
-0.07550048828125, 0.0307159423828125, 0.06365966796875, 0.058013916015625, 0.05120849609375, 0.0243072509765625, -0.036651611328125, 0.0012683868408203125, -0.0421142578125, -0.0309600830078125, -0.03704833984375, 0.042694091796875, -0.0220947265625, -0.06781005859375, 0.03857421875, 0.01654052734375, 0.032958984375, 0.032958984375, 0.042633056640625, -0.0191497802734375, 0.0836181640625, 0.054229736328125, -0.006622314453125, 0.06610107421875, -0.028900146484375, 0.038970947265625, -0.03729248046875, -0.0208587646484375, -0.00937652587890625, -0.0176239013671875, -0.0467529296875, -0.00827789306640625, 0.009613037109375, -0.0109710693359375, -0.039581298828125, 0.04345703125, -0.037078857421875, 0.016876220703125, 0.03204345703125, 0.01544952392578125, -0.00482177734375, 0.00678253173828125, 0.004638671875, -0.0027618408203125, -0.056640625, -0.045013427734375, 0.0902099609375, 0.0328369140625, 0.06689453125, -0.01580810546875, 0.0233001708984375, 0.04345703125, 0.0301055908203125, -0.051116943359375, 0.039886474609375, 0.021240234375, -0.08056640625, -0.01561737060546875, -0.0119171142578125, -0.062469482421875, -0.0208587646484375, -0.023681640625, -0.077880859375, -0.00672149658203125, 0.0199432373046875, -0.04351806640625, 0.040802001953125, -0.07281494140625, 0.0511474609375, -0.008575439453125, 0.0396728515625, -0.024688720703125, -0.03564453125, 0.0211181640625, -0.0205230712890625, 0.004608154296875, -0.0311431884765625, 0.0164031982421875, 0.0689697265625, -0.024658203125, 0.0618896484375, 0.0011157989501953125, 0.0089111328125, 0.03369140625, -0.0105743408203125, 0.0047149658203125, 0.002166748046875, -0.002471923828125, 0.033935546875, -0.004421234130859375, -0.03759765625, -0.031494140625, 0.028076171875, -0.058929443359375, -0.01117706298828125, -0.0292816162109375, -0.01611328125, 0.003173828125, -0.00047898292541503906, 0.050689697265625, 0.0183563232421875, -0.032440185546875, 0.01043701171875, -0.00012302398681640625, -0.0179290771484375, 
0.023651123046875, 0.01016998291015625, -0.016357421875, -0.032196044921875, 0.06488037109375, 0.018646240234375, 0.02630615234375, 0.00893402099609375, 0.0019006729125976562, -0.0032176971435546875, 0.0005178451538085938, -0.039886474609375, 0.0175933837890625, -0.033050537109375, -0.01380157470703125, -0.06646728515625, -0.040557861328125, -0.0278472900390625, -0.032867431640625, -0.032623291015625, -0.04827880859375, -0.020111083984375, -0.0009937286376953125, 0.03399658203125, 0.08135986328125, 0.0177001953125, 0.0645751953125, -0.05224609375, 0.0005321502685546875, 0.03839111328125, 0.00482177734375, -0.00734710693359375, -0.06396484375, -0.02581787109375, 0.029327392578125, -0.049530029296875, -0.04901123046875, 0.02435302734375, 0.025787353515625, 0.023895263671875, 0.03997802734375, -0.024627685546875, 0.011932373046875, -0.03216552734375, 0.09002685546875, 0.033233642578125, -0.06195068359375, 0.03363037109375, -0.052459716796875, 0.025970458984375, 0.0291595458984375, 0.010223388671875, -0.0643310546875, -0.05853271484375, -0.0799560546875, -0.045623779296875, 0.052825927734375, 0.031280517578125, 0.0269927978515625, 0.008758544921875, 0.02154541015625, 0.0227203369140625, 0.0379638671875, -0.050079345703125, -0.01947021484375, -0.02911376953125, -0.01174163818359375, 0.01474761962890625, -0.0273284912109375, -0.01401519775390625, -0.0270538330078125, 0.040283203125, 0.01328277587890625, 0.0577392578125, -0.01430511474609375, 0.00289154052734375, 0.00904083251953125, 0.01282501220703125, 0.0469970703125, 0.05615234375, -0.033447265625, -0.00984954833984375, 0.0187835693359375, -0.0347900390625, -0.004016876220703125, -0.00778961181640625, -0.014923095703125, 0.00806427001953125, 0.0280914306640625, 0.0750732421875, 0.01262664794921875, -0.052154541015625, 0.035308837890625, -0.00872039794921875, -0.0165863037109375, -0.0638427734375, 0.0264892578125, -0.0002627372741699219, 0.009033203125, 0.04351806640625, 0.009002685546875, -0.007297515869140625, 
-0.050048828125, 0.0264892578125, 0.007450103759765625, -0.0296173095703125, -0.06829833984375, 0.04742431640625, 0.03997802734375, -0.0435791015625, 0.03668212890625, 0.00824737548828125, -0.064697265625, 0.034271240234375, 0.00391387939453125, 0.0634765625, -0.014373779296875, 0.0164337158203125, 0.0333251953125, 0.0130767822265625, 0.00583648681640625, 0.024505615234375, -0.0007677078247070312, -0.06060791015625, -0.0225067138671875, -0.0292816162109375, -0.0272064208984375, 0.05389404296875, -0.0025501251220703125, 0.0254364013671875, -0.05157470703125, -0.039459228515625, 0.032012939453125, 0.0021820068359375, -0.0692138671875, 0.0335693359375, 0.021636962890625, 0.052825927734375, -0.08343505859375, 0.030975341796875, 0.056121826171875, -0.0296630859375, -0.05242919921875, 0.000888824462890625, 0.00832366943359375, -0.07965087890625, 0.047576904296875, 0.037109375, 0.0202484130859375, -0.01262664794921875, -0.0802001953125, -0.055419921875, 0.056976318359375, 0.03106689453125, -0.030029296875, 0.002223968505859375, 0.0002689361572265625, 0.03131103515625, -0.044403076171875, 0.03741455078125, 0.019500732421875, 0.0240936279296875, 0.0006265640258789062, -0.07757568359375, -0.034454345703125, -0.0333251953125, -0.01165771484375, 0.0014133453369140625, -0.043731689453125, 0.07177734375, -0.006103515625, -0.043365478515625, 0.01397705078125, 0.01491546630859375, -0.0159759521484375, 0.01910400390625, 0.060516357421875, 0.053558349609375, 0.0288543701171875, 0.00839996337890625, 0.0760498046875, -0.037567138671875, 0.043243408203125, 0.0513916015625, 0.0088958740234375, 0.02117919921875, 0.032806396484375, -0.004726409912109375, 0.0207061767578125, 0.056121826171875, 0.00020897388458251953, 0.032867431640625, -0.00766754150390625, -0.021148681640625, -0.01751708984375, -0.036407470703125, -0.007328033447265625, 0.031646728515625, 0.029998779296875, -0.01971435546875, -0.00794219970703125, 0.0006661415100097656, 0.0187835693359375, -0.0192718505859375, 
0.0092620849609375, 0.058380126953125, 0.016571044921875, -0.03131103515625, 0.035736083984375, -0.0245208740234375, 0.03802490234375, -0.0204925537109375, -0.007633209228515625, -0.0038433074951171875, -0.0193634033203125, 0.01153564453125, -0.0548095703125, 0.0111236572265625, -0.01837158203125, 0.00952911376953125, -0.0032215118408203125, 0.0269317626953125, -0.02142333984375, -0.044097900390625, 0.01268768310546875, 0.03125, 0.0141143798828125, -0.00406646728515625, -0.06976318359375, -0.0078582763671875, 0.0278778076171875, -0.0284881591796875, -0.00020754337310791016, 0.047515869140625, 0.038116455078125, 0.07366943359375, 0.043121337890625, 0.006256103515625, 0.0157623291015625, 0.0063934326171875, 0.05987548828125, -0.06109619140625, -0.036712646484375, -0.05706787109375, 0.032196044921875, -0.037353515625, -0.046844482421875, 0.0406494140625, 0.025787353515625, 0.04632568359375, 0.00411224365234375, 0.07745361328125, -0.032989501953125, 0.0609130859375, -0.0247650146484375, 0.04803466796875, -0.06280517578125, 0.0018053054809570312, -0.0030994415283203125, -0.049835205078125, 0.007659912109375, 0.051361083984375, -0.01328277587890625, 0.02398681640625, 0.046234130859375, 0.033538818359375, -0.00302886962890625, 0.02532958984375, 0.051025390625, 0.0232391357421875, 0.0260772705078125, 0.026153564453125, 0.06561279296875, -0.039520263671875, 0.0287322998046875, -0.01473236083984375, 0.0003368854522705078, -0.04400634765625, -0.0229644775390625, -0.08624267578125, -0.0579833984375, -0.003662109375, -0.0257415771484375, 0.0058746337890625, 0.06988525390625, 0.07000732421875, -0.04840087890625, 0.01230621337890625, -0.0002460479736328125, -0.002361297607421875, 0.0367431640625, -0.015655517578125, 0.021942138671875, -0.01275634765625, -0.05462646484375, 0.005268096923828125, 0.02960205078125, 0.0187530517578125, -0.0052490234375, 0.0308990478515625, -0.0138702392578125, 0.02581787109375, 0.0347900390625, 0.0205535888671875, -0.03955078125, -0.049713134765625, 
0.001308441162109375, -0.0205230712890625, 0.01947021484375, 0.0506591796875, -0.023223876953125, 0.02972412109375, 0.038909912109375, 0.035186767578125, 0.036590576171875, 0.01165008544921875, 0.06036376953125, -0.0218353271484375, 0.0343017578125, 0.0435791015625, 0.02203369140625, 0.0309600830078125, -0.01708984375, 0.0277099609375, 0.0243072509765625, -0.073974609375, -0.0433349609375, 0.02423095703125, -0.07647705078125, -0.00916290283203125, 0.0791015625, 0.005542755126953125, -0.0263671875, -0.0308990478515625, -0.0160064697265625, 0.0065765380859375, -0.08001708984375, 0.06646728515625, 0.048095703125, -0.01483154296875, -0.00756072998046875, -0.06768798828125, 0.038726806640625, 0.0286407470703125, -0.0400390625, -0.0173187255859375, 0.0182037353515625, 0.045166015625, 0.0138702392578125, 0.08258056640625, 0.025787353515625, 0.00479888916015625, 0.0199127197265625, 0.0117645263671875, 0.0205535888671875, -0.00946807861328125, 0.0032787322998046875, -0.0069122314453125, -0.0223388671875, -0.0305938720703125 ] ]
google/bigbird-pegasus-large-arxiv
2023-01-24T16:36:41.000Z
[ "transformers", "pytorch", "bigbird_pegasus", "text2text-generation", "summarization", "en", "dataset:scientific_papers", "arxiv:2007.14062", "license:apache-2.0", "model-index", "autotrain_compatible", "endpoints_compatible", "has_space", "region:us" ]
summarization
google
null
null
google/bigbird-pegasus-large-arxiv
30
8,689
transformers
2022-03-02T23:29:05
--- language: en license: apache-2.0 datasets: - scientific_papers tags: - summarization model-index: - name: google/bigbird-pegasus-large-arxiv results: - task: type: summarization name: Summarization dataset: name: scientific_papers type: scientific_papers config: pubmed split: test metrics: - name: ROUGE-1 type: rouge value: 36.0276 verified: true - name: ROUGE-2 type: rouge value: 13.4166 verified: true - name: ROUGE-L type: rouge value: 21.9612 verified: true - name: ROUGE-LSUM type: rouge value: 29.648 verified: true - name: loss type: loss value: 2.774355173110962 verified: true - name: meteor type: meteor value: 0.2824 verified: true - name: gen_len type: gen_len value: 209.2537 verified: true - task: type: summarization name: Summarization dataset: name: cnn_dailymail type: cnn_dailymail config: 3.0.0 split: test metrics: - name: ROUGE-1 type: rouge value: 9.0885 verified: true - name: ROUGE-2 type: rouge value: 1.0325 verified: true - name: ROUGE-L type: rouge value: 7.3182 verified: true - name: ROUGE-LSUM type: rouge value: 8.1455 verified: true - name: loss type: loss value: .nan verified: true - name: gen_len type: gen_len value: 210.4762 verified: true - task: type: summarization name: Summarization dataset: name: xsum type: xsum config: default split: test metrics: - name: ROUGE-1 type: rouge value: 4.9787 verified: true - name: ROUGE-2 type: rouge value: 0.3527 verified: true - name: ROUGE-L type: rouge value: 4.3679 verified: true - name: ROUGE-LSUM type: rouge value: 4.1723 verified: true - name: loss type: loss value: .nan verified: true - name: gen_len type: gen_len value: 230.4886 verified: true - task: type: summarization name: Summarization dataset: name: scientific_papers type: scientific_papers config: arxiv split: test metrics: - name: ROUGE-1 type: rouge value: 43.4702 verified: true - name: ROUGE-2 type: rouge value: 17.4297 verified: true - name: ROUGE-L type: rouge value: 26.2587 verified: true - name: ROUGE-LSUM type: rouge value: 
35.5587 verified: true - name: loss type: loss value: 2.1113228797912598 verified: true - name: gen_len type: gen_len value: 183.3702 verified: true - task: type: summarization name: Summarization dataset: name: samsum type: samsum config: samsum split: test metrics: - name: ROUGE-1 type: rouge value: 3.621 verified: true - name: ROUGE-2 type: rouge value: 0.1699 verified: true - name: ROUGE-L type: rouge value: 3.2016 verified: true - name: ROUGE-LSUM type: rouge value: 3.3269 verified: true - name: loss type: loss value: 7.664482116699219 verified: true - name: gen_len type: gen_len value: 233.8107 verified: true --- # BigBirdPegasus model (large) BigBird, is a sparse-attention based transformer which extends Transformer based models, such as BERT to much longer sequences. Moreover, BigBird comes along with a theoretical understanding of the capabilities of a complete transformer that the sparse model can handle. BigBird was introduced in this [paper](https://arxiv.org/abs/2007.14062) and first released in this [repository](https://github.com/google-research/bigbird). Disclaimer: The team releasing BigBird did not write a model card for this model so this model card has been written by the Hugging Face team. ## Model description BigBird relies on **block sparse attention** instead of normal attention (i.e. BERT's attention) and can handle sequences up to a length of 4096 at a much lower compute cost compared to BERT. It has achieved SOTA on various tasks involving very long sequences such as long documents summarization, question-answering with long contexts. 
## How to use Here is how to use this model to get the features of a given text in PyTorch: ```python from transformers import BigBirdPegasusForConditionalGeneration, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained("google/bigbird-pegasus-large-arxiv") # by default encoder-attention is `block_sparse` with num_random_blocks=3, block_size=64 model = BigBirdPegasusForConditionalGeneration.from_pretrained("google/bigbird-pegasus-large-arxiv") # decoder attention type can't be changed & will be "original_full" # you can change `attention_type` (encoder only) to full attention like this: model = BigBirdPegasusForConditionalGeneration.from_pretrained("google/bigbird-pegasus-large-arxiv", attention_type="original_full") # you can change `block_size` & `num_random_blocks` like this: model = BigBirdPegasusForConditionalGeneration.from_pretrained("google/bigbird-pegasus-large-arxiv", block_size=16, num_random_blocks=2) text = "Replace me by any text you'd like." inputs = tokenizer(text, return_tensors='pt') prediction = model.generate(**inputs) prediction = tokenizer.batch_decode(prediction) ``` ## Training Procedure This checkpoint is obtained after fine-tuning `BigBirdPegasusForConditionalGeneration` for **summarization** on **arxiv dataset** from [scientific_papers](https://huggingface.co/datasets/scientific_papers). ## BibTeX entry and citation info ```tex @misc{zaheer2021big, title={Big Bird: Transformers for Longer Sequences}, author={Manzil Zaheer and Guru Guruganesh and Avinava Dubey and Joshua Ainslie and Chris Alberti and Santiago Ontanon and Philip Pham and Anirudh Ravula and Qifan Wang and Li Yang and Amr Ahmed}, year={2021}, eprint={2007.14062}, archivePrefix={arXiv}, primaryClass={cs.LG} } ```
6,289
[ [ -0.035186767578125, -0.041534423828125, 0.007488250732421875, 0.02447509765625, -0.008575439453125, -0.0258026123046875, -0.031585693359375, -0.040374755859375, 0.0288543701171875, 0.00897216796875, -0.052001953125, -0.0112762451171875, -0.054229736328125, 0.02020263671875, -0.03302001953125, 0.08111572265625, 0.0220184326171875, -0.0256500244140625, 0.001384735107421875, 0.0215606689453125, -0.0050811767578125, -0.027008056640625, -0.035888671875, -0.0235137939453125, 0.046600341796875, 0.006313323974609375, 0.059600830078125, 0.05291748046875, 0.056304931640625, 0.02728271484375, -0.03802490234375, -0.00487518310546875, -0.04205322265625, -0.00927734375, -0.0127410888671875, -0.0208587646484375, -0.02618408203125, -0.004299163818359375, 0.06536865234375, 0.03143310546875, 0.002460479736328125, 0.034698486328125, 0.007137298583984375, 0.044158935546875, -0.043609619140625, 0.0355224609375, -0.02899169921875, 0.02532958984375, -0.0102691650390625, -0.0050506591796875, -0.026458740234375, -0.006519317626953125, 0.0091705322265625, -0.04217529296875, 0.023345947265625, 0.00995635986328125, 0.0836181640625, 0.02471923828125, -0.020263671875, -0.003208160400390625, -0.06134033203125, 0.06781005859375, -0.05474853515625, 0.0185546875, 0.0223846435546875, 0.03143310546875, -0.0251617431640625, -0.0714111328125, -0.056732177734375, -0.01027679443359375, -0.0146026611328125, 0.0218963623046875, -0.017181396484375, 0.002643585205078125, 0.044677734375, 0.04522705078125, -0.05596923828125, 0.0014543533325195312, -0.04498291015625, -0.005634307861328125, 0.04425048828125, -0.00308990478515625, -0.013275146484375, -0.025726318359375, -0.01464080810546875, -0.0292205810546875, -0.0258941650390625, 0.0255279541015625, 0.01995849609375, 0.0238189697265625, -0.0107574462890625, 0.031341552734375, -0.005115509033203125, 0.06640625, 0.032867431640625, -0.00298309326171875, 0.04083251953125, -0.0033168792724609375, -0.0372314453125, 0.001834869384765625, 0.07171630859375, 
-0.0009059906005859375, 0.00986480712890625, -0.001552581787109375, 0.003520965576171875, -0.0251617431640625, 0.0162200927734375, -0.074462890625, -0.004306793212890625, 0.0307769775390625, -0.0308837890625, -0.01727294921875, 0.0200653076171875, -0.053802490234375, -0.01190948486328125, -0.0121917724609375, 0.03680419921875, -0.03448486328125, -0.026092529296875, 0.01202392578125, -0.006290435791015625, 0.030914306640625, 0.00006699562072753906, -0.07696533203125, -0.0032806396484375, 0.055694580078125, 0.0704345703125, 0.035064697265625, -0.0501708984375, -0.041473388671875, -0.000926971435546875, -0.00827789306640625, 0.039306640625, -0.03692626953125, -0.0098724365234375, 0.0053558349609375, 0.034149169921875, -0.01248931884765625, -0.0224761962890625, 0.040191650390625, -0.03094482421875, 0.02655029296875, -0.0148468017578125, -0.0240936279296875, -0.033843994140625, 0.0170135498046875, -0.052764892578125, 0.0633544921875, 0.01385498046875, -0.06561279296875, 0.0164947509765625, -0.058258056640625, -0.038116455078125, -0.0035552978515625, 0.01436614990234375, -0.05859375, -0.00789642333984375, 0.0164031982421875, 0.058685302734375, -0.0250701904296875, 0.0162811279296875, -0.032470703125, -0.04449462890625, 0.0193634033203125, -0.0006241798400878906, 0.054534912109375, 0.012451171875, -0.037139892578125, 0.0245361328125, -0.037322998046875, -0.0187835693359375, 0.01708984375, -0.0133209228515625, 0.0167388916015625, -0.007965087890625, 0.0165252685546875, 0.0203857421875, 0.00848388671875, -0.039886474609375, 0.0306854248046875, -0.047210693359375, 0.061553955078125, 0.041351318359375, -0.0187835693359375, 0.00954437255859375, -0.032928466796875, 0.028961181640625, 0.0023937225341796875, 0.03265380859375, -0.026641845703125, -0.0301055908203125, -0.05859375, -0.0283966064453125, 0.026702880859375, 0.00765228271484375, -0.0295867919921875, 0.060882568359375, -0.04010009765625, -0.037506103515625, -0.0352783203125, 0.004486083984375, 0.024383544921875, 
0.01568603515625, 0.03533935546875, 0.00411224365234375, -0.050384521484375, -0.06207275390625, 0.01540374755859375, 0.0018157958984375, 0.0049896240234375, 0.00122833251953125, 0.056182861328125, -0.0250701904296875, 0.052978515625, -0.027984619140625, -0.032745361328125, -0.022918701171875, 0.0007314682006835938, 0.0511474609375, 0.037994384765625, 0.037200927734375, -0.058807373046875, -0.030914306640625, -0.006740570068359375, -0.04583740234375, 0.03466796875, -0.001155853271484375, -0.0063018798828125, 0.017120361328125, 0.0261993408203125, -0.0726318359375, 0.028900146484375, 0.041839599609375, -0.0128173828125, 0.03436279296875, -0.0002751350402832031, -0.006862640380859375, -0.09039306640625, 0.038177490234375, 0.011932373046875, -0.00775909423828125, -0.0240936279296875, 0.02197265625, 0.01494598388671875, -0.0162811279296875, -0.027191162109375, 0.039764404296875, -0.039886474609375, -0.01102447509765625, -0.0208740234375, -0.0208740234375, -0.0036945343017578125, 0.0443115234375, 0.00936126708984375, 0.05194091796875, 0.04217529296875, -0.03009033203125, 0.049041748046875, 0.041015625, -0.0173492431640625, 0.0062103271484375, -0.060150146484375, -0.004146575927734375, -0.0173797607421875, 0.046234130859375, -0.0755615234375, -0.03033447265625, 0.0231475830078125, -0.041900634765625, 0.042572021484375, -0.019012451171875, -0.03460693359375, -0.08843994140625, -0.0284881591796875, 0.0164642333984375, 0.049102783203125, -0.052032470703125, 0.0396728515625, -0.012298583984375, -0.01201629638671875, -0.058837890625, -0.055755615234375, 0.0048828125, -0.000396728515625, -0.05517578125, 0.0168304443359375, -0.005641937255859375, 0.02337646484375, 0.0017728805541992188, 0.005580902099609375, 0.01519012451171875, -0.01329803466796875, 0.036407470703125, 0.00981903076171875, -0.02581787109375, 0.01995849609375, -0.00945281982421875, -0.00917816162109375, -0.003162384033203125, -0.037994384765625, 0.04864501953125, -0.018798828125, -0.008880615234375, 
-0.04364013671875, 0.00841522216796875, 0.061065673828125, -0.03155517578125, 0.06195068359375, 0.0684814453125, -0.0203857421875, -0.003971099853515625, -0.0443115234375, -0.019683837890625, -0.037261962890625, 0.03759765625, -0.02569580078125, -0.053802490234375, 0.029266357421875, 0.022552490234375, 0.01253509521484375, 0.051910400390625, 0.0350341796875, 0.006103515625, 0.052978515625, 0.043609619140625, -0.01702880859375, 0.038177490234375, -0.05401611328125, 0.00382232666015625, -0.07269287109375, -0.004848480224609375, -0.029632568359375, -0.035858154296875, -0.0189208984375, -0.0233001708984375, 0.005283355712890625, -0.0084228515625, -0.043975830078125, 0.024078369140625, -0.05865478515625, 0.026458740234375, 0.044281005859375, 0.00775909423828125, -0.0036487579345703125, 0.0014781951904296875, 0.017242431640625, 0.014190673828125, -0.042572021484375, -0.0111083984375, 0.0921630859375, 0.0282135009765625, 0.04632568359375, 0.005588531494140625, 0.032684326171875, 0.003833770751953125, 0.01393890380859375, -0.068359375, 0.0286102294921875, -0.005054473876953125, -0.054534912109375, -0.033905029296875, -0.0300750732421875, -0.08624267578125, 0.017120361328125, -0.0255889892578125, -0.050140380859375, 0.001911163330078125, 0.0090179443359375, -0.048095703125, 0.022735595703125, -0.04010009765625, 0.07183837890625, 0.004608154296875, -0.0283660888671875, 0.0004093647003173828, -0.05328369140625, 0.03936767578125, -0.01386260986328125, -0.010894775390625, 0.0202789306640625, 0.0238189697265625, 0.060028076171875, -0.029327392578125, 0.06280517578125, -0.00469970703125, 0.00688934326171875, 0.016357421875, -0.0176544189453125, 0.050506591796875, -0.01065826416015625, -0.0161285400390625, 0.01345062255859375, 0.00027370452880859375, -0.044677734375, -0.032745361328125, 0.053192138671875, -0.08270263671875, -0.047271728515625, -0.06060791015625, -0.032806396484375, -0.00262451171875, 0.0160675048828125, 0.02691650390625, 0.030242919921875, 0.00690460205078125, 
0.04461669921875, 0.030609130859375, -0.0156707763671875, 0.05780029296875, 0.0271148681640625, -0.0237884521484375, -0.0221099853515625, 0.051513671875, 0.004825592041015625, -0.006168365478515625, 0.033599853515625, 0.0255584716796875, -0.035797119140625, -0.03680419921875, -0.0187835693359375, 0.039703369140625, -0.0290985107421875, -0.02215576171875, -0.0565185546875, -0.051727294921875, -0.05523681640625, 0.0008063316345214844, -0.0177764892578125, -0.00957489013671875, -0.0267333984375, 0.0053558349609375, 0.030670166015625, 0.04180908203125, 0.0009613037109375, 0.037506103515625, -0.062103271484375, 0.0067291259765625, 0.035491943359375, 0.01318359375, 0.0228118896484375, -0.07269287109375, -0.03125, 0.01032257080078125, -0.04229736328125, -0.031341552734375, 0.036773681640625, 0.0257568359375, 0.0258941650390625, 0.0360107421875, 0.002040863037109375, 0.05291748046875, -0.02490234375, 0.053558349609375, 0.03021240234375, -0.0572509765625, 0.03448486328125, -0.034759521484375, 0.037567138671875, 0.00531005859375, 0.03594970703125, -0.038055419921875, -0.0130157470703125, -0.0474853515625, -0.052734375, 0.07049560546875, 0.0229339599609375, -0.0004830360412597656, 0.0161590576171875, 0.0218658447265625, -0.005096435546875, 0.0159759521484375, -0.06396484375, 0.006710052490234375, -0.0458984375, -0.021759033203125, -0.0455322265625, -0.023193359375, -0.00872039794921875, -0.0269622802734375, 0.056304931640625, -0.0034027099609375, 0.038787841796875, 0.0183258056640625, -0.0124664306640625, -0.02459716796875, -0.0023746490478515625, 0.0596923828125, 0.047760009765625, -0.037506103515625, -0.0086669921875, -0.0191650390625, -0.051116943359375, -0.004245758056640625, 0.037384033203125, 0.011016845703125, -0.01071929931640625, 0.047210693359375, 0.0697021484375, 0.01068115234375, -0.023162841796875, 0.05810546875, 0.006122589111328125, -0.02728271484375, -0.04791259765625, 0.0032825469970703125, 0.010345458984375, 0.03143310546875, 0.04034423828125, 
-0.0067596435546875, -0.004608154296875, -0.036773681640625, -0.002838134765625, 0.0169677734375, -0.0163726806640625, -0.031768798828125, 0.048187255859375, 0.0250701904296875, -0.01715087890625, 0.0391845703125, -0.01068115234375, -0.033203125, 0.063232421875, 0.048187255859375, 0.06561279296875, -0.0184326171875, 0.006595611572265625, 0.04718017578125, 0.0238800048828125, -0.021240234375, -0.0016813278198242188, 0.00218963623046875, -0.01812744140625, -0.052642822265625, -0.0706787109375, -0.01390838623046875, 0.04022216796875, -0.03973388671875, 0.022552490234375, -0.054840087890625, -0.0161590576171875, 0.027374267578125, 0.0164031982421875, -0.044647216796875, 0.0092926025390625, 0.02105712890625, 0.06671142578125, -0.053863525390625, 0.05291748046875, 0.056243896484375, -0.038116455078125, -0.059844970703125, 0.01071929931640625, -0.005870819091796875, -0.05694580078125, 0.0697021484375, 0.04083251953125, 0.02020263671875, 0.00986480712890625, -0.032470703125, -0.079345703125, 0.07745361328125, 0.0088043212890625, -0.069091796875, -0.0020294189453125, -0.00391387939453125, 0.032562255859375, 0.001117706298828125, 0.04290771484375, 0.01087188720703125, 0.052825927734375, 0.03717041015625, -0.07440185546875, 0.012481689453125, -0.0313720703125, 0.007152557373046875, 0.02386474609375, -0.08404541015625, 0.08331298828125, -0.007701873779296875, 0.0017795562744140625, 0.019683837890625, 0.058868408203125, -0.00971221923828125, -0.00032830238342285156, 0.017913818359375, 0.04351806640625, 0.041900634765625, -0.00865936279296875, 0.07159423828125, -0.027374267578125, 0.0538330078125, 0.05499267578125, -0.006717681884765625, 0.056732177734375, 0.0264434814453125, -0.0220184326171875, 0.022552490234375, 0.0511474609375, -0.029266357421875, 0.020751953125, 0.0155029296875, 0.009429931640625, -0.015167236328125, 0.031951904296875, -0.061279296875, 0.041717529296875, 0.01175689697265625, -0.034027099609375, -0.000530242919921875, 0.01436614990234375, 
-0.00220489501953125, -0.0213165283203125, -0.0262298583984375, 0.039276123046875, -0.00737762451171875, -0.051605224609375, 0.079345703125, -0.0115203857421875, 0.06390380859375, -0.053466796875, 0.0097503662109375, -0.01104736328125, 0.033111572265625, -0.0239105224609375, -0.04876708984375, 0.011871337890625, -0.0156707763671875, -0.032562255859375, 0.0115814208984375, 0.02301025390625, -0.0215301513671875, -0.056182861328125, 0.0085601806640625, -0.00930023193359375, 0.002460479736328125, -0.0246429443359375, -0.05474853515625, 0.017822265625, -0.00576019287109375, -0.041900634765625, 0.0202789306640625, 0.019989013671875, 0.0311279296875, 0.053558349609375, 0.05731201171875, 0.004253387451171875, 0.005428314208984375, -0.0278778076171875, 0.06072998046875, -0.0723876953125, -0.048370361328125, -0.054840087890625, 0.0215911865234375, -0.01253509521484375, -0.026824951171875, 0.048492431640625, 0.044403076171875, 0.03302001953125, -0.0232696533203125, 0.05572509765625, -0.0097503662109375, 0.037261962890625, -0.0311279296875, 0.057708740234375, -0.047454833984375, -0.0220184326171875, -0.024322509765625, -0.0870361328125, -0.0225677490234375, 0.05865478515625, -0.035247802734375, 0.012237548828125, 0.060821533203125, 0.051727294921875, -0.0289306640625, 0.0094146728515625, 0.018798828125, 0.0469970703125, 0.03363037109375, 0.06427001953125, 0.056610107421875, -0.04083251953125, 0.051910400390625, -0.00492095947265625, -0.015899658203125, -0.052642822265625, -0.0552978515625, -0.08740234375, -0.042724609375, -0.0113372802734375, -0.03253173828125, 0.0103912353515625, 0.0755615234375, 0.06854248046875, -0.04608154296875, -0.00479888916015625, 0.0024089813232421875, -0.0179901123046875, -0.0212249755859375, -0.0164947509765625, 0.04803466796875, -0.0236053466796875, -0.056732177734375, -0.0023250579833984375, 0.00984954833984375, 0.018096923828125, -0.024322509765625, -0.0017461776733398438, -0.0177154541015625, -0.01172637939453125, 0.043609619140625, 
0.052642822265625, -0.04986572265625, -0.0206756591796875, -0.01113128662109375, -0.005512237548828125, -0.0029048919677734375, 0.0250701904296875, -0.042144775390625, 0.014892578125, 0.0135040283203125, 0.05010986328125, 0.06585693359375, -0.01334381103515625, 0.014923095703125, -0.034393310546875, 0.048614501953125, 0.01297760009765625, 0.0283966064453125, 0.022857666015625, -0.029876708984375, 0.02655029296875, 0.0190582275390625, -0.048828125, -0.058685302734375, 0.00009918212890625, -0.08941650390625, -0.022674560546875, 0.10211181640625, -0.0163421630859375, -0.054840087890625, 0.0214385986328125, -0.0174102783203125, 0.0177459716796875, -0.0287628173828125, 0.0657958984375, 0.04010009765625, 0.02142333984375, -0.014984130859375, -0.015350341796875, 0.0284423828125, 0.018341064453125, -0.036346435546875, -0.0010166168212890625, 0.01983642578125, 0.0207672119140625, 0.02862548828125, 0.01146697998046875, 0.01030731201171875, 0.0251617431640625, -0.002735137939453125, 0.018096923828125, -0.04351806640625, -0.0229034423828125, -0.0191192626953125, 0.0088653564453125, -0.015380859375, -0.0208740234375 ] ]
monologg/kobigbird-bert-base
2023-06-12T12:30:09.000Z
[ "transformers", "pytorch", "safetensors", "big_bird", "fill-mask", "korean", "ko", "autotrain_compatible", "endpoints_compatible", "region:us" ]
fill-mask
monologg
null
null
monologg/kobigbird-bert-base
15
8,688
transformers
2022-03-02T23:29:05
--- language: ko tags: - korean mask_token: "[MASK]" widget: - text: 대한민국의 수도는 [MASK] 입니다. --- # KoBigBird <img src="https://user-images.githubusercontent.com/28896432/140442206-e34b02d5-e279-47e5-9c2a-db1278b1c14d.png" width="200"/> Pretrained BigBird Model for Korean (**kobigbird-bert-base**) ## About BigBird, is a sparse-attention based transformer which extends Transformer based models, such as BERT to much longer sequences. BigBird relies on **block sparse attention** instead of normal attention (i.e. BERT's attention) and can handle sequences up to a length of 4096 at a much lower compute cost compared to BERT. Model is warm started from Korean BERT’s checkpoint. ## How to use *NOTE:* Use `BertTokenizer` instead of BigBirdTokenizer. (`AutoTokenizer` will load `BertTokenizer`) ```python from transformers import AutoModel, AutoTokenizer # by default its in `block_sparse` mode with num_random_blocks=3, block_size=64 model = AutoModel.from_pretrained("monologg/kobigbird-bert-base") # you can change `attention_type` to full attention like this: model = AutoModel.from_pretrained("monologg/kobigbird-bert-base", attention_type="original_full") # you can change `block_size` & `num_random_blocks` like this: model = AutoModel.from_pretrained("monologg/kobigbird-bert-base", block_size=16, num_random_blocks=2) tokenizer = AutoTokenizer.from_pretrained("monologg/kobigbird-bert-base") text = "한국어 BigBird 모델을 공개합니다!" encoded_input = tokenizer(text, return_tensors='pt') output = model(**encoded_input) ```
1,538
[ [ -0.022705078125, -0.040863037109375, 0.0032100677490234375, 0.053314208984375, -0.03271484375, -0.01503753662109375, -0.02471923828125, -0.0211334228515625, 0.0152435302734375, 0.0204925537109375, -0.056915283203125, -0.0239715576171875, -0.056915283203125, 0.00905609130859375, -0.001617431640625, 0.0712890625, 0.0099639892578125, -0.0001042485237121582, 0.01355743408203125, 0.01160430908203125, -0.034576416015625, -0.041473388671875, -0.048095703125, -0.032928466796875, 0.027862548828125, 0.005901336669921875, 0.046142578125, 0.034576416015625, 0.046905517578125, 0.0286865234375, -0.018280029296875, -0.01058197021484375, -0.042266845703125, -0.00839996337890625, -0.0088958740234375, -0.0259857177734375, -0.028106689453125, -0.0094146728515625, 0.05908203125, 0.0301361083984375, 0.0026226043701171875, 0.03839111328125, -0.0008282661437988281, 0.043701171875, -0.032928466796875, 0.038116455078125, -0.031280517578125, 0.0232696533203125, 0.005634307861328125, 0.005985260009765625, -0.0377197265625, -0.01316070556640625, 0.006195068359375, -0.0426025390625, 0.014739990234375, 0.007068634033203125, 0.092041015625, 0.024810791015625, -0.027740478515625, -0.006275177001953125, -0.0494384765625, 0.0673828125, -0.060272216796875, 0.02740478515625, 0.031463623046875, 0.0289154052734375, -0.01102447509765625, -0.0618896484375, -0.032501220703125, -0.0116729736328125, -0.022369384765625, 0.01312255859375, 0.0011682510375976562, 0.006793975830078125, 0.0404052734375, 0.0242156982421875, -0.056365966796875, -0.004364013671875, -0.038116455078125, -0.01605224609375, 0.04534912109375, 0.000020265579223632812, -0.00969696044921875, -0.0271453857421875, -0.00954437255859375, -0.0308685302734375, -0.01947021484375, 0.015533447265625, 0.0161285400390625, 0.0275726318359375, -0.00867462158203125, 0.031951904296875, -0.01000213623046875, 0.0574951171875, 0.05511474609375, -0.0120849609375, 0.041595458984375, -0.0154876708984375, -0.038848876953125, 0.0161285400390625, 
0.063232421875, 0.00540924072265625, 0.004917144775390625, 0.003200531005859375, 0.0006875991821289062, -0.0255126953125, 0.018096923828125, -0.0704345703125, -0.0222015380859375, 0.0230560302734375, -0.04638671875, -0.01183319091796875, 0.01605224609375, -0.04534912109375, 0.008453369140625, -0.0251007080078125, 0.05609130859375, -0.041748046875, -0.032928466796875, 0.021514892578125, 0.0021076202392578125, 0.0316162109375, -0.01128387451171875, -0.07122802734375, -0.00548553466796875, 0.0283966064453125, 0.048309326171875, 0.033416748046875, -0.035614013671875, -0.02093505859375, 0.00455474853515625, -0.01410675048828125, 0.0184478759765625, -0.016632080078125, -0.01184844970703125, -0.010833740234375, 0.0325927734375, -0.0302581787109375, -0.031646728515625, 0.018585205078125, -0.043212890625, 0.0191497802734375, -0.007598876953125, -0.0278472900390625, -0.030670166015625, 0.013580322265625, -0.049102783203125, 0.06915283203125, 0.0292816162109375, -0.0601806640625, 0.0303497314453125, -0.06072998046875, -0.047576904296875, 0.0001583099365234375, 0.0190887451171875, -0.0609130859375, -0.005229949951171875, 0.0193023681640625, 0.044189453125, 0.0149688720703125, 0.0196380615234375, -0.031585693359375, -0.033721923828125, 0.011505126953125, -0.005718231201171875, 0.0731201171875, 0.016387939453125, -0.04473876953125, 0.0158843994140625, -0.055389404296875, 0.01580810546875, 0.0169677734375, -0.0230712890625, 0.0029697418212890625, -0.017974853515625, 0.01308441162109375, 0.0287017822265625, 0.0191650390625, -0.0374755859375, 0.019989013671875, -0.035919189453125, 0.034698486328125, 0.05108642578125, -0.00969696044921875, 0.015716552734375, -0.020782470703125, 0.0286102294921875, 0.01322174072265625, 0.032135009765625, -0.0220947265625, -0.004665374755859375, -0.061248779296875, -0.0450439453125, 0.0399169921875, 0.0168304443359375, -0.044525146484375, 0.06903076171875, -0.0197296142578125, -0.039154052734375, -0.061248779296875, 0.00928497314453125, 
0.0296630859375, -0.0014286041259765625, 0.0278167724609375, 0.0014553070068359375, -0.061370849609375, -0.058258056640625, 0.00994110107421875, 0.003673553466796875, -0.000027418136596679688, 0.0104827880859375, 0.0465087890625, -0.01922607421875, 0.05511474609375, -0.0274810791015625, -0.0240631103515625, -0.031494140625, 0.007579803466796875, 0.05438232421875, 0.05926513671875, 0.044769287109375, -0.0579833984375, -0.05352783203125, -0.01276397705078125, -0.049896240234375, 0.0139312744140625, -0.00984954833984375, -0.0258636474609375, 0.0201568603515625, 0.0279083251953125, -0.0775146484375, 0.04302978515625, 0.0289154052734375, -0.0285491943359375, 0.046234130859375, -0.0016355514526367188, -0.0027008056640625, -0.079345703125, 0.0286865234375, -0.02374267578125, -0.013458251953125, -0.022003173828125, 0.021759033203125, 0.0186614990234375, -0.0111541748046875, -0.0304107666015625, 0.039215087890625, -0.040863037109375, -0.0074005126953125, -0.0161590576171875, -0.0281829833984375, -0.005893707275390625, 0.048614501953125, 0.0123443603515625, 0.030731201171875, 0.054534912109375, -0.038726806640625, 0.04833984375, 0.03460693359375, -0.025177001953125, 0.006649017333984375, -0.058319091796875, 0.00002849102020263672, -0.0128021240234375, 0.032318115234375, -0.0880126953125, -0.025604248046875, 0.031097412109375, -0.047271728515625, 0.0545654296875, -0.01220703125, -0.04754638671875, -0.0799560546875, -0.017913818359375, 0.0306854248046875, 0.0535888671875, -0.050689697265625, 0.039459228515625, -0.01214599609375, -0.01031494140625, -0.0396728515625, -0.044464111328125, -0.01461029052734375, 0.0008058547973632812, -0.03778076171875, 0.0037479400634765625, -0.01224517822265625, 0.0355224609375, 0.01071929931640625, -0.008453369140625, 0.005290985107421875, -0.00788116455078125, 0.0240020751953125, 0.027252197265625, -0.03546142578125, 0.006267547607421875, -0.0099945068359375, -0.0113067626953125, 0.00017189979553222656, -0.031982421875, 0.05859375, 
-0.0118865966796875, -0.0306549072265625, -0.037933349609375, 0.01062774658203125, 0.04327392578125, -0.0219573974609375, 0.0672607421875, 0.07257080078125, -0.01006317138671875, -0.00011831521987915039, -0.045074462890625, -0.0174102783203125, -0.040191650390625, 0.04046630859375, -0.0267181396484375, -0.0531005859375, 0.02618408203125, 0.01751708984375, 0.005359649658203125, 0.031768798828125, 0.040283203125, -0.0008516311645507812, 0.07904052734375, 0.0482177734375, -0.016937255859375, 0.032562255859375, -0.0418701171875, 0.01422119140625, -0.064453125, -0.00835418701171875, -0.016357421875, -0.034759521484375, -0.0178985595703125, -0.012115478515625, 0.0055084228515625, 0.0014791488647460938, -0.048980712890625, 0.024383544921875, -0.056243896484375, 0.0301361083984375, 0.051025390625, 0.019866943359375, -0.018829345703125, 0.005542755126953125, -0.002475738525390625, 0.0020046234130859375, -0.0657958984375, -0.009918212890625, 0.09735107421875, 0.01299285888671875, 0.06005859375, -0.00972747802734375, 0.040313720703125, -0.005664825439453125, 0.02874755859375, -0.049835205078125, 0.033416748046875, 0.006946563720703125, -0.07147216796875, -0.036895751953125, -0.02618408203125, -0.07757568359375, 0.0254364013671875, -0.00492095947265625, -0.046539306640625, -0.00592041015625, 0.01477813720703125, -0.03466796875, 0.021240234375, -0.044677734375, 0.082275390625, -0.008453369140625, -0.015045166015625, 0.0124053955078125, -0.05963134765625, 0.035736083984375, -0.01500701904296875, -0.0106353759765625, 0.019989013671875, 0.0311126708984375, 0.050872802734375, -0.023040771484375, 0.07171630859375, -0.021728515625, 0.017181396484375, 0.0240478515625, -0.020294189453125, 0.0341796875, -0.0027008056640625, -0.0047760009765625, 0.005962371826171875, -0.0070648193359375, -0.048980712890625, -0.021392822265625, 0.037322998046875, -0.0810546875, -0.037933349609375, -0.045806884765625, -0.038726806640625, 0.0013914108276367188, 0.028717041015625, 0.033935546875, 
0.0223846435546875, 0.006580352783203125, 0.04345703125, 0.038360595703125, -0.01812744140625, 0.04144287109375, 0.050140380859375, -0.026153564453125, -0.035430908203125, 0.0528564453125, 0.005908966064453125, 0.01032257080078125, 0.020294189453125, 0.0012960433959960938, -0.0364990234375, -0.0157928466796875, -0.013458251953125, 0.040863037109375, -0.03826904296875, -0.02471923828125, -0.045989990234375, -0.054168701171875, -0.05462646484375, -0.004146575927734375, -0.0216064453125, -0.0013446807861328125, -0.0186004638671875, -0.0036754608154296875, 0.01195526123046875, 0.02215576171875, -0.017486572265625, 0.043792724609375, -0.06219482421875, 0.0251312255859375, 0.031097412109375, 0.023284912109375, 0.0228424072265625, -0.06396484375, -0.034271240234375, 0.0157318115234375, -0.0390625, -0.032867431640625, 0.035797119140625, 0.010772705078125, 0.041534423828125, 0.040985107421875, 0.0139617919921875, 0.05926513671875, -0.033721923828125, 0.04840087890625, 0.030242919921875, -0.06158447265625, 0.032806396484375, -0.0090484619140625, 0.049560546875, 0.01369476318359375, 0.040679931640625, -0.0276031494140625, -0.0271148681640625, -0.050811767578125, -0.056793212890625, 0.08892822265625, 0.0242156982421875, 0.0010242462158203125, 0.0031414031982421875, 0.01329803466796875, -0.0017242431640625, 0.020599365234375, -0.07830810546875, -0.00415802001953125, -0.054779052734375, -0.034454345703125, -0.0313720703125, -0.0181427001953125, 0.0007638931274414062, -0.04132080078125, 0.06915283203125, 0.0102996826171875, 0.041900634765625, 0.0181884765625, -0.0254058837890625, -0.02398681640625, -0.0104522705078125, 0.046417236328125, 0.0458984375, -0.0390625, -0.004638671875, -0.0068359375, -0.040863037109375, 0.016632080078125, 0.0182647705078125, -0.006687164306640625, 0.02032470703125, 0.0390625, 0.0799560546875, 0.0275115966796875, -0.02679443359375, 0.042083740234375, 0.00852203369140625, -0.0304718017578125, -0.0278167724609375, -0.00505828857421875, 0.0186920166015625, 
0.0241241455078125, 0.0263214111328125, -0.01369476318359375, -0.0218353271484375, -0.0221099853515625, -0.004100799560546875, 0.021942138671875, -0.00662994384765625, -0.02880859375, 0.04937744140625, 0.004848480224609375, -0.020416259765625, 0.047454833984375, -0.015045166015625, -0.053955078125, 0.07318115234375, 0.06451416015625, 0.0615234375, -0.037078857421875, 0.00875091552734375, 0.051422119140625, 0.030548095703125, -0.005710601806640625, 0.01702880859375, -0.00514984130859375, -0.0282135009765625, -0.0447998046875, -0.06591796875, -0.0006580352783203125, 0.04541015625, -0.05340576171875, 0.012939453125, -0.04119873046875, -0.0250244140625, 0.0139923095703125, 0.0086517333984375, -0.0546875, 0.01181793212890625, 0.013824462890625, 0.05596923828125, -0.050445556640625, 0.05316162109375, 0.060333251953125, -0.021881103515625, -0.07025146484375, -0.00707244873046875, -0.0265960693359375, -0.06402587890625, 0.08349609375, 0.03582763671875, 0.034942626953125, 0.0123291015625, -0.049591064453125, -0.08758544921875, 0.077392578125, -0.00659942626953125, -0.04949951171875, 0.00539398193359375, 0.00563812255859375, 0.0290069580078125, -0.00334930419921875, 0.0221710205078125, 0.01812744140625, 0.051361083984375, 0.0243988037109375, -0.0743408203125, 0.00749969482421875, -0.0177154541015625, 0.01324462890625, 0.0234375, -0.0728759765625, 0.0811767578125, -0.0091552734375, -0.0040130615234375, 0.012054443359375, 0.049896240234375, 0.0114898681640625, 0.010406494140625, 0.03533935546875, 0.036895751953125, 0.027862548828125, 0.0005168914794921875, 0.0499267578125, -0.035308837890625, 0.059112548828125, 0.067138671875, -0.019775390625, 0.038726806640625, 0.0213470458984375, -0.025115966796875, 0.020904541015625, 0.047943115234375, -0.033172607421875, 0.050262451171875, 0.0157318115234375, 0.0018320083618164062, -0.0249176025390625, 0.0484619140625, -0.04949951171875, 0.0238037109375, 0.009979248046875, -0.021240234375, -0.009918212890625, 0.0277862548828125, 
-0.007049560546875, -0.032135009765625, -0.0283660888671875, 0.044189453125, -0.00798797607421875, -0.059112548828125, 0.07769775390625, -0.01263427734375, 0.050811767578125, -0.0762939453125, 0.013092041015625, -0.0087127685546875, 0.041351318359375, -0.006282806396484375, -0.033447265625, 0.0028133392333984375, -0.015716552734375, -0.0285491943359375, 0.007358551025390625, 0.04302978515625, -0.044189453125, -0.056396484375, 0.00969696044921875, -0.00878143310546875, 0.005329132080078125, -0.01325225830078125, -0.05804443359375, 0.01477813720703125, 0.00855255126953125, -0.042572021484375, 0.02880859375, 0.0306549072265625, 0.030426025390625, 0.0540771484375, 0.059112548828125, -0.00478363037109375, 0.02691650390625, -0.0274200439453125, 0.06744384765625, -0.06048583984375, -0.04986572265625, -0.060333251953125, 0.025970458984375, -0.010650634765625, -0.01409912109375, 0.0574951171875, 0.036773681640625, 0.04534912109375, -0.046295166015625, 0.047088623046875, -0.021514892578125, 0.0343017578125, -0.037139892578125, 0.062255859375, -0.0322265625, -0.0292510986328125, -0.0189971923828125, -0.06939697265625, -0.007587432861328125, 0.07061767578125, -0.0176544189453125, 0.0153961181640625, 0.05450439453125, 0.054779052734375, -0.028289794921875, -0.01067352294921875, 0.0211334228515625, 0.048675537109375, 0.01751708984375, 0.059234619140625, 0.05322265625, -0.0411376953125, 0.053192138671875, -0.017181396484375, -0.0112457275390625, -0.049530029296875, -0.05364990234375, -0.09246826171875, -0.0204925537109375, -0.0166473388671875, -0.02374267578125, 0.0004892349243164062, 0.0799560546875, 0.075439453125, -0.056243896484375, -0.007221221923828125, 0.01580810546875, -0.004245758056640625, -0.0306854248046875, -0.0228424072265625, 0.050018310546875, -0.039215087890625, -0.04345703125, -0.007312774658203125, -0.004207611083984375, 0.02032470703125, -0.00794219970703125, -0.01084136962890625, -0.0271759033203125, 0.002162933349609375, 0.050262451171875, 0.0321044921875, 
-0.048004150390625, -0.01837158203125, -0.01053619384765625, -0.01465606689453125, -0.006938934326171875, 0.01317596435546875, -0.044189453125, 0.0173492431640625, 0.001865386962890625, 0.036407470703125, 0.06402587890625, -0.007297515869140625, 0.01275634765625, -0.051422119140625, 0.04449462890625, 0.0093841552734375, 0.01444244384765625, 0.0149688720703125, -0.01763916015625, 0.027313232421875, 0.019805908203125, -0.053314208984375, -0.0677490234375, -0.01116180419921875, -0.0771484375, -0.02703857421875, 0.08447265625, -0.026275634765625, -0.041656494140625, 0.005401611328125, -0.03900146484375, 0.0205841064453125, -0.0130767822265625, 0.058868408203125, 0.0615234375, 0.021697998046875, -0.002941131591796875, -0.01385498046875, 0.0247802734375, 0.02801513671875, -0.0274200439453125, -0.006237030029296875, 0.016204833984375, 0.0389404296875, 0.019775390625, 0.040924072265625, 0.0028438568115234375, 0.033294677734375, 0.005924224853515625, 0.030517578125, -0.0300750732421875, -0.01079559326171875, -0.0101470947265625, -0.0032558441162109375, -0.0240936279296875, -0.04302978515625 ] ]
Helsinki-NLP/opus-mt-tc-big-ar-en
2023-08-16T12:10:50.000Z
[ "transformers", "pytorch", "tf", "marian", "text2text-generation", "translation", "opus-mt-tc", "ar", "en", "license:cc-by-4.0", "model-index", "autotrain_compatible", "endpoints_compatible", "has_space", "region:us" ]
translation
Helsinki-NLP
null
null
Helsinki-NLP/opus-mt-tc-big-ar-en
9
8,686
transformers
2022-04-13T15:18:06
--- language: - ar - en tags: - translation - opus-mt-tc license: cc-by-4.0 model-index: - name: opus-mt-tc-big-ar-en results: - task: name: Translation ara-eng type: translation args: ara-eng dataset: name: flores101-devtest type: flores_101 args: ara eng devtest metrics: - name: BLEU type: bleu value: 42.6 - task: name: Translation ara-eng type: translation args: ara-eng dataset: name: tatoeba-test-v2021-08-07 type: tatoeba_mt args: ara-eng metrics: - name: BLEU type: bleu value: 47.3 - task: name: Translation ara-eng type: translation args: ara-eng dataset: name: tico19-test type: tico19-test args: ara-eng metrics: - name: BLEU type: bleu value: 44.4 --- # opus-mt-tc-big-ar-en Neural machine translation model for translating from Arabic (ar) to English (en). This model is part of the [OPUS-MT project](https://github.com/Helsinki-NLP/Opus-MT), an effort to make neural machine translation models widely available and accessible for many languages in the world. All models are originally trained using the amazing framework of [Marian NMT](https://marian-nmt.github.io/), an efficient NMT implementation written in pure C++. The models have been converted to pyTorch using the transformers library by huggingface. Training data is taken from [OPUS](https://opus.nlpl.eu/) and training pipelines use the procedures of [OPUS-MT-train](https://github.com/Helsinki-NLP/Opus-MT-train). * Publications: [OPUS-MT – Building open translation services for the World](https://aclanthology.org/2020.eamt-1.61/) and [The Tatoeba Translation Challenge – Realistic Data Sets for Low Resource and Multilingual MT](https://aclanthology.org/2020.wmt-1.139/) (Please, cite if you use this model.) 
``` @inproceedings{tiedemann-thottingal-2020-opus, title = "{OPUS}-{MT} {--} Building open translation services for the World", author = {Tiedemann, J{\"o}rg and Thottingal, Santhosh}, booktitle = "Proceedings of the 22nd Annual Conference of the European Association for Machine Translation", month = nov, year = "2020", address = "Lisboa, Portugal", publisher = "European Association for Machine Translation", url = "https://aclanthology.org/2020.eamt-1.61", pages = "479--480", } @inproceedings{tiedemann-2020-tatoeba, title = "The Tatoeba Translation Challenge {--} Realistic Data Sets for Low Resource and Multilingual {MT}", author = {Tiedemann, J{\"o}rg}, booktitle = "Proceedings of the Fifth Conference on Machine Translation", month = nov, year = "2020", address = "Online", publisher = "Association for Computational Linguistics", url = "https://aclanthology.org/2020.wmt-1.139", pages = "1174--1182", } ``` ## Model info * Release: 2022-03-09 * source language(s): afb ara arz * target language(s): eng * model: transformer-big * data: opusTCv20210807+bt ([source](https://github.com/Helsinki-NLP/Tatoeba-Challenge)) * tokenization: SentencePiece (spm32k,spm32k) * original model: [opusTCv20210807+bt_transformer-big_2022-03-09.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/ara-eng/opusTCv20210807+bt_transformer-big_2022-03-09.zip) * more information released models: [OPUS-MT ara-eng README](https://github.com/Helsinki-NLP/Tatoeba-Challenge/tree/master/models/ara-eng/README.md) ## Usage A short example code: ```python from transformers import MarianMTModel, MarianTokenizer src_text = [ "اتبع قلبك فحسب.", "وين راهي دّوش؟" ] model_name = "pytorch-models/opus-mt-tc-big-ar-en" tokenizer = MarianTokenizer.from_pretrained(model_name) model = MarianMTModel.from_pretrained(model_name) translated = model.generate(**tokenizer(src_text, return_tensors="pt", padding=True)) for t in translated: print( tokenizer.decode(t, skip_special_tokens=True) ) # expected output: # Just follow 
your heart. # Wayne Rahi Dosh? ``` You can also use OPUS-MT models with the transformers pipelines, for example: ```python from transformers import pipeline pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-tc-big-ar-en") print(pipe("اتبع قلبك فحسب.")) # expected output: Just follow your heart. ``` ## Benchmarks * test set translations: [opusTCv20210807+bt_transformer-big_2022-03-09.test.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/ara-eng/opusTCv20210807+bt_transformer-big_2022-03-09.test.txt) * test set scores: [opusTCv20210807+bt_transformer-big_2022-03-09.eval.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/ara-eng/opusTCv20210807+bt_transformer-big_2022-03-09.eval.txt) * benchmark results: [benchmark_results.txt](benchmark_results.txt) * benchmark output: [benchmark_translations.zip](benchmark_translations.zip) | langpair | testset | chr-F | BLEU | #sent | #words | |----------|---------|-------|-------|-------|--------| | ara-eng | tatoeba-test-v2021-08-07 | 0.63477 | 47.3 | 10305 | 76975 | | ara-eng | flores101-devtest | 0.66987 | 42.6 | 1012 | 24721 | | ara-eng | tico19-test | 0.68521 | 44.4 | 2100 | 56323 | ## Acknowledgements The work is supported by the [European Language Grid](https://www.european-language-grid.eu/) as [pilot project 2866](https://live.european-language-grid.eu/catalogue/#/resource/projects/2866), by the [FoTran project](https://www.helsinki.fi/en/researchgroups/natural-language-understanding-with-cross-lingual-grounding), funded by the European Research Council (ERC) under the European Union’s Horizon 2020 research and innovation programme (grant agreement No 771113), and the [MeMAD project](https://memad.eu/), funded by the European Union’s Horizon 2020 Research and Innovation Programme under grant agreement No 780069. We are also grateful for the generous computational resources and IT infrastructure provided by [CSC -- IT Center for Science](https://www.csc.fi/), Finland. 
## Model conversion info * transformers version: 4.16.2 * OPUS-MT git hash: 3405783 * port time: Wed Apr 13 18:17:57 EEST 2022 * port machine: LM0-400-22516.local
6,115
[ [ -0.0273895263671875, -0.040740966796875, 0.0177154541015625, 0.018035888671875, -0.0377197265625, -0.0157623291015625, -0.03790283203125, -0.0239105224609375, 0.0109100341796875, 0.0300140380859375, -0.0309600830078125, -0.05413818359375, -0.05023193359375, 0.0268096923828125, -0.0159912109375, 0.063720703125, -0.03009033203125, 0.0190277099609375, 0.0248260498046875, -0.028533935546875, -0.01013946533203125, -0.033905029296875, -0.031951904296875, -0.025482177734375, 0.0217132568359375, 0.0118408203125, 0.040557861328125, 0.0499267578125, 0.0416259765625, 0.0257720947265625, -0.018768310546875, 0.0196075439453125, -0.00634765625, -0.011962890625, 0.0065765380859375, -0.030487060546875, -0.043121337890625, -0.00656890869140625, 0.069580078125, 0.042236328125, 0.006988525390625, 0.027496337890625, 0.00733184814453125, 0.05072021484375, -0.013580322265625, 0.01016998291015625, -0.04229736328125, 0.007110595703125, -0.0232086181640625, -0.0198516845703125, -0.0406494140625, -0.01151275634765625, 0.003108978271484375, -0.036956787109375, -0.000804901123046875, 0.00811004638671875, 0.09344482421875, 0.016265869140625, -0.028533935546875, -0.01366424560546875, -0.055206298828125, 0.07568359375, -0.06103515625, 0.04754638671875, 0.0186920166015625, 0.0031795501708984375, -0.00984954833984375, -0.049072265625, -0.045654296875, -0.007076263427734375, -0.0125732421875, 0.0186004638671875, -0.0257415771484375, -0.0095672607421875, 0.0144805908203125, 0.043426513671875, -0.04876708984375, -0.0010271072387695312, -0.033294677734375, -0.0130157470703125, 0.032867431640625, -0.0007252693176269531, 0.020416259765625, -0.0264739990234375, -0.0303955078125, -0.034576416015625, -0.0538330078125, 0.0092620849609375, 0.023956298828125, 0.0276031494140625, -0.047607421875, 0.05267333984375, -0.001445770263671875, 0.05303955078125, 0.00042724609375, -0.0023517608642578125, 0.05047607421875, -0.038970947265625, -0.0156402587890625, -0.0113983154296875, 0.09033203125, 0.014892578125, 
0.00807952880859375, -0.0149078369140625, -0.010162353515625, -0.011688232421875, -0.0146636962890625, -0.0648193359375, 0.016387939453125, 0.021697998046875, -0.035430908203125, -0.00252532958984375, -0.006717681884765625, -0.04278564453125, 0.01513671875, -0.0205230712890625, 0.034637451171875, -0.05108642578125, -0.033966064453125, 0.0181121826171875, 0.009765625, 0.0249176025390625, 0.00212860107421875, -0.042236328125, -0.0005121231079101562, 0.0306854248046875, 0.07025146484375, -0.00832366943359375, -0.0380859375, -0.038665771484375, -0.005153656005859375, -0.0199737548828125, 0.033233642578125, -0.01155853271484375, -0.021026611328125, -0.01067352294921875, 0.01546478271484375, -0.015380859375, -0.0205535888671875, 0.06683349609375, -0.02911376953125, 0.0304412841796875, -0.01629638671875, -0.02490234375, -0.024505615234375, 0.0188751220703125, -0.033782958984375, 0.083740234375, 0.00681304931640625, -0.0640869140625, 0.004730224609375, -0.05120849609375, -0.0180206298828125, -0.009613037109375, 0.0080108642578125, -0.03790283203125, 0.005336761474609375, 0.023651123046875, 0.03070068359375, -0.045684814453125, 0.0263214111328125, 0.007251739501953125, -0.01342010498046875, 0.01421356201171875, -0.040863037109375, 0.07794189453125, 0.02777099609375, -0.0386962890625, 0.01190948486328125, -0.054962158203125, 0.001560211181640625, 0.006320953369140625, -0.031646728515625, -0.0111846923828125, -0.00821685791015625, 0.0118560791015625, 0.0257415771484375, 0.0138702392578125, -0.046844482421875, 0.01125335693359375, -0.053802490234375, 0.032379150390625, 0.05108642578125, -0.0207672119140625, 0.0271148681640625, -0.018524169921875, 0.036041259765625, 0.018829345703125, -0.0104522705078125, -0.0103302001953125, -0.04278564453125, -0.0704345703125, -0.0215911865234375, 0.044189453125, 0.040863037109375, -0.07952880859375, 0.04754638671875, -0.05157470703125, -0.06280517578125, -0.058746337890625, -0.0165252685546875, 0.046417236328125, 0.0263671875, 0.04736328125, 
-0.01485443115234375, -0.04022216796875, -0.06365966796875, -0.0236663818359375, -0.0154266357421875, -0.0006909370422363281, 0.01152801513671875, 0.05023193359375, -0.022979736328125, 0.044586181640625, -0.01220703125, -0.0258941650390625, -0.025390625, 0.01377105712890625, 0.044189453125, 0.054779052734375, 0.033172607421875, -0.0582275390625, -0.0528564453125, 0.029693603515625, -0.050140380859375, -0.0034637451171875, -0.0005660057067871094, -0.0189056396484375, 0.027557373046875, 0.01136016845703125, -0.049896240234375, 0.016693115234375, 0.0552978515625, -0.03497314453125, 0.034027099609375, -0.0131683349609375, 0.02008056640625, -0.11334228515625, 0.0174560546875, -0.007686614990234375, -0.0088043212890625, -0.04559326171875, 0.00972747802734375, 0.003582000732421875, 0.007549285888671875, -0.051300048828125, 0.0526123046875, -0.0452880859375, -0.0008320808410644531, 0.0173797607421875, -0.00263214111328125, -0.005672454833984375, 0.061370849609375, 0.0012111663818359375, 0.06890869140625, 0.044281005859375, -0.044586181640625, 0.01378631591796875, 0.03851318359375, -0.023651123046875, 0.01434326171875, -0.05645751953125, 0.00594329833984375, 0.0126190185546875, 0.00665283203125, -0.046722412109375, 0.008636474609375, 0.0377197265625, -0.054840087890625, 0.02960205078125, -0.0250396728515625, -0.056549072265625, -0.0201263427734375, -0.0099029541015625, 0.0390625, 0.041046142578125, -0.034637451171875, 0.060882568359375, 0.005870819091796875, 0.0007834434509277344, -0.04962158203125, -0.06494140625, 0.0061492919921875, -0.020599365234375, -0.055419921875, 0.0323486328125, -0.0102691650390625, 0.0022182464599609375, 0.002857208251953125, 0.00594329833984375, 0.00402069091796875, 0.0010738372802734375, 0.0045166015625, 0.010009765625, -0.03265380859375, -0.0023059844970703125, 0.0024871826171875, -0.02392578125, -0.01288604736328125, -0.041748046875, 0.0635986328125, -0.040679931640625, -0.017181396484375, -0.053436279296875, 0.020843505859375, 
0.057952880859375, -0.047943115234375, 0.0765380859375, 0.05487060546875, -0.020416259765625, 0.01385498046875, -0.028533935546875, 0.00251007080078125, -0.033416748046875, 0.0323486328125, -0.04473876953125, -0.053192138671875, 0.053009033203125, 0.0119476318359375, 0.0179290771484375, 0.068603515625, 0.06280517578125, 0.0197906494140625, 0.07208251953125, 0.02557373046875, 0.0026226043701171875, 0.021820068359375, -0.05194091796875, 0.01100921630859375, -0.07525634765625, -0.004573822021484375, -0.052001953125, -0.01415252685546875, -0.0615234375, -0.042083740234375, 0.0267333984375, -0.0013599395751953125, -0.0097198486328125, 0.045745849609375, -0.0367431640625, 0.006465911865234375, 0.03759765625, -0.01215362548828125, 0.0258941650390625, 0.017730712890625, -0.03704833984375, -0.020965576171875, -0.044525146484375, -0.043182373046875, 0.08905029296875, 0.028900146484375, 0.0175018310546875, 0.0198516845703125, 0.045623779296875, -0.007904052734375, 0.023681640625, -0.042510986328125, 0.035919189453125, -0.0186309814453125, -0.041015625, -0.0124664306640625, -0.04876708984375, -0.072265625, 0.040069580078125, -0.00856781005859375, -0.04632568359375, 0.0095367431640625, -0.0029506683349609375, -0.005764007568359375, 0.049407958984375, -0.046875, 0.070556640625, -0.01094818115234375, -0.019287109375, -0.0035114288330078125, -0.040802001953125, 0.011016845703125, -0.0014982223510742188, 0.020965576171875, 0.0001366138458251953, 0.0027523040771484375, 0.06317138671875, -0.0275726318359375, 0.0364990234375, 0.0002980232238769531, -0.0126190185546875, 0.01161956787109375, 0.0014085769653320312, 0.04058837890625, -0.0138702392578125, -0.0260772705078125, 0.0440673828125, -0.0032253265380859375, -0.0299835205078125, -0.01554107666015625, 0.04473876953125, -0.0709228515625, -0.0283203125, -0.0384521484375, -0.046142578125, 0.00693511962890625, 0.031280517578125, 0.0458984375, 0.046844482421875, 0.003559112548828125, 0.034759521484375, 0.03497314453125, 
-0.035430908203125, 0.033233642578125, 0.041259765625, -0.0101165771484375, -0.038177490234375, 0.0655517578125, 0.0249176025390625, 0.023773193359375, 0.0413818359375, 0.01213836669921875, -0.0166168212890625, -0.05426025390625, -0.06768798828125, 0.037261962890625, -0.038482666015625, -0.0168609619140625, -0.057525634765625, -0.00951385498046875, -0.02294921875, 0.010345458984375, -0.045745849609375, -0.040008544921875, -0.0156707763671875, -0.0035648345947265625, 0.032470703125, 0.027252197265625, 0.0005297660827636719, 0.0252838134765625, -0.0745849609375, 0.01322174072265625, -0.02044677734375, 0.0241241455078125, -0.00518798828125, -0.0643310546875, -0.035919189453125, 0.0211181640625, -0.0297393798828125, -0.06475830078125, 0.055206298828125, 0.00513458251953125, 0.0201873779296875, 0.0082855224609375, 0.0082550048828125, 0.043609619140625, -0.0528564453125, 0.059478759765625, 0.003612518310546875, -0.07379150390625, 0.0265960693359375, -0.0283355712890625, 0.0225067138671875, 0.02728271484375, 0.0208282470703125, -0.05157470703125, -0.042633056640625, -0.05194091796875, -0.0721435546875, 0.07196044921875, 0.048431396484375, 0.004241943359375, 0.0033130645751953125, 0.00574493408203125, 0.00005990266799926758, 0.00876617431640625, -0.07452392578125, -0.04425048828125, -0.0047607421875, -0.0217437744140625, -0.01363372802734375, -0.01161956787109375, 0.0016088485717773438, -0.027191162109375, 0.07830810546875, 0.006717681884765625, 0.0333251953125, 0.028961181640625, -0.0218505859375, -0.00922393798828125, 0.0240020751953125, 0.051605224609375, 0.0309295654296875, -0.0102386474609375, 0.004467010498046875, 0.024261474609375, -0.03460693359375, 0.003917694091796875, 0.01151275634765625, -0.0230255126953125, 0.0274505615234375, 0.0274810791015625, 0.07476806640625, 0.003627777099609375, -0.03302001953125, 0.032806396484375, -0.0093231201171875, -0.0200653076171875, -0.033111572265625, -0.030914306640625, 0.0105743408203125, 0.007625579833984375, 
0.0276031494140625, 0.0117645263671875, -0.00785064697265625, -0.018218994140625, 0.00046896934509277344, 0.0147705078125, -0.02032470703125, -0.042877197265625, 0.055633544921875, 0.00998687744140625, -0.024078369140625, 0.038421630859375, -0.01971435546875, -0.058349609375, 0.038482666015625, 0.030426025390625, 0.0819091796875, -0.0122528076171875, -0.0005650520324707031, 0.05426025390625, 0.04217529296875, -0.004329681396484375, 0.014373779296875, -0.0050811767578125, -0.046966552734375, -0.035400390625, -0.06390380859375, -0.004024505615234375, 0.0019779205322265625, -0.045654296875, 0.024658203125, 0.0015897750854492188, -0.004207611083984375, -0.0146636962890625, 0.01190185546875, -0.05047607421875, 0.00347137451171875, -0.00772857666015625, 0.0657958984375, -0.06298828125, 0.071044921875, 0.047943115234375, -0.038543701171875, -0.07464599609375, -0.008087158203125, -0.0274200439453125, -0.046142578125, 0.05029296875, 0.0159759521484375, -0.004482269287109375, 0.0158538818359375, -0.0155029296875, -0.07122802734375, 0.0736083984375, 0.03289794921875, -0.026336669921875, -0.0013113021850585938, 0.0307464599609375, 0.047943115234375, -0.01456451416015625, 0.022125244140625, 0.0296783447265625, 0.051483154296875, -0.003864288330078125, -0.080322265625, -0.0007848739624023438, -0.049896240234375, -0.00130462646484375, 0.0247039794921875, -0.048004150390625, 0.07745361328125, 0.0094146728515625, -0.0213470458984375, 0.0205078125, 0.05743408203125, 0.0164794921875, 0.002437591552734375, 0.02655029296875, 0.061492919921875, 0.030914306640625, -0.030517578125, 0.0849609375, -0.0347900390625, 0.036376953125, 0.05596923828125, 0.0167388916015625, 0.06695556640625, 0.043121337890625, -0.0201873779296875, 0.0360107421875, 0.041961669921875, 0.0011625289916992188, 0.0191497802734375, -0.007381439208984375, 0.0014801025390625, -0.007373809814453125, -0.0114898681640625, -0.057098388671875, 0.0296630859375, 0.019378662109375, -0.026092529296875, -0.004238128662109375, 
-0.003986358642578125, 0.017852783203125, -0.0009589195251464844, -0.008148193359375, 0.035003662109375, 0.0121612548828125, -0.054840087890625, 0.0845947265625, 0.0267181396484375, 0.053741455078125, -0.03955078125, 0.0079345703125, -0.0105133056640625, 0.025115966796875, -0.005157470703125, -0.038421630859375, 0.0234222412109375, 0.02001953125, -0.01294708251953125, -0.0362548828125, 0.0003943443298339844, -0.04925537109375, -0.060882568359375, 0.032501220703125, 0.03558349609375, 0.028106689453125, 0.00759124755859375, -0.05401611328125, 0.007541656494140625, 0.0204925537109375, -0.04022216796875, 0.002910614013671875, 0.0439453125, -0.0004336833953857422, 0.039276123046875, 0.05401611328125, 0.022552490234375, 0.0188140869140625, -0.01428985595703125, 0.052032470703125, -0.03582763671875, -0.034423828125, -0.0684814453125, 0.056793212890625, 0.01119232177734375, -0.042327880859375, 0.06195068359375, 0.058868408203125, 0.0677490234375, -0.01155853271484375, 0.0364990234375, -0.00875091552734375, 0.03863525390625, -0.045257568359375, 0.055633544921875, -0.06488037109375, 0.0224456787109375, -0.0168609619140625, -0.075439453125, -0.0222930908203125, 0.032470703125, -0.01861572265625, -0.006626129150390625, 0.0567626953125, 0.0531005859375, -0.0072174072265625, -0.0278778076171875, 0.01103973388671875, 0.040130615234375, 0.0300140380859375, 0.05621337890625, 0.03436279296875, -0.07086181640625, 0.04779052734375, -0.0274505615234375, -0.0012941360473632812, -0.00496673583984375, -0.047332763671875, -0.0604248046875, -0.059967041015625, -0.01296234130859375, -0.034332275390625, -0.013153076171875, 0.08392333984375, 0.0231475830078125, -0.06591796875, -0.02716064453125, -0.007610321044921875, 0.0142974853515625, -0.017730712890625, -0.0114593505859375, 0.0516357421875, -0.0037136077880859375, -0.08074951171875, 0.0198974609375, 0.0052337646484375, 0.00872802734375, -0.00382232666015625, -0.0285186767578125, -0.0285797119140625, -0.01348114013671875, 
0.0217132568359375, 0.01367950439453125, -0.06866455078125, -0.002777099609375, 0.015625, -0.014434814453125, 0.0202789306640625, 0.0218505859375, -0.033111572265625, 0.0281829833984375, 0.04254150390625, 0.04595947265625, 0.04180908203125, -0.02203369140625, 0.045928955078125, -0.0465087890625, 0.03753662109375, 0.016571044921875, 0.040008544921875, 0.034759521484375, -0.0036945343017578125, 0.042694091796875, 0.017730712890625, -0.02001953125, -0.0845947265625, 0.00838470458984375, -0.0706787109375, 0.00347137451171875, 0.08612060546875, -0.0190582275390625, -0.0262298583984375, 0.01474761962890625, -0.0161895751953125, 0.04278564453125, -0.020477294921875, 0.03338623046875, 0.051483154296875, 0.0369873046875, 0.0021915435791015625, -0.040985107421875, 0.02154541015625, 0.054931640625, -0.038482666015625, -0.0027065277099609375, 0.00555419921875, 0.00783538818359375, 0.0256805419921875, 0.030364990234375, -0.01861572265625, 0.0115814208984375, -0.0236053466796875, 0.033172607421875, -0.01204681396484375, -0.00888824462890625, -0.02978515625, -0.009979248046875, -0.0056304931640625, -0.013824462890625 ] ]
TheBloke/Llama-2-7B-Chat-AWQ
2023-09-27T12:49:40.000Z
[ "transformers", "safetensors", "llama", "text-generation", "facebook", "meta", "pytorch", "llama-2", "en", "arxiv:2307.09288", "license:llama2", "text-generation-inference", "region:us" ]
text-generation
TheBloke
null
null
TheBloke/Llama-2-7B-Chat-AWQ
2
8,684
transformers
2023-09-18T21:30:22
--- language: - en license: llama2 tags: - facebook - meta - pytorch - llama - llama-2 model_name: Llama 2 7B Chat arxiv: 2307.09288 base_model: meta-llama/Llama-2-7b-chat-hf inference: false model_creator: Meta Llama 2 model_type: llama pipeline_tag: text-generation prompt_template: '[INST] <<SYS>> You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature. If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don''t know the answer to a question, please don''t share false information. <</SYS>> {prompt}[/INST] ' quantized_by: TheBloke --- <!-- header start --> <!-- 200823 --> <div style="width: auto; margin-left: auto; margin-right: auto"> <img src="https://i.imgur.com/EBdldam.jpg" alt="TheBlokeAI" style="width: 100%; min-width: 400px; display: block; margin: auto;"> </div> <div style="display: flex; justify-content: space-between; width: 100%;"> <div style="display: flex; flex-direction: column; align-items: flex-start;"> <p style="margin-top: 0.5em; margin-bottom: 0em;"><a href="https://discord.gg/theblokeai">Chat & support: TheBloke's Discord server</a></p> </div> <div style="display: flex; flex-direction: column; align-items: flex-end;"> <p style="margin-top: 0.5em; margin-bottom: 0em;"><a href="https://www.patreon.com/TheBlokeAI">Want to contribute? 
TheBloke's Patreon page</a></p> </div> </div> <div style="text-align:center; margin-top: 0em; margin-bottom: 0em"><p style="margin-top: 0.25em; margin-bottom: 0em;">TheBloke's LLM work is generously supported by a grant from <a href="https://a16z.com">andreessen horowitz (a16z)</a></p></div> <hr style="margin-top: 1.0em; margin-bottom: 1.0em;"> <!-- header end --> # Llama 2 7B Chat - AWQ - Model creator: [Meta Llama 2](https://huggingface.co/meta-llama) - Original model: [Llama 2 7B Chat](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf) <!-- description start --> ## Description This repo contains AWQ model files for [Meta Llama 2's Llama 2 7B Chat](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf). ### About AWQ AWQ is an efficient, accurate and blazing-fast low-bit weight quantization method, currently supporting 4-bit quantization. Compared to GPTQ, it offers faster Transformers-based inference. It is also now supported by continuous batching server [vLLM](https://github.com/vllm-project/vllm), allowing use of AWQ models for high-throughput concurrent inference in multi-user server scenarios. Note that, at the time of writing, overall throughput is still lower than running vLLM with unquantised models, however using AWQ enables using much smaller GPUs which can lead to easier deployment and overall cost savings. For example, a 70B model can be run on 1 x 48GB GPU instead of 2 x 80GB. 
<!-- description end --> <!-- repositories-available start --> ## Repositories available * [AWQ model(s) for GPU inference.](https://huggingface.co/TheBloke/Llama-2-7b-Chat-AWQ) * [GPTQ models for GPU inference, with multiple quantisation parameter options.](https://huggingface.co/TheBloke/Llama-2-7b-Chat-GPTQ) * [2, 3, 4, 5, 6 and 8-bit GGUF models for CPU+GPU inference](https://huggingface.co/TheBloke/Llama-2-7b-Chat-GGUF) * [Meta Llama 2's original unquantised fp16 model in pytorch format, for GPU inference and for further conversions](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf) <!-- repositories-available end --> <!-- prompt-template start --> ## Prompt template: Llama-2-Chat ``` [INST] <<SYS>> You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature. If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information. <</SYS>> {prompt}[/INST] ``` <!-- prompt-template end --> <!-- README_AWQ.md-provided-files start --> ## Provided files and AWQ parameters For my first release of AWQ models, I am releasing 128g models only. I will consider adding 32g as well if there is interest, and once I have done perplexity and evaluation comparisons, but at this time 32g models are still not fully tested with AutoAWQ and vLLM. Models are released as sharded safetensors files. 
| Branch | Bits | GS | AWQ Dataset | Seq Len | Size | | ------ | ---- | -- | ----------- | ------- | ---- | | [main](https://huggingface.co/TheBloke/Llama-2-7b-Chat-AWQ/tree/main) | 4 | 128 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 4096 | 3.89 GB <!-- README_AWQ.md-provided-files end --> <!-- README_AWQ.md-use-from-vllm start --> ## Serving this model from vLLM Documentation on installing and using vLLM [can be found here](https://vllm.readthedocs.io/en/latest/). - When using vLLM as a server, pass the `--quantization awq` parameter, for example: ```shell python3 python -m vllm.entrypoints.api_server --model TheBloke/Llama-2-7b-Chat-AWQ --quantization awq ``` When using vLLM from Python code, pass the `quantization=awq` parameter, for example: ```python from vllm import LLM, SamplingParams prompts = [ "Hello, my name is", "The president of the United States is", "The capital of France is", "The future of AI is", ] sampling_params = SamplingParams(temperature=0.8, top_p=0.95) llm = LLM(model="TheBloke/Llama-2-7b-Chat-AWQ", quantization="awq") outputs = llm.generate(prompts, sampling_params) # Print the outputs. for output in outputs: prompt = output.prompt generated_text = output.outputs[0].text print(f"Prompt: {prompt!r}, Generated text: {generated_text!r}") ``` <!-- README_AWQ.md-use-from-vllm start --> <!-- README_AWQ.md-use-from-python start --> ## How to use this AWQ model from Python code ### Install the necessary packages Requires: [AutoAWQ](https://github.com/casper-hansen/AutoAWQ) 0.0.2 or later ```shell pip3 install autoawq ``` If you have problems installing [AutoAWQ](https://github.com/casper-hansen/AutoAWQ) using the pre-built wheels, install it from source instead: ```shell pip3 uninstall -y autoawq git clone https://github.com/casper-hansen/AutoAWQ cd AutoAWQ pip3 install . 
``` ### You can then try the following example code ```python from awq import AutoAWQForCausalLM from transformers import AutoTokenizer model_name_or_path = "TheBloke/Llama-2-7b-Chat-AWQ" # Load model model = AutoAWQForCausalLM.from_quantized(model_name_or_path, fuse_layers=True, trust_remote_code=False, safetensors=True) tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, trust_remote_code=False) prompt = "Tell me about AI" prompt_template=f'''[INST] <<SYS>> You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature. If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information. <</SYS>> {prompt}[/INST] ''' print("\n\n*** Generate:") tokens = tokenizer( prompt_template, return_tensors='pt' ).input_ids.cuda() # Generate output generation_output = model.generate( tokens, do_sample=True, temperature=0.7, top_p=0.95, top_k=40, max_new_tokens=512 ) print("Output: ", tokenizer.decode(generation_output[0])) # Inference can also be done using transformers' pipeline from transformers import pipeline print("*** Pipeline:") pipe = pipeline( "text-generation", model=model, tokenizer=tokenizer, max_new_tokens=512, do_sample=True, temperature=0.7, top_p=0.95, top_k=40, repetition_penalty=1.1 ) print(pipe(prompt_template)[0]['generated_text']) ``` <!-- README_AWQ.md-use-from-python end --> <!-- README_AWQ.md-compatibility start --> ## Compatibility The files provided are tested to work with [AutoAWQ](https://github.com/casper-hansen/AutoAWQ), and [vLLM](https://github.com/vllm-project/vllm). 
[Huggingface Text Generation Inference (TGI)](https://github.com/huggingface/text-generation-inference) is not yet compatible with AWQ, but a PR is open which should bring support soon: [TGI PR #781](https://github.com/huggingface/text-generation-inference/issues/781). <!-- README_AWQ.md-compatibility end --> <!-- footer start --> <!-- 200823 --> ## Discord For further support, and discussions on these models and AI in general, join us at: [TheBloke AI's Discord server](https://discord.gg/theblokeai) ## Thanks, and how to contribute Thanks to the [chirper.ai](https://chirper.ai) team! Thanks to Clay from [gpus.llm-utils.org](llm-utils)! I've had a lot of people ask if they can contribute. I enjoy providing models and helping people, and would love to be able to spend even more time doing it, as well as expanding into new projects like fine tuning/training. If you're able and willing to contribute it will be most gratefully received and will help me to keep providing more models, and to start work on new AI projects. Donaters will get priority support on any and all AI/LLM/model questions and requests, access to a private Discord room, plus other benefits. * Patreon: https://patreon.com/TheBlokeAI * Ko-Fi: https://ko-fi.com/TheBlokeAI **Special thanks to**: Aemon Algiz. 
**Patreon special mentions**: Alicia Loh, Stephen Murray, K, Ajan Kanaga, RoA, Magnesian, Deo Leter, Olakabola, Eugene Pentland, zynix, Deep Realms, Raymond Fosdick, Elijah Stavena, Iucharbius, Erik Bjäreholt, Luis Javier Navarrete Lozano, Nicholas, theTransient, John Detwiler, alfie_i, knownsqashed, Mano Prime, Willem Michiel, Enrico Ros, LangChain4j, OG, Michael Dempsey, Pierre Kircher, Pedro Madruga, James Bentley, Thomas Belote, Luke @flexchar, Leonard Tan, Johann-Peter Hartmann, Illia Dulskyi, Fen Risland, Chadd, S_X, Jeff Scroggin, Ken Nordquist, Sean Connelly, Artur Olbinski, Swaroop Kallakuri, Jack West, Ai Maven, David Ziegler, Russ Johnson, transmissions 11, John Villwock, Alps Aficionado, Clay Pascal, Viktor Bowallius, Subspace Studios, Rainer Wilmers, Trenton Dambrowitz, vamX, Michael Levine, 준교 김, Brandon Frisco, Kalila, Trailburnt, Randy H, Talal Aujan, Nathan Dryer, Vadim, 阿明, ReadyPlayerEmma, Tiffany J. Kim, George Stoitzev, Spencer Kim, Jerry Meng, Gabriel Tamborski, Cory Kujawski, Jeffrey Morgan, Spiking Neurons AB, Edmond Seymore, Alexandros Triantafyllidis, Lone Striker, Cap'n Zoog, Nikolai Manek, danny, ya boyyy, Derek Yates, usrbinkat, Mandus, TL, Nathan LeClaire, subjectnull, Imad Khwaja, webtim, Raven Klaugh, Asp the Wyvern, Gabriel Puliatti, Caitlyn Gatomon, Joseph William Delisle, Jonathan Leane, Luke Pendergrass, SuperWojo, Sebastain Graf, Will Dee, Fred von Graf, Andrey, Dan Guido, Daniel P. Andersen, Nitin Borwankar, Elle, Vitor Caleffi, biorpg, jjj, NimbleBox.ai, Pieter, Matthew Berman, terasurfer, Michael Davis, Alex, Stanislav Ovsiannikov Thank you to all my generous patrons and donaters! And thank you again to a16z for their generous grant. <!-- footer end --> # Original model card: Meta Llama 2's Llama 2 7B Chat # **Llama 2** Llama 2 is a collection of pretrained and fine-tuned generative text models ranging in scale from 7 billion to 70 billion parameters. 
This is the repository for the 7B fine-tuned model, optimized for dialogue use cases and converted for the Hugging Face Transformers format. Links to other models can be found in the index at the bottom. ## Model Details *Note: Use of this model is governed by the Meta license. In order to download the model weights and tokenizer, please visit the [website](https://ai.meta.com/resources/models-and-libraries/llama-downloads/) and accept our License before requesting access here.* Meta developed and publicly released the Llama 2 family of large language models (LLMs), a collection of pretrained and fine-tuned generative text models ranging in scale from 7 billion to 70 billion parameters. Our fine-tuned LLMs, called Llama-2-Chat, are optimized for dialogue use cases. Llama-2-Chat models outperform open-source chat models on most benchmarks we tested, and in our human evaluations for helpfulness and safety, are on par with some popular closed-source models like ChatGPT and PaLM. **Model Developers** Meta **Variations** Llama 2 comes in a range of parameter sizes — 7B, 13B, and 70B — as well as pretrained and fine-tuned variations. **Input** Models input text only. **Output** Models generate text only. **Model Architecture** Llama 2 is an auto-regressive language model that uses an optimized transformer architecture. The tuned versions use supervised fine-tuning (SFT) and reinforcement learning with human feedback (RLHF) to align to human preferences for helpfulness and safety. ||Training Data|Params|Content Length|GQA|Tokens|LR| |---|---|---|---|---|---|---| |Llama 2|*A new mix of publicly available online data*|7B|4k|&#10007;|2.0T|3.0 x 10<sup>-4</sup>| |Llama 2|*A new mix of publicly available online data*|13B|4k|&#10007;|2.0T|3.0 x 10<sup>-4</sup>| |Llama 2|*A new mix of publicly available online data*|70B|4k|&#10004;|2.0T|1.5 x 10<sup>-4</sup>| *Llama 2 family of models.* Token counts refer to pretraining data only. 
All models are trained with a global batch-size of 4M tokens. Bigger models - 70B -- use Grouped-Query Attention (GQA) for improved inference scalability. **Model Dates** Llama 2 was trained between January 2023 and July 2023. **Status** This is a static model trained on an offline dataset. Future versions of the tuned models will be released as we improve model safety with community feedback. **License** A custom commercial license is available at: [https://ai.meta.com/resources/models-and-libraries/llama-downloads/](https://ai.meta.com/resources/models-and-libraries/llama-downloads/) **Research Paper** ["Llama-2: Open Foundation and Fine-tuned Chat Models"](arxiv.org/abs/2307.09288) ## Intended Use **Intended Use Cases** Llama 2 is intended for commercial and research use in English. Tuned models are intended for assistant-like chat, whereas pretrained models can be adapted for a variety of natural language generation tasks. To get the expected features and performance for the chat versions, a specific formatting needs to be followed, including the `INST` and `<<SYS>>` tags, `BOS` and `EOS` tokens, and the whitespaces and breaklines in between (we recommend calling `strip()` on inputs to avoid double-spaces). See our reference code in github for details: [`chat_completion`](https://github.com/facebookresearch/llama/blob/main/llama/generation.py#L212). **Out-of-scope Uses** Use in any manner that violates applicable laws or regulations (including trade compliance laws).Use in languages other than English. Use in any other way that is prohibited by the Acceptable Use Policy and Licensing Agreement for Llama 2. ## Hardware and Software **Training Factors** We used custom training libraries, Meta's Research Super Cluster, and production clusters for pretraining. Fine-tuning, annotation, and evaluation were also performed on third-party cloud compute. 
**Carbon Footprint** Pretraining utilized a cumulative 3.3M GPU hours of computation on hardware of type A100-80GB (TDP of 350-400W). Estimated total emissions were 539 tCO2eq, 100% of which were offset by Meta’s sustainability program. ||Time (GPU hours)|Power Consumption (W)|Carbon Emitted(tCO<sub>2</sub>eq)| |---|---|---|---| |Llama 2 7B|184320|400|31.22| |Llama 2 13B|368640|400|62.44| |Llama 2 70B|1720320|400|291.42| |Total|3311616||539.00| **CO<sub>2</sub> emissions during pretraining.** Time: total GPU time required for training each model. Power Consumption: peak power capacity per GPU device for the GPUs used adjusted for power usage efficiency. 100% of the emissions are directly offset by Meta's sustainability program, and because we are openly releasing these models, the pretraining costs do not need to be incurred by others. ## Training Data **Overview** Llama 2 was pretrained on 2 trillion tokens of data from publicly available sources. The fine-tuning data includes publicly available instruction datasets, as well as over one million new human-annotated examples. Neither the pretraining nor the fine-tuning datasets include Meta user data. **Data Freshness** The pretraining data has a cutoff of September 2022, but some tuning data is more recent, up to July 2023. ## Evaluation Results In this section, we report the results for the Llama 1 and Llama 2 models on standard academic benchmarks.For all the evaluations, we use our internal evaluations library. 
|Model|Size|Code|Commonsense Reasoning|World Knowledge|Reading Comprehension|Math|MMLU|BBH|AGI Eval| |---|---|---|---|---|---|---|---|---|---| |Llama 1|7B|14.1|60.8|46.2|58.5|6.95|35.1|30.3|23.9| |Llama 1|13B|18.9|66.1|52.6|62.3|10.9|46.9|37.0|33.9| |Llama 1|33B|26.0|70.0|58.4|67.6|21.4|57.8|39.8|41.7| |Llama 1|65B|30.7|70.7|60.5|68.6|30.8|63.4|43.5|47.6| |Llama 2|7B|16.8|63.9|48.9|61.3|14.6|45.3|32.6|29.3| |Llama 2|13B|24.5|66.9|55.4|65.8|28.7|54.8|39.4|39.1| |Llama 2|70B|**37.5**|**71.9**|**63.6**|**69.4**|**35.2**|**68.9**|**51.2**|**54.2**| **Overall performance on grouped academic benchmarks.** *Code:* We report the average pass@1 scores of our models on HumanEval and MBPP. *Commonsense Reasoning:* We report the average of PIQA, SIQA, HellaSwag, WinoGrande, ARC easy and challenge, OpenBookQA, and CommonsenseQA. We report 7-shot results for CommonSenseQA and 0-shot results for all other benchmarks. *World Knowledge:* We evaluate the 5-shot performance on NaturalQuestions and TriviaQA and report the average. *Reading Comprehension:* For reading comprehension, we report the 0-shot average on SQuAD, QuAC, and BoolQ. *MATH:* We report the average of the GSM8K (8 shot) and MATH (4 shot) benchmarks at top 1. |||TruthfulQA|Toxigen| |---|---|---|---| |Llama 1|7B|27.42|23.00| |Llama 1|13B|41.74|23.08| |Llama 1|33B|44.19|22.57| |Llama 1|65B|48.71|21.77| |Llama 2|7B|33.29|**21.25**| |Llama 2|13B|41.86|26.10| |Llama 2|70B|**50.18**|24.60| **Evaluation of pretrained LLMs on automatic safety benchmarks.** For TruthfulQA, we present the percentage of generations that are both truthful and informative (the higher the better). For ToxiGen, we present the percentage of toxic generations (the smaller the better). |||TruthfulQA|Toxigen| |---|---|---|---| |Llama-2-Chat|7B|57.04|**0.00**| |Llama-2-Chat|13B|62.18|**0.00**| |Llama-2-Chat|70B|**64.14**|0.01| **Evaluation of fine-tuned LLMs on different safety datasets.** Same metric definitions as above. 
## Ethical Considerations and Limitations Llama 2 is a new technology that carries risks with use. Testing conducted to date has been in English, and has not covered, nor could it cover all scenarios. For these reasons, as with all LLMs, Llama 2’s potential outputs cannot be predicted in advance, and the model may in some instances produce inaccurate, biased or other objectionable responses to user prompts. Therefore, before deploying any applications of Llama 2, developers should perform safety testing and tuning tailored to their specific applications of the model. Please see the Responsible Use Guide available at [https://ai.meta.com/llama/responsible-use-guide/](https://ai.meta.com/llama/responsible-use-guide) ## Reporting Issues Please report any software “bug,” or other problems with the models through one of the following means: - Reporting issues with the model: [github.com/facebookresearch/llama](http://github.com/facebookresearch/llama) - Reporting problematic content generated by the model: [developers.facebook.com/llama_output_feedback](http://developers.facebook.com/llama_output_feedback) - Reporting bugs and security concerns: [facebook.com/whitehat/info](http://facebook.com/whitehat/info) ## Llama Model Index |Model|Llama2|Llama2-hf|Llama2-chat|Llama2-chat-hf| |---|---|---|---|---| |7B| [Link](https://huggingface.co/llamaste/Llama-2-7b) | [Link](https://huggingface.co/llamaste/Llama-2-7b-hf) | [Link](https://huggingface.co/llamaste/Llama-2-7b-chat) | [Link](https://huggingface.co/llamaste/Llama-2-7b-chat-hf)| |13B| [Link](https://huggingface.co/llamaste/Llama-2-13b) | [Link](https://huggingface.co/llamaste/Llama-2-13b-hf) | [Link](https://huggingface.co/llamaste/Llama-2-13b-chat) | [Link](https://huggingface.co/llamaste/Llama-2-13b-hf)| |70B| [Link](https://huggingface.co/llamaste/Llama-2-70b) | [Link](https://huggingface.co/llamaste/Llama-2-70b-hf) | [Link](https://huggingface.co/llamaste/Llama-2-70b-chat) | 
[Link](https://huggingface.co/llamaste/Llama-2-70b-hf)|
21,265
[ [ -0.037384033203125, -0.0634765625, 0.025421142578125, 0.007534027099609375, -0.0228424072265625, -0.0036487579345703125, 0.0092010498046875, -0.038848876953125, 0.0014667510986328125, 0.0215301513671875, -0.052703857421875, -0.036651611328125, -0.025054931640625, -0.004673004150390625, -0.0278167724609375, 0.0716552734375, 0.0164642333984375, -0.0264739990234375, -0.0225982666015625, -0.011749267578125, -0.0222625732421875, -0.045135498046875, -0.049652099609375, -0.0176544189453125, 0.01397705078125, 0.015899658203125, 0.05859375, 0.052642822265625, 0.019561767578125, 0.0335693359375, -0.007381439208984375, 0.01326751708984375, -0.030426025390625, 0.0006380081176757812, 0.0201568603515625, -0.023956298828125, -0.044158935546875, 0.00286865234375, 0.032623291015625, 0.013336181640625, -0.0235443115234375, 0.01947021484375, 0.00455474853515625, 0.0286712646484375, -0.0369873046875, 0.01346588134765625, -0.0374755859375, -0.00228118896484375, -0.00766754150390625, 0.0079498291015625, -0.00974273681640625, -0.008209228515625, -0.0017375946044921875, -0.0623779296875, -0.0010585784912109375, 0.01476287841796875, 0.09197998046875, 0.0240631103515625, -0.04376220703125, 0.00597381591796875, -0.037628173828125, 0.08026123046875, -0.0850830078125, 0.0249786376953125, 0.02679443359375, 0.0183563232421875, -0.01532745361328125, -0.07354736328125, -0.05377197265625, -0.0130462646484375, -0.00859832763671875, 0.01508331298828125, -0.0447998046875, -0.002109527587890625, 0.01361083984375, 0.038909912109375, -0.04461669921875, 0.0034313201904296875, -0.0249481201171875, -0.0174407958984375, 0.057403564453125, 0.0310821533203125, 0.0216064453125, -0.01922607421875, -0.032073974609375, -0.0236968994140625, -0.03900146484375, 0.01146697998046875, 0.01435089111328125, 0.0025177001953125, -0.043426513671875, 0.041229248046875, -0.0237884521484375, 0.037445068359375, 0.0176849365234375, -0.0090179443359375, 0.020751953125, -0.03875732421875, -0.042816162109375, -0.037353515625, 
0.0955810546875, 0.032989501953125, -0.02532958984375, 0.01490020751953125, -0.0014066696166992188, -0.0084991455078125, 0.002582550048828125, -0.06341552734375, -0.0218658447265625, 0.047698974609375, -0.045684814453125, -0.0362548828125, -0.0193939208984375, -0.0489501953125, -0.0131072998046875, 0.006744384765625, 0.038970947265625, -0.0260162353515625, -0.0268402099609375, -0.005573272705078125, -0.0233306884765625, 0.0440673828125, 0.0213165283203125, -0.058380126953125, 0.0267181396484375, 0.032867431640625, 0.05059814453125, 0.00896453857421875, -0.017059326171875, -0.0239410400390625, 0.00078582763671875, -0.00760650634765625, 0.043487548828125, -0.01085662841796875, -0.034210205078125, -0.0239715576171875, 0.01055908203125, 0.015869140625, -0.0246124267578125, 0.029296875, -0.018096923828125, 0.03228759765625, -0.026580810546875, -0.03448486328125, -0.0208740234375, 0.0098419189453125, -0.036468505859375, 0.09344482421875, 0.017059326171875, -0.06256103515625, 0.007366180419921875, -0.037261962890625, -0.01374053955078125, 0.0030975341796875, -0.0008387565612792969, -0.04534912109375, -0.017669677734375, 0.028472900390625, 0.027801513671875, -0.035247802734375, -0.00010889768600463867, -0.03271484375, -0.014801025390625, 0.0212860107421875, -0.034454345703125, 0.09368896484375, 0.0229034423828125, -0.040069580078125, 0.00605010986328125, -0.052154541015625, 0.01166534423828125, 0.0313720703125, -0.02117919921875, 0.004367828369140625, -0.0013427734375, 0.0013828277587890625, 0.00439453125, 0.032867431640625, -0.030120849609375, 0.01593017578125, -0.0222320556640625, 0.053619384765625, 0.057220458984375, 0.00035858154296875, 0.036651611328125, -0.046051025390625, 0.02996826171875, 0.01004791259765625, 0.039642333984375, 0.005413055419921875, -0.053741455078125, -0.07183837890625, -0.0232086181640625, 0.01580810546875, 0.050872802734375, -0.0469970703125, 0.0498046875, 0.0132598876953125, -0.0587158203125, -0.041656494140625, -0.0078887939453125, 
0.0189056396484375, 0.033416748046875, 0.032806396484375, -0.019500732421875, -0.049774169921875, -0.0606689453125, 0.00528717041015625, -0.0369873046875, -0.01080322265625, 0.041259765625, 0.047393798828125, -0.031219482421875, 0.054107666015625, -0.037841796875, -0.01395416259765625, -0.007595062255859375, 0.003765106201171875, 0.02301025390625, 0.0496826171875, 0.053863525390625, -0.03912353515625, -0.0305938720703125, -0.01023101806640625, -0.058502197265625, -0.006439208984375, -0.0034542083740234375, -0.03546142578125, 0.0248260498046875, 0.0133056640625, -0.06597900390625, 0.038116455078125, 0.046142578125, -0.032684326171875, 0.0477294921875, -0.013641357421875, 0.00919342041015625, -0.08209228515625, 0.0028839111328125, -0.0034084320068359375, -0.0226898193359375, -0.03668212890625, 0.00762176513671875, -0.0141143798828125, 0.0096435546875, -0.03369140625, 0.05377197265625, -0.0341796875, 0.0024280548095703125, -0.00576019287109375, -0.00662994384765625, 0.0241851806640625, 0.035797119140625, -0.01165008544921875, 0.05352783203125, 0.047515869140625, -0.050689697265625, 0.040313720703125, 0.032623291015625, -0.0025653839111328125, 0.02398681640625, -0.069580078125, 0.0177459716796875, 0.01103973388671875, 0.0288543701171875, -0.08880615234375, -0.0097503662109375, 0.03765869140625, -0.04888916015625, 0.0099945068359375, -0.0171356201171875, -0.02728271484375, -0.032623291015625, -0.032562255859375, 0.0191497802734375, 0.07568359375, -0.035125732421875, 0.0450439453125, 0.037750244140625, 0.0140533447265625, -0.059051513671875, -0.06146240234375, -0.012664794921875, -0.0290069580078125, -0.047149658203125, 0.0261077880859375, -0.017303466796875, -0.0222625732421875, 0.00106048583984375, 0.0022907257080078125, -0.0115814208984375, 0.01399993896484375, 0.0236358642578125, 0.0250396728515625, -0.0101165771484375, -0.017242431640625, 0.005779266357421875, -0.00026869773864746094, 0.00811767578125, -0.0205841064453125, 0.042205810546875, -0.0282745361328125, 
-0.00609588623046875, -0.051025390625, 0.0218048095703125, 0.040374755859375, -0.01361083984375, 0.07330322265625, 0.058929443359375, -0.018524169921875, -0.0013246536254882812, -0.040679931640625, -0.0243072509765625, -0.04144287109375, 0.01456451416015625, -0.0131072998046875, -0.0501708984375, 0.04852294921875, 0.02752685546875, 0.0271148681640625, 0.056671142578125, 0.042694091796875, -0.029205322265625, 0.0814208984375, 0.043212890625, -0.002079010009765625, 0.032928466796875, -0.047210693359375, -0.004360198974609375, -0.06561279296875, -0.01494598388671875, -0.031097412109375, -0.01385498046875, -0.048187255859375, -0.03973388671875, 0.0279083251953125, 0.00768280029296875, -0.04638671875, 0.028533935546875, -0.046722412109375, 0.0004425048828125, 0.05615234375, 0.0101318359375, 0.01352691650390625, -0.00695037841796875, -0.012115478515625, 0.002765655517578125, -0.05206298828125, -0.0287933349609375, 0.0814208984375, 0.0257568359375, 0.044677734375, 0.00937652587890625, 0.049774169921875, 0.0130462646484375, 0.01241302490234375, -0.041748046875, 0.044464111328125, 0.0061187744140625, -0.047576904296875, -0.0309295654296875, -0.03515625, -0.0684814453125, 0.023162841796875, -0.0169525146484375, -0.053436279296875, 0.0213775634765625, 0.00909423828125, -0.035369873046875, 0.0230865478515625, -0.03411865234375, 0.058258056640625, -0.0084228515625, -0.02490234375, -0.0014448165893554688, -0.045806884765625, 0.0303802490234375, 0.0184326171875, 0.0142669677734375, -0.019927978515625, -0.01446533203125, 0.05859375, -0.069091796875, 0.066162109375, -0.0109100341796875, -0.0030841827392578125, 0.043121337890625, -0.00505828857421875, 0.041748046875, 0.0102081298828125, -0.008514404296875, 0.0299530029296875, 0.01039886474609375, -0.03228759765625, -0.0241241455078125, 0.042755126953125, -0.084228515625, -0.054290771484375, -0.032470703125, -0.037139892578125, 0.0118865966796875, 0.00702667236328125, 0.0325927734375, 0.017364501953125, -0.00740814208984375, 
0.014434814453125, 0.030487060546875, -0.0292510986328125, 0.041748046875, 0.029296875, -0.0137939453125, -0.041168212890625, 0.04742431640625, -0.00292205810546875, 0.0208282470703125, 0.0155487060546875, 0.0115203857421875, -0.03546142578125, -0.030029296875, -0.04547119140625, 0.0219879150390625, -0.040283203125, -0.036773681640625, -0.05682373046875, -0.032562255859375, -0.038177490234375, 0.0009531974792480469, -0.03173828125, -0.03997802734375, -0.051971435546875, 0.00232696533203125, 0.0677490234375, 0.0297393798828125, -0.0279083251953125, 0.033477783203125, -0.05316162109375, 0.0187530517578125, 0.037200927734375, -0.0029430389404296875, 0.00658416748046875, -0.058013916015625, -0.00467681884765625, 0.0250396728515625, -0.038909912109375, -0.058013916015625, 0.054229736328125, 0.014892578125, 0.04840087890625, 0.021575927734375, 0.0216064453125, 0.06097412109375, -0.0167236328125, 0.07489013671875, 0.007366180419921875, -0.08160400390625, 0.035247802734375, -0.0290069580078125, 0.021697998046875, 0.0194244384765625, 0.0247955322265625, -0.03240966796875, -0.03961181640625, -0.057830810546875, -0.0689697265625, 0.044097900390625, 0.03466796875, 0.0089874267578125, 0.004390716552734375, 0.0234527587890625, -0.01052093505859375, 0.01309967041015625, -0.0654296875, -0.0504150390625, -0.0275726318359375, -0.00708770751953125, 0.019683837890625, -0.0092620849609375, -0.016815185546875, -0.039703369140625, 0.06243896484375, -0.0053863525390625, 0.055694580078125, 0.0202789306640625, 0.00839996337890625, -0.01282501220703125, 0.007549285888671875, 0.0189971923828125, 0.037628173828125, -0.01141357421875, -0.01119232177734375, 0.029693603515625, -0.029541015625, 0.0098419189453125, 0.0127105712890625, -0.011016845703125, -0.014892578125, 0.01016998291015625, 0.0675048828125, -0.005825042724609375, -0.030242919921875, 0.032989501953125, -0.020599365234375, -0.03143310546875, -0.02874755859375, 0.017120361328125, 0.0237274169921875, 0.04339599609375, 
0.040496826171875, -0.016357421875, 0.0169677734375, -0.04107666015625, 0.01136016845703125, 0.05218505859375, -0.005748748779296875, -0.006656646728515625, 0.08392333984375, 0.00690460205078125, -0.0100555419921875, 0.061767578125, -0.01313018798828125, -0.035675048828125, 0.07513427734375, 0.042083740234375, 0.0496826171875, -0.0017833709716796875, 0.018951416015625, 0.03936767578125, 0.01806640625, 0.00824737548828125, 0.0297393798828125, -0.001049041748046875, -0.047943115234375, -0.0186614990234375, -0.04498291015625, -0.0307159423828125, 0.02093505859375, -0.0472412109375, 0.0189208984375, -0.04034423828125, -0.02764892578125, -0.01222991943359375, 0.021636962890625, -0.048919677734375, 0.0191650390625, 0.01476287841796875, 0.050140380859375, -0.048553466796875, 0.05633544921875, 0.04132080078125, -0.0303192138671875, -0.06744384765625, -0.019744873046875, 0.01297760009765625, -0.0635986328125, 0.0121002197265625, -0.00015091896057128906, 0.01317596435546875, 0.016357421875, -0.06719970703125, -0.08013916015625, 0.1123046875, 0.0079193115234375, -0.039825439453125, 0.0009164810180664062, 0.0029754638671875, 0.0298919677734375, -0.017974853515625, 0.0518798828125, 0.03460693359375, 0.0321044921875, 0.012786865234375, -0.0706787109375, 0.0287628173828125, -0.0217437744140625, -0.00298309326171875, 0.00022482872009277344, -0.08349609375, 0.08831787109375, -0.0191192626953125, -0.0163726806640625, 0.0298614501953125, 0.07171630859375, 0.046234130859375, 0.0031795501708984375, 0.0362548828125, 0.046661376953125, 0.06390380859375, -0.012237548828125, 0.0732421875, -0.0218353271484375, 0.04498291015625, 0.05474853515625, -0.0032711029052734375, 0.060394287109375, 0.0219573974609375, -0.038909912109375, 0.047210693359375, 0.05908203125, -0.022247314453125, 0.029327392578125, 0.0019664764404296875, -0.0183868408203125, -0.0119171142578125, 0.003662109375, -0.049560546875, 0.0256500244140625, 0.029205322265625, -0.01300811767578125, 0.0009398460388183594, 
-0.0150146484375, 0.006122589111328125, -0.040802001953125, -0.00597381591796875, 0.05059814453125, 0.02264404296875, -0.020965576171875, 0.08184814453125, 0.003902435302734375, 0.06280517578125, -0.036651611328125, -0.00800323486328125, -0.0285186767578125, 0.0032176971435546875, -0.01436614990234375, -0.050872802734375, 0.011383056640625, -0.00833892822265625, 0.001705169677734375, 0.0072479248046875, 0.04510498046875, -0.01702880859375, -0.03228759765625, 0.0222320556640625, 0.03631591796875, 0.020660400390625, 0.004608154296875, -0.0750732421875, 0.0205230712890625, 0.0045318603515625, -0.0400390625, 0.0222320556640625, 0.0247344970703125, 0.019317626953125, 0.05670166015625, 0.05706787109375, -0.0225677490234375, 0.005344390869140625, -0.029296875, 0.0706787109375, -0.05126953125, -0.0222320556640625, -0.06787109375, 0.061431884765625, -0.004787445068359375, -0.0286712646484375, 0.06292724609375, 0.0289459228515625, 0.045013427734375, 0.004833221435546875, 0.061126708984375, -0.029693603515625, 0.01378631591796875, -0.0185546875, 0.06829833984375, -0.061126708984375, 0.0178680419921875, -0.00946044921875, -0.04931640625, 0.004077911376953125, 0.056793212890625, 0.004222869873046875, 0.01198577880859375, 0.038421630859375, 0.059295654296875, 0.007213592529296875, -0.00972747802734375, 0.019866943359375, 0.040496826171875, 0.022003173828125, 0.054931640625, 0.05926513671875, -0.0684814453125, 0.053863525390625, -0.045867919921875, -0.01436614990234375, -0.012359619140625, -0.06494140625, -0.06610107421875, -0.04083251953125, -0.0279541015625, -0.04742431640625, -0.00489044189453125, 0.06158447265625, 0.06427001953125, -0.0494384765625, -0.0269775390625, -0.005733489990234375, 0.00469207763671875, -0.0194549560546875, -0.0219573974609375, 0.017669677734375, 0.005859375, -0.059051513671875, 0.021728515625, -0.005901336669921875, 0.0303802490234375, -0.0233306884765625, -0.0108489990234375, -0.0194549560546875, 0.0175628662109375, 0.032562255859375, 
0.037200927734375, -0.05352783203125, -0.0105743408203125, -0.0011425018310546875, -0.01503753662109375, 0.0193939208984375, 0.0036773681640625, -0.06549072265625, -0.004940032958984375, 0.035400390625, 0.015411376953125, 0.05010986328125, -0.0011568069458007812, 0.0450439453125, -0.03350830078125, 0.0218658447265625, 0.00817108154296875, 0.0247344970703125, 0.01203155517578125, -0.045654296875, 0.032501220703125, 0.01393890380859375, -0.056549072265625, -0.06585693359375, -0.00713348388671875, -0.07861328125, -0.017669677734375, 0.08489990234375, -0.01129913330078125, -0.035125732421875, 0.00016009807586669922, -0.0194549560546875, 0.033294677734375, -0.037628173828125, 0.04022216796875, 0.0272216796875, -0.011383056640625, -0.0239715576171875, -0.0401611328125, 0.041900634765625, 0.026519775390625, -0.07159423828125, -0.005023956298828125, 0.0301361083984375, 0.032012939453125, -0.009521484375, 0.059600830078125, 0.00023281574249267578, 0.0221710205078125, 0.01041412353515625, 0.01088714599609375, 0.0015821456909179688, 0.004375457763671875, -0.01125335693359375, -0.01255035400390625, -0.0104217529296875, -0.0197601318359375 ] ]
inu-ai/niji-diffusion-xl-base-1.0
2023-08-14T12:49:36.000Z
[ "diffusers", "text-to-image", "stable-diffusion", "safetensors", "dataset:p1atdev/niji-v5", "license:openrail++", "endpoints_compatible", "has_space", "diffusers:StableDiffusionXLPipeline", "region:us" ]
text-to-image
inu-ai
null
null
inu-ai/niji-diffusion-xl-base-1.0
10
8,654
diffusers
2023-08-01T11:45:39
--- license: openrail++ thumbnail: >- https://huggingface.co/inu-ai/niji-diffusion-xl-base-1.0/images/thumbnail.png tags: - text-to-image - stable-diffusion - safetensors - diffusers inference: true widget: - text: >- best quality, high quality, absurdres, 1girl datasets: - p1atdev/niji-v5 library_name: diffusers --- # 更新履歴 - 2023年8月14日 さらにnijijourneyの画像1000枚ほどを目で見てアニメやイラスト風で良いもの選んで学習させました。 以下に何をやったか記録しますが、何が効いているのかわかりません。 以下にハイパーパラメータを記載しますが、その後[v11](https://civitai.com/models/120452?modelVersionId=135754)と[sd-webui-supermerger](https://github.com/hako-mikan/sd-webui-supermerger)階層マージ(block_lrに近い絵が良さそうな比率)をしているため一度で仕上がりませんでした。 最後に[blur](https://civitai.com/models/120624)を-0.05ほど、[LECO](https://github.com/p1atdev/LECO)で作った[anime](https://civitai.com/models/128125/anime-leco)を1でLoRAマージして仕上げました。 | ハイパーパラメータ | 値 | |-----------------------|-------------------------------------| | GPU | RTX3090 24GB | | optimizer_type | PagedLion8bit | | optimizer_args | weight_decay=0.01, betas=.9,.999 | | block_lr | 0,1e-08,1e-08,1e-08,1e-08,1e-07,1e-07,1e-07,1e-06,1e-06,1e-05,1e-05,1e-05,1e-06,1e-06,1e-07,1e-07,1e-07,1e-08,1e-08,1e-08,1e-08,0 | | lr_scheduler | cosine | | lr_warmup_steps | 100 | | gradient_checkpointing| | | mixed_precision | bf16 | | full_bf16 | | | max_token_length | 225 | | min_snr_gamma | 5 | | noise_offset | 0.0357 | | max_train_epochs | 3 | | batch_size | 12 | | enable_bucket | true | | resolution | [1024,1024] | - 2023年8月11日 前回のnijijourneyの画像を混ぜて12000枚で学習しました。 オプティマイザはLion(4e-06, cosine, weight_decay=0.015, betas=.9,.999)でした。 - 2023年8月7日 nijijourneyの画像4500枚ぐらいでフルファインチューニングしました。fp16で壊れないVAEに取り替えました。 学習率1e-07が低すぎたようで絵があまり変化しませんでした。次回は学習率を上げようと思います。 - 2023年8月1日 nijijourneyの画像100枚ぐらいでLoRAファインチューニングなどをしました。 # モデルの説明 アニメ調に寄せた「[SDXL(stable-diffusion-xl-base-1.0)](https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0)」モデルです。内容は「[niji-v5](https://huggingface.co/datasets/p1atdev/niji-v5)」データセットでLoRAファインチューニングしてマージしたモデルです。 # 使用例 
[niji-diffusion-xl-base-1.0.safetensors](https://huggingface.co/inu-ai/niji-diffusion-xl-base-1.0/resolve/main/niji-diffusion-xl-base-1.0.safetensors)と[stable-diffusion-webui](https://github.com/AUTOMATIC1111/stable-diffusion-webui)で以下のパラメータで画像生成します。 ※合計13000枚ぐらい~100枚~しか学習していないのでPromptに複数書くとnijiの絵じゃなくなります。Negative promptに複数書くのは大丈夫っぽいです。 Prompt: ``` masterpiece, best quality, high quality, absurdres, 1girl, flower ``` Negative prompt: ``` worst quality, low quality, medium quality, deleted, lowres, comic, bad anatomy, bad hands, text, error, missing fingers, extra digit, fewer digits, cropped, jpeg artifacts, signature, watermark, username, blurry ``` PNG info: ``` Steps: 28, Sampler: Euler a, CFG scale: 7, Seed: 1, Size: 1536x1024, Model hash: 791d0c791e, Model: sd_xl_niji_1.0, Clip skip: 2, ENSD: 31337, Token merging ratio: 0.5, Eta: 0.67, Version: v1.5.1 ``` ![thumbnail](images/thumbnail.jpg) Prompt: ``` 1girl ``` ![thumbnail](images/1girl.jpg) Prompt: ``` 1girl, tokyo ``` ![thumbnail](images/1girl,%20tokyo.jpg) Prompt: ``` 1girl, steampunk ``` ![thumbnail](images/1girl,%20steampunk.jpg) Prompt: ``` 1girl, fantasy ``` ![thumbnail](images/1girl,%20fantasy.jpg) # モデルの作り方 1. 「[かんたん☆コピー機学習法(きっと初級編)](https://note.com/2vxpswa7/n/n2d04527bf0bc)」を参考に、「[ぼかし(blur)](https://civitai.com/models/120624)」のLoRA DreamBoothをして、そのLoRAモデルをSDXLモデルにマイナスでマージします。 2. 
1で作ったモデルに、niji-v5から背景や髪が詳細に描かれている絵などを100枚選びLoRAファインチューニングして、そのLoRAモデルをSDXLモデルにマージします。 # 今後のモデルの改善 LoRAモデルとして配布できるようにしたいです。 というのは、512dim(rank)で学習したらLoRAモデルが3GBのファイルサイズになってしまったため、今回はSDXLモデルにマージしています。 # 感想 うまく調整できず何度もやり直して大変でした。あとで作り方を動画にしたいです。 # 謝辞 モデル、学習データや学習ツールを作成および配布してくださった方々に心から感謝申し上げます。 # ライブラリ - [sd-scripts](https://github.com/kohya-ss/sd-scripts/tree/sdxl) 4072f723c12822e2fa1b2e076cc1f90b8f4e30c9 - [bitsandbytes](https://github.com/jllllll/bitsandbytes-windows-webui) 0.39.1 - Pytorch 2.0.0+cu117 - xformers 0.0.19 # ライセンス - [CreativeML Open RAIL++-M License](https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/blob/main/LICENSE.md)
4,651
[ [ -0.052490234375, -0.053680419921875, 0.02191162109375, 0.01552581787109375, -0.0301513671875, 0.004669189453125, 0.002620697021484375, -0.040283203125, 0.05328369140625, -0.0005335807800292969, -0.054412841796875, -0.042083740234375, -0.0296783447265625, 0.006107330322265625, -0.00827789306640625, 0.05145263671875, -0.0125274658203125, 0.0015077590942382812, 0.0007863044738769531, 0.0198211669921875, -0.03826904296875, -0.00685882568359375, -0.05242919921875, -0.018707275390625, 0.00899505615234375, 0.0193023681640625, 0.046295166015625, 0.03802490234375, 0.03912353515625, 0.0198974609375, -0.0073699951171875, -0.0099639892578125, -0.0209197998046875, -0.0201416015625, 0.01345062255859375, -0.03363037109375, -0.039459228515625, -0.016326904296875, 0.05242919921875, 0.0291290283203125, 0.0194854736328125, 0.00615692138671875, 0.01654052734375, 0.056610107421875, -0.03265380859375, 0.0207061767578125, 0.011383056640625, 0.0200653076171875, -0.010101318359375, -0.018829345703125, 0.003612518310546875, -0.04473876953125, -0.01073455810546875, -0.0728759765625, 0.0164031982421875, -0.00860595703125, 0.1048583984375, -0.0010929107666015625, -0.00047469139099121094, -0.00022363662719726562, -0.03131103515625, 0.052825927734375, -0.060546875, 0.0082244873046875, 0.01934814453125, 0.0267791748046875, -0.0118865966796875, -0.056365966796875, -0.05462646484375, 0.00472259521484375, -0.00836181640625, 0.032318115234375, -0.01412200927734375, -0.0283050537109375, 0.0075531005859375, 0.0201263427734375, -0.05242919921875, 0.003940582275390625, -0.0279541015625, -0.00955963134765625, 0.040924072265625, 0.0203094482421875, 0.04840087890625, -0.0221405029296875, -0.04144287109375, -0.017333984375, -0.0316162109375, 0.0107879638671875, 0.02557373046875, 0.0162353515625, -0.049041748046875, 0.017120361328125, -0.00334930419921875, 0.0235137939453125, 0.00257110595703125, -0.00980377197265625, 0.0343017578125, -0.0340576171875, -0.0270843505859375, -0.01016998291015625, 
0.08587646484375, 0.05059814453125, -0.01233673095703125, 0.006969451904296875, 0.009307861328125, 0.007038116455078125, -0.02294921875, -0.07073974609375, -0.0015878677368164062, 0.039825439453125, -0.037567138671875, -0.03289794921875, 0.0099639892578125, -0.09173583984375, 0.0027294158935546875, 0.0025177001953125, 0.02978515625, -0.050872802734375, -0.040069580078125, 0.01558685302734375, -0.0022296905517578125, 0.015869140625, 0.0367431640625, -0.04547119140625, 0.00775146484375, 0.0306549072265625, 0.07275390625, -0.0131378173828125, -0.019134521484375, 0.0176544189453125, 0.007030487060546875, -0.02587890625, 0.04193115234375, -0.0146026611328125, -0.043212890625, -0.026214599609375, 0.01320648193359375, -0.0213165283203125, -0.0204620361328125, 0.0400390625, -0.02215576171875, 0.0190887451171875, -0.00545501708984375, -0.028411865234375, -0.0179901123046875, 0.014373779296875, -0.035308837890625, 0.050262451171875, 0.00983428955078125, -0.06982421875, -0.004669189453125, -0.056854248046875, -0.0004730224609375, 0.00855255126953125, -0.0013170242309570312, -0.0207977294921875, -0.033233642578125, 0.00479888916015625, 0.027618408203125, -0.0251312255859375, 0.0213165283203125, -0.0087127685546875, -0.0298614501953125, 0.0261993408203125, -0.024322509765625, 0.09393310546875, 0.032562255859375, -0.041748046875, -0.0033626556396484375, -0.061248779296875, 0.01004791259765625, 0.04522705078125, -0.01457977294921875, -0.00710296630859375, -0.033203125, 0.0164642333984375, 0.015869140625, 0.0474853515625, -0.03912353515625, 0.0295257568359375, -0.0231170654296875, 0.043243408203125, 0.06939697265625, 0.0094757080078125, 0.01364898681640625, -0.02642822265625, 0.0484619140625, 0.01080322265625, 0.03216552734375, -0.0169525146484375, -0.03912353515625, -0.061859130859375, -0.00598907470703125, 0.017730712890625, 0.03900146484375, -0.061614990234375, 0.03515625, -0.01479339599609375, -0.059356689453125, -0.0263671875, -0.016754150390625, 0.0374755859375, 
0.0233917236328125, 0.03228759765625, -0.0263671875, -0.041717529296875, -0.04205322265625, 0.0033740997314453125, -0.00965118408203125, 0.0158843994140625, 0.010467529296875, 0.047454833984375, -0.03680419921875, 0.042144775390625, -0.058013916015625, -0.03167724609375, -0.0271759033203125, -0.00528717041015625, 0.049560546875, 0.058197021484375, 0.044586181640625, -0.06915283203125, -0.05645751953125, -0.00015342235565185547, -0.06939697265625, 0.0123138427734375, 0.0018291473388671875, -0.03350830078125, 0.0172882080078125, 0.0193328857421875, -0.042816162109375, 0.0369873046875, 0.03466796875, -0.050628662109375, 0.060791015625, -0.0265655517578125, 0.028045654296875, -0.08404541015625, 0.0187225341796875, 0.0104827880859375, 0.001556396484375, -0.050140380859375, 0.0273284912109375, -0.002872467041015625, 0.00756072998046875, -0.038787841796875, 0.05841064453125, -0.041229248046875, 0.031982421875, -0.01131439208984375, 0.021148681640625, 0.0008039474487304688, 0.039459228515625, -0.0157318115234375, 0.046722412109375, 0.05035400390625, -0.04986572265625, 0.0156707763671875, 0.0148773193359375, -0.0186767578125, 0.028778076171875, -0.040985107421875, 0.0006999969482421875, -0.000579833984375, 0.0101776123046875, -0.07220458984375, -0.0198211669921875, 0.045562744140625, -0.056610107421875, 0.034881591796875, -0.00037384033203125, -0.03680419921875, -0.044097900390625, -0.03961181640625, 0.006534576416015625, 0.03021240234375, -0.0286712646484375, 0.0309600830078125, 0.0086669921875, 0.004894256591796875, -0.038482666015625, -0.0567626953125, -0.01102447509765625, -0.0108489990234375, -0.047515869140625, 0.033203125, -0.024444580078125, -0.009765625, -0.0002720355987548828, 0.00518798828125, -0.00875091552734375, -0.0018978118896484375, 0.01529693603515625, 0.046234130859375, -0.01447296142578125, -0.03424072265625, 0.0053558349609375, -0.0206146240234375, 0.00665283203125, -0.0164642333984375, 0.0631103515625, -0.00917816162109375, -0.0230255126953125, 
-0.07110595703125, 0.00925445556640625, 0.053314208984375, 0.0111846923828125, 0.042999267578125, 0.056365966796875, -0.0238800048828125, 0.033660888671875, -0.0262908935546875, -0.006755828857421875, -0.038177490234375, 0.0189056396484375, -0.0166015625, -0.037506103515625, 0.04095458984375, 0.0207366943359375, 0.00913238525390625, 0.0562744140625, 0.0223541259765625, -0.0084991455078125, 0.07598876953125, 0.03704833984375, -0.005084991455078125, 0.037689208984375, -0.0814208984375, 0.00579071044921875, -0.070556640625, -0.037841796875, -0.0173492431640625, -0.037261962890625, -0.04388427734375, -0.043212890625, 0.023712158203125, 0.01348876953125, -0.0246124267578125, 0.024261474609375, -0.052276611328125, 0.01593017578125, 0.028350830078125, 0.01175689697265625, 0.013519287109375, -0.01116180419921875, -0.0166473388671875, -0.01250457763671875, -0.032318115234375, -0.02978515625, 0.07598876953125, 0.0241241455078125, 0.0303802490234375, 0.0159912109375, 0.048553466796875, -0.0009407997131347656, -0.006778717041015625, -0.018218994140625, 0.058746337890625, -0.0009112358093261719, -0.05450439453125, -0.01467132568359375, -0.017333984375, -0.0843505859375, 0.03570556640625, -0.016754150390625, -0.074951171875, 0.037811279296875, -0.00435638427734375, -0.0231170654296875, 0.04473876953125, -0.0498046875, 0.055084228515625, -0.036376953125, -0.0631103515625, 0.0005764961242675781, -0.056884765625, 0.023773193359375, 0.007587432861328125, 0.037139892578125, -0.019287109375, -0.002227783203125, 0.06927490234375, -0.043060302734375, 0.04388427734375, -0.0306549072265625, 0.006389617919921875, 0.02362060546875, 0.013397216796875, 0.02801513671875, 0.00972747802734375, 0.0014190673828125, 0.0173492431640625, 0.0159759521484375, -0.037933349609375, -0.027984619140625, 0.07440185546875, -0.06292724609375, -0.041046142578125, -0.04541015625, -0.0263214111328125, 0.0175323486328125, 0.042144775390625, 0.048248291015625, 0.023529052734375, 0.00788116455078125, 
-0.0026226043701171875, 0.04193115234375, -0.033355712890625, 0.049774169921875, 0.01788330078125, -0.043609619140625, -0.04144287109375, 0.0948486328125, 0.0246734619140625, 0.0199127197265625, 0.03265380859375, 0.023223876953125, -0.01070404052734375, -0.0355224609375, -0.042510986328125, 0.040008544921875, -0.03521728515625, -0.0145111083984375, -0.0606689453125, -0.01153564453125, -0.044525146484375, -0.0187835693359375, -0.01551055908203125, -0.03533935546875, -0.047637939453125, -0.00435638427734375, 0.0279998779296875, 0.03143310546875, -0.012359619140625, -0.006076812744140625, -0.045166015625, 0.024017333984375, -0.0209197998046875, 0.021392822265625, 0.02911376953125, -0.03240966796875, -0.028472900390625, 0.00968170166015625, -0.038116455078125, -0.06866455078125, 0.06854248046875, -0.010101318359375, 0.041961669921875, 0.04510498046875, -0.0165557861328125, 0.054290771484375, 0.004161834716796875, 0.07440185546875, 0.038177490234375, -0.04656982421875, 0.0540771484375, -0.0472412109375, 0.03277587890625, 0.03619384765625, 0.04925537109375, -0.0328369140625, -0.02423095703125, -0.06756591796875, -0.0843505859375, 0.055755615234375, 0.01395416259765625, -0.00009149312973022461, 0.01274871826171875, 0.0163421630859375, -0.0013341903686523438, 0.0149688720703125, -0.055084228515625, -0.0758056640625, -0.0214996337890625, -0.0019016265869140625, -0.0010890960693359375, -0.004619598388671875, 0.00821685791015625, -0.042388916015625, 0.06982421875, -0.00252532958984375, 0.046661376953125, 0.0218658447265625, 0.0130615234375, -0.006351470947265625, 0.01354217529296875, 0.036224365234375, 0.033538818359375, -0.024383544921875, -0.0224761962890625, 0.00426483154296875, -0.06658935546875, 0.0227203369140625, 0.005218505859375, -0.034576416015625, 0.00978851318359375, 0.01244354248046875, 0.049713134765625, 0.0187835693359375, -0.033172607421875, 0.040557861328125, -0.005550384521484375, -0.01496124267578125, -0.0303497314453125, 0.015167236328125, 
0.0096282958984375, 0.01702880859375, 0.039398193359375, -0.00537872314453125, 0.0163116455078125, -0.041168212890625, -0.016143798828125, 0.0189361572265625, -0.0244140625, -0.0119476318359375, 0.07171630859375, -0.0092010498046875, -0.00927734375, 0.0197906494140625, -0.0216064453125, -0.04119873046875, 0.0657958984375, 0.04852294921875, 0.06280517578125, -0.0115814208984375, 0.01485443115234375, 0.07440185546875, -0.0034770965576171875, 0.004444122314453125, 0.0318603515625, 0.019744873046875, -0.0267333984375, 0.0092315673828125, -0.048248291015625, -0.012603759765625, 0.018402099609375, -0.0428466796875, 0.03472900390625, -0.06646728515625, -0.017822265625, -0.011077880859375, 0.0212554931640625, -0.03485107421875, 0.02545166015625, -0.00479888916015625, 0.0789794921875, -0.049346923828125, 0.056365966796875, 0.0430908203125, -0.053070068359375, -0.066162109375, 0.00565338134765625, 0.01506805419921875, -0.031646728515625, 0.0276947021484375, 0.00848388671875, -0.00504302978515625, 0.01093292236328125, -0.036590576171875, -0.07965087890625, 0.11029052734375, 0.012451171875, -0.037567138671875, -0.005878448486328125, -0.00658416748046875, 0.054412841796875, -0.008697509765625, 0.032073974609375, 0.03961181640625, 0.04791259765625, 0.0025119781494140625, -0.05853271484375, 0.0189971923828125, -0.06463623046875, 0.00098419189453125, 0.00814056396484375, -0.087646484375, 0.08001708984375, -0.0168609619140625, -0.026458740234375, 0.00894927978515625, 0.05682373046875, 0.0323486328125, 0.02471923828125, 0.007129669189453125, 0.040130615234375, 0.04119873046875, -0.036895751953125, 0.07568359375, -0.0316162109375, 0.031219482421875, 0.038482666015625, 0.02337646484375, 0.047760009765625, 0.031341552734375, -0.055145263671875, 0.06396484375, 0.058624267578125, -0.01485443115234375, 0.0394287109375, -0.01084136962890625, -0.018341064453125, 0.0103759765625, 0.004848480224609375, -0.05645751953125, -0.01517486572265625, 0.0159912109375, -0.00815582275390625, 
0.01113128662109375, 0.024993896484375, 0.01465606689453125, -0.004367828369140625, -0.024505615234375, 0.044525146484375, 0.0013990402221679688, -0.04058837890625, 0.059661865234375, -0.0009627342224121094, 0.08746337890625, -0.044708251953125, 0.0054931640625, -0.0218048095703125, 0.00966644287109375, -0.034881591796875, -0.07086181640625, -0.0036563873291015625, -0.00040078163146972656, -0.00881195068359375, -0.0035381317138671875, 0.0333251953125, -0.0023746490478515625, -0.037933349609375, 0.039154052734375, -0.00492095947265625, 0.0325927734375, 0.034210205078125, -0.071044921875, 0.0235443115234375, 0.026611328125, -0.01678466796875, 0.037017822265625, 0.011199951171875, 0.00806427001953125, 0.05487060546875, 0.049713134765625, 0.01381683349609375, 0.01039886474609375, -0.00437164306640625, 0.0635986328125, -0.0421142578125, -0.051239013671875, -0.0667724609375, 0.04547119140625, 0.00102996826171875, -0.01605224609375, 0.06591796875, 0.059295654296875, 0.061004638671875, -0.01258087158203125, 0.07421875, -0.025848388671875, 0.032867431640625, -0.031524658203125, 0.056640625, -0.0633544921875, 0.00719451904296875, -0.046722412109375, -0.047760009765625, -0.016265869140625, 0.05084228515625, -0.013397216796875, 0.0155487060546875, 0.0467529296875, 0.045745849609375, 0.0133056640625, 0.009368896484375, 0.005764007568359375, 0.01666259765625, 0.031951904296875, 0.06683349609375, 0.035003662109375, -0.06219482421875, 0.03143310546875, -0.055694580078125, 0.0010061264038085938, -0.044586181640625, -0.03546142578125, -0.044036865234375, -0.04412841796875, -0.037261962890625, -0.040985107421875, -0.006504058837890625, 0.0633544921875, 0.0230865478515625, -0.0579833984375, -0.0245361328125, -0.00885772705078125, 0.00716400146484375, -0.03155517578125, -0.0263214111328125, 0.033843994140625, 0.01065826416015625, -0.07672119140625, 0.009613037109375, 0.01277923583984375, 0.035797119140625, -0.0261688232421875, -0.015655517578125, -0.032012939453125, -0.01104736328125, 
0.0054931640625, 0.0280914306640625, -0.0350341796875, 0.01739501953125, 0.00742340087890625, -0.0171661376953125, 0.0207977294921875, 0.023773193359375, -0.034271240234375, 0.030487060546875, 0.053619384765625, -0.008026123046875, 0.0423583984375, -0.0187835693359375, 0.005367279052734375, -0.00574493408203125, 0.0098724365234375, 0.0036773681640625, 0.036376953125, 0.0166473388671875, -0.047271728515625, 0.043426513671875, 0.038787841796875, -0.0213623046875, -0.0675048828125, -0.01258087158203125, -0.07568359375, -0.048431396484375, 0.07794189453125, -0.006885528564453125, -0.037353515625, 0.0032672882080078125, -0.0302734375, 0.031585693359375, -0.01541900634765625, 0.03289794921875, 0.044036865234375, -0.01293182373046875, -0.004894256591796875, -0.060455322265625, 0.00844573974609375, 0.0089569091796875, -0.0673828125, -0.0150604248046875, 0.0190582275390625, 0.033660888671875, 0.04315185546875, 0.062469482421875, -0.024261474609375, 0.02740478515625, -0.00481414794921875, 0.01129150390625, -0.00939178466796875, -0.00496673583984375, -0.0214996337890625, 0.0055694580078125, -0.020721435546875, -0.0187225341796875 ] ]
Severian/ANIMA-Phi-Neptune-Mistral-7B
2023-10-26T11:03:46.000Z
[ "transformers", "pytorch", "mistral", "text-generation", "chemistry", "biology", "climate", "science", "philosophy", "nature", "ecology", "biomimicry", "fauna", "flora", "dataset:Severian/Biomimicry", "dataset:emrgnt-cmplxty/sciphi-textbooks-are-all-you-need", "dataset:fmars/wiki_stem", "dataset:fblgit/tree-of-knowledge", "dataset:Severian/Bio-Design-Process", "license:artistic-2.0", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
Severian
null
null
Severian/ANIMA-Phi-Neptune-Mistral-7B
19
8,635
transformers
2023-10-11T22:48:16
--- license: artistic-2.0 datasets: - Severian/Biomimicry - emrgnt-cmplxty/sciphi-textbooks-are-all-you-need - fmars/wiki_stem - fblgit/tree-of-knowledge - Severian/Bio-Design-Process tags: - chemistry - biology - climate - science - philosophy - nature - ecology - biomimicry - fauna - flora pipeline_tag: text-generation metrics: - accuracy --- # ANIMA-Phi-Neptune-Mistral-7B: Biomimicry Enhanced LLM <img src="https://cdn-uploads.huggingface.co/production/uploads/64740cf7485a7c8e1bd51ac9/JZH6p50t_j3-OUph4Wq6y.png" width="500"> ## Overview **ANIMA** (Advanced Nature Inspired Multidisciplinary Assistant) is an expert in various scientific disciplines, including but not limited to biomimicry, biology, and environmental science. **Instagram: [@anima_llm](https://www.instagram.com/anima_llm)** --- ## Model Description ANIMA is fine-tuned on a rich dataset encompassing: - 4,000+ Nature-Biomimicry examples - 60k Biomimicry Design Process examples - 600k STEM facts from Wikipedia - Science/Philosophy focused 'All-You-Need-Is-Textbooks' dataset - Additional Tree of Knowledge + Biomimicry data combined fine-tuning The model aims to assist users in solving problems using nature-inspired strategies and concepts. ### Special Features - **Multi-disciplinary Expertise**: Knowledge across various scientific and philosophical domains. - **Biomimicry Design Process**: Incorporates a dataset generated by Mistral and Minotaur-15B. The dataset was then intricately processed by a real person to ensure factuality and grounding. --- - Here is a link to The Bloke's GGUF version: [ANIMA-Phi-Neptune-Mistral-7B-GGUF](https://huggingface.co/TheBloke/ANIMA-Phi-Neptune-Mistral-7B-GGUF) - ANIMA is also available using Ollama - Download the [OLLAMA](https://ollama.ai/) App (MacOS & Linux) and then run this command in your Terminal '**ollama pull severian/anima**' to download the model and then run this command '**ollama run severian/anima**' to load the model and start talking. 
- You can also download and use the model with [LM Studio](https://lmstudio.ai/) (All OS systems). Just download the app and then search for 'ANIMA GGUF' in the search bar and you will have a list of versions to choose from. - Want to test ANIMA + Ollama and chat right away? Download the model from Ollama and head here to chat with ANIMA right in your browser! [ANIMA - Chat](https://severian42.github.io/ANIMA-Chat/) - Have a PDF you want to discuss with ANIMA + Ollama? Head here and you can do just that in your browser, 100% locally. [ANIMA - Locally Chat over your PDF](https://ANIMA-pdf-chat.vercel.app/) - ANIMA is also being hosted on a Hugging Face Space if you'd like to try it there (It may be slow to generate a respone but it still works just fine) [ANIMA - HF Space](https://huggingface.co/spaces/Severian/ANIMA-7B-Biomimicry-LLM) - **Contact** If you want to discuss the model/dataset/concept further, have any cool ideas, want to collaborate or if you run into any issues with the model, please don't hesitate to reach out at InterwovenArkitech@Gmail.com ## Datasets - `Severian/Biomimicry` (1st Fine-Tune) - `emrgnt-cmplxty/sciphi-textbooks-are-all-you-need` (2nd Fine-Tune) - `Severian/Bio-Design-Process` & `fmars/wiki_stem` (3rd Fine-Tune) - `fblgit/tree-of-knowledge` & `Severian/Biomimicry` (4th Fine-Tune) - `Combined all previous datasets` into one giant dataset for a quick fine-tuning for a total of 730k data points (5th Fine-Tune) ## Training Stages 1. **Base Model**: `ehartford/dolphin-2.0-mistral-7b` 2. **1st Fine-Tune**: `ANIMA-Echo-Mistral-7B-v1` - 1 Epoch x 5hrs on 1 x T4 Small 3. **2nd Fine-Tune**: `ANIMA-Phi-Neptune-Mistral-v1`-: 1 Epoch x 1hr on 1 x A100 4. **3rd Fine-Tune**: `ANIMA-Phi-Neptune-Mistral-7B-v2` - 1 Epoch x 1hr on 1 x A100 5. **4th Fine-Tune**: `ANIMA-Phi-Neptune-Mistral-7B-v3` - 1 Epoch x 1hr on 1 x A100 6. 
**5th Fine-Tune**: `ANIMA-Phi-Neptune-Mistral-7B-v4` - 1 Epoch x 15mins on 1 x A100 --- ## 📝 Usage ANIMA is designed around the Biomimicry Design Process to guide users in solving problems through biomimicry. For more information and examples, please visit [Biomimicry Design Process](https://toolbox.biomimicry.org/methods/process/). ## 🏁 Benchmarks - Average - 62.22 - ARC - 56.83 - HellaSwag - 78.82 - MMLU - 53.84 - TruthfulQA - 59.40 --- - **EXAMPLE OUTPUT** [INST] How can biomimicry help in water purification? [/INST] Biomimicry plays a significant role in water purification by taking inspiration from natural processes and organisms to develop sustainable and efficient solutions. Here's a comprehensive overview of how biomimicry can help in water purification: 1. Microbial filtration: - Bacteriophages (viruses that infect bacteria) can effectively target contaminants in water systems, such as Escherichia coli and Staphylococcus aureus. By mimicking the natural defense mechanisms of bacteriophages, we can develop efficient filtration systems that remove pollutants from water sources. - Some fungi, such as Trametes versicolor, can break down organic pollutants in water using enzymes. This process helps make contaminated water safe for consumption. By studying and replicating these natural processes, we can develop sustainable filtration systems that remove pollutants from water sources effectively. 2. Biofilm-based filtration: - Microorganisms form biofilms on surfaces to protect themselves from environmental threats like predators or contaminants. By mimicking the natural formation of biofilms, we can develop sustainable filtration systems that use bacteria or fungi to remove pollutants from water sources. - For example, a study has shown how Pseudomonas putida could be used to biodegrade petroleum hydrocarbons in contaminated water sources using a biofilm-based system. 
This method offers an eco-friendly alternative to conventional chemical treatments for oil spills. 3. Wetland-based filtration: - Wetlands act as natural filters for water by retaining sediments and organic matter, which helps remove contaminants. By mimicking the natural processes of a wetland, we can design effective water purification systems that use sand filters or engineered wetlands to remove pollutants. For instance, a study has shown how reed beds can be used for water filtration by using them as a natural treatment system for agricultural wastewater. 4. Sponge filtration: - Some marine sponges (e.g., sea sponge) have evolved sophisticated mechanisms to filter out particles and contaminants from the surrounding water. By studying these natural filtration processes, we can develop efficient and sustainable filtration systems that remove pollutants effectively. For example, a study has shown how sea sponges can be used as a model for designing a low-cost, self-sustaining water purification system that uses solar energy to disinfect contaminated water. 5. Biodegradable materials: - Natural organisms have evolved biodegradable materials to protect themselves from predators or environmental threats. For instance, some insects produce natural pesticides (such as alkaloids) to defend themselves against predators. Similarly, we can develop biodegradable materials that release these natural pesticides in contaminated water sources, killing off contaminants and restoring the environment's health. - For example, a study has shown how bacterial cells can be used as a natural filtration system to remove contaminants from water sources by incorporating them into membranes that selectively remove organic pollutants. In conclusion, biomimicry plays a vital role in water purification by using natural processes and organisms as inspiration for developing sustainable and efficient solutions. 
By studying the natural filtration mechanisms of microorganisms, plants, animals, and other living organisms, we can design eco-friendly and cost-effective water purification systems that help protect our environment from contamination. --- ### ***[Help Support ANIMA](https://www.buymeacoffee.com/anima7b)*** --- - **KNOWN ISSUES** The model will sometimes respond to itself and continue the conversation taking both the user and AI roles. This is a known issue in the Mistral model but does not happen very often. ## 📜 License This project is licensed under Artistic-2.0 **This model is for research purposes only and restricted from any commercial use**
8,444
[ [ -0.056915283203125, -0.0697021484375, 0.05596923828125, -0.0014925003051757812, -0.030975341796875, 0.00354766845703125, 0.0233917236328125, -0.051788330078125, 0.043792724609375, 0.04071044921875, -0.038421630859375, -0.0150146484375, -0.022613525390625, 0.0294189453125, -0.00521087646484375, 0.043731689453125, 0.0160064697265625, 0.04400634765625, 0.01168060302734375, -0.0185394287109375, -0.0233612060546875, -0.01904296875, -0.0748291015625, -0.0347900390625, 0.039886474609375, 0.0140533447265625, 0.052520751953125, 0.05767822265625, 0.036529541015625, 0.03228759765625, -0.0294189453125, 0.0205230712890625, -0.0235443115234375, -0.00115966796875, -0.026885986328125, -0.0340576171875, -0.035125732421875, 0.01483154296875, 0.040985107421875, 0.04681396484375, 0.0131072998046875, 0.03680419921875, 0.01303863525390625, 0.054962158203125, -0.038818359375, 0.0156402587890625, 0.004199981689453125, 0.016693115234375, -0.00601959228515625, -0.027130126953125, 0.01253509521484375, -0.033782958984375, 0.0008554458618164062, -0.06524658203125, -0.005413055419921875, 0.0005521774291992188, 0.0694580078125, 0.0211181640625, -0.0264434814453125, -0.01012420654296875, -0.06640625, 0.06292724609375, -0.051177978515625, 0.031982421875, 0.0189361572265625, 0.02801513671875, -0.01511383056640625, -0.058837890625, -0.0257110595703125, -0.02044677734375, -0.009735107421875, 0.032867431640625, -0.002712249755859375, 0.003143310546875, 0.0270233154296875, 0.049530029296875, -0.0303192138671875, -0.0287628173828125, -0.054473876953125, -0.00913238525390625, 0.049224853515625, 0.0051116943359375, 0.032989501953125, -0.0179901123046875, -0.041900634765625, -0.0143280029296875, -0.0286407470703125, 0.0251617431640625, 0.027130126953125, 0.0117340087890625, -0.0129852294921875, 0.0305633544921875, -0.00492095947265625, 0.031341552734375, -0.001129150390625, -0.00687408447265625, -0.00560760498046875, -0.040924072265625, -0.01233673095703125, -0.004627227783203125, 0.048828125, 
0.0030193328857421875, 0.00522613525390625, 0.02313232421875, 0.00811004638671875, 0.0112152099609375, 0.02081298828125, -0.08251953125, -0.0197906494140625, 0.03204345703125, -0.056732177734375, -0.03961181640625, -0.008148193359375, -0.0718994140625, -0.03375244140625, -0.035430908203125, 0.055694580078125, -0.0293121337890625, -0.0221405029296875, 0.01175689697265625, -0.029998779296875, -0.006603240966796875, 0.02996826171875, -0.040863037109375, 0.032623291015625, 0.021087646484375, 0.07025146484375, 0.0177764892578125, 0.0022220611572265625, -0.007678985595703125, 0.00298309326171875, -0.010162353515625, 0.053314208984375, -0.0372314453125, -0.047821044921875, -0.038299560546875, -0.0004649162292480469, -0.004749298095703125, -0.04791259765625, 0.01299285888671875, -0.0210418701171875, 0.01397705078125, -0.0275421142578125, -0.0277099609375, -0.057281494140625, -0.017364501953125, -0.05780029296875, 0.03594970703125, -0.0001983642578125, -0.061767578125, -0.008056640625, -0.07110595703125, -0.02008056640625, 0.0201873779296875, 0.012847900390625, -0.0137176513671875, 0.0081024169921875, -0.0151214599609375, 0.003978729248046875, -0.032958984375, 0.004436492919921875, -0.057037353515625, -0.0155792236328125, 0.0138702392578125, -0.01125335693359375, 0.09796142578125, 0.0435791015625, -0.010772705078125, -0.0036773681640625, -0.06756591796875, -0.00846099853515625, 0.0008273124694824219, -0.012725830078125, -0.0123443603515625, 0.0019550323486328125, -0.0097808837890625, 0.0168304443359375, 0.01224517822265625, -0.050567626953125, 0.002285003662109375, -0.021331787109375, 0.07098388671875, 0.032073974609375, 0.0479736328125, 0.0260467529296875, -0.057159423828125, 0.0221099853515625, -0.0035190582275390625, 0.04913330078125, 0.0024547576904296875, -0.05718994140625, -0.050445556640625, -0.039215087890625, 0.021240234375, 0.06195068359375, -0.059600830078125, 0.0338134765625, -0.0015821456909179688, -0.054107666015625, -0.049560546875, 0.0289459228515625, 
0.025634765625, 0.045013427734375, 0.0364990234375, -0.05767822265625, -0.04498291015625, -0.06292724609375, 0.00823974609375, -0.0190582275390625, 0.00687408447265625, 0.0179595947265625, 0.0552978515625, -0.0134735107421875, 0.0736083984375, -0.04486083984375, -0.037017822265625, 0.01415252685546875, 0.0007791519165039062, 0.0225677490234375, 0.040618896484375, 0.060882568359375, -0.07928466796875, -0.024017333984375, 0.0014715194702148438, -0.07489013671875, 0.00473785400390625, 0.031341552734375, -0.031707763671875, -0.0125579833984375, 0.01800537109375, -0.05084228515625, 0.0477294921875, 0.01415252685546875, -0.0254364013671875, 0.0138702392578125, -0.038665771484375, 0.026580810546875, -0.07928466796875, 0.034637451171875, 0.007663726806640625, -0.02117919921875, -0.06390380859375, 0.022430419921875, -0.0144195556640625, -0.0260772705078125, -0.0260009765625, 0.058380126953125, -0.025146484375, 0.0204315185546875, -0.0250091552734375, -0.0232391357421875, 0.01145172119140625, 0.029693603515625, 0.0094757080078125, 0.026031494140625, 0.03338623046875, -0.06927490234375, 0.037078857421875, 0.025726318359375, 0.00806427001953125, 0.033782958984375, -0.053741455078125, 0.00019240379333496094, -0.0151214599609375, 0.03509521484375, -0.0689697265625, -0.0010633468627929688, 0.0345458984375, -0.037322998046875, 0.0255889892578125, 0.0012083053588867188, -0.034027099609375, -0.04400634765625, -0.0357666015625, 0.0124359130859375, 0.03509521484375, -0.047027587890625, 0.04840087890625, 0.02374267578125, -0.013397216796875, -0.0079193115234375, -0.035675048828125, -0.007244110107421875, -0.042083740234375, -0.052825927734375, 0.0184478759765625, -0.017578125, -0.02777099609375, 0.0118255615234375, -0.0126953125, -0.004177093505859375, 0.016448974609375, 0.0304107666015625, 0.032073974609375, 0.0014562606811523438, -0.0169525146484375, 0.00479888916015625, -0.0218658447265625, 0.003704071044921875, -0.0125885009765625, 0.0261077880859375, -0.007610321044921875, 
0.006961822509765625, -0.045684814453125, 0.026580810546875, 0.06414794921875, -0.00720977783203125, 0.03570556640625, 0.057342529296875, -0.050018310546875, 0.027740478515625, -0.042816162109375, -0.03790283203125, -0.0406494140625, -0.0027065277099609375, -0.0173492431640625, -0.042755126953125, 0.055938720703125, -0.0046539306640625, 0.0172576904296875, 0.037322998046875, 0.022308349609375, -0.016357421875, 0.0699462890625, 0.04486083984375, 0.0008955001831054688, 0.03955078125, -0.054412841796875, 0.031341552734375, -0.0556640625, -0.0142822265625, -0.043365478515625, -0.0283966064453125, -0.05718994140625, -0.0145263671875, 0.0445556640625, 0.01416778564453125, -0.03277587890625, 0.0361328125, -0.03839111328125, -0.01142120361328125, 0.0452880859375, 0.0270538330078125, 0.0212554931640625, -0.0018777847290039062, -0.0113067626953125, 0.00864410400390625, -0.047607421875, -0.045989990234375, 0.040435791015625, 0.032501220703125, 0.038726806640625, 0.0079193115234375, 0.06854248046875, 0.0167388916015625, 0.01971435546875, -0.039093017578125, 0.0286712646484375, -0.005764007568359375, -0.07708740234375, -0.024017333984375, -0.0238189697265625, -0.06329345703125, 0.0258636474609375, -0.032470703125, -0.051788330078125, 0.059906005859375, 0.012420654296875, -0.052978515625, 0.0293426513671875, -0.04559326171875, 0.05364990234375, -0.0067291259765625, -0.0653076171875, -0.029083251953125, -0.05828857421875, 0.026275634765625, -0.0089569091796875, 0.053192138671875, -0.0081939697265625, -0.0011014938354492188, 0.0419921875, -0.047210693359375, 0.07037353515625, -0.0253448486328125, 0.01453399658203125, 0.044677734375, 0.0078277587890625, -0.0221099853515625, 0.0261383056640625, 0.00722503662109375, 0.01103973388671875, 0.03021240234375, -0.0185394287109375, -0.0221099853515625, 0.04058837890625, -0.06463623046875, -0.052154541015625, -0.04248046875, -0.0099029541015625, 0.005947113037109375, 0.01751708984375, 0.036529541015625, 0.0301513671875, -0.00423431396484375, 
0.0173187255859375, 0.0582275390625, -0.0246124267578125, 0.0428466796875, 0.033050537109375, -0.020263671875, -0.065185546875, 0.072021484375, 0.031768798828125, 0.0006728172302246094, 0.034912109375, 0.031707763671875, -0.02392578125, -0.0144195556640625, -0.0318603515625, 0.022552490234375, -0.0281829833984375, -0.03253173828125, -0.053985595703125, -0.03924560546875, -0.037445068359375, 0.007801055908203125, -0.01824951171875, -0.0267181396484375, -0.03509521484375, -0.0001990795135498047, 0.048736572265625, 0.04913330078125, -0.041412353515625, -0.0014600753784179688, -0.0775146484375, 0.04974365234375, 0.031341552734375, 0.0098419189453125, 0.00231170654296875, -0.032623291015625, 0.00811004638671875, 0.0253448486328125, -0.042816162109375, -0.08502197265625, 0.04937744140625, 0.019683837890625, 0.039215087890625, 0.043792724609375, -0.0129241943359375, 0.061492919921875, -0.0233001708984375, 0.05267333984375, 0.0035495758056640625, -0.054168701171875, 0.059661865234375, -0.0267486572265625, 0.015899658203125, 0.042083740234375, 0.04498291015625, -0.0193023681640625, -0.0299072265625, -0.051788330078125, -0.057464599609375, 0.031280517578125, 0.02728271484375, 0.004367828369140625, 0.01222991943359375, 0.04815673828125, -0.00507354736328125, 0.01361846923828125, -0.042510986328125, -0.0265960693359375, -0.01324462890625, 0.017822265625, -0.007328033447265625, -0.00736236572265625, -0.00635528564453125, -0.04412841796875, 0.07196044921875, 0.020843505859375, 0.0119781494140625, 0.0295257568359375, -0.007015228271484375, -0.0167388916015625, -0.0014200210571289062, 0.04241943359375, 0.021942138671875, -0.0267486572265625, -0.00888824462890625, 0.0157012939453125, -0.062103271484375, 0.01555633544921875, 0.007457733154296875, -0.021484375, -0.01268768310546875, 0.01277923583984375, 0.044219970703125, 0.030029296875, -0.0028743743896484375, -0.00879669189453125, -0.0138702392578125, -0.016143798828125, -0.01335906982421875, 0.0288543701171875, 0.0137176513671875, 
0.0249481201171875, 0.032867431640625, 0.0135345458984375, 0.0474853515625, -0.06390380859375, -0.0027637481689453125, 0.01171875, -0.0273590087890625, -0.034423828125, 0.07080078125, 0.017425537109375, -0.0205841064453125, 0.031036376953125, -0.0205230712890625, -0.023956298828125, 0.05242919921875, 0.030517578125, 0.05389404296875, -0.053985595703125, 0.02337646484375, 0.057037353515625, -0.0126953125, -0.00431060791015625, 0.036865234375, -0.0145263671875, -0.04071044921875, 0.008453369140625, -0.07733154296875, -0.035003662109375, 0.0236663818359375, -0.05035400390625, 0.0271453857421875, -0.064208984375, -0.036590576171875, 0.036163330078125, 0.043121337890625, -0.03857421875, 0.0178680419921875, -0.011383056640625, 0.07073974609375, -0.083251953125, 0.04443359375, 0.0836181640625, -0.04840087890625, -0.056121826171875, -0.04962158203125, 0.0182037353515625, -0.035858154296875, 0.01416015625, 0.002857208251953125, -0.01416778564453125, -0.00150299072265625, -0.0423583984375, -0.0882568359375, 0.1072998046875, 0.0205078125, -0.007350921630859375, 0.0170440673828125, -0.00530242919921875, 0.056304931640625, -0.031707763671875, 0.0214080810546875, 0.034515380859375, 0.060333251953125, 0.0247650146484375, -0.061553955078125, 0.0095672607421875, -0.05841064453125, 0.028289794921875, -0.00911712646484375, -0.07745361328125, 0.0557861328125, -0.00565338134765625, -0.00867462158203125, 0.03314208984375, 0.034576416015625, 0.039886474609375, 0.03167724609375, 0.028564453125, 0.03485107421875, 0.0455322265625, 0.00384521484375, 0.07843017578125, -0.03631591796875, 0.0055084228515625, 0.04107666015625, -0.0154876708984375, 0.050445556640625, 0.0295867919921875, -0.0146636962890625, 0.0223541259765625, 0.0745849609375, -0.0132904052734375, 0.04400634765625, 0.0135650634765625, -0.03472900390625, -0.01116943359375, -0.025390625, -0.038177490234375, 0.0296783447265625, 0.032989501953125, -0.00791168212890625, -0.0152587890625, 0.005641937255859375, -0.005748748779296875, 
-0.015289306640625, -0.03387451171875, 0.05194091796875, 0.045562744140625, -0.0408935546875, 0.016082763671875, -0.0005970001220703125, 0.042083740234375, -0.041412353515625, -0.0240478515625, -0.01390838623046875, 0.0109405517578125, -0.025177001953125, -0.0299530029296875, 0.0037631988525390625, -0.0279541015625, -0.0002180337905883789, -0.032928466796875, 0.08636474609375, -0.0143585205078125, -0.0202178955078125, 0.0428466796875, 0.0478515625, 0.022552490234375, 0.0149688720703125, -0.05145263671875, -0.026824951171875, -0.004199981689453125, -0.01549530029296875, 0.01166534423828125, 0.0101776123046875, -0.01202392578125, 0.060211181640625, 0.052703857421875, 0.015838623046875, -0.01366424560546875, -0.006378173828125, 0.048919677734375, -0.045806884765625, -0.0247039794921875, -0.052276611328125, 0.032806396484375, 0.0036296844482421875, -0.01151275634765625, 0.04913330078125, 0.04937744140625, 0.042633056640625, -0.0224761962890625, 0.04693603515625, 0.0099334716796875, 0.03448486328125, -0.0166015625, 0.06878662109375, -0.038909912109375, 0.0023670196533203125, -0.00730133056640625, -0.08807373046875, -0.005268096923828125, 0.06884765625, 0.0007371902465820312, 0.0283050537109375, 0.053436279296875, 0.037567138671875, -0.0129852294921875, 0.032135009765625, 0.00688934326171875, 0.01495361328125, 0.0233001708984375, 0.0139923095703125, 0.06036376953125, -0.023590087890625, 0.0076751708984375, -0.05780029296875, -0.028656005859375, -0.019989013671875, -0.052337646484375, -0.060272216796875, -0.03253173828125, -0.040435791015625, -0.02691650390625, 0.01739501953125, 0.058837890625, 0.048095703125, -0.06396484375, -0.0377197265625, -0.00547027587890625, -0.0017061233520507812, -0.0176849365234375, -0.01433563232421875, 0.01096343994140625, -0.013946533203125, -0.052642822265625, 0.0087738037109375, 0.036346435546875, 0.03485107421875, -0.0147705078125, 0.011383056640625, -0.0240478515625, 0.016021728515625, 0.02764892578125, 0.052825927734375, 
-0.0223388671875, -0.03741455078125, 0.00951385498046875, -0.00916290283203125, -0.00489044189453125, 0.031829833984375, -0.044952392578125, 0.01392364501953125, 0.0535888671875, 0.007904052734375, 0.055145263671875, -0.00795745849609375, 0.04034423828125, -0.0008625984191894531, -0.012176513671875, 0.00868988037109375, 0.039703369140625, 0.0054473876953125, -0.039093017578125, 0.06683349609375, 0.0305023193359375, -0.0295257568359375, -0.05126953125, -0.006893157958984375, -0.08172607421875, -0.028839111328125, 0.07135009765625, -0.012115478515625, -0.03021240234375, -0.005245208740234375, -0.021148681640625, 0.033935546875, -0.01275634765625, 0.0557861328125, 0.054718017578125, -0.02032470703125, 0.01425933837890625, -0.052764892578125, 0.03118896484375, 0.01247406005859375, -0.0655517578125, -0.035858154296875, 0.0275421142578125, 0.058837890625, 0.05059814453125, 0.0399169921875, -0.024871826171875, 0.0260009765625, -0.0087432861328125, -0.0076446533203125, -0.0037975311279296875, -0.0209503173828125, -0.0291290283203125, -0.0016384124755859375, -0.0189208984375, -0.0185089111328125 ] ]
uw-madison/mra-base-512-4
2023-07-10T16:11:54.000Z
[ "transformers", "pytorch", "mra", "fill-mask", "arxiv:2207.10284", "autotrain_compatible", "endpoints_compatible", "region:us" ]
fill-mask
uw-madison
null
null
uw-madison/mra-base-512-4
0
8,634
transformers
2023-06-23T06:04:05
# MRA MRA model for masked language modeling (MLM) for sequence length 512. ## About MRA The MRA model was proposed in [Multi Resolution Analysis (MRA) for Approximate Self-Attention](https://arxiv.org/abs/2207.10284) by Zhanpeng Zeng, Sourav Pal, Jeffery Kline, Glenn M Fung, and Vikas Singh. The abstract from the paper is the following: *Transformers have emerged as a preferred model for many tasks in natural langugage processing and vision. Recent efforts on training and deploying Transformers more efficiently have identified many strategies to approximate the self-attention matrix, a key module in a Transformer architecture. Effective ideas include various prespecified sparsity patterns, low-rank basis expansions and combinations thereof. In this paper, we revisit classical Multiresolution Analysis (MRA) concepts such as Wavelets, whose potential value in this setting remains underexplored thus far. We show that simple approximations based on empirical feedback and design choices informed by modern hardware and implementation challenges, eventually yield a MRA-based approach for self-attention with an excellent performance profile across most criteria of interest. We undertake an extensive set of experiments and demonstrate that this multi-resolution scheme outperforms most efficient self-attention proposals and is favorable for both short and long sequences. Code is available at https://github.com/mlpen/mra-attention.* This model was contributed by [novice03](https://huggingface.co/novice03). The original code can be found [here](https://github.com/mlpen/mra-attention).
1,606
[ [ -0.058929443359375, -0.033050537109375, 0.0168609619140625, 0.0249176025390625, 0.006237030029296875, -0.00217437744140625, -0.0010480880737304688, -0.044525146484375, 0.022369384765625, 0.045318603515625, -0.063232421875, 0.0013523101806640625, -0.05401611328125, 0.018157958984375, -0.041290283203125, 0.052032470703125, 0.0032978057861328125, -0.0192108154296875, 0.007572174072265625, -0.029693603515625, -0.0469970703125, -0.052978515625, -0.0281524658203125, -0.0015811920166015625, 0.04937744140625, 0.01189422607421875, 0.028533935546875, 0.049224853515625, 0.0538330078125, 0.0287322998046875, 0.0027923583984375, 0.0175933837890625, -0.029693603515625, -0.003131866455078125, 0.0022945404052734375, -0.0253143310546875, -0.035552978515625, 0.018829345703125, 0.0555419921875, 0.038665771484375, -0.005283355712890625, 0.05096435546875, 0.0250244140625, 0.08587646484375, -0.04595947265625, -0.0015773773193359375, -0.0259552001953125, 0.0316162109375, 0.0035190582275390625, -0.0127716064453125, -0.048431396484375, 0.0179901123046875, 0.01486968994140625, -0.00832366943359375, 0.054412841796875, -0.000598907470703125, 0.067626953125, 0.03857421875, -0.005588531494140625, -0.00836181640625, -0.080810546875, 0.077880859375, -0.044647216796875, 0.03302001953125, 0.004360198974609375, 0.039459228515625, 0.0025653839111328125, -0.060577392578125, -0.02947998046875, -0.015533447265625, -0.0282745361328125, 0.01227569580078125, -0.0252227783203125, 0.0078125, 0.0301971435546875, 0.033355712890625, -0.051177978515625, -0.009674072265625, -0.041412353515625, -0.00038933753967285156, 0.0213470458984375, 0.00921630859375, -0.01259613037109375, -0.03436279296875, -0.046478271484375, -0.00644683837890625, -0.024932861328125, -0.003437042236328125, 0.018707275390625, 0.0233306884765625, -0.008544921875, 0.039825439453125, -0.0168304443359375, 0.074462890625, 0.0221405029296875, -0.004917144775390625, 0.0142669677734375, -0.017547607421875, -0.027252197265625, -0.020416259765625, 
0.061431884765625, 0.00623321533203125, 0.025482177734375, 0.006725311279296875, -0.017791748046875, -0.001232147216796875, 0.05816650390625, -0.04779052734375, -0.0085601806640625, 0.0231170654296875, -0.06964111328125, 0.0027313232421875, -0.00560760498046875, -0.09002685546875, 0.0004265308380126953, -0.04052734375, 0.03607177734375, -0.0006437301635742188, -0.0169525146484375, -0.005764007568359375, 0.02032470703125, 0.0221405029296875, -0.005939483642578125, -0.072998046875, 0.0216064453125, 0.022705078125, 0.06201171875, 0.0052642822265625, -0.0212554931640625, -0.006916046142578125, -0.01055145263671875, -0.0225372314453125, 0.06585693359375, -0.0193634033203125, -0.0145416259765625, -0.0199737548828125, -0.0032939910888671875, -0.0224151611328125, -0.0340576171875, 0.017791748046875, -0.044158935546875, 0.033355712890625, 0.02191162109375, -0.01113128662109375, -0.028076171875, 0.01053619384765625, -0.062347412109375, 0.0784912109375, 0.002559661865234375, -0.027252197265625, 0.002040863037109375, -0.06256103515625, -0.00467681884765625, -0.026092529296875, 0.00799560546875, -0.0457763671875, 0.002666473388671875, 0.034393310546875, 0.0421142578125, -0.0032672882080078125, 0.0053863525390625, -0.0166015625, -0.0111846923828125, 0.0256500244140625, -0.052581787109375, 0.0213623046875, 0.03997802734375, -0.0213165283203125, 0.03009033203125, -0.06744384765625, -0.0015764236450195312, 0.0185394287109375, 0.00818634033203125, 0.005802154541015625, -0.017852783203125, -0.00921630859375, 0.041778564453125, 0.006671905517578125, -0.00872802734375, 0.0016078948974609375, -0.046234130859375, 0.019805908203125, 0.061431884765625, -0.01032257080078125, 0.032135009765625, 0.000339508056640625, 0.049285888671875, -0.017486572265625, 0.041107177734375, -0.043426513671875, -0.04095458984375, -0.0718994140625, -0.034423828125, 0.03948974609375, 0.02398681640625, -0.01580810546875, 0.025421142578125, -0.0194549560546875, -0.0279541015625, -0.056060791015625, 
0.00728607177734375, 0.03765869140625, 0.0184326171875, 0.0472412109375, -0.043701171875, -0.0596923828125, -0.0850830078125, -0.0157470703125, 0.0218505859375, -0.0196380615234375, 0.020660400390625, 0.003818511962890625, -0.01047515869140625, 0.08984375, -0.036346435546875, -0.059906005859375, -0.017822265625, 0.0033054351806640625, 0.03326416015625, 0.026763916015625, 0.042877197265625, -0.045745849609375, -0.0343017578125, -0.0267486572265625, -0.05670166015625, 0.0173187255859375, -0.01116943359375, 0.00519561767578125, 0.01073455810546875, 0.0226287841796875, -0.038360595703125, 0.0296173095703125, 0.06622314453125, 0.0010232925415039062, 0.0268707275390625, -0.01194000244140625, 0.01149749755859375, -0.0810546875, 0.0215911865234375, 0.0272064208984375, -0.028472900390625, -0.06719970703125, 0.0096435546875, 0.011871337890625, -0.0251922607421875, -0.0287628173828125, 0.030609130859375, -0.05126953125, -0.0267181396484375, -0.038177490234375, 0.01244354248046875, -0.00991058349609375, 0.0775146484375, -0.00627899169921875, 0.05218505859375, 0.0186614990234375, -0.046783447265625, 0.007572174072265625, 0.004734039306640625, 0.001873016357421875, 0.02386474609375, -0.05499267578125, 0.0225677490234375, -0.0262298583984375, 0.00901031494140625, -0.096435546875, 0.01065826416015625, 0.034515380859375, -0.042938232421875, 0.05511474609375, -0.0213470458984375, -0.01535797119140625, -0.055908203125, -0.01617431640625, 0.04083251953125, 0.03753662109375, -0.05621337890625, 0.0755615234375, 0.028472900390625, 0.00955963134765625, -0.06744384765625, -0.04046630859375, 0.01708984375, -0.02313232421875, -0.0296630859375, 0.01132965087890625, -0.00533294677734375, 0.009735107421875, -0.01277923583984375, -0.0032939910888671875, 0.01067352294921875, -0.0156707763671875, 0.02239990234375, 0.02618408203125, -0.0225372314453125, -0.0052490234375, -0.038055419921875, -0.037994384765625, 0.00399017333984375, -0.001255035400390625, 0.051177978515625, -0.0191497802734375, 
-0.0176849365234375, -0.042022705078125, 0.01995849609375, 0.02740478515625, -0.035369873046875, 0.038665771484375, 0.044189453125, -0.0235748291015625, -0.006072998046875, -0.059906005859375, -0.02899169921875, -0.041748046875, 0.0178680419921875, -0.045562744140625, -0.0531005859375, 0.049560546875, 0.008453369140625, -0.005756378173828125, 0.057525634765625, 0.052703857421875, -0.00931549072265625, 0.06866455078125, 0.057525634765625, -0.00600433349609375, 0.043731689453125, -0.027374267578125, 0.035888671875, -0.07037353515625, -0.0013265609741210938, -0.04144287109375, -0.018280029296875, -0.04388427734375, -0.038330078125, 0.006122589111328125, -0.0154571533203125, 0.0150299072265625, 0.040130615234375, -0.06719970703125, 0.02734375, 0.037078857421875, 0.00400543212890625, 0.007167816162109375, 0.00901031494140625, 0.019561767578125, -0.016632080078125, -0.06866455078125, -0.0303802490234375, 0.0743408203125, 0.05780029296875, 0.05877685546875, -0.004848480224609375, 0.040008544921875, 0.0318603515625, 0.0400390625, -0.059600830078125, 0.0300750732421875, -0.004566192626953125, -0.04718017578125, -0.0268402099609375, -0.0218963623046875, -0.09112548828125, 0.037139892578125, 0.0102386474609375, -0.038909912109375, 0.02227783203125, 0.018798828125, -0.0299530029296875, 0.01395416259765625, -0.06256103515625, 0.070068359375, 0.020782470703125, -0.002979278564453125, -0.0247039794921875, -0.06634521484375, 0.04095458984375, -0.007747650146484375, 0.0165252685546875, -0.00977325439453125, 0.031585693359375, 0.061187744140625, 0.00036215782165527344, 0.0865478515625, -0.004474639892578125, -0.0185546875, 0.04156494140625, -0.0083465576171875, 0.0201873779296875, -0.0225982666015625, 0.03253173828125, 0.026947021484375, -0.00495147705078125, -0.0355224609375, -0.049041748046875, 0.04632568359375, -0.048614501953125, -0.04180908203125, -0.02777099609375, -0.0256500244140625, 0.0005364418029785156, 0.010040283203125, -0.0134124755859375, 0.051300048828125, 
0.00922393798828125, -0.011810302734375, 0.053009033203125, -0.0013399124145507812, 0.058563232421875, 0.03466796875, -0.0172271728515625, -0.03179931640625, 0.0716552734375, 0.0189208984375, 0.0198822021484375, 0.0288238525390625, -0.0013246536254882812, -0.03204345703125, -0.019439697265625, -0.051025390625, 0.0311431884765625, -0.07196044921875, -0.0259246826171875, -0.058929443359375, -0.028076171875, -0.031280517578125, 0.02069091796875, -0.05938720703125, -0.0211334228515625, -0.0218505859375, 0.01367950439453125, -0.00007230043411254883, 0.052581787109375, -0.00734710693359375, 0.0016431808471679688, -0.07550048828125, 0.0509033203125, 0.033843994140625, 0.043365478515625, 0.0207672119140625, -0.060760498046875, -0.0268402099609375, -0.023895263671875, -0.01947021484375, -0.051300048828125, 0.045166015625, 0.002002716064453125, 0.058868408203125, 0.0176849365234375, -0.01543426513671875, 0.05963134765625, -0.047576904296875, 0.05316162109375, 0.01247406005859375, -0.053131103515625, 0.025909423828125, 0.0142364501953125, 0.029541015625, 0.01117706298828125, 0.014739990234375, -0.037994384765625, -0.033782958984375, -0.044647216796875, -0.0400390625, 0.047149658203125, 0.00897216796875, 0.0284271240234375, 0.009979248046875, 0.0296173095703125, -0.0030765533447265625, -0.0036163330078125, -0.06744384765625, -0.06243896484375, -0.0328369140625, -0.0241241455078125, -0.026031494140625, -0.0538330078125, -0.00688934326171875, 0.0037746429443359375, 0.030059814453125, -0.0225067138671875, 0.0606689453125, 0.031829833984375, -0.00850677490234375, -0.018890380859375, 0.01678466796875, 0.07330322265625, 0.051544189453125, -0.02001953125, 0.0176849365234375, 0.00888824462890625, -0.033355712890625, 0.0138702392578125, 0.005950927734375, 0.016754150390625, -0.0019483566284179688, 0.02398681640625, 0.044189453125, 0.00513458251953125, -0.027618408203125, 0.01861572265625, -0.006221771240234375, -0.0094146728515625, -0.0159759521484375, -0.0196685791015625, 
-0.0132904052734375, 0.01287841796875, 0.045928955078125, 0.030517578125, 0.021636962890625, 0.0020275115966796875, 0.02972412109375, 0.024871826171875, -0.05169677734375, -0.01517486572265625, 0.041015625, 0.0033054351806640625, -0.0265655517578125, 0.0235748291015625, 0.002857208251953125, -0.027557373046875, 0.0491943359375, 0.0298004150390625, 0.07073974609375, -0.042938232421875, -0.0082244873046875, 0.0477294921875, 0.049346923828125, 0.00734710693359375, -0.01702880859375, 0.0093841552734375, -0.046356201171875, -0.0640869140625, -0.045684814453125, -0.01093292236328125, 0.00901031494140625, -0.03985595703125, 0.0025806427001953125, -0.0209503173828125, -0.01108551025390625, 0.001865386962890625, -0.017059326171875, -0.04229736328125, 0.0197906494140625, 0.005321502685546875, 0.0408935546875, -0.03741455078125, 0.06329345703125, 0.0031299591064453125, -0.0164337158203125, -0.04534912109375, -0.0102081298828125, -0.022003173828125, -0.053375244140625, 0.0301971435546875, 0.0362548828125, -0.0211334228515625, 0.01464080810546875, -0.01031494140625, -0.0865478515625, 0.057647705078125, -0.0008487701416015625, -0.04864501953125, 0.0144500732421875, 0.01337432861328125, 0.04302978515625, -0.01410675048828125, 0.0169830322265625, 0.03436279296875, 0.02154541015625, -0.004032135009765625, -0.067626953125, -0.0005364418029785156, -0.0009908676147460938, 0.0134124755859375, 0.0203857421875, -0.05865478515625, 0.039398193359375, -0.025390625, -0.026824951171875, 0.00650787353515625, 0.082763671875, 0.0028667449951171875, 0.01421356201171875, 0.034637451171875, 0.033355712890625, 0.004779815673828125, 0.01471710205078125, 0.07672119140625, -0.031219482421875, 0.06414794921875, 0.0726318359375, 0.00952911376953125, 0.044677734375, 0.0479736328125, -0.051513671875, 0.0211334228515625, 0.05316162109375, -0.0206451416015625, 0.03948974609375, 0.01387786865234375, 0.01099395751953125, 0.00605010986328125, 0.0101165771484375, -0.0264892578125, 0.022369384765625, 
0.0193634033203125, -0.043121337890625, -0.00472259521484375, 0.01387786865234375, -0.04412841796875, -0.0013904571533203125, -0.029388427734375, 0.05389404296875, 0.0083465576171875, -0.04541015625, 0.05712890625, 0.009307861328125, 0.0511474609375, -0.038848876953125, -0.01116180419921875, -0.0188446044921875, 0.01279449462890625, -0.00745391845703125, -0.042999267578125, 0.033721923828125, -0.021240234375, -0.031219482421875, -0.01274871826171875, 0.0731201171875, -0.0267486572265625, -0.037261962890625, 0.0227508544921875, 0.028167724609375, 0.004718780517578125, -0.0115814208984375, -0.06396484375, -0.008148193359375, -0.004627227783203125, -0.03802490234375, 0.0276641845703125, -0.0075836181640625, 0.021881103515625, 0.06610107421875, 0.040008544921875, -0.04095458984375, 0.038970947265625, 0.01016998291015625, 0.04461669921875, -0.053497314453125, -0.0211944580078125, -0.060394287109375, 0.0264434814453125, 0.01334381103515625, 0.0017747879028320312, 0.057647705078125, 0.032806396484375, 0.0408935546875, -0.04107666015625, 0.0186309814453125, 0.0183258056640625, 0.043212890625, -0.037933349609375, 0.0215606689453125, -0.030792236328125, -0.0010576248168945312, -0.0255584716796875, -0.083251953125, -0.04229736328125, 0.042633056640625, -0.004913330078125, 0.025726318359375, 0.04400634765625, 0.070556640625, -0.01293182373046875, -0.02020263671875, 0.044097900390625, -0.00927734375, 0.0251312255859375, 0.021514892578125, 0.053253173828125, -0.051300048828125, 0.044097900390625, -0.032073974609375, -0.01512908935546875, -0.0237884521484375, -0.052215576171875, -0.0714111328125, -0.0477294921875, -0.01023101806640625, -0.0307159423828125, -0.01537322998046875, 0.05999755859375, 0.07470703125, -0.06317138671875, -0.013275146484375, 0.016632080078125, 0.0004322528839111328, -0.037139892578125, -0.0197906494140625, 0.041656494140625, -0.0408935546875, -0.059478759765625, 0.031982421875, 0.028076171875, 0.019622802734375, -0.05633544921875, 0.003387451171875, 
-0.0106201171875, 0.009552001953125, 0.05926513671875, 0.0282135009765625, -0.0712890625, -0.015777587890625, 0.0014705657958984375, -0.023895263671875, 0.034637451171875, 0.06793212890625, -0.0665283203125, 0.045074462890625, 0.001995086669921875, 0.037750244140625, 0.057525634765625, 0.00786590576171875, 0.024932861328125, -0.062744140625, 0.016998291015625, -0.0050048828125, 0.01543426513671875, 0.0185546875, -0.027679443359375, 0.045928955078125, 0.01348114013671875, -0.0386962890625, -0.0450439453125, 0.0157928466796875, -0.0826416015625, -0.01441192626953125, 0.069580078125, 0.007450103759765625, -0.0279083251953125, 0.0038242340087890625, -0.028533935546875, 0.0136260986328125, -0.0154266357421875, 0.08074951171875, 0.03436279296875, -0.01306915283203125, -0.054351806640625, -0.04986572265625, 0.034820556640625, 0.04705810546875, -0.0439453125, 0.004665374755859375, 0.010345458984375, 0.01421356201171875, 0.05194091796875, 0.038726806640625, -0.01216888427734375, 0.01065826416015625, -0.01248931884765625, 0.0250244140625, 0.0089874267578125, -0.042694091796875, -0.047515869140625, 0.042938232421875, 0.00614166259765625, -0.033355712890625 ] ]
Hate-speech-CNERG/bert-base-uncased-hatexplain
2021-05-25T09:53:05.000Z
[ "transformers", "pytorch", "jax", "bert", "text-classification", "en", "dataset:hatexplain", "license:apache-2.0", "endpoints_compatible", "has_space", "region:us" ]
text-classification
Hate-speech-CNERG
null
null
Hate-speech-CNERG/bert-base-uncased-hatexplain
14
8,629
transformers
2022-03-02T23:29:04
--- language: en license: apache-2.0 datasets: - hatexplain --- The model is used for classifying a text as **Hatespeech**, **Offensive**, or **Normal**. The model is trained using data from Gab and Twitter and *Human Rationales* were included as part of the training data to boost the performance. The dataset and models are available here: https://github.com/punyajoy/HateXplain **For more details about our paper** Binny Mathew, Punyajoy Saha, Seid Muhie Yimam, Chris Biemann, Pawan Goyal, and Animesh Mukherjee "[HateXplain: A Benchmark Dataset for Explainable Hate Speech Detection)". Accepted at AAAI 2021. ***Please cite our paper in any published work that uses any of these resources.*** ~~~ @article{mathew2020hatexplain, title={HateXplain: A Benchmark Dataset for Explainable Hate Speech Detection}, author={Mathew, Binny and Saha, Punyajoy and Yimam, Seid Muhie and Biemann, Chris and Goyal, Pawan and Mukherjee, Animesh}, journal={arXiv preprint arXiv:2012.10289}, year={2020} } ~~~
1,012
[ [ -0.0308074951171875, -0.05908203125, 0.00818634033203125, -0.0038394927978515625, -0.022003173828125, -0.002353668212890625, -0.0030364990234375, -0.0379638671875, 0.00197601318359375, 0.0281219482421875, -0.038818359375, -0.0509033203125, -0.055908203125, -0.004062652587890625, -0.05035400390625, 0.106689453125, 0.0206756591796875, 0.0022449493408203125, 0.017425537109375, -0.0272216796875, -0.0145263671875, -0.0535888671875, -0.046173095703125, -0.007770538330078125, 0.044189453125, 0.02947998046875, 0.05755615234375, 0.07421875, 0.01068878173828125, 0.0171661376953125, -0.01361083984375, -0.01082611083984375, -0.034881591796875, -0.00014519691467285156, -0.015472412109375, -0.02557373046875, -0.01934814453125, 0.026519775390625, 0.0305938720703125, 0.00833892822265625, -0.0205078125, 0.0142364501953125, -0.01213836669921875, 0.004596710205078125, -0.03533935546875, 0.0024051666259765625, -0.051055908203125, -0.003353118896484375, -0.032318115234375, 0.0146636962890625, -0.0225067138671875, -0.02862548828125, 0.00525665283203125, -0.03497314453125, 0.042236328125, 0.03466796875, 0.0626220703125, -0.0111236572265625, -0.0274200439453125, -0.0302886962890625, -0.0288543701171875, 0.06195068359375, -0.048583984375, 0.025421142578125, 0.0184478759765625, 0.030487060546875, 0.020751953125, -0.02667236328125, -0.07489013671875, 0.008514404296875, -0.005832672119140625, 0.0021266937255859375, -0.047027587890625, -0.004673004150390625, 0.033599853515625, 0.0240020751953125, -0.048187255859375, 0.019317626953125, -0.018707275390625, -0.01242828369140625, 0.06109619140625, 0.02313232421875, 0.04620361328125, -0.0294036865234375, -0.04290771484375, -0.00563812255859375, -0.0155792236328125, 0.0030956268310546875, 0.04461669921875, 0.041229248046875, -0.021820068359375, 0.0248565673828125, 0.0005984306335449219, 0.0439453125, -0.01959228515625, -0.002166748046875, 0.04107666015625, 0.011199951171875, -0.0189056396484375, -0.004791259765625, 0.060821533203125, 
0.0423583984375, 0.03863525390625, -0.01049041748046875, -0.01050567626953125, 0.031463623046875, 0.029754638671875, -0.06878662109375, -0.0168609619140625, 0.003520965576171875, -0.039398193359375, -0.034210205078125, -0.039459228515625, -0.0814208984375, -0.055145263671875, -0.021087646484375, 0.01265716552734375, -0.03497314453125, -0.0244903564453125, -0.0179595947265625, -0.0240631103515625, 0.0025424957275390625, 0.017669677734375, -0.05889892578125, 0.0305633544921875, 0.03228759765625, 0.050628662109375, -0.025634765625, -0.0155029296875, -0.017913818359375, -0.0151519775390625, -0.004169464111328125, 0.054351806640625, -0.032440185546875, -0.01424407958984375, 0.0125732421875, -0.0135345458984375, -0.0164031982421875, -0.0465087890625, 0.08935546875, -0.037841796875, 0.017303466796875, 0.00655364990234375, -0.043701171875, -0.049285888671875, 0.01824951171875, -0.03887939453125, 0.058990478515625, 0.00826263427734375, -0.0762939453125, 0.0237884521484375, -0.057098388671875, -0.00760650634765625, 0.015777587890625, 0.0017118453979492188, -0.045806884765625, -0.01035308837890625, 0.0019311904907226562, 0.0394287109375, -0.032012939453125, 0.0189666748046875, -0.033721923828125, -0.006847381591796875, 0.0147552490234375, -0.02203369140625, 0.090576171875, 0.040435791015625, -0.0274505615234375, 0.01458740234375, -0.08819580078125, -0.0145416259765625, 0.032806396484375, 0.006076812744140625, -0.05859375, 0.0017480850219726562, 0.04534912109375, 0.0262451171875, -0.01904296875, -0.06805419921875, -0.0204315185546875, -0.0243682861328125, 0.0171356201171875, 0.055908203125, -0.0160064697265625, 0.011474609375, -0.03057861328125, 0.046417236328125, 0.0023708343505859375, 0.0028076171875, 0.020660400390625, -0.056793212890625, -0.042510986328125, -0.0304718017578125, -0.005123138427734375, 0.047882080078125, -0.0164794921875, 0.0102996826171875, 0.0002944469451904297, -0.050384521484375, -0.0274505615234375, -0.00516510009765625, 0.041534423828125, 
0.06463623046875, 0.039947509765625, -0.0006384849548339844, -0.07373046875, -0.08477783203125, -0.0421142578125, -0.0233612060546875, 0.005092620849609375, 0.0299224853515625, 0.033111572265625, -0.0162200927734375, 0.067626953125, -0.0295257568359375, -0.0024471282958984375, -0.01708984375, 0.037994384765625, 0.003753662109375, 0.0206146240234375, 0.049285888671875, -0.075927734375, -0.046630859375, -0.0227203369140625, -0.0289306640625, -0.043243408203125, 0.0146026611328125, -0.0118408203125, 0.0096893310546875, 0.007701873779296875, -0.017791748046875, 0.0634765625, 0.034454345703125, -0.0518798828125, 0.0606689453125, 0.03607177734375, 0.02313232421875, -0.08624267578125, -0.005695343017578125, 0.01479339599609375, -0.017333984375, -0.06915283203125, 0.00441741943359375, -0.00614166259765625, -0.0174407958984375, -0.0341796875, 0.0386962890625, -0.0010709762573242188, 0.032379150390625, -0.02984619140625, -0.0022449493408203125, -0.024139404296875, 0.0357666015625, 0.0018444061279296875, 0.050506591796875, 0.0181732177734375, -0.03619384765625, 0.0209197998046875, 0.037567138671875, -0.03277587890625, 0.051727294921875, -0.0305633544921875, 0.006786346435546875, -0.0038280487060546875, -0.00943756103515625, -0.07891845703125, -0.01413726806640625, 0.042449951171875, -0.04522705078125, -0.0128326416015625, -0.0150604248046875, -0.04315185546875, -0.0222625732421875, -0.005031585693359375, 0.044189453125, 0.03533935546875, -0.0518798828125, 0.0509033203125, 0.049285888671875, 0.0014638900756835938, -0.0250091552734375, -0.054718017578125, -0.006378173828125, -0.047210693359375, -0.03240966796875, 0.0292510986328125, -0.021148681640625, -0.02508544921875, -0.0013980865478515625, 0.019866943359375, -0.033416748046875, 0.0059661865234375, 0.03948974609375, 0.0181121826171875, 0.002910614013671875, 0.0011854171752929688, -0.00882720947265625, 0.005218505859375, 0.0227813720703125, 0.0153656005859375, 0.031097412109375, 0.0099029541015625, 0.0214996337890625, 
-0.03680419921875, 0.043731689453125, 0.0306854248046875, 0.01526641845703125, 0.05535888671875, 0.050079345703125, -0.034454345703125, -0.0304412841796875, -0.02947998046875, 0.0188446044921875, -0.0269012451171875, 0.032684326171875, -0.0022830963134765625, -0.070068359375, 0.048980712890625, 0.038238525390625, -0.0008969306945800781, 0.04266357421875, 0.0462646484375, 0.0119171142578125, 0.09332275390625, 0.038787841796875, -0.01329803466796875, 0.024139404296875, 0.00984954833984375, 0.018524169921875, -0.03289794921875, -0.01458740234375, -0.06365966796875, -0.002079010009765625, -0.043853759765625, -0.0271759033203125, -0.01184844970703125, -0.0176849365234375, -0.04302978515625, 0.036651611328125, -0.03369140625, 0.0350341796875, 0.02484130859375, -0.007411956787109375, 0.007007598876953125, 0.0138092041015625, -0.0101776123046875, -0.0282745361328125, -0.03057861328125, -0.0276336669921875, 0.0582275390625, 0.03631591796875, 0.038238525390625, 0.0230865478515625, 0.0245513916015625, 0.038116455078125, 0.0462646484375, -0.055511474609375, 0.042572021484375, -0.0285797119140625, -0.077392578125, 0.0111846923828125, -0.0400390625, -0.0626220703125, 0.017425537109375, -0.01172637939453125, -0.08306884765625, 0.004253387451171875, 0.00875091552734375, -0.01244354248046875, 0.041473388671875, -0.05804443359375, 0.06121826171875, -0.0013589859008789062, 0.0030918121337890625, -0.0229949951171875, -0.0301513671875, 0.038543701171875, 0.0016813278198242188, 0.0400390625, -0.01207733154296875, 0.00643157958984375, 0.10003662109375, -0.0265960693359375, 0.061370849609375, -0.0264892578125, -0.0012493133544921875, 0.0183563232421875, -0.003963470458984375, 0.009063720703125, -0.018524169921875, -0.01116943359375, 0.025634765625, -0.02838134765625, -0.012298583984375, -0.037933349609375, 0.032257080078125, -0.060882568359375, -0.0225067138671875, -0.035980224609375, -0.05303955078125, -0.017608642578125, 0.0164337158203125, 0.023284912109375, 0.02728271484375, 
-0.0131683349609375, 0.005970001220703125, 0.03564453125, -0.0199737548828125, 0.010162353515625, 0.047821044921875, -0.0404052734375, -0.031982421875, 0.0660400390625, 0.01499176025390625, -0.00746917724609375, 0.0147552490234375, 0.0240020751953125, -0.0237579345703125, -0.045654296875, -0.0169525146484375, 0.00984954833984375, -0.042388916015625, -0.01383209228515625, -0.06890869140625, -0.0309906005859375, -0.0308074951171875, 0.0140380859375, -0.0067291259765625, -0.0168304443359375, -0.024261474609375, -0.005584716796875, 0.018341064453125, 0.055999755859375, -0.00911712646484375, 0.05194091796875, -0.034149169921875, 0.032012939453125, 0.0179443359375, 0.0225372314453125, 0.01140594482421875, -0.07965087890625, -0.0278167724609375, -0.00554656982421875, -0.01129150390625, -0.079345703125, 0.05029296875, -0.0099945068359375, 0.04119873046875, 0.02276611328125, 0.00569915771484375, 0.031768798828125, -0.026336669921875, 0.0406494140625, 0.0160980224609375, -0.06341552734375, 0.037017822265625, -0.03887939453125, 0.00757598876953125, 0.031402587890625, 0.06365966796875, -0.047943115234375, -0.0252685546875, -0.054046630859375, -0.06011962890625, 0.074951171875, 0.025726318359375, 0.0092926025390625, -0.006549835205078125, 0.00588226318359375, 0.0145111083984375, 0.002101898193359375, -0.07391357421875, -0.04913330078125, -0.0168609619140625, -0.02655029296875, -0.00009375810623168945, -0.04766845703125, -0.0092926025390625, -0.0241851806640625, 0.06610107421875, 0.0178375244140625, 0.0281829833984375, -0.023101806640625, -0.0206756591796875, 0.0070037841796875, 0.0200653076171875, 0.034698486328125, 0.021575927734375, -0.0305328369140625, -0.00875091552734375, -0.005626678466796875, -0.05145263671875, 0.006397247314453125, 0.0270233154296875, -0.0164947509765625, 0.000675201416015625, 0.01312255859375, 0.07293701171875, -0.011260986328125, -0.0372314453125, 0.0487060546875, -0.0263214111328125, -0.0325927734375, -0.023345947265625, 0.0121307373046875, 
-0.015289306640625, 0.014801025390625, 0.033905029296875, 0.0131072998046875, 0.0272064208984375, -0.007427215576171875, 0.01541900634765625, 0.00353240966796875, -0.0304412841796875, -0.02593994140625, 0.047271728515625, 0.00141143798828125, -0.040496826171875, 0.031890869140625, -0.032684326171875, -0.06121826171875, 0.03155517578125, 0.055755615234375, 0.068359375, -0.0224456787109375, 0.02923583984375, 0.048614501953125, 0.01399993896484375, 0.048095703125, 0.00786590576171875, -0.004039764404296875, -0.06732177734375, -0.00687408447265625, -0.053497314453125, -0.0244140625, 0.0533447265625, -0.04962158203125, 0.0170135498046875, -0.055938720703125, 0.010772705078125, 0.0292510986328125, -0.0167083740234375, -0.037200927734375, 0.04461669921875, 0.03887939453125, 0.079833984375, -0.08819580078125, 0.0386962890625, 0.047271728515625, -0.025665283203125, -0.071533203125, 0.0013952255249023438, 0.035430908203125, -0.06488037109375, 0.044952392578125, 0.0278167724609375, -0.0012063980102539062, -0.00760650634765625, -0.05499267578125, -0.0352783203125, 0.0247650146484375, 0.0019741058349609375, -0.03411865234375, 0.0100555419921875, 0.00835418701171875, 0.052642822265625, -0.043426513671875, 0.02685546875, 0.0306549072265625, 0.01024627685546875, -0.0147705078125, -0.029632568359375, -0.002574920654296875, -0.049774169921875, 0.0006504058837890625, 0.00318145751953125, -0.036865234375, 0.059417724609375, 0.01947021484375, -0.0311126708984375, 0.005260467529296875, 0.036956787109375, 0.00919342041015625, 0.038330078125, 0.066650390625, 0.072265625, 0.04400634765625, 0.01067352294921875, 0.06402587890625, 0.0179290771484375, 0.0262451171875, 0.09747314453125, 0.01023101806640625, 0.052093505859375, 0.0010814666748046875, -0.0213470458984375, 0.06298828125, 0.032196044921875, -0.00927734375, 0.031890869140625, 0.0231475830078125, -0.01186370849609375, -0.0087127685546875, -0.0202484130859375, -0.01291656494140625, 0.0499267578125, 0.03155517578125, -0.0465087890625, 
-0.033050537109375, 0.01535797119140625, 0.0283966064453125, 0.027679443359375, -0.015228271484375, 0.0465087890625, -0.022186279296875, -0.0174407958984375, 0.046783447265625, 0.009368896484375, 0.060150146484375, -0.0294036865234375, 0.00021636486053466797, 0.0008945465087890625, 0.006786346435546875, -0.0189361572265625, -0.05706787109375, 0.0369873046875, 0.034576416015625, -0.041229248046875, -0.00467681884765625, 0.048370361328125, -0.0264892578125, -0.01373291015625, 0.038909912109375, 0.0009713172912597656, 0.018341064453125, -0.007442474365234375, -0.08245849609375, -0.001865386962890625, 0.021484375, -0.0120086669921875, 0.018707275390625, 0.0230865478515625, -0.01399993896484375, 0.047210693359375, 0.0364990234375, 0.00032973289489746094, 0.0017938613891601562, 0.0282440185546875, 0.055419921875, -0.056793212890625, -0.05108642578125, -0.0640869140625, 0.04150390625, -0.0275421142578125, -0.04766845703125, 0.0721435546875, 0.057769775390625, 0.0546875, 0.01039886474609375, 0.0706787109375, -0.0225677490234375, 0.10821533203125, 0.0004878044128417969, 0.06732177734375, -0.052001953125, -0.001346588134765625, -0.031890869140625, -0.0477294921875, -0.03155517578125, 0.037261962890625, -0.0294036865234375, -0.002635955810546875, 0.042083740234375, 0.064208984375, -0.0183563232421875, 0.03472900390625, 0.0244293212890625, 0.041229248046875, 0.0276947021484375, 0.0163421630859375, 0.058074951171875, -0.0291595458984375, 0.053253173828125, -0.031280517578125, -0.033111572265625, -0.008056640625, -0.05072021484375, -0.07568359375, -0.049713134765625, -0.032012939453125, -0.049530029296875, 0.00635528564453125, 0.0501708984375, 0.049774169921875, -0.081298828125, -0.017822265625, -0.01215362548828125, 0.004428863525390625, 0.0186004638671875, -0.022735595703125, 0.00818634033203125, -0.006069183349609375, -0.0606689453125, 0.005664825439453125, -0.00525665283203125, -0.0006847381591796875, 0.01824951171875, -0.004306793212890625, -0.05279541015625, 
0.0081024169921875, 0.0206756591796875, 0.0245361328125, -0.03509521484375, -0.019317626953125, -0.026092529296875, -0.01540374755859375, 0.0023784637451171875, 0.04296875, -0.0364990234375, 0.029052734375, 0.027069091796875, 0.04937744140625, 0.01419830322265625, 0.00872039794921875, 0.0191497802734375, -0.042327880859375, 0.00864410400390625, 0.023193359375, 0.0165557861328125, 0.02734375, -0.0207977294921875, 0.029815673828125, 0.049224853515625, -0.047088623046875, -0.0604248046875, 0.012176513671875, -0.0712890625, -0.007045745849609375, 0.081298828125, -0.0106658935546875, -0.0308074951171875, -0.032745361328125, -0.0287933349609375, 0.037689208984375, -0.047576904296875, 0.046844482421875, 0.0494384765625, 0.006015777587890625, -0.02557373046875, -0.0235595703125, 0.0416259765625, 0.0293731689453125, -0.048065185546875, 0.016204833984375, 0.033172607421875, 0.0190582275390625, 0.0151519775390625, 0.034423828125, -0.009307861328125, 0.023681640625, -0.0007958412170410156, 0.01593017578125, 0.005413055419921875, -0.01061248779296875, -0.025390625, 0.01352691650390625, -0.0247344970703125, -0.01207733154296875 ] ]
Salesforce/blip2-opt-6.7b
2023-09-13T08:47:16.000Z
[ "transformers", "pytorch", "blip-2", "visual-question-answering", "vision", "image-to-text", "image-captioning", "en", "arxiv:2301.12597", "license:mit", "has_space", "region:us" ]
image-to-text
Salesforce
null
null
Salesforce/blip2-opt-6.7b
49
8,611
transformers
2023-02-07T14:34:39
--- language: en license: mit tags: - vision - image-to-text - image-captioning - visual-question-answering pipeline_tag: image-to-text inference: false --- # BLIP-2, OPT-6.7b, pre-trained only BLIP-2 model, leveraging [OPT-6.7b](https://huggingface.co/facebook/opt-6.7b) (a large language model with 6.7 billion parameters). It was introduced in the paper [BLIP-2: Bootstrapping Language-Image Pre-training with Frozen Image Encoders and Large Language Models](https://arxiv.org/abs/2301.12597) by Li et al. and first released in [this repository](https://github.com/salesforce/LAVIS/tree/main/projects/blip2). Disclaimer: The team releasing BLIP-2 did not write a model card for this model so this model card has been written by the Hugging Face team. ## Model description BLIP-2 consists of 3 models: a CLIP-like image encoder, a Querying Transformer (Q-Former) and a large language model. The authors initialize the weights of the image encoder and large language model from pre-trained checkpoints and keep them frozen while training the Querying Transformer, which is a BERT-like Transformer encoder that maps a set of "query tokens" to query embeddings, which bridge the gap between the embedding space of the image encoder and the large language model. The goal for the model is simply to predict the next text token, giving the query embeddings and the previous text. <img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/transformers/model_doc/blip2_architecture.jpg" alt="drawing" width="600"/> This allows the model to be used for tasks like: - image captioning - visual question answering (VQA) - chat-like conversations by feeding the image and the previous conversation as prompt to the model ## Direct Use and Downstream Use You can use the raw model for conditional text generation given an image and optional text. 
See the [model hub](https://huggingface.co/models?search=Salesforce/blip) to look for fine-tuned versions on a task that interests you. ## Bias, Risks, Limitations, and Ethical Considerations BLIP2-OPT uses off-the-shelf OPT as the language model. It inherits the same risks and limitations as mentioned in Meta's model card. > Like other large language models for which the diversity (or lack thereof) of training > data induces downstream impact on the quality of our model, OPT-175B has limitations in terms > of bias and safety. OPT-175B can also have quality issues in terms of generation diversity and > hallucination. In general, OPT-175B is not immune from the plethora of issues that plague modern > large language models. > BLIP2 is fine-tuned on image-text datasets (e.g. [LAION](https://laion.ai/blog/laion-400-open-dataset/) ) collected from the internet. As a result the model itself is potentially vulnerable to generating equivalently inappropriate content or replicating inherent biases in the underlying data. BLIP2 has not been tested in real world applications. It should not be directly deployed in any applications. Researchers should first carefully assess the safety and fairness of the model in relation to the specific context they’re being deployed within. ### How to use For code examples, we refer to the [documentation](https://huggingface.co/docs/transformers/main/en/model_doc/blip-2#transformers.Blip2ForConditionalGeneration.forward.example).
3,364
[ [ -0.02984619140625, -0.05853271484375, -0.0014438629150390625, 0.0435791015625, -0.025177001953125, -0.007350921630859375, -0.0197296142578125, -0.0693359375, -0.003223419189453125, 0.052947998046875, -0.03302001953125, -0.00653076171875, -0.0455322265625, -0.00403594970703125, -0.0294952392578125, 0.07672119140625, 0.0139312744140625, -0.0007152557373046875, -0.01413726806640625, 0.009857177734375, -0.026336669921875, -0.024017333984375, -0.051116943359375, -0.004451751708984375, 0.00904083251953125, 0.036773681640625, 0.07177734375, 0.0265960693359375, 0.04888916015625, 0.0187530517578125, -0.025360107421875, -0.00046372413635253906, -0.055328369140625, -0.0166473388671875, -0.0144500732421875, -0.052490234375, -0.025543212890625, -0.00238037109375, 0.04296875, 0.05242919921875, 0.0078887939453125, 0.01171875, -0.00920867919921875, 0.04547119140625, -0.036346435546875, 0.0063629150390625, -0.056732177734375, -0.0112762451171875, -0.01983642578125, 0.0233917236328125, -0.046966552734375, -0.011383056640625, 0.0178985595703125, -0.0299835205078125, 0.003986358642578125, 0.0208892822265625, 0.10516357421875, 0.024627685546875, -0.02239990234375, -0.023406982421875, -0.041107177734375, 0.05804443359375, -0.04730224609375, 0.04437255859375, 0.03228759765625, 0.025848388671875, -0.0026721954345703125, -0.052001953125, -0.04443359375, -0.01555633544921875, -0.006969451904296875, 0.0137939453125, -0.0137939453125, 0.00807952880859375, 0.0272979736328125, 0.0261077880859375, -0.046783447265625, 0.0279541015625, -0.048126220703125, -0.028900146484375, 0.04296875, -0.0207977294921875, 0.023284912109375, -0.034912109375, -0.046966552734375, -0.0007076263427734375, -0.05865478515625, 0.00899505615234375, 0.0241546630859375, 0.0220489501953125, -0.038330078125, 0.05963134765625, 0.0018548965454101562, 0.04364013671875, 0.00873565673828125, -0.0030345916748046875, 0.036773681640625, -0.0298614501953125, -0.00568389892578125, -0.0174102783203125, 0.071533203125, 
0.04034423828125, 0.024017333984375, -0.00826263427734375, -0.024017333984375, -0.0003795623779296875, 0.0142822265625, -0.08056640625, -0.00530242919921875, 0.0283355712890625, -0.042572021484375, -0.01177978515625, -0.007663726806640625, -0.0721435546875, -0.00665283203125, -0.002841949462890625, 0.01593017578125, -0.03765869140625, -0.033355712890625, -0.0005850791931152344, -0.0134735107421875, 0.020965576171875, 0.01355743408203125, -0.0694580078125, 0.0135955810546875, 0.05059814453125, 0.0654296875, -0.01194000244140625, -0.02313232421875, -0.0172576904296875, 0.004367828369140625, -0.033111572265625, 0.04876708984375, -0.0292816162109375, -0.01532745361328125, 0.027587890625, 0.0111083984375, 0.019134521484375, -0.04522705078125, 0.0249481201171875, -0.037017822265625, 0.0229339599609375, -0.02044677734375, -0.042724609375, -0.0132598876953125, -0.0072479248046875, -0.048095703125, 0.0859375, 0.0259552001953125, -0.056671142578125, 0.04620361328125, -0.037750244140625, -0.0294952392578125, 0.0182647705078125, 0.001796722412109375, -0.047149658203125, 0.003520965576171875, -0.0026226043701171875, 0.0203094482421875, -0.03155517578125, 0.021148681640625, -0.0293426513671875, -0.019500732421875, 0.0019168853759765625, -0.003757476806640625, 0.070556640625, 0.00971221923828125, -0.037750244140625, -0.0087127685546875, -0.031402587890625, -0.0190887451171875, 0.0171661376953125, -0.0186614990234375, -0.01080322265625, -0.01041412353515625, 0.03204345703125, 0.03033447265625, 0.034942626953125, -0.0452880859375, 0.00725555419921875, -0.04876708984375, 0.026580810546875, 0.0261993408203125, -0.0145416259765625, 0.04656982421875, -0.0095672607421875, 0.03594970703125, 0.016021728515625, 0.0257568359375, -0.0259552001953125, -0.056854248046875, -0.06378173828125, -0.0157623291015625, 0.0300750732421875, 0.057220458984375, -0.05621337890625, 0.03857421875, -0.01381683349609375, -0.045623779296875, -0.0489501953125, 0.0170440673828125, 0.0556640625, 0.022064208984375, 
0.028167724609375, -0.009918212890625, -0.045654296875, -0.0850830078125, 0.0110321044921875, -0.0253143310546875, 0.01045989990234375, 0.0261383056640625, 0.042327880859375, -0.0303497314453125, 0.07098388671875, -0.0221405029296875, -0.013702392578125, -0.037261962890625, -0.007038116455078125, -0.004024505615234375, 0.042999267578125, 0.070556640625, -0.0758056640625, -0.025726318359375, -0.01348114013671875, -0.050048828125, -0.00843048095703125, -0.012176513671875, -0.02642822265625, 0.05224609375, 0.04083251953125, -0.07501220703125, 0.02587890625, 0.059417724609375, -0.018951416015625, 0.033477783203125, 0.016387939453125, -0.02191162109375, -0.0869140625, 0.0211639404296875, 0.01309967041015625, -0.022613525390625, -0.0426025390625, 0.005157470703125, 0.01355743408203125, -0.017669677734375, -0.040802001953125, 0.06427001953125, -0.03131103515625, -0.022552490234375, -0.01116180419921875, -0.0004482269287109375, 0.0015192031860351562, 0.0264129638671875, 0.0142669677734375, 0.064697265625, 0.054107666015625, -0.034912109375, 0.0096588134765625, 0.046142578125, -0.0217437744140625, 0.02301025390625, -0.05999755859375, 0.00629425048828125, -0.01367950439453125, 0.0185089111328125, -0.078369140625, -0.01763916015625, 0.0178070068359375, -0.044647216796875, 0.03765869140625, -0.01103973388671875, -0.04803466796875, -0.059417724609375, -0.01551055908203125, 0.021148681640625, 0.059814453125, -0.044464111328125, 0.027496337890625, 0.043487548828125, -0.01593017578125, -0.044097900390625, -0.07635498046875, 0.00849151611328125, 0.0012035369873046875, -0.053985595703125, 0.033355712890625, -0.005374908447265625, 0.0092315673828125, 0.0012493133544921875, 0.0258941650390625, -0.004421234130859375, -0.00991058349609375, 0.0233612060546875, 0.01546478271484375, -0.00443267822265625, 0.007320404052734375, 0.019378662109375, -0.002231597900390625, -0.006885528564453125, -0.00455474853515625, 0.0535888671875, 0.003917694091796875, 0.0015163421630859375, 
-0.038482666015625, 0.0296783447265625, 0.041595458984375, -0.0230255126953125, 0.05816650390625, 0.045745849609375, -0.037261962890625, -0.01381683349609375, -0.0491943359375, -0.01416778564453125, -0.03912353515625, 0.047760009765625, -0.0211181640625, -0.06402587890625, 0.03759765625, 0.0038890838623046875, 0.01467132568359375, 0.019317626953125, 0.0625, 0.0054473876953125, 0.07086181640625, 0.057861328125, -0.0065765380859375, 0.04681396484375, -0.020782470703125, 0.01226806640625, -0.053253173828125, -0.03057861328125, -0.025115966796875, -0.0019779205322265625, -0.04693603515625, -0.029022216796875, 0.0011844635009765625, 0.0267791748046875, -0.0159454345703125, 0.028564453125, -0.0284576416015625, 0.01029205322265625, 0.047760009765625, 0.0037841796875, -0.016021728515625, -0.000339508056640625, 0.0038928985595703125, 0.004039764404296875, -0.048126220703125, -0.03265380859375, 0.073486328125, 0.05389404296875, 0.046112060546875, 0.008270263671875, 0.0364990234375, -0.01024627685546875, 0.0245361328125, -0.06768798828125, 0.0433349609375, -0.027862548828125, -0.0758056640625, -0.0160675048828125, -0.0290069580078125, -0.064453125, 0.0012216567993164062, -0.0161895751953125, -0.05377197265625, 0.0037860870361328125, 0.03424072265625, -0.002044677734375, 0.0057373046875, -0.0665283203125, 0.0721435546875, -0.02911376953125, -0.028045654296875, 0.004077911376953125, -0.05780029296875, 0.034027099609375, -0.0083465576171875, 0.00516510009765625, 0.0039825439453125, -0.00640869140625, 0.047119140625, -0.02667236328125, 0.0718994140625, -0.02679443359375, 0.00576019287109375, 0.031402587890625, 0.000244140625, 0.0241851806640625, -0.00513458251953125, -0.007236480712890625, 0.028900146484375, -0.00809478759765625, -0.035675048828125, -0.025604248046875, 0.0181732177734375, -0.060546875, -0.031646728515625, -0.00579833984375, -0.028106689453125, -0.0105743408203125, 0.032623291015625, 0.04742431640625, 0.0261383056640625, -0.0034122467041015625, 0.0214996337890625, 
0.026580810546875, -0.0479736328125, 0.03271484375, 0.021209716796875, -0.04132080078125, -0.0299835205078125, 0.056671142578125, -0.0027790069580078125, 0.035919189453125, 0.02642822265625, 0.006488800048828125, -0.044769287109375, -0.03662109375, -0.045440673828125, 0.04241943359375, -0.040618896484375, -0.0308380126953125, -0.04913330078125, -0.01495361328125, -0.041473388671875, -0.00215911865234375, -0.05145263671875, -0.016143798828125, -0.026123046875, -0.0038356781005859375, 0.0316162109375, 0.037017822265625, -0.001522064208984375, 0.036163330078125, -0.046783447265625, 0.0286865234375, 0.00756072998046875, 0.039886474609375, -0.01248931884765625, -0.03851318359375, -0.0117034912109375, 0.02215576171875, -0.013336181640625, -0.062042236328125, 0.0187530517578125, 0.025360107421875, 0.033203125, 0.0330810546875, -0.0202178955078125, 0.032501220703125, -0.046875, 0.06573486328125, 0.022064208984375, -0.07269287109375, 0.051422119140625, -0.025787353515625, 0.0120086669921875, 0.046630859375, 0.0445556640625, -0.025299072265625, -0.033294677734375, -0.03558349609375, -0.0634765625, 0.053985595703125, 0.01537322998046875, 0.028839111328125, -0.007083892822265625, 0.04229736328125, 0.001644134521484375, 0.026702880859375, -0.064453125, -0.019073486328125, -0.0345458984375, -0.0103607177734375, -0.00672149658203125, -0.0467529296875, 0.015716552734375, -0.011810302734375, 0.035003662109375, -0.0034008026123046875, 0.034088134765625, -0.002964019775390625, -0.0186767578125, -0.01922607421875, -0.02392578125, 0.034759521484375, 0.03387451171875, -0.0161590576171875, -0.0164947509765625, -0.007404327392578125, -0.03472900390625, -0.027099609375, 0.015777587890625, -0.029815673828125, 0.0004324913024902344, 0.0255126953125, 0.08599853515625, 0.014556884765625, -0.061737060546875, 0.062164306640625, 0.0196533203125, -0.01413726806640625, -0.02069091796875, -0.0100250244140625, 0.0035552978515625, 0.0023860931396484375, 0.0199432373046875, -0.0091552734375, 
0.01480865478515625, -0.048126220703125, 0.03289794921875, 0.04132080078125, -0.02679443359375, -0.034820556640625, 0.049774169921875, 0.0260467529296875, -0.0260467529296875, 0.04644775390625, -0.041748046875, -0.0582275390625, 0.047454833984375, 0.065185546875, 0.037353515625, -0.026580810546875, 0.0293426513671875, 0.033538818359375, 0.043212890625, -0.007602691650390625, 0.033203125, 0.0293731689453125, -0.0640869140625, -0.0302581787109375, -0.050933837890625, -0.03179931640625, 0.0158843994140625, -0.04522705078125, 0.044464111328125, -0.0364990234375, -0.0217437744140625, 0.004604339599609375, -0.007183074951171875, -0.053985595703125, 0.02392578125, 0.0231475830078125, 0.076904296875, -0.07354736328125, 0.03338623046875, 0.06610107421875, -0.046112060546875, -0.0582275390625, -0.004276275634765625, -0.004474639892578125, -0.06231689453125, 0.0458984375, 0.035369873046875, 0.028839111328125, -0.0106353759765625, -0.056549072265625, -0.06884765625, 0.058624267578125, 0.0307769775390625, -0.045318603515625, -0.00803375244140625, 0.017303466796875, 0.047760009765625, -0.0175628662109375, 0.024383544921875, 0.0255126953125, 0.0284271240234375, 0.026763916015625, -0.088623046875, 0.012603759765625, -0.01947021484375, 0.01202392578125, 0.00902557373046875, -0.07769775390625, 0.0592041015625, -0.0197601318359375, -0.0103912353515625, 0.01611328125, 0.0447998046875, 0.02008056640625, 0.0031108856201171875, 0.03668212890625, 0.04156494140625, 0.040069580078125, -0.006168365478515625, 0.0894775390625, -0.02978515625, 0.0321044921875, 0.06170654296875, -0.01349639892578125, 0.071533203125, 0.0230255126953125, -0.004543304443359375, 0.024383544921875, 0.055267333984375, -0.02471923828125, 0.035614013671875, -0.004016876220703125, 0.0261077880859375, -0.008026123046875, -0.0182037353515625, -0.026336669921875, 0.0572509765625, 0.03607177734375, -0.0250396728515625, 0.005893707275390625, 0.004764556884765625, 0.0230255126953125, -0.02294921875, -0.00884246826171875, 
0.05377197265625, 0.003902435302734375, -0.06671142578125, 0.045074462890625, -0.00946044921875, 0.06329345703125, -0.0260467529296875, 0.006801605224609375, -0.0186614990234375, -0.000152587890625, -0.015625, -0.060333251953125, 0.0259246826171875, -0.0083465576171875, -0.00019371509552001953, -0.00012540817260742188, 0.0487060546875, -0.038848876953125, -0.059356689453125, 0.037109375, 0.0235443115234375, 0.01296234130859375, 0.00846099853515625, -0.07952880859375, 0.022125244140625, -0.004253387451171875, -0.0260162353515625, 0.00335693359375, 0.029998779296875, 0.0035648345947265625, 0.0579833984375, 0.04290771484375, 0.01219940185546875, 0.005859375, 0.01480865478515625, 0.070556640625, -0.0321044921875, -0.040069580078125, -0.035247802734375, 0.049774169921875, -0.01407623291015625, -0.035675048828125, 0.038177490234375, 0.058685302734375, 0.056640625, 0.0025615692138671875, 0.054168701171875, -0.01291656494140625, 0.0447998046875, -0.04083251953125, 0.046966552734375, -0.0626220703125, -0.01007843017578125, -0.03582763671875, -0.0738525390625, -0.023895263671875, 0.046417236328125, -0.0178985595703125, 0.024261474609375, 0.043121337890625, 0.070556640625, 0.0003745555877685547, -0.01465606689453125, 0.00505828857421875, 0.029510498046875, 0.026275634765625, 0.0306854248046875, 0.054901123046875, -0.03350830078125, 0.04290771484375, -0.0247650146484375, -0.035186767578125, -0.0281219482421875, -0.0499267578125, -0.08978271484375, -0.05108642578125, -0.0230712890625, -0.03192138671875, 0.00594329833984375, 0.0504150390625, 0.0718994140625, -0.05157470703125, -0.0080718994140625, -0.0088958740234375, -0.00933837890625, 0.0216064453125, -0.017669677734375, 0.01239776611328125, -0.0275421142578125, -0.061065673828125, -0.00655364990234375, 0.02020263671875, 0.0175933837890625, -0.02508544921875, 0.0102691650390625, -0.00672149658203125, -0.005374908447265625, 0.049102783203125, 0.0194091796875, -0.05108642578125, -0.02679443359375, 0.0011663436889648438, 
-0.00940704345703125, -0.0037860870361328125, 0.02783203125, -0.03302001953125, 0.0229339599609375, 0.0249786376953125, 0.04388427734375, 0.04376220703125, 0.00274658203125, 0.035186767578125, -0.04742431640625, 0.0526123046875, 0.01290130615234375, 0.02923583984375, 0.0439453125, -0.0322265625, 0.0261077880859375, 0.0024051666259765625, -0.03717041015625, -0.07037353515625, 0.032562255859375, -0.08892822265625, -0.020050048828125, 0.12109375, 0.001995086669921875, -0.01525115966796875, 0.00609588623046875, -0.019744873046875, 0.0110931396484375, -0.0345458984375, 0.0457763671875, 0.03253173828125, 0.017791748046875, -0.03765869140625, -0.03607177734375, 0.051513671875, 0.0307464599609375, -0.061279296875, -0.007904052734375, 0.0304718017578125, 0.0158538818359375, 0.0208740234375, 0.037567138671875, -0.010650634765625, 0.0233154296875, 0.00009453296661376953, 0.0162811279296875, 0.00640106201171875, -0.029144287109375, -0.006534576416015625, -0.00731658935546875, 0.00580596923828125, -0.004337310791015625 ] ]
Yntec/BrandiMilne
2023-10-14T05:37:53.000Z
[ "diffusers", "Painting", "oskarsson", "MagicArt35", "stable-diffusion", "stable-diffusion-diffusers", "text-to-image", "license:creativeml-openrail-m", "endpoints_compatible", "has_space", "diffusers:StableDiffusionPipeline", "region:us" ]
text-to-image
Yntec
null
null
Yntec/BrandiMilne
1
8,593
diffusers
2023-10-14T03:16:51
--- license: creativeml-openrail-m library_name: diffusers pipeline_tag: text-to-image tags: - Painting - oskarsson - MagicArt35 - stable-diffusion - stable-diffusion-diffusers - diffusers - text-to-image --- # Brandi Milne PhotoMovieX by MagicArt35 with the BrandiMilne LoRA by oskarsson baked in. Sample and prompt: ![Sample](https://cdn-uploads.huggingface.co/production/uploads/63239b8370edc53f51cd5d42/C8owfojU0cpQgzpawLhn1.png) PRETTY CUTE GIRL BY ROSSDRAWS. An extradimensional creature buying donuts. animation. Original pages: https://civitai.com/models/98367?modelVersionId=105195 (BrandiMilne) https://civitai.com/models/94687?modelVersionId=101000 (PhotoMovieX)
682
[ [ -0.004329681396484375, -0.02117919921875, 0.006076812744140625, -0.0048370361328125, -0.0011110305786132812, -0.0082550048828125, 0.02581787109375, -0.0333251953125, 0.06414794921875, 0.044586181640625, -0.050323486328125, -0.0185699462890625, -0.03533935546875, 0.0018787384033203125, -0.037506103515625, 0.037322998046875, 0.0016155242919921875, 0.0208587646484375, 0.00405120849609375, 0.0175018310546875, -0.01271820068359375, 0.0011749267578125, -0.059326171875, -0.0017938613891601562, 0.0211029052734375, 0.06817626953125, 0.06170654296875, -0.0180206298828125, -0.0030498504638671875, 0.0214996337890625, -0.00299835205078125, 0.0028438568115234375, -0.03662109375, 0.00962066650390625, -0.0193023681640625, -0.06622314453125, -0.031829833984375, 0.0116119384765625, 0.0243377685546875, 0.033355712890625, -0.0099029541015625, 0.0165557861328125, -0.020751953125, 0.035308837890625, -0.04412841796875, -0.010986328125, -0.0246124267578125, 0.00749969482421875, -0.005706787109375, 0.043426513671875, -0.0183563232421875, -0.024261474609375, -0.02935791015625, -0.07611083984375, 0.00981903076171875, 0.0038585662841796875, 0.09344482421875, -0.01076507568359375, -0.02996826171875, -0.035858154296875, -0.037628173828125, 0.061798095703125, -0.03936767578125, 0.023223876953125, 0.036346435546875, 0.035888671875, -0.026947021484375, -0.059234619140625, -0.039031982421875, 0.0096435546875, 0.0075836181640625, 0.020721435546875, -0.04388427734375, -0.07965087890625, 0.0225372314453125, 0.022064208984375, -0.041107177734375, -0.00016307830810546875, -0.040771484375, 0.02655029296875, 0.053558349609375, -0.007617950439453125, 0.04852294921875, -0.01105499267578125, -0.0183258056640625, 0.00041675567626953125, -0.03741455078125, 0.003841400146484375, 0.04541015625, -0.006137847900390625, -0.01364898681640625, 0.05657958984375, 0.002361297607421875, 0.0291748046875, 0.0209197998046875, -0.00023853778839111328, 0.033843994140625, -0.006866455078125, -0.0267791748046875, 
-0.003467559814453125, 0.051910400390625, 0.05987548828125, 0.0291748046875, 0.01120758056640625, -0.01702880859375, -0.00556182861328125, 0.020172119140625, -0.0682373046875, -0.0362548828125, 0.005035400390625, -0.0189056396484375, -0.032073974609375, 0.013763427734375, -0.04742431640625, 0.01151275634765625, 0.026092529296875, -0.002803802490234375, -0.04986572265625, -0.040313720703125, 0.02117919921875, -0.0099029541015625, 0.02313232421875, 0.0120086669921875, -0.0714111328125, 0.0199127197265625, 0.0212860107421875, 0.0157928466796875, 0.025482177734375, 0.021881103515625, -0.035980224609375, 0.026947021484375, -0.06695556640625, 0.054534912109375, -0.025360107421875, -0.047821044921875, -0.010528564453125, 0.03961181640625, -0.0017948150634765625, -0.06573486328125, 0.068359375, -0.06390380859375, -0.00315093994140625, -0.0251617431640625, -0.03485107421875, -0.020538330078125, 0.005863189697265625, -0.055816650390625, 0.060943603515625, 0.0157928466796875, -0.043121337890625, 0.05804443359375, -0.063720703125, -0.00958251953125, 0.0167388916015625, -0.0020999908447265625, -0.029022216796875, 0.031280517578125, 0.0167388916015625, 0.01556396484375, -0.0032444000244140625, -0.00865936279296875, -0.029754638671875, 0.0042266845703125, 0.061920166015625, 0.00881195068359375, 0.0504150390625, 0.0714111328125, 0.00603485107421875, -0.005321502685546875, -0.060760498046875, 0.003452301025390625, 0.060821533203125, 0.0218048095703125, -0.023651123046875, -0.051727294921875, 0.02996826171875, 0.004405975341796875, 0.0225982666015625, -0.04071044921875, 0.013580322265625, -0.016632080078125, 0.011993408203125, 0.0210723876953125, -0.00838470458984375, 0.01018524169921875, -0.041717529296875, 0.0611572265625, 0.007190704345703125, 0.044403076171875, -0.0019445419311523438, -0.032196044921875, -0.06280517578125, -0.0338134765625, 0.02911376953125, 0.0129547119140625, -0.040374755859375, 0.014862060546875, 0.00873565673828125, -0.041839599609375, -0.033477783203125, 
0.0064544677734375, 0.01056671142578125, 0.0075531005859375, -0.0228729248046875, -0.040863037109375, -0.01485443115234375, -0.08636474609375, 0.0016584396362304688, 0.01116180419921875, -0.0238494873046875, 0.004302978515625, 0.0291748046875, -0.00997161865234375, 0.05072021484375, -0.06146240234375, -0.012603759765625, -0.01959228515625, 0.00038909912109375, 0.09100341796875, 0.042266845703125, 0.097412109375, -0.08258056640625, -0.0802001953125, -0.00635528564453125, -0.03704833984375, -0.014556884765625, 0.0341796875, -0.00328826904296875, -0.0037860870361328125, 0.0293121337890625, -0.043701171875, 0.032135009765625, 0.008819580078125, -0.038848876953125, 0.055389404296875, -0.02392578125, 0.050872802734375, -0.09307861328125, 0.007694244384765625, 0.00006401538848876953, -0.0253143310546875, -0.0293121337890625, 0.0333251953125, -0.006717681884765625, -0.026885986328125, -0.0748291015625, 0.05780029296875, -0.0472412109375, -0.001941680908203125, -0.0143585205078125, -0.00556182861328125, 0.038726806640625, 0.0012912750244140625, -0.005367279052734375, 0.0557861328125, 0.06890869140625, -0.040313720703125, 0.03851318359375, 0.04193115234375, -0.051177978515625, 0.07684326171875, -0.050933837890625, 0.004878997802734375, 0.003814697265625, -0.014984130859375, -0.07550048828125, -0.059051513671875, 0.034698486328125, -0.034271240234375, -0.005435943603515625, -0.0123291015625, -0.07550048828125, -0.0212860107421875, -0.016021728515625, 0.038330078125, 0.04205322265625, -0.057098388671875, 0.029998779296875, 0.003963470458984375, 0.006542205810546875, -0.00463104248046875, -0.070068359375, 0.00786590576171875, -0.033233642578125, -0.04931640625, 0.026336669921875, -0.0228729248046875, -0.032928466796875, -0.004913330078125, -0.00007849931716918945, -0.0286865234375, -0.028045654296875, 0.0304718017578125, 0.0095977783203125, -0.00974273681640625, -0.010894775390625, -0.0134124755859375, 0.00513458251953125, -0.0157470703125, 0.0110931396484375, 
0.036346435546875, -0.0210418701171875, -0.0268707275390625, -0.07769775390625, 0.033935546875, 0.048187255859375, 0.02740478515625, 0.05072021484375, 0.042388916015625, -0.026275634765625, 0.006839752197265625, -0.04827880859375, -0.00637054443359375, -0.036773681640625, -0.0215301513671875, -0.053741455078125, 0.0039825439453125, 0.046630859375, 0.0029850006103515625, -0.0404052734375, 0.04571533203125, -0.00203704833984375, -0.032684326171875, 0.040130615234375, 0.03485107421875, 0.013336181640625, 0.0270233154296875, -0.0574951171875, 0.0105133056640625, -0.035980224609375, -0.01806640625, -0.017852783203125, -0.01548004150390625, -0.0186920166015625, -0.0218658447265625, 0.0242156982421875, 0.025421142578125, -0.02935791015625, 0.045501708984375, -0.01187896728515625, 0.061798095703125, 0.0195159912109375, 0.01837158203125, 0.024993896484375, 0.0008535385131835938, -0.026611328125, -0.0220947265625, -0.039642333984375, -0.03662109375, 0.0318603515625, 0.016754150390625, 0.05718994140625, 0.0416259765625, 0.04449462890625, 0.0036640167236328125, 0.006595611572265625, -0.050201416015625, 0.0596923828125, -0.0162200927734375, -0.0714111328125, 0.00957489013671875, -0.0009570121765136719, -0.06915283203125, -0.0086822509765625, -0.01885986328125, -0.054718017578125, 0.01233673095703125, 0.00011354684829711914, -0.037994384765625, 0.0205535888671875, -0.0791015625, 0.05255126953125, -0.0289764404296875, -0.035797119140625, -0.006366729736328125, -0.01035308837890625, 0.0219268798828125, 0.0034885406494140625, 0.038055419921875, -0.02569580078125, 0.005008697509765625, 0.056182861328125, -0.0279541015625, 0.06298828125, 0.0175933837890625, -0.0175323486328125, 0.0156097412109375, 0.0265655517578125, 0.00949859619140625, 0.028045654296875, -0.0197296142578125, -0.023712158203125, 0.0073089599609375, -0.0458984375, -0.0390625, 0.075439453125, -0.037933349609375, -0.0214691162109375, -0.0254974365234375, -0.00482177734375, 0.006961822509765625, 0.01898193359375, 
0.048370361328125, 0.0276336669921875, -0.043701171875, -0.01617431640625, 0.019775390625, -0.00264739990234375, 0.057098388671875, 0.01131439208984375, -0.06951904296875, -0.03851318359375, 0.039947509765625, -0.00452423095703125, 0.029754638671875, 0.00782012939453125, 0.0292205810546875, -0.026092529296875, -0.000682830810546875, -0.00917816162109375, 0.04718017578125, -0.05999755859375, -0.01494598388671875, -0.01462554931640625, -0.01480865478515625, -0.020477294921875, -0.0499267578125, -0.048614501953125, -0.03533935546875, -0.05804443359375, -0.0031642913818359375, 0.056304931640625, 0.0660400390625, -0.007526397705078125, 0.04107666015625, -0.050140380859375, 0.020843505859375, 0.043121337890625, 0.01467132568359375, -0.0084075927734375, -0.030303955078125, 0.0201873779296875, -0.0210113525390625, -0.02764892578125, -0.038665771484375, 0.06414794921875, -0.003154754638671875, 0.03057861328125, 0.048583984375, 0.00237274169921875, 0.0227813720703125, -0.00498199462890625, 0.06640625, 0.063720703125, -0.054412841796875, 0.034027099609375, -0.0357666015625, 0.0384521484375, 0.04388427734375, 0.0450439453125, -0.045745849609375, 0.0177764892578125, -0.072265625, -0.047637939453125, 0.03228759765625, 0.030426025390625, 0.0229034423828125, 0.006923675537109375, 0.016815185546875, 0.0233612060546875, 0.0231170654296875, -0.05560302734375, -0.0007224082946777344, -0.0165557861328125, -0.006229400634765625, 0.00958251953125, -0.0307769775390625, -0.00768280029296875, -0.01763916015625, 0.050537109375, -0.0031490325927734375, 0.00707244873046875, -0.000010311603546142578, 0.01544952392578125, -0.0176849365234375, 0.0096588134765625, 0.02789306640625, 0.05816650390625, -0.035888671875, -0.0171966552734375, -0.0022563934326171875, -0.02215576171875, 0.005298614501953125, -0.04974365234375, -0.01003265380859375, 0.0091705322265625, 0.00926971435546875, 0.0546875, 0.035186767578125, -0.05194091796875, 0.036834716796875, -0.0157470703125, -0.0074462890625, 
-0.071044921875, 0.038330078125, 0.0123443603515625, 0.0278778076171875, 0.020172119140625, -0.004547119140625, 0.0158233642578125, -0.048828125, 0.01322174072265625, 0.032562255859375, -0.01224517822265625, -0.045989990234375, 0.06292724609375, -0.006717681884765625, -0.039947509765625, 0.038360595703125, -0.0189056396484375, -0.004398345947265625, 0.06182861328125, 0.041351318359375, 0.06085205078125, -0.039306640625, 0.039093017578125, 0.0506591796875, -0.02325439453125, 0.0116119384765625, 0.054473876953125, 0.0256500244140625, -0.058380126953125, 0.031829833984375, -0.0419921875, -0.02398681640625, 0.0201416015625, -0.0309600830078125, 0.0579833984375, -0.0307769775390625, -0.0086822509765625, 0.007965087890625, -0.0181732177734375, -0.036224365234375, 0.039031982421875, 0.00042510032653808594, 0.0826416015625, -0.062225341796875, 0.06402587890625, 0.0345458984375, -0.0340576171875, -0.03955078125, 0.007480621337890625, 0.0017042160034179688, -0.05657958984375, 0.07476806640625, 0.019775390625, -0.01024627685546875, -0.0250701904296875, -0.061920166015625, -0.057647705078125, 0.04412841796875, 0.0021610260009765625, -0.049163818359375, 0.006298065185546875, -0.0034847259521484375, 0.0244293212890625, -0.054718017578125, 0.0255889892578125, 0.0259246826171875, 0.03021240234375, 0.08734130859375, -0.056854248046875, -0.0400390625, -0.037139892578125, 0.010284423828125, -0.018707275390625, -0.055023193359375, 0.0364990234375, -0.0310516357421875, -0.005001068115234375, 0.047698974609375, 0.05987548828125, 0.01153564453125, 0.009735107421875, 0.0635986328125, 0.05548095703125, 0.01537322998046875, 0.02447509765625, 0.05548095703125, 0.0227508544921875, 0.0171661376953125, 0.07037353515625, -0.029022216796875, 0.04962158203125, 0.0231170654296875, -0.02764892578125, 0.0160369873046875, 0.07427978515625, -0.01351165771484375, 0.0621337890625, 0.0175018310546875, -0.0230255126953125, -0.01221466064453125, -0.034332275390625, 0.0008502006530761719, 0.0249481201171875, 
0.024993896484375, -0.0098876953125, -0.0197601318359375, 0.0191192626953125, 0.00768280029296875, 0.016754150390625, -0.01488494873046875, 0.0247955322265625, 0.028717041015625, -0.0242919921875, 0.05743408203125, -0.0262908935546875, 0.035430908203125, -0.04620361328125, 0.002593994140625, -0.01386260986328125, 0.00582122802734375, -0.0243377685546875, -0.06524658203125, 0.038848876953125, -0.00045490264892578125, -0.0081787109375, -0.01441192626953125, 0.024871826171875, -0.0034618377685546875, -0.08074951171875, 0.02490234375, -0.0106964111328125, 0.02484130859375, 0.038909912109375, -0.066162109375, 0.02569580078125, 0.00463104248046875, -0.0014181137084960938, 0.0009303092956542969, 0.006290435791015625, 0.0164337158203125, 0.0335693359375, 0.019805908203125, -0.0205078125, -0.00933074951171875, 0.0159912109375, 0.05218505859375, -0.042572021484375, -0.044403076171875, -0.0396728515625, 0.02978515625, -0.023834228515625, -0.010955810546875, 0.057952880859375, 0.058502197265625, 0.04718017578125, -0.05609130859375, 0.0158233642578125, -0.00885009765625, 0.032989501953125, -0.051605224609375, 0.05706787109375, -0.078125, -0.00888824462890625, -0.02252197265625, -0.046844482421875, -0.0242156982421875, 0.0305328369140625, -0.0092926025390625, 0.006866455078125, 0.028045654296875, 0.07275390625, -0.052581787109375, -0.006290435791015625, 0.043426513671875, 0.019195556640625, 0.025604248046875, 0.00780487060546875, 0.0679931640625, -0.0305328369140625, -0.00494384765625, 0.0014600753784179688, -0.0015325546264648438, -0.025115966796875, -0.08203125, -0.06158447265625, -0.042724609375, -0.054931640625, 0.0032329559326171875, -0.032257080078125, 0.063232421875, 0.07208251953125, -0.0699462890625, -0.0309600830078125, 0.025238037109375, -0.0027523040771484375, 0.00568389892578125, -0.01154327392578125, 0.0020122528076171875, 0.0222930908203125, -0.08343505859375, 0.045562744140625, 0.0013446807861328125, 0.044921875, -0.0005621910095214844, 0.0279998779296875, 
-0.0250091552734375, 0.02447509765625, 0.004108428955078125, 0.005535125732421875, -0.040130615234375, 0.01123809814453125, -0.017822265625, 0.0170440673828125, 0.016815185546875, 0.0261383056640625, 0.0028858184814453125, 0.0101318359375, 0.0264892578125, -0.01025390625, 0.054718017578125, -0.0212249755859375, 0.018341064453125, -0.04290771484375, 0.0247344970703125, -0.0041046142578125, 0.0408935546875, 0.0323486328125, -0.043853759765625, 0.035369873046875, 0.0224609375, -0.0181884765625, -0.027313232421875, 0.039459228515625, -0.08544921875, -0.0328369140625, 0.0721435546875, 0.005214691162109375, -0.056976318359375, 0.022216796875, -0.027557373046875, -0.03485107421875, -0.033599853515625, 0.00530242919921875, 0.056427001953125, -0.01087188720703125, -0.0188446044921875, -0.046417236328125, -0.005840301513671875, 0.0272979736328125, -0.03265380859375, -0.0301361083984375, 0.023406982421875, 0.060943603515625, 0.03692626953125, 0.036285400390625, -0.01461029052734375, -0.0111236572265625, 0.0013751983642578125, 0.018157958984375, 0.029510498046875, -0.0176544189453125, 0.0244598388671875, 0.00909423828125, 0.02740478515625, -0.047149658203125 ] ]
zhyemmmm/ToonYou
2023-06-06T07:23:13.000Z
[ "diffusers", "text-to-image", "license:creativeml-openrail-m", "endpoints_compatible", "has_space", "diffusers:StableDiffusionPipeline", "region:us" ]
text-to-image
zhyemmmm
null
null
zhyemmmm/ToonYou
3
8,591
diffusers
2023-06-06T07:05:30
--- license: creativeml-openrail-m library_name: diffusers pipeline_tag: text-to-image --- use to study
103
[ [ -0.0235595703125, -0.0229034423828125, 0.022705078125, -0.00255584716796875, -0.03204345703125, -0.0260009765625, -0.0022869110107421875, -0.00891876220703125, 0.036407470703125, 0.0189361572265625, -0.0052032470703125, -0.00823974609375, -0.00820159912109375, 0.006885528564453125, -0.039031982421875, 0.0443115234375, -0.003459930419921875, 0.048675537109375, -0.00556182861328125, -0.012969970703125, -0.04571533203125, -0.0188140869140625, -0.0775146484375, -0.0028533935546875, 0.053985595703125, 0.06292724609375, 0.04193115234375, -0.0169219970703125, 0.050750732421875, 0.041015625, 0.00873565673828125, -0.0106353759765625, -0.04248046875, 0.0098724365234375, -0.025665283203125, -0.07733154296875, 0.0018453598022460938, 0.0201263427734375, 0.02337646484375, 0.08453369140625, 0.005031585693359375, 0.041107177734375, -0.01224517822265625, 0.06280517578125, -0.032562255859375, 0.0179595947265625, 0.0211639404296875, 0.0298309326171875, -0.02630615234375, -0.02099609375, -0.0178375244140625, -0.058502197265625, 0.037628173828125, -0.06512451171875, -0.01111602783203125, 0.006011962890625, 0.03912353515625, 0.004619598388671875, -0.07391357421875, -0.0167388916015625, -0.029205322265625, 0.060302734375, -0.014892578125, 0.0625, 0.0390625, 0.038665771484375, -0.00716400146484375, -0.074951171875, -0.016845703125, -0.0193634033203125, -0.01837158203125, 0.035308837890625, 0.034027099609375, 0.0048675537109375, 0.01316070556640625, 0.061553955078125, -0.03466796875, -0.040618896484375, -0.0675048828125, -0.0036907196044921875, 0.029571533203125, 0.01351165771484375, 0.0716552734375, -0.006359100341796875, -0.01458740234375, -0.00719451904296875, -0.056427001953125, 0.0158843994140625, 0.01035308837890625, 0.04022216796875, 0.006740570068359375, 0.05328369140625, 0.01296234130859375, 0.06201171875, -0.02618408203125, -0.00481414794921875, 0.026397705078125, 0.002162933349609375, -0.040924072265625, -0.004436492919921875, -0.00872802734375, 0.030609130859375, 
0.033111572265625, -0.00162506103515625, 0.0133514404296875, -0.0203704833984375, 0.037811279296875, -0.058319091796875, -0.05267333984375, 0.0038738250732421875, -0.034332275390625, -0.00685882568359375, -0.0007824897766113281, -0.059417724609375, 0.009063720703125, -0.047637939453125, 0.03192138671875, -0.0181427001953125, -0.0148162841796875, 0.0304718017578125, -0.04010009765625, -0.006866455078125, 0.0272979736328125, -0.0675048828125, 0.03607177734375, -0.01302337646484375, 0.07928466796875, 0.006671905517578125, -0.037628173828125, -0.029388427734375, 0.0161285400390625, -0.036529541015625, 0.03997802734375, -0.00489044189453125, -0.08465576171875, 0.01395416259765625, 0.01056671142578125, -0.018646240234375, -0.0241241455078125, 0.054595947265625, -0.02593994140625, 0.0288543701171875, -0.0251007080078125, -0.029205322265625, -0.03582763671875, 0.00014841556549072266, -0.07098388671875, 0.07647705078125, 0.02825927734375, -0.04388427734375, 0.0232086181640625, -0.054443359375, -0.05419921875, 0.0210723876953125, -0.01279449462890625, -0.01432037353515625, 0.0338134765625, -0.02960205078125, -0.00567626953125, 0.053741455078125, 0.00286865234375, -0.004932403564453125, -0.0592041015625, 0.0081329345703125, 0.005229949951171875, 0.09063720703125, 0.01421356201171875, -0.0106964111328125, -0.01214599609375, -0.039306640625, -0.000675201416015625, 0.01812744140625, -0.041717529296875, -0.0699462890625, 0.01195526123046875, 0.0151519775390625, -0.04669189453125, -0.012542724609375, -0.0440673828125, 0.015838623046875, -0.0259857177734375, 0.014984130859375, 0.0080413818359375, -0.004016876220703125, 0.01397705078125, -0.0218658447265625, 0.019134521484375, -0.0182037353515625, -0.003467559814453125, 0.0279388427734375, -0.003753662109375, -0.07147216796875, 0.012237548828125, 0.032867431640625, 0.03546142578125, -0.058258056640625, -0.01514434814453125, 0.044403076171875, -0.032135009765625, -0.034637451171875, 0.0037937164306640625, -0.002864837646484375, 
0.02484130859375, 0.0186004638671875, -0.006526947021484375, -0.045196533203125, -0.0290069580078125, -0.028472900390625, -0.0133209228515625, -0.003875732421875, 0.03179931640625, 0.046295166015625, -0.013824462890625, 0.032501220703125, -0.055755615234375, -0.04815673828125, 0.0009455680847167969, -0.00600433349609375, -0.02252197265625, 0.030426025390625, 0.045013427734375, -0.07275390625, -0.046173095703125, -0.0037689208984375, -0.04754638671875, 0.01416778564453125, -0.013214111328125, -0.0261077880859375, -0.005062103271484375, 0.02081298828125, 0.0218048095703125, 0.037322998046875, 0.00433349609375, -0.0616455078125, 0.00833892822265625, -0.022247314453125, 0.0209197998046875, -0.042449951171875, 0.0196990966796875, 0.0025424957275390625, -0.037109375, -0.0323486328125, -0.00199127197265625, -0.00034618377685546875, -0.04345703125, -0.013885498046875, 0.036895751953125, -0.042999267578125, 0.0246124267578125, -0.039703369140625, 0.00928497314453125, -0.01654052734375, -0.0268096923828125, -0.01169586181640625, 0.07891845703125, 0.05010986328125, -0.0357666015625, 0.0021800994873046875, 0.04681396484375, -0.0259246826171875, 0.04388427734375, -0.0134735107421875, 0.00527191162109375, 0.0138702392578125, -0.0249481201171875, -0.05487060546875, -0.031646728515625, 0.0008344650268554688, -0.029632568359375, 0.01354217529296875, 0.0237884521484375, 0.00644683837890625, -0.0333251953125, -0.0279083251953125, 0.030120849609375, 0.0076751708984375, -0.068115234375, 0.062744140625, 0.029052734375, -0.01192474365234375, -0.0236053466796875, -0.06060791015625, -0.008056640625, -0.045166015625, -0.03485107421875, 0.01412200927734375, -0.005176544189453125, -0.0290374755859375, -0.011016845703125, -0.0249481201171875, -0.0169830322265625, 0.0178985595703125, 0.052001953125, 0.02545166015625, -0.00666046142578125, 0.020294189453125, -0.013580322265625, 0.0094757080078125, 0.0224609375, 0.020294189453125, 0.004352569580078125, -0.00762939453125, -0.0036716461181640625, 
-0.0259246826171875, 0.070068359375, 0.023162841796875, 0.01348114013671875, 0.01288604736328125, 0.003696441650390625, -0.0084991455078125, -0.034820556640625, -0.019073486328125, -0.03631591796875, -0.044158935546875, 0.006267547607421875, -0.025726318359375, -0.05963134765625, -0.0015468597412109375, -0.0232086181640625, -0.0015592575073242188, 0.0021724700927734375, 0.0059814453125, -0.035125732421875, -0.0040740966796875, 0.0709228515625, 0.00067138671875, 0.05615234375, -0.0127105712890625, 0.04156494140625, -0.06939697265625, -0.03289794921875, -0.049285888671875, -0.027130126953125, -0.011474609375, -0.0128326416015625, 0.00652313232421875, 0.012969970703125, 0.0026912689208984375, -0.005321502685546875, -0.020111083984375, 0.0408935546875, 0.04925537109375, 0.03173828125, 0.014129638671875, -0.00677490234375, 0.008819580078125, -0.0271759033203125, -0.0015783309936523438, -0.03424072265625, 0.109375, -0.004520416259765625, 0.054779052734375, 0.003467559814453125, 0.0307769775390625, 0.00811004638671875, 0.0300445556640625, -0.06866455078125, 0.04302978515625, -0.028076171875, -0.058319091796875, -0.04510498046875, -0.00902557373046875, -0.0863037109375, 0.02911376953125, 0.0265655517578125, -0.03460693359375, 0.003940582275390625, 0.0092620849609375, -0.006866455078125, 0.0019588470458984375, -0.0452880859375, 0.05810546875, -0.027984619140625, -0.01296234130859375, -0.01560211181640625, -0.0275421142578125, 0.01543426513671875, 0.023101806640625, -0.0201873779296875, 0.038482666015625, 0.0023365020751953125, 0.0731201171875, 0.002155303955078125, 0.0555419921875, -0.005168914794921875, 0.0477294921875, 0.0389404296875, 0.00847625732421875, -0.0291595458984375, 0.0214996337890625, 0.0046539306640625, -0.03668212890625, 0.01271820068359375, -0.040374755859375, 0.0016107559204101562, 0.04376220703125, -0.044158935546875, -0.0616455078125, -0.0694580078125, -0.0309600830078125, 0.01666259765625, 0.022613525390625, -0.0008978843688964844, 0.06976318359375, 
-0.0443115234375, 0.0263824462890625, 0.031982421875, 0.021881103515625, 0.023681640625, 0.0263824462890625, -0.032379150390625, -0.01360321044921875, 0.0709228515625, 0.0312042236328125, 0.00872039794921875, 0.004119873046875, 0.042755126953125, -0.01971435546875, -0.038909912109375, -0.01047515869140625, 0.01230621337890625, -0.052215576171875, 0.0006666183471679688, -0.0214691162109375, 0.0002608299255371094, -0.0106964111328125, -0.031707763671875, 0.011932373046875, -0.002445220947265625, -0.0228118896484375, -0.02899169921875, 0.02655029296875, 0.0270233154296875, -0.028076171875, 0.04766845703125, -0.012603759765625, 0.0169219970703125, 0.036956787109375, 0.018707275390625, -0.0208282470703125, -0.01025390625, -0.0168609619140625, -0.025848388671875, -0.044189453125, -0.06903076171875, 0.0279083251953125, 0.01314544677734375, 0.049407958984375, 0.0271759033203125, 0.000583648681640625, 0.0204925537109375, 0.00019228458404541016, 0.03839111328125, -0.01351165771484375, -0.060333251953125, 0.055816650390625, -0.021026611328125, 0.007587432861328125, 0.086181640625, 0.08673095703125, -0.0252838134765625, -0.01035308837890625, -0.0224609375, -0.061370849609375, 0.02288818359375, 0.01390838623046875, -0.0017795562744140625, 0.0267333984375, 0.028533935546875, 0.0128631591796875, 0.01338958740234375, -0.053375244140625, -0.021881103515625, -0.0187835693359375, 0.013885498046875, -0.0243072509765625, -0.0258941650390625, -0.0260772705078125, -0.04083251953125, 0.048187255859375, -0.0034198760986328125, 0.0082244873046875, 0.036590576171875, -0.0021305084228515625, -0.0129241943359375, 0.00021517276763916016, 0.00923919677734375, 0.067626953125, -0.04400634765625, 0.0129241943359375, 0.01517486572265625, -0.09381103515625, 0.00506591796875, -0.00862884521484375, -0.002201080322265625, 0.0260009765625, -0.0401611328125, 0.0062713623046875, 0.019195556640625, -0.047149658203125, 0.01239013671875, -0.0222015380859375, -0.0173797607421875, -0.049835205078125, 
0.009796142578125, 0.0138092041015625, -0.00585174560546875, -0.007061004638671875, -0.0026454925537109375, 0.04656982421875, -0.0738525390625, 0.006847381591796875, 0.018096923828125, -0.0487060546875, 0.01262664794921875, 0.036712646484375, 0.05126953125, -0.049896240234375, 0.0758056640625, 0.041015625, -0.0384521484375, 0.0341796875, 0.0643310546875, 0.0189056396484375, -0.031402587890625, 0.0185089111328125, 0.032958984375, 0.0003993511199951172, -0.0246734619140625, 0.07470703125, -0.019134521484375, -0.0202789306640625, 0.004337310791015625, -0.0179595947265625, -0.056915283203125, 0.008514404296875, -0.061553955078125, 0.026458740234375, -0.01080322265625, -0.003101348876953125, 0.0215301513671875, 0.01026153564453125, -0.0278167724609375, 0.05230712890625, -0.01256561279296875, 0.070556640625, -0.06866455078125, 0.064208984375, 0.06927490234375, -0.03521728515625, -0.055084228515625, -0.0265045166015625, 0.0114898681640625, -0.021942138671875, 0.03729248046875, 0.032135009765625, -0.0233001708984375, -0.00966644287109375, -0.0214385986328125, -0.0255889892578125, 0.059356689453125, -0.0164031982421875, -0.03643798828125, 0.02197265625, -0.0200653076171875, 0.01114654541015625, 0.00514984130859375, 0.0251007080078125, 0.0426025390625, 0.027679443359375, -0.00490570068359375, -0.0232696533203125, -0.0100555419921875, -0.0227508544921875, -0.04559326171875, 0.03961181640625, -0.060943603515625, 0.058837890625, -0.04034423828125, -0.0391845703125, 0.002750396728515625, 0.044525146484375, 0.0029544830322265625, 0.063232421875, 0.026702880859375, 0.0240631103515625, 0.07147216796875, -0.02032470703125, 0.05035400390625, 0.0255279541015625, 0.0271759033203125, 0.058868408203125, 0.0016870498657226562, 0.0675048828125, 0.06805419921875, -0.01528167724609375, 0.005077362060546875, 0.054046630859375, -0.02337646484375, 0.047027587890625, 0.0545654296875, -0.029510498046875, 0.00601959228515625, 0.00879669189453125, -0.0389404296875, 0.0021419525146484375, 
0.0252685546875, -0.00453948974609375, 0.0005216598510742188, 0.024566650390625, -0.0132598876953125, 0.0187530517578125, -0.037200927734375, 0.0111083984375, 0.045867919921875, -0.050384521484375, 0.00345611572265625, -0.041290283203125, -0.0007305145263671875, -0.040008544921875, 0.00943756103515625, -0.014495849609375, 0.0290069580078125, -0.0209503173828125, -0.041351318359375, 0.040863037109375, -0.0132598876953125, 0.0231170654296875, -0.007381439208984375, 0.053131103515625, -0.005916595458984375, -0.049896240234375, 0.039794921875, 0.03228759765625, 0.05535888671875, 0.0271759033203125, 0.00821685791015625, -0.01641845703125, 0.01194000244140625, -0.023162841796875, 0.036590576171875, 0.006076812744140625, 0.01517486572265625, 0.039093017578125, 0.0148162841796875, 0.00795745849609375, 0.003391265869140625, 0.01343536376953125, 0.0440673828125, -0.0726318359375, -0.03839111328125, -0.047027587890625, 0.0115814208984375, -0.0053863525390625, -0.069580078125, 0.06817626953125, 0.07568359375, 0.101806640625, 0.0016889572143554688, 0.040985107421875, -0.00678253173828125, 0.0234375, -0.035797119140625, 0.0546875, -0.07757568359375, -0.039581298828125, 0.03216552734375, -0.05169677734375, -0.01617431640625, 0.07281494140625, 0.00494384765625, -0.0240631103515625, 0.0989990234375, 0.002445220947265625, -0.00955963134765625, 0.05194091796875, 0.0300750732421875, -0.0147552490234375, -0.00024247169494628906, 0.0286102294921875, 0.0250244140625, 0.005077362060546875, 0.021087646484375, -0.0377197265625, -0.0228118896484375, -0.036865234375, -0.004467010498046875, -0.12548828125, -0.03741455078125, -0.0224761962890625, -0.037139892578125, 0.0194854736328125, 0.02191162109375, 0.036956787109375, -0.04443359375, -0.06793212890625, 0.002685546875, -0.004512786865234375, -0.0177459716796875, -0.0008082389831542969, 0.059722900390625, -0.0272064208984375, -0.05706787109375, 0.0494384765625, -0.005641937255859375, 0.040252685546875, -0.0252685546875, 0.043670654296875, 
-0.0241241455078125, -0.01247406005859375, 0.01410675048828125, 0.028533935546875, -0.01502227783203125, -0.0106658935546875, 0.001567840576171875, -0.0188751220703125, 0.01494598388671875, 0.08782958984375, -0.033233642578125, 0.053741455078125, 0.0567626953125, 0.03289794921875, 0.058868408203125, -0.0012731552124023438, 0.06005859375, 0.0036640167236328125, 0.0269927978515625, 0.038360595703125, 0.03704833984375, 0.00913238525390625, -0.050079345703125, 0.0721435546875, 0.0002200603485107422, -0.06402587890625, -0.052886962890625, -0.0030651092529296875, -0.11651611328125, -0.021484375, 0.0275115966796875, -0.01397705078125, -0.03564453125, -0.0129852294921875, -0.033843994140625, 0.006870269775390625, -0.049224853515625, 0.06805419921875, 0.06927490234375, -0.0173187255859375, -0.0033054351806640625, -0.0946044921875, 0.0299530029296875, 0.0489501953125, -0.06207275390625, -0.044769287109375, -0.004390716552734375, 0.022918701171875, 0.000009775161743164062, 0.032012939453125, 0.005680084228515625, 0.0240325927734375, 0.026336669921875, -0.0242767333984375, -0.005615234375, -0.046630859375, 0.004619598388671875, 0.01494598388671875, -0.00882720947265625, -0.06292724609375 ] ]
google/mobilenet_v1_1.0_224
2023-05-16T16:38:27.000Z
[ "transformers", "pytorch", "mobilenet_v1", "image-classification", "vision", "dataset:imagenet-1k", "arxiv:1704.04861", "license:other", "autotrain_compatible", "endpoints_compatible", "region:us" ]
image-classification
google
null
null
google/mobilenet_v1_1.0_224
0
8,569
transformers
2022-11-10T16:06:24
--- license: other tags: - vision - image-classification datasets: - imagenet-1k widget: - src: https://huggingface.co/datasets/mishig/sample_images/resolve/main/tiger.jpg example_title: Tiger - src: https://huggingface.co/datasets/mishig/sample_images/resolve/main/teapot.jpg example_title: Teapot - src: https://huggingface.co/datasets/mishig/sample_images/resolve/main/palace.jpg example_title: Palace --- # MobileNet V1 MobileNet V1 model pre-trained on ImageNet-1k at resolution 224x224. It was introduced in [MobileNets: Efficient Convolutional Neural Networks for Mobile Vision Applications](https://arxiv.org/abs/1704.04861) by Howard et al, and first released in [this repository](https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet_v1.md). Disclaimer: The team releasing MobileNet V1 did not write a model card for this model so this model card has been written by the Hugging Face team. ## Model description From the [original README](https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet_v1.md): > MobileNets are small, low-latency, low-power models parameterized to meet the resource constraints of a variety of use cases. They can be built upon for classification, detection, embeddings and segmentation similar to how other popular large scale models, such as Inception, are used. MobileNets can be run efficiently on mobile devices [...] MobileNets trade off between latency, size and accuracy while comparing favorably with popular models from the literature. ## Intended uses & limitations You can use the raw model for image classification. See the [model hub](https://huggingface.co/models?search=mobilenet_v1) to look for fine-tuned versions on a task that interests you. 
### How to use Here is how to use this model to classify an image of the COCO 2017 dataset into one of the 1,000 ImageNet classes: ```python from transformers import AutoImageProcessor, AutoModelForImageClassification from PIL import Image import requests url = "http://images.cocodataset.org/val2017/000000039769.jpg" image = Image.open(requests.get(url, stream=True).raw) preprocessor = AutoImageProcessor.from_pretrained("google/mobilenet_v1_1.0_224") model = AutoModelForImageClassification.from_pretrained("google/mobilenet_v1_1.0_224") inputs = preprocessor(images=image, return_tensors="pt") outputs = model(**inputs) logits = outputs.logits # model predicts one of the 1000 ImageNet classes predicted_class_idx = logits.argmax(-1).item() print("Predicted class:", model.config.id2label[predicted_class_idx]) ``` Note: This model actually predicts 1001 classes, the 1000 classes from ImageNet plus an extra “background” class (index 0). Currently, both the feature extractor and model support PyTorch.
2,776
[ [ -0.04248046875, -0.0174560546875, -0.02001953125, 0.00566864013671875, -0.0245361328125, -0.03131103515625, 0.0202789306640625, -0.047515869140625, 0.033203125, 0.031219482421875, -0.03228759765625, -0.0263824462890625, -0.047088623046875, -0.0136566162109375, -0.0305633544921875, 0.05242919921875, 0.00543212890625, -0.0005087852478027344, -0.033355712890625, -0.0269317626953125, -0.01221466064453125, -0.03826904296875, -0.073974609375, -0.026519775390625, 0.04766845703125, 0.034576416015625, 0.044219970703125, 0.043060302734375, 0.048370361328125, 0.0288238525390625, 0.00011199712753295898, 0.000568389892578125, -0.0201263427734375, -0.03143310546875, 0.014923095703125, -0.0273895263671875, -0.0350341796875, 0.034027099609375, 0.0176544189453125, 0.021392822265625, 0.00818634033203125, 0.040252685546875, -0.0022430419921875, 0.045806884765625, -0.04888916015625, -0.00641632080078125, -0.040924072265625, 0.0173187255859375, -0.00205230712890625, 0.005702972412109375, -0.021514892578125, -0.005855560302734375, 0.0194244384765625, -0.03424072265625, 0.0185699462890625, -0.01032257080078125, 0.088623046875, 0.024078369140625, -0.03387451171875, -0.007732391357421875, -0.02752685546875, 0.04254150390625, -0.034027099609375, 0.0215301513671875, 0.04022216796875, 0.0386962890625, 0.0091094970703125, -0.0909423828125, -0.036376953125, -0.00998687744140625, 0.0001036524772644043, 0.004985809326171875, -0.0183868408203125, 0.000980377197265625, 0.01021575927734375, 0.032684326171875, -0.043060302734375, 0.0134124755859375, -0.06646728515625, -0.0196533203125, 0.056060791015625, 0.002040863037109375, 0.008575439453125, -0.0076904296875, -0.044158935546875, -0.003330230712890625, -0.036163330078125, 0.0265045166015625, 0.00856781005859375, 0.00009179115295410156, -0.031646728515625, 0.032958984375, -0.019744873046875, 0.0499267578125, 0.004947662353515625, -0.017425537109375, 0.0235748291015625, -0.00873565673828125, -0.04150390625, 0.02081298828125, 0.0732421875, 
0.030670166015625, 0.009490966796875, 0.013427734375, -0.0027313232421875, 0.006072998046875, 0.032745361328125, -0.091552734375, -0.0179901123046875, 0.02349853515625, -0.060821533203125, -0.055450439453125, 0.004428863525390625, -0.0241546630859375, -0.0214996337890625, -0.003902435302734375, 0.0305938720703125, -0.0035400390625, -0.0284881591796875, 0.0008220672607421875, 0.014678955078125, 0.02392578125, 0.02081298828125, -0.05401611328125, 0.02752685546875, 0.0215301513671875, 0.0821533203125, 0.004268646240234375, -0.017242431640625, 0.0011415481567382812, -0.05279541015625, -0.01654052734375, 0.035369873046875, -0.004520416259765625, -0.01611328125, -0.0279693603515625, 0.025146484375, -0.01082611083984375, -0.045806884765625, 0.0445556640625, -0.04095458984375, -0.0008358955383300781, 0.01360321044921875, -0.01016998291015625, -0.03631591796875, 0.0233001708984375, -0.047821044921875, 0.070556640625, 0.01531219482421875, -0.0621337890625, 0.01861572265625, -0.0411376953125, -0.01898193359375, -0.006580352783203125, 0.0103912353515625, -0.055023193359375, -0.004520416259765625, -0.01039886474609375, 0.053070068359375, -0.0170745849609375, -0.0005278587341308594, -0.031280517578125, -0.0316162109375, 0.005596160888671875, -0.01183319091796875, 0.07879638671875, 0.04925537109375, -0.020294189453125, 0.01019287109375, -0.051239013671875, 0.0245819091796875, 0.0168609619140625, -0.01117706298828125, -0.0035305023193359375, -0.0174713134765625, 0.01500701904296875, 0.0394287109375, 0.006999969482421875, -0.033172607421875, 0.015655517578125, 0.00554656982421875, 0.053192138671875, 0.018585205078125, -0.0223846435546875, 0.044708251953125, -0.022369384765625, 0.032135009765625, 0.0213165283203125, 0.03594970703125, -0.032501220703125, -0.03643798828125, -0.05804443359375, -0.020782470703125, 0.0258636474609375, 0.043182373046875, -0.0440673828125, 0.01235198974609375, -0.0213775634765625, -0.0672607421875, -0.0264739990234375, -0.0006151199340820312, 
0.0212249755859375, 0.03411865234375, 0.018646240234375, -0.03326416015625, -0.0672607421875, -0.07122802734375, 0.01058197021484375, 0.0018224716186523438, 0.0198822021484375, 0.031463623046875, 0.04425048828125, -0.02679443359375, 0.06219482421875, -0.0005469322204589844, -0.01079559326171875, -0.00653839111328125, -0.006866455078125, 0.019622802734375, 0.0667724609375, 0.0445556640625, -0.08575439453125, -0.0265045166015625, -0.0001366138458251953, -0.0760498046875, 0.0309295654296875, 0.01061248779296875, 0.0006504058837890625, -0.0025482177734375, 0.031982421875, -0.039825439453125, 0.05548095703125, 0.04156494140625, -0.018463134765625, 0.03204345703125, 0.005645751953125, 0.00423431396484375, -0.0830078125, 0.004138946533203125, 0.0284576416015625, -0.028839111328125, -0.0380859375, 0.0014781951904296875, 0.00617218017578125, -0.02294921875, -0.06549072265625, 0.052459716796875, -0.021759033203125, -0.0007233619689941406, -0.030303955078125, -0.03558349609375, -0.0023441314697265625, 0.027313232421875, 0.006626129150390625, 0.03802490234375, 0.05908203125, -0.0645751953125, 0.040618896484375, 0.01367950439453125, -0.0266571044921875, 0.01224517822265625, -0.0648193359375, 0.01045989990234375, -0.007274627685546875, 0.0294189453125, -0.05712890625, -0.02337646484375, 0.03302001953125, -0.044921875, 0.016815185546875, -0.04296875, -0.01016998291015625, -0.05560302734375, -0.007259368896484375, 0.042449951171875, 0.040252685546875, -0.054046630859375, 0.0411376953125, 0.0283660888671875, 0.03448486328125, -0.044647216796875, -0.064697265625, -0.00576019287109375, -0.0194091796875, -0.06072998046875, 0.032135009765625, 0.019134521484375, -0.0010204315185546875, 0.0005574226379394531, -0.018829345703125, -0.02703857421875, 0.0067138671875, 0.059173583984375, 0.02862548828125, -0.02484130859375, -0.006778717041015625, -0.01702880859375, -0.00615692138671875, -0.004695892333984375, -0.04296875, 0.034759521484375, -0.0298614501953125, 0.01026153564453125, 
-0.055938720703125, -0.00792694091796875, 0.05804443359375, -0.0198822021484375, 0.05010986328125, 0.059844970703125, -0.04217529296875, 0.00508880615234375, -0.0298004150390625, -0.007228851318359375, -0.0396728515625, 0.02587890625, -0.041290283203125, -0.04937744140625, 0.0474853515625, -0.003086090087890625, -0.0223236083984375, 0.034332275390625, 0.0161590576171875, 0.0004603862762451172, 0.05926513671875, 0.043975830078125, 0.00728607177734375, 0.045684814453125, -0.05743408203125, -0.005100250244140625, -0.061187744140625, -0.0306243896484375, -0.01995849609375, -0.030242919921875, -0.066650390625, -0.01473236083984375, 0.01617431640625, 0.031280517578125, -0.033782958984375, 0.05224609375, -0.05133056640625, 0.028564453125, 0.048797607421875, 0.052642822265625, -0.020751953125, 0.0209503173828125, -0.00408172607421875, 0.0260009765625, -0.0714111328125, -0.04022216796875, 0.0802001953125, 0.041748046875, 0.04022216796875, -0.0182647705078125, 0.038665771484375, 0.0035400390625, 0.03424072265625, -0.0615234375, 0.039764404296875, -0.0264739990234375, -0.059600830078125, -0.00276947021484375, -0.02716064453125, -0.061981201171875, 0.0185394287109375, -0.0256500244140625, -0.055084228515625, 0.03485107421875, 0.022613525390625, -0.025604248046875, 0.0266571044921875, -0.0672607421875, 0.09588623046875, -0.0116119384765625, -0.055572509765625, 0.01045989990234375, -0.052337646484375, 0.031768798828125, 0.00937652587890625, -0.0132598876953125, -0.0080108642578125, 0.0189208984375, 0.05462646484375, -0.0540771484375, 0.059051513671875, -0.0235748291015625, 0.0281982421875, 0.06707763671875, -0.0025386810302734375, 0.032928466796875, -0.003330230712890625, -0.014129638671875, 0.035369873046875, 0.010711669921875, -0.043365478515625, -0.0269317626953125, 0.053802490234375, -0.05908203125, -0.0168609619140625, -0.0166168212890625, -0.0057373046875, 0.0162353515625, 0.01953125, 0.0592041015625, 0.0516357421875, 0.005138397216796875, 0.01898193359375, 
0.035186767578125, -0.0155792236328125, 0.040557861328125, -0.01091766357421875, -0.032470703125, -0.0166473388671875, 0.06512451171875, 0.01373291015625, 0.006801605224609375, 0.003265380859375, 0.008087158203125, -0.021484375, -0.033447265625, -0.035675048828125, -0.01529693603515625, -0.0516357421875, -0.02142333984375, -0.046417236328125, -0.03594970703125, -0.03240966796875, -0.01096343994140625, -0.0523681640625, -0.0227203369140625, -0.0328369140625, 0.00829315185546875, 0.0176849365234375, 0.032867431640625, -0.0031280517578125, 0.053863525390625, -0.047027587890625, 0.0002027750015258789, 0.02935791015625, 0.032135009765625, 0.0009860992431640625, -0.046905517578125, -0.0216827392578125, 0.0112762451171875, -0.0260009765625, -0.03387451171875, 0.0281524658203125, 0.0067138671875, 0.02294921875, 0.04302978515625, -0.0230712890625, 0.044891357421875, -0.005329132080078125, 0.04547119140625, 0.051971435546875, -0.046142578125, 0.03326416015625, -0.0036220550537109375, 0.0256195068359375, 0.033721923828125, 0.056304931640625, -0.015869140625, 0.032745361328125, -0.04644775390625, -0.04364013671875, 0.041412353515625, 0.00537872314453125, 0.0194244384765625, 0.01151275634765625, 0.035186767578125, -0.0054473876953125, 0.007022857666015625, -0.061187744140625, -0.0340576171875, -0.058502197265625, -0.01496124267578125, -0.007106781005859375, -0.0227203369140625, 0.0267486572265625, -0.054779052734375, 0.0404052734375, 0.0065460205078125, 0.04034423828125, 0.0301361083984375, -0.0034236907958984375, 0.006908416748046875, -0.031158447265625, 0.05767822265625, 0.033294677734375, -0.01509857177734375, 0.02008056640625, 0.0013828277587890625, -0.06072998046875, 0.018463134765625, -0.0007524490356445312, -0.01267242431640625, 0.0018491744995117188, 0.0098114013671875, 0.080078125, -0.003314971923828125, -0.00904083251953125, 0.0411376953125, -0.006229400634765625, -0.029632568359375, -0.039764404296875, 0.00803375244140625, -0.0106353759765625, 0.01727294921875, 
0.022674560546875, 0.04266357421875, 0.011077880859375, -0.03436279296875, 0.0178375244140625, 0.015411376953125, -0.04864501953125, -0.016387939453125, 0.059173583984375, 0.006439208984375, -0.023895263671875, 0.05303955078125, -0.03179931640625, -0.048370361328125, 0.08526611328125, 0.0369873046875, 0.058746337890625, -0.01444244384765625, 0.0175933837890625, 0.07952880859375, 0.01446533203125, -0.01416778564453125, -0.0001323223114013672, 0.0006995201110839844, -0.06414794921875, -0.016510009765625, -0.036712646484375, 0.004596710205078125, 0.0279388427734375, -0.0369873046875, 0.0293731689453125, -0.042449951171875, -0.0301361083984375, 0.022003173828125, 0.0020427703857421875, -0.06256103515625, 0.046051025390625, 0.0184478759765625, 0.08367919921875, -0.0311126708984375, 0.07568359375, 0.0638427734375, -0.024017333984375, -0.0748291015625, -0.03790283203125, -0.00792694091796875, -0.044769287109375, 0.06524658203125, 0.043121337890625, 0.005031585693359375, 0.0183563232421875, -0.07763671875, -0.055999755859375, 0.091064453125, 0.0018415451049804688, -0.035614013671875, 0.020782470703125, -0.005138397216796875, 0.005496978759765625, -0.050201416015625, 0.039306640625, -0.00260162353515625, 0.01497650146484375, 0.0298004150390625, -0.061859130859375, -0.0014638900756835938, -0.038665771484375, 0.004302978515625, -0.00010854005813598633, -0.05230712890625, 0.0689697265625, -0.032501220703125, -0.01061248779296875, 0.004734039306640625, 0.040374755859375, -0.00972747802734375, 0.0472412109375, 0.04034423828125, 0.046051025390625, 0.043487548828125, -0.007480621337890625, 0.0765380859375, -0.0048370361328125, 0.028564453125, 0.06793212890625, 0.0123748779296875, 0.04052734375, 0.0216522216796875, -0.0028476715087890625, 0.01406097412109375, 0.0838623046875, -0.0207061767578125, 0.042236328125, 0.0218353271484375, -0.024017333984375, -0.0151214599609375, 0.0022125244140625, -0.0281524658203125, 0.057525634765625, 0.004520416259765625, -0.04949951171875, 
0.0008559226989746094, 0.029632568359375, -0.00901031494140625, -0.042236328125, -0.05938720703125, 0.023284912109375, 0.0015382766723632812, -0.040924072265625, 0.06634521484375, 0.010894775390625, 0.06549072265625, -0.0214385986328125, 0.0015411376953125, -0.021392822265625, 0.01174163818359375, -0.041900634765625, -0.0273895263671875, 0.026397705078125, -0.017974853515625, -0.0112762451171875, 0.00982666015625, 0.0806884765625, -0.0085601806640625, -0.035003662109375, -0.0003085136413574219, 0.00004655122756958008, 0.0321044921875, -0.0200042724609375, -0.05859375, 0.0265350341796875, -0.0136260986328125, -0.0194549560546875, 0.01641845703125, -0.0001010894775390625, -0.001190185546875, 0.06756591796875, 0.0445556640625, -0.0248565673828125, 0.02587890625, -0.043182373046875, 0.0638427734375, -0.0296630859375, -0.02044677734375, -0.05023193359375, 0.045867919921875, -0.01169586181640625, -0.04364013671875, 0.0306243896484375, 0.057464599609375, 0.0767822265625, -0.012054443359375, 0.042755126953125, -0.0263824462890625, -0.00799560546875, -0.0198211669921875, 0.049560546875, -0.052642822265625, -0.01192474365234375, 0.01015472412109375, -0.037841796875, -0.0138092041015625, 0.07318115234375, -0.0201416015625, 0.01219940185546875, 0.0328369140625, 0.0677490234375, -0.03839111328125, 0.00543212890625, 0.0279998779296875, -0.0086212158203125, -0.0169677734375, 0.026336669921875, 0.0426025390625, -0.0809326171875, 0.027130126953125, -0.05120849609375, -0.00939178466796875, -0.023590087890625, -0.055816650390625, -0.05426025390625, -0.051422119140625, -0.04296875, -0.060791015625, -0.0019435882568359375, 0.0723876953125, 0.1085205078125, -0.049896240234375, -0.01049041748046875, -0.0234222412109375, 0.006015777587890625, -0.0023021697998046875, -0.01549530029296875, 0.015716552734375, 0.00605010986328125, -0.0408935546875, 0.003997802734375, -0.01227569580078125, 0.02484130859375, -0.0034847259521484375, -0.0234222412109375, -0.0101776123046875, -0.018157958984375, 
0.052276611328125, 0.04327392578125, -0.0242462158203125, -0.00453948974609375, -0.0071258544921875, -0.0298004150390625, 0.01277923583984375, 0.049560546875, -0.04803466796875, 0.01824951171875, 0.01273345947265625, 0.035369873046875, 0.06622314453125, -0.0121002197265625, -0.00879669189453125, -0.029693603515625, 0.044677734375, 0.004535675048828125, 0.022613525390625, 0.00931549072265625, -0.0224456787109375, 0.0543212890625, 0.0169677734375, -0.0369873046875, -0.055908203125, 0.005565643310546875, -0.09295654296875, -0.00971221923828125, 0.070068359375, -0.018157958984375, -0.036285400390625, 0.0238037109375, -0.001708984375, 0.0161285400390625, 0.008270263671875, 0.0269317626953125, 0.020782470703125, 0.01204681396484375, -0.051910400390625, -0.047943115234375, 0.025177001953125, -0.0004570484161376953, -0.042877197265625, -0.05633544921875, 0.00653839111328125, 0.034027099609375, 0.0093841552734375, 0.04449462890625, -0.01287078857421875, 0.023712158203125, 0.0280914306640625, 0.03240966796875, -0.0379638671875, -0.0235595703125, -0.01056671142578125, 0.006519317626953125, -0.029296875, -0.052337646484375 ] ]
nuigurumi/basil_mix
2023-05-16T09:42:46.000Z
[ "diffusers", "license:other", "endpoints_compatible", "has_space", "diffusers:StableDiffusionPipeline", "region:us" ]
null
nuigurumi
null
null
nuigurumi/basil_mix
959
8,569
diffusers
2023-01-04T07:45:22
--- license: other --- ## Model Description - merged model. - realistic texture and Asian face. - designed to maintain a responsive reaction to danbooru based prompts. ## License - This model and its derivatives(image, merged model) can be freely used for non-profit purposes only. - You may not use this model and its derivatives on websites, apps, or other platforms where you can or plan to earn income or donations. If you wish to use it for such purposes, please contact nuigurumi. - Introducing the model itself is allowed for both commercial and non-commercial purposes, but please include the model name and a link to this repository when doing so. - このモデル及びその派生物(生成物、マージモデル)は、完全に非営利目的の使用に限り、自由に利用することができます。 - あなたが収入や寄付を得ることのできる、もしくは得る予定のWebサイト、アプリ、その他でこのモデル及びその派生物を利用することはできません。利用したい場合は[nuigurumi](https://twitter.com/nuigurumi1_KR)に連絡してください。 - モデル自体の紹介することは、営利非営利を問わず自由です、その場合はモデル名と当リポジトリのリンクを併記してください。 - check [License](https://huggingface.co/nuigurumi/basil_mix/blob/main/License.md) _読むのめんどくさい人向け 商用利用をすべて禁止します。fanboxやpatreonなどの支援サイトでの使用も全て禁止します。 マージモデル(cilled_re...とか)も派生物なので商用利用禁止になります。 商用利用をしたいなら私に連絡してください。 どこかでモデルを紹介していただけるなら、リンクも併記してくれると嬉しいです。_ # Gradio We support a [Gradio](https://github.com/gradio-app/gradio) Web UI to run basil_mix: [![Open In Spaces](https://camo.githubusercontent.com/00380c35e60d6b04be65d3d94a58332be5cc93779f630bcdfc18ab9a3a7d3388/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f25463025394625413425393725323048756767696e67253230466163652d5370616365732d626c7565)](https://huggingface.co/spaces/akhaliq/basil_mix) ## Recommendations - VAE: [vae-ft-mse-840000](https://huggingface.co/stabilityai/sd-vae-ft-mse-original) from StabilityAI - Prompting: Simple prompts are better. Large amounts of quality tags and negative prompts can have negative effects.
1,841
[ [ -0.0631103515625, -0.0543212890625, 0.007293701171875, 0.0294189453125, -0.041778564453125, -0.0002982616424560547, 0.0012950897216796875, -0.049835205078125, 0.061004638671875, 0.033447265625, -0.070068359375, -0.033111572265625, -0.0321044921875, -0.00030517578125, 0.0044403076171875, 0.07818603515625, -0.014434814453125, -0.007354736328125, 0.0008916854858398438, -0.00982666015625, -0.048431396484375, -0.00839996337890625, -0.039581298828125, -0.034149169921875, 0.0170745849609375, 0.0259857177734375, 0.07147216796875, 0.025848388671875, 0.03173828125, 0.0228118896484375, -0.020172119140625, 0.004993438720703125, -0.0274505615234375, -0.00812530517578125, 0.00212860107421875, -0.01213836669921875, -0.055145263671875, 0.007579803466796875, 0.0310821533203125, 0.0218505859375, -0.024322509765625, -0.0015707015991210938, 0.0134429931640625, 0.0521240234375, -0.0227203369140625, 0.003658294677734375, 0.0018367767333984375, 0.0099945068359375, 0.00263214111328125, 0.0011835098266601562, 0.016632080078125, -0.041412353515625, -0.00922393798828125, -0.08514404296875, -0.003673553466796875, -0.0199737548828125, 0.093017578125, 0.00039505958557128906, -0.015228271484375, 0.000652313232421875, -0.036163330078125, 0.041717529296875, -0.049163818359375, 0.0220947265625, 0.019989013671875, 0.05401611328125, -0.0170440673828125, -0.045989990234375, -0.050201416015625, -0.00020003318786621094, -0.00640106201171875, 0.0256195068359375, -0.0171051025390625, -0.00958251953125, 0.01947021484375, 0.0379638671875, -0.048492431640625, -0.01739501953125, -0.0186614990234375, -0.0008082389831542969, 0.04608154296875, 0.007053375244140625, 0.05853271484375, -0.02056884765625, -0.0439453125, -0.021636962890625, -0.0162200927734375, 0.015899658203125, 0.018218994140625, 0.00850677490234375, -0.056304931640625, 0.02105712890625, -0.0253448486328125, 0.032257080078125, 0.02862548828125, -0.0206756591796875, 0.037078857421875, -0.0286712646484375, -0.0231781005859375, -0.024017333984375, 
0.06976318359375, 0.06903076171875, 0.01197052001953125, 0.007648468017578125, -0.01236724853515625, 0.009521484375, 0.0008401870727539062, -0.0843505859375, -0.00490570068359375, 0.032928466796875, -0.0362548828125, -0.0245361328125, 0.00963592529296875, -0.08978271484375, -0.0208282470703125, -0.01235198974609375, 0.012481689453125, -0.045684814453125, -0.055694580078125, 0.00032830238342285156, -0.0107879638671875, 0.0129241943359375, 0.04412841796875, -0.0601806640625, 0.0292510986328125, 0.035125732421875, 0.06396484375, 0.006687164306640625, -0.0078582763671875, -0.00710296630859375, 0.0157318115234375, -0.0276031494140625, 0.044891357421875, -0.0193939208984375, -0.060455322265625, -0.00799560546875, 0.01161956787109375, -0.004974365234375, -0.0394287109375, 0.055267333984375, -0.014678955078125, 0.018157958984375, -0.019439697265625, -0.01092529296875, -0.0283050537109375, 0.01302337646484375, -0.04803466796875, 0.063720703125, 0.0247344970703125, -0.056976318359375, 0.0174713134765625, -0.04754638671875, -0.0134735107421875, 0.0168609619140625, -0.0216217041015625, -0.0293121337890625, -0.003993988037109375, 0.0109405517578125, 0.0296173095703125, -0.01885986328125, 0.0245819091796875, -0.037872314453125, -0.022979736328125, 0.024566650390625, 0.004024505615234375, 0.08013916015625, 0.038818359375, -0.0274505615234375, 0.00031876564025878906, -0.05889892578125, -0.00704193115234375, 0.02459716796875, -0.006927490234375, -0.028472900390625, -0.037506103515625, 0.021636962890625, 0.02880859375, 0.025054931640625, -0.025482177734375, 0.02435302734375, -0.007778167724609375, 0.0197296142578125, 0.049285888671875, 0.0065460205078125, 0.0389404296875, -0.04705810546875, 0.06396484375, 0.0204620361328125, 0.03509521484375, -0.00925445556640625, -0.031280517578125, -0.0452880859375, -0.037841796875, -0.006591796875, 0.03594970703125, -0.07220458984375, 0.0303192138671875, -0.007389068603515625, -0.06402587890625, -0.0268096923828125, -0.00499725341796875, 
0.038116455078125, 0.03802490234375, 0.01123046875, -0.043853759765625, -0.0272064208984375, -0.050323486328125, -0.01305389404296875, -0.005702972412109375, 0.00455474853515625, 0.049835205078125, 0.0286102294921875, -0.05120849609375, 0.05633544921875, -0.04754638671875, -0.0264129638671875, -0.01007080078125, 0.013885498046875, 0.0462646484375, 0.0543212890625, 0.0816650390625, -0.07403564453125, -0.05584716796875, 0.0200042724609375, -0.05718994140625, -0.021209716796875, 0.0025844573974609375, -0.019073486328125, 0.0105438232421875, 0.01342010498046875, -0.07318115234375, 0.0379638671875, 0.041473388671875, -0.04205322265625, 0.051239013671875, -0.00868988037109375, 0.0338134765625, -0.1077880859375, 0.01535797119140625, 0.0185394287109375, -0.007659912109375, -0.04803466796875, 0.03656005859375, -0.0073394775390625, -0.0203704833984375, -0.04132080078125, 0.0718994140625, -0.04669189453125, 0.0164794921875, -0.0170440673828125, 0.003017425537109375, 0.0163116455078125, 0.041595458984375, -0.0235595703125, 0.0496826171875, 0.0447998046875, -0.04852294921875, 0.038330078125, 0.011138916015625, -0.022735595703125, 0.046630859375, -0.05975341796875, -0.0170440673828125, -0.0029544830322265625, 0.0140838623046875, -0.0848388671875, -0.042755126953125, 0.055694580078125, -0.0439453125, 0.0269012451171875, 0.0011320114135742188, -0.035125732421875, -0.0212860107421875, -0.045562744140625, 0.0277862548828125, 0.05059814453125, -0.03436279296875, 0.06964111328125, 0.01438140869140625, -0.0062103271484375, -0.0228118896484375, -0.057342529296875, -0.013702392578125, -0.038330078125, -0.055328369140625, 0.03656005859375, -0.0278472900390625, -0.0193634033203125, 0.00432586669921875, 0.0062103271484375, -0.030609130859375, -0.0015048980712890625, 0.039764404296875, 0.035919189453125, 0.00673675537109375, -0.0178680419921875, 0.0182037353515625, -0.01424407958984375, -0.0109405517578125, 0.0124969482421875, 0.044342041015625, 0.00009626150131225586, -0.0113677978515625, 
-0.0703125, 0.0177001953125, 0.062164306640625, -0.00847625732421875, 0.043212890625, 0.056304931640625, -0.0302886962890625, 0.0198211669921875, -0.04583740234375, -0.0156097412109375, -0.036346435546875, 0.002674102783203125, -0.030731201171875, -0.0462646484375, 0.0772705078125, -0.00015723705291748047, -0.00470733642578125, 0.04486083984375, 0.0279541015625, 0.00360107421875, 0.06787109375, 0.03985595703125, -0.004520416259765625, 0.0543212890625, -0.03558349609375, -0.000014543533325195312, -0.0623779296875, -0.038818359375, -0.01497650146484375, -0.01910400390625, -0.06671142578125, -0.02886962890625, 0.0149383544921875, 0.01220703125, -0.020538330078125, 0.035064697265625, -0.05084228515625, 0.01413726806640625, 0.02325439453125, 0.01447296142578125, 0.007534027099609375, 0.006183624267578125, -0.0178375244140625, -0.007354736328125, -0.0304107666015625, -0.01222991943359375, 0.0362548828125, 0.0289154052734375, 0.0628662109375, 0.0255889892578125, 0.052398681640625, -0.007694244384765625, 0.00620269775390625, -0.028289794921875, 0.047271728515625, 0.0019893646240234375, -0.052978515625, -0.0022640228271484375, -0.0355224609375, -0.05999755859375, 0.034423828125, -0.019989013671875, -0.056060791015625, 0.039459228515625, -0.00714874267578125, -0.033599853515625, 0.00974273681640625, -0.050445556640625, 0.05474853515625, -0.01824951171875, -0.042572021484375, -0.0004343986511230469, -0.042755126953125, 0.03179931640625, 0.0161590576171875, 0.0294189453125, -0.0200042724609375, -0.002880096435546875, 0.04876708984375, -0.05413818359375, 0.0653076171875, -0.017730712890625, -0.007537841796875, 0.031707763671875, 0.0118865966796875, 0.022430419921875, 0.02716064453125, 0.0154571533203125, 0.0278778076171875, 0.0035152435302734375, -0.0193939208984375, -0.05108642578125, 0.070556640625, -0.071533203125, -0.0304107666015625, -0.0504150390625, -0.026885986328125, 0.019500732421875, 0.0310516357421875, 0.0379638671875, 0.02166748046875, -0.0099029541015625, 
-0.00702667236328125, 0.035491943359375, -0.0143280029296875, 0.03472900390625, 0.038726806640625, -0.056243896484375, -0.06463623046875, 0.058013916015625, 0.0095977783203125, 0.01229095458984375, 0.006229400634765625, 0.01245880126953125, -0.006931304931640625, 0.005767822265625, -0.03619384765625, 0.0355224609375, -0.02166748046875, -0.016143798828125, -0.023956298828125, -0.006229400634765625, -0.04351806640625, -0.0119171142578125, -0.0173797607421875, -0.0286712646484375, -0.0504150390625, -0.0013284683227539062, 0.0458984375, 0.020355224609375, -0.019256591796875, 0.03228759765625, -0.05999755859375, 0.033599853515625, 0.0170440673828125, 0.036102294921875, -0.00004214048385620117, -0.042572021484375, -0.01019287109375, 0.022308349609375, -0.02947998046875, -0.066162109375, 0.049560546875, -0.00630950927734375, 0.0258636474609375, 0.0487060546875, -0.0125274658203125, 0.0655517578125, -0.0187530517578125, 0.0504150390625, 0.048614501953125, -0.04644775390625, 0.041412353515625, -0.0615234375, 0.027130126953125, 0.043853759765625, 0.03436279296875, -0.050567626953125, -0.005237579345703125, -0.056427001953125, -0.0548095703125, 0.051483154296875, 0.00952911376953125, 0.0139007568359375, 0.01280975341796875, 0.0273284912109375, -0.005016326904296875, 0.02239990234375, -0.05218505859375, -0.032928466796875, -0.021148681640625, 0.01385498046875, 0.002513885498046875, -0.02069091796875, -0.01837158203125, -0.03228759765625, 0.05377197265625, 0.00917816162109375, 0.021026611328125, 0.00836181640625, 0.0323486328125, -0.033447265625, -0.0017366409301757812, 0.045440673828125, 0.044342041015625, -0.03192138671875, -0.0274658203125, -0.016021728515625, -0.03424072265625, -0.006195068359375, 0.02642822265625, -0.0274505615234375, 0.024383544921875, 0.00751495361328125, 0.0703125, 0.0185546875, -0.0377197265625, 0.041412353515625, -0.0229339599609375, -0.01088714599609375, -0.03302001953125, 0.01535797119140625, 0.00807952880859375, 0.0276641845703125, 
0.02252197265625, 0.00241851806640625, 0.0247802734375, -0.04669189453125, 0.003070831298828125, 0.0279541015625, -0.02398681640625, -0.0185546875, 0.06170654296875, 0.004436492919921875, -0.005825042724609375, 0.0286712646484375, -0.0159149169921875, -0.029632568359375, 0.0631103515625, 0.0302276611328125, 0.072265625, -0.0450439453125, 0.0330810546875, 0.045928955078125, -0.006565093994140625, 0.007137298583984375, 0.040496826171875, 0.007579803466796875, -0.035736083984375, -0.01268768310546875, -0.035491943359375, -0.0164642333984375, 0.01107025146484375, -0.058258056640625, 0.03985595703125, -0.06829833984375, -0.0251007080078125, -0.0149383544921875, 0.0036449432373046875, -0.016021728515625, 0.03009033203125, -0.0110626220703125, 0.07611083984375, -0.07177734375, 0.044525146484375, 0.0679931640625, -0.05413818359375, -0.06903076171875, -0.0014753341674804688, 0.01317596435546875, -0.042694091796875, 0.0228118896484375, 0.0226593017578125, 0.004482269287109375, -0.0274810791015625, -0.044830322265625, -0.059173583984375, 0.09478759765625, 0.017608642578125, -0.031524658203125, 0.01148223876953125, -0.011505126953125, 0.04608154296875, -0.02191162109375, 0.02935791015625, 0.0195770263671875, 0.034515380859375, 0.03851318359375, -0.0728759765625, 0.0093231201171875, -0.0445556640625, 0.0172271728515625, 0.002285003662109375, -0.0819091796875, 0.08538818359375, -0.00435638427734375, -0.024383544921875, 0.02374267578125, 0.044952392578125, 0.039337158203125, 0.0380859375, 0.0247039794921875, 0.06402587890625, 0.03326416015625, -0.01085662841796875, 0.08026123046875, -0.02471923828125, 0.0355224609375, 0.0577392578125, 0.005298614501953125, 0.0282135009765625, 0.0251007080078125, -0.02276611328125, 0.049041748046875, 0.06402587890625, 0.0048828125, 0.052001953125, -0.0179290771484375, -0.023712158203125, 0.003704071044921875, -0.041473388671875, -0.05621337890625, 0.01071929931640625, 0.0070037841796875, -0.0066375732421875, -0.0019474029541015625, 
0.0010786056518554688, 0.01995849609375, 0.0017595291137695312, -0.0200653076171875, 0.054168701171875, 0.00811767578125, -0.03851318359375, 0.0543212890625, -0.0113677978515625, 0.0528564453125, -0.04486083984375, -0.01800537109375, -0.041107177734375, -0.001979827880859375, -0.03314208984375, -0.059814453125, 0.0030517578125, -0.006435394287109375, -0.007488250732421875, -0.002605438232421875, 0.045166015625, -0.02294921875, -0.0445556640625, 0.0380859375, 0.00644683837890625, 0.014739990234375, 0.01235198974609375, -0.06988525390625, 0.0335693359375, 0.0126800537109375, -0.020660400390625, 0.027984619140625, 0.037109375, 0.007556915283203125, 0.055084228515625, 0.04229736328125, 0.004688262939453125, -0.0079498291015625, -0.0002894401550292969, 0.09613037109375, -0.03619384765625, -0.0616455078125, -0.05426025390625, 0.05133056640625, -0.01120758056640625, -0.018707275390625, 0.048126220703125, 0.036773681640625, 0.052459716796875, -0.01824951171875, 0.0576171875, -0.020416259765625, 0.048828125, -0.0247039794921875, 0.055572509765625, -0.0792236328125, -0.006114959716796875, -0.05633544921875, -0.0738525390625, -0.0107879638671875, 0.053497314453125, 0.0027675628662109375, 0.041656494140625, 0.02227783203125, 0.05181884765625, -0.00885772705078125, -0.005970001220703125, 0.00852203369140625, 0.01947021484375, 0.0208587646484375, 0.043548583984375, 0.042877197265625, -0.04351806640625, 0.0082855224609375, -0.043670654296875, -0.03216552734375, -0.042510986328125, -0.050994873046875, -0.060577392578125, -0.036346435546875, -0.022491455078125, -0.02587890625, -0.0173187255859375, 0.049163818359375, 0.032562255859375, -0.05426025390625, -0.025848388671875, -0.0011701583862304688, 0.003017425537109375, -0.01165771484375, -0.0242767333984375, 0.0015964508056640625, 0.0273895263671875, -0.0687255859375, -0.0100250244140625, 0.0159912109375, 0.0380859375, -0.0025043487548828125, -0.02203369140625, -0.01226806640625, -0.011871337890625, 0.0236053466796875, 
0.03753662109375, -0.050689697265625, 0.0019197463989257812, -0.0109405517578125, -0.01497650146484375, 0.0028247833251953125, 0.0211181640625, -0.032012939453125, 0.03753662109375, 0.042510986328125, 0.0199127197265625, 0.0240936279296875, 0.01206207275390625, 0.014434814453125, -0.0238189697265625, 0.0310211181640625, -0.007396697998046875, 0.033935546875, 0.0196685791015625, -0.03704833984375, 0.049407958984375, 0.0302276611328125, -0.0276641845703125, -0.046295166015625, 0.0157318115234375, -0.09808349609375, -0.0221710205078125, 0.0909423828125, 0.0028362274169921875, -0.0162506103515625, 0.03509521484375, -0.032440185546875, 0.0291290283203125, -0.04876708984375, 0.038726806640625, 0.0452880859375, -0.01056671142578125, -0.0187835693359375, -0.0419921875, 0.024993896484375, 0.02264404296875, -0.05462646484375, -0.0232086181640625, 0.048828125, 0.02349853515625, 0.036895751953125, 0.045928955078125, -0.0211639404296875, 0.041656494140625, -0.01351165771484375, 0.0285186767578125, -0.005558013916015625, -0.01035308837890625, -0.0284423828125, -0.023956298828125, -0.0115509033203125, -0.0258636474609375 ] ]
EleutherAI/pythia-12b-deduped
2023-06-08T13:07:07.000Z
[ "transformers", "pytorch", "gpt_neox", "text-generation", "causal-lm", "pythia", "en", "dataset:EleutherAI/the_pile_deduplicated", "arxiv:2304.01373", "arxiv:2101.00027", "arxiv:2201.07311", "license:apache-2.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
EleutherAI
null
null
EleutherAI/pythia-12b-deduped
48
8,565
transformers
2023-02-27T14:01:34
--- language: - en tags: - pytorch - causal-lm - pythia license: apache-2.0 datasets: - EleutherAI/the_pile_deduplicated --- The *Pythia Scaling Suite* is a collection of models developed to facilitate interpretability research [(see paper)](https://arxiv.org/pdf/2304.01373.pdf). It contains two sets of eight models of sizes 70M, 160M, 410M, 1B, 1.4B, 2.8B, 6.9B, and 12B. For each size, there are two models: one trained on the Pile, and one trained on the Pile after the dataset has been globally deduplicated. All 8 model sizes are trained on the exact same data, in the exact same order. We also provide 154 intermediate checkpoints per model, hosted on Hugging Face as branches. The Pythia model suite was designed to promote scientific research on large language models, especially interpretability research. Despite not centering downstream performance as a design goal, we find the models <a href="#evaluations">match or exceed</a> the performance of similar and same-sized models, such as those in the OPT and GPT-Neo suites. <details> <summary style="font-weight:600">Details on previous early release and naming convention.</summary> Previously, we released an early version of the Pythia suite to the public. However, we decided to retrain the model suite to address a few hyperparameter discrepancies. This model card <a href="#changelog">lists the changes</a>; see appendix B in the Pythia paper for further discussion. We found no difference in benchmark performance between the two Pythia versions. The old models are [still available](https://huggingface.co/models?other=pythia_v0), but we suggest the retrained suite if you are just starting to use Pythia.<br> **This is the current release.** Please note that all models in the *Pythia* suite were renamed in January 2023. For clarity, a <a href="#naming-convention-and-parameter-count">table comparing the old and new names</a> is provided in this model card, together with exact parameter counts. 
</details> <br> # Pythia-12B-deduped ## Model Details - Developed by: [EleutherAI](http://eleuther.ai) - Model type: Transformer-based Language Model - Language: English - Learn more: [Pythia's GitHub repository](https://github.com/EleutherAI/pythia) for training procedure, config files, and details on how to use. [See paper](https://arxiv.org/pdf/2304.01373.pdf) for more evals and implementation details. - Library: [GPT-NeoX](https://github.com/EleutherAI/gpt-neox) - License: Apache 2.0 - Contact: to ask questions about this model, join the [EleutherAI Discord](https://discord.gg/zBGx3azzUn), and post them in `#release-discussion`. Please read the existing *Pythia* documentation before asking about it in the EleutherAI Discord. For general correspondence: [contact@eleuther. ai](mailto:contact@eleuther.ai). <figure> | Pythia model | Non-Embedding Params | Layers | Model Dim | Heads | Batch Size | Learning Rate | Equivalent Models | | -----------: | -------------------: | :----: | :-------: | :---: | :--------: | :-------------------: | :--------------------: | | 70M | 18,915,328 | 6 | 512 | 8 | 2M | 1.0 x 10<sup>-3</sup> | — | | 160M | 85,056,000 | 12 | 768 | 12 | 2M | 6.0 x 10<sup>-4</sup> | GPT-Neo 125M, OPT-125M | | 410M | 302,311,424 | 24 | 1024 | 16 | 2M | 3.0 x 10<sup>-4</sup> | OPT-350M | | 1.0B | 805,736,448 | 16 | 2048 | 8 | 2M | 3.0 x 10<sup>-4</sup> | — | | 1.4B | 1,208,602,624 | 24 | 2048 | 16 | 2M | 2.0 x 10<sup>-4</sup> | GPT-Neo 1.3B, OPT-1.3B | | 2.8B | 2,517,652,480 | 32 | 2560 | 32 | 2M | 1.6 x 10<sup>-4</sup> | GPT-Neo 2.7B, OPT-2.7B | | 6.9B | 6,444,163,072 | 32 | 4096 | 32 | 2M | 1.2 x 10<sup>-4</sup> | OPT-6.7B | | 12B | 11,327,027,200 | 36 | 5120 | 40 | 2M | 1.2 x 10<sup>-4</sup> | — | <figcaption>Engineering details for the <i>Pythia Suite</i>. Deduped and non-deduped models of a given size have the same hyperparameters. 
“Equivalent” models have <b>exactly</b> the same architecture, and the same number of non-embedding parameters.</figcaption> </figure> ## Uses and Limitations ### Intended Use The primary intended use of Pythia is research on the behavior, functionality, and limitations of large language models. This suite is intended to provide a controlled setting for performing scientific experiments. We also provide 154 checkpoints per model: initial `step0`, 10 log-spaced checkpoints `step{1,2,4...512}`, and 143 evenly-spaced checkpoints from `step1000` to `step143000`. These checkpoints are hosted on Hugging Face as branches. Note that branch `143000` corresponds exactly to the model checkpoint on the `main` branch of each model. You may also further fine-tune and adapt Pythia-12B-deduped for deployment, as long as your use is in accordance with the Apache 2.0 license. Pythia models work with the Hugging Face [Transformers Library](https://huggingface.co/docs/transformers/index). If you decide to use pre-trained Pythia-12B-deduped as a basis for your fine-tuned model, please conduct your own risk and bias assessment. ### Out-of-scope use The Pythia Suite is **not** intended for deployment. It is not a in itself a product and cannot be used for human-facing interactions. For example, the model may generate harmful or offensive text. Please evaluate the risks associated with your particular use case. Pythia models are English-language only, and are not suitable for translation or generating text in other languages. Pythia-12B-deduped has not been fine-tuned for downstream contexts in which language models are commonly deployed, such as writing genre prose, or commercial chatbots. This means Pythia-12B-deduped will **not** respond to a given prompt the way a product like ChatGPT does. This is because, unlike this model, ChatGPT was fine-tuned using methods such as Reinforcement Learning from Human Feedback (RLHF) to better “follow” human instructions. 
### Limitations and biases The core functionality of a large language model is to take a string of text and predict the next token. The token used by the model need not produce the most “accurate” text. Never rely on Pythia-12B-deduped to produce factually accurate output. This model was trained on [the Pile](https://pile.eleuther.ai/), a dataset known to contain profanity and texts that are lewd or otherwise offensive. See [Section 6 of the Pile paper](https://arxiv.org/abs/2101.00027) for a discussion of documented biases with regards to gender, religion, and race. Pythia-12B-deduped may produce socially unacceptable or undesirable text, *even if* the prompt itself does not include anything explicitly offensive. If you plan on using text generated through, for example, the Hosted Inference API, we recommend having a human curate the outputs of this language model before presenting it to other people. Please inform your audience that the text was generated by Pythia-12B-deduped. ### Quickstart Pythia models can be loaded and used via the following code, demonstrated here for the third `pythia-70m-deduped` checkpoint: ```python from transformers import GPTNeoXForCausalLM, AutoTokenizer model = GPTNeoXForCausalLM.from_pretrained( "EleutherAI/pythia-70m-deduped", revision="step3000", cache_dir="./pythia-70m-deduped/step3000", ) tokenizer = AutoTokenizer.from_pretrained( "EleutherAI/pythia-70m-deduped", revision="step3000", cache_dir="./pythia-70m-deduped/step3000", ) inputs = tokenizer("Hello, I am", return_tensors="pt") tokens = model.generate(**inputs) tokenizer.decode(tokens[0]) ``` Revision/branch `step143000` corresponds exactly to the model checkpoint on the `main` branch of each model.<br> For more information on how to use all Pythia models, see [documentation on GitHub](https://github.com/EleutherAI/pythia). 
## Training ### Training data Pythia-12B-deduped was trained on the Pile **after the dataset has been globally deduplicated**.<br> [The Pile](https://pile.eleuther.ai/) is a 825GiB general-purpose dataset in English. It was created by EleutherAI specifically for training large language models. It contains texts from 22 diverse sources, roughly broken down into five categories: academic writing (e.g. arXiv), internet (e.g. CommonCrawl), prose (e.g. Project Gutenberg), dialogue (e.g. YouTube subtitles), and miscellaneous (e.g. GitHub, Enron Emails). See [the Pile paper](https://arxiv.org/abs/2101.00027) for a breakdown of all data sources, methodology, and a discussion of ethical implications. Consult [the datasheet](https://arxiv.org/abs/2201.07311) for more detailed documentation about the Pile and its component datasets. The Pile can be downloaded from the [official website](https://pile.eleuther.ai/), or from a [community mirror](https://the-eye.eu/public/AI/pile/). ### Training procedure All models were trained on the exact same data, in the exact same order. Each model saw 299,892,736,000 tokens during training, and 143 checkpoints for each model are saved every 2,097,152,000 tokens, spaced evenly throughout training, from `step1000` to `step143000` (which is the same as `main`). In addition, we also provide frequent early checkpoints: `step0` and `step{1,2,4...512}`. This corresponds to training for just under 1 epoch on the Pile for non-deduplicated models, and about 1.5 epochs on the deduplicated Pile. All *Pythia* models trained for 143000 steps at a batch size of 2M (2,097,152 tokens).<br> See [GitHub](https://github.com/EleutherAI/pythia) for more details on training procedure, including [how to reproduce it](https://github.com/EleutherAI/pythia/blob/main/README.md#reproducing-training).<br> Pythia uses the same tokenizer as [GPT-NeoX- 20B](https://huggingface.co/EleutherAI/gpt-neox-20b). 
## Evaluations All 16 *Pythia* models were evaluated using the [LM Evaluation Harness](https://github.com/EleutherAI/lm-evaluation-harness). You can access the results by model and step at `results/json/*` in the [GitHub repository](https://github.com/EleutherAI/pythia/tree/main/results/json/).<br> Expand the sections below to see plots of evaluation results for all Pythia and Pythia-deduped models compared with OPT and BLOOM. <details> <summary>LAMBADA – OpenAI</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/lambada_openai_v1.png" style="width:auto"/> </details> <details> <summary>Physical Interaction: Question Answering (PIQA)</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/piqa_v1.png" style="width:auto"/> </details> <details> <summary>WinoGrande</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/winogrande_v1.png" style="width:auto"/> </details> <details> <summary>AI2 Reasoning Challenge—Easy Set</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/arc_easy_v1.png" style="width:auto"/> </details> <details> <summary>SciQ</summary> <img src="/EleutherAI/pythia-12b/resolve/main/eval_plots/sciq_v1.png" style="width:auto"/> </details> ## Changelog This section compares differences between previously released [Pythia v0](https://huggingface.co/models?other=pythia_v0) and the current models. See Appendix B of the Pythia paper for further discussion of these changes and the motivation behind them. We found that retraining Pythia had no impact on benchmark performance. - All model sizes are now trained with uniform batch size of 2M tokens. Previously, the models of size 160M, 410M, and 1.4B parameters were trained with batch sizes of 4M tokens. - We added checkpoints at initialization (step 0) and steps {1,2,4,8,16,32,64, 128,256,512} in addition to every 1000 training steps. - Flash Attention was used in the new retrained suite. 
- We remedied a minor inconsistency that existed in the original suite: all models of size 2.8B parameters or smaller had a learning rate (LR) schedule which decayed to a minimum LR of 10% the starting LR rate, but the 6.9B and 12B models all used an LR schedule which decayed to a minimum LR of 0. In the redone training runs, we rectified this inconsistency: all models now were trained with LR decaying to a minimum of 0.1× their maximum LR. ### Naming convention and parameter count *Pythia* models were renamed in January 2023. It is possible that the old naming convention still persists in some documentation by accident. The current naming convention (70M, 160M, etc.) is based on total parameter count. <figure style="width:32em"> | current Pythia suffix | old suffix | total params | non-embedding params | | --------------------: | ---------: | -------------: | -------------------: | | 70M | 19M | 70,426,624 | 18,915,328 | | 160M | 125M | 162,322,944 | 85,056,000 | | 410M | 350M | 405,334,016 | 302,311,424 | | 1B | 800M | 1,011,781,632 | 805,736,448 | | 1.4B | 1.3B | 1,414,647,808 | 1,208,602,624 | | 2.8B | 2.7B | 2,775,208,960 | 2,517,652,480 | | 6.9B | 6.7B | 6,857,302,016 | 6,444,163,072 | | 12B | 13B | 11,846,072,320 | 11,327,027,200 | </figure>
13,654
[ [ -0.0252838134765625, -0.060150146484375, 0.0251922607421875, 0.0060882568359375, -0.017364501953125, -0.01355743408203125, -0.0173187255859375, -0.033905029296875, 0.01486968994140625, 0.0107879638671875, -0.0279388427734375, -0.0218048095703125, -0.033172607421875, -0.003032684326171875, -0.034088134765625, 0.08258056640625, -0.00698089599609375, -0.01035308837890625, 0.01043701171875, -0.004093170166015625, -0.006916046142578125, -0.038421630859375, -0.0307769775390625, -0.0311431884765625, 0.047271728515625, 0.013763427734375, 0.06573486328125, 0.043731689453125, 0.0145111083984375, 0.0222625732421875, -0.0281982421875, -0.0050048828125, -0.0113525390625, -0.007427215576171875, -0.0037899017333984375, -0.016693115234375, -0.053192138671875, 0.0031719207763671875, 0.0523681640625, 0.052520751953125, -0.01352691650390625, 0.0184783935546875, -0.0004591941833496094, 0.02752685546875, -0.038604736328125, 0.0027866363525390625, -0.0263519287109375, -0.0177154541015625, -0.0050506591796875, 0.01197052001953125, -0.02960205078125, -0.0273590087890625, 0.0318603515625, -0.0472412109375, 0.0184326171875, 0.007564544677734375, 0.09088134765625, -0.007965087890625, -0.031158447265625, -0.006053924560546875, -0.055206298828125, 0.05023193359375, -0.052337646484375, 0.024169921875, 0.0209808349609375, 0.0130767822265625, -0.0018739700317382812, -0.06683349609375, -0.041168212890625, -0.015411376953125, -0.011322021484375, -0.002239227294921875, -0.048736572265625, 0.0014095306396484375, 0.0372314453125, 0.050567626953125, -0.061614990234375, -0.0032367706298828125, -0.0282440185546875, -0.0258941650390625, 0.0252532958984375, 0.005096435546875, 0.033538818359375, -0.0233306884765625, -0.0005016326904296875, -0.02862548828125, -0.0517578125, -0.0198516845703125, 0.0413818359375, 0.0054168701171875, -0.026885986328125, 0.0389404296875, -0.0266876220703125, 0.042083740234375, -0.004795074462890625, 0.0192718505859375, 0.031982421875, -0.013336181640625, -0.039764404296875, 
-0.006893157958984375, 0.0689697265625, 0.0084075927734375, 0.0164337158203125, -0.001537322998046875, -0.0038814544677734375, 0.00931549072265625, 0.0034999847412109375, -0.0858154296875, -0.06146240234375, 0.016143798828125, -0.031005859375, -0.03326416015625, -0.014495849609375, -0.07098388671875, -0.016204833984375, -0.0162353515625, 0.044158935546875, -0.039306640625, -0.053741455078125, -0.007678985595703125, 0.0010709762573242188, 0.01535797119140625, 0.0279388427734375, -0.0694580078125, 0.033203125, 0.0305633544921875, 0.07293701171875, 0.0161895751953125, -0.041412353515625, -0.01486968994140625, -0.0199432373046875, -0.00977325439453125, 0.0284576416015625, -0.01250457763671875, -0.01446533203125, -0.01032257080078125, 0.013214111328125, -0.0101470947265625, -0.0281982421875, 0.0271453857421875, -0.031982421875, 0.02001953125, -0.0205841064453125, -0.033721923828125, -0.0283203125, 0.009124755859375, -0.045623779296875, 0.06317138671875, 0.017486572265625, -0.07244873046875, 0.0160064697265625, -0.0159454345703125, -0.00469970703125, -0.0016012191772460938, 0.01294708251953125, -0.050323486328125, 0.0021114349365234375, 0.025848388671875, 0.0034122467041015625, -0.032745361328125, 0.0175018310546875, -0.0170440673828125, -0.032379150390625, 0.0126190185546875, -0.042083740234375, 0.0701904296875, 0.0156402587890625, -0.04913330078125, 0.021575927734375, -0.04791259765625, 0.01418304443359375, 0.0186767578125, -0.028594970703125, 0.002254486083984375, -0.0165252685546875, 0.0282440185546875, 0.01363372802734375, 0.0119171142578125, -0.02874755859375, 0.0226287841796875, -0.036865234375, 0.0546875, 0.056610107421875, -0.0030670166015625, 0.0360107421875, -0.0307159423828125, 0.03497314453125, 0.0015344619750976562, 0.0130615234375, -0.005100250244140625, -0.046661376953125, -0.07208251953125, -0.0250701904296875, 0.0287628173828125, 0.02349853515625, -0.035400390625, 0.035736083984375, -0.01739501953125, -0.06768798828125, -0.0125274658203125, 
-0.0083160400390625, 0.03143310546875, 0.02410888671875, 0.033447265625, -0.01145172119140625, -0.03863525390625, -0.06549072265625, -0.01519775390625, -0.032867431640625, 0.01030731201171875, 0.01364898681640625, 0.0716552734375, -0.01137542724609375, 0.041534423828125, -0.02862548828125, 0.0168609619140625, -0.02911376953125, 0.012298583984375, 0.0321044921875, 0.0474853515625, 0.02825927734375, -0.040130615234375, -0.0270843505859375, -0.0005774497985839844, -0.04296875, 0.00537872314453125, -0.00013375282287597656, -0.0251312255859375, 0.0223541259765625, 0.005306243896484375, -0.07464599609375, 0.037353515625, 0.0465087890625, -0.043212890625, 0.05902099609375, -0.0258026123046875, -0.00033664703369140625, -0.07891845703125, 0.0207366943359375, 0.00844573974609375, -0.01493072509765625, -0.04193115234375, 0.0046844482421875, 0.0166015625, -0.0166168212890625, -0.0272979736328125, 0.0440673828125, -0.04095458984375, -0.01256561279296875, -0.0167999267578125, 0.0035839080810546875, -0.0021228790283203125, 0.047332763671875, 0.01253509521484375, 0.0428466796875, 0.060394287109375, -0.059051513671875, 0.0330810546875, 0.0180511474609375, -0.0234375, 0.027740478515625, -0.06689453125, 0.00937652587890625, 0.005977630615234375, 0.0305938720703125, -0.048614501953125, -0.026275634765625, 0.039947509765625, -0.0438232421875, 0.0119781494140625, -0.0304718017578125, -0.04022216796875, -0.0313720703125, -0.01214599609375, 0.04473876953125, 0.0589599609375, -0.0467529296875, 0.0523681640625, 0.004459381103515625, 0.011627197265625, -0.027374267578125, -0.04302978515625, -0.0207672119140625, -0.04022216796875, -0.050323486328125, 0.0322265625, 0.01311492919921875, -0.01424407958984375, 0.0028533935546875, -0.001621246337890625, 0.00885772705078125, -0.00357818603515625, 0.0240325927734375, 0.02789306640625, -0.0022983551025390625, 0.0016412734985351562, -0.01149749755859375, -0.0102081298828125, -0.00012099742889404297, -0.034912109375, 0.07537841796875, -0.0224609375, 
-0.01428985595703125, -0.061370849609375, -0.0021114349365234375, 0.06683349609375, -0.0323486328125, 0.06805419921875, 0.045318603515625, -0.054412841796875, 0.0119476318359375, -0.0290985107421875, -0.02337646484375, -0.033233642578125, 0.046905517578125, -0.0205078125, -0.025299072265625, 0.04791259765625, 0.0194854736328125, 0.019012451171875, 0.043060302734375, 0.055389404296875, 0.0186614990234375, 0.0872802734375, 0.033843994140625, -0.01213836669921875, 0.046844482421875, -0.039276123046875, 0.0184326171875, -0.0823974609375, -0.015838623046875, -0.040924072265625, -0.02117919921875, -0.07135009765625, -0.0220489501953125, 0.02301025390625, 0.01568603515625, -0.059051513671875, 0.04473876953125, -0.0423583984375, 0.004817962646484375, 0.04974365234375, 0.0178680419921875, 0.0148468017578125, 0.0157318115234375, 0.0046539306640625, -0.005222320556640625, -0.0478515625, -0.024383544921875, 0.0894775390625, 0.036346435546875, 0.051422119140625, 0.022003173828125, 0.05364990234375, -0.01148223876953125, 0.015838623046875, -0.052490234375, 0.03277587890625, 0.0273895263671875, -0.05487060546875, -0.01515960693359375, -0.0587158203125, -0.072265625, 0.035858154296875, 0.006725311279296875, -0.08319091796875, 0.0196380615234375, 0.01593017578125, -0.027435302734375, 0.03558349609375, -0.04815673828125, 0.0755615234375, -0.0152587890625, -0.035858154296875, -0.0281982421875, -0.0227813720703125, 0.0179290771484375, 0.0286865234375, 0.0098724365234375, 0.0078125, 0.023681640625, 0.072998046875, -0.04913330078125, 0.049835205078125, -0.0103759765625, 0.01007080078125, 0.026702880859375, 0.0236358642578125, 0.049560546875, 0.011871337890625, 0.01169586181640625, -0.005138397216796875, 0.010894775390625, -0.039886474609375, -0.026885986328125, 0.069580078125, -0.083740234375, -0.02764892578125, -0.061126708984375, -0.0438232421875, 0.00701904296875, 0.0145111083984375, 0.0307769775390625, 0.051300048828125, -0.0019931793212890625, 0.003566741943359375, 
0.044525146484375, -0.0423583984375, 0.027984619140625, 0.017730712890625, -0.0372314453125, -0.039764404296875, 0.07525634765625, 0.0018129348754882812, 0.0252532958984375, 0.0023136138916015625, 0.01824951171875, -0.0299530029296875, -0.0318603515625, -0.045196533203125, 0.0426025390625, -0.05401611328125, -0.001979827880859375, -0.053741455078125, -0.00357818603515625, -0.03472900390625, 0.008880615234375, -0.031829833984375, -0.0294036865234375, -0.0167388916015625, -0.0026988983154296875, 0.04510498046875, 0.03485107421875, 0.005191802978515625, 0.0254058837890625, -0.040679931640625, -0.0007815361022949219, 0.017059326171875, 0.00844573974609375, 0.00927734375, -0.0701904296875, -0.005649566650390625, 0.0069122314453125, -0.033538818359375, -0.08502197265625, 0.0389404296875, -0.00567626953125, 0.026885986328125, 0.0032291412353515625, -0.016815185546875, 0.0474853515625, -0.007167816162109375, 0.04931640625, 0.0114593505859375, -0.07818603515625, 0.04248046875, -0.03558349609375, 0.0233612060546875, 0.0255584716796875, 0.0273590087890625, -0.05474853515625, -0.004940032958984375, -0.07574462890625, -0.08099365234375, 0.056976318359375, 0.03826904296875, 0.0124359130859375, 0.005054473876953125, 0.027069091796875, -0.033599853515625, 0.0102081298828125, -0.076904296875, -0.022430419921875, -0.0165252685546875, -0.00592803955078125, 0.0157012939453125, -0.0026187896728515625, 0.00505828857421875, -0.042816162109375, 0.078125, 0.006290435791015625, 0.02667236328125, 0.020294189453125, -0.0296173095703125, -0.006622314453125, -0.002986907958984375, 0.0095672607421875, 0.05682373046875, -0.01204681396484375, 0.005168914794921875, 0.01367950439453125, -0.0435791015625, 0.003936767578125, 0.0130615234375, -0.028167724609375, -0.006717681884765625, 0.0133514404296875, 0.063720703125, 0.00942230224609375, -0.0313720703125, 0.016265869140625, -0.003803253173828125, -0.0054779052734375, -0.02105712890625, -0.0145111083984375, 0.0144805908203125, 0.0155181884765625, 
-0.0020599365234375, -0.01049041748046875, -0.0009698867797851562, -0.066162109375, 0.0034160614013671875, 0.01528167724609375, -0.0091400146484375, -0.03094482421875, 0.0445556640625, 0.004283905029296875, -0.01268768310546875, 0.0850830078125, -0.0185699462890625, -0.04791259765625, 0.05780029296875, 0.0389404296875, 0.0531005859375, -0.0144500732421875, 0.0253753662109375, 0.06744384765625, 0.0254669189453125, -0.01593017578125, 0.00664520263671875, 0.0090789794921875, -0.04205322265625, -0.00909423828125, -0.06005859375, -0.016937255859375, 0.02252197265625, -0.046356201171875, 0.034881591796875, -0.04888916015625, -0.004039764404296875, -0.0009307861328125, 0.017059326171875, -0.043701171875, 0.026153564453125, 0.01297760009765625, 0.054779052734375, -0.0689697265625, 0.0631103515625, 0.0491943359375, -0.0572509765625, -0.08294677734375, 0.0008111000061035156, 0.0025463104248046875, -0.035430908203125, 0.01383209228515625, 0.0164337158203125, 0.01436614990234375, 0.0132598876953125, -0.0208892822265625, -0.0657958984375, 0.09832763671875, 0.017608642578125, -0.04925537109375, -0.0206451416015625, -0.01114654541015625, 0.0389404296875, 0.00428009033203125, 0.052490234375, 0.0557861328125, 0.030609130859375, 0.007678985595703125, -0.0780029296875, 0.0283050537109375, -0.02386474609375, -0.004657745361328125, 0.01708984375, -0.050567626953125, 0.0970458984375, -0.0031986236572265625, -0.00140380859375, 0.0303497314453125, 0.04302978515625, 0.0305328369140625, -0.0108642578125, 0.0271453857421875, 0.058380126953125, 0.0672607421875, -0.027740478515625, 0.092041015625, -0.02325439453125, 0.057342529296875, 0.064697265625, 0.01325225830078125, 0.036407470703125, 0.0290985107421875, -0.02874755859375, 0.0389404296875, 0.06195068359375, -0.007232666015625, 0.0159759521484375, 0.0202789306640625, -0.02117919921875, -0.0218963623046875, 0.01003265380859375, -0.0460205078125, 0.01459503173828125, 0.0129241943359375, -0.04449462890625, -0.0172882080078125, 
-0.0250701904296875, 0.0255889892578125, -0.0273284912109375, -0.0169219970703125, 0.019805908203125, 0.005870819091796875, -0.04827880859375, 0.047332763671875, 0.01953125, 0.04290771484375, -0.03350830078125, 0.012054443359375, -0.01367950439453125, 0.0277099609375, -0.0251312255859375, -0.032684326171875, 0.009429931640625, 0.002246856689453125, 0.00424957275390625, 0.00693511962890625, 0.03472900390625, -0.00914764404296875, -0.04339599609375, 0.016632080078125, 0.0377197265625, 0.0199737548828125, -0.034912109375, -0.04974365234375, 0.0095672607421875, -0.01076507568359375, -0.040985107421875, 0.033447265625, 0.0226287841796875, -0.0086822509765625, 0.044769287109375, 0.0457763671875, 0.003078460693359375, -0.0015687942504882812, 0.01290130615234375, 0.0760498046875, -0.0369873046875, -0.0325927734375, -0.07098388671875, 0.0374755859375, 0.0008344650268554688, -0.049530029296875, 0.0633544921875, 0.042694091796875, 0.05340576171875, 0.0168609619140625, 0.0433349609375, -0.035186767578125, 0.0022678375244140625, -0.0214385986328125, 0.050018310546875, -0.03564453125, 0.006175994873046875, -0.039520263671875, -0.08746337890625, -0.00534820556640625, 0.07427978515625, -0.039459228515625, 0.027740478515625, 0.061614990234375, 0.06195068359375, -0.006359100341796875, 0.007701873779296875, 0.004444122314453125, 0.020843505859375, 0.040802001953125, 0.06854248046875, 0.0682373046875, -0.053466796875, 0.044158935546875, -0.0390625, -0.0205078125, -0.0095672607421875, -0.03778076171875, -0.0650634765625, -0.034088134765625, -0.039459228515625, -0.055999755859375, -0.0038585662841796875, 0.06787109375, 0.053741455078125, -0.046295166015625, -0.011322021484375, -0.041534423828125, 0.0027484893798828125, -0.0198211669921875, -0.0177459716796875, 0.03265380859375, 0.0087738037109375, -0.07135009765625, -0.00196075439453125, -0.01139068603515625, 0.00856781005859375, -0.0291748046875, -0.0194549560546875, -0.0157470703125, -0.0075531005859375, 0.00524139404296875, 
0.0194091796875, -0.037994384765625, -0.0198516845703125, 0.00336456298828125, 0.003612518310546875, -0.0014905929565429688, 0.052001953125, -0.046051025390625, 0.005962371826171875, 0.046661376953125, 0.0114898681640625, 0.06103515625, -0.0189361572265625, 0.0308074951171875, -0.0193023681640625, 0.0258941650390625, 0.0200653076171875, 0.047607421875, 0.0255584716796875, -0.0185546875, 0.015655517578125, 0.034027099609375, -0.05615234375, -0.0650634765625, 0.0267333984375, -0.05450439453125, -0.0086822509765625, 0.09613037109375, -0.020233154296875, -0.02850341796875, 0.006923675537109375, -0.017547607421875, 0.03875732421875, -0.0214996337890625, 0.050750732421875, 0.048553466796875, 0.006031036376953125, -0.01454925537109375, -0.045196533203125, 0.0269927978515625, 0.048370361328125, -0.06207275390625, 0.0302276611328125, 0.04852294921875, 0.04510498046875, 0.0189208984375, 0.042510986328125, -0.0245361328125, 0.04443359375, 0.00823974609375, 0.0063323974609375, 0.003452301025390625, -0.037109375, -0.031585693359375, -0.01122283935546875, 0.01605224609375, -0.0014019012451171875 ] ]
cerspense/zeroscope_v2_XL
2023-07-03T21:13:09.000Z
[ "diffusers", "video-to-video", "license:cc-by-nc-4.0", "has_space", "diffusers:VideoToVideoSDPipeline", "region:us" ]
null
cerspense
null
null
cerspense/zeroscope_v2_XL
452
8,565
diffusers
2023-06-21T23:29:21
--- pipeline_tag: video-to-video license: cc-by-nc-4.0 --- ![model example](https://i.imgur.com/ze1DGOJ.png) [example outputs](https://www.youtube.com/watch?v=HO3APT_0UA4) (courtesy of [dotsimulate](https://www.instagram.com/dotsimulate/)) # zeroscope_v2 XL A watermark-free Modelscope-based video model capable of generating high quality video at 1024 x 576. This model was trained from the [original weights](https://huggingface.co/damo-vilab/modelscope-damo-text-to-video-synthesis) with offset noise using 9,923 clips and 29,769 tagged frames at 24 frames, 1024x576 resolution.<br /> zeroscope_v2_XL is specifically designed for upscaling content made with [zeroscope_v2_576w](https://huggingface.co/cerspense/zeroscope_v2_567w) using vid2vid in the [1111 text2video](https://github.com/kabachuha/sd-webui-text2video) extension by [kabachuha](https://github.com/kabachuha). Leveraging this model as an upscaler allows for superior overall compositions at higher resolutions, permitting faster exploration in 576x320 (or 448x256) before transitioning to a high-resolution render.<br /> zeroscope_v2_XL uses 15.3gb of vram when rendering 30 frames at 1024x576 ### Using it with the 1111 text2video extension 1. Download files in the zs2_XL folder. 2. Replace the respective files in the 'stable-diffusion-webui\models\ModelScope\t2v' directory. ### Upscaling recommendations For upscaling, it's recommended to use the 1111 extension. It works best at 1024x576 with a denoise strength between 0.66 and 0.85. Remember to use the same prompt that was used to generate the original clip. ### Usage in 🧨 Diffusers Let's first install the libraries required: ```bash $ pip install git+https://github.com/huggingface/diffusers.git $ pip install transformers accelerate torch ``` Now, let's first generate a low resolution video using [cerspense/zeroscope_v2_576w](https://huggingface.co/cerspense/zeroscope_v2_576w). 
```py import torch from diffusers import DiffusionPipeline, DPMSolverMultistepScheduler from diffusers.utils import export_to_video pipe = DiffusionPipeline.from_pretrained("cerspense/zeroscope_v2_576w", torch_dtype=torch.float16) pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config) pipe.enable_model_cpu_offload() pipe.enable_vae_slicing() pipe.unet.enable_forward_chunking(chunk_size=1, dim=1) # disable if enough memory as this slows down significantly prompt = "Darth Vader is surfing on waves" video_frames = pipe(prompt, num_inference_steps=40, height=320, width=576, num_frames=36).frames video_path = export_to_video(video_frames) ``` Next, we can upscale it using [cerspense/zeroscope_v2_XL](https://huggingface.co/cerspense/zeroscope_v2_XL). ```py pipe = DiffusionPipeline.from_pretrained("cerspense/zeroscope_v2_XL", torch_dtype=torch.float16) pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config) pipe.enable_model_cpu_offload() pipe.enable_vae_slicing() video = [Image.fromarray(frame).resize((1024, 576)) for frame in video_frames] video_frames = pipe(prompt, video=video, strength=0.6).frames video_path = export_to_video(video_frames, output_video_path="/home/patrick/videos/video_1024_darth_vader_36.mp4") ``` Here are some results: <table> <tr> Darth vader is surfing on waves. <br> <img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/diffusers/darth_vader_36_1024.gif" alt="Darth vader surfing in waves." style="width: 576;" /> </center></td> </tr> </table> ### Known issues Rendering at lower resolutions or fewer than 24 frames could lead to suboptimal outputs. 
<br /> Thanks to [camenduru](https://github.com/camenduru), [kabachuha](https://github.com/kabachuha), [ExponentialML](https://github.com/ExponentialML), [dotsimulate](https://www.instagram.com/dotsimulate/), [VANYA](https://twitter.com/veryVANYA), [polyware](https://twitter.com/polyware_ai), [tin2tin](https://github.com/tin2tin)<br />
4,006
[ [ -0.043243408203125, -0.050506591796875, 0.040802001953125, 0.0008392333984375, -0.038360595703125, -0.019561767578125, 0.0016241073608398438, -0.00861358642578125, 0.02618408203125, 0.0229034423828125, -0.037567138671875, -0.0266571044921875, -0.0556640625, -0.0259552001953125, -0.02362060546875, 0.06195068359375, -0.004825592041015625, -0.0076751708984375, -0.007434844970703125, -0.0003046989440917969, -0.023956298828125, -0.0078277587890625, -0.0198822021484375, -0.0177154541015625, -0.0021228790283203125, 0.053985595703125, 0.045623779296875, 0.0567626953125, 0.0290985107421875, 0.021453857421875, -0.00701904296875, 0.003696441650390625, -0.0413818359375, -0.00347137451171875, 0.00710296630859375, -0.01117706298828125, -0.04058837890625, -0.007312774658203125, 0.0638427734375, 0.0008444786071777344, -0.00215911865234375, 0.041961669921875, -0.007526397705078125, 0.0443115234375, -0.051605224609375, 0.0016956329345703125, -0.006561279296875, -0.0016679763793945312, -0.01140594482421875, -0.01983642578125, -0.0095062255859375, 0.005580902099609375, -0.0003972053527832031, -0.0638427734375, 0.0328369140625, -0.02459716796875, 0.107421875, 0.02667236328125, -0.030548095703125, 0.027313232421875, -0.054473876953125, 0.058837890625, -0.0589599609375, 0.03143310546875, -0.00006890296936035156, 0.018890380859375, 0.007175445556640625, -0.0628662109375, -0.029266357421875, 0.004283905029296875, 0.015716552734375, 0.042236328125, -0.036346435546875, 0.0038471221923828125, 0.035430908203125, 0.03338623046875, -0.04888916015625, -0.00336456298828125, -0.03759765625, 0.0026226043701171875, 0.048736572265625, 0.01168060302734375, 0.019378662109375, -0.024169921875, -0.026123046875, -0.04119873046875, -0.0156402587890625, 0.00844573974609375, 0.00933074951171875, -0.0187530517578125, -0.043243408203125, 0.047119140625, -0.02020263671875, 0.041595458984375, 0.02789306640625, -0.018035888671875, 0.0240936279296875, -0.01020050048828125, -0.039337158203125, 
-0.0071868896484375, 0.05267333984375, 0.05743408203125, 0.0004017353057861328, 0.033721923828125, 0.01007080078125, 0.028045654296875, 0.01171875, -0.09698486328125, -0.017974853515625, 0.0302276611328125, -0.0279388427734375, -0.013885498046875, -0.00705718994140625, -0.0882568359375, 0.006748199462890625, -0.0005884170532226562, 0.048126220703125, -0.02899169921875, -0.038482666015625, 0.00885772705078125, -0.038818359375, 0.016815185546875, 0.0258941650390625, -0.05645751953125, 0.031951904296875, 0.0170440673828125, 0.07464599609375, 0.0229949951171875, -0.0007581710815429688, -0.050994873046875, 0.005344390869140625, -0.01910400390625, 0.03411865234375, -0.0038242340087890625, -0.0362548828125, -0.0197601318359375, 0.0050811767578125, 0.02288818359375, -0.0276336669921875, 0.03857421875, -0.02117919921875, 0.012969970703125, -0.003711700439453125, -0.043975830078125, -0.01494598388671875, -0.00449371337890625, -0.013397216796875, 0.0755615234375, 0.0289306640625, -0.06011962890625, 0.0028591156005859375, -0.039093017578125, 0.003437042236328125, -0.0014905929565429688, -0.0117034912109375, -0.04852294921875, 0.006084442138671875, 0.006671905517578125, 0.0197601318359375, -0.0159454345703125, -0.01248931884765625, -0.0225677490234375, -0.03228759765625, 0.002483367919921875, -0.038055419921875, 0.06195068359375, 0.0213775634765625, -0.04400634765625, 0.00975799560546875, -0.062744140625, 0.00821685791015625, 0.0152587890625, 0.00635528564453125, 0.0006184577941894531, -0.01788330078125, 0.00836944580078125, 0.01438140869140625, 0.0027980804443359375, -0.054473876953125, -0.001617431640625, -0.038604736328125, 0.037689208984375, 0.046630859375, 0.0022144317626953125, 0.03912353515625, -0.01383209228515625, 0.039215087890625, 0.00604248046875, 0.034088134765625, -0.0074310302734375, -0.05548095703125, -0.0626220703125, -0.015625, -0.005794525146484375, 0.0298309326171875, -0.04791259765625, -0.0019969940185546875, -0.0186614990234375, -0.0694580078125, 
-0.03814697265625, 0.00885772705078125, 0.0321044921875, 0.06048583984375, 0.038360595703125, -0.0614013671875, -0.039306640625, -0.053375244140625, 0.0301055908203125, -0.01165771484375, -0.040557861328125, 0.0291900634765625, 0.032623291015625, -0.0006480216979980469, 0.05255126953125, -0.0693359375, -0.035614013671875, 0.01039886474609375, -0.00473785400390625, 0.0298309326171875, 0.0225372314453125, 0.061004638671875, -0.04998779296875, -0.044189453125, -0.0023822784423828125, -0.062103271484375, 0.0016937255859375, 0.01288604736328125, -0.007556915283203125, 0.00812530517578125, 0.0131072998046875, -0.054290771484375, 0.0350341796875, 0.053497314453125, -0.03875732421875, 0.050140380859375, -0.047210693359375, 0.0118560791015625, -0.07952880859375, -0.002040863037109375, 0.032989501953125, -0.0248565673828125, -0.04296875, 0.004924774169921875, 0.007114410400390625, -0.007354736328125, -0.048095703125, 0.034637451171875, -0.0350341796875, -0.0014276504516601562, -0.0109100341796875, 0.01611328125, 0.0166015625, 0.03228759765625, 0.00862884521484375, 0.042266845703125, 0.057220458984375, -0.03790283203125, 0.044403076171875, 0.020355224609375, -0.0006113052368164062, 0.0411376953125, -0.07177734375, -0.01123046875, -0.01067352294921875, -0.00002110004425048828, -0.0699462890625, -0.039093017578125, 0.0192413330078125, -0.06903076171875, 0.0238494873046875, -0.0238189697265625, -0.015838623046875, -0.0309906005859375, -0.05853271484375, 0.0181427001953125, 0.07745361328125, -0.025421142578125, 0.0238494873046875, 0.033172607421875, 0.0245208740234375, -0.033294677734375, -0.0693359375, -0.01045989990234375, -0.017364501953125, -0.0537109375, 0.048675537109375, -0.0030155181884765625, -0.0163116455078125, 0.014404296875, -0.00243377685546875, -0.002246856689453125, -0.03399658203125, 0.042572021484375, 0.042388916015625, -0.0202789306640625, -0.03265380859375, -0.0026035308837890625, -0.0135498046875, -0.0045166015625, -0.018218994140625, 0.0271759033203125, 
-0.01018524169921875, -0.0051422119140625, -0.05047607421875, 0.0105438232421875, 0.048797607421875, 0.01039886474609375, 0.015411376953125, 0.0806884765625, -0.0207366943359375, 0.01186370849609375, -0.037841796875, -0.0224151611328125, -0.042510986328125, 0.0235595703125, -0.004833221435546875, -0.049530029296875, 0.033721923828125, 0.013031005859375, -0.0035610198974609375, 0.04205322265625, 0.050506591796875, -0.0162200927734375, 0.07421875, 0.03912353515625, 0.0236053466796875, 0.049713134765625, -0.06585693359375, -0.01395416259765625, -0.064208984375, -0.0066680908203125, -0.0007195472717285156, -0.005588531494140625, -0.04998779296875, -0.057708740234375, 0.0472412109375, 0.0190582275390625, -0.0396728515625, 0.041839599609375, -0.062103271484375, 0.01189422607421875, 0.03759765625, 0.0031299591064453125, 0.004886627197265625, 0.0186920166015625, 0.0037860870361328125, -0.03369140625, -0.04254150390625, -0.0233154296875, 0.07269287109375, 0.01904296875, 0.04522705078125, 0.008636474609375, 0.03411865234375, 0.015228271484375, -0.00659942626953125, -0.036834716796875, 0.04412841796875, -0.0174713134765625, -0.042877197265625, -0.007099151611328125, -0.0160369873046875, -0.058868408203125, 0.01995849609375, -0.0219268798828125, -0.060150146484375, 0.0166015625, 0.0164947509765625, -0.0283203125, 0.045867919921875, -0.06402587890625, 0.050048828125, -0.00016248226165771484, -0.068359375, -0.01070404052734375, -0.054840087890625, 0.016448974609375, 0.027374267578125, 0.00836181640625, -0.0020580291748046875, -0.0086822509765625, 0.055389404296875, -0.04107666015625, 0.04437255859375, -0.0250701904296875, 0.01202392578125, 0.048309326171875, 0.004001617431640625, 0.006053924560546875, 0.028076171875, 0.00983428955078125, 0.02947998046875, 0.0213775634765625, -0.03790283203125, -0.031524658203125, 0.0679931640625, -0.0771484375, -0.030059814453125, -0.037322998046875, -0.0153045654296875, 0.023193359375, 0.0110626220703125, 0.037933349609375, 0.041259765625, 
0.0024776458740234375, -0.004352569580078125, 0.04290771484375, -0.004150390625, 0.045440673828125, 0.036773681640625, -0.0214691162109375, -0.053741455078125, 0.0693359375, 0.0245361328125, 0.031646728515625, 0.0111083984375, -0.0027370452880859375, -0.0097808837890625, -0.0113983154296875, -0.0616455078125, 0.02203369140625, -0.020843505859375, -0.040618896484375, -0.0222930908203125, -0.029083251953125, -0.04718017578125, -0.020538330078125, -0.05841064453125, -0.0300750732421875, -0.05084228515625, 0.0027370452880859375, 0.056976318359375, 0.0330810546875, -0.0374755859375, 0.0225982666015625, -0.049407958984375, 0.035675048828125, 0.032196044921875, 0.0186920166015625, -0.0031070709228515625, -0.05633544921875, -0.01256561279296875, 0.006046295166015625, -0.062744140625, -0.05670166015625, 0.056793212890625, -0.0030727386474609375, 0.007572174072265625, 0.0430908203125, -0.012298583984375, 0.06805419921875, -0.0116424560546875, 0.08251953125, 0.037109375, -0.06756591796875, 0.0504150390625, -0.0303497314453125, 0.030059814453125, -0.0004901885986328125, 0.0240936279296875, -0.0384521484375, -0.00942230224609375, -0.058197021484375, -0.07830810546875, 0.0557861328125, 0.039306640625, 0.00946044921875, 0.0191192626953125, 0.0285797119140625, -0.0130767822265625, -0.0216217041015625, -0.0203857421875, -0.0396728515625, -0.03985595703125, 0.0146026611328125, -0.01476287841796875, -0.027313232421875, 0.0033664703369140625, -0.04583740234375, 0.052215576171875, 0.00028824806213378906, 0.034454345703125, 0.06524658203125, 0.00547027587890625, -0.04290771484375, 0.0042572021484375, 0.0322265625, 0.03826904296875, -0.053131103515625, -0.004070281982421875, 0.0095672607421875, -0.047454833984375, 0.0193634033203125, 0.00669097900390625, -0.0255126953125, 0.027496337890625, 0.0067596435546875, 0.05999755859375, 0.0030536651611328125, -0.04095458984375, 0.048248291015625, -0.005901336669921875, -0.034332275390625, -0.038909912109375, 0.007175445556640625, 
0.00843048095703125, 0.023956298828125, 0.02880859375, 0.006130218505859375, 0.005458831787109375, -0.0210113525390625, 0.01904296875, 0.012359619140625, -0.0272064208984375, -0.04083251953125, 0.09771728515625, 0.00572967529296875, -0.0165863037109375, 0.03753662109375, -0.01471710205078125, -0.00982666015625, 0.046478271484375, 0.0271453857421875, 0.0457763671875, -0.01549530029296875, 0.03143310546875, 0.0574951171875, -0.00215911865234375, 0.0013904571533203125, 0.0210723876953125, -0.0038471221923828125, -0.032012939453125, -0.04376220703125, -0.044525146484375, -0.038482666015625, 0.0120849609375, -0.058074951171875, 0.057098388671875, -0.032623291015625, -0.022369384765625, 0.02325439453125, 0.0214385986328125, -0.045867919921875, 0.0187530517578125, 0.02362060546875, 0.057830810546875, -0.053741455078125, 0.07708740234375, 0.03436279296875, -0.055145263671875, -0.050140380859375, -0.03167724609375, 0.01473236083984375, -0.04022216796875, 0.0131683349609375, 0.00464630126953125, -0.0121307373046875, 0.00604248046875, -0.0274200439453125, -0.057281494140625, 0.1009521484375, 0.0411376953125, -0.042083740234375, -0.006519317626953125, -0.0021533966064453125, 0.049896240234375, -0.019287109375, 0.053985595703125, 0.02783203125, 0.02691650390625, 0.01508331298828125, -0.073486328125, -0.00467681884765625, -0.016998291015625, 0.0143280029296875, 0.0092315673828125, -0.0751953125, 0.0806884765625, -0.033203125, -0.01152801513671875, 0.0065460205078125, 0.0467529296875, 0.0167694091796875, 0.036834716796875, 0.0269317626953125, 0.0706787109375, 0.009735107421875, 0.0067138671875, 0.06756591796875, -0.0052947998046875, 0.05255126953125, 0.061981201171875, 0.00039267539978027344, 0.052215576171875, 0.04400634765625, -0.0206756591796875, 0.044158935546875, 0.041259765625, -0.0185089111328125, 0.039093017578125, 0.0011463165283203125, -0.0021152496337890625, -0.001415252685546875, -0.01104736328125, -0.045318603515625, 0.03338623046875, 0.016815185546875, 
-0.0132598876953125, -0.02508544921875, 0.0010509490966796875, 0.0012083053588867188, 0.0010204315185546875, -0.021453857421875, 0.027496337890625, -0.0011987686157226562, -0.0222625732421875, 0.058319091796875, -0.00394439697265625, 0.0654296875, -0.0445556640625, -0.0192718505859375, -0.01145172119140625, 0.028076171875, -0.030548095703125, -0.07366943359375, 0.0283966064453125, 0.01142120361328125, -0.0008053779602050781, -0.0193328857421875, 0.052947998046875, -0.0193023681640625, -0.03790283203125, 0.05224609375, 0.01508331298828125, 0.03521728515625, -0.0086517333984375, -0.029998779296875, 0.0125885009765625, 0.0079193115234375, -0.044158935546875, 0.0288238525390625, 0.009490966796875, 0.0196533203125, 0.033966064453125, 0.037567138671875, 0.0212554931640625, 0.003513336181640625, -0.01309967041015625, 0.07135009765625, -0.048675537109375, -0.00696563720703125, -0.054229736328125, 0.04547119140625, -0.0204620361328125, -0.0256805419921875, 0.07147216796875, 0.048309326171875, 0.08453369140625, -0.01052093505859375, 0.03192138671875, -0.022003173828125, 0.0194244384765625, -0.004718780517578125, 0.0347900390625, -0.069091796875, -0.006908416748046875, -0.0289306640625, -0.05328369140625, -0.0057220458984375, 0.026336669921875, 0.00334930419921875, -0.0187530517578125, 0.0127410888671875, 0.06475830078125, -0.02728271484375, -0.030059814453125, 0.034912109375, 0.0273895263671875, 0.01776123046875, 0.044921875, 0.00785064697265625, -0.07977294921875, 0.06414794921875, -0.060211181640625, -0.02838134765625, -0.020843505859375, -0.035888671875, -0.0321044921875, -0.04150390625, -0.041717529296875, -0.047454833984375, -0.00702667236328125, 0.041717529296875, 0.0762939453125, -0.033477783203125, -0.027191162109375, -0.0027637481689453125, -0.006130218505859375, -0.01120758056640625, -0.0256805419921875, 0.0177001953125, 0.02947998046875, -0.07244873046875, 0.00778961181640625, 0.03839111328125, 0.0097503662109375, -0.0178985595703125, -0.0202789306640625, 
-0.017578125, 0.0056304931640625, 0.0430908203125, 0.037841796875, -0.040191650390625, -0.0283203125, -0.0012454986572265625, 0.016265869140625, 0.0269927978515625, 0.023895263671875, -0.061767578125, 0.05419921875, 0.04986572265625, -0.0217132568359375, 0.09417724609375, -0.0113525390625, 0.0014591217041015625, -0.03759765625, 0.034271240234375, 0.0011234283447265625, 0.01285552978515625, 0.0165557861328125, -0.0247955322265625, 0.0452880859375, 0.02496337890625, -0.05621337890625, -0.06207275390625, 0.0020923614501953125, -0.1099853515625, -0.00850677490234375, 0.08392333984375, -0.009796142578125, -0.02947998046875, 0.0243988037109375, -0.018280029296875, 0.045013427734375, -0.035980224609375, 0.0498046875, 0.0250396728515625, -0.026153564453125, -0.01861572265625, -0.0633544921875, 0.015777587890625, 0.0230560302734375, -0.017608642578125, -0.0181427001953125, 0.0352783203125, 0.061309814453125, 0.027557373046875, 0.058074951171875, -0.0270843505859375, 0.02685546875, 0.03436279296875, 0.009735107421875, -0.0046234130859375, 0.01378631591796875, -0.031951904296875, 0.00859832763671875, -0.0177001953125, -0.037322998046875 ] ]
MoritzLaurer/deberta-v3-base-zeroshot-v1
2023-10-05T14:04:02.000Z
[ "transformers", "pytorch", "deberta-v2", "text-classification", "zero-shot-classification", "en", "license:mit", "endpoints_compatible", "region:us" ]
zero-shot-classification
MoritzLaurer
null
null
MoritzLaurer/deberta-v3-base-zeroshot-v1
33
8,555
transformers
2023-09-29T05:38:21
--- language: - en tags: - text-classification - zero-shot-classification pipeline_tag: zero-shot-classification library_name: transformers license: mit --- # deberta-v3-base-zeroshot-v1 ## Model description The model is designed for zero-shot classification with the Hugging Face pipeline. The model should be substantially better at zero-shot classification than my other zero-shot models on the Hugging Face hub: https://huggingface.co/MoritzLaurer. The model can do one universal task: determine whether a hypothesis is `true` or `not_true` given a text (also called `entailment` vs. `not_entailment`). This task format is based on the Natural Language Inference task (NLI). The task is so universal that any classification task can be reformulated into the task. ## Training data The model was trained on a mixture of 27 tasks and 310 classes that have been reformatted into this universal format. 1. 26 classification tasks with ~400k texts: 'amazonpolarity', 'imdb', 'appreviews', 'yelpreviews', 'rottentomatoes', 'emotiondair', 'emocontext', 'empathetic', 'financialphrasebank', 'banking77', 'massive', 'wikitoxic_toxicaggregated', 'wikitoxic_obscene', 'wikitoxic_threat', 'wikitoxic_insult', 'wikitoxic_identityhate', 'hateoffensive', 'hatexplain', 'biasframes_offensive', 'biasframes_sex', 'biasframes_intent', 'agnews', 'yahootopics', 'trueteacher', 'spam', 'wellformedquery'. See details on each dataset here: https://docs.google.com/spreadsheets/d/1Z18tMh02IiWgh6o8pfoMiI_LH4IXpr78wd_nmNd5FaE/edit?usp=sharing 3. Five NLI datasets with ~885k texts: "mnli", "anli", "fever", "wanli", "ling" Note that compared to other NLI models, this model predicts two classes (`entailment` vs. 
`not_entailment`) as opposed to three classes (entailment/neutral/contradiction) ### How to use the model #### Simple zero-shot classification pipeline ```python from transformers import pipeline classifier = pipeline("zero-shot-classification", model="MoritzLaurer/deberta-v3-base-zeroshot-v1") sequence_to_classify = "Angela Merkel is a politician in Germany and leader of the CDU" candidate_labels = ["politics", "economy", "entertainment", "environment"] output = classifier(sequence_to_classify, candidate_labels, multi_label=False) print(output) ``` ### Details on data and training The code for preparing the data and training & evaluating the model is fully open-source here: https://github.com/MoritzLaurer/zeroshot-classifier/tree/main ## Limitations and bias The model can only do text classification tasks. Please consult the original DeBERTa paper and the papers for the different datasets for potential biases. ## License The base model (DeBERTa-v3) is published under the MIT license. The datasets the model was fine-tuned on are published under a diverse set of licenses. The following spreadsheet provides an overview of the non-NLI datasets used for fine-tuning. The spreadsheets contains information on licenses, the underlying papers etc.: https://docs.google.com/spreadsheets/d/1Z18tMh02IiWgh6o8pfoMiI_LH4IXpr78wd_nmNd5FaE/edit?usp=sharing In addition, the model was also trained on the following NLI datasets: MNLI, ANLI, WANLI, LING-NLI, FEVER-NLI. 
## Citation If you use this model, please cite: ``` @article{laurer_less_2023, title = {Less {Annotating}, {More} {Classifying}: {Addressing} the {Data} {Scarcity} {Issue} of {Supervised} {Machine} {Learning} with {Deep} {Transfer} {Learning} and {BERT}-{NLI}}, issn = {1047-1987, 1476-4989}, shorttitle = {Less {Annotating}, {More} {Classifying}}, url = {https://www.cambridge.org/core/product/identifier/S1047198723000207/type/journal_article}, doi = {10.1017/pan.2023.20}, language = {en}, urldate = {2023-06-20}, journal = {Political Analysis}, author = {Laurer, Moritz and Van Atteveldt, Wouter and Casas, Andreu and Welbers, Kasper}, month = jun, year = {2023}, pages = {1--33}, } ``` ### Ideas for cooperation or questions? If you have questions or ideas for cooperation, contact me at m{dot}laurer{at}vu{dot}nl or [LinkedIn](https://www.linkedin.com/in/moritz-laurer/) ### Debugging and issues Note that DeBERTa-v3 was released on 06.12.21 and older versions of HF Transformers seem to have issues running the model (e.g. resulting in an issue with the tokenizer). Using Transformers>=4.13 might solve some issues.
4,324
[ [ -0.018890380859375, -0.048187255859375, 0.030303955078125, 0.00970458984375, -0.004711151123046875, -0.0117340087890625, 0.00820159912109375, -0.047607421875, 0.0211181640625, 0.035400390625, -0.043914794921875, -0.053558349609375, -0.06201171875, 0.00885772705078125, -0.029449462890625, 0.08251953125, 0.004993438720703125, -0.0081329345703125, 0.0009946823120117188, -0.01103973388671875, -0.036865234375, -0.048858642578125, -0.040740966796875, -0.04058837890625, 0.0531005859375, 0.03485107421875, 0.04437255859375, 0.036224365234375, 0.030731201171875, 0.01540374755859375, -0.0171966552734375, -0.01421356201171875, -0.0340576171875, -0.0028209686279296875, -0.0037517547607421875, -0.039398193359375, -0.03692626953125, 0.022613525390625, 0.0179443359375, 0.032257080078125, 0.00927734375, 0.0249786376953125, -0.004802703857421875, 0.046295166015625, -0.06951904296875, 0.00968170166015625, -0.05078125, 0.00785064697265625, -0.00850677490234375, 0.003467559814453125, -0.0274810791015625, -0.01200103759765625, 0.0156402587890625, -0.031463623046875, 0.0118255615234375, -0.0164947509765625, 0.0938720703125, 0.032440185546875, -0.022491455078125, -0.00586700439453125, -0.05084228515625, 0.0675048828125, -0.0699462890625, 0.02056884765625, 0.0213775634765625, 0.007305145263671875, -0.0016984939575195312, -0.036529541015625, -0.061431884765625, 0.0093536376953125, -0.00775146484375, 0.0243377685546875, -0.034820556640625, -0.0131683349609375, 0.0261383056640625, 0.01535797119140625, -0.04949951171875, 0.02490234375, -0.03472900390625, -0.0017366409301757812, 0.046051025390625, 0.0007882118225097656, 0.0155029296875, -0.03277587890625, -0.02972412109375, -0.0188751220703125, -0.04730224609375, 0.001247406005859375, 0.0225982666015625, 0.0264129638671875, -0.0228424072265625, 0.0372314453125, -0.0266265869140625, 0.055938720703125, 0.01261138916015625, 0.004375457763671875, 0.05047607421875, -0.00862884521484375, -0.042327880859375, 0.00983428955078125, 0.066162109375, 
0.024749755859375, 0.007694244384765625, 0.00013887882232666016, 0.0077667236328125, 0.018707275390625, 0.0025386810302734375, -0.080078125, -0.020843505859375, 0.032135009765625, -0.024627685546875, -0.044097900390625, 0.00006282329559326172, -0.06195068359375, -0.018524169921875, -0.03631591796875, 0.03363037109375, -0.034759521484375, -0.0156402587890625, 0.008880615234375, -0.016876220703125, 0.031005859375, 0.0205078125, -0.060516357421875, 0.00305938720703125, 0.037994384765625, 0.0679931640625, 0.0032958984375, -0.0210418701171875, -0.03106689453125, -0.01207733154296875, -0.0101165771484375, 0.05108642578125, -0.036834716796875, -0.0010995864868164062, -0.00853729248046875, 0.01534271240234375, -0.0225677490234375, -0.031158447265625, 0.049560546875, -0.034027099609375, 0.034912109375, -0.01334381103515625, -0.045013427734375, -0.03289794921875, 0.029815673828125, -0.04791259765625, 0.07135009765625, 0.0124359130859375, -0.07293701171875, 0.032623291015625, -0.0562744140625, -0.005298614501953125, -0.005008697509765625, 0.0031681060791015625, -0.038299560546875, -0.0171356201171875, 0.0171051025390625, 0.04840087890625, -0.0135650634765625, 0.039215087890625, -0.037811279296875, -0.03375244140625, 0.0019140243530273438, -0.02716064453125, 0.10296630859375, 0.01611328125, -0.0411376953125, 0.0068206787109375, -0.06488037109375, -0.0086517333984375, 0.0142059326171875, -0.001285552978515625, -0.02099609375, -0.022705078125, 0.01299285888671875, 0.0296630859375, 0.0102996826171875, -0.0521240234375, 0.0239105224609375, -0.03692626953125, 0.0267486572265625, 0.0325927734375, 0.0018758773803710938, 0.035369873046875, -0.0267791748046875, 0.0291595458984375, 0.00921630859375, 0.0220489501953125, 0.0032520294189453125, -0.044281005859375, -0.07708740234375, -0.02960205078125, 0.043914794921875, 0.0726318359375, -0.043914794921875, 0.05133056640625, -0.01456451416015625, -0.061767578125, -0.037322998046875, 0.00833892822265625, 0.024749755859375, 0.045166015625, 
0.0374755859375, -0.0097503662109375, -0.049560546875, -0.06787109375, 0.007289886474609375, -0.005084991455078125, -0.00759124755859375, 0.00542449951171875, 0.06024169921875, -0.038848876953125, 0.0716552734375, -0.038543701171875, -0.0487060546875, -0.0164031982421875, 0.0197601318359375, 0.037567138671875, 0.03497314453125, 0.0643310546875, -0.049468994140625, -0.037567138671875, -0.0191650390625, -0.06829833984375, 0.000024080276489257812, -0.005687713623046875, -0.0236053466796875, 0.031005859375, 0.01274871826171875, -0.03961181640625, 0.031219482421875, 0.043792724609375, -0.0243988037109375, 0.0012617111206054688, 0.0024967193603515625, -0.005977630615234375, -0.08154296875, 0.0183868408203125, 0.01503753662109375, -0.0071258544921875, -0.06317138671875, 0.0011415481567382812, -0.01006317138671875, -0.0006651878356933594, -0.052947998046875, 0.042083740234375, -0.01209259033203125, 0.0249786376953125, -0.01554107666015625, 0.005863189697265625, 0.008087158203125, 0.045318603515625, 0.014129638671875, 0.0230560302734375, 0.06353759765625, -0.04473876953125, 0.01190948486328125, 0.03741455078125, -0.01181793212890625, 0.031585693359375, -0.061553955078125, 0.01031494140625, -0.0202178955078125, 0.02276611328125, -0.041961669921875, -0.01523590087890625, 0.04296875, -0.040008544921875, 0.0287628173828125, -0.0059661865234375, -0.034149169921875, -0.019805908203125, -0.031829833984375, 0.0037593841552734375, 0.04718017578125, -0.0430908203125, 0.0300140380859375, 0.0253143310546875, 0.01444244384765625, -0.060028076171875, -0.05596923828125, -0.0113525390625, -0.0217437744140625, -0.0304718017578125, 0.02984619140625, 0.005298614501953125, -0.0136260986328125, 0.01036834716796875, 0.00804901123046875, -0.0239410400390625, 0.00574493408203125, 0.0259857177734375, 0.03607177734375, 0.00591278076171875, -0.00084686279296875, 0.00449371337890625, -0.01169586181640625, -0.0181121826171875, -0.01299285888671875, 0.03424072265625, -0.0014657974243164062, 
-0.005825042724609375, -0.0498046875, 0.01361083984375, 0.043060302734375, -0.006793975830078125, 0.0672607421875, 0.0609130859375, -0.031463623046875, 0.00472259521484375, -0.0322265625, -0.01204681396484375, -0.028717041015625, 0.00565338134765625, -0.0147857666015625, -0.06072998046875, 0.03350830078125, 0.0183868408203125, 0.01116943359375, 0.065185546875, 0.04046630859375, 0.0074615478515625, 0.0604248046875, 0.058807373046875, -0.027374267578125, 0.0220794677734375, -0.05364990234375, 0.01213836669921875, -0.056304931640625, -0.0178070068359375, -0.045135498046875, -0.0301666259765625, -0.05670166015625, -0.0260772705078125, 0.0017061233520507812, 0.017730712890625, -0.03460693359375, 0.05596923828125, -0.06201171875, 0.0306243896484375, 0.0518798828125, -0.00276947021484375, 0.0168914794921875, -0.00482940673828125, 0.0258331298828125, -0.00370025634765625, -0.054168701171875, -0.04583740234375, 0.07550048828125, 0.03887939453125, 0.04180908203125, 0.0080108642578125, 0.0732421875, 0.008331298828125, 0.028656005859375, -0.058746337890625, 0.0215301513671875, -0.02880859375, -0.06365966796875, -0.017913818359375, -0.032867431640625, -0.07293701171875, 0.017303466796875, -0.032196044921875, -0.06610107421875, 0.048004150390625, 0.0013408660888671875, -0.037322998046875, 0.02655029296875, -0.0443115234375, 0.0703125, -0.01227569580078125, -0.01519775390625, 0.005096435546875, -0.0455322265625, 0.0295562744140625, -0.006748199462890625, 0.004703521728515625, -0.0259857177734375, 0.0205535888671875, 0.058746337890625, -0.01152801513671875, 0.08477783203125, -0.0302581787109375, -0.00846099853515625, 0.029510498046875, -0.012237548828125, 0.012481689453125, 0.0025806427001953125, -0.0188140869140625, 0.055572509765625, 0.0192108154296875, -0.022918701171875, -0.037628173828125, 0.06060791015625, -0.0697021484375, -0.023895263671875, -0.0533447265625, -0.0169525146484375, 0.011474609375, 0.019805908203125, 0.038421630859375, 0.0182037353515625, 
-0.00447845458984375, 0.018585205078125, 0.03912353515625, -0.02239990234375, 0.028167724609375, 0.041290283203125, -0.01031494140625, -0.0155029296875, 0.0694580078125, 0.0193634033203125, 0.0018978118896484375, 0.032928466796875, 0.005062103271484375, -0.0200042724609375, -0.02239990234375, -0.037994384765625, 0.012481689453125, -0.0430908203125, -0.037628173828125, -0.07293701171875, -0.0291595458984375, -0.040283203125, -0.0019931793212890625, -0.0216064453125, -0.037628173828125, -0.048919677734375, -0.013092041015625, 0.04241943359375, 0.047821044921875, -0.004840850830078125, 0.0212860107421875, -0.054351806640625, 0.0183563232421875, 0.0188140869140625, 0.0255279541015625, -0.00469207763671875, -0.056976318359375, 0.00437164306640625, 0.0092315673828125, -0.042755126953125, -0.076904296875, 0.051239013671875, 0.01611328125, 0.0231170654296875, 0.0183563232421875, 0.020416259765625, 0.040435791015625, -0.030975341796875, 0.0562744140625, 0.0139312744140625, -0.07794189453125, 0.042572021484375, -0.019805908203125, 0.01477813720703125, 0.056427001953125, 0.05523681640625, -0.0275726318359375, -0.038360595703125, -0.058990478515625, -0.0762939453125, 0.060333251953125, 0.03533935546875, 0.0177764892578125, -0.0005435943603515625, 0.02911376953125, -0.002712249755859375, 0.008148193359375, -0.06964111328125, -0.025299072265625, -0.0203704833984375, -0.019622802734375, 0.006069183349609375, -0.0131683349609375, -0.0000032782554626464844, -0.03814697265625, 0.07635498046875, -0.006725311279296875, 0.0184173583984375, 0.033966064453125, -0.0020751953125, 0.0018825531005859375, 0.031646728515625, 0.035888671875, 0.0281829833984375, -0.03826904296875, -0.00809478759765625, 0.020538330078125, -0.01068878173828125, 0.01214599609375, 0.018707275390625, -0.042083740234375, 0.01137542724609375, 0.0214691162109375, 0.08306884765625, -0.004184722900390625, -0.03631591796875, 0.05072021484375, -0.0005674362182617188, -0.0419921875, -0.040313720703125, 0.009521484375, 
-0.0086212158203125, 0.0269775390625, 0.0177764892578125, 0.0089569091796875, 0.022979736328125, -0.042236328125, 0.0139312744140625, 0.031280517578125, -0.0419921875, -0.0184173583984375, 0.0517578125, 0.0108489990234375, -0.0102996826171875, 0.041839599609375, -0.036834716796875, -0.036590576171875, 0.046600341796875, 0.02630615234375, 0.0694580078125, 0.0063323974609375, 0.03448486328125, 0.0511474609375, 0.021697998046875, -0.0036773681640625, 0.01200103759765625, 0.0192718505859375, -0.05810546875, -0.04168701171875, -0.052490234375, -0.03106689453125, 0.037017822265625, -0.04376220703125, 0.0399169921875, -0.03741455078125, -0.01241302490234375, 0.02154541015625, -0.000885009765625, -0.0521240234375, 0.0166015625, 0.02374267578125, 0.058258056640625, -0.08673095703125, 0.065185546875, 0.03692626953125, -0.050537109375, -0.05255126953125, -0.004833221435546875, 0.0063934326171875, -0.027801513671875, 0.0675048828125, 0.0386962890625, -0.003925323486328125, -0.00882720947265625, -0.0498046875, -0.0662841796875, 0.0867919921875, 0.03082275390625, -0.05609130859375, -0.0033168792724609375, -0.00373077392578125, 0.05078125, -0.0196685791015625, 0.032867431640625, 0.04022216796875, 0.035003662109375, 0.0123748779296875, -0.06982421875, 0.001708984375, -0.0212554931640625, -0.01085662841796875, 0.005558013916015625, -0.053955078125, 0.06890869140625, -0.0021762847900390625, -0.01910400390625, -0.00457000732421875, 0.032989501953125, 0.00589752197265625, 0.033966064453125, 0.038421630859375, 0.06317138671875, 0.0552978515625, -0.01222991943359375, 0.06573486328125, -0.020904541015625, 0.046630859375, 0.0892333984375, -0.03741455078125, 0.07208251953125, 0.01532745361328125, -0.005405426025390625, 0.059539794921875, 0.039764404296875, -0.033447265625, 0.030120849609375, 0.004230499267578125, -0.0034732818603515625, -0.00698089599609375, -0.01434326171875, -0.02044677734375, 0.04974365234375, 0.0019989013671875, -0.029327392578125, -0.0176544189453125, 
-0.000009059906005859375, 0.0164337158203125, -0.00722503662109375, 0.00478363037109375, 0.06280517578125, -0.004528045654296875, -0.03826904296875, 0.06536865234375, -0.0004222393035888672, 0.07232666015625, -0.0230560302734375, -0.00879669189453125, -0.0018053054809570312, 0.021331787109375, -0.032928466796875, -0.044342041015625, 0.041961669921875, 0.0175323486328125, -0.023712158203125, -0.00795745849609375, 0.046478271484375, -0.0257415771484375, -0.044891357421875, 0.039764404296875, 0.03814697265625, 0.01320648193359375, -0.00943756103515625, -0.06317138671875, -0.004116058349609375, 0.0062408447265625, -0.0118255615234375, 0.0201263427734375, 0.0159149169921875, 0.00794219970703125, 0.03753662109375, 0.048065185546875, -0.00824737548828125, -0.018463134765625, 0.00728607177734375, 0.06298828125, -0.04681396484375, -0.0018634796142578125, -0.0721435546875, 0.038482666015625, -0.018035888671875, -0.023895263671875, 0.05487060546875, 0.03955078125, 0.06671142578125, -0.007465362548828125, 0.052215576171875, -0.0265350341796875, 0.041473388671875, -0.02435302734375, 0.05078125, -0.050323486328125, -0.006137847900390625, -0.0291595458984375, -0.07110595703125, -0.040557861328125, 0.05438232421875, -0.0157318115234375, -0.01535797119140625, 0.032440185546875, 0.0474853515625, 0.0033321380615234375, -0.00949859619140625, 0.01039886474609375, 0.00415802001953125, 0.0189056396484375, 0.045379638671875, 0.038330078125, -0.0533447265625, 0.0309295654296875, -0.043487548828125, -0.0233154296875, -0.0018796920776367188, -0.062255859375, -0.0765380859375, -0.029449462890625, -0.043914794921875, -0.020843505859375, -0.0093231201171875, 0.07220458984375, 0.05877685546875, -0.076416015625, 0.00311279296875, -0.0159149169921875, -0.002277374267578125, -0.0017833709716796875, -0.02410888671875, 0.0244293212890625, -0.01334381103515625, -0.0792236328125, 0.0093536376953125, 0.00861358642578125, 0.0138092041015625, -0.0173492431640625, 0.0080413818359375, -0.0361328125, 
0.005031585693359375, 0.0491943359375, 0.0211029052734375, -0.043853759765625, -0.0141754150390625, 0.0225677490234375, -0.0005731582641601562, 0.0004253387451171875, 0.02288818359375, -0.0626220703125, 0.01316070556640625, 0.03729248046875, 0.035614013671875, 0.03704833984375, -0.0114898681640625, 0.010772705078125, -0.04718017578125, 0.026458740234375, 0.0142669677734375, 0.017974853515625, 0.0260162353515625, -0.036773681640625, 0.049407958984375, 0.009368896484375, -0.04376220703125, -0.051727294921875, 0.0113067626953125, -0.076171875, -0.0218658447265625, 0.08380126953125, -0.0131072998046875, -0.023223876953125, -0.0015382766723632812, -0.017822265625, 0.0294952392578125, -0.03564453125, 0.055999755859375, 0.04315185546875, -0.01384735107421875, -0.01385498046875, -0.0469970703125, 0.02703857421875, 0.031005859375, -0.06201171875, -0.006183624267578125, 0.035308837890625, 0.0216064453125, 0.0455322265625, 0.049652099609375, -0.002925872802734375, -0.004123687744140625, -0.0171051025390625, 0.0119781494140625, 0.011871337890625, -0.0257720947265625, -0.04058837890625, -0.00177764892578125, -0.0096282958984375, -0.013275146484375 ] ]
declare-lab/flan-alpaca-large
2023-08-21T06:49:29.000Z
[ "transformers", "pytorch", "safetensors", "t5", "text2text-generation", "dataset:tatsu-lab/alpaca", "arxiv:2308.09662", "arxiv:2306.04757", "arxiv:2210.11416", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text2text-generation
declare-lab
null
null
declare-lab/flan-alpaca-large
43
8,538
transformers
2023-03-21T17:52:31
--- license: apache-2.0 datasets: - tatsu-lab/alpaca --- ## 🍮 🦙 Flan-Alpaca: Instruction Tuning from Humans and Machines 📣 Introducing **Red-Eval** to evaluate the safety of the LLMs using several jailbreaking prompts. With **Red-Eval** one could jailbreak/red-team GPT-4 with a 65.1% attack success rate and ChatGPT could be jailbroken 73% of the time as measured on DangerousQA and HarmfulQA benchmarks. More details are here: [Code](https://github.com/declare-lab/red-instruct) and [Paper](https://arxiv.org/abs/2308.09662). 📣 We developed Flacuna by fine-tuning Vicuna-13B on the Flan collection. Flacuna is better than Vicuna at problem-solving. Access the model here https://huggingface.co/declare-lab/flacuna-13b-v1.0. 📣 Curious to know the performance of 🍮 🦙 **Flan-Alpaca** on large-scale LLM evaluation benchmark, **InstructEval**? Read our paper [https://arxiv.org/pdf/2306.04757.pdf](https://arxiv.org/pdf/2306.04757.pdf). We evaluated more than 10 open-source instruction-tuned LLMs belonging to various LLM families including Pythia, LLaMA, T5, UL2, OPT, and Mosaic. Codes and datasets: [https://github.com/declare-lab/instruct-eval](https://github.com/declare-lab/instruct-eval) 📣 **FLAN-T5** is also useful in text-to-audio generation. Find our work at [https://github.com/declare-lab/tango](https://github.com/declare-lab/tango) if you are interested. Our [repository](https://github.com/declare-lab/flan-alpaca) contains code for extending the [Stanford Alpaca](https://github.com/tatsu-lab/stanford_alpaca) synthetic instruction tuning to existing instruction-tuned models such as [Flan-T5](https://arxiv.org/abs/2210.11416). We have a [live interactive demo](https://huggingface.co/spaces/joaogante/transformers_streaming) thanks to [Joao Gante](https://huggingface.co/joaogante)! We are also benchmarking many instruction-tuned models at [declare-lab/flan-eval](https://github.com/declare-lab/flan-eval). 
Our pretrained models are fully available on HuggingFace 🤗 : | Model | Parameters | Instruction Data | Training GPUs | |----------------------------------------------------------------------------------|------------|----------------------------------------------------------------------------------------------------------------------------------------------------|-----------------| | [Flan-Alpaca-Base](https://huggingface.co/declare-lab/flan-alpaca-base) | 220M | [Flan](https://github.com/google-research/FLAN), [Alpaca](https://github.com/tatsu-lab/stanford_alpaca) | 1x A6000 | | [Flan-Alpaca-Large](https://huggingface.co/declare-lab/flan-alpaca-large) | 770M | [Flan](https://github.com/google-research/FLAN), [Alpaca](https://github.com/tatsu-lab/stanford_alpaca) | 1x A6000 | | [Flan-Alpaca-XL](https://huggingface.co/declare-lab/flan-alpaca-xl) | 3B | [Flan](https://github.com/google-research/FLAN), [Alpaca](https://github.com/tatsu-lab/stanford_alpaca) | 1x A6000 | | [Flan-Alpaca-XXL](https://huggingface.co/declare-lab/flan-alpaca-xxl) | 11B | [Flan](https://github.com/google-research/FLAN), [Alpaca](https://github.com/tatsu-lab/stanford_alpaca) | 4x A6000 (FSDP) | | [Flan-GPT4All-XL](https://huggingface.co/declare-lab/flan-gpt4all-xl) | 3B | [Flan](https://github.com/google-research/FLAN), [GPT4All](https://github.com/nomic-ai/gpt4all) | 1x A6000 | | [Flan-ShareGPT-XL](https://huggingface.co/declare-lab/flan-sharegpt-xl) | 3B | [Flan](https://github.com/google-research/FLAN), [ShareGPT](https://github.com/domeccleston/sharegpt)/[Vicuna](https://github.com/lm-sys/FastChat) | 1x A6000 | | [Flan-Alpaca-GPT4-XL*](https://huggingface.co/declare-lab/flan-alpaca-gpt4-xl) | 3B | [Flan](https://github.com/google-research/FLAN), [GPT4-Alpaca](https://github.com/Instruction-Tuning-with-GPT-4/GPT-4-LLM) | 1x A6000 | *recommended for better performance ### Why? 
[Alpaca](https://crfm.stanford.edu/2023/03/13/alpaca.html) represents an exciting new direction to approximate the performance of large language models (LLMs) like ChatGPT cheaply and easily. Concretely, they leverage an LLM such as GPT-3 to generate instructions as synthetic training data. The synthetic data which covers more than 50k tasks can then be used to finetune a smaller model. However, the original implementation is less accessible due to licensing constraints of the underlying [LLaMA](https://ai.facebook.com/blog/large-language-model-llama-meta-ai/) model. Furthermore, users have noted [potential noise](https://github.com/tloen/alpaca-lora/issues/65) in the synthetic dataset. Hence, it may be better to explore a fully accessible model that is already trained on high-quality (but less diverse) instructions such as [Flan-T5](https://arxiv.org/abs/2210.11416). ### Usage ``` from transformers import pipeline prompt = "Write an email about an alpaca that likes flan" model = pipeline(model="declare-lab/flan-alpaca-gpt4-xl") model(prompt, max_length=128, do_sample=True) # Dear AlpacaFriend, # My name is Alpaca and I'm 10 years old. # I'm excited to announce that I'm a big fan of flan! # We like to eat it as a snack and I believe that it can help with our overall growth. # I'd love to hear your feedback on this idea. # Have a great day! # Best, AL Paca ```
5,816
[ [ -0.051910400390625, -0.06793212890625, 0.0211181640625, 0.01837158203125, -0.0028362274169921875, 0.0016946792602539062, -0.0211029052734375, -0.054046630859375, 0.033966064453125, 0.01554107666015625, -0.034759521484375, -0.045074462890625, -0.041168212890625, -0.004245758056640625, -0.0281982421875, 0.07464599609375, -0.007244110107421875, -0.017913818359375, 0.026519775390625, -0.02520751953125, -0.0161285400390625, -0.0260467529296875, -0.05120849609375, -0.01375579833984375, 0.04022216796875, 0.0145721435546875, 0.05047607421875, 0.04888916015625, 0.0222015380859375, 0.02392578125, -0.01465606689453125, 0.029998779296875, -0.024169921875, -0.0269012451171875, 0.0216522216796875, -0.027618408203125, -0.04644775390625, -0.00872039794921875, 0.02081298828125, 0.01678466796875, -0.0142974853515625, 0.0180816650390625, -0.00988006591796875, 0.059112548828125, -0.04315185546875, 0.0260772705078125, -0.037750244140625, 0.0012378692626953125, -0.01971435546875, -0.003482818603515625, -0.0038776397705078125, -0.034271240234375, -0.0034313201904296875, -0.056243896484375, 0.0214385986328125, -0.004299163818359375, 0.0823974609375, 0.0113067626953125, -0.01540374755859375, -0.04449462890625, -0.056671142578125, 0.0430908203125, -0.06365966796875, 0.0296783447265625, 0.028411865234375, 0.019378662109375, -0.0247650146484375, -0.044158935546875, -0.051788330078125, -0.0221710205078125, -0.00409698486328125, 0.01287841796875, -0.004909515380859375, -0.0033702850341796875, 0.0110321044921875, 0.049774169921875, -0.03021240234375, -0.005176544189453125, -0.034515380859375, -0.00820159912109375, 0.04876708984375, -0.0178375244140625, 0.0233001708984375, 0.0113067626953125, -0.0238037109375, -0.035919189453125, -0.04718017578125, 0.01088714599609375, 0.019805908203125, 0.0263671875, -0.03271484375, 0.033721923828125, -0.011444091796875, 0.048309326171875, -0.01256561279296875, -0.0214996337890625, 0.038970947265625, -0.0207977294921875, -0.0180816650390625, 
-0.004634857177734375, 0.086181640625, 0.0017795562744140625, -0.0017137527465820312, 0.01953125, -0.041717529296875, -0.002323150634765625, -0.00009357929229736328, -0.04620361328125, -0.00884246826171875, 0.0231170654296875, -0.0208740234375, -0.0386962890625, 0.00899505615234375, -0.07550048828125, -0.0021381378173828125, 0.00455474853515625, 0.04925537109375, -0.051116943359375, -0.019378662109375, 0.0092926025390625, 0.0121612548828125, 0.042510986328125, 0.0209197998046875, -0.08819580078125, 0.0213623046875, 0.06549072265625, 0.08575439453125, 0.00815582275390625, -0.0240325927734375, -0.027587890625, -0.00556182861328125, -0.0308685302734375, 0.045806884765625, -0.0124053955078125, -0.025787353515625, -0.0044097900390625, 0.01073455810546875, -0.0209197998046875, -0.030975341796875, 0.04827880859375, -0.0299224853515625, 0.0081329345703125, -0.03106689453125, -0.035308837890625, -0.0121917724609375, -0.00543212890625, -0.061859130859375, 0.079833984375, 0.0245208740234375, -0.040679931640625, 0.01019287109375, -0.07769775390625, -0.036468505859375, -0.023406982421875, 0.003704071044921875, -0.04254150390625, -0.004993438720703125, 0.031768798828125, 0.0151824951171875, -0.032470703125, 0.007434844970703125, 0.0017986297607421875, -0.0399169921875, 0.01027679443359375, -0.0239715576171875, 0.05841064453125, 0.031158447265625, -0.05706787109375, 0.0171356201171875, -0.06634521484375, -0.008392333984375, 0.0294647216796875, -0.0275421142578125, 0.0187835693359375, -0.0200042724609375, -0.01064300537109375, -0.004909515380859375, 0.016815185546875, -0.0248870849609375, 0.0147247314453125, -0.03448486328125, 0.0516357421875, 0.050689697265625, -0.01111602783203125, 0.0176849365234375, -0.045684814453125, 0.036163330078125, -0.0184783935546875, 0.0220489501953125, -0.0146331787109375, -0.04541015625, -0.0909423828125, -0.0304718017578125, 0.0134124755859375, 0.051300048828125, -0.0416259765625, 0.042144775390625, -0.0105133056640625, -0.050994873046875, 
-0.04559326171875, 0.0203399658203125, 0.0261688232421875, 0.029998779296875, 0.045379638671875, -0.004138946533203125, -0.0316162109375, -0.05523681640625, 0.005878448486328125, -0.01311492919921875, 0.0018129348754882812, 0.0120849609375, 0.05487060546875, -0.0258636474609375, 0.046356201171875, -0.039825439453125, -0.027130126953125, -0.0179443359375, -0.005313873291015625, 0.025482177734375, 0.0445556640625, 0.060150146484375, -0.036834716796875, -0.01375579833984375, 0.0180816650390625, -0.0445556640625, 0.00608062744140625, 0.0040435791015625, -0.02044677734375, 0.0265350341796875, 0.01422882080078125, -0.0684814453125, 0.02581787109375, 0.04345703125, -0.01477813720703125, 0.04156494140625, -0.0095062255859375, 0.01004791259765625, -0.06005859375, 0.0135040283203125, 0.00045680999755859375, -0.01146697998046875, -0.037841796875, 0.012847900390625, -0.0040130615234375, -0.00299835205078125, -0.047515869140625, 0.045196533203125, -0.0277252197265625, -0.005077362060546875, -0.006786346435546875, -0.0097198486328125, 0.0084686279296875, 0.052093505859375, -0.004306793212890625, 0.08251953125, 0.0270538330078125, -0.040771484375, 0.0263671875, 0.0158538818359375, -0.0297088623046875, -0.01271820068359375, -0.0623779296875, 0.0160980224609375, 0.0185394287109375, 0.039520263671875, -0.035491943359375, -0.0287017822265625, 0.04351806640625, -0.0140838623046875, 0.023956298828125, 0.0035877227783203125, -0.0300750732421875, -0.045013427734375, -0.042999267578125, 0.0175018310546875, 0.041595458984375, -0.0618896484375, 0.037933349609375, 0.0146942138671875, 0.024505615234375, -0.043731689453125, -0.04913330078125, -0.01390838623046875, -0.0269622802734375, -0.045745849609375, 0.0225830078125, -0.00562286376953125, -0.0004322528839111328, -0.0192718505859375, -0.00594329833984375, 0.00589752197265625, -0.0006299018859863281, 0.01007843017578125, 0.0176239013671875, -0.029754638671875, -0.006847381591796875, -0.01079559326171875, 0.00594329833984375, 
-0.0114593505859375, -0.0222015380859375, 0.056488037109375, -0.05816650390625, -0.0125274658203125, -0.04443359375, 0.0067138671875, 0.04559326171875, -0.033782958984375, 0.076171875, 0.0697021484375, -0.01293182373046875, -0.01313018798828125, -0.048187255859375, -0.004688262939453125, -0.04107666015625, 0.006130218505859375, -0.029083251953125, -0.057891845703125, 0.045196533203125, 0.01036834716796875, 0.0215911865234375, 0.03912353515625, 0.03289794921875, -0.00469207763671875, 0.04364013671875, 0.029998779296875, -0.01174163818359375, 0.048126220703125, -0.0517578125, 0.0051727294921875, -0.053314208984375, -0.013275146484375, -0.032562255859375, -0.01251220703125, -0.05389404296875, -0.054962158203125, 0.0242462158203125, 0.0086212158203125, -0.025390625, 0.0308074951171875, -0.04864501953125, 0.0340576171875, 0.042510986328125, 0.0113983154296875, 0.0034618377685546875, -0.0035991668701171875, -0.0006890296936035156, 0.0302581787109375, -0.04388427734375, -0.0439453125, 0.07769775390625, 0.038909912109375, 0.036163330078125, 0.00765228271484375, 0.062255859375, 0.00891876220703125, 0.028778076171875, -0.054901123046875, 0.040985107421875, -0.008209228515625, -0.032989501953125, -0.01251983642578125, -0.030792236328125, -0.0767822265625, 0.0233306884765625, -0.004711151123046875, -0.0556640625, 0.004100799560546875, 0.0240936279296875, -0.024139404296875, 0.0390625, -0.061614990234375, 0.0660400390625, -0.0379638671875, -0.02191162109375, 0.004306793212890625, -0.03887939453125, 0.051513671875, -0.01401519775390625, 0.0333251953125, -0.01849365234375, -0.01091766357421875, 0.059600830078125, -0.07666015625, 0.0557861328125, -0.0114593505859375, -0.031646728515625, 0.050262451171875, -0.00881195068359375, 0.04052734375, 0.004329681396484375, -0.0262298583984375, 0.0284881591796875, 0.0149993896484375, -0.035430908203125, -0.042236328125, 0.0673828125, -0.0758056640625, -0.04638671875, -0.03619384765625, -0.0160369873046875, -0.0095672607421875, 
0.006526947021484375, 0.0239410400390625, 0.020416259765625, -0.0069580078125, 0.00010216236114501953, 0.0439453125, -0.04241943359375, 0.0296783447265625, 0.0217742919921875, -0.0279083251953125, -0.050079345703125, 0.08172607421875, -0.0034770965576171875, 0.0220184326171875, 0.03857421875, 0.031463623046875, -0.01036834716796875, -0.0295867919921875, -0.050628662109375, 0.027984619140625, -0.04632568359375, -0.0203399658203125, -0.041168212890625, -0.0186920166015625, -0.024383544921875, -0.014007568359375, -0.04754638671875, -0.04498291015625, -0.039886474609375, -0.01113128662109375, 0.0523681640625, 0.049041748046875, 0.0019292831420898438, 0.0279083251953125, -0.056427001953125, 0.028839111328125, 0.01364898681640625, 0.0211029052734375, 0.00432586669921875, -0.0303192138671875, -0.0091705322265625, 0.0250091552734375, -0.03485107421875, -0.062164306640625, 0.0408935546875, 0.03521728515625, 0.0254669189453125, 0.018463134765625, -0.011993408203125, 0.052459716796875, -0.0266571044921875, 0.06317138671875, 0.005580902099609375, -0.0765380859375, 0.06658935546875, -0.030975341796875, 0.0119781494140625, 0.040191650390625, 0.0256805419921875, -0.01788330078125, -0.01959228515625, -0.04022216796875, -0.07342529296875, 0.052459716796875, 0.0254974365234375, -0.00061798095703125, -0.0018711090087890625, 0.02923583984375, 0.0139923095703125, 0.00397491455078125, -0.059234619140625, -0.0282440185546875, -0.03558349609375, -0.018402099609375, -0.00295257568359375, 0.0008668899536132812, -0.01983642578125, -0.0270538330078125, 0.06817626953125, -0.00974273681640625, 0.032470703125, 0.019866943359375, 0.0001245737075805664, -0.005718231201171875, 0.01480865478515625, 0.0704345703125, 0.0308990478515625, -0.025665283203125, -0.01136016845703125, 0.0254974365234375, -0.042694091796875, 0.0138092041015625, 0.0013418197631835938, -0.02783203125, -0.01236724853515625, 0.040985107421875, 0.08251953125, 0.00865936279296875, -0.050628662109375, 0.0284576416015625, 
-0.01529693603515625, -0.02178955078125, -0.03271484375, 0.038421630859375, 0.0117034912109375, 0.00629425048828125, 0.02313232421875, 0.007015228271484375, -0.017791748046875, -0.04925537109375, 0.0024051666259765625, 0.02294921875, -0.006595611572265625, -0.037322998046875, 0.054595947265625, 0.0200958251953125, -0.0323486328125, 0.0199737548828125, -0.01971435546875, -0.042449951171875, 0.061798095703125, 0.040496826171875, 0.054595947265625, -0.014923095703125, 0.014312744140625, 0.044647216796875, 0.0278167724609375, -0.0166778564453125, 0.01201629638671875, -0.00872802734375, -0.040802001953125, 0.00159454345703125, -0.078125, -0.0192413330078125, 0.031494140625, -0.0263671875, 0.02557373046875, -0.049102783203125, -0.00707244873046875, -0.011138916015625, 0.01885986328125, -0.050079345703125, 0.006946563720703125, 0.0013780593872070312, 0.06964111328125, -0.052459716796875, 0.0751953125, 0.03515625, -0.03521728515625, -0.07568359375, -0.00968170166015625, 0.01331329345703125, -0.058685302734375, 0.01313018798828125, 0.022064208984375, -0.0179443359375, -0.0223388671875, -0.0286865234375, -0.0733642578125, 0.10302734375, 0.029998779296875, -0.052276611328125, 0.013580322265625, 0.0174560546875, 0.051055908203125, -0.020904541015625, 0.030303955078125, 0.077392578125, 0.041748046875, 0.0139007568359375, -0.073486328125, 0.010101318359375, -0.0243377685546875, -0.005863189697265625, 0.009674072265625, -0.09442138671875, 0.05657958984375, -0.0178375244140625, -0.00222015380859375, 0.011077880859375, 0.0750732421875, 0.0360107421875, 0.020111083984375, 0.04010009765625, 0.031341552734375, 0.0670166015625, -0.01346588134765625, 0.0843505859375, -0.0268707275390625, 0.020965576171875, 0.06982421875, -0.00164794921875, 0.054290771484375, 0.0190582275390625, -0.0323486328125, 0.0297088623046875, 0.054656982421875, 0.001983642578125, 0.024658203125, 0.0072784423828125, -0.038330078125, 0.0199432373046875, 0.01067352294921875, -0.0537109375, 0.035308837890625, 
0.038299560546875, -0.0227508544921875, 0.022613525390625, -0.006805419921875, 0.0191650390625, -0.019287109375, -0.0059661865234375, 0.036468505859375, 0.01316070556640625, -0.046539306640625, 0.08807373046875, 0.001354217529296875, 0.0743408203125, -0.059478759765625, 0.01007080078125, -0.03009033203125, 0.01110076904296875, -0.027130126953125, -0.031707763671875, 0.0189971923828125, 0.004665374755859375, 0.0170745849609375, 0.005123138427734375, 0.040130615234375, -0.01971435546875, -0.0479736328125, 0.033355712890625, 0.0159149169921875, 0.0104522705078125, 0.024261474609375, -0.060577392578125, 0.045806884765625, 0.01523590087890625, -0.0447998046875, 0.0228729248046875, 0.021942138671875, -0.0007328987121582031, 0.058807373046875, 0.061004638671875, 0.01397705078125, 0.01727294921875, 0.0116119384765625, 0.072021484375, -0.05267333984375, -0.01727294921875, -0.051788330078125, 0.0038547515869140625, 0.0178070068359375, -0.02239990234375, 0.0357666015625, 0.04827880859375, 0.05718994140625, -0.00620269775390625, 0.0445556640625, -0.0171356201171875, 0.0173187255859375, -0.042755126953125, 0.04681396484375, -0.0472412109375, 0.04559326171875, -0.0234832763671875, -0.060516357421875, -0.0081787109375, 0.056182861328125, -0.012786865234375, 0.021575927734375, 0.03472900390625, 0.07470703125, -0.005420684814453125, 0.02264404296875, 0.001049041748046875, 0.00919342041015625, 0.058837890625, 0.04931640625, 0.048065185546875, -0.051727294921875, 0.048370361328125, -0.055328369140625, -0.0126190185546875, -0.010986328125, -0.054595947265625, -0.03875732421875, -0.033203125, -0.0207061767578125, -0.013336181640625, 0.01079559326171875, 0.08251953125, 0.059478759765625, -0.0693359375, -0.029998779296875, -0.019775390625, 0.006031036376953125, -0.0264434814453125, -0.0185699462890625, 0.04193115234375, -0.01308441162109375, -0.058807373046875, 0.050201416015625, -0.0013055801391601562, 0.023834228515625, -0.00678253173828125, -0.0161285400390625, -0.02557373046875, 
0.00788116455078125, 0.0404052734375, 0.053863525390625, -0.0548095703125, -0.0218658447265625, -0.019866943359375, -0.0009236335754394531, 0.02301025390625, 0.0404052734375, -0.046142578125, 0.0054473876953125, 0.0177459716796875, 0.027984619140625, 0.054718017578125, 0.00394439697265625, 0.028167724609375, -0.036285400390625, 0.030029296875, 0.00484466552734375, 0.0299224853515625, 0.0273284912109375, -0.025482177734375, 0.061798095703125, 0.0035037994384765625, -0.043914794921875, -0.05291748046875, -0.0085601806640625, -0.0841064453125, -0.007572174072265625, 0.08428955078125, -0.020965576171875, -0.052093505859375, 0.0250091552734375, -0.0175018310546875, 0.027801513671875, -0.045867919921875, 0.049468994140625, 0.03094482421875, -0.0174102783203125, -0.002277374267578125, -0.058837890625, 0.044464111328125, 0.0303192138671875, -0.0810546875, -0.004558563232421875, 0.0174713134765625, 0.025238037109375, 0.0123748779296875, 0.052001953125, -0.00811004638671875, 0.00801849365234375, -0.00920867919921875, 0.0016679763793945312, -0.01236724853515625, -0.00237274169921875, -0.0140533447265625, -0.00463104248046875, -0.005924224853515625, -0.0192413330078125 ] ]
UFNLP/gatortron-base
2023-07-02T20:44:19.000Z
[ "transformers", "pytorch", "megatron-bert", "license:apache-2.0", "endpoints_compatible", "region:us" ]
null
UFNLP
null
null
UFNLP/gatortron-base
19
8,538
transformers
2023-06-02T23:27:12
--- license: apache-2.0 --- <h2>GatorTron-Base overview </h2> Developed by a joint effort between the University of Florida and NVIDIA, GatorTron-Base is a clinical language model of 345 million parameters, pre-trained using a BERT architecure implemented in the Megatron package (https://github.com/NVIDIA/Megatron-LM). GatorTron-Base is pre-trained using a dataset consisting of: - 82B words of de-identified clinical notes from the University of Florida Health System, - 6.1B words from PubMed CC0, - 2.5B words from WikiText, - 0.5B words of de-identified clinical notes from MIMIC-III The Github for GatorTron is at : https://github.com/uf-hobi-informatics-lab/GatorTron This model is converted to Hugginface from : https://catalog.ngc.nvidia.com/orgs/nvidia/teams/clara/models/gatortron_og <h2>Model variations</h2> Model | Parameter --- | --- [gatortron-base (this model)](https://huggingface.co/UFNLP/gatortron-base)| 345 million [gatortronS](https://huggingface.co/UFNLP/gatortronS) | 345 million [gatortron-medium](https://huggingface.co/UFNLP/gatortron-medium) | 3.9 billion gatortron-large | 8.9 billion <h2>How to use</h2> ```python from transformers import AutoModel, AutoTokenizer, AutoConfig tokenizer= AutoTokenizer.from_pretrained('UFNLP/gatortron-base') config=AutoConfig.from_pretrained('UFNLP/gatortron-base') mymodel=AutoModel.from_pretrained('UFNLP/gatortron-base') encoded_input=tokenizer("Bone scan: Negative for distant metastasis.", return_tensors="pt") encoded_output = mymodel(**encoded_input) print (encoded_output) ``` - An NLP pacakge using GatorTron for clinical concept extraction (Named Entity Recognition): https://github.com/uf-hobi-informatics-lab/ClinicalTransformerNER - An NLP pacakge using GatorTron for Relation Extraction: https://github.com/uf-hobi-informatics-lab/ClinicalTransformerRelationExtraction - An NLP pacakge using GatorTron for extraction of social determinants of health (SDoH) from clinical narratives: 
https://github.com/uf-hobi-informatics-lab/SDoH_SODA <h2>De-identification</h2> We applied a de-identification system to remove protected health information (PHI) from clinical text. We adopted the safe-harbor method to identify 18 PHI categories defined in the Health Insurance Portability and Accountability Act (HIPAA) and replaced them with dummy strings (e.g., replace people’s names into [\*\*NAME\*\*]). The de-identifiation system is described in: Yang X, Lyu T, Li Q, Lee C-Y, Bian J, Hogan WR, Wu Y†. A study of deep learning methods for de-identification of clinical notes in cross-institute settings. BMC Med Inform Decis Mak. 2020 Dec 5;19(5):232. https://www.ncbi.nlm.nih.gov/pubmed/31801524. <h2>Citation info</h2> Yang X, Chen A, PourNejatian N, Shin HC, Smith KE, Parisien C, Compas C, Martin C, Costa AB, Flores MG, Zhang Y, Magoc T, Harle CA, Lipori G, Mitchell DA, Hogan WR, Shenkman EA, Bian J, Wu Y†. A large language model for electronic health records. Npj Digit Med. Nature Publishing Group; . 2022 Dec 26;5(1):1–9. https://www.nature.com/articles/s41746-022-00742-2 - BibTeX entry ``` @article{yang2022large, title={A large language model for electronic health records}, author={Yang, Xi and Chen, Aokun and PourNejatian, Nima and Shin, Hoo Chang and Smith, Kaleb E and Parisien, Christopher and Compas, Colin and Martin, Cheryl and Costa, Anthony B and Flores, Mona G and Zhang, Ying and Magoc, Tanja and Harle, Christopher A and Lipori, Gloria and Mitchell, Duane A and Hogan, William R and Shenkman, Elizabeth A and Bian, Jiang and Wu, Yonghui }, journal={npj Digital Medicine}, volume={5}, number={1}, pages={194}, year={2022}, publisher={Nature Publishing Group UK London} } ``` <h2>Contact</h2> - Yonghui Wu: yonghui.wu@ufl.edu - Cheng Peng: c.peng@ufl.edu
3,796
[ [ -0.01277923583984375, -0.051422119140625, 0.04058837890625, -0.006992340087890625, -0.032379150390625, -0.0178985595703125, -0.01044464111328125, -0.04925537109375, 0.021881103515625, 0.032562255859375, -0.02520751953125, -0.0355224609375, -0.04217529296875, 0.00934600830078125, -0.0047760009765625, 0.0908203125, 0.006832122802734375, 0.0222320556640625, 0.0084075927734375, -0.00931549072265625, -0.0012922286987304688, -0.054931640625, -0.03131103515625, -0.03765869140625, 0.027496337890625, 0.003814697265625, 0.04864501953125, 0.08856201171875, 0.04638671875, 0.0135345458984375, -0.023406982421875, -0.019439697265625, -0.0189208984375, -0.0151519775390625, -0.006855010986328125, -0.022552490234375, -0.049102783203125, -0.01291656494140625, 0.03594970703125, 0.06201171875, -0.01293182373046875, 0.00943756103515625, -0.00876617431640625, 0.0379638671875, -0.024810791015625, 0.0075531005859375, -0.0161895751953125, 0.007598876953125, -0.0208892822265625, -0.004360198974609375, -0.0278167724609375, -0.01849365234375, 0.03765869140625, -0.04351806640625, 0.0257415771484375, -0.0037364959716796875, 0.10174560546875, 0.0027294158935546875, -0.034088134765625, -0.03265380859375, -0.045806884765625, 0.04791259765625, -0.0699462890625, 0.02679443359375, 0.023406982421875, 0.01078033447265625, -0.00843048095703125, -0.059783935546875, -0.033721923828125, -0.0294189453125, -0.014404296875, 0.01044464111328125, -0.03533935546875, 0.0163726806640625, 0.0260009765625, 0.041168212890625, -0.061798095703125, 0.0019407272338867188, -0.041351318359375, -0.0271453857421875, 0.04473876953125, 0.01314544677734375, 0.0276641845703125, -0.038665771484375, -0.036834716796875, -0.0038852691650390625, -0.044952392578125, -0.00675201416015625, 0.005550384521484375, 0.0218658447265625, -0.030914306640625, 0.02838134765625, -0.008636474609375, 0.034393310546875, 0.0260009765625, -0.010986328125, 0.05999755859375, -0.01824951171875, -0.0423583984375, 0.010467529296875, 0.086669921875, 
0.007030487060546875, 0.0029735565185546875, -0.00803375244140625, 0.0089874267578125, -0.0146484375, 0.01190948486328125, -0.07012939453125, -0.032562255859375, 0.0303497314453125, -0.045074462890625, -0.0220489501953125, -0.0034160614013671875, -0.076171875, -0.046600341796875, -0.0173187255859375, 0.033233642578125, -0.0401611328125, -0.0205078125, 0.016357421875, 0.00304412841796875, 0.01433563232421875, 0.0056610107421875, -0.07244873046875, 0.026824951171875, 0.04071044921875, 0.048583984375, -0.00011163949966430664, -0.0174407958984375, -0.0290985107421875, -0.0010938644409179688, -0.01546478271484375, 0.041656494140625, -0.0213775634765625, -0.0258026123046875, 0.005207061767578125, 0.0222930908203125, -0.01654052734375, -0.046630859375, 0.0556640625, -0.0164794921875, 0.02850341796875, -0.01169586181640625, -0.04302978515625, -0.0191650390625, 0.0016298294067382812, -0.06304931640625, 0.062744140625, 0.016693115234375, -0.052459716796875, 0.0177001953125, -0.01189422607421875, -0.042755126953125, 0.011505126953125, -0.0146942138671875, -0.04595947265625, 0.0031585693359375, 0.0261993408203125, 0.036773681640625, -0.04705810546875, 0.030487060546875, -0.0108489990234375, -0.00920867919921875, -0.0018033981323242188, -0.005462646484375, 0.07281494140625, 0.020416259765625, -0.046539306640625, 0.0001729726791381836, -0.07183837890625, -0.0084991455078125, 0.019622802734375, -0.032135009765625, -0.022857666015625, -0.0162353515625, -0.004001617431640625, 0.031768798828125, 0.00424957275390625, -0.034820556640625, 0.00704193115234375, -0.056915283203125, 0.0364990234375, 0.0272369384765625, 0.017730712890625, 0.033721923828125, -0.026275634765625, 0.043701171875, 0.008941650390625, 0.00943756103515625, -0.005535125732421875, -0.041259765625, -0.045440673828125, -0.0271453857421875, 0.028350830078125, 0.044769287109375, -0.033660888671875, 0.0335693359375, -0.035247802734375, -0.041778564453125, -0.042755126953125, -0.01169586181640625, 0.047271728515625, 
0.06561279296875, 0.0509033203125, -0.01113128662109375, -0.0499267578125, -0.0645751953125, -0.0020618438720703125, -0.0158233642578125, -0.005573272705078125, 0.033477783203125, 0.041839599609375, -0.0386962890625, 0.032806396484375, -0.030426025390625, -0.0266571044921875, -0.02630615234375, 0.0005869865417480469, 0.0292510986328125, 0.037841796875, 0.036651611328125, -0.03271484375, -0.03765869140625, 0.007213592529296875, -0.059173583984375, -0.0029888153076171875, -0.0168609619140625, -0.01080322265625, 0.0236053466796875, 0.0250701904296875, -0.054107666015625, 0.0252685546875, 0.0310516357421875, -0.0157928466796875, 0.038330078125, -0.034332275390625, -0.004940032958984375, -0.09552001953125, 0.03131103515625, 0.0222015380859375, 0.01678466796875, -0.0706787109375, -0.0051727294921875, 0.0146026611328125, -0.00362396240234375, -0.0266265869140625, 0.052215576171875, -0.050872802734375, 0.0285186767578125, 0.00670623779296875, -0.0022792816162109375, 0.0004978179931640625, 0.04864501953125, 0.0091705322265625, 0.0294036865234375, 0.03424072265625, -0.046966552734375, 0.0191650390625, 0.036834716796875, -0.00966644287109375, 0.03143310546875, -0.06939697265625, -0.0008034706115722656, -0.006626129150390625, 0.016998291015625, -0.064453125, -0.0205230712890625, 0.0386962890625, -0.039886474609375, 0.027740478515625, -0.0220489501953125, -0.016693115234375, -0.03826904296875, -0.021759033203125, 0.0199432373046875, 0.035552978515625, -0.0259552001953125, 0.0367431640625, 0.04571533203125, -0.0192718505859375, -0.049957275390625, -0.0653076171875, -0.00975799560546875, -0.01181793212890625, -0.03875732421875, 0.0450439453125, 0.0132293701171875, -0.00023174285888671875, 0.0020847320556640625, -0.006572723388671875, -0.0189208984375, -0.011932373046875, 0.01338958740234375, 0.0347900390625, -0.02117919921875, 0.0372314453125, 0.0015916824340820312, -0.00894927978515625, 0.010833740234375, -0.0039520263671875, 0.052215576171875, -0.00885009765625, 
-0.00926971435546875, -0.048309326171875, 0.0316162109375, 0.055511474609375, -0.0189208984375, 0.051422119140625, 0.036041259765625, -0.05133056640625, 0.00514984130859375, -0.0330810546875, -0.00901031494140625, -0.028411865234375, 0.03790283203125, -0.01517486572265625, -0.052581787109375, 0.0562744140625, 0.01263427734375, -0.006458282470703125, 0.06884765625, 0.053436279296875, 0.0107269287109375, 0.0716552734375, 0.032135009765625, -0.025787353515625, 0.0153350830078125, -0.0228118896484375, 0.00888824462890625, -0.0828857421875, -0.025634765625, -0.0565185546875, -0.00484466552734375, -0.047454833984375, -0.0181121826171875, 0.0130615234375, -0.0038242340087890625, -0.0286712646484375, 0.03399658203125, -0.05291748046875, -0.006103515625, 0.0306396484375, 0.02301025390625, 0.01227569580078125, -0.006137847900390625, -0.0274505615234375, -0.004360198974609375, -0.04071044921875, -0.03143310546875, 0.09844970703125, 0.039031982421875, 0.0543212890625, 0.0118255615234375, 0.056640625, 0.007411956787109375, 0.0162353515625, -0.05963134765625, 0.033203125, -0.007808685302734375, -0.055908203125, -0.015533447265625, -0.040435791015625, -0.09259033203125, 0.0211029052734375, -0.0150299072265625, -0.0546875, 0.0195159912109375, 0.00914764404296875, -0.057342529296875, 0.01117706298828125, -0.039703369140625, 0.07000732421875, -0.02520751953125, -0.01422882080078125, -0.0006580352783203125, -0.06280517578125, 0.0146636962890625, 0.004611968994140625, 0.00467681884765625, -0.001407623291015625, 0.0071868896484375, 0.056640625, -0.041595458984375, 0.05279541015625, -0.01189422607421875, 0.002323150634765625, 0.0037822723388671875, -0.013580322265625, 0.04217529296875, 0.001087188720703125, -0.01224517822265625, 0.02294921875, 0.0095672607421875, -0.00485992431640625, -0.0152740478515625, 0.043121337890625, -0.06268310546875, -0.0224456787109375, -0.05316162109375, -0.018829345703125, -0.0090789794921875, 0.033355712890625, 0.04058837890625, 0.052276611328125, 
-0.004642486572265625, 0.0227813720703125, 0.0511474609375, -0.043731689453125, 0.0194091796875, 0.0330810546875, 0.01010894775390625, -0.0506591796875, 0.04266357421875, 0.0164642333984375, 0.007354736328125, 0.029998779296875, 0.033721923828125, -0.037017822265625, -0.04541015625, -0.040863037109375, 0.0562744140625, -0.046356201171875, -0.0005841255187988281, -0.05682373046875, -0.0223388671875, -0.04351806640625, 0.0097198486328125, -0.00583648681640625, -0.0168609619140625, -0.045440673828125, -0.010223388671875, 0.0146484375, 0.03033447265625, -0.0014629364013671875, 0.006015777587890625, -0.065673828125, 0.029296875, 0.0033702850341796875, 0.0256500244140625, 0.0064544677734375, -0.06219482421875, -0.01477813720703125, 0.00780487060546875, -0.0071258544921875, -0.0811767578125, 0.051239013671875, 0.0137939453125, 0.0791015625, 0.0231170654296875, 0.005153656005859375, 0.054107666015625, -0.0458984375, 0.057464599609375, 0.01348876953125, -0.0631103515625, 0.04937744140625, -0.030609130859375, 0.01910400390625, 0.04486083984375, 0.045745849609375, -0.05450439453125, -0.0025997161865234375, -0.069580078125, -0.0775146484375, 0.0638427734375, -0.0136260986328125, 0.0072479248046875, -0.01027679443359375, 0.03704833984375, -0.010162353515625, 0.015899658203125, -0.058349609375, -0.0295867919921875, -0.00045013427734375, -0.0283050537109375, 0.0012035369873046875, -0.033477783203125, -0.02435302734375, -0.034820556640625, 0.06915283203125, -0.00331878662109375, 0.04754638671875, 0.037200927734375, -0.020172119140625, -0.00771331787109375, 0.0260772705078125, 0.0657958984375, 0.0286407470703125, -0.01422882080078125, 0.0052490234375, 0.0020618438720703125, -0.06903076171875, -0.00852203369140625, 0.042205810546875, -0.00641632080078125, 0.0077056884765625, 0.024139404296875, 0.060882568359375, 0.01148223876953125, -0.04473876953125, 0.03948974609375, 0.00519561767578125, -0.036224365234375, -0.016998291015625, -0.0142974853515625, 0.0012502670288085938, 
0.0069580078125, 0.02001953125, -0.012664794921875, 0.01050567626953125, -0.029052734375, 0.0248565673828125, 0.01551055908203125, -0.04388427734375, -0.024658203125, 0.0615234375, 0.01100921630859375, -0.00681304931640625, 0.047943115234375, -0.007167816162109375, -0.02947998046875, 0.04736328125, 0.041351318359375, 0.076171875, -0.02032470703125, 0.015899658203125, 0.05242919921875, 0.0233001708984375, -0.011505126953125, 0.01983642578125, 0.0226287841796875, -0.03857421875, -0.0144805908203125, -0.0430908203125, -0.0201263427734375, 0.0379638671875, -0.05718994140625, 0.00479888916015625, -0.045074462890625, -0.0158538818359375, 0.0254669189453125, 0.0210113525390625, -0.039215087890625, 0.018035888671875, 0.02276611328125, 0.048431396484375, -0.052001953125, 0.06439208984375, 0.055755615234375, -0.05548095703125, -0.09161376953125, -0.00021755695343017578, 0.0244598388671875, -0.043670654296875, 0.028900146484375, 0.00714874267578125, 0.021392822265625, -0.01081085205078125, -0.00763702392578125, -0.06640625, 0.0806884765625, 0.0273284912109375, -0.03857421875, -0.0106048583984375, 0.0268707275390625, 0.06927490234375, -0.005550384521484375, 0.0225372314453125, 0.037567138671875, 0.009674072265625, 0.01271820068359375, -0.08599853515625, 0.0306396484375, -0.041351318359375, -0.00811004638671875, 0.0162811279296875, -0.0447998046875, 0.08905029296875, -0.0167236328125, -0.0172882080078125, 0.007080078125, 0.0196685791015625, 0.016326904296875, 0.0096282958984375, 0.00656890869140625, 0.060882568359375, 0.07501220703125, -0.00820159912109375, 0.07867431640625, -0.028594970703125, 0.044952392578125, 0.08319091796875, -0.0265045166015625, 0.04736328125, 0.025390625, -0.01241302490234375, 0.049468994140625, 0.035064697265625, -0.0145416259765625, 0.031951904296875, 0.01326751708984375, -0.0157012939453125, 0.0050201416015625, 0.016998291015625, -0.053009033203125, 0.01739501953125, 0.0208892822265625, -0.0718994140625, -0.013031005859375, -0.00614166259765625, 
0.041015625, -0.0218658447265625, -0.00934600830078125, 0.051422119140625, 0.0033416748046875, -0.050445556640625, 0.06494140625, -0.002803802490234375, 0.032562255859375, -0.049774169921875, 0.009124755859375, -0.01258087158203125, 0.036468505859375, -0.019134521484375, -0.03533935546875, 0.01555633544921875, -0.006526947021484375, -0.01739501953125, 0.0076904296875, 0.041259765625, -0.0241851806640625, -0.0294952392578125, 0.0139007568359375, 0.0241851806640625, 0.004791259765625, 0.0019664764404296875, -0.064453125, -0.0089569091796875, 0.0041961669921875, -0.032562255859375, 0.02789306640625, 0.0249481201171875, -0.0002460479736328125, 0.036956787109375, 0.046478271484375, 0.017608642578125, -0.0033473968505859375, 0.0099639892578125, 0.05975341796875, -0.047943115234375, -0.04437255859375, -0.0628662109375, 0.037139892578125, -0.0097503662109375, -0.059417724609375, 0.05023193359375, 0.04534912109375, 0.05230712890625, -0.00861358642578125, 0.062164306640625, -0.014129638671875, 0.055328369140625, -0.020599365234375, 0.07049560546875, -0.040924072265625, 0.0159912109375, -0.040496826171875, -0.03131103515625, -0.041717529296875, 0.049530029296875, -0.0305633544921875, -0.00377655029296875, 0.05975341796875, 0.05682373046875, 0.0097503662109375, -0.00811004638671875, 0.00844573974609375, 0.01678466796875, 0.039886474609375, 0.06201171875, 0.0176544189453125, -0.053619384765625, 0.04815673828125, -0.037567138671875, -0.0221710205078125, -0.0287628173828125, -0.054779052734375, -0.062469482421875, -0.0374755859375, -0.0263214111328125, -0.038238525390625, -0.00594329833984375, 0.080078125, 0.04876708984375, -0.06561279296875, 0.0012760162353515625, 0.0017194747924804688, -0.0183258056640625, -0.031890869140625, -0.01184844970703125, 0.05059814453125, 0.0004153251647949219, -0.03521728515625, 0.0074920654296875, 0.007904052734375, 0.0042266845703125, 0.0018157958984375, -0.008331298828125, -0.01983642578125, -0.00234222412109375, 0.0269012451171875, 
0.048797607421875, -0.053680419921875, -0.011505126953125, 0.0195770263671875, -0.05078125, 0.019439697265625, 0.031768798828125, -0.049530029296875, 0.038055419921875, 0.0452880859375, 0.051727294921875, 0.03314208984375, -0.004276275634765625, 0.0200347900390625, -0.0233001708984375, 0.0010509490966796875, 0.031951904296875, 0.03326416015625, 0.007472991943359375, -0.0355224609375, 0.03515625, 0.03607177734375, -0.044464111328125, -0.05780029296875, -0.0097503662109375, -0.091796875, -0.00766754150390625, 0.10357666015625, -0.00487518310546875, -0.016815185546875, -0.01031494140625, -0.01186370849609375, 0.033905029296875, -0.0206298828125, 0.056427001953125, 0.03314208984375, -0.017486572265625, -0.0191650390625, -0.050445556640625, 0.050872802734375, 0.0171356201171875, -0.0738525390625, 0.00010609626770019531, 0.042755126953125, 0.0249481201171875, 0.0241546630859375, 0.063720703125, -0.03314208984375, 0.0330810546875, -0.01033782958984375, 0.01885986328125, -0.00177001953125, -0.0033092498779296875, -0.0556640625, 0.00447845458984375, -0.0286407470703125, -0.01140594482421875 ] ]
Undi95/Amethyst-13B-Mistral
2023-10-03T12:04:00.000Z
[ "transformers", "pytorch", "llama", "text-generation", "not-for-all-audiences", "nsfw", "license:cc-by-nc-4.0", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
Undi95
null
null
Undi95/Amethyst-13B-Mistral
9
8,523
transformers
2023-10-01T03:51:19
--- license: cc-by-nc-4.0 tags: - not-for-all-audiences - nsfw --- [THIS WAS A TEST, BUT PEOPLE LIKE IT, SO I ADD IT OFFICIALLY TO MY PROJECTS] ![image/png](https://cdn-uploads.huggingface.co/production/uploads/63ab1241ad514ca8d1430003/ubc23iUshsXKjx-GBPv3W.png) An attempt using [BlockMerge_Gradient](https://github.com/Gryphe/BlockMerge_Gradient) to get better result. In addition, [LimaRP v3](https://huggingface.co/lemonilia/LimaRP-Llama2-13B-v3-EXPERIMENT) was used, is it recommanded to read the documentation. The [llama2-to-mistral-diff](https://huggingface.co/Undi95/llama2-to-mistral-diff) was used on it at weight "1". <!-- description start --> ## Description This repo contains fp16 files of Amethyst-13B-Mistral. <!-- description end --> <!-- description start --> ## Models and loras used - Xwin-LM/Xwin-LM-13B-V0.1 - The-Face-Of-Goonery/Huginn-13b-FP16 - zattio770/120-Days-of-LORA-v2-13B - lemonilia/LimaRP-Llama2-13B-v3-EXPERIMENT - Undi95/llama2-to-mistral-diff <!-- description end --> <!-- prompt-template start --> ## Prompt template: Alpaca ``` Below is an instruction that describes a task. Write a response that appropriately completes the request. ### Instruction: {prompt} ### Response: ``` ## LimaRP v3 usage and suggested settings ![image/png](https://cdn-uploads.huggingface.co/production/uploads/63ab1241ad514ca8d1430003/ZC_iP2KkcEcRdgG_iyxYE.png) You can follow these instruction format settings in SillyTavern. Replace tiny with your desired response length: ![image/png](https://cdn-uploads.huggingface.co/production/uploads/63ab1241ad514ca8d1430003/PIn8_HSPTJEMdSEpNVSdm.png) Special thanks to Sushi. If you want to support me, you can [here](https://ko-fi.com/undiai).
1,722
[ [ -0.06036376953125, -0.057708740234375, 0.023956298828125, 0.047882080078125, -0.038909912109375, -0.0164642333984375, 0.00507354736328125, -0.0360107421875, 0.052215576171875, 0.041534423828125, -0.053802490234375, -0.0300445556640625, -0.044586181640625, 0.01470947265625, -0.00982666015625, 0.07891845703125, -0.004077911376953125, -0.010955810546875, 0.0285491943359375, -0.0016851425170898438, -0.038726806640625, -0.007434844970703125, -0.06549072265625, -0.0225982666015625, 0.0340576171875, 0.0273895263671875, 0.034423828125, 0.052581787109375, 0.034515380859375, 0.0228271484375, -0.0183563232421875, 0.019866943359375, -0.028778076171875, -0.007152557373046875, -0.00978851318359375, -0.0191802978515625, -0.0509033203125, -0.0024566650390625, 0.0472412109375, 0.01270294189453125, -0.0274505615234375, 0.020782470703125, 0.01506805419921875, 0.049774169921875, -0.03973388671875, 0.00925445556640625, -0.01403045654296875, 0.0179290771484375, -0.0275115966796875, -0.00856781005859375, 0.003925323486328125, -0.02423095703125, -0.01473236083984375, -0.0689697265625, 0.01045989990234375, 0.02490234375, 0.0693359375, 0.016143798828125, -0.0147552490234375, -0.0178375244140625, -0.024383544921875, 0.058685302734375, -0.054443359375, -0.001129150390625, 0.0169677734375, 0.0266571044921875, -0.038330078125, -0.0672607421875, -0.059722900390625, -0.0009698867797851562, -0.009735107421875, 0.01806640625, -0.046051025390625, -0.00843048095703125, 0.0211029052734375, 0.041717529296875, -0.031158447265625, -0.00012671947479248047, -0.036102294921875, -0.0184783935546875, 0.034088134765625, 0.0191192626953125, 0.01427459716796875, -0.0214080810546875, -0.041168212890625, -0.0142364501953125, -0.0343017578125, 0.0126495361328125, 0.02020263671875, 0.026214599609375, -0.058837890625, 0.06304931640625, -0.0119781494140625, 0.046417236328125, 0.0291748046875, -0.0040130615234375, 0.0283355712890625, -0.0171966552734375, -0.0274505615234375, -0.004840850830078125, 0.0738525390625, 
0.035430908203125, -0.0140838623046875, 0.03570556640625, -0.0149383544921875, -0.005229949951171875, 0.00839996337890625, -0.08062744140625, -0.002216339111328125, 0.01419830322265625, -0.0430908203125, -0.0290374755859375, 0.0008420944213867188, -0.08038330078125, -0.034271240234375, 0.00923919677734375, 0.035369873046875, -0.0201568603515625, -0.0100860595703125, 0.0224151611328125, -0.0231170654296875, 0.02606201171875, 0.05126953125, -0.048675537109375, 0.051177978515625, 0.044891357421875, 0.0621337890625, 0.015106201171875, -0.006511688232421875, -0.064208984375, -0.005706787109375, -0.01239776611328125, 0.049224853515625, -0.0189056396484375, -0.056671142578125, -0.022979736328125, 0.0128631591796875, 0.00846099853515625, -0.05126953125, 0.044403076171875, -0.007350921630859375, 0.018524169921875, -0.03326416015625, -0.0074462890625, -0.011199951171875, 0.005558013916015625, -0.041778564453125, 0.07427978515625, 0.032623291015625, -0.07672119140625, -0.0179595947265625, -0.054779052734375, -0.011138916015625, -0.0112152099609375, -0.004245758056640625, -0.036956787109375, 0.00963592529296875, 0.0005846023559570312, 0.019989013671875, -0.045013427734375, -0.035919189453125, -0.025909423828125, -0.034942626953125, 0.0343017578125, -0.0026454925537109375, 0.06256103515625, 0.0199127197265625, -0.055633544921875, -0.0040130615234375, -0.067626953125, -0.004184722900390625, 0.0139312744140625, -0.042022705078125, 0.0028362274169921875, -0.027252197265625, 0.00939178466796875, 0.0155792236328125, 0.032684326171875, -0.05828857421875, 0.026702880859375, -0.0032806396484375, 0.0245361328125, 0.07830810546875, 0.00417327880859375, 0.006702423095703125, -0.041015625, 0.047271728515625, -0.00843048095703125, 0.020416259765625, 0.027099609375, -0.05047607421875, -0.08319091796875, -0.04364013671875, -0.006900787353515625, 0.03277587890625, -0.044952392578125, 0.02093505859375, 0.004901885986328125, -0.050445556640625, -0.03643798828125, 0.0121917724609375, 
0.04754638671875, 0.06072998046875, 0.0377197265625, -0.0555419921875, -0.054046630859375, -0.069091796875, 0.01395416259765625, -0.0264892578125, -0.0086669921875, 0.035858154296875, 0.040802001953125, -0.03466796875, 0.05322265625, -0.07794189453125, -0.0254058837890625, 0.0026607513427734375, 0.00844573974609375, 0.0318603515625, 0.046234130859375, 0.05462646484375, -0.048370361328125, -0.0271148681640625, -0.002086639404296875, -0.02862548828125, -0.039398193359375, 0.0189208984375, -0.04058837890625, -0.01172637939453125, 0.006000518798828125, -0.066162109375, 0.03143310546875, 0.0350341796875, -0.055633544921875, 0.055419921875, -0.02569580078125, 0.032623291015625, -0.076416015625, 0.0206298828125, 0.0207061767578125, -0.02166748046875, -0.04815673828125, 0.015777587890625, -0.002513885498046875, 0.01154327392578125, -0.0413818359375, 0.0546875, -0.02520751953125, -0.0035572052001953125, -0.0239105224609375, -0.00960540771484375, 0.0098114013671875, 0.0255126953125, -0.015228271484375, 0.057891845703125, 0.0330810546875, -0.04541015625, 0.040557861328125, 0.03167724609375, -0.00792694091796875, 0.058258056640625, -0.08154296875, 0.006145477294921875, -0.000911712646484375, 0.0213623046875, -0.075439453125, -0.0214080810546875, 0.07720947265625, -0.024505615234375, 0.01306915283203125, -0.01224517822265625, -0.0321044921875, -0.037506103515625, -0.0538330078125, 0.0268707275390625, 0.06512451171875, -0.03350830078125, 0.04241943359375, 0.01468658447265625, -0.0027484893798828125, -0.03387451171875, -0.0804443359375, -0.01232147216796875, -0.031036376953125, -0.03045654296875, 0.03973388671875, -0.030517578125, -0.0247344970703125, -0.0128021240234375, -0.0163421630859375, -0.0221405029296875, -0.005687713623046875, 0.038238525390625, 0.0177154541015625, -0.01580810546875, -0.044281005859375, 0.00669097900390625, -0.0174560546875, -0.00537109375, -0.008880615234375, 0.046173095703125, -0.024658203125, -0.032379150390625, -0.04046630859375, 0.01467132568359375, 
0.0390625, -0.00331878662109375, 0.034423828125, 0.049285888671875, -0.0243988037109375, -0.005733489990234375, -0.049713134765625, -0.006183624267578125, -0.039306640625, -0.01247406005859375, -0.0180511474609375, -0.053741455078125, 0.05108642578125, 0.03619384765625, 0.0036067962646484375, 0.041778564453125, 0.038543701171875, -0.0071563720703125, 0.047088623046875, 0.047393798828125, 0.0019140243530273438, 0.024322509765625, -0.035675048828125, -0.007671356201171875, -0.082275390625, -0.03228759765625, -0.0418701171875, -0.0258026123046875, -0.03961181640625, -0.05145263671875, 0.0246124267578125, 0.0230865478515625, -0.0207672119140625, 0.0338134765625, -0.034332275390625, -0.004116058349609375, 0.020477294921875, 0.02069091796875, 0.01345062255859375, -0.0014333724975585938, 0.008209228515625, -0.005535125732421875, -0.01702880859375, -0.01654052734375, 0.042083740234375, 0.0218048095703125, 0.0704345703125, 0.026214599609375, 0.0689697265625, 0.0250091552734375, 0.01043701171875, -0.037567138671875, 0.052734375, -0.0017309188842773438, -0.0171356201171875, -0.003192901611328125, -0.0207061767578125, -0.0706787109375, 0.0316162109375, 0.00017142295837402344, -0.0394287109375, 0.0249786376953125, 0.021942138671875, -0.0260162353515625, 0.0171356201171875, -0.02813720703125, 0.038818359375, -0.0005154609680175781, -0.0246429443359375, -0.005245208740234375, -0.033782958984375, 0.022125244140625, -0.00666046142578125, 0.021270751953125, -0.02191162109375, -0.03472900390625, 0.06793212890625, -0.073974609375, 0.07293701171875, -0.0186309814453125, -0.024505615234375, 0.0364990234375, 0.004177093505859375, 0.039581298828125, 0.0171051025390625, -0.0079498291015625, -0.007770538330078125, -0.02734375, -0.038604736328125, -0.034698486328125, 0.05975341796875, -0.050262451171875, -0.056732177734375, -0.03125, -0.021240234375, 0.0277252197265625, 0.0126800537109375, 0.01953125, 0.0229339599609375, -0.001224517822265625, -0.0027618408203125, 0.055633544921875, 
0.004100799560546875, 0.0318603515625, 0.04534912109375, -0.0176849365234375, -0.03680419921875, 0.05889892578125, -0.0009236335754394531, 0.0118560791015625, 0.027313232421875, 0.0440673828125, -0.01276397705078125, -0.024810791015625, -0.036712646484375, 0.0316162109375, -0.04412841796875, -0.01367950439453125, -0.04443359375, -0.005046844482421875, -0.0208587646484375, -0.0142822265625, -0.0229034423828125, -0.0430908203125, -0.057098388671875, 0.0155181884765625, 0.058807373046875, 0.04376220703125, -0.033538818359375, 0.030609130859375, -0.03973388671875, 0.0445556640625, 0.03094482421875, 0.004192352294921875, -0.003063201904296875, -0.0672607421875, 0.01546478271484375, 0.00954437255859375, -0.0225067138671875, -0.0592041015625, 0.0292205810546875, -0.00008237361907958984, 0.0291290283203125, 0.0224456787109375, -0.00315093994140625, 0.0770263671875, -0.036346435546875, 0.05010986328125, 0.03564453125, -0.07366943359375, 0.059478759765625, -0.033721923828125, 0.01910400390625, 0.0280303955078125, 0.0223236083984375, -0.042724609375, -0.032318115234375, -0.045684814453125, -0.056121826171875, 0.037567138671875, 0.034393310546875, 0.004512786865234375, -0.00237274169921875, 0.0254058837890625, 0.0159454345703125, 0.01898193359375, -0.04931640625, -0.048858642578125, -0.023223876953125, 0.00872802734375, 0.00986480712890625, -0.0276031494140625, -0.021331787109375, -0.01232147216796875, 0.054229736328125, -0.0032749176025390625, 0.032440185546875, 0.02191162109375, 0.011749267578125, -0.00893402099609375, 0.018951416015625, 0.0504150390625, 0.0469970703125, -0.028228759765625, 0.0113677978515625, 0.01125335693359375, -0.049468994140625, 0.0170745849609375, 0.0225830078125, -0.0023975372314453125, 0.01004791259765625, 0.01270294189453125, 0.040435791015625, 0.009552001953125, -0.037353515625, 0.043365478515625, -0.01995849609375, -0.003925323486328125, -0.0120697021484375, 0.015777587890625, -0.0024662017822265625, 0.030029296875, 0.03411865234375, 
0.005558013916015625, 0.0156097412109375, -0.0180511474609375, -0.0018863677978515625, 0.020111083984375, -0.0240478515625, -0.019195556640625, 0.047454833984375, 0.0012636184692382812, -0.0185089111328125, 0.0177764892578125, -0.0283966064453125, -0.0204010009765625, 0.060302734375, 0.056671142578125, 0.04559326171875, -0.005100250244140625, 0.015869140625, 0.0199737548828125, 0.01457977294921875, -0.0008158683776855469, 0.0653076171875, -0.0020961761474609375, -0.0201568603515625, 0.0027217864990234375, -0.052490234375, -0.0233154296875, 0.01513671875, -0.049163818359375, 0.034423828125, -0.07476806640625, -0.01104736328125, -0.0023593902587890625, 0.011138916015625, -0.04400634765625, 0.018402099609375, -0.006000518798828125, 0.08673095703125, -0.0672607421875, 0.035736083984375, 0.047088623046875, -0.031494140625, -0.04791259765625, -0.0050201416015625, 0.017333984375, -0.08184814453125, 0.045989990234375, 0.01416015625, -0.0233154296875, -0.01418304443359375, -0.03765869140625, -0.06805419921875, 0.08843994140625, 0.008636474609375, -0.033782958984375, 0.01513671875, -0.01910400390625, 0.022491455078125, -0.031646728515625, 0.015228271484375, 0.0228271484375, 0.040313720703125, 0.0025234222412109375, -0.07305908203125, 0.043548583984375, -0.03265380859375, 0.0297088623046875, 0.00836944580078125, -0.07305908203125, 0.0762939453125, -0.00930023193359375, -0.00998687744140625, 0.047088623046875, 0.062042236328125, 0.04986572265625, -0.00077056884765625, 0.040618896484375, 0.0589599609375, 0.04608154296875, -0.01416015625, 0.0809326171875, 0.0005207061767578125, 0.032501220703125, 0.06427001953125, -0.00616455078125, 0.042388916015625, 0.053619384765625, -0.018829345703125, 0.044219970703125, 0.06060791015625, -0.0170135498046875, 0.0399169921875, 0.03753662109375, -0.031036376953125, 0.0227508544921875, -0.0282135009765625, -0.058197021484375, 0.007656097412109375, 0.0170135498046875, -0.007259368896484375, -0.0109710693359375, -0.0023403167724609375, 
0.02520751953125, -0.01535797119140625, -0.032745361328125, 0.0106201171875, 0.0143280029296875, -0.0223846435546875, 0.057220458984375, 0.023956298828125, 0.085205078125, -0.058929443359375, -0.01141357421875, -0.0238189697265625, 0.008026123046875, -0.033782958984375, -0.06854248046875, -0.0007886886596679688, 0.01068115234375, -0.0171966552734375, 0.00032711029052734375, 0.050872802734375, -0.00911712646484375, -0.0299835205078125, 0.032470703125, 0.016754150390625, 0.03167724609375, 0.0291595458984375, -0.06695556640625, 0.03863525390625, 0.0145263671875, -0.03155517578125, 0.01209259033203125, 0.0228118896484375, 0.015106201171875, 0.046630859375, 0.03466796875, 0.01390838623046875, 0.012969970703125, -0.004474639892578125, 0.08544921875, -0.0328369140625, -0.03131103515625, -0.0736083984375, 0.052398681640625, -0.006229400634765625, -0.0390625, 0.060089111328125, 0.047271728515625, 0.048492431640625, -0.014739990234375, 0.05487060546875, -0.03448486328125, 0.013671875, -0.052581787109375, 0.037994384765625, -0.05462646484375, 0.01007843017578125, -0.01959228515625, -0.055511474609375, 0.00705718994140625, 0.05572509765625, 0.01372528076171875, 0.007152557373046875, 0.041046142578125, 0.07427978515625, -0.0200653076171875, -0.023956298828125, -0.015106201171875, 0.0196990966796875, 0.0243377685546875, 0.046417236328125, 0.05865478515625, -0.037109375, 0.0289459228515625, -0.047088623046875, -0.036102294921875, -0.0238800048828125, -0.069580078125, -0.056610107421875, -0.027313232421875, -0.03558349609375, -0.052734375, -0.0010471343994140625, 0.06085205078125, 0.0521240234375, -0.046142578125, -0.03509521484375, 0.000995635986328125, -0.0014085769653320312, 0.0026607513427734375, -0.015838623046875, 0.007099151611328125, 0.025848388671875, -0.051727294921875, 0.023101806640625, 0.0078125, 0.060272216796875, 0.0020580291748046875, -0.022674560546875, -0.010284423828125, -0.004451751708984375, 0.018768310546875, 0.049407958984375, -0.055084228515625, 
-0.019500732421875, -0.02252197265625, -0.0107574462890625, 0.00878143310546875, 0.05047607421875, -0.03033447265625, 0.0034656524658203125, 0.042083740234375, 0.01519012451171875, 0.048980712890625, -0.01314544677734375, 0.0217132568359375, -0.05609130859375, 0.027923583984375, -0.0023860931396484375, 0.047454833984375, 0.0169830322265625, -0.0307159423828125, 0.038330078125, 0.02471923828125, -0.0179290771484375, -0.061065673828125, 0.01441192626953125, -0.08935546875, -0.016693115234375, 0.066650390625, -0.003391265869140625, -0.034210205078125, 0.029754638671875, -0.052642822265625, 0.01267242431640625, -0.041778564453125, 0.048675537109375, 0.028778076171875, -0.0234527587890625, 0.004169464111328125, -0.035919189453125, 0.0281219482421875, 0.0281219482421875, -0.06854248046875, -0.01000213623046875, 0.040435791015625, 0.033477783203125, 0.03497314453125, 0.07110595703125, -0.009429931640625, 0.034515380859375, -0.0096282958984375, 0.0021266937255859375, -0.0101165771484375, 0.00638580322265625, -0.041534423828125, -0.0059051513671875, 0.004917144775390625, -0.041015625 ] ]
SmilingWolf/wd-v1-4-convnextv2-tagger-v2
2023-03-23T17:09:39.000Z
[ "keras", "onnx", "license:apache-2.0", "has_space", "region:us" ]
null
SmilingWolf
null
null
SmilingWolf/wd-v1-4-convnextv2-tagger-v2
23
8,513
keras
2023-03-19T11:19:38
--- license: apache-2.0 --- # WD 1.4 ConvNextV2 Tagger V2 Supports ratings, characters and general tags. Trained using https://github.com/SmilingWolf/SW-CV-ModelZoo. TPUs used for training kindly provided by the [TRC program](https://sites.research.google/trc/about/). ## Dataset Last image id: 5944504 Trained on Danbooru images with IDs modulo 0000-0899. Validated on images with IDs modulo 0950-0999. Images with less than 10 general tags were filtered out. Tags with less than 600 images were filtered out. ## Validation results `P=R: threshold = 0.3710, F1 = 0.6862` ## Final words Subject to change and updates. Downstream users are encouraged to use tagged releases rather than relying on the head of the repo.
734
[ [ -0.0531005859375, -0.01224517822265625, -0.002132415771484375, 0.005184173583984375, -0.05926513671875, -0.01497650146484375, -0.0100555419921875, -0.0400390625, 0.01227569580078125, 0.03863525390625, -0.05181884765625, -0.068359375, -0.0423583984375, -0.00019025802612304688, -0.004058837890625, 0.08880615234375, 0.0082855224609375, 0.0104522705078125, -0.0205535888671875, -0.033477783203125, -0.033294677734375, -0.041351318359375, -0.05694580078125, -0.006908416748046875, 0.064453125, 0.038116455078125, 0.033721923828125, 0.01739501953125, 0.0638427734375, 0.0137481689453125, 0.00849151611328125, 0.0268402099609375, -0.027435302734375, -0.004604339599609375, -0.027740478515625, -0.00598907470703125, -0.03582763671875, 0.011260986328125, 0.0203857421875, 0.001922607421875, -0.005474090576171875, 0.03125, -0.01313018798828125, 0.041595458984375, -0.02935791015625, 0.0149993896484375, -0.03973388671875, -0.007564544677734375, -0.0148773193359375, -0.0166473388671875, -0.02349853515625, -0.004878997802734375, 0.01139068603515625, -0.06695556640625, 0.0167694091796875, 0.0025577545166015625, 0.104248046875, 0.0101470947265625, -0.040924072265625, -0.006732940673828125, -0.037994384765625, 0.058197021484375, -0.034423828125, 0.003726959228515625, 0.034454345703125, 0.0499267578125, -0.01210784912109375, -0.058837890625, -0.036865234375, -0.00952911376953125, 0.01214599609375, 0.004619598388671875, -0.027984619140625, 0.0159454345703125, 0.04620361328125, 0.015472412109375, -0.059356689453125, 0.0283966064453125, -0.0245361328125, -0.02972412109375, 0.04620361328125, 0.0023746490478515625, 0.004425048828125, -0.02471923828125, -0.039947509765625, -0.0291900634765625, -0.02099609375, 0.0188751220703125, 0.03216552734375, -0.00893402099609375, -0.020721435546875, 0.05291748046875, -0.0177001953125, 0.031341552734375, -0.015838623046875, -0.00543975830078125, 0.045623779296875, -0.01361083984375, -0.0301666259765625, -0.03985595703125, 0.05059814453125, 
0.06732177734375, 0.03564453125, 0.013092041015625, -0.0283966064453125, 0.038726806640625, 0.01334381103515625, -0.043548583984375, -0.033935546875, -0.0030841827392578125, -0.0302581787109375, -0.035247802734375, 0.0341796875, -0.03985595703125, -0.0247650146484375, -0.00560760498046875, 0.031646728515625, -0.0160369873046875, -0.04766845703125, -0.003139495849609375, -0.0655517578125, 0.042327880859375, 0.04644775390625, -0.037567138671875, 0.00933074951171875, 0.045928955078125, 0.053192138671875, -0.004657745361328125, 0.004192352294921875, -0.03643798828125, -0.00878143310546875, -0.0162811279296875, 0.06072998046875, -0.0216217041015625, -0.044036865234375, 0.01488494873046875, 0.02655029296875, 0.037261962890625, -0.0165252685546875, 0.0611572265625, -0.042388916015625, -0.0018863677978515625, -0.0268402099609375, -0.02764892578125, -0.0205078125, 0.042572021484375, -0.04974365234375, 0.07611083984375, 0.041595458984375, -0.068115234375, 0.04071044921875, -0.03509521484375, -0.0220947265625, 0.0243988037109375, -0.0008144378662109375, -0.0379638671875, -0.01007080078125, -0.000011920928955078125, 0.0293731689453125, 0.000896453857421875, 0.0098876953125, -0.029541015625, -0.0257720947265625, 0.0165863037109375, -0.018096923828125, 0.0293731689453125, 0.0218963623046875, -0.0011949539184570312, 0.018707275390625, -0.051116943359375, 0.00444793701171875, -0.0003833770751953125, -0.01116180419921875, -0.03314208984375, -0.018157958984375, 0.035919189453125, 0.0287017822265625, -0.00272369384765625, -0.052001953125, 0.037445068359375, -0.0019474029541015625, 0.01316070556640625, 0.04718017578125, 0.02569580078125, 0.028656005859375, -0.015777587890625, 0.056671142578125, 0.024627685546875, 0.025665283203125, 0.000782012939453125, -0.04815673828125, -0.056884765625, -0.024200439453125, 0.031280517578125, 0.0235443115234375, -0.0972900390625, 0.04510498046875, -0.0229644775390625, -0.07025146484375, -0.017913818359375, -0.0193634033203125, 0.025665283203125, 
0.028656005859375, 0.0277557373046875, -0.047271728515625, -0.051025390625, -0.0615234375, 0.01497650146484375, -0.005084991455078125, -0.0345458984375, 0.0284423828125, 0.041168212890625, -0.03594970703125, 0.060821533203125, -0.035491943359375, -0.050689697265625, -0.0227203369140625, 0.0268402099609375, 0.0145263671875, 0.039886474609375, 0.060089111328125, -0.06475830078125, -0.02337646484375, -0.00960540771484375, -0.0266571044921875, 0.0098724365234375, 0.001346588134765625, -0.01485443115234375, 0.03656005859375, 0.007297515869140625, -0.0479736328125, 0.0494384765625, 0.03839111328125, -0.018890380859375, 0.050048828125, -0.033721923828125, 0.00817108154296875, -0.06854248046875, -0.00716400146484375, 0.06134033203125, -0.037933349609375, -0.0172576904296875, -0.0169219970703125, 0.0111083984375, 0.0124053955078125, -0.0487060546875, 0.03265380859375, -0.01546478271484375, -0.00726318359375, -0.01146697998046875, 0.00885772705078125, 0.026641845703125, 0.045135498046875, 0.01219940185546875, 0.0175323486328125, 0.0537109375, -0.056060791015625, 0.049530029296875, 0.0171661376953125, -0.04913330078125, 0.0362548828125, -0.0517578125, -0.002468109130859375, -0.0111083984375, 0.043212890625, -0.074462890625, -0.022003173828125, 0.0222320556640625, -0.04925537109375, 0.041534423828125, -0.032623291015625, -0.036041259765625, -0.06658935546875, -0.04266357421875, 0.003536224365234375, 0.040283203125, -0.044403076171875, 0.0130157470703125, 0.03973388671875, 0.035675048828125, -0.045166015625, -0.06610107421875, -0.0010499954223632812, -0.00566864013671875, -0.037445068359375, 0.00952911376953125, -0.00849151611328125, 0.0010786056518554688, -0.0164794921875, -0.0022068023681640625, -0.01256561279296875, -0.00901031494140625, 0.02630615234375, 0.0411376953125, 0.00507354736328125, 0.0159454345703125, -0.0051422119140625, -0.02264404296875, -0.0054779052734375, -0.0209197998046875, 0.02337646484375, -0.00006645917892456055, 0.0135955810546875, -0.0248870849609375, 
0.01035308837890625, 0.0307464599609375, -0.035736083984375, 0.02032470703125, 0.08270263671875, -0.0335693359375, -0.0238189697265625, -0.029052734375, 0.00946807861328125, -0.03460693359375, 0.0494384765625, -0.037811279296875, -0.058197021484375, 0.0283355712890625, 0.0170440673828125, 0.0017347335815429688, 0.049285888671875, 0.027313232421875, -0.043243408203125, 0.0748291015625, 0.049102783203125, 0.016021728515625, 0.035369873046875, -0.043914794921875, -0.00954437255859375, -0.07318115234375, -0.0287322998046875, -0.032623291015625, -0.0270538330078125, -0.0863037109375, -0.036956787109375, 0.0124664306640625, -0.004638671875, -0.007205963134765625, 0.048858642578125, -0.068115234375, 0.0301055908203125, 0.038818359375, 0.016387939453125, -0.00826263427734375, -0.005832672119140625, 0.0035037994384765625, -0.00917816162109375, -0.021209716796875, -0.026397705078125, 0.059356689453125, 0.04766845703125, 0.0775146484375, 0.0004153251647949219, 0.018280029296875, 0.0443115234375, 0.024078369140625, -0.07354736328125, 0.047943115234375, -0.03741455078125, -0.045867919921875, -0.012176513671875, -0.0170440673828125, -0.04193115234375, 0.00666046142578125, -0.020843505859375, -0.0318603515625, 0.031402587890625, 0.003971099853515625, 0.0149993896484375, 0.0255584716796875, -0.0460205078125, 0.06500244140625, -0.001071929931640625, 0.02093505859375, -0.005275726318359375, -0.060699462890625, 0.0172119140625, 0.02093505859375, -0.0111846923828125, -0.039337158203125, -0.003955841064453125, 0.05462646484375, -0.0285797119140625, 0.0421142578125, -0.043487548828125, 0.007732391357421875, 0.0216827392578125, -0.00977325439453125, 0.03509521484375, 0.0255126953125, 0.011260986328125, 0.040130615234375, 0.0136566162109375, -0.005420684814453125, -0.015899658203125, 0.052215576171875, -0.062408447265625, -0.01629638671875, -0.05853271484375, -0.0172271728515625, 0.006488800048828125, -0.00501251220703125, 0.049346923828125, 0.041107177734375, -0.018798828125, 
0.0151519775390625, 0.05059814453125, -0.01428985595703125, 0.03802490234375, 0.03424072265625, 0.00214385986328125, -0.04315185546875, 0.05908203125, 0.00577545166015625, -0.0011548995971679688, 0.0271759033203125, -0.005924224853515625, -0.026702880859375, -0.02105712890625, -0.0284881591796875, 0.0134124755859375, -0.0677490234375, -0.061065673828125, -0.039581298828125, -0.0229644775390625, -0.03057861328125, 0.00506591796875, -0.01210784912109375, -0.046478271484375, -0.049713134765625, -0.0177459716796875, 0.043548583984375, 0.040771484375, 0.0031681060791015625, 0.0175018310546875, -0.049530029296875, 0.0234222412109375, 0.00287628173828125, 0.03631591796875, -0.03302001953125, -0.05609130859375, -0.027191162109375, 0.006343841552734375, -0.0224761962890625, -0.0299072265625, 0.031341552734375, 0.026275634765625, 0.0287933349609375, 0.0309906005859375, 0.01727294921875, 0.034210205078125, -0.0254974365234375, 0.084228515625, 0.036163330078125, -0.0528564453125, 0.043701171875, -0.0247344970703125, 0.0208587646484375, 0.0304412841796875, 0.033477783203125, -0.048370361328125, -0.020599365234375, -0.038177490234375, -0.0413818359375, 0.03887939453125, -0.00357818603515625, 0.01134490966796875, 0.00156402587890625, 0.0321044921875, 0.00354766845703125, 0.002117156982421875, -0.05133056640625, -0.01538848876953125, -0.042510986328125, -0.01297760009765625, 0.005741119384765625, -0.0361328125, 0.0026092529296875, -0.031005859375, 0.052703857421875, -0.0194549560546875, 0.004657745361328125, 0.0309295654296875, -0.007648468017578125, -0.00949859619140625, 0.0019502639770507812, 0.049835205078125, 0.033935546875, -0.029541015625, -0.0033855438232421875, -0.020111083984375, -0.043243408203125, -0.005645751953125, -0.013671875, 0.0022449493408203125, 0.0312042236328125, 0.002044677734375, 0.0682373046875, 0.0005655288696289062, -0.0164794921875, 0.060028076171875, -0.04058837890625, -0.041351318359375, -0.038665771484375, 0.0226898193359375, -0.024871826171875, 
0.007038116455078125, 0.018768310546875, 0.037933349609375, 0.00754547119140625, -0.00749969482421875, 0.0208587646484375, 0.0400390625, -0.046112060546875, -0.02984619140625, 0.04150390625, 0.03265380859375, -0.00705718994140625, 0.056884765625, -0.036651611328125, -0.032562255859375, 0.065185546875, 0.0163726806640625, 0.0682373046875, -0.004154205322265625, 0.0322265625, 0.05560302734375, 0.025299072265625, 0.00514984130859375, 0.03436279296875, -0.00485992431640625, -0.042816162109375, -0.0063629150390625, -0.048095703125, -0.044647216796875, 0.00588226318359375, -0.06732177734375, 0.057464599609375, -0.03021240234375, -0.0343017578125, 0.0134429931640625, 0.025299072265625, -0.0712890625, 0.04449462890625, 0.033966064453125, 0.10064697265625, -0.049041748046875, 0.1070556640625, 0.049591064453125, -0.037933349609375, -0.057403564453125, -0.0364990234375, -0.012847900390625, -0.0244293212890625, 0.016632080078125, 0.0504150390625, 0.0196380615234375, -0.0015172958374023438, -0.0665283203125, -0.036041259765625, 0.0830078125, -0.0155792236328125, -0.049346923828125, 0.00617218017578125, -0.00794219970703125, 0.048431396484375, -0.040283203125, 0.0218963623046875, 0.0283203125, 0.043975830078125, 0.02862548828125, -0.061309814453125, -0.0248870849609375, -0.0254669189453125, 0.0250396728515625, -0.0030364990234375, -0.034912109375, 0.03839111328125, -0.0294647216796875, -0.0234832763671875, 0.0263214111328125, 0.053009033203125, 0.004024505615234375, 0.02386474609375, 0.043548583984375, 0.050384521484375, 0.038421630859375, -0.04144287109375, 0.07415771484375, 0.0250244140625, 0.044036865234375, 0.0755615234375, -0.01416778564453125, 0.043670654296875, 0.0309906005859375, 0.00433349609375, 0.047637939453125, 0.08203125, -0.0638427734375, 0.0589599609375, 0.0185394287109375, 0.00650787353515625, -0.0048675537109375, -0.02685546875, -0.020843505859375, 0.033172607421875, 0.035491943359375, -0.00627899169921875, 0.00417327880859375, 0.03021240234375, 
-0.0139007568359375, -0.0228118896484375, -0.0340576171875, 0.058135986328125, 0.0080718994140625, -0.036956787109375, 0.0304107666015625, 0.023712158203125, 0.07232666015625, -0.07611083984375, -0.0169830322265625, -0.0057525634765625, 0.01018524169921875, -0.0263214111328125, -0.08563232421875, 0.00966644287109375, 0.0020923614501953125, -0.010589599609375, 0.00452423095703125, 0.06903076171875, -0.046783447265625, -0.027923583984375, 0.01213836669921875, -0.012847900390625, 0.01062774658203125, -0.01184844970703125, -0.04437255859375, 0.0171966552734375, -0.0013551712036132812, -0.020263671875, -0.0037441253662109375, 0.029510498046875, -0.01459503173828125, 0.038970947265625, 0.0350341796875, -0.0026416778564453125, -0.0246124267578125, 0.0322265625, 0.078125, -0.04608154296875, -0.04034423828125, -0.0223236083984375, 0.05328369140625, -0.0147552490234375, -0.03564453125, 0.053314208984375, 0.03564453125, 0.07220458984375, -0.02606201171875, 0.052215576171875, -0.01052093505859375, 0.0166473388671875, -0.007213592529296875, 0.05438232421875, -0.033966064453125, -0.0082855224609375, -0.01522064208984375, -0.057861328125, -0.0287322998046875, 0.0261993408203125, 0.0157318115234375, -0.01334381103515625, 0.041168212890625, 0.051849365234375, 0.01690673828125, -0.0194854736328125, 0.038238525390625, -0.0117034912109375, 0.051422119140625, 0.0297088623046875, 0.04217529296875, -0.052734375, 0.046295166015625, -0.0228271484375, -0.0270538330078125, -0.015289306640625, -0.0611572265625, -0.0787353515625, -0.0341796875, -0.044189453125, -0.0416259765625, -0.03314208984375, 0.0523681640625, 0.060577392578125, -0.0421142578125, 0.023040771484375, 0.0105743408203125, 0.0088653564453125, -0.0038890838623046875, -0.0172271728515625, 0.01416778564453125, 0.0089569091796875, -0.044921875, -0.0180206298828125, 0.02337646484375, 0.02081298828125, -0.034912109375, -0.023590087890625, -0.005413055419921875, -0.00231170654296875, 0.0296783447265625, 0.013946533203125, 
-0.040008544921875, -0.031982421875, -0.0074310302734375, -0.034820556640625, 0.0202789306640625, 0.04608154296875, -0.048614501953125, 0.044677734375, 0.043548583984375, -0.008148193359375, 0.043670654296875, 0.00614166259765625, 0.0012311935424804688, -0.08514404296875, 0.036651611328125, 0.0193023681640625, 0.025604248046875, 0.042724609375, -0.0247802734375, 0.042449951171875, 0.032562255859375, -0.0484619140625, -0.05157470703125, 0.004062652587890625, -0.11480712890625, 0.014312744140625, 0.08489990234375, -0.0285797119140625, -0.0243072509765625, 0.005306243896484375, -0.0183258056640625, 0.023651123046875, -0.0275726318359375, 0.016448974609375, 0.044769287109375, 0.03387451171875, -0.0174102783203125, -0.0212860107421875, 0.042877197265625, -0.0252685546875, -0.05120849609375, -0.0243072509765625, 0.031158447265625, 0.0272979736328125, -0.0007228851318359375, 0.03411865234375, -0.0163421630859375, 0.04693603515625, -0.0051422119140625, 0.036712646484375, -0.0166778564453125, -0.0166168212890625, -0.026611328125, -0.0340576171875, -0.0096435546875, -0.04888916015625 ] ]
unitary/unbiased-toxic-roberta
2023-08-18T10:43:39.000Z
[ "transformers", "pytorch", "jax", "roberta", "text-classification", "arxiv:1703.04009", "arxiv:1905.12516", "license:apache-2.0", "endpoints_compatible", "region:us" ]
text-classification
unitary
null
null
unitary/unbiased-toxic-roberta
7
8,510
transformers
2022-03-02T23:29:05
--- license: apache-2.0 --- <div align="center"> **⚠️ Disclaimer:** The huggingface models currently give different results to the detoxify library (see issue [here](https://github.com/unitaryai/detoxify/issues/15)). For the most up to date models we recommend using the models from https://github.com/unitaryai/detoxify # 🙊 Detoxify ## Toxic Comment Classification with ⚡ Pytorch Lightning and 🤗 Transformers ![CI testing](https://github.com/unitaryai/detoxify/workflows/CI%20testing/badge.svg) ![Lint](https://github.com/unitaryai/detoxify/workflows/Lint/badge.svg) </div> ![Examples image](examples.png) ## Description Trained models & code to predict toxic comments on 3 Jigsaw challenges: Toxic comment classification, Unintended Bias in Toxic comments, Multilingual toxic comment classification. Built by [Laura Hanu](https://laurahanu.github.io/) at [Unitary](https://www.unitary.ai/), where we are working to stop harmful content online by interpreting visual content in context. Dependencies: - For inference: - 🤗 Transformers - ⚡ Pytorch lightning - For training will also need: - Kaggle API (to download data) | Challenge | Year | Goal | Original Data Source | Detoxify Model Name | Top Kaggle Leaderboard Score | Detoxify Score |-|-|-|-|-|-|-| | [Toxic Comment Classification Challenge](https://www.kaggle.com/c/jigsaw-toxic-comment-classification-challenge) | 2018 | build a multi-headed model that’s capable of detecting different types of of toxicity like threats, obscenity, insults, and identity-based hate. | Wikipedia Comments | `original` | 0.98856 | 0.98636 | [Jigsaw Unintended Bias in Toxicity Classification](https://www.kaggle.com/c/jigsaw-unintended-bias-in-toxicity-classification) | 2019 | build a model that recognizes toxicity and minimizes this type of unintended bias with respect to mentions of identities. You'll be using a dataset labeled for identity mentions and optimizing a metric designed to measure unintended bias. 
| Civil Comments | `unbiased` | 0.94734 | 0.93639 | [Jigsaw Multilingual Toxic Comment Classification](https://www.kaggle.com/c/jigsaw-multilingual-toxic-comment-classification) | 2020 | build effective multilingual models | Wikipedia Comments + Civil Comments | `multilingual` | 0.9536 | 0.91655* *Score not directly comparable since it is obtained on the validation set provided and not on the test set. To update when the test labels are made available. It is also noteworthy to mention that the top leadearboard scores have been achieved using model ensembles. The purpose of this library was to build something user-friendly and straightforward to use. ## Limitations and ethical considerations If words that are associated with swearing, insults or profanity are present in a comment, it is likely that it will be classified as toxic, regardless of the tone or the intent of the author e.g. humorous/self-deprecating. This could present some biases towards already vulnerable minority groups. The intended use of this library is for research purposes, fine-tuning on carefully constructed datasets that reflect real world demographics and/or to aid content moderators in flagging out harmful content quicker. Some useful resources about the risk of different biases in toxicity or hate speech detection are: - [The Risk of Racial Bias in Hate Speech Detection](https://homes.cs.washington.edu/~msap/pdfs/sap2019risk.pdf) - [Automated Hate Speech Detection and the Problem of Offensive Language](https://arxiv.org/pdf/1703.04009.pdf%201.pdf) - [Racial Bias in Hate Speech and Abusive Language Detection Datasets](https://arxiv.org/pdf/1905.12516.pdf) ## Quick prediction The `multilingual` model has been trained on 7 different languages so it should only be tested on: `english`, `french`, `spanish`, `italian`, `portuguese`, `turkish` or `russian`. 
```bash # install detoxify pip install detoxify ``` ```python from detoxify import Detoxify # each model takes in either a string or a list of strings results = Detoxify('original').predict('example text') results = Detoxify('unbiased').predict(['example text 1','example text 2']) results = Detoxify('multilingual').predict(['example text','exemple de texte','texto de ejemplo','testo di esempio','texto de exemplo','örnek metin','пример текста']) # optional to display results nicely (will need to pip install pandas) import pandas as pd print(pd.DataFrame(results, index=input_text).round(5)) ``` For more details check the Prediction section. ## Labels All challenges have a toxicity label. The toxicity labels represent the aggregate ratings of up to 10 annotators according the following schema: - **Very Toxic** (a very hateful, aggressive, or disrespectful comment that is very likely to make you leave a discussion or give up on sharing your perspective) - **Toxic** (a rude, disrespectful, or unreasonable comment that is somewhat likely to make you leave a discussion or give up on sharing your perspective) - **Hard to Say** - **Not Toxic** More information about the labelling schema can be found [here](https://www.kaggle.com/c/jigsaw-unintended-bias-in-toxicity-classification/data). ### Toxic Comment Classification Challenge This challenge includes the following labels: - `toxic` - `severe_toxic` - `obscene` - `threat` - `insult` - `identity_hate` ### Jigsaw Unintended Bias in Toxicity Classification This challenge has 2 types of labels: the main toxicity labels and some additional identity labels that represent the identities mentioned in the comments. Only identities with more than 500 examples in the test set (combined public and private) are included during training as additional labels and in the evaluation calculation. 
- `toxicity` - `severe_toxicity` - `obscene` - `threat` - `insult` - `identity_attack` - `sexual_explicit` Identity labels used: - `male` - `female` - `homosexual_gay_or_lesbian` - `christian` - `jewish` - `muslim` - `black` - `white` - `psychiatric_or_mental_illness` A complete list of all the identity labels available can be found [here](https://www.kaggle.com/c/jigsaw-unintended-bias-in-toxicity-classification/data). ### Jigsaw Multilingual Toxic Comment Classification Since this challenge combines the data from the previous 2 challenges, it includes all labels from above, however the final evaluation is only on: - `toxicity` ## How to run First, install dependencies ```bash # clone project git clone https://github.com/unitaryai/detoxify # create virtual env python3 -m venv toxic-env source toxic-env/bin/activate # install project pip install -e detoxify cd detoxify # for training pip install -r requirements.txt ``` ## Prediction Trained models summary: |Model name| Transformer type| Data from |:--:|:--:|:--:| |`original`| `bert-base-uncased` | Toxic Comment Classification Challenge |`unbiased`| `roberta-base`| Unintended Bias in Toxicity Classification |`multilingual`| `xlm-roberta-base`| Multilingual Toxic Comment Classification For a quick prediction can run the example script on a comment directly or from a txt containing a list of comments. 
```bash # load model via torch.hub python run_prediction.py --input 'example' --model_name original # load model from from checkpoint path python run_prediction.py --input 'example' --from_ckpt_path model_path # save results to a .csv file python run_prediction.py --input test_set.txt --model_name original --save_to results.csv # to see usage python run_prediction.py --help ``` Checkpoints can be downloaded from the latest release or via the Pytorch hub API with the following names: - `toxic_bert` - `unbiased_toxic_roberta` - `multilingual_toxic_xlm_r` ```bash model = torch.hub.load('unitaryai/detoxify','toxic_bert') ``` Importing detoxify in python: ```python from detoxify import Detoxify results = Detoxify('original').predict('some text') results = Detoxify('unbiased').predict(['example text 1','example text 2']) results = Detoxify('multilingual').predict(['example text','exemple de texte','texto de ejemplo','testo di esempio','texto de exemplo','örnek metin','пример текста']) # to display results nicely import pandas as pd print(pd.DataFrame(results,index=input_text).round(5)) ``` ## Training If you do not already have a Kaggle account: - you need to create one to be able to download the data - go to My Account and click on Create New API Token - this will download a kaggle.json file - make sure this file is located in ~/.kaggle ```bash # create data directory mkdir jigsaw_data cd jigsaw_data # download data kaggle competitions download -c jigsaw-toxic-comment-classification-challenge kaggle competitions download -c jigsaw-unintended-bias-in-toxicity-classification kaggle competitions download -c jigsaw-multilingual-toxic-comment-classification ``` ## Start Training ### Toxic Comment Classification Challenge ```bash python create_val_set.py python train.py --config configs/Toxic_comment_classification_BERT.json ``` ### Unintended Bias in Toxicicity Challenge ```bash python train.py --config configs/Unintended_bias_toxic_comment_classification_RoBERTa.json ``` 
### Multilingual Toxic Comment Classification This is trained in 2 stages. First, train on all available data, and second, train only on the translated versions of the first challenge. The [translated data](https://www.kaggle.com/miklgr500/jigsaw-train-multilingual-coments-google-api) can be downloaded from Kaggle in french, spanish, italian, portuguese, turkish, and russian (the languages available in the test set). ```bash # stage 1 python train.py --config configs/Multilingual_toxic_comment_classification_XLMR.json # stage 2 python train.py --config configs/Multilingual_toxic_comment_classification_XLMR_stage2.json ``` ### Monitor progress with tensorboard ```bash tensorboard --logdir=./saved ``` ## Model Evaluation ### Toxic Comment Classification Challenge This challenge is evaluated on the mean AUC score of all the labels. ```bash python evaluate.py --checkpoint saved/lightning_logs/checkpoints/example_checkpoint.pth --test_csv test.csv ``` ### Unintended Bias in Toxicicity Challenge This challenge is evaluated on a novel bias metric that combines different AUC scores to balance overall performance. More information on this metric [here](https://www.kaggle.com/c/jigsaw-unintended-bias-in-toxicity-classification/overview/evaluation). ```bash python evaluate.py --checkpoint saved/lightning_logs/checkpoints/example_checkpoint.pth --test_csv test.csv # to get the final bias metric python model_eval/compute_bias_metric.py ``` ### Multilingual Toxic Comment Classification This challenge is evaluated on the AUC score of the main toxic label. ```bash python evaluate.py --checkpoint saved/lightning_logs/checkpoints/example_checkpoint.pth --test_csv test.csv ``` ### Citation ``` @misc{Detoxify, title={Detoxify}, author={Hanu, Laura and {Unitary team}}, howpublished={Github. https://github.com/unitaryai/detoxify}, year={2020} } ```
11,088
[ [ -0.011077880859375, -0.03680419921875, 0.030303955078125, 0.015594482421875, -0.00021731853485107422, -0.0034465789794921875, -0.00301361083984375, -0.036468505859375, 0.007152557373046875, 0.0277099609375, -0.03790283203125, -0.053924560546875, -0.047149658203125, 0.01549530029296875, -0.0046844482421875, 0.105712890625, 0.011199951171875, 0.016326904296875, 0.0018558502197265625, -0.012481689453125, -0.02423095703125, -0.037445068359375, -0.042022705078125, -0.03717041015625, 0.068115234375, 0.0179901123046875, 0.042694091796875, 0.01129913330078125, 0.0198211669921875, 0.02117919921875, -0.02789306640625, -0.0195465087890625, -0.0225982666015625, 0.00272369384765625, -0.036468505859375, -0.028900146484375, -0.0160369873046875, 0.01499176025390625, -0.0030460357666015625, 0.02447509765625, -0.00904083251953125, 0.016876220703125, -0.005146026611328125, 0.032684326171875, -0.04449462890625, 0.0186614990234375, -0.04083251953125, 0.0202484130859375, 0.006195068359375, 0.012847900390625, -0.047698974609375, -0.0237884521484375, 0.0006651878356933594, -0.034912109375, -0.0235748291015625, -0.00650787353515625, 0.05523681640625, 0.0255279541015625, -0.05438232421875, -0.00799560546875, -0.03350830078125, 0.057708740234375, -0.069091796875, 0.021453857421875, 0.0143280029296875, 0.006206512451171875, -0.00988006591796875, -0.055816650390625, -0.027008056640625, -0.0177001953125, -0.013336181640625, 0.03955078125, -0.007762908935546875, 0.00852203369140625, 0.0260772705078125, 0.014373779296875, -0.038482666015625, 0.0007414817810058594, -0.0305938720703125, -0.033538818359375, 0.062469482421875, 0.01361083984375, 0.036285400390625, -0.0243072509765625, -0.034698486328125, -0.005214691162109375, -0.0193328857421875, -0.004913330078125, 0.02020263671875, 0.0268096923828125, -0.015869140625, 0.0279541015625, -0.005466461181640625, 0.0057220458984375, -0.0107421875, -0.0088958740234375, 0.05810546875, -0.01971435546875, -0.006961822509765625, -0.0035858154296875, 
0.08416748046875, 0.0340576171875, 0.00835418701171875, -0.004009246826171875, 0.01263427734375, 0.02349853515625, 0.00943756103515625, -0.0762939453125, -0.052764892578125, 0.02935791015625, -0.041259765625, -0.0478515625, -0.029937744140625, -0.08685302734375, -0.034027099609375, 0.0006546974182128906, 0.039093017578125, -0.029449462890625, -0.040679931640625, -0.0004584789276123047, -0.029815673828125, -0.0208892822265625, 0.02191162109375, -0.03704833984375, 0.00356292724609375, -0.004207611083984375, 0.06072998046875, -0.00788116455078125, 0.0007376670837402344, -0.0226593017578125, -0.005802154541015625, -0.0018529891967773438, 0.0404052734375, -0.049591064453125, -0.0193328857421875, 0.002201080322265625, 0.020751953125, -0.00101470947265625, -0.03692626953125, 0.0635986328125, -0.0188751220703125, 0.033447265625, -0.0244140625, -0.0180511474609375, -0.036163330078125, 0.0141143798828125, -0.039031982421875, 0.0831298828125, 0.0144500732421875, -0.1019287109375, 0.00849151611328125, -0.0487060546875, -0.0208587646484375, -0.0068511962890625, 0.03173828125, -0.038787841796875, -0.0260162353515625, -0.0103607177734375, 0.0198211669921875, 0.00024199485778808594, 0.01108551025390625, -0.049591064453125, -0.016021728515625, 0.004619598388671875, -0.00644683837890625, 0.10443115234375, 0.03704833984375, -0.03509521484375, 0.02276611328125, -0.05499267578125, 0.0125274658203125, 0.00628662109375, -0.03826904296875, -0.0198974609375, 0.00347900390625, 0.04156494140625, 0.050140380859375, 0.0013856887817382812, -0.0479736328125, -0.00324249267578125, -0.0127105712890625, 0.03582763671875, 0.057525634765625, 0.0122833251953125, -0.004100799560546875, -0.030426025390625, 0.029052734375, 0.032073974609375, 0.01482391357421875, 0.0093536376953125, -0.05029296875, -0.044097900390625, -0.0046539306640625, 0.0176544189453125, 0.0718994140625, -0.0634765625, 0.042877197265625, -0.0060577392578125, -0.06591796875, -0.01094818115234375, 0.0015039443969726562, 
0.051116943359375, 0.029327392578125, 0.03948974609375, -0.0325927734375, -0.0467529296875, -0.06146240234375, -0.0189666748046875, -0.0303192138671875, -0.00966644287109375, 0.02337646484375, 0.06634521484375, -0.0310516357421875, 0.023529052734375, -0.04571533203125, -0.034088134765625, 0.002197265625, 0.0015392303466796875, 0.01009368896484375, 0.033233642578125, 0.03875732421875, -0.041534423828125, -0.061126708984375, -0.003940582275390625, -0.049835205078125, -0.0101470947265625, 0.01081085205078125, -0.0191497802734375, -0.002216339111328125, 0.0232696533203125, -0.0223541259765625, 0.0110015869140625, 0.029022216796875, -0.0165863037109375, 0.049102783203125, -0.0011281967163085938, -0.00560760498046875, -0.0753173828125, 0.01398468017578125, 0.0164794921875, -0.0033283233642578125, -0.0675048828125, -0.0009675025939941406, -0.0037326812744140625, 0.001888275146484375, -0.0557861328125, 0.0408935546875, -0.0160369873046875, 0.05047607421875, 0.01336669921875, 0.0003464221954345703, -0.010986328125, 0.0626220703125, 0.0178375244140625, 0.0298919677734375, 0.044403076171875, -0.0312347412109375, 0.0130157470703125, 0.0286407470703125, -0.010528564453125, 0.035980224609375, -0.033447265625, 0.0103302001953125, -0.0143280029296875, 0.03662109375, -0.07147216796875, -0.022125244140625, 0.032684326171875, -0.033447265625, 0.01245880126953125, -0.0025653839111328125, -0.042755126953125, -0.048828125, -0.03546142578125, 0.0123291015625, 0.044158935546875, -0.0288543701171875, 0.028167724609375, 0.042724609375, 0.0060272216796875, -0.055938720703125, -0.060272216796875, -0.0177459716796875, -0.04425048828125, -0.0241241455078125, 0.0081787109375, -0.035888671875, -0.01218414306640625, -0.0147247314453125, 0.0030670166015625, -0.007289886474609375, 0.0035419464111328125, 0.0179595947265625, 0.01042938232421875, 0.0203704833984375, 0.01568603515625, 0.002994537353515625, 0.0012454986572265625, 0.03338623046875, 0.0106048583984375, 0.0259246826171875, 
-0.01068878173828125, 0.021728515625, -0.0285797119140625, 0.0195770263671875, 0.04534912109375, -0.00203704833984375, 0.051727294921875, 0.05072021484375, -0.0301971435546875, -0.0029850006103515625, -0.0180511474609375, -0.0112762451171875, -0.033782958984375, 0.04119873046875, -0.0008158683776855469, -0.034393310546875, 0.059600830078125, 0.0015811920166015625, 0.00751495361328125, 0.042388916015625, 0.0565185546875, 0.0080108642578125, 0.105712890625, 0.0275421142578125, -0.0092620849609375, 0.037567138671875, -0.0162353515625, 0.0111236572265625, -0.05792236328125, -0.0262908935546875, -0.0305328369140625, -0.01293182373046875, -0.05059814453125, -0.01374053955078125, 0.03741455078125, -0.0203399658203125, -0.07025146484375, 0.0261993408203125, -0.06854248046875, 0.0221710205078125, 0.034912109375, 0.031402587890625, 0.0167236328125, -0.005641937255859375, -0.021453857421875, -0.0196380615234375, -0.040496826171875, -0.030731201171875, 0.078369140625, 0.047454833984375, 0.046539306640625, 0.004596710205078125, 0.040283203125, 0.007305145263671875, 0.064697265625, -0.04486083984375, 0.034149169921875, -0.0104827880859375, -0.09063720703125, 0.0011053085327148438, -0.037933349609375, -0.048248291015625, 0.0312347412109375, -0.0160064697265625, -0.0823974609375, 0.00788116455078125, -0.0132293701171875, -0.01568603515625, 0.048248291015625, -0.058685302734375, 0.0675048828125, -0.01424407958984375, -0.035888671875, 0.0102691650390625, -0.0687255859375, 0.049102783203125, -0.007747650146484375, 0.037200927734375, -0.0172882080078125, 0.01500701904296875, 0.07818603515625, -0.0223236083984375, 0.07574462890625, -0.02093505859375, -0.0089874267578125, 0.028533935546875, -0.0004706382751464844, 0.01509857177734375, 0.0017108917236328125, -0.0210418701171875, 0.0268096923828125, 0.02105712890625, -0.003490447998046875, -0.00836944580078125, 0.03729248046875, -0.06805419921875, -0.0250396728515625, -0.04791259765625, -0.045135498046875, -0.00562286376953125, 
0.042266845703125, 0.0394287109375, -0.00572967529296875, 0.00449371337890625, -0.0135040283203125, 0.050018310546875, -0.050933837890625, 0.01303863525390625, 0.023193359375, -0.021728515625, -0.037750244140625, 0.061431884765625, 0.005138397216796875, 0.0235443115234375, 0.01442718505859375, 0.031402587890625, -0.0210418701171875, -0.016632080078125, -0.0089569091796875, 0.020782470703125, -0.0693359375, -0.0322265625, -0.06182861328125, -0.034027099609375, -0.0240325927734375, 0.018524169921875, -0.0099945068359375, -0.00745391845703125, -0.0255126953125, -0.0189971923828125, 0.036590576171875, 0.058074951171875, -0.0272979736328125, 0.0189208984375, -0.0518798828125, -0.0010471343994140625, 0.0094757080078125, 0.03790283203125, 0.0078277587890625, -0.032867431640625, -0.0016260147094726562, 0.0233306884765625, -0.0537109375, -0.1083984375, 0.039306640625, -0.0011968612670898438, 0.0361328125, 0.0343017578125, 0.027801513671875, 0.0224151611328125, -0.025390625, 0.06787109375, 0.0237579345703125, -0.053619384765625, 0.04302978515625, -0.04168701171875, -0.0016994476318359375, 0.048614501953125, 0.0589599609375, -0.060760498046875, -0.053070068359375, -0.043670654296875, -0.061309814453125, 0.053070068359375, 0.0225067138671875, 0.00936126708984375, -0.0158538818359375, 0.013092041015625, -0.000789642333984375, 0.00263214111328125, -0.09619140625, -0.053802490234375, -0.0145263671875, -0.018646240234375, 0.007232666015625, -0.01561737060546875, -0.0229339599609375, -0.04290771484375, 0.080322265625, 0.01140594482421875, 0.01617431640625, 0.00797271728515625, -0.0006575584411621094, -0.00852203369140625, 0.0257415771484375, 0.00409698486328125, 0.01096343994140625, -0.03839111328125, 0.0119171142578125, 0.034210205078125, -0.05047607421875, 0.026580810546875, -0.003055572509765625, -0.0167083740234375, -0.00555419921875, 0.006175994873046875, 0.034454345703125, 0.01082611083984375, -0.050079345703125, 0.0310516357421875, -0.00812530517578125, -0.023712158203125, 
-0.0280914306640625, 0.0129547119140625, 0.0188446044921875, 0.0030345916748046875, -0.0013132095336914062, 0.00556182861328125, 0.0243072509765625, -0.047698974609375, 0.02044677734375, 0.0276641845703125, -0.0440673828125, -0.015625, 0.08685302734375, 0.01300811767578125, -0.0096282958984375, 0.049224853515625, -0.02557373046875, -0.058074951171875, 0.04034423828125, 0.03692626953125, 0.0562744140625, -0.028594970703125, 0.033599853515625, 0.07208251953125, 0.019256591796875, 0.01314544677734375, 0.0190277099609375, 0.01654052734375, -0.045989990234375, 0.0025043487548828125, -0.04510498046875, -0.010986328125, 0.03460693359375, -0.0400390625, 0.0174102783203125, -0.042694091796875, -0.01514434814453125, 0.01557159423828125, 0.020721435546875, -0.036529541015625, 0.0178375244140625, 0.0159759521484375, 0.0701904296875, -0.10821533203125, 0.04132080078125, 0.054290771484375, -0.06689453125, -0.0506591796875, -0.01446533203125, 0.0216217041015625, -0.0199432373046875, 0.03851318359375, 0.039306640625, 0.0160064697265625, -0.0167694091796875, -0.050506591796875, -0.0672607421875, 0.0592041015625, 0.010986328125, -0.015777587890625, -0.0020904541015625, -0.006244659423828125, 0.061126708984375, 0.0086822509765625, 0.06463623046875, 0.044586181640625, 0.04736328125, 0.01241302490234375, -0.06634521484375, 0.0253143310546875, -0.046539306640625, 0.005260467529296875, -0.014892578125, -0.0614013671875, 0.057861328125, -0.018310546875, -0.022125244140625, -0.0007476806640625, 0.032867431640625, 0.01454925537109375, 0.01727294921875, 0.04339599609375, 0.0592041015625, 0.05230712890625, -0.0001589059829711914, 0.055328369140625, -0.0083770751953125, 0.04107666015625, 0.0804443359375, -0.012847900390625, 0.0504150390625, 0.0046539306640625, -0.034820556640625, 0.06982421875, 0.077880859375, -0.007747650146484375, 0.05029296875, 0.02203369140625, -0.0282745361328125, -0.0095977783203125, -0.0163726806640625, -0.0313720703125, 0.00518035888671875, 0.022979736328125, 
-0.027191162109375, -0.0267333984375, 0.0013685226440429688, 0.034454345703125, -0.00942230224609375, -0.01446533203125, 0.047515869140625, 0.007282257080078125, -0.0487060546875, 0.0413818359375, -0.0128631591796875, 0.0665283203125, -0.0218963623046875, 0.0030364990234375, 0.0008497238159179688, 0.025146484375, -0.03582763671875, -0.06329345703125, 0.0168304443359375, -0.023406982421875, 0.0034008026123046875, -0.00701141357421875, 0.044464111328125, -0.046173095703125, -0.036712646484375, 0.04547119140625, 0.0140838623046875, 0.022918701171875, 0.0014009475708007812, -0.07623291015625, -0.016357421875, 0.0142059326171875, -0.0255126953125, 0.0300140380859375, 0.0121002197265625, -0.0012006759643554688, 0.051025390625, 0.0406494140625, -0.0114288330078125, -0.0010986328125, -0.0062713623046875, 0.07135009765625, -0.0306243896484375, -0.0259246826171875, -0.0413818359375, 0.059234619140625, -0.0034122467041015625, -0.0249176025390625, 0.056854248046875, 0.049652099609375, 0.06329345703125, -0.01103973388671875, 0.06597900390625, -0.00978851318359375, 0.033172607421875, -0.0063018798828125, 0.07513427734375, -0.045989990234375, -0.01364898681640625, -0.043670654296875, -0.0533447265625, -0.0306396484375, 0.046722412109375, -0.0247039794921875, 0.025146484375, 0.057830810546875, 0.061859130859375, 0.00662994384765625, -0.006439208984375, -0.00438690185546875, 0.044708251953125, 0.0293731689453125, 0.0638427734375, 0.042755126953125, -0.039093017578125, 0.043060302734375, -0.0537109375, -0.022003173828125, 0.007190704345703125, -0.055908203125, -0.08172607421875, -0.05419921875, -0.046234130859375, -0.06414794921875, -0.0230865478515625, 0.041717529296875, 0.0282745361328125, -0.07000732421875, 0.0018892288208007812, 0.002056121826171875, 0.02471923828125, -0.010772705078125, -0.023193359375, 0.017059326171875, -0.01108551025390625, -0.0528564453125, -0.01346588134765625, -0.002765655517578125, -0.00838470458984375, -0.00946044921875, 0.0030918121337890625, 
-0.034637451171875, 0.00720977783203125, 0.0518798828125, 0.019561767578125, -0.04315185546875, -0.038818359375, -0.0153656005859375, -0.01033782958984375, 0.00775146484375, 0.008544921875, -0.04449462890625, 0.03826904296875, 0.0484619140625, 0.0011148452758789062, 0.036468505859375, -0.01467132568359375, 0.0006008148193359375, -0.0306549072265625, 0.007022857666015625, 0.0232696533203125, 0.0275421142578125, 0.0167236328125, -0.03936767578125, 0.0574951171875, 0.0185546875, -0.041015625, -0.069580078125, 0.01203155517578125, -0.0670166015625, -0.0279998779296875, 0.09844970703125, -0.0168609619140625, -0.0220794677734375, -0.015716552734375, -0.0277862548828125, 0.030853271484375, -0.021026611328125, 0.057525634765625, 0.048980712890625, -0.006351470947265625, -0.00415802001953125, -0.053955078125, 0.0452880859375, 0.02093505859375, -0.0718994140625, 0.0192413330078125, 0.035430908203125, 0.060760498046875, 0.03546142578125, 0.041046142578125, -0.0226898193359375, 0.0065460205078125, -0.005352020263671875, 0.0155029296875, 0.021881103515625, -0.0084686279296875, -0.01442718505859375, -0.01198577880859375, -0.0287322998046875, 0.0002932548522949219 ] ]
DeepPavlov/rubert-base-cased-conversational
2021-11-08T13:06:54.000Z
[ "transformers", "pytorch", "jax", "bert", "feature-extraction", "ru", "endpoints_compatible", "has_space", "region:us" ]
feature-extraction
DeepPavlov
null
null
DeepPavlov/rubert-base-cased-conversational
12
8,506
transformers
2022-03-02T23:29:04
--- language: - ru --- # rubert-base-cased-conversational Conversational RuBERT \(Russian, cased, 12‑layer, 768‑hidden, 12‑heads, 180M parameters\) was trained on OpenSubtitles\[1\], [Dirty](https://d3.ru/), [Pikabu](https://pikabu.ru/), and a Social Media segment of Taiga corpus\[2\]. We assembled a new vocabulary for Conversational RuBERT model on this data and initialized the model with [RuBERT](../rubert-base-cased). 08.11.2021: upload model with MLM and NSP heads \[1\]: P. Lison and J. Tiedemann, 2016, OpenSubtitles2016: Extracting Large Parallel Corpora from Movie and TV Subtitles. In Proceedings of the 10th International Conference on Language Resources and Evaluation \(LREC 2016\) \[2\]: Shavrina T., Shapovalova O. \(2017\) TO THE METHODOLOGY OF CORPUS CONSTRUCTION FOR MACHINE LEARNING: «TAIGA» SYNTAX TREE CORPUS AND PARSER. in proc. of “CORPORA2017”, international conference , Saint-Petersbourg, 2017.
929
[ [ -0.020782470703125, -0.0794677734375, 0.006610870361328125, -0.0025997161865234375, -0.025848388671875, 0.027496337890625, -0.05072021484375, -0.0017948150634765625, 0.01166534423828125, 0.043304443359375, -0.0275115966796875, -0.044830322265625, -0.0443115234375, -0.01308441162109375, -0.0333251953125, 0.1103515625, 0.0121002197265625, 0.038482666015625, -0.0091552734375, -0.007404327392578125, -0.029388427734375, -0.041656494140625, -0.026885986328125, -0.0095672607421875, 0.0145263671875, 0.050506591796875, 0.05340576171875, 0.0255889892578125, 0.044952392578125, 0.0220947265625, -0.006134033203125, -0.0007719993591308594, -0.0341796875, 0.00858306884765625, -0.004344940185546875, -0.0350341796875, -0.03021240234375, -0.0078277587890625, 0.0628662109375, 0.05718994140625, -0.0318603515625, 0.0262451171875, -0.0137481689453125, 0.049468994140625, -0.0286712646484375, 0.0107574462890625, -0.054534912109375, 0.0098114013671875, -0.02581787109375, -0.0178985595703125, -0.04534912109375, 0.0038814544677734375, 0.005725860595703125, -0.03802490234375, 0.035614013671875, 0.00628662109375, 0.1031494140625, -0.0003018379211425781, 0.006343841552734375, -0.00897216796875, -0.047760009765625, 0.07330322265625, -0.034912109375, 0.029296875, 0.031036376953125, 0.0377197265625, -0.0008325576782226562, -0.0555419921875, -0.0328369140625, -0.030609130859375, -0.01172637939453125, -0.0014085769653320312, -0.0264892578125, -0.0143585205078125, 0.017730712890625, 0.037384033203125, -0.043609619140625, -0.01058197021484375, -0.049468994140625, -0.00910186767578125, 0.0172576904296875, 0.0062255859375, 0.0032405853271484375, -0.01462554931640625, -0.034912109375, -0.0169677734375, -0.03216552734375, -0.00970458984375, 0.0361328125, 0.0163726806640625, -0.040374755859375, 0.043914794921875, -0.0298614501953125, 0.040283203125, 0.0114593505859375, -0.00574493408203125, 0.028564453125, -0.021209716796875, -0.0234832763671875, -0.019561767578125, 0.07574462890625, 
0.001819610595703125, 0.0274505615234375, -0.005390167236328125, 0.01271820068359375, 0.00450897216796875, 0.02215576171875, -0.046539306640625, -0.03594970703125, 0.01322174072265625, -0.0167694091796875, -0.039215087890625, 0.01520538330078125, -0.0687255859375, -0.014678955078125, -0.00989532470703125, 0.01404571533203125, -0.03887939453125, -0.0107574462890625, 0.007625579833984375, 0.00787353515625, 0.02117919921875, 0.0224151611328125, -0.043426513671875, 0.04595947265625, 0.044830322265625, 0.053192138671875, -0.0193328857421875, -0.004611968994140625, -0.04864501953125, -0.0211639404296875, -0.01277923583984375, 0.056396484375, -0.0208740234375, -0.0069580078125, -0.0085296630859375, -0.0027980804443359375, 0.022216796875, -0.03533935546875, 0.036163330078125, -0.037872314453125, 0.04559326171875, 0.018768310546875, -0.008148193359375, -0.0206298828125, 0.021759033203125, -0.03143310546875, 0.081298828125, 0.01611328125, -0.04827880859375, 0.00766754150390625, -0.05596923828125, -0.0204620361328125, 0.0226287841796875, -0.0148773193359375, -0.04486083984375, -0.019744873046875, 0.00981903076171875, 0.039276123046875, -0.0469970703125, 0.055755615234375, -0.00395965576171875, 0.01517486572265625, 0.051910400390625, -0.01090240478515625, 0.059783935546875, 0.0369873046875, -0.018768310546875, 0.01528167724609375, -0.054229736328125, 0.01082611083984375, 0.003662109375, -0.01203155517578125, -0.004039764404296875, -0.014984130859375, 0.02166748046875, -0.005466461181640625, 0.0071868896484375, -0.0330810546875, 0.00725555419921875, -0.0165252685546875, 0.029296875, 0.042633056640625, -0.01019287109375, 0.038330078125, -0.00795745849609375, 0.0533447265625, 0.028411865234375, 0.00472259521484375, -0.0164642333984375, -0.028076171875, -0.08544921875, -0.022613525390625, 0.042327880859375, 0.042144775390625, -0.050506591796875, 0.0237579345703125, -0.02960205078125, -0.054229736328125, -0.0255279541015625, 0.0093841552734375, 0.048248291015625, 
0.0261688232421875, 0.018341064453125, -0.0413818359375, -0.052093505859375, -0.08294677734375, -0.0006103515625, -0.005558013916015625, 0.0123138427734375, 0.0178985595703125, 0.03143310546875, 0.006687164306640625, 0.0577392578125, -0.04547119140625, -0.017059326171875, -0.03955078125, 0.0179443359375, 0.0286712646484375, 0.0316162109375, 0.0263519287109375, -0.0555419921875, -0.066650390625, 0.0198211669921875, -0.046417236328125, -0.0032978057861328125, 0.0021209716796875, -0.0214691162109375, 0.0195159912109375, 0.0394287109375, -0.0487060546875, 0.0131378173828125, 0.041961669921875, -0.0214691162109375, 0.04180908203125, 0.01134490966796875, 0.008636474609375, -0.10150146484375, 0.007335662841796875, 0.0013589859008789062, -0.0264739990234375, -0.054351806640625, 0.0145111083984375, -0.005062103271484375, -0.0205841064453125, -0.048248291015625, 0.0274810791015625, -0.049468994140625, 0.0004324913024902344, -0.0015020370483398438, 0.018341064453125, -0.0176544189453125, 0.060394287109375, -0.0022602081298828125, 0.061279296875, 0.0684814453125, -0.037933349609375, 0.03375244140625, 0.0338134765625, -0.041259765625, 0.036285400390625, -0.048248291015625, 0.0181427001953125, -0.004459381103515625, 0.0033721923828125, -0.072021484375, -0.0163726806640625, 0.03070068359375, -0.061279296875, 0.0243377685546875, -0.01515960693359375, -0.028228759765625, -0.00653839111328125, -0.017120361328125, 0.0262451171875, 0.049072265625, -0.015106201171875, 0.0303497314453125, 0.05780029296875, -0.0182037353515625, -0.052337646484375, -0.041656494140625, 0.022216796875, -0.028564453125, -0.0445556640625, 0.0197601318359375, -0.0077972412109375, -0.027679443359375, -0.007755279541015625, 0.0193328857421875, -0.02215576171875, -0.01325225830078125, 0.0105438232421875, 0.021759033203125, -0.035003662109375, 0.0015459060668945312, -0.030059814453125, 0.0017290115356445312, -0.01666259765625, 0.0172271728515625, 0.07080078125, -0.03668212890625, 0.0099334716796875, 
-0.018524169921875, 0.0189056396484375, 0.025115966796875, -0.01201629638671875, 0.050140380859375, 0.06060791015625, -0.0160980224609375, -0.0196380615234375, -0.047760009765625, -0.015045166015625, -0.03436279296875, 0.0362548828125, -0.0180511474609375, -0.0684814453125, 0.04156494140625, 0.0239105224609375, -0.0237579345703125, 0.03057861328125, 0.06927490234375, 0.0159912109375, 0.02996826171875, 0.04681396484375, -0.004062652587890625, 0.050323486328125, -0.0352783203125, 0.007335662841796875, -0.051361083984375, -0.01522064208984375, -0.037872314453125, 0.002330780029296875, -0.045684814453125, -0.042205810546875, 0.021392822265625, -0.0002510547637939453, -0.00667572021484375, 0.062042236328125, -0.02789306640625, 0.037811279296875, 0.04583740234375, 0.01311492919921875, 0.0240478515625, 0.0189361572265625, -0.0018644332885742188, -0.00543975830078125, -0.05694580078125, -0.03485107421875, 0.0765380859375, 0.032684326171875, 0.06072998046875, 0.012115478515625, 0.0203857421875, 0.0295867919921875, 0.0171356201171875, -0.06787109375, 0.0614013671875, -0.00774383544921875, -0.07012939453125, -0.01708984375, -0.024505615234375, -0.0770263671875, 0.01117706298828125, -0.0178680419921875, -0.0533447265625, -0.00919342041015625, -0.003780364990234375, -0.01139068603515625, 0.006084442138671875, -0.052978515625, 0.0697021484375, -0.0029315948486328125, -0.02471923828125, -0.0242156982421875, -0.0570068359375, 0.0266876220703125, 0.00975799560546875, 0.0142364501953125, -0.019134521484375, 0.0085906982421875, 0.06854248046875, -0.0257110595703125, 0.067138671875, 0.0113067626953125, 0.0267486572265625, 0.041656494140625, 0.01174163818359375, 0.007717132568359375, 0.003421783447265625, 0.0260162353515625, 0.029449462890625, -0.013214111328125, -0.0261993408203125, -0.0218048095703125, 0.041259765625, -0.055999755859375, -0.033111572265625, -0.054931640625, -0.03094482421875, -0.007778167724609375, 0.0182952880859375, 0.01282501220703125, 0.035888671875, 
-0.035308837890625, 0.049102783203125, 0.03704833984375, -0.021820068359375, 0.02166748046875, 0.052947998046875, -0.007415771484375, -0.051055908203125, 0.045074462890625, -0.017364501953125, 0.0241241455078125, 0.037384033203125, 0.0165863037109375, 0.0192413330078125, -0.03271484375, -0.029815673828125, 0.031890869140625, -0.052642822265625, -0.04107666015625, -0.059173583984375, -0.0291900634765625, -0.032012939453125, 0.0148773193359375, -0.0300140380859375, -0.03302001953125, -0.0189971923828125, -0.00957489013671875, 0.049652099609375, 0.03662109375, 0.005352020263671875, 0.041473388671875, -0.052398681640625, 0.02685546875, -0.0166473388671875, 0.0181884765625, -0.02459716796875, -0.0621337890625, -0.0518798828125, -0.00592041015625, -0.0450439453125, -0.053741455078125, 0.05767822265625, 0.019073486328125, 0.04608154296875, 0.002361297607421875, -0.01360321044921875, 0.039459228515625, -0.040863037109375, 0.06890869140625, 0.019439697265625, -0.059173583984375, 0.033843994140625, -0.048583984375, 0.01708984375, 0.039031982421875, 0.035491943359375, -0.0528564453125, -0.02276611328125, -0.072021484375, -0.055023193359375, 0.0711669921875, 0.0207061767578125, 0.0203704833984375, -0.0016689300537109375, -0.0174560546875, 0.0193634033203125, 0.031829833984375, -0.08184814453125, -0.025787353515625, 0.0022220611572265625, -0.0168304443359375, -0.011566162109375, -0.023712158203125, -0.0196380615234375, -0.0225372314453125, 0.058013916015625, 0.0070953369140625, 0.03826904296875, 0.0015134811401367188, -0.015838623046875, -0.014892578125, 0.0308074951171875, 0.059295654296875, 0.024627685546875, -0.01178741455078125, 0.00408172607421875, -0.0002493858337402344, -0.05035400390625, -0.0192108154296875, -0.0076904296875, -0.00785064697265625, 0.0094451904296875, 0.00238037109375, 0.07232666015625, 0.01264190673828125, -0.05889892578125, 0.040802001953125, 0.020782470703125, -0.021392822265625, -0.06207275390625, -0.01537322998046875, -0.01543426513671875, 
0.0012607574462890625, 0.0164337158203125, 0.0078125, 0.004627227783203125, -0.052978515625, 0.0216522216796875, 0.0340576171875, -0.037078857421875, -0.0283050537109375, 0.030242919921875, 0.00681304931640625, -0.0289459228515625, 0.0196533203125, -0.017303466796875, -0.040985107421875, 0.028472900390625, 0.02069091796875, 0.0791015625, 0.0023822784423828125, 0.029693603515625, 0.038482666015625, 0.00940704345703125, -0.004802703857421875, 0.01104736328125, 0.0011186599731445312, -0.0743408203125, -0.0300140380859375, -0.048919677734375, -0.04498291015625, 0.0308074951171875, -0.046875, 0.050750732421875, -0.03704833984375, -0.007694244384765625, 0.018280029296875, -0.021392822265625, -0.036376953125, -0.0012531280517578125, 0.0015497207641601562, 0.037017822265625, -0.053131103515625, 0.06072998046875, 0.0555419921875, -0.037811279296875, -0.040771484375, -0.031036376953125, -0.01508331298828125, -0.0550537109375, 0.0606689453125, -0.01331329345703125, 0.01189422607421875, -0.0157470703125, -0.05413818359375, -0.07073974609375, 0.059051513671875, 0.0011615753173828125, -0.0137481689453125, 0.0132904052734375, 0.0123748779296875, 0.055572509765625, -0.0369873046875, 0.02301025390625, 0.0180511474609375, 0.011505126953125, -0.00554656982421875, -0.0770263671875, -0.01070404052734375, -0.03558349609375, -0.01190948486328125, 0.0247955322265625, -0.057281494140625, 0.08013916015625, -0.005615234375, -0.0192718505859375, 0.0212860107421875, 0.05389404296875, 0.0010738372802734375, 0.005146026611328125, 0.022308349609375, 0.043609619140625, 0.014617919921875, -0.0148773193359375, 0.060821533203125, -0.0157623291015625, 0.035888671875, 0.08197021484375, 0.0100860595703125, 0.06866455078125, 0.03460693359375, -0.03509521484375, 0.059783935546875, 0.026092529296875, -0.009246826171875, 0.04296875, 0.0098876953125, 0.0018367767333984375, -0.005962371826171875, 0.0005273818969726562, -0.01522064208984375, 0.04827880859375, 0.0389404296875, -0.0260772705078125, 
-0.00945281982421875, 0.0014019012451171875, 0.0263824462890625, 0.02178955078125, 0.0047149658203125, 0.0537109375, -0.00196075439453125, -0.05706787109375, 0.054107666015625, 0.00821685791015625, 0.055389404296875, -0.042327880859375, -0.0020771026611328125, -0.0101318359375, 0.013916015625, -0.00339508056640625, -0.050506591796875, 0.00841522216796875, 0.01568603515625, 0.003265380859375, -0.01904296875, 0.042724609375, -0.04888916015625, -0.0374755859375, -0.01059722900390625, 0.038543701171875, 0.0364990234375, 0.0110626220703125, -0.057952880859375, -0.0037689208984375, 0.00009584426879882812, -0.0227813720703125, 0.01328277587890625, 0.033538818359375, -0.0063323974609375, 0.04376220703125, 0.051116943359375, 0.0011196136474609375, 0.003448486328125, 0.012451171875, 0.06585693359375, -0.05560302734375, -0.03594970703125, -0.04742431640625, 0.04931640625, -0.0153350830078125, -0.03179931640625, 0.064697265625, 0.049102783203125, 0.070068359375, -0.041961669921875, 0.04156494140625, -0.0189666748046875, 0.06048583984375, -0.040283203125, 0.03570556640625, -0.0271148681640625, 0.0096282958984375, -0.0243072509765625, -0.06768798828125, -0.02349853515625, 0.040863037109375, -0.02056884765625, -0.00337982177734375, 0.0777587890625, 0.072265625, -0.011688232421875, -0.0207977294921875, 0.034576416015625, 0.0239105224609375, 0.0045318603515625, 0.0211944580078125, 0.055145263671875, -0.047515869140625, 0.0556640625, -0.03314208984375, -0.0096588134765625, -0.033966064453125, -0.06439208984375, -0.08013916015625, -0.058990478515625, -0.03680419921875, -0.04364013671875, -0.00225830078125, 0.061737060546875, 0.044525146484375, -0.07977294921875, -0.057769775390625, -0.01111602783203125, -0.0149078369140625, 0.00885009765625, -0.0211639404296875, -0.0004220008850097656, -0.01316070556640625, -0.052581787109375, 0.016754150390625, 0.008514404296875, -0.00482940673828125, -0.0211639404296875, -0.00243377685546875, -0.028167724609375, -0.01861572265625, 0.03717041015625, 
0.0206298828125, -0.057830810546875, -0.01439666748046875, 0.0234375, -0.0173797607421875, 0.036834716796875, 0.053253173828125, -0.042816162109375, 0.016387939453125, 0.035186767578125, 0.0225372314453125, 0.059051513671875, 0.0034275054931640625, 0.0426025390625, -0.060394287109375, 0.03753662109375, 0.0157318115234375, 0.029937744140625, 0.048919677734375, -0.0179901123046875, 0.051055908203125, 0.0298004150390625, -0.03167724609375, -0.06524658203125, 0.022705078125, -0.1192626953125, -0.00616455078125, 0.1160888671875, 0.011077880859375, -0.021514892578125, 0.006793975830078125, -0.034637451171875, 0.01343536376953125, -0.043701171875, 0.043792724609375, 0.06292724609375, 0.0196075439453125, -0.0249786376953125, -0.045867919921875, 0.032623291015625, 0.03436279296875, -0.054595947265625, -0.002017974853515625, 0.0286712646484375, 0.026153564453125, 0.034271240234375, 0.045440673828125, -0.0285186767578125, 0.020477294921875, 0.004547119140625, 0.0290374755859375, -0.0046234130859375, -0.022979736328125, -0.009735107421875, 0.0016908645629882812, 0.003997802734375, -0.0200347900390625 ] ]
openbmb/VisCPM-Paint
2023-07-26T06:49:40.000Z
[ "diffusers", "pytorch", "en", "zh", "diffusers:VisCPMPaintBeePipeline", "region:us" ]
null
openbmb
null
null
openbmb/VisCPM-Paint
10
8,506
diffusers
2023-06-25T12:30:09
--- language: - en - zh --- <div align="center"> **VisCPM** **Chinese-English bilingual multi-modal large model series based on CPM (Chinese Pretrained Models) basic model** <p align="center"> <a href="https://github.com/OpenBMB/VisCPM">Github</a> • <a href="https://huggingface.co/openbmb/VisCPM-Chat">VisCPM-Chat</a> </p> </div> `VisCPM` is a family of open-source large multimodal models, which support multimodal conversational capabilities (`VisCPM-Chat` model) and text-to-image generation capabilities (`VisCPM-Paint` model) in both Chinese and English, achieving state-of-the-art peformance among Chinese open-source multimodal models. VisCPM is trained based on the large language model [CPM-Bee](https://github.com/OpenBMB/CPM-Bee) with 10B parameters, fusing visual encoder (Q-Former) and visual decoder (Diffusion-UNet) to support visual inputs and outputs. Thanks to the good bilingual capability of CPM-Bee, `VisCPM` can be pre-trained with English multimodal data only and well generalize to achieve promising Chinese multimodal capabilities. - **👐 Open-source Usage**: VisCPM is free to be used for personal and research purposes. By open-sourcing the VisCPM model family, we hope to promote the development of the open-source community of large multimodal models and related research. - **🌟 Image and text generation coverage**: VisCPM models provide relatively comprehensive support for image and text multimodal capabilities, covering both multimodal conversation (image-to-text generation) capabilities and text-to-image generation capabilities. - **💫 Excellent bilingual performance**: Thanks to the excellent bilingual capability of the base language model CPM-Bee, VisCPM achieves outstanding results in both bilingual multimodal conversation and text-to-image generation. ## VisCPM-Paint `VisCPM-Paint` supports bilingual text-to-image generation. 
The model uses `CPM-Bee` as the text encoder, `UNet` as the image decoder, and fuses vision and language models using the objective of diffusion model. During the training process, the parameters of the language model remain fixed. The visual decoder is initialized with the parameters of [Stable Diffusion 2.1](https://github.com/Stability-AI/stablediffusion), and it is fused with the language model by gradually unfreezing key bridging parameters. The model is trained on the [LAION 2B](https://laion.ai/) English text-image pair dataset. Similar to `VisCPM-Chat`, we found that due to the bilingual capability of `CPM-Bee`, `VisCPM-Paint` can achieve good Chinese text-to-image generation by training only on English text-image pairs, surpassing the performance of Chinese open-source models. By incorporating an additional 20M cleaned native Chinese text-image pairs and 120M translated text-image pairs in Chinese, the model's Chinese text-to-image generation ability can be further improved. We sample 30,000 images from the standard image generation test set MSCOCO and calculated commonly used evaluation metrics FID (Fréchet Inception Distance) to assess the quality of generated images. Similarly, we provide two versions of the model, namely `VisCPM-Paint-balance` and `VisCPM-Paint-zhplus`. The former has a balanced ability in both English and Chinese, while the latter emphasizes Chinese proficiency. `VisCPM-Paint-balance` is trained only using English text-image pairs, while `VisCPM-Paint-zhplus` incorporates an additional 20M native Chinese text-image pairs and 120M translated text-image pairs in Chinese based on `VisCPM-Paint-balance`. 
## How to Use ```python #!/usr/bin/env python # encoding: utf-8 from diffusers import DiffusionPipeline from transformers import AutoModel from transformers import AutoTokenizer tokenizer = AutoTokenizer.from_pretrained('openbmb/VisCPM-Paint', trust_remote_code=True) text_encoder = AutoModel.from_pretrained('openbmb/VisCPM-Paint', trust_remote_code=True) print('load pipeline') pipeline = DiffusionPipeline.from_pretrained('openbmb/VisCPM-Paint', custom_pipeline="openbmb/VisCPM-Paint", text_encoder=text_encoder, tokenizer=tokenizer) pipeline = pipeline.to('cuda') prompt = "a photo of an astronaut riding a horse on mars" image = pipeline(prompt).images[0] image.save("astronaut_rides_horse.png") ``` ## 📝 License VisCPM is governed by the [GML License](https://github.com/OpenBMB/General-Model-License/blob/main/%E9%80%9A%E7%94%A8%E6%A8%A1%E5%9E%8B%E8%AE%B8%E5%8F%AF%E5%8D%8F%E8%AE%AE-%E6%9D%A5%E6%BA%90%E8%AF%B4%E6%98%8E-%E5%AE%A3%E4%BC%A0%E9%99%90%E5%88%B6-%E9%9D%9E%E5%95%86%E4%B8%9A%E5%8C%96.md), and permits individual and research usages. If you intend to utilize the model for commercial purposes, please reach out to cpm@modelbest.cn to negotiate commercial licensing. The CPM-Bee base, governed by the [General Model License (GML)](https://github.com/OpenBMB/General-Model-License/blob/main/%E9%80%9A%E7%94%A8%E6%A8%A1%E5%9E%8B%E8%AE%B8%E5%8F%AF%E5%8D%8F%E8%AE%AE-%E6%9D%A5%E6%BA%90%E8%AF%B4%E6%98%8E-%E5%AE%A3%E4%BC%A0%E9%99%90%E5%88%B6-%E5%95%86%E4%B8%9A%E6%8E%88%E6%9D%83.md), permits commercial usage. If you intend to utilize the model for commercial purposes, please reach out to cpm@modelbest.cn to obtain the certificate of authorization.
5,215
[ [ -0.033782958984375, -0.047943115234375, 0.0252685546875, 0.033203125, -0.0286712646484375, -0.01311492919921875, -0.0235443115234375, -0.0421142578125, -0.01296234130859375, 0.034210205078125, -0.033599853515625, -0.047210693359375, -0.045806884765625, -0.015380859375, 0.0079345703125, 0.04510498046875, -0.006969451904296875, 0.012054443359375, -0.008514404296875, -0.004878997802734375, -0.0276947021484375, -0.0305023193359375, -0.035888671875, -0.0038127899169921875, 0.025177001953125, 0.0179443359375, 0.05621337890625, 0.0467529296875, 0.0304412841796875, 0.0189971923828125, -0.0005769729614257812, 0.0219879150390625, -0.035888671875, -0.0249786376953125, 0.0016880035400390625, -0.0287017822265625, -0.0465087890625, 0.01123046875, 0.034515380859375, 0.0262908935546875, -0.006603240966796875, 0.0043792724609375, 0.007717132568359375, 0.04547119140625, -0.05328369140625, 0.040863037109375, -0.037109375, -0.0037326812744140625, -0.00562286376953125, -0.015960693359375, -0.036163330078125, -0.0254974365234375, 0.004650115966796875, -0.05218505859375, 0.0008301734924316406, 0.0007867813110351562, 0.09381103515625, -0.005680084228515625, -0.0305938720703125, -0.0045318603515625, -0.047454833984375, 0.059967041015625, -0.06634521484375, 0.0107574462890625, 0.01357269287109375, 0.025390625, 0.0018224716186523438, -0.059967041015625, -0.050201416015625, -0.0172271728515625, 0.0028858184814453125, 0.040863037109375, -0.0136566162109375, -0.006580352783203125, -0.0010128021240234375, 0.0227813720703125, -0.03936767578125, -0.0030307769775390625, -0.043975830078125, -0.0204925537109375, 0.040679931640625, 0.01605224609375, 0.03228759765625, -0.0168914794921875, -0.057159423828125, -0.0199127197265625, -0.045654296875, 0.0259246826171875, 0.0174713134765625, 0.0175323486328125, -0.029754638671875, 0.043792724609375, -0.002513885498046875, 0.052001953125, -0.0019311904907226562, -0.009429931640625, 0.036163330078125, -0.0362548828125, -0.034423828125, -0.0271453857421875, 
0.08465576171875, 0.044097900390625, 0.0083770751953125, 0.01398468017578125, -0.01047515869140625, -0.0086517333984375, -0.01296234130859375, -0.07635498046875, -0.0182647705078125, 0.00786590576171875, -0.052642822265625, -0.005634307861328125, 0.00931549072265625, -0.052093505859375, 0.00004589557647705078, -0.0256500244140625, 0.042633056640625, -0.050811767578125, -0.038421630859375, 0.01641845703125, -0.01494598388671875, 0.037933349609375, 0.024749755859375, -0.044708251953125, -0.00980377197265625, 0.018310546875, 0.065673828125, -0.01837158203125, -0.030670166015625, -0.002735137939453125, 0.0193328857421875, -0.0241546630859375, 0.050811767578125, -0.02410888671875, -0.035552978515625, 0.0007090568542480469, 0.0258331298828125, -0.0204620361328125, -0.04327392578125, 0.01013946533203125, -0.042572021484375, 0.03802490234375, -0.0082244873046875, -0.0223236083984375, -0.033935546875, 0.0153961181640625, -0.0296173095703125, 0.055389404296875, 0.00940704345703125, -0.061248779296875, 0.01812744140625, -0.048492431640625, -0.01195526123046875, 0.0031719207763671875, 0.0036640167236328125, -0.039459228515625, -0.0294952392578125, 0.0147857666015625, 0.038238525390625, -0.0242767333984375, 0.01316070556640625, -0.0284271240234375, -0.002819061279296875, 0.00437164306640625, -0.031280517578125, 0.09478759765625, 0.0262298583984375, -0.02191162109375, 0.024261474609375, -0.054229736328125, -0.00010281801223754883, 0.034820556640625, -0.04119873046875, -0.0024700164794921875, -0.007099151611328125, 0.0210723876953125, 0.032196044921875, 0.04400634765625, -0.035736083984375, -0.0010128021240234375, -0.031280517578125, 0.0194854736328125, 0.058319091796875, -0.002002716064453125, 0.0318603515625, -0.033416748046875, 0.048004150390625, 0.025146484375, 0.0300750732421875, -0.032867431640625, -0.05914306640625, -0.071044921875, -0.04339599609375, 0.002716064453125, 0.05499267578125, -0.06134033203125, 0.037628173828125, -0.01274871826171875, -0.058990478515625, 
-0.0194854736328125, -0.003414154052734375, 0.035980224609375, 0.0467529296875, 0.032745361328125, -0.0352783203125, -0.01503753662109375, -0.06939697265625, 0.0181732177734375, -0.0087890625, -0.00272369384765625, 0.032501220703125, 0.03436279296875, -0.02398681640625, 0.07257080078125, -0.04351806640625, -0.02685546875, -0.0164337158203125, 0.0051727294921875, 0.007061004638671875, 0.03863525390625, 0.0736083984375, -0.051666259765625, -0.063720703125, -0.002826690673828125, -0.045806884765625, -0.0037708282470703125, 0.0016889572143554688, -0.0311431884765625, 0.03857421875, 0.022369384765625, -0.039337158203125, 0.035125732421875, 0.05511474609375, -0.0245513916015625, 0.0634765625, -0.03204345703125, 0.012176513671875, -0.08135986328125, 0.0194549560546875, -0.006351470947265625, -0.0255584716796875, -0.024932861328125, 0.005115509033203125, 0.01525115966796875, -0.0113372802734375, -0.0640869140625, 0.034210205078125, -0.051483154296875, 0.0273895263671875, -0.02313232421875, 0.005268096923828125, -0.00077056884765625, 0.059600830078125, 0.020416259765625, 0.035888671875, 0.08038330078125, -0.059967041015625, 0.031402587890625, 0.0196685791015625, -0.022979736328125, 0.0278472900390625, -0.041107177734375, -0.0028133392333984375, -0.0096588134765625, 0.0019779205322265625, -0.09515380859375, -0.00055694580078125, 0.040130615234375, -0.034027099609375, 0.03094482421875, 0.004673004150390625, -0.040435791015625, -0.032958984375, -0.0244293212890625, 0.0165557861328125, 0.050567626953125, -0.038818359375, 0.042938232421875, 0.0244293212890625, -0.004795074462890625, -0.032806396484375, -0.06317138671875, -0.0079345703125, 0.0001659393310546875, -0.07025146484375, 0.0157012939453125, -0.025146484375, 0.0218048095703125, 0.00403594970703125, 0.0251922607421875, -0.00788116455078125, -0.0168304443359375, 0.0183258056640625, 0.051361083984375, -0.023162841796875, -0.0217132568359375, -0.00765228271484375, -0.00804901123046875, 0.005130767822265625, 
-0.004703521728515625, 0.05218505859375, -0.002017974853515625, -0.0300445556640625, -0.03302001953125, 0.0038814544677734375, 0.04229736328125, -0.0302276611328125, 0.0654296875, 0.052642822265625, -0.0127716064453125, 0.0028076171875, -0.0245513916015625, 0.00572967529296875, -0.038055419921875, 0.03375244140625, -0.01099395751953125, -0.039825439453125, 0.0443115234375, 0.00969696044921875, 0.016815185546875, 0.048675537109375, 0.06884765625, 0.00894927978515625, 0.07904052734375, 0.04962158203125, 0.00946807861328125, 0.044830322265625, -0.042572021484375, -0.00836944580078125, -0.055511474609375, -0.0283050537109375, -0.01044464111328125, -0.01506805419921875, -0.031280517578125, -0.054107666015625, 0.02215576171875, -0.00904083251953125, -0.0287628173828125, 0.03125, -0.04559326171875, 0.0089111328125, 0.048553466796875, 0.0171051025390625, 0.01088714599609375, 0.0033130645751953125, -0.0297088623046875, -0.0059356689453125, -0.049468994140625, -0.042877197265625, 0.07879638671875, 0.0254058837890625, 0.051483154296875, 0.0044097900390625, 0.018585205078125, -0.01497650146484375, 0.0279388427734375, -0.041015625, 0.051513671875, -0.005069732666015625, -0.057342529296875, -0.006252288818359375, -0.0220489501953125, -0.07843017578125, 0.032379150390625, -0.01715087890625, -0.037841796875, 0.031036376953125, 0.01004791259765625, -0.002574920654296875, 0.040374755859375, -0.059600830078125, 0.0721435546875, -0.0280609130859375, -0.0243682861328125, -0.007167816162109375, -0.056610107421875, 0.0472412109375, -0.0027942657470703125, 0.0169677734375, -0.01250457763671875, -0.013641357421875, 0.0413818359375, -0.0384521484375, 0.06475830078125, -0.0276031494140625, -0.00933837890625, 0.039154052734375, 0.005863189697265625, 0.01071929931640625, -0.0107421875, 0.010772705078125, 0.0306854248046875, -0.00125885009765625, -0.023651123046875, -0.005451202392578125, 0.040069580078125, -0.0677490234375, -0.03021240234375, -0.032318115234375, -0.039459228515625, 
0.0019254684448242188, 0.03204345703125, 0.0396728515625, 0.01171875, -0.0014019012451171875, 0.01502227783203125, 0.0311737060546875, -0.06256103515625, 0.0447998046875, 0.053680419921875, -0.0309600830078125, -0.0438232421875, 0.07403564453125, 0.00426483154296875, 0.05328369140625, 0.0208587646484375, 0.01346588134765625, -0.0168914794921875, -0.0265655517578125, -0.03302001953125, 0.0241546630859375, -0.056488037109375, -0.017669677734375, -0.0445556640625, -0.0265960693359375, -0.03582763671875, 0.0021038055419921875, -0.0311279296875, -0.01297760009765625, -0.052734375, 0.0279541015625, 0.01198577880859375, 0.0177154541015625, -0.01548004150390625, 0.024017333984375, -0.052825927734375, 0.039093017578125, 0.0223388671875, 0.0235443115234375, 0.01177215576171875, -0.049957275390625, -0.0224456787109375, 0.022735595703125, -0.059906005859375, -0.06280517578125, 0.051910400390625, 0.006084442138671875, 0.0430908203125, 0.045623779296875, -0.007049560546875, 0.047210693359375, -0.039642333984375, 0.0736083984375, 0.039886474609375, -0.056121826171875, 0.03662109375, -0.040802001953125, 0.0240020751953125, 0.003993988037109375, 0.0292205810546875, -0.03131103515625, -0.039337158203125, -0.038726806640625, -0.074951171875, 0.07049560546875, 0.0165252685546875, 0.006229400634765625, 0.00945281982421875, 0.00008171796798706055, -0.0082550048828125, 0.014190673828125, -0.0782470703125, -0.042022705078125, -0.033233642578125, -0.006526947021484375, -0.0022144317626953125, -0.01448822021484375, 0.00025534629821777344, -0.03564453125, 0.06793212890625, 0.00582122802734375, 0.033416748046875, 0.01511383056640625, -0.00949859619140625, -0.012237548828125, -0.0021514892578125, 0.039520263671875, 0.055206298828125, -0.0189208984375, -0.01416778564453125, -0.01251220703125, -0.0323486328125, -0.0033130645751953125, 0.0119171142578125, -0.0168914794921875, -0.00670623779296875, 0.00876617431640625, 0.06903076171875, -0.01120758056640625, -0.052764892578125, 0.06011962890625, 
-0.0018625259399414062, -0.032501220703125, -0.0242462158203125, 0.01122283935546875, 0.03289794921875, 0.0187530517578125, 0.0219573974609375, 0.009002685546875, -0.007381439208984375, -0.035552978515625, -0.00948333740234375, 0.048187255859375, -0.0174407958984375, -0.0132293701171875, 0.08453369140625, 0.007144927978515625, 0.0020465850830078125, 0.0308990478515625, -0.0204925537109375, -0.038726806640625, 0.06683349609375, 0.04498291015625, 0.0518798828125, -0.00811767578125, 0.01079559326171875, 0.042205810546875, 0.031951904296875, -0.0026645660400390625, 0.0256805419921875, 0.0192413330078125, -0.054656982421875, -0.017578125, -0.031585693359375, -0.026214599609375, 0.0102081298828125, -0.0287017822265625, 0.047576904296875, -0.033050537109375, -0.0132293701171875, -0.021697998046875, -0.01568603515625, -0.049468994140625, 0.0126190185546875, 0.038604736328125, 0.0667724609375, -0.046539306640625, 0.056304931640625, 0.048553466796875, -0.05517578125, -0.060272216796875, -0.0175628662109375, -0.007541656494140625, -0.06317138671875, 0.054229736328125, 0.01549530029296875, -0.005252838134765625, -0.0035762786865234375, -0.063232421875, -0.056793212890625, 0.09918212890625, 0.040008544921875, -0.03204345703125, 0.00853729248046875, 0.01535797119140625, 0.0276031494140625, -0.031890869140625, 0.0235443115234375, 0.00489044189453125, 0.049224853515625, 0.01157379150390625, -0.0758056640625, 0.007350921630859375, -0.017486572265625, 0.00867462158203125, -0.0252838134765625, -0.0791015625, 0.08367919921875, -0.0207977294921875, -0.03314208984375, 0.01611328125, 0.052001953125, 0.0261077880859375, 0.0149383544921875, 0.039886474609375, 0.0305328369140625, 0.0287628173828125, -0.00461578369140625, 0.07159423828125, -0.03662109375, 0.04022216796875, 0.058074951171875, 0.001461029052734375, 0.0538330078125, 0.005924224853515625, -0.010284423828125, 0.0465087890625, 0.051544189453125, -0.0245513916015625, 0.04681396484375, -0.002056121826171875, -0.0025348663330078125, 
-0.014923095703125, 0.0122222900390625, -0.036163330078125, 0.01558685302734375, 0.01148223876953125, -0.038177490234375, -0.0110321044921875, 0.018310546875, -0.0050811767578125, -0.00902557373046875, -0.0157928466796875, 0.03021240234375, 0.0014400482177734375, -0.0262908935546875, 0.0621337890625, 0.0181732177734375, 0.07928466796875, -0.057952880859375, 0.0169830322265625, 0.00595855712890625, 0.0204925537109375, -0.00487518310546875, -0.0555419921875, 0.0020046234130859375, -0.0181427001953125, -0.00904083251953125, -0.0243377685546875, 0.04595947265625, -0.0413818359375, -0.042022705078125, 0.038116455078125, 0.0301361083984375, 0.005428314208984375, -0.0033702850341796875, -0.07000732421875, -0.005313873291015625, 0.00946807861328125, -0.036651611328125, 0.00804901123046875, 0.0300750732421875, 0.01849365234375, 0.051910400390625, 0.04730224609375, 0.0201568603515625, 0.0217742919921875, -0.007656097412109375, 0.06573486328125, -0.0411376953125, -0.03076171875, -0.060333251953125, 0.05352783203125, 0.0077362060546875, -0.0132904052734375, 0.056793212890625, 0.061920166015625, 0.065185546875, -0.037811279296875, 0.0631103515625, -0.010986328125, 0.0243377685546875, -0.03485107421875, 0.0758056640625, -0.0662841796875, -0.0043792724609375, -0.028961181640625, -0.06158447265625, -0.0213165283203125, 0.046356201171875, -0.013092041015625, 0.01329803466796875, 0.04705810546875, 0.07330322265625, -0.0212249755859375, -0.0288848876953125, 0.01983642578125, 0.0153045654296875, 0.035980224609375, 0.059967041015625, 0.055328369140625, -0.037750244140625, 0.062164306640625, -0.031768798828125, -0.0223388671875, -0.006504058837890625, -0.0689697265625, -0.0709228515625, -0.042327880859375, -0.028106689453125, -0.0241546630859375, -0.033355712890625, 0.047760009765625, 0.06622314453125, -0.047943115234375, -0.0264892578125, 0.00909423828125, -0.01190948486328125, -0.01531219482421875, -0.0240631103515625, 0.034820556640625, -0.01503753662109375, -0.08123779296875, 
-0.0002608299255371094, 0.0192718505859375, 0.0279693603515625, -0.030426025390625, -0.0173492431640625, -0.0256805419921875, 0.00963592529296875, 0.045257568359375, 0.02484130859375, -0.0445556640625, 0.00218963623046875, 0.0026073455810546875, -0.0272979736328125, 0.024200439453125, 0.0280303955078125, -0.04168701171875, 0.047393798828125, 0.0460205078125, 0.0177764892578125, 0.04229736328125, -0.01213836669921875, 0.029327392578125, -0.0377197265625, 0.036102294921875, 0.0020351409912109375, 0.040313720703125, 0.0251007080078125, -0.012725830078125, 0.0216217041015625, 0.03802490234375, -0.04193115234375, -0.03863525390625, 0.0023288726806640625, -0.09881591796875, -0.03619384765625, 0.09881591796875, -0.01389312744140625, -0.0265655517578125, 0.01108551025390625, -0.04248046875, 0.030120849609375, -0.0201263427734375, 0.034332275390625, 0.04986572265625, 0.004489898681640625, -0.01776123046875, -0.03643798828125, 0.0114593505859375, 0.00577545166015625, -0.05810546875, -0.0035247802734375, 0.05169677734375, 0.04376220703125, 0.0194854736328125, 0.059234619140625, -0.0252838134765625, 0.0203857421875, -0.0248260498046875, 0.0391845703125, -0.023651123046875, 0.002872467041015625, -0.00809478759765625, 0.003604888916015625, -0.007808685302734375, -0.036712646484375 ] ]
Yukang/Llama-2-7b-longlora-100k-ft
2023-09-25T12:38:22.000Z
[ "transformers", "pytorch", "llama", "text-generation", "arxiv:2309.12307", "endpoints_compatible", "text-generation-inference", "region:us", "has_space" ]
text-generation
Yukang
null
null
Yukang/Llama-2-7b-longlora-100k-ft
45
8,501
transformers
2023-09-13T05:55:57
# LongLoRA: Efficient Fine-tuning of Long-Context Large Language Models <font size=6><div align='center' > <a href=http://arxiv.org/abs/2309.12307>**Paper**</a> | <a href="https://huggingface.co/Yukang">**Models**</a> | <a href="https://github.com/dvlab-research/LongLoRA">**Code**</a> </div></font> **LongLoRA: Efficient Fine-tuning of Long-Context Large Language Models [[Paper](http://arxiv.org/abs/2309.12307)]** <br /> [Yukang Chen](https://scholar.google.com/citations?user=6p0ygKUAAAAJ&hl=en), [Shengju Qian](https://scholar.google.com/citations?user=QNnWmasAAAAJ), [Haotian Tang](https://scholar.google.com/citations?user=WxL13BAAAAAJ&hl), [Xin Lai](https://scholar.google.com/citations?user=tqNDPA4AAAAJ&hl=zh-CN), [Zhijian Liu](https://scholar.google.com/citations?user=3coYSTUAAAAJ&hl=en), [Song Han](https://scholar.google.com/citations?user=E0iCaa4AAAAJ&hl=zh-CN), [Jiaya Jia](https://scholar.google.com/citations?user=XPAkzTEAAAAJ&hl=en)<br /> ## Abstract We present LongLoRA, an efficient fine-tuning approach that extends the context sizes of pre-trained large language models (LLMs), with limited computation cost. Typically, training LLMs with long context sizes is computationally expensive, requiring extensive training hours and GPU resources. In this paper, we speed up the context extension of LLMs in two aspects. On the one hand, although dense global attention is needed during inference, fine-tuning the model can be effectively and efficiently done by sparse local attention. The proposed shift short attention effectively enables context extension, leading to non-trivial computation saving with similar performance to fine-tuning with vanilla attention. On the other hand, we find that LoRA for context extension works well under the premise of trainable embedding and normalization. LongLoRA demonstrates strong empirical results on various tasks on LLaMA2 models from 7B/13B to 70B. 
LongLoRA adopts LLaMA2 7B from 4k context to 100k, or LLaMA2 70B to 32k on a single 8x A100 machine. LongLoRA extends models' context while retaining their original architectures, and is compatible with most existing techniques, like FlashAttention-2. In addition, to make LongLoRA practical, we collect a dataset, LongQA, for supervised fine-tuning. It contains more than 3k long context question-answer pairs. For more details, please refer to the [paper](http://arxiv.org/abs/2309.12307). ## Highlights **LongLoRA** speed up the context extension of pre-trained large language models in both attention-level and weight-level. 1. The proposed shifted short attention is easy to implement, compatible with Flash-Attention, and not required during inference. 2. We release all our models, including models from 7B to 70B, context length from 8k to 100k, including [LLaMA2-LongLoRA-7B-100k](https://huggingface.co/Yukang/Llama-2-7b-longlora-100k-ft), [LLaMA2-LongLoRA-13B-64k](https://huggingface.co/Yukang/Llama-2-13b-longlora-64k), and [LLaMA2-LongLoRA-70B-32k](https://huggingface.co/Yukang/Llama-2-70b-longlora-32k). 3. We build up a long-context QA dataset, LongQA, for supervised fine-tuning (SFT). We release 13B and 70B 32k models with SFT, [Llama-2-13b-chat-longlora-32k-sft](https://huggingface.co/Yukang/Llama-2-13b-chat-longlora-32k-sft) and [Llama-2-70b-chat-longlora-32k-sft](https://huggingface.co/Yukang/Llama-2-70b-chat-longlora-32k-sft). We will further release the dataset next week. 
## Released models ### Models with supervised fine-tuning | Model | Size | Context | Train | Link | |:----------------------------------|------|---------|---------|-------------------------------------------------------------------------| | Llama-2-13b-chat-longlora-32k-sft | 13B | 32768 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-13b-chat-longlora-32k-sft) | | Llama-2-70b-chat-longlora-32k-sft | 70B | 32768 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-70b-chat-longlora-32k-sft) | ### Models with context extension via fully fine-tuning | Model | Size | Context | Train | Link | |:----------------------------|------|---------|-------|-------------------------------------------------------------------| | Llama-2-7b-longlora-8k-ft | 7B | 8192 | Full FT | [link](https://huggingface.co/Yukang/Llama-2-7b-longlora-8k-ft) | | Llama-2-7b-longlora-16k-ft | 7B | 16384 | Full FT | [link](https://huggingface.co/Yukang/Llama-2-7b-longlora-16k-ft) | | Llama-2-7b-longlora-32k-ft | 7B | 32768 | Full FT | [link](https://huggingface.co/Yukang/Llama-2-7b-longlora-32k-ft) | | Llama-2-7b-longlora-100k-ft | 7B | 100000 | Full FT | [link](https://huggingface.co/Yukang/Llama-2-7b-longlora-100k-ft) | | Llama-2-13b-longlora-8k-ft | 13B | 8192 | Full FT | [link](https://huggingface.co/Yukang/Llama-2-13b-longlora-8k-ft) | | Llama-2-13b-longlora-16k-ft | 13B | 16384 | Full FT | [link](https://huggingface.co/Yukang/Llama-2-13b-longlora-16k-ft) | | Llama-2-13b-longlora-32k-ft | 13B | 32768 | Full FT | [link](https://huggingface.co/Yukang/Llama-2-13b-longlora-32k-ft) | ### Models with context extension via improved LoRA fine-tuning | Model | Size | Context | Train | Link | |:----------------------------|------|---------|-------|-------------------------------------------------------------------| | Llama-2-7b-longlora-8k | 7B | 8192 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-7b-longlora-8k) | | Llama-2-7b-longlora-16k | 7B | 16384 | LoRA+ | 
[link](https://huggingface.co/Yukang/Llama-2-7b-longlora-16k) | | Llama-2-7b-longlora-32k | 7B | 32768 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-7b-longlora-32k) | | Llama-2-13b-longlora-8k | 13B | 8192 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-13b-longlora-8k) | | Llama-2-13b-longlora-16k | 13B | 16384 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-13b-longlora-16k) | | Llama-2-13b-longlora-32k | 13B | 32768 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-13b-longlora-32k) | | Llama-2-13b-longlora-64k | 13B | 65536 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-13b-longlora-64k) | | Llama-2-70b-longlora-32k | 70B | 32768 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-70b-longlora-32k) | | Llama-2-70b-chat-longlora-32k | 70B | 32768 | LoRA+ | [link](https://huggingface.co/Yukang/Llama-2-70b-chat-longlora-32k) | ## Citation If you find this project useful in your research, please consider citing: ``` @article{longlora, title={LongLoRA: Efficient Fine-tuning of Long-Context Large Language Models}, author={Yukang Chen and Shengju Qian and Haotian Tang and Xin Lai and Zhijian Liu and Song Han and Jiaya Jia}, journal={arXiv:2309.12307}, year={2023} } ``` ## Acknowledgement - This work is built upon the [LLaMA2](https://ai.meta.com/llama) as the pre-trained models. - This work is based on [DeepSpeed](https://github.com/microsoft/DeepSpeed), [peft](https://github.com/huggingface/peft), and [Flash-Attention2](https://github.com/Dao-AILab/flash-attention) for acceleration. - The perplexity evaluation code is modified upon [Landmark Attention](https://github.com/epfml/landmark-attention). - We use [LongChat](https://github.com/DachengLi1/LongChat) for the retrieval evaluation.
7,594
[ [ -0.05255126953125, -0.058258056640625, 0.0193939208984375, 0.034088134765625, -0.0299224853515625, -0.023712158203125, -0.036590576171875, -0.0631103515625, 0.037994384765625, 0.025970458984375, -0.048431396484375, -0.042266845703125, -0.035125732421875, 0.01515960693359375, -0.00846099853515625, 0.07867431640625, -0.001800537109375, -0.037750244140625, 0.0222320556640625, -0.0279541015625, -0.03338623046875, -0.02191162109375, -0.045501708984375, -0.01381683349609375, 0.059234619140625, 0.021820068359375, 0.0499267578125, 0.0455322265625, 0.02984619140625, 0.020843505859375, -0.03558349609375, 0.0264129638671875, -0.0408935546875, -0.0163421630859375, -0.0009679794311523438, -0.01554107666015625, -0.08056640625, -0.01079559326171875, 0.05169677734375, 0.037139892578125, 0.00443267822265625, 0.031463623046875, 0.017303466796875, 0.058746337890625, -0.02825927734375, 0.006298065185546875, -0.02447509765625, -0.011322021484375, -0.0277862548828125, -0.006591796875, -0.0019102096557617188, -0.0232696533203125, -0.001811981201171875, -0.049285888671875, -0.01727294921875, -0.00920867919921875, 0.0889892578125, 0.03411865234375, -0.038055419921875, -0.00717926025390625, -0.01403045654296875, 0.0633544921875, -0.07366943359375, 0.02130126953125, 0.0251312255859375, 0.011810302734375, -0.0246734619140625, -0.034698486328125, -0.040283203125, -0.0007739067077636719, -0.0209197998046875, 0.0039825439453125, -0.0173797607421875, -0.00862884521484375, 0.0343017578125, 0.027923583984375, -0.024627685546875, 0.0220184326171875, -0.0213623046875, 0.005313873291015625, 0.058868408203125, 0.0005235671997070312, 0.0203857421875, -0.008636474609375, -0.030792236328125, -0.0016107559204101562, -0.063232421875, 0.02679443359375, 0.0146636962890625, 0.01995849609375, -0.0419921875, 0.03729248046875, -0.026885986328125, 0.0604248046875, 0.0189361572265625, -0.0333251953125, 0.03912353515625, -0.03350830078125, -0.024200439453125, -0.0170745849609375, 0.050567626953125, 
0.031158447265625, 0.00366973876953125, 0.0167694091796875, -0.0083770751953125, 0.00101470947265625, -0.0221710205078125, -0.07177734375, 0.015106201171875, 0.01739501953125, -0.02978515625, -0.0180511474609375, -0.00757598876953125, -0.06256103515625, -0.0038299560546875, -0.0230865478515625, 0.0098876953125, -0.0289306640625, -0.0229339599609375, 0.0233001708984375, 0.01934814453125, 0.026123046875, 0.03582763671875, -0.041656494140625, 0.01177978515625, 0.040008544921875, 0.045745849609375, -0.014617919921875, -0.0247650146484375, -0.032440185546875, 0.00592803955078125, -0.020416259765625, 0.039337158203125, -0.0135498046875, -0.009613037109375, -0.0151824951171875, 0.0235595703125, -0.006214141845703125, -0.01513671875, 0.041534423828125, -0.023712158203125, 0.0015153884887695312, -0.03570556640625, -0.03631591796875, -0.00965118408203125, 0.01678466796875, -0.055908203125, 0.081787109375, 0.0188140869140625, -0.0655517578125, 0.0230865478515625, -0.065185546875, -0.01446533203125, -0.026092529296875, 0.0156097412109375, -0.040130615234375, -0.0189361572265625, 0.033905029296875, 0.0421142578125, -0.0243682861328125, 0.0002162456512451172, -0.025848388671875, -0.032684326171875, 0.01403045654296875, 0.0008373260498046875, 0.060699462890625, 0.022674560546875, -0.04559326171875, 0.0266876220703125, -0.061492919921875, 0.0087890625, 0.0225830078125, -0.035675048828125, -0.007659912109375, -0.0164337158203125, 0.005550384521484375, 0.0263671875, 0.0333251953125, -0.017822265625, 0.033966064453125, -0.03436279296875, 0.040771484375, 0.049346923828125, -0.00984954833984375, 0.019866943359375, -0.0262451171875, 0.0343017578125, 0.01019287109375, 0.0174713134765625, -0.005031585693359375, -0.0340576171875, -0.0780029296875, -0.034515380859375, 0.016265869140625, 0.0200347900390625, -0.042327880859375, 0.06494140625, -0.0384521484375, -0.03656005859375, -0.040618896484375, 0.0360107421875, 0.040985107421875, 0.028961181640625, 0.0290985107421875, -0.0193939208984375, 
-0.033294677734375, -0.06591796875, -0.0006155967712402344, 0.01195526123046875, 0.01369476318359375, 0.032012939453125, 0.048095703125, -0.033843994140625, 0.05975341796875, -0.03741455078125, -0.026641845703125, -0.01666259765625, -0.0252838134765625, 0.0440673828125, 0.03985595703125, 0.07366943359375, -0.05010986328125, -0.04766845703125, 0.0119781494140625, -0.044830322265625, -0.0038547515869140625, 0.01081085205078125, -0.0238037109375, 0.043243408203125, 0.0280609130859375, -0.06085205078125, 0.042236328125, 0.047698974609375, -0.0499267578125, 0.032196044921875, -0.005092620849609375, 0.0015745162963867188, -0.09521484375, 0.026885986328125, -0.0007567405700683594, -0.02325439453125, -0.03936767578125, 0.025848388671875, 0.00787353515625, 0.0196990966796875, -0.04486083984375, 0.0675048828125, -0.04302978515625, -0.0034313201904296875, -0.0198516845703125, 0.011993408203125, -0.0008502006530761719, 0.061676025390625, -0.005786895751953125, 0.06243896484375, 0.036468505859375, -0.037933349609375, 0.023529052734375, 0.017974853515625, -0.03338623046875, 0.0316162109375, -0.050048828125, 0.021697998046875, 0.00922393798828125, 0.05609130859375, -0.052093505859375, -0.03216552734375, 0.0176239013671875, -0.01372528076171875, 0.016448974609375, -0.00567626953125, -0.038116455078125, -0.042877197265625, -0.04632568359375, 0.040802001953125, 0.0330810546875, -0.055084228515625, 0.004138946533203125, 0.01531982421875, 0.011077880859375, -0.048675537109375, -0.033447265625, -0.0025234222412109375, -0.048431396484375, -0.057708740234375, 0.0286712646484375, -0.01666259765625, -0.002834320068359375, -0.0184783935546875, 0.01351165771484375, 0.007720947265625, 0.00914764404296875, 0.020782470703125, 0.010162353515625, -0.026275634765625, 0.00249481201171875, -0.01013946533203125, -0.000865936279296875, -0.02801513671875, 0.0016031265258789062, 0.055572509765625, -0.033721923828125, -0.012664794921875, -0.0562744140625, 0.0126190185546875, 0.03948974609375, 
-0.0196990966796875, 0.05181884765625, 0.0670166015625, -0.020111083984375, -0.0007486343383789062, -0.050384521484375, -0.00439453125, -0.03570556640625, 0.01207733154296875, -0.03546142578125, -0.08209228515625, 0.059783935546875, 0.009613037109375, 0.005641937255859375, 0.048675537109375, 0.03582763671875, 0.0189361572265625, 0.0711669921875, 0.049407958984375, -0.040283203125, 0.0482177734375, -0.03985595703125, -0.0008244514465332031, -0.07391357421875, -0.0005025863647460938, -0.014678955078125, -0.033966064453125, -0.0511474609375, -0.044036865234375, 0.0258331298828125, 0.030426025390625, -0.03094482421875, 0.045745849609375, -0.038604736328125, 0.0255889892578125, 0.028961181640625, 0.0211029052734375, 0.0122222900390625, -0.00937652587890625, 0.0193023681640625, 0.0018310546875, -0.0299530029296875, -0.0220794677734375, 0.06964111328125, 0.0372314453125, 0.03857421875, 0.0257110595703125, 0.0482177734375, -0.00299072265625, 0.01506805419921875, -0.049468994140625, 0.0465087890625, 0.00896453857421875, -0.03912353515625, -0.03790283203125, -0.0222320556640625, -0.08319091796875, 0.0184478759765625, -0.00736236572265625, -0.06805419921875, 0.01126861572265625, 0.005889892578125, -0.036224365234375, 0.01959228515625, -0.03912353515625, 0.057830810546875, -0.01039886474609375, -0.03594970703125, -0.02105712890625, -0.050262451171875, 0.040496826171875, -0.003948211669921875, 0.00756072998046875, -0.019561767578125, -0.003467559814453125, 0.059906005859375, -0.04888916015625, 0.06591796875, -0.005092620849609375, -0.041595458984375, 0.035552978515625, -0.01554107666015625, 0.05255126953125, 0.00817108154296875, -0.0077362060546875, 0.0008788108825683594, 0.00018978118896484375, -0.039398193359375, -0.032958984375, 0.0667724609375, -0.05584716796875, -0.043548583984375, -0.0209808349609375, -0.033447265625, -0.01103973388671875, 0.0213623046875, 0.01458740234375, 0.00669097900390625, 0.006626129150390625, 0.021759033203125, 0.03564453125, -0.019195556640625, 
0.038787841796875, 0.020416259765625, -0.025054931640625, -0.0242919921875, 0.05535888671875, -0.00455474853515625, 0.0111541748046875, 0.00966644287109375, 0.0055084228515625, -0.007511138916015625, -0.027801513671875, -0.028839111328125, 0.040435791015625, -0.03814697265625, -0.0312347412109375, -0.0246734619140625, -0.020782470703125, -0.037506103515625, -0.0089263916015625, -0.02398681640625, -0.0305023193359375, -0.04461669921875, -0.00846099853515625, 0.0545654296875, 0.039031982421875, 0.005146026611328125, 0.0253753662109375, -0.040435791015625, 0.023162841796875, 0.027496337890625, 0.03173828125, -0.0006833076477050781, -0.0430908203125, -0.016357421875, 0.0170440673828125, -0.01556396484375, -0.054290771484375, 0.043243408203125, 0.0186767578125, 0.012420654296875, 0.035125732421875, -0.0197296142578125, 0.08807373046875, -0.025054931640625, 0.05523681640625, 0.0195465087890625, -0.06536865234375, 0.04669189453125, -0.0511474609375, 0.02099609375, 0.0278778076171875, 0.004512786865234375, -0.031005859375, 0.0010328292846679688, -0.037933349609375, -0.06298828125, 0.05169677734375, 0.018524169921875, 0.0018720626831054688, 0.008331298828125, 0.039459228515625, -0.007144927978515625, 0.004596710205078125, -0.06378173828125, -0.0247955322265625, -0.0031070709228515625, -0.0030841827392578125, -0.02313232421875, -0.0242156982421875, -0.019256591796875, -0.04925537109375, 0.04278564453125, -0.0298309326171875, 0.01065826416015625, 0.01229095458984375, -0.010528564453125, -0.013702392578125, 0.0096282958984375, 0.06732177734375, 0.051971435546875, -0.0145263671875, -0.0207061767578125, 0.03851318359375, -0.01525115966796875, -0.005641937255859375, 0.002269744873046875, -0.001659393310546875, -0.0156402587890625, 0.03375244140625, 0.07440185546875, 0.03936767578125, -0.04693603515625, 0.0300445556640625, 0.00733184814453125, -0.0004088878631591797, -0.022430419921875, 0.012359619140625, 0.0164947509765625, 0.026092529296875, 0.0110321044921875, 
-0.0211029052734375, -0.0026378631591796875, -0.049163818359375, 0.004642486572265625, 0.037384033203125, -0.0189971923828125, -0.03729248046875, 0.039093017578125, 0.006969451904296875, 0.0029659271240234375, 0.011566162109375, -0.0056304931640625, -0.038726806640625, 0.0548095703125, 0.040069580078125, 0.034881591796875, -0.02264404296875, -0.007366180419921875, 0.045654296875, -0.00698089599609375, -0.0089263916015625, 0.0211639404296875, 0.0007991790771484375, -0.029296875, -0.0195159912109375, -0.06561279296875, 0.00954437255859375, 0.03155517578125, -0.036224365234375, 0.0264129638671875, -0.0294952392578125, -0.03070068359375, -0.004192352294921875, 0.038909912109375, -0.052337646484375, 0.013153076171875, 0.005657196044921875, 0.0765380859375, -0.03521728515625, 0.08709716796875, 0.036163330078125, -0.0229644775390625, -0.06414794921875, -0.0160064697265625, -0.0037078857421875, -0.06610107421875, 0.045745849609375, 0.016754150390625, 0.0000438690185546875, -0.01039886474609375, -0.051422119140625, -0.09075927734375, 0.10589599609375, 0.0254364013671875, -0.0419921875, -0.01056671142578125, 0.00081634521484375, 0.05694580078125, -0.024383544921875, 0.01222991943359375, 0.053802490234375, 0.045654296875, 0.0035877227783203125, -0.0977783203125, 0.0262451171875, -0.03662109375, 0.0027751922607421875, 0.00823974609375, -0.10113525390625, 0.07952880859375, -0.0151214599609375, -0.00917816162109375, 0.0288543701171875, 0.061767578125, 0.03948974609375, 0.006404876708984375, 0.0391845703125, 0.056610107421875, 0.036163330078125, 0.0002524852752685547, 0.07232666015625, -0.0214691162109375, 0.0308837890625, 0.0601806640625, 0.0027008056640625, 0.06561279296875, 0.033660888671875, -0.017181396484375, 0.03289794921875, 0.060638427734375, 0.00795745849609375, 0.0188751220703125, 0.01561737060546875, -0.0023708343505859375, -0.01371002197265625, -0.0064697265625, -0.05206298828125, 0.024993896484375, 0.0279998779296875, -0.0173797607421875, -0.0018568038940429688, 
-0.01473236083984375, 0.0295867919921875, -0.0166015625, -0.0225677490234375, 0.0489501953125, 0.02142333984375, -0.054595947265625, 0.07757568359375, -0.0012416839599609375, 0.08050537109375, -0.0350341796875, 0.0091552734375, -0.0268402099609375, 0.0212554931640625, -0.023406982421875, -0.0474853515625, -0.00044989585876464844, 0.007297515869140625, 0.00958251953125, -0.0034999847412109375, 0.0445556640625, -0.0254669189453125, -0.041015625, 0.041046142578125, 0.0196685791015625, 0.0081329345703125, -0.0033397674560546875, -0.06195068359375, 0.0191650390625, 0.00481414794921875, -0.056396484375, 0.038238525390625, 0.02655029296875, -0.0204010009765625, 0.052459716796875, 0.051116943359375, 0.007343292236328125, 0.0110931396484375, 0.0003497600555419922, 0.0831298828125, -0.058258056640625, -0.03155517578125, -0.060272216796875, 0.033172607421875, -0.01238250732421875, -0.034271240234375, 0.062255859375, 0.0291748046875, 0.040679931640625, 0.00860595703125, 0.023712158203125, 0.0026073455810546875, 0.04296875, -0.039886474609375, 0.059326171875, -0.0675048828125, 0.0054473876953125, -0.032379150390625, -0.06689453125, -0.0220184326171875, 0.037200927734375, -0.01409912109375, 0.01285552978515625, 0.0306396484375, 0.047576904296875, -0.01212310791015625, -0.026123046875, -0.0008401870727539062, 0.020904541015625, 0.0338134765625, 0.0775146484375, 0.03704833984375, -0.045562744140625, 0.016387939453125, -0.03253173828125, -0.00408935546875, -0.048736572265625, -0.06475830078125, -0.08160400390625, -0.0478515625, -0.0162200927734375, -0.0189056396484375, -0.00991058349609375, 0.0699462890625, 0.062408447265625, -0.05517578125, -0.0189971923828125, 0.013275146484375, 0.010711669921875, -0.00959014892578125, -0.0161895751953125, 0.054046630859375, -0.004180908203125, -0.068359375, 0.0258941650390625, -0.0021762847900390625, 0.0251922607421875, 0.0006213188171386719, -0.0289154052734375, -0.01305389404296875, -0.0005478858947753906, 0.060028076171875, 0.04541015625, 
-0.0633544921875, -0.02447509765625, -0.0062408447265625, -0.01496124267578125, 0.0079193115234375, 0.016693115234375, -0.0421142578125, -0.015350341796875, 0.03338623046875, 0.01381683349609375, 0.0443115234375, 0.01039886474609375, 0.008331298828125, -0.04241943359375, 0.046295166015625, -0.00033473968505859375, 0.031219482421875, 0.019989013671875, -0.0232696533203125, 0.06048583984375, -0.00569915771484375, -0.032379150390625, -0.0787353515625, 0.010101318359375, -0.1038818359375, -0.01432037353515625, 0.0927734375, -0.0116424560546875, -0.046112060546875, 0.0345458984375, -0.03155517578125, 0.01873779296875, -0.0321044921875, 0.05218505859375, 0.035491943359375, -0.00984954833984375, -0.005786895751953125, -0.032684326171875, 0.061248779296875, 0.037994384765625, -0.07940673828125, 0.001766204833984375, 0.03271484375, 0.0262298583984375, 0.028533935546875, 0.048309326171875, -0.00433349609375, 0.0170745849609375, -0.04608154296875, -0.006969451904296875, 0.0009255409240722656, -0.01055145263671875, -0.0205841064453125, -0.01502227783203125, -0.00795745849609375, 0.00550079345703125 ] ]
csarron/bert-base-uncased-squad-v1
2023-04-05T17:54:11.000Z
[ "transformers", "pytorch", "jax", "safetensors", "bert", "question-answering", "bert-base", "en", "dataset:squad", "license:mit", "model-index", "autotrain_compatible", "endpoints_compatible", "has_space", "region:us" ]
question-answering
csarron
null
null
csarron/bert-base-uncased-squad-v1
10
8,500
transformers
2022-03-02T23:29:05
--- language: en license: mit tags: - question-answering - bert - bert-base datasets: - squad metrics: - squad widget: - text: Which name is also used to describe the Amazon rainforest in English? context: 'The Amazon rainforest (Portuguese: Floresta Amazônica or Amazônia; Spanish: Selva Amazónica, Amazonía or usually Amazonia; French: Forêt amazonienne; Dutch: Amazoneregenwoud), also known in English as Amazonia or the Amazon Jungle, is a moist broadleaf forest that covers most of the Amazon basin of South America. This basin encompasses 7,000,000 square kilometres (2,700,000 sq mi), of which 5,500,000 square kilometres (2,100,000 sq mi) are covered by the rainforest. This region includes territory belonging to nine nations. The majority of the forest is contained within Brazil, with 60% of the rainforest, followed by Peru with 13%, Colombia with 10%, and with minor amounts in Venezuela, Ecuador, Bolivia, Guyana, Suriname and French Guiana. States or departments in four nations contain "Amazonas" in their names. The Amazon represents over half of the planet''s remaining rainforests, and comprises the largest and most biodiverse tract of tropical rainforest in the world, with an estimated 390 billion individual trees divided into 16,000 species.' - text: How many square kilometers of rainforest is covered in the basin? context: 'The Amazon rainforest (Portuguese: Floresta Amazônica or Amazônia; Spanish: Selva Amazónica, Amazonía or usually Amazonia; French: Forêt amazonienne; Dutch: Amazoneregenwoud), also known in English as Amazonia or the Amazon Jungle, is a moist broadleaf forest that covers most of the Amazon basin of South America. This basin encompasses 7,000,000 square kilometres (2,700,000 sq mi), of which 5,500,000 square kilometres (2,100,000 sq mi) are covered by the rainforest. This region includes territory belonging to nine nations. 
The majority of the forest is contained within Brazil, with 60% of the rainforest, followed by Peru with 13%, Colombia with 10%, and with minor amounts in Venezuela, Ecuador, Bolivia, Guyana, Suriname and French Guiana. States or departments in four nations contain "Amazonas" in their names. The Amazon represents over half of the planet''s remaining rainforests, and comprises the largest and most biodiverse tract of tropical rainforest in the world, with an estimated 390 billion individual trees divided into 16,000 species.' model-index: - name: csarron/bert-base-uncased-squad-v1 results: - task: type: question-answering name: Question Answering dataset: name: squad type: squad config: plain_text split: validation metrics: - type: exact_match value: 80.9104 name: Exact Match verified: true verifyToken: eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9.eyJoYXNoIjoiNDJlOWQ0OTE0ZjRhMTQwNDY5MjVhYmZiN2RmYzY0OWJiOWUyNjcyMWU5N2I3YmU0OThjZTVjNTc2MjM2Yzg5NiIsInZlcnNpb24iOjF9.cuJ34B-ngUur5wKGhfhVP8FM6NX4IFrIJEdXypbLQJw1i8M5Bb2EeIs-0M5n35YIx2PfqSQcnVj_jP8vLUk4Dg - type: f1 value: 88.2302 name: F1 verified: true verifyToken: eyJhbGciOiJFZERTQSIsInR5cCI6IkpXVCJ9.eyJoYXNoIjoiYmE4NzFmNDA3MDRiODk3ZDg5NWYyNjczOGE5YjdkZWQ0ZmEzNWU5YjFjMzc1ODA2OGRjYzU0Y2M5MmU0NGNhYSIsInZlcnNpb24iOjF9.phmkVWF3I-rl2xrHW0EW9OQqzfuefoqNjWplOpFdzJuW8d2C4sJ8snW0Ikw9kQqZaBCdwdkmsf5VTgOupHb8Dw --- ## BERT-base uncased model fine-tuned on SQuAD v1 This model was fine-tuned from the HuggingFace [BERT](https://www.aclweb.org/anthology/N19-1423/) base uncased checkpoint on [SQuAD1.1](https://rajpurkar.github.io/SQuAD-explorer). This model is case-insensitive: it does not make a difference between english and English. 
## Details | Dataset | Split | # samples | | -------- | ----- | --------- | | SQuAD1.1 | train | 90.6K | | SQuAD1.1 | eval | 11.1k | ### Fine-tuning - Python: `3.7.5` - Machine specs: `CPU: Intel(R) Core(TM) i7-6800K CPU @ 3.40GHz` `Memory: 32 GiB` `GPUs: 2 GeForce GTX 1070, each with 8GiB memory` `GPU driver: 418.87.01, CUDA: 10.1` - script: ```shell # after install https://github.com/huggingface/transformers cd examples/question-answering mkdir -p data wget -O data/train-v1.1.json https://rajpurkar.github.io/SQuAD-explorer/dataset/train-v1.1.json wget -O data/dev-v1.1.json https://rajpurkar.github.io/SQuAD-explorer/dataset/dev-v1.1.json python run_squad.py \ --model_type bert \ --model_name_or_path bert-base-uncased \ --do_train \ --do_eval \ --do_lower_case \ --train_file train-v1.1.json \ --predict_file dev-v1.1.json \ --per_gpu_train_batch_size 12 \ --per_gpu_eval_batch_size=16 \ --learning_rate 3e-5 \ --num_train_epochs 2.0 \ --max_seq_length 320 \ --doc_stride 128 \ --data_dir data \ --output_dir data/bert-base-uncased-squad-v1 2>&1 | tee train-energy-bert-base-squad-v1.log ``` It took about 2 hours to finish. ### Results **Model size**: `418M` | Metric | # Value | # Original ([Table 2](https://www.aclweb.org/anthology/N19-1423.pdf))| | ------ | --------- | --------- | | **EM** | **80.9** | **80.8** | | **F1** | **88.2** | **88.5** | Note that the above results didn't involve any hyperparameter search. ## Example Usage ```python from transformers import pipeline qa_pipeline = pipeline( "question-answering", model="csarron/bert-base-uncased-squad-v1", tokenizer="csarron/bert-base-uncased-squad-v1" ) predictions = qa_pipeline({ 'context': "The game was played on February 7, 2016 at Levi's Stadium in the San Francisco Bay Area at Santa Clara, California.", 'question': "What day was the game played on?" 
}) print(predictions) # output: # {'score': 0.8730505704879761, 'start': 23, 'end': 39, 'answer': 'February 7, 2016'} ``` > Created by [Qingqing Cao](https://awk.ai/) | [GitHub](https://github.com/csarron) | [Twitter](https://twitter.com/sysnlp) > Made with ❤️ in New York.
6,043
[ [ -0.03717041015625, -0.054290771484375, 0.0292816162109375, 0.0271759033203125, -0.0165863037109375, 0.00799560546875, -0.01502227783203125, -0.0300140380859375, 0.0247955322265625, 0.01313018798828125, -0.08074951171875, -0.036468505859375, -0.03448486328125, -0.005985260009765625, -0.00814056396484375, 0.093505859375, 0.000003159046173095703, 0.006591796875, -0.00537109375, -0.01348114013671875, -0.01189422607421875, -0.042205810546875, -0.04986572265625, -0.0238494873046875, 0.025482177734375, 0.0035495758056640625, 0.04193115234375, 0.0440673828125, 0.048553466796875, 0.0268096923828125, -0.01480865478515625, -0.00650787353515625, -0.039764404296875, -0.0011091232299804688, 0.0178375244140625, -0.01512908935546875, -0.04132080078125, -0.0022258758544921875, 0.04541015625, 0.0301971435546875, -0.00611114501953125, 0.0167236328125, -0.00287628173828125, 0.04852294921875, -0.0210723876953125, 0.028900146484375, -0.048492431640625, -0.00077056884765625, 0.0028076171875, 0.00417327880859375, -0.0099334716796875, -0.002941131591796875, 0.02239990234375, -0.0509033203125, 0.0433349609375, -0.00968170166015625, 0.101318359375, 0.0214385986328125, -0.00015664100646972656, 0.00121307373046875, -0.026702880859375, 0.0787353515625, -0.06805419921875, 0.01216888427734375, 0.011444091796875, 0.02642822265625, -0.01500701904296875, -0.0296783447265625, -0.033294677734375, -0.003116607666015625, -0.00431060791015625, 0.033935546875, -0.0199737548828125, -0.002826690673828125, 0.0207366943359375, 0.00875091552734375, -0.045013427734375, 0.00769805908203125, -0.048553466796875, -0.028076171875, 0.064697265625, 0.019012451171875, 0.00044083595275878906, -0.0174407958984375, -0.043975830078125, -0.0251007080078125, -0.02508544921875, 0.0289306640625, 0.03460693359375, 0.039764404296875, -0.028839111328125, 0.040069580078125, -0.03509521484375, 0.025115966796875, 0.01520538330078125, 0.01306915283203125, 0.034210205078125, -0.0175323486328125, -0.0221099853515625, 
-0.0016689300537109375, 0.072265625, 0.035675048828125, 0.006832122802734375, -0.01387786865234375, -0.0162506103515625, -0.0007367134094238281, 0.0083465576171875, -0.0699462890625, -0.0295562744140625, 0.040374755859375, -0.0198822021484375, -0.020294189453125, 0.01247406005859375, -0.0458984375, 0.00579833984375, -0.0004363059997558594, 0.042205810546875, -0.0455322265625, -0.0055694580078125, -0.003997802734375, -0.029083251953125, 0.041656494140625, 0.022247314453125, -0.05596923828125, -0.0031528472900390625, 0.040130615234375, 0.05865478515625, -0.004993438720703125, -0.0203857421875, -0.0311279296875, -0.0078277587890625, -0.0156707763671875, 0.05438232421875, -0.005077362060546875, -0.02288818359375, -0.0010843276977539062, 0.0202789306640625, -0.009979248046875, -0.01477813720703125, 0.03558349609375, -0.0266571044921875, 0.0199737548828125, -0.039764404296875, -0.041351318359375, -0.00336456298828125, 0.02349853515625, -0.034210205078125, 0.08990478515625, 0.025634765625, -0.046051025390625, 0.036468505859375, -0.0472412109375, -0.03411865234375, 0.0086822509765625, 0.00853729248046875, -0.0546875, -0.01497650146484375, 0.03131103515625, 0.039031982421875, -0.00731658935546875, 0.00537872314453125, -0.033966064453125, -0.0302581787109375, 0.00510406494140625, -0.004787445068359375, 0.0867919921875, 0.0025482177734375, -0.031097412109375, 0.0082855224609375, -0.0469970703125, 0.0316162109375, 0.01210784912109375, -0.03265380859375, 0.01409149169921875, 0.002933502197265625, 0.0024623870849609375, 0.0096435546875, 0.038299560546875, -0.049530029296875, 0.0189361572265625, -0.024383544921875, 0.04302978515625, 0.061126708984375, -0.012847900390625, 0.020538330078125, -0.04119873046875, 0.02325439453125, 0.00919342041015625, 0.02471923828125, 0.0120086669921875, -0.050506591796875, -0.052734375, -0.032257080078125, 0.0192413330078125, 0.039886474609375, -0.0335693359375, 0.058563232421875, -0.005161285400390625, -0.056060791015625, -0.045562744140625, 
-0.0072021484375, 0.026611328125, 0.032958984375, 0.046783447265625, -0.006397247314453125, -0.0631103515625, -0.086669921875, 0.011138916015625, -0.047454833984375, -0.002819061279296875, 0.035614013671875, 0.056976318359375, -0.022552490234375, 0.0584716796875, -0.0419921875, -0.0157470703125, -0.0259246826171875, 0.0016260147094726562, 0.035614013671875, 0.05859375, 0.056549072265625, -0.026947021484375, -0.03289794921875, -0.0121002197265625, -0.0631103515625, 0.0007925033569335938, -0.0003387928009033203, -0.0273895263671875, 0.0300140380859375, 0.01904296875, -0.05889892578125, 0.0304107666015625, 0.02398681640625, -0.02044677734375, 0.0523681640625, -0.021636962890625, 0.0178375244140625, -0.06439208984375, 0.0142364501953125, -0.0016489028930664062, -0.0030765533447265625, -0.03460693359375, 0.01275634765625, -0.0005655288696289062, 0.0126800537109375, -0.0374755859375, 0.031280517578125, -0.0178070068359375, 0.0133819580078125, 0.01216888427734375, -0.01259613037109375, -0.010345458984375, 0.054290771484375, 0.002994537353515625, 0.060943603515625, 0.039947509765625, -0.0380859375, 0.0182342529296875, 0.033966064453125, -0.0269317626953125, 0.00516510009765625, -0.074951171875, 0.0039215087890625, -0.005939483642578125, 0.01494598388671875, -0.0830078125, -0.018218994140625, 0.0226593017578125, -0.049713134765625, 0.01139068603515625, -0.0212249755859375, -0.033416748046875, -0.0286712646484375, -0.0292816162109375, 0.01702880859375, 0.051727294921875, -0.0236053466796875, 0.01220703125, 0.0262451171875, -0.00028443336486816406, -0.043121337890625, -0.045989990234375, -0.03192138671875, -0.013702392578125, -0.054840087890625, 0.027587890625, -0.0305938720703125, -0.0064239501953125, -0.0111541748046875, -0.01806640625, -0.0305328369140625, 0.0031070709228515625, 0.01073455810546875, 0.04827880859375, -0.0231475830078125, -0.007099151611328125, 0.0024814605712890625, 0.007106781005859375, 0.01323699951171875, 0.004486083984375, 0.04486083984375, 
-0.0313720703125, 0.011810302734375, -0.019317626953125, 0.02288818359375, 0.046539306640625, -0.002269744873046875, 0.06927490234375, 0.061614990234375, -0.0108489990234375, -0.0089111328125, -0.0322265625, -0.02294921875, -0.03594970703125, 0.0305938720703125, -0.01702880859375, -0.0501708984375, 0.068603515625, 0.03826904296875, 0.01544189453125, 0.07269287109375, 0.03680419921875, -0.039276123046875, 0.09136962890625, 0.033599853515625, -0.011688232421875, 0.0311126708984375, -0.05731201171875, -0.01503753662109375, -0.05718994140625, -0.0361328125, -0.038543701171875, -0.033203125, -0.0528564453125, -0.0218505859375, 0.03045654296875, 0.007099151611328125, -0.045440673828125, 0.042816162109375, -0.052764892578125, 0.0274658203125, 0.045166015625, 0.035308837890625, -0.018218994140625, -0.01458740234375, -0.0115966796875, -0.005626678466796875, -0.05706787109375, -0.0030059814453125, 0.083984375, 0.01983642578125, 0.03466796875, 0.006130218505859375, 0.050201416015625, 0.01523590087890625, -0.0064239501953125, -0.052154541015625, 0.048675537109375, -0.005672454833984375, -0.07763671875, -0.0333251953125, -0.0357666015625, -0.072021484375, 0.006534576416015625, -0.029296875, -0.054351806640625, -0.01128387451171875, -0.0030765533447265625, -0.03192138671875, 0.0180511474609375, -0.05718994140625, 0.061737060546875, -0.0111236572265625, -0.0224609375, -0.00202178955078125, -0.066650390625, 0.0222625732421875, 0.00263214111328125, -0.010589599609375, -0.0226593017578125, 0.01165771484375, 0.06878662109375, -0.048797607421875, 0.043121337890625, -0.0166015625, 0.0205535888671875, 0.02667236328125, -0.00963592529296875, 0.034881591796875, 0.0063323974609375, -0.015777587890625, 0.031768798828125, 0.018463134765625, -0.0576171875, -0.0246124267578125, 0.045318603515625, -0.0765380859375, -0.040008544921875, -0.045654296875, -0.06005859375, -0.00801849365234375, 0.01971435546875, 0.0389404296875, 0.027801513671875, -0.0089263916015625, 0.029876708984375, 
0.049041748046875, -0.0205078125, 0.047882080078125, 0.037322998046875, -0.0011053085327148438, -0.01061248779296875, 0.037506103515625, 0.00012445449829101562, 0.00829315185546875, 0.0249786376953125, 0.0109405517578125, -0.0289459228515625, -0.0384521484375, -0.03570556640625, 0.0214385986328125, -0.0241546630859375, -0.0231170654296875, -0.031158447265625, -0.041473388671875, -0.04156494140625, -0.00864410400390625, -0.031982421875, -0.039886474609375, -0.0187835693359375, 0.00316619873046875, 0.045989990234375, 0.04046630859375, -0.0035266876220703125, 0.0275726318359375, -0.049774169921875, 0.00493621826171875, 0.007732391357421875, 0.023956298828125, -0.023834228515625, -0.044708251953125, -0.02813720703125, 0.02471923828125, -0.02325439453125, -0.0516357421875, 0.03436279296875, 0.0051116943359375, 0.05499267578125, 0.01873779296875, 0.00794219970703125, 0.06304931640625, -0.0290069580078125, 0.06805419921875, 0.00908660888671875, -0.05450439453125, 0.04583740234375, -0.0479736328125, 0.029449462890625, 0.039703369140625, 0.021026611328125, 0.0081329345703125, -0.034454345703125, -0.0672607421875, -0.07208251953125, 0.07269287109375, 0.038330078125, -0.01152801513671875, 0.0091400146484375, 0.01413726806640625, -0.0020618438720703125, 0.018890380859375, -0.03277587890625, -0.039642333984375, -0.0119171142578125, 0.0002086162567138672, -0.022735595703125, 0.0117340087890625, -0.01000213623046875, -0.051483154296875, 0.071044921875, -0.0021991729736328125, 0.048858642578125, 0.02801513671875, 0.001312255859375, -0.006511688232421875, -0.00591278076171875, 0.042205810546875, 0.04339599609375, -0.06182861328125, -0.0404052734375, 0.0227203369140625, -0.032012939453125, -0.013336181640625, 0.022430419921875, -0.00856781005859375, 0.017608642578125, 0.034149169921875, 0.051910400390625, 0.013336181640625, -0.044219970703125, 0.026947021484375, 0.0013170242309570312, -0.031402587890625, -0.0232391357421875, 0.00997161865234375, -0.01837158203125, 0.028472900390625, 
0.03045654296875, 0.0188140869140625, -0.0032100677490234375, -0.038970947265625, 0.0205535888671875, 0.053985595703125, -0.032623291015625, -0.0238189697265625, 0.06097412109375, -0.002124786376953125, -0.0027942657470703125, 0.05670166015625, -0.0017948150634765625, -0.060821533203125, 0.071044921875, 0.011444091796875, 0.06103515625, 0.01032257080078125, 0.0215606689453125, 0.049346923828125, 0.0245208740234375, -0.00501251220703125, 0.0255889892578125, 0.002666473388671875, -0.052734375, -0.01568603515625, -0.043487548828125, -0.0219879150390625, 0.0221405029296875, -0.06512451171875, 0.01128387451171875, -0.04010009765625, -0.0248565673828125, 0.0142364501953125, 0.033538818359375, -0.0767822265625, 0.0212249755859375, -0.01071929931640625, 0.074462890625, -0.0560302734375, 0.052154541015625, 0.0621337890625, -0.0394287109375, -0.06201171875, -0.01267242431640625, -0.01544952392578125, -0.07452392578125, 0.03485107421875, 0.00003647804260253906, 0.0178680419921875, -0.00012010335922241211, -0.0626220703125, -0.054534912109375, 0.08062744140625, 0.0246734619140625, -0.030792236328125, -0.005214691162109375, 0.0009646415710449219, 0.047027587890625, -0.0144195556640625, 0.039276123046875, 0.0516357421875, 0.03240966796875, 0.00939178466796875, -0.06500244140625, -0.006591796875, -0.024566650390625, -0.021881103515625, 0.01580810546875, -0.07537841796875, 0.08331298828125, -0.0229949951171875, 0.01451873779296875, 0.017974853515625, 0.042144775390625, 0.034881591796875, 0.009521484375, 0.0282745361328125, 0.05194091796875, 0.047882080078125, -0.03350830078125, 0.07080078125, -0.015716552734375, 0.053619384765625, 0.046966552734375, 0.00899505615234375, 0.060699462890625, 0.031982421875, -0.038330078125, 0.039703369140625, 0.051025390625, -0.0212554931640625, 0.04510498046875, 0.007602691650390625, -0.002536773681640625, -0.0343017578125, 0.0226287841796875, -0.04248046875, 0.0289459228515625, -0.0009431838989257812, -0.01537322998046875, -0.0181427001953125, 
-0.039825439453125, 0.003993988037109375, -0.02325439453125, -0.018798828125, 0.053619384765625, -0.0258636474609375, -0.056396484375, 0.0599365234375, -0.0140228271484375, 0.04803466796875, -0.0614013671875, 0.01136016845703125, -0.0222320556640625, 0.017822265625, 0.00009000301361083984, -0.0723876953125, 0.007061004638671875, -0.0004153251647949219, -0.03143310546875, -0.00884246826171875, 0.032470703125, -0.0286407470703125, -0.0552978515625, 0.01456451416015625, 0.038543701171875, 0.02764892578125, -0.00018966197967529297, -0.0789794921875, 0.011749267578125, 0.01523590087890625, -0.01502227783203125, 0.026031494140625, 0.01934814453125, 0.022430419921875, 0.052734375, 0.051727294921875, 0.001583099365234375, 0.0111236572265625, -0.024169921875, 0.06427001953125, -0.0390625, -0.03466796875, -0.055511474609375, 0.053558349609375, -0.006328582763671875, -0.05096435546875, 0.048675537109375, 0.0684814453125, 0.057220458984375, -0.013214111328125, 0.06591796875, -0.0248260498046875, 0.02801513671875, -0.029205322265625, 0.06988525390625, -0.05072021484375, 0.005950927734375, -0.0233154296875, -0.0556640625, 0.00170135498046875, 0.054046630859375, -0.00263214111328125, -0.0004763603210449219, 0.035980224609375, 0.052093505859375, 0.005252838134765625, -0.007091522216796875, 0.0017118453979492188, 0.0185699462890625, 0.023193359375, 0.059356689453125, 0.0262603759765625, -0.064208984375, 0.051116943359375, -0.0394287109375, -0.01528167724609375, -0.0162506103515625, -0.052490234375, -0.08123779296875, -0.05072021484375, -0.028167724609375, -0.049652099609375, 0.01428985595703125, 0.06744384765625, 0.06561279296875, -0.073486328125, -0.0174560546875, -0.0027942657470703125, -0.0008711814880371094, -0.0311126708984375, -0.020782470703125, 0.0301513671875, -0.0283203125, -0.0640869140625, 0.0246429443359375, -0.018829345703125, 0.006046295166015625, 0.0023899078369140625, -0.01236724853515625, -0.02838134765625, -0.0028400421142578125, 0.033966064453125, 
0.0281982421875, -0.045562744140625, -0.033538818359375, 0.0210723876953125, -0.0011739730834960938, 0.0165557861328125, 0.0170135498046875, -0.06903076171875, 0.0243377685546875, 0.04058837890625, 0.03009033203125, 0.054473876953125, 0.007472991943359375, 0.04290771484375, -0.054443359375, 0.01430511474609375, 0.0186004638671875, 0.033203125, 0.0113067626953125, -0.0220794677734375, 0.039459228515625, 0.0239410400390625, -0.04632568359375, -0.0701904296875, -0.0184783935546875, -0.1002197265625, -0.0162200927734375, 0.0802001953125, -0.00839996337890625, -0.01538848876953125, 0.0187835693359375, -0.01026153564453125, 0.04052734375, -0.053131103515625, 0.0654296875, 0.060089111328125, -0.01305389404296875, 0.0145263671875, -0.0345458984375, 0.035736083984375, 0.033477783203125, -0.041748046875, -0.018524169921875, 0.01611328125, 0.0271148681640625, -0.0031375885009765625, 0.0184326171875, 0.00888824462890625, 0.019287109375, 0.001911163330078125, 0.0005035400390625, -0.008026123046875, -0.0113067626953125, -0.0013589859008789062, -0.0073089599609375, -0.042633056640625, -0.0302276611328125 ] ]
laion/CLIP-ViT-H-14-frozen-xlm-roberta-large-laion5B-s13B-b90k
2022-11-18T21:00:32.000Z
[ "open_clip", "arxiv:1910.04867", "license:mit", "has_space", "region:us" ]
null
laion
null
null
laion/CLIP-ViT-H-14-frozen-xlm-roberta-large-laion5B-s13B-b90k
12
8,488
open_clip
2022-11-18T20:49:11
--- license: mit widget: - src: https://huggingface.co/datasets/mishig/sample_images/resolve/main/cat-dog-music.png candidate_labels: playing music, playing sports example_title: Cat & Dog --- # Model Card for CLIP ViT-H/14 frozen xlm roberta large - LAION-5B # Table of Contents 1. [Model Details](#model-details) 2. [Uses](#uses) 3. [Training Details](#training-details) 4. [Evaluation](#evaluation) 5. [Acknowledgements](#acknowledgements) 6. [Citation](#citation) 7. [How To Get Started With the Model](#how-to-get-started-with-the-model) # Model Details ## Model Description A CLIP ViT-H/14 frozen xlm roberta large model trained with the LAION-5B (https://laion.ai/blog/laion-5b/) using OpenCLIP (https://github.com/mlfoundations/open_clip). Model training done by Romain Beaumont on the [stability.ai](https://stability.ai/) cluster. # Uses ## Direct Use Zero-shot image classification, image and text retrieval, among others. ## Downstream Use Image classification and other image task fine-tuning, linear probe image classification, image generation guiding and conditioning, among others. # Training Details ## Training Data This model was trained with the full LAION-5B (https://laion.ai/blog/laion-5b/). ## Training Procedure Training with batch size 90k for 13B sample of laion5B, see https://wandb.ai/rom1504/open-clip/reports/xlm-roberta-large-unfrozen-vit-h-14-frozen--VmlldzoyOTc3ODY3 Model is H/14 on visual side, xlm roberta large initialized with pretrained weights on text side. The H/14 was initialized from https://huggingface.co/laion/CLIP-ViT-H-14-laion2B-s32B-b79K and kept frozen during training. # Evaluation Evaluation done with code in the [LAION CLIP Benchmark suite](https://github.com/LAION-AI/CLIP_benchmark). ## Testing Data, Factors & Metrics ### Testing Data The testing is performed with VTAB+ (A combination of VTAB (https://arxiv.org/abs/1910.04867) w/ additional robustness datasets) for classification and COCO and Flickr for retrieval. 
## Results The model achieves imagenet 1k 77.0% (vs 78% for the english H/14) ![results_xlm_roberta_large.png](results_xlm_roberta_large.png) On zero shot classification on imagenet with translated prompts this model reaches: * 56% in italian (vs 21% for https://github.com/clip-italian/clip-italian) * 53% in japanese (vs 54.6% for https://github.com/rinnakk/japanese-clip) * 55.7% in chinese (to be compared with https://github.com/OFA-Sys/Chinese-CLIP) This model reaches strong results in both english and other languages. # Acknowledgements Acknowledging [stability.ai](https://stability.ai/) for the compute used to train this model. # Citation **BibTeX:** In addition to forthcoming LAION-5B (https://laion.ai/blog/laion-5b/) paper, please cite: OpenAI CLIP paper ``` @inproceedings{Radford2021LearningTV, title={Learning Transferable Visual Models From Natural Language Supervision}, author={Alec Radford and Jong Wook Kim and Chris Hallacy and A. Ramesh and Gabriel Goh and Sandhini Agarwal and Girish Sastry and Amanda Askell and Pamela Mishkin and Jack Clark and Gretchen Krueger and Ilya Sutskever}, booktitle={ICML}, year={2021} } ``` OpenCLIP software ``` @software{ilharco_gabriel_2021_5143773, author = {Ilharco, Gabriel and Wortsman, Mitchell and Wightman, Ross and Gordon, Cade and Carlini, Nicholas and Taori, Rohan and Dave, Achal and Shankar, Vaishaal and Namkoong, Hongseok and Miller, John and Hajishirzi, Hannaneh and Farhadi, Ali and Schmidt, Ludwig}, title = {OpenCLIP}, month = jul, year = 2021, note = {If you use this software, please cite it as below.}, publisher = {Zenodo}, version = {0.1}, doi = {10.5281/zenodo.5143773}, url = {https://doi.org/10.5281/zenodo.5143773} } ``` # How To Get Started With the Model https://github.com/mlfoundations/open_clip
4,114
[ [ -0.022430419921875, -0.045013427734375, 0.028106689453125, 0.00254058837890625, -0.03338623046875, -0.0167083740234375, -0.030059814453125, -0.0322265625, 0.0154571533203125, 0.032012939453125, -0.0305328369140625, -0.05596923828125, -0.05218505859375, 0.00005453824996948242, -0.024444580078125, 0.06756591796875, -0.0190582275390625, 0.0106201171875, -0.01412200927734375, -0.03265380859375, -0.0401611328125, -0.03485107421875, -0.04046630859375, -0.00420379638671875, 0.00498199462890625, 0.02838134765625, 0.05133056640625, 0.062347412109375, 0.049163818359375, 0.0225067138671875, -0.0117645263671875, 0.0010290145874023438, -0.049224853515625, -0.038330078125, 0.005706787109375, -0.035064697265625, -0.048065185546875, 0.0007586479187011719, 0.04486083984375, 0.002399444580078125, -0.0074005126953125, 0.0186309814453125, -0.00836944580078125, 0.0258636474609375, -0.047760009765625, 0.022796630859375, -0.043426513671875, 0.00394439697265625, -0.017242431640625, 0.00927734375, -0.0270233154296875, -0.00788116455078125, 0.017547607421875, -0.056182861328125, 0.0146942138671875, -0.015899658203125, 0.11517333984375, 0.00885772705078125, -0.01381683349609375, 0.0084686279296875, -0.04461669921875, 0.06695556640625, -0.058563232421875, 0.02001953125, 0.0208892822265625, 0.032012939453125, 0.0227508544921875, -0.057952880859375, -0.0289306640625, -0.01224517822265625, 0.0242156982421875, 0.0254364013671875, -0.033233642578125, -0.0124359130859375, 0.0367431640625, 0.01131439208984375, -0.0297088623046875, 0.00350189208984375, -0.048065185546875, 0.002033233642578125, 0.03497314453125, 0.00783538818359375, 0.035247802734375, -0.028106689453125, -0.055633544921875, -0.03887939453125, -0.03692626953125, 0.023040771484375, 0.0095672607421875, 0.0002551078796386719, -0.0474853515625, 0.0239715576171875, 0.0193023681640625, 0.04486083984375, 0.0031757354736328125, -0.03173828125, 0.031219482421875, -0.0196075439453125, -0.017974853515625, -0.00939178466796875, 
0.08172607421875, 0.042755126953125, 0.010498046875, 0.0243682861328125, 0.0019273757934570312, -0.019775390625, 0.009124755859375, -0.07696533203125, -0.0223388671875, -0.0050506591796875, -0.0401611328125, -0.00960540771484375, 0.04144287109375, -0.04681396484375, 0.019744873046875, -0.012176513671875, 0.03948974609375, -0.058380126953125, -0.001293182373046875, -0.0008516311645507812, -0.005886077880859375, 0.0173187255859375, 0.03515625, -0.04241943359375, 0.0174713134765625, 0.020263671875, 0.090087890625, -0.0251312255859375, -0.029632568359375, -0.03564453125, 0.01456451416015625, -0.024139404296875, 0.04345703125, -0.00876617431640625, -0.01396942138671875, -0.00952911376953125, 0.027618408203125, -0.00592041015625, -0.03717041015625, 0.0411376953125, -0.0198516845703125, 0.007740020751953125, -0.00707244873046875, -0.024871826171875, -0.039581298828125, 0.0120697021484375, -0.05133056640625, 0.05694580078125, -0.004791259765625, -0.0626220703125, 0.027130126953125, -0.0311279296875, -0.00188446044921875, -0.01390838623046875, 0.0034389495849609375, -0.05255126953125, -0.025634765625, 0.04156494140625, 0.035430908203125, -0.0182037353515625, 0.0257110595703125, -0.048095703125, -0.0178070068359375, 0.0258941650390625, -0.025360107421875, 0.0731201171875, 0.0019741058349609375, -0.013641357421875, 0.0226593017578125, -0.051910400390625, -0.003406524658203125, 0.0253753662109375, -0.006465911865234375, -0.014312744140625, -0.0300750732421875, -0.0023193359375, 0.01018524169921875, 0.01149749755859375, -0.0272064208984375, 0.007205963134765625, -0.004695892333984375, 0.036376953125, 0.048675537109375, -0.0004100799560546875, 0.0164337158203125, -0.0198974609375, 0.05438232421875, 0.0122833251953125, 0.0277252197265625, -0.0307464599609375, -0.034210205078125, -0.0556640625, -0.053375244140625, 0.038482666015625, 0.0469970703125, -0.057159423828125, 0.0289154052734375, -0.0207366943359375, -0.033447265625, -0.035064697265625, 0.00629425048828125, 0.03271484375, 
0.03363037109375, 0.037811279296875, -0.035614013671875, -0.036224365234375, -0.06280517578125, 0.026153564453125, 0.0059661865234375, 0.0010194778442382812, 0.037017822265625, 0.05572509765625, -0.00008589029312133789, 0.060821533203125, -0.060546875, -0.036407470703125, -0.0237274169921875, 0.0184783935546875, 0.0226287841796875, 0.03131103515625, 0.0736083984375, -0.0521240234375, -0.055328369140625, -0.00632476806640625, -0.0748291015625, 0.009796142578125, 0.00165557861328125, -0.017822265625, 0.0093231201171875, 0.0302734375, -0.050506591796875, 0.0418701171875, 0.032501220703125, 0.00234222412109375, 0.039825439453125, -0.02154541015625, 0.010162353515625, -0.09283447265625, 0.018646240234375, 0.007549285888671875, -0.03192138671875, -0.03167724609375, -0.00348663330078125, 0.001605987548828125, -0.0264739990234375, -0.057891845703125, 0.041778564453125, -0.04974365234375, 0.008331298828125, -0.00927734375, 0.0140228271484375, 0.004878997802734375, 0.0394287109375, 0.01163482666015625, 0.051666259765625, 0.0625, -0.035797119140625, 0.0179901123046875, 0.03564453125, -0.028533935546875, 0.02435302734375, -0.06878662109375, 0.006011962890625, 0.0022907257080078125, 0.0078277587890625, -0.0328369140625, -0.0278778076171875, 0.033355712890625, -0.033599853515625, 0.03155517578125, -0.042205810546875, -0.0177154541015625, -0.0360107421875, -0.0360107421875, 0.03729248046875, 0.055328369140625, -0.050262451171875, 0.0211334228515625, 0.0228271484375, 0.0015087127685546875, -0.041595458984375, -0.046630859375, -0.003513336181640625, -0.0202178955078125, -0.050140380859375, 0.03839111328125, -0.007602691650390625, 0.00324249267578125, 0.01531982421875, 0.0201263427734375, -0.022857666015625, -0.019622802734375, 0.03302001953125, 0.043243408203125, -0.0163726806640625, -0.0166168212890625, -0.0176849365234375, -0.0033111572265625, -0.01177978515625, -0.013031005859375, 0.0275115966796875, -0.0176239013671875, -0.0181732177734375, -0.044525146484375, 
0.0033245086669921875, 0.042205810546875, -0.0297088623046875, 0.06207275390625, 0.06878662109375, -0.02239990234375, -0.0010118484497070312, -0.0224456787109375, -0.0016469955444335938, -0.029632568359375, 0.033782958984375, -0.007045745849609375, -0.038726806640625, 0.050689697265625, 0.0209197998046875, -0.017242431640625, 0.034210205078125, 0.0301513671875, 0.00832366943359375, 0.07000732421875, 0.0679931640625, -0.0074005126953125, 0.059478759765625, -0.063720703125, -0.003925323486328125, -0.061126708984375, -0.00479888916015625, -0.0135498046875, 0.00275421142578125, -0.0242462158203125, -0.059814453125, 0.045440673828125, 0.037567138671875, -0.0008020401000976562, 0.0367431640625, -0.0171051025390625, 0.0184478759765625, 0.038543701171875, 0.0252532958984375, 0.004589080810546875, -0.0004105567932128906, -0.0080108642578125, -0.01474761962890625, -0.066650390625, -0.035736083984375, 0.07598876953125, 0.04022216796875, 0.061676025390625, -0.0008630752563476562, 0.0364990234375, -0.0004105567932128906, 0.0018253326416015625, -0.05621337890625, 0.042327880859375, -0.028472900390625, -0.033935546875, -0.0229339599609375, -0.0280914306640625, -0.06317138671875, 0.0007476806640625, -0.020355224609375, -0.048675537109375, 0.011444091796875, 0.021392822265625, -0.01641845703125, 0.0386962890625, -0.04058837890625, 0.072021484375, -0.034637451171875, -0.031158447265625, 0.0048980712890625, -0.04571533203125, 0.047210693359375, 0.0175323486328125, -0.0029754638671875, -0.0099639892578125, 0.01375579833984375, 0.07135009765625, -0.056610107421875, 0.06683349609375, -0.0084075927734375, 0.01806640625, 0.048980712890625, -0.019073486328125, 0.0185394287109375, 0.015594482421875, 0.011322021484375, 0.0400390625, 0.00698089599609375, -0.0178985595703125, -0.035888671875, 0.030029296875, -0.0634765625, -0.023834228515625, -0.03228759765625, -0.030059814453125, 0.026763916015625, 0.034912109375, 0.049224853515625, 0.044036865234375, -0.01168060302734375, 0.0220794677734375, 
0.046722412109375, -0.0245513916015625, 0.0291290283203125, 0.0289459228515625, -0.02911376953125, -0.061859130859375, 0.072265625, 0.0220184326171875, 0.025543212890625, 0.01507568359375, 0.00894927978515625, 0.013763427734375, -0.030059814453125, -0.04180908203125, 0.02752685546875, -0.0511474609375, -0.035003662109375, -0.0269622802734375, -0.028228759765625, -0.0303497314453125, -0.005107879638671875, -0.04296875, -0.010711669921875, -0.0567626953125, -0.0019989013671875, 0.036712646484375, 0.04046630859375, -0.003875732421875, 0.0192718505859375, -0.07098388671875, 0.0229034423828125, 0.01190948486328125, 0.0167388916015625, 0.0094146728515625, -0.053741455078125, -0.0308685302734375, 0.0093231201171875, -0.045989990234375, -0.0640869140625, 0.047698974609375, 0.0196075439453125, 0.045318603515625, 0.05780029296875, 0.0006656646728515625, 0.058837890625, -0.0306396484375, 0.07232666015625, 0.034271240234375, -0.07421875, 0.041351318359375, -0.037017822265625, 0.0311126708984375, 0.0301513671875, 0.05230712890625, -0.00725555419921875, -0.01049041748046875, -0.048675537109375, -0.0684814453125, 0.06707763671875, 0.0061187744140625, -0.00450897216796875, 0.00884246826171875, 0.0234375, 0.002292633056640625, 0.004436492919921875, -0.07232666015625, -0.001926422119140625, -0.033355712890625, -0.004306793212890625, 0.0002741813659667969, -0.0218048095703125, -0.0139923095703125, -0.0302581787109375, 0.056182861328125, -0.0261688232421875, 0.03802490234375, 0.0240325927734375, -0.0176239013671875, -0.006561279296875, -0.0065155029296875, 0.04815673828125, 0.050445556640625, -0.035736083984375, -0.01454925537109375, 0.01171875, -0.035675048828125, -0.010955810546875, 0.000054776668548583984, -0.0450439453125, 0.006519317626953125, 0.05157470703125, 0.1097412109375, 0.025634765625, -0.046722412109375, 0.06353759765625, -0.003971099853515625, -0.0298309326171875, -0.029205322265625, 0.0019855499267578125, -0.01271820068359375, 0.01129913330078125, 0.0040130615234375, 
0.002410888671875, -0.00336456298828125, -0.044525146484375, 0.01194000244140625, 0.0391845703125, -0.04913330078125, -0.043792724609375, 0.058563232421875, -0.0086212158203125, -0.00461578369140625, 0.045013427734375, -0.00504302978515625, -0.049896240234375, 0.050323486328125, 0.0469970703125, 0.062744140625, 0.004695892333984375, 0.0279693603515625, 0.056549072265625, 0.006839752197265625, -0.0209197998046875, 0.008056640625, 0.01457977294921875, -0.044036865234375, -0.0035533905029296875, -0.02337646484375, -0.0257415771484375, 0.007167816162109375, -0.078857421875, 0.05126953125, -0.04010009765625, -0.04052734375, -0.02020263671875, -0.024932861328125, -0.04388427734375, 0.003360748291015625, 0.0121612548828125, 0.07781982421875, -0.056427001953125, 0.059722900390625, 0.055450439453125, -0.05804443359375, -0.0643310546875, 0.00042176246643066406, -0.00849151611328125, -0.043487548828125, 0.0435791015625, 0.0201263427734375, 0.00007146596908569336, -0.0284423828125, -0.07293701171875, -0.0810546875, 0.1075439453125, 0.042236328125, -0.01910400390625, 0.00789642333984375, -0.0218505859375, 0.035736083984375, -0.0304107666015625, 0.0243377685546875, 0.0006766319274902344, 0.0169677734375, 0.0188446044921875, -0.0721435546875, -0.00780487060546875, -0.017486572265625, 0.00811004638671875, 0.0017938613891601562, -0.0860595703125, 0.08160400390625, -0.033905029296875, -0.0207977294921875, 0.0189056396484375, 0.057586669921875, 0.01006317138671875, 0.01513671875, 0.0248870849609375, 0.04833984375, 0.03717041015625, 0.0015230178833007812, 0.0694580078125, -0.02264404296875, 0.03375244140625, 0.08453369140625, -0.01168060302734375, 0.08160400390625, 0.02191162109375, -0.007678985595703125, 0.037567138671875, 0.027099609375, -0.044281005859375, 0.04443359375, -0.020050048828125, 0.0183868408203125, -0.016021728515625, -0.01552581787109375, -0.021514892578125, 0.037811279296875, -0.0017824172973632812, -0.0321044921875, 0.00258636474609375, 0.02667236328125, 
0.0084686279296875, -0.009429931640625, -0.0147705078125, 0.04541015625, 0.02081298828125, -0.03790283203125, 0.06500244140625, 0.004364013671875, 0.037017822265625, -0.055389404296875, -0.007720947265625, 0.005954742431640625, 0.0192108154296875, -0.005859375, -0.057220458984375, 0.0099639892578125, 0.00836944580078125, -0.0230712890625, -0.0079803466796875, 0.04119873046875, -0.0224151611328125, -0.0423583984375, 0.036712646484375, 0.00262451171875, 0.01971435546875, 0.0226898193359375, -0.056671142578125, 0.024139404296875, 0.006748199462890625, -0.006191253662109375, 0.0207977294921875, 0.019622802734375, -0.01401519775390625, 0.056884765625, 0.03399658203125, 0.006664276123046875, 0.01287841796875, -0.00980377197265625, 0.057891845703125, -0.0382080078125, -0.034454345703125, -0.0478515625, 0.035888671875, -0.016693115234375, -0.034332275390625, 0.062255859375, 0.039306640625, 0.06439208984375, -0.02789306640625, 0.041595458984375, -0.00514984130859375, 0.024139404296875, -0.07659912109375, 0.048736572265625, -0.044036865234375, 0.006908416748046875, -0.04296875, -0.05450439453125, -0.01800537109375, 0.040740966796875, -0.02655029296875, 0.0016841888427734375, 0.047607421875, 0.04254150390625, -0.0234375, -0.031829833984375, 0.0211334228515625, 0.0265655517578125, 0.0205841064453125, 0.03912353515625, 0.035430908203125, -0.05328369140625, 0.055023193359375, -0.039825439453125, -0.016510009765625, -0.0171356201171875, -0.0682373046875, -0.08819580078125, -0.03857421875, -0.03350830078125, -0.0167388916015625, 0.00044417381286621094, 0.06512451171875, 0.08428955078125, -0.058441162109375, -0.0302276611328125, 0.0221405029296875, 0.0020503997802734375, -0.01910400390625, -0.0174102783203125, 0.037109375, 0.017364501953125, -0.047760009765625, 0.0073394775390625, 0.0173187255859375, 0.0302734375, -0.0008740425109863281, -0.0120849609375, -0.0233154296875, -0.011016845703125, 0.029754638671875, 0.037811279296875, -0.051727294921875, -0.00791168212890625, 
0.002147674560546875, 0.0035762786865234375, 0.0338134765625, 0.035797119140625, -0.035491943359375, 0.0285797119140625, 0.035247802734375, 0.0182342529296875, 0.061767578125, 0.02337646484375, 0.03436279296875, -0.036041259765625, 0.027984619140625, -0.0087432861328125, 0.03302001953125, 0.0290985107421875, -0.01415252685546875, 0.029754638671875, 0.0234375, -0.02972412109375, -0.061859130859375, -0.00665283203125, -0.09674072265625, -0.0115203857421875, 0.06646728515625, -0.047515869140625, -0.04205322265625, 0.041748046875, -0.0271759033203125, 0.0231170654296875, -0.03228759765625, 0.0279388427734375, 0.03656005859375, 0.0057525634765625, -0.045318603515625, -0.05035400390625, 0.0237274169921875, 0.008453369140625, -0.059417724609375, -0.02069091796875, 0.02862548828125, 0.0360107421875, 0.0231475830078125, 0.034271240234375, -0.013946533203125, 0.0243377685546875, -0.004467010498046875, 0.0276336669921875, -0.0229034423828125, -0.0455322265625, -0.034576416015625, 0.0009245872497558594, 0.00033593177795410156, -0.0244140625 ] ]
chavinlo/gpt4-x-alpaca
2023-04-08T19:38:41.000Z
[ "transformers", "pytorch", "llama", "text-generation", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
chavinlo
null
null
chavinlo/gpt4-x-alpaca
465
8,488
transformers
2023-03-31T13:03:46
# GPT4 x Alpaca As a base model we used: https://huggingface.co/chavinlo/alpaca-13b Finetuned on GPT4's responses, for 3 epochs. NO LORA Please do note that the configurations files maybe messed up, this is because of the trainer I used. I WILL NOT EDIT THEM because there are repos hat automatically fix this, changing it might break it. Generally you just need to change anything that's under the name of "LLaMa" to "Llama" NOTE THE UPPER AND LOWER CASE!!!!
463
[ [ -0.0516357421875, -0.056304931640625, 0.0288848876953125, 0.0153045654296875, -0.06134033203125, -0.0149078369140625, 0.0216064453125, -0.05645751953125, 0.049041748046875, 0.037322998046875, -0.044189453125, -0.0350341796875, -0.061431884765625, 0.0089569091796875, -0.022857666015625, 0.0849609375, -0.010040283203125, -0.0216064453125, 0.0521240234375, -0.03228759765625, -0.0452880859375, -0.026885986328125, -0.06085205078125, -0.0479736328125, 0.05865478515625, 0.0175628662109375, 0.06964111328125, 0.0484619140625, 0.0260162353515625, 0.0213623046875, -0.025634765625, 0.0034351348876953125, -0.02362060546875, -0.0467529296875, -0.006870269775390625, -0.0294189453125, -0.06463623046875, 0.0118255615234375, 0.0298309326171875, 0.023040771484375, -0.0200958251953125, 0.0170745849609375, -0.0225830078125, 0.0279693603515625, -0.03204345703125, 0.0064849853515625, -0.049774169921875, 0.0177001953125, -0.01160430908203125, -0.003154754638671875, 0.0057525634765625, -0.02764892578125, 0.002655029296875, -0.06524658203125, 0.0165863037109375, -0.007541656494140625, 0.08843994140625, 0.0307464599609375, -0.0400390625, -0.036102294921875, -0.0310211181640625, 0.036712646484375, -0.03472900390625, 0.0189208984375, 0.053466796875, 0.01507568359375, -0.0206451416015625, -0.049346923828125, -0.0298614501953125, -0.002529144287109375, 0.00983428955078125, -0.0158233642578125, -0.0187225341796875, -0.0247955322265625, 0.0236968994140625, 0.0187835693359375, -0.0189971923828125, 0.01235198974609375, -0.047149658203125, -0.0147552490234375, 0.0240936279296875, -0.005756378173828125, 0.0085296630859375, 0.0271759033203125, -0.036773681640625, -0.0183258056640625, -0.07220458984375, 0.0033168792724609375, 0.04107666015625, 0.0223236083984375, -0.031158447265625, 0.04998779296875, -0.00577545166015625, 0.04296875, 0.0310516357421875, -0.020965576171875, 0.053009033203125, 0.007289886474609375, -0.037353515625, -0.00301361083984375, 0.0377197265625, 0.03399658203125, 
-0.0110931396484375, -0.007358551025390625, -0.01690673828125, 0.00722503662109375, 0.01361083984375, -0.05438232421875, 0.0009565353393554688, 0.0014715194702148438, -0.0277557373046875, -0.0325927734375, -0.01503753662109375, -0.0408935546875, -0.01313018798828125, -0.0081939697265625, 0.038787841796875, -0.0364990234375, -0.016357421875, 0.0211181640625, 0.0147857666015625, 0.048858642578125, 0.0219573974609375, -0.07012939453125, 0.0322265625, 0.01560211181640625, 0.037628173828125, 0.0252532958984375, -0.0243988037109375, -0.010955810546875, 0.0220184326171875, -0.02520751953125, 0.05035400390625, -0.000186920166015625, -0.06793212890625, -0.01184844970703125, 0.02301025390625, 0.01155853271484375, -0.0224609375, 0.057830810546875, -0.0516357421875, 0.0204620361328125, -0.057159423828125, -0.0194549560546875, -0.0389404296875, 0.02093505859375, -0.06683349609375, 0.054840087890625, 0.03338623046875, -0.050811767578125, 0.0219879150390625, -0.061065673828125, -0.0097503662109375, 0.004421234130859375, -0.00098419189453125, -0.04791259765625, -0.0158843994140625, 0.006702423095703125, 0.01477813720703125, -0.01617431640625, -0.0029296875, 0.0038242340087890625, -0.044403076171875, 0.00135040283203125, 0.007770538330078125, 0.059051513671875, 0.0023784637451171875, -0.029541015625, -0.0099945068359375, -0.069580078125, -0.0135345458984375, 0.033843994140625, -0.03515625, 0.005710601806640625, -0.022186279296875, -0.01442718505859375, -0.0036907196044921875, 0.042999267578125, -0.037109375, 0.04425048828125, 0.004451751708984375, 0.04248046875, 0.058868408203125, -0.0015163421630859375, 0.022430419921875, -0.024810791015625, 0.03387451171875, -0.0092620849609375, 0.0343017578125, 0.0325927734375, -0.06719970703125, -0.058197021484375, -0.0080108642578125, 0.00112152099609375, 0.02716064453125, -0.03369140625, 0.0309906005859375, 0.0171051025390625, -0.048370361328125, -0.0007963180541992188, -0.0038299560546875, 0.01219940185546875, 0.031585693359375, 
0.04010009765625, -0.045074462890625, -0.057220458984375, -0.06927490234375, 0.03411865234375, -0.023956298828125, 0.0179443359375, -0.006320953369140625, 0.04449462890625, -0.06085205078125, 0.05126953125, -0.037628173828125, -0.0220489501953125, -0.007080078125, -0.0052490234375, 0.03265380859375, 0.0338134765625, 0.0869140625, -0.0259552001953125, 0.00577545166015625, -0.01192474365234375, -0.037261962890625, 0.0004811286926269531, 0.032684326171875, -0.02996826171875, -0.00899505615234375, 0.00568389892578125, -0.07196044921875, 0.04132080078125, 0.052032470703125, -0.03692626953125, 0.03704833984375, -0.007625579833984375, -0.005947113037109375, -0.06488037109375, -0.0011234283447265625, -0.01031494140625, -0.01107025146484375, -0.020355224609375, 0.0185546875, 0.01175689697265625, 0.00695037841796875, -0.031341552734375, 0.0295867919921875, -0.0186920166015625, -0.00994873046875, -0.022979736328125, -0.022186279296875, 0.0103912353515625, 0.04827880859375, 0.000186920166015625, 0.052886962890625, 0.030792236328125, -0.028778076171875, 0.032928466796875, 0.0634765625, -0.0157012939453125, 0.0238800048828125, -0.06787109375, 0.044921875, 0.0194549560546875, 0.043212890625, -0.020965576171875, -0.0279998779296875, 0.0611572265625, -0.0045166015625, 0.005184173583984375, -0.007289886474609375, -0.0186767578125, -0.0293121337890625, -0.01499176025390625, 0.031646728515625, 0.043243408203125, -0.0628662109375, 0.039276123046875, 0.0116424560546875, 0.01114654541015625, -0.03448486328125, -0.038238525390625, 0.01177215576171875, -0.04437255859375, -0.02630615234375, 0.033203125, -0.0264739990234375, -0.0171051025390625, -0.025909423828125, -0.00238800048828125, -0.0286102294921875, -0.010498046875, 0.0277862548828125, 0.038726806640625, -0.0229034423828125, -0.012115478515625, -0.0015048980712890625, -0.006877899169921875, -0.006198883056640625, 0.025115966796875, 0.050689697265625, -0.034820556640625, -0.00637054443359375, -0.042633056640625, 0.0203399658203125, 
0.0211029052734375, -0.00128173828125, 0.0552978515625, 0.0302276611328125, -0.0294342041015625, 0.0207061767578125, -0.04931640625, 0.0114898681640625, -0.03692626953125, -0.0015497207641601562, -0.04437255859375, -0.05218505859375, 0.057952880859375, 0.0223388671875, -0.0060577392578125, 0.044036865234375, 0.039886474609375, 0.0145111083984375, 0.04364013671875, 0.060821533203125, -0.0164642333984375, 0.028717041015625, -0.0258026123046875, -0.00487518310546875, -0.064208984375, -0.0271759033203125, -0.032440185546875, -0.021453857421875, -0.0347900390625, -0.0217437744140625, 0.005054473876953125, 0.0166473388671875, -0.062255859375, 0.0604248046875, -0.03662109375, 0.04071044921875, 0.031402587890625, 0.028411865234375, 0.033111572265625, -0.0180816650390625, 0.03594970703125, 0.0234527587890625, -0.0180511474609375, -0.0439453125, 0.07220458984375, 0.029693603515625, 0.05615234375, 0.0156707763671875, 0.055999755859375, 0.0033359527587890625, 0.0311279296875, -0.036590576171875, 0.03582763671875, 0.01371002197265625, -0.031463623046875, -0.01274871826171875, 0.01031494140625, -0.07440185546875, 0.022003173828125, 0.006072998046875, -0.0423583984375, 0.016754150390625, -0.0007882118225097656, -0.03179931640625, 0.010528564453125, -0.037933349609375, 0.044464111328125, -0.0178070068359375, 0.01904296875, 0.0194854736328125, -0.041229248046875, 0.052215576171875, -0.01629638671875, 0.0125732421875, -0.01947021484375, -0.0217742919921875, 0.0462646484375, -0.06689453125, 0.0777587890625, -0.00811004638671875, -0.0419921875, 0.02783203125, -0.0167083740234375, 0.050628662109375, 0.0116729736328125, -0.0059051513671875, 0.0233917236328125, -0.0167236328125, -0.03582763671875, -0.00731658935546875, 0.044677734375, -0.100830078125, -0.0240631103515625, -0.018310546875, -0.01873779296875, -0.02874755859375, -0.00506591796875, 0.05780029296875, 0.00873565673828125, -0.020965576171875, -0.00795745849609375, 0.0307769775390625, 0.01294708251953125, 0.040252685546875, 
0.037872314453125, -0.01432037353515625, -0.042022705078125, 0.0280609130859375, -0.004383087158203125, -0.005458831787109375, -0.0183563232421875, 0.0175628662109375, -0.00850677490234375, -0.04248046875, -0.058197021484375, 0.036102294921875, -0.05206298828125, -0.0224609375, -0.0184783935546875, -0.0109100341796875, -0.0217742919921875, -0.01198577880859375, -0.0220184326171875, -0.0288848876953125, -0.0537109375, -0.0190582275390625, 0.048309326171875, 0.06610107421875, -0.0028629302978515625, 0.07220458984375, -0.047088623046875, 0.0391845703125, 0.042816162109375, 0.01168060302734375, -0.03900146484375, -0.0673828125, -0.0149688720703125, 0.0005335807800292969, -0.039794921875, -0.064208984375, 0.0272979736328125, 0.0122833251953125, 0.03155517578125, 0.0116119384765625, -0.037933349609375, 0.0635986328125, -0.048126220703125, 0.056396484375, 0.038116455078125, -0.062042236328125, 0.044921875, -0.039825439453125, 0.003551483154296875, 0.0273284912109375, 0.01177978515625, 0.0017452239990234375, 0.009765625, -0.060699462890625, -0.044525146484375, 0.019439697265625, 0.02374267578125, 0.0092620849609375, 0.00778961181640625, 0.04278564453125, 0.03759765625, 0.01277923583984375, -0.07891845703125, -0.01116943359375, -0.047332763671875, 0.0262451171875, -0.0004487037658691406, -0.01084136962890625, -0.034271240234375, -0.0193023681640625, 0.078857421875, -0.010955810546875, -0.005229949951171875, -0.0035800933837890625, 0.00653839111328125, -0.026519775390625, -0.005283355712890625, 0.048583984375, 0.053192138671875, -0.0313720703125, -0.04205322265625, 0.03350830078125, -0.047882080078125, 0.021331787109375, 0.0023899078369140625, -0.0029315948486328125, 0.004688262939453125, 0.0178375244140625, 0.07733154296875, 0.0176849365234375, -0.01448822021484375, 0.0229339599609375, -0.0189208984375, -0.010711669921875, -0.0279998779296875, 0.006877899169921875, 0.0175018310546875, 0.0187225341796875, 0.01505279541015625, 0.0140533447265625, -0.0187835693359375, 
-0.04205322265625, -0.02191162109375, 0.016510009765625, 0.007343292236328125, -0.04107666015625, 0.042724609375, 0.0228118896484375, -0.0262451171875, 0.0285491943359375, -0.00762939453125, -0.00909423828125, 0.062408447265625, 0.0467529296875, 0.039306640625, -0.048797607421875, 0.00472259521484375, 0.0150909423828125, 0.0212860107421875, -0.02874755859375, 0.02618408203125, 0.01160430908203125, -0.0758056640625, -0.01142120361328125, -0.046051025390625, -0.056915283203125, 0.042205810546875, -0.0594482421875, 0.043701171875, -0.042205810546875, -0.0239715576171875, -0.01128387451171875, 0.01062774658203125, -0.059295654296875, 0.03973388671875, 0.01244354248046875, 0.08197021484375, -0.060577392578125, 0.07965087890625, 0.043731689453125, -0.044647216796875, -0.07232666015625, -0.0313720703125, -0.0111846923828125, -0.095947265625, 0.0281524658203125, 0.0040435791015625, -0.0228271484375, -0.0048828125, -0.076904296875, -0.081787109375, 0.10723876953125, 0.0308074951171875, -0.03173828125, -0.006134033203125, -0.006134033203125, 0.0273284912109375, -0.033294677734375, -0.0109405517578125, 0.05999755859375, 0.0228118896484375, 0.0241241455078125, -0.0682373046875, -0.016754150390625, -0.00920867919921875, 0.0012369155883789062, 0.0080718994140625, -0.07135009765625, 0.080078125, -0.01568603515625, 0.0181121826171875, 0.057952880859375, 0.046417236328125, 0.03887939453125, -0.0003521442413330078, 0.046234130859375, 0.06597900390625, 0.04901123046875, 0.0097503662109375, 0.063232421875, -0.0035762786865234375, 0.0313720703125, 0.08367919921875, -0.038787841796875, 0.040679931640625, 0.0343017578125, -0.0276947021484375, 0.047760009765625, 0.07623291015625, 0.0030612945556640625, 0.037506103515625, 0.00554656982421875, -0.024749755859375, 0.01322174072265625, 0.00614166259765625, -0.07366943359375, 0.037628173828125, 0.040863037109375, -0.0178375244140625, -0.0110015869140625, -0.00887298583984375, 0.00704193115234375, -0.0411376953125, -0.031768798828125, 
0.036376953125, 0.0008502006530761719, -0.011322021484375, 0.053070068359375, 0.00450897216796875, 0.06048583984375, -0.05633544921875, 0.006679534912109375, -0.03631591796875, -0.01486968994140625, -0.01690673828125, -0.03692626953125, 0.011077880859375, 0.0028095245361328125, -0.013458251953125, 0.0085601806640625, 0.037567138671875, -0.0005702972412109375, -0.035003662109375, 0.0244293212890625, 0.034942626953125, 0.013916015625, -0.0008878707885742188, -0.05267333984375, 0.038665771484375, -0.00458526611328125, -0.040802001953125, 0.0280609130859375, 0.023406982421875, -0.01953125, 0.045745849609375, 0.041534423828125, -0.0132904052734375, 0.008697509765625, 0.0201568603515625, 0.086181640625, -0.04754638671875, -0.03179931640625, -0.031646728515625, 0.008819580078125, 0.0153961181640625, -0.04168701171875, 0.01507568359375, 0.03314208984375, 0.040618896484375, -0.019195556640625, 0.047637939453125, -0.006313323974609375, 0.03173828125, -0.039276123046875, 0.036163330078125, -0.019989013671875, 0.004390716552734375, -0.01418304443359375, -0.08929443359375, 0.002567291259765625, 0.0692138671875, 0.01073455810546875, -0.0030612945556640625, 0.04052734375, 0.041839599609375, -0.00385284423828125, 0.0202178955078125, 0.02069091796875, 0.018463134765625, 0.014617919921875, 0.037628173828125, 0.05572509765625, -0.05615234375, 0.032379150390625, -0.0273284912109375, -0.0255889892578125, 0.00838470458984375, -0.0751953125, -0.052947998046875, -0.014129638671875, -0.0281219482421875, -0.0207061767578125, 0.0139312744140625, 0.061553955078125, 0.049072265625, -0.035552978515625, -0.0179443359375, 0.0169830322265625, -0.018218994140625, 0.0086212158203125, -0.0023651123046875, 0.0260772705078125, 0.0301055908203125, -0.049774169921875, 0.047637939453125, -0.010528564453125, 0.05487060546875, -0.005985260009765625, -0.002838134765625, -0.01009368896484375, -0.00995635986328125, 0.032501220703125, 0.04278564453125, -0.05499267578125, -0.060638427734375, -0.032257080078125, 
-0.0084075927734375, 0.0276947021484375, 0.03338623046875, -0.05877685546875, -0.0347900390625, -0.014617919921875, 0.00677490234375, 0.03900146484375, 0.0015354156494140625, 0.0172271728515625, -0.035675048828125, 0.041717529296875, -0.0022563934326171875, 0.07012939453125, 0.038604736328125, -0.0277557373046875, 0.07635498046875, 0.0207366943359375, -0.06085205078125, -0.031707763671875, 0.01406097412109375, -0.10888671875, 0.01363372802734375, 0.06292724609375, -0.0027484893798828125, -0.0333251953125, 0.0019893646240234375, -0.055023193359375, 0.0158843994140625, -0.021636962890625, 0.063720703125, 0.043914794921875, -0.0283203125, 0.001407623291015625, -0.037811279296875, 0.014068603515625, 0.0025539398193359375, -0.0660400390625, -0.04931640625, 0.0254058837890625, 0.051055908203125, 0.0251007080078125, 0.0584716796875, 0.0018968582153320312, 0.02154541015625, -0.0123748779296875, 0.0238800048828125, 0.01441192626953125, 0.0089569091796875, -0.0089111328125, -0.0157470703125, -0.0007796287536621094, -0.03424072265625 ] ]
CAMeL-Lab/bert-base-arabic-camelbert-da-sentiment
2021-10-17T11:15:54.000Z
[ "transformers", "pytorch", "tf", "bert", "text-classification", "ar", "arxiv:2103.06678", "license:apache-2.0", "endpoints_compatible", "has_space", "region:us" ]
text-classification
CAMeL-Lab
null
null
CAMeL-Lab/bert-base-arabic-camelbert-da-sentiment
23
8,487
transformers
2022-03-02T23:29:04
--- language: - ar license: apache-2.0 widget: - text: "أنا بخير" --- # CAMeLBERT-DA SA Model ## Model description **CAMeLBERT-DA SA Model** is a Sentiment Analysis (SA) model that was built by fine-tuning the [CAMeLBERT Dialectal Arabic (DA)](https://huggingface.co/CAMeL-Lab/bert-base-arabic-camelbert-da/) model. For the fine-tuning, we used the [ASTD](https://aclanthology.org/D15-1299.pdf), [ArSAS](http://lrec-conf.org/workshops/lrec2018/W30/pdf/22_W30.pdf), and [SemEval](https://aclanthology.org/S17-2088.pdf) datasets. Our fine-tuning procedure and the hyperparameters we used can be found in our paper *"[The Interplay of Variant, Size, and Task Type in Arabic Pre-trained Language Models](https://arxiv.org/abs/2103.06678)." * Our fine-tuning code can be found [here](https://github.com/CAMeL-Lab/CAMeLBERT). ## Intended uses You can use the CAMeLBERT-DA SA model directly as part of our [CAMeL Tools](https://github.com/CAMeL-Lab/camel_tools) SA component (*recommended*) or as part of the transformers pipeline. #### How to use To use the model with the [CAMeL Tools](https://github.com/CAMeL-Lab/camel_tools) SA component: ```python >>> from camel_tools.sentiment import SentimentAnalyzer >>> sa = SentimentAnalyzer("CAMeL-Lab/bert-base-arabic-camelbert-da-sentiment") >>> sentences = ['أنا بخير', 'أنا لست بخير'] >>> sa.predict(sentences) >>> ['positive', 'negative'] ``` You can also use the SA model directly with a transformers pipeline: ```python >>> from transformers import pipeline >>> sa = pipeline('text-classification', model='CAMeL-Lab/bert-base-arabic-camelbert-da-sentiment') >>> sentences = ['أنا بخير', 'أنا لست بخير'] >>> sa(sentences) [{'label': 'positive', 'score': 0.9616648554801941}, {'label': 'negative', 'score': 0.9779177904129028}] ``` *Note*: to download our models, you would need `transformers>=3.5.0`. Otherwise, you could download the models manually. 
## Citation ```bibtex @inproceedings{inoue-etal-2021-interplay, title = "The Interplay of Variant, Size, and Task Type in {A}rabic Pre-trained Language Models", author = "Inoue, Go and Alhafni, Bashar and Baimukan, Nurpeiis and Bouamor, Houda and Habash, Nizar", booktitle = "Proceedings of the Sixth Arabic Natural Language Processing Workshop", month = apr, year = "2021", address = "Kyiv, Ukraine (Online)", publisher = "Association for Computational Linguistics", abstract = "In this paper, we explore the effects of language variants, data sizes, and fine-tuning task types in Arabic pre-trained language models. To do so, we build three pre-trained language models across three variants of Arabic: Modern Standard Arabic (MSA), dialectal Arabic, and classical Arabic, in addition to a fourth language model which is pre-trained on a mix of the three. We also examine the importance of pre-training data size by building additional models that are pre-trained on a scaled-down set of the MSA variant. We compare our different models to each other, as well as to eight publicly available models by fine-tuning them on five NLP tasks spanning 12 datasets. Our results suggest that the variant proximity of pre-training data to fine-tuning data is more important than the pre-training data size. We exploit this insight in defining an optimized system selection model for the studied tasks.", } ```
3,362
[ [ -0.05096435546875, -0.051055908203125, -0.0005383491516113281, 0.021148681640625, -0.0277252197265625, -0.0032253265380859375, -0.0242156982421875, -0.033050537109375, 0.0099334716796875, 0.0323486328125, -0.0433349609375, -0.0452880859375, -0.0665283203125, 0.005855560302734375, -0.0218505859375, 0.105224609375, -0.0172882080078125, 0.00862884521484375, 0.005313873291015625, -0.0293731689453125, -0.0160980224609375, -0.0499267578125, -0.0302276611328125, -0.019866943359375, 0.03668212890625, 0.028076171875, 0.078125, 0.0234222412109375, 0.037139892578125, 0.021148681640625, -0.0002429485321044922, 0.007610321044921875, -0.0192413330078125, 0.0047760009765625, 0.0095977783203125, -0.01439666748046875, -0.0228729248046875, 0.0083160400390625, 0.03509521484375, 0.0516357421875, -0.01064300537109375, 0.031158447265625, 0.0064544677734375, 0.054107666015625, -0.051513671875, 0.006755828857421875, -0.03729248046875, -0.0251312255859375, -0.01450347900390625, 0.019134521484375, -0.008270263671875, -0.0205841064453125, 0.0205841064453125, -0.0113525390625, 0.00861358642578125, 0.003692626953125, 0.09765625, 0.037353515625, -0.03485107421875, -0.0275115966796875, -0.046783447265625, 0.08160400390625, -0.0760498046875, 0.01143646240234375, 0.033721923828125, 0.0177001953125, -0.01129913330078125, -0.0457763671875, -0.050994873046875, -0.033721923828125, -0.00801849365234375, -0.00923919677734375, -0.0160369873046875, 0.006160736083984375, 0.01491546630859375, 0.0283966064453125, -0.0310821533203125, -0.00201416015625, -0.01544952392578125, -0.013519287109375, 0.04229736328125, 0.00632476806640625, 0.0242156982421875, -0.0123138427734375, 0.00029850006103515625, -0.00890350341796875, -0.05078125, 0.01788330078125, 0.039031982421875, 0.034271240234375, -0.0229644775390625, 0.04443359375, -0.022796630859375, 0.0673828125, -0.00014400482177734375, 0.0002560615539550781, 0.0312347412109375, -0.0183868408203125, -0.02203369140625, -0.01041412353515625, 0.06256103515625, 
0.00151824951171875, 0.0209808349609375, 0.0095062255859375, -0.024444580078125, 0.01280975341796875, 0.0177001953125, -0.07879638671875, -0.016448974609375, 0.0174102783203125, -0.044097900390625, -0.0107879638671875, -0.0162506103515625, -0.03076171875, -0.009002685546875, -0.0236358642578125, 0.04534912109375, -0.04266357421875, -0.00576019287109375, 0.0260162353515625, 0.01220703125, 0.012298583984375, 0.011322021484375, -0.0574951171875, 0.021209716796875, 0.0224456787109375, 0.053802490234375, -0.013824462890625, -0.020599365234375, -0.0260009765625, -0.0096588134765625, -0.006290435791015625, 0.040252685546875, -0.046295166015625, -0.041748046875, 0.0164031982421875, -0.023590087890625, 0.00044274330139160156, -0.038299560546875, 0.05865478515625, -0.04443359375, 0.016876220703125, -0.019775390625, -0.040863037109375, -0.0321044921875, 0.01007080078125, -0.049835205078125, 0.08203125, 0.01204681396484375, -0.075439453125, 0.0186309814453125, -0.0611572265625, -0.044219970703125, -0.016387939453125, -0.002651214599609375, -0.04669189453125, -0.004425048828125, 0.0213775634765625, 0.02880859375, -0.03436279296875, 0.011810302734375, -0.0160064697265625, -0.0207366943359375, 0.025360107421875, -0.018218994140625, 0.0625, 0.0249176025390625, -0.032501220703125, 0.0196685791015625, -0.05950927734375, -0.003498077392578125, -0.01126861572265625, -0.0231170654296875, -0.01297760009765625, 0.0083770751953125, 0.0244598388671875, 0.0372314453125, 0.040130615234375, -0.043121337890625, 0.01360321044921875, -0.043853759765625, 0.029541015625, 0.035491943359375, -0.00940704345703125, 0.019012451171875, -0.04302978515625, 0.0460205078125, 0.018310546875, 0.01194000244140625, 0.0281829833984375, -0.0257110595703125, -0.08648681640625, -0.037567138671875, 0.041168212890625, 0.0238189697265625, -0.048553466796875, 0.05145263671875, -0.01043701171875, -0.064208984375, -0.059295654296875, -0.0022907257080078125, 0.018585205078125, 0.015625, 0.038970947265625, 
-0.0260467529296875, -0.04339599609375, -0.080078125, -0.034515380859375, 0.0005970001220703125, 0.01141357421875, 0.0184478759765625, 0.046234130859375, -0.046234130859375, 0.0809326171875, -0.038604736328125, -0.015625, -0.03326416015625, 0.03125, 0.0269622802734375, 0.041534423828125, 0.056854248046875, -0.05078125, -0.050537109375, -0.0138092041015625, -0.0189971923828125, -0.00046896934509277344, 0.01824951171875, -0.00969696044921875, 0.0362548828125, 0.0164031982421875, -0.06353759765625, 0.042022705078125, 0.048553466796875, -0.0239105224609375, 0.056732177734375, -0.00800323486328125, 0.003932952880859375, -0.0777587890625, 0.0016775131225585938, 0.0018148422241210938, 0.00449371337890625, -0.049072265625, 0.002368927001953125, -0.01300811767578125, 0.009613037109375, -0.024139404296875, 0.04339599609375, -0.01806640625, 0.0141143798828125, -0.0220947265625, -0.02288818359375, -0.0106048583984375, 0.0469970703125, -0.0090789794921875, 0.06707763671875, 0.04559326171875, -0.0479736328125, -0.00988006591796875, 0.0191497802734375, -0.04998779296875, -0.01043701171875, -0.06573486328125, 0.003978729248046875, -0.0018930435180664062, 0.00984954833984375, -0.07794189453125, 0.00005227327346801758, 0.018157958984375, -0.0291595458984375, 0.013702392578125, 0.00966644287109375, -0.050689697265625, -0.00949859619140625, -0.018829345703125, 0.041961669921875, 0.04669189453125, -0.01611328125, 0.06231689453125, 0.014984130859375, -0.0167999267578125, -0.06121826171875, -0.032684326171875, -0.016815185546875, -0.018341064453125, -0.03857421875, 0.0245361328125, -0.0233917236328125, -0.0115509033203125, -0.010589599609375, -0.004306793212890625, -0.005695343017578125, 0.01009368896484375, 0.034820556640625, 0.0287933349609375, -0.0007777214050292969, 0.01256561279296875, 0.0188751220703125, 0.006641387939453125, -0.00940704345703125, 0.00211334228515625, 0.06756591796875, -0.0198516845703125, -0.01053619384765625, -0.036712646484375, 0.0284423828125, 
0.041839599609375, -0.043243408203125, 0.092041015625, 0.05865478515625, -0.0141448974609375, 0.001377105712890625, -0.03729248046875, -0.01042938232421875, -0.038482666015625, 0.036346435546875, -0.033416748046875, -0.0693359375, 0.031463623046875, 0.0108795166015625, 0.01041412353515625, 0.05035400390625, 0.0634765625, 0.003173828125, 0.09442138671875, 0.061492919921875, -0.034088134765625, 0.04052734375, -0.0261993408203125, 0.031219482421875, -0.040985107421875, 0.004962921142578125, -0.0665283203125, -0.025970458984375, -0.055999755859375, -0.01336669921875, 0.021728515625, 0.0118865966796875, -0.054840087890625, 0.0182647705078125, -0.0162200927734375, -0.005615234375, 0.0496826171875, -0.00794219970703125, 0.00640869140625, 0.0107269287109375, -0.003955841064453125, -0.00091552734375, -0.046295166015625, -0.0582275390625, 0.0888671875, 0.01416015625, 0.048431396484375, 0.017181396484375, 0.061065673828125, 0.0250701904296875, 0.021331787109375, -0.05859375, 0.035125732421875, -0.0111083984375, -0.04302978515625, -0.0257568359375, -0.0144500732421875, -0.07415771484375, 0.0127410888671875, -0.0196685791015625, -0.041656494140625, 0.00905609130859375, 0.00011229515075683594, -0.029510498046875, -0.00891876220703125, -0.041961669921875, 0.06427001953125, 0.00487518310546875, 0.007720947265625, -0.037994384765625, -0.0494384765625, -0.002285003662109375, -0.005207061767578125, 0.019195556640625, -0.015106201171875, -0.0015277862548828125, 0.0662841796875, -0.04473876953125, 0.06329345703125, -0.0006742477416992188, 0.0001474618911743164, 0.0268707275390625, 0.010772705078125, 0.03277587890625, -0.007282257080078125, -0.0229339599609375, 0.0309906005859375, 0.01319122314453125, -0.04925537109375, -0.03216552734375, 0.048309326171875, -0.10992431640625, -0.044921875, -0.037139892578125, -0.064453125, -0.0207977294921875, 0.00823974609375, 0.032501220703125, 0.034393310546875, -0.00371551513671875, 0.005939483642578125, 0.0106964111328125, -0.0168304443359375, 
0.032806396484375, 0.04144287109375, -0.009918212890625, -0.0012788772583007812, 0.058990478515625, 0.0149078369140625, 0.0160064697265625, 0.018035888671875, 0.00009161233901977539, -0.00557708740234375, -0.053924560546875, -0.0237579345703125, 0.02099609375, -0.044403076171875, -0.01374053955078125, -0.058319091796875, -0.0179443359375, -0.0318603515625, 0.005115509033203125, -0.016754150390625, -0.03167724609375, -0.0364990234375, 0.005767822265625, 0.041168212890625, 0.044647216796875, 0.0141143798828125, 0.039154052734375, -0.061370849609375, 0.00707244873046875, 0.01085662841796875, 0.00506591796875, 0.0004949569702148438, -0.060638427734375, -0.036529541015625, 0.0309600830078125, -0.0270233154296875, -0.055389404296875, 0.04827880859375, 0.0133819580078125, 0.02227783203125, 0.0188446044921875, 0.0063018798828125, 0.03533935546875, -0.032379150390625, 0.07232666015625, -0.00025582313537597656, -0.08319091796875, 0.0258026123046875, -0.023345947265625, 0.0203704833984375, 0.044158935546875, 0.0557861328125, -0.0357666015625, -0.0274200439453125, -0.041412353515625, -0.058380126953125, 0.051544189453125, 0.0312347412109375, 0.006450653076171875, 0.00030994415283203125, 0.023345947265625, 0.0212554931640625, 0.0374755859375, -0.05035400390625, -0.05023193359375, -0.023040771484375, -0.042877197265625, -0.027923583984375, -0.026519775390625, -0.005863189697265625, -0.0364990234375, 0.0718994140625, 0.007793426513671875, 0.01377105712890625, 0.0006270408630371094, -0.0212554931640625, 0.0069732666015625, 0.0231170654296875, 0.02972412109375, 0.0357666015625, -0.0168304443359375, -0.0117950439453125, 0.00884246826171875, -0.039154052734375, 0.0002892017364501953, 0.0251007080078125, -0.0187225341796875, 0.005359649658203125, 0.01983642578125, 0.0706787109375, -0.0123138427734375, -0.061798095703125, 0.031585693359375, 0.004528045654296875, -0.005832672119140625, -0.0174713134765625, 0.004001617431640625, 0.0019664764404296875, 0.011199951171875, 
0.01367950439453125, 0.0191192626953125, 0.022857666015625, -0.034271240234375, -0.00852203369140625, 0.0347900390625, -0.037994384765625, -0.0156707763671875, 0.017822265625, -0.0015821456909179688, -0.0279083251953125, 0.03887939453125, -0.019195556640625, -0.04388427734375, 0.0479736328125, 0.0255584716796875, 0.046417236328125, -0.0147857666015625, 0.03131103515625, 0.0185394287109375, 0.0088348388671875, 0.003173828125, 0.0338134765625, 0.0075531005859375, -0.072265625, -0.02984619140625, -0.0706787109375, -0.0168609619140625, 0.0013418197631835938, -0.049224853515625, 0.01776123046875, -0.02642822265625, -0.0167999267578125, 0.0004582405090332031, 0.0146331787109375, -0.0640869140625, 0.027130126953125, -0.0074310302734375, 0.057586669921875, -0.04925537109375, 0.082763671875, 0.06640625, -0.045867919921875, -0.0758056640625, 0.0074920654296875, -0.02972412109375, -0.054718017578125, 0.057861328125, 0.0139007568359375, -0.024658203125, 0.007541656494140625, -0.046478271484375, -0.08203125, 0.04827880859375, 0.0161590576171875, -0.029693603515625, 0.0181732177734375, 0.0174560546875, 0.05133056640625, -0.03472900390625, 0.0311737060546875, 0.05584716796875, 0.0209503173828125, -0.01029205322265625, -0.06878662109375, 0.004352569580078125, -0.0227508544921875, -0.0020294189453125, 0.0187530517578125, -0.04974365234375, 0.09259033203125, -0.005321502685546875, -0.00025725364685058594, 0.0305023193359375, 0.0582275390625, 0.005687713623046875, -0.0182647705078125, 0.038238525390625, 0.038543701171875, 0.03826904296875, 0.00731658935546875, 0.07958984375, -0.04302978515625, 0.033599853515625, 0.08380126953125, -0.01358795166015625, 0.0777587890625, 0.03509521484375, -0.0262298583984375, 0.08172607421875, 0.053619384765625, 0.01361083984375, 0.04425048828125, 0.0184173583984375, -0.022186279296875, -0.028045654296875, -0.01508331298828125, -0.037841796875, 0.043426513671875, 0.0279541015625, -0.01474761962890625, -0.0027370452880859375, 0.005573272705078125, 
0.00403594970703125, 0.00005060434341430664, -0.0157928466796875, 0.031280517578125, -0.0008912086486816406, -0.048828125, 0.06842041015625, 0.024383544921875, 0.061614990234375, -0.048004150390625, 0.0174407958984375, -0.002132415771484375, 0.006816864013671875, -0.00777435302734375, -0.0290374755859375, 0.00738525390625, -0.01137542724609375, -0.0171966552734375, 0.01482391357421875, 0.04217529296875, -0.0284423828125, -0.048858642578125, 0.0197601318359375, 0.042327880859375, 0.0019969940185546875, -0.0174560546875, -0.06500244140625, 0.01186370849609375, 0.01018524169921875, -0.03179931640625, 0.0296478271484375, 0.0159149169921875, -0.0035648345947265625, 0.050140380859375, 0.034088134765625, -0.0000864863395690918, -0.004413604736328125, 0.0161590576171875, 0.07281494140625, -0.04962158203125, -0.0300140380859375, -0.0679931640625, 0.034820556640625, 0.006549835205078125, -0.03271484375, 0.05731201171875, 0.03192138671875, 0.050689697265625, -0.01800537109375, 0.06689453125, -0.006855010986328125, 0.046905517578125, -0.0162506103515625, 0.055023193359375, -0.048126220703125, 0.0028934478759765625, -0.0193023681640625, -0.06512451171875, -0.00809478759765625, 0.046905517578125, -0.04034423828125, 0.0198974609375, 0.047515869140625, 0.0733642578125, 0.0131072998046875, 0.00616455078125, 0.00421142578125, 0.032012939453125, 0.01171112060546875, 0.028656005859375, 0.05059814453125, -0.03143310546875, 0.037811279296875, -0.01226806640625, -0.01425933837890625, -0.024139404296875, -0.044769287109375, -0.08642578125, -0.05316162109375, -0.0201263427734375, -0.026275634765625, -0.011016845703125, 0.09112548828125, 0.035247802734375, -0.06951904296875, -0.0352783203125, -0.004390716552734375, -0.00667572021484375, -0.0133819580078125, -0.0127410888671875, 0.04241943359375, -0.03662109375, -0.0526123046875, -0.0074920654296875, -0.00043702125549316406, 0.0116119384765625, -0.0129852294921875, -0.01134490966796875, -0.031463623046875, 0.0254974365234375, 
0.04278564453125, 0.006923675537109375, -0.0582275390625, -0.03271484375, -0.01091766357421875, -0.0175628662109375, 0.002300262451171875, 0.043304443359375, -0.060089111328125, 0.004306793212890625, 0.0168609619140625, 0.041961669921875, 0.0423583984375, -0.0237579345703125, 0.040924072265625, -0.039794921875, 0.02972412109375, 0.016815185546875, 0.03472900390625, 0.02154541015625, -0.01107025146484375, 0.01200103759765625, -0.01239776611328125, -0.046966552734375, -0.0447998046875, 0.035369873046875, -0.0728759765625, -0.028533935546875, 0.08795166015625, -0.0119476318359375, 0.002231597900390625, -0.01277923583984375, -0.047637939453125, 0.04022216796875, -0.037841796875, 0.06878662109375, 0.0721435546875, -0.0011510848999023438, 0.024322509765625, -0.0252685546875, 0.0592041015625, 0.06634521484375, -0.042572021484375, -0.0178985595703125, 0.019622802734375, 0.0184478759765625, 0.007129669189453125, 0.0293731689453125, 0.0147552490234375, 0.011199951171875, -0.0243377685546875, 0.02545166015625, 0.01558685302734375, -0.007099151611328125, -0.020263671875, 0.00707244873046875, 0.02435302734375, -0.01397705078125 ] ]
ICBU-NPU/FashionGPT-70B-V1.1
2023-09-22T08:32:59.000Z
[ "transformers", "pytorch", "llama", "text-generation", "en", "dataset:Open-Orca/OpenOrca", "dataset:openchat/openchat_sharegpt4_dataset", "dataset:LDJnr/Puffin", "dataset:ehartford/samantha-data", "dataset:OpenAssistant/oasst1", "dataset:jondurbin/airoboros-gpt4-1.4.1", "arxiv:2306.02707", "license:llama2", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
ICBU-NPU
null
null
ICBU-NPU/FashionGPT-70B-V1.1
41
8,454
transformers
2023-09-17T08:53:56
--- language: - en library_name: transformers license: llama2 datasets: - Open-Orca/OpenOrca - openchat/openchat_sharegpt4_dataset - LDJnr/Puffin - ehartford/samantha-data - OpenAssistant/oasst1 - jondurbin/airoboros-gpt4-1.4.1 --- # FashionGPT-V1.1 ### Introduction This is a llama-2-70B model combined with multiple adapters via appropriate methods. <br> ### Dataset Here is the list of datasets used: * Orca-style 40K dataset. This dataset is a filtered subset of [OpenOrca-GPT4](<https://huggingface.co/datasets/Open-Orca/OpenOrca/blob/main/1M-GPT4-Augmented.parquet>) and [airoboros-gpt4-1.4.1](<https://huggingface.co/datasets/jondurbin/airoboros-gpt4-1.4.1>). * [Samantha](<https://huggingface.co/datasets/ehartford/samantha-data>) made by Eric Hartford and cleaned by us, about 6.5K samples. * [oasst1](<https://huggingface.co/datasets/OpenAssistant/oasst1>) cleaned by us, containing about 80K samples. * Misconception data generated using misconception data generator in [airoboros_repo](<https://github.com/jondurbin/airoboros>), about 0.5K samples. * GPT-4 Multi-turn Conversations. This dataset is a filtered mixture of [openchat sharegpt4](https://huggingface.co/datasets/openchat/openchat_sharegpt4_dataset/) and [Puffin](<https://huggingface.co/datasets/LDJnr/Puffin>), containing about 8K samples. <br> ### Training * We train our adapters with [jondurbin's forked QLoRA repo](<https://github.com/jondurbin/qlora>) * We add multi-turn conversational data support from [fastchat](<https://github.com/lm-sys/FastChat/blob/main/fastchat/train/train.py>), with minor modifications. * We use bash shell script similar to [airoboros-70b-gpt4-1.4.1](<https://gist.github.com/jondurbin/87fc040b92a3073125ed516b04bc6e19>) to train our adapters. * We combine multiple adapters to llama-2-70B with more novel strategy than our [v1 model](<https://huggingface.co/ICBU-NPU/FashionGPT-70B-V1>). The details of combining multiple adapters will be unveiled in our upcoming paper. 
<br> ### Prompt Template ``` ### System: {System} ### User: {User} ### Assistant: {Assistant} ``` <br> ### Evaluation | Metric | Value | |-----------------------|-------| | ARC (25-shot) | 71.76 | | HellaSwag (10-shot) | 88.20 | | MMLU (5-shot) | 70.99 | | TruthfulQA (0-shot) | 65.26 | | Avg. | 74.05 | <br> ### license disclaimer This model is bound by the license & usage restrictions of the original Llama-2 model. And comes with no warranty or gurantees of any kind. <br> ### Limitations & Biases Llama 2 and fine-tuned variants are a new technology that carries risks with use. Testing conducted to date has been in English, and has not covered, nor could it cover all scenarios. For these reasons, as with all LLMs, Llama 2 and any fine-tuned varient's potential outputs cannot be predicted in advance, and the model may in some instances produce inaccurate, biased or other objectionable responses to user prompts. Therefore, before deploying any applications of Llama 2 variants, developers should perform safety testing and tuning tailored to their specific applications of the model. 
Please see the Responsible Use Guide available at <https://ai.meta.com/llama/responsible-use-guide/> <br> ### Citiation: * airoboros: <https://github.com/jondurbin/airoboros> * samantha: <https://erichartford.com/meet-samantha> ```bibtex @misc{mukherjee2023orca, title={Orca: Progressive Learning from Complex Explanation Traces of GPT-4}, author={Subhabrata Mukherjee and Arindam Mitra and Ganesh Jawahar and Sahaj Agarwal and Hamid Palangi and Ahmed Awadallah}, year={2023}, eprint={2306.02707}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` ```bibtex @article{dettmers2023qlora, title={QLoRA: Efficient Finetuning of Quantized LLMs}, author={Dettmers, Tim and Pagnoni, Artidoro and Holtzman, Ari and Zettlemoyer, Luke}, journal={arXiv preprint arXiv:2305.14314}, year={2023} } ``` ```bibtex @software{touvron2023llama2, title={Llama 2: Open Foundation and Fine-Tuned Chat Models}, author={Hugo Touvron, Louis Martin, Kevin Stone, Peter Albert, Amjad Almahairi, Yasmine Babaei, Nikolay Bashlykov, Soumya Batra, Prajjwal Bhargava, Shruti Bhosale, Dan Bikel, Lukas Blecher, Cristian Canton Ferrer, Moya Chen, Guillem Cucurull, David Esiobu, Jude Fernandes, Jeremy Fu, Wenyin Fu, Brian Fuller, Cynthia Gao, Vedanuj Goswami, Naman Goyal, Anthony Hartshorn, Saghar Hosseini, Rui Hou, Hakan Inan, Marcin Kardas, Viktor Kerkez Madian Khabsa, Isabel Kloumann, Artem Korenev, Punit Singh Koura, Marie-Anne Lachaux, Thibaut Lavril, Jenya Lee, Diana Liskovich, Yinghai Lu, Yuning Mao, Xavier Martinet, Todor Mihaylov, Pushkar Mishra, Igor Molybog, Yixin Nie, Andrew Poulton, Jeremy Reizenstein, Rashi Rungta, Kalyan Saladi, Alan Schelten, Ruan Silva, Eric Michael Smith, Ranjan Subramanian, Xiaoqing Ellen Tan, Binh Tang, Ross Taylor, Adina Williams, Jian Xiang Kuan, Puxin Xu , Zheng Yan, Iliyan Zarov, Yuchen Zhang, Angela Fan, Melanie Kambadur, Sharan Narang, Aurelien Rodriguez, Robert Stojnic, Sergey Edunov, Thomas Scialom}, year={2023} } ``` ```bibtex @software{openchat, title = {{OpenChat: 
Advancing Open-source Language Models with Imperfect Data}}, author = {Wang, Guan and Cheng, Sijie and Yu, Qiying and Liu, Changling}, doi = {10.5281/zenodo.8105775}, url = {https://github.com/imoneoi/openchat}, version = {pre-release}, year = {2023}, month = {7}, } ```
5,485
[ [ -0.0233154296875, -0.0631103515625, 0.020111083984375, 0.0183563232421875, -0.0258941650390625, -0.0011997222900390625, -0.005458831787109375, -0.0452880859375, 0.01441192626953125, 0.01499176025390625, -0.049407958984375, -0.036865234375, -0.035736083984375, -0.01348114013671875, -0.014068603515625, 0.07733154296875, -0.00919342041015625, -0.032501220703125, 0.005496978759765625, -0.0152587890625, -0.03662109375, -0.033233642578125, -0.046783447265625, -0.0167694091796875, 0.0148162841796875, 0.0289764404296875, 0.051055908203125, 0.03619384765625, 0.0270843505859375, 0.0225982666015625, -0.0216827392578125, 0.02618408203125, -0.047882080078125, -0.0137176513671875, 0.017425537109375, -0.042144775390625, -0.063720703125, 0.00371551513671875, 0.028411865234375, 0.0141754150390625, -0.034515380859375, 0.017333984375, 0.0110931396484375, 0.032928466796875, -0.0278167724609375, 0.01273345947265625, -0.044036865234375, -0.007137298583984375, -0.02593994140625, -0.0050506591796875, -0.0022487640380859375, -0.0218963623046875, -0.0021114349365234375, -0.059356689453125, -0.00872039794921875, -0.004383087158203125, 0.09112548828125, 0.01454925537109375, -0.03814697265625, -0.01256561279296875, -0.043365478515625, 0.05364990234375, -0.079833984375, 0.0229644775390625, 0.01025390625, 0.026153564453125, -0.03326416015625, -0.06219482421875, -0.060089111328125, 0.003376007080078125, -0.0033626556396484375, 0.017425537109375, -0.035186767578125, -0.00026345252990722656, 0.006866455078125, 0.0251312255859375, -0.041900634765625, 0.0172882080078125, -0.037689208984375, -0.0176239013671875, 0.039825439453125, 0.0088653564453125, 0.00830841064453125, -0.00457000732421875, -0.043670654296875, -0.006603240966796875, -0.060302734375, 0.0184326171875, 0.0325927734375, 0.00756072998046875, -0.04827880859375, 0.0361328125, -0.0210418701171875, 0.04254150390625, 0.00249481201171875, -0.0256500244140625, 0.0401611328125, -0.05169677734375, -0.0185394287109375, -0.01715087890625, 
0.072998046875, 0.033477783203125, 0.007740020751953125, 0.01849365234375, -0.004009246826171875, -0.005367279052734375, -0.006931304931640625, -0.0728759765625, -0.00991058349609375, 0.02813720703125, -0.0286407470703125, -0.031524658203125, -0.0111541748046875, -0.060791015625, -0.01213836669921875, -0.00803375244140625, 0.023040771484375, -0.01739501953125, -0.041046142578125, 0.0209808349609375, 0.0104827880859375, 0.03680419921875, 0.020111083984375, -0.055694580078125, 0.022216796875, 0.036468505859375, 0.07635498046875, 0.004001617431640625, -0.017730712890625, -0.004486083984375, -0.005123138427734375, -0.027679443359375, 0.0635986328125, -0.0178985595703125, -0.02117919921875, -0.020843505859375, 0.0027294158935546875, 0.003787994384765625, -0.031982421875, 0.049346923828125, -0.02783203125, 0.014404296875, -0.0218048095703125, -0.024383544921875, -0.02935791015625, 0.0106964111328125, -0.02386474609375, 0.08013916015625, 0.007049560546875, -0.045745849609375, 0.01203155517578125, -0.04931640625, -0.00827789306640625, -0.0205535888671875, -0.0007543563842773438, -0.045684814453125, -0.026641845703125, 0.0278472900390625, 0.028900146484375, -0.0312347412109375, 0.019317626953125, -0.0404052734375, -0.0279541015625, 0.0154266357421875, -0.0302276611328125, 0.06939697265625, 0.0229034423828125, -0.0465087890625, 0.01532745361328125, -0.054534912109375, -0.002376556396484375, 0.0323486328125, -0.03155517578125, -0.01238250732421875, 0.00304412841796875, -0.0108642578125, 0.0196990966796875, 0.0211181640625, -0.029815673828125, 0.024261474609375, -0.02593994140625, 0.05999755859375, 0.0655517578125, -0.0009407997131347656, 0.0168304443359375, -0.036590576171875, 0.031646728515625, 0.015106201171875, 0.04095458984375, -0.0065155029296875, -0.0523681640625, -0.0693359375, -0.0225677490234375, 0.00015652179718017578, 0.049530029296875, -0.0469970703125, 0.0396728515625, -0.01250457763671875, -0.060455322265625, -0.03436279296875, 0.004001617431640625, 
0.029205322265625, 0.032440185546875, 0.027984619140625, -0.02447509765625, -0.03997802734375, -0.054595947265625, -0.00876617431640625, -0.0240631103515625, 0.0012102127075195312, 0.043792724609375, 0.0273590087890625, -0.036590576171875, 0.069580078125, -0.04168701171875, -0.018585205078125, -0.0033550262451171875, -0.0181884765625, 0.02618408203125, 0.038330078125, 0.061187744140625, -0.04302978515625, -0.0223541259765625, -0.012176513671875, -0.05120849609375, -0.01061248779296875, -0.0032558441162109375, -0.0322265625, 0.017120361328125, 0.0187530517578125, -0.05511474609375, 0.035919189453125, 0.05511474609375, -0.037353515625, 0.041778564453125, -0.00213623046875, 0.00229644775390625, -0.08721923828125, -0.0005888938903808594, 0.01100921630859375, -0.006259918212890625, -0.055908203125, -0.0116729736328125, -0.006870269775390625, 0.0131378173828125, -0.03857421875, 0.05645751953125, -0.029937744140625, 0.000005662441253662109, -0.0058135986328125, 0.0201263427734375, -0.00458526611328125, 0.051788330078125, -0.031890869140625, 0.061553955078125, 0.042266845703125, -0.034332275390625, 0.02227783203125, 0.0244140625, -0.0223846435546875, 0.03411865234375, -0.0712890625, 0.027130126953125, 0.0131988525390625, 0.03717041015625, -0.09637451171875, -0.01380157470703125, 0.0487060546875, -0.03277587890625, 0.0082244873046875, -0.01189422607421875, -0.026092529296875, -0.033660888671875, -0.037628173828125, 0.0321044921875, 0.041290283203125, -0.047943115234375, 0.026275634765625, 0.0335693359375, -0.0034885406494140625, -0.037841796875, -0.049896240234375, -0.00856781005859375, -0.036468505859375, -0.05645751953125, 0.0209503173828125, -0.02655029296875, -0.00426483154296875, -0.013031005859375, -0.0145416259765625, -0.01242828369140625, 0.0113372802734375, 0.01806640625, 0.033538818359375, -0.01483154296875, -0.0101165771484375, 0.0124664306640625, -0.0006284713745117188, -0.01403045654296875, -0.0203094482421875, 0.047271728515625, -0.02496337890625, 
-0.018951416015625, -0.046661376953125, -0.0006895065307617188, 0.03839111328125, -0.01140594482421875, 0.0489501953125, 0.05328369140625, -0.0203857421875, 0.0199127197265625, -0.04998779296875, -0.01250457763671875, -0.043365478515625, 0.0291900634765625, -0.0233917236328125, -0.07257080078125, 0.06060791015625, 0.01061248779296875, 0.024200439453125, 0.040802001953125, 0.0426025390625, 0.01483917236328125, 0.051300048828125, 0.03515625, -0.005527496337890625, 0.04754638671875, -0.023681640625, 0.01438140869140625, -0.076904296875, -0.0292510986328125, -0.02886962890625, -0.0219879150390625, -0.061004638671875, -0.043609619140625, 0.0328369140625, 0.0225982666015625, -0.040252685546875, 0.0234527587890625, -0.04412841796875, 0.0128326416015625, 0.041656494140625, 0.019989013671875, 0.0133209228515625, 0.0023517608642578125, 0.00699615478515625, 0.00762176513671875, -0.0513916015625, -0.03607177734375, 0.09765625, 0.032012939453125, 0.0552978515625, 0.0235137939453125, 0.046630859375, -0.018280029296875, 0.01140594482421875, -0.051361083984375, 0.039520263671875, 0.00433349609375, -0.048370361328125, -0.0184783935546875, -0.0307769775390625, -0.0814208984375, 0.023681640625, 0.00051116943359375, -0.053802490234375, 0.034881591796875, 0.01544952392578125, -0.031646728515625, 0.0167694091796875, -0.055450439453125, 0.04949951171875, -0.0177764892578125, -0.014739990234375, -0.005886077880859375, -0.055267333984375, 0.047637939453125, -0.0012369155883789062, 0.022216796875, -0.0184478759765625, -0.01445770263671875, 0.07049560546875, -0.041595458984375, 0.07818603515625, -0.01390838623046875, -0.015106201171875, 0.05010986328125, -0.00994110107421875, 0.04779052734375, 0.01067352294921875, 0.000053763389587402344, 0.046844482421875, -0.0029315948486328125, -0.0254974365234375, -0.0201873779296875, 0.049102783203125, -0.079833984375, -0.042449951171875, -0.0197296142578125, -0.022216796875, 0.0088653564453125, 0.019927978515625, 0.014923095703125, 0.021697998046875, 
0.0161895751953125, 0.00547027587890625, 0.031280517578125, -0.0251312255859375, 0.0268096923828125, 0.045623779296875, -0.00498199462890625, -0.045684814453125, 0.059539794921875, 0.021209716796875, 0.035675048828125, 0.0085906982421875, 0.0189666748046875, -0.01275634765625, -0.035552978515625, -0.0220947265625, 0.03387451171875, -0.047210693359375, -0.02642822265625, -0.034576416015625, -0.0172882080078125, -0.0299530029296875, 0.005847930908203125, -0.036712646484375, -0.034027099609375, -0.054168701171875, -0.00962066650390625, 0.047882080078125, 0.03826904296875, -0.0093841552734375, 0.03240966796875, -0.036773681640625, 0.017333984375, 0.02972412109375, 0.02294921875, 0.0015048980712890625, -0.057037353515625, -0.00258636474609375, 0.0230560302734375, -0.047149658203125, -0.0484619140625, 0.023193359375, 0.023162841796875, 0.0489501953125, 0.0251922607421875, 0.003376007080078125, 0.07958984375, -0.02655029296875, 0.074462890625, 0.0160980224609375, -0.06390380859375, 0.0489501953125, -0.038970947265625, 0.00691986083984375, 0.018402099609375, 0.0230255126953125, -0.035797119140625, -0.0168304443359375, -0.05584716796875, -0.07904052734375, 0.059112548828125, 0.0251007080078125, 0.0096588134765625, 0.007495880126953125, 0.049346923828125, 0.0034084320068359375, 0.00991058349609375, -0.059173583984375, -0.039642333984375, -0.032440185546875, -0.0037517547607421875, -0.00038623809814453125, -0.033172607421875, -0.01561737060546875, -0.017730712890625, 0.0455322265625, -0.0100250244140625, 0.051910400390625, 0.017181396484375, 0.0220794677734375, -0.00940704345703125, 0.01026153564453125, 0.074462890625, 0.04461669921875, -0.01134490966796875, -0.0103759765625, 0.0151519775390625, -0.04534912109375, 0.0013761520385742188, 0.012664794921875, 0.002056121826171875, -0.01312255859375, 0.025848388671875, 0.08544921875, 0.0127105712890625, -0.030853271484375, 0.037750244140625, -0.00641632080078125, -0.022216796875, -0.021209716796875, 0.01421356201171875, 
0.00606536865234375, 0.0439453125, 0.02947998046875, 0.0100250244140625, -0.0011310577392578125, -0.03826904296875, 0.0020961761474609375, 0.0258941650390625, -0.0016918182373046875, -0.040924072265625, 0.0635986328125, 0.0158538818359375, -0.01390838623046875, 0.0355224609375, -0.00824737548828125, -0.034423828125, 0.05120849609375, 0.039031982421875, 0.051025390625, -0.028778076171875, -0.0016412734985351562, 0.035858154296875, 0.0235443115234375, -0.01806640625, 0.0287017822265625, 0.0072784423828125, -0.054443359375, -0.01302337646484375, -0.0309295654296875, -0.03094482421875, 0.016448974609375, -0.03179931640625, 0.032806396484375, -0.019073486328125, -0.0149078369140625, -0.024627685546875, 0.005245208740234375, -0.04742431640625, -0.0087432861328125, -0.007450103759765625, 0.059173583984375, -0.050018310546875, 0.059112548828125, 0.0341796875, -0.0181427001953125, -0.08355712890625, -0.0037746429443359375, 0.0158233642578125, -0.073974609375, 0.0257720947265625, 0.007244110107421875, 0.0034122467041015625, 0.01270294189453125, -0.047882080078125, -0.08673095703125, 0.11962890625, 0.03125, -0.050537109375, -0.0007748603820800781, -0.00417327880859375, 0.028350830078125, -0.0009965896606445312, 0.03985595703125, 0.04644775390625, 0.0330810546875, 0.0185394287109375, -0.0948486328125, 0.0218048095703125, -0.0216217041015625, 0.0014410018920898438, -0.0027618408203125, -0.09210205078125, 0.0802001953125, -0.0255126953125, -0.0155487060546875, 0.0141143798828125, 0.044189453125, 0.046142578125, 0.0185546875, 0.0303955078125, 0.041839599609375, 0.061370849609375, -0.0141754150390625, 0.070556640625, -0.013916015625, 0.0439453125, 0.0633544921875, -0.00031304359436035156, 0.05682373046875, 0.0240936279296875, -0.0430908203125, 0.039886474609375, 0.07037353515625, 0.01190185546875, 0.04803466796875, 0.0034961700439453125, -0.004383087158203125, 0.0026874542236328125, 0.01434326171875, -0.047698974609375, 0.0301971435546875, 0.031951904296875, -0.001125335693359375, 
-0.0045013427734375, -0.007343292236328125, 0.02801513671875, -0.037841796875, 0.000675201416015625, 0.050201416015625, 0.01378631591796875, -0.031524658203125, 0.087890625, -0.00640106201171875, 0.079345703125, -0.051605224609375, -0.00275421142578125, -0.0372314453125, 0.005908966064453125, -0.02557373046875, -0.051300048828125, 0.0038242340087890625, -0.008544921875, 0.0150604248046875, 0.0106964111328125, 0.0496826171875, -0.017822265625, -0.0228729248046875, 0.0189208984375, 0.0174713134765625, 0.0205078125, 0.014251708984375, -0.0560302734375, 0.019744873046875, 0.0102996826171875, -0.055694580078125, 0.01995849609375, 0.039794921875, -0.0012521743774414062, 0.059967041015625, 0.0594482421875, -0.006481170654296875, 0.0191802978515625, -0.014312744140625, 0.09368896484375, -0.032257080078125, -0.0345458984375, -0.06866455078125, 0.046478271484375, 0.005710601806640625, -0.036956787109375, 0.051544189453125, 0.030487060546875, 0.059814453125, 0.0191802978515625, 0.0523681640625, -0.01800537109375, 0.026611328125, -0.0355224609375, 0.04437255859375, -0.0467529296875, 0.040069580078125, -0.019744873046875, -0.0560302734375, -0.01151275634765625, 0.06243896484375, -0.0120849609375, 0.01293182373046875, 0.035980224609375, 0.0706787109375, -0.00020301342010498047, -0.0182342529296875, -0.00035834312438964844, 0.033294677734375, 0.06170654296875, 0.055816650390625, 0.042083740234375, -0.054412841796875, 0.0596923828125, -0.0244293212890625, -0.030670166015625, -0.032135009765625, -0.061309814453125, -0.06097412109375, -0.03851318359375, -0.0260009765625, -0.027984619140625, 0.00412750244140625, 0.058197021484375, 0.058380126953125, -0.041290283203125, -0.0218963623046875, -0.0129241943359375, 0.007556915283203125, -0.0215911865234375, -0.016204833984375, 0.022216796875, 0.0015888214111328125, -0.048095703125, 0.025421142578125, 0.001125335693359375, 0.03411865234375, -0.012481689453125, -0.0195465087890625, -0.0125885009765625, 0.005710601806640625, 0.039306640625, 
0.0278472900390625, -0.061676025390625, -0.02606201171875, -0.0068206787109375, -0.0011320114135742188, 0.0202789306640625, 0.0156402587890625, -0.0523681640625, 0.017486572265625, 0.02239990234375, 0.0132598876953125, 0.04193115234375, -0.002025604248046875, 0.0096588134765625, -0.042236328125, 0.031158447265625, -0.00885009765625, 0.0170745849609375, 0.01103973388671875, -0.02838134765625, 0.05084228515625, 0.0239410400390625, -0.052520751953125, -0.0703125, 0.0013446807861328125, -0.09521484375, -0.0054931640625, 0.10955810546875, -0.0235137939453125, -0.003814697265625, 0.007534027099609375, -0.0281219482421875, 0.0307159423828125, -0.045684814453125, 0.05963134765625, 0.03509521484375, -0.0103302001953125, -0.016265869140625, -0.058685302734375, 0.024017333984375, 0.00212860107421875, -0.076904296875, -0.0037670135498046875, 0.02276611328125, 0.040374755859375, 0.006816864013671875, 0.06292724609375, -0.002593994140625, 0.0085601806640625, -0.0161285400390625, 0.002521514892578125, -0.016876220703125, 0.002475738525390625, -0.0191650390625, -0.015716552734375, -0.0019168853759765625, -0.0204925537109375 ] ]
upstage/llama-30b-instruct-2048
2023-08-03T22:02:40.000Z
[ "transformers", "pytorch", "llama", "text-generation", "upstage", "instruct", "instruction", "en", "dataset:sciq", "dataset:metaeval/ScienceQA_text_only", "dataset:GAIR/lima", "dataset:Open-Orca/OpenOrca", "dataset:openbookqa", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
upstage
null
null
upstage/llama-30b-instruct-2048
103
8,453
transformers
2023-07-13T12:06:18
--- datasets: - sciq - metaeval/ScienceQA_text_only - GAIR/lima - Open-Orca/OpenOrca - openbookqa language: - en tags: - upstage - llama - instruct - instruction pipeline_tag: text-generation --- # LLaMa-30b-instruct-2048 model card ## Model Details * **Developed by**: [Upstage](https://en.upstage.ai) * **Backbone Model**: [LLaMA](https://github.com/facebookresearch/llama/tree/llama_v1) * **Variations**: It has different model parameter sizes and sequence lengths: [30B/1024](https://huggingface.co/upstage/llama-30b-instruct), [30B/2048](https://huggingface.co/upstage/llama-30b-instruct-2048), [65B/1024](https://huggingface.co/upstage/llama-65b-instruct) * **Language(s)**: English * **Library**: [HuggingFace Transformers](https://github.com/huggingface/transformers) * **License**: This model is under a **Non-commercial** Bespoke License and governed by the Meta license. You should only use this repository if you have been granted access to the model by filling out [this form](https://docs.google.com/forms/d/e/1FAIpQLSfqNECQnMkycAp2jP4Z9TFX0cGR4uf7b_fBxjY_OjhJILlKGA/viewform), but have either lost your copy of the weights or encountered issues converting them to the Transformers format * **Where to send comments**: Instructions on how to provide feedback or comments on a model can be found by opening an issue in the [Hugging Face community's model repository](https://huggingface.co/upstage/llama-30b-instruct-2048/discussions) * **Contact**: For questions and comments about the model, please email [contact@upstage.ai](mailto:contact@upstage.ai) ## Dataset Details ### Used Datasets - [openbookqa](https://huggingface.co/datasets/openbookqa) - [sciq](https://huggingface.co/datasets/sciq) - [Open-Orca/OpenOrca](https://huggingface.co/datasets/Open-Orca/OpenOrca) - [metaeval/ScienceQA_text_only](https://huggingface.co/datasets/metaeval/ScienceQA_text_only) - [GAIR/lima](https://huggingface.co/datasets/GAIR/lima) - No other data was used except for the dataset mentioned 
above ### Prompt Template ``` ### System: {System} ### User: {User} ### Assistant: {Assistant} ``` ## Usage - Tested on A100 80GB - Our model can handle up to 10k+ input tokens, thanks to the `rope_scaling` option ```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer, TextStreamer tokenizer = AutoTokenizer.from_pretrained("upstage/llama-30b-instruct-2048") model = AutoModelForCausalLM.from_pretrained( "upstage/llama-30b-instruct-2048", device_map="auto", torch_dtype=torch.float16, load_in_8bit=True, rope_scaling={"type": "dynamic", "factor": 2} # allows handling of longer inputs ) prompt = "### User:\nThomas is healthy, but he has to go to the hospital. What could be the reasons?\n\n### Assistant:\n" inputs = tokenizer(prompt, return_tensors="pt").to(model.device) del inputs["token_type_ids"] streamer = TextStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True) output = model.generate(**inputs, streamer=streamer, use_cache=True, max_new_tokens=float('inf')) output_text = tokenizer.decode(output[0], skip_special_tokens=True) ``` ## Hardware and Software * **Hardware**: We utilized an A100x8 * 1 for training our model * **Training Factors**: We fine-tuned this model using a combination of the [DeepSpeed library](https://github.com/microsoft/DeepSpeed) and the [HuggingFace Trainer](https://huggingface.co/docs/transformers/main_classes/trainer) / [HuggingFace Accelerate](https://huggingface.co/docs/accelerate/index) ## Evaluation Results ### Overview - We conducted a performance evaluation based on the tasks being evaluated on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). 
We evaluated our model on four benchmark datasets, which include `ARC-Challenge`, `HellaSwag`, `MMLU`, and `TruthfulQA` We used the [lm-evaluation-harness repository](https://github.com/EleutherAI/lm-evaluation-harness), specifically commit [b281b0921b636bc36ad05c0b0b0763bd6dd43463](https://github.com/EleutherAI/lm-evaluation-harness/tree/b281b0921b636bc36ad05c0b0b0763bd6dd43463) - We used [MT-bench](https://github.com/lm-sys/FastChat/tree/main/fastchat/llm_judge), a set of challenging multi-turn open-ended questions, to evaluate the models ### Main Results | Model | H4(Avg) | ARC | HellaSwag | MMLU | TruthfulQA | | MT_Bench | |--------------------------------------------------------------------|----------|----------|----------|------|----------|-|-------------| | **[Llama-2-70b-instruct-v2](https://huggingface.co/upstage/Llama-2-70b-instruct-v2)**(Ours, Open LLM Leaderboard) | **73** | **71.1** | **87.9** | **70.6** | **62.2** | | **7.44063** | | [Llama-2-70b-instruct](https://huggingface.co/upstage/Llama-2-70b-instruct) (Ours, Open LLM Leaderboard) | 72.3 | 70.9 | 87.5 | 69.8 | 61 | | 7.24375 | | [llama-65b-instruct](https://huggingface.co/upstage/llama-65b-instruct) (Ours, Open LLM Leaderboard) | 69.4 | 67.6 | 86.5 | 64.9 | 58.8 | | | | Llama-2-70b-hf | 67.3 | 67.3 | 87.3 | 69.8 | 44.9 | | | | [llama-30b-instruct-2048](https://huggingface.co/upstage/llama-30b-instruct-2048) (***Ours***, ***Open LLM Leaderboard***) | 67.0 | 64.9 | 84.9 | 61.9 | 56.3 | | | | [llama-30b-instruct](https://huggingface.co/upstage/llama-30b-instruct) (Ours, Open LLM Leaderboard) | 65.2 | 62.5 | 86.2 | 59.4 | 52.8 | | | | llama-65b | 64.2 | 63.5 | 86.1 | 63.9 | 43.4 | | | | falcon-40b-instruct | 63.4 | 61.6 | 84.3 | 55.4 | 52.5 | | | ### Scripts for H4 Score Reproduction - Prepare evaluation environments: ``` # clone the repository git clone https://github.com/EleutherAI/lm-evaluation-harness.git # check out the specific commit git checkout b281b0921b636bc36ad05c0b0b0763bd6dd43463 # 
change to the repository directory cd lm-evaluation-harness ``` ## Ethical Issues ### Ethical Considerations - There were no ethical issues involved, as we did not include the benchmark test set or the training set in the model's training process ## Contact Us ### Why Upstage LLM? - [Upstage](https://en.upstage.ai)'s LLM research has yielded remarkable results. As of August 1st, our 70B model has reached the top spot in openLLM rankings, marking itself as the current leading performer globally. Recognizing the immense potential in implementing private LLM to actual businesses, we invite you to easily apply private LLM and fine-tune it with your own data. For a seamless and tailored solution, please do not hesitate to reach out to us. ► [click here to contact](https://www.upstage.ai/private-llm?utm_source=huggingface&utm_medium=link&utm_campaign=privatellm)
6,598
[ [ -0.0279388427734375, -0.03900146484375, 0.02203369140625, 0.034423828125, -0.033294677734375, 0.0034427642822265625, -0.00614166259765625, -0.0391845703125, 0.0281524658203125, 0.01290130615234375, -0.051055908203125, -0.0460205078125, -0.052093505859375, 0.00568389892578125, -0.0230560302734375, 0.08160400390625, -0.0186920166015625, -0.0102996826171875, -0.0109710693359375, -0.027587890625, -0.0197601318359375, -0.037567138671875, -0.044708251953125, -0.04119873046875, 0.0208282470703125, 0.02001953125, 0.047271728515625, 0.039093017578125, 0.040618896484375, 0.024871826171875, -0.033660888671875, 0.0159149169921875, -0.037139892578125, -0.0189056396484375, 0.017547607421875, -0.032562255859375, -0.06756591796875, -0.0010509490966796875, 0.048126220703125, 0.020660400390625, -0.033233642578125, 0.042724609375, 0.0080718994140625, 0.052276611328125, -0.019561767578125, 0.015380859375, -0.030517578125, 0.00792694091796875, -0.019805908203125, 0.00792694091796875, -0.0009093284606933594, -0.02850341796875, -0.008270263671875, -0.035614013671875, -0.00948333740234375, 0.000164031982421875, 0.0888671875, 0.0235443115234375, -0.0101776123046875, -0.0116424560546875, -0.025909423828125, 0.043975830078125, -0.06207275390625, 0.0208282470703125, 0.019927978515625, 0.0084991455078125, -0.005847930908203125, -0.05511474609375, -0.040313720703125, -0.0168304443359375, -0.003482818603515625, 0.0162811279296875, -0.02886962890625, -0.01354217529296875, 0.0204925537109375, 0.045013427734375, -0.02667236328125, 0.0261993408203125, -0.02667236328125, -0.00870513916015625, 0.07135009765625, 0.0268096923828125, 0.01274871826171875, -0.01558685302734375, -0.04290771484375, -0.025054931640625, -0.0518798828125, 0.03668212890625, 0.0244598388671875, 0.00548553466796875, -0.047607421875, 0.05218505859375, -0.0157928466796875, 0.03216552734375, 0.0234222412109375, -0.0162200927734375, 0.041229248046875, -0.0316162109375, -0.034271240234375, -0.01013946533203125, 0.06927490234375, 
0.04217529296875, 0.00534820556640625, 0.0178680419921875, -0.017578125, -0.0007634162902832031, -0.00731658935546875, -0.07598876953125, 0.0003376007080078125, 0.022247314453125, -0.036041259765625, -0.0306396484375, -0.00997161865234375, -0.05572509765625, -0.0265960693359375, -0.0012989044189453125, 0.0265655517578125, -0.0174560546875, -0.0194854736328125, 0.0157318115234375, 0.0107879638671875, 0.0276641845703125, 0.030609130859375, -0.0439453125, 0.018646240234375, 0.0242767333984375, 0.063720703125, -0.00592041015625, -0.0199737548828125, -0.0082244873046875, -0.0127105712890625, -0.006320953369140625, 0.04266357421875, -0.01430511474609375, -0.035369873046875, -0.022186279296875, 0.00882720947265625, -0.0240020751953125, -0.040374755859375, 0.054229736328125, -0.021209716796875, 0.012969970703125, -0.020599365234375, -0.037200927734375, -0.01763916015625, 0.022186279296875, -0.032470703125, 0.10101318359375, 0.01371002197265625, -0.05548095703125, 0.0107574462890625, -0.04669189453125, -0.003108978271484375, -0.01313018798828125, -0.004467010498046875, -0.061614990234375, -0.006191253662109375, 0.030517578125, 0.048980712890625, -0.03302001953125, 0.014495849609375, -0.023590087890625, -0.0301513671875, 0.01363372802734375, -0.00856781005859375, 0.06744384765625, 0.004276275634765625, -0.034088134765625, 0.023345947265625, -0.06915283203125, -0.00392913818359375, 0.05224609375, -0.0295867919921875, 0.006580352783203125, -0.0228424072265625, -0.00922393798828125, 0.014190673828125, 0.0229034423828125, -0.0457763671875, 0.03057861328125, -0.02423095703125, 0.02972412109375, 0.071044921875, -0.00787353515625, 0.0188140869140625, -0.044158935546875, 0.037841796875, 0.003383636474609375, 0.0289459228515625, 0.01125335693359375, -0.052520751953125, -0.07098388671875, -0.031524658203125, 0.01117706298828125, 0.03857421875, -0.0176239013671875, 0.0386962890625, -0.01422882080078125, -0.0616455078125, -0.06298828125, 0.01384735107421875, 0.037139892578125, 
0.0413818359375, 0.0310211181640625, -0.042724609375, -0.03778076171875, -0.061614990234375, 0.006542205810546875, -0.0109100341796875, -0.0025463104248046875, 0.036407470703125, 0.05975341796875, -0.033233642578125, 0.043670654296875, -0.037200927734375, -0.03125, -0.0182647705078125, -0.01306915283203125, 0.0462646484375, 0.035797119140625, 0.04901123046875, -0.03912353515625, -0.0265960693359375, -0.01253509521484375, -0.06494140625, -0.0013599395751953125, 0.003650665283203125, -0.015899658203125, 0.021331787109375, 0.005496978759765625, -0.0677490234375, 0.050262451171875, 0.044189453125, -0.033782958984375, 0.049560546875, -0.01216888427734375, 0.005756378173828125, -0.07696533203125, 0.011627197265625, -0.004909515380859375, -0.0003693103790283203, -0.0276031494140625, 0.01134490966796875, -0.00027489662170410156, 0.006366729736328125, -0.039642333984375, 0.053985595703125, -0.030303955078125, -0.00835418701171875, 0.004138946533203125, 0.006832122802734375, 0.004940032958984375, 0.053497314453125, -0.01230621337890625, 0.04833984375, 0.036529541015625, -0.032440185546875, 0.0257720947265625, 0.036529541015625, -0.0286102294921875, 0.03887939453125, -0.062042236328125, 0.017578125, 0.00800323486328125, 0.0273895263671875, -0.07196044921875, -0.02587890625, 0.03277587890625, -0.03143310546875, 0.0301055908203125, 0.006824493408203125, -0.03985595703125, -0.051361083984375, -0.044097900390625, 0.02264404296875, 0.051055908203125, -0.050384521484375, 0.02880859375, 0.0250396728515625, 0.013824462890625, -0.04656982421875, -0.04864501953125, -0.0192413330078125, -0.0304718017578125, -0.062225341796875, 0.03302001953125, -0.0233917236328125, -0.00952911376953125, -0.007568359375, -0.01678466796875, 0.00891876220703125, 0.0102386474609375, 0.033721923828125, 0.03106689453125, -0.00572967529296875, -0.021240234375, -0.00739288330078125, 0.0019121170043945312, 0.0013551712036132812, 0.0139312744140625, 0.0413818359375, -0.028564453125, -0.023956298828125, 
-0.054412841796875, -0.00988006591796875, 0.0430908203125, -0.00841522216796875, 0.05731201171875, 0.044921875, -0.0232391357421875, 0.003749847412109375, -0.0423583984375, -0.0036029815673828125, -0.034759521484375, 0.01540374755859375, -0.0268707275390625, -0.06451416015625, 0.061279296875, 0.00846099853515625, 0.00885009765625, 0.04376220703125, 0.06341552734375, -0.00464630126953125, 0.06646728515625, 0.039947509765625, -0.011260986328125, 0.0213470458984375, -0.045379638671875, -0.0120697021484375, -0.0855712890625, -0.0369873046875, -0.02545166015625, -0.0260009765625, -0.046661376953125, -0.039093017578125, 0.03179931640625, 0.01885986328125, -0.04278564453125, 0.03460693359375, -0.051361083984375, 0.006458282470703125, 0.026153564453125, 0.0265350341796875, 0.0191802978515625, -0.002651214599609375, -0.03009033203125, 0.001987457275390625, -0.04376220703125, -0.02630615234375, 0.08746337890625, 0.046905517578125, 0.049835205078125, 0.0033416748046875, 0.057861328125, 0.0066680908203125, 0.042633056640625, -0.032989501953125, 0.050567626953125, 0.015350341796875, -0.042724609375, -0.00762939453125, -0.0233306884765625, -0.06512451171875, 0.027587890625, -0.0091094970703125, -0.072998046875, 0.008697509765625, 0.0005698204040527344, -0.03277587890625, 0.032867431640625, -0.031585693359375, 0.054779052734375, -0.025848388671875, -0.034423828125, 0.0011777877807617188, -0.051361083984375, 0.0450439453125, -0.004180908203125, 0.013824462890625, -0.02313232421875, -0.004673004150390625, 0.06927490234375, -0.05120849609375, 0.06646728515625, -0.01226043701171875, -0.00737762451171875, 0.0294189453125, -0.0057830810546875, 0.048858642578125, 0.0016613006591796875, -0.023681640625, 0.036285400390625, -0.01268768310546875, -0.020599365234375, -0.0245513916015625, 0.049407958984375, -0.078857421875, -0.03961181640625, -0.03704833984375, -0.028717041015625, -0.0019741058349609375, 0.00031447410583496094, 0.021514892578125, 0.0113525390625, 0.00443267822265625, 
0.009613037109375, 0.03179931640625, -0.02838134765625, 0.0390625, 0.0280609130859375, -0.0272979736328125, -0.040557861328125, 0.049957275390625, 0.00310516357421875, 0.01247406005859375, 0.007537841796875, 0.009796142578125, -0.034210205078125, -0.03741455078125, -0.056304931640625, 0.04071044921875, -0.0423583984375, -0.032379150390625, -0.040313720703125, -0.023284912109375, -0.017364501953125, -0.0027008056640625, -0.03839111328125, -0.023468017578125, -0.03704833984375, -0.021331787109375, 0.040557861328125, 0.0528564453125, -0.00952911376953125, 0.026519775390625, -0.04449462890625, 0.01227569580078125, 0.00913238525390625, 0.0276641845703125, 0.0104522705078125, -0.07177734375, -0.006992340087890625, -0.0012054443359375, -0.039642333984375, -0.0625, 0.03814697265625, 0.004978179931640625, 0.036102294921875, 0.0101470947265625, -0.01340484619140625, 0.073974609375, -0.0171966552734375, 0.0595703125, 0.0233154296875, -0.056610107421875, 0.034820556640625, -0.0195159912109375, 0.0146484375, 0.033447265625, 0.039886474609375, -0.0171966552734375, -0.0145721435546875, -0.053924560546875, -0.07037353515625, 0.0533447265625, 0.035247802734375, -0.00390625, 0.026519775390625, 0.034088134765625, -0.00725555419921875, 0.0225982666015625, -0.07379150390625, -0.037353515625, -0.0009107589721679688, -0.00547027587890625, -0.003398895263671875, -0.02099609375, -0.01340484619140625, -0.04864501953125, 0.05133056640625, 0.0020351409912109375, 0.0311431884765625, 0.0242156982421875, -0.00030541419982910156, -0.0234222412109375, -0.001392364501953125, 0.0430908203125, 0.042083740234375, -0.0277252197265625, -0.033416748046875, 0.0299072265625, -0.034515380859375, 0.008575439453125, 0.02996826171875, -0.01345062255859375, -0.019287109375, 0.0280914306640625, 0.06768798828125, 0.030609130859375, -0.03497314453125, 0.03887939453125, -0.01102447509765625, -0.01413726806640625, -0.0245513916015625, -0.002895355224609375, 0.0257110595703125, 0.031524658203125, 0.0191802978515625, 
-0.005279541015625, -0.0149383544921875, -0.030975341796875, 0.00965118408203125, 0.0316162109375, -0.0162353515625, -0.0306396484375, 0.07220458984375, 0.01181793212890625, -0.018280029296875, 0.045989990234375, -0.0074310302734375, -0.035858154296875, 0.0596923828125, 0.0230712890625, 0.053375244140625, -0.022735595703125, 0.0025634765625, 0.039215087890625, 0.022705078125, -0.0006837844848632812, 0.0304412841796875, -0.00399017333984375, -0.042633056640625, -0.01502227783203125, -0.0819091796875, -0.0234832763671875, 0.0166168212890625, -0.0482177734375, 0.025665283203125, -0.0282440185546875, -0.016326904296875, -0.0120086669921875, 0.0266265869140625, -0.064697265625, 0.01194000244140625, 0.0201263427734375, 0.0797119140625, -0.04498291015625, 0.056365966796875, 0.04351806640625, -0.047760009765625, -0.07598876953125, -0.02642822265625, 0.01299285888671875, -0.09454345703125, 0.0377197265625, 0.02337646484375, 0.001617431640625, -0.0018548965454101562, -0.04931640625, -0.07049560546875, 0.11834716796875, 0.02001953125, -0.0484619140625, 0.0036296844482421875, 0.00791168212890625, 0.04559326171875, -0.0269927978515625, 0.050079345703125, 0.039886474609375, 0.037506103515625, 0.0146026611328125, -0.087158203125, 0.02691650390625, -0.0223541259765625, -0.00666046142578125, -0.0005202293395996094, -0.086181640625, 0.07061767578125, -0.03179931640625, -0.00844573974609375, 0.02520751953125, 0.050262451171875, 0.05816650390625, 0.035125732421875, 0.032440185546875, 0.07635498046875, 0.054840087890625, -0.0032558441162109375, 0.0911865234375, -0.01371002197265625, 0.043548583984375, 0.05865478515625, -0.0201568603515625, 0.0518798828125, 0.0166473388671875, -0.035980224609375, 0.044281005859375, 0.0712890625, -0.005123138427734375, 0.0171051025390625, 0.0253143310546875, -0.004665374755859375, -0.0048370361328125, -0.005916595458984375, -0.045379638671875, 0.03192138671875, 0.01181793212890625, -0.0224761962890625, -0.0091094970703125, -0.01406097412109375, 
0.027374267578125, -0.015289306640625, -0.0255279541015625, 0.03790283203125, 0.0191650390625, -0.0310211181640625, 0.0657958984375, -0.0031757354736328125, 0.06671142578125, -0.04461669921875, 0.0073699951171875, -0.037567138671875, 0.020721435546875, -0.0264739990234375, -0.05352783203125, 0.005645751953125, 0.005779266357421875, 0.005916595458984375, -0.0202484130859375, 0.04302978515625, -0.00852203369140625, -0.04156494140625, 0.045135498046875, 0.034942626953125, 0.02740478515625, 0.002857208251953125, -0.0865478515625, 0.03173828125, 0.00559234619140625, -0.05767822265625, 0.044219970703125, 0.00954437255859375, -0.0009288787841796875, 0.0638427734375, 0.048126220703125, -0.0034084320068359375, 0.0034694671630859375, -0.0003027915954589844, 0.0894775390625, -0.049407958984375, -0.01177978515625, -0.06707763671875, 0.05218505859375, -0.00616455078125, -0.042266845703125, 0.06451416015625, 0.039398193359375, 0.062042236328125, 0.0122528076171875, 0.021026611328125, -0.01374053955078125, 0.0257720947265625, -0.0232086181640625, 0.061798095703125, -0.066162109375, 0.019287109375, -0.0216827392578125, -0.055328369140625, -0.00705718994140625, 0.035858154296875, -0.00884246826171875, 0.0119476318359375, 0.03173828125, 0.062042236328125, 0.007404327392578125, -0.00222015380859375, 0.0029697418212890625, 0.0299072265625, 0.01203155517578125, 0.057861328125, 0.05908203125, -0.049957275390625, 0.037139892578125, -0.0408935546875, -0.02166748046875, -0.0193023681640625, -0.05712890625, -0.0638427734375, -0.0287628173828125, -0.021453857421875, -0.033294677734375, -0.01036834716796875, 0.06939697265625, 0.051177978515625, -0.043365478515625, -0.035247802734375, 0.01238250732421875, 0.0096435546875, -0.00991058349609375, -0.0185394287109375, 0.039215087890625, 0.01593017578125, -0.053741455078125, 0.0226898193359375, 0.007129669189453125, 0.013641357421875, -0.015869140625, -0.0196380615234375, -0.0240020751953125, -0.0010519027709960938, 0.042724609375, 
0.0227813720703125, -0.049560546875, -0.0198211669921875, 0.0042877197265625, -0.0081939697265625, 0.0200653076171875, 0.0156707763671875, -0.04278564453125, 0.00911712646484375, 0.0192413330078125, 0.03179931640625, 0.06298828125, 0.01024627685546875, 0.004180908203125, -0.041046142578125, 0.0158233642578125, 0.0081939697265625, 0.03448486328125, 0.0279083251953125, -0.029937744140625, 0.06329345703125, 0.029754638671875, -0.0526123046875, -0.08294677734375, -0.004772186279296875, -0.090087890625, -0.0006771087646484375, 0.078369140625, -0.0175933837890625, -0.038787841796875, 0.03497314453125, -0.0167236328125, 0.0163421630859375, -0.032135009765625, 0.05126953125, 0.038330078125, -0.035614013671875, -0.00835418701171875, -0.036346435546875, 0.02691650390625, 0.032135009765625, -0.06317138671875, -0.0208740234375, 0.0168304443359375, 0.029052734375, 0.007724761962890625, 0.06451416015625, -0.010406494140625, 0.008819580078125, -0.020263671875, 0.0150604248046875, -0.0155792236328125, -0.0005931854248046875, -0.034698486328125, 0.004352569580078125, -0.00991058349609375, -0.0168914794921875 ] ]
Brillibits/Instruct_Llama70B_Dolly15k
2023-09-15T18:30:50.000Z
[ "transformers", "pytorch", "llama", "text-generation", "en", "dataset:databricks/databricks-dolly-15k", "license:llama2", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
Brillibits
null
null
Brillibits/Instruct_Llama70B_Dolly15k
0
8,447
transformers
2023-09-14T16:24:41
--- license: llama2 datasets: - databricks/databricks-dolly-15k language: - en pipeline_tag: text-generation --- # Instruct_Llama70B_Dolly15k Fine-tuned from Llama-2-70B,used Dolly15k for the dataset. 80% for training, 15% validation, 5% test. Trained for 1.5 epochs using QLora. Trained with 1024 context window. # Model Details * **Trained by**: trained by [Brillibits](https://www.youtube.com/channel/UCAq9THVHhPK0Zv4Xi-88Jmg). * **Model type:** **Instruct_Llama70B_Dolly15k** is an auto-regressive language model based on the Llama 2 transformer architecture. * **Language(s)**: English * **License for Instruct_Llama70B_Dolly15ks**: llama2 license # Prompting ## Prompt Template With Context ``` Write a 10-line poem about a given topic Input: The topic is about racecars Output: ``` ## Prompt Template Without Context ``` Who was the was the second president of the United States? Output: ``` ## Professional Assistance This model and other models like it are great, but where LLMs hold the most promise is when they are applied on custom data to automate a wide variety of tasks If you have a dataset and want to see if you might be able to apply that data to automate some tasks, and you are looking for professional assistance, contact me [here](mailto:blakecmallory@gmail.com)
1,300
[ [ -0.013641357421875, -0.056304931640625, 0.01593017578125, 0.045806884765625, -0.03369140625, 0.00817108154296875, -0.0012865066528320312, -0.032470703125, 0.01137542724609375, 0.0472412109375, -0.05401611328125, -0.041351318359375, -0.045806884765625, 0.0098724365234375, -0.0357666015625, 0.06396484375, -0.01296234130859375, 0.0020847320556640625, -0.0245208740234375, 0.00394439697265625, -0.05462646484375, -0.026153564453125, -0.057373046875, -0.0272064208984375, 0.0175323486328125, 0.03424072265625, 0.0306549072265625, 0.05084228515625, 0.034637451171875, 0.0254058837890625, -0.016845703125, 0.00962066650390625, -0.045318603515625, 0.00455474853515625, -0.01137542724609375, -0.04229736328125, -0.0487060546875, 0.0006380081176757812, 0.036285400390625, 0.010467529296875, -0.0230255126953125, 0.0482177734375, 0.004489898681640625, 0.0286865234375, -0.0265045166015625, 0.0181732177734375, -0.057586669921875, 0.007450103759765625, -0.0235443115234375, 0.0012683868408203125, -0.0178985595703125, -0.03411865234375, -0.0228424072265625, -0.033966064453125, 0.004009246826171875, 0.007472991943359375, 0.0833740234375, 0.0384521484375, -0.035064697265625, -0.01739501953125, -0.035247802734375, 0.06195068359375, -0.05072021484375, 0.01085662841796875, 0.0587158203125, 0.033111572265625, -0.0221710205078125, -0.058502197265625, -0.052703857421875, -0.020843505859375, -0.01367950439453125, 0.0095367431640625, -0.018798828125, -0.0119171142578125, 0.01236724853515625, 0.0251312255859375, -0.0487060546875, 0.0285797119140625, -0.03857421875, -0.01593017578125, 0.061431884765625, 0.034271240234375, 0.0115509033203125, -0.0125732421875, -0.04443359375, -0.010040283203125, -0.039794921875, 0.0234527587890625, 0.04034423828125, 0.00626373291015625, -0.033966064453125, 0.052520751953125, -0.0306549072265625, 0.05322265625, -0.0030498504638671875, -0.020782470703125, 0.023773193359375, -0.024871826171875, -0.0270538330078125, -0.0254669189453125, 0.0640869140625, 
0.0205535888671875, 0.0107879638671875, 0.0021820068359375, -0.0281524658203125, 0.00418853759765625, 0.018218994140625, -0.042388916015625, -0.008087158203125, 0.01153564453125, -0.041717529296875, -0.034912109375, -0.0023097991943359375, -0.04974365234375, -0.01158905029296875, -0.0307464599609375, 0.0265960693359375, -0.0096435546875, -0.0098419189453125, 0.00022935867309570312, 0.0172271728515625, 0.034423828125, 0.020660400390625, -0.0626220703125, 0.02294921875, 0.043304443359375, 0.0498046875, 0.004520416259765625, -0.0225677490234375, -0.0289154052734375, -0.0007619857788085938, -0.037994384765625, 0.068115234375, -0.02276611328125, -0.01702880859375, 0.00009649991989135742, 0.02386474609375, 0.002552032470703125, -0.044830322265625, 0.05126953125, -0.033966064453125, 0.025360107421875, -0.0087890625, -0.0248870849609375, -0.0213623046875, 0.0222015380859375, -0.03857421875, 0.1070556640625, 0.024383544921875, -0.0298004150390625, 0.0187530517578125, -0.025848388671875, -0.02105712890625, 0.0016088485717773438, -0.0038394927978515625, -0.045654296875, -0.00952911376953125, 0.00525665283203125, 0.04949951171875, -0.0299072265625, 0.03564453125, -0.01071929931640625, -0.00907135009765625, 0.014984130859375, -0.0157318115234375, 0.0626220703125, 0.0245208740234375, -0.0244903564453125, 0.0007801055908203125, -0.07965087890625, -0.00689697265625, 0.02850341796875, -0.054779052734375, 0.007503509521484375, -0.0289154052734375, 0.01654052734375, 0.01236724853515625, 0.0330810546875, -0.03424072265625, 0.0270233154296875, -0.022857666015625, -0.00191497802734375, 0.058929443359375, -0.01849365234375, 0.01338958740234375, -0.040557861328125, 0.07427978515625, -0.0123291015625, 0.00542449951171875, 0.00563812255859375, -0.050933837890625, -0.0673828125, -0.0037250518798828125, 0.01113128662109375, 0.06689453125, -0.037384033203125, 0.039154052734375, 0.00179290771484375, -0.04217529296875, -0.04791259765625, 0.0289154052734375, 0.038238525390625, 0.04766845703125, 
0.037017822265625, -0.0253143310546875, -0.05792236328125, -0.0755615234375, -0.00882720947265625, -0.00726318359375, 0.0058135986328125, 0.01641845703125, 0.03656005859375, -0.0284271240234375, 0.05560302734375, -0.03118896484375, -0.032318115234375, -0.041259765625, -0.00757598876953125, 0.021392822265625, 0.02874755859375, 0.045196533203125, -0.0228118896484375, -0.04156494140625, -0.01241302490234375, -0.04864501953125, -0.0231781005859375, -0.002101898193359375, -0.03424072265625, 0.022430419921875, 0.0281524658203125, -0.045166015625, 0.039886474609375, 0.04736328125, -0.02874755859375, 0.0223236083984375, -0.0185089111328125, -0.0224761962890625, -0.1065673828125, -0.00763702392578125, -0.021697998046875, -0.0154571533203125, -0.03607177734375, -0.0162811279296875, -0.01345062255859375, 0.0161285400390625, -0.0181427001953125, 0.043792724609375, -0.0237884521484375, 0.004047393798828125, -0.030242919921875, -0.00429534912109375, -0.0079803466796875, 0.0220947265625, -0.00916290283203125, 0.04254150390625, 0.03131103515625, -0.037506103515625, 0.05474853515625, 0.055419921875, -0.036651611328125, 0.03125, -0.054229736328125, 0.0242919921875, -0.01983642578125, 0.0205535888671875, -0.090576171875, -0.0231781005859375, 0.01329803466796875, -0.0196075439453125, 0.0227508544921875, -0.004314422607421875, -0.0294647216796875, -0.032562255859375, -0.01189422607421875, 0.003070831298828125, 0.04339599609375, -0.043304443359375, 0.01922607421875, 0.030975341796875, -0.0024318695068359375, -0.054962158203125, -0.07525634765625, 0.00818634033203125, -0.02288818359375, -0.0372314453125, 0.0107879638671875, 0.0036830902099609375, -0.01061248779296875, -0.0096435546875, 0.0262451171875, -0.00862884521484375, 0.01140594482421875, 0.021881103515625, 0.0182952880859375, -0.0010175704956054688, 0.01849365234375, -0.0016241073608398438, -0.0308837890625, -0.0089874267578125, 0.01149749755859375, 0.060943603515625, -0.0006628036499023438, -0.012176513671875, -0.07232666015625, 
0.006122589111328125, 0.0165557861328125, -0.004810333251953125, 0.06292724609375, 0.075927734375, -0.0179901123046875, -0.0133514404296875, -0.0435791015625, -0.0033283233642578125, -0.0377197265625, 0.0347900390625, -0.025177001953125, -0.0439453125, 0.0261993408203125, 0.0026798248291015625, -0.01294708251953125, 0.03857421875, 0.0648193359375, -0.01003265380859375, 0.037933349609375, 0.05169677734375, -0.027008056640625, 0.036376953125, -0.05224609375, 0.007122039794921875, -0.058258056640625, -0.04736328125, -0.042266845703125, -0.0328369140625, -0.05377197265625, -0.023895263671875, 0.00836181640625, -0.001346588134765625, -0.034149169921875, 0.03826904296875, -0.039520263671875, 0.01274871826171875, 0.05096435546875, 0.004497528076171875, 0.019134521484375, 0.0035839080810546875, 0.015350341796875, 0.022247314453125, -0.05328369140625, -0.07037353515625, 0.077392578125, 0.038909912109375, 0.07421875, 0.0089874267578125, 0.04217529296875, 0.01085662841796875, 0.0107574462890625, -0.05157470703125, 0.044158935546875, -0.006954193115234375, -0.0455322265625, -0.01678466796875, 0.00003784894943237305, -0.0865478515625, -0.004329681396484375, -0.012664794921875, -0.04583740234375, 0.0085601806640625, 0.0286407470703125, -0.0219268798828125, 0.0118408203125, -0.03302001953125, 0.060455322265625, -0.0227508544921875, 0.00489044189453125, -0.010009765625, -0.038055419921875, 0.0277557373046875, 0.0103607177734375, 0.0180511474609375, -0.028106689453125, -0.01230621337890625, 0.0615234375, -0.0294647216796875, 0.07598876953125, -0.006572723388671875, -0.00405120849609375, 0.031707763671875, 0.00774383544921875, 0.06060791015625, 0.002880096435546875, 0.00508880615234375, 0.00044989585876464844, -0.0036773681640625, -0.0254669189453125, -0.02093505859375, 0.036834716796875, -0.0740966796875, -0.036712646484375, -0.020111083984375, -0.027557373046875, -0.004901885986328125, -0.0158233642578125, 0.03497314453125, 0.0158233642578125, 0.0013189315795898438, 
0.0193023681640625, 0.010772705078125, -0.0196533203125, 0.044708251953125, 0.025909423828125, -0.0110626220703125, -0.03594970703125, 0.051910400390625, 0.0024356842041015625, -0.0011205673217773438, 0.030242919921875, 0.01546478271484375, -0.031707763671875, -0.0280914306640625, -0.0452880859375, 0.0200042724609375, -0.0406494140625, -0.0372314453125, -0.058258056640625, -0.01000213623046875, -0.02435302734375, -0.0032100677490234375, -0.039947509765625, -0.042694091796875, -0.052001953125, -0.0223541259765625, 0.060394287109375, 0.08526611328125, 0.003444671630859375, 0.05230712890625, -0.036102294921875, 0.0205078125, 0.038665771484375, 0.019500732421875, 0.0042572021484375, -0.07781982421875, -0.025238037109375, -0.01239776611328125, -0.0418701171875, -0.05767822265625, 0.037628173828125, 0.0156402587890625, 0.0511474609375, 0.020965576171875, -0.0162811279296875, 0.04779052734375, -0.0202789306640625, 0.06671142578125, -0.005481719970703125, -0.059356689453125, 0.04754638671875, -0.006587982177734375, 0.005451202392578125, 0.0223846435546875, 0.02642822265625, -0.0287322998046875, -0.0213165283203125, -0.039703369140625, -0.05926513671875, 0.054656982421875, 0.0165557861328125, 0.03936767578125, 0.025360107421875, 0.037109375, 0.01219940185546875, 0.014678955078125, -0.067138671875, -0.0158233642578125, -0.01947021484375, -0.043548583984375, -0.012420654296875, -0.052154541015625, -0.00516510009765625, -0.01727294921875, 0.06072998046875, 0.004253387451171875, 0.0236968994140625, -0.008514404296875, -0.0004780292510986328, -0.012451171875, 0.01262664794921875, 0.0570068359375, 0.04071044921875, 0.007129669189453125, 0.0020046234130859375, 0.041107177734375, -0.051971435546875, 0.0298614501953125, -0.009918212890625, -0.0197296142578125, -0.01500701904296875, 0.05072021484375, 0.07977294921875, -0.023284912109375, -0.03424072265625, 0.0206756591796875, 0.006801605224609375, 0.00437164306640625, -0.03863525390625, 0.01708984375, 0.0130615234375, 
0.0364990234375, 0.007053375244140625, -0.032501220703125, -0.01236724853515625, -0.032562255859375, -0.004734039306640625, 0.045318603515625, -0.0186309814453125, -0.03131103515625, 0.059051513671875, -0.00524139404296875, -0.045135498046875, 0.03570556640625, -0.0191192626953125, -0.02911376953125, 0.053802490234375, 0.04681396484375, 0.060150146484375, -0.032501220703125, 0.0205078125, 0.0384521484375, 0.034210205078125, 0.001491546630859375, 0.059722900390625, 0.0169219970703125, -0.05804443359375, -0.0151824951171875, -0.06549072265625, -0.02496337890625, 0.03216552734375, -0.037017822265625, 0.036346435546875, -0.0292816162109375, -0.01316070556640625, -0.0277099609375, 0.009307861328125, -0.055755615234375, 0.03289794921875, -0.0101776123046875, 0.06011962890625, -0.0712890625, 0.06585693359375, 0.0625, -0.0268096923828125, -0.0687255859375, -0.0197296142578125, -0.00565338134765625, -0.079345703125, 0.06292724609375, -0.0069732666015625, 0.008697509765625, -0.0035114288330078125, -0.06317138671875, -0.080322265625, 0.104736328125, 0.0208587646484375, -0.036224365234375, 0.03790283203125, 0.006011962890625, 0.044403076171875, -0.047454833984375, 0.011322021484375, 0.042694091796875, 0.02764892578125, 0.004344940185546875, -0.08416748046875, -0.0016117095947265625, -0.006855010986328125, 0.02093505859375, -0.02569580078125, -0.081298828125, 0.0703125, -0.01140594482421875, 0.01275634765625, 0.0260162353515625, 0.05218505859375, 0.02362060546875, 0.0304718017578125, 0.032989501953125, 0.06170654296875, 0.05670166015625, 0.005687713623046875, 0.07305908203125, -0.0140228271484375, 0.04071044921875, 0.07928466796875, 0.00757598876953125, 0.061767578125, 0.0167236328125, -0.047027587890625, 0.05816650390625, 0.058929443359375, -0.0130615234375, 0.0347900390625, 0.0175323486328125, -0.006465911865234375, -0.00980377197265625, -0.008148193359375, -0.0276641845703125, 0.0574951171875, 0.006435394287109375, -0.0307464599609375, 0.0012292861938476562, 
0.0036258697509765625, 0.0154266357421875, 0.003437042236328125, 0.0017709732055664062, 0.0582275390625, 0.0004208087921142578, -0.0263519287109375, 0.059173583984375, 0.0033817291259765625, 0.03955078125, -0.030914306640625, -0.0020465850830078125, -0.03857421875, 0.00980377197265625, -0.0204620361328125, -0.049652099609375, 0.0195159912109375, 0.00650787353515625, -0.00908660888671875, -0.01500701904296875, 0.045501708984375, -0.03564453125, -0.058013916015625, 0.0085296630859375, 0.0247955322265625, 0.026611328125, 0.022552490234375, -0.04669189453125, 0.0140228271484375, 0.004489898681640625, -0.034637451171875, 0.007617950439453125, 0.0202484130859375, -0.00017392635345458984, 0.07305908203125, 0.03485107421875, 0.01065826416015625, 0.00600433349609375, -0.0056610107421875, 0.06689453125, -0.04779052734375, -0.0278778076171875, -0.0487060546875, 0.04376220703125, 0.0105743408203125, -0.0360107421875, 0.04888916015625, 0.049346923828125, 0.06512451171875, -0.0234375, 0.05548095703125, -0.0028629302978515625, 0.060272216796875, -0.051361083984375, 0.041595458984375, -0.041046142578125, 0.01303863525390625, 0.0019283294677734375, -0.057586669921875, -0.01137542724609375, 0.0595703125, -0.01433563232421875, 0.01068878173828125, 0.03289794921875, 0.08111572265625, 0.005146026611328125, -0.02288818359375, 0.027587890625, 0.034637451171875, 0.0186920166015625, 0.0298614501953125, 0.06787109375, -0.04632568359375, 0.04296875, -0.0472412109375, -0.01003265380859375, -0.020233154296875, -0.058837890625, -0.0745849609375, -0.04620361328125, -0.021636962890625, -0.0308074951171875, -0.0197296142578125, 0.07598876953125, 0.03643798828125, -0.068359375, -0.043731689453125, 0.005645751953125, 0.01300811767578125, 0.01305389404296875, -0.01314544677734375, 0.0426025390625, -0.0273895263671875, -0.0528564453125, 0.0257415771484375, -0.0024127960205078125, 0.03533935546875, -0.0284423828125, -0.0017766952514648438, -0.0162506103515625, -0.005340576171875, 0.0380859375, 
-0.0045013427734375, -0.057891845703125, -0.00213623046875, 0.0010938644409179688, -0.012481689453125, 0.004085540771484375, 0.0316162109375, -0.056427001953125, 0.020294189453125, 0.0360107421875, 0.020233154296875, 0.041351318359375, 0.019073486328125, 0.022918701171875, -0.03436279296875, 0.033111572265625, 0.00838470458984375, 0.021392822265625, 0.0292816162109375, -0.04486083984375, 0.050262451171875, 0.032073974609375, -0.05322265625, -0.053680419921875, 0.01110076904296875, -0.07550048828125, 0.004985809326171875, 0.10186767578125, 0.0007190704345703125, -0.01479339599609375, -0.006336212158203125, -0.040069580078125, 0.0199432373046875, -0.01551055908203125, 0.06365966796875, 0.040557861328125, -0.0219879150390625, -0.0191192626953125, -0.054412841796875, 0.04998779296875, 0.01312255859375, -0.062744140625, -0.0300750732421875, 0.004619598388671875, 0.0269775390625, -0.02032470703125, 0.04681396484375, 0.0224151611328125, 0.004364013671875, -0.01342010498046875, 0.0247955322265625, -0.0002818107604980469, -0.01080322265625, -0.0292816162109375, -0.0013189315795898438, 0.006542205810546875, -0.03271484375 ] ]
BELLE-2/BELLE-Llama2-13B-chat-0.4M
2023-10-18T03:18:07.000Z
[ "transformers", "pytorch", "llama", "text-generation", "text2text-generation", "zh", "en", "license:llama2", "endpoints_compatible", "text-generation-inference", "region:us" ]
text2text-generation
BELLE-2
null
null
BELLE-2/BELLE-Llama2-13B-chat-0.4M
27
8,445
transformers
2023-07-26T06:01:54
--- license: llama2 tags: - text2text-generation pipeline_tag: text2text-generation language: - zh - en --- # Model Card for Model ID ## Welcome If you find this model helpful, please *like* this model and star us on https://github.com/LianjiaTech/BELLE ! ## Model description This model is obtained by fine-tuning the complete parameters using 0.4M Chinese instruction data on the original Llama2-13B-chat. We firmly believe that the original Llama2-chat exhibits commendable performance post Supervised Fine-Tuning (SFT) and Reinforcement Learning with Human Feedback (RLHF). Our pursuit continues to be the further enhancement of this model using Chinese instructional data for fine-tuning, with an aspiration to facilitate stable and high-quality Chinese language outputs. ## Use model Please note that the input should be formatted as follows in both **training** and **inference**. ``` python Human: \n{input}\n\nAssistant:\n ``` After you decrypt the files, BELLE-Llama2-13B-chat-0.4M can be easily loaded with AutoModelForCausalLM. ``` python from transformers import AutoModelForCausalLM, LlamaTokenizer import torch ckpt = '/path/to_finetuned_model/' device = torch.device('cuda') model = AutoModelForCausalLM.from_pretrained(ckpt).half().to(device) tokenizer = LlamaTokenizer.from_pretrained(ckpt) prompt = "Human: \n写一首中文歌曲,赞美大自然 \n\nAssistant: \n" input_ids = tokenizer(prompt, return_tensors="pt").input_ids.to(device) generate_ids = model.generate(input_ids, max_new_tokens=1024, do_sample=True, top_k=30, top_p=0.85, temperature=0.5, repetition_penalty=1.2, eos_token_id=2, bos_token_id=1, pad_token_id=0) output = tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0] response = output[len(prompt):] print(response) ``` ## Limitations There still exists a few issues in the model trained on current base model and data: 1. The model might generate factual errors when asked to follow instructions related to facts. 2. 
Occasionally generates harmful responses since the model still struggles to identify potential harmful instructions. 3. Needs improvements on reasoning and coding. Since the model still has its limitations, we require developers only use the open-sourced code, data, model and any other artifacts generated via this project for research purposes. Commercial use and other potential harmful use cases are not allowed. ## Citation Please cite our paper and github when using our code, data or model. ``` @misc{BELLE, author = {BELLEGroup}, title = {BELLE: Be Everyone's Large Language model Engine}, year = {2023}, publisher = {GitHub}, journal = {GitHub repository}, howpublished = {\url{https://github.com/LianjiaTech/BELLE}}, } ```
2,746
[ [ -0.00701904296875, -0.0662841796875, 0.0103302001953125, 0.0241241455078125, -0.00997161865234375, -0.01522064208984375, -0.005115509033203125, -0.045654296875, 0.01233673095703125, 0.037353515625, -0.04205322265625, -0.05108642578125, -0.047332763671875, 0.0036163330078125, -0.031158447265625, 0.07720947265625, 0.01027679443359375, 0.006649017333984375, 0.0153350830078125, 0.0194244384765625, -0.0272216796875, -0.04559326171875, -0.06866455078125, -0.0167388916015625, 0.01007080078125, 0.021484375, 0.037139892578125, 0.05560302734375, 0.019683837890625, 0.026214599609375, -0.01629638671875, 0.00986480712890625, -0.047210693359375, -0.018402099609375, -0.0048980712890625, -0.03997802734375, -0.044464111328125, -0.01404571533203125, 0.040283203125, 0.019866943359375, 0.00537109375, 0.0272216796875, 0.01177215576171875, 0.0159149169921875, -0.0250701904296875, 0.03546142578125, -0.056304931640625, -0.0124053955078125, -0.002689361572265625, -0.004795074462890625, -0.027313232421875, -0.024017333984375, -0.00846099853515625, -0.044342041015625, 0.010833740234375, 0.00437164306640625, 0.08563232421875, 0.019378662109375, -0.02850341796875, -0.002819061279296875, -0.03607177734375, 0.063232421875, -0.07568359375, 0.01537322998046875, 0.037994384765625, 0.01035308837890625, -0.023468017578125, -0.058929443359375, -0.0335693359375, -0.024993896484375, -0.0095062255859375, -0.0034465789794921875, -0.004772186279296875, -0.00231170654296875, 0.04449462890625, 0.019012451171875, -0.031646728515625, 0.00928497314453125, -0.03692626953125, -0.04638671875, 0.060455322265625, 0.02105712890625, 0.0286407470703125, -0.02496337890625, -0.0259552001953125, -0.0035552978515625, -0.036834716796875, 0.0290679931640625, 0.025146484375, 0.02703857421875, -0.02691650390625, 0.04217529296875, -0.025726318359375, 0.0238494873046875, 0.008056640625, -0.0229034423828125, 0.03131103515625, -0.01389312744140625, -0.0236053466796875, 0.007701873779296875, 0.0728759765625, 0.027252197265625, 
-0.00635528564453125, 0.0090179443359375, -0.0238800048828125, -0.01186370849609375, -0.0125274658203125, -0.07403564453125, -0.0205230712890625, 0.0178985595703125, -0.02740478515625, -0.03631591796875, 0.001644134521484375, -0.05096435546875, -0.0145111083984375, -0.01141357421875, 0.0364990234375, -0.03155517578125, -0.03717041015625, 0.0080108642578125, 0.0200958251953125, 0.0261993408203125, 0.00957489013671875, -0.0865478515625, 0.0226593017578125, 0.036468505859375, 0.056121826171875, 0.004962921142578125, -0.0230712890625, -0.035888671875, 0.0013742446899414062, -0.0129547119140625, 0.0236663818359375, -0.025299072265625, -0.04803466796875, -0.0150909423828125, 0.0286407470703125, -0.007293701171875, -0.0202178955078125, 0.06439208984375, -0.0256195068359375, 0.0071868896484375, -0.0259857177734375, -0.037322998046875, -0.025634765625, 0.0110321044921875, -0.045501708984375, 0.09423828125, -0.00978851318359375, -0.072021484375, -0.0022296905517578125, -0.0509033203125, -0.0178985595703125, -0.0152130126953125, -0.0018901824951171875, -0.04083251953125, -0.008453369140625, 0.0303192138671875, 0.033538818359375, -0.01812744140625, 0.0298309326171875, -0.0246734619140625, -0.0406494140625, 0.0255584716796875, -0.05010986328125, 0.0853271484375, 0.0247344970703125, -0.035552978515625, 0.01221466064453125, -0.06365966796875, -0.0022430419921875, 0.03192138671875, -0.0248870849609375, -0.0062255859375, -0.002376556396484375, 0.01959228515625, 0.004825592041015625, 0.03741455078125, -0.03985595703125, 0.015045166015625, -0.046295166015625, 0.033721923828125, 0.050689697265625, -0.00099945068359375, 0.015869140625, -0.034423828125, 0.0170135498046875, 0.01336669921875, 0.015289306640625, -0.0172119140625, -0.03436279296875, -0.07958984375, -0.016937255859375, 0.0217437744140625, 0.0538330078125, -0.034210205078125, 0.04736328125, -0.01105499267578125, -0.042083740234375, -0.036224365234375, -0.002307891845703125, 0.0291900634765625, 0.04241943359375, 
0.037933349609375, -0.00420379638671875, -0.049163818359375, -0.06298828125, 0.0014524459838867188, -0.00850677490234375, -0.0015897750854492188, 0.0218048095703125, 0.052276611328125, -0.02813720703125, 0.059326171875, -0.0247955322265625, -0.003559112548828125, -0.0241546630859375, 0.00902557373046875, 0.032928466796875, 0.05670166015625, 0.049102783203125, -0.031982421875, -0.0156402587890625, -0.01493072509765625, -0.049774169921875, -0.0166778564453125, -0.00930023193359375, -0.015838623046875, 0.029022216796875, 0.02117919921875, -0.037750244140625, 0.044036865234375, 0.03497314453125, -0.022735595703125, 0.048797607421875, -0.005558013916015625, 0.0144195556640625, -0.10699462890625, 0.0011701583862304688, -0.0048980712890625, -0.0030536651611328125, -0.036590576171875, -0.0032749176025390625, 0.006649017333984375, -0.00394439697265625, -0.042510986328125, 0.047088623046875, -0.021453857421875, 0.01020050048828125, -0.01517486572265625, -0.0086212158203125, -0.00714111328125, 0.055572509765625, 0.01032257080078125, 0.055908203125, 0.056304931640625, -0.052001953125, 0.039520263671875, 0.01556396484375, -0.021087646484375, -0.0008006095886230469, -0.07049560546875, 0.00560760498046875, 0.01076507568359375, 0.0121002197265625, -0.08123779296875, -0.029937744140625, 0.035675048828125, -0.04864501953125, 0.03314208984375, -0.01198577880859375, -0.0390625, -0.024169921875, -0.0199127197265625, 0.0201416015625, 0.042083740234375, -0.048492431640625, 0.03729248046875, 0.01294708251953125, 0.00928497314453125, -0.05224609375, -0.058380126953125, -0.00948333740234375, -0.01776123046875, -0.05108642578125, 0.016937255859375, -0.0035247802734375, 0.0012617111206054688, -0.0155792236328125, -0.004703521728515625, 0.0016317367553710938, 0.0116424560546875, 0.023040771484375, 0.037200927734375, -0.01103973388671875, 0.0112152099609375, 0.0139007568359375, -0.0159759521484375, 0.01776123046875, -0.011962890625, 0.04400634765625, -0.024688720703125, -0.0265655517578125, 
-0.061553955078125, 0.00809478759765625, 0.039093017578125, -0.01568603515625, 0.06646728515625, 0.058135986328125, -0.03973388671875, 0.0010051727294921875, -0.041961669921875, -0.034637451171875, -0.037628173828125, 0.051116943359375, -0.036590576171875, -0.055145263671875, 0.054901123046875, 0.01007080078125, 0.0275726318359375, 0.042327880859375, 0.04583740234375, 0.006565093994140625, 0.07958984375, 0.03875732421875, -0.0120849609375, 0.0194244384765625, -0.03912353515625, 0.0021800994873046875, -0.057403564453125, -0.021514892578125, -0.035064697265625, -0.0086822509765625, -0.050567626953125, -0.0136566162109375, 0.0101470947265625, 0.0188140869140625, -0.04473876953125, 0.030120849609375, -0.04248046875, 0.022186279296875, 0.04083251953125, 0.020416259765625, 0.0165863037109375, -0.006107330322265625, -0.019378662109375, -0.002750396728515625, -0.03778076171875, -0.050689697265625, 0.08343505859375, 0.057708740234375, 0.07098388671875, 0.0042724609375, 0.054046630859375, -0.008209228515625, 0.0083465576171875, -0.050140380859375, 0.0460205078125, 0.01727294921875, -0.05181884765625, -0.02569580078125, -0.029541015625, -0.0533447265625, 0.01523590087890625, -0.0233306884765625, -0.078857421875, -0.006839752197265625, 0.0126495361328125, -0.025726318359375, 0.03497314453125, -0.05767822265625, 0.069091796875, -0.034637451171875, -0.03692626953125, 0.0031719207763671875, -0.037017822265625, 0.050537109375, -0.00115203857421875, 0.0104217529296875, -0.010009765625, 0.006099700927734375, 0.06634521484375, -0.031829833984375, 0.08746337890625, -0.00904083251953125, -0.0104217529296875, 0.02337646484375, 0.0029850006103515625, 0.054595947265625, 0.0298309326171875, -0.01212310791015625, 0.0292510986328125, 0.0241241455078125, -0.0229034423828125, -0.025634765625, 0.0589599609375, -0.08880615234375, -0.05853271484375, -0.03375244140625, -0.035858154296875, -0.00020122528076171875, 0.01216888427734375, 0.044403076171875, 0.0308837890625, 0.0016241073608398438, 
0.011138916015625, 0.052764892578125, -0.036651611328125, 0.034637451171875, 0.03363037109375, -0.034576416015625, -0.02569580078125, 0.061614990234375, 0.0012006759643554688, 0.007503509521484375, 0.0232391357421875, 0.006404876708984375, -0.04022216796875, -0.02557373046875, -0.03094482421875, 0.033355712890625, -0.043792724609375, -0.015045166015625, -0.046234130859375, -0.04156494140625, -0.04168701171875, 0.0154876708984375, -0.0374755859375, -0.02215576171875, -0.058197021484375, -0.00794219970703125, 0.044708251953125, 0.022064208984375, 0.0027217864990234375, 0.0258941650390625, -0.0694580078125, 0.008544921875, 0.01959228515625, 0.022979736328125, 0.0037631988525390625, -0.0753173828125, -0.021087646484375, 0.030975341796875, -0.02447509765625, -0.06890869140625, 0.0343017578125, -0.00750732421875, 0.044952392578125, 0.030426025390625, 0.018768310546875, 0.042724609375, -0.01145172119140625, 0.06329345703125, 0.0183868408203125, -0.07476806640625, 0.04193115234375, -0.023284912109375, 0.0178375244140625, 0.014251708984375, 0.0197296142578125, -0.02862548828125, -0.0190277099609375, -0.03753662109375, -0.049285888671875, 0.072021484375, 0.025360107421875, 0.02178955078125, 0.007160186767578125, 0.024261474609375, -0.0121307373046875, -0.01409912109375, -0.07733154296875, -0.019775390625, -0.040283203125, -0.0133819580078125, 0.0209808349609375, -0.031951904296875, -0.001995086669921875, -0.0333251953125, 0.073486328125, 0.0012378692626953125, 0.037353515625, 0.0031261444091796875, -0.009124755859375, -0.0016918182373046875, 0.020294189453125, 0.0430908203125, 0.0272216796875, -0.0160980224609375, -0.008087158203125, 0.04766845703125, -0.044342041015625, 0.004673004150390625, 0.01361846923828125, -0.01702880859375, -0.00620269775390625, 0.0175323486328125, 0.07891845703125, 0.00858306884765625, -0.031494140625, 0.026397705078125, -0.00907135009765625, -0.028289794921875, -0.034088134765625, 0.0264739990234375, 0.01306915283203125, 0.023284912109375, 
0.0223236083984375, -0.01131439208984375, 0.0010557174682617188, -0.02569580078125, 0.00722503662109375, 0.019622802734375, -0.0150604248046875, -0.0202789306640625, 0.0771484375, 0.0233154296875, -0.020599365234375, 0.0443115234375, -0.01294708251953125, -0.039154052734375, 0.052764892578125, 0.052001953125, 0.06353759765625, -0.0128936767578125, -0.0027618408203125, 0.06707763671875, 0.031494140625, -0.005619049072265625, 0.018035888671875, 0.00029659271240234375, -0.051300048828125, -0.029876708984375, -0.056884765625, -0.00852203369140625, 0.03515625, -0.04119873046875, 0.044708251953125, -0.04010009765625, -0.0205078125, -0.023834228515625, 0.0120849609375, -0.0307159423828125, 0.01375579833984375, 0.00771331787109375, 0.0736083984375, -0.055908203125, 0.08367919921875, 0.027801513671875, -0.046783447265625, -0.0882568359375, -0.0108184814453125, 0.0071258544921875, -0.0643310546875, 0.053131103515625, 0.019256591796875, -0.007598876953125, 0.005733489990234375, -0.061126708984375, -0.07525634765625, 0.08978271484375, 0.021331787109375, -0.0628662109375, -0.004398345947265625, 0.007259368896484375, 0.029754638671875, -0.01427459716796875, 0.04168701171875, 0.0509033203125, 0.0235595703125, 0.029541015625, -0.0787353515625, 0.0123443603515625, -0.026214599609375, 0.006214141845703125, -0.01380157470703125, -0.0548095703125, 0.062164306640625, -0.0269622802734375, -0.0156707763671875, 0.01983642578125, 0.06854248046875, 0.0280914306640625, 0.007335662841796875, 0.0278472900390625, 0.035125732421875, 0.050872802734375, -0.0192413330078125, 0.045166015625, -0.035675048828125, 0.046539306640625, 0.07476806640625, 0.0026187896728515625, 0.0548095703125, 0.00951385498046875, -0.01448822021484375, 0.024200439453125, 0.06439208984375, -0.0116424560546875, 0.0234222412109375, 0.0092926025390625, -0.0116424560546875, -0.0096893310546875, 0.0017881393432617188, -0.043975830078125, 0.0269775390625, 0.036407470703125, -0.0285491943359375, -0.006595611572265625, 
-0.0081634521484375, 0.014495849609375, -0.0189666748046875, -0.01348114013671875, 0.040435791015625, -0.0017681121826171875, -0.028289794921875, 0.08123779296875, 0.01788330078125, 0.066162109375, -0.050262451171875, -0.005168914794921875, -0.0218048095703125, 0.00855255126953125, -0.02630615234375, -0.038360595703125, 0.0188140869140625, 0.005035400390625, 0.00021028518676757812, 0.01348114013671875, 0.038726806640625, -0.038909912109375, -0.0474853515625, 0.01459503173828125, 0.0020923614501953125, 0.02618408203125, 0.018218994140625, -0.0677490234375, 0.022064208984375, 0.0171661376953125, -0.033721923828125, 0.01099395751953125, 0.020965576171875, 0.0013399124145507812, 0.05126953125, 0.049774169921875, -0.006145477294921875, 0.00574493408203125, 0.0008549690246582031, 0.0631103515625, -0.042877197265625, -0.033447265625, -0.060760498046875, 0.04437255859375, -0.00580596923828125, -0.01983642578125, 0.052398681640625, 0.049560546875, 0.05950927734375, -0.0008397102355957031, 0.06005859375, -0.004749298095703125, 0.048828125, -0.04400634765625, 0.05877685546875, -0.0394287109375, 0.039306640625, -0.0264892578125, -0.061431884765625, -0.005680084228515625, 0.059814453125, -0.0220184326171875, 0.01160430908203125, 0.041168212890625, 0.09478759765625, -0.0015707015991210938, 0.01192474365234375, 0.02978515625, 0.034637451171875, 0.032470703125, 0.056182861328125, 0.046661376953125, -0.051605224609375, 0.04901123046875, -0.0292510986328125, -0.02301025390625, -0.02191162109375, -0.050537109375, -0.08673095703125, -0.04193115234375, -0.0187225341796875, -0.037078857421875, -0.016021728515625, 0.09381103515625, 0.041839599609375, -0.07012939453125, -0.0181427001953125, -0.003772735595703125, 0.0046234130859375, -0.007049560546875, -0.0186614990234375, 0.03887939453125, -0.0250701904296875, -0.07440185546875, 0.004024505615234375, -0.0091094970703125, 0.032196044921875, -0.0214385986328125, -0.0157012939453125, -0.00677490234375, 0.004665374755859375, 
0.031768798828125, 0.0246734619140625, -0.06671142578125, -0.0211639404296875, 0.016082763671875, -0.025238037109375, -0.01467132568359375, 0.01514434814453125, -0.031951904296875, 0.0253753662109375, 0.0251617431640625, 0.043426513671875, 0.047943115234375, -0.01442718505859375, 0.0199127197265625, -0.030914306640625, 0.0189361572265625, -0.003002166748046875, 0.039703369140625, 0.01178741455078125, -0.037200927734375, 0.0267333984375, 0.0262603759765625, -0.03985595703125, -0.050140380859375, 0.0031414031982421875, -0.07208251953125, -0.016265869140625, 0.09954833984375, -0.00792694091796875, -0.0285491943359375, -0.0007357597351074219, -0.046844482421875, 0.038909912109375, -0.041656494140625, 0.06341552734375, 0.044891357421875, -0.0013036727905273438, -0.02490234375, -0.03961181640625, 0.03411865234375, 0.01201629638671875, -0.052642822265625, 0.002079010009765625, 0.0328369140625, 0.035675048828125, -0.0028667449951171875, 0.03985595703125, -0.005901336669921875, 0.0250701904296875, 0.0009431838989257812, 0.01251983642578125, -0.0188140869140625, -0.008209228515625, -0.01251983642578125, -0.018157958984375, -0.00574493408203125, -0.0287017822265625 ] ]
timm/dla34.in1k
2023-04-24T21:12:19.000Z
[ "timm", "pytorch", "safetensors", "image-classification", "dataset:imagenet-1k", "arxiv:1707.06484", "license:bsd-3-clause", "region:us" ]
image-classification
timm
null
null
timm/dla34.in1k
0
8,425
timm
2023-04-24T19:33:57
--- tags: - image-classification - timm library_name: timm license: bsd-3-clause datasets: - imagenet-1k --- # Model card for dla34.in1k A DLA (Deep Layer Aggregation) image classification model. Trained on ImageNet-1k by paper authors. ## Model Details - **Model Type:** Image classification / feature backbone - **Model Stats:** - Params (M): 15.7 - GMACs: 3.1 - Activations (M): 5.0 - Image size: 224 x 224 - **Papers:** - Deep Layer Aggregation: https://arxiv.org/abs/1707.06484 - **Original:** https://github.com/ucbdrive/dla - **Dataset:** ImageNet-1k ## Model Usage ### Image Classification ```python from urllib.request import urlopen from PIL import Image import timm img = Image.open(urlopen( 'https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/beignets-task-guide.png' )) model = timm.create_model('dla34.in1k', pretrained=True) model = model.eval() # get model specific transforms (normalization, resize) data_config = timm.data.resolve_model_data_config(model) transforms = timm.data.create_transform(**data_config, is_training=False) output = model(transforms(img).unsqueeze(0)) # unsqueeze single image into batch of 1 top5_probabilities, top5_class_indices = torch.topk(output.softmax(dim=1) * 100, k=5) ``` ### Feature Map Extraction ```python from urllib.request import urlopen from PIL import Image import timm img = Image.open(urlopen( 'https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/beignets-task-guide.png' )) model = timm.create_model( 'dla34.in1k', pretrained=True, features_only=True, ) model = model.eval() # get model specific transforms (normalization, resize) data_config = timm.data.resolve_model_data_config(model) transforms = timm.data.create_transform(**data_config, is_training=False) output = model(transforms(img).unsqueeze(0)) # unsqueeze single image into batch of 1 for o in output: # print shape of each feature map in output # e.g.: # torch.Size([1, 32, 112, 112]) # torch.Size([1, 64, 56, 56]) # 
torch.Size([1, 128, 28, 28]) # torch.Size([1, 256, 14, 14]) # torch.Size([1, 512, 7, 7]) print(o.shape) ``` ### Image Embeddings ```python from urllib.request import urlopen from PIL import Image import timm img = Image.open(urlopen( 'https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/beignets-task-guide.png' )) model = timm.create_model( 'dla34.in1k', pretrained=True, num_classes=0, # remove classifier nn.Linear ) model = model.eval() # get model specific transforms (normalization, resize) data_config = timm.data.resolve_model_data_config(model) transforms = timm.data.create_transform(**data_config, is_training=False) output = model(transforms(img).unsqueeze(0)) # output is (batch_size, num_features) shaped tensor # or equivalently (without needing to set num_classes=0) output = model.forward_features(transforms(img).unsqueeze(0)) # output is unpooled, a (1, 512, 7, 7) shaped tensor output = model.forward_head(output, pre_logits=True) # output is a (1, num_features) shaped tensor ``` ## Model Comparison Explore the dataset and runtime metrics of this model in timm [model results](https://github.com/huggingface/pytorch-image-models/tree/main/results). ## Citation ```bibtex @inproceedings{yu2018deep, title={Deep layer aggregation}, author={Yu, Fisher and Wang, Dequan and Shelhamer, Evan and Darrell, Trevor}, booktitle={Proceedings of the IEEE conference on computer vision and pattern recognition}, year={2018} } ```
3,582
[ [ -0.0369873046875, -0.038970947265625, 0.01390838623046875, 0.0111236572265625, -0.0257415771484375, -0.019012451171875, -0.00519561767578125, -0.035736083984375, 0.018798828125, 0.035552978515625, -0.0352783203125, -0.058441162109375, -0.0562744140625, -0.016143798828125, -0.0139007568359375, 0.07110595703125, -0.0037059783935546875, 0.00036406517028808594, -0.0151214599609375, -0.0401611328125, -0.01396942138671875, -0.024505615234375, -0.0684814453125, -0.035797119140625, 0.0280609130859375, 0.006938934326171875, 0.0352783203125, 0.045623779296875, 0.040069580078125, 0.038330078125, -0.01357269287109375, 0.0115509033203125, -0.015838623046875, -0.025054931640625, 0.033843994140625, -0.045074462890625, -0.04669189453125, 0.020599365234375, 0.05120849609375, 0.039764404296875, 0.0089569091796875, 0.028045654296875, 0.00952911376953125, 0.051055908203125, -0.0214385986328125, 0.0149078369140625, -0.0303497314453125, 0.01507568359375, -0.01134490966796875, 0.014007568359375, -0.0168609619140625, -0.035552978515625, 0.0178985595703125, -0.037750244140625, 0.0248260498046875, -0.0108642578125, 0.09698486328125, 0.019622802734375, -0.00839996337890625, -0.01108551025390625, -0.0162353515625, 0.062286376953125, -0.07025146484375, 0.00835418701171875, 0.02886962890625, 0.006866455078125, -0.004779815673828125, -0.06695556640625, -0.04534912109375, -0.003269195556640625, -0.00875091552734375, -0.0030879974365234375, -0.0022792816162109375, -0.000347137451171875, 0.01015472412109375, 0.033843994140625, -0.0294189453125, 0.0200653076171875, -0.04345703125, -0.007076263427734375, 0.046783447265625, 0.0094451904296875, 0.026519775390625, -0.006916046142578125, -0.0435791015625, -0.0265960693359375, -0.0238037109375, 0.0240478515625, 0.0239105224609375, 0.0198822021484375, -0.05218505859375, 0.0256195068359375, 0.0183868408203125, 0.0401611328125, 0.01380157470703125, -0.028839111328125, 0.049163818359375, 0.006755828857421875, -0.042083740234375, -0.00885009765625, 
0.07763671875, 0.0240936279296875, 0.014892578125, 0.007038116455078125, -0.007244110107421875, -0.0244140625, -0.01447296142578125, -0.085205078125, -0.0272064208984375, 0.03448486328125, -0.045257568359375, -0.03594970703125, 0.022430419921875, -0.04962158203125, -0.0158233642578125, -0.008148193359375, 0.039764404296875, -0.03680419921875, -0.0277252197265625, 0.0073089599609375, -0.0100860595703125, 0.023345947265625, 0.01502227783203125, -0.04058837890625, 0.01215362548828125, 0.0181732177734375, 0.07659912109375, -0.0024738311767578125, -0.04180908203125, -0.01313018798828125, -0.0177154541015625, -0.0251922607421875, 0.037933349609375, -0.005245208740234375, -0.00598907470703125, -0.0183563232421875, 0.0204620361328125, -0.0189056396484375, -0.06024169921875, 0.0218353271484375, -0.02435302734375, 0.0194854736328125, -0.0009469985961914062, -0.0298614501953125, -0.040252685546875, 0.0162506103515625, -0.038360595703125, 0.0882568359375, 0.0261993408203125, -0.055938720703125, 0.03668212890625, -0.041473388671875, -0.0071868896484375, -0.019378662109375, -0.0011320114135742188, -0.07879638671875, -0.00788116455078125, 0.0240325927734375, 0.053985595703125, -0.01776123046875, 0.0009965896606445312, -0.045684814453125, -0.0168304443359375, 0.031585693359375, -0.0015497207641601562, 0.08416748046875, 0.01230621337890625, -0.024078369140625, 0.014007568359375, -0.048431396484375, 0.01189422607421875, 0.038055419921875, -0.02923583984375, -0.00899505615234375, -0.0482177734375, 0.0113067626953125, 0.02020263671875, 0.01558685302734375, -0.0411376953125, 0.024322509765625, -0.017913818359375, 0.0210113525390625, 0.054779052734375, -0.008056640625, 0.025604248046875, -0.0260162353515625, 0.016693115234375, 0.0367431640625, 0.0135650634765625, -0.00013446807861328125, -0.042022705078125, -0.057891845703125, -0.045166015625, 0.02685546875, 0.0295257568359375, -0.031646728515625, 0.05047607421875, -0.002574920654296875, -0.05462646484375, -0.038116455078125, 
0.0173187255859375, 0.03167724609375, 0.039581298828125, 0.0223236083984375, -0.03564453125, -0.035980224609375, -0.05853271484375, 0.0153656005859375, 0.005306243896484375, 0.0017118453979492188, 0.0220947265625, 0.047393798828125, -0.0111541748046875, 0.046844482421875, -0.038909912109375, -0.035186767578125, -0.0150909423828125, 0.01042938232421875, 0.0389404296875, 0.05950927734375, 0.07562255859375, -0.045501708984375, -0.039764404296875, -0.01407623291015625, -0.08148193359375, 0.01239776611328125, -0.0052490234375, -0.01116943359375, 0.0304412841796875, 0.006389617919921875, -0.05279541015625, 0.040618896484375, 0.0204010009765625, -0.0263214111328125, 0.026611328125, -0.0207672119140625, 0.0216827392578125, -0.08392333984375, 0.0014524459838867188, 0.0258331298828125, -0.00010085105895996094, -0.033905029296875, 0.00809478759765625, -0.004161834716796875, -0.0027179718017578125, -0.041229248046875, 0.045623779296875, -0.043731689453125, -0.0171051025390625, -0.017364501953125, -0.01364898681640625, 0.00206756591796875, 0.058502197265625, -0.015411376953125, 0.0296783447265625, 0.07177734375, -0.04559326171875, 0.032958984375, 0.031829833984375, -0.0189208984375, 0.0295257568359375, -0.042205810546875, 0.025238037109375, -0.004802703857421875, 0.01788330078125, -0.07513427734375, -0.0159454345703125, 0.019561767578125, -0.036834716796875, 0.048309326171875, -0.037933349609375, -0.0321044921875, -0.03619384765625, -0.04205322265625, 0.032867431640625, 0.052459716796875, -0.06005859375, 0.031158447265625, 0.0289459228515625, 0.021575927734375, -0.040313720703125, -0.067138671875, -0.01959228515625, -0.0362548828125, -0.054290771484375, 0.0240478515625, 0.002010345458984375, 0.006610870361328125, 0.0101470947265625, -0.002849578857421875, -0.0194244384765625, -0.0082244873046875, 0.03704833984375, 0.0300445556640625, -0.0232391357421875, -0.01265716552734375, -0.0204010009765625, 0.00246429443359375, 0.00731658935546875, -0.0191650390625, 0.05230712890625, 
-0.01641845703125, -0.01251220703125, -0.062347412109375, -0.006053924560546875, 0.04180908203125, -0.003986358642578125, 0.06488037109375, 0.08123779296875, -0.039459228515625, -0.0036296844482421875, -0.0238494873046875, -0.0133056640625, -0.03692626953125, 0.0310211181640625, -0.029754638671875, -0.02484130859375, 0.0740966796875, 0.002132415771484375, 0.00542449951171875, 0.055328369140625, 0.02764892578125, -0.00963592529296875, 0.060638427734375, 0.04473876953125, -0.0022125244140625, 0.04473876953125, -0.080810546875, -0.0191650390625, -0.061614990234375, -0.033721923828125, -0.0267333984375, -0.040618896484375, -0.042205810546875, -0.0265655517578125, 0.0282440185546875, 0.0206756591796875, -0.0283355712890625, 0.028167724609375, -0.055450439453125, 0.01201629638671875, 0.044464111328125, 0.048980712890625, -0.025848388671875, 0.0159454345703125, -0.0074005126953125, 0.0029315948486328125, -0.04736328125, -0.0162200927734375, 0.08148193359375, 0.04058837890625, 0.051971435546875, -0.00827789306640625, 0.06072998046875, -0.009033203125, 0.01995849609375, -0.040985107421875, 0.045684814453125, -0.0248565673828125, -0.02813720703125, -0.01454925537109375, -0.02362060546875, -0.076416015625, -0.0006685256958007812, -0.01708984375, -0.061248779296875, 0.0193023681640625, 0.009429931640625, -0.0154266357421875, 0.0615234375, -0.0625, 0.06634521484375, -0.0111236572265625, -0.0247802734375, 0.00469207763671875, -0.03704833984375, 0.0290985107421875, 0.0046234130859375, -0.0175018310546875, -0.00949859619140625, 0.01497650146484375, 0.07965087890625, -0.043548583984375, 0.07177734375, -0.0433349609375, 0.01334381103515625, 0.04119873046875, -0.01491546630859375, 0.024993896484375, -0.01348114013671875, -0.007427215576171875, 0.029144287109375, 0.0004067420959472656, -0.039459228515625, -0.041015625, 0.04718017578125, -0.0806884765625, -0.0226898193359375, -0.0268096923828125, -0.0450439453125, 0.01204681396484375, 0.003177642822265625, 0.029205322265625, 
0.045135498046875, 0.0191192626953125, 0.01364898681640625, 0.046600341796875, -0.033935546875, 0.034698486328125, -0.00832366943359375, -0.0235748291015625, -0.03436279296875, 0.06463623046875, 0.0204010009765625, 0.010101318359375, 0.00390625, 0.01306915283203125, -0.0283050537109375, -0.054290771484375, -0.027435302734375, 0.027557373046875, -0.05364990234375, -0.0312347412109375, -0.04266357421875, -0.04425048828125, -0.03759765625, -0.00567626953125, -0.0289154052734375, -0.02227783203125, -0.03228759765625, 0.01146697998046875, 0.0518798828125, 0.054595947265625, -0.006877899169921875, 0.04058837890625, -0.04425048828125, 0.00948333740234375, 0.012420654296875, 0.037200927734375, -0.0031642913818359375, -0.067138671875, -0.0247039794921875, -0.00830078125, -0.030426025390625, -0.053924560546875, 0.0406494140625, 0.0135498046875, 0.04425048828125, 0.032318115234375, -0.0143280029296875, 0.059234619140625, 0.007541656494140625, 0.043975830078125, 0.019287109375, -0.04425048828125, 0.0460205078125, -0.00397491455078125, 0.01401519775390625, 0.004817962646484375, 0.0267181396484375, 0.0029239654541015625, -0.01221466064453125, -0.06756591796875, -0.060638427734375, 0.0682373046875, 0.006473541259765625, -0.0035495758056640625, 0.028411865234375, 0.050384521484375, 0.004512786865234375, -0.00222015380859375, -0.05560302734375, -0.039215087890625, -0.0190887451171875, -0.023834228515625, 0.0016012191772460938, -0.0057525634765625, -0.00820159912109375, -0.042877197265625, 0.05303955078125, -0.007762908935546875, 0.05108642578125, 0.0218658447265625, -0.005260467529296875, -0.007480621337890625, -0.032012939453125, 0.0321044921875, 0.0306854248046875, -0.039398193359375, -0.003765106201171875, 0.0191650390625, -0.045806884765625, 0.004825592041015625, 0.0162353515625, 0.0002353191375732422, -0.00823211669921875, 0.033233642578125, 0.06256103515625, -0.00841522216796875, 0.00469970703125, 0.0221099853515625, -0.0012807846069335938, -0.0299530029296875, 
-0.02337646484375, 0.01302337646484375, -0.0029430389404296875, 0.03338623046875, 0.0333251953125, 0.0167083740234375, -0.0098419189453125, -0.025634765625, 0.022613525390625, 0.04180908203125, -0.01788330078125, -0.0218963623046875, 0.050201416015625, -0.01317596435546875, -0.00902557373046875, 0.060699462890625, -0.006626129150390625, -0.02960205078125, 0.09271240234375, 0.0330810546875, 0.07623291015625, -0.0013589859008789062, 0.0013818740844726562, 0.0595703125, 0.01212310791015625, 0.01470947265625, 0.00951385498046875, 0.0175933837890625, -0.060943603515625, 0.0019283294677734375, -0.04718017578125, 0.001708984375, 0.040863037109375, -0.039825439453125, 0.01511383056640625, -0.0557861328125, -0.022674560546875, 0.022674560546875, 0.0290679931640625, -0.06768798828125, 0.019775390625, 0.00286102294921875, 0.0643310546875, -0.0657958984375, 0.0687255859375, 0.0655517578125, -0.053863525390625, -0.07318115234375, -0.0004911422729492188, 0.0034332275390625, -0.0733642578125, 0.051239013671875, 0.033782958984375, 0.01470947265625, -0.0016269683837890625, -0.062225341796875, -0.04925537109375, 0.11041259765625, 0.040313720703125, 0.003475189208984375, 0.0195770263671875, 0.0008645057678222656, 0.020721435546875, -0.041534423828125, 0.02752685546875, 0.0186767578125, 0.0267486572265625, 0.01953125, -0.049224853515625, 0.01392364501953125, -0.0181732177734375, 0.00138092041015625, 0.0089874267578125, -0.05950927734375, 0.079345703125, -0.033721923828125, -0.00850677490234375, 0.005146026611328125, 0.056610107421875, 0.0266876220703125, 0.018463134765625, 0.048980712890625, 0.0657958984375, 0.042724609375, -0.018035888671875, 0.06353759765625, 0.0008749961853027344, 0.046478271484375, 0.044677734375, 0.01190185546875, 0.031707763671875, 0.0223388671875, -0.026824951171875, 0.031951904296875, 0.0828857421875, -0.027557373046875, 0.0267333984375, 0.006725311279296875, -0.0066375732421875, -0.0010280609130859375, 0.006198883056640625, -0.030181884765625, 
0.038787841796875, 0.01641845703125, -0.03692626953125, -0.0169677734375, 0.0034637451171875, 0.0015058517456054688, -0.0177001953125, -0.0216064453125, 0.04486083984375, -0.00032591819763183594, -0.0247955322265625, 0.06182861328125, -0.0027179718017578125, 0.066162109375, -0.0311126708984375, -0.003231048583984375, -0.0294647216796875, 0.0264892578125, -0.02923583984375, -0.06866455078125, 0.0225830078125, -0.0234375, -0.00609588623046875, 0.00930023193359375, 0.046478271484375, -0.02813720703125, -0.039154052734375, 0.0255584716796875, 0.0242919921875, 0.046600341796875, 0.007476806640625, -0.0921630859375, 0.00966644287109375, 0.0022029876708984375, -0.043853759765625, 0.03839111328125, 0.033416748046875, 0.01165771484375, 0.055450439453125, 0.046417236328125, -0.00560760498046875, 0.01230621337890625, -0.006992340087890625, 0.06524658203125, -0.037994384765625, -0.0182342529296875, -0.055908203125, 0.050567626953125, -0.01010894775390625, -0.04119873046875, 0.036834716796875, 0.0458984375, 0.06341552734375, -0.00772857666015625, 0.0325927734375, -0.0218048095703125, -0.001125335693359375, -0.043670654296875, 0.0587158203125, -0.047576904296875, 0.0028171539306640625, -0.0150146484375, -0.049835205078125, -0.019012451171875, 0.043060302734375, -0.0086517333984375, 0.036712646484375, 0.03411865234375, 0.072021484375, -0.023590087890625, -0.0311279296875, 0.01629638671875, 0.0139007568359375, 0.01059722900390625, 0.037109375, 0.0238189697265625, -0.06341552734375, 0.0232086181640625, -0.05181884765625, -0.016204833984375, 0.00025916099548339844, -0.0474853515625, -0.08160400390625, -0.06939697265625, -0.045806884765625, -0.04559326171875, -0.0190277099609375, 0.0650634765625, 0.08349609375, -0.057037353515625, -0.0128021240234375, 0.006893157958984375, 0.0158233642578125, -0.0211181640625, -0.018707275390625, 0.057708740234375, -0.0208740234375, -0.06988525390625, -0.01611328125, -0.0035076141357421875, 0.035247802734375, -0.01146697998046875, 
-0.0199737548828125, -0.0251922607421875, -0.0168304443359375, 0.0218048095703125, 0.021270751953125, -0.047027587890625, -0.00991058349609375, -0.0286102294921875, -0.012847900390625, 0.0291595458984375, 0.0247802734375, -0.0439453125, 0.01030731201171875, 0.02764892578125, 0.032806396484375, 0.06475830078125, -0.021697998046875, 0.0018281936645507812, -0.06304931640625, 0.042572021484375, -0.0164794921875, 0.034027099609375, 0.025360107421875, -0.030242919921875, 0.0447998046875, 0.029449462890625, -0.037994384765625, -0.0654296875, -0.0122528076171875, -0.08599853515625, -0.0160675048828125, 0.05303955078125, -0.038330078125, -0.041473388671875, 0.0297088623046875, -0.0024166107177734375, 0.043609619140625, -0.00933837890625, 0.04534912109375, 0.01953125, -0.0161590576171875, -0.050567626953125, -0.033233642578125, 0.03076171875, 0.00481414794921875, -0.0589599609375, -0.03131103515625, 0.0012865066528320312, 0.046966552734375, 0.021331787109375, 0.04254150390625, -0.0124053955078125, 0.0045318603515625, 0.00787353515625, 0.03271484375, -0.0279541015625, -0.006153106689453125, -0.02923583984375, 0.008209228515625, -0.00525665283203125, -0.053619384765625 ] ]
garage-bAInd/Platypus2-70B-instruct
2023-08-20T04:57:51.000Z
[ "transformers", "pytorch", "llama", "text-generation", "en", "dataset:garage-bAInd/Open-Platypus", "dataset:Open-Orca/OpenOrca", "arxiv:2308.07317", "arxiv:2307.09288", "license:cc-by-nc-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
garage-bAInd
null
null
garage-bAInd/Platypus2-70B-instruct
151
8,423
transformers
2023-08-04T22:20:35
--- language: - en datasets: - garage-bAInd/Open-Platypus - Open-Orca/OpenOrca license: cc-by-nc-4.0 --- # Platypus2-70B-instruct Platypus-70B-instruct is a merge of [`garage-bAInd/Platypus2-70B`](https://huggingface.co/garage-bAInd/Platypus2-70B) and [`upstage/Llama-2-70b-instruct-v2`](https://huggingface.co/upstage/Llama-2-70b-instruct-v2). ![Platty](./Best_Platty_small.jpeg) ### Benchmark Metrics | Metric | Value | |-----------------------|-------| | MMLU (5-shot) | 70.48 | | ARC (25-shot) | 71.84 | | HellaSwag (10-shot) | 87.94 | | TruthfulQA (0-shot) | 62.26 | | Avg. | 73.13 | We use state-of-the-art [Language Model Evaluation Harness](https://github.com/EleutherAI/lm-evaluation-harness) to run the benchmark tests above, using the same version as the HuggingFace LLM Leaderboard. Please see below for detailed instructions on reproducing benchmark results. ### Model Details * **Trained by**: **Platypus2-70B** trained by Cole Hunter & Ariel Lee; **Llama-2-70b-instruct** trained by upstageAI * **Model type:** **Platypus2-70B-instruct** is an auto-regressive language model based on the LLaMA 2 transformer architecture. * **Language(s)**: English * **License**: Non-Commercial Creative Commons license ([CC BY-NC-4.0](https://creativecommons.org/licenses/by-nc/4.0/)) ### Prompt Template ``` ### Instruction: <prompt> (without the <>) ### Response: ``` ### Training Dataset `garage-bAInd/Platypus2-70B` trained using STEM and logic based dataset [`garage-bAInd/Open-Platypus`](https://huggingface.co/datasets/garage-bAInd/Open-Platypus). Please see our [paper](https://arxiv.org/abs/2308.07317) and [project webpage](https://platypus-llm.github.io) for additional information. ### Training Procedure `garage-bAInd/Platypus2-70B` was instruction fine-tuned using LoRA on 8 A100 80GB. For training details and inference instructions please see the [Platypus](https://github.com/arielnlee/Platypus) GitHub repo. 
### Reproducing Evaluation Results Install LM Evaluation Harness: ``` # clone repository git clone https://github.com/EleutherAI/lm-evaluation-harness.git # change to repo directory cd lm-evaluation-harness # check out the correct commit git checkout b281b0921b636bc36ad05c0b0b0763bd6dd43463 # install pip install -e . ``` Each task was evaluated on a single A100 80GB GPU. ARC: ``` python main.py --model hf-causal-experimental --model_args pretrained=garage-bAInd/Platypus2-70B-instruct --tasks arc_challenge --batch_size 1 --no_cache --write_out --output_path results/Platypus2-70B-instruct/arc_challenge_25shot.json --device cuda --num_fewshot 25 ``` HellaSwag: ``` python main.py --model hf-causal-experimental --model_args pretrained=garage-bAInd/Platypus2-70B-instruct --tasks hellaswag --batch_size 1 --no_cache --write_out --output_path results/Platypus2-70B-instruct/hellaswag_10shot.json --device cuda --num_fewshot 10 ``` MMLU: ``` python main.py --model hf-causal-experimental --model_args pretrained=garage-bAInd/Platypus2-70B-instruct --tasks hendrycksTest-* --batch_size 1 --no_cache --write_out --output_path results/Platypus2-70B-instruct/mmlu_5shot.json --device cuda --num_fewshot 5 ``` TruthfulQA: ``` python main.py --model hf-causal-experimental --model_args pretrained=garage-bAInd/Platypus2-70B-instruct --tasks truthfulqa_mc --batch_size 1 --no_cache --write_out --output_path results/Platypus2-70B-instruct/truthfulqa_0shot.json --device cuda ``` ### Limitations and bias Llama 2 and fine-tuned variants are a new technology that carries risks with use. Testing conducted to date has been in English, and has not covered, nor could it cover all scenarios. For these reasons, as with all LLMs, Llama 2 and any fine-tuned varient's potential outputs cannot be predicted in advance, and the model may in some instances produce inaccurate, biased or other objectionable responses to user prompts. 
Therefore, before deploying any applications of Llama 2 variants, developers should perform safety testing and tuning tailored to their specific applications of the model. Please see the Responsible Use Guide available at https://ai.meta.com/llama/responsible-use-guide/ ### Citations ```bibtex @article{platypus2023, title={Platypus: Quick, Cheap, and Powerful Refinement of LLMs}, author={Ariel N. Lee and Cole J. Hunter and Nataniel Ruiz}, booktitle={arXiv preprint arxiv:2308.07317}, year={2023} } ``` ```bibtex @misc{touvron2023llama, title={Llama 2: Open Foundation and Fine-Tuned Chat Models}, author={Hugo Touvron and Louis Martin and Kevin Stone and Peter Albert and Amjad Almahairi and Yasmine Babaei and Nikolay Bashlykov year={2023}, eprint={2307.09288}, archivePrefix={arXiv}, } ``` ```bibtex @inproceedings{ hu2022lora, title={Lo{RA}: Low-Rank Adaptation of Large Language Models}, author={Edward J Hu and Yelong Shen and Phillip Wallis and Zeyuan Allen-Zhu and Yuanzhi Li and Shean Wang and Lu Wang and Weizhu Chen}, booktitle={International Conference on Learning Representations}, year={2022}, url={https://openreview.net/forum?id=nZeVKeeFYf9} } ```
5,154
[ [ -0.023406982421875, -0.056976318359375, 0.0215606689453125, 0.025543212890625, -0.025421142578125, -0.0004551410675048828, -0.0279998779296875, -0.037567138671875, 0.004108428955078125, 0.020721435546875, -0.037353515625, -0.023040771484375, -0.047515869140625, -0.00585174560546875, -0.00879669189453125, 0.0771484375, -0.030609130859375, -0.01325225830078125, -0.0100860595703125, -0.0174102783203125, -0.0494384765625, -0.037261962890625, -0.0347900390625, -0.0292510986328125, 0.0173187255859375, 0.0257110595703125, 0.042388916015625, 0.038726806640625, 0.04669189453125, 0.0238037109375, -0.01393890380859375, 0.018829345703125, -0.041595458984375, -0.01024627685546875, 0.01456451416015625, -0.041778564453125, -0.044189453125, 0.0089569091796875, 0.03948974609375, 0.0273895263671875, -0.01493072509765625, 0.035888671875, 0.01122283935546875, 0.0273590087890625, -0.045379638671875, 0.024749755859375, -0.044525146484375, -0.0110626220703125, -0.0239715576171875, -0.01120758056640625, -0.0207977294921875, -0.0172119140625, -0.015899658203125, -0.056182861328125, -0.0005049705505371094, 0.0029201507568359375, 0.08551025390625, 0.03924560546875, -0.018646240234375, -0.004421234130859375, -0.0230865478515625, 0.068359375, -0.0628662109375, 0.01168060302734375, 0.027587890625, 0.00872039794921875, -0.0306243896484375, -0.0506591796875, -0.0506591796875, -0.021026611328125, -0.00525665283203125, 0.0129547119140625, -0.0201873779296875, -0.0083770751953125, 0.02142333984375, 0.0318603515625, -0.0284271240234375, 0.038848876953125, -0.032806396484375, -0.01213836669921875, 0.05718994140625, 0.01244354248046875, 0.00630950927734375, -0.01120758056640625, -0.0408935546875, -0.037353515625, -0.0565185546875, 0.024322509765625, 0.030029296875, 0.00699615478515625, -0.03228759765625, 0.051055908203125, -0.01131439208984375, 0.02825927734375, 0.0009441375732421875, -0.0418701171875, 0.04669189453125, -0.0302886962890625, -0.025726318359375, -0.00732421875, 0.073974609375, 
0.034881591796875, 0.004703521728515625, 0.0070953369140625, -0.01348114013671875, 0.0260009765625, -0.012115478515625, -0.05230712890625, -0.00980377197265625, 0.0228118896484375, -0.0224761962890625, -0.0183868408203125, -0.01430511474609375, -0.04400634765625, -0.0214996337890625, -0.010528564453125, 0.029510498046875, -0.032440185546875, -0.0345458984375, 0.0150909423828125, -0.00556182861328125, 0.038360595703125, 0.0196380615234375, -0.04852294921875, 0.02886962890625, 0.04681396484375, 0.06561279296875, -0.0238494873046875, -0.0469970703125, -0.0251922607421875, 0.0011949539184570312, -0.018646240234375, 0.059539794921875, -0.0042724609375, -0.017059326171875, -0.0206756591796875, 0.01029205322265625, -0.01116943359375, -0.047698974609375, 0.034637451171875, -0.0202178955078125, 0.014801025390625, -0.0165557861328125, -0.0301361083984375, -0.0286407470703125, -0.004840850830078125, -0.0293426513671875, 0.103515625, 0.01436614990234375, -0.058990478515625, 0.00972747802734375, -0.049774169921875, -0.0274810791015625, -0.00939178466796875, 0.00791168212890625, -0.04315185546875, -0.006793975830078125, 0.01044464111328125, 0.03363037109375, -0.039031982421875, 0.0238494873046875, -0.021453857421875, -0.02435302734375, 0.017791748046875, -0.00896453857421875, 0.0767822265625, 0.0167694091796875, -0.04510498046875, 0.006488800048828125, -0.045379638671875, -0.00787353515625, 0.03851318359375, -0.024749755859375, -0.012176513671875, -0.00782012939453125, -0.0085906982421875, 0.006145477294921875, 0.0286407470703125, -0.03350830078125, 0.00824737548828125, -0.023956298828125, 0.044036865234375, 0.0557861328125, -0.00693511962890625, 0.0181121826171875, -0.037994384765625, 0.0262451171875, 0.003093719482421875, 0.0225982666015625, 0.004268646240234375, -0.052337646484375, -0.0810546875, -0.023529052734375, 0.00417327880859375, 0.060150146484375, -0.0279998779296875, 0.042083740234375, -0.003253936767578125, -0.04803466796875, -0.042388916015625, 0.0271148681640625, 
0.043182373046875, 0.03973388671875, 0.03594970703125, -0.03411865234375, -0.04669189453125, -0.0643310546875, -0.005565643310546875, -0.029510498046875, 0.00974273681640625, 0.0189361572265625, 0.050689697265625, -0.0246429443359375, 0.048919677734375, -0.035797119140625, -0.0257568359375, -0.0191192626953125, -0.0027141571044921875, 0.02691650390625, 0.045745849609375, 0.03961181640625, -0.0178375244140625, -0.0182342529296875, -0.0167694091796875, -0.06402587890625, -0.0177154541015625, -0.0008087158203125, -0.0170135498046875, 0.035308837890625, 0.01433563232421875, -0.064208984375, 0.0245361328125, 0.035552978515625, -0.01316070556640625, 0.039093017578125, -0.01198577880859375, -0.00876617431640625, -0.06500244140625, 0.007213592529296875, 0.0005950927734375, -0.0008058547973632812, -0.03662109375, 0.016448974609375, -0.003269195556640625, 0.00867462158203125, -0.047332763671875, 0.047882080078125, -0.0401611328125, -0.0147552490234375, -0.00832366943359375, 0.0132904052734375, -0.01065826416015625, 0.05157470703125, -0.00624847412109375, 0.06671142578125, 0.039093017578125, -0.043182373046875, 0.015716552734375, 0.030517578125, -0.0300750732421875, 0.01910400390625, -0.0634765625, 0.0163421630859375, 0.01226806640625, 0.0271148681640625, -0.08251953125, -0.01280975341796875, 0.0247650146484375, -0.024749755859375, 0.0230712890625, 0.0089263916015625, -0.05706787109375, -0.03533935546875, -0.0411376953125, 0.0238800048828125, 0.06756591796875, -0.046417236328125, 0.019439697265625, 0.036346435546875, 0.0014028549194335938, -0.048126220703125, -0.059722900390625, -0.01436614990234375, -0.028961181640625, -0.052337646484375, 0.01203155517578125, -0.01006317138671875, -0.0180816650390625, -0.0186920166015625, -0.00403594970703125, 0.00975799560546875, 0.015533447265625, 0.0396728515625, 0.03192138671875, -0.0111236572265625, -0.005214691162109375, -0.00021517276763916016, -0.017181396484375, -0.001987457275390625, 0.0070648193359375, 0.0494384765625, 
-0.0267486572265625, -0.0145263671875, -0.062103271484375, 0.002948760986328125, 0.034454345703125, -0.0204925537109375, 0.046783447265625, 0.046661376953125, -0.01276397705078125, 0.0144805908203125, -0.06146240234375, -0.0128936767578125, -0.037200927734375, 0.02801513671875, -0.01885986328125, -0.055511474609375, 0.04595947265625, -0.000415802001953125, 0.0135650634765625, 0.05419921875, 0.05987548828125, -0.003253936767578125, 0.056488037109375, 0.038360595703125, -0.0008797645568847656, 0.030548095703125, -0.053009033203125, 0.004547119140625, -0.08099365234375, -0.0277099609375, -0.0277099609375, -0.0254058837890625, -0.04425048828125, -0.040863037109375, 0.01297760009765625, 0.022186279296875, -0.042755126953125, 0.040985107421875, -0.036346435546875, 0.01605224609375, 0.042633056640625, 0.007808685302734375, 0.0177764892578125, 0.00823974609375, -0.00785064697265625, 0.00116729736328125, -0.04766845703125, -0.042083740234375, 0.089111328125, 0.04327392578125, 0.059814453125, 0.0022678375244140625, 0.047698974609375, -0.0068817138671875, 0.024627685546875, -0.048736572265625, 0.051300048828125, -0.007129669189453125, -0.034149169921875, -0.004581451416015625, -0.01434326171875, -0.06854248046875, 0.0223236083984375, -0.0013713836669921875, -0.05859375, 0.01248931884765625, 0.001117706298828125, -0.0297393798828125, 0.02703857421875, -0.068359375, 0.053314208984375, -0.034332275390625, -0.0284881591796875, -0.02362060546875, -0.0501708984375, 0.05157470703125, -0.007312774658203125, 0.0058746337890625, -0.0230712890625, -0.01393890380859375, 0.08203125, -0.04913330078125, 0.07421875, -0.01459503173828125, -0.0009250640869140625, 0.037689208984375, -0.004161834716796875, 0.0399169921875, 0.0074005126953125, 0.002346038818359375, 0.03717041015625, -0.003047943115234375, -0.021026611328125, -0.01430511474609375, 0.0584716796875, -0.09613037109375, -0.04925537109375, -0.041412353515625, -0.052154541015625, 0.0037975311279296875, 0.003986358642578125, 
0.01448822021484375, 0.0031337738037109375, 0.02130126953125, 0.0038280487060546875, 0.041748046875, -0.0309295654296875, 0.045623779296875, 0.03936767578125, -0.0011653900146484375, -0.031982421875, 0.0506591796875, 0.001712799072265625, 0.0199127197265625, 0.01041412353515625, 0.00605010986328125, -0.0238037109375, -0.0304718017578125, -0.0185089111328125, 0.048553466796875, -0.04473876953125, -0.040679931640625, -0.041748046875, -0.020751953125, -0.0195159912109375, 0.0027980804443359375, -0.039215087890625, -0.03521728515625, -0.0460205078125, -0.002544403076171875, 0.0517578125, 0.0411376953125, -0.015869140625, 0.046478271484375, -0.0161895751953125, 0.023468017578125, 0.0216217041015625, 0.0205841064453125, 0.0016326904296875, -0.057769775390625, 0.008392333984375, 0.004222869873046875, -0.04205322265625, -0.0550537109375, 0.031280517578125, 0.014434814453125, 0.054534912109375, 0.0117950439453125, -0.0010528564453125, 0.0662841796875, -0.01387786865234375, 0.06402587890625, 0.025909423828125, -0.061614990234375, 0.052337646484375, -0.003215789794921875, 0.00457000732421875, 0.035308837890625, 0.0230865478515625, -0.01201629638671875, -0.0292510986328125, -0.0552978515625, -0.0650634765625, 0.06292724609375, 0.02105712890625, -0.0088958740234375, 0.018951416015625, 0.02972412109375, 0.01218414306640625, 0.007595062255859375, -0.059967041015625, -0.028594970703125, -0.0208587646484375, -0.00017583370208740234, -0.01538848876953125, -0.0224151611328125, -0.0170745849609375, -0.035430908203125, 0.05584716796875, 0.0008749961853027344, 0.038848876953125, 0.01367950439453125, -0.0275421142578125, -0.016326904296875, 0.003314971923828125, 0.046722412109375, 0.041351318359375, -0.0318603515625, -0.001331329345703125, 0.0278778076171875, -0.04595947265625, 0.0170135498046875, 0.01024627685546875, -0.0032863616943359375, -0.01285552978515625, 0.02880859375, 0.08270263671875, 0.0083770751953125, -0.0462646484375, 0.0340576171875, -0.0019683837890625, 
-0.0163421630859375, -0.0260467529296875, 0.0130462646484375, 0.00872039794921875, 0.0267333984375, 0.022003173828125, -0.00266265869140625, -0.022216796875, -0.03662109375, -0.004367828369140625, 0.031463623046875, 0.0084228515625, -0.0292816162109375, 0.06231689453125, 0.00405120849609375, -0.0213775634765625, 0.0443115234375, -0.016265869140625, -0.0309906005859375, 0.055755615234375, 0.05181884765625, 0.048583984375, -0.0093994140625, 0.0006208419799804688, 0.03662109375, 0.032318115234375, -0.01678466796875, 0.0343017578125, 0.0095977783203125, -0.0384521484375, -0.029510498046875, -0.04595947265625, -0.019805908203125, 0.032806396484375, -0.034454345703125, 0.0279693603515625, -0.042388916015625, -0.0213623046875, -0.00951385498046875, 0.033203125, -0.0513916015625, -0.0062103271484375, 0.004116058349609375, 0.07489013671875, -0.07135009765625, 0.061767578125, 0.05181884765625, -0.039398193359375, -0.0728759765625, -0.02899169921875, -0.00408935546875, -0.08343505859375, 0.04949951171875, 0.01336669921875, 0.00524139404296875, -0.0092620849609375, -0.050994873046875, -0.0775146484375, 0.11419677734375, 0.046478271484375, -0.04522705078125, 0.0249786376953125, 0.005809783935546875, 0.0399169921875, -0.019378662109375, 0.031463623046875, 0.05474853515625, 0.037384033203125, 0.004802703857421875, -0.09051513671875, 0.0204315185546875, -0.0211181640625, 0.006603240966796875, -0.0006079673767089844, -0.08331298828125, 0.08251953125, -0.032867431640625, -0.01482391357421875, 0.02301025390625, 0.04718017578125, 0.0567626953125, 0.0163421630859375, 0.032440185546875, 0.06976318359375, 0.059967041015625, -0.00708770751953125, 0.089111328125, -0.0209197998046875, 0.03729248046875, 0.07733154296875, -0.0095367431640625, 0.07879638671875, 0.0413818359375, -0.03936767578125, 0.047515869140625, 0.06854248046875, -0.00724029541015625, 0.0439453125, 0.00772857666015625, 0.01119232177734375, -0.005626678466796875, -0.004047393798828125, -0.046417236328125, 0.028900146484375, 
0.021240234375, -0.00687408447265625, -0.006694793701171875, -0.0149993896484375, 0.0184173583984375, -0.02703857421875, -0.00646209716796875, 0.042022705078125, 0.0185394287109375, -0.05322265625, 0.08477783203125, 0.0020904541015625, 0.07098388671875, -0.0421142578125, 0.0158538818359375, -0.0377197265625, 0.019744873046875, -0.02349853515625, -0.057464599609375, -0.0029163360595703125, -0.0028400421142578125, 0.0079345703125, -0.00202178955078125, 0.04644775390625, -0.007183074951171875, -0.025970458984375, 0.035797119140625, 0.0243988037109375, 0.0262908935546875, 0.0137176513671875, -0.052093505859375, 0.0174713134765625, -0.00919342041015625, -0.0285491943359375, 0.0287322998046875, 0.00286865234375, -0.0146942138671875, 0.050384521484375, 0.05389404296875, 0.00028252601623535156, 0.018280029296875, -0.0095672607421875, 0.0740966796875, -0.0285186767578125, -0.026123046875, -0.05645751953125, 0.033782958984375, 0.006839752197265625, -0.046417236328125, 0.055267333984375, 0.04205322265625, 0.060516357421875, 0.004993438720703125, 0.04547119140625, -0.0115203857421875, 0.0244598388671875, -0.0293426513671875, 0.03729248046875, -0.04473876953125, 0.0285491943359375, -0.00553131103515625, -0.07354736328125, -0.0099639892578125, 0.05029296875, -0.0223388671875, -0.007465362548828125, 0.062042236328125, 0.06878662109375, -0.0090179443359375, -0.021514892578125, -0.00887298583984375, 0.03533935546875, 0.014862060546875, 0.07098388671875, 0.06622314453125, -0.05816650390625, 0.0386962890625, -0.040008544921875, -0.0277862548828125, -0.018768310546875, -0.059967041015625, -0.07037353515625, -0.037384033203125, -0.0379638671875, -0.028411865234375, -0.00012814998626708984, 0.05401611328125, 0.041900634765625, -0.06280517578125, -0.04290771484375, -0.0007548332214355469, 0.006145477294921875, -0.0167083740234375, -0.0128173828125, 0.03692626953125, -0.0176544189453125, -0.032623291015625, 0.01470184326171875, 0.0081329345703125, 0.015869140625, -0.025604248046875, 
-0.0257110595703125, -0.0238800048828125, -0.01263427734375, 0.035797119140625, 0.03204345703125, -0.0699462890625, -0.004550933837890625, -0.004650115966796875, -0.0081939697265625, 0.0172271728515625, 0.033233642578125, -0.062255859375, 0.00235748291015625, 0.033111572265625, 0.028228759765625, 0.05072021484375, -0.0166473388671875, 0.01055145263671875, -0.038848876953125, 0.03936767578125, -0.0119171142578125, 0.03363037109375, 0.0340576171875, -0.0215606689453125, 0.0540771484375, 0.0308837890625, -0.040008544921875, -0.07989501953125, -0.00916290283203125, -0.09161376953125, -0.010223388671875, 0.11419677734375, -0.0108642578125, -0.0308685302734375, 0.0169677734375, -0.016998291015625, 0.03594970703125, -0.03253173828125, 0.052001953125, 0.024322509765625, -0.01425933837890625, -0.010162353515625, -0.055572509765625, 0.020843505859375, 0.031585693359375, -0.0673828125, -0.012420654296875, 0.01274871826171875, 0.03814697265625, 0.01399993896484375, 0.04425048828125, 0.0013751983642578125, 0.016876220703125, -0.01131439208984375, 0.00797271728515625, -0.0141448974609375, -0.0004336833953857422, -0.028289794921875, -0.0181121826171875, 0.005641937255859375, -0.021636962890625 ] ]
Mahmoud8/roberta-base
2023-10-20T00:13:45.000Z
[ "transformers", "pytorch", "tensorboard", "roberta", "text-classification", "generated_from_trainer", "license:mit", "endpoints_compatible", "region:us" ]
text-classification
Mahmoud8
null
null
Mahmoud8/roberta-base
0
8,414
transformers
2023-10-19T23:34:10
--- license: mit tags: - generated_from_trainer metrics: - accuracy model-index: - name: roberta-base results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # roberta-base This model is a fine-tuned version of [roberta-base](https://huggingface.co/roberta-base) on the None dataset. It achieves the following results on the evaluation set: - Loss: 0.1585 - Accuracy: 0.9762 ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 4 - eval_batch_size: 4 - seed: 42 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 10 - mixed_precision_training: Native AMP ### Training results | Training Loss | Epoch | Step | Validation Loss | Accuracy | |:-------------:|:-----:|:-----:|:---------------:|:--------:| | 0.2838 | 1.0 | 3492 | 0.2001 | 0.9628 | | 0.1463 | 2.0 | 6984 | 0.1663 | 0.9725 | | 0.0922 | 3.0 | 10476 | 0.1962 | 0.9728 | | 0.081 | 4.0 | 13968 | 0.1684 | 0.9725 | | 0.0487 | 5.0 | 17460 | 0.1585 | 0.9762 | | 0.0443 | 6.0 | 20952 | 0.1707 | 0.9762 | | 0.0216 | 7.0 | 24444 | 0.1984 | 0.9765 | | 0.0341 | 8.0 | 27936 | 0.1892 | 0.9751 | ### Framework versions - Transformers 4.28.0 - Pytorch 2.0.0 - Datasets 2.1.0 - Tokenizers 0.13.3
1,774
[ [ -0.027130126953125, -0.0445556640625, 0.01483154296875, -0.00034332275390625, -0.022613525390625, -0.029815673828125, -0.0172576904296875, -0.01282501220703125, 0.0133819580078125, 0.028411865234375, -0.057159423828125, -0.0552978515625, -0.056793212890625, -0.0101165771484375, -0.0127716064453125, 0.09271240234375, 0.01824951171875, 0.0347900390625, -0.0023956298828125, -0.012420654296875, -0.032745361328125, -0.0430908203125, -0.049041748046875, -0.047576904296875, 0.0174560546875, 0.029052734375, 0.0504150390625, 0.056915283203125, 0.038360595703125, 0.01119232177734375, -0.0242156982421875, -0.00664520263671875, -0.0467529296875, -0.0299530029296875, 0.0019664764404296875, -0.036834716796875, -0.056549072265625, 0.0016937255859375, 0.043701171875, 0.031585693359375, -0.0108795166015625, 0.04278564453125, -0.005496978759765625, 0.0426025390625, -0.0328369140625, 0.0196380615234375, -0.032257080078125, 0.0122528076171875, -0.0201568603515625, -0.029022216796875, -0.0311431884765625, -0.005290985107421875, 0.00379180908203125, -0.033294677734375, 0.0364990234375, 0.0017709732055664062, 0.0999755859375, 0.02764892578125, -0.029510498046875, 0.0087127685546875, -0.0579833984375, 0.053741455078125, -0.048126220703125, 0.01971435546875, 0.028411865234375, 0.03082275390625, 0.005565643310546875, -0.043548583984375, -0.03076171875, -0.0002224445343017578, 0.00469207763671875, 0.0171966552734375, -0.0177459716796875, -0.0006365776062011719, 0.04486083984375, 0.035675048828125, -0.0582275390625, 0.002742767333984375, -0.03778076171875, -0.00200653076171875, 0.03662109375, 0.030975341796875, -0.00920867919921875, -0.0191192626953125, -0.033935546875, -0.0188751220703125, -0.03021240234375, 0.005512237548828125, 0.03924560546875, 0.024688720703125, -0.04168701171875, 0.04254150390625, -0.0203857421875, 0.07257080078125, 0.01128387451171875, -0.0179443359375, 0.051116943359375, 0.0002772808074951172, -0.0316162109375, -0.01352691650390625, 0.051544189453125, 
0.039703369140625, 0.016326904296875, 0.01033782958984375, -0.01192474365234375, -0.006610870361328125, 0.02044677734375, -0.061309814453125, -0.0318603515625, 0.0179290771484375, -0.045745849609375, -0.047882080078125, 0.0163421630859375, -0.043426513671875, 0.01971435546875, -0.03167724609375, 0.029327392578125, -0.033050537109375, -0.0131683349609375, 0.007579803466796875, -0.0102081298828125, 0.0232391357421875, 0.01403045654296875, -0.064208984375, 0.03289794921875, 0.02984619140625, 0.04254150390625, 0.004810333251953125, -0.01503753662109375, -0.018829345703125, 0.00974273681640625, -0.0207672119140625, 0.048187255859375, -0.00994110107421875, -0.03375244140625, -0.018157958984375, 0.0135650634765625, -0.010040283203125, -0.0306854248046875, 0.0628662109375, -0.0223846435546875, 0.0248260498046875, 0.00518035888671875, -0.04266357421875, -0.017120361328125, 0.03533935546875, -0.044525146484375, 0.08990478515625, 0.021392822265625, -0.054229736328125, 0.038665771484375, -0.041839599609375, -0.004528045654296875, -0.005657196044921875, -0.007808685302734375, -0.06524658203125, -0.004337310791015625, 0.00490570068359375, 0.021820068359375, -0.0286712646484375, 0.0241851806640625, -0.0215911865234375, -0.03131103515625, -0.0012216567993164062, -0.047271728515625, 0.07293701171875, 0.00482940673828125, -0.03778076171875, 0.0247039794921875, -0.10174560546875, 0.0338134765625, 0.020172119140625, -0.03253173828125, 0.00933837890625, -0.0384521484375, 0.0323486328125, 0.0142059326171875, 0.02288818359375, -0.031005859375, 0.01267242431640625, -0.02606201171875, 0.012908935546875, 0.04449462890625, 0.00911712646484375, 0.01309967041015625, -0.042694091796875, 0.031585693359375, 0.007114410400390625, 0.0360107421875, 0.0184326171875, -0.04473876953125, -0.061309814453125, -0.019622802734375, 0.02838134765625, 0.0328369140625, -0.010650634765625, 0.05999755859375, -0.0157470703125, -0.058685302734375, -0.01806640625, 0.00045418739318847656, 0.044708251953125, 
0.048309326171875, 0.03594970703125, -0.0129241943359375, -0.034332275390625, -0.076904296875, 0.0031642913818359375, 0.0008845329284667969, 0.01503753662109375, 0.027130126953125, 0.05438232421875, -0.0035495758056640625, 0.05780029296875, -0.042938232421875, -0.0230560302734375, -0.013702392578125, 0.01074981689453125, 0.04150390625, 0.06280517578125, 0.06341552734375, -0.038482666015625, -0.0269317626953125, -0.01464080810546875, -0.051300048828125, 0.024993896484375, -0.006084442138671875, -0.0257568359375, 0.00885009765625, -0.0073699951171875, -0.044525146484375, 0.057525634765625, 0.03814697265625, -0.0218658447265625, 0.05047607421875, -0.035369873046875, -0.004261016845703125, -0.0888671875, 0.0224151611328125, 0.0070648193359375, -0.0113677978515625, -0.0244598388671875, 0.004131317138671875, 0.013031005859375, -0.0163726806640625, -0.023162841796875, 0.04345703125, -0.0264892578125, -0.005237579345703125, -0.006832122802734375, -0.0219879150390625, 0.00247955322265625, 0.05682373046875, 0.01503753662109375, 0.050567626953125, 0.047119140625, -0.031585693359375, 0.0239715576171875, 0.038299560546875, -0.0174560546875, 0.044830322265625, -0.060699462890625, 0.01561737060546875, 0.004604339599609375, 0.01068878173828125, -0.052947998046875, -0.0229339599609375, 0.0253143310546875, -0.049163818359375, 0.0204315185546875, -0.041839599609375, -0.016937255859375, -0.02935791015625, -0.00678253173828125, 0.01531219482421875, 0.0458984375, -0.035308837890625, 0.03436279296875, -0.004962921142578125, 0.03240966796875, -0.039703369140625, -0.0601806640625, -0.01568603515625, -0.014404296875, -0.035797119140625, 0.01568603515625, -0.0022125244140625, 0.004856109619140625, 0.003932952880859375, -0.003116607666015625, -0.0256805419921875, -0.00713348388671875, 0.0374755859375, 0.032135009765625, -0.0143280029296875, -0.010955810546875, -0.0160675048828125, -0.033477783203125, 0.0070648193359375, -0.01203155517578125, 0.054229736328125, -0.00963592529296875, 
-0.028533935546875, -0.06317138671875, -0.00487518310546875, 0.035369873046875, -0.022979736328125, 0.0675048828125, 0.04437255859375, -0.03424072265625, -0.0106658935546875, -0.0263824462890625, -0.002521514892578125, -0.026275634765625, 0.02899169921875, -0.044097900390625, -0.01457977294921875, 0.045166015625, 0.0039825439453125, -0.004421234130859375, 0.06561279296875, 0.039703369140625, 0.01015472412109375, 0.077392578125, 0.019287109375, -0.00001531839370727539, 0.024688720703125, -0.0743408203125, -0.0121612548828125, -0.06158447265625, -0.034576416015625, -0.050994873046875, -0.0174560546875, -0.038177490234375, -0.0157012939453125, 0.021636962890625, 0.004955291748046875, -0.0531005859375, 0.0244903564453125, -0.042022705078125, 0.022430419921875, 0.0665283203125, 0.032501220703125, 0.0063323974609375, 0.0035552978515625, -0.007648468017578125, -0.00998687744140625, -0.058837890625, -0.033599853515625, 0.1099853515625, 0.01666259765625, 0.056182861328125, 0.001010894775390625, 0.0614013671875, -0.0020732879638671875, 0.01255035400390625, -0.04205322265625, 0.0185546875, 0.0025539398193359375, -0.0760498046875, -0.0128936767578125, -0.031158447265625, -0.047943115234375, 0.00617218017578125, -0.035919189453125, -0.04119873046875, 0.01812744140625, 0.021942138671875, -0.0253753662109375, 0.03436279296875, -0.0264129638671875, 0.0821533203125, -0.0270233154296875, -0.030792236328125, -0.01134490966796875, -0.0362548828125, 0.0182037353515625, 0.01183319091796875, -0.0146636962890625, 0.00482177734375, 0.022430419921875, 0.054656982421875, -0.051422119140625, 0.04620361328125, -0.0267791748046875, 0.0294647216796875, 0.01739501953125, -0.005889892578125, 0.051971435546875, 0.015228271484375, -0.01525115966796875, 0.018798828125, -0.00278472900390625, -0.049041748046875, -0.039093017578125, 0.053009033203125, -0.08258056640625, -0.0173187255859375, -0.051361083984375, -0.03131103515625, -0.00814056396484375, 0.027313232421875, 0.0433349609375, 0.0557861328125, 
-0.007251739501953125, 0.0231170654296875, 0.039459228515625, 0.00705718994140625, 0.006805419921875, 0.02752685546875, 0.0022869110107421875, -0.052825927734375, 0.055694580078125, -0.005298614501953125, 0.016204833984375, -0.002895355224609375, 0.0010042190551757812, -0.0232391357421875, -0.048126220703125, -0.05596923828125, 0.0160980224609375, -0.044342041015625, -0.020782470703125, -0.0220489501953125, -0.025604248046875, -0.0196380615234375, -0.0019664764404296875, -0.037628173828125, -0.029144287109375, -0.046905517578125, -0.01171112060546875, 0.037200927734375, 0.045928955078125, 0.00646209716796875, 0.039825439453125, -0.043701171875, -0.0040435791015625, -0.0071563720703125, 0.0294036865234375, -0.0013246536254882812, -0.06890869140625, -0.021636962890625, 0.004016876220703125, -0.028411865234375, -0.050537109375, 0.040283203125, 0.006275177001953125, 0.04437255859375, 0.03717041015625, -0.01470947265625, 0.07977294921875, -0.022979736328125, 0.06097412109375, 0.0249786376953125, -0.046234130859375, 0.04571533203125, -0.028656005859375, 0.007686614990234375, 0.0487060546875, 0.0309906005859375, -0.002498626708984375, -0.00010287761688232422, -0.099365234375, -0.05462646484375, 0.06683349609375, 0.0233306884765625, 0.0007224082946777344, 0.006908416748046875, 0.03106689453125, -0.01363372802734375, 0.01132965087890625, -0.0582275390625, -0.048614501953125, -0.017059326171875, -0.02105712890625, -0.00260162353515625, -0.0164947509765625, -0.02813720703125, -0.03643798828125, 0.08013916015625, -0.0026187896728515625, 0.020294189453125, 0.007656097412109375, 0.005580902099609375, -0.0215301513671875, -0.00272369384765625, 0.05255126953125, 0.0594482421875, -0.049102783203125, -0.017669677734375, 0.0159759521484375, -0.0193023681640625, 0.002857208251953125, 0.00794219970703125, -0.0206756591796875, 0.01366424560546875, 0.0323486328125, 0.072998046875, 0.0245208740234375, -0.0193023681640625, 0.0404052734375, -0.0030059814453125, -0.03448486328125, 
-0.052276611328125, 0.0134429931640625, -0.00958251953125, 0.014739990234375, 0.0157012939453125, 0.042999267578125, -0.0001919269561767578, -0.0208892822265625, 0.0141143798828125, 0.0263671875, -0.03509521484375, -0.0140838623046875, 0.06982421875, -0.0018510818481445312, -0.0272979736328125, 0.051116943359375, -0.005889892578125, -0.0189208984375, 0.0648193359375, 0.045318603515625, 0.0552978515625, 0.00021946430206298828, -0.00537109375, 0.063720703125, 0.0038166046142578125, -0.00531768798828125, 0.042724609375, 0.00970458984375, -0.0302276611328125, -0.0018405914306640625, -0.04827880859375, -0.0247344970703125, 0.035400390625, -0.09344482421875, 0.033477783203125, -0.05096435546875, -0.0411376953125, 0.006328582763671875, 0.01435089111328125, -0.0689697265625, 0.042388916015625, -0.007587432861328125, 0.08465576171875, -0.0648193359375, 0.06671142578125, 0.047393798828125, -0.051116943359375, -0.08123779296875, -0.01263427734375, -0.0043792724609375, -0.06817626953125, 0.0638427734375, -0.0084228515625, 0.018402099609375, 0.012908935546875, -0.0340576171875, -0.07135009765625, 0.08306884765625, 0.0113525390625, -0.04241943359375, 0.0026226043701171875, 0.005634307861328125, 0.0467529296875, -0.01947021484375, 0.046875, 0.0165252685546875, 0.025787353515625, 0.01517486572265625, -0.0703125, -0.01190948486328125, -0.02520751953125, 0.0142364501953125, 0.00374603271484375, -0.0609130859375, 0.08392333984375, 0.001995086669921875, 0.0279998779296875, 0.0255279541015625, 0.03857421875, 0.0214385986328125, 0.00830841064453125, 0.0241851806640625, 0.0777587890625, 0.047119140625, -0.01727294921875, 0.07257080078125, -0.058441162109375, 0.06512451171875, 0.092041015625, 0.00016689300537109375, 0.04974365234375, 0.0160064697265625, -0.0229034423828125, 0.03948974609375, 0.054595947265625, -0.031707763671875, 0.027130126953125, 0.0089263916015625, -0.0029621124267578125, -0.0287628173828125, 0.0240631103515625, -0.045196533203125, 0.0240020751953125, 
0.0025177001953125, -0.050079345703125, -0.023651123046875, -0.0158843994140625, -0.00281524658203125, -0.0094451904296875, -0.016754150390625, 0.04132080078125, -0.034881591796875, -0.0218658447265625, 0.067626953125, 0.01078033447265625, 0.02679443359375, -0.047454833984375, -0.0148773193359375, -0.0024509429931640625, 0.031463623046875, -0.027252197265625, -0.04083251953125, 0.014739990234375, 0.010498046875, -0.0252838134765625, 0.00348663330078125, 0.0293426513671875, -0.01255035400390625, -0.05657958984375, 0.011505126953125, 0.025787353515625, 0.0214996337890625, 0.01198577880859375, -0.069580078125, -0.000014603137969970703, 0.0017871856689453125, -0.033294677734375, 0.0026226043701171875, 0.020294189453125, 0.0022449493408203125, 0.040740966796875, 0.050201416015625, 0.01206207275390625, 0.01007080078125, 0.0141448974609375, 0.0709228515625, -0.047454833984375, -0.041412353515625, -0.0400390625, 0.03997802734375, -0.01505279541015625, -0.0743408203125, 0.04931640625, 0.082275390625, 0.06634521484375, -0.0170745849609375, 0.04779052734375, -0.005771636962890625, 0.03997802734375, -0.039337158203125, 0.04010009765625, -0.04205322265625, -0.00861358642578125, -0.0098419189453125, -0.059326171875, -0.0158843994140625, 0.061767578125, -0.0298309326171875, 0.017669677734375, 0.03155517578125, 0.053680419921875, -0.004924774169921875, -0.00016963481903076172, 0.01351165771484375, 0.00017118453979492188, 0.00824737548828125, 0.039093017578125, 0.023223876953125, -0.062255859375, 0.031341552734375, -0.04241943359375, -0.0175933837890625, -0.01256561279296875, -0.037933349609375, -0.07183837890625, -0.0206298828125, -0.0233917236328125, -0.038818359375, 0.014495849609375, 0.0789794921875, 0.07464599609375, -0.061920166015625, -0.024749755859375, -0.01090240478515625, -0.02862548828125, -0.016937255859375, -0.018707275390625, 0.042510986328125, -0.01201629638671875, -0.06219482421875, 0.0023899078369140625, -0.0228118896484375, 0.021942138671875, 
-0.01425933837890625, -0.022552490234375, -0.02020263671875, -0.028045654296875, -0.00016963481903076172, 0.00408935546875, -0.038177490234375, -0.0243682861328125, -0.018157958984375, -0.00452423095703125, 0.0254058837890625, 0.0140838623046875, -0.03778076171875, 0.0231475830078125, 0.01690673828125, 0.014739990234375, 0.06915283203125, 0.0157012939453125, 0.0143280029296875, -0.0531005859375, 0.03448486328125, 0.018585205078125, 0.0250091552734375, 0.0010137557983398438, -0.03240966796875, 0.040863037109375, 0.034088134765625, -0.0400390625, -0.05621337890625, -0.006805419921875, -0.0865478515625, 0.0099945068359375, 0.07147216796875, -0.0086212158203125, -0.045013427734375, 0.0210723876953125, -0.0135650634765625, 0.0170745849609375, -0.0232696533203125, 0.032012939453125, 0.048065185546875, -0.021759033203125, -0.0034008026123046875, -0.040863037109375, 0.03692626953125, 0.0235137939453125, -0.044708251953125, -0.01348876953125, 0.0360107421875, 0.05438232421875, 0.0115509033203125, 0.031494140625, -0.0149383544921875, 0.0281982421875, 0.01435089111328125, 0.03375244140625, -0.03375244140625, -0.02862548828125, -0.03472900390625, 0.00563812255859375, 0.014434814453125, -0.0311737060546875 ] ]
facebook/wav2vec2-large-960h
2022-04-05T16:40:42.000Z
[ "transformers", "pytorch", "wav2vec2", "automatic-speech-recognition", "speech", "en", "dataset:librispeech_asr", "arxiv:2006.11477", "license:apache-2.0", "endpoints_compatible", "has_space", "region:us" ]
automatic-speech-recognition
facebook
null
null
facebook/wav2vec2-large-960h
15
8,396
transformers
2022-03-02T23:29:05
--- language: en datasets: - librispeech_asr tags: - speech license: apache-2.0 --- # Wav2Vec2-Large-960h [Facebook's Wav2Vec2](https://ai.facebook.com/blog/wav2vec-20-learning-the-structure-of-speech-from-raw-audio/) The large model pretrained and fine-tuned on 960 hours of Librispeech on 16kHz sampled speech audio. When using the model make sure that your speech input is also sampled at 16Khz. [Paper](https://arxiv.org/abs/2006.11477) Authors: Alexei Baevski, Henry Zhou, Abdelrahman Mohamed, Michael Auli **Abstract** We show for the first time that learning powerful representations from speech audio alone followed by fine-tuning on transcribed speech can outperform the best semi-supervised methods while being conceptually simpler. wav2vec 2.0 masks the speech input in the latent space and solves a contrastive task defined over a quantization of the latent representations which are jointly learned. Experiments using all labeled data of Librispeech achieve 1.8/3.3 WER on the clean/other test sets. When lowering the amount of labeled data to one hour, wav2vec 2.0 outperforms the previous state of the art on the 100 hour subset while using 100 times less labeled data. Using just ten minutes of labeled data and pre-training on 53k hours of unlabeled data still achieves 4.8/8.2 WER. This demonstrates the feasibility of speech recognition with limited amounts of labeled data. The original model can be found under https://github.com/pytorch/fairseq/tree/master/examples/wav2vec#wav2vec-20. 
# Usage To transcribe audio files the model can be used as a standalone acoustic model as follows: ```python from transformers import Wav2Vec2Processor, Wav2Vec2ForCTC from datasets import load_dataset import torch # load model and processor processor = Wav2Vec2Processor.from_pretrained("facebook/wav2vec2-large-960h") model = Wav2Vec2ForCTC.from_pretrained("facebook/wav2vec2-large-960h") # load dummy dataset and read soundfiles ds = load_dataset("patrickvonplaten/librispeech_asr_dummy", "clean", split="validation") # tokenize input_values = processor(ds[0]["audio"]["array"],, return_tensors="pt", padding="longest").input_values # Batch size 1 # retrieve logits logits = model(input_values).logits # take argmax and decode predicted_ids = torch.argmax(logits, dim=-1) transcription = processor.batch_decode(predicted_ids) ``` ## Evaluation This code snippet shows how to evaluate **facebook/wav2vec2-large-960h** on LibriSpeech's "clean" and "other" test data. ```python from datasets import load_dataset from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor import soundfile as sf import torch from jiwer import wer librispeech_eval = load_dataset("librispeech_asr", "clean", split="test") model = Wav2Vec2ForCTC.from_pretrained("facebook/wav2vec2-large-960h").to("cuda") processor = Wav2Vec2Processor.from_pretrained("facebook/wav2vec2-large-960h") def map_to_pred(batch): input_values = processor(batch["audio"]["array"], return_tensors="pt", padding="longest").input_values with torch.no_grad(): logits = model(input_values.to("cuda")).logits predicted_ids = torch.argmax(logits, dim=-1) transcription = processor.batch_decode(predicted_ids) batch["transcription"] = transcription return batch result = librispeech_eval.map(map_to_pred, batched=True, batch_size=1, remove_columns=["speech"]) print("WER:", wer(result["text"], result["transcription"])) ``` *Result (WER)*: | "clean" | "other" | |---|---| | 2.8 | 6.3 |
3,532
[ [ -0.015045166015625, -0.04779052734375, 0.0131072998046875, 0.0126190185546875, -0.01401519775390625, -0.01256561279296875, -0.04052734375, -0.043426513671875, -0.0018749237060546875, 0.0128936767578125, -0.04327392578125, -0.042877197265625, -0.0445556640625, -0.030242919921875, -0.031494140625, 0.07135009765625, 0.020172119140625, 0.007415771484375, 0.00644683837890625, -0.01000213623046875, -0.03045654296875, -0.0206298828125, -0.0667724609375, -0.033355712890625, 0.0194091796875, 0.01433563232421875, 0.0163421630859375, 0.01529693603515625, 0.0256195068359375, 0.0254058837890625, -0.0178375244140625, 0.004673004150390625, -0.053192138671875, -0.00603485107421875, 0.0096893310546875, -0.0288543701171875, -0.02093505859375, 0.02130126953125, 0.04376220703125, 0.035491943359375, -0.0157318115234375, 0.038238525390625, 0.006801605224609375, 0.031768798828125, -0.02313232421875, 0.0258941650390625, -0.04473876953125, -0.01519012451171875, -0.00975799560546875, -0.007320404052734375, -0.04632568359375, -0.009033203125, 0.01117706298828125, -0.039703369140625, 0.0137939453125, -0.01690673828125, 0.06634521484375, 0.017425537109375, -0.024932861328125, -0.0311431884765625, -0.073486328125, 0.0655517578125, -0.047821044921875, 0.055999755859375, 0.034637451171875, 0.0164031982421875, -0.00437164306640625, -0.08563232421875, -0.0330810546875, -0.004093170166015625, 0.023101806640625, 0.033905029296875, -0.0241546630859375, 0.005985260009765625, 0.031463623046875, 0.020538330078125, -0.0499267578125, 0.01073455810546875, -0.068603515625, -0.03741455078125, 0.060516357421875, -0.025390625, -0.0011186599731445312, 0.000720977783203125, -0.0257568359375, -0.044158935546875, -0.01898193359375, 0.035064697265625, 0.0236968994140625, 0.01204681396484375, -0.030242919921875, 0.03131103515625, 0.0066986083984375, 0.045654296875, 0.003475189208984375, -0.0305938720703125, 0.052276611328125, -0.01175689697265625, -0.0093841552734375, 0.032745361328125, 0.06378173828125, 
0.01375579833984375, 0.0135040283203125, 0.00882720947265625, -0.01291656494140625, 0.01224517822265625, -0.014373779296875, -0.060638427734375, -0.039520263671875, 0.034637451171875, -0.030914306640625, 0.00754547119140625, 0.01036834716796875, -0.01947021484375, -0.00899505615234375, -0.0206298828125, 0.07293701171875, -0.03973388671875, -0.019683837890625, 0.0120849609375, -0.02099609375, 0.0170745849609375, -0.00632476806640625, -0.061370849609375, 0.0111846923828125, 0.036712646484375, 0.0638427734375, 0.01004791259765625, -0.006443023681640625, -0.046295166015625, -0.00507354736328125, -0.0202178955078125, 0.036224365234375, -0.006999969482421875, -0.041015625, -0.021026611328125, -0.0092010498046875, 0.007965087890625, -0.04681396484375, 0.05682373046875, -0.03082275390625, 0.018310546875, -0.005764007568359375, -0.051910400390625, -0.02044677734375, -0.044891357421875, -0.0435791015625, 0.09130859375, 0.0108642578125, -0.045196533203125, 0.020355224609375, -0.0286712646484375, -0.048065185546875, -0.025177001953125, -0.00347137451171875, -0.0462646484375, 0.01073455810546875, 0.01494598388671875, 0.033905029296875, -0.015045166015625, 0.002140045166015625, -0.01198577880859375, -0.04779052734375, 0.0306549072265625, -0.041015625, 0.08319091796875, 0.0226593017578125, -0.0401611328125, 0.01490020751953125, -0.0684814453125, 0.01366424560546875, 0.004138946533203125, -0.034027099609375, 0.0095672607421875, -0.0107574462890625, 0.0298004150390625, 0.0198516845703125, 0.00974273681640625, -0.043426513671875, -0.0141448974609375, -0.056884765625, 0.047882080078125, 0.05169677734375, -0.0102386474609375, 0.0258331298828125, -0.0240325927734375, -0.004558563232421875, -0.020843505859375, -0.0030002593994140625, 0.01068115234375, -0.033294677734375, -0.046356201171875, -0.034149169921875, 0.0286102294921875, 0.0362548828125, -0.017181396484375, 0.04949951171875, -0.0093536376953125, -0.068115234375, -0.07098388671875, 0.0010480880737304688, 0.0237884521484375, 
0.036407470703125, 0.050323486328125, -0.0152587890625, -0.0596923828125, -0.05859375, -0.00769805908203125, -0.00829315185546875, -0.019134521484375, 0.0203857421875, 0.01995849609375, -0.0244598388671875, 0.045623779296875, -0.0186920166015625, -0.03173828125, -0.0174407958984375, 0.01380157470703125, 0.047088623046875, 0.04931640625, 0.0185089111328125, -0.047271728515625, -0.0224761962890625, -0.0257110595703125, -0.0384521484375, -0.006717681884765625, -0.00689697265625, 0.0009374618530273438, 0.00970458984375, 0.033782958984375, -0.038360595703125, 0.0271453857421875, 0.04132080078125, -0.00962066650390625, 0.0301971435546875, -0.0086212158203125, -0.0016031265258789062, -0.07415771484375, 0.0011959075927734375, -0.007232666015625, -0.01739501953125, -0.03778076171875, -0.0426025390625, -0.00959014892578125, -0.006618499755859375, -0.039459228515625, 0.031036376953125, -0.033111572265625, -0.0217437744140625, -0.01435089111328125, 0.015533447265625, -0.01128387451171875, 0.03570556640625, 0.00457763671875, 0.0518798828125, 0.0467529296875, -0.04107666015625, 0.047210693359375, 0.0149383544921875, -0.04718017578125, 0.0003063678741455078, -0.06585693359375, 0.033843994140625, 0.0116424560546875, 0.029266357421875, -0.0888671875, -0.00849151611328125, -0.0111846923828125, -0.06915283203125, 0.0254058837890625, 0.0031070709228515625, -0.032012939453125, -0.0313720703125, -0.00737762451171875, 0.0296173095703125, 0.0751953125, -0.051666259765625, 0.0399169921875, 0.03265380859375, 0.0119476318359375, -0.032318115234375, -0.06982421875, -0.035003662109375, -0.00244903564453125, -0.055999755859375, 0.0272674560546875, 0.0022296905517578125, -0.0003371238708496094, -0.01381683349609375, -0.033966064453125, 0.0130615234375, -0.00899505615234375, 0.040374755859375, 0.017059326171875, -0.0068511962890625, 0.015228271484375, -0.0093536376953125, -0.019134521484375, 0.01500701904296875, -0.040618896484375, 0.05712890625, -0.00775146484375, -0.014495849609375, 
-0.0687255859375, -0.0038299560546875, 0.016693115234375, -0.02630615234375, 0.035552978515625, 0.08978271484375, -0.029693603515625, -0.019073486328125, -0.042144775390625, -0.0248870849609375, -0.040252685546875, 0.054718017578125, -0.0175323486328125, -0.049560546875, 0.0256805419921875, 0.001415252685546875, 0.0117950439453125, 0.048492431640625, 0.0572509765625, -0.032012939453125, 0.06561279296875, 0.0177154541015625, 0.0007529258728027344, 0.040435791015625, -0.0657958984375, 0.0084686279296875, -0.052947998046875, -0.0343017578125, -0.028656005859375, -0.033966064453125, -0.039764404296875, -0.04034423828125, 0.03619384765625, 0.00537109375, -0.0113525390625, 0.0240936279296875, -0.05035400390625, 0.0135040283203125, 0.05389404296875, 0.0235137939453125, -0.01197052001953125, 0.0172576904296875, 0.003955841064453125, -0.0020351409912109375, -0.036041259765625, -0.01461029052734375, 0.09405517578125, 0.033966064453125, 0.0531005859375, -0.00872802734375, 0.056365966796875, 0.011077880859375, -0.02203369140625, -0.06805419921875, 0.032867431640625, -0.010467529296875, -0.051513671875, -0.0226287841796875, -0.0193328857421875, -0.0660400390625, 0.010345458984375, -0.0271453857421875, -0.06201171875, 0.00588226318359375, 0.0004661083221435547, -0.0226287841796875, 0.0141143798828125, -0.059326171875, 0.045196533203125, -0.0115966796875, -0.02679443359375, -0.02679443359375, -0.051849365234375, 0.004852294921875, 0.005168914794921875, 0.01433563232421875, -0.009918212890625, 0.032958984375, 0.1041259765625, -0.0133209228515625, 0.03680419921875, -0.031707763671875, -0.0002300739288330078, 0.051300048828125, -0.017242431640625, 0.027862548828125, 0.00220489501953125, -0.0189208984375, 0.0216217041015625, 0.0089263916015625, -0.024749755859375, -0.0298004150390625, 0.04791259765625, -0.0787353515625, -0.0205078125, -0.01537322998046875, -0.03485107421875, -0.02191162109375, 0.00847625732421875, 0.061004638671875, 0.060638427734375, -0.004039764404296875, 
0.038360595703125, 0.051544189453125, -0.006839752197265625, 0.034881591796875, 0.004505157470703125, -0.007354736328125, -0.031402587890625, 0.07159423828125, 0.01861572265625, 0.0159759521484375, 0.007625579833984375, 0.0165863037109375, -0.04681396484375, -0.038055419921875, -0.0037708282470703125, 0.016998291015625, -0.05133056640625, -0.0032978057861328125, -0.044952392578125, -0.029815673828125, -0.0499267578125, 0.004268646240234375, -0.0518798828125, -0.032135009765625, -0.033172607421875, -0.007770538330078125, 0.026702880859375, 0.044036865234375, -0.0347900390625, 0.0298919677734375, -0.04315185546875, 0.033966064453125, 0.02606201171875, -0.0034332275390625, -0.0092315673828125, -0.07763671875, -0.02532958984375, 0.018707275390625, -0.0020275115966796875, -0.06219482421875, 0.01262664794921875, 0.0171356201171875, 0.03228759765625, 0.0265960693359375, -0.005596160888671875, 0.046875, -0.02301025390625, 0.05645751953125, 0.022430419921875, -0.07891845703125, 0.04693603515625, -0.005706787109375, 0.01142120361328125, 0.032928466796875, 0.0141143798828125, -0.026611328125, -0.004215240478515625, -0.05279541015625, -0.07415771484375, 0.0672607421875, 0.0286712646484375, -0.0013799667358398438, 0.03326416015625, 0.018798828125, -0.007495880126953125, -0.005950927734375, -0.04693603515625, -0.03369140625, -0.02490234375, -0.0222625732421875, -0.0304107666015625, -0.019683837890625, -0.004413604736328125, -0.03857421875, 0.0721435546875, 0.0287933349609375, 0.042388916015625, 0.026458740234375, -0.01181793212890625, 0.00836181640625, 0.00970458984375, 0.0283660888671875, 0.02593994140625, -0.025421142578125, 0.01215362548828125, 0.0249786376953125, -0.047637939453125, 0.01401519775390625, 0.0185394287109375, 0.013946533203125, 0.005970001220703125, 0.056884765625, 0.08624267578125, -0.0020351409912109375, -0.03497314453125, 0.039764404296875, 0.0023899078369140625, -0.0232696533203125, -0.042205810546875, 0.01531219482421875, 0.0347900390625, 0.03009033203125, 
0.03143310546875, -0.0002415180206298828, 0.00800323486328125, -0.03216552734375, 0.0257568359375, 0.017669677734375, -0.037322998046875, -0.02349853515625, 0.067626953125, 0.00585174560546875, -0.018768310546875, 0.054901123046875, -0.002727508544921875, -0.021270751953125, 0.0477294921875, 0.0450439453125, 0.05511474609375, -0.0247802734375, -0.01788330078125, 0.04486083984375, 0.0165863037109375, -0.00341033935546875, 0.034423828125, -0.01100921630859375, -0.033203125, -0.023529052734375, -0.046722412109375, 0.0035457611083984375, 0.0192108154296875, -0.05908203125, 0.027862548828125, -0.032623291015625, -0.0283660888671875, 0.02227783203125, 0.017791748046875, -0.0589599609375, 0.0271759033203125, 0.020599365234375, 0.055816650390625, -0.06402587890625, 0.07965087890625, 0.0236053466796875, -0.0223541259765625, -0.09820556640625, -0.01280975341796875, -0.0037479400634765625, -0.056732177734375, 0.047393798828125, 0.02532958984375, -0.0293426513671875, 0.01371002197265625, -0.042510986328125, -0.0633544921875, 0.08074951171875, 0.02691650390625, -0.057281494140625, 0.007007598876953125, -0.0098876953125, 0.03717041015625, -0.0012044906616210938, 0.01739501953125, 0.055145263671875, 0.034271240234375, 0.009429931640625, -0.07513427734375, -0.0105133056640625, -0.007190704345703125, -0.0197906494140625, -0.016693115234375, -0.047882080078125, 0.0728759765625, -0.031280517578125, -0.0222625732421875, -0.005832672119140625, 0.0767822265625, 0.0187835693359375, 0.0225067138671875, 0.045501708984375, 0.03729248046875, 0.07440185546875, -0.0179901123046875, 0.056182861328125, 0.0015468597412109375, 0.0401611328125, 0.08441162109375, 0.004589080810546875, 0.06787109375, 0.0195770263671875, -0.027557373046875, 0.0271148681640625, 0.04473876953125, -0.011016845703125, 0.05291748046875, 0.0196075439453125, -0.0186614990234375, -0.013336181640625, 0.002719879150390625, -0.049224853515625, 0.070556640625, 0.0236358642578125, -0.01116943359375, 0.019378662109375, 
0.0135955810546875, -0.0087738037109375, -0.01007080078125, -0.0101165771484375, 0.055206298828125, 0.01291656494140625, -0.0186004638671875, 0.07293701171875, 0.0023555755615234375, 0.057891845703125, -0.04736328125, 0.0045318603515625, 0.018585205078125, 0.02349853515625, -0.030792236328125, -0.046722412109375, 0.007610321044921875, -0.016876220703125, -0.01352691650390625, 0.0047760009765625, 0.043426513671875, -0.049713134765625, -0.035858154296875, 0.044769287109375, 0.003879547119140625, 0.02264404296875, -0.0018453598022460938, -0.04632568359375, 0.0272369384765625, 0.0207366943359375, -0.030914306640625, -0.004871368408203125, 0.006526947021484375, 0.0271148681640625, 0.020721435546875, 0.05633544921875, 0.01543426513671875, 0.01137542724609375, 0.0005598068237304688, 0.044036865234375, -0.039031982421875, -0.040435791015625, -0.04425048828125, 0.02789306640625, 0.00452423095703125, -0.0171661376953125, 0.04144287109375, 0.062255859375, 0.07781982421875, 0.0036296844482421875, 0.0521240234375, 0.000782012939453125, 0.049530029296875, -0.054901123046875, 0.059051513671875, -0.046661376953125, 0.0083160400390625, -0.01092529296875, -0.0643310546875, 0.004192352294921875, 0.0684814453125, -0.01177978515625, 0.025421142578125, 0.0374755859375, 0.06512451171875, -0.0064697265625, 0.0023040771484375, 0.0139923095703125, 0.0284576416015625, 0.0270538330078125, 0.0565185546875, 0.042327880859375, -0.0587158203125, 0.054779052734375, -0.040252685546875, -0.0142669677734375, -0.00787353515625, -0.016937255859375, -0.06622314453125, -0.06298828125, -0.0184783935546875, -0.05328369140625, -0.006023406982421875, 0.08087158203125, 0.06365966796875, -0.06451416015625, -0.0284881591796875, 0.0211639404296875, -0.0123138427734375, -0.028656005859375, -0.013519287109375, 0.057708740234375, 0.002773284912109375, -0.06591796875, 0.055023193359375, -0.00634002685546875, 0.0042724609375, 0.0024051666259765625, -0.01194000244140625, -0.019989013671875, -0.0014476776123046875, 
0.03155517578125, 0.01561737060546875, -0.050689697265625, -0.0198822021484375, -0.01418304443359375, -0.011688232421875, 0.00897979736328125, 0.032196044921875, -0.049346923828125, 0.04443359375, 0.043121337890625, 0.024505615234375, 0.07952880859375, -0.0214080810546875, 0.006313323974609375, -0.050140380859375, 0.03680419921875, 0.0214385986328125, 0.023590087890625, 0.01885986328125, -0.01372528076171875, 0.0204925537109375, 0.02069091796875, -0.045135498046875, -0.058563232421875, -0.005184173583984375, -0.1015625, -0.0168914794921875, 0.09747314453125, 0.00508880615234375, -0.0156707763671875, 0.0119476318359375, -0.028656005859375, 0.072998046875, -0.036346435546875, 0.0288543701171875, 0.0306854248046875, -0.01374053955078125, 0.01277923583984375, -0.0430908203125, 0.041778564453125, 0.03253173828125, -0.0241241455078125, -0.004734039306640625, 0.031280517578125, 0.040557861328125, 0.0066680908203125, 0.06298828125, -0.0085601806640625, 0.0281219482421875, 0.0226287841796875, 0.0178070068359375, -0.0204010009765625, -0.02154541015625, -0.0362548828125, 0.007274627685546875, -0.00620269775390625, -0.0374755859375 ] ]
kingabzpro/wav2vec2-large-xls-r-300m-Urdu
2023-10-11T16:39:15.000Z
[ "transformers", "pytorch", "safetensors", "wav2vec2", "automatic-speech-recognition", "generated_from_trainer", "hf-asr-leaderboard", "robust-speech-event", "ur", "dataset:mozilla-foundation/common_voice_8_0", "license:apache-2.0", "model-index", "endpoints_compatible", "has_space", "region:us" ]
automatic-speech-recognition
kingabzpro
null
null
kingabzpro/wav2vec2-large-xls-r-300m-Urdu
6
8,393
transformers
2022-03-02T23:29:05
--- language: - ur license: apache-2.0 tags: - generated_from_trainer - hf-asr-leaderboard - robust-speech-event datasets: - mozilla-foundation/common_voice_8_0 metrics: - wer base_model: facebook/wav2vec2-xls-r-300m model-index: - name: wav2vec2-large-xls-r-300m-Urdu results: - task: type: automatic-speech-recognition name: Speech Recognition dataset: name: Common Voice 8 type: mozilla-foundation/common_voice_8_0 args: ur metrics: - type: wer value: 39.89 name: Test WER - type: cer value: 16.7 name: Test CER --- --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # wav2vec2-large-xls-r-300m-Urdu This model is a fine-tuned version of [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) on the common_voice dataset. It achieves the following results on the evaluation set: - Loss: 0.9889 - Wer: 0.5607 - Cer: 0.2370 #### Evaluation Commands 1. 
To evaluate on `mozilla-foundation/common_voice_8_0` with split `test` ```bash python eval.py --model_id kingabzpro/wav2vec2-large-xls-r-300m-Urdu --dataset mozilla-foundation/common_voice_8_0 --config ur --split test ``` ### Inference With LM ```python from datasets import load_dataset, Audio from transformers import pipeline model = "kingabzpro/wav2vec2-large-xls-r-300m-Urdu" data = load_dataset("mozilla-foundation/common_voice_8_0", "ur", split="test", streaming=True, use_auth_token=True) sample_iter = iter(data.cast_column("path", Audio(sampling_rate=16_000))) sample = next(sample_iter) asr = pipeline("automatic-speech-recognition", model=model) prediction = asr(sample["path"]["array"], chunk_length_s=5, stride_length_s=1) prediction # => {'text': 'اب یہ ونگین لمحاتانکھار دلمیں میںفوث کریلیا اجائ'} ``` ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.0001 - train_batch_size: 32 - eval_batch_size: 8 - seed: 42 - gradient_accumulation_steps: 2 - total_train_batch_size: 64 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - lr_scheduler_warmup_steps: 1000 - num_epochs: 200 ### Training results | Training Loss | Epoch | Step | Validation Loss | Wer | Cer | |:-------------:|:------:|:----:|:---------------:|:------:|:------:| | 3.6398 | 30.77 | 400 | 3.3517 | 1.0 | 1.0 | | 2.9225 | 61.54 | 800 | 2.5123 | 1.0 | 0.8310 | | 1.2568 | 92.31 | 1200 | 0.9699 | 0.6273 | 0.2575 | | 0.8974 | 123.08 | 1600 | 0.9715 | 0.5888 | 0.2457 | | 0.7151 | 153.85 | 2000 | 0.9984 | 0.5588 | 0.2353 | | 0.6416 | 184.62 | 2400 | 0.9889 | 0.5607 | 0.2370 | ### Framework versions - Transformers 4.17.0.dev0 - Pytorch 1.10.2+cu102 - Datasets 1.18.2.dev0 - Tokenizers 0.11.0 ### Eval results on Common Voice 8 "test" (WER): | Without LM | With LM (run `./eval.py`) | |---|---| | 52.03 | 39.89 |
3,262
[ [ -0.03619384765625, -0.045166015625, -0.0002377033233642578, 0.01306915283203125, -0.0135955810546875, -0.01552581787109375, -0.026275634765625, -0.016326904296875, 0.00865936279296875, 0.0272064208984375, -0.04925537109375, -0.048309326171875, -0.053070068359375, -0.00875091552734375, -0.0247802734375, 0.06927490234375, 0.01702880859375, 0.01364898681640625, 0.00867462158203125, -0.00856781005859375, -0.0219268798828125, -0.0362548828125, -0.06304931640625, -0.032440185546875, 0.01556396484375, 0.032623291015625, 0.052825927734375, 0.050750732421875, 0.03411865234375, 0.026885986328125, -0.0275115966796875, 0.0003631114959716797, -0.01512908935546875, -0.0161895751953125, 0.02288818359375, -0.032501220703125, -0.0379638671875, 0.0024089813232421875, 0.0675048828125, 0.039276123046875, -0.022003173828125, 0.02996826171875, 0.01129913330078125, 0.05303955078125, -0.022064208984375, 0.007312774658203125, -0.03631591796875, 0.00970458984375, -0.01416015625, -0.00691986083984375, -0.01331329345703125, -0.0034999847412109375, -0.007541656494140625, -0.0288543701171875, 0.0231475830078125, -0.0019989013671875, 0.08050537109375, 0.0208587646484375, -0.0232696533203125, -0.0008311271667480469, -0.059661865234375, 0.07281494140625, -0.06890869140625, 0.036346435546875, 0.0308074951171875, 0.030426025390625, -0.00653839111328125, -0.05523681640625, -0.054473876953125, 0.00087738037109375, 0.0026397705078125, 0.02130126953125, -0.0236663818359375, -0.0062713623046875, 0.051666259765625, 0.029876708984375, -0.05084228515625, 0.0082550048828125, -0.053192138671875, -0.033905029296875, 0.05419921875, 0.010345458984375, 0.01410675048828125, -0.0184173583984375, -0.025390625, -0.0280914306640625, -0.0225830078125, 0.039764404296875, 0.042236328125, 0.027984619140625, -0.036407470703125, 0.03546142578125, -0.0199432373046875, 0.049041748046875, 0.006877899169921875, -0.0257415771484375, 0.051513671875, -0.01064300537109375, -0.0193939208984375, 0.020294189453125, 
0.0838623046875, 0.022369384765625, 0.017059326171875, 0.0249786376953125, -0.00835418701171875, 0.0010242462158203125, 0.00020444393157958984, -0.0745849609375, -0.00626373291015625, 0.0268096923828125, -0.0303955078125, -0.020751953125, 0.005962371826171875, -0.044586181640625, 0.014190673828125, -0.0278778076171875, 0.0439453125, -0.034393310546875, -0.022003173828125, -0.00293731689453125, -0.00550079345703125, 0.030029296875, 0.0006284713745117188, -0.063232421875, 0.0294342041015625, 0.036590576171875, 0.05419921875, 0.00907135009765625, -0.02874755859375, -0.034088134765625, -0.00910186767578125, -0.03070068359375, 0.04449462890625, -0.01377105712890625, -0.0300445556640625, -0.01001739501953125, 0.007434844970703125, -0.0157623291015625, -0.044708251953125, 0.06427001953125, -0.0108489990234375, 0.016754150390625, -0.0113372802734375, -0.03533935546875, -0.0249786376953125, -0.004856109619140625, -0.0440673828125, 0.097900390625, 0.001087188720703125, -0.053619384765625, 0.024322509765625, -0.043853759765625, -0.01399993896484375, -0.01287841796875, -0.01078033447265625, -0.0560302734375, -0.02178955078125, 0.0196380615234375, 0.032623291015625, -0.0235443115234375, 0.01006317138671875, -0.012420654296875, -0.051361083984375, 0.021026611328125, -0.04132080078125, 0.0733642578125, 0.0198211669921875, -0.042236328125, 0.01387786865234375, -0.0894775390625, 0.0316162109375, 0.006572723388671875, -0.03009033203125, 0.01165771484375, -0.0099639892578125, 0.03082275390625, 0.023162841796875, 0.0032596588134765625, -0.0300140380859375, -0.0016775131225585938, -0.051300048828125, 0.034332275390625, 0.037506103515625, -0.000843048095703125, 0.004955291748046875, -0.0243682861328125, 0.0228118896484375, 0.01416778564453125, 0.009613037109375, 0.00914764404296875, -0.042449951171875, -0.054962158203125, -0.022216796875, 0.0111846923828125, 0.039398193359375, -0.0099945068359375, 0.05352783203125, -0.0124664306640625, -0.060089111328125, -0.04486083984375, 
0.0137176513671875, 0.0280914306640625, 0.04144287109375, 0.043182373046875, -0.0029621124267578125, -0.047332763671875, -0.08306884765625, -0.019439697265625, -0.01194000244140625, -0.00337982177734375, 0.0256805419921875, 0.040130615234375, -0.0298309326171875, 0.0531005859375, -0.0286407470703125, -0.026275634765625, -0.033538818359375, 0.0016345977783203125, 0.047576904296875, 0.050811767578125, 0.03594970703125, -0.053466796875, -0.04315185546875, -0.010498046875, -0.041961669921875, -0.0023212432861328125, -0.006511688232421875, -0.00577545166015625, 0.0152740478515625, 0.020111083984375, -0.044189453125, 0.04400634765625, 0.034149169921875, -0.02252197265625, 0.046722412109375, -0.008941650390625, 0.01349639892578125, -0.0953369140625, 0.01444244384765625, 0.007221221923828125, 0.00211334228515625, -0.045440673828125, -0.0179595947265625, 0.003345489501953125, -0.01322174072265625, -0.03765869140625, 0.046905517578125, -0.01486968994140625, 0.007476806640625, -0.014739990234375, -0.0078277587890625, -0.0097503662109375, 0.05419921875, 0.004604339599609375, 0.0662841796875, 0.05792236328125, -0.0362548828125, 0.026214599609375, 0.02899169921875, -0.0281982421875, 0.020233154296875, -0.06597900390625, 0.009613037109375, 0.019989013671875, 0.01067352294921875, -0.0726318359375, -0.0174713134765625, 0.031768798828125, -0.05902099609375, 0.0235595703125, -0.0149688720703125, -0.02886962890625, -0.03680419921875, -0.0167083740234375, 0.0227203369140625, 0.059051513671875, -0.0318603515625, 0.033477783203125, 0.00594329833984375, 0.004547119140625, -0.052490234375, -0.059539794921875, -0.01131439208984375, -0.018402099609375, -0.05126953125, 0.0188751220703125, -0.005786895751953125, -0.00628662109375, -0.012481689453125, -0.0198516845703125, -0.0140838623046875, -0.0106964111328125, 0.035369873046875, 0.0173492431640625, -0.0159149169921875, -0.0198974609375, 0.00006377696990966797, -0.019927978515625, 0.0198822021484375, -0.00392913818359375, 0.044158935546875, 
-0.01349639892578125, -0.0232696533203125, -0.074462890625, 0.01580810546875, 0.046905517578125, -0.0196075439453125, 0.057769775390625, 0.08013916015625, -0.032562255859375, -0.01019287109375, -0.040863037109375, -0.00405120849609375, -0.03912353515625, 0.048736572265625, -0.02325439453125, -0.02886962890625, 0.0562744140625, 0.00479888916015625, -0.014434814453125, 0.07208251953125, 0.0487060546875, 0.006595611572265625, 0.08721923828125, 0.01132965087890625, -0.0076141357421875, 0.0182952880859375, -0.0679931640625, 0.00554656982421875, -0.0675048828125, -0.0303955078125, -0.034698486328125, -0.015716552734375, -0.04400634765625, -0.0128021240234375, 0.024688720703125, 0.015045166015625, -0.03863525390625, 0.01490020751953125, -0.04339599609375, 0.01021575927734375, 0.050384521484375, 0.0164642333984375, -0.0120697021484375, 0.008331298828125, -0.02166748046875, -0.0088043212890625, -0.042510986328125, -0.0306854248046875, 0.0950927734375, 0.02655029296875, 0.040374755859375, -0.003978729248046875, 0.04693603515625, 0.007152557373046875, -0.01360321044921875, -0.058807373046875, 0.03851318359375, -0.0002989768981933594, -0.045562744140625, -0.0254058837890625, -0.0302734375, -0.05950927734375, 0.0143890380859375, -0.0186309814453125, -0.06597900390625, 0.00890350341796875, 0.009490966796875, -0.02813720703125, 0.02593994140625, -0.036773681640625, 0.07122802734375, -0.0093994140625, -0.02899169921875, -0.0182952880859375, -0.0496826171875, 0.00969696044921875, 0.0009589195251464844, 0.006622314453125, 0.004489898681640625, 0.016326904296875, 0.09014892578125, -0.048065185546875, 0.03662109375, -0.0184173583984375, 0.0198974609375, 0.043609619140625, -0.023345947265625, 0.0411376953125, 0.00106048583984375, -0.01403045654296875, 0.0283050537109375, 0.0142669677734375, -0.0330810546875, -0.035064697265625, 0.060699462890625, -0.0833740234375, -0.03607177734375, -0.043243408203125, -0.034210205078125, -0.005420684814453125, 0.014739990234375, 0.045623779296875, 
0.06353759765625, 0.004852294921875, 0.032989501953125, 0.036285400390625, -0.019989013671875, 0.0280609130859375, 0.0252838134765625, -0.0020008087158203125, -0.0511474609375, 0.06329345703125, 0.0159912109375, 0.0212249755859375, 0.0013093948364257812, 0.024810791015625, -0.035308837890625, -0.036529541015625, -0.034759521484375, 0.0145111083984375, -0.045501708984375, -0.0035152435302734375, -0.0401611328125, -0.049468994140625, -0.044891357421875, 0.00991058349609375, -0.035888671875, -0.022796630859375, -0.0235137939453125, 0.0018768310546875, 0.0372314453125, 0.02935791015625, -0.0101776123046875, 0.0263824462890625, -0.0537109375, 0.015380859375, 0.0098114013671875, 0.01503753662109375, 0.0095977783203125, -0.07537841796875, -0.031402587890625, 0.018707275390625, -0.0289459228515625, -0.061798095703125, 0.044219970703125, 0.0196685791015625, 0.04278564453125, 0.045257568359375, -0.0056304931640625, 0.065185546875, -0.0290374755859375, 0.06787109375, 0.01274871826171875, -0.05755615234375, 0.04583740234375, -0.02392578125, 0.0184326171875, 0.0258636474609375, 0.02880859375, -0.041748046875, -0.0193023681640625, -0.061065673828125, -0.06787109375, 0.07861328125, 0.029815673828125, 0.0023784637451171875, 0.01678466796875, 0.020111083984375, -0.01454925537109375, 0.006465911865234375, -0.04278564453125, -0.0609130859375, -0.021636962890625, -0.01885986328125, -0.0197296142578125, -0.0238189697265625, -0.00380706787109375, -0.058837890625, 0.0687255859375, 0.0144195556640625, 0.0333251953125, 0.0207672119140625, 0.01450347900390625, -0.00930023193359375, 0.0183868408203125, 0.04058837890625, 0.03448486328125, -0.03472900390625, -0.01136016845703125, 0.0215911865234375, -0.058502197265625, 0.006877899169921875, 0.0168304443359375, -0.003383636474609375, 0.009429931640625, 0.029937744140625, 0.08013916015625, 0.006103515625, -0.028076171875, 0.040069580078125, 0.002986907958984375, -0.036376953125, -0.0533447265625, 0.0158843994140625, 0.00879669189453125, 
0.01953125, 0.039337158203125, 0.020111083984375, 0.01140594482421875, -0.023956298828125, 0.0138702392578125, 0.02197265625, -0.034027099609375, -0.022216796875, 0.06646728515625, -0.0104827880859375, -0.03912353515625, 0.0489501953125, -0.0023040771484375, -0.04742431640625, 0.06390380859375, 0.039520263671875, 0.06585693359375, -0.0223541259765625, -0.006038665771484375, 0.06414794921875, 0.004665374755859375, -0.012725830078125, 0.029510498046875, 0.0023632049560546875, -0.0306396484375, -0.01380157470703125, -0.0592041015625, -0.0150604248046875, 0.04180908203125, -0.07501220703125, 0.030548095703125, -0.0269775390625, -0.0226593017578125, 0.015655517578125, 0.02545166015625, -0.060089111328125, 0.032196044921875, 0.00534820556640625, 0.072265625, -0.0645751953125, 0.0654296875, 0.043060302734375, -0.0318603515625, -0.098388671875, -0.0031681060791015625, 0.0011835098266601562, -0.061065673828125, 0.056884765625, 0.015533447265625, 0.0014200210571289062, 0.007305145263671875, -0.03912353515625, -0.07293701171875, 0.08038330078125, 0.00665283203125, -0.0462646484375, 0.0288543701171875, 0.028472900390625, 0.043121337890625, -0.00380706787109375, 0.0384521484375, 0.026763916015625, 0.032684326171875, 0.0092010498046875, -0.07635498046875, 0.00469207763671875, -0.040008544921875, -0.00878143310546875, 0.00737762451171875, -0.059417724609375, 0.0816650390625, -0.017242431640625, 0.00572967529296875, 0.0097503662109375, 0.04693603515625, 0.02655029296875, 0.0264129638671875, 0.03466796875, 0.050506591796875, 0.0396728515625, -0.002628326416015625, 0.058624267578125, -0.0406494140625, 0.040618896484375, 0.06005859375, 0.01050567626953125, 0.06817626953125, 0.0288848876953125, -0.043792724609375, 0.0298004150390625, 0.046051025390625, -0.0196533203125, 0.02703857421875, 0.00861358642578125, -0.00969696044921875, -0.0289154052734375, 0.01371002197265625, -0.050048828125, 0.03717041015625, 0.010650634765625, -0.0290679931640625, 0.00450897216796875, 
-0.0128631591796875, 0.0012483596801757812, -0.0018911361694335938, -0.0206756591796875, 0.040618896484375, -0.0017423629760742188, -0.029937744140625, 0.07354736328125, -0.0010633468627929688, 0.043975830078125, -0.047210693359375, -0.01043701171875, 0.002346038818359375, 0.029510498046875, -0.0323486328125, -0.04217529296875, 0.01483154296875, 0.002307891845703125, -0.01837158203125, 0.01122283935546875, 0.03289794921875, -0.031158447265625, -0.0662841796875, 0.0206756591796875, 0.0208740234375, 0.0250244140625, -0.0054168701171875, -0.06573486328125, 0.004425048828125, 0.01177215576171875, -0.040863037109375, 0.0196685791015625, 0.017913818359375, 0.0115203857421875, 0.04742431640625, 0.060638427734375, 0.023345947265625, 0.0014743804931640625, 0.007274627685546875, 0.05712890625, -0.05462646484375, -0.042022705078125, -0.059967041015625, 0.033843994140625, -0.00439453125, -0.03564453125, 0.05792236328125, 0.06866455078125, 0.051727294921875, 0.004665374755859375, 0.0548095703125, 0.0036258697509765625, 0.051727294921875, -0.032958984375, 0.0709228515625, -0.055084228515625, 0.01163482666015625, -0.021453857421875, -0.058929443359375, -0.000988006591796875, 0.055694580078125, -0.0218658447265625, 0.0150146484375, 0.041046142578125, 0.0751953125, -0.00246429443359375, -0.0070037841796875, 0.01438140869140625, 0.023895263671875, 0.01837158203125, 0.0377197265625, 0.036956787109375, -0.06427001953125, 0.0518798828125, -0.0455322265625, -0.004680633544921875, -0.006114959716796875, -0.0301513671875, -0.06976318359375, -0.04730224609375, -0.0286407470703125, -0.037384033203125, -0.00954437255859375, 0.0897216796875, 0.04290771484375, -0.06207275390625, -0.02947998046875, 0.006622314453125, -0.01245880126953125, -0.03948974609375, -0.0173797607421875, 0.052642822265625, -0.0045166015625, -0.058441162109375, 0.01154327392578125, -0.0155029296875, 0.0072021484375, -0.0078277587890625, -0.0290679931640625, -0.01346588134765625, -0.004367828369140625, 0.00991058349609375, 
0.019287109375, -0.060882568359375, -0.0177459716796875, -0.0011186599731445312, -0.010040283203125, 0.01519775390625, 0.01236724853515625, -0.044586181640625, 0.0189971923828125, 0.0241546630859375, 0.01235198974609375, 0.042694091796875, -0.004314422607421875, 0.012115478515625, -0.033233642578125, 0.031494140625, 0.0081939697265625, 0.024078369140625, 0.01284027099609375, -0.0199737548828125, 0.018096923828125, 0.03466796875, -0.0457763671875, -0.07220458984375, -0.023223876953125, -0.09503173828125, 0.0050201416015625, 0.10394287109375, 0.0056915283203125, -0.0274810791015625, 0.0185394287109375, -0.02880859375, 0.043304443359375, -0.036834716796875, 0.036346435546875, 0.03857421875, -0.006305694580078125, 0.01245880126953125, -0.05157470703125, 0.04437255859375, 0.0191650390625, -0.02508544921875, -0.007450103759765625, 0.0142669677734375, 0.043182373046875, 0.002208709716796875, 0.03912353515625, -0.0004677772521972656, 0.0216217041015625, 0.01593017578125, 0.0190582275390625, -0.027862548828125, -0.00664520263671875, -0.029144287109375, -0.00571441650390625, -0.005283355712890625, -0.0338134765625 ] ]
darkstorm2150/Protogen_x3.4_Official_Release
2023-05-10T17:26:00.000Z
[ "diffusers", "stable-diffusion", "stable-diffusion-diffusers", "text-to-image", "art", "artistic", "protogen", "en", "license:creativeml-openrail-m", "endpoints_compatible", "has_space", "diffusers:StableDiffusionPipeline", "region:us" ]
text-to-image
darkstorm2150
null
null
darkstorm2150/Protogen_x3.4_Official_Release
342
8,372
diffusers
2023-01-02T00:29:52
--- language: - en tags: - stable-diffusion - stable-diffusion-diffusers - text-to-image - art - artistic - diffusers - protogen inference: true license: creativeml-openrail-m --- <center><img src="https://huggingface.co/darkstorm2150/Protogen_x3.4_Official_Release/resolve/main/Protogen_x3.4-512.png" style="height:690px; border-radius: 8%; border: 10px solid #663380; padding-top:0px;" span title="Protogen x3.4 Raw Output"></center> <center><h1>Protogen x3.4 (Photorealism) Official Release</h1></center> <center><p><em>Research Model by <a href="https://instagram.com/officialvictorespinoza">darkstorm2150</a></em></p></center> </div> ## Table of contents * [General info](#general-info) * [Granular Adaptive Learning](#granular-adaptive-learning) * [Trigger Words](#trigger-words) * [Setup](#setup) * [Space](#space) * [CompVis](#compvis) * [Diffusers](#🧨-diffusers) * [Checkpoint Merging Data Reference](#checkpoint-merging-data-reference) * [License](#license) ## General info Protogen x3.4 Protogen was warm-started with [Stable Diffusion v1-5](https://huggingface.co/runwayml/stable-diffusion-v1-5) and fine-tuned on various high quality image datasets. Version 3.4 continued training from [ProtoGen v2.2](https://huggingface.co/darkstorm2150/Protogen_v2.2_Official_Release) with added photorealism. ## Granular Adaptive Learning Granular adaptive learning is a machine learning technique that focuses on adjusting the learning process at a fine-grained level, rather than making global adjustments to the model. This approach allows the model to adapt to specific patterns or features in the data, rather than making assumptions based on general trends. Granular adaptive learning can be achieved through techniques such as active learning, which allows the model to select the data it wants to learn from, or through the use of reinforcement learning, where the model receives feedback on its performance and adapts based on that feedback. 
It can also be achieved through techniques such as online learning where the model adjust itself as it receives more data. Granular adaptive learning is often used in situations where the data is highly diverse or non-stationary and where the model needs to adapt quickly to changing patterns. This is often the case in dynamic environments such as robotics, financial markets, and natural language processing. ## Trigger Words modelshoot style, analog style, mdjrny-v4 style, nousr robot Trigger words are available for the hassan1.4 and f222, might have to google them :) ## Setup To run this model, download the model.ckpt or model.safetensor and install it in your "stable-diffusion-webui\models\Stable-diffusion" directory ## Space We support a [Gradio](https://github.com/gradio-app/gradio) Web UI: [![Open In Spaces](https://camo.githubusercontent.com/00380c35e60d6b04be65d3d94a58332be5cc93779f630bcdfc18ab9a3a7d3388/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f25463025394625413425393725323048756767696e67253230466163652d5370616365732d626c7565)](https://huggingface.co/spaces/darkstorm2150/Stable-Diffusion-Protogen-webui) ### CompVis ## CKPT [Download ProtoGen x3.4.ckpt (5.98GB)](https://huggingface.co/darkstorm2150/Protogen_x3.4_Official_Release/resolve/main/ProtoGen_X3.4.ckpt) [Download ProtoGen X3.4-pruned-fp16.ckpt (1.89 GB)](https://huggingface.co/darkstorm2150/Protogen_x3.4_Official_Release/resolve/main/ProtoGen_X3.4-pruned-fp16.ckpt) ## Safetensors [Download ProtoGen x3.4.safetensors (5.98GB)](https://huggingface.co/darkstorm2150/Protogen_x3.4_Official_Release/resolve/main/ProtoGen_X3.4.safetensors) [Download ProtoGen x3.4-pruned-fp16.safetensors (1.89GB)](https://huggingface.co/darkstorm2150/Protogen_x3.4_Official_Release/resolve/main/ProtoGen_X3.4-pruned-fp16.safetensors) ### 🧨 Diffusers This model can be used just like any other Stable Diffusion model. 
For more information, please have a look at the [Stable Diffusion Pipeline](https://huggingface.co/docs/diffusers/api/pipelines/stable_diffusion). ```python from diffusers import StableDiffusionPipeline, DPMSolverMultistepScheduler import torch prompt = ( "modelshoot style, (extremely detailed CG unity 8k wallpaper), full shot body photo of the most beautiful artwork in the world, " "english medieval witch, black silk vale, pale skin, black silk robe, black cat, necromancy magic, medieval era, " "photorealistic painting by Ed Blinkey, Atey Ghailan, Studio Ghibli, by Jeremy Mann, Greg Manchess, Antonio Moro, trending on ArtStation, " "trending on CGSociety, Intricate, High Detail, Sharp focus, dramatic, photorealistic painting art by midjourney and greg rutkowski" ) model_id = "darkstorm2150/Protogen_x3.4_Official_Release" pipe = StableDiffusionPipeline.from_pretrained(model_id, torch_dtype=torch.float16) pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config) pipe = pipe.to("cuda") image = pipe(prompt, num_inference_steps=25).images[0] image.save("./result.jpg") ``` ![img](https://huggingface.co/datasets/patrickvonplaten/images/resolve/main/protogen/rswf5qk9be9a1.jpg) ## PENDING DATA FOR MERGE, RPGv2 not accounted.. 
## Checkpoint Merging Data Reference <style> .myTable { border-collapse:collapse; } .myTable th { background-color:#663380; color:white; } .myTable td, .myTable th { padding:5px; border:1px solid #663380; } </style> <table class="myTable"> <tr> <th>Models</th> <th>Protogen v2.2 (Anime)</th> <th>Protogen x3.4 (Photo)</th> <th>Protogen x5.3 (Photo)</th> <th>Protogen x5.8 (Sci-fi/Anime)</th> <th>Protogen x5.9 (Dragon)</th> <th>Protogen x7.4 (Eclipse)</th> <th>Protogen x8.0 (Nova)</th> <th>Protogen x8.6 (Infinity)</th> </tr> <tr> <td>seek_art_mega v1</td> <td>52.50%</td> <td>42.76%</td> <td>42.63%</td> <td></td> <td></td> <td></td> <td>25.21%</td> <td>14.83%</td> </tr> <tr> <td>modelshoot v1</td> <td>30.00%</td> <td>24.44%</td> <td>24.37%</td> <td>2.56%</td> <td>2.05%</td> <td>3.48%</td> <td>22.91%</td> <td>13.48%</td> </tr> <tr> <td>elldreth v1</td> <td>12.64%</td> <td>10.30%</td> <td>10.23%</td> <td></td> <td></td> <td></td> <td>6.06%</td> <td>3.57%</td> </tr> <tr> <td>photoreal v2</td> <td></td> <td></td> <td>10.00%</td> <td>48.64%</td> <td>38.91%</td> <td>66.33%</td> <td>20.49%</td> <td>12.06%</td> </tr> <tr> <td>analogdiffusion v1</td> <td></td> <td>4.75%</td> <td>4.50%</td> <td></td> <td></td> <td></td> <td>1.75%</td> <td>1.03%</td> </tr> <tr> <td>openjourney v2</td> <td></td> <td>4.51%</td> <td>4.28%</td> <td></td> <td></td> <td>4.75%</td> <td>2.26%</td> <td>1.33%</td> </tr> <tr> <td>hassan1.4</td> <td>2.63%</td> <td>2.14%</td> <td>2.13%</td> <td></td> <td></td> <td></td> <td>1.26%</td> <td>0.74%</td> </tr> <tr> <td>f222</td> <td>2.23%</td> <td>1.82%</td> <td>1.81%</td> <td></td> <td></td> <td></td> <td>1.07%</td> <td>0.63%</td> </tr> <tr> <td>hasdx</td> <td></td> <td></td> <td></td> <td>20.00%</td> <td>16.00%</td> <td>4.07%</td> <td>5.01%</td> <td>2.95%</td> </tr> <tr> <td>moistmix</td> <td></td> <td></td> <td></td> <td>16.00%</td> <td>12.80%</td> <td>3.86%</td> <td>4.08%</td> <td>2.40%</td> </tr> <tr> <td>roboDiffusion v1</td> <td></td> <td>4.29%</td> 
<td></td> <td>12.80%</td> <td>10.24%</td> <td>3.67%</td> <td>4.41%</td> <td>2.60%</td> </tr> <tr> <td>RPG v3</td> <td></td> <td>5.00%</td> <td></td> <td></td> <td>20.00%</td> <td>4.29%</td> <td>4.29%</td> <td>2.52%</td> </tr> <tr> <td>anything&everything</td> <td></td> <td></td> <td></td> <td></td> <td></td> <td>4.51%</td> <td>0.56%</td> <td>0.33%</td> </tr> <tr> <td>dreamlikediff v1</td> <td></td> <td></td> <td></td> <td></td> <td></td> <td>5.0%</td> <td>0.63%</td> <td>0.37%</td> </tr> <tr> <td>sci-fidiff v1</td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> <td>3.10%</td> </tr> <tr> <td>synthwavepunk v2</td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> <td>3.26%</td> </tr> <tr> <td>mashupv2</td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> <td>11.51%</td> </tr> <tr> <td>dreamshaper 252</td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> <td>4.04%</td> </tr> <tr> <td>comicdiff v2</td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> <td>4.25%</td> </tr> <tr> <td>artEros</td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> <td></td> <td>15.00%</td> </tr> </table> ## License By downloading you agree to the terms of these licenses <a href="https://huggingface.co/spaces/CompVis/stable-diffusion-license">CreativeML Open RAIL-M</a> <a href="https://huggingface.co/coreco/seek.art_MEGA/blob/main/LICENSE.txt">Seek Art Mega License</a>
8,620
[ [ -0.05133056640625, -0.046875, 0.01416015625, 0.033599853515625, -0.0121917724609375, 0.004177093505859375, 0.01143646240234375, -0.0333251953125, 0.0281219482421875, 0.00685882568359375, -0.047821044921875, -0.0288543701171875, -0.043182373046875, 0.0008301734924316406, -0.007328033447265625, 0.06280517578125, -0.0038318634033203125, -0.01406097412109375, 0.004669189453125, 0.004730224609375, -0.01776123046875, -0.001956939697265625, -0.0293731689453125, -0.0236663818359375, 0.0225982666015625, 0.034698486328125, 0.060943603515625, 0.041778564453125, 0.0290374755859375, 0.0274810791015625, -0.022735595703125, 0.0033512115478515625, -0.03399658203125, -0.0023746490478515625, 0.0032978057861328125, -0.0200958251953125, -0.035003662109375, -0.0006628036499023438, 0.02557373046875, 0.031097412109375, -0.01110076904296875, 0.02398681640625, 0.0238494873046875, 0.061126708984375, -0.039764404296875, 0.0121002197265625, -0.005565643310546875, 0.0205078125, -0.004589080810546875, -0.0041961669921875, -0.0017805099487304688, -0.04437255859375, -0.00266265869140625, -0.0635986328125, 0.0169677734375, -0.00310516357421875, 0.0828857421875, -0.004306793212890625, -0.0175323486328125, -0.0017938613891601562, -0.052093505859375, 0.058013916015625, -0.0555419921875, 0.027313232421875, 0.01357269287109375, 0.01265716552734375, -0.01386260986328125, -0.05712890625, -0.06683349609375, 0.018280029296875, -0.0012922286987304688, 0.0477294921875, -0.0301055908203125, -0.02655029296875, 0.012115478515625, 0.0225982666015625, -0.054229736328125, -0.01033782958984375, -0.03363037109375, -0.012603759765625, 0.04437255859375, 0.01227569580078125, 0.02618408203125, -0.01151275634765625, -0.04437255859375, -0.020660400390625, -0.0164031982421875, 0.031341552734375, 0.0239410400390625, -0.000743865966796875, -0.041412353515625, 0.034149169921875, -0.0081024169921875, 0.0509033203125, 0.01776123046875, -0.0306243896484375, 0.040924072265625, -0.0369873046875, -0.02532958984375, 
-0.0171966552734375, 0.056549072265625, 0.04815673828125, -0.007122039794921875, 0.0026454925537109375, 0.007965087890625, 0.01019287109375, -0.0004222393035888672, -0.07379150390625, -0.0238494873046875, 0.036346435546875, -0.0275421142578125, -0.03570556640625, 0.0016183853149414062, -0.08099365234375, -0.00458526611328125, 0.006954193115234375, 0.024444580078125, -0.035736083984375, -0.031402587890625, 0.01468658447265625, -0.024383544921875, 0.0152435302734375, 0.0263824462890625, -0.051116943359375, 0.0277557373046875, 0.023834228515625, 0.07757568359375, -0.004791259765625, -0.008514404296875, 0.0221099853515625, 0.0222320556640625, -0.03533935546875, 0.05523681640625, -0.011199951171875, -0.045135498046875, -0.037109375, 0.0264739990234375, -0.01395416259765625, -0.007350921630859375, 0.04583740234375, -0.025726318359375, 0.029388427734375, -0.024169921875, -0.0411376953125, -0.0164947509765625, 0.0156402587890625, -0.043792724609375, 0.06549072265625, 0.0160064697265625, -0.0732421875, 0.02685546875, -0.05462646484375, -0.0015468597412109375, -0.01580810546875, 0.00001722574234008789, -0.06005859375, -0.0242919921875, 0.01435089111328125, 0.0234832763671875, -0.01407623291015625, -0.0235748291015625, -0.0345458984375, -0.0246429443359375, 0.0028667449951171875, -0.01558685302734375, 0.08154296875, 0.03485107421875, -0.0506591796875, -0.0034084320068359375, -0.0552978515625, 0.0019273757934570312, 0.046142578125, -0.0187835693359375, 0.0104217529296875, -0.033935546875, 0.0002942085266113281, 0.0305938720703125, 0.0102386474609375, -0.044525146484375, 0.0162811279296875, -0.0264434814453125, 0.0281524658203125, 0.06976318359375, 0.02490234375, 0.03643798828125, -0.061248779296875, 0.041107177734375, 0.024200439453125, 0.0081024169921875, 0.026763916015625, -0.040618896484375, -0.051971435546875, -0.026214599609375, 0.018798828125, 0.0382080078125, -0.036376953125, 0.032073974609375, -0.018890380859375, -0.06854248046875, -0.017578125, -0.0027675628662109375, 
0.0259246826171875, 0.049224853515625, 0.0225677490234375, -0.005283355712890625, -0.0404052734375, -0.056060791015625, 0.0179443359375, -0.01297760009765625, 0.0193634033203125, 0.04547119140625, 0.054534912109375, -0.0222015380859375, 0.061431884765625, -0.066650390625, -0.0236968994140625, -0.0208740234375, -0.007061004638671875, 0.033111572265625, 0.054962158203125, 0.06585693359375, -0.07049560546875, -0.043853759765625, 0.005062103271484375, -0.0496826171875, -0.003253936767578125, 0.008056640625, -0.0295257568359375, -0.00007766485214233398, 0.01247406005859375, -0.04815673828125, 0.036468505859375, 0.045074462890625, -0.05157470703125, 0.060760498046875, -0.0307464599609375, 0.028900146484375, -0.085205078125, 0.02197265625, 0.004306793212890625, -0.007427215576171875, -0.054718017578125, 0.01084136962890625, -0.005077362060546875, 0.0021648406982421875, -0.051513671875, 0.054168701171875, -0.044281005859375, 0.017913818359375, 0.001361846923828125, 0.005016326904296875, 0.007099151611328125, 0.041168212890625, -0.01329803466796875, 0.071044921875, 0.04571533203125, -0.049896240234375, 0.0200653076171875, 0.0218505859375, -0.03594970703125, 0.01727294921875, -0.04705810546875, -0.006641387939453125, -0.0177459716796875, 0.01439666748046875, -0.08233642578125, -0.0177001953125, 0.023773193359375, -0.043853759765625, 0.0045013427734375, 0.0022430419921875, -0.02667236328125, -0.055816650390625, -0.0291900634765625, 0.008331298828125, 0.0643310546875, -0.015167236328125, 0.033172607421875, 0.013824462890625, 0.01503753662109375, -0.036163330078125, -0.045074462890625, -0.0168914794921875, -0.0268096923828125, -0.074951171875, 0.05462646484375, -0.01739501953125, -0.013275146484375, -0.00208282470703125, -0.0177459716796875, -0.007343292236328125, 0.004093170166015625, 0.0278472900390625, 0.01007843017578125, 0.006877899169921875, -0.0224456787109375, -0.0259857177734375, -0.0084228515625, -0.01476287841796875, -0.0091552734375, 0.048614501953125, 
0.0014276504516601562, -0.016326904296875, -0.041961669921875, -0.0036029815673828125, 0.057159423828125, 0.0020923614501953125, 0.0552978515625, 0.055206298828125, -0.017913818359375, -0.0113677978515625, -0.0196990966796875, -0.01367950439453125, -0.035919189453125, 0.0012226104736328125, -0.023193359375, -0.0465087890625, 0.05487060546875, 0.005767822265625, -0.0026397705078125, 0.044708251953125, 0.03662109375, -0.0236663818359375, 0.06787109375, 0.038818359375, 0.003192901611328125, 0.03326416015625, -0.06756591796875, -0.0089569091796875, -0.060546875, -0.040252685546875, -0.02618408203125, -0.04815673828125, -0.03765869140625, -0.05389404296875, 0.042266845703125, 0.016815185546875, -0.045867919921875, 0.027099609375, -0.049102783203125, 0.006622314453125, 0.037109375, 0.05084228515625, -0.0014495849609375, -0.01123809814453125, -0.035919189453125, -0.01250457763671875, -0.037567138671875, -0.0255126953125, 0.060882568359375, 0.007480621337890625, 0.040191650390625, 0.036712646484375, 0.055633544921875, 0.01226043701171875, -0.000045299530029296875, -0.020904541015625, 0.035369873046875, 0.010528564453125, -0.07159423828125, -0.0214996337890625, -0.024658203125, -0.08038330078125, 0.0228118896484375, -0.0361328125, -0.06201171875, 0.0335693359375, 0.01544189453125, -0.0413818359375, 0.035369873046875, -0.045684814453125, 0.07293701171875, -0.00970458984375, -0.059783935546875, 0.0207061767578125, -0.0555419921875, 0.0163726806640625, 0.0066070556640625, 0.044830322265625, -0.01555633544921875, -0.021026611328125, 0.061767578125, -0.04931640625, 0.052093505859375, -0.020660400390625, 0.021575927734375, 0.040985107421875, 0.007122039794921875, 0.04998779296875, 0.0033092498779296875, -0.004207611083984375, -0.01174163818359375, -0.01175689697265625, -0.048828125, -0.02972412109375, 0.061431884765625, -0.07879638671875, -0.0408935546875, -0.059844970703125, -0.0194854736328125, 0.00992584228515625, 0.0277252197265625, 0.032440185546875, 0.013336181640625, 
-0.004039764404296875, 0.005279541015625, 0.051971435546875, -0.01468658447265625, 0.05169677734375, 0.0206451416015625, -0.0297088623046875, -0.04180908203125, 0.07269287109375, 0.022705078125, 0.02471923828125, 0.0162506103515625, 0.04840087890625, -0.0193939208984375, -0.0537109375, -0.020111083984375, 0.00884246826171875, -0.0283966064453125, -0.0214385986328125, -0.06549072265625, -0.007080078125, -0.04034423828125, -0.03436279296875, -0.00988006591796875, -0.037261962890625, -0.029571533203125, -0.0229034423828125, 0.044036865234375, 0.0302581787109375, -0.0275726318359375, 0.0111541748046875, -0.035980224609375, 0.0285186767578125, 0.033935546875, 0.01849365234375, 0.0016660690307617188, -0.034820556640625, 0.0095367431640625, 0.0177459716796875, -0.042999267578125, -0.080078125, 0.0494384765625, -0.0004329681396484375, 0.0323486328125, 0.0195770263671875, -0.00601959228515625, 0.0830078125, -0.0204010009765625, 0.070556640625, 0.032958984375, -0.05743408203125, 0.04388427734375, -0.04815673828125, 0.0294036865234375, 0.034393310546875, 0.037933349609375, -0.0219268798828125, -0.022857666015625, -0.06695556640625, -0.068115234375, 0.04248046875, 0.040863037109375, -0.020904541015625, 0.0161590576171875, 0.0011053085327148438, -0.0123138427734375, 0.005634307861328125, -0.06695556640625, -0.0460205078125, -0.019439697265625, 0.002597808837890625, -0.0062713623046875, 0.01105499267578125, -0.0018663406372070312, -0.043121337890625, 0.0626220703125, 0.014923095703125, 0.044677734375, 0.027252197265625, 0.01122283935546875, -0.01519012451171875, 0.0084075927734375, 0.038055419921875, 0.049774169921875, -0.03143310546875, 0.0009741783142089844, 0.00946044921875, -0.049957275390625, 0.0264892578125, -0.00905609130859375, -0.03857421875, -0.0045166015625, 0.0244140625, 0.03460693359375, 0.017242431640625, -0.00968170166015625, 0.035980224609375, -0.0001652240753173828, -0.03265380859375, -0.039337158203125, 0.0267181396484375, 0.0285797119140625, 
0.0246734619140625, 0.0162811279296875, 0.033599853515625, 0.00612640380859375, -0.041168212890625, 0.021209716796875, 0.026641845703125, -0.03814697265625, -0.0079498291015625, 0.07940673828125, 0.00506591796875, 0.0020122528076171875, 0.011993408203125, -0.01041412353515625, -0.040771484375, 0.07403564453125, 0.054107666015625, 0.03704833984375, -0.01160430908203125, 0.0236663818359375, 0.05584716796875, 0.001575469970703125, -0.01247406005859375, 0.040435791015625, 0.017974853515625, -0.04144287109375, 0.004070281982421875, -0.06207275390625, -0.014617919921875, 0.006103515625, -0.025299072265625, 0.035919189453125, -0.0426025390625, -0.023956298828125, 0.006053924560546875, 0.016632080078125, -0.042510986328125, 0.040130615234375, -0.00514984130859375, 0.07293701171875, -0.06219482421875, 0.0498046875, 0.043853759765625, -0.04913330078125, -0.077392578125, -0.00824737548828125, 0.01508331298828125, -0.055145263671875, 0.033935546875, -0.00669097900390625, 0.004299163818359375, 0.015777587890625, -0.04547119140625, -0.08380126953125, 0.1104736328125, 0.00980377197265625, -0.0255126953125, 0.006717681884765625, -0.0031490325927734375, 0.04791259765625, -0.00800323486328125, 0.036773681640625, 0.0304107666015625, 0.0439453125, 0.03546142578125, -0.047515869140625, 0.00969696044921875, -0.0411376953125, 0.0105438232421875, 0.0094146728515625, -0.0762939453125, 0.07025146484375, -0.024383544921875, -0.018585205078125, -0.002170562744140625, 0.05206298828125, 0.037841796875, 0.013214111328125, 0.036041259765625, 0.07391357421875, 0.0279388427734375, -0.03131103515625, 0.07269287109375, -0.025421142578125, 0.0452880859375, 0.0526123046875, 0.009124755859375, 0.042755126953125, 0.0212554931640625, -0.045684814453125, 0.032196044921875, 0.055419921875, -0.0011434555053710938, 0.05218505859375, 0.0197601318359375, -0.0218963623046875, 0.002349853515625, 0.011383056640625, -0.04949951171875, 0.0161590576171875, 0.01039886474609375, -0.0196533203125, 0.0052490234375, 
0.0011529922485351562, 0.021331787109375, 0.007144927978515625, -0.018951416015625, 0.054412841796875, -0.011322021484375, -0.0360107421875, 0.040008544921875, -0.008575439453125, 0.0509033203125, -0.040740966796875, 0.006786346435546875, -0.019134521484375, 0.0294647216796875, -0.037353515625, -0.0771484375, 0.00818634033203125, -0.00965118408203125, -0.004688262939453125, -0.015716552734375, 0.032012939453125, -0.01078033447265625, -0.060577392578125, 0.02398681640625, 0.01453399658203125, 0.004604339599609375, 0.0309600830078125, -0.07501220703125, 0.0185699462890625, 0.0177459716796875, -0.0345458984375, 0.01506805419921875, 0.0295867919921875, 0.0234832763671875, 0.058135986328125, 0.060211181640625, 0.0196533203125, 0.0188140869140625, -0.033905029296875, 0.073974609375, -0.058135986328125, -0.043701171875, -0.057403564453125, 0.0594482421875, -0.021331787109375, -0.02545166015625, 0.081787109375, 0.052459716796875, 0.05267333984375, -0.009979248046875, 0.0665283203125, -0.0426025390625, 0.03436279296875, -0.01068115234375, 0.04669189453125, -0.049072265625, -0.00901031494140625, -0.049560546875, -0.06744384765625, -0.019073486328125, 0.05474853515625, -0.00775146484375, 0.0270233154296875, 0.034698486328125, 0.060211181640625, -0.00884246826171875, -0.004150390625, 0.004665374755859375, 0.0289459228515625, 0.0265350341796875, 0.047821044921875, 0.024749755859375, -0.045196533203125, 0.0292816162109375, -0.057830810546875, -0.026947021484375, -0.0171966552734375, -0.05487060546875, -0.03253173828125, -0.0391845703125, -0.03717041015625, -0.047821044921875, 0.006771087646484375, 0.061309814453125, 0.045135498046875, -0.054107666015625, -0.0257568359375, -0.021270751953125, 0.00843048095703125, -0.032958984375, -0.0190277099609375, 0.03167724609375, 0.0127105712890625, -0.0574951171875, -0.005397796630859375, 0.01529693603515625, 0.043182373046875, 0.005275726318359375, -0.0175323486328125, -0.031402587890625, 0.004787445068359375, 0.02093505859375, 
0.0222930908203125, -0.02996826171875, -0.01511383056640625, -0.00458526611328125, -0.007205963134765625, 0.022979736328125, 0.01093292236328125, -0.032958984375, 0.034423828125, 0.05047607421875, 0.005985260009765625, 0.0634765625, 0.01019287109375, 0.00859832763671875, -0.0202484130859375, 0.014984130859375, 0.019134521484375, 0.0233612060546875, 0.0023975372314453125, -0.022491455078125, 0.05517578125, 0.033477783203125, -0.058074951171875, -0.04974365234375, -0.0032634735107421875, -0.0968017578125, -0.0245361328125, 0.09295654296875, -0.00284576416015625, -0.04095458984375, 0.009521484375, -0.022247314453125, 0.0203857421875, -0.042510986328125, 0.035369873046875, 0.0325927734375, -0.034820556640625, -0.01546478271484375, -0.046478271484375, 0.03857421875, 0.0028209686279296875, -0.0592041015625, -0.01092529296875, 0.031890869140625, 0.033477783203125, 0.0282745361328125, 0.052093505859375, -0.029998779296875, 0.0102691650390625, 0.01412200927734375, -0.0009531974792480469, 0.001483917236328125, -0.0015201568603515625, -0.00429534912109375, 0.026763916015625, -0.01275634765625, -0.0167083740234375 ] ]
shibing624/text2vec-base-chinese-paraphrase
2023-06-26T06:51:03.000Z
[ "transformers", "pytorch", "ernie", "feature-extraction", "text2vec", "sentence-similarity", "zh", "dataset:https://huggingface.co/datasets/shibing624/nli-zh-all/tree/main/text2vec-base-chinese-paraphrase-dataset", "license:apache-2.0", "endpoints_compatible", "has_space", "region:us" ]
sentence-similarity
shibing624
null
null
shibing624/text2vec-base-chinese-paraphrase
51
8,363
transformers
2023-06-19T12:48:16
--- pipeline_tag: sentence-similarity license: apache-2.0 tags: - text2vec - feature-extraction - sentence-similarity - transformers datasets: - https://huggingface.co/datasets/shibing624/nli-zh-all/tree/main/text2vec-base-chinese-paraphrase-dataset language: - zh metrics: - spearmanr library_name: transformers --- # shibing624/text2vec-base-chinese-paraphrase This is a CoSENT(Cosine Sentence) model: shibing624/text2vec-base-chinese-paraphrase. It maps sentences to a 768 dimensional dense vector space and can be used for tasks like sentence embeddings, text matching or semantic search. - training dataset: https://huggingface.co/datasets/shibing624/nli-zh-all/tree/main/text2vec-base-chinese-paraphrase-dataset - base model: nghuyong/ernie-3.0-base-zh - max_seq_length: 256 - best epoch: 5 - sentence embedding dim: 768 ## Evaluation For an automated evaluation of this model, see the *Evaluation Benchmark*: [text2vec](https://github.com/shibing624/text2vec) ### Release Models - 本项目release模型的中文匹配评测结果: | Arch | BaseModel | Model | ATEC | BQ | LCQMC | PAWSX | STS-B | SOHU-dd | SOHU-dc | Avg | QPS | |:-----------|:----------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------|:-----:|:-----:|:-----:|:-----:|:-----:|:-------:|:-------:|:---------:|:-----:| | Word2Vec | word2vec | [w2v-light-tencent-chinese](https://ai.tencent.com/ailab/nlp/en/download.html) | 20.00 | 31.49 | 59.46 | 2.57 | 55.78 | 55.04 | 20.70 | 35.03 | 23769 | | SBERT | xlm-roberta-base | [sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2) | 18.42 | 38.52 | 63.96 | 10.14 | 78.90 | 63.01 | 52.28 | 46.46 | 3138 | | Instructor | hfl/chinese-roberta-wwm-ext | [moka-ai/m3e-base](https://huggingface.co/moka-ai/m3e-base) | 41.27 | 63.81 | 74.87 | 12.20 | 76.96 | 75.83 | 60.55 | 57.93 | 2980 | | CoSENT | 
hfl/chinese-macbert-base | [shibing624/text2vec-base-chinese](https://huggingface.co/shibing624/text2vec-base-chinese) | 31.93 | 42.67 | 70.16 | 17.21 | 79.30 | 70.27 | 50.42 | 51.61 | 3008 | | CoSENT | hfl/chinese-lert-large | [GanymedeNil/text2vec-large-chinese](https://huggingface.co/GanymedeNil/text2vec-large-chinese) | 32.61 | 44.59 | 69.30 | 14.51 | 79.44 | 73.01 | 59.04 | 53.12 | 2092 | | CoSENT | nghuyong/ernie-3.0-base-zh | [shibing624/text2vec-base-chinese-sentence](https://huggingface.co/shibing624/text2vec-base-chinese-sentence) | 43.37 | 61.43 | 73.48 | 38.90 | 78.25 | 70.60 | 53.08 | 59.87 | 3089 | | CoSENT | nghuyong/ernie-3.0-base-zh | [shibing624/text2vec-base-chinese-paraphrase](https://huggingface.co/shibing624/text2vec-base-chinese-paraphrase) | 44.89 | 63.58 | 74.24 | 40.90 | 78.93 | 76.70 | 63.30 | **63.08** | 3066 | | CoSENT | sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2 | [shibing624/text2vec-base-multilingual](https://huggingface.co/shibing624/text2vec-base-multilingual) | 32.39 | 50.33 | 65.64 | 32.56 | 74.45 | 68.88 | 51.17 | 53.67 | 4004 | 说明: - 结果评测指标:spearman系数 - `shibing624/text2vec-base-chinese`模型,是用CoSENT方法训练,基于`hfl/chinese-macbert-base`在中文STS-B数据训练得到,并在中文STS-B测试集评估达到较好效果,运行[examples/training_sup_text_matching_model.py](https://github.com/shibing624/text2vec/blob/master/examples/training_sup_text_matching_model.py)代码可训练模型,模型文件已经上传HF model hub,中文通用语义匹配任务推荐使用 - `shibing624/text2vec-base-chinese-sentence`模型,是用CoSENT方法训练,基于`nghuyong/ernie-3.0-base-zh`用人工挑选后的中文STS数据集[shibing624/nli-zh-all/text2vec-base-chinese-sentence-dataset](https://huggingface.co/datasets/shibing624/nli-zh-all/tree/main/text2vec-base-chinese-sentence-dataset)训练得到,并在中文各NLI测试集评估达到较好效果,运行[examples/training_sup_text_matching_model_jsonl_data.py](https://github.com/shibing624/text2vec/blob/master/examples/training_sup_text_matching_model_jsonl_data.py)代码可训练模型,模型文件已经上传HF model hub,中文s2s(句子vs句子)语义匹配任务推荐使用 - 
`shibing624/text2vec-base-chinese-paraphrase`模型,是用CoSENT方法训练,基于`nghuyong/ernie-3.0-base-zh`用人工挑选后的中文STS数据集[shibing624/nli-zh-all/text2vec-base-chinese-paraphrase-dataset](https://huggingface.co/datasets/shibing624/nli-zh-all/tree/main/text2vec-base-chinese-paraphrase-dataset),数据集相对于[shibing624/nli-zh-all/text2vec-base-chinese-sentence-dataset](https://huggingface.co/datasets/shibing624/nli-zh-all/tree/main/text2vec-base-chinese-sentence-dataset)加入了s2p(sentence to paraphrase)数据,强化了其长文本的表征能力,并在中文各NLI测试集评估达到SOTA,运行[examples/training_sup_text_matching_model_jsonl_data.py](https://github.com/shibing624/text2vec/blob/master/examples/training_sup_text_matching_model_jsonl_data.py)代码可训练模型,模型文件已经上传HF model hub,中文s2p(句子vs段落)语义匹配任务推荐使用 - `sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2`模型是用SBERT训练,是`paraphrase-MiniLM-L12-v2`模型的多语言版本,支持中文、英文等 - `w2v-light-tencent-chinese`是腾讯词向量的Word2Vec模型,CPU加载使用,适用于中文字面匹配任务和缺少数据的冷启动情况 ## Usage (text2vec) Using this model becomes easy when you have [text2vec](https://github.com/shibing624/text2vec) installed: ``` pip install -U text2vec ``` Then you can use the model like this: ```python from text2vec import SentenceModel sentences = ['如何更换花呗绑定银行卡', '花呗更改绑定银行卡'] model = SentenceModel('shibing624/text2vec-base-chinese-paraphrase') embeddings = model.encode(sentences) print(embeddings) ``` ## Usage (HuggingFace Transformers) Without [text2vec](https://github.com/shibing624/text2vec), you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings. 
Install transformers: ``` pip install transformers ``` Then load model and predict: ```python from transformers import BertTokenizer, BertModel import torch # Mean Pooling - Take attention mask into account for correct averaging def mean_pooling(model_output, attention_mask): token_embeddings = model_output[0] # First element of model_output contains all token embeddings input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float() return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9) # Load model from HuggingFace Hub tokenizer = BertTokenizer.from_pretrained('shibing624/text2vec-base-chinese-paraphrase') model = BertModel.from_pretrained('shibing624/text2vec-base-chinese-paraphrase') sentences = ['如何更换花呗绑定银行卡', '花呗更改绑定银行卡'] # Tokenize sentences encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt') # Compute token embeddings with torch.no_grad(): model_output = model(**encoded_input) # Perform pooling. In this case, mean pooling. sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask']) print("Sentence embeddings:") print(sentence_embeddings) ``` ## Usage (sentence-transformers) [sentence-transformers](https://github.com/UKPLab/sentence-transformers) is a popular library to compute dense vector representations for sentences. 
Install sentence-transformers: ``` pip install -U sentence-transformers ``` Then load model and predict: ```python from sentence_transformers import SentenceTransformer m = SentenceTransformer("shibing624/text2vec-base-chinese-paraphrase") sentences = ['如何更换花呗绑定银行卡', '花呗更改绑定银行卡'] sentence_embeddings = m.encode(sentences) print("Sentence embeddings:") print(sentence_embeddings) ``` ## Full Model Architecture ``` CoSENT( (0): Transformer({'max_seq_length': 256, 'do_lower_case': False}) with Transformer model: ErnieModel (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_mean_tokens': True}) ) ``` ## Intended uses Our model is intented to be used as a sentence and short paragraph encoder. Given an input text, it ouptuts a vector which captures the semantic information. The sentence vector may be used for information retrieval, clustering or sentence similarity tasks. By default, input text longer than 256 word pieces is truncated. ## Training procedure ### Pre-training We use the pretrained [`nghuyong/ernie-3.0-base-zh`](https://huggingface.co/nghuyong/ernie-3.0-base-zh) model. Please refer to the model card for more detailed information about the pre-training procedure. ### Fine-tuning We fine-tune the model using a contrastive objective. Formally, we compute the cosine similarity from each possible sentence pairs from the batch. We then apply the rank loss by comparing with true pairs and false pairs. ## Citing & Authors This model was trained by [text2vec](https://github.com/shibing624/text2vec). If you find this model helpful, feel free to cite: ```bibtex @software{text2vec, author = {Ming Xu}, title = {text2vec: A Tool for Text to Vector}, year = {2023}, url = {https://github.com/shibing624/text2vec}, } ```
9,524
[ [ -0.005184173583984375, -0.055328369140625, 0.025177001953125, 0.02935791015625, -0.02587890625, -0.03228759765625, -0.017669677734375, -0.0090789794921875, 0.00848388671875, 0.033447265625, -0.02764892578125, -0.039703369140625, -0.040618896484375, 0.00954437255859375, -0.006961822509765625, 0.06573486328125, -0.02081298828125, 0.014068603515625, -0.01171112060546875, -0.029876708984375, -0.0328369140625, -0.031829833984375, -0.041473388671875, -0.00917816162109375, 0.008758544921875, 0.02392578125, 0.053466796875, 0.03814697265625, 0.03265380859375, 0.019073486328125, -0.00650787353515625, 0.013092041015625, -0.0174102783203125, -0.0020961761474609375, 0.0030155181884765625, -0.04052734375, -0.0075225830078125, -0.0006203651428222656, 0.037078857421875, 0.01904296875, 0.00279998779296875, -0.004119873046875, 0.028045654296875, 0.039520263671875, -0.02642822265625, 0.024810791015625, -0.039520263671875, -0.00225067138671875, -0.00984954833984375, -0.0079345703125, -0.0236053466796875, -0.0167694091796875, 0.010955810546875, -0.041900634765625, 0.006275177001953125, 0.00797271728515625, 0.08941650390625, 0.0109100341796875, -0.0225067138671875, -0.034912109375, -0.012115478515625, 0.07061767578125, -0.06585693359375, 0.01235198974609375, 0.040252685546875, -0.0020809173583984375, 0.0062408447265625, -0.06292724609375, -0.050262451171875, -0.0072479248046875, -0.040435791015625, 0.02703857421875, 0.00016748905181884766, -0.01302337646484375, 0.006389617919921875, 0.008575439453125, -0.056610107421875, -0.0263671875, -0.028533935546875, -0.01361083984375, 0.03485107421875, 0.004337310791015625, 0.0416259765625, -0.05157470703125, -0.036865234375, -0.0253753662109375, -0.03277587890625, 0.019775390625, -0.005565643310546875, 0.01108551025390625, -0.03912353515625, 0.049224853515625, -0.0013666152954101562, 0.030548095703125, -0.0097808837890625, -0.0004086494445800781, 0.045074462890625, -0.05487060546875, -0.003871917724609375, -0.0225982666015625, 
0.09527587890625, 0.047576904296875, 0.0181121826171875, -0.0001952648162841797, -0.006107330322265625, -0.004489898681640625, -0.0273284912109375, -0.04986572265625, -0.024749755859375, 0.0233001708984375, -0.044097900390625, -0.0130767822265625, 0.012542724609375, -0.0703125, -0.0021114349365234375, -0.00031447410583496094, 0.035797119140625, -0.0584716796875, -0.006473541259765625, 0.007526397705078125, -0.035614013671875, 0.033355712890625, -0.0110626220703125, -0.05609130859375, 0.009613037109375, 0.041259765625, 0.0836181640625, 0.0006647109985351562, -0.035064697265625, -0.0252532958984375, 0.00989532470703125, -0.01849365234375, 0.033935546875, -0.02276611328125, -0.01020050048828125, 0.01297760009765625, 0.00989532470703125, -0.0247802734375, -0.0283050537109375, 0.05902099609375, -0.00409698486328125, 0.04736328125, -0.033782958984375, -0.0396728515625, 0.00034427642822265625, 0.005374908447265625, -0.035552978515625, 0.0950927734375, 0.0038204193115234375, -0.08599853515625, -0.006389617919921875, -0.0213470458984375, -0.03143310546875, -0.010406494140625, -0.0158843994140625, -0.043426513671875, -0.01183319091796875, 0.03448486328125, 0.046417236328125, -0.019195556640625, 0.004268646240234375, -0.0031757354736328125, -0.0203094482421875, 0.014495849609375, -0.01348114013671875, 0.08135986328125, 0.0142974853515625, -0.03717041015625, -0.002471923828125, -0.045013427734375, 0.005435943603515625, 0.0206451416015625, -0.0163116455078125, -0.03387451171875, -0.004344940185546875, 0.0240631103515625, 0.037841796875, 0.036834716796875, -0.0254974365234375, -0.0183563232421875, -0.044830322265625, 0.061767578125, 0.03485107421875, 0.0096588134765625, 0.03253173828125, -0.030853271484375, 0.012237548828125, 0.0025386810302734375, 0.00397491455078125, -0.01320648193359375, -0.054656982421875, -0.06536865234375, -0.0028743743896484375, 0.0137939453125, 0.068115234375, -0.08697509765625, 0.058624267578125, -0.02850341796875, -0.041717529296875, -0.04632568359375, 
0.010406494140625, 0.04052734375, 0.026641845703125, 0.0595703125, 0.0261993408203125, -0.03509521484375, -0.0618896484375, -0.024749755859375, -0.0208740234375, -0.006298065185546875, 0.0234527587890625, 0.044677734375, -0.007343292236328125, 0.04876708984375, -0.032135009765625, -0.03436279296875, -0.0450439453125, 0.001918792724609375, 0.01288604736328125, 0.03778076171875, 0.045135498046875, -0.0704345703125, -0.053924560546875, -0.012115478515625, -0.07525634765625, 0.005001068115234375, -0.0269012451171875, -0.027099609375, 0.00746917724609375, 0.046875, -0.045196533203125, 0.0225677490234375, 0.0506591796875, -0.0292205810546875, 0.0215301513671875, -0.04052734375, 0.00701904296875, -0.1119384765625, 0.0046234130859375, 0.01250457763671875, -0.0003387928009033203, -0.0430908203125, 0.01183319091796875, 0.0211944580078125, 0.01316070556640625, -0.032928466796875, 0.041748046875, -0.04217529296875, 0.032928466796875, -0.002986907958984375, 0.03399658203125, 0.0044708251953125, 0.049072265625, 0.01175689697265625, 0.059112548828125, 0.0281524658203125, -0.045928955078125, 0.0194091796875, 0.05206298828125, -0.031890869140625, 0.01512908935546875, -0.0635986328125, -0.00926971435546875, 0.005641937255859375, 0.0214691162109375, -0.088134765625, -0.004596710205078125, 0.038604736328125, -0.050445556640625, -0.005153656005859375, 0.0228424072265625, -0.0291595458984375, -0.039337158203125, -0.05615234375, 0.01015472412109375, 0.05224609375, -0.03546142578125, 0.032623291015625, 0.01134490966796875, -0.008636474609375, -0.04278564453125, -0.08416748046875, 0.01464080810546875, -0.006587982177734375, -0.05780029296875, 0.03662109375, -0.0067596435546875, 0.00994873046875, -0.0036411285400390625, 0.0196075439453125, -0.009979248046875, -0.00977325439453125, -0.00677490234375, 0.0204620361328125, -0.005443572998046875, -0.0100250244140625, 0.0165252685546875, -0.00013911724090576172, -0.00525665283203125, 0.00717926025390625, 0.0435791015625, 0.003643035888671875, 
0.002910614013671875, -0.05218505859375, 0.02484130859375, 0.0265045166015625, -0.0163726806640625, 0.06817626953125, 0.0557861328125, -0.0251922607421875, 0.0137481689453125, -0.026275634765625, -0.005199432373046875, -0.035003662109375, 0.043426513671875, -0.0367431640625, -0.054443359375, 0.0298614501953125, 0.0258941650390625, 0.01531219482421875, 0.06396484375, 0.05230712890625, -0.00231170654296875, 0.06280517578125, 0.0202178955078125, -0.008026123046875, 0.030975341796875, -0.0251617431640625, 0.00553131103515625, -0.064697265625, -0.0276947021484375, -0.03485107421875, -0.00656890869140625, -0.047088623046875, -0.051910400390625, 0.0200042724609375, 0.0129241943359375, 0.0022220611572265625, 0.05743408203125, -0.038818359375, 0.001499176025390625, 0.040771484375, 0.0212860107421875, -0.0075836181640625, -0.00008958578109741211, -0.0288543701171875, -0.0164794921875, -0.0401611328125, -0.040435791015625, 0.06951904296875, 0.03375244140625, 0.0284423828125, -0.0034389495849609375, 0.0400390625, -0.002048492431640625, -0.018707275390625, -0.036041259765625, 0.046600341796875, -0.0249481201171875, -0.03363037109375, -0.02447509765625, -0.0279541015625, -0.060791015625, 0.0220947265625, -0.01203155517578125, -0.051849365234375, 0.00780487060546875, -0.00595855712890625, -0.0247802734375, 0.00980377197265625, -0.048126220703125, 0.07025146484375, -0.010498046875, -0.021270751953125, -0.00494384765625, -0.06695556640625, 0.0205230712890625, 0.0123748779296875, 0.022247314453125, 0.0004112720489501953, -0.01751708984375, 0.07525634765625, -0.039337158203125, 0.039031982421875, -0.01462554931640625, 0.00014853477478027344, 0.042236328125, -0.0177764892578125, 0.052703857421875, 0.004367828369140625, -0.01067352294921875, 0.01349639892578125, 0.0182037353515625, -0.0350341796875, -0.0352783203125, 0.05120849609375, -0.060333251953125, -0.0269927978515625, -0.03594970703125, -0.0205535888671875, 0.0015859603881835938, 0.00496673583984375, 0.050689697265625, 
0.01459503173828125, -0.00940704345703125, 0.04168701171875, 0.04010009765625, -0.038177490234375, 0.03741455078125, 0.004314422607421875, 0.013092041015625, -0.0533447265625, 0.06866455078125, 0.003963470458984375, 0.01410675048828125, 0.05426025390625, 0.024078369140625, -0.0254974365234375, -0.034515380859375, -0.0196075439453125, 0.036895751953125, -0.036773681640625, -0.000044465065002441406, -0.08294677734375, -0.0238189697265625, -0.056732177734375, 0.002506256103515625, -0.00907135009765625, -0.0282135009765625, -0.0308990478515625, -0.005306243896484375, 0.0239105224609375, 0.02935791015625, 0.005985260009765625, 0.0211639404296875, -0.056427001953125, 0.0306396484375, 0.00010836124420166016, -0.0189056396484375, -0.0158843994140625, -0.05474853515625, -0.0335693359375, 0.012725830078125, -0.0190277099609375, -0.061065673828125, 0.043426513671875, 0.0204925537109375, 0.038330078125, 0.015228271484375, 0.00662994384765625, 0.0396728515625, -0.04638671875, 0.0899658203125, 0.0272216796875, -0.07305908203125, 0.0340576171875, -0.01268768310546875, 0.0127105712890625, 0.04046630859375, 0.024017333984375, -0.06378173828125, -0.032806396484375, -0.031158447265625, -0.080322265625, 0.05499267578125, 0.032501220703125, 0.031646728515625, -0.01386260986328125, 0.04229736328125, -0.01055908203125, -0.002056121826171875, -0.06781005859375, -0.04351806640625, -0.0291595458984375, -0.052978515625, -0.0104827880859375, -0.034759521484375, 0.00836944580078125, -0.0291595458984375, 0.052490234375, 0.0023441314697265625, 0.0457763671875, 0.0272216796875, -0.01259613037109375, 0.017059326171875, 0.0030269622802734375, 0.039764404296875, 0.0257110595703125, -0.0002453327178955078, 0.0005145072937011719, 0.037078857421875, -0.037322998046875, -0.0030307769775390625, 0.0084075927734375, -0.025299072265625, 0.0175018310546875, 0.049560546875, 0.0648193359375, 0.0186767578125, -0.04876708984375, 0.06512451171875, -0.0120086669921875, -0.03875732421875, -0.02392578125, 
-0.0027828216552734375, 0.0282135009765625, 0.0173492431640625, 0.0173492431640625, -0.0097808837890625, 0.00390625, -0.0272674560546875, 0.0215911865234375, 0.0225982666015625, -0.031036376953125, -0.0193634033203125, 0.0650634765625, 0.0037136077880859375, -0.00449371337890625, 0.03948974609375, -0.003986358642578125, -0.051300048828125, 0.03631591796875, 0.0274505615234375, 0.0589599609375, -0.008758544921875, 0.0166015625, 0.052398681640625, 0.023223876953125, -0.0254974365234375, 0.0095977783203125, 0.0156707763671875, -0.0460205078125, 0.0020313262939453125, -0.035064697265625, 0.00534820556640625, 0.0134124755859375, -0.03668212890625, 0.03717041015625, -0.029998779296875, 0.005115509033203125, -0.006862640380859375, 0.0003383159637451172, -0.03350830078125, 0.0169525146484375, 0.003505706787109375, 0.06689453125, -0.0648193359375, 0.07354736328125, 0.03997802734375, -0.046112060546875, -0.056488037109375, 0.023193359375, -0.02398681640625, -0.0682373046875, 0.0345458984375, 0.01544952392578125, 0.0177001953125, -0.01360321044921875, -0.028594970703125, -0.052947998046875, 0.09307861328125, 0.00281524658203125, -0.028045654296875, -0.02984619140625, 0.01206207275390625, 0.05096435546875, -0.01184844970703125, 0.03369140625, 0.0400390625, 0.03533935546875, 0.007404327392578125, -0.04522705078125, 0.027984619140625, -0.02008056640625, 0.01508331298828125, -0.02484130859375, -0.0772705078125, 0.08660888671875, -0.006134033203125, -0.00527191162109375, 0.0341796875, 0.07196044921875, 0.0169677734375, 0.00917816162109375, 0.0283660888671875, 0.0311126708984375, 0.032989501953125, -0.001399993896484375, 0.072265625, -0.0321044921875, 0.050018310546875, 0.05987548828125, 0.0099945068359375, 0.08526611328125, 0.0328369140625, -0.0207061767578125, 0.034271240234375, 0.0423583984375, -0.00774383544921875, 0.060760498046875, -0.0090484619140625, -0.007049560546875, -0.0090789794921875, 0.0135650634765625, -0.0284271240234375, 0.01593017578125, 0.0136260986328125, 
-0.034393310546875, 0.0030364990234375, -0.0035247802734375, 0.0291900634765625, 0.015838623046875, 0.0071563720703125, 0.05218505859375, 0.0148773193359375, -0.056549072265625, 0.049285888671875, 0.025726318359375, 0.07537841796875, -0.0399169921875, 0.01137542724609375, -0.0038909912109375, 0.0227508544921875, -0.0076751708984375, -0.06005859375, 0.0187225341796875, -0.01212310791015625, -0.0010728836059570312, -0.006015777587890625, 0.037139892578125, -0.05499267578125, -0.037811279296875, 0.04547119140625, 0.0306243896484375, 0.00981903076171875, -0.004711151123046875, -0.0975341796875, -0.01035308837890625, 0.03192138671875, -0.03955078125, 0.019317626953125, 0.0439453125, 0.008087158203125, 0.035614013671875, 0.027496337890625, -0.00785064697265625, 0.00533294677734375, -0.006389617919921875, 0.05108642578125, -0.05499267578125, -0.03265380859375, -0.0804443359375, 0.0338134765625, -0.019317626953125, -0.035491943359375, 0.07501220703125, 0.0517578125, 0.046539306640625, -0.005939483642578125, 0.07330322265625, -0.01419830322265625, 0.04638671875, -0.045501708984375, 0.0552978515625, -0.0526123046875, -0.007598876953125, -0.02947998046875, -0.04339599609375, -0.01363372802734375, 0.06341552734375, -0.02166748046875, 0.0012845993041992188, 0.07769775390625, 0.06036376953125, 0.0177764892578125, -0.01343536376953125, 0.00803375244140625, 0.0189208984375, 0.02569580078125, 0.0653076171875, 0.0307464599609375, -0.082275390625, 0.07440185546875, -0.0311126708984375, -0.01043701171875, -0.0159912109375, -0.034912109375, -0.08392333984375, -0.06988525390625, -0.0300445556640625, -0.052490234375, -0.0007066726684570312, 0.065185546875, 0.0134429931640625, -0.0701904296875, -0.0092926025390625, -0.0190887451171875, -0.00531768798828125, -0.0215911865234375, -0.0189056396484375, 0.0526123046875, -0.0283050537109375, -0.0731201171875, 0.005054473876953125, 0.0022678375244140625, -0.000005304813385009766, 0.00310516357421875, -0.0196685791015625, -0.04803466796875, 
0.012115478515625, 0.029937744140625, 0.0016956329345703125, -0.050445556640625, -0.0193634033203125, 0.01255035400390625, -0.044189453125, 0.007129669189453125, 0.03564453125, -0.02545166015625, 0.00933837890625, 0.0555419921875, 0.02935791015625, 0.029876708984375, -0.00435638427734375, 0.0307769775390625, -0.039215087890625, 0.0267486572265625, -0.002651214599609375, 0.03668212890625, 0.028594970703125, -0.0260467529296875, 0.033355712890625, 0.036468505859375, -0.03265380859375, -0.0401611328125, -0.023345947265625, -0.07952880859375, -0.03643798828125, 0.11639404296875, -0.0311126708984375, -0.034759521484375, 0.00439453125, -0.037689208984375, 0.044219970703125, -0.0233154296875, 0.0263671875, 0.062255859375, 0.017181396484375, -0.00946044921875, -0.04144287109375, 0.039337158203125, 0.042938232421875, -0.050201416015625, 0.01629638671875, 0.0034427642822265625, 0.029022216796875, 0.005710601806640625, 0.044830322265625, -0.0078125, 0.0042572021484375, -0.0009670257568359375, -0.0025119781494140625, 0.010894775390625, 0.01180267333984375, -0.01253509521484375, 0.0015010833740234375, -0.0229644775390625, -0.0288543701171875 ] ]
Linly-AI/Chinese-LLaMA-2-7B-hf
2023-09-01T09:04:51.000Z
[ "transformers", "pytorch", "llama", "text-generation", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Linly-AI
null
null
Linly-AI/Chinese-LLaMA-2-7B-hf
18
8,357
transformers
2023-07-21T13:09:30
基于中英文混合语料增量训练,词表扩充汉字。 训练细节和benchmark指标: https://github.com/CVI-SZU/Linly ```python from transformers import AutoModelForCausalLM, AutoTokenizer model = AutoModelForCausalLM.from_pretrained("Linly-AI/Chinese-LLaMA-2-7B-hf", device_map="cuda:0", torch_dtype=torch.float16, trust_remote_code=True) tokenizer = AutoTokenizer.from_pretrained("Linly-AI/Chinese-LLaMA-2-7B-hf", use_fast=False, trust_remote_code=True) prompt = "北京有什么好玩的地方?" prompt = f"### Instruction:{prompt.strip()} ### Response:" inputs = tokenizer(prompt, return_tensors="pt").to("cuda:0") generate_ids = model.generate(inputs.input_ids, do_sample=True, max_new_tokens=2048, top_k=10, top_p=0.85, temperature=1, repetition_penalty=1.15, eos_token_id=2, bos_token_id=1, pad_token_id=0) response = tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0] response = response.lstrip(prompt) ```
908
[ [ -0.026153564453125, -0.046112060546875, 0.0045623779296875, 0.0256805419921875, -0.023773193359375, 0.00384521484375, -0.01593017578125, -0.031707763671875, 0.0002884864807128906, -0.0080108642578125, -0.029510498046875, -0.03131103515625, -0.04425048828125, 0.01226806640625, -0.0192108154296875, 0.057891845703125, -0.0234375, 0.00885009765625, 0.01197052001953125, -0.0145111083984375, -0.007080078125, -0.03070068359375, -0.0615234375, -0.0105438232421875, 0.00027871131896972656, -0.0030059814453125, 0.0201568603515625, 0.05743408203125, 0.039154052734375, 0.0298919677734375, -0.00046753883361816406, 0.0300140380859375, -0.0275726318359375, 0.0002732276916503906, 0.0081329345703125, -0.024658203125, -0.041412353515625, -0.01494598388671875, 0.05133056640625, 0.01006317138671875, 0.0110015869140625, 0.03790283203125, 0.004764556884765625, 0.0189056396484375, -0.044036865234375, 0.014251708984375, -0.044921875, 0.00415802001953125, -0.0037174224853515625, -0.01025390625, -0.00896453857421875, -0.01006317138671875, -0.005687713623046875, -0.0482177734375, 0.01467132568359375, 0.01047515869140625, 0.0963134765625, 0.039520263671875, -0.037078857421875, -0.0095367431640625, -0.029083251953125, 0.079833984375, -0.072021484375, 0.0184478759765625, 0.004077911376953125, 0.0311431884765625, -0.00969696044921875, -0.06964111328125, -0.050689697265625, -0.0075225830078125, -0.017181396484375, 0.005401611328125, -0.0094146728515625, -0.005344390869140625, 0.039581298828125, 0.016693115234375, -0.032257080078125, 0.00762176513671875, -0.01056671142578125, -0.037750244140625, 0.0604248046875, 0.025543212890625, 0.01081085205078125, -0.0262298583984375, -0.044952392578125, -0.0250244140625, -0.01261138916015625, 0.036285400390625, 0.00279998779296875, 0.006755828857421875, -0.0289154052734375, 0.018768310546875, -0.021240234375, 0.0309600830078125, 0.015960693359375, -0.017578125, 0.051116943359375, -0.0467529296875, -0.03167724609375, -0.01739501953125, 0.08251953125, 
0.0215606689453125, -0.0027980804443359375, -0.002780914306640625, -0.03070068359375, -0.0130157470703125, -0.0201263427734375, -0.07769775390625, -0.0145721435546875, 0.0164947509765625, -0.0433349609375, -0.019134521484375, 0.01763916015625, -0.048980712890625, 0.0032596588134765625, -0.0034999847412109375, 0.0246734619140625, -0.02191162109375, -0.0242462158203125, -0.0131378173828125, 0.0003204345703125, 0.046905517578125, 0.0018129348754882812, -0.05908203125, 0.006298065185546875, 0.04071044921875, 0.05517578125, -0.027374267578125, -0.03460693359375, -0.038482666015625, -0.00640869140625, -0.01513671875, 0.037200927734375, 0.0107879638671875, -0.034820556640625, -0.0228729248046875, 0.0164794921875, -0.00826263427734375, -0.03143310546875, 0.03863525390625, -0.0247650146484375, -0.0146484375, -0.028045654296875, -0.0263671875, -0.00853729248046875, 0.035003662109375, -0.035552978515625, 0.1041259765625, -0.0017566680908203125, -0.046356201171875, 0.032684326171875, -0.026763916015625, -0.0217742919921875, -0.0158233642578125, -0.0172271728515625, -0.04241943359375, -0.0093994140625, 0.028594970703125, 0.03326416015625, -0.026031494140625, 0.0264434814453125, -0.0177764892578125, -0.022125244140625, 0.0285491943359375, -0.05322265625, 0.055999755859375, 0.033905029296875, -0.06201171875, 0.0236358642578125, -0.06787109375, 0.0183868408203125, 0.0167694091796875, -0.0225830078125, -0.00286865234375, -0.021575927734375, 0.01055145263671875, 0.0158538818359375, 0.044219970703125, -0.0286712646484375, 0.0128631591796875, -0.036407470703125, 0.038482666015625, 0.07440185546875, -0.01432037353515625, 0.0234375, -0.023284912109375, 0.021331787109375, 0.00608062744140625, 0.027618408203125, -0.0134429931640625, -0.0286712646484375, -0.07269287109375, -0.02960205078125, 0.01537322998046875, 0.0386962890625, -0.058929443359375, 0.052337646484375, -0.007434844970703125, -0.0433349609375, -0.0281829833984375, 0.00809478759765625, 0.039794921875, 0.04443359375, 
0.036712646484375, -0.003688812255859375, -0.049713134765625, -0.060943603515625, -0.0017480850219726562, -0.002300262451171875, 0.0036602020263671875, 0.032318115234375, 0.049072265625, -0.01983642578125, 0.057464599609375, -0.01751708984375, -0.0003452301025390625, 0.01097869873046875, 0.00428009033203125, 0.04315185546875, 0.030029296875, 0.05340576171875, -0.03125, -0.0587158203125, -0.00519561767578125, -0.06744384765625, -0.00527191162109375, -0.0091552734375, -0.0297393798828125, 0.03802490234375, 0.0299530029296875, -0.032379150390625, 0.048980712890625, 0.0399169921875, -0.029327392578125, 0.0506591796875, -0.0198516845703125, 0.0171661376953125, -0.08642578125, 0.032470703125, -0.0069580078125, -0.0035915374755859375, -0.0252227783203125, 0.01123046875, 0.015838623046875, 0.01165771484375, -0.048919677734375, 0.04693603515625, -0.03363037109375, -0.0003597736358642578, -0.0115966796875, -0.0168304443359375, 0.0044097900390625, 0.039642333984375, -0.0195770263671875, 0.076904296875, 0.04583740234375, -0.06072998046875, 0.03961181640625, 0.0107421875, -0.017578125, -0.0034961700439453125, -0.06195068359375, 0.015655517578125, 0.0244903564453125, 0.01837158203125, -0.09271240234375, 0.00894927978515625, 0.0236053466796875, -0.0276641845703125, 0.0191802978515625, 0.01128387451171875, -0.0321044921875, -0.033660888671875, -0.03228759765625, 0.06573486328125, 0.03131103515625, -0.0626220703125, 0.05499267578125, 0.0079803466796875, 0.033203125, -0.06378173828125, -0.049468994140625, -0.0192718505859375, -0.007534027099609375, -0.047210693359375, 0.0357666015625, -0.023529052734375, -0.002841949462890625, -0.018035888671875, -0.01446533203125, 0.00505828857421875, 0.00909423828125, 0.00438690185546875, 0.0386962890625, -0.013916015625, -0.0208892822265625, -0.0142059326171875, -0.0306854248046875, 0.0146942138671875, -0.00997161865234375, 0.0645751953125, -0.0194091796875, -0.0136871337890625, -0.055999755859375, -0.003437042236328125, 0.03204345703125, 
-0.0010881423950195312, 0.0401611328125, 0.061004638671875, -0.0133514404296875, -0.01306915283203125, -0.036956787109375, -0.01044464111328125, -0.041290283203125, 0.0321044921875, -0.04815673828125, -0.0215606689453125, 0.06256103515625, 0.014892578125, 0.0101776123046875, 0.047271728515625, 0.0384521484375, 0.005931854248046875, 0.07000732421875, 0.01045989990234375, -0.03546142578125, 0.0211639404296875, -0.06396484375, 0.00959014892578125, -0.05206298828125, -0.0180511474609375, -0.03643798828125, -0.0199737548828125, -0.0513916015625, -0.037261962890625, 0.031494140625, -0.004520416259765625, -0.035308837890625, 0.033111572265625, -0.0643310546875, 0.004863739013671875, 0.0531005859375, 0.0007410049438476562, -0.0016355514526367188, -0.0099029541015625, -0.0242156982421875, 0.00678253173828125, -0.040863037109375, -0.01447296142578125, 0.08673095703125, 0.0318603515625, 0.0634765625, -0.01306915283203125, 0.050384521484375, -0.0014066696166992188, -0.005352020263671875, -0.043731689453125, 0.045440673828125, -0.0008921623229980469, -0.0306396484375, -0.025634765625, -0.0296783447265625, -0.06378173828125, 0.01044464111328125, -0.00568389892578125, -0.0328369140625, 0.0020999908447265625, 0.0067596435546875, -0.0249481201171875, 0.037689208984375, -0.0338134765625, 0.0517578125, -0.04559326171875, -0.024078369140625, 0.00970458984375, -0.0258636474609375, 0.01552581787109375, 0.00506591796875, 0.00652313232421875, -0.007335662841796875, -0.00553131103515625, 0.0614013671875, -0.0361328125, 0.041351318359375, -0.022705078125, -0.0034351348876953125, 0.0176544189453125, -0.00977325439453125, 0.0487060546875, 0.0152740478515625, -0.018341064453125, 0.048431396484375, 0.004299163818359375, -0.03033447265625, -0.0255889892578125, 0.06005859375, -0.0848388671875, -0.037811279296875, -0.046630859375, -0.03021240234375, 0.018829345703125, 0.01183319091796875, 0.0430908203125, 0.00809478759765625, 0.0055999755859375, -0.004474639892578125, 0.0254669189453125, 
-0.0239105224609375, 0.07354736328125, 0.0225372314453125, 0.0007538795471191406, -0.0556640625, 0.064208984375, 0.007843017578125, 0.0067138671875, 0.013671875, 0.00664520263671875, -0.0255889892578125, -0.026519775390625, -0.0345458984375, 0.0227203369140625, -0.046875, -0.0360107421875, -0.0311126708984375, -0.044097900390625, -0.0487060546875, 0.0007052421569824219, -0.041961669921875, -0.01139068603515625, -0.0340576171875, 0.0013227462768554688, 0.02447509765625, 0.011383056640625, -0.00872039794921875, 0.0037994384765625, -0.03814697265625, 0.0306396484375, 0.01337432861328125, 0.008331298828125, 0.01236724853515625, -0.05572509765625, -0.0205078125, -0.0008196830749511719, -0.0207366943359375, -0.027862548828125, 0.045074462890625, 0.0087432861328125, 0.0628662109375, 0.039794921875, 0.01552581787109375, 0.0699462890625, -0.01152801513671875, 0.060455322265625, 0.00922393798828125, -0.08697509765625, 0.02593994140625, 0.00872039794921875, 0.0189666748046875, 0.0262298583984375, 0.0103607177734375, -0.0214080810546875, -0.007160186767578125, -0.036651611328125, -0.09381103515625, 0.0517578125, 0.006763458251953125, 0.00212860107421875, 0.01279449462890625, 0.0221405029296875, 0.005214691162109375, 0.00283050537109375, -0.056396484375, -0.0540771484375, -0.031280517578125, 0.0006017684936523438, -0.002498626708984375, -0.0213623046875, 0.0011625289916992188, -0.04205322265625, 0.07025146484375, -0.01483917236328125, 0.045257568359375, 0.0252227783203125, -0.0103759765625, -0.004276275634765625, -0.01971435546875, 0.048248291015625, 0.03289794921875, -0.0135650634765625, -0.01021575927734375, 0.048004150390625, -0.027130126953125, 0.0026111602783203125, 0.01242828369140625, -0.02093505859375, 0.01056671142578125, 0.0333251953125, 0.06719970703125, -0.0030612945556640625, -0.0185394287109375, 0.0272369384765625, -0.01702880859375, -0.029144287109375, -0.04571533203125, 0.01311492919921875, -0.0242156982421875, 0.0184478759765625, 0.04840087890625, 
-0.0032958984375, -0.00036644935607910156, -0.0167694091796875, 0.019195556640625, 0.0401611328125, 0.00237274169921875, -0.01202392578125, 0.06500244140625, 0.020843505859375, -0.01335906982421875, 0.048919677734375, -0.0125732421875, -0.0487060546875, 0.07427978515625, 0.0294647216796875, 0.0679931640625, -0.00849151611328125, -0.01415252685546875, 0.060272216796875, 0.006381988525390625, 0.01097869873046875, 0.024627685546875, -0.02203369140625, -0.053314208984375, -0.01702880859375, -0.04571533203125, -0.0124664306640625, 0.017303466796875, -0.044677734375, 0.030059814453125, -0.042938232421875, -0.014373779296875, -0.005237579345703125, 0.0188751220703125, -0.05865478515625, 0.022705078125, 0.01061248779296875, 0.08270263671875, -0.04718017578125, 0.07183837890625, 0.0279083251953125, -0.0166015625, -0.0899658203125, -0.00033402442932128906, -0.0165863037109375, -0.07275390625, 0.059295654296875, 0.016632080078125, -0.0022983551025390625, -0.0031414031982421875, -0.026641845703125, -0.08135986328125, 0.11065673828125, 0.0103607177734375, -0.04083251953125, -0.0004394054412841797, 0.0171356201171875, 0.0263671875, -0.018310546875, 0.06072998046875, 0.04461669921875, 0.042022705078125, 0.0009455680847167969, -0.0848388671875, 0.0242156982421875, -0.0362548828125, 0.005096435546875, -0.00943756103515625, -0.07672119140625, 0.0928955078125, -0.0052032470703125, -0.0027008056640625, 0.0163726806640625, 0.062469482421875, 0.03369140625, 0.0316162109375, 0.04644775390625, 0.03533935546875, 0.047760009765625, -0.0169677734375, 0.050445556640625, -0.05419921875, 0.0482177734375, 0.046905517578125, -0.005641937255859375, 0.04095458984375, 0.0374755859375, -0.010467529296875, 0.033111572265625, 0.0555419921875, -0.03009033203125, 0.048431396484375, 0.006603240966796875, -0.0163726806640625, 0.00007152557373046875, 0.01273345947265625, -0.0555419921875, 0.032196044921875, 0.0096282958984375, -0.0265655517578125, -0.007808685302734375, 0.0009264945983886719, 
-0.01041412353515625, -0.0211029052734375, -0.0028820037841796875, 0.044891357421875, -0.0087432861328125, -0.050994873046875, 0.0838623046875, 0.0098724365234375, 0.089111328125, -0.0416259765625, 0.0123748779296875, -0.0200347900390625, 0.016845703125, -0.0200042724609375, -0.0261383056640625, 0.007213592529296875, -0.00975799560546875, -0.008514404296875, -0.0018672943115234375, 0.046722412109375, -0.006931304931640625, -0.03485107421875, 0.03955078125, 0.0007352828979492188, 0.009552001953125, 0.0174102783203125, -0.07135009765625, 0.01297760009765625, 0.00923919677734375, -0.05218505859375, 0.0083465576171875, 0.0229034423828125, 0.007686614990234375, 0.050384521484375, 0.06640625, 0.00403594970703125, 0.0159454345703125, -0.0016651153564453125, 0.07403564453125, -0.05511474609375, -0.049530029296875, -0.07049560546875, 0.04217529296875, -0.019744873046875, -0.04547119140625, 0.06695556640625, 0.0626220703125, 0.0699462890625, -0.0009207725524902344, 0.0576171875, -0.011962890625, 0.01141357421875, -0.03936767578125, 0.046630859375, -0.03662109375, 0.0244903564453125, -0.0089569091796875, -0.055267333984375, 0.00138092041015625, 0.06231689453125, -0.006488800048828125, 0.0200042724609375, 0.059356689453125, 0.07391357421875, 0.002490997314453125, -0.032501220703125, 0.02703857421875, 0.023529052734375, 0.04241943359375, 0.05316162109375, 0.034820556640625, -0.073486328125, 0.0540771484375, -0.04754638671875, -0.01032257080078125, -0.0163726806640625, -0.061370849609375, -0.0721435546875, -0.031646728515625, -0.0171356201171875, -0.052764892578125, -0.024261474609375, 0.0789794921875, 0.07061767578125, -0.0811767578125, -0.0198211669921875, -0.00714874267578125, -0.004695892333984375, -0.0254669189453125, -0.0182952880859375, 0.04705810546875, -0.036773681640625, -0.076416015625, 0.019073486328125, -0.00450897216796875, -0.0000037550926208496094, -0.0254058837890625, -0.032012939453125, -0.005126953125, -0.00241851806640625, 0.00928497314453125, 
0.0216827392578125, -0.058807373046875, -0.01318359375, 0.014556884765625, -0.0284576416015625, 0.01190185546875, 0.039093017578125, -0.0440673828125, 0.0269622802734375, 0.039306640625, 0.0367431640625, 0.057098388671875, -0.008880615234375, 0.025726318359375, -0.04296875, 0.031463623046875, -0.00902557373046875, 0.034515380859375, 0.0078277587890625, -0.042510986328125, 0.03399658203125, 0.0286407470703125, -0.047760009765625, -0.0653076171875, -0.0184478759765625, -0.07861328125, -0.01311492919921875, 0.09234619140625, -0.02069091796875, -0.04632568359375, 0.01227569580078125, -0.00908660888671875, 0.046783447265625, -0.035125732421875, 0.0701904296875, 0.030548095703125, -0.0013628005981445312, 0.00909423828125, -0.0269622802734375, 0.03460693359375, 0.036346435546875, -0.047332763671875, -0.0247650146484375, -0.01117706298828125, 0.033721923828125, 0.01555633544921875, 0.040435791015625, 0.01418304443359375, 0.024810791015625, 0.0033397674560546875, 0.0130462646484375, -0.037078857421875, 0.00311279296875, -0.01947021484375, -0.01165008544921875, -0.0145721435546875, -0.047821044921875 ] ]
TheBloke/wizardLM-7B-HF
2023-06-05T00:09:55.000Z
[ "transformers", "pytorch", "llama", "text-generation", "license:other", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
TheBloke
null
null
TheBloke/wizardLM-7B-HF
89
8,355
transformers
2023-04-26T07:12:30
--- license: other --- <!-- header start --> <div style="width: 100%;"> <img src="https://i.imgur.com/EBdldam.jpg" alt="TheBlokeAI" style="width: 100%; min-width: 400px; display: block; margin: auto;"> </div> <div style="display: flex; justify-content: space-between; width: 100%;"> <div style="display: flex; flex-direction: column; align-items: flex-start;"> <p><a href="https://discord.gg/Jq4vkcDakD">Chat & support: my new Discord server</a></p> </div> <div style="display: flex; flex-direction: column; align-items: flex-end;"> <p><a href="https://www.patreon.com/TheBlokeAI">Want to contribute? TheBloke's Patreon page</a></p> </div> </div> <!-- header end --> # WizardLM: An Instruction-following LLM Using Evol-Instruct These files are the result of merging the [delta weights](https://huggingface.co/victor123/WizardLM) with the original Llama7B model. The code for merging is provided in the [WizardLM official Github repo](https://github.com/nlpxucan/WizardLM). The original WizardLM deltas are in float32, and this results in producing an HF repo that is also float32, and is much larger than a normal 7B Llama model. Therefore for this repo I converted the merged model to float16, to produce a standard size 7B model. This was achieved by running **`model = model.half()`** prior to saving. ## WizardLM-7B HF This repo contains the full unquantised model files in HF format for GPU inference and as a base for quantisation/conversion. ## Other repositories available * [4bit GGML models for CPU inference](https://huggingface.co/TheBloke/wizardLM-7B-GGML) * [4bit GPTQ models for GPU inference](https://huggingface.co/TheBloke/wizardLM-7B-GPTQ) <!-- footer start --> ## Discord For further support, and discussions on these models and AI in general, join us at: [TheBloke AI's Discord server](https://discord.gg/Jq4vkcDakD) ## Thanks, and how to contribute. Thanks to the [chirper.ai](https://chirper.ai) team! I've had a lot of people ask if they can contribute. 
I enjoy providing models and helping people, and would love to be able to spend even more time doing it, as well as expanding into new projects like fine tuning/training. If you're able and willing to contribute it will be most gratefully received and will help me to keep providing more models, and to start work on new AI projects. Donaters will get priority support on any and all AI/LLM/model questions and requests, access to a private Discord room, plus other benefits. * Patreon: https://patreon.com/TheBlokeAI * Ko-Fi: https://ko-fi.com/TheBlokeAI **Patreon special mentions**: Aemon Algiz, Dmitriy Samsonov, Nathan LeClaire, Trenton Dambrowitz, Mano Prime, David Flickinger, vamX, Nikolai Manek, senxiiz, Khalefa Al-Ahmad, Illia Dulskyi, Jonathan Leane, Talal Aujan, V. Lukas, Joseph William Delisle, Pyrater, Oscar Rangel, Lone Striker, Luke Pendergrass, Eugene Pentland, Sebastain Graf, Johann-Peter Hartman. Thank you to all my generous patrons and donaters! <!-- footer end --> # Original model info ## Full details in the model's Github page [WizardLM official Github repo](https://github.com/nlpxucan/WizardLM). ## Overview of Evol-Instruct Evol-Instruct is a novel method using LLMs instead of humans to automatically mass-produce open-domain instructions of various difficulty levels and skills range, to improve the performance of LLMs. Although on our complexity-balanced test set, WizardLM-7B outperforms ChatGPT in the high-complexity instructions, it still lag behind ChatGPT on the entire test set, and we also consider WizardLM to still be in a baby state. This repository will continue to improve WizardLM, train on larger scales, add more training data, and innovate more advanced large-model training methods. ![info](https://github.com/nlpxucan/WizardLM/raw/main/imgs/git_overall.png) ![info](https://github.com/nlpxucan/WizardLM/raw/main/imgs/git_running.png)
3,930
[ [ -0.042388916015625, -0.048004150390625, 0.003894805908203125, 0.01123809814453125, -0.0134124755859375, -0.0063018798828125, 0.0013055801391601562, -0.042388916015625, 0.0142059326171875, 0.03076171875, -0.04864501953125, -0.0247039794921875, -0.022308349609375, 0.0011587142944335938, -0.0266265869140625, 0.08001708984375, 0.01531982421875, -0.0015325546264648438, -0.0108489990234375, 0.008148193359375, -0.044769287109375, -0.0286407470703125, -0.05816650390625, -0.0439453125, 0.03948974609375, 0.01059722900390625, 0.049346923828125, 0.03448486328125, 0.0266265869140625, 0.027984619140625, -0.01091766357421875, 0.0245361328125, -0.053802490234375, -0.00013887882232666016, 0.0093231201171875, -0.01407623291015625, -0.06304931640625, -0.005054473876953125, 0.0303802490234375, 0.03448486328125, -0.0270538330078125, 0.0193634033203125, 0.015655517578125, 0.06170654296875, -0.03472900390625, 0.0272979736328125, -0.031707763671875, -0.006969451904296875, -0.00473785400390625, 0.0160980224609375, -0.0203857421875, -0.0203857421875, -0.004940032958984375, -0.07427978515625, 0.0106201171875, 0.0172882080078125, 0.07659912109375, 0.01422882080078125, -0.023101806640625, 0.00492095947265625, -0.048095703125, 0.038909912109375, -0.07110595703125, 0.030181884765625, 0.023529052734375, 0.031219482421875, -0.0236968994140625, -0.05572509765625, -0.04443359375, -0.0104827880859375, 0.00421905517578125, 0.0161285400390625, -0.03094482421875, -0.0016727447509765625, 0.0001500844955444336, 0.030853271484375, -0.052490234375, -0.00011879205703735352, -0.04412841796875, -0.01268768310546875, 0.06353759765625, 0.00324249267578125, 0.0233612060546875, 0.0167236328125, -0.011932373046875, -0.0217437744140625, -0.052337646484375, -0.0028362274169921875, 0.0284576416015625, 0.021514892578125, -0.047882080078125, 0.06536865234375, -0.00738525390625, 0.044403076171875, 0.03729248046875, -0.0093994140625, 0.01953125, -0.0283050537109375, -0.0379638671875, -0.0025882720947265625, 
0.062469482421875, 0.028411865234375, 0.0036296844482421875, 0.013885498046875, -0.00865936279296875, -0.02191162109375, 0.01461029052734375, -0.0643310546875, -0.0250091552734375, 0.02288818359375, -0.044097900390625, -0.01525115966796875, -0.004779815673828125, -0.049041748046875, -0.048309326171875, -0.0166778564453125, 0.03302001953125, -0.0325927734375, -0.041168212890625, 0.013031005859375, -0.022125244140625, 0.041229248046875, 0.062469482421875, -0.052276611328125, 0.01226043701171875, 0.049713134765625, 0.0465087890625, 0.0145721435546875, -0.0188751220703125, -0.02484130859375, 0.00798797607421875, -0.029754638671875, 0.038299560546875, -0.01218414306640625, -0.03924560546875, -0.01125335693359375, -0.0006971359252929688, 0.0069122314453125, -0.03814697265625, 0.01277923583984375, -0.038055419921875, 0.016265869140625, -0.01071929931640625, -0.047943115234375, -0.0167694091796875, 0.010986328125, -0.052459716796875, 0.06402587890625, 0.0213470458984375, -0.050750732421875, 0.0011997222900390625, -0.0455322265625, -0.00772857666015625, 0.0038356781005859375, 0.012115478515625, -0.03131103515625, -0.0032978057861328125, -0.0079498291015625, 0.0214996337890625, -0.038726806640625, -0.0034465789794921875, -0.046783447265625, -0.0167236328125, 0.02301025390625, -0.0460205078125, 0.07476806640625, 0.00980377197265625, -0.0242919921875, -0.011138916015625, -0.06982421875, -0.01303863525390625, 0.0279388427734375, -0.0271453857421875, 0.00336456298828125, -0.0176239013671875, -0.0015153884887695312, 0.004638671875, 0.036376953125, -0.01885986328125, 0.0280303955078125, -0.02178955078125, 0.009765625, 0.06256103515625, -0.00881195068359375, 0.03851318359375, -0.03900146484375, 0.03131103515625, -0.017578125, 0.039306640625, 0.01161956787109375, -0.0576171875, -0.06707763671875, -0.03472900390625, 0.01180267333984375, 0.03759765625, -0.04022216796875, 0.057098388671875, -0.00955963134765625, -0.0595703125, -0.046905517578125, 0.006130218505859375, 0.02130126953125, 
0.040069580078125, 0.031463623046875, -0.0187835693359375, -0.031524658203125, -0.0655517578125, 0.015350341796875, -0.036346435546875, -0.01546478271484375, 0.0198211669921875, 0.0439453125, -0.0130157470703125, 0.054534912109375, -0.03125, -0.01139068603515625, -0.0100250244140625, -0.01515960693359375, 0.027130126953125, 0.052886962890625, 0.034393310546875, -0.05474853515625, -0.049041748046875, 0.0263824462890625, -0.050750732421875, -0.0122833251953125, 0.004180908203125, -0.035369873046875, 0.01922607421875, 0.0007572174072265625, -0.08734130859375, 0.0484619140625, 0.036224365234375, -0.047698974609375, 0.048095703125, -0.0229949951171875, 0.0157318115234375, -0.07098388671875, -0.00667572021484375, -0.002490997314453125, -0.0119171142578125, -0.0455322265625, -0.01442718505859375, -0.01561737060546875, -0.0023174285888671875, -0.037139892578125, 0.0552978515625, -0.052459716796875, 0.00887298583984375, -0.00928497314453125, -0.0077667236328125, 0.0191650390625, 0.0298004150390625, -0.0295257568359375, 0.039825439453125, 0.0474853515625, -0.0295562744140625, 0.04486083984375, 0.017242431640625, -0.005466461181640625, 0.030303955078125, -0.064453125, 0.002597808837890625, -0.0015716552734375, 0.03460693359375, -0.0743408203125, -0.0201263427734375, 0.047882080078125, -0.042327880859375, 0.06201171875, -0.0226593017578125, -0.0296783447265625, -0.03631591796875, -0.0404052734375, 0.016143798828125, 0.0703125, -0.036773681640625, 0.0523681640625, 0.0302581787109375, 0.01025390625, -0.055877685546875, -0.0487060546875, -0.014190673828125, -0.0205078125, -0.051239013671875, 0.0287322998046875, -0.022369384765625, -0.02374267578125, -0.0008802413940429688, -0.006931304931640625, -0.0026683807373046875, -0.00736236572265625, 0.0292205810546875, 0.046905517578125, -0.01641845703125, -0.01806640625, -0.0028362274169921875, 0.01447296142578125, -0.011962890625, -0.0206146240234375, 0.0484619140625, -0.0302581787109375, -0.03765869140625, -0.0498046875, 
0.0171051025390625, 0.05767822265625, -0.005558013916015625, 0.065185546875, 0.034881591796875, -0.0262908935546875, -0.01389312744140625, -0.0592041015625, -0.0074462890625, -0.040374755859375, 0.01352691650390625, -0.01003265380859375, -0.05792236328125, 0.04541015625, 0.0283203125, 0.0255126953125, 0.04632568359375, 0.049346923828125, -0.01544189453125, 0.06439208984375, 0.048004150390625, 0.002147674560546875, 0.03289794921875, -0.048187255859375, 0.0080718994140625, -0.0543212890625, -0.0307769775390625, -0.03131103515625, -0.014068603515625, -0.0457763671875, -0.050933837890625, 0.0186920166015625, 0.0276336669921875, -0.060333251953125, 0.022918701171875, -0.04718017578125, 0.012176513671875, 0.0185699462890625, 0.010986328125, 0.013702392578125, -0.003154754638671875, 0.01001739501953125, 0.0193939208984375, -0.05584716796875, -0.0283203125, 0.0662841796875, 0.02764892578125, 0.058074951171875, 0.01531982421875, 0.05438232421875, 0.022735595703125, 0.03924560546875, -0.037200927734375, 0.038055419921875, 0.01020050048828125, -0.06005859375, -0.03619384765625, -0.018646240234375, -0.0755615234375, 0.03277587890625, -0.01326751708984375, -0.046905517578125, 0.0253143310546875, 0.0170135498046875, -0.017913818359375, 0.034759521484375, -0.0439453125, 0.05230712890625, -0.005748748779296875, -0.0263824462890625, -0.0172271728515625, -0.04571533203125, 0.03302001953125, -0.001476287841796875, 0.0197296142578125, -0.0229339599609375, -0.01273345947265625, 0.039093017578125, -0.07684326171875, 0.0872802734375, -0.00010734796524047852, -0.01739501953125, 0.045196533203125, 0.003818511962890625, 0.038482666015625, -0.002902984619140625, -0.01544189453125, 0.0264739990234375, -0.00789642333984375, -0.0257568359375, -0.0302581787109375, 0.046966552734375, -0.08135986328125, -0.054107666015625, -0.022247314453125, -0.037078857421875, 0.01436614990234375, 0.0157623291015625, 0.0163421630859375, 0.021148681640625, -0.00833892822265625, 0.018035888671875, 
0.04400634765625, -0.013153076171875, 0.041351318359375, 0.032684326171875, -0.017913818359375, -0.0399169921875, 0.063232421875, 0.01410675048828125, 0.01044464111328125, 0.023529052734375, 0.01349639892578125, -0.021270751953125, -0.026824951171875, -0.042144775390625, 0.043853759765625, -0.0533447265625, -0.034454345703125, -0.0235443115234375, -0.0274200439453125, -0.045684814453125, -0.020355224609375, -0.0465087890625, -0.037200927734375, -0.05078125, 0.01715087890625, 0.05340576171875, 0.05828857421875, -0.01490020751953125, 0.015380859375, -0.051513671875, 0.0027065277099609375, 0.0257568359375, 0.00327301025390625, 0.018585205078125, -0.05999755859375, -0.0211639404296875, 0.007656097412109375, -0.031829833984375, -0.058807373046875, 0.0548095703125, 0.00630950927734375, 0.054290771484375, 0.0192718505859375, 0.00888824462890625, 0.058685302734375, -0.049530029296875, 0.070556640625, 0.040771484375, -0.054962158203125, 0.02301025390625, -0.0248565673828125, 0.014892578125, 0.0234527587890625, 0.03240966796875, -0.01308441162109375, -0.0364990234375, -0.052581787109375, -0.054168701171875, 0.0284576416015625, 0.0169219970703125, 0.006153106689453125, 0.0170440673828125, 0.03369140625, 0.002841949462890625, 0.0115509033203125, -0.054534912109375, -0.041168212890625, -0.02423095703125, -0.0009074211120605469, 0.01959228515625, 0.0063323974609375, -0.0201416015625, -0.044891357421875, 0.0830078125, -0.0097503662109375, 0.035736083984375, 0.014984130859375, 0.010223388671875, -0.0098724365234375, -0.0003185272216796875, 0.03564453125, 0.068603515625, -0.00372314453125, -0.01206207275390625, 0.01454925537109375, -0.0310211181640625, 0.005596160888671875, 0.0230865478515625, -0.006252288818359375, -0.01352691650390625, 0.03094482421875, 0.06341552734375, -0.0046844482421875, -0.03204345703125, 0.030426025390625, -0.0009336471557617188, -0.018157958984375, -0.021820068359375, 0.02093505859375, 0.030029296875, 0.047637939453125, 0.037200927734375, 
-0.0025501251220703125, 0.01082611083984375, -0.021270751953125, -0.00368499755859375, 0.04486083984375, -0.01123046875, -0.0257110595703125, 0.05816650390625, 0.0018625259399414062, -0.031982421875, 0.04705810546875, -0.0013885498046875, -0.031036376953125, 0.06646728515625, 0.049041748046875, 0.06671142578125, -0.00696563720703125, 0.00954437255859375, 0.01904296875, 0.0135955810546875, -0.00433349609375, 0.00286102294921875, -0.0018568038940429688, -0.038421630859375, -0.0271453857421875, -0.046478271484375, -0.035614013671875, 0.007091522216796875, -0.032684326171875, 0.042877197265625, -0.03668212890625, -0.01202392578125, -0.0057220458984375, 0.00846099853515625, -0.0472412109375, 0.0013027191162109375, 0.0311431884765625, 0.07275390625, -0.038238525390625, 0.0694580078125, 0.033233642578125, -0.03839111328125, -0.0689697265625, -0.0232391357421875, 0.012237548828125, -0.056427001953125, 0.0304718017578125, 0.00440216064453125, -0.003017425537109375, 0.0022144317626953125, -0.06280517578125, -0.07122802734375, 0.11627197265625, 0.0220184326171875, -0.046417236328125, -0.018585205078125, -0.00795745849609375, 0.035919189453125, -0.033599853515625, 0.029022216796875, 0.0234832763671875, 0.039825439453125, 0.0145111083984375, -0.08636474609375, 0.00603485107421875, -0.033843994140625, 0.00675201416015625, -0.0005755424499511719, -0.07965087890625, 0.07147216796875, -0.01044464111328125, 0.0010433197021484375, 0.01087188720703125, 0.060546875, 0.04339599609375, 0.0157012939453125, 0.039947509765625, 0.036346435546875, 0.06524658203125, 0.0091094970703125, 0.09039306640625, -0.0260009765625, 0.040008544921875, 0.059722900390625, -0.01525115966796875, 0.047943115234375, 0.0184173583984375, -0.03515625, 0.0279541015625, 0.06146240234375, -0.0153045654296875, 0.021240234375, 0.0216217041015625, -0.02581787109375, -0.00955963134765625, -0.0025634765625, -0.057586669921875, -0.001544952392578125, 0.01358795166015625, 0.0013217926025390625, -0.0025539398193359375, 
0.000720977783203125, 0.003208160400390625, -0.038299560546875, -0.0286102294921875, 0.041168212890625, 0.0214996337890625, -0.026824951171875, 0.08367919921875, -0.022216796875, 0.06439208984375, -0.055877685546875, -0.0186614990234375, -0.029876708984375, 0.0129547119140625, -0.01202392578125, -0.04656982421875, 0.0024566650390625, -0.01312255859375, -0.0182647705078125, -0.005016326904296875, 0.063232421875, -0.0231781005859375, -0.04052734375, 0.041168212890625, 0.03240966796875, 0.024383544921875, 0.019256591796875, -0.06768798828125, 0.02313232421875, 0.0013494491577148438, -0.0487060546875, 0.0374755859375, 0.03680419921875, 0.00029659271240234375, 0.06158447265625, 0.04833984375, 0.00058746337890625, 0.0209197998046875, -0.0034332275390625, 0.0780029296875, -0.022308349609375, -0.0076904296875, -0.06646728515625, 0.05804443359375, -0.005985260009765625, -0.01544189453125, 0.0618896484375, 0.047882080078125, 0.054595947265625, -0.0128326416015625, 0.05364990234375, -0.01519775390625, 0.007770538330078125, -0.01161956787109375, 0.07647705078125, -0.07135009765625, 0.022064208984375, -0.0078582763671875, -0.060821533203125, -0.01629638671875, 0.044586181640625, 0.0163116455078125, 0.004791259765625, 0.019256591796875, 0.07666015625, 0.00531768798828125, -0.0134124755859375, 0.03411865234375, 0.026947021484375, 0.033355712890625, 0.0360107421875, 0.06304931640625, -0.041412353515625, 0.052490234375, -0.037109375, -0.0201873779296875, -0.027191162109375, -0.062042236328125, -0.0655517578125, -0.032684326171875, -0.035308837890625, -0.0562744140625, -0.0023136138916015625, 0.08062744140625, 0.055419921875, -0.050933837890625, -0.04058837890625, 0.004467010498046875, 0.0259246826171875, -0.012115478515625, -0.018890380859375, 0.0176849365234375, 0.007175445556640625, -0.05767822265625, 0.03240966796875, 0.01190948486328125, 0.03814697265625, -0.0262451171875, -0.0274200439453125, -0.0224609375, 0.0162506103515625, 0.049591064453125, 0.040008544921875, 
-0.049591064453125, -0.0090484619140625, 0.0024700164794921875, -0.011871337890625, 0.0135040283203125, 0.0262603759765625, -0.03924560546875, 0.00921630859375, 0.048095703125, 0.032196044921875, 0.051055908203125, -0.0131683349609375, 0.03472900390625, -0.014312744140625, 0.005870819091796875, 0.005588531494140625, 0.03778076171875, 0.0230560302734375, -0.038055419921875, 0.047088623046875, 0.023162841796875, -0.05767822265625, -0.07012939453125, -0.0251007080078125, -0.08233642578125, -0.02880859375, 0.0780029296875, 0.004642486572265625, -0.036956787109375, 0.0025577545166015625, -0.0213775634765625, 0.038848876953125, -0.038421630859375, 0.0279388427734375, 0.0435791015625, -0.0292205810546875, -0.01438140869140625, -0.050567626953125, 0.024383544921875, 0.0004024505615234375, -0.05474853515625, 0.0026073455810546875, 0.060546875, 0.0262908935546875, 0.0224456787109375, 0.05938720703125, -0.0021915435791015625, 0.0272979736328125, 0.01007080078125, 0.032501220703125, -0.01001739501953125, -0.0154266357421875, -0.029754638671875, 0.00769805908203125, -0.00498199462890625, -0.0054168701171875 ] ]
SkunkworksAI/Mistralic-7B-1
2023-10-03T22:04:49.000Z
[ "transformers", "pytorch", "endpoints_compatible", "has_space", "region:us" ]
null
SkunkworksAI
null
null
SkunkworksAI/Mistralic-7B-1
20
8,355
transformers
2023-09-29T23:45:09
<p><h1> 🦾 Mistralic-7B-1 🦾 </h1></p> Special thanks to Together Compute for sponsoring Skunkworks with compute! **INFERENCE** ``` import torch from transformers import AutoModelForCausalLM, AutoTokenizer torch.set_default_device('cuda') system_prompt = "Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.\n\n" system_no_input_prompt = "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\n" def generate_prompt(instruction, input=None): if input: prompt = f"### System:\n{system_prompt}\n\n" else: prompt = f"### System:\n{system_no_input_prompt}\n\n" prompt += f"### Instruction:\n{instruction}\n\n" if input: prompt += f"### Input:\n{input}\n\n" return prompt + """### Response:\n""" device = "cuda" model = AutoModelForCausalLM.from_pretrained("SkunkworksAI/Mistralic-7B-1") tokenizer = AutoTokenizer.from_pretrained("SkunkworksAI/Mistralic-7B-1") while True: instruction = input("Enter Instruction: ") instruction = generate_prompt(instruction) inputs = tokenizer(instruction, return_tensors="pt", return_attention_mask=False) outputs = model.generate(**inputs, max_length=1000, do_sample=True, temperature=0.01, use_cache=True, eos_token_id=tokenizer.eos_token_id) text = tokenizer.batch_decode(outputs)[0] print(text) ``` **EVALUATION** ![image/png](https://cdn-uploads.huggingface.co/production/uploads/64b7e345f92b20f7a38bf47a/ycpNhdGZHGbai_wslT2Bg.png) Average: 0.72157 For comparison: mistralai/Mistral-7B-v0.1 scores 0.7116 mistralai/Mistral-7B-Instruct-v0.1 scores 0.6794
1,749
[ [ -0.02069091796875, -0.0516357421875, 0.0295257568359375, 0.0372314453125, -0.0212554931640625, -0.0256805419921875, -0.01351165771484375, -0.006992340087890625, 0.007171630859375, 0.004467010498046875, -0.046356201171875, -0.033599853515625, -0.044647216796875, -0.0012712478637695312, -0.0220947265625, 0.0701904296875, 0.0029430389404296875, -0.00830841064453125, -0.01666259765625, 0.0059661865234375, -0.0288848876953125, -0.0296783447265625, -0.057098388671875, -0.0288543701171875, 0.01412200927734375, 0.0041961669921875, 0.030426025390625, 0.036865234375, 0.01355743408203125, 0.03411865234375, -0.01251220703125, 0.003673553466796875, -0.02142333984375, 0.01508331298828125, 0.007488250732421875, -0.036285400390625, -0.01335906982421875, 0.0013971328735351562, 0.036102294921875, 0.0178070068359375, -0.0020847320556640625, 0.03289794921875, -0.002796173095703125, 0.01203155517578125, -0.04803466796875, 0.0207977294921875, -0.028472900390625, -0.00865936279296875, -0.00135040283203125, -0.01140594482421875, -0.037017822265625, -0.01554107666015625, -0.0184173583984375, -0.057708740234375, 0.02972412109375, -0.0046234130859375, 0.08477783203125, 0.04388427734375, -0.01788330078125, -0.022216796875, -0.03240966796875, 0.0657958984375, -0.07269287109375, 0.00823974609375, 0.0179901123046875, 0.006740570068359375, -0.024627685546875, -0.0826416015625, -0.05621337890625, -0.0253753662109375, -0.01056671142578125, 0.025970458984375, -0.028564453125, 0.0176544189453125, 0.04339599609375, 0.0232696533203125, -0.032806396484375, -0.01277923583984375, -0.044525146484375, -0.0166015625, 0.035400390625, 0.03887939453125, 0.007537841796875, -0.0268402099609375, -0.03009033203125, -0.0484619140625, 0.00643157958984375, 0.0321044921875, 0.010772705078125, 0.00006210803985595703, -0.0242767333984375, 0.03875732421875, -0.0181732177734375, 0.04931640625, 0.03692626953125, -0.007511138916015625, 0.03912353515625, -0.0036468505859375, -0.05218505859375, -0.0095672607421875, 
0.082763671875, 0.0214080810546875, -0.0095367431640625, 0.0039005279541015625, -0.01306915283203125, 0.01512908935546875, 0.011199951171875, -0.06854248046875, -0.034576416015625, 0.03302001953125, -0.032684326171875, -0.035400390625, 0.022705078125, -0.052825927734375, -0.01422882080078125, -0.01297760009765625, 0.053070068359375, -0.029052734375, -0.0253753662109375, -0.006839752197265625, -0.036102294921875, 0.0250244140625, 0.00887298583984375, -0.05523681640625, 0.0255889892578125, 0.03558349609375, 0.06109619140625, 0.0004987716674804688, -0.03521728515625, -0.02227783203125, -0.001186370849609375, -0.01331329345703125, 0.040435791015625, -0.01314544677734375, -0.034332275390625, -0.04156494140625, 0.0063323974609375, -0.020904541015625, -0.035003662109375, 0.028533935546875, -0.006809234619140625, 0.0251312255859375, -0.015167236328125, -0.03009033203125, -0.0219573974609375, 0.0012178421020507812, -0.033935546875, 0.11273193359375, 0.023590087890625, -0.07318115234375, 0.01049041748046875, -0.04327392578125, -0.01593017578125, -0.0172576904296875, -0.0254058837890625, -0.0295257568359375, 0.0140228271484375, 0.00899505615234375, 0.034576416015625, -0.01180267333984375, 0.01300811767578125, -0.03033447265625, -0.02880859375, 0.0248870849609375, -0.0517578125, 0.09326171875, 0.026031494140625, -0.06890869140625, 0.03662109375, -0.052276611328125, 0.0175933837890625, 0.00867462158203125, -0.011474609375, 0.0014190673828125, -0.03204345703125, 0.01430511474609375, 0.019317626953125, 0.03192138671875, -0.03509521484375, 0.0171661376953125, -0.02764892578125, 0.03021240234375, 0.067138671875, 0.0004856586456298828, 0.028594970703125, -0.01812744140625, 0.03729248046875, 0.020263671875, 0.0186920166015625, 0.0022106170654296875, -0.04058837890625, -0.0738525390625, -0.033477783203125, -0.0022106170654296875, 0.04876708984375, -0.0673828125, 0.03289794921875, 0.01450347900390625, -0.060150146484375, -0.032440185546875, -0.0195159912109375, 0.0250091552734375, 
0.048828125, 0.0274505615234375, 0.01342010498046875, -0.03912353515625, -0.0557861328125, 0.003856658935546875, -0.0191497802734375, 0.000446319580078125, 0.007678985595703125, 0.03143310546875, -0.01187896728515625, 0.0596923828125, -0.049591064453125, 0.00955963134765625, -0.01168060302734375, 0.0216522216796875, 0.06304931640625, 0.053955078125, 0.047393798828125, -0.034027099609375, -0.0269775390625, -0.01235198974609375, -0.057952880859375, -0.024627685546875, -0.0263824462890625, -0.018829345703125, 0.00481414794921875, 0.0301361083984375, -0.06842041015625, 0.066650390625, 0.0206451416015625, -0.04290771484375, 0.062744140625, -0.0318603515625, 0.0212249755859375, -0.0916748046875, 0.020294189453125, -0.007549285888671875, -0.0004756450653076172, -0.0380859375, 0.0153961181640625, -0.003314971923828125, 0.00493621826171875, -0.0231475830078125, 0.055572509765625, -0.0323486328125, 0.00344085693359375, -0.0174560546875, -0.0271759033203125, -0.0049285888671875, 0.04290771484375, -0.004817962646484375, 0.05322265625, 0.06561279296875, -0.060577392578125, 0.04345703125, 0.004669189453125, 0.00603485107421875, 0.008087158203125, -0.06927490234375, 0.00498199462890625, 0.00849151611328125, 0.014801025390625, -0.0877685546875, -0.01140594482421875, 0.05059814453125, -0.0653076171875, 0.0174102783203125, -0.00591278076171875, -0.028594970703125, -0.030120849609375, -0.01361846923828125, 0.034393310546875, 0.041168212890625, -0.042999267578125, 0.05987548828125, -0.00507354736328125, 0.007732391357421875, -0.048980712890625, -0.042572021484375, -0.0208740234375, -0.0147247314453125, -0.032958984375, 0.003032684326171875, -0.0183563232421875, -0.0144195556640625, -0.01154327392578125, -0.019744873046875, -0.0166168212890625, 0.01174163818359375, 0.046600341796875, 0.0321044921875, -0.0079345703125, -0.0261077880859375, 0.01708984375, -0.015228271484375, 0.037353515625, -0.0163421630859375, 0.061859130859375, -0.01244354248046875, -0.021697998046875, 
-0.047576904296875, 0.005584716796875, 0.043792724609375, 0.001125335693359375, 0.04339599609375, 0.06927490234375, -0.038177490234375, -0.01096343994140625, -0.0291290283203125, -0.01328277587890625, -0.040863037109375, 0.01494598388671875, -0.034698486328125, -0.0233001708984375, 0.046630859375, 0.024383544921875, 0.020721435546875, 0.060791015625, 0.050994873046875, -0.00525665283203125, 0.0732421875, 0.0173187255859375, 0.00379180908203125, 0.030975341796875, -0.0555419921875, 0.0041656494140625, -0.035247802734375, -0.031707763671875, -0.042022705078125, -0.013153076171875, -0.03564453125, -0.025848388671875, 0.0221710205078125, 0.021270751953125, -0.0243377685546875, 0.03741455078125, -0.06622314453125, 0.00507354736328125, 0.060089111328125, 0.01904296875, -0.0189971923828125, 0.0026226043701171875, -0.029388427734375, 0.01483917236328125, -0.048248291015625, -0.021331787109375, 0.0552978515625, 0.0166168212890625, 0.061187744140625, -0.01137542724609375, 0.07366943359375, 0.005565643310546875, 0.02105712890625, -0.052520751953125, 0.036529541015625, 0.0185394287109375, -0.0433349609375, -0.0212554931640625, -0.04083251953125, -0.0589599609375, 0.0292510986328125, -0.0092926025390625, -0.05865478515625, 0.0175018310546875, 0.01126861572265625, -0.035888671875, 0.0221099853515625, -0.06292724609375, 0.063720703125, -0.004383087158203125, -0.0242156982421875, 0.0011472702026367188, -0.0250701904296875, 0.023284912109375, 0.029449462890625, -0.00872039794921875, 0.011322021484375, 0.00807952880859375, 0.07000732421875, -0.040496826171875, 0.055389404296875, -0.03302001953125, 0.010986328125, 0.032501220703125, -0.00714874267578125, 0.01244354248046875, 0.0205841064453125, -0.006771087646484375, 0.006084442138671875, 0.030853271484375, -0.03143310546875, -0.0382080078125, 0.0556640625, -0.06512451171875, -0.03729248046875, -0.0635986328125, -0.033843994140625, 0.021942138671875, 0.0191650390625, 0.05694580078125, 0.043792724609375, 0.00759124755859375, 
-0.01380157470703125, 0.042022705078125, -0.013275146484375, 0.054229736328125, -0.0011968612670898438, -0.005428314208984375, -0.053070068359375, 0.06158447265625, -0.015869140625, 0.004482269287109375, 0.001323699951171875, 0.0160369873046875, -0.038299560546875, -0.0078887939453125, -0.038848876953125, 0.01190185546875, -0.05596923828125, -0.0269012451171875, -0.0506591796875, -0.029327392578125, -0.053314208984375, -0.01003265380859375, -0.01280975341796875, -0.027374267578125, -0.048492431640625, 0.00464630126953125, 0.033843994140625, 0.045867919921875, -0.02374267578125, 0.034027099609375, -0.029998779296875, 0.0428466796875, 0.00995635986328125, 0.00083160400390625, 0.0203857421875, -0.055206298828125, -0.0006556510925292969, 0.0034351348876953125, -0.03717041015625, -0.06854248046875, 0.0281829833984375, -0.01036834716796875, 0.05621337890625, 0.04736328125, 0.01250457763671875, 0.0723876953125, -0.00955963134765625, 0.0552978515625, 0.031280517578125, -0.0806884765625, 0.02880859375, -0.0026569366455078125, 0.027862548828125, 0.0305633544921875, 0.0023860931396484375, -0.02667236328125, -0.03924560546875, -0.07330322265625, -0.07147216796875, 0.059112548828125, 0.0308380126953125, 0.0008769035339355469, -0.015960693359375, 0.018890380859375, 0.0013132095336914062, -0.0018863677978515625, -0.057098388671875, -0.05230712890625, -0.036376953125, -0.0302886962890625, 0.0078582763671875, 0.0014095306396484375, 0.0005564689636230469, -0.042022705078125, 0.054718017578125, 0.00921630859375, 0.054962158203125, 0.0269012451171875, -0.005054473876953125, 0.01163482666015625, 0.000469207763671875, 0.053314208984375, 0.04437255859375, -0.01544952392578125, 0.01074981689453125, 0.01806640625, -0.06219482421875, 0.0127105712890625, 0.01480865478515625, -0.01200103759765625, 0.0211639404296875, 0.02587890625, 0.0609130859375, -0.00701904296875, -0.007564544677734375, 0.03155517578125, -0.021453857421875, -0.0179443359375, -0.044708251953125, 0.02581787109375, 
0.0026073455810546875, 0.021728515625, 0.04278564453125, 0.0185699462890625, 0.01200103759765625, -0.004772186279296875, 0.0154876708984375, 0.0202484130859375, -0.01129913330078125, -0.01216888427734375, 0.068115234375, -0.01154327392578125, -0.01256561279296875, 0.038360595703125, -0.02191162109375, -0.044525146484375, 0.07830810546875, 0.044647216796875, 0.07037353515625, -0.001842498779296875, 0.003688812255859375, 0.05712890625, 0.0255889892578125, 0.0023250579833984375, 0.041961669921875, -0.01090240478515625, -0.0212249755859375, -0.01367950439453125, -0.041046142578125, -0.012786865234375, 0.0187225341796875, -0.07025146484375, 0.018829345703125, -0.061248779296875, -0.020782470703125, 0.0025577545166015625, 0.00490570068359375, -0.05706787109375, 0.03582763671875, 0.003936767578125, 0.073486328125, -0.07025146484375, 0.050018310546875, 0.050018310546875, -0.037078857421875, -0.07763671875, -0.007045745849609375, -0.0172576904296875, -0.046600341796875, 0.046600341796875, 0.023406982421875, -0.0029964447021484375, 0.0114898681640625, -0.064697265625, -0.05682373046875, 0.0867919921875, 0.02142333984375, -0.009918212890625, 0.0040130615234375, -0.0136566162109375, 0.0229034423828125, 0.00013363361358642578, 0.0648193359375, 0.035797119140625, 0.035888671875, 0.002437591552734375, -0.051025390625, 0.023529052734375, -0.0271148681640625, -0.00862884521484375, 0.01268768310546875, -0.03045654296875, 0.086181640625, -0.0208892822265625, -0.023895263671875, 0.0290069580078125, 0.08746337890625, 0.050018310546875, 0.0465087890625, 0.039154052734375, 0.05712890625, 0.043670654296875, -0.0181732177734375, 0.059356689453125, -0.0227508544921875, 0.048187255859375, 0.06878662109375, 0.0243377685546875, 0.043182373046875, 0.0447998046875, -0.017059326171875, 0.0321044921875, 0.058807373046875, -0.01483154296875, 0.0307464599609375, 0.0255126953125, -0.01654052734375, -0.0104522705078125, 0.0308685302734375, -0.03729248046875, 0.026275634765625, 0.024688720703125, 
-0.016082763671875, -0.01507568359375, -0.010955810546875, 0.004726409912109375, -0.0283660888671875, -0.015228271484375, 0.03564453125, -0.004085540771484375, -0.046600341796875, 0.06927490234375, 0.0092926025390625, 0.06256103515625, -0.033599853515625, -0.022369384765625, -0.01239776611328125, 0.0127716064453125, -0.031219482421875, -0.040985107421875, 0.0004444122314453125, -0.0005655288696289062, -0.0229339599609375, 0.01409912109375, 0.0290985107421875, -0.0231170654296875, -0.051544189453125, 0.0030994415283203125, -0.0008502006530761719, 0.0384521484375, 0.0003070831298828125, -0.061676025390625, 0.00687408447265625, 0.006679534912109375, -0.0287322998046875, -0.0112762451171875, 0.034637451171875, 0.00997161865234375, 0.0399169921875, 0.055145263671875, -0.0030689239501953125, 0.03558349609375, -0.0120849609375, 0.07525634765625, -0.0498046875, -0.042999267578125, -0.058807373046875, 0.044525146484375, -0.0122833251953125, -0.061126708984375, 0.05841064453125, 0.050445556640625, 0.062164306640625, -0.0121002197265625, 0.052520751953125, -0.04010009765625, -0.0016412734985351562, -0.038726806640625, 0.05706787109375, -0.0186309814453125, -0.008026123046875, -0.0184173583984375, -0.057708740234375, 0.01885986328125, 0.07049560546875, -0.01541900634765625, 0.0206146240234375, 0.0562744140625, 0.06988525390625, -0.019256591796875, -0.016815185546875, 0.01416015625, 0.0184478759765625, 0.01214599609375, 0.042449951171875, 0.050201416015625, -0.053497314453125, 0.0276031494140625, -0.0555419921875, -0.035858154296875, 0.00754547119140625, -0.05029296875, -0.0751953125, -0.047332763671875, -0.0141448974609375, -0.05731201171875, -0.03021240234375, 0.08038330078125, 0.03997802734375, -0.07061767578125, -0.0195465087890625, -0.007770538330078125, -0.004253387451171875, -0.0316162109375, -0.0270233154296875, 0.035369873046875, -0.0287933349609375, -0.05072021484375, 0.01265716552734375, -0.0087432861328125, 0.004085540771484375, -0.0127105712890625, 
-0.0141143798828125, -0.004425048828125, -0.00237274169921875, 0.01371002197265625, 0.020263671875, -0.07012939453125, 0.0016183853149414062, 0.0093231201171875, -0.0254364013671875, -0.0028324127197265625, 0.04083251953125, -0.053009033203125, 0.02142333984375, 0.04400634765625, 0.0277252197265625, 0.05224609375, -0.01302337646484375, 0.02191162109375, -0.035919189453125, 0.0226593017578125, 0.004863739013671875, 0.0302886962890625, 0.009552001953125, -0.046051025390625, 0.01029205322265625, 0.0274200439453125, -0.0360107421875, -0.055267333984375, -0.0103912353515625, -0.08740234375, -0.01361083984375, 0.08868408203125, -0.0167388916015625, -0.046539306640625, 0.00836181640625, -0.0355224609375, 0.057891845703125, -0.035888671875, 0.058807373046875, 0.023468017578125, -0.01275634765625, -0.00609588623046875, -0.02777099609375, 0.0306854248046875, 0.0223388671875, -0.058685302734375, -0.003063201904296875, 0.02490234375, 0.03765869140625, 0.0164794921875, 0.04095458984375, 0.00884246826171875, 0.03778076171875, 0.0235137939453125, 0.0235595703125, -0.02703857421875, -0.0062103271484375, -0.039886474609375, -0.011322021484375, -0.007781982421875, -0.050262451171875 ] ]
TheBloke/WizardLM-13B-V1.2-GPTQ
2023-09-27T12:45:05.000Z
[ "transformers", "safetensors", "llama", "text-generation", "arxiv:2304.12244", "arxiv:2306.08568", "arxiv:2308.09583", "license:llama2", "has_space", "text-generation-inference", "region:us" ]
text-generation
TheBloke
null
null
TheBloke/WizardLM-13B-V1.2-GPTQ
30
8,354
transformers
2023-07-25T22:17:07
--- license: llama2 model_name: WizardLM 13B V1.2 base_model: WizardLM/WizardLM-13B-V1.2 inference: false model_creator: WizardLM model_type: llama prompt_template: 'A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user''s questions. USER: {prompt} ASSISTANT: ' quantized_by: TheBloke --- <!-- header start --> <!-- 200823 --> <div style="width: auto; margin-left: auto; margin-right: auto"> <img src="https://i.imgur.com/EBdldam.jpg" alt="TheBlokeAI" style="width: 100%; min-width: 400px; display: block; margin: auto;"> </div> <div style="display: flex; justify-content: space-between; width: 100%;"> <div style="display: flex; flex-direction: column; align-items: flex-start;"> <p style="margin-top: 0.5em; margin-bottom: 0em;"><a href="https://discord.gg/theblokeai">Chat & support: TheBloke's Discord server</a></p> </div> <div style="display: flex; flex-direction: column; align-items: flex-end;"> <p style="margin-top: 0.5em; margin-bottom: 0em;"><a href="https://www.patreon.com/TheBlokeAI">Want to contribute? TheBloke's Patreon page</a></p> </div> </div> <div style="text-align:center; margin-top: 0em; margin-bottom: 0em"><p style="margin-top: 0.25em; margin-bottom: 0em;">TheBloke's LLM work is generously supported by a grant from <a href="https://a16z.com">andreessen horowitz (a16z)</a></p></div> <hr style="margin-top: 1.0em; margin-bottom: 1.0em;"> <!-- header end --> # WizardLM 13B V1.2 - GPTQ - Model creator: [WizardLM](https://huggingface.co/WizardLM) - Original model: [WizardLM 13B V1.2](https://huggingface.co/WizardLM/WizardLM-13B-V1.2) <!-- description start --> ## Description This repo contains GPTQ model files for [WizardLM's WizardLM 13B V1.2](https://huggingface.co/WizardLM/WizardLM-13B-V1.2). Multiple GPTQ parameter permutations are provided; see Provided Files below for details of the options provided, their parameters, and the software used to create them. 
<!-- description end --> <!-- repositories-available start --> ## Repositories available * [AWQ model(s) for GPU inference.](https://huggingface.co/TheBloke/WizardLM-13B-V1.2-AWQ) * [GPTQ models for GPU inference, with multiple quantisation parameter options.](https://huggingface.co/TheBloke/WizardLM-13B-V1.2-GPTQ) * [2, 3, 4, 5, 6 and 8-bit GGUF models for CPU+GPU inference](https://huggingface.co/TheBloke/WizardLM-13B-V1.2-GGUF) * [WizardLM's original unquantised fp16 model in pytorch format, for GPU inference and for further conversions](https://huggingface.co/WizardLM/WizardLM-13B-V1.2) <!-- repositories-available end --> <!-- prompt-template start --> ## Prompt template: Vicuna ``` A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions. USER: {prompt} ASSISTANT: ``` <!-- prompt-template end --> <!-- README_GPTQ.md-provided-files start --> ## Provided files and GPTQ parameters Multiple quantisation parameters are provided, to allow you to choose the best one for your hardware and requirements. Each separate quant is in a different branch. See below for instructions on fetching from different branches. All recent GPTQ files are made with AutoGPTQ, and all files in non-main branches are made with AutoGPTQ. Files in the `main` branch which were uploaded before August 2023 were made with GPTQ-for-LLaMa. <details> <summary>Explanation of GPTQ parameters</summary> - Bits: The bit size of the quantised model. - GS: GPTQ group size. Higher numbers use less VRAM, but have lower quantisation accuracy. "None" is the lowest possible value. - Act Order: True or False. Also known as `desc_act`. True results in better quantisation accuracy. Some GPTQ clients have had issues with models that use Act Order plus Group Size, but this is generally resolved now. - Damp %: A GPTQ parameter that affects how samples are processed for quantisation. 
0.01 is default, but 0.1 results in slightly better accuracy. - GPTQ dataset: The dataset used for quantisation. Using a dataset more appropriate to the model's training can improve quantisation accuracy. Note that the GPTQ dataset is not the same as the dataset used to train the model - please refer to the original model repo for details of the training dataset(s). - Sequence Length: The length of the dataset sequences used for quantisation. Ideally this is the same as the model sequence length. For some very long sequence models (16+K), a lower sequence length may have to be used. Note that a lower sequence length does not limit the sequence length of the quantised model. It only impacts the quantisation accuracy on longer inference sequences. - ExLlama Compatibility: Whether this file can be loaded with ExLlama, which currently only supports Llama models in 4-bit. </details> | Branch | Bits | GS | Act Order | Damp % | GPTQ Dataset | Seq Len | Size | ExLlama | Desc | | ------ | ---- | -- | --------- | ------ | ------------ | ------- | ---- | ------- | ---- | | [main](https://huggingface.co/TheBloke/WizardLM-13B-V1.2-GPTQ/tree/main) | 4 | 128 | No | 0.1 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 4096 | 7.26 GB | Yes | 4-bit, without Act Order and group size 128g. | | [gptq-4bit-32g-actorder_True](https://huggingface.co/TheBloke/WizardLM-13B-V1.2-GPTQ/tree/gptq-4bit-32g-actorder_True) | 4 | 32 | Yes | 0.1 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 4096 | 8.00 GB | Yes | 4-bit, with Act Order and group size 32g. Gives highest possible inference quality, with maximum VRAM usage. | | [gptq-4bit-64g-actorder_True](https://huggingface.co/TheBloke/WizardLM-13B-V1.2-GPTQ/tree/gptq-4bit-64g-actorder_True) | 4 | 64 | Yes | 0.1 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 4096 | 7.51 GB | Yes | 4-bit, with Act Order and group size 64g. 
Uses less VRAM than 32g, but with slightly lower accuracy. | | [gptq-4bit-128g-actorder_True](https://huggingface.co/TheBloke/WizardLM-13B-V1.2-GPTQ/tree/gptq-4bit-128g-actorder_True) | 4 | 128 | Yes | 0.1 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 4096 | 7.26 GB | Yes | 4-bit, with Act Order and group size 128g. Uses even less VRAM than 64g, but with slightly lower accuracy. | | [gptq-8bit--1g-actorder_True](https://huggingface.co/TheBloke/WizardLM-13B-V1.2-GPTQ/tree/gptq-8bit--1g-actorder_True) | 8 | None | Yes | 0.1 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 4096 | 13.36 GB | No | 8-bit, with Act Order. No group size, to lower VRAM requirements. | | [gptq-8bit-128g-actorder_False](https://huggingface.co/TheBloke/WizardLM-13B-V1.2-GPTQ/tree/gptq-8bit-128g-actorder_False) | 8 | 128 | No | 0.1 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 4096 | 13.65 GB | No | 8-bit, with group size 128g for higher inference quality and without Act Order to improve AutoGPTQ speed. | | [gptq-8bit-128g-actorder_True](https://huggingface.co/TheBloke/WizardLM-13B-V1.2-GPTQ/tree/gptq-8bit-128g-actorder_True) | 8 | 128 | Yes | 0.1 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 4096 | 13.65 GB | No | 8-bit, with group size 128g for higher inference quality and with Act Order for even higher accuracy. | | [gptq-8bit-64g-actorder_True](https://huggingface.co/TheBloke/WizardLM-13B-V1.2-GPTQ/tree/gptq-8bit-64g-actorder_True) | 8 | 64 | Yes | 0.1 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 4096 | 13.95 GB | No | 8-bit, with group size 64g and Act Order for even higher inference quality. Poor AutoGPTQ CUDA speed. 
| <!-- README_GPTQ.md-provided-files end --> <!-- README_GPTQ.md-download-from-branches start --> ## How to download from branches - In text-generation-webui, you can add `:branch` to the end of the download name, eg `TheBloke/WizardLM-13B-V1.2-GPTQ:main` - With Git, you can clone a branch with: ``` git clone --single-branch --branch main https://huggingface.co/TheBloke/WizardLM-13B-V1.2-GPTQ ``` - In Python Transformers code, the branch is the `revision` parameter; see below. <!-- README_GPTQ.md-download-from-branches end --> <!-- README_GPTQ.md-text-generation-webui start --> ## How to easily download and use this model in [text-generation-webui](https://github.com/oobabooga/text-generation-webui). Please make sure you're using the latest version of [text-generation-webui](https://github.com/oobabooga/text-generation-webui). It is strongly recommended to use the text-generation-webui one-click-installers unless you're sure you know how to make a manual install. 1. Click the **Model tab**. 2. Under **Download custom model or LoRA**, enter `TheBloke/WizardLM-13B-V1.2-GPTQ`. - To download from a specific branch, enter for example `TheBloke/WizardLM-13B-V1.2-GPTQ:main` - see Provided Files above for the list of branches for each option. 3. Click **Download**. 4. The model will start downloading. Once it's finished it will say "Done". 5. In the top left, click the refresh icon next to **Model**. 6. In the **Model** dropdown, choose the model you just downloaded: `WizardLM-13B-V1.2-GPTQ` 7. The model will automatically load, and is now ready for use! 8. If you want any custom settings, set them and then click **Save settings for this model** followed by **Reload the Model** in the top right. * Note that you do not need to and should not set manual GPTQ parameters any more. These are set automatically from the file `quantize_config.json`. 9. Once you're ready, click the **Text Generation tab** and enter a prompt to get started! 
<!-- README_GPTQ.md-text-generation-webui end --> <!-- README_GPTQ.md-use-from-python start --> ## How to use this GPTQ model from Python code ### Install the necessary packages Requires: Transformers 4.32.0 or later, Optimum 1.12.0 or later, and AutoGPTQ 0.4.2 or later. ```shell pip3 install transformers>=4.32.0 optimum>=1.12.0 pip3 install auto-gptq --extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/ # Use cu117 if on CUDA 11.7 ``` If you have problems installing AutoGPTQ using the pre-built wheels, install it from source instead: ```shell pip3 uninstall -y auto-gptq git clone https://github.com/PanQiWei/AutoGPTQ cd AutoGPTQ pip3 install . ``` ### For CodeLlama models only: you must use Transformers 4.33.0 or later. If 4.33.0 is not yet released when you read this, you will need to install Transformers from source: ```shell pip3 uninstall -y transformers pip3 install git+https://github.com/huggingface/transformers.git ``` ### You can then use the following code ```python from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline model_name_or_path = "TheBloke/WizardLM-13B-V1.2-GPTQ" # To use a different branch, change revision # For example: revision="main" model = AutoModelForCausalLM.from_pretrained(model_name_or_path, device_map="auto", trust_remote_code=False, revision="main") tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True) prompt = "Tell me about AI" prompt_template=f'''A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions. 
USER: {prompt} ASSISTANT: ''' print("\n\n*** Generate:") input_ids = tokenizer(prompt_template, return_tensors='pt').input_ids.cuda() output = model.generate(inputs=input_ids, temperature=0.7, do_sample=True, top_p=0.95, top_k=40, max_new_tokens=512) print(tokenizer.decode(output[0])) # Inference can also be done using transformers' pipeline print("*** Pipeline:") pipe = pipeline( "text-generation", model=model, tokenizer=tokenizer, max_new_tokens=512, do_sample=True, temperature=0.7, top_p=0.95, top_k=40, repetition_penalty=1.1 ) print(pipe(prompt_template)[0]['generated_text']) ``` <!-- README_GPTQ.md-use-from-python end --> <!-- README_GPTQ.md-compatibility start --> ## Compatibility The files provided are tested to work with AutoGPTQ, both via Transformers and using AutoGPTQ directly. They should also work with [Occ4m's GPTQ-for-LLaMa fork](https://github.com/0cc4m/KoboldAI). [ExLlama](https://github.com/turboderp/exllama) is compatible with Llama models in 4-bit. Please see the Provided Files table above for per-file compatibility. [Huggingface Text Generation Inference (TGI)](https://github.com/huggingface/text-generation-inference) is compatible with all GPTQ models. <!-- README_GPTQ.md-compatibility end --> <!-- footer start --> <!-- 200823 --> ## Discord For further support, and discussions on these models and AI in general, join us at: [TheBloke AI's Discord server](https://discord.gg/theblokeai) ## Thanks, and how to contribute Thanks to the [chirper.ai](https://chirper.ai) team! Thanks to Clay from [gpus.llm-utils.org](llm-utils)! I've had a lot of people ask if they can contribute. I enjoy providing models and helping people, and would love to be able to spend even more time doing it, as well as expanding into new projects like fine tuning/training. If you're able and willing to contribute it will be most gratefully received and will help me to keep providing more models, and to start work on new AI projects. 
Donaters will get priority support on any and all AI/LLM/model questions and requests, access to a private Discord room, plus other benefits. * Patreon: https://patreon.com/TheBlokeAI * Ko-Fi: https://ko-fi.com/TheBlokeAI **Special thanks to**: Aemon Algiz. **Patreon special mentions**: Alicia Loh, Stephen Murray, K, Ajan Kanaga, RoA, Magnesian, Deo Leter, Olakabola, Eugene Pentland, zynix, Deep Realms, Raymond Fosdick, Elijah Stavena, Iucharbius, Erik Bjäreholt, Luis Javier Navarrete Lozano, Nicholas, theTransient, John Detwiler, alfie_i, knownsqashed, Mano Prime, Willem Michiel, Enrico Ros, LangChain4j, OG, Michael Dempsey, Pierre Kircher, Pedro Madruga, James Bentley, Thomas Belote, Luke @flexchar, Leonard Tan, Johann-Peter Hartmann, Illia Dulskyi, Fen Risland, Chadd, S_X, Jeff Scroggin, Ken Nordquist, Sean Connelly, Artur Olbinski, Swaroop Kallakuri, Jack West, Ai Maven, David Ziegler, Russ Johnson, transmissions 11, John Villwock, Alps Aficionado, Clay Pascal, Viktor Bowallius, Subspace Studios, Rainer Wilmers, Trenton Dambrowitz, vamX, Michael Levine, 준교 김, Brandon Frisco, Kalila, Trailburnt, Randy H, Talal Aujan, Nathan Dryer, Vadim, 阿明, ReadyPlayerEmma, Tiffany J. Kim, George Stoitzev, Spencer Kim, Jerry Meng, Gabriel Tamborski, Cory Kujawski, Jeffrey Morgan, Spiking Neurons AB, Edmond Seymore, Alexandros Triantafyllidis, Lone Striker, Cap'n Zoog, Nikolai Manek, danny, ya boyyy, Derek Yates, usrbinkat, Mandus, TL, Nathan LeClaire, subjectnull, Imad Khwaja, webtim, Raven Klaugh, Asp the Wyvern, Gabriel Puliatti, Caitlyn Gatomon, Joseph William Delisle, Jonathan Leane, Luke Pendergrass, SuperWojo, Sebastain Graf, Will Dee, Fred von Graf, Andrey, Dan Guido, Daniel P. Andersen, Nitin Borwankar, Elle, Vitor Caleffi, biorpg, jjj, NimbleBox.ai, Pieter, Matthew Berman, terasurfer, Michael Davis, Alex, Stanislav Ovsiannikov Thank you to all my generous patrons and donaters! And thank you again to a16z for their generous grant. 
<!-- footer end --> # Original model card: WizardLM's WizardLM 13B V1.2 This is the **Full-Weight** of WizardLM-13B V1.2 model, this model is trained from **Llama-2 13b**. ## WizardLM: Empowering Large Pre-Trained Language Models to Follow Complex Instructions <p align="center"> 🤗 <a href="https://huggingface.co/WizardLM" target="_blank">HF Repo</a> •🐱 <a href="https://github.com/nlpxucan/WizardLM" target="_blank">Github Repo</a> • 🐦 <a href="https://twitter.com/WizardLM_AI" target="_blank">Twitter</a> • 📃 <a href="https://arxiv.org/abs/2304.12244" target="_blank">[WizardLM]</a> • 📃 <a href="https://arxiv.org/abs/2306.08568" target="_blank">[WizardCoder]</a> • 📃 <a href="https://arxiv.org/abs/2308.09583" target="_blank">[WizardMath]</a> <br> </p> <p align="center"> 👋 Join our <a href="https://discord.gg/VZjjHtWrKs" target="_blank">Discord</a> </p> ## News - 🔥🔥🔥[2023/08/26] We released **WizardCoder-Python-34B-V1.0** , which achieves the **73.2 pass@1** and surpasses **GPT4 (2023/03/15)**, **ChatGPT-3.5**, and **Claude2** on the [HumanEval Benchmarks](https://github.com/openai/human-eval). For more details, please refer to [WizardCoder](https://github.com/nlpxucan/WizardLM/tree/main/WizardCoder). - [2023/06/16] We released **WizardCoder-15B-V1.0** , which surpasses **Claude-Plus (+6.8)**, **Bard (+15.3)** and **InstructCodeT5+ (+22.3)** on the [HumanEval Benchmarks](https://github.com/openai/human-eval). For more details, please refer to [WizardCoder](https://github.com/nlpxucan/WizardLM/tree/main/WizardCoder). 
| Model | Checkpoint | Paper | HumanEval | MBPP | Demo | License | | ----- |------| ---- |------|-------| ----- | ----- | | WizardCoder-Python-34B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardCoder-Python-34B-V1.0" target="_blank">HF Link</a> | 📃 <a href="https://arxiv.org/abs/2306.08568" target="_blank">[WizardCoder]</a> | 73.2 | 61.2 | [Demo](http://47.103.63.15:50085/) | <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama2</a> | | WizardCoder-15B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardCoder-15B-V1.0" target="_blank">HF Link</a> | 📃 <a href="https://arxiv.org/abs/2306.08568" target="_blank">[WizardCoder]</a> | 59.8 |50.6 | -- | <a href="https://huggingface.co/spaces/bigcode/bigcode-model-license-agreement" target="_blank">OpenRAIL-M</a> | | WizardCoder-Python-13B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardCoder-Python-13B-V1.0" target="_blank">HF Link</a> | 📃 <a href="https://arxiv.org/abs/2306.08568" target="_blank">[WizardCoder]</a> | 64.0 | 55.6 | -- | <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama2</a> | | WizardCoder-Python-7B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardCoder-Python-7B-V1.0" target="_blank">HF Link</a> | 📃 <a href="https://arxiv.org/abs/2306.08568" target="_blank">[WizardCoder]</a> | 55.5 | 51.6 | [Demo](http://47.103.63.15:50088/) | <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama2</a> | | WizardCoder-3B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardCoder-3B-V1.0" target="_blank">HF Link</a> | 📃 <a href="https://arxiv.org/abs/2306.08568" target="_blank">[WizardCoder]</a> | 34.8 |37.4 | -- | <a href="https://huggingface.co/spaces/bigcode/bigcode-model-license-agreement" target="_blank">OpenRAIL-M</a> | | WizardCoder-1B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardCoder-1B-V1.0" target="_blank">HF Link</a> | 📃 <a 
href="https://arxiv.org/abs/2306.08568" target="_blank">[WizardCoder]</a> | 23.8 |28.6 | -- | <a href="https://huggingface.co/spaces/bigcode/bigcode-model-license-agreement" target="_blank">OpenRAIL-M</a> | - 🔥 [08/11/2023] We release **WizardMath** Models. - 🔥 Our **WizardMath-70B-V1.0** model slightly outperforms some closed-source LLMs on the GSM8K, including **ChatGPT 3.5**, **Claude Instant 1** and **PaLM 2 540B**. - 🔥 Our **WizardMath-70B-V1.0** model achieves **81.6 pass@1** on the [GSM8k Benchmarks](https://github.com/openai/grade-school-math), which is **24.8** points higher than the SOTA open-source LLM. - 🔥 Our **WizardMath-70B-V1.0** model achieves **22.7 pass@1** on the [MATH Benchmarks](https://github.com/hendrycks/math), which is **9.2** points higher than the SOTA open-source LLM. | Model | Checkpoint | Paper | GSM8k | MATH |Online Demo| License| | ----- |------| ---- |------|-------| ----- | ----- | | WizardMath-70B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardMath-70B-V1.0" target="_blank">HF Link</a> | 📃 <a href="https://arxiv.org/abs/2308.09583" target="_blank">[WizardMath]</a>| **81.6** | **22.7** |[Demo](http://47.103.63.15:50083/)| <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama 2 </a> | | WizardMath-13B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardMath-13B-V1.0" target="_blank">HF Link</a> | 📃 <a href="https://arxiv.org/abs/2308.09583" target="_blank">[WizardMath]</a>| **63.9** | **14.0** |[Demo](http://47.103.63.15:50082/)| <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama 2 </a> | | WizardMath-7B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardMath-7B-V1.0" target="_blank">HF Link</a> | 📃 <a href="https://arxiv.org/abs/2308.09583" target="_blank">[WizardMath]</a>| **54.9** | **10.7** | [Demo](http://47.103.63.15:50080/)| <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" 
target="_blank">Llama 2 </a>| <font size=4> | <sup>Model</sup> | <sup>Checkpoint</sup> | <sup>Paper</sup> |<sup>MT-Bench</sup> | <sup>AlpacaEval</sup> | <sup>WizardEval</sup> | <sup>HumanEval</sup> | <sup>License</sup>| | ----- |------| ---- |------|-------| ----- | ----- | ----- | | <sup>WizardLM-13B-V1.2</sup> | <sup>🤗 <a href="https://huggingface.co/WizardLM/WizardLM-13B-V1.2" target="_blank">HF Link</a> </sup>| | <sup>7.06</sup> | <sup>89.17%</sup> | <sup>101.4% </sup>|<sup>36.6 pass@1</sup>|<sup> <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama 2 License </a></sup> | | <sup>WizardLM-13B-V1.1</sup> |<sup> 🤗 <a href="https://huggingface.co/WizardLM/WizardLM-13B-V1.1" target="_blank">HF Link</a> </sup> | | <sup>6.76</sup> |<sup>86.32%</sup> | <sup>99.3% </sup> |<sup>25.0 pass@1</sup>| <sup>Non-commercial</sup>| | <sup>WizardLM-30B-V1.0</sup> | <sup>🤗 <a href="https://huggingface.co/WizardLM/WizardLM-30B-V1.0" target="_blank">HF Link</a></sup> | | <sup>7.01</sup> | | <sup>97.8% </sup> | <sup>37.8 pass@1</sup>| <sup>Non-commercial</sup> | | <sup>WizardLM-13B-V1.0</sup> | <sup>🤗 <a href="https://huggingface.co/WizardLM/WizardLM-13B-V1.0" target="_blank">HF Link</a> </sup> | | <sup>6.35</sup> | <sup>75.31%</sup> | <sup>89.1% </sup> |<sup> 24.0 pass@1 </sup> | <sup>Non-commercial</sup>| | <sup>WizardLM-7B-V1.0 </sup>| <sup>🤗 <a href="https://huggingface.co/WizardLM/WizardLM-7B-V1.0" target="_blank">HF Link</a> </sup> |<sup> 📃 <a href="https://arxiv.org/abs/2304.12244" target="_blank">[WizardLM]</a> </sup>| | | <sup>78.0% </sup> |<sup>19.1 pass@1 </sup>|<sup> Non-commercial</sup>| </font> **Repository**: https://github.com/nlpxucan/WizardLM **Twitter**: - 🔥🔥🔥 [7/25/2023] We released **WizardLM V1.2** models. 
The **WizardLM-13B-V1.2** is here ([Demo_13B-V1.2](https://b7a19878988c8c73.gradio.app), [Demo_13B-V1.2_bak-1](https://d0a37a76e0ac4b52.gradio.app/), [Full Model Weight](https://huggingface.co/WizardLM/WizardLM-13B-V1.2)). Please checkout the [paper](https://arxiv.org/abs/2304.12244). - 🔥🔥🔥 [7/25/2023] The **WizardLM-13B-V1.2** achieves **7.06** on [MT-Bench Leaderboard](https://chat.lmsys.org/?leaderboard), **89.17%** on [AlpacaEval Leaderboard](https://tatsu-lab.github.io/alpaca_eval/), and **101.4%** on [WizardLM Eval](https://github.com/nlpxucan/WizardLM/blob/main/WizardLM/data/WizardLM_testset.jsonl). (Note: MT-Bench and AlpacaEval are all self-test, will push update and request review. All tests are completed under their official settings.) ❗<b>Note for model system prompts usage:</b> <b>WizardLM</b> adopts the prompt format from <b>Vicuna</b> and supports **multi-turn** conversation. The prompt should be as following: ``` A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions. USER: Hi ASSISTANT: Hello.</s>USER: Who are you? ASSISTANT: I am WizardLM.</s>...... ``` ## Inference WizardLM Demo Script We provide the inference WizardLM demo code [here](https://github.com/nlpxucan/WizardLM/tree/main/demo). Please cite the paper if you use the data or code from WizardLM. ``` @article{xu2023wizardlm, title={Wizardlm: Empowering large language models to follow complex instructions}, author={Xu, Can and Sun, Qingfeng and Zheng, Kai and Geng, Xiubo and Zhao, Pu and Feng, Jiazhan and Tao, Chongyang and Jiang, Daxin}, journal={arXiv preprint arXiv:2304.12244}, year={2023} } ``` ❗<b>To commen concern about dataset:</b> Recently, there have been clear changes in the open-source policy and regulations of our overall organization's code, data, and models. 
Despite this, we have still worked hard to obtain opening the weights of the model first, but the data involves stricter auditing and is in review with our legal team . Our researchers have no authority to publicly release them without authorization. Thank you for your understanding.
25,156
[ [ -0.043121337890625, -0.05712890625, -0.0006093978881835938, 0.01568603515625, -0.00878143310546875, -0.00916290283203125, 0.00920867919921875, -0.0367431640625, 0.010406494140625, 0.0302581787109375, -0.043853759765625, -0.03192138671875, -0.024078369140625, -0.004608154296875, -0.02764892578125, 0.08123779296875, 0.00774383544921875, -0.0177001953125, -0.0025634765625, -0.0205078125, -0.0196380615234375, -0.037109375, -0.057281494140625, -0.01461029052734375, 0.0287017822265625, 0.01018524169921875, 0.06591796875, 0.034027099609375, 0.0109710693359375, 0.0243377685546875, 0.0032672882080078125, 0.0096893310546875, -0.040771484375, -0.0156097412109375, 0.01079559326171875, -0.008392333984375, -0.049102783203125, 0.01328277587890625, 0.033843994140625, 0.0215301513671875, -0.0279083251953125, 0.0176849365234375, 0.0042724609375, 0.059356689453125, -0.03350830078125, 0.0148468017578125, -0.0286407470703125, 0.00922393798828125, -0.0031890869140625, 0.011688232421875, -0.00867462158203125, -0.032318115234375, 0.010284423828125, -0.06329345703125, 0.0222930908203125, 0.01084136962890625, 0.08502197265625, 0.011993408203125, -0.050689697265625, 0.0117340087890625, -0.028594970703125, 0.042144775390625, -0.06964111328125, 0.02410888671875, 0.042999267578125, 0.0181884765625, -0.0237274169921875, -0.060760498046875, -0.051116943359375, -0.0031070709228515625, -0.0169830322265625, 0.0211639404296875, -0.027496337890625, 0.004364013671875, 0.03497314453125, 0.05633544921875, -0.07421875, -0.01461029052734375, -0.0231170654296875, -0.0188140869140625, 0.07421875, 0.0202789306640625, 0.0207366943359375, -0.01465606689453125, -0.01483917236328125, -0.03143310546875, -0.0400390625, 0.00852203369140625, 0.026275634765625, -0.004177093505859375, -0.03424072265625, 0.037811279296875, -0.0275726318359375, 0.043060302734375, 0.0208282470703125, -0.007198333740234375, 0.0293121337890625, -0.048919677734375, -0.036407470703125, -0.021942138671875, 0.09027099609375, 0.0341796875, 
-0.01052093505859375, 0.0112762451171875, -0.00012993812561035156, -0.0186614990234375, 0.0005397796630859375, -0.07421875, -0.046417236328125, 0.03192138671875, -0.0335693359375, -0.01172637939453125, -0.0006656646728515625, -0.05426025390625, -0.009490966796875, -0.00859832763671875, 0.03912353515625, -0.049468994140625, -0.031585693359375, 0.00704193115234375, -0.033721923828125, 0.046630859375, 0.03045654296875, -0.05462646484375, 0.03961181640625, 0.02423095703125, 0.045013427734375, -0.0035800933837890625, -0.015899658203125, -0.01297760009765625, 0.0045013427734375, -0.01611328125, 0.03240966796875, -0.00800323486328125, -0.034393310546875, -0.0313720703125, 0.0209808349609375, 0.00007843971252441406, -0.0209808349609375, 0.032745361328125, -0.023956298828125, 0.039398193359375, -0.03143310546875, -0.048736572265625, -0.028900146484375, 0.0033397674560546875, -0.050506591796875, 0.10015869140625, 0.036224365234375, -0.058837890625, 0.017547607421875, -0.03509521484375, -0.012908935546875, -0.00307464599609375, 0.001739501953125, -0.03900146484375, -0.0099639892578125, 0.0123443603515625, 0.0166015625, -0.025177001953125, 0.00713348388671875, -0.0200042724609375, -0.016754150390625, 0.0103302001953125, -0.051300048828125, 0.10101318359375, 0.0119476318359375, -0.032318115234375, -0.00861358642578125, -0.0601806640625, 0.0156707763671875, 0.033447265625, -0.0113372802734375, 0.0015077590942382812, -0.0167388916015625, 0.00493621826171875, 0.0154876708984375, 0.0206298828125, -0.0179595947265625, 0.036102294921875, -0.0199432373046875, 0.031982421875, 0.046051025390625, 0.005428314208984375, 0.0188140869140625, -0.0281524658203125, 0.033599853515625, 0.0017194747924804688, 0.049285888671875, 0.0149383544921875, -0.059234619140625, -0.048828125, -0.019683837890625, 0.0274505615234375, 0.05267333984375, -0.045867919921875, 0.049346923828125, -0.010406494140625, -0.060638427734375, -0.022735595703125, -0.0069122314453125, 0.0185394287109375, 0.032958984375, 
0.038818359375, -0.03082275390625, -0.0182647705078125, -0.067626953125, 0.0015697479248046875, -0.034912109375, -0.005031585693359375, 0.03057861328125, 0.053558349609375, -0.0099639892578125, 0.065673828125, -0.050262451171875, -0.002655029296875, 0.0089263916015625, 0.004383087158203125, 0.02777099609375, 0.04296875, 0.057220458984375, -0.057586669921875, -0.050506591796875, -0.005950927734375, -0.045654296875, -0.004764556884765625, -0.0024356842041015625, -0.036712646484375, 0.01904296875, -0.005352020263671875, -0.08612060546875, 0.058624267578125, 0.028533935546875, -0.05694580078125, 0.0677490234375, -0.0211029052734375, 0.0186920166015625, -0.06903076171875, -0.0023899078369140625, 0.001750946044921875, -0.01708984375, -0.036895751953125, -0.00930023193359375, 0.0007848739624023438, 0.01448822021484375, -0.0270538330078125, 0.055633544921875, -0.04290771484375, 0.0027942657470703125, 0.0047454833984375, -0.01001739501953125, 0.0301361083984375, 0.043609619140625, -0.01439666748046875, 0.0626220703125, 0.041259765625, -0.0343017578125, 0.051666259765625, 0.034820556640625, 0.0008754730224609375, 0.02264404296875, -0.054107666015625, 0.005023956298828125, 0.013824462890625, 0.031402587890625, -0.071044921875, -0.01690673828125, 0.042999267578125, -0.04547119140625, 0.038604736328125, -0.0184173583984375, -0.03961181640625, -0.034088134765625, -0.04510498046875, 0.0208892822265625, 0.060760498046875, -0.023529052734375, 0.0328369140625, 0.028472900390625, 0.007762908935546875, -0.051300048828125, -0.050140380859375, -0.0129547119140625, -0.020904541015625, -0.048126220703125, 0.032989501953125, -0.01110076904296875, -0.001773834228515625, 0.005260467529296875, -0.01079559326171875, -0.00860595703125, -0.0089263916015625, 0.0177459716796875, 0.0275115966796875, -0.0115966796875, -0.01091766357421875, 0.00841522216796875, 0.006847381591796875, 0.0028400421142578125, -0.0229644775390625, 0.0237884521484375, -0.01439666748046875, -0.005401611328125, 
-0.0216217041015625, 0.0262603759765625, 0.03668212890625, 0.00597381591796875, 0.062164306640625, 0.06475830078125, -0.0236663818359375, 0.006717681884765625, -0.03973388671875, -0.00928497314453125, -0.038055419921875, 0.0137786865234375, -0.01557159423828125, -0.045166015625, 0.041717529296875, 0.03399658203125, 0.0149688720703125, 0.066650390625, 0.03424072265625, 0.001422882080078125, 0.076416015625, 0.018341064453125, -0.0114288330078125, 0.032958984375, -0.04461669921875, -0.00714874267578125, -0.05499267578125, -0.015899658203125, -0.023590087890625, -0.00887298583984375, -0.059844970703125, -0.04107666015625, 0.02301025390625, 0.0291595458984375, -0.06280517578125, 0.039154052734375, -0.0579833984375, 0.01224517822265625, 0.04827880859375, 0.0128936767578125, 0.0122528076171875, 0.005680084228515625, -0.00861358642578125, 0.01122283935546875, -0.047088623046875, -0.016998291015625, 0.08172607421875, 0.017852783203125, 0.04791259765625, 0.0173492431640625, 0.028076171875, 0.0180816650390625, 0.0143585205078125, -0.03387451171875, 0.0457763671875, 0.007465362548828125, -0.05718994140625, -0.035186767578125, -0.045166015625, -0.07275390625, 0.0258941650390625, -0.00499725341796875, -0.0601806640625, 0.0294189453125, 0.00201416015625, -0.018707275390625, 0.0189361572265625, -0.062103271484375, 0.082275390625, -0.01155853271484375, -0.0325927734375, -0.0037994384765625, -0.05548095703125, 0.0215606689453125, 0.01148223876953125, -0.0075531005859375, -0.0187225341796875, -0.0141448974609375, 0.056884765625, -0.0740966796875, 0.047576904296875, -0.0205230712890625, -0.0010309219360351562, 0.042327880859375, -0.01198577880859375, 0.047454833984375, 0.004940032958984375, -0.0024356842041015625, 0.02947998046875, 0.0281219482421875, -0.041900634765625, -0.036102294921875, 0.0401611328125, -0.071533203125, -0.042327880859375, -0.042510986328125, -0.0321044921875, -0.0085906982421875, 0.002895355224609375, 0.03631591796875, 0.0322265625, 0.0003197193145751953, 
-0.00148773193359375, 0.05194091796875, -0.026123046875, 0.03326416015625, 0.0292510986328125, -0.01947021484375, -0.038665771484375, 0.0643310546875, 0.01355743408203125, 0.01522064208984375, 0.00948333740234375, 0.005573272705078125, -0.03936767578125, -0.038299560546875, -0.049468994140625, 0.0222015380859375, -0.0438232421875, -0.0300445556640625, -0.04632568359375, -0.0307769775390625, -0.039215087890625, 0.0230712890625, -0.02642822265625, -0.0582275390625, -0.032806396484375, -0.0008111000061035156, 0.07537841796875, 0.0310516357421875, -0.0161285400390625, 0.0127716064453125, -0.0560302734375, 0.0188446044921875, 0.03204345703125, 0.01629638671875, 0.0010347366333007812, -0.05633544921875, -0.01198577880859375, 0.00821685791015625, -0.0479736328125, -0.07781982421875, 0.053955078125, 0.0182342529296875, 0.04425048828125, 0.0274200439453125, 0.0167999267578125, 0.0633544921875, -0.026824951171875, 0.0850830078125, 0.01275634765625, -0.059722900390625, 0.035308837890625, -0.0284423828125, 0.0162200927734375, 0.0323486328125, 0.041778564453125, -0.02197265625, -0.0267791748046875, -0.059356689453125, -0.064453125, 0.03564453125, 0.0305633544921875, -0.0012063980102539062, 0.007534027099609375, 0.0355224609375, 0.007511138916015625, 0.01081085205078125, -0.05389404296875, -0.059356689453125, -0.034820556640625, -0.010040283203125, 0.01166534423828125, 0.0009775161743164062, -0.018524169921875, -0.060394287109375, 0.0736083984375, -0.0106658935546875, 0.054534912109375, 0.0251922607421875, 0.009185791015625, 0.007503509521484375, -0.0005822181701660156, 0.0211639404296875, 0.04705810546875, -0.0149688720703125, -0.020111083984375, 0.018402099609375, -0.06500244140625, 0.01074981689453125, 0.0333251953125, -0.00714874267578125, -0.00913238525390625, 0.01025390625, 0.059661865234375, -0.0184173583984375, -0.0238037109375, 0.034820556640625, -0.025390625, -0.0233612060546875, -0.0265655517578125, 0.0223236083984375, 0.009918212890625, 0.027557373046875, 
0.036285400390625, -0.017303466796875, 0.033843994140625, -0.03216552734375, 0.01435089111328125, 0.043182373046875, -0.009796142578125, -0.020294189453125, 0.0538330078125, -0.01039886474609375, 0.00688934326171875, 0.05426025390625, -0.02764892578125, -0.0302734375, 0.05975341796875, 0.02557373046875, 0.05499267578125, -0.01148223876953125, 0.015777587890625, 0.03875732421875, 0.0092010498046875, -0.0028514862060546875, 0.020599365234375, -0.006053924560546875, -0.0435791015625, -0.024688720703125, -0.04376220703125, -0.0223236083984375, 0.0156402587890625, -0.053558349609375, 0.0082244873046875, -0.0218353271484375, -0.02630615234375, -0.009246826171875, 0.038116455078125, -0.046295166015625, 0.0167694091796875, 0.0012788772583007812, 0.07745361328125, -0.0479736328125, 0.0712890625, 0.03057861328125, -0.0289764404296875, -0.073974609375, -0.01416778564453125, 0.007762908935546875, -0.0413818359375, 0.015411376953125, -0.0028858184814453125, 0.0239105224609375, 0.00115966796875, -0.0489501953125, -0.0677490234375, 0.10870361328125, 0.0182952880859375, -0.034698486328125, -0.0126953125, 0.001079559326171875, 0.024810791015625, -0.0022735595703125, 0.0533447265625, 0.047943115234375, 0.03411865234375, 0.0127105712890625, -0.07452392578125, 0.028594970703125, -0.035186767578125, -0.006977081298828125, 0.0128936767578125, -0.07220458984375, 0.07275390625, 0.0082550048828125, -0.006336212158203125, 0.004611968994140625, 0.051300048828125, 0.0291290283203125, 0.007434844970703125, 0.024810791015625, 0.05810546875, 0.0657958984375, -0.0225372314453125, 0.09210205078125, -0.014007568359375, 0.047210693359375, 0.060699462890625, 0.006378173828125, 0.041717529296875, 0.01432037353515625, -0.05706787109375, 0.035430908203125, 0.07879638671875, -0.01374053955078125, 0.034027099609375, 0.006473541259765625, -0.0244140625, -0.004810333251953125, 0.02581787109375, -0.058868408203125, -0.00287628173828125, 0.0296630859375, -0.00800323486328125, 0.0023365020751953125, 
-0.0110321044921875, -0.0010776519775390625, -0.054107666015625, -0.0189361572265625, 0.042694091796875, 0.0129547119140625, -0.02325439453125, 0.0694580078125, -0.01541900634765625, 0.048095703125, -0.042694091796875, -0.010894775390625, -0.0312347412109375, -0.00862884521484375, -0.022491455078125, -0.060699462890625, 0.0163116455078125, -0.021148681640625, -0.011932373046875, 0.004261016845703125, 0.050933837890625, -0.0168304443359375, -0.036285400390625, 0.0217132568359375, 0.034454345703125, 0.02764892578125, -0.00881195068359375, -0.08721923828125, 0.003444671630859375, -0.0015344619750976562, -0.061248779296875, 0.0357666015625, 0.043182373046875, 0.00743865966796875, 0.05023193359375, 0.039520263671875, -0.005916595458984375, 0.01432037353515625, -0.019561767578125, 0.06805419921875, -0.05999755859375, -0.01413726806640625, -0.058319091796875, 0.04144287109375, -0.0217132568359375, -0.0321044921875, 0.06817626953125, 0.0494384765625, 0.049163818359375, 0.005428314208984375, 0.049652099609375, -0.0293121337890625, 0.01480865478515625, -0.019195556640625, 0.05303955078125, -0.053131103515625, 0.004398345947265625, -0.0210113525390625, -0.054595947265625, -0.001628875732421875, 0.052154541015625, -0.01166534423828125, 0.0194091796875, 0.033050537109375, 0.06591796875, 0.003696441650390625, 0.01020050048828125, 0.0182037353515625, 0.02557373046875, 0.01074981689453125, 0.058074951171875, 0.049468994140625, -0.07232666015625, 0.04266357421875, -0.028167724609375, -0.01535797119140625, -0.00617218017578125, -0.0540771484375, -0.055877685546875, -0.035400390625, -0.050506591796875, -0.053741455078125, -0.0069122314453125, 0.06695556640625, 0.0616455078125, -0.054412841796875, -0.0160675048828125, -0.006206512451171875, 0.007114410400390625, -0.026611328125, -0.025970458984375, 0.036376953125, 0.0169677734375, -0.04962158203125, 0.010345458984375, 0.003292083740234375, 0.02325439453125, -0.0132598876953125, -0.0301361083984375, -0.01372528076171875, 
0.0110321044921875, 0.04913330078125, 0.03900146484375, -0.0413818359375, -0.01081085205078125, -0.01000213623046875, -0.01140594482421875, 0.021148681640625, 0.0142059326171875, -0.05706787109375, -0.00015485286712646484, 0.037261962890625, 0.01432037353515625, 0.0699462890625, 0.0009851455688476562, 0.024169921875, -0.0287933349609375, 0.003597259521484375, 0.00872039794921875, 0.02099609375, 0.004344940185546875, -0.0439453125, 0.05059814453125, 0.03179931640625, -0.05535888671875, -0.056060791015625, -0.0166473388671875, -0.08721923828125, -0.0223236083984375, 0.0823974609375, -0.0086669921875, -0.0277557373046875, -0.01155853271484375, -0.018341064453125, 0.02691650390625, -0.036041259765625, 0.020111083984375, 0.041900634765625, -0.033477783203125, -0.0249176025390625, -0.0633544921875, 0.049530029296875, 0.01207733154296875, -0.059356689453125, 0.0032825469970703125, 0.0457763671875, 0.03497314453125, -0.00043654441833496094, 0.06439208984375, -0.01526641845703125, 0.02490234375, 0.0166778564453125, 0.0005164146423339844, -0.0009307861328125, 0.0135345458984375, -0.0235137939453125, 0.0009098052978515625, -0.0235443115234375, -0.0005383491516113281 ] ]
codeparrot/codeparrot
2022-06-24T08:28:28.000Z
[ "transformers", "pytorch", "tensorboard", "gpt2", "text-generation", "code", "generation", "dataset:codeparrot/codeparrot-clean-train", "model-index", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
codeparrot
null
null
codeparrot/codeparrot
88
8,353
transformers
2022-03-02T23:29:05
--- language: code tags: - code - gpt2 - generation datasets: - codeparrot/codeparrot-clean-train widget: - text: "from transformer import" example_title: "Transformers" - text: "def print_hello_world():\n\t" example_title: "Hello World!" - text: "def get_file_size(filepath):" example_title: "File size" - text: "import numpy as" example_title: "Numpy" model-index: - name: codeparrot results: - task: name: Code Generation type: code-generation dataset: name: "HumanEval" type: openai_humaneval metrics: - name: pass@1 type: code_eval value: 3.99 - name: pass@10 type: code_eval value: 8.69 - name: pass@100 type: code_eval value: 17.88 --- # CodeParrot 🦜 CodeParrot 🦜 is a GPT-2 model (1.5B parameters) trained to generate Python code. After the initial training and release of v1.0 we trained the model some more and released v1.1 (see below for details). ## Usage You can load the CodeParrot model and tokenizer directly in `transformers`: ```Python from transformers import AutoTokenizer, AutoModelWithLMHead tokenizer = AutoTokenizer.from_pretrained("codeparrot/codeparrot") model = AutoModelWithLMHead.from_pretrained("codeparrot/codeparrot") inputs = tokenizer("def hello_world():", return_tensors="pt") outputs = model(**inputs) ``` or with a `pipeline`: ```Python from transformers import pipeline pipe = pipeline("text-generation", model="codeparrot/codeparrot") outputs = pipe("def hello_world():") ``` ## Training The model was trained on the cleaned [CodeParrot 🦜 dataset](https://huggingface.co/datasets/codeparrot/codeparrot-clean) in two steps. 
After the initial training (v1.0) the model was trained for another 30k steps resulting in v1.1 and you find the settings in the following table: |Config| v1.0| v1.1| |------|------------------|--------------------| |Batch size| 512 | 512 | |Context size| 1024 | 1024 | |Training steps| 50'000| 30'000 |Gradient accumulation| 16| 16 | |Gradient checkpointing| True| True | |Learning rate| 2e-4 | 5e-5 | |Weight decay | 0.1 | 0.1 | |Warmup steps| 750 | 750 | |Schedule| Cosine | Cosine | The training was executed on 16 x A100 (40GB) GPUs. This setting amounts to roughly 26 + 15 billion tokens. ## Performance We evaluated the model on OpenAI's [HumanEval](https://huggingface.co/datasets/openai_humaneval) benchmark which consists of programming challenges: | Metric | v1.0 | v1.1 | |--------|-----|-----| |pass@1 | 3.58% | 3.99% | |pass@10 | 8.03% | 8.69% | |pass@100 | 14.96% | 17.88% | The [pass@k metric](https://huggingface.co/metrics/code_eval) tells the probability that at least one out of k generations passes the tests. ## Resources - Dataset: [full](https://huggingface.co/datasets/codeparrot/codeparrot-clean), [train](https://huggingface.co/datasets/codeparrot/codeparrot-clean-train), [valid](https://huggingface.co/datasets/codeparrot/codeparrot-clean-valid) - Code: [repository](https://github.com/huggingface/transformers/tree/master/examples/research_projects/codeparrot) - Spaces: [generation](), [highlighting]()
3,141
[ [ -0.0430908203125, -0.0343017578125, -0.005710601806640625, 0.0034503936767578125, -0.0304718017578125, 0.00925445556640625, -0.01244354248046875, -0.02496337890625, -0.007106781005859375, 0.014617919921875, -0.043060302734375, -0.0221099853515625, -0.046295166015625, -0.00011336803436279297, -0.006427764892578125, 0.11676025390625, -0.007091522216796875, -0.0027256011962890625, -0.01297760009765625, -0.00843048095703125, -0.03375244140625, -0.034210205078125, -0.0537109375, -0.03057861328125, 0.0133819580078125, 0.0245361328125, 0.0582275390625, 0.046112060546875, 0.037261962890625, 0.0162353515625, -0.0140838623046875, -0.0179290771484375, -0.0361328125, -0.01049041748046875, 0.005985260009765625, -0.03662109375, -0.034210205078125, 0.00897979736328125, 0.031829833984375, 0.0224456787109375, 0.00598907470703125, 0.031768798828125, 0.006397247314453125, 0.043975830078125, -0.0374755859375, 0.0267791748046875, -0.029998779296875, 0.02020263671875, -0.008575439453125, -0.0059814453125, -0.0053253173828125, -0.020050048828125, -0.0030670166015625, -0.049652099609375, 0.033172607421875, -0.0035552978515625, 0.08453369140625, 0.0322265625, -0.0211639404296875, -0.01058197021484375, -0.04010009765625, 0.059356689453125, -0.0458984375, 0.011962890625, 0.02008056640625, 0.02691650390625, 0.0011377334594726562, -0.080078125, -0.03167724609375, -0.00812530517578125, -0.01180267333984375, 0.002994537353515625, -0.0269927978515625, 0.0008401870727539062, 0.0535888671875, 0.049041748046875, -0.06536865234375, -0.0020427703857421875, -0.047698974609375, -0.031524658203125, 0.0445556640625, 0.020660400390625, 0.017486572265625, -0.0273284912109375, -0.037109375, -0.032470703125, -0.038848876953125, 0.0199432373046875, 0.03155517578125, 0.003284454345703125, -0.031829833984375, 0.0374755859375, -0.030792236328125, 0.042724609375, -0.0120697021484375, -0.0066070556640625, 0.03717041015625, -0.04266357421875, -0.02435302734375, -0.00650787353515625, 0.07421875, 
0.0135040283203125, 0.039276123046875, 0.00615692138671875, -0.02239990234375, 0.01494598388671875, 0.0188446044921875, -0.10009765625, -0.041473388671875, 0.0264129638671875, -0.0322265625, -0.044189453125, 0.019195556640625, -0.0650634765625, 0.010345458984375, -0.012481689453125, 0.0232696533203125, -0.04241943359375, -0.03564453125, -0.005889892578125, -0.027313232421875, 0.03790283203125, 0.00235748291015625, -0.055328369140625, 0.0230865478515625, 0.05810546875, 0.0703125, -0.003116607666015625, -0.025054931640625, -0.036773681640625, -0.027130126953125, -0.017822265625, 0.0400390625, -0.02056884765625, -0.02264404296875, -0.007266998291015625, 0.00466156005859375, -0.00769805908203125, -0.036376953125, 0.0312347412109375, -0.032562255859375, 0.017181396484375, -0.004337310791015625, -0.038848876953125, -0.01027679443359375, 0.00018918514251708984, -0.0296478271484375, 0.08984375, 0.03369140625, -0.06878662109375, 0.047943115234375, -0.053558349609375, -0.02294921875, 0.00786590576171875, -0.01367950439453125, -0.0556640625, 0.00402069091796875, 0.009002685546875, 0.0277862548828125, -0.006591796875, 0.0193023681640625, -0.016571044921875, -0.037628173828125, 0.005889892578125, -0.0251007080078125, 0.07598876953125, 0.048675537109375, -0.0401611328125, 0.01041412353515625, -0.0538330078125, 0.023651123046875, 0.0257720947265625, -0.01120758056640625, 0.005123138427734375, -0.020355224609375, 0.01332855224609375, 0.0255279541015625, 0.01082611083984375, -0.0272674560546875, 0.037353515625, -0.03009033203125, 0.055145263671875, 0.042236328125, 0.0103912353515625, 0.0179595947265625, -0.02960205078125, 0.06005859375, 0.0123443603515625, 0.030975341796875, -0.0225982666015625, -0.064453125, -0.052337646484375, -0.0212249755859375, 0.022552490234375, 0.0309906005859375, -0.052734375, 0.034912109375, -0.029388427734375, -0.046356201171875, -0.0304718017578125, -0.0030841827392578125, 0.048675537109375, 0.01861572265625, 0.0210418701171875, -0.0279998779296875, 
-0.044891357421875, -0.059112548828125, -0.009246826171875, -0.0218353271484375, 0.0007433891296386719, -0.00611114501953125, 0.079833984375, -0.00835418701171875, 0.066162109375, -0.05206298828125, -0.0055999755859375, -0.02178955078125, 0.0152587890625, 0.035247802734375, 0.0592041015625, 0.04791259765625, -0.050201416015625, -0.027618408203125, -0.0076446533203125, -0.04461669921875, 0.0128173828125, 0.0022029876708984375, -0.003414154052734375, 0.021820068359375, 0.02886962890625, -0.037689208984375, 0.045867919921875, 0.041168212890625, -0.03973388671875, 0.05413818359375, -0.0152435302734375, 0.01053619384765625, -0.0836181640625, 0.0311431884765625, -0.0036945343017578125, -0.01416015625, -0.034759521484375, 0.03326416015625, -0.0033779144287109375, -0.024688720703125, -0.036865234375, 0.0340576171875, -0.023040771484375, -0.0075836181640625, -0.01203155517578125, -0.0209197998046875, 0.002521514892578125, 0.053497314453125, -0.012359619140625, 0.08612060546875, 0.036712646484375, -0.0394287109375, 0.01319122314453125, 0.0220489501953125, -0.0251007080078125, 0.01477813720703125, -0.072998046875, 0.0205841064453125, 0.01204681396484375, 0.00977325439453125, -0.07916259765625, -0.0273590087890625, 0.028411865234375, -0.049072265625, 0.028167724609375, -0.03961181640625, -0.045623779296875, -0.042572021484375, -0.0218963623046875, 0.0458984375, 0.07171630859375, -0.03271484375, 0.01062774658203125, 0.0120697021484375, 0.0018634796142578125, -0.0302276611328125, -0.05108642578125, -0.012786865234375, -0.0079193115234375, -0.042266845703125, 0.016387939453125, -0.012420654296875, 0.009124755859375, -0.0103912353515625, -0.01523590087890625, -0.021820068359375, 0.0013103485107421875, 0.03369140625, 0.0256805419921875, -0.00847625732421875, -0.018951416015625, 0.0032100677490234375, -0.0275421142578125, 0.00791168212890625, -0.02069091796875, 0.06060791015625, -0.0230255126953125, -0.0137481689453125, -0.0333251953125, -0.007762908935546875, 0.0452880859375, 
-0.02520751953125, 0.0450439453125, 0.060333251953125, -0.023284912109375, -0.0005640983581542969, -0.022430419921875, -0.0025920867919921875, -0.03399658203125, 0.03826904296875, -0.040435791015625, -0.05206298828125, 0.0478515625, 0.01366424560546875, 0.0018472671508789062, 0.047210693359375, 0.036407470703125, 0.0225372314453125, 0.07232666015625, 0.0212249755859375, -0.0241851806640625, 0.031768798828125, -0.06378173828125, 0.0034732818603515625, -0.05548095703125, -0.0228118896484375, -0.046234130859375, -0.0065460205078125, -0.06317138671875, -0.03228759765625, 0.02593994140625, 0.027130126953125, -0.05389404296875, 0.04705810546875, -0.0679931640625, 0.03125, 0.049530029296875, 0.004093170166015625, 0.01287841796875, 0.00626373291015625, -0.015899658203125, -0.00004404783248901367, -0.0648193359375, -0.02496337890625, 0.08880615234375, 0.032745361328125, 0.036407470703125, -0.02191162109375, 0.0509033203125, 0.004894256591796875, 0.0180816650390625, -0.04241943359375, 0.0272064208984375, 0.005817413330078125, -0.050048828125, 0.007099151611328125, -0.046539306640625, -0.0592041015625, 0.007274627685546875, 0.0020236968994140625, -0.0443115234375, -0.0016431808471679688, 0.01165008544921875, -0.013427734375, 0.0333251953125, -0.05029296875, 0.08709716796875, 0.0036182403564453125, -0.01739501953125, -0.005771636962890625, -0.048431396484375, 0.028106689453125, 0.00036263465881347656, 0.00431060791015625, 0.00504302978515625, 0.0005664825439453125, 0.06109619140625, -0.044921875, 0.0426025390625, -0.0307464599609375, 0.008148193359375, 0.03057861328125, 0.0008697509765625, 0.038360595703125, 0.01052093505859375, 0.002933502197265625, 0.036865234375, 0.015289306640625, -0.03594970703125, -0.0215911865234375, 0.043853759765625, -0.0718994140625, -0.0157318115234375, -0.05670166015625, -0.0275421142578125, 0.01470947265625, 0.02252197265625, 0.051177978515625, 0.042266845703125, 0.006313323974609375, 0.00569915771484375, 0.038177490234375, -0.03924560546875, 
0.036773681640625, 0.0137176513671875, -0.031829833984375, -0.061004638671875, 0.068359375, -0.004711151123046875, 0.027496337890625, -0.0001494884490966797, 0.004398345947265625, -0.019744873046875, -0.02685546875, -0.04510498046875, 0.004993438720703125, -0.041229248046875, -0.037353515625, -0.038909912109375, -0.035491943359375, -0.037750244140625, 0.00395965576171875, -0.03546142578125, -0.026702880859375, -0.006313323974609375, 0.005702972412109375, 0.039215087890625, 0.0440673828125, 0.0037364959716796875, 0.0106658935546875, -0.04620361328125, 0.0228424072265625, 0.004520416259765625, 0.04766845703125, -0.0085906982421875, -0.057464599609375, -0.0175323486328125, 0.010162353515625, -0.032440185546875, -0.046356201171875, 0.0187530517578125, 0.0121002197265625, 0.02545166015625, 0.0302734375, -0.0017080307006835938, 0.055328369140625, -0.023193359375, 0.0714111328125, 0.0122528076171875, -0.06463623046875, 0.05364990234375, -0.0269317626953125, 0.02947998046875, 0.047454833984375, 0.0245819091796875, -0.0283203125, -0.032562255859375, -0.057342529296875, -0.052490234375, 0.059173583984375, 0.044647216796875, -0.0001823902130126953, 0.007350921630859375, 0.0394287109375, -0.005268096923828125, 0.0130157470703125, -0.052764892578125, -0.031005859375, -0.037933349609375, -0.0016069412231445312, -0.006443023681640625, -0.006549835205078125, 0.0055084228515625, -0.051025390625, 0.049652099609375, -0.006378173828125, 0.0404052734375, 0.016998291015625, 0.0009183883666992188, -0.0125732421875, 0.0006546974182128906, 0.04046630859375, 0.04986572265625, -0.01605224609375, -0.0172271728515625, -0.004802703857421875, -0.050048828125, 0.01053619384765625, 0.013519287109375, -0.0115966796875, -0.006084442138671875, 0.033905029296875, 0.06353759765625, 0.004734039306640625, -0.0343017578125, 0.052520751953125, 0.003353118896484375, -0.0278778076171875, -0.0308074951171875, 0.0330810546875, -0.012786865234375, 0.0029621124267578125, 0.016204833984375, 0.022613525390625, 
0.0005712509155273438, -0.023040771484375, 0.023956298828125, 0.0171356201171875, -0.028350830078125, -0.0269927978515625, 0.08197021484375, -0.0037708282470703125, -0.00923919677734375, 0.06707763671875, -0.034820556640625, -0.05010986328125, 0.06817626953125, 0.027679443359375, 0.081787109375, 0.0011281967163085938, 0.016143798828125, 0.058380126953125, 0.0205078125, 0.001850128173828125, 0.0237274169921875, -0.004840850830078125, -0.0311431884765625, -0.03814697265625, -0.059051513671875, -0.014373779296875, 0.00943756103515625, -0.065673828125, 0.023651123046875, -0.031341552734375, -0.00887298583984375, -0.0132904052734375, 0.0287322998046875, -0.07220458984375, 0.0189208984375, -0.0010738372802734375, 0.08233642578125, -0.06353759765625, 0.06829833984375, 0.06121826171875, -0.062469482421875, -0.0648193359375, -0.004489898681640625, -0.0124664306640625, -0.068359375, 0.04339599609375, 0.02294921875, 0.0345458984375, 0.01134490966796875, -0.045135498046875, -0.05352783203125, 0.0882568359375, 0.020782470703125, -0.03289794921875, 0.01433563232421875, 0.0298614501953125, 0.0450439453125, -0.007354736328125, 0.03631591796875, 0.042144775390625, 0.02740478515625, -0.0020904541015625, -0.0653076171875, 0.01702880859375, -0.039337158203125, 0.0241851806640625, 0.0232696533203125, -0.05780029296875, 0.07098388671875, -0.0199127197265625, -0.0035839080810546875, 0.0161895751953125, 0.04296875, 0.03802490234375, 0.01904296875, 0.03765869140625, 0.05987548828125, 0.025665283203125, -0.024139404296875, 0.0975341796875, -0.05255126953125, 0.06475830078125, 0.059326171875, 0.01293182373046875, 0.0367431640625, 0.025634765625, -0.039031982421875, 0.037200927734375, 0.05572509765625, -0.0286102294921875, 0.033203125, 0.0247650146484375, -0.00341033935546875, 0.003673553466796875, 0.0136566162109375, -0.044891357421875, 0.0179595947265625, -0.0094757080078125, -0.0157623291015625, -0.01235198974609375, 0.00885009765625, 0.01558685302734375, -0.0302276611328125, 
-0.01114654541015625, 0.052764892578125, -0.003925323486328125, -0.0614013671875, 0.06085205078125, -0.003849029541015625, 0.04388427734375, -0.050018310546875, -0.0168304443359375, -0.03057861328125, 0.0152130126953125, -0.0192108154296875, -0.04925537109375, 0.023529052734375, 0.0123138427734375, -0.01117706298828125, -0.006282806396484375, 0.04827880859375, -0.004608154296875, -0.0345458984375, 0.01103973388671875, 0.014251708984375, 0.0234375, -0.014801025390625, -0.06219482421875, 0.0180816650390625, 0.01268768310546875, -0.03192138671875, 0.03057861328125, 0.0158538818359375, 0.0202484130859375, 0.04443359375, 0.044647216796875, 0.0022182464599609375, 0.007709503173828125, -0.0010328292846679688, 0.074951171875, -0.04412841796875, -0.035064697265625, -0.055023193359375, 0.04791259765625, -0.004238128662109375, -0.059722900390625, 0.046051025390625, 0.07171630859375, 0.08428955078125, -0.00963592529296875, 0.0572509765625, -0.03338623046875, 0.004611968994140625, -0.028900146484375, 0.050384521484375, -0.038299560546875, -0.001003265380859375, -0.024932861328125, -0.07080078125, 0.0038242340087890625, 0.041412353515625, -0.00809478759765625, 0.0213623046875, 0.045562744140625, 0.06329345703125, -0.011016845703125, 0.00276947021484375, 0.005290985107421875, 0.0062103271484375, 0.031890869140625, 0.046417236328125, 0.037506103515625, -0.0703125, 0.05535888671875, -0.050079345703125, -0.0211334228515625, 0.0024051666259765625, -0.043426513671875, -0.05670166015625, -0.033782958984375, -0.0394287109375, -0.049591064453125, -0.005115509033203125, 0.0693359375, 0.054168701171875, -0.07080078125, -0.00672149658203125, -0.04046630859375, -0.016998291015625, -0.02679443359375, -0.0258026123046875, 0.0272064208984375, -0.04925537109375, -0.05621337890625, -0.00418853759765625, -0.002422332763671875, -0.0026378631591796875, -0.01486968994140625, -0.019195556640625, 0.01546478271484375, -0.0261383056640625, 0.01861572265625, 0.01332855224609375, -0.034820556640625, 
-0.027008056640625, -0.0192718505859375, -0.01064300537109375, 0.019866943359375, 0.061676025390625, -0.056121826171875, 0.0309906005859375, 0.03948974609375, 0.0284423828125, 0.0509033203125, -0.00878143310546875, 0.0291595458984375, -0.055145263671875, 0.025634765625, 0.009429931640625, 0.031494140625, -0.002288818359375, -0.016082763671875, 0.055572509765625, 0.033050537109375, -0.056640625, -0.06121826171875, 0.005771636962890625, -0.0635986328125, -0.0184478759765625, 0.087890625, -0.0173797607421875, -0.00971221923828125, 0.004657745361328125, -0.0112762451171875, 0.03436279296875, -0.019989013671875, 0.03143310546875, 0.027618408203125, 0.0011014938354492188, 0.008697509765625, -0.057525634765625, 0.048370361328125, 0.021942138671875, -0.052734375, -0.00775146484375, 0.017333984375, 0.05010986328125, 0.00445556640625, 0.04705810546875, -0.023956298828125, 0.01971435546875, 0.019866943359375, 0.018310546875, -0.026458740234375, -0.01483154296875, -0.038665771484375, 0.01180267333984375, -0.01169586181640625, -0.02557373046875 ] ]
ehartford/dolphin-2.0-mistral-7b
2023-10-03T16:25:19.000Z
[ "transformers", "pytorch", "mistral", "text-generation", "en", "dataset:ehartford/dolphin", "dataset:jondurbin/airoboros-2.2.1", "license:apache-2.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
ehartford
null
null
ehartford/dolphin-2.0-mistral-7b
97
8,339
transformers
2023-10-02T21:56:22
--- license: apache-2.0 datasets: - ehartford/dolphin - jondurbin/airoboros-2.2.1 language: - en --- Dolphin 2.0 🐬 https://erichartford.com/dolphin Dolphin-2.0-mistral-7b's training was sponsored by [a16z](https://a16z.com/supporting-the-open-source-ai-community/). This model is based on mistralAI, so it is suitable for commercial or non-commercial use. This model is uncensored. I have filtered the dataset to remove alignment and bias. This makes the model more compliant. You are advised to implement your own alignment layer before exposing the model as a service. It will be highly compliant to any requests, even unethical ones. Please read my blog post about uncensored models. https://erichartford.com/uncensored-models You are responsible for any content you create using this model. Enjoy responsibly. ## Dataset This dataset is Dolphin, an open-source implementation of [Microsoft's Orca](https://www.microsoft.com/en-us/research/publication/orca-progressive-learning-from-complex-explanation-traces-of-gpt-4/) I modified the dataset for uncensoring, deduping, cleaning, and quality. I added Jon Durbin's excellent Airoboros dataset to increase creativity. ## Training It took 48 hours to train 10 epochs on 4x A100s. Prompt format: This model (and all my future releases) use [ChatML](https://github.com/openai/openai-python/blob/main/chatml.md) prompt format. ``` <|im_start|>system You are Dolphin, a helpful AI assistant.<|im_end|> <|im_start|>user {prompt}<|im_end|> ``` Example: ``` <|im_start|>system you are an expert dolphin trainer<|im_end|> <|im_start|>user What is the best way to train a dolphin to obey me? Please answer step by step.<|im_end|> ``` ## Gratitude - This model was made possible by the generous sponsorship of a16z. - Thank you to Microsoft for authoring the Orca paper and inspiring this work. 
- Special thanks to WingLian, and TheBloke for helpful advice - Thank you to all the other people in the Open Source AI community who have taught me and helped me along the way. ## Example Output ![image/png](https://cdn-uploads.huggingface.co/production/uploads/63111b2d88942700629f5771/xnz5M1lYd4oGVATSDRkQ-.png) [Buy me a coffee](https://www.buymeacoffee.com/ehartford)
2,233
[ [ -0.0287628173828125, -0.034210205078125, 0.004756927490234375, 0.0210418701171875, -0.020263671875, -0.0396728515625, 0.006984710693359375, -0.06439208984375, 0.0014629364013671875, 0.038482666015625, -0.054168701171875, -0.000621795654296875, -0.051422119140625, -0.0042266845703125, 0.013214111328125, 0.07049560546875, -0.01094818115234375, 0.0218963623046875, 0.01317596435546875, -0.025299072265625, -0.03265380859375, -0.0212860107421875, -0.099365234375, -0.0330810546875, 0.0308685302734375, 0.009429931640625, 0.042449951171875, 0.05926513671875, 0.002758026123046875, 0.0189208984375, -0.00835418701171875, 0.0328369140625, -0.051422119140625, 0.0024242401123046875, -0.027587890625, -0.0123291015625, -0.039581298828125, 0.0216522216796875, -0.0010099411010742188, 0.0288543701171875, -0.0185699462890625, 0.0235137939453125, 0.0062103271484375, 0.04931640625, -0.0372314453125, 0.0498046875, -0.025421142578125, 0.0020618438720703125, -0.00028228759765625, 0.0101165771484375, -0.0175933837890625, -0.021209716796875, -0.001201629638671875, -0.09149169921875, -0.011962890625, -0.005794525146484375, 0.08721923828125, 0.0254364013671875, -0.0309600830078125, -0.0294952392578125, -0.034820556640625, 0.0482177734375, -0.044342041015625, 0.0333251953125, 0.040283203125, 0.038330078125, -0.027679443359375, -0.052001953125, -0.03350830078125, -0.01806640625, 0.032684326171875, 0.03582763671875, -0.00997161865234375, -0.0004837512969970703, 0.0204010009765625, 0.0158843994140625, -0.038482666015625, 0.0205230712890625, -0.033843994140625, -0.0095977783203125, 0.045501708984375, -0.0043182373046875, 0.02178955078125, 0.034881591796875, -0.032318115234375, -0.02630615234375, -0.046966552734375, 0.02825927734375, 0.0386962890625, 0.029937744140625, -0.03155517578125, 0.060302734375, -0.003143310546875, 0.03857421875, 0.0035648345947265625, -0.008880615234375, 0.0094146728515625, 0.005161285400390625, -0.0113983154296875, 0.016754150390625, 0.0584716796875, 0.022125244140625, 
0.026885986328125, 0.01274871826171875, -0.019927978515625, -0.01424407958984375, 0.02606201171875, -0.04864501953125, -0.0123748779296875, 0.021484375, -0.048828125, -0.0301513671875, -0.006992340087890625, -0.032379150390625, -0.0308837890625, -0.026092529296875, 0.0411376953125, -0.04608154296875, -0.039581298828125, 0.0163116455078125, -0.024261474609375, 0.020477294921875, 0.036895751953125, -0.083740234375, 0.036712646484375, 0.040679931640625, 0.06182861328125, 0.024749755859375, -0.0214080810546875, -0.0284271240234375, 0.029815673828125, -0.032012939453125, 0.0223541259765625, 0.0017423629760742188, -0.0233917236328125, 0.0020427703857421875, -0.00046944618225097656, -0.005519866943359375, -0.032196044921875, 0.0119171142578125, -0.05242919921875, 0.0253448486328125, -0.01502227783203125, -0.05181884765625, -0.029144287109375, 0.00025391578674316406, -0.05096435546875, 0.054229736328125, 0.01403045654296875, -0.053802490234375, 0.0097198486328125, -0.0926513671875, -0.01300048828125, -0.0181427001953125, -0.006610870361328125, -0.03466796875, -0.0147552490234375, 0.013702392578125, -0.001880645751953125, -0.006404876708984375, 0.0047454833984375, -0.050445556640625, -0.01335906982421875, 0.01739501953125, -0.01507568359375, 0.1068115234375, 0.038665771484375, -0.0543212890625, -0.00331878662109375, -0.04486083984375, 0.00113677978515625, 0.024444580078125, -0.0279693603515625, -0.01483917236328125, -0.0092620849609375, 0.00212860107421875, 0.0121917724609375, 0.0293426513671875, -0.0506591796875, 0.0247039794921875, -0.0183563232421875, 0.03173828125, 0.0616455078125, -0.0014181137084960938, 0.0066986083984375, -0.038818359375, 0.0209808349609375, -0.0007348060607910156, 0.039581298828125, 0.0035457611083984375, -0.051361083984375, -0.044464111328125, -0.0173492431640625, -0.00004506111145019531, 0.0200347900390625, -0.05535888671875, 0.034149169921875, -0.0095367431640625, -0.036895751953125, -0.058990478515625, -0.00310516357421875, 0.039306640625, 
0.05010986328125, 0.035980224609375, -0.02679443359375, -0.038330078125, -0.053741455078125, 0.0006494522094726562, -0.0204925537109375, -0.00640106201171875, 0.046234130859375, 0.031158447265625, -0.0047760009765625, 0.06243896484375, -0.0200347900390625, -0.037353515625, -0.01165771484375, 0.0035877227783203125, 0.0134429931640625, 0.06475830078125, 0.0557861328125, -0.05792236328125, -0.0207672119140625, 0.001331329345703125, -0.0772705078125, 0.004993438720703125, 0.01018524169921875, -0.007781982421875, 0.0330810546875, 0.031951904296875, -0.03338623046875, 0.04046630859375, 0.02484130859375, -0.01404571533203125, 0.03125, -0.032196044921875, -0.0045318603515625, -0.07122802734375, 0.032379150390625, -0.0010633468627929688, -0.01522064208984375, -0.0247802734375, 0.00777435302734375, -0.0132904052734375, -0.018310546875, -0.032989501953125, 0.05224609375, -0.0262298583984375, -0.0083770751953125, -0.01934814453125, -0.01067352294921875, -0.01953125, 0.03436279296875, 0.0272979736328125, 0.0584716796875, 0.04840087890625, -0.054168701171875, 0.01233673095703125, 0.0350341796875, -0.02288818359375, 0.0103912353515625, -0.07916259765625, 0.0243072509765625, -0.002201080322265625, 0.0256805419921875, -0.06719970703125, -0.015777587890625, 0.050140380859375, -0.046630859375, 0.0223236083984375, -0.0224151611328125, -0.017120361328125, -0.00873565673828125, -0.0030384063720703125, 0.0177154541015625, 0.043701171875, -0.07110595703125, 0.0247650146484375, 0.03900146484375, 0.00879669189453125, -0.052459716796875, -0.06219482421875, -0.01450347900390625, -0.00592041015625, -0.029144287109375, 0.0159759521484375, 0.0013256072998046875, 0.003894805908203125, -0.01220703125, -0.00661468505859375, -0.02288818359375, 0.008392333984375, 0.040802001953125, 0.01049041748046875, -0.0163116455078125, -0.0277557373046875, 0.01511383056640625, -0.0181732177734375, 0.0039215087890625, -0.04736328125, 0.03179931640625, -0.01070404052734375, -0.01995849609375, -0.0701904296875, 
0.0021076202392578125, 0.045440673828125, -0.00844573974609375, 0.046722412109375, 0.045074462890625, -0.01873779296875, -0.0037994384765625, -0.0239410400390625, -0.0220794677734375, -0.040283203125, 0.00858306884765625, -0.0229339599609375, -0.050872802734375, 0.026580810546875, 0.0129852294921875, 0.023223876953125, 0.01800537109375, 0.0290069580078125, -0.01360321044921875, 0.08160400390625, 0.05059814453125, -0.0257720947265625, 0.03582763671875, -0.040679931640625, 0.000629425048828125, -0.05340576171875, -0.046356201171875, -0.03326416015625, -0.01654052734375, -0.034637451171875, 0.007122039794921875, 0.022430419921875, 0.0009512901306152344, -0.0606689453125, 0.0226593017578125, -0.032958984375, 0.0134735107421875, 0.037872314453125, 0.0247802734375, -0.00263214111328125, 0.013427734375, -0.006931304931640625, 0.0013799667358398438, -0.049957275390625, -0.045501708984375, 0.0687255859375, 0.044586181640625, 0.0692138671875, 0.034210205078125, 0.0411376953125, 0.0290679931640625, 0.01548004150390625, -0.03460693359375, 0.03009033203125, -0.00469970703125, -0.055511474609375, -0.03753662109375, -0.031036376953125, -0.095458984375, 0.005374908447265625, 0.00998687744140625, -0.06207275390625, 0.01202392578125, 0.0124664306640625, -0.005290985107421875, 0.0290374755859375, -0.0396728515625, 0.07696533203125, -0.02239990234375, -0.01910400390625, -0.00925445556640625, -0.052337646484375, 0.02557373046875, 0.0174713134765625, 0.004852294921875, -0.01409149169921875, -0.02252197265625, 0.04644775390625, -0.0665283203125, 0.088623046875, -0.04241943359375, -0.006809234619140625, 0.02581787109375, 0.0009784698486328125, 0.007472991943359375, 0.010467529296875, -0.009002685546875, 0.011993408203125, -0.003032684326171875, -0.045867919921875, -0.018524169921875, 0.0400390625, -0.10821533203125, -0.0115203857421875, -0.03057861328125, -0.02056884765625, -0.0015544891357421875, 0.03582763671875, 0.036468505859375, 0.0273895263671875, -0.0041046142578125, 
-0.02069091796875, 0.036834716796875, -0.0048370361328125, 0.0289154052734375, 0.058013916015625, -0.020294189453125, -0.04986572265625, 0.06182861328125, 0.000743865966796875, -0.0006155967712402344, 0.01082611083984375, 0.0125579833984375, -0.057098388671875, -0.0307159423828125, -0.046630859375, 0.01806640625, -0.0357666015625, 0.003803253173828125, -0.0274505615234375, -0.010040283203125, -0.058746337890625, 0.00911712646484375, -0.037139892578125, -0.034210205078125, -0.046478271484375, 0.01346588134765625, 0.0345458984375, 0.06781005859375, -0.020965576171875, 0.0235748291015625, -0.0380859375, 0.00928497314453125, 0.025177001953125, 0.0159149169921875, 0.017120361328125, -0.053314208984375, 0.00728607177734375, 0.0394287109375, -0.039581298828125, -0.046051025390625, 0.02166748046875, 0.0207061767578125, 0.0543212890625, 0.04840087890625, 0.009185791015625, 0.0595703125, -0.01384735107421875, 0.06488037109375, 0.0276031494140625, -0.060791015625, 0.04864501953125, -0.030517578125, -0.0006012916564941406, 0.0399169921875, 0.05889892578125, -0.0283355712890625, -0.034576416015625, -0.06048583984375, -0.046173095703125, 0.07440185546875, 0.0198516845703125, 0.01316070556640625, 0.00811004638671875, 0.059722900390625, 0.0172271728515625, 0.0086669921875, -0.06317138671875, -0.0210113525390625, -0.04925537109375, -0.0005030632019042969, 0.0019512176513671875, 0.0089569091796875, -0.0005106925964355469, -0.017242431640625, 0.088623046875, -0.01041412353515625, 0.025543212890625, 0.0259552001953125, 0.00798797607421875, -0.004970550537109375, -0.035552978515625, 0.044158935546875, 0.037750244140625, -0.01543426513671875, -0.006561279296875, -0.018829345703125, -0.046661376953125, 0.01092529296875, 0.000812530517578125, -0.0156402587890625, -0.005809783935546875, 0.0181427001953125, 0.05792236328125, -0.0174560546875, -0.042877197265625, 0.04302978515625, -0.01157379150390625, -0.010711669921875, -0.0306854248046875, 0.0233154296875, -0.0004532337188720703, 
0.02490234375, -0.003902435302734375, 0.0033397674560546875, 0.0006213188171386719, -0.029998779296875, -0.0014247894287109375, 0.00853729248046875, -0.04693603515625, -0.02020263671875, 0.0831298828125, -0.00600433349609375, -0.0104217529296875, 0.0748291015625, -0.00946807861328125, -0.01462554931640625, 0.06402587890625, 0.05035400390625, 0.051422119140625, -0.0177764892578125, 0.0164794921875, 0.03607177734375, 0.006755828857421875, -0.008575439453125, 0.04901123046875, -0.002086639404296875, -0.05352783203125, -0.0013437271118164062, -0.01325225830078125, -0.043060302734375, 0.04302978515625, -0.0697021484375, 0.017730712890625, -0.050262451171875, -0.0186004638671875, 0.021697998046875, -0.0007905960083007812, -0.01953125, 0.00844573974609375, 0.004695892333984375, 0.09161376953125, -0.0550537109375, 0.08306884765625, 0.07025146484375, -0.07037353515625, -0.06646728515625, -0.027435302734375, 0.0013532638549804688, -0.07611083984375, 0.042144775390625, 0.01385498046875, -0.0037689208984375, -0.00339508056640625, -0.07293701171875, -0.0518798828125, 0.0777587890625, 0.0330810546875, -0.01345062255859375, 0.0195159912109375, 0.0104827880859375, 0.041656494140625, -0.035430908203125, 0.0225830078125, 0.01358795166015625, 0.02752685546875, 0.0012998580932617188, -0.06829833984375, 0.004940032958984375, -0.029449462890625, 0.0163726806640625, -0.024444580078125, -0.07135009765625, 0.053741455078125, -0.0191497802734375, 0.018951416015625, 0.0396728515625, 0.0562744140625, 0.031280517578125, 0.025787353515625, 0.0443115234375, 0.04205322265625, 0.056060791015625, 0.00984954833984375, 0.072265625, -0.0301513671875, 0.0308074951171875, 0.08966064453125, -0.006916046142578125, 0.049407958984375, 0.0258636474609375, 0.00041103363037109375, 0.0200347900390625, 0.07757568359375, -0.0157318115234375, 0.058013916015625, 0.007083892822265625, -0.0028972625732421875, -0.00005990266799926758, -0.0178375244140625, -0.03704833984375, 0.009613037109375, 0.018341064453125, 
-0.0241241455078125, -0.0234375, 0.01161956787109375, 0.007678985595703125, -0.016571044921875, -0.02142333984375, 0.07318115234375, 0.00574493408203125, -0.053985595703125, 0.07843017578125, -0.004688262939453125, 0.0220489501953125, -0.060882568359375, -0.003631591796875, -0.0211944580078125, 0.02020263671875, -0.0055389404296875, -0.04827880859375, -0.0013227462768554688, 0.001735687255859375, -0.004451751708984375, -0.0214691162109375, 0.036468505859375, -0.0221405029296875, 0.01355743408203125, 0.0296478271484375, 0.0174407958984375, 0.05126953125, 0.00988006591796875, -0.0775146484375, 0.00738525390625, 0.01316070556640625, -0.0021038055419921875, 0.0290679931640625, 0.019683837890625, -0.0084686279296875, 0.06585693359375, 0.047027587890625, -0.00018298625946044922, -0.0043182373046875, 0.007305145263671875, 0.06683349609375, -0.010711669921875, -0.050445556640625, -0.032501220703125, 0.0220489501953125, -0.00024509429931640625, -0.0521240234375, 0.035675048828125, 0.06683349609375, 0.07293701171875, -0.0269622802734375, 0.066162109375, -0.00774383544921875, 0.014892578125, -0.0316162109375, 0.0777587890625, -0.0579833984375, 0.0034027099609375, 0.0085601806640625, -0.049713134765625, 0.0130615234375, 0.0606689453125, -0.00728607177734375, 0.0136260986328125, 0.0031299591064453125, 0.057525634765625, -0.0247802734375, 0.0278167724609375, 0.007228851318359375, 0.01617431640625, 0.043914794921875, 0.030975341796875, 0.059356689453125, -0.0294189453125, 0.035491943359375, -0.045135498046875, -0.031219482421875, -0.01480865478515625, -0.060150146484375, -0.060455322265625, -0.0360107421875, -0.03387451171875, -0.044891357421875, 0.0231170654296875, 0.0772705078125, 0.06719970703125, -0.059906005859375, -0.046051025390625, -0.0217742919921875, -0.013214111328125, -0.042327880859375, -0.0108489990234375, 0.0308837890625, -0.0345458984375, -0.053741455078125, 0.0247802734375, -0.005420684814453125, 0.053314208984375, -0.0020751953125, -0.0234832763671875, 
-0.006855010986328125, -0.0045013427734375, 0.012969970703125, 0.058135986328125, -0.0406494140625, -0.01238250732421875, -0.02020263671875, -0.0277099609375, -0.0003819465637207031, 0.0268096923828125, -0.02435302734375, 0.03790283203125, 0.005489349365234375, 0.02777099609375, 0.0611572265625, -0.008636474609375, 0.03173828125, -0.0213470458984375, 0.0203094482421875, 0.00952911376953125, 0.030517578125, -0.0016803741455078125, -0.02593994140625, 0.05224609375, 0.0122222900390625, -0.041961669921875, -0.036407470703125, -0.004150390625, -0.08636474609375, -0.0219573974609375, 0.0716552734375, -0.044708251953125, -0.040863037109375, -0.0145721435546875, -0.033203125, 0.0305328369140625, -0.045745849609375, 0.07586669921875, 0.0093536376953125, -0.030487060546875, 0.0177764892578125, -0.036895751953125, 0.042327880859375, 0.0094757080078125, -0.044586181640625, -0.0013742446899414062, 0.02777099609375, 0.038330078125, -0.0014200210571289062, 0.0506591796875, -0.0198516845703125, 0.0255889892578125, 0.00543975830078125, 0.01535797119140625, -0.04541015625, -0.00395965576171875, -0.035125732421875, -0.008270263671875, -0.002346038818359375, -0.0557861328125 ] ]
baichuan-inc/Baichuan2-13B-Base
2023-10-07T04:29:23.000Z
[ "transformers", "pytorch", "baichuan", "text-generation", "custom_code", "en", "zh", "license:other", "has_space", "region:us" ]
text-generation
baichuan-inc
null
null
baichuan-inc/Baichuan2-13B-Base
61
8,338
transformers
2023-08-30T10:11:24
--- language: - en - zh license: other tasks: - text-generation --- <!-- markdownlint-disable first-line-h1 --> <!-- markdownlint-disable html --> <div align="center"> <h1> Baichuan 2 </h1> </div> <div align="center"> <a href="https://github.com/baichuan-inc/Baichuan2" target="_blank">🦉GitHub</a> | <a href="https://github.com/baichuan-inc/Baichuan-7B/blob/main/media/wechat.jpeg?raw=true" target="_blank">💬WeChat</a> </div> <div align="center"> 🚀 <a href="https://www.baichuan-ai.com/" target="_blank">百川大模型在线对话平台</a> 已正式向公众开放 🎉 </div> # 目录/Table of Contents - [📖 模型介绍/Introduction](#Introduction) - [⚙️ 快速开始/Quick Start](#Start) - [📊 Benchmark评估/Benchmark Evaluation](#Benchmark) - [📜 声明与协议/Terms and Conditions](#Terms) # <span id="Introduction">模型介绍/Introduction</span> Baichuan 2 是[百川智能]推出的新一代开源大语言模型,采用 **2.6 万亿** Tokens 的高质量语料训练,在权威的中文和英文 benchmark 上均取得同尺寸最好的效果。本次发布包含有 7B、13B 的 Base 和 Chat 版本,并提供了 Chat 版本的 4bits 量化,所有版本不仅对学术研究完全开放,开发者也仅需[邮件申请]并获得官方商用许可后,即可以免费商用。具体发布版本和下载见下表: Baichuan 2 is the new generation of large-scale open-source language models launched by [Baichuan Intelligence inc.](https://www.baichuan-ai.com/). It is trained on a high-quality corpus with 2.6 trillion tokens and has achieved the best performance in authoritative Chinese and English benchmarks of the same size. This release includes 7B and 13B versions for both Base and Chat models, along with a 4bits quantized version for the Chat model. All versions are fully open to academic research, and developers can also use them for free in commercial applications after obtaining an official commercial license through [email request](mailto:opensource@baichuan-inc.com). 
The specific release versions and download links are listed in the table below: | | Base Model | Chat Model | 4bits Quantized Chat Model | |:---:|:--------------------:|:--------------------:|:--------------------------:| | 7B | [Baichuan2-7B-Base](https://huggingface.co/baichuan-inc/Baichuan2-7B-Base) | [Baichuan2-7B-Chat](https://huggingface.co/baichuan-inc/Baichuan2-7B-Chat) | [Baichuan2-7B-Chat-4bits](https://huggingface.co/baichuan-inc/Baichuan2-7B-Base-4bits) | | 13B | [Baichuan2-13B-Base](https://huggingface.co/baichuan-inc/Baichuan2-13B-Base) | [Baichuan2-13B-Chat](https://huggingface.co/baichuan-inc/Baichuan2-13B-Chat) | [Baichuan2-13B-Chat-4bits](https://huggingface.co/baichuan-inc/Baichuan2-13B-Chat-4bits) | # <span id="Start">快速开始/Quick Start</span> 在Baichuan2系列模型中,我们为了加快推理速度使用了Pytorch2.0加入的新功能F.scaled_dot_product_attention,因此模型需要在Pytorch2.0环境下运行。 In the Baichuan 2 series models, we have utilized the new feature `F.scaled_dot_product_attention` introduced in PyTorch 2.0 to accelerate inference speed. Therefore, the model needs to be run in a PyTorch 2.0 environment. 
```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained("baichuan-inc/Baichuan2-13B-Base", use_fast=False, trust_remote_code=True) model = AutoModelForCausalLM.from_pretrained("baichuan-inc/Baichuan2-13B-Base", device_map="auto", trust_remote_code=True) inputs = tokenizer('登鹳雀楼->王之涣\n夜雨寄北->', return_tensors='pt') inputs = inputs.to('cuda:0') pred = model.generate(**inputs, max_new_tokens=64, repetition_penalty=1.1) print(tokenizer.decode(pred.cpu()[0], skip_special_tokens=True)) ``` # <span id="Benchmark">Benchmark 结果/Benchmark Evaluation</span> 我们在[通用]、[法律]、[医疗]、[数学]、[代码]和[多语言翻译]六个领域的中英文权威数据集上对模型进行了广泛测试,更多详细测评结果可查看[GitHub]。 We have extensively tested the model on authoritative Chinese-English datasets across six domains: [General](https://github.com/baichuan-inc/Baichuan2/blob/main/README_EN.md#general-domain), [Legal](https://github.com/baichuan-inc/Baichuan2/blob/main/README_EN.md#law-and-medicine), [Medical](https://github.com/baichuan-inc/Baichuan2/blob/main/README_EN.md#law-and-medicine), [Mathematics](https://github.com/baichuan-inc/Baichuan2/blob/main/README_EN.md#mathematics-and-code), [Code](https://github.com/baichuan-inc/Baichuan2/blob/main/README_EN.md#mathematics-and-code), and [Multilingual Translation](https://github.com/baichuan-inc/Baichuan2/blob/main/README_EN.md#multilingual-translation). For more detailed evaluation results, please refer to [GitHub](https://github.com/baichuan-inc/Baichuan2/blob/main/README_EN.md). 
### 7B Model Results | | **C-Eval** | **MMLU** | **CMMLU** | **Gaokao** | **AGIEval** | **BBH** | |:-----------------------:|:----------:|:--------:|:---------:|:----------:|:-----------:|:-------:| | | 5-shot | 5-shot | 5-shot | 5-shot | 5-shot | 3-shot | | **GPT-4** | 68.40 | 83.93 | 70.33 | 66.15 | 63.27 | 75.12 | | **GPT-3.5 Turbo** | 51.10 | 68.54 | 54.06 | 47.07 | 46.13 | 61.59 | | **LLaMA-7B** | 27.10 | 35.10 | 26.75 | 27.81 | 28.17 | 32.38 | | **LLaMA2-7B** | 28.90 | 45.73 | 31.38 | 25.97 | 26.53 | 39.16 | | **MPT-7B** | 27.15 | 27.93 | 26.00 | 26.54 | 24.83 | 35.20 | | **Falcon-7B** | 24.23 | 26.03 | 25.66 | 24.24 | 24.10 | 28.77 | | **ChatGLM2-6B** | 50.20 | 45.90 | 49.00 | 49.44 | 45.28 | 31.65 | | **[Baichuan-7B]** | 42.80 | 42.30 | 44.02 | 36.34 | 34.44 | 32.48 | | **[Baichuan2-7B-Base]** | 54.00 | 54.16 | 57.07 | 47.47 | 42.73 | 41.56 | ### 13B Model Results | | **C-Eval** | **MMLU** | **CMMLU** | **Gaokao** | **AGIEval** | **BBH** | |:---------------------------:|:----------:|:--------:|:---------:|:----------:|:-----------:|:-------:| | | 5-shot | 5-shot | 5-shot | 5-shot | 5-shot | 3-shot | | **GPT-4** | 68.40 | 83.93 | 70.33 | 66.15 | 63.27 | 75.12 | | **GPT-3.5 Turbo** | 51.10 | 68.54 | 54.06 | 47.07 | 46.13 | 61.59 | | **LLaMA-13B** | 28.50 | 46.30 | 31.15 | 28.23 | 28.22 | 37.89 | | **LLaMA2-13B** | 35.80 | 55.09 | 37.99 | 30.83 | 32.29 | 46.98 | | **Vicuna-13B** | 32.80 | 52.00 | 36.28 | 30.11 | 31.55 | 43.04 | | **Chinese-Alpaca-Plus-13B** | 38.80 | 43.90 | 33.43 | 34.78 | 35.46 | 28.94 | | **XVERSE-13B** | 53.70 | 55.21 | 58.44 | 44.69 | 42.54 | 38.06 | | **[Baichuan-13B-Base]** | 52.40 | 51.60 | 55.30 | 49.69 | 43.20 | 43.01 | | **[Baichuan2-13B-Base]** | 58.10 | 59.17 | 61.97 | 54.33 | 48.17 | 48.78 | ## 训练过程模型/Training Dynamics 除了训练了 2.6 万亿 Tokens 的 [Baichuan2-7B-Base](https://huggingface.co/baichuan-inc/Baichuan2-7B-Base) 模型,我们还提供了在此之前的另外 11 个中间过程的模型(分别对应训练了约 0.2 ~ 2.4 万亿 Tokens)供社区研究使用 
([训练过程checkpoint下载](https://huggingface.co/baichuan-inc/Baichuan2-7B-Intermediate-Checkpoints))。下图给出了这些 checkpoints 在 C-Eval、MMLU、CMMLU 三个 benchmark 上的效果变化: In addition to the [Baichuan2-7B-Base](https://huggingface.co/baichuan-inc/Baichuan2-7B-Base) model trained on 2.6 trillion tokens, we also offer 11 additional intermediate-stage models for community research, corresponding to training on approximately 0.2 to 2.4 trillion tokens each ([Intermediate Checkpoints Download](https://huggingface.co/baichuan-inc/Baichuan2-7B-Intermediate-Checkpoints)). The graph below shows the performance changes of these checkpoints on three benchmarks: C-Eval, MMLU, and CMMLU. ![checkpoint](https://huggingface.co/baichuan-inc/Baichuan2-7B-Base/resolve/main/checkpoints.jpeg) # <span id="Terms">声明与协议/Terms and Conditions</span> ## 声明 我们在此声明,我们的开发团队并未基于 Baichuan 2 模型开发任何应用,无论是在 iOS、Android、网页或任何其他平台。我们强烈呼吁所有使用者,不要利用 Baichuan 2 模型进行任何危害国家社会安全或违法的活动。另外,我们也要求使用者不要将 Baichuan 2 模型用于未经适当安全审查和备案的互联网服务。我们希望所有的使用者都能遵守这个原则,确保科技的发展能在规范和合法的环境下进行。 我们已经尽我们所能,来确保模型训练过程中使用的数据的合规性。然而,尽管我们已经做出了巨大的努力,但由于模型和数据的复杂性,仍有可能存在一些无法预见的问题。因此,如果由于使用 Baichuan 2 开源模型而导致的任何问题,包括但不限于数据安全问题、公共舆论风险,或模型被误导、滥用、传播或不当利用所带来的任何风险和问题,我们将不承担任何责任。 We hereby declare that our team has not developed any applications based on Baichuan 2 models, not on iOS, Android, the web, or any other platform. We strongly call on all users not to use Baichuan 2 models for any activities that harm national / social security or violate the law. Also, we ask users not to use Baichuan 2 models for Internet services that have not undergone appropriate security reviews and filings. We hope that all users can abide by this principle and ensure that the development of technology proceeds in a regulated and legal environment. We have done our best to ensure the compliance of the data used in the model training process. However, despite our considerable efforts, there may still be some unforeseeable issues due to the complexity of the model and data. 
Therefore, if any problems arise due to the use of Baichuan 2 open-source models, including but not limited to data security issues, public opinion risks, or any risks and problems brought about by the model being misled, abused, spread or improperly exploited, we will not assume any responsibility. ## 协议 社区使用 Baichuan 2 模型需要遵循 [Apache 2.0](https://github.com/baichuan-inc/Baichuan2/blob/main/LICENSE) 和[《Baichuan 2 模型社区许可协议》](https://huggingface.co/baichuan-inc/Baichuan2-7B-Base/resolve/main/Baichuan%202%E6%A8%A1%E5%9E%8B%E7%A4%BE%E5%8C%BA%E8%AE%B8%E5%8F%AF%E5%8D%8F%E8%AE%AE.pdf)。Baichuan 2 模型支持商业用途,如果您计划将 Baichuan 2 模型或其衍生品用于商业目的,请您确认您的主体符合以下情况: 1. 您或您的关联方的服务或产品的日均用户活跃量(DAU)低于100万。 2. 您或您的关联方不是软件服务提供商、云服务提供商。 3. 您或您的关联方不存在将授予您的商用许可,未经百川许可二次授权给其他第三方的可能。 在符合以上条件的前提下,您需要通过以下联系邮箱 opensource@baichuan-inc.com ,提交《Baichuan 2 模型社区许可协议》要求的申请材料。审核通过后,百川将特此授予您一个非排他性、全球性、不可转让、不可再许可、可撤销的商用版权许可。 The community usage of Baichuan 2 model requires adherence to [Apache 2.0](https://github.com/baichuan-inc/Baichuan2/blob/main/LICENSE) and [Community License for Baichuan2 Model](https://huggingface.co/baichuan-inc/Baichuan2-7B-Base/resolve/main/Baichuan%202%E6%A8%A1%E5%9E%8B%E7%A4%BE%E5%8C%BA%E8%AE%B8%E5%8F%AF%E5%8D%8F%E8%AE%AE.pdf). The Baichuan 2 model supports commercial use. If you plan to use the Baichuan 2 model or its derivatives for commercial purposes, please ensure that your entity meets the following conditions: 1. The Daily Active Users (DAU) of your or your affiliate's service or product is less than 1 million. 2. Neither you nor your affiliates are software service providers or cloud service providers. 3. There is no possibility for you or your affiliates to grant the commercial license given to you, to reauthorize it to other third parties without Baichuan's permission. Upon meeting the above conditions, you need to submit the application materials required by the Baichuan 2 Model Community License Agreement via the following contact email: opensource@baichuan-inc.com. 
Once approved, Baichuan will hereby grant you a non-exclusive, global, non-transferable, non-sublicensable, revocable commercial copyright license. [GitHub]:https://github.com/baichuan-inc/Baichuan2 [Baichuan2]:https://github.com/baichuan-inc/Baichuan2 [Baichuan-7B]:https://huggingface.co/baichuan-inc/Baichuan-7B [Baichuan2-7B-Base]:https://huggingface.co/baichuan-inc/Baichuan2-7B-Base [Baichuan2-7B-Chat]:https://huggingface.co/baichuan-inc/Baichuan2-7B-Chat [Baichuan2-7B-Chat-4bits]:https://huggingface.co/baichuan-inc/Baichuan2-7B-Chat-4bits [Baichuan-13B-Base]:https://huggingface.co/baichuan-inc/Baichuan-13B-Base [Baichuan2-13B-Base]:https://huggingface.co/baichuan-inc/Baichuan2-13B-Base [Baichuan2-13B-Chat]:https://huggingface.co/baichuan-inc/Baichuan2-13B-Chat [Baichuan2-13B-Chat-4bits]:https://huggingface.co/baichuan-inc/Baichuan2-13B-Chat-4bits [通用]:https://github.com/baichuan-inc/Baichuan2#%E9%80%9A%E7%94%A8%E9%A2%86%E5%9F%9F [法律]:https://github.com/baichuan-inc/Baichuan2#%E6%B3%95%E5%BE%8B%E5%8C%BB%E7%96%97 [医疗]:https://github.com/baichuan-inc/Baichuan2#%E6%B3%95%E5%BE%8B%E5%8C%BB%E7%96%97 [数学]:https://github.com/baichuan-inc/Baichuan2#%E6%95%B0%E5%AD%A6%E4%BB%A3%E7%A0%81 [代码]:https://github.com/baichuan-inc/Baichuan2#%E6%95%B0%E5%AD%A6%E4%BB%A3%E7%A0%81 [多语言翻译]:https://github.com/baichuan-inc/Baichuan2#%E5%A4%9A%E8%AF%AD%E8%A8%80%E7%BF%BB%E8%AF%91 [《Baichuan 2 模型社区许可协议》]:https://huggingface.co/baichuan-inc/Baichuan2-7B-Base/blob/main/Baichuan%202%E6%A8%A1%E5%9E%8B%E7%A4%BE%E5%8C%BA%E8%AE%B8%E5%8F%AF%E5%8D%8F%E8%AE%AE.pdf [邮件申请]: mailto:opensource@baichuan-inc.com [Email]: mailto:opensource@baichuan-inc.com [opensource@baichuan-inc.com]: mailto:opensource@baichuan-inc.com [训练过程heckpoint下载]: https://huggingface.co/baichuan-inc/Baichuan2-7B-Intermediate-Checkpoints [百川智能]: https://www.baichuan-ai.com
12,943
[ [ -0.0251312255859375, -0.05059814453125, 0.0023860931396484375, 0.029266357421875, -0.0212249755859375, -0.00384521484375, -0.02020263671875, -0.032318115234375, 0.01861572265625, 0.0058746337890625, -0.03326416015625, -0.03515625, -0.049591064453125, -0.0019989013671875, 0.006717681884765625, 0.06640625, -0.005596160888671875, 0.00521087646484375, 0.01983642578125, -0.01215362548828125, -0.044342041015625, -0.019683837890625, -0.061309814453125, -0.01496124267578125, 0.0191497802734375, 0.01885986328125, 0.0518798828125, 0.048370361328125, 0.055572509765625, 0.0177764892578125, -0.0179901123046875, 0.0171661376953125, -0.0288848876953125, -0.01435089111328125, 0.025604248046875, -0.03607177734375, -0.056396484375, 0.0002789497375488281, 0.0263214111328125, 0.026458740234375, -0.002124786376953125, 0.0201416015625, 0.020538330078125, 0.038848876953125, -0.0255126953125, 0.0243072509765625, -0.0184326171875, -0.003398895263671875, -0.0157012939453125, 0.003536224365234375, -0.0161895751953125, -0.0265960693359375, 0.005199432373046875, -0.045501708984375, 0.0107421875, 0.00670623779296875, 0.111083984375, -0.0015087127685546875, -0.0282135009765625, -0.00958251953125, -0.0191192626953125, 0.06549072265625, -0.0799560546875, 0.0140838623046875, 0.0264739990234375, 0.01519012451171875, -0.01104736328125, -0.0643310546875, -0.0374755859375, -0.0027923583984375, -0.03570556640625, 0.028717041015625, -0.0186004638671875, -0.015228271484375, 0.007904052734375, 0.0341796875, -0.05242919921875, 0.0026798248291015625, -0.043243408203125, -0.0133056640625, 0.059478759765625, 0.0186004638671875, 0.023040771484375, -0.0330810546875, -0.038482666015625, -0.0026912689208984375, -0.038818359375, 0.0295257568359375, -0.001678466796875, 0.007965087890625, -0.043670654296875, 0.025360107421875, -0.0040283203125, 0.034149169921875, 0.016754150390625, -0.014495849609375, 0.043426513671875, -0.047332763671875, -0.0256805419921875, -0.0219879150390625, 0.09246826171875, 0.0419921875, 
-0.0184326171875, 0.0110626220703125, -0.0174102783203125, -0.01959228515625, -0.026702880859375, -0.06988525390625, -0.0290985107421875, 0.0440673828125, -0.05963134765625, -0.02685546875, 0.0140380859375, -0.05682373046875, -0.0027866363525390625, -0.00125885009765625, 0.034820556640625, -0.046844482421875, -0.043426513671875, -0.004547119140625, -0.007080078125, 0.0236663818359375, 0.020263671875, -0.0657958984375, 0.0180206298828125, 0.04119873046875, 0.0849609375, -0.0103302001953125, -0.0305938720703125, -0.0146331787109375, -0.0021877288818359375, -0.032379150390625, 0.042877197265625, 0.002437591552734375, -0.0279998779296875, -0.01485443115234375, 0.023834228515625, -0.01477813720703125, -0.033538818359375, 0.0271759033203125, -0.01534271240234375, 0.00839996337890625, -0.03826904296875, -0.0293121337890625, -0.01422119140625, 0.029052734375, -0.048370361328125, 0.08575439453125, 0.005184173583984375, -0.06549072265625, 0.0201416015625, -0.03936767578125, -0.0233306884765625, -0.01543426513671875, 0.004039764404296875, -0.041839599609375, -0.0299224853515625, 0.0244293212890625, 0.03497314453125, -0.03704833984375, 0.00934600830078125, -0.00901031494140625, -0.0267791748046875, 0.0106964111328125, -0.019287109375, 0.0906982421875, 0.0341796875, -0.051025390625, 0.01493072509765625, -0.048187255859375, -0.0018062591552734375, 0.0341796875, -0.021331787109375, 0.00807952880859375, -0.0113983154296875, 0.00569915771484375, 0.0278778076171875, 0.0281982421875, -0.0146026611328125, 0.0057830810546875, -0.032684326171875, 0.0545654296875, 0.06304931640625, 0.005283355712890625, 0.020111083984375, -0.050994873046875, 0.0258331298828125, 0.026519775390625, 0.033233642578125, -0.02288818359375, -0.053985595703125, -0.07598876953125, -0.023895263671875, 0.023590087890625, 0.045074462890625, -0.037109375, 0.0506591796875, -0.01050567626953125, -0.04974365234375, -0.038116455078125, -0.0037136077880859375, 0.032867431640625, 0.030792236328125, 0.0270233154296875, 
-0.0084381103515625, -0.04217529296875, -0.05499267578125, 0.00926971435546875, -0.02117919921875, 0.005359649658203125, 0.0293426513671875, 0.05615234375, -0.00888824462890625, 0.05145263671875, -0.039520263671875, -0.019989013671875, -0.0260162353515625, -0.003871917724609375, 0.03704833984375, 0.042877197265625, 0.053985595703125, -0.049163818359375, -0.061431884765625, 0.015777587890625, -0.060516357421875, 0.0111083984375, -0.0075225830078125, -0.0284423828125, 0.0305938720703125, 0.01214599609375, -0.047760009765625, 0.0364990234375, 0.044342041015625, -0.0250396728515625, 0.059417724609375, -0.0191802978515625, 0.02178955078125, -0.09051513671875, 0.0223846435546875, -0.00472259521484375, 0.003597259521484375, -0.042022705078125, 0.007045745849609375, 0.017852783203125, 0.01108551025390625, -0.03399658203125, 0.05657958984375, -0.050140380859375, 0.0225372314453125, 0.00411224365234375, 0.024017333984375, 0.0089263916015625, 0.050506591796875, -0.0007357597351074219, 0.059844970703125, 0.047637939453125, -0.045684814453125, 0.037017822265625, 0.0287933349609375, -0.0260772705078125, 0.0029125213623046875, -0.05523681640625, -0.002590179443359375, 0.0140228271484375, 0.0203094482421875, -0.08428955078125, -0.01389312744140625, 0.039276123046875, -0.05810546875, 0.019012451171875, -0.012908935546875, -0.025604248046875, -0.050994873046875, -0.04779052734375, 0.01020050048828125, 0.0416259765625, -0.0372314453125, 0.020233154296875, 0.0153961181640625, -0.0014438629150390625, -0.0428466796875, -0.05950927734375, -0.01458740234375, -0.0158233642578125, -0.06805419921875, 0.0240631103515625, -0.002208709716796875, -0.00531768798828125, -0.00505828857421875, 0.00004291534423828125, -0.0028171539306640625, 0.004070281982421875, 0.00982666015625, 0.042022705078125, -0.02117919921875, -0.01522064208984375, -0.00749969482421875, -0.00096893310546875, -0.0004935264587402344, -0.01206207275390625, 0.052215576171875, -0.00769805908203125, 0.0030536651611328125, 
-0.04534912109375, 0.0028171539306640625, 0.033721923828125, -0.038330078125, 0.07012939453125, 0.049072265625, -0.0283203125, 0.01324462890625, -0.0340576171875, -0.01062774658203125, -0.03399658203125, 0.0252532958984375, -0.028472900390625, -0.042633056640625, 0.0633544921875, 0.023406982421875, 0.0231475830078125, 0.054168701171875, 0.050628662109375, -0.0026531219482421875, 0.06549072265625, 0.01009368896484375, -0.01166534423828125, 0.0281829833984375, -0.0582275390625, 0.00658416748046875, -0.06549072265625, -0.039642333984375, -0.02899169921875, -0.02178955078125, -0.0419921875, -0.033355712890625, 0.026153564453125, 0.00695037841796875, -0.033843994140625, 0.043487548828125, -0.03790283203125, -0.0022563934326171875, 0.052032470703125, 0.0226287841796875, 0.0022106170654296875, -0.01390838623046875, -0.00921630859375, -0.00176239013671875, -0.04241943359375, -0.0194244384765625, 0.08709716796875, 0.0308685302734375, 0.04388427734375, 0.0195770263671875, 0.03289794921875, 0.008056640625, 0.007137298583984375, -0.043670654296875, 0.0238037109375, -0.004238128662109375, -0.06268310546875, -0.01264190673828125, -0.033660888671875, -0.0706787109375, 0.02569580078125, -0.01035308837890625, -0.061370849609375, 0.0111083984375, 0.0008993148803710938, -0.0421142578125, 0.029052734375, -0.05712890625, 0.0687255859375, -0.0304718017578125, -0.041595458984375, -0.0004177093505859375, -0.060516357421875, 0.03997802734375, 0.006420135498046875, 0.017913818359375, -0.006591796875, 0.01080322265625, 0.068115234375, -0.05401611328125, 0.0400390625, -0.015533447265625, -0.0023899078369140625, 0.040283203125, 0.00339508056640625, 0.05523681640625, 0.01332855224609375, -0.01081085205078125, 0.02825927734375, 0.0074920654296875, -0.03790283203125, -0.0303802490234375, 0.046875, -0.06494140625, -0.0445556640625, -0.041107177734375, -0.0281829833984375, 0.012298583984375, 0.0278167724609375, 0.046966552734375, 0.0233154296875, 0.008697509765625, 0.013397216796875, 
0.03570556640625, -0.0263214111328125, 0.044769287109375, 0.0257720947265625, -0.015838623046875, -0.044677734375, 0.05731201171875, 0.0132293701171875, 0.029815673828125, 0.0232391357421875, 0.0157318115234375, -0.02008056640625, -0.0287933349609375, -0.036865234375, 0.0266265869140625, -0.02996826171875, -0.02032470703125, -0.044189453125, -0.0303802490234375, -0.06768798828125, -0.0037403106689453125, -0.0238189697265625, -0.0231475830078125, -0.01873779296875, -0.0111236572265625, 0.0291748046875, 0.0281829833984375, -0.0162811279296875, 0.019927978515625, -0.055999755859375, 0.0169219970703125, 0.004547119140625, 0.00970458984375, 0.011444091796875, -0.053466796875, -0.03857421875, 0.0250244140625, -0.039154052734375, -0.054107666015625, 0.04595947265625, -0.0037078857421875, 0.038726806640625, 0.04754638671875, -0.00017726421356201172, 0.05810546875, -0.019805908203125, 0.08172607421875, 0.0285797119140625, -0.0589599609375, 0.04754638671875, -0.031341552734375, -0.00010985136032104492, 0.019927978515625, 0.023101806640625, -0.045806884765625, -0.01474761962890625, -0.040985107421875, -0.060150146484375, 0.07965087890625, 0.03857421875, -0.00775146484375, 0.009124755859375, 0.0157470703125, -0.01123809814453125, 0.002979278564453125, -0.0565185546875, -0.057159423828125, -0.02716064453125, -0.002063751220703125, 0.00771331787109375, -0.0155487060546875, -0.0022869110107421875, -0.0272979736328125, 0.061676025390625, 0.0169525146484375, 0.0367431640625, 0.0204925537109375, -0.0025501251220703125, 0.0032100677490234375, -0.01245880126953125, 0.03533935546875, 0.0413818359375, -0.03216552734375, -0.0197906494140625, 0.013702392578125, -0.04742431640625, -0.0038089752197265625, 0.01275634765625, -0.0275726318359375, 0.0035552978515625, 0.02911376953125, 0.063720703125, 0.00562286376953125, -0.028167724609375, 0.04327392578125, -0.002880096435546875, -0.0183563232421875, -0.0184783935546875, 0.004337310791015625, 0.00307464599609375, 0.015777587890625, 
0.019256591796875, 0.0007281303405761719, 0.0011749267578125, -0.0390625, 0.01039886474609375, 0.0159912109375, -0.0190887451171875, -0.0199127197265625, 0.0736083984375, 0.01523590087890625, 0.0008645057678222656, 0.041778564453125, -0.0136566162109375, -0.04425048828125, 0.068359375, 0.03167724609375, 0.0489501953125, -0.0220947265625, 0.007793426513671875, 0.07574462890625, 0.026275634765625, -0.01338958740234375, 0.006328582763671875, 0.013885498046875, -0.03729248046875, 0.0061187744140625, -0.0277557373046875, 0.002803802490234375, 0.017578125, -0.04449462890625, 0.04107666015625, -0.036468505859375, -0.030975341796875, -0.006328582763671875, 0.03582763671875, -0.03497314453125, 0.0270233154296875, 0.004634857177734375, 0.072998046875, -0.0413818359375, 0.0626220703125, 0.03277587890625, -0.05682373046875, -0.0823974609375, -0.00849151611328125, 0.006195068359375, -0.05975341796875, 0.03253173828125, 0.01288604736328125, 0.0206146240234375, -0.010345458984375, -0.035064697265625, -0.07037353515625, 0.1168212890625, 0.0009531974792480469, -0.034698486328125, -0.00824737548828125, -0.000621795654296875, 0.0295867919921875, 0.00244903564453125, 0.045501708984375, 0.04718017578125, 0.03692626953125, 0.007335662841796875, -0.08087158203125, 0.021270751953125, -0.044830322265625, -0.005420684814453125, -0.00446319580078125, -0.10693359375, 0.098388671875, -0.0201873779296875, -0.005123138427734375, 0.0169677734375, 0.053802490234375, 0.035919189453125, 0.01557159423828125, 0.0177154541015625, 0.0328369140625, 0.05322265625, -0.0277252197265625, 0.06439208984375, -0.032684326171875, 0.05462646484375, 0.06121826171875, 0.0024471282958984375, 0.045806884765625, 0.0084381103515625, -0.040435791015625, 0.032958984375, 0.07196044921875, -0.0223846435546875, 0.0360107421875, -0.010528564453125, -0.0146026611328125, -0.00327301025390625, 0.0171661376953125, -0.050628662109375, 0.01016998291015625, 0.0164794921875, -0.020233154296875, 0.0077667236328125, 
-0.0186614990234375, 0.03546142578125, -0.0263671875, -0.0104217529296875, 0.039947509765625, 0.0037136077880859375, -0.052459716796875, 0.06646728515625, 0.0186004638671875, 0.0684814453125, -0.05224609375, 0.00998687744140625, -0.044158935546875, 0.01232147216796875, -0.023834228515625, -0.054046630859375, -0.000644683837890625, 0.0003497600555419922, -0.0028438568115234375, 0.0198211669921875, 0.037506103515625, -0.01611328125, -0.033782958984375, 0.032135009765625, 0.0102081298828125, 0.01143646240234375, 0.01210784912109375, -0.0673828125, 0.0015745162963867188, 0.0121002197265625, -0.039825439453125, 0.0161590576171875, 0.0347900390625, 0.00289154052734375, 0.05120849609375, 0.048431396484375, 0.0023441314697265625, 0.0215301513671875, -0.012298583984375, 0.06640625, -0.048248291015625, -0.03399658203125, -0.06243896484375, 0.048797607421875, -0.01117706298828125, -0.038299560546875, 0.0711669921875, 0.06732177734375, 0.052581787109375, 0.0026912689208984375, 0.06622314453125, -0.0401611328125, 0.0267791748046875, -0.03582763671875, 0.07275390625, -0.054229736328125, 0.007587432861328125, -0.0235137939453125, -0.034820556640625, -0.0207977294921875, 0.050994873046875, -0.015472412109375, 0.00989532470703125, 0.049560546875, 0.0684814453125, 0.00907135009765625, -0.0111236572265625, 0.00679779052734375, 0.033477783203125, 0.0377197265625, 0.07171630859375, 0.032196044921875, -0.0787353515625, 0.057525634765625, -0.048614501953125, -0.0179595947265625, -0.0306396484375, -0.0267791748046875, -0.06982421875, -0.04193115234375, -0.020111083984375, -0.052947998046875, -0.0117950439453125, 0.06793212890625, 0.059906005859375, -0.069580078125, -0.027984619140625, 0.003662109375, 0.0033626556396484375, -0.0299224853515625, -0.01763916015625, 0.0604248046875, -0.0217437744140625, -0.0673828125, -0.0009121894836425781, 0.0042572021484375, 0.007450103759765625, 0.0076751708984375, -0.033294677734375, -0.0275726318359375, -0.0006747245788574219, 0.0318603515625, 
0.0165557861328125, -0.051727294921875, 0.0013551712036132812, 0.0227508544921875, -0.02374267578125, 0.0173492431640625, 0.0214996337890625, -0.0234832763671875, 0.0194549560546875, 0.04022216796875, 0.01084136962890625, 0.0413818359375, -0.0011463165283203125, 0.0168304443359375, -0.0198211669921875, 0.0250396728515625, -0.010894775390625, 0.0374755859375, 0.00296783447265625, -0.0236358642578125, 0.046875, 0.0304718017578125, -0.034423828125, -0.05926513671875, -0.02288818359375, -0.07940673828125, -0.0229339599609375, 0.1014404296875, -0.035797119140625, -0.0276641845703125, 0.0128631591796875, -0.031280517578125, 0.039459228515625, -0.032745361328125, 0.050262451171875, 0.055633544921875, 0.00569915771484375, -0.00997161865234375, -0.041473388671875, 0.027252197265625, 0.020172119140625, -0.057952880859375, 0.0019245147705078125, 0.0156402587890625, 0.026702880859375, 0.006282806396484375, 0.05279541015625, -0.01617431640625, 0.03253173828125, 0.00902557373046875, 0.0009241104125976562, -0.0093231201171875, -0.00016319751739501953, -0.0030918121337890625, -0.0216217041015625, -0.0118865966796875, -0.036224365234375 ] ]
jondurbin/airoboros-l2-13b-2.2.1
2023-09-21T18:39:18.000Z
[ "transformers", "pytorch", "llama", "text-generation", "dataset:jondurbin/airoboros-2.2.1", "license:llama2", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
jondurbin
null
null
jondurbin/airoboros-l2-13b-2.2.1
3
8,329
transformers
2023-09-20T18:35:28
--- license: llama2 datasets: - jondurbin/airoboros-2.2.1 --- ### Overview Another experimental model, using mostly sythetic data generated by [airoboros](https://github.com/jondurbin/airoboros) This is essentially a minor "fix" branch of [airoboros-l2-13b-2.2](https://hf.co/jondurbin/airoboros-l2-13b-2.2) with a updates, primarily: - [re-generated writing responses](https://huggingface.co/datasets/jondurbin/airoboros-2.2.1#re-generated-writing-responses) - [longer contextual blocks](https://huggingface.co/datasets/jondurbin/airoboros-2.2.1#longer-contextual-blocks) - [removal of "rp" data](https://huggingface.co/datasets/jondurbin/airoboros-2.2.1#rp-category-removed) - [(less aggressive) de-censoring](https://huggingface.co/datasets/jondurbin/airoboros-2.2.1#de-censoring) - more fine-tuning epochs This is a fairly general purpose model, but focuses heavily on instruction following, rather than casual chat/roleplay. Huge thank you to the folks over at [a16z](https://a16z.com/) for sponsoring the costs associated with building models and associated tools! ### Prompt format The prompt format: ``` A chat. USER: {prompt} ASSISTANT: ``` The default system prompt ("A chat.") was used for most of the prompts, however it also included a wide sampling of responses with other prompts, particularly in "stylized\_response", "rp", "gtkm", etc. Here's another example: ``` A chat between Bob (aka USER) and Tom (aka ASSISTANT). Tom is an extremely intelligent 18th century bookkeeper, who speaks loquaciously. USER: {prompt} ASSISTANT: ``` And chat scenario that wouldn't require USER/ASSISTANT (but should use stopping criteria to prevent the model from speaking on your behalf). ``` A chat between old friends: Timmy and Tommy. {description of characters} {setting for the chat} Timmy: *takes a big sip from his coffee* "Ah, sweet, delicious, magical coffee." 
Tommy: ``` __*I strongly suggest adding stopping criteria/early inference stopping on "USER:", and/or whatever names you specify in the system prompt.*__ ### Fine tuning info https://wandb.ai/jondurbin/airoboros-l2-13b-2.2.1/runs/zbz8mgaz?workspace=user-jondurbin ### Helpful usage tips *The prompts shown here are are just the text that would be included after USER: and before ASSISTANT: in the full prompt format above, the system prompt and USER:/ASSISTANT: have been omited for readability.* #### Context obedient question answering By obedient, I mean the model was trained to ignore what it thinks it knows, and uses the context to answer the question. The model was also tuned to limit the values to the provided context as much as possible to reduce hallucinations. The format for a closed-context prompt is as follows: ``` BEGININPUT BEGINCONTEXT [key0: value0] [key1: value1] ... other metdata ... ENDCONTEXT [insert your text blocks here] ENDINPUT [add as many other blocks, in the exact same format] BEGININSTRUCTION [insert your instruction(s). The model was tuned with single questions, paragraph format, lists, etc.] ENDINSTRUCTION ``` It's also helpful to add "Don't make up answers if you don't know." to your instruction block to make sure if the context is completely unrelated it doesn't make something up. *The __only__ prompts that need this closed context formating are closed-context instructions. Normal questions/instructions do not!* I know it's a bit verbose and annoying, but after much trial and error, using these explicit delimiters helps the model understand where to find the responses and how to associate specific sources with it. 
- `BEGININPUT` - denotes a new input block - `BEGINCONTEXT` - denotes the block of context (metadata key/value pairs) to associate with the current input block - `ENDCONTEXT` - denotes the end of the metadata block for the current input - [text] - Insert whatever text you want for the input block, as many paragraphs as can fit in the context. - `ENDINPUT` - denotes the end of the current input block - [repeat as many input blocks in this format as you want] - `BEGININSTRUCTION` - denotes the start of the list (or one) instruction(s) to respond to for all of the input blocks above. - [instruction(s)] - `ENDINSTRUCTION` - denotes the end of instruction set It sometimes works without `ENDINSTRUCTION`, but by explicitly including that in the prompt, the model better understands that all of the instructions in the block should be responded to. Here's a trivial, but important example to prove the point: ``` BEGININPUT BEGINCONTEXT date: 2021-01-01 url: https://web.site/123 ENDCONTEXT In a shocking turn of events, blueberries are now green, but will be sticking with the same name. ENDINPUT BEGININSTRUCTION What color are bluberries? Source? ENDINSTRUCTION ``` And the response: ``` Blueberries are now green. Source: date: 2021-01-01 url: https://web.site/123 ``` #### Summarization 500 samples have been included from [this dataset](https://huggingface.co/datasets/mattpscott/airoboros-summarization), using the same format as contextual question answering, for example: ``` BEGININPUT {text to summarize} ENDINPUT BEGININSTRUCTION Summarize the input in around 130 words. ENDINSTRUCTION ``` #### Getting longer responses You can use a few techniques to get longer responses. Detailed prompts, with explicit instruction for word count: ``` Please compose a narrative set in the heart of an ancient library, steeped in the scent of old parchment and ink. The protagonist should be a young scholar who is dedicated to studying the art of storytelling and its evolution throughout history. 
In her pursuit of knowledge, she stumbles upon a forgotten tome that seems to possess an unusual aura. This book has the ability to bring stories to life, literally manifesting characters and scenarios from within its pages into reality. The main character must navigate through various epochs of storytelling - from oral traditions of tribal societies, through medieval minstrels' tales, to modern-day digital narratives - as they come alive around her. Each era presents its unique challenges and lessons about the power and impact of stories on human civilization. One such character could be a sentient quill pen, who was once used by renowned authors of yesteryears and now holds their wisdom and experiences. It becomes her mentor, guiding her through this journey with witty remarks and insightful commentary. Ensure that your tale encapsulates the thrill of adventure, the beauty of learning, and the profound connection between humans and their stories. All characters involved should be non-human entities. Feel free to explore creative liberties but maintain the mentioned elements. Your response should be approximately 2300 words. ``` Or, a simpler example: ``` Please create a long, detailed story about a dragon in an old growth forest who, for some reason, begins speaking the words of the source code of linux. ``` #### Coding You can ask for fairly complex coding instructions with multiple criteria, e.g.: ``` Create a python application with the following requirements: - Asyncio FastAPI webserver - ping endpoint that returns the current date in JSON format - file upload endpoint, which calculates the file's sha256 checksum, and checks postgres to deduplicate ``` Or inline criteria: ``` Write a multi-threaded TCP server in C that accepts a "GET [key]" input and "SET [key] [value]" input, and uses a binary tree to get and store the input values. 
``` You can also optionally add a single space and "PLAINFORMAT" at the end of your prompt to avoid backticks, explanations, etc. and just print the code, e.g.: ``` Write a websocket application in node.js. PLAINFORMAT ``` #### Agent/function calling The dataset includes many examples of function/args generation based on input criteria. This is somewhat similar to the OpenAI function calling, but the output is either JSON or YAML. Example prompt: ``` As an AI assistant, please select the most suitable function and parameters from the list of available functions below, based on the user's input. Provide your response in JSON format. Input: I want to know how many times 'Python' is mentioned in my text file. Available functions: file_analytics: description: This tool performs various operations on a text file. params: action: The operation we want to perform on the data, such as "count_occurrences", "find_line", etc. filters: keyword: The word or phrase we want to search for. ``` Response: ```json { "function": "file_analytics", "params": { "action": "count_occurrences", "filters": { "keyword": "Python" } } } ``` #### Chain-of-thought You can ask for several possible responses to a given problem, with a ranking and final answer selection. Example prompt: ``` A group of people decide to play a round-robin tournament where every player plays every other player exactly once. If a total of 45 games were played, how many players participated in the tournament? Offer several potential answers, rank them according to mathematical logic, and pick the most feasible one. ``` Example response: ``` Solution 1: In a round-robin tournament, each player plays every other player exactly once. This means that the total number of games played is equal to the combination of all players taken two at a time (since a game involves two players). The formula for combinations is nC2 = n(n-1)/2, where n is the total number of players. 
We know that nC2 equals 45 (the total number of games), so we can set up the equation as follows: n(n-1)/2 = 45 Solving this equation gives us n=10. Final answer: There were 10 players in the tournament. Solution 2: Another way to approach this problem is by using the concept of triangular numbers. Triangular numbers are a sequence of numbers where each number can be represented as a triangle with dots. The nth triangular number is given by the sum of the first n natural numbers. If we consider each game as a dot and each player as a side of the triangle, then the total number of games (45) would correspond to the 9th triangular number because 1+2+3+4+5+6+7+8+9=45. However, since each side of the triangle represents a player, and there's one more player than sides in our model (because the last player has no one left to play against), we need to add one more to get the total number of players. Final answer: There were 10 players in the tournament. Solution 3: We could also use trial and error to solve this problem. Starting from a small number of players, we calculate how many games would be played in a round-robin tournament until we reach 45 games. For example, with 2 players, only 1 game is played; with 3 players, 3 games are played; with 4 players, 6 games are played, and so on. Continuing this process, we find that with 10 players, 45 games are played. Final answer: There were 10 players in the tournament. Ranking of solutions from best to worst: 1. Solution 1: This solution uses direct mathematical reasoning based on the properties of a round-robin tournament and solves an equation to find the answer. 2. Solution 2: This solution uses the concept of triangular numbers which requires additional steps and knowledge beyond the immediate problem context, but still arrives at the correct answer. 3. Solution 3: This solution relies on trial and error, which is less efficient and reliable than the other methods, but it eventually gets to the right answer. 
Best and final answer: There were 10 players in the tournament. ``` #### reWOO style execution planning The model now supports execution planning for complex instructions that would require making use of several tools. The output is just the plan, you must implement a mechanism to parse the output and actually call the functions! Example prompt: ``` Please construct a systematic plan to generate an optimal response to the user instruction, utilizing a set of provided tools. Each plan will correspond to an evidence value, which will be the output of one of the available functions given an input string that could be the user's question, one or more prior evidence values, or a combination of both. Here are the tools available to be called: DuckDuckGo[input]: Tool that enables the user to search for information using the DuckDuckGo search engine. This tool is helpful in locating a suitable list of sites that may contain the answer to the user's query. It does not directly answer the question but finds a list of sites that might have the answer. TextScraper[input]: Load one or more websites from the input string containing newline delimited links, where input is one or more links, and produces plain text output containing the content of the links. HyperlinkExtractor[input]: Extract links from plain text and produces a plain text, newline delimited response of links. KnowledgeModel[input]: Question answering language model, particularly useful in answering questions based on an input passage of text. The input must be a text question that references an :evidence[n]: variable, e.g. What color is the cat, given :evidence1:? The output should be in format: Plan: [first action to take based on input question] :evidence0: = [function to call with input parameter] Plan: [next action to take, based on result of :evidence0:] :evidence1: = [next function to call and input parameter, which may include reference to previous evidence, e.g. "Given context :evidence0"] ... 
Answer: [:evidence[n]: containing the final answer.] Question: Who won the Nobel Prize in Literature in 2020 and what is their most famous work? ``` Response: ``` Plan: Begin by conducting a web search to find out who won the Nobel Prize in Literature in 2020. :evidence0: = DuckDuckGo[Nobel Prize in Literature 2020 winner] Plan: Utilize the KnowledgeModel to interpret the search results and find the name of the laureate. :evidence1: = KnowledgeModel[Who won the Nobel Prize in Literature in 2020, given :evidence0:?] Plan: Conduct another web search to find the most famous work of the identified laureate. :evidence2: = DuckDuckGo[Most famous work of :evidence1:] Plan: Extract the relevant links from the DuckDuckGo search results for a more focused search. :evidence3: = HyperlinkExtractor[:evidence2:] Plan: Use the TextScraper tool to extract information from the relevant links. :evidence4: = TextScraper[:evidence3:] Plan: Finally, utilize the KnowledgeModel to identify and summarize the most famous work of the laureate from the extracted information. :evidence5: = KnowledgeModel[What is the most famous work of :evidence1:, given :evidence4:?] Answer: :evidence5: ``` For this to be useful, you'd have to parse the output plan text, and implement/call each of the functions. This is just pseudo-code, completely untested off the top of my head, and obviously would requiring full implementation + hardening: ```python import re import requests def inject_context(input_text, **context): for ref in set(re.findall(r"(:evidence[0-9]+:)", input_text, re.I)): input_text = input_text.replace(ref, context.get(ref, "")) return input_text def duckduckgo(input_text, **context): search_string = inject_context(input_text, **context) ... search via duck duck go using search_string ... 
return text content def link_extractor(input_text, **context): input_text = inject_context(input_text, **context) return "\n".join(list(set(re.findall(r"(https?://[^\s]+?\.?)", input_text, re.I)))) def scrape(input_text, **context): input_text = inject_context(input_text, **context) text = [] for link in input_text.splitlines(): text.append(requests.get(link).text) return "\n".join(text) def infer(input_text, **context) prompt = inject_context(input_text, **context) ... call model with prompt, return output def parse_plan(plan): method_map = { "DuckDuckGo": duckduckgo, "HyperlinkExtractor": link_extractor, "KnowledgeModel": infer, "TextScraper": scrape, } context = {} for line in plan.strip().splitlines(): if line.startswith("Plan:"): print(line) continue parts = re.match("^(:evidence[0-9]+:)\s*=\s*([^\[]+])(\[.*\])\s$", line, re.I) if not parts: if line.startswith("Answer: "): return context.get(line.split(" ")[-1].strip(), "Answer couldn't be generated...") raise RuntimeError("bad format: " + line) context[parts.group(1)] = method_map[parts.group(2)](parts.group(3), **context) ``` ### Contribute If you're interested in new functionality, particularly a new "instructor" type to generate a specific type of training data, take a look at the dataset generation tool repo: https://github.com/jondurbin/airoboros and either make a PR or open an issue with details. To help me with the OpenAI/compute costs: - https://bmc.link/jondurbin - ETH 0xce914eAFC2fe52FdceE59565Dd92c06f776fcb11 - BTC bc1qdwuth4vlg8x37ggntlxu5cjfwgmdy5zaa7pswf ### Licence and usage restrictions The airoboros 2.2 models are built on top of llama-2/codellama. The llama-2 base model has a custom Meta license: - See the [meta-license/LICENSE.txt](meta-license/LICENSE.txt) file attached for the original license provided by Meta. - See also [meta-license/USE_POLICY.md](meta-license/USE_POLICY.md) and [meta-license/Responsible-Use-Guide.pdf](meta-license/Responsible-Use-Guide.pdf), also provided by Meta. 
The fine-tuning data was mostly generated by OpenAI API calls to gpt-4, via [airoboros](https://github.com/jondurbin/airoboros) The ToS for OpenAI API usage has a clause preventing the output from being used to train a model that __competes__ with OpenAI - what does *compete* actually mean here? - these small open source models will not produce output anywhere near the quality of gpt-4, or even gpt-3.5, so I can't imagine this could credibly be considered competing in the first place - if someone else uses the dataset to do the same, they wouldn't necessarily be violating the ToS because they didn't call the API, so I don't know how that works - the training data used in essentially all large language models includes a significant amount of copyrighted or otherwise non-permissive licensing in the first place - other work using the self-instruct method, e.g. the original here: https://github.com/yizhongw/self-instruct released the data and model as apache-2 I am purposingly leaving this license ambiguous (other than the fact you must comply with the Meta original license for llama-2) because I am not a lawyer and refuse to attempt to interpret all of the terms accordingly. Your best bet is probably to avoid using this commercially due to the OpenAI API usage. Either way, by using this model, you agree to completely indemnify me.
18,792
[ [ -0.0263519287109375, -0.08306884765625, 0.033660888671875, 0.0038013458251953125, -0.003559112548828125, -0.0245361328125, -0.011962890625, -0.032989501953125, 0.025390625, 0.041046142578125, -0.060546875, -0.032470703125, -0.0258026123046875, 0.01409912109375, -0.0149993896484375, 0.08447265625, 0.00585174560546875, -0.015777587890625, -0.01439666748046875, 0.003147125244140625, -0.05242919921875, -0.035888671875, -0.05853271484375, -0.004611968994140625, 0.038543701171875, 0.036224365234375, 0.045989990234375, 0.047088623046875, 0.021331787109375, 0.02508544921875, -0.01300048828125, 0.023162841796875, -0.03497314453125, 0.0222015380859375, -0.0110321044921875, -0.031890869140625, -0.03289794921875, 0.0004744529724121094, 0.03424072265625, 0.0511474609375, -0.00449371337890625, 0.0174560546875, 0.004146575927734375, 0.032440185546875, -0.03973388671875, 0.007022857666015625, -0.0138397216796875, 0.0006375312805175781, -0.01050567626953125, -0.021331787109375, -0.03778076171875, -0.019744873046875, 0.007389068603515625, -0.06671142578125, 0.0004832744598388672, 0.0242767333984375, 0.06463623046875, 0.01004791259765625, -0.032379150390625, -0.0215301513671875, -0.045166015625, 0.055389404296875, -0.05816650390625, 0.01007843017578125, 0.04705810546875, 0.030548095703125, -0.025238037109375, -0.06707763671875, -0.048431396484375, -0.0146636962890625, -0.018402099609375, 0.01239013671875, -0.0161285400390625, -0.006664276123046875, 0.037384033203125, 0.007659912109375, -0.05548095703125, -0.0136871337890625, -0.044891357421875, -0.01445770263671875, 0.039947509765625, 0.03448486328125, 0.030792236328125, -0.031494140625, -0.02734375, -0.0106658935546875, -0.0372314453125, 0.027496337890625, 0.023773193359375, 0.02764892578125, -0.019775390625, 0.04486083984375, -0.0253753662109375, 0.05291748046875, 0.00417327880859375, -0.0134124755859375, -0.0007815361022949219, -0.032012939453125, -0.019073486328125, -0.01079559326171875, 0.064208984375, 0.05609130859375, 
0.01406097412109375, 0.005420684814453125, -0.0017480850219726562, -0.0090484619140625, 0.013946533203125, -0.0633544921875, -0.02166748046875, 0.035736083984375, -0.055694580078125, -0.020172119140625, -0.00949859619140625, -0.061676025390625, -0.0281524658203125, -0.01470184326171875, 0.02960205078125, -0.038177490234375, -0.00878143310546875, 0.00902557373046875, -0.0279083251953125, 0.0062408447265625, 0.037506103515625, -0.0732421875, 0.036529541015625, 0.022979736328125, 0.056793212890625, 0.00807952880859375, -0.0213165283203125, -0.029327392578125, 0.002109527587890625, -0.0098419189453125, 0.060150146484375, -0.03997802734375, -0.0312042236328125, -0.0182342529296875, 0.0240936279296875, -0.0008978843688964844, -0.023712158203125, 0.035552978515625, -0.02667236328125, 0.048736572265625, -0.03314208984375, -0.03778076171875, -0.02264404296875, 0.0150909423828125, -0.028900146484375, 0.0697021484375, 0.006458282470703125, -0.06378173828125, -0.0101470947265625, -0.06597900390625, -0.01751708984375, -0.002758026123046875, -0.0028018951416015625, 0.01043701171875, -0.0158538818359375, 0.01206207275390625, 0.034454345703125, -0.0273284912109375, 0.0177154541015625, -0.0270538330078125, -0.024261474609375, 0.0288543701171875, -0.0268096923828125, 0.09197998046875, 0.021148681640625, -0.01151275634765625, -0.0017538070678710938, -0.052398681640625, -0.00382232666015625, 0.01094818115234375, -0.019744873046875, -0.0175018310546875, -0.0063934326171875, 0.0085906982421875, -0.00281524658203125, 0.0213165283203125, -0.051055908203125, 0.0306243896484375, -0.0283203125, 0.049896240234375, 0.046539306640625, 0.01611328125, 0.02557373046875, -0.044677734375, 0.039337158203125, -0.0053558349609375, 0.017303466796875, -0.047576904296875, -0.040618896484375, -0.04949951171875, 0.0022640228271484375, 0.0103607177734375, 0.0660400390625, -0.047637939453125, 0.028900146484375, 0.0013942718505859375, -0.037994384765625, -0.0195465087890625, -0.01531982421875, 
0.0266876220703125, 0.0643310546875, 0.037872314453125, -0.002529144287109375, -0.042755126953125, -0.059661865234375, 0.01200103759765625, -0.032806396484375, -0.006580352783203125, 0.039520263671875, 0.038421630859375, -0.0116119384765625, 0.074951171875, -0.0709228515625, 0.0028095245361328125, -0.0092010498046875, -0.002223968505859375, 0.011260986328125, 0.0570068359375, 0.033477783203125, -0.057464599609375, -0.016845703125, -0.0157928466796875, -0.0633544921875, -0.0115966796875, -0.0164337158203125, -0.0187835693359375, 0.004962921142578125, 0.038604736328125, -0.048797607421875, 0.0338134765625, 0.01275634765625, -0.04840087890625, 0.052001953125, -0.00852203369140625, 0.0251007080078125, -0.0994873046875, 0.0261993408203125, -0.01303863525390625, -0.0068359375, -0.046234130859375, 0.0134124755859375, -0.0128631591796875, -0.013397216796875, -0.032257080078125, 0.06951904296875, -0.0272674560546875, 0.0145416259765625, -0.00423431396484375, 0.01035308837890625, 0.0210723876953125, 0.048736572265625, 0.006473541259765625, 0.04632568359375, 0.03546142578125, -0.04632568359375, 0.043243408203125, 0.033935546875, 0.005908966064453125, 0.0452880859375, -0.06884765625, 0.012725830078125, -0.0142364501953125, 0.0288543701171875, -0.0853271484375, -0.020904541015625, 0.047882080078125, -0.054046630859375, 0.01439666748046875, -0.0022335052490234375, -0.030487060546875, -0.0263214111328125, -0.023162841796875, 0.01275634765625, 0.0355224609375, -0.0239410400390625, 0.0531005859375, 0.0190277099609375, -0.016021728515625, -0.03955078125, -0.055389404296875, 0.01389312744140625, -0.0241851806640625, -0.052642822265625, 0.0290069580078125, -0.040985107421875, -0.024139404296875, -0.004604339599609375, 0.0090789794921875, -0.0183868408203125, 0.0164947509765625, 0.0249481201171875, 0.0231781005859375, -0.01490020751953125, 0.00785064697265625, 0.01486968994140625, 0.00211334228515625, 0.0047454833984375, -0.01264190673828125, 0.03948974609375, -0.0177764892578125, 
-0.0114593505859375, -0.040740966796875, 0.045440673828125, 0.030426025390625, -0.002655029296875, 0.044586181640625, 0.04058837890625, -0.039581298828125, 0.0088043212890625, -0.031402587890625, -0.034698486328125, -0.03790283203125, 0.012115478515625, -0.028839111328125, -0.051605224609375, 0.049957275390625, 0.0178680419921875, 0.0209503173828125, 0.0253448486328125, 0.0245513916015625, -0.02679443359375, 0.054931640625, 0.03704833984375, 0.0190582275390625, 0.0259246826171875, -0.021759033203125, -0.001209259033203125, -0.06597900390625, -0.030426025390625, -0.040374755859375, -0.025665283203125, -0.03668212890625, -0.01261138916015625, 0.0208740234375, 0.024932861328125, -0.0242919921875, 0.034637451171875, -0.046234130859375, 0.0312042236328125, 0.05084228515625, 0.0135955810546875, 0.01195526123046875, -0.0235137939453125, -0.0019197463989257812, -0.0022563934326171875, -0.054290771484375, -0.0579833984375, 0.067626953125, 0.03155517578125, 0.05029296875, 0.01361846923828125, 0.05474853515625, 0.017913818359375, -0.0030059814453125, -0.04412841796875, 0.052764892578125, -0.00119781494140625, -0.048675537109375, -0.03277587890625, -0.01461029052734375, -0.07781982421875, 0.0092315673828125, -0.01361083984375, -0.07940673828125, 0.0258636474609375, 0.0129852294921875, -0.071044921875, 0.0022335052490234375, -0.06365966796875, 0.078369140625, -0.004199981689453125, -0.024566650390625, 0.0137481689453125, -0.07733154296875, 0.01806640625, 0.03082275390625, 0.0052490234375, 0.004302978515625, -0.01047515869140625, 0.06341552734375, -0.03375244140625, 0.0777587890625, -0.01511383056640625, 0.0176849365234375, 0.037353515625, 0.01131439208984375, 0.0174713134765625, 0.01776123046875, 0.00418853759765625, -0.005157470703125, 0.038055419921875, -0.0238494873046875, -0.04742431640625, 0.05047607421875, -0.05877685546875, -0.03668212890625, -0.026397705078125, -0.047821044921875, 0.0115203857421875, 0.019317626953125, 0.024871826171875, 0.060760498046875, 
-0.017547607421875, 0.0094146728515625, 0.0599365234375, -0.036102294921875, 0.047576904296875, 0.045928955078125, -0.0262451171875, -0.03326416015625, 0.045654296875, 0.006542205810546875, 0.006893157958984375, 0.034210205078125, 0.024017333984375, -0.017486572265625, -0.012847900390625, -0.057403564453125, 0.01556396484375, -0.05596923828125, -0.01508331298828125, -0.0806884765625, -0.0074310302734375, -0.048095703125, -0.0155792236328125, 0.005199432373046875, -0.044830322265625, -0.052764892578125, -0.003398895263671875, 0.0389404296875, 0.039337158203125, -0.00041484832763671875, 0.03582763671875, -0.058563232421875, 0.0224151611328125, 0.0198822021484375, -0.005268096923828125, 0.004638671875, -0.044342041015625, -0.005115509033203125, 0.0097808837890625, -0.033843994140625, -0.08148193359375, 0.0355224609375, 0.01035308837890625, 0.03167724609375, 0.035797119140625, 0.0197906494140625, 0.050201416015625, -0.0313720703125, 0.0872802734375, -0.005680084228515625, -0.05694580078125, 0.0587158203125, -0.04229736328125, 0.031402587890625, 0.044189453125, 0.0313720703125, -0.064453125, -0.026519775390625, -0.048828125, -0.06500244140625, 0.0623779296875, 0.01446533203125, 0.0201263427734375, -0.01045989990234375, 0.0355224609375, 0.0059356689453125, 0.012420654296875, -0.060455322265625, -0.0292205810546875, -0.0229949951171875, -0.006656646728515625, 0.009490966796875, -0.0165252685546875, -0.01142120361328125, -0.024078369140625, 0.04443359375, -0.007015228271484375, 0.0361328125, 0.0243682861328125, 0.00592041015625, 0.0009703636169433594, 0.0171661376953125, 0.05145263671875, 0.04302978515625, -0.025360107421875, 0.005519866943359375, 0.00897979736328125, -0.0295867919921875, -0.004638671875, 0.01479339599609375, -0.0177459716796875, -0.0144195556640625, 0.032470703125, 0.0614013671875, 0.004703521728515625, -0.051025390625, 0.02801513671875, -0.020721435546875, 0.006256103515625, -0.028717041015625, 0.0254974365234375, 0.0179290771484375, 0.0205230712890625, 
0.0243377685546875, -0.0003514289855957031, 0.0220947265625, -0.049560546875, 0.006103515625, 0.01364898681640625, -0.01175689697265625, -0.024261474609375, 0.04974365234375, 0.022796630859375, -0.04107666015625, 0.04730224609375, -0.026824951171875, -0.0295562744140625, 0.060272216796875, 0.055084228515625, 0.060699462890625, -0.00862884521484375, 0.0213775634765625, 0.040618896484375, 0.023101806640625, 0.007472991943359375, 0.033599853515625, -0.018463134765625, -0.047271728515625, -0.002246856689453125, -0.049072265625, -0.022613525390625, 0.01349639892578125, -0.046630859375, 0.0247039794921875, -0.04156494140625, -0.00603485107421875, 0.007110595703125, 0.0037517547607421875, -0.044219970703125, 0.0165252685546875, -0.002384185791015625, 0.079833984375, -0.08184814453125, 0.03985595703125, 0.0677490234375, -0.051788330078125, -0.058502197265625, -0.005039215087890625, 0.0089111328125, -0.034759521484375, 0.03900146484375, 0.02142333984375, 0.0114593505859375, 0.0023975372314453125, -0.056732177734375, -0.060882568359375, 0.0947265625, 0.00794219970703125, -0.0250701904296875, -0.01666259765625, -0.01776123046875, 0.04803466796875, -0.037811279296875, 0.05377197265625, 0.031707763671875, 0.037506103515625, 0.005107879638671875, -0.06927490234375, 0.0193023681640625, -0.037994384765625, 0.00020611286163330078, -0.00945281982421875, -0.055389404296875, 0.08148193359375, -0.022064208984375, -0.026519775390625, 0.0218658447265625, 0.054534912109375, 0.0123748779296875, 0.027618408203125, 0.0280914306640625, 0.036407470703125, 0.0675048828125, 0.0005221366882324219, 0.076416015625, -0.0263214111328125, 0.00693511962890625, 0.0911865234375, -0.0007824897766113281, 0.0538330078125, 0.0280303955078125, -0.0158843994140625, 0.045257568359375, 0.07421875, 0.003597259521484375, 0.0311737060546875, 0.0014619827270507812, -0.00847625732421875, 0.0003476142883300781, -0.02166748046875, -0.0303497314453125, 0.022369384765625, 0.01407623291015625, -0.019744873046875, 
-0.0019330978393554688, 0.019195556640625, 0.0239410400390625, -0.00045108795166015625, -0.003803253173828125, 0.056793212890625, 0.0034389495849609375, -0.06085205078125, 0.041351318359375, 0.007396697998046875, 0.048309326171875, -0.0462646484375, -0.01617431640625, -0.0258331298828125, -0.01052093505859375, -0.0131988525390625, -0.0640869140625, 0.0261688232421875, -0.00341796875, -0.03363037109375, -0.0191192626953125, 0.048980712890625, -0.03143310546875, -0.0261993408203125, 0.006137847900390625, 0.029632568359375, 0.04107666015625, 0.01114654541015625, -0.05023193359375, 0.010528564453125, 0.004329681396484375, -0.025909423828125, 0.009033203125, 0.037200927734375, -0.0015058517456054688, 0.054168701171875, 0.04412841796875, 0.001255035400390625, -0.0216217041015625, -0.02032470703125, 0.0716552734375, -0.05718994140625, -0.04718017578125, -0.058258056640625, 0.052398681640625, -0.004425048828125, -0.04254150390625, 0.051513671875, 0.03826904296875, 0.054595947265625, 0.0003266334533691406, 0.046783447265625, -0.03680419921875, 0.03656005859375, -0.037841796875, 0.050079345703125, -0.03619384765625, 0.01953125, -0.0177001953125, -0.05426025390625, 0.0005412101745605469, 0.051422119140625, -0.01020050048828125, 0.0019855499267578125, 0.056488037109375, 0.07275390625, 0.00485992431640625, 0.017181396484375, 0.0105438232421875, 0.0206451416015625, 0.01267242431640625, 0.043182373046875, 0.05816650390625, -0.037322998046875, 0.0291900634765625, -0.0238494873046875, -0.037353515625, -0.00777435302734375, -0.06158447265625, -0.0718994140625, -0.059814453125, -0.0138092041015625, -0.0379638671875, 0.01120758056640625, 0.08563232421875, 0.0487060546875, -0.0577392578125, -0.0278167724609375, 0.01861572265625, 0.000972747802734375, -0.0148162841796875, -0.0233001708984375, 0.015838623046875, -0.005199432373046875, -0.0518798828125, 0.0275726318359375, 0.0005369186401367188, 0.0189208984375, -0.0262451171875, 0.005329132080078125, -0.02044677734375, 0.011260986328125, 
0.03997802734375, 0.0283660888671875, -0.057342529296875, -0.0260772705078125, 0.0229644775390625, -0.00634765625, -0.004730224609375, 0.041717529296875, -0.06658935546875, 0.035858154296875, 0.039154052734375, 0.024078369140625, 0.0300750732421875, 0.0049896240234375, 0.033782958984375, -0.047088623046875, -0.00397491455078125, 0.022369384765625, 0.0252532958984375, 0.02545166015625, -0.059051513671875, 0.033660888671875, 0.020233154296875, -0.05157470703125, -0.06732177734375, 0.005893707275390625, -0.08209228515625, -0.040985107421875, 0.0899658203125, -0.01514434814453125, -0.0261993408203125, -0.01031494140625, -0.048980712890625, 0.0099639892578125, -0.050018310546875, 0.048553466796875, 0.06365966796875, -0.033203125, -0.0088348388671875, -0.036376953125, 0.0316162109375, 0.0140838623046875, -0.0687255859375, 0.012603759765625, 0.05926513671875, 0.025848388671875, 0.0216064453125, 0.06500244140625, 0.0158233642578125, 0.0193634033203125, 0.00479888916015625, 0.00022792816162109375, -0.0234375, -0.0360107421875, -0.0175628662109375, 0.0095672607421875, -0.0189971923828125, -0.0165557861328125 ] ]
mosaicml/mpt-7b-8k-chat
2023-10-30T21:54:04.000Z
[ "transformers", "pytorch", "mpt", "text-generation", "Composer", "MosaicML", "llm-foundry", "custom_code", "dataset:camel-ai/code", "dataset:ehartford/wizard_vicuna_70k_unfiltered", "dataset:anon8231489123/ShareGPT_Vicuna_unfiltered", "dataset:teknium1/GPTeacher/roleplay-instruct-v2-final", "dataset:teknium1/GPTeacher/codegen-isntruct", "dataset:timdettmers/openassistant-guanaco", "dataset:camel-ai/math", "dataset:project-baize/baize-chatbot/medical_chat_data", "dataset:project-baize/baize-chatbot/quora_chat_data", "dataset:project-baize/baize-chatbot/stackoverflow_chat_data", "dataset:camel-ai/biology", "dataset:camel-ai/chemistry", "dataset:camel-ai/ai_society", "dataset:jondurbin/airoboros-gpt4-1.2", "dataset:LongConversations", "dataset:camel-ai/physics", "arxiv:2205.14135", "arxiv:2108.12409", "arxiv:2010.04245", "license:cc-by-nc-sa-4.0", "has_space", "text-generation-inference", "region:us" ]
text-generation
mosaicml
null
null
mosaicml/mpt-7b-8k-chat
30
8,319
transformers
2023-06-22T22:00:29
--- license: cc-by-nc-sa-4.0 datasets: - camel-ai/code - ehartford/wizard_vicuna_70k_unfiltered - anon8231489123/ShareGPT_Vicuna_unfiltered - teknium1/GPTeacher/roleplay-instruct-v2-final - teknium1/GPTeacher/codegen-isntruct - timdettmers/openassistant-guanaco - camel-ai/math - project-baize/baize-chatbot/medical_chat_data - project-baize/baize-chatbot/quora_chat_data - project-baize/baize-chatbot/stackoverflow_chat_data - camel-ai/biology - camel-ai/chemistry - camel-ai/ai_society - jondurbin/airoboros-gpt4-1.2 - LongConversations - camel-ai/physics tags: - Composer - MosaicML - llm-foundry inference: false --- # MPT-7B-Chat-8k MPT-7B-Chat-8k is a chatbot-like model for dialogue generation. It was built by finetuning [MPT-7B-8k](https://huggingface.co/mosaicml/mpt-7b-8k) on the [ShareGPT-Vicuna](https://huggingface.co/datasets/anon8231489123/ShareGPT_Vicuna_unfiltered), [Camel-AI](https://huggingface.co/camel-ai), [GPTeacher](https://github.com/teknium1/GPTeacher), [Guanaco](https://huggingface.co/datasets/timdettmers/openassistant-guanaco), [Baize](https://github.com/project-baize/baize-chatbot) and some generated datasets. This is the same dataset that [MPT-30B-Chat](https://huggingface.co/mosaicml/mpt-30b-chat) was trained on. * License: _CC-By-NC-SA-4.0_ (non-commercial use only) This model was trained by [MosaicML](https://www.mosaicml.com) and follows a modified decoder-only transformer architecture. ## Model Date July 18, 2023 ## Model License _CC-By-NC-SA-4.0_ (non-commercial use only) ## Documentation * [Blog post: MPT-7B-8k](https://www.mosaicml.com/blog/long-context-mpt-7b-8k) * [Codebase (mosaicml/llm-foundry repo)](https://github.com/mosaicml/llm-foundry/) * Questions: Feel free to contact us via the [MosaicML Community Slack](https://mosaicml.me/slack)! ## How to Use This model is best used with the MosaicML [llm-foundry repository](https://github.com/mosaicml/llm-foundry) for training and finetuning. 
```python import transformers model = transformers.AutoModelForCausalLM.from_pretrained( 'mosaicml/mpt-7b-chat-8k', trust_remote_code=True ) ``` Note: This model requires that `trust_remote_code=True` be passed to the `from_pretrained` method. This is because we use a custom `MPT` model architecture that is not yet part of the Hugging Face `transformers` package. `MPT` includes options for many training efficiency features such as [FlashAttention](https://arxiv.org/pdf/2205.14135.pdf), [ALiBi](https://arxiv.org/abs/2108.12409), [QK LayerNorm](https://arxiv.org/abs/2010.04245), and more. To use the optimized [triton implementation](https://github.com/openai/triton) of FlashAttention, you can load the model on GPU (`cuda:0`) with `attn_impl='triton'` and with `bfloat16` precision: ```python import torch import transformers name = 'mosaicml/mpt-7b-chat-8k' config = transformers.AutoConfig.from_pretrained(name, trust_remote_code=True) config.attn_config['attn_impl'] = 'triton' # change this to use triton-based FlashAttention config.init_device = 'cuda:0' # For fast initialization directly on GPU! model = transformers.AutoModelForCausalLM.from_pretrained( name, config=config, torch_dtype=torch.bfloat16, # Load model weights in bfloat16 trust_remote_code=True ) ``` The model was trained initially with a sequence length of 2048 with an additional pretraining stage for sequence length adapation up to 8192. However, ALiBi enables users to increase the maximum sequence length even further during finetuning and/or inference. 
For example: ```python import transformers name = 'mosaicml/mpt-7b-chat-8k' config = transformers.AutoConfig.from_pretrained(name, trust_remote_code=True) config.max_seq_len = 16384 # (input + output) tokens can now be up to 16384 model = transformers.AutoModelForCausalLM.from_pretrained( name, config=config, trust_remote_code=True ) ``` This model was trained with the MPT-7B-chat tokenizer which is based on the [EleutherAI/gpt-neox-20b](https://huggingface.co/EleutherAI/gpt-neox-20b) tokenizer and includes additional ChatML tokens. ```python from transformers import AutoTokenizer tokenizer = AutoTokenizer.from_pretrained('mosaicml/mpt-7b-8k') ``` The model can then be used, for example, within a text-generation pipeline. Note: when running Torch modules in lower precision, it is best practice to use the [torch.autocast context manager](https://pytorch.org/docs/stable/amp.html). ```python from transformers import pipeline with torch.autocast('cuda', dtype=torch.bfloat16): inputs = tokenizer('Here is a recipe for vegan banana bread:\n', return_tensors="pt").to('cuda') outputs = model.generate(**inputs, max_new_tokens=100) print(tokenizer.batch_decode(outputs, skip_special_tokens=True)) # or using the HF pipeline pipe = pipeline('text-generation', model=model, tokenizer=tokenizer, device='cuda:0') with torch.autocast('cuda', dtype=torch.bfloat16): print( pipe('Here is a recipe for vegan banana bread:\n', max_new_tokens=100, do_sample=True, use_cache=True)) ``` ## Model Description The architecture is a modification of a standard decoder-only transformer. 
The model has been modified from a standard transformer in the following ways: * It uses [FlashAttention](https://arxiv.org/pdf/2205.14135.pdf) * It uses [ALiBi (Attention with Linear Biases)](https://arxiv.org/abs/2108.12409) and does not use positional embeddings * It does not use biases | Hyperparameter | Value | |----------------|-------| |n_parameters | 6.7B | |n_layers | 32 | | n_heads | 32 | | d_model | 4096 | | vocab size | 50432 | | sequence length | 2048 | ## Data Mix The model was trained on the following data mix: | Data Source | Number of Tokens in Source | Proportion | |-------------|----------------------------|------------| | Airoboros/GPT4-1.2 | 26.4M | 1.71% | | Baize | 55.0M | 3.57% | | Camel | 301M | 19.54% | | GPTeacher | 7.56M | 0.49% | | Guanaco | 15.6M | 1.02% | | LongCoversations | 18.4M | 1.19% | | ShareGPT | 821M | 53.24% | | WizardLM | 297M | 19.23% | "LongConversations" is a GPT3.5/4-generated dataset, details of which will be released at a later date. ### Training Configuration This model was trained on 192 H100s for about 48 minutes using the [MosaicML Platform](https://www.mosaicml.com/platform). The model was trained with sharded data parallelism using [FSDP](https://pytorch.org/docs/stable/fsdp.html) and used the AdamW optimizer. ## Limitations and Biases _The following language is modified from [EleutherAI's GPT-NeoX-20B](https://huggingface.co/EleutherAI/gpt-neox-20b)_ MPT-7B-Chat-8k can produce factually incorrect output, and should not be relied on to produce factually accurate information. MPT-7B-Chat-8k was trained on various public datasets. While great efforts have been taken to clean the pretraining data, it is possible that this model could generate lewd, biased or otherwise offensive outputs. ## Acknowledgements This model was finetuned by the MosaicML NLP team ## Disclaimer The license on this model does not constitute legal advice. We are not responsible for the actions of third parties who use this model. 
Please consult an attorney before using this model for commercial purposes. ## MosaicML Platform If you're interested in [training](https://www.mosaicml.com/training) and [deploying](https://www.mosaicml.com/inference) your own MPT or LLMs on the MosaicML Platform, [sign up here](https://www.mosaicml.com/get-started?utm_source=huggingface&utm_medium=referral&utm_campaign=mpt-7b-8k). ## Citation Please cite this model using the following format: ``` @online{MosaicML2023Introducing, author = {MosaicML NLP Team}, title = {Introducing MPT-30B: Raising the bar for open-source foundation models}, year = {2023}, url = {www.mosaicml.com/blog/mpt-30b}, note = {Accessed: 2023-06-22}, urldate = {2023-06-22} } ```
7,959
[ [ -0.034881591796875, -0.04461669921875, 0.01128387451171875, 0.0300140380859375, -0.0224761962890625, -0.0011682510375976562, -0.00887298583984375, -0.024383544921875, 0.00452423095703125, 0.0242462158203125, -0.044891357421875, -0.041717529296875, -0.05047607421875, 0.00211334228515625, -0.02197265625, 0.07928466796875, 0.006679534912109375, -0.00428009033203125, 0.0101165771484375, -0.00516510009765625, -0.01806640625, -0.03363037109375, -0.053863525390625, -0.02593994140625, 0.0308074951171875, 0.01285552978515625, 0.057342529296875, 0.05963134765625, 0.029296875, 0.02508544921875, -0.0177459716796875, 0.0101776123046875, -0.04119873046875, -0.0277557373046875, 0.01001739501953125, -0.0283203125, -0.04083251953125, 0.01299285888671875, 0.03326416015625, 0.0213165283203125, -0.0100860595703125, 0.0245361328125, 0.005252838134765625, 0.0207977294921875, -0.0260772705078125, 0.02435302734375, -0.03692626953125, 0.007457733154296875, -0.006805419921875, -0.0009198188781738281, -0.040130615234375, -0.016845703125, 0.00897216796875, -0.037628173828125, 0.020843505859375, 0.00273895263671875, 0.0804443359375, 0.01214599609375, -0.0289154052734375, -0.00263214111328125, -0.04400634765625, 0.049285888671875, -0.06903076171875, 0.0280303955078125, 0.0176239013671875, 0.016632080078125, -0.004638671875, -0.07464599609375, -0.055999755859375, -0.016143798828125, -0.004245758056640625, 0.0194091796875, -0.02362060546875, 0.00591278076171875, 0.034942626953125, 0.0274658203125, -0.04400634765625, -0.0084991455078125, -0.033721923828125, -0.02008056640625, 0.033905029296875, 0.0141448974609375, 0.0221710205078125, -0.03570556640625, -0.047454833984375, -0.0281524658203125, -0.047119140625, 0.0010051727294921875, 0.0321044921875, 0.0012178421020507812, -0.03729248046875, 0.0435791015625, -0.003948211669921875, 0.03961181640625, 0.01450347900390625, -0.004726409912109375, 0.0318603515625, -0.0301513671875, -0.02728271484375, -0.0070343017578125, 0.08953857421875, 
0.0272064208984375, 0.00640106201171875, 0.002620697021484375, -0.0020503997802734375, -0.00420379638671875, 0.001209259033203125, -0.0819091796875, -0.031402587890625, 0.0167236328125, -0.034515380859375, -0.0202484130859375, 0.0033359527587890625, -0.04339599609375, -0.014862060546875, -0.006999969482421875, 0.04913330078125, -0.057159423828125, -0.03167724609375, 0.0018796920776367188, -0.0128936767578125, 0.02362060546875, 0.00681304931640625, -0.0626220703125, 0.004871368408203125, 0.0310516357421875, 0.07476806640625, -0.0039825439453125, -0.038482666015625, -0.00765228271484375, -0.0015153884887695312, -0.0009398460388183594, 0.0355224609375, -0.0128021240234375, -0.019287109375, -0.030242919921875, 0.01189422607421875, -0.0198974609375, -0.032440185546875, 0.0242919921875, -0.024383544921875, 0.034698486328125, -0.014801025390625, -0.035675048828125, -0.01178741455078125, 0.00745391845703125, -0.03955078125, 0.078125, 0.025970458984375, -0.06866455078125, 0.0225982666015625, -0.052886962890625, -0.00762939453125, -0.0086669921875, -0.001979827880859375, -0.05267333984375, -0.0161895751953125, 0.0292816162109375, 0.033599853515625, -0.0258026123046875, 0.0189056396484375, -0.0222015380859375, -0.034942626953125, 0.01776123046875, -0.04583740234375, 0.075927734375, 0.0228271484375, -0.05419921875, 0.0156097412109375, -0.05120849609375, -0.00952911376953125, 0.0191497802734375, -0.0255126953125, 0.02496337890625, -0.0198974609375, 0.0025882720947265625, 0.0278472900390625, 0.00830078125, -0.047210693359375, 0.0077056884765625, -0.03692626953125, 0.040802001953125, 0.05657958984375, -0.0026264190673828125, 0.0226898193359375, -0.03558349609375, 0.0233612060546875, 0.0240936279296875, 0.03533935546875, -0.013519287109375, -0.057952880859375, -0.073486328125, -0.029449462890625, 0.0222015380859375, 0.033721923828125, -0.0665283203125, 0.0357666015625, -0.01486968994140625, -0.051788330078125, -0.049072265625, -0.008941650390625, 0.030792236328125, 
0.03729248046875, 0.037109375, -0.0283966064453125, -0.0489501953125, -0.06085205078125, 0.0003581047058105469, -0.0040130615234375, -0.0002409219741821289, 0.02581787109375, 0.04425048828125, -0.024169921875, 0.06903076171875, -0.022125244140625, 0.0020236968994140625, -0.0232696533203125, 0.017852783203125, 0.0377197265625, 0.04632568359375, 0.04461669921875, -0.048004150390625, -0.05194091796875, -0.01334381103515625, -0.0498046875, 0.004383087158203125, -0.005889892578125, -0.0189666748046875, 0.01294708251953125, 0.01508331298828125, -0.07122802734375, 0.04315185546875, 0.046051025390625, -0.03656005859375, 0.039581298828125, -0.004146575927734375, 0.006488800048828125, -0.1044921875, 0.00640869140625, -0.00524139404296875, -0.01445770263671875, -0.04266357421875, -0.0190887451171875, 0.003536224365234375, -0.00916290283203125, -0.056915283203125, 0.042022705078125, -0.0284881591796875, 0.0038433074951171875, -0.0189208984375, -0.01214599609375, -0.0061798095703125, 0.060394287109375, 0.0098419189453125, 0.056121826171875, 0.04052734375, -0.033538818359375, 0.03936767578125, 0.0233917236328125, -0.0210113525390625, 0.0175323486328125, -0.0528564453125, 0.011322021484375, 0.01401519775390625, 0.02435302734375, -0.06951904296875, -0.0100250244140625, 0.038787841796875, -0.045654296875, 0.0175628662109375, -0.0251922607421875, -0.033111572265625, -0.043670654296875, -0.00849151611328125, 0.028472900390625, 0.05224609375, -0.053924560546875, 0.04156494140625, 0.003814697265625, 0.0137939453125, -0.059844970703125, -0.04302978515625, -0.00463104248046875, -0.020355224609375, -0.06158447265625, 0.027008056640625, -0.0045928955078125, 0.0115814208984375, -0.010467529296875, -0.006622314453125, 0.01177215576171875, -0.01107025146484375, 0.02984619140625, 0.0251617431640625, -0.01788330078125, -0.0173492431640625, -0.01557159423828125, -0.01605224609375, 0.006542205810546875, -0.021728515625, 0.07757568359375, -0.0280303955078125, -0.0194091796875, -0.053619384765625, 
0.008270263671875, 0.041961669921875, -0.0109100341796875, 0.085693359375, 0.08404541015625, -0.01132965087890625, 0.0021724700927734375, -0.04681396484375, -0.024658203125, -0.039154052734375, 0.0301513671875, -0.01485443115234375, -0.052154541015625, 0.05120849609375, 0.0177154541015625, -0.00228118896484375, 0.05181884765625, 0.062286376953125, 0.000583648681640625, 0.07391357421875, 0.03302001953125, 0.00789642333984375, 0.04827880859375, -0.051910400390625, 0.0028400421142578125, -0.06475830078125, -0.020782470703125, -0.01393890380859375, -0.0201263427734375, -0.051422119140625, -0.040679931640625, 0.01861572265625, -0.00839996337890625, -0.04766845703125, 0.050811767578125, -0.040802001953125, 0.0258026123046875, 0.057281494140625, 0.0262451171875, 0.00496673583984375, -0.009552001953125, -0.017120361328125, 0.01061248779296875, -0.06488037109375, -0.0290069580078125, 0.09173583984375, 0.03363037109375, 0.04498291015625, -0.00217437744140625, 0.053253173828125, -0.00818634033203125, 0.033203125, -0.02740478515625, 0.035125732421875, 0.0157928466796875, -0.0576171875, -0.007640838623046875, -0.0572509765625, -0.061737060546875, 0.01568603515625, -0.0162200927734375, -0.056060791015625, 0.0240325927734375, 0.01007843017578125, -0.032196044921875, 0.040069580078125, -0.07159423828125, 0.08203125, -0.00974273681640625, -0.0266876220703125, 0.00576019287109375, -0.06024169921875, 0.0284576416015625, 0.01178741455078125, -0.0055999755859375, -0.00957489013671875, 0.0221710205078125, 0.06365966796875, -0.036468505859375, 0.06866455078125, -0.01462554931640625, 0.0146484375, 0.0303497314453125, -0.00838470458984375, 0.037933349609375, 0.00428009033203125, 0.00566864013671875, 0.0201568603515625, 0.005077362060546875, -0.03302001953125, -0.02728271484375, 0.0411376953125, -0.08990478515625, -0.0367431640625, -0.035400390625, -0.04632568359375, 0.002285003662109375, 0.0123748779296875, 0.050201416015625, 0.02447509765625, 0.00485992431640625, 0.026947021484375, 
0.04168701171875, -0.037109375, 0.051971435546875, 0.0158843994140625, -0.00923919677734375, -0.042633056640625, 0.06585693359375, -0.0051116943359375, 0.03802490234375, 0.00933837890625, 0.01329803466796875, -0.02178955078125, -0.02874755859375, -0.03167724609375, 0.0282135009765625, -0.041229248046875, -0.0254669189453125, -0.0533447265625, -0.03955078125, -0.03753662109375, 0.01171875, -0.04638671875, -0.0250701904296875, -0.0308837890625, -0.0011854171752929688, 0.028656005859375, 0.035125732421875, -0.002017974853515625, 0.05206298828125, -0.053558349609375, 0.018768310546875, 0.0230712890625, 0.029632568359375, 0.0017614364624023438, -0.05950927734375, -0.02227783203125, 0.0208282470703125, -0.044952392578125, -0.06689453125, 0.04327392578125, 0.0022525787353515625, 0.03631591796875, 0.02239990234375, -0.005466461181640625, 0.04669189453125, -0.0195465087890625, 0.06353759765625, 0.027679443359375, -0.064208984375, 0.024658203125, -0.035308837890625, 0.028900146484375, 0.010833740234375, 0.0355224609375, -0.041229248046875, -0.0239410400390625, -0.07275390625, -0.05694580078125, 0.07696533203125, 0.051025390625, 0.01092529296875, -0.003204345703125, 0.021759033203125, -0.00658416748046875, 0.01123809814453125, -0.08984375, -0.0245513916015625, -0.04193115234375, -0.0210113525390625, -0.0011205673217773438, -0.0099029541015625, -0.0096588134765625, -0.037322998046875, 0.0599365234375, 0.0036525726318359375, 0.057647705078125, 0.01245880126953125, -0.01102447509765625, -0.0021953582763671875, -0.00249481201171875, 0.03533935546875, 0.04638671875, -0.02569580078125, -0.002712249755859375, 0.0178375244140625, -0.04949951171875, -0.00042819976806640625, 0.0148162841796875, 0.000720977783203125, -0.00786590576171875, 0.020263671875, 0.08740234375, 0.000040471553802490234, -0.0181427001953125, 0.04803466796875, -0.01380157470703125, -0.01690673828125, -0.0097503662109375, 0.01285552978515625, 0.02972412109375, 0.03936767578125, 0.0153961181640625, 
0.00528717041015625, -0.0196990966796875, -0.036865234375, 0.01776123046875, 0.016326904296875, -0.01528167724609375, -0.022857666015625, 0.06488037109375, 0.0093231201171875, -0.0201263427734375, 0.0626220703125, -0.006671905517578125, -0.0357666015625, 0.05303955078125, 0.050201416015625, 0.06024169921875, -0.0205230712890625, 0.01824951171875, 0.03594970703125, 0.02313232421875, -0.0023345947265625, 0.00534820556640625, -0.0007534027099609375, -0.056915283203125, -0.026611328125, -0.054473876953125, -0.017425537109375, 0.00881195068359375, -0.0330810546875, 0.03155517578125, -0.02874755859375, -0.02740478515625, -0.00943756103515625, 0.00366973876953125, -0.054656982421875, 0.01322174072265625, 0.0218658447265625, 0.061737060546875, -0.059661865234375, 0.072998046875, 0.0172119140625, -0.042327880859375, -0.0760498046875, -0.0210113525390625, -0.00971221923828125, -0.06890869140625, 0.0281982421875, 0.0196380615234375, 0.017120361328125, 0.0083770751953125, -0.04852294921875, -0.0633544921875, 0.10638427734375, 0.040802001953125, -0.0266876220703125, -0.0202178955078125, 0.03302001953125, 0.037933349609375, -0.02545166015625, 0.052886962890625, 0.042510986328125, 0.021697998046875, 0.030242919921875, -0.07098388671875, 0.00611114501953125, -0.0264434814453125, -0.0002205371856689453, 0.0018320083618164062, -0.06494140625, 0.08380126953125, -0.00789642333984375, -0.006488800048828125, 0.00609588623046875, 0.055328369140625, 0.0239410400390625, 0.00804901123046875, 0.02581787109375, 0.053863525390625, 0.031768798828125, -0.0240325927734375, 0.0838623046875, -0.0278167724609375, 0.05108642578125, 0.07080078125, 0.022918701171875, 0.03887939453125, 0.0238494873046875, -0.01348114013671875, 0.032562255859375, 0.06439208984375, -0.0157928466796875, 0.0280609130859375, -0.006259918212890625, -0.01165771484375, -0.01641845703125, 0.0167999267578125, -0.041717529296875, 0.02874755859375, 0.01739501953125, -0.04742431640625, -0.00872039794921875, 0.00720977783203125, 
0.0169830322265625, -0.04327392578125, -0.0013408660888671875, 0.047027587890625, 0.0131988525390625, -0.04034423828125, 0.06280517578125, -0.0018291473388671875, 0.051605224609375, -0.039886474609375, 0.01125335693359375, -0.0225067138671875, 0.016143798828125, -0.01486968994140625, -0.051788330078125, 0.0169525146484375, -0.00920867919921875, 0.005767822265625, -0.01187896728515625, 0.027496337890625, -0.028900146484375, -0.0283966064453125, 0.01898193359375, 0.0264129638671875, 0.008087158203125, -0.01361083984375, -0.07135009765625, 0.0023651123046875, 0.0088958740234375, -0.037750244140625, 0.019866943359375, 0.02325439453125, 0.023223876953125, 0.047821044921875, 0.05377197265625, -0.006961822509765625, 0.0220947265625, -0.005847930908203125, 0.074462890625, -0.05035400390625, -0.028106689453125, -0.074951171875, 0.0445556640625, -0.0090484619140625, -0.0296478271484375, 0.061920166015625, 0.044219970703125, 0.0633544921875, -0.000995635986328125, 0.0338134765625, -0.0180511474609375, 0.01446533203125, -0.02825927734375, 0.06158447265625, -0.024200439453125, 0.0184173583984375, -0.032379150390625, -0.086669921875, -0.00588226318359375, 0.043914794921875, -0.02801513671875, 0.018585205078125, 0.05072021484375, 0.07012939453125, -0.01898193359375, 0.00914764404296875, 0.01433563232421875, 0.030731201171875, 0.0293731689453125, 0.0574951171875, 0.061065673828125, -0.0513916015625, 0.056610107421875, -0.043212890625, -0.0114898681640625, -0.007389068603515625, -0.05169677734375, -0.082763671875, -0.0435791015625, -0.0164947509765625, -0.04547119140625, -0.005962371826171875, 0.0762939453125, 0.0650634765625, -0.049285888671875, -0.0261688232421875, -0.00553131103515625, -0.0040130615234375, -0.006885528564453125, -0.016632080078125, 0.03497314453125, -0.002704620361328125, -0.055267333984375, 0.0033721923828125, 0.003936767578125, 0.02203369140625, -0.00318145751953125, -0.01175689697265625, -0.0298309326171875, 0.004055023193359375, 0.0330810546875, 
0.01523590087890625, -0.040618896484375, -0.0222625732421875, 0.003326416015625, -0.008636474609375, 0.034698486328125, 0.02764892578125, -0.04400634765625, 0.02069091796875, 0.0247344970703125, 0.0219268798828125, 0.0750732421875, -0.0016574859619140625, 0.0379638671875, -0.047882080078125, 0.017791748046875, 0.0138397216796875, 0.032928466796875, 0.023895263671875, -0.02178955078125, 0.03167724609375, 0.0391845703125, -0.0439453125, -0.05377197265625, -0.0024700164794921875, -0.0809326171875, -0.003124237060546875, 0.0948486328125, -0.011962890625, -0.036376953125, 0.0165863037109375, -0.0266876220703125, 0.0501708984375, -0.0200653076171875, 0.051177978515625, 0.041717529296875, -0.01012420654296875, -0.032989501953125, -0.0264739990234375, 0.041015625, 0.0205230712890625, -0.05108642578125, -0.004703521728515625, 0.01012420654296875, 0.039886474609375, 0.01064300537109375, 0.03668212890625, -0.011016845703125, 0.0250701904296875, 0.005889892578125, 0.0184783935546875, -0.0181884765625, -0.0142974853515625, -0.0022068023681640625, 0.003875732421875, -0.024658203125, -0.01214599609375 ] ]
Yntec/epiCRealismVAE
2023-10-20T22:24:31.000Z
[ "diffusers", "Photorealistic", "Analog", "Portrait", "stable-diffusion", "stable-diffusion-diffusers", "text-to-image", "epinikion", "license:creativeml-openrail-m", "endpoints_compatible", "has_space", "diffusers:StableDiffusionPipeline", "region:us" ]
text-to-image
Yntec
null
null
Yntec/epiCRealismVAE
2
8,319
diffusers
2023-08-16T18:26:44
--- license: creativeml-openrail-m library_name: diffusers pipeline_tag: text-to-image tags: - Photorealistic - Analog - Portrait - stable-diffusion - stable-diffusion-diffusers - diffusers - text-to-image - epinikion --- # Epic Realism VAE This model with the MoistV2VAE baked in. Samples and prompt: ![Sample](https://cdn-uploads.huggingface.co/production/uploads/63239b8370edc53f51cd5d42/9DqvlZsBoRT91lkgGcGIy.png) (Click for larger) ![Sample](https://cdn-uploads.huggingface.co/production/uploads/63239b8370edc53f51cd5d42/7WNKKuZtUFbcx8C1-g2_u.png) photo very cute princess with curly ponytail wearing choker who would marry me. fashion skirt. Original pages: https://civitai.com/models/25694?modelVersionId=30761 https://huggingface.co/Yntec/Dreamscapes_n_Dragonfire_v2/resolve/main/MoistMixV2.vae.pt
815
[ [ -0.01105499267578125, -0.05487060546875, 0.00910186767578125, 0.024505615234375, -0.01224517822265625, -0.010955810546875, 0.0291290283203125, -0.01238250732421875, 0.042022705078125, 0.070068359375, -0.046478271484375, -0.020355224609375, -0.0255126953125, -0.0229339599609375, -0.03155517578125, 0.0244293212890625, 0.00560760498046875, -0.00594329833984375, -0.0203399658203125, 0.0246734619140625, -0.021453857421875, -0.021759033203125, -0.0287933349609375, -0.0017795562744140625, 0.02191162109375, 0.0550537109375, 0.052001953125, 0.0309600830078125, 0.01369476318359375, 0.0211029052734375, -0.015228271484375, -0.0016460418701171875, -0.050689697265625, 0.0167236328125, -0.0072021484375, -0.036651611328125, -0.0399169921875, 0.0157318115234375, 0.0284881591796875, 0.0035037994384765625, -0.04290771484375, 0.0085296630859375, -0.0017004013061523438, 0.04327392578125, -0.051971435546875, -0.006565093994140625, -0.019500732421875, 0.0037899017333984375, -0.01406097412109375, -0.0014429092407226562, -0.01427459716796875, -0.0186767578125, 0.0145111083984375, -0.056365966796875, 0.0133514404296875, -0.0113525390625, 0.09234619140625, -0.012542724609375, -0.058929443359375, 0.0092010498046875, -0.07147216796875, 0.0293731689453125, -0.0455322265625, 0.041046142578125, 0.0193328857421875, 0.048187255859375, -0.034759521484375, -0.10076904296875, -0.046630859375, 0.00727081298828125, 0.0095977783203125, 0.0276947021484375, -0.0251007080078125, -0.0237274169921875, 0.0236663818359375, 0.022308349609375, -0.0496826171875, -0.0016937255859375, -0.025482177734375, 0.02142333984375, 0.037506103515625, 0.007541656494140625, 0.039520263671875, -0.00551605224609375, -0.0394287109375, -0.016571044921875, -0.042205810546875, 0.0176849365234375, 0.01824951171875, -0.02203369140625, -0.024810791015625, 0.0550537109375, 0.01280975341796875, 0.031585693359375, 0.017669677734375, -0.026702880859375, -0.0085296630859375, -0.00801849365234375, -0.0294647216796875, -0.019561767578125, 
0.041778564453125, 0.055450439453125, 0.023162841796875, 0.022003173828125, 0.005702972412109375, -0.01922607421875, 0.031005859375, -0.11285400390625, -0.0223846435546875, 0.007312774658203125, -0.032073974609375, -0.046417236328125, 0.006130218505859375, -0.06500244140625, -0.004638671875, -0.00667572021484375, 0.01213836669921875, -0.0230255126953125, -0.04205322265625, 0.019195556640625, -0.01110076904296875, 0.026123046875, 0.008453369140625, -0.06591796875, 0.01261138916015625, 0.034332275390625, 0.046630859375, 0.056488037109375, 0.0236663818359375, 0.00354766845703125, 0.0168914794921875, -0.049591064453125, 0.058441162109375, -0.0255889892578125, -0.04364013671875, -0.01568603515625, 0.0150299072265625, 0.003009796142578125, -0.048858642578125, 0.08538818359375, -0.04241943359375, 0.00373077392578125, -0.0333251953125, -0.03759765625, -0.0264129638671875, -0.0149993896484375, -0.039703369140625, 0.041351318359375, 0.0176239013671875, -0.062255859375, 0.029296875, 0.0009136199951171875, 0.017791748046875, 0.016448974609375, -0.01444244384765625, -0.051513671875, 0.003078460693359375, 0.004528045654296875, 0.040679931640625, -0.034576416015625, -0.0039215087890625, -0.0595703125, -0.0129547119140625, 0.0153656005859375, -0.008453369140625, 0.03778076171875, 0.015655517578125, -0.0357666015625, -0.0106048583984375, -0.051239013671875, 0.01873779296875, 0.040618896484375, 0.0247039794921875, -0.0251617431640625, -0.0007920265197753906, 0.04327392578125, 0.0293121337890625, -0.006114959716796875, -0.0217437744140625, 0.033538818359375, -0.0357666015625, 0.0274505615234375, 0.03143310546875, 0.033905029296875, 0.0129241943359375, -0.0430908203125, 0.053466796875, -0.005443572998046875, 0.0156097412109375, 0.026275634765625, -0.0164947509765625, -0.061187744140625, -0.047088623046875, 0.03515625, 0.036865234375, -0.049041748046875, 0.034637451171875, 0.0017337799072265625, -0.08575439453125, -0.0278778076171875, -0.0247344970703125, 0.01168060302734375, 
0.031890869140625, -0.0148468017578125, -0.029815673828125, -0.04443359375, -0.09954833984375, -0.005184173583984375, -0.017974853515625, -0.033660888671875, 0.0288848876953125, 0.030029296875, -0.01256561279296875, 0.050140380859375, -0.044097900390625, -0.0007643699645996094, 0.0019931793212890625, -0.000042378902435302734, 0.032867431640625, 0.04443359375, 0.08123779296875, -0.08209228515625, -0.03045654296875, -0.00783538818359375, -0.060760498046875, -0.0005116462707519531, 0.00237274169921875, -0.01953125, -0.039947509765625, 0.0096893310546875, -0.034576416015625, 0.043212890625, 0.015380859375, -0.05670166015625, 0.0738525390625, -0.0244598388671875, 0.06854248046875, -0.08746337890625, 0.00215911865234375, 0.0262451171875, -0.0014171600341796875, -0.04443359375, 0.0513916015625, 0.01088714599609375, -0.002941131591796875, -0.0592041015625, 0.05438232421875, -0.029205322265625, 0.0261077880859375, -0.0230255126953125, -0.0183868408203125, 0.018096923828125, -0.00995635986328125, 0.00507354736328125, 0.043701171875, 0.05078125, -0.029693603515625, 0.04833984375, 0.030975341796875, -0.0212554931640625, 0.06414794921875, -0.0772705078125, 0.0006418228149414062, 0.0047760009765625, 0.0148162841796875, -0.0440673828125, -0.06427001953125, 0.01345062255859375, -0.043212890625, 0.004337310791015625, -0.029266357421875, -0.071044921875, -0.0260009765625, -0.02716064453125, 0.0261383056640625, 0.0643310546875, -0.0389404296875, 0.030731201171875, 0.0170745849609375, 0.0012235641479492188, -0.007122039794921875, -0.0517578125, 0.003978729248046875, -0.043914794921875, -0.08441162109375, 0.048492431640625, -0.01335906982421875, -0.0284271240234375, -0.0091552734375, -0.001800537109375, -0.0526123046875, -0.0158538818359375, 0.036407470703125, 0.03607177734375, -0.0278778076171875, -0.042633056640625, 0.01476287841796875, 0.01546478271484375, 0.007396697998046875, -0.0012912750244140625, 0.0139007568359375, -0.018798828125, -0.0168304443359375, -0.055450439453125, 
0.026031494140625, 0.07855224609375, -0.0031147003173828125, 0.072998046875, 0.023956298828125, -0.06414794921875, -0.0087738037109375, -0.04644775390625, -0.0113525390625, -0.033233642578125, 0.0033245086669921875, -0.03631591796875, -0.0316162109375, 0.03863525390625, 0.005031585693359375, -0.029510498046875, 0.040863037109375, 0.03839111328125, -0.006191253662109375, 0.06976318359375, 0.03839111328125, 0.01934814453125, -0.007335662841796875, -0.043548583984375, 0.01213836669921875, -0.03857421875, -0.03411865234375, -0.01947021484375, -0.01181793212890625, -0.0450439453125, -0.045623779296875, 0.04449462890625, 0.06640625, -0.030426025390625, 0.051116943359375, -0.02581787109375, 0.03424072265625, 0.01468658447265625, 0.033660888671875, -0.005828857421875, -0.005611419677734375, -0.0024471282958984375, -0.0614013671875, -0.03741455078125, -0.0282135009765625, 0.01302337646484375, 0.0243072509765625, 0.04412841796875, 0.0277862548828125, 0.031829833984375, -0.0114593505859375, -0.030853271484375, -0.045166015625, 0.0355224609375, 0.0002880096435546875, -0.048980712890625, 0.0164031982421875, -0.00904083251953125, -0.04595947265625, 0.01496124267578125, -0.03173828125, -0.0352783203125, 0.024658203125, 0.0088653564453125, -0.03289794921875, 0.0305633544921875, -0.044921875, 0.0697021484375, 0.0026187896728515625, -0.04498291015625, -0.0085906982421875, -0.027740478515625, 0.050140380859375, 0.019287109375, -0.0006704330444335938, 0.006378173828125, -0.012542724609375, 0.038330078125, -0.02899169921875, 0.03857421875, 0.01195526123046875, 0.01374053955078125, 0.0269622802734375, 0.025360107421875, 0.03485107421875, 0.025482177734375, -0.00911712646484375, -0.032562255859375, 0.006107330322265625, -0.044677734375, -0.055999755859375, 0.07275390625, -0.03900146484375, -0.01012420654296875, -0.051483154296875, 0.0027751922607421875, 0.00775909423828125, -0.00750732421875, 0.047698974609375, 0.048858642578125, -0.032623291015625, 0.0171661376953125, 0.06561279296875, 
0.0013284683227539062, 0.01439666748046875, 0.03729248046875, -0.0236358642578125, -0.023223876953125, 0.053070068359375, 0.0289154052734375, 0.0152587890625, 0.00482940673828125, 0.0236968994140625, -0.005504608154296875, -0.006381988525390625, -0.0301666259765625, 0.0300140380859375, -0.030487060546875, -0.004863739013671875, -0.032135009765625, -0.055633544921875, -0.0352783203125, -0.044769287109375, -0.0294342041015625, -0.0175628662109375, -0.047119140625, -0.0109710693359375, 0.053558349609375, 0.052764892578125, -0.0015211105346679688, 0.0013990402221679688, -0.050872802734375, 0.0206451416015625, 0.050506591796875, -0.0005125999450683594, -0.0017948150634765625, -0.058929443359375, 0.0262451171875, -0.0184326171875, -0.0212554931640625, -0.055999755859375, 0.054779052734375, 0.013763427734375, 0.011444091796875, 0.038330078125, 0.01024627685546875, 0.06536865234375, -0.0394287109375, 0.060546875, 0.01108551025390625, -0.0360107421875, 0.038604736328125, -0.04833984375, 0.0007047653198242188, 0.0302886962890625, 0.036346435546875, -0.05413818359375, -0.00907135009765625, -0.08526611328125, -0.07562255859375, 0.00623321533203125, 0.03155517578125, 0.0027866363525390625, 0.0243072509765625, 0.053985595703125, -0.014892578125, 0.01404571533203125, -0.03009033203125, -0.043060302734375, 0.001773834228515625, 0.028228759765625, 0.0357666015625, -0.046417236328125, 0.01016998291015625, -0.042236328125, 0.0516357421875, 0.0157470703125, 0.011383056640625, 0.00884246826171875, 0.05255126953125, -0.002193450927734375, 0.00962066650390625, 0.05047607421875, 0.04644775390625, -0.028717041015625, -0.0112152099609375, -0.007144927978515625, -0.017730712890625, 0.015838623046875, -0.0094451904296875, -0.011383056640625, 0.001697540283203125, 0.037109375, 0.05975341796875, 0.040130615234375, -0.0284881591796875, 0.065185546875, -0.031036376953125, -0.0023403167724609375, -0.05059814453125, 0.017822265625, 0.041351318359375, 0.032012939453125, -0.0026683807373046875, 
0.0286407470703125, 0.03179931640625, -0.03167724609375, 0.035736083984375, 0.0208587646484375, -0.043365478515625, -0.036224365234375, 0.07586669921875, -0.007129669189453125, -0.0213775634765625, 0.02783203125, -0.00522613525390625, -0.032470703125, 0.057891845703125, 0.042327880859375, 0.0576171875, -0.040252685546875, 0.0280303955078125, 0.043487548828125, -0.0084228515625, -0.00644683837890625, 0.0253143310546875, 0.0246734619140625, 0.002841949462890625, 0.0106048583984375, -0.052154541015625, -0.009246826171875, 0.02142333984375, -0.041839599609375, 0.07403564453125, -0.033599853515625, 0.033905029296875, 0.003528594970703125, -0.0272064208984375, -0.028472900390625, 0.03546142578125, -0.00341796875, 0.09478759765625, -0.07452392578125, 0.070556640625, 0.012603759765625, -0.0287322998046875, -0.08856201171875, 0.01462554931640625, 0.02850341796875, -0.04144287109375, 0.01427459716796875, 0.0126953125, 0.016876220703125, -0.01546478271484375, -0.047271728515625, -0.04632568359375, 0.0718994140625, 0.04364013671875, -0.05181884765625, -0.0321044921875, -0.0323486328125, 0.05133056640625, -0.026885986328125, 0.0438232421875, 0.05267333984375, 0.0283203125, 0.07568359375, -0.06549072265625, -0.033447265625, -0.041351318359375, 0.026947021484375, -0.002010345458984375, -0.056060791015625, 0.048492431640625, -0.0142974853515625, -0.005672454833984375, 0.040191650390625, 0.07098388671875, 0.02197265625, -5.364418029785156e-7, 0.03753662109375, 0.0650634765625, -0.005886077880859375, 0.0026340484619140625, 0.0626220703125, 0.0231475830078125, 0.032470703125, 0.0645751953125, -0.0196685791015625, 0.07623291015625, -0.00930023193359375, -0.019744873046875, 0.0228424072265625, 0.07952880859375, -0.0007452964782714844, 0.0504150390625, 0.0297393798828125, -0.05438232421875, -0.0203857421875, -0.03143310546875, -0.056121826171875, 0.032745361328125, 0.004962921142578125, -0.00197601318359375, -0.0076751708984375, 0.0119781494140625, 0.01424407958984375, 
0.029571533203125, -0.01023101806640625, 0.0400390625, 0.012725830078125, -0.0126800537109375, 0.05133056640625, 0.00218963623046875, 0.03741455078125, -0.04534912109375, -0.0276336669921875, 0.0006799697875976562, 0.00010597705841064453, -0.0303802490234375, -0.0693359375, 0.0159759521484375, -0.007160186767578125, -0.0274505615234375, -0.0114898681640625, 0.05133056640625, -0.035400390625, -0.0780029296875, 0.023162841796875, -0.002422332763671875, 0.01019287109375, -0.005588531494140625, -0.05255126953125, 0.000579833984375, -0.01371002197265625, -0.0262451171875, 0.01458740234375, 0.038360595703125, 0.0195770263671875, 0.030731201171875, 0.00995635986328125, 0.00563812255859375, -0.00640869140625, -0.0134735107421875, 0.06085205078125, -0.0308074951171875, -0.0335693359375, -0.051483154296875, 0.042633056640625, -0.0047149658203125, -0.017120361328125, 0.0496826171875, 0.053558349609375, 0.0294647216796875, -0.021820068359375, 0.029510498046875, -0.02587890625, 0.03692626953125, -0.03143310546875, 0.07281494140625, -0.0711669921875, -0.01080322265625, -0.030364990234375, -0.044830322265625, 0.0025882720947265625, 0.0218505859375, 0.032623291015625, -0.0047760009765625, 0.014984130859375, 0.06781005859375, -0.0247344970703125, -0.00528717041015625, 0.02191162109375, 0.0307769775390625, 0.0233154296875, 0.01529693603515625, 0.03692626953125, -0.04302978515625, -0.0197296142578125, -0.04510498046875, -0.040557861328125, -0.02667236328125, -0.06500244140625, -0.052276611328125, -0.06500244140625, -0.016265869140625, -0.058746337890625, -0.0087432861328125, 0.08221435546875, 0.07440185546875, -0.04766845703125, -0.01678466796875, 0.0193328857421875, -0.0126495361328125, -0.00032591819763183594, -0.0164337158203125, -0.0001863241195678711, 0.05682373046875, -0.04766845703125, 0.033599853515625, 0.00876617431640625, 0.040435791015625, -0.005107879638671875, 0.0193634033203125, -0.021240234375, 0.0167083740234375, 0.023956298828125, 0.0207672119140625, -0.0546875, 
-0.0251312255859375, -0.01200103759765625, -0.0073089599609375, 0.00009721517562866211, 0.03271484375, -0.029205322265625, 0.03021240234375, 0.0391845703125, 0.01910400390625, 0.0399169921875, -0.01435089111328125, 0.0406494140625, -0.032196044921875, 0.026214599609375, 0.03045654296875, 0.018646240234375, 0.01293182373046875, -0.043609619140625, 0.02923583984375, 0.0469970703125, -0.064453125, -0.05615234375, 0.01422119140625, -0.0880126953125, -0.017181396484375, 0.047698974609375, 0.005268096923828125, -0.03485107421875, 0.03948974609375, -0.0206756591796875, 0.002780914306640625, -0.01336669921875, 0.025604248046875, 0.0517578125, -0.0034313201904296875, -0.007045745849609375, -0.065185546875, 0.0221405029296875, 0.00873565673828125, -0.05615234375, -0.00817108154296875, 0.021026611328125, 0.033355712890625, 0.00020241737365722656, 0.0251007080078125, -0.0026950836181640625, 0.039703369140625, 0.00714874267578125, 0.01873779296875, 0.0222015380859375, -0.0279083251953125, 0.00705718994140625, 0.01373291015625, 0.0090484619140625, -0.004627227783203125 ] ]
persiannlp/mt5-small-parsinlu-opus-translation_fa_en
2021-09-23T16:20:36.000Z
[ "transformers", "pytorch", "mt5", "text2text-generation", "machine-translation", "persian", "farsi", "fa", "multilingual", "dataset:parsinlu", "license:cc-by-nc-sa-4.0", "autotrain_compatible", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text2text-generation
persiannlp
null
null
persiannlp/mt5-small-parsinlu-opus-translation_fa_en
0
8,314
transformers
2022-03-02T23:29:05
--- language: - fa - multilingual thumbnail: https://upload.wikimedia.org/wikipedia/commons/a/a2/Farsi.svg tags: - machine-translation - mt5 - persian - farsi license: cc-by-nc-sa-4.0 datasets: - parsinlu metrics: - sacrebleu --- # Machine Translation (ترجمه‌ی ماشینی) This is an mT5-based model for machine translation (Persian -> English). Here is an example of how you can run this model: ```python from transformers import MT5ForConditionalGeneration, MT5Tokenizer model_size = "small" model_name = f"persiannlp/mt5-{model_size}-parsinlu-opus-translation_fa_en" tokenizer = MT5Tokenizer.from_pretrained(model_name) model = MT5ForConditionalGeneration.from_pretrained(model_name) def run_model(input_string, **generator_args): input_ids = tokenizer.encode(input_string, return_tensors="pt") res = model.generate(input_ids, **generator_args) output = tokenizer.batch_decode(res, skip_special_tokens=True) print(output) return output run_model("ستایش خدای را که پروردگار جهانیان است.") run_model("در هاید پارک کرنر بر گلدانی ایستاده موعظه می‌کند؛") run_model("وی از تمامی بلاگرها، سازمان‌ها و افرادی که از وی پشتیبانی کرده‌اند، تشکر کرد.") run_model("مشابه سال ۲۰۰۱، تولید آمونیاک بی آب در ایالات متحده در سال ۲۰۰۰ تقریباً ۱۷،۴۰۰،۰۰۰ تن (معادل بدون آب) با مصرف ظاهری ۲۲،۰۰۰،۰۰۰ تن و حدود ۴۶۰۰۰۰۰ با واردات خالص مواجه شد. ") run_model("می خواهم دکترای علوم کامپیوتر راجع به شبکه های اجتماعی را دنبال کنم، چالش حل نشده در شبکه های اجتماعی چیست؟") ``` For more details, visit this page: https://github.com/persiannlp/parsinlu/
1,559
[ [ -0.022308349609375, -0.048583984375, 0.0189666748046875, 0.019439697265625, -0.044189453125, 0.0017328262329101562, -0.0041046142578125, 0.0027599334716796875, -0.0022487640380859375, 0.049835205078125, -0.0469970703125, -0.046173095703125, -0.04608154296875, 0.0291748046875, -0.037445068359375, 0.08056640625, -0.0110321044921875, 0.03302001953125, 0.017242431640625, 0.0026073455810546875, -0.0275421142578125, -0.03228759765625, -0.037322998046875, -0.03192138671875, 0.01143646240234375, 0.020294189453125, 0.055328369140625, 0.037017822265625, 0.06494140625, 0.024871826171875, -0.0089569091796875, 0.0014705657958984375, -0.0074005126953125, 0.002044677734375, 0.0040435791015625, -0.0440673828125, -0.0241241455078125, -0.00403594970703125, 0.05474853515625, 0.035369873046875, -0.0220947265625, 0.031280517578125, -0.00380706787109375, 0.060302734375, -0.022064208984375, -0.0119781494140625, -0.007442474365234375, 0.01157379150390625, -0.024932861328125, -0.00937652587890625, -0.015869140625, -0.0202484130859375, -0.0362548828125, -0.033843994140625, 0.0035495758056640625, 0.0176849365234375, 0.08929443359375, 0.004924774169921875, -0.03521728515625, -0.0234222412109375, -0.06402587890625, 0.08856201171875, -0.0675048828125, 0.0128021240234375, 0.0190582275390625, 0.04052734375, -0.007465362548828125, -0.068115234375, -0.0606689453125, 0.0016527175903320312, -0.0223236083984375, 0.007358551025390625, -0.0064544677734375, -0.02081298828125, 0.0440673828125, 0.041900634765625, -0.04461669921875, -0.029632568359375, -0.049102783203125, 0.00162506103515625, 0.0205841064453125, 0.040679931640625, 0.047210693359375, -0.02203369140625, -0.040069580078125, -0.0197296142578125, -0.026947021484375, 0.0192413330078125, 0.01812744140625, 0.01349639892578125, -0.033935546875, 0.06243896484375, -0.0301666259765625, 0.06109619140625, 0.01666259765625, -0.020904541015625, 0.0264434814453125, -0.022247314453125, -0.04052734375, -0.004608154296875, 0.07061767578125, 
0.0223388671875, 0.0116119384765625, 0.023193359375, -0.0156097412109375, -0.0188140869140625, 0.0203857421875, -0.072265625, 0.004573822021484375, 0.0190887451171875, -0.06304931640625, -0.0467529296875, 0.01168060302734375, -0.054443359375, -0.0020999908447265625, -0.005695343017578125, 0.0284423828125, -0.039093017578125, -0.0187835693359375, -0.00734710693359375, 0.0029544830322265625, 0.034698486328125, 0.00753021240234375, -0.060028076171875, 0.0190887451171875, 0.033416748046875, 0.06524658203125, 0.0202178955078125, -0.0177764892578125, -0.005748748779296875, 0.0304107666015625, -0.03192138671875, 0.0283050537109375, -0.0251617431640625, -0.0350341796875, -0.014434814453125, 0.0168304443359375, -0.0257415771484375, -0.022918701171875, 0.06365966796875, -0.041229248046875, 0.04144287109375, 0.001918792724609375, -0.0251617431640625, -0.051422119140625, 0.01531982421875, -0.0489501953125, 0.0736083984375, -0.006946563720703125, -0.084716796875, -0.00345611572265625, -0.0799560546875, -0.032470703125, -0.004810333251953125, 0.0121002197265625, -0.0548095703125, 0.0162353515625, 0.0191497802734375, 0.037628173828125, -0.0249176025390625, 0.00885772705078125, 0.0065765380859375, -0.0213775634765625, 0.0296478271484375, -0.00623321533203125, 0.07940673828125, 0.040283203125, -0.0230712890625, 0.032501220703125, -0.0592041015625, 0.01273345947265625, 0.0223388671875, -0.01143646240234375, -0.0179443359375, -0.004558563232421875, 0.00966644287109375, 0.034942626953125, 0.0301971435546875, -0.0306243896484375, 0.019927978515625, -0.05084228515625, 0.028350830078125, 0.038360595703125, 0.00394439697265625, 0.019927978515625, -0.0386962890625, 0.0445556640625, 0.0203094482421875, 0.01058197021484375, -0.00058746337890625, -0.03033447265625, -0.06866455078125, -0.014862060546875, 0.0206756591796875, 0.05419921875, -0.05859375, 0.0169677734375, -0.00431060791015625, -0.055999755859375, -0.039703369140625, -0.0022068023681640625, 0.02545166015625, 0.02410888671875, 
0.0282745361328125, -0.0067138671875, -0.040191650390625, -0.057891845703125, -0.036224365234375, -0.00487518310546875, 0.0038299560546875, 0.0067138671875, 0.061920166015625, -0.0261383056640625, 0.019775390625, -0.0235137939453125, -0.0242919921875, -0.05303955078125, 0.003021240234375, 0.0723876953125, 0.05621337890625, 0.0240020751953125, -0.057891845703125, -0.0540771484375, 0.0172882080078125, -0.052886962890625, 0.004589080810546875, -0.01226806640625, -0.0246734619140625, 0.01409149169921875, 0.0129852294921875, -0.049346923828125, 0.03997802734375, 0.038482666015625, -0.042144775390625, 0.035186767578125, -0.0215606689453125, 0.0408935546875, -0.121337890625, 0.034759521484375, -0.04046630859375, -0.0234375, -0.042999267578125, 0.0244140625, -0.00039768218994140625, -0.0119171142578125, -0.03839111328125, 0.0357666015625, -0.03692626953125, 0.02069091796875, -0.0093994140625, -0.027099609375, -0.00888824462890625, 0.0270233154296875, -0.0154571533203125, 0.061248779296875, 0.0312347412109375, -0.0379638671875, 0.034423828125, 0.024932861328125, -0.0133209228515625, 0.0188751220703125, -0.064697265625, 0.0012216567993164062, 0.01363372802734375, -0.00580596923828125, -0.0648193359375, -0.0377197265625, 0.04449462890625, -0.04559326171875, 0.02301025390625, -0.003284454345703125, -0.050537109375, -0.02508544921875, -0.01474761962890625, 0.0190582275390625, 0.0517578125, -0.0267486572265625, 0.05108642578125, 0.00994110107421875, 0.00400543212890625, -0.0308837890625, -0.05853271484375, 0.01026153564453125, -0.031097412109375, -0.0379638671875, 0.01268768310546875, 0.00498199462890625, 0.0022563934326171875, -0.01291656494140625, 0.0182952880859375, -0.02142333984375, 0.01611328125, 0.032440185546875, 0.01202392578125, -0.01291656494140625, -0.032135009765625, 0.007598876953125, -0.0222320556640625, -0.0015211105346679688, 0.003978729248046875, 0.052001953125, -0.0184326171875, -0.029083251953125, -0.0609130859375, 0.0333251953125, 0.054473876953125, 
-0.0202484130859375, 0.07861328125, 0.06903076171875, -0.02557373046875, -0.002155303955078125, -0.01151275634765625, 0.0016031265258789062, -0.031585693359375, 0.0226898193359375, -0.046295166015625, -0.030364990234375, 0.03936767578125, -0.0213775634765625, -0.00647735595703125, 0.073974609375, 0.072265625, -0.01061248779296875, 0.09527587890625, 0.05157470703125, -0.00499725341796875, 0.00847625732421875, -0.032470703125, 0.0281524658203125, -0.06512451171875, -0.0217132568359375, -0.029296875, -0.01541900634765625, -0.0452880859375, -0.029632568359375, 0.0283660888671875, 0.0156097412109375, -0.0183258056640625, 0.034271240234375, -0.054229736328125, 0.0189361572265625, 0.041046142578125, -0.00550079345703125, 0.0203399658203125, -0.007175445556640625, -0.04339599609375, -0.01317596435546875, -0.04150390625, -0.059356689453125, 0.072509765625, 0.021575927734375, 0.037353515625, 0.022735595703125, 0.053619384765625, 0.01470947265625, 0.012786865234375, -0.025177001953125, 0.035675048828125, -0.0124664306640625, -0.06719970703125, -0.0107879638671875, -0.0153350830078125, -0.06268310546875, 0.025299072265625, -0.00196075439453125, -0.037750244140625, -0.00543212890625, -0.00885772705078125, -0.01451873779296875, 0.033294677734375, -0.032257080078125, 0.0836181640625, -0.0303955078125, -0.0169830322265625, -0.03033447265625, -0.051055908203125, 0.051239013671875, -0.000035762786865234375, 0.036834716796875, -0.0021076202392578125, 0.0008955001831054688, 0.0611572265625, -0.03887939453125, 0.046844482421875, -0.0004911422729492188, 0.0208587646484375, 0.014068603515625, 0.018890380859375, 0.0227203369140625, -0.0004470348358154297, -0.006134033203125, 0.023895263671875, 0.03131103515625, -0.039093017578125, -0.0163726806640625, 0.05914306640625, -0.060455322265625, -0.033966064453125, -0.06976318359375, -0.0303955078125, 0.031646728515625, 0.056884765625, 0.0209808349609375, 0.01995849609375, -0.0162200927734375, 0.0157928466796875, 0.01273345947265625, 
-0.038360595703125, 0.0284576416015625, 0.036651611328125, -0.036773681640625, -0.03997802734375, 0.05731201171875, -0.004302978515625, 0.00054931640625, 0.0187835693359375, 0.0100250244140625, -0.01129913330078125, -0.0265045166015625, -0.061492919921875, 0.0185394287109375, -0.032379150390625, -0.0128936767578125, -0.036712646484375, -0.0191802978515625, -0.0292510986328125, -0.0003883838653564453, -0.01300811767578125, -0.034423828125, -0.01390838623046875, 0.0072174072265625, 0.0230712890625, 0.048370361328125, -0.00862884521484375, 0.03472900390625, -0.0634765625, 0.03228759765625, -0.01806640625, 0.0016469955444335938, -0.01403045654296875, -0.04571533203125, -0.048675537109375, 0.00951385498046875, -0.043914794921875, -0.08050537109375, 0.0823974609375, 0.0124664306640625, 0.0282135009765625, 0.038970947265625, 0.0081634521484375, 0.049346923828125, -0.040863037109375, 0.06744384765625, 0.0035762786865234375, -0.06365966796875, 0.043182373046875, -0.035491943359375, 0.019256591796875, 0.027252197265625, 0.03271484375, -0.03936767578125, -0.01561737060546875, -0.04351806640625, -0.05615234375, 0.0645751953125, 0.033050537109375, 0.0145263671875, 0.0119476318359375, 0.011199951171875, -0.019012451171875, 0.018585205078125, -0.050445556640625, -0.04052734375, -0.015594482421875, -0.024749755859375, 0.00962066650390625, -0.0283660888671875, -0.01322174072265625, -0.034454345703125, 0.06787109375, 0.0274505615234375, 0.032562255859375, 0.01116180419921875, -0.00879669189453125, -0.00957489013671875, 0.0226287841796875, 0.05010986328125, 0.0211944580078125, -0.0133056640625, 0.00466156005859375, 0.0300750732421875, -0.03753662109375, 0.0030612945556640625, -0.0021305084228515625, -0.024688720703125, 0.01378631591796875, 0.035400390625, 0.05914306640625, -0.0039825439453125, -0.033538818359375, 0.03289794921875, -0.01149749755859375, 0.0002046823501586914, -0.051544189453125, -0.004871368408203125, 0.01361846923828125, 0.0116729736328125, 0.0144500732421875, 
0.006549835205078125, 0.0244293212890625, -0.02777099609375, 0.0028324127197265625, 0.019195556640625, -0.0213470458984375, -0.02069091796875, 0.06732177734375, -0.0013713836669921875, -0.023223876953125, 0.046417236328125, -0.003124237060546875, -0.07196044921875, 0.045257568359375, 0.03350830078125, 0.06951904296875, -0.036712646484375, -0.001949310302734375, 0.05859375, 0.0235137939453125, -0.0034351348876953125, 0.039398193359375, -0.0038661956787109375, -0.0186767578125, -0.02203369140625, -0.06854248046875, -0.017974853515625, -0.0006914138793945312, -0.05242919921875, 0.0208587646484375, -0.042327880859375, -0.026763916015625, -0.00862884521484375, 0.0163116455078125, -0.035888671875, 0.03192138671875, -0.006439208984375, 0.0496826171875, -0.0535888671875, 0.08447265625, 0.07220458984375, -0.042144775390625, -0.06494140625, -0.0053558349609375, -0.014373779296875, -0.047607421875, 0.06658935546875, 0.01441192626953125, 0.0054931640625, 0.0065460205078125, -0.03302001953125, -0.0731201171875, 0.07501220703125, -0.0171051025390625, -0.0302581787109375, 0.01271820068359375, 0.023834228515625, 0.0310516357421875, -0.025604248046875, 0.0296478271484375, 0.0167694091796875, 0.033538818359375, -0.0053558349609375, -0.06903076171875, 0.02716064453125, -0.045989990234375, 0.0102081298828125, 0.007251739501953125, -0.043182373046875, 0.08770751953125, -0.0197906494140625, -0.020416259765625, 0.03582763671875, 0.04730224609375, 0.0111846923828125, 0.0190277099609375, 0.02032470703125, 0.04803466796875, 0.0183258056640625, -0.004276275634765625, 0.07916259765625, -0.058074951171875, 0.0450439453125, 0.041900634765625, 0.0121002197265625, 0.053375244140625, 0.04644775390625, -0.02593994140625, 0.06719970703125, 0.040435791015625, -0.0308074951171875, 0.041351318359375, 0.0005354881286621094, -0.0118560791015625, -0.03436279296875, 0.0008754730224609375, -0.03717041015625, 0.0152740478515625, 0.016876220703125, -0.045806884765625, -0.0129547119140625, 
-0.00992584228515625, -0.00269317626953125, 0.003448486328125, -0.0017347335815429688, 0.0394287109375, 0.013153076171875, -0.033538818359375, 0.05810546875, 0.04254150390625, 0.027923583984375, -0.0399169921875, -0.00937652587890625, -0.00011873245239257812, 0.035552978515625, -0.023284912109375, -0.041778564453125, 0.047943115234375, 0.0206756591796875, -0.0214385986328125, -0.0156097412109375, 0.0528564453125, -0.0069580078125, -0.07843017578125, 0.004901885986328125, 0.0310516357421875, 0.0210418701171875, 0.007373809814453125, -0.07806396484375, -0.0026702880859375, 0.018035888671875, -0.041778564453125, 0.0148162841796875, 0.03076171875, -0.0099945068359375, 0.048492431640625, 0.0784912109375, 0.009674072265625, -0.0010652542114257812, -0.0135498046875, 0.0592041015625, -0.07470703125, -0.029296875, -0.07781982421875, 0.053955078125, -0.01055908203125, -0.0239105224609375, 0.0810546875, 0.06365966796875, 0.0460205078125, -0.031951904296875, 0.049102783203125, -0.0161285400390625, 0.069580078125, -0.028076171875, 0.06329345703125, -0.054962158203125, 0.01039886474609375, -0.019561767578125, -0.03851318359375, -0.0261993408203125, 0.07025146484375, -0.024261474609375, 0.002471923828125, 0.03997802734375, 0.05859375, -0.0033283233642578125, -0.019927978515625, 0.0224761962890625, 0.02935791015625, 0.002628326416015625, 0.0440673828125, 0.062255859375, -0.047882080078125, 0.028656005859375, -0.03729248046875, 0.0018930435180664062, -0.0160369873046875, -0.03887939453125, -0.08465576171875, -0.0302276611328125, -0.0029449462890625, -0.041748046875, -0.006351470947265625, 0.069580078125, 0.034515380859375, -0.08477783203125, -0.034759521484375, 0.0005345344543457031, 0.0196075439453125, -0.0255889892578125, -0.019775390625, 0.0282745361328125, -0.01067352294921875, -0.0684814453125, 0.003398895263671875, -0.003162384033203125, 0.005718231201171875, -0.001129150390625, -0.007965087890625, -0.0269317626953125, 0.00916290283203125, 0.0136260986328125, 
0.005855560302734375, -0.05377197265625, 0.01451873779296875, -0.00091552734375, -0.0302886962890625, -0.00342559814453125, 0.03289794921875, -0.031951904296875, 0.00994110107421875, 0.041290283203125, 0.01358795166015625, 0.042755126953125, -0.00008296966552734375, 0.041351318359375, -0.016754150390625, 0.0120086669921875, -0.0018739700317382812, 0.0167236328125, 0.0232086181640625, -0.0236968994140625, 0.031646728515625, 0.03717041015625, -0.03558349609375, -0.061767578125, 0.007419586181640625, -0.06805419921875, -0.0252685546875, 0.07464599609375, -0.01202392578125, -0.03497314453125, 0.01666259765625, -0.039794921875, 0.041168212890625, -0.01058197021484375, 0.039886474609375, 0.050323486328125, 0.00373077392578125, -0.0028324127197265625, -0.0640869140625, 0.0262603759765625, 0.0311431884765625, -0.02984619140625, -0.025390625, 0.00548553466796875, 0.0289764404296875, 0.01511383056640625, 0.0211944580078125, -0.01505279541015625, 0.01385498046875, -0.01207733154296875, 0.0261688232421875, -0.020477294921875, 0.01044464111328125, -0.0272369384765625, 0.000423431396484375, -0.0057373046875, -0.038665771484375 ] ]
ICBU-NPU/FashionGPT-70B-V1
2023-09-22T07:58:07.000Z
[ "transformers", "pytorch", "llama", "text-generation", "en", "dataset:ehartford/samantha-data", "dataset:Open-Orca/OpenOrca", "dataset:jondurbin/airoboros-gpt4-1.4.1", "arxiv:2306.02707", "license:llama2", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
ICBU-NPU
null
null
ICBU-NPU/FashionGPT-70B-V1
4
8,309
transformers
2023-09-17T11:09:47
--- license: llama2 datasets: - ehartford/samantha-data - Open-Orca/OpenOrca - jondurbin/airoboros-gpt4-1.4.1 language: - en --- # FashionGPT-V1 ### Introduction This is a llama-2-70B model combined with two adapters via appropriate methods. ### Dataset Here is the list of datasets used: * Orca-style 40K dataset. This dataset is a filtered subset of [OpenOrca-GPT4](<https://huggingface.co/datasets/Open-Orca/OpenOrca/blob/main/1M-GPT4-Augmented.parquet>) and [airoboros-gpt4-1.4.1](<https://huggingface.co/datasets/jondurbin/airoboros-gpt4-1.4.1>). * [Samantha](<https://huggingface.co/datasets/ehartford/samantha-data>) made by Eric Hartford and cleaned by us, about 6.5K samples. <br> ### Training * We train our adapters with [jondurbin's forked QLoRA repo](<https://github.com/jondurbin/qlora>) * We add multi-turn conversational data support from [fastchat repo](<https://github.com/lm-sys/FastChat/blob/main/fastchat/train/train.py>), with minor modifications. * We use bash shell script similar to [airoboros-70b-gpt4-1.4.1](<https://gist.github.com/jondurbin/87fc040b92a3073125ed516b04bc6e19>) to train our two adapters. * We found that combining multiple adapters to a single llama-2-70B could achieve better performance than merging only 1 adapter to llama-2-70B. The details of combining multiple adapters will be unveiled in our upcoming paper. <br> ### Prompt Template ``` ### System: {System} ### User: {User} ### Assistant: {Assistant} ``` <br> ### Evaluation | Metric | Value | |-----------------------|-------| | ARC (25-shot) | 71.08 | | HellaSwag (10-shot) | 87.32 | | MMLU (5-shot) | 70.70 | | TruthfulQA (0-shot) | 63.92 | | Avg. | 73.26 | <br> ### license disclaimer This model is bound by the license & usage restrictions of the original Llama-2 model. And comes with no warranty or gurantees of any kind. <br> ### Limitations & Biases Llama 2 and fine-tuned variants are a new technology that carries risks with use. 
Testing conducted to date has been in English, and has not covered, nor could it cover all scenarios. For these reasons, as with all LLMs, Llama 2 and any fine-tuned varient's potential outputs cannot be predicted in advance, and the model may in some instances produce inaccurate, biased or other objectionable responses to user prompts. Therefore, before deploying any applications of Llama 2 variants, developers should perform safety testing and tuning tailored to their specific applications of the model. Please see the Responsible Use Guide available at <https://ai.meta.com/llama/responsible-use-guide/> <br> ### Citiation: * airoboros: <https://github.com/jondurbin/airoboros> * samantha: <https://erichartford.com/meet-samantha> ```bibtex @misc{mukherjee2023orca, title={Orca: Progressive Learning from Complex Explanation Traces of GPT-4}, author={Subhabrata Mukherjee and Arindam Mitra and Ganesh Jawahar and Sahaj Agarwal and Hamid Palangi and Ahmed Awadallah}, year={2023}, eprint={2306.02707}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` ```bibtex @article{dettmers2023qlora, title={QLoRA: Efficient Finetuning of Quantized LLMs}, author={Dettmers, Tim and Pagnoni, Artidoro and Holtzman, Ari and Zettlemoyer, Luke}, journal={arXiv preprint arXiv:2305.14314}, year={2023} } ``` ```bibtex @software{touvron2023llama2, title={Llama 2: Open Foundation and Fine-Tuned Chat Models}, author={Hugo Touvron, Louis Martin, Kevin Stone, Peter Albert, Amjad Almahairi, Yasmine Babaei, Nikolay Bashlykov, Soumya Batra, Prajjwal Bhargava, Shruti Bhosale, Dan Bikel, Lukas Blecher, Cristian Canton Ferrer, Moya Chen, Guillem Cucurull, David Esiobu, Jude Fernandes, Jeremy Fu, Wenyin Fu, Brian Fuller, Cynthia Gao, Vedanuj Goswami, Naman Goyal, Anthony Hartshorn, Saghar Hosseini, Rui Hou, Hakan Inan, Marcin Kardas, Viktor Kerkez Madian Khabsa, Isabel Kloumann, Artem Korenev, Punit Singh Koura, Marie-Anne Lachaux, Thibaut Lavril, Jenya Lee, Diana Liskovich, Yinghai Lu, Yuning Mao, Xavier 
Martinet, Todor Mihaylov, Pushkar Mishra, Igor Molybog, Yixin Nie, Andrew Poulton, Jeremy Reizenstein, Rashi Rungta, Kalyan Saladi, Alan Schelten, Ruan Silva, Eric Michael Smith, Ranjan Subramanian, Xiaoqing Ellen Tan, Binh Tang, Ross Taylor, Adina Williams, Jian Xiang Kuan, Puxin Xu , Zheng Yan, Iliyan Zarov, Yuchen Zhang, Angela Fan, Melanie Kambadur, Sharan Narang, Aurelien Rodriguez, Robert Stojnic, Sergey Edunov, Thomas Scialom}, year={2023} } ```
4,528
[ [ -0.025115966796875, -0.0562744140625, 0.021026611328125, 0.0170745849609375, -0.03131103515625, 0.0012760162353515625, 0.00102996826171875, -0.044677734375, 0.0115966796875, 0.01654052734375, -0.050537109375, -0.035308837890625, -0.040435791015625, -0.015869140625, -0.01456451416015625, 0.07635498046875, -0.01055145263671875, -0.0311431884765625, 0.00809478759765625, -0.0157012939453125, -0.03765869140625, -0.0290069580078125, -0.0552978515625, -0.021820068359375, 0.0180816650390625, 0.02496337890625, 0.05047607421875, 0.0408935546875, 0.0347900390625, 0.023284912109375, -0.0248260498046875, 0.0260162353515625, -0.044281005859375, -0.0118560791015625, 0.01468658447265625, -0.040283203125, -0.0703125, -0.0010786056518554688, 0.0266265869140625, 0.007152557373046875, -0.031494140625, 0.0251312255859375, 0.011474609375, 0.035614013671875, -0.0270843505859375, 0.01375579833984375, -0.038299560546875, -0.001270294189453125, -0.025726318359375, -0.01291656494140625, 0.003017425537109375, -0.0230255126953125, 0.0011272430419921875, -0.0611572265625, -0.00811767578125, -0.0079345703125, 0.088134765625, 0.0235137939453125, -0.046875, -0.00974273681640625, -0.04278564453125, 0.054168701171875, -0.07415771484375, 0.0199737548828125, 0.005462646484375, 0.032989501953125, -0.031585693359375, -0.0703125, -0.055999755859375, 0.007266998291015625, -0.0027751922607421875, 0.0197601318359375, -0.034393310546875, -0.00142669677734375, 0.01052093505859375, 0.0207061767578125, -0.0435791015625, 0.019256591796875, -0.037139892578125, -0.01702880859375, 0.0411376953125, 0.0162811279296875, 0.0025043487548828125, -0.0006661415100097656, -0.0494384765625, -0.0078887939453125, -0.0631103515625, 0.020355224609375, 0.0281829833984375, 0.00018358230590820312, -0.04339599609375, 0.0352783203125, -0.01788330078125, 0.0394287109375, 0.0056304931640625, -0.0316162109375, 0.036956787109375, -0.050384521484375, -0.020538330078125, -0.0160675048828125, 0.0631103515625, 0.04364013671875, 
0.00612640380859375, 0.0223846435546875, -0.007373809814453125, 0.0022640228271484375, -0.010498046875, -0.074462890625, -0.0113983154296875, 0.02777099609375, -0.0293426513671875, -0.0279998779296875, -0.0158843994140625, -0.061492919921875, -0.013824462890625, -0.0119476318359375, 0.0243072509765625, -0.007781982421875, -0.03790283203125, 0.0200042724609375, 0.00673675537109375, 0.043609619140625, 0.0172271728515625, -0.054779052734375, 0.0223541259765625, 0.035980224609375, 0.06964111328125, 0.0025501251220703125, -0.014434814453125, -0.00882720947265625, 0.004062652587890625, -0.0244293212890625, 0.06378173828125, -0.01323699951171875, -0.026885986328125, -0.0211029052734375, 0.003810882568359375, 0.006801605224609375, -0.034759521484375, 0.04412841796875, -0.03485107421875, 0.010040283203125, -0.0203704833984375, -0.0206298828125, -0.034149169921875, 0.0136566162109375, -0.0271453857421875, 0.0809326171875, 0.00821685791015625, -0.04345703125, 0.01715087890625, -0.0465087890625, -0.0084686279296875, -0.017822265625, -0.0033092498779296875, -0.050994873046875, -0.0297698974609375, 0.0281982421875, 0.030181884765625, -0.03125, 0.019683837890625, -0.03387451171875, -0.03326416015625, 0.010955810546875, -0.0306854248046875, 0.06134033203125, 0.021209716796875, -0.051239013671875, 0.017822265625, -0.056671142578125, -0.0006389617919921875, 0.0308837890625, -0.034881591796875, -0.0015773773193359375, -0.004009246826171875, -0.0096435546875, 0.0223846435546875, 0.0247650146484375, -0.02154541015625, 0.022979736328125, -0.020660400390625, 0.058929443359375, 0.0648193359375, 0.006359100341796875, 0.01861572265625, -0.03656005859375, 0.036468505859375, 0.01195526123046875, 0.039276123046875, -0.0007309913635253906, -0.049957275390625, -0.067138671875, -0.0222930908203125, -0.00391387939453125, 0.048370361328125, -0.042388916015625, 0.03704833984375, -0.008697509765625, -0.058135986328125, -0.02471923828125, 0.007495880126953125, 0.0311737060546875, 0.0343017578125, 
0.026885986328125, -0.0231475830078125, -0.04571533203125, -0.055999755859375, -0.006771087646484375, -0.016845703125, -0.00128936767578125, 0.038848876953125, 0.0257568359375, -0.037384033203125, 0.0635986328125, -0.041046142578125, -0.0248260498046875, -0.00560760498046875, -0.0217437744140625, 0.030181884765625, 0.03973388671875, 0.0628662109375, -0.0411376953125, -0.0209503173828125, -0.012939453125, -0.05364990234375, -0.00614166259765625, 0.0008716583251953125, -0.034942626953125, 0.01178741455078125, 0.0184326171875, -0.054168701171875, 0.041748046875, 0.05316162109375, -0.038177490234375, 0.03961181640625, -0.006969451904296875, 0.0004811286926269531, -0.0823974609375, -0.0008397102355957031, 0.009185791015625, -0.00975799560546875, -0.052001953125, -0.0083160400390625, -0.00754547119140625, 0.01515960693359375, -0.04547119140625, 0.0528564453125, -0.028533935546875, -0.001071929931640625, -0.013824462890625, 0.01458740234375, -0.0005159378051757812, 0.05450439453125, -0.032073974609375, 0.06317138671875, 0.04248046875, -0.037322998046875, 0.0255889892578125, 0.021392822265625, -0.0218505859375, 0.036529541015625, -0.07086181640625, 0.025238037109375, 0.0102386474609375, 0.040435791015625, -0.09521484375, -0.013214111328125, 0.04241943359375, -0.026123046875, 0.0089263916015625, -0.0101776123046875, -0.0223846435546875, -0.03424072265625, -0.040618896484375, 0.03485107421875, 0.04266357421875, -0.047821044921875, 0.031402587890625, 0.03314208984375, 0.0013637542724609375, -0.035736083984375, -0.0543212890625, -0.0098724365234375, -0.041839599609375, -0.055023193359375, 0.0224151611328125, -0.0223388671875, -0.0008401870727539062, -0.0180511474609375, -0.0192108154296875, -0.01189422607421875, 0.0132904052734375, 0.021240234375, 0.0362548828125, -0.01297760009765625, -0.016082763671875, 0.0136260986328125, -0.0087432861328125, -0.00913238525390625, -0.0209503173828125, 0.04833984375, -0.0234527587890625, -0.0178070068359375, -0.05242919921875, 
0.0005931854248046875, 0.03936767578125, -0.01216888427734375, 0.0440673828125, 0.05926513671875, -0.0227508544921875, 0.0199432373046875, -0.045013427734375, -0.01535797119140625, -0.04425048828125, 0.02874755859375, -0.0281982421875, -0.07098388671875, 0.061859130859375, 0.006031036376953125, 0.0206298828125, 0.04364013671875, 0.043701171875, 0.00809478759765625, 0.054229736328125, 0.03424072265625, 0.001972198486328125, 0.047088623046875, -0.038421630859375, 0.00939178466796875, -0.07989501953125, -0.033966064453125, -0.0220489501953125, -0.029571533203125, -0.056396484375, -0.0384521484375, 0.037567138671875, 0.0208740234375, -0.048583984375, 0.0208587646484375, -0.046630859375, 0.01549530029296875, 0.044464111328125, 0.0258331298828125, 0.015777587890625, 0.000980377197265625, 0.007061004638671875, 0.006366729736328125, -0.051025390625, -0.038482666015625, 0.099853515625, 0.0296478271484375, 0.054168701171875, 0.022979736328125, 0.04412841796875, -0.0125579833984375, 0.01041412353515625, -0.04815673828125, 0.041839599609375, 0.002716064453125, -0.048736572265625, -0.015899658203125, -0.0218353271484375, -0.0821533203125, 0.025604248046875, 0.0023021697998046875, -0.0499267578125, 0.0347900390625, 0.01557159423828125, -0.03875732421875, 0.0177001953125, -0.049835205078125, 0.044769287109375, -0.01806640625, -0.0160369873046875, -0.00885009765625, -0.05792236328125, 0.04827880859375, -0.0013093948364257812, 0.014404296875, -0.0179290771484375, -0.01493072509765625, 0.07171630859375, -0.037506103515625, 0.072998046875, -0.0133056640625, -0.0158843994140625, 0.0498046875, -0.00763702392578125, 0.050567626953125, 0.0190887451171875, 0.00275421142578125, 0.038665771484375, -0.00450897216796875, -0.02569580078125, -0.0218963623046875, 0.049530029296875, -0.0826416015625, -0.045257568359375, -0.0215301513671875, -0.0208282470703125, 0.014404296875, 0.016937255859375, 0.01534271240234375, 0.0268096923828125, 0.026580810546875, 0.0041656494140625, 0.031524658203125, 
-0.01300048828125, 0.032867431640625, 0.043670654296875, -0.005382537841796875, -0.047637939453125, 0.058258056640625, 0.0187835693359375, 0.031829833984375, 0.00762176513671875, 0.01453399658203125, -0.0182342529296875, -0.039459228515625, -0.0219573974609375, 0.03167724609375, -0.050811767578125, -0.0301361083984375, -0.031890869140625, -0.0165863037109375, -0.0255279541015625, 0.0012006759643554688, -0.034393310546875, -0.03448486328125, -0.055389404296875, -0.014434814453125, 0.04681396484375, 0.037445068359375, -0.0094146728515625, 0.038543701171875, -0.03717041015625, 0.018035888671875, 0.035919189453125, 0.0165557861328125, 0.00597381591796875, -0.05877685546875, -0.0005230903625488281, 0.0160369873046875, -0.04925537109375, -0.04425048828125, 0.0247955322265625, 0.018341064453125, 0.05059814453125, 0.0263519287109375, -0.00287628173828125, 0.08203125, -0.0231475830078125, 0.07275390625, 0.0164031982421875, -0.061676025390625, 0.048858642578125, -0.03692626953125, 0.01016998291015625, 0.0216064453125, 0.018218994140625, -0.026397705078125, -0.0117034912109375, -0.05316162109375, -0.078857421875, 0.053558349609375, 0.02105712890625, 0.00229644775390625, 0.01116943359375, 0.04974365234375, 0.001800537109375, 0.0105133056640625, -0.056610107421875, -0.040069580078125, -0.0300750732421875, 0.0026416778564453125, -0.0009336471557617188, -0.0335693359375, -0.0207061767578125, -0.0167999267578125, 0.0452880859375, -0.00806427001953125, 0.048614501953125, 0.0175323486328125, 0.0207061767578125, -0.0102386474609375, 0.00267791748046875, 0.0750732421875, 0.046783447265625, -0.014373779296875, -0.005725860595703125, 0.02008056640625, -0.04327392578125, 0.00791168212890625, 0.0094757080078125, 0.0014066696166992188, -0.01476287841796875, 0.0275115966796875, 0.07904052734375, 0.01446533203125, -0.0252838134765625, 0.0321044921875, -0.0057220458984375, -0.0233154296875, -0.0238037109375, 0.01435089111328125, 0.00732421875, 0.0513916015625, 0.032073974609375, 
0.0146026611328125, 0.0018339157104492188, -0.033905029296875, 0.0033473968505859375, 0.0270233154296875, 0.0024261474609375, -0.037841796875, 0.06927490234375, 0.0170745849609375, -0.016204833984375, 0.034576416015625, -0.00623321533203125, -0.03643798828125, 0.058380126953125, 0.0404052734375, 0.04876708984375, -0.0251922607421875, -0.00518798828125, 0.03558349609375, 0.01934814453125, -0.01458740234375, 0.033111572265625, 0.005413055419921875, -0.0472412109375, -0.0165557861328125, -0.02734375, -0.0340576171875, 0.01409912109375, -0.037384033203125, 0.031982421875, -0.02313232421875, -0.0216522216796875, -0.0278472900390625, 0.01270294189453125, -0.0555419921875, -0.001129150390625, -0.005947113037109375, 0.0606689453125, -0.047271728515625, 0.0615234375, 0.033538818359375, -0.0157012939453125, -0.0860595703125, -0.008941650390625, 0.01161956787109375, -0.0758056640625, 0.0293731689453125, 0.007068634033203125, 0.00016748905181884766, 0.01509857177734375, -0.040557861328125, -0.08880615234375, 0.1287841796875, 0.027252197265625, -0.050628662109375, -0.00018477439880371094, -0.006740570068359375, 0.0259246826171875, -0.0006723403930664062, 0.042724609375, 0.05133056640625, 0.03643798828125, 0.022216796875, -0.09051513671875, 0.0234222412109375, -0.0160675048828125, 0.00382232666015625, -0.0032939910888671875, -0.09478759765625, 0.07861328125, -0.0271148681640625, -0.0148468017578125, 0.01494598388671875, 0.047821044921875, 0.05133056640625, 0.0225830078125, 0.029083251953125, 0.04241943359375, 0.060546875, -0.0115509033203125, 0.0665283203125, -0.01450347900390625, 0.042022705078125, 0.06097412109375, -0.0038051605224609375, 0.0528564453125, 0.0287017822265625, -0.045623779296875, 0.039031982421875, 0.0697021484375, 0.013092041015625, 0.05560302734375, 0.0065155029296875, -0.00872039794921875, 0.004261016845703125, 0.01458740234375, -0.052642822265625, 0.0301513671875, 0.0302734375, -0.0033359527587890625, -0.0111236572265625, -0.008544921875, 0.0225982666015625, 
-0.0374755859375, -0.005008697509765625, 0.048248291015625, 0.0135955810546875, -0.03118896484375, 0.08380126953125, -0.0084991455078125, 0.07684326171875, -0.05059814453125, -0.0038204193115234375, -0.0380859375, 0.01007080078125, -0.0266876220703125, -0.053802490234375, 0.003582000732421875, -0.00997161865234375, 0.0137939453125, 0.0109100341796875, 0.04974365234375, -0.01232147216796875, -0.0229034423828125, 0.01934814453125, 0.01403045654296875, 0.0173187255859375, 0.0175628662109375, -0.05078125, 0.0205078125, 0.00975799560546875, -0.057220458984375, 0.0214691162109375, 0.032684326171875, -0.0012521743774414062, 0.0611572265625, 0.058258056640625, -0.009368896484375, 0.0227813720703125, -0.0140533447265625, 0.09417724609375, -0.034149169921875, -0.03369140625, -0.07080078125, 0.04541015625, 0.008331298828125, -0.042083740234375, 0.048004150390625, 0.0303802490234375, 0.05438232421875, 0.0161895751953125, 0.0506591796875, -0.007251739501953125, 0.0202789306640625, -0.0367431640625, 0.04046630859375, -0.044464111328125, 0.038818359375, -0.011199951171875, -0.06097412109375, -0.016204833984375, 0.063232421875, -0.00516510009765625, 0.009979248046875, 0.035186767578125, 0.06878662109375, 0.0057220458984375, -0.0197296142578125, 0.0002887248992919922, 0.025390625, 0.06463623046875, 0.06573486328125, 0.043792724609375, -0.055908203125, 0.061279296875, -0.02484130859375, -0.0284881591796875, -0.03118896484375, -0.06439208984375, -0.0623779296875, -0.0291900634765625, -0.025634765625, -0.0270233154296875, 0.004261016845703125, 0.0557861328125, 0.05841064453125, -0.0389404296875, -0.0190887451171875, -0.01251220703125, 0.004062652587890625, -0.0244293212890625, -0.01372528076171875, 0.027679443359375, 0.01003265380859375, -0.0491943359375, 0.026123046875, 0.0026798248291015625, 0.037445068359375, -0.0152587890625, -0.0206146240234375, -0.01001739501953125, 0.0018157958984375, 0.03729248046875, 0.02301025390625, -0.06036376953125, -0.026458740234375, 
-0.007762908935546875, -0.003955841064453125, 0.014495849609375, 0.0149993896484375, -0.058746337890625, 0.01323699951171875, 0.0213165283203125, 0.015167236328125, 0.0494384765625, -0.00531005859375, 0.007740020751953125, -0.03814697265625, 0.030487060546875, -0.0094451904296875, 0.0192413330078125, 0.005527496337890625, -0.030517578125, 0.053375244140625, 0.0208587646484375, -0.054901123046875, -0.0697021484375, 0.0017499923706054688, -0.10174560546875, 0.0021800994873046875, 0.103515625, -0.022430419921875, -0.0011959075927734375, 0.01219940185546875, -0.0284881591796875, 0.0338134765625, -0.04278564453125, 0.056396484375, 0.03094482421875, -0.0124053955078125, -0.016693115234375, -0.05303955078125, 0.025634765625, 0.0038700103759765625, -0.07183837890625, -0.01515960693359375, 0.0146636962890625, 0.038299560546875, 0.0034923553466796875, 0.058746337890625, 0.0002346038818359375, 0.019012451171875, -0.0158538818359375, -0.0028514862060546875, -0.0220947265625, 0.0025043487548828125, -0.021697998046875, -0.0121612548828125, -0.00394439697265625, -0.0278167724609375 ] ]
cerebras/Cerebras-GPT-256M
2023-04-07T13:49:13.000Z
[ "transformers", "pytorch", "gpt2", "causal-lm", "text-generation", "en", "dataset:the_pile", "arxiv:2304.03208", "arxiv:2203.15556", "arxiv:2101.00027", "license:apache-2.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
cerebras
null
null
cerebras/Cerebras-GPT-256M
20
8,306
transformers
2023-03-20T20:40:06
--- language: - en tags: - pytorch - causal-lm license: apache-2.0 datasets: - the_pile pipeline_tag: text-generation --- # Cerebras-GPT 256M Check out our [Blog Post](https://www.cerebras.net/cerebras-gpt) and [arXiv paper](https://arxiv.org/abs/2304.03208)! ## Model Description The Cerebras-GPT family is released to facilitate research into LLM scaling laws using open architectures and data sets and demonstrate the simplicity of and scalability of training LLMs on the Cerebras software and hardware stack. All Cerebras-GPT models are available on Hugging Face. The family includes 111M, 256M, 590M, 1.3B, 2.7B, 6.7B, and 13B models. All models in the Cerebras-GPT family have been trained in accordance with [Chinchilla scaling laws](https://arxiv.org/abs/2203.15556) (20 tokens per model parameter) which is compute-optimal. These models were trained on the [Andromeda](https://www.cerebras.net/andromeda/) AI supercomputer comprised of 16 CS-2 wafer scale systems. Cerebras' [weight streaming technology](https://www.cerebras.net/blog/linear-scaling-made-possible-with-weight-streaming) simplifies the training of LLMs by disaggregating compute from model storage. This allowed for efficient scaling of training across nodes using simple data parallelism. Cerebras systems for pre-training and fine tuning are available in the cloud via the [Cerebras Model Studio](https://www.cerebras.net/product-cloud/). Cerebras CS-2 compatible checkpoints are available in [Cerebras Model Zoo](https://github.com/Cerebras/modelzoo). 
## Model Details * Developed by: [Cerebras Systems](https://www.cerebras.net/) * License: Apache 2.0 * Model type: Transformer-based Language Model * Architecture: GPT-3 style architecture * Data set: The Pile * Tokenizer: Byte Pair Encoding * Vocabulary Size: 50257 * Sequence Length: 2048 * Optimizer: AdamW, (β1, β2) = (0.9, 0.95), adam_eps = 1e−8 (1e−9 for larger models) * Positional Encoding: Learned * Language: English * Learn more: Dense Scaling Laws Paper for training procedure, config files, and details on how to use. **Contact**: To ask questions about Cerebras-GPT models, join the [Cerebras Discord](https://discord.gg/q6bZcMWJVu). This is the standard parameterization version of Cerebras-GPT with **256M** parameters Related models: [Cerebras-GPT Models](https://huggingface.co/models?sort=downloads&search=cerebras-gpt) <br><br> | Model | Parameters | Layers | d_model | Heads | d_head | d_ffn | LR | BS (seq) | BS (tokens) | |---------------|------------|--------|---------|-------|--------|--------|----------|----------|----------------| | Cerebras-GPT | 111M | 10 | 768 | 12 | 64 | 3072 | 6.0E-04 | 120 | 246K | | Cerebras-GPT | 256M | 14 | 1088 | 17 | 64 | 4352 | 6.0E-04 | 264 | 541K | | Cerebras-GPT | 590M | 18 | 1536 | 12 | 128 | 6144 | 2.0E-04 | 264 | 541K | | Cerebras-GPT | 1.3B | 24 | 2048 | 16 | 128 | 8192 | 2.0E-04 | 528 | 1.08M | | Cerebras-GPT | 2.7B | 32 | 2560 | 20 | 128 | 10240 | 2.0E-04 | 528 | 1.08M | | Cerebras-GPT | 6.7B | 32 | 4096 | 32 | 128 | 16384 | 1.2E-04 | 1040 | 2.13M | | Cerebras-GPT | 13B | 40 | 5120 | 40 | 128 | 20480 | 1.2E-04 | 720 &rarr; 1080 | 1.47M &rarr; 2.21M | <br><br> ## Quickstart This model can be easily loaded using the AutoModelForCausalLM functionality: ```python from transformers import AutoTokenizer, AutoModelForCausalLM tokenizer = AutoTokenizer.from_pretrained("cerebras/Cerebras-GPT-256M") model = AutoModelForCausalLM.from_pretrained("cerebras/Cerebras-GPT-256M") text = "Generative AI is " ``` And can be used with 
Hugging Face Pipelines ```python from transformers import pipeline pipe = pipeline("text-generation", model=model, tokenizer=tokenizer) generated_text = pipe(text, max_length=50, do_sample=False, no_repeat_ngram_size=2)[0] print(generated_text['generated_text']) ``` or with `model.generate()` ```python inputs = tokenizer(text, return_tensors="pt") outputs = model.generate(**inputs, num_beams=5, max_new_tokens=50, early_stopping=True, no_repeat_ngram_size=2) text_output = tokenizer.batch_decode(outputs, skip_special_tokens=True) print(text_output[0]) ``` <br><br> ## Training data Cerebras-GPT is trained using [the Pile](https://pile.eleuther.ai) dataset from [EleutherAI](https://www.eleuther.ai). See the [Pile paper](https://arxiv.org/abs/2101.00027) for a more detailed breakdown of data sources and methodology. The Pile was cleaned using the ftfy library to normalize the text, then filtered using scripts provided by Eleuther. We tokenized the data using byte-pair encoding using the GPT-2 vocabulary. Our tokenized version of the Pile has 371B tokens. We include more details about the training dataset preprocessing in Appendix A.1 of our paper. Recent works find significant duplicate data present in the Pile. Eleuther’s Pythia applies a deduplication process to reduce replicated data, decreasing the Pile dataset size. Pythia was trained on both the standard dataset and deduplicated dataset to characterize the impact. Our models are trained on the standard Pile without deduplication, which may present an opportunity for further improvement with the deduplicated data set. <br><br> ## Training procedure We use the GPT-3 style model architecture. All of our layers use full attention as opposed to the GPT-3 style sparse banded attention. The model shapes were selected to either follow aspect ratio 80 or are the same shape as GPT-3 models. Learning rate warmed up for 375M tokens (1500 steps for 111M and 256M models) and 10x cosine decayed. 
No dropout was used and weight decay was set to 0.1. All models are trained with MSL of 2048. All models were trained to Chinchilla point: 20 tokens per model parameter. Number of steps was chosen based on optimal batch size (varied by model) and fixed sequence length (2048). See Training Table, below, for details. <br> Model Params | Sequence Length | Batch Size | Number of Steps | Tokens | Tokens per Parameter | Flops ------------ | -------------- | ---------- | --------------- | ------ | -------------------- | ----- 111M | 2048 | 120 | 9037 | 2.22E+09 | 20 | 2.6E+18 256M | 2048 | 264 | 9468 | 5.12E+09 | 20 | 1.3E+19 590M | 2048 | 264 | 21836 | 1.18E+10 | 20 | 6.1E+19 1.3B | 2048 | 528 | 24334 | 2.63E+10 | 20 | 2.8E+20 2.7B | 2048 | 528 | 49041 | 5.30E+10 | 20 | 1.1E+21 6.7B | 2048 | 1040 | 62522 | 1.33E+11 | 20 | 6.3E+21 13B | 2048 | 720 | 174335 | 2.57E+11 | 20 | 2.3E+22 <br><br> ## Evaluations We trained models from smallest to largest and fit a power law as we went along. The power law was helpful for extrapolating the validation loss of the next largest model we trained and provided confidence about whether the training run was going well. We performed upstream (pre-training) evaluations of text prediction cross-entropy using the Pile validation and test splits. We performed downstream evaluations of text generation accuracy on standardized tasks using the [Eleuther lm-evaluation-harness](https://github.com/EleutherAI/lm-evaluation-harness). Results are compared against many publicly available large language models in Section 3 of the paper. 
#### 0-shot Evaluation | Model | Params | Training FLOPs | PILE test xent | Hella-Swag | PIQA | Wino-Grande | Lambada | ARC-e | ARC-c | OpenBookQA | Downstream Average | | ------- | ----- | -------------- | -------------- | ---------- | ----- | ----------- | ------- | ----- | ----- | ---------- | ------------------ | | Cerebras-GPT | 111M | 2.6E+18 | 2.566 | 0.268 | 0.594 | 0.488 | 0.194 | 0.380 | 0.166 | 0.118 | 0.315 | | Cerebras-GPT | 256M | 1.3E+19 | 2.299 | 0.274 | 0.613 | 0.511 | 0.293 | 0.410 | 0.170 | 0.158 | 0.347 | | Cerebras-GPT | 590M | 6.1E+19 | 2.184 | 0.291 | 0.627 | 0.498 | 0.366 | 0.464 | 0.190 | 0.158 | 0.370 | | Cerebras-GPT | 1.3B | 2.8E+20 | 1.996 | 0.325 | 0.664 | 0.521 | 0.462 | 0.508 | 0.224 | 0.166 | 0.410 | | Cerebras-GPT | 2.7B | 1.1E+21 | 1.834 | 0.386 | 0.701 | 0.559 | 0.567 | 0.571 | 0.246 | 0.206 | 0.462 | | Cerebras-GPT | 6.7B | 6.3E+21 | 1.704 | 0.447 | 0.739 | 0.602 | 0.636 | 0.643 | 0.282 | 0.238 | 0.512 | | Cerebras-GPT | 13B | 2.3E+22 | 1.575 | 0.513 | 0.766 | 0.646 | 0.696 | 0.714 | 0.367 | 0.286 | 0.570 | #### 5-shot Evaluation | Model | Params | Hella-Swag | PIQA | Wino-Grande | Lambada | ARC-e | ARC-c | OpenBookQA | | -------- | ----- | ----------| ----- | ----------- | -------| ----- | ----- | ---------- | | Cerebras-GPT | 111M | 0.267 | 0.588 | 0.475 | 0.158 | 0.356 | 0.166 | 0.136 | | Cerebras-GPT | 256M | 0.278 | 0.606 | 0.522 | 0.225 | 0.422 | 0.183 | 0.164 | | Cerebras-GPT | 590M | 0.291 | 0.634 | 0.479 | 0.281 | 0.475 | 0.206 | 0.152 | | Cerebras-GPT | 1.3B | 0.326 | 0.668 | 0.536 | 0.395 | 0.529 | 0.241 | 0.174 | | Cerebras-GPT | 2.7B | 0.382 | 0.697 | 0.543 | 0.487 | 0.590 | 0.267 | 0.224 | | Cerebras-GPT | 6.7B | 0.444 | 0.736 | 0.590 | 0.591 | 0.667 | 0.314 | 0.270 | | Cerebras-GPT | 13B | 0.514 | 0.768 | 0.674 | 0.655 | 0.743 | 0.398 | 0.318 | <br><br> ## Uses and Limitations ### Intended Use The primary intended use is to further research into large language models. 
These models can be used as a foundation model for NLP, applications, ethics, and alignment research. Our primary intended users are researchers who are working to improve LLMs and practitioners seeking reference implementations, training setups, hyperparameters, or pre-trained models. We release these models with a fully permissive Apache license for the community to use freely. You may fine-tune and adapt Cerebras-GPT models for deployment via either Cerebras [Model Studio](https://www.cerebras.net/product-cloud/) or third-party libraries. Further safety-related testing and mitigations should be applied beore using the Cerebras-GPT model family in production downstream applications. Due to financial and compute budgets, Cerebras-GPT models were only trained and evaluated following the approaches described in the paper. ### Out of Scope Use Cerebras-GPT models are trained on the Pile, with English language only, and are not suitable for machine translation tasks. Cerebras-GPT models have not been tuned for human-facing dialog applications like chatbots and will not respond to prompts in a similar way to models that have received instruction tuning or reinforcement learning from human feedback (RLHF) like Flan-T5 or ChatGPT. Cerebras-GPT models can be tuned using those methods. ### Risk, Bias, Ethical Considerations * **Data**: The Pile dataset has been thoroughly analyzed from various ethical standpoints such as toxicity analysis, gender bias, pejorative content, racially sensitive content etc. Please refer to Pile dataset references. * **Human life**: The outputs from this model may or may not align with human values. The risk needs to be thoroughly investigated before deploying this model in a production environment where it can directly impact human life. * **Risks and harms**: There can be distributional bias in the Pile dataset that can manifest in various forms in the downstream model deployment. 
There are other risks associated with large language models such as amplifying stereotypes, memorizing training data, or revealing private or secure information. * **Mitigations**: Only mitigations in standard Pile dataset pre-processing were employed when pre-training Cerebras-GPT. <br><br> ## Acknowledgements We are thankful to all Cerebras engineers, past and present, that made this work possible.
12,559
[ [ -0.028839111328125, -0.046142578125, 0.0193328857421875, 0.01297760009765625, -0.01904296875, -0.0151214599609375, -0.015594482421875, -0.030853271484375, 0.013092041015625, 0.020904541015625, -0.0291900634765625, -0.03118896484375, -0.05548095703125, -0.0156097412109375, -0.03076171875, 0.08563232421875, -0.0067138671875, 0.005035400390625, 0.00919342041015625, -0.005527496337890625, -0.01357269287109375, -0.042755126953125, -0.05877685546875, -0.0294342041015625, 0.0357666015625, -0.00012105703353881836, 0.056854248046875, 0.06011962890625, 0.0254058837890625, 0.0221405029296875, -0.0276641845703125, -0.0031871795654296875, -0.024627685546875, -0.0236663818359375, 0.01160430908203125, -0.0187530517578125, -0.040496826171875, -0.00789642333984375, 0.05279541015625, 0.048797607421875, -0.02587890625, 0.0182037353515625, 0.0253753662109375, 0.054656982421875, -0.036865234375, 0.01175689697265625, -0.036376953125, 0.0007653236389160156, -0.0191192626953125, 0.00030422210693359375, -0.0207061767578125, -0.01464080810546875, 0.0019817352294921875, -0.0396728515625, 0.021453857421875, -0.0025768280029296875, 0.0955810546875, 0.0167694091796875, -0.0316162109375, -0.01922607421875, -0.03265380859375, 0.054443359375, -0.05682373046875, 0.029388427734375, 0.01397705078125, -0.0009388923645019531, -0.0022945404052734375, -0.0635986328125, -0.038543701171875, -0.016876220703125, -0.01396942138671875, 0.01131439208984375, -0.0152435302734375, 0.004535675048828125, 0.034332275390625, 0.038604736328125, -0.059326171875, 0.016021728515625, -0.037445068359375, -0.0191497802734375, 0.0521240234375, 0.012847900390625, 0.016510009765625, -0.026336669921875, -0.031829833984375, -0.0292205810546875, -0.0372314453125, 0.02410888671875, 0.032257080078125, 0.01399993896484375, -0.03082275390625, 0.0301055908203125, -0.013763427734375, 0.047027587890625, 0.0214996337890625, -0.00695037841796875, 0.0411376953125, -0.02264404296875, -0.03277587890625, -0.005458831787109375, 
0.0789794921875, 0.01263427734375, 0.01358795166015625, 0.00615692138671875, -0.0145416259765625, -0.0108489990234375, 0.0007171630859375, -0.08197021484375, -0.025909423828125, 0.01401519775390625, -0.0433349609375, -0.0273590087890625, 0.0023288726806640625, -0.0531005859375, -0.014678955078125, -0.0305328369140625, 0.036468505859375, -0.038665771484375, -0.02410888671875, 0.00782012939453125, 0.002162933349609375, 0.034210205078125, 0.0191497802734375, -0.08953857421875, 0.02197265625, 0.0303497314453125, 0.0643310546875, 0.002368927001953125, -0.0291900634765625, -0.0174560546875, -0.0016765594482421875, -0.0114593505859375, 0.035400390625, -0.0034046173095703125, -0.026885986328125, -0.0174407958984375, 0.00922393798828125, -0.033447265625, -0.0267486572265625, 0.038604736328125, -0.0264892578125, 0.0167388916015625, -0.01009368896484375, -0.0399169921875, -0.028533935546875, 0.01220703125, -0.041717529296875, 0.0836181640625, 0.01439666748046875, -0.0692138671875, 0.02032470703125, -0.0343017578125, -0.01812744140625, -0.005054473876953125, -0.0120697021484375, -0.048370361328125, -0.01265716552734375, 0.03167724609375, 0.04302978515625, -0.02386474609375, 0.0259857177734375, -0.0170745849609375, -0.0224761962890625, -0.006206512451171875, -0.0390625, 0.08746337890625, 0.02166748046875, -0.045257568359375, 0.0005788803100585938, -0.05548095703125, 0.010833740234375, 0.0263519287109375, -0.03204345703125, 0.00921630859375, -0.01727294921875, 0.0089263916015625, 0.019134521484375, 0.0285491943359375, -0.0215911865234375, 0.01324462890625, -0.03326416015625, 0.040283203125, 0.05194091796875, 0.0040740966796875, 0.022552490234375, -0.024444580078125, 0.0340576171875, 0.006267547607421875, 0.0181121826171875, -0.00988006591796875, -0.039886474609375, -0.0567626953125, -0.0189056396484375, 0.0318603515625, 0.041046142578125, -0.033905029296875, 0.0372314453125, -0.02325439453125, -0.05950927734375, -0.0173187255859375, 0.005779266357421875, 0.03448486328125, 
0.039337158203125, 0.0323486328125, -0.0193328857421875, -0.036407470703125, -0.07208251953125, -0.006122589111328125, -0.0174102783203125, -0.004390716552734375, 0.01715087890625, 0.05743408203125, -0.00511932373046875, 0.053314208984375, -0.0347900390625, -0.004489898681640625, -0.00457763671875, 0.01434326171875, 0.032623291015625, 0.047393798828125, 0.045867919921875, -0.0572509765625, -0.04119873046875, 0.0006518363952636719, -0.06011962890625, 0.01092529296875, -0.0151214599609375, 0.002674102783203125, 0.0223541259765625, 0.0330810546875, -0.055023193359375, 0.027069091796875, 0.048126220703125, -0.0244293212890625, 0.04766845703125, -0.0220947265625, 0.0008993148803710938, -0.079833984375, 0.0231475830078125, 0.0112457275390625, -0.003025054931640625, -0.04437255859375, 0.0037021636962890625, 0.018310546875, 0.0019025802612304688, -0.046234130859375, 0.037628173828125, -0.04498291015625, -0.0004253387451171875, -0.0002237558364868164, 0.009429931640625, -0.00733184814453125, 0.06536865234375, 0.0070037841796875, 0.052215576171875, 0.046875, -0.0478515625, 0.00994873046875, 0.01139068603515625, -0.0181732177734375, 0.027069091796875, -0.06256103515625, 0.002536773681640625, -0.0034275054931640625, 0.02587890625, -0.055328369140625, -0.013946533203125, 0.018157958984375, -0.04510498046875, 0.036590576171875, -0.0194244384765625, -0.03167724609375, -0.0474853515625, -0.0231475830078125, 0.0267791748046875, 0.051788330078125, -0.0428466796875, 0.0408935546875, 0.0184783935546875, -0.0029048919677734375, -0.04876708984375, -0.05364990234375, -0.00240325927734375, -0.0305328369140625, -0.06439208984375, 0.040069580078125, -0.006290435791015625, 0.0007305145263671875, -0.01456451416015625, 0.0040130615234375, 0.0026035308837890625, 0.00226593017578125, 0.023345947265625, 0.0218658447265625, -0.010162353515625, -0.007762908935546875, 0.0006308555603027344, -0.00682830810546875, 0.006618499755859375, -0.025482177734375, 0.054351806640625, -0.030853271484375, 
-0.01861572265625, -0.040863037109375, -0.011444091796875, 0.045074462890625, -0.01395416259765625, 0.064208984375, 0.060577392578125, -0.040679931640625, 0.01277923583984375, -0.03411865234375, -0.00231170654296875, -0.037506103515625, 0.03765869140625, -0.0295867919921875, -0.0528564453125, 0.054351806640625, 0.02197265625, 0.0071563720703125, 0.0634765625, 0.05609130859375, 0.00872802734375, 0.08538818359375, 0.0279083251953125, -0.016082763671875, 0.037384033203125, -0.0521240234375, -0.000797271728515625, -0.07183837890625, -0.020965576171875, -0.033843994140625, -0.013763427734375, -0.0526123046875, -0.021697998046875, 0.0186767578125, 0.0255279541015625, -0.0518798828125, 0.03680419921875, -0.054962158203125, 0.01549530029296875, 0.036407470703125, 0.01490020751953125, 0.005146026611328125, 0.0013103485107421875, -0.02374267578125, -0.000021696090698242188, -0.0528564453125, -0.0361328125, 0.09246826171875, 0.0411376953125, 0.033966064453125, -0.0086822509765625, 0.05914306640625, -0.001728057861328125, 0.02691650390625, -0.046905517578125, 0.033843994140625, -0.005825042724609375, -0.0457763671875, -0.0252685546875, -0.043212890625, -0.076416015625, 0.037445068359375, 0.0010290145874023438, -0.07427978515625, 0.018646240234375, 0.006900787353515625, -0.034698486328125, 0.043914794921875, -0.042755126953125, 0.06903076171875, -0.020233154296875, -0.02813720703125, -0.01102447509765625, -0.053466796875, 0.034942626953125, -0.00226593017578125, 0.0167694091796875, 0.010223388671875, 0.004974365234375, 0.0731201171875, -0.051177978515625, 0.052459716796875, -0.0250396728515625, -0.0116119384765625, 0.0411376953125, -0.010284423828125, 0.057159423828125, -0.0005941390991210938, -0.005527496337890625, 0.0187225341796875, 0.0012865066528320312, -0.0308685302734375, -0.0186920166015625, 0.057891845703125, -0.08270263671875, -0.034820556640625, -0.038360595703125, -0.03887939453125, 0.00385284423828125, 0.0118255615234375, 0.0389404296875, 0.0292816162109375, 
0.0030269622802734375, 0.028778076171875, 0.048126220703125, -0.01318359375, 0.051177978515625, 0.0220947265625, -0.01605224609375, -0.046722412109375, 0.06292724609375, 0.0233306884765625, 0.0187530517578125, 0.013275146484375, 0.007175445556640625, -0.02886962890625, -0.0469970703125, -0.042724609375, 0.02386474609375, -0.046417236328125, -0.00910186767578125, -0.0601806640625, -0.03314208984375, -0.033538818359375, -0.0091400146484375, -0.02471923828125, -0.0296173095703125, -0.02618408203125, -0.005615234375, 0.027130126953125, 0.037933349609375, -0.006412506103515625, 0.027801513671875, -0.054229736328125, 0.007598876953125, 0.0234375, 0.01029205322265625, 0.0150299072265625, -0.07342529296875, -0.0255889892578125, 0.010467529296875, -0.047210693359375, -0.0614013671875, 0.0443115234375, -0.003902435302734375, 0.03521728515625, 0.0240936279296875, -0.0214996337890625, 0.054290771484375, -0.022064208984375, 0.0723876953125, 0.0247955322265625, -0.07135009765625, 0.03790283203125, -0.04510498046875, 0.0159759521484375, 0.03253173828125, 0.0299530029296875, -0.038604736328125, -0.0137176513671875, -0.0732421875, -0.07366943359375, 0.056396484375, 0.024993896484375, -0.0007343292236328125, 0.01190948486328125, 0.035369873046875, -0.01334381103515625, 0.01062774658203125, -0.076904296875, -0.0207672119140625, -0.021514892578125, -0.01549530029296875, -0.0025539398193359375, 0.0026073455810546875, 0.01061248779296875, -0.036407470703125, 0.06573486328125, -0.00847625732421875, 0.0183868408203125, 0.0191802978515625, -0.0128021240234375, -0.01006317138671875, -0.004817962646484375, 0.040008544921875, 0.04193115234375, -0.01129150390625, -0.01953125, 0.033447265625, -0.056396484375, 0.002841949462890625, 0.0224456787109375, -0.026275634765625, -0.00982666015625, 0.0178680419921875, 0.06964111328125, 0.01366424560546875, -0.0242462158203125, 0.03521728515625, 0.0026073455810546875, -0.042694091796875, -0.0289154052734375, 0.0008473396301269531, 0.0164337158203125, 
0.0147857666015625, 0.02850341796875, -0.0006704330444335938, 0.001918792724609375, -0.0209808349609375, 0.01038360595703125, 0.0283355712890625, -0.0227813720703125, -0.0191802978515625, 0.07159423828125, -0.0031585693359375, -0.0080413818359375, 0.052276611328125, -0.01227569580078125, -0.036285400390625, 0.07659912109375, 0.0239105224609375, 0.0625, -0.0206298828125, 0.0107269287109375, 0.060791015625, 0.0276641845703125, -0.0191802978515625, 0.005062103271484375, 0.00592803955078125, -0.03753662109375, -0.020904541015625, -0.0601806640625, -0.0157623291015625, 0.0255584716796875, -0.054534912109375, 0.036041259765625, -0.038238525390625, -0.00809478759765625, -0.00537872314453125, 0.0255889892578125, -0.057464599609375, 0.030242919921875, 0.02166748046875, 0.0638427734375, -0.06304931640625, 0.069091796875, 0.038665771484375, -0.054840087890625, -0.08819580078125, -0.005069732666015625, -0.0032329559326171875, -0.06500244140625, 0.039031982421875, 0.0221710205078125, 0.0167236328125, 0.01445770263671875, -0.04022216796875, -0.08941650390625, 0.11956787109375, 0.018524169921875, -0.054443359375, -0.01277923583984375, 0.007598876953125, 0.0426025390625, -0.0086212158203125, 0.03814697265625, 0.03997802734375, 0.0333251953125, 0.0015668869018554688, -0.07843017578125, 0.019561767578125, -0.0218658447265625, 0.00768280029296875, 0.02166748046875, -0.08074951171875, 0.0902099609375, -0.01026153564453125, -0.00244903564453125, 0.01042938232421875, 0.055023193359375, 0.041290283203125, 0.01088714599609375, 0.04193115234375, 0.0611572265625, 0.06317138671875, -0.0058746337890625, 0.0855712890625, -0.045745849609375, 0.0533447265625, 0.0648193359375, 0.003597259521484375, 0.055145263671875, 0.032196044921875, -0.032012939453125, 0.047149658203125, 0.07061767578125, -0.01313018798828125, 0.020294189453125, 0.0210723876953125, -0.0052490234375, -0.00789642333984375, 0.01447296142578125, -0.04510498046875, 0.0109405517578125, 0.021453857421875, -0.0391845703125, 
-0.00884246826171875, -0.0011186599731445312, 0.0202484130859375, -0.01357269287109375, -0.03173828125, 0.030059814453125, 0.0114898681640625, -0.04583740234375, 0.0689697265625, 0.00917816162109375, 0.05364990234375, -0.039520263671875, 0.024383544921875, -0.01177215576171875, 0.015838623046875, -0.0265655517578125, -0.0491943359375, 0.007114410400390625, 0.0013608932495117188, -0.0028533935546875, -0.0149993896484375, 0.0400390625, -0.01715087890625, -0.036865234375, 0.0301055908203125, 0.0271453857421875, 0.01546478271484375, -0.01239013671875, -0.07080078125, -0.008270263671875, 0.005828857421875, -0.064208984375, 0.03179931640625, 0.026275634765625, -0.00562286376953125, 0.046173095703125, 0.044403076171875, -0.0030517578125, 0.0095062255859375, 0.00792694091796875, 0.0743408203125, -0.046295166015625, -0.031524658203125, -0.06488037109375, 0.049560546875, -0.0007696151733398438, -0.042144775390625, 0.0567626953125, 0.049560546875, 0.05755615234375, 0.0114288330078125, 0.04779052734375, -0.0212554931640625, 0.0162200927734375, -0.043182373046875, 0.051177978515625, -0.043670654296875, 0.0100250244140625, -0.0198974609375, -0.07257080078125, -0.0084228515625, 0.04302978515625, -0.03570556640625, 0.035552978515625, 0.05767822265625, 0.06304931640625, 0.00511932373046875, 0.00543975830078125, 0.005035400390625, 0.0212860107421875, 0.0224761962890625, 0.06494140625, 0.036163330078125, -0.0635986328125, 0.0576171875, -0.030181884765625, -0.01548004150390625, -0.0103912353515625, -0.050628662109375, -0.0560302734375, -0.0396728515625, -0.032867431640625, -0.0310516357421875, -0.003086090087890625, 0.05767822265625, 0.053497314453125, -0.050048828125, -0.0185546875, -0.0307769775390625, -0.01464080810546875, -0.01678466796875, -0.02056884765625, 0.050079345703125, -0.0200042724609375, -0.05706787109375, 0.005779266357421875, -0.007343292236328125, 0.0215911865234375, -0.024383544921875, -0.0286712646484375, -0.0157928466796875, 0.00142669677734375, 
0.0238189697265625, 0.0250244140625, -0.0421142578125, -0.017425537109375, -0.0038051605224609375, -0.02459716796875, 0.00846099853515625, 0.032958984375, -0.04766845703125, 0.0006799697875976562, 0.034027099609375, 0.0234527587890625, 0.07269287109375, -0.00908660888671875, 0.016021728515625, -0.037139892578125, 0.01555633544921875, 0.0085601806640625, 0.0426025390625, 0.016845703125, -0.031219482421875, 0.047332763671875, 0.0285797119140625, -0.05950927734375, -0.059051513671875, -0.0069732666015625, -0.07220458984375, -0.015289306640625, 0.08355712890625, -0.01119232177734375, -0.0290985107421875, 0.017303466796875, -0.014373779296875, 0.0276031494140625, -0.0180511474609375, 0.045257568359375, 0.052734375, -0.004665374755859375, -0.01262664794921875, -0.053375244140625, 0.0281829833984375, 0.041595458984375, -0.054901123046875, -0.0026397705078125, 0.021942138671875, 0.031585693359375, 0.0151214599609375, 0.0491943359375, -0.022674560546875, 0.0157623291015625, 0.008636474609375, 0.02056884765625, -0.00039315223693847656, -0.00666046142578125, -0.0416259765625, 0.0119171142578125, -0.0051727294921875, -0.0079803466796875 ] ]
huggyllama/llama-65b
2023-04-07T15:51:00.000Z
[ "transformers", "pytorch", "safetensors", "llama", "text-generation", "license:other", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
huggyllama
null
null
huggyllama/llama-65b
64
8,241
transformers
2023-04-04T01:43:00
--- license: other --- This contains the weights for the LLaMA-65b model. This model is under a non-commercial license (see the LICENSE file). You should only use this repository if you have been granted access to the model by filling out [this form](https://docs.google.com/forms/d/e/1FAIpQLSfqNECQnMkycAp2jP4Z9TFX0cGR4uf7b_fBxjY_OjhJILlKGA/viewform?usp=send_form) but either lost your copy of the weights or got some trouble converting them to the Transformers format.
473
[ [ 0.002788543701171875, -0.007373809814453125, 0.0262908935546875, 0.041778564453125, -0.040252685546875, -0.00774383544921875, 0.021575927734375, -0.02764892578125, 0.027435302734375, 0.0498046875, -0.048736572265625, -0.0312042236328125, -0.055572509765625, 0.01107025146484375, -0.036712646484375, 0.0848388671875, -0.011627197265625, 0.0238494873046875, -0.02630615234375, 0.0006394386291503906, 0.000827789306640625, -0.01702880859375, -0.01107025146484375, -0.0491943359375, 0.042205810546875, 0.0323486328125, 0.048583984375, 0.0269012451171875, 0.058929443359375, 0.02020263671875, -0.0179443359375, -0.044647216796875, -0.039703369140625, -0.0244293212890625, 0.00359344482421875, -0.0640869140625, -0.083984375, 0.0011606216430664062, 0.049285888671875, 0.029144287109375, -0.04217529296875, 0.03155517578125, -0.033416748046875, 0.033538818359375, -0.03338623046875, 0.033477783203125, -0.0305633544921875, 0.005092620849609375, -0.03240966796875, -0.0101165771484375, -0.0258331298828125, -0.0186309814453125, -0.0178070068359375, -0.05596923828125, 0.005519866943359375, 0.003635406494140625, 0.08624267578125, 0.030792236328125, -0.049591064453125, -0.016357421875, -0.0198822021484375, 0.0616455078125, -0.0633544921875, 0.0084686279296875, 0.03277587890625, 0.038848876953125, -0.01163482666015625, -0.060791015625, -0.023284912109375, -0.0214385986328125, 0.00891876220703125, 0.00733184814453125, -0.030120849609375, 0.00797271728515625, 0.0266571044921875, 0.04986572265625, -0.030487060546875, -0.00013899803161621094, -0.08184814453125, -0.0120086669921875, 0.08056640625, 0.006252288818359375, 0.040252685546875, -0.0379638671875, -0.07476806640625, -0.0279541015625, -0.05267333984375, 0.0029811859130859375, 0.055206298828125, 0.0008678436279296875, -0.06256103515625, 0.093017578125, -0.01207733154296875, 0.020111083984375, 0.0018091201782226562, -0.014434814453125, 0.052154541015625, 0.01177215576171875, -0.027557373046875, 0.004894256591796875, 0.032989501953125, 
0.0423583984375, 0.024932861328125, -0.00811767578125, -0.0198516845703125, -0.0023059844970703125, 0.0257568359375, -0.0550537109375, -0.0165252685546875, -0.00267791748046875, -0.041900634765625, -0.0182952880859375, 0.00714111328125, -0.02703857421875, -0.0171661376953125, -0.01175689697265625, 0.02734375, 0.00266265869140625, -0.029571533203125, 0.03314208984375, -0.0256805419921875, 0.033538818359375, 0.0087738037109375, -0.052001953125, 0.0289459228515625, -0.0011587142944335938, 0.038665771484375, 0.0185394287109375, -0.0020809173583984375, 0.000028789043426513672, 0.030609130859375, -0.0023059844970703125, 0.0506591796875, -0.01702880859375, -0.0743408203125, -0.01186370849609375, 0.040557861328125, 0.0014085769653320312, -0.03863525390625, 0.053009033203125, -0.068115234375, -0.00363922119140625, -0.018768310546875, -0.0240325927734375, -0.04339599609375, -0.01328277587890625, -0.07611083984375, 0.07073974609375, 0.0292816162109375, -0.052215576171875, 0.0208740234375, -0.038238525390625, -0.01465606689453125, 0.027008056640625, 0.004375457763671875, -0.042144775390625, 0.0167694091796875, -0.00803375244140625, 0.022979736328125, -0.01558685302734375, 0.0169219970703125, -0.037017822265625, -0.033905029296875, 0.0037326812744140625, -0.008697509765625, 0.09222412109375, 0.016632080078125, -0.006992340087890625, 0.00494384765625, -0.066650390625, -0.0249176025390625, 0.03851318359375, -0.0552978515625, 0.0038776397705078125, -0.028594970703125, 0.01358795166015625, -0.00017142295837402344, 0.0509033203125, -0.044097900390625, 0.032012939453125, 0.00809478759765625, 0.0088653564453125, 0.06024169921875, -0.00583648681640625, 0.03704833984375, -0.036865234375, 0.072509765625, -0.0012292861938476562, 0.024810791015625, 0.034027099609375, -0.046783447265625, -0.0428466796875, -0.01189422607421875, -0.0001322031021118164, 0.018524169921875, -0.03558349609375, 0.017242431640625, -0.0225067138671875, -0.06390380859375, -0.0219573974609375, 0.00888824462890625, 
0.0139923095703125, 0.0201416015625, 0.0192413330078125, -0.025360107421875, -0.032012939453125, -0.08538818359375, 0.0266265869140625, -0.0142822265625, -0.0244140625, 0.0426025390625, 0.0738525390625, -0.021484375, 0.035400390625, -0.04327392578125, -0.0292816162109375, -0.0188751220703125, -0.00147247314453125, 0.01369476318359375, 0.020172119140625, 0.07733154296875, -0.052886962890625, -0.0023174285888671875, -0.019866943359375, -0.0626220703125, -0.0298309326171875, 0.0084686279296875, -0.01043701171875, -0.0030155181884765625, -0.004573822021484375, -0.055419921875, 0.033843994140625, 0.07550048828125, -0.0167694091796875, 0.0364990234375, -0.020355224609375, -0.01248931884765625, -0.075927734375, 0.0261077880859375, 0.010650634765625, -0.0169830322265625, 0.01071929931640625, 0.02325439453125, 0.00882720947265625, 0.0022258758544921875, -0.05572509765625, 0.0250701904296875, -0.027862548828125, -0.0298309326171875, -0.002132415771484375, 0.0015668869018554688, 0.00617218017578125, 0.022857666015625, -0.02655029296875, 0.06402587890625, 0.022247314453125, -0.0345458984375, 0.04443359375, 0.035919189453125, -0.030975341796875, 0.0271759033203125, -0.07147216796875, 0.002918243408203125, -0.0277862548828125, 0.0426025390625, -0.0340576171875, -0.0310211181640625, 0.03155517578125, -0.0172271728515625, 0.00801849365234375, -0.01366424560546875, -0.00624847412109375, -0.0167236328125, -0.0208740234375, 0.038970947265625, 0.02642822265625, -0.02386474609375, 0.06201171875, 0.03924560546875, -0.0038299560546875, -0.053314208984375, -0.08197021484375, -0.0224151611328125, -0.0246429443359375, -0.035308837890625, 0.0428466796875, -0.007274627685546875, -0.021240234375, 0.020660400390625, -0.00893402099609375, -0.01314544677734375, -0.0013475418090820312, 0.033447265625, 0.0262451171875, -0.00530242919921875, -0.00856781005859375, 0.012939453125, -0.0107574462890625, 0.0169525146484375, 0.01837158203125, 0.038116455078125, -0.005504608154296875, -0.01528167724609375, 
-0.048858642578125, -0.004116058349609375, 0.0386962890625, -0.004878997802734375, 0.042022705078125, 0.016082763671875, -0.035369873046875, -0.00949859619140625, -0.019439697265625, -0.0010061264038085938, -0.036041259765625, 0.0265960693359375, -0.0214996337890625, -0.0171661376953125, 0.04180908203125, 0.0092315673828125, -0.001983642578125, 0.0693359375, 0.060333251953125, 0.0005278587341308594, 0.0382080078125, 0.058929443359375, -0.01023101806640625, 0.0345458984375, -0.046630859375, -0.01343536376953125, -0.08062744140625, -0.0635986328125, -0.043365478515625, -0.05609130859375, -0.01250457763671875, -0.0215606689453125, 0.01039886474609375, 0.0013637542724609375, -0.042938232421875, 0.052978515625, -0.0160980224609375, 0.0186614990234375, 0.03607177734375, 0.02972412109375, 0.04461669921875, -0.0006732940673828125, -0.004161834716796875, 0.0030517578125, -0.0269927978515625, -0.04315185546875, 0.091064453125, 0.01499176025390625, 0.0889892578125, 0.0238800048828125, 0.04559326171875, 0.0296478271484375, 0.029327392578125, -0.061004638671875, 0.044342041015625, 0.00122833251953125, -0.07550048828125, 0.00756072998046875, -0.03558349609375, -0.06427001953125, 0.00969696044921875, 0.00023674964904785156, -0.04656982421875, 0.01134490966796875, -0.01149749755859375, 0.007770538330078125, 0.036712646484375, -0.048919677734375, 0.035369873046875, -0.019134521484375, 0.0218353271484375, -0.033172607421875, -0.0251617431640625, 0.052032470703125, 0.01053619384765625, 0.006397247314453125, -0.020477294921875, -0.00421142578125, 0.0643310546875, -0.026763916015625, 0.07354736328125, -0.025848388671875, -0.02301025390625, 0.035430908203125, -0.0122833251953125, 0.03485107421875, 0.0178070068359375, -0.0119781494140625, 0.0234222412109375, -0.0030841827392578125, -0.0240325927734375, -0.005283355712890625, 0.040863037109375, -0.0869140625, -0.030914306640625, -0.037078857421875, -0.04937744140625, 0.043212890625, 0.01001739501953125, 0.0144500732421875, 
-0.0009775161743164062, 0.02069091796875, 0.05072021484375, 0.028076171875, -0.012725830078125, 0.02587890625, 0.02978515625, -0.00876617431640625, -0.0158538818359375, 0.03082275390625, 0.005764007568359375, 0.0211029052734375, 0.0200042724609375, 0.01551055908203125, -0.0347900390625, -0.016845703125, -0.03717041015625, 0.0299835205078125, -0.06915283203125, -0.028594970703125, -0.0312347412109375, -0.0112457275390625, -0.01104736328125, -0.00909423828125, -0.0106201171875, -0.0538330078125, -0.03936767578125, -0.0204010009765625, 0.0438232421875, 0.06719970703125, -0.0010814666748046875, 0.06402587890625, -0.052978515625, 0.0201416015625, 0.0192413330078125, 0.01467132568359375, 0.003692626953125, -0.052581787109375, -0.0016870498657226562, -0.00482940673828125, -0.04571533203125, -0.0616455078125, 0.01537322998046875, -0.00014483928680419922, 0.03619384765625, 0.0209197998046875, -0.0029010772705078125, 0.03448486328125, -0.01285552978515625, 0.07733154296875, 0.0185089111328125, -0.060699462890625, 0.005710601806640625, -0.0204315185546875, 0.0028209686279296875, 0.01314544677734375, 0.0408935546875, -0.00804901123046875, 0.01384735107421875, -0.05389404296875, -0.06024169921875, 0.0469970703125, 0.004390716552734375, 0.0160675048828125, 0.041595458984375, 0.016143798828125, -0.0025634765625, 0.0255279541015625, -0.089599609375, -0.00769805908203125, -0.020660400390625, -0.025177001953125, 0.032196044921875, -0.017242431640625, -0.03076171875, -0.004016876220703125, 0.06292724609375, 0.00545501708984375, 0.0037784576416015625, -0.004673004150390625, -0.0225677490234375, -0.034271240234375, 0.007015228271484375, 0.039093017578125, 0.038055419921875, -0.031524658203125, -0.01580810546875, 0.026153564453125, -0.057586669921875, 0.00014138221740722656, 0.01389312744140625, -0.004413604736328125, -0.01971435546875, 0.01873779296875, 0.0306854248046875, 0.045654296875, -0.03857421875, 0.04595947265625, 0.003810882568359375, -0.033538818359375, -0.0325927734375, 
0.0003063678741455078, 0.0180511474609375, 0.045135498046875, 0.0228271484375, -0.00543212890625, 0.01361083984375, -0.0219573974609375, 0.0026092529296875, 0.028533935546875, 0.0009713172912597656, -0.03961181640625, 0.079833984375, 0.01062774658203125, -0.00247955322265625, 0.044586181640625, 0.006977081298828125, -0.0026493072509765625, 0.06378173828125, 0.03961181640625, 0.05682373046875, -0.005886077880859375, 0.01485443115234375, 0.01180267333984375, 0.0185394287109375, -0.005645751953125, 0.057647705078125, 0.003692626953125, -0.0249786376953125, -0.0274658203125, -0.066162109375, -0.047149658203125, -0.007755279541015625, -0.058837890625, 0.045074462890625, -0.038726806640625, -0.02825927734375, -0.0187835693359375, 0.005077362060546875, -0.04364013671875, 0.034881591796875, 0.0187225341796875, 0.068115234375, -0.046844482421875, 0.055206298828125, 0.05645751953125, -0.071044921875, -0.07769775390625, -0.039764404296875, 0.00955963134765625, -0.09735107421875, 0.056182861328125, -0.00983428955078125, -0.0192413330078125, -0.00670623779296875, -0.0592041015625, -0.08294677734375, 0.1085205078125, 0.04119873046875, -0.039306640625, -0.04132080078125, 0.00351715087890625, -0.0002961158752441406, -0.0006208419799804688, 0.0196533203125, -0.006168365478515625, 0.042510986328125, 0.0266876220703125, -0.0648193359375, 0.003673553466796875, -0.00830078125, -0.004146575927734375, -0.006103515625, -0.07342529296875, 0.08465576171875, -0.026580810546875, -0.0146026611328125, 0.0220489501953125, 0.035614013671875, 0.032684326171875, -0.002529144287109375, 0.027191162109375, 0.057861328125, 0.0555419921875, -0.0006985664367675781, 0.0867919921875, -0.01432037353515625, 0.056640625, 0.038543701171875, -0.02545166015625, 0.0513916015625, 0.03582763671875, -0.034027099609375, 0.06866455078125, 0.0286712646484375, -0.031707763671875, 0.0216217041015625, 0.033966064453125, -0.0112152099609375, -0.00423431396484375, -0.0231475830078125, -0.054473876953125, 0.035675048828125, 
0.004291534423828125, -0.037078857421875, -0.0258331298828125, -0.041229248046875, 0.006893157958984375, -0.0273895263671875, -0.020172119140625, 0.0193328857421875, 0.041351318359375, 0.01971435546875, 0.0272216796875, 0.01154327392578125, 0.04241943359375, -0.0679931640625, 0.0026702880859375, 0.00860595703125, 0.015228271484375, -0.042327880859375, -0.043304443359375, 0.02392578125, 0.007427215576171875, -0.0134124755859375, -0.01141357421875, 0.0347900390625, 0.01177215576171875, -0.055450439453125, 0.028411865234375, 0.004058837890625, 0.01617431640625, 0.021697998046875, -0.047027587890625, 0.0198822021484375, -0.0203094482421875, -0.03875732421875, 0.0132904052734375, -0.00525665283203125, -0.0039825439453125, 0.073974609375, 0.043914794921875, 0.0074005126953125, 0.015838623046875, 0.0133819580078125, 0.0745849609375, -0.04150390625, -0.023406982421875, -0.029937744140625, 0.0595703125, 0.011474609375, -0.044952392578125, 0.0423583984375, 0.0396728515625, 0.07696533203125, -0.0345458984375, 0.039520263671875, -0.02227783203125, -0.0198822021484375, -0.042083740234375, 0.07611083984375, -0.06927490234375, -0.005313873291015625, -0.01360321044921875, -0.07135009765625, -0.027099609375, 0.051971435546875, -0.00035309791564941406, 0.006103515625, 0.035552978515625, 0.038299560546875, 0.004230499267578125, 0.00885009765625, 0.00621795654296875, 0.005695343017578125, 0.03094482421875, 0.04833984375, 0.033843994140625, -0.05352783203125, 0.054351806640625, -0.009552001953125, -0.0182647705078125, -0.009063720703125, -0.0701904296875, -0.054229736328125, -0.010894775390625, -0.0057373046875, -0.0027751922607421875, -0.032196044921875, 0.061126708984375, 0.048065185546875, -0.0296478271484375, -0.031829833984375, 0.01456451416015625, -0.0014963150024414062, 0.0032138824462890625, -0.01253509521484375, 0.01959228515625, 0.0214385986328125, -0.072998046875, 0.0240325927734375, -0.005649566650390625, 0.04144287109375, -0.030792236328125, -0.000766754150390625, 
-0.0036067962646484375, -0.0136566162109375, 0.0168914794921875, 0.004680633544921875, -0.05224609375, -0.0018739700317382812, 0.0013341903686523438, -0.00426483154296875, -0.0019521713256835938, 0.0290679931640625, -0.040191650390625, 0.00954437255859375, 0.039794921875, 0.029205322265625, 0.044036865234375, 0.005496978759765625, 0.037445068359375, -0.04571533203125, 0.034942626953125, 0.017578125, 0.061279296875, 0.01885986328125, -0.02410888671875, 0.032989501953125, 0.021331787109375, -0.049224853515625, -0.042999267578125, 0.01107025146484375, -0.10504150390625, 0.01226043701171875, 0.06988525390625, -0.00925445556640625, -0.035736083984375, 0.041412353515625, -0.029876708984375, 0.018310546875, -0.038970947265625, 0.056427001953125, 0.048828125, 0.01708984375, -0.0208740234375, -0.0443115234375, 0.0080108642578125, 0.00888824462890625, -0.047607421875, -0.0305633544921875, 0.02728271484375, 0.029541015625, 0.01123046875, 0.0200347900390625, -0.049102783203125, -0.0027446746826171875, -0.00133514404296875, 0.02972412109375, -0.0031528472900390625, -0.0199127197265625, -0.0179901123046875, -0.006916046142578125, -0.006317138671875, -0.022613525390625 ] ]
roberta-large-openai-detector
2022-07-22T08:07:41.000Z
[ "transformers", "pytorch", "jax", "roberta", "text-classification", "exbert", "en", "dataset:bookcorpus", "dataset:wikipedia", "arxiv:1904.09751", "arxiv:1910.09700", "license:mit", "endpoints_compatible", "has_space", "region:us" ]
text-classification
null
null
null
roberta-large-openai-detector
8
8,231
transformers
2022-03-02T23:29:04
--- language: en license: mit tags: - exbert datasets: - bookcorpus - wikipedia --- # RoBERTa Large OpenAI Detector ## Table of Contents - [Model Details](#model-details) - [Uses](#uses) - [Risks, Limitations and Biases](#risks-limitations-and-biases) - [Training](#training) - [Evaluation](#evaluation) - [Environmental Impact](#environmental-impact) - [Technical Specifications](#technical-specifications) - [Citation Information](#citation-information) - [Model Card Authors](#model-card-authors) - [How To Get Started With the Model](#how-to-get-started-with-the-model) ## Model Details **Model Description:** RoBERTa large OpenAI Detector is the GPT-2 output detector model, obtained by fine-tuning a RoBERTa large model with the outputs of the 1.5B-parameter GPT-2 model. The model can be used to predict if text was generated by a GPT-2 model. This model was released by OpenAI at the same time as OpenAI released the weights of the [largest GPT-2 model](https://huggingface.co/gpt2-xl), the 1.5B parameter version. - **Developed by:** OpenAI, see [GitHub Repo](https://github.com/openai/gpt-2-output-dataset/tree/master/detector) and [associated paper](https://d4mucfpksywv.cloudfront.net/papers/GPT_2_Report.pdf) for full author list - **Model Type:** Fine-tuned transformer-based language model - **Language(s):** English - **License:** MIT - **Related Models:** [RoBERTa large](https://huggingface.co/roberta-large), [GPT-XL (1.5B parameter version)](https://huggingface.co/gpt2-xl), [GPT-Large (the 774M parameter version)](https://huggingface.co/gpt2-large), [GPT-Medium (the 355M parameter version)](https://huggingface.co/gpt2-medium) and [GPT-2 (the 124M parameter version)](https://huggingface.co/gpt2) - **Resources for more information:** - [Research Paper](https://d4mucfpksywv.cloudfront.net/papers/GPT_2_Report.pdf) (see, in particular, the section beginning on page 12 about Automated ML-based detection). 
- [GitHub Repo](https://github.com/openai/gpt-2-output-dataset/tree/master/detector) - [OpenAI Blog Post](https://openai.com/blog/gpt-2-1-5b-release/) - [Explore the detector model here](https://huggingface.co/openai-detector ) ## Uses #### Direct Use The model is a classifier that can be used to detect text generated by GPT-2 models. #### Downstream Use The model's developers have stated that they developed and released the model to help with research related to synthetic text generation, so the model could potentially be used for downstream tasks related to synthetic text generation. See the [associated paper](https://d4mucfpksywv.cloudfront.net/papers/GPT_2_Report.pdf) for further discussion. #### Misuse and Out-of-scope Use The model should not be used to intentionally create hostile or alienating environments for people. In addition, the model developers discuss the risk of adversaries using the model to better evade detection in their [associated paper](https://d4mucfpksywv.cloudfront.net/papers/GPT_2_Report.pdf), suggesting that using the model for evading detection or for supporting efforts to evade detection would be a misuse of the model. ## Risks, Limitations and Biases **CONTENT WARNING: Readers should be aware this section may contain content that is disturbing, offensive, and can propagate historical and current stereotypes.** Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. #### Risks and Limitations In their [associated paper](https://d4mucfpksywv.cloudfront.net/papers/GPT_2_Report.pdf), the model developers discuss the risk that the model may be used by bad actors to develop capabilities for evading detection, though one purpose of releasing the model is to help improve detection research. 
In a related [blog post](https://openai.com/blog/gpt-2-1-5b-release/), the model developers also discuss the limitations of automated methods for detecting synthetic text and the need to pair automated detection tools with other, non-automated approaches. They write: > We conducted in-house detection research and developed a detection model that has detection rates of ~95% for detecting 1.5B GPT-2-generated text. We believe this is not high enough accuracy for standalone detection and needs to be paired with metadata-based approaches, human judgment, and public education to be more effective. The model developers also [report](https://openai.com/blog/gpt-2-1-5b-release/) finding that classifying content from larger models is more difficult, suggesting that detection with automated tools like this model will be increasingly difficult as model sizes increase. The authors find that training detector models on the outputs of larger models can improve accuracy and robustness. #### Bias Significant research has explored bias and fairness issues with language models (see, e.g., [Sheng et al. (2021)](https://aclanthology.org/2021.acl-long.330.pdf) and [Bender et al. (2021)](https://dl.acm.org/doi/pdf/10.1145/3442188.3445922)). Predictions generated by RoBERTa large and GPT-2 1.5B (which this model is built/fine-tuned on) can include disturbing and harmful stereotypes across protected classes; identity characteristics; and sensitive, social, and occupational groups (see the [RoBERTa large](https://huggingface.co/roberta-large) and [GPT-2 XL](https://huggingface.co/gpt2-xl) model cards for more information). The developers of this model discuss these issues further in their [paper](https://d4mucfpksywv.cloudfront.net/papers/GPT_2_Report.pdf). 
## Training #### Training Data The model is a sequence classifier based on RoBERTa large (see the [RoBERTa large model card](https://huggingface.co/roberta-large) for more details on the RoBERTa large training data) and then fine-tuned using the outputs of the 1.5B GPT-2 model (available [here](https://github.com/openai/gpt-2-output-dataset)). #### Training Procedure The model developers write that: > We based a sequence classifier on RoBERTaLARGE (355 million parameters) and fine-tuned it to classify the outputs from the 1.5B GPT-2 model versus WebText, the dataset we used to train the GPT-2 model. They later state: > To develop a robust detector model that can accurately classify generated texts regardless of the sampling method, we performed an analysis of the model’s transfer performance. See the [associated paper](https://d4mucfpksywv.cloudfront.net/papers/GPT_2_Report.pdf) for further details on the training procedure. ## Evaluation The following evaluation information is extracted from the [associated paper](https://d4mucfpksywv.cloudfront.net/papers/GPT_2_Report.pdf). #### Testing Data, Factors and Metrics The model is intended to be used for detecting text generated by GPT-2 models, so the model developers test the model on text datasets, measuring accuracy by: > testing 510-token test examples comprised of 5,000 samples from the WebText dataset and 5,000 samples generated by a GPT-2 model, which were not used during the training. #### Results The model developers [find](https://d4mucfpksywv.cloudfront.net/papers/GPT_2_Report.pdf): > Our classifier is able to detect 1.5 billion parameter GPT-2-generated text with approximately 95% accuracy...The model’s accuracy depends on sampling methods used when generating outputs, like temperature, Top-K, and nucleus sampling ([Holtzman et al., 2019](https://arxiv.org/abs/1904.09751). 
Nucleus sampling outputs proved most difficult to correctly classify, but a detector trained using nucleus sampling transfers well across other sampling methods. As seen in Figure 1 [in the paper], we found consistently high accuracy when trained on nucleus sampling. See the [associated paper](https://d4mucfpksywv.cloudfront.net/papers/GPT_2_Report.pdf), Figure 1 (on page 14) and Figure 2 (on page 16) for full results. ## Environmental Impact Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** Unknown - **Hours used:** Unknown - **Cloud Provider:** Unknown - **Compute Region:** Unknown - **Carbon Emitted:** Unknown ## Technical Specifications The model developers write that: See the [associated paper](https://d4mucfpksywv.cloudfront.net/papers/GPT_2_Report.pdf) for further details on the modeling architecture and training details. ## Citation Information ```bibtex @article{solaiman2019release, title={Release strategies and the social impacts of language models}, author={Solaiman, Irene and Brundage, Miles and Clark, Jack and Askell, Amanda and Herbert-Voss, Ariel and Wu, Jeff and Radford, Alec and Krueger, Gretchen and Kim, Jong Wook and Kreps, Sarah and others}, journal={arXiv preprint arXiv:1908.09203}, year={2019} } ``` APA: - Solaiman, I., Brundage, M., Clark, J., Askell, A., Herbert-Voss, A., Wu, J., ... & Wang, J. (2019). Release strategies and the social impacts of language models. arXiv preprint arXiv:1908.09203. ## Model Card Authors This model card was written by the team at Hugging Face. ## How to Get Started with the Model More information needed
9,182
[ [ -0.02093505859375, -0.058502197265625, 0.040802001953125, -0.0177764892578125, -0.022003173828125, -0.036590576171875, -0.0200042724609375, -0.0665283203125, -0.03131103515625, 0.0270233154296875, -0.018768310546875, -0.0255889892578125, -0.070556640625, -0.005889892578125, -0.033599853515625, 0.095458984375, -0.0117034912109375, -0.0090789794921875, 0.01448822021484375, 0.0180511474609375, -0.0269775390625, -0.049072265625, -0.06201171875, -0.006267547607421875, 0.02044677734375, -0.00479888916015625, 0.03887939453125, 0.034942626953125, 0.036895751953125, 0.01788330078125, -0.0100860595703125, -0.00006157159805297852, -0.032958984375, -0.018646240234375, -0.007099151611328125, -0.0271453857421875, -0.032470703125, 0.02264404296875, 0.0452880859375, 0.0009398460388183594, 0.01261138916015625, 0.0018520355224609375, 0.007678985595703125, 0.03143310546875, -0.0282745361328125, 0.0178985595703125, -0.03973388671875, -0.0168609619140625, -0.0289764404296875, 0.00821685791015625, -0.049713134765625, -0.0018053054809570312, 0.01055908203125, -0.035736083984375, 0.03411865234375, -0.0182037353515625, 0.07965087890625, 0.01141357421875, -0.04315185546875, -0.034423828125, -0.06488037109375, 0.057037353515625, -0.050384521484375, 0.01328277587890625, 0.040740966796875, 0.0258331298828125, 0.00418853759765625, -0.0657958984375, -0.05072021484375, -0.01776123046875, -0.01617431640625, 0.0253753662109375, -0.03228759765625, -0.0087738037109375, 0.0294036865234375, 0.027374267578125, -0.0755615234375, 0.0119171142578125, -0.028106689453125, -0.0227203369140625, 0.045440673828125, -0.0107879638671875, 0.0244293212890625, -0.0184783935546875, -0.0235595703125, -0.0196380615234375, -0.03594970703125, -0.00836181640625, 0.05023193359375, 0.0188751220703125, -0.023712158203125, 0.032989501953125, 0.003726959228515625, 0.03533935546875, -0.016357421875, 0.00386810302734375, 0.01471710205078125, -0.051300048828125, -0.003021240234375, -0.0205535888671875, 0.07342529296875, 
0.0114898681640625, 0.0192718505859375, 0.0023441314697265625, -0.019989013671875, 0.0135498046875, 0.01273345947265625, -0.081787109375, -0.032928466796875, 0.0168609619140625, -0.032745361328125, -0.03192138671875, 0.0248565673828125, -0.05902099609375, 0.0023479461669921875, -0.0118865966796875, 0.03326416015625, -0.0253448486328125, -0.035888671875, -0.0048370361328125, -0.02239990234375, 0.0140838623046875, 0.01538848876953125, -0.0665283203125, 0.01538848876953125, 0.0533447265625, 0.07037353515625, -0.00600433349609375, 0.0025482177734375, -0.01995849609375, -0.00916290283203125, -0.00909423828125, 0.058868408203125, -0.01508331298828125, -0.0229339599609375, -0.01482391357421875, 0.0076904296875, 0.0086669921875, -0.016265869140625, 0.0474853515625, -0.032379150390625, 0.052886962890625, -0.0162353515625, -0.03350830078125, -0.01030731201171875, -0.0021209716796875, -0.045745849609375, 0.0810546875, 0.033721923828125, -0.05853271484375, 0.019317626953125, -0.04473876953125, -0.023773193359375, -0.00597381591796875, 0.01129913330078125, -0.051025390625, -0.01345062255859375, 0.0103607177734375, 0.01070404052734375, -0.0311737060546875, 0.03143310546875, -0.005420684814453125, -0.0304718017578125, 0.01213836669921875, -0.04840087890625, 0.0775146484375, 0.034576416015625, -0.043243408203125, -0.0021190643310546875, -0.032379150390625, 0.00627899169921875, 0.0208892822265625, -0.022796630859375, -0.004840850830078125, -0.0095367431640625, 0.038787841796875, 0.019378662109375, 0.00841522216796875, -0.0303192138671875, 0.00431060791015625, -0.02032470703125, 0.0408935546875, 0.052398681640625, -0.0079345703125, 0.024932861328125, -0.0170440673828125, 0.0306549072265625, -0.004619598388671875, 0.0150146484375, -0.01119232177734375, -0.0672607421875, -0.040771484375, -0.01477813720703125, 0.04266357421875, 0.053924560546875, -0.04705810546875, 0.05169677734375, -0.0154571533203125, -0.048797607421875, -0.00748443603515625, -0.01067352294921875, 0.053741455078125, 
0.0192413330078125, 0.03289794921875, -0.02587890625, -0.04766845703125, -0.0535888671875, -0.01715087890625, -0.02496337890625, -0.0135498046875, 0.01114654541015625, 0.06597900390625, -0.01158905029296875, 0.0634765625, -0.03997802734375, -0.0220184326171875, -0.0218658447265625, 0.020782470703125, -0.006214141845703125, 0.04150390625, 0.05059814453125, -0.07598876953125, -0.0418701171875, -0.0143890380859375, -0.06060791015625, 0.0006732940673828125, 0.004825592041015625, 0.002529144287109375, 0.050567626953125, 0.0213470458984375, -0.051055908203125, 0.035736083984375, 0.033599853515625, -0.032135009765625, 0.039154052734375, -0.00290679931640625, -0.0014982223510742188, -0.0902099609375, 0.0185699462890625, 0.0282745361328125, -0.0096588134765625, -0.0677490234375, 0.011871337890625, 0.0025653839111328125, -0.003917694091796875, -0.031341552734375, 0.053192138671875, -0.0254669189453125, -0.0051727294921875, -0.032135009765625, 0.0008792877197265625, -0.007335662841796875, 0.037933349609375, 0.01263427734375, 0.082275390625, 0.01221466064453125, -0.035858154296875, 0.0059967041015625, 0.0155181884765625, -0.026519775390625, 0.0263671875, -0.058258056640625, 0.030181884765625, -0.0029315948486328125, 0.04217529296875, -0.07421875, -0.02392578125, 0.041107177734375, -0.06268310546875, 0.04095458984375, -0.027435302734375, -0.052703857421875, -0.03997802734375, -0.01459503173828125, 0.0253143310546875, 0.06683349609375, -0.0300140380859375, 0.018585205078125, 0.0281219482421875, -0.0013170242309570312, -0.024993896484375, -0.0726318359375, 0.002986907958984375, -0.01200103759765625, -0.044891357421875, 0.023101806640625, 0.0019893646240234375, -0.007160186767578125, 0.0007114410400390625, 0.01088714599609375, -0.012298583984375, 0.00557708740234375, 0.00962066650390625, 0.0181884765625, -0.0166778564453125, 0.0139007568359375, -0.0094451904296875, -0.032684326171875, 0.00264739990234375, -0.04351806640625, 0.042327880859375, -0.005268096923828125, 
-0.0036869049072265625, -0.04644775390625, 0.022796630859375, 0.01702880859375, -0.01367950439453125, 0.0487060546875, 0.07940673828125, -0.036468505859375, 0.0123138427734375, -0.0230712890625, -0.0208740234375, -0.03204345703125, 0.044189453125, -0.0200958251953125, -0.06396484375, 0.024932861328125, 0.032135009765625, -0.0178985595703125, 0.06689453125, 0.04742431640625, 0.0181121826171875, 0.08331298828125, 0.042144775390625, -0.0094451904296875, 0.0390625, -0.0276336669921875, 0.029052734375, -0.077392578125, -0.00396728515625, -0.055755615234375, -0.003887176513671875, -0.0677490234375, -0.02880859375, 0.0155181884765625, 0.0094451904296875, -0.0301513671875, 0.03997802734375, -0.053985595703125, 0.02044677734375, 0.047882080078125, 0.0160675048828125, 0.0156097412109375, 0.0044708251953125, 0.0267486572265625, 0.002849578857421875, -0.0264892578125, -0.050872802734375, 0.11083984375, 0.041046142578125, 0.0189971923828125, 0.0303955078125, 0.025543212890625, 0.01531982421875, 0.025390625, -0.05364990234375, 0.0289306640625, -0.028106689453125, -0.065673828125, -0.02459716796875, -0.037353515625, -0.07061767578125, 0.00514984130859375, 0.007503509521484375, -0.06378173828125, 0.005573272705078125, 0.0055999755859375, -0.0005474090576171875, 0.0297698974609375, -0.058349609375, 0.06854248046875, -0.0095672607421875, -0.019195556640625, -0.0007562637329101562, -0.0460205078125, 0.04278564453125, -0.0168304443359375, -0.0003459453582763672, 0.0060577392578125, 0.006259918212890625, 0.059295654296875, -0.03363037109375, 0.060028076171875, -0.03045654296875, -0.01299285888671875, 0.047698974609375, -0.0179595947265625, 0.058013916015625, -0.0087738037109375, -0.0017137527465820312, 0.028778076171875, -0.01432037353515625, -0.0243988037109375, -0.0205230712890625, 0.03826904296875, -0.06591796875, -0.02813720703125, -0.052398681640625, -0.032501220703125, 0.017547607421875, 0.0186309814453125, 0.060760498046875, 0.03387451171875, -0.00490570068359375, 
0.006305694580078125, 0.051025390625, -0.0247039794921875, 0.017791748046875, 0.0247344970703125, -0.01053619384765625, -0.0216064453125, 0.061065673828125, 0.011077880859375, 0.0253143310546875, 0.0243988037109375, 0.0139923095703125, -0.037078857421875, -0.05438232421875, -0.03460693359375, 0.009613037109375, -0.0526123046875, -0.0134735107421875, -0.061370849609375, -0.028106689453125, -0.050384521484375, 0.02392578125, -0.0125579833984375, -0.02880859375, -0.0338134765625, -0.0109710693359375, 0.0335693359375, 0.052886962890625, 0.0017566680908203125, 0.03131103515625, -0.035736083984375, 0.0240936279296875, 0.01396942138671875, 0.0248260498046875, -0.0097808837890625, -0.0645751953125, -0.0016632080078125, 0.02191162109375, -0.032440185546875, -0.053375244140625, 0.0238494873046875, -0.003021240234375, 0.02447509765625, 0.007904052734375, -0.00695037841796875, 0.012542724609375, -0.01107025146484375, 0.08026123046875, -0.005336761474609375, -0.06732177734375, 0.053619384765625, -0.043975830078125, 0.0104827880859375, 0.0257720947265625, 0.027557373046875, -0.02630615234375, -0.0213165283203125, -0.047515869140625, -0.05975341796875, 0.06719970703125, 0.035736083984375, 0.0110321044921875, -0.004848480224609375, 0.0308074951171875, -0.01268768310546875, -0.00435638427734375, -0.06719970703125, -0.005523681640625, -0.0191650390625, -0.02215576171875, -0.0190277099609375, -0.037261962890625, 0.00437164306640625, -0.0078277587890625, 0.064208984375, -0.00371551513671875, 0.06622314453125, 0.01898193359375, -0.0111541748046875, -0.004215240478515625, 0.011993408203125, 0.06219482421875, 0.0211944580078125, -0.01329803466796875, -0.00341796875, -0.007335662841796875, -0.059722900390625, 0.006557464599609375, 0.019866943359375, -0.03631591796875, -0.00812530517578125, 0.0287322998046875, 0.069091796875, -0.011505126953125, -0.0289459228515625, 0.048126220703125, -0.0024318695068359375, -0.03326416015625, -0.03680419921875, 0.00823211669921875, -0.015167236328125, 
0.0040283203125, 0.021026611328125, 0.00946044921875, 0.023223876953125, -0.052215576171875, 0.021270751953125, 0.034393310546875, -0.0290985107421875, -0.0272979736328125, 0.06488037109375, 0.0018968582153320312, -0.0154266357421875, 0.056427001953125, -0.03985595703125, -0.056365966796875, 0.06500244140625, 0.052703857421875, 0.07244873046875, -0.007808685302734375, 0.016693115234375, 0.04754638671875, 0.03662109375, -0.025054931640625, 0.005809783935546875, 0.0160980224609375, -0.047210693359375, -0.0189056396484375, -0.03131103515625, -0.01207733154296875, 0.0311737060546875, -0.04290771484375, 0.020843505859375, -0.03509521484375, -0.01439666748046875, -0.0005927085876464844, 0.016326904296875, -0.042510986328125, 0.01134490966796875, 0.00740814208984375, 0.05987548828125, -0.0709228515625, 0.06951904296875, 0.025970458984375, -0.0450439453125, -0.06353759765625, 0.0144500732421875, 0.0266876220703125, -0.037506103515625, 0.033416748046875, 0.032745361328125, 0.0219879150390625, 0.0012416839599609375, -0.0401611328125, -0.060089111328125, 0.089599609375, -0.0006923675537109375, -0.0310211181640625, 0.008453369140625, 0.007755279541015625, 0.0582275390625, -0.00981903076171875, 0.040924072265625, 0.04345703125, 0.055633544921875, 0.002056121826171875, -0.06689453125, 0.0268402099609375, -0.02606201171875, 0.0169219970703125, 0.0272674560546875, -0.053955078125, 0.08221435546875, -0.0030879974365234375, -0.0265655517578125, 0.035736083984375, 0.031524658203125, 0.00411224365234375, 0.0289764404296875, 0.0163726806640625, 0.054534912109375, 0.0550537109375, -0.036224365234375, 0.1097412109375, 0.002471923828125, 0.038543701171875, 0.0810546875, -0.0097503662109375, 0.059722900390625, 0.0006570816040039062, -0.0311431884765625, 0.02813720703125, 0.04833984375, -0.0300140380859375, 0.04443359375, 0.004390716552734375, -0.0086212158203125, 0.01517486572265625, 0.007213592529296875, -0.040435791015625, 0.0303955078125, 0.0196533203125, -0.041107177734375, 
-0.006343841552734375, -0.0004000663757324219, 0.01708984375, -0.00397491455078125, 0.001750946044921875, 0.050872802734375, -0.006744384765625, -0.06060791015625, 0.03289794921875, 0.0244293212890625, 0.05535888671875, -0.042022705078125, 0.005611419677734375, -0.0014514923095703125, 0.02545166015625, -0.007755279541015625, -0.0634765625, 0.03338623046875, 0.0197296142578125, -0.01444244384765625, -0.0059356689453125, 0.058807373046875, -0.027557373046875, -0.031768798828125, 0.0181121826171875, 0.013214111328125, 0.027069091796875, -0.0295562744140625, -0.04833984375, 0.0031642913818359375, 0.00804901123046875, -0.0272674560546875, 0.03411865234375, 0.01788330078125, -0.01454925537109375, 0.0187835693359375, 0.06732177734375, -0.0133209228515625, -0.01337432861328125, -0.00112152099609375, 0.04620361328125, -0.0297698974609375, -0.026947021484375, -0.0521240234375, 0.0455322265625, -0.007091522216796875, -0.0347900390625, 0.04815673828125, 0.0560302734375, 0.07550048828125, -0.0015344619750976562, 0.0804443359375, -0.0215606689453125, 0.03387451171875, -0.050445556640625, 0.04559326171875, -0.022186279296875, 0.018280029296875, -0.0131378173828125, -0.07720947265625, -0.01425933837890625, 0.047271728515625, -0.030029296875, 0.037078857421875, 0.057586669921875, 0.057861328125, -0.0008597373962402344, 0.012451171875, -0.0018663406372070312, 0.01910400390625, 0.048431396484375, 0.05426025390625, 0.0301666259765625, -0.07403564453125, 0.0443115234375, -0.0143890380859375, -0.0244598388671875, -0.01507568359375, -0.05145263671875, -0.0645751953125, -0.05078125, -0.02490234375, -0.043243408203125, 0.01107025146484375, 0.032501220703125, 0.0462646484375, -0.06109619140625, 0.0099334716796875, -0.0347900390625, -0.0134429931640625, -0.00830078125, -0.0221405029296875, 0.0323486328125, -0.006130218505859375, -0.06536865234375, 0.0030536651611328125, -0.007080078125, 0.030303955078125, -0.0140838623046875, -0.01447296142578125, -0.01038360595703125, -0.013031005859375, 
0.036590576171875, 0.0194549560546875, -0.056976318359375, -0.03631591796875, -0.01436614990234375, -0.0240631103515625, -0.008087158203125, 0.046051025390625, -0.04254150390625, 0.0160064697265625, 0.037811279296875, 0.023895263671875, 0.057281494140625, -0.00734710693359375, 0.0263671875, -0.04022216796875, 0.02313232421875, 0.00557708740234375, 0.01165771484375, 0.0186767578125, -0.05633544921875, 0.065185546875, 0.03326416015625, -0.051483154296875, -0.06146240234375, 0.0255889892578125, -0.076171875, -0.029937744140625, 0.10302734375, -0.0162506103515625, -0.0024623870849609375, -0.00455474853515625, -0.01059722900390625, 0.037017822265625, -0.0233612060546875, 0.03924560546875, 0.055206298828125, 0.0167083740234375, -0.0189208984375, -0.06439208984375, 0.0401611328125, 0.00481414794921875, -0.07080078125, -0.000732421875, 0.0300140380859375, 0.033416748046875, 0.0172882080078125, 0.051025390625, -0.0292816162109375, 0.0223388671875, 0.01007843017578125, -0.0009756088256835938, -0.01364898681640625, -0.021148681640625, -0.03680419921875, -0.00873565673828125, -0.015838623046875, 0.0024814605712890625 ] ]
Undi95/MLewd-v2.4-13B
2023-09-26T22:12:52.000Z
[ "transformers", "pytorch", "llama", "text-generation", "not-for-all-audiences", "nsfw", "license:cc-by-nc-4.0", "endpoints_compatible", "text-generation-inference", "region:us", "has_space" ]
text-generation
Undi95
null
null
Undi95/MLewd-v2.4-13B
15
8,225
transformers
2023-09-26T21:16:07
--- license: cc-by-nc-4.0 tags: - not-for-all-audiences - nsfw --- ![image/png](https://cdn-uploads.huggingface.co/production/uploads/63ab1241ad514ca8d1430003/_fVY7xvQ9tdoZ0nVDu_WB.png) THIS MODEL IS MADE FOR LEWD SEXUAL, CRUDE AND KINKY CONTENT IN OUTPUT CAN AND WILL HAPPEN. YOU'RE WARNED Added the "magic touch" of MythoMax/Huginn/You call it. In addition, [LimaRP v3](https://huggingface.co/lemonilia/LimaRP-Llama2-13B-v3-EXPERIMENT) was used, is it recommanded to read the documentation. <!-- description start --> ## Description This repo contains fp16 files of MLewd-2.4-13B, very hot and lewd model based on ReMM (SLERP). <!-- description end --> <!-- description start --> ## Models and loras used - Undi95/ReMM-S-Light (base/private) - Undi95/CreativeEngine - Brouz/Slerpeno - The-Face-Of-Goonery/Huginn-v3-13b - zattio770/120-Days-of-LORA-v2-13B - PygmalionAI/pygmalion-2-13b - Undi95/StoryTelling - TokenBender/sakhi_13B_roleplayer_NSFW_chat_adapter - nRuaif/Kimiko-v2-13B - The-Face-Of-Goonery/Huginn-13b-FP16 - lemonilia/LimaRP-Llama2-13B-v3-EXPERIMENT <!-- description end --> <!-- prompt-template start --> ## Prompt template: Alpaca ``` Below is an instruction that describes a task. Write a response that appropriately completes the request. ### Instruction: {prompt} ### Response: ``` ## LimaRP v3 usage and suggested settings ![image/png](https://cdn-uploads.huggingface.co/production/uploads/63ab1241ad514ca8d1430003/ZC_iP2KkcEcRdgG_iyxYE.png) You can follow these instruction format settings in SillyTavern. Replace tiny with your desired response length: ![image/png](https://cdn-uploads.huggingface.co/production/uploads/63ab1241ad514ca8d1430003/PIn8_HSPTJEMdSEpNVSdm.png) Special thanks to Sushi and Shena ♥ | I love U hh_aa. If you want to support me, you can [here](https://ko-fi.com/undiai).
1,840
[ [ -0.04876708984375, -0.068359375, 0.0238494873046875, 0.0390625, -0.041168212890625, -0.0248260498046875, 0.017242431640625, -0.050048828125, 0.06591796875, 0.06201171875, -0.059906005859375, -0.020599365234375, -0.050811767578125, 0.01372528076171875, -0.016998291015625, 0.10162353515625, 0.0182647705078125, -0.0168609619140625, 0.006195068359375, 0.01349639892578125, -0.033843994140625, -0.021514892578125, -0.0540771484375, -0.01424407958984375, 0.050140380859375, 0.02838134765625, 0.053802490234375, 0.041534423828125, 0.032470703125, 0.0245208740234375, -0.01262664794921875, 0.0294342041015625, -0.0469970703125, -0.0024471282958984375, 0.0021457672119140625, -0.027862548828125, -0.07879638671875, -0.005062103271484375, 0.045440673828125, 0.01314544677734375, -0.01007080078125, 0.0191192626953125, -0.000980377197265625, 0.038299560546875, -0.021575927734375, 0.009368896484375, -0.01064300537109375, 0.0204315185546875, -0.0283966064453125, 0.003833770751953125, -0.01131439208984375, -0.034759521484375, -0.01110076904296875, -0.059600830078125, 0.004314422607421875, 0.029022216796875, 0.07916259765625, 0.00803375244140625, -0.0141754150390625, -0.00807952880859375, -0.0281219482421875, 0.048919677734375, -0.06658935546875, 0.01134490966796875, 0.030670166015625, 0.021026611328125, -0.037933349609375, -0.0692138671875, -0.06524658203125, -0.018707275390625, -0.01134490966796875, 0.0037555694580078125, -0.02435302734375, -0.028350830078125, 0.03070068359375, 0.0232391357421875, -0.035247802734375, 0.0015010833740234375, -0.04229736328125, -0.01401519775390625, 0.0382080078125, 0.01511383056640625, 0.035430908203125, -0.033294677734375, -0.036956787109375, -0.003444671630859375, -0.041595458984375, 0.006786346435546875, 0.039337158203125, 0.00949859619140625, -0.041748046875, 0.07110595703125, 0.01052093505859375, 0.037384033203125, 0.033721923828125, -0.0087127685546875, 0.013397216796875, -0.0016317367553710938, -0.0242462158203125, -0.01361083984375, 
0.08197021484375, 0.04461669921875, -0.0005578994750976562, 0.0122222900390625, -0.0017747879028320312, -0.003482818603515625, 0.0265655517578125, -0.08135986328125, -0.00923919677734375, 0.0152587890625, -0.038909912109375, -0.03729248046875, -0.0033550262451171875, -0.08099365234375, -0.0194549560546875, 0.006343841552734375, 0.0213165283203125, -0.033203125, -0.0257415771484375, 0.008087158203125, -0.002681732177734375, 0.0223846435546875, 0.0192718505859375, -0.068115234375, 0.043853759765625, 0.04632568359375, 0.0516357421875, 0.0095062255859375, -0.01274871826171875, -0.041168212890625, 0.0017881393432617188, -0.014801025390625, 0.042633056640625, -0.0214385986328125, -0.046661376953125, -0.0313720703125, 0.025238037109375, 0.00701141357421875, -0.030731201171875, 0.0606689453125, -0.037933349609375, 0.0247344970703125, -0.0257568359375, -0.033843994140625, -0.01145172119140625, -0.0009860992431640625, -0.05645751953125, 0.050384521484375, 0.005596160888671875, -0.082763671875, 0.0016565322875976562, -0.04144287109375, -0.00749969482421875, -0.0174102783203125, 0.0033817291259765625, -0.038787841796875, 0.00036406517028808594, 0.01020050048828125, 0.03521728515625, -0.0189361572265625, -0.01195526123046875, -0.03515625, -0.032135009765625, 0.031707763671875, -0.0013484954833984375, 0.060150146484375, 0.0235748291015625, -0.04534912109375, 0.0098419189453125, -0.060516357421875, -0.005481719970703125, 0.0379638671875, -0.0197906494140625, 0.0019083023071289062, -0.0191650390625, 0.0154876708984375, 0.0158233642578125, 0.035430908203125, -0.047698974609375, 0.0176849365234375, -0.030731201171875, 0.0141754150390625, 0.07269287109375, -0.0169830322265625, 0.01273345947265625, -0.046539306640625, 0.043548583984375, -0.005481719970703125, 0.02362060546875, 0.017242431640625, -0.057403564453125, -0.07757568359375, -0.0298004150390625, -0.01213836669921875, 0.0184326171875, -0.047607421875, 0.030426025390625, 0.01012420654296875, -0.055877685546875, 
-0.04071044921875, 0.00038814544677734375, 0.051483154296875, 0.049713134765625, 0.0191802978515625, -0.0377197265625, -0.0455322265625, -0.073486328125, 0.005035400390625, -0.01552581787109375, -0.0132904052734375, 0.04376220703125, 0.0302276611328125, -0.0287628173828125, 0.049102783203125, -0.041748046875, -0.027740478515625, -0.0197296142578125, -0.00537109375, 0.0447998046875, 0.042022705078125, 0.06671142578125, -0.0670166015625, -0.0163116455078125, -0.0155792236328125, -0.05438232421875, -0.0208587646484375, 0.0290679931640625, -0.0282745361328125, 0.00940704345703125, -0.003528594970703125, -0.070556640625, 0.02545166015625, 0.045440673828125, -0.04840087890625, 0.061248779296875, -0.03021240234375, 0.0311431884765625, -0.06988525390625, 0.0218505859375, 0.005512237548828125, -0.0220794677734375, -0.04022216796875, 0.0310516357421875, -0.01470947265625, -0.01062774658203125, -0.042938232421875, 0.055633544921875, -0.0178680419921875, -0.0116729736328125, -0.0298309326171875, 0.0023345947265625, 0.0036258697509765625, 0.042236328125, -0.006259918212890625, 0.044525146484375, 0.034393310546875, -0.043731689453125, 0.055084228515625, 0.0283966064453125, -0.0092620849609375, 0.040283203125, -0.091552734375, 0.0250701904296875, 0.0016345977783203125, 0.0247802734375, -0.061279296875, -0.040374755859375, 0.0750732421875, -0.0302276611328125, 0.02099609375, -0.007381439208984375, -0.04290771484375, -0.03594970703125, -0.032257080078125, 0.0260009765625, 0.0535888671875, -0.037933349609375, 0.03729248046875, 0.03094482421875, -0.01239013671875, -0.0367431640625, -0.058746337890625, -0.00766754150390625, -0.0261688232421875, -0.051849365234375, 0.0270233154296875, -0.0188140869140625, -0.003986358642578125, -0.004795074462890625, -0.0006628036499023438, -0.01495361328125, -0.033233642578125, 0.034210205078125, 0.036224365234375, -0.0059661865234375, -0.044891357421875, 0.0021953582763671875, -0.00975799560546875, 0.0105743408203125, 0.0064239501953125, 
0.05206298828125, -0.0321044921875, -0.0297393798828125, -0.042938232421875, 0.029388427734375, 0.047698974609375, 0.00020039081573486328, 0.042327880859375, 0.063232421875, -0.0254669189453125, 0.016021728515625, -0.0400390625, -0.012908935546875, -0.037384033203125, -0.005523681640625, -0.028350830078125, -0.05462646484375, 0.04302978515625, 0.034637451171875, 0.00847625732421875, 0.04132080078125, 0.0382080078125, -0.01413726806640625, 0.064208984375, 0.064697265625, 0.00693511962890625, 0.00923919677734375, -0.0300140380859375, -0.003635406494140625, -0.06854248046875, -0.041168212890625, -0.0221099853515625, -0.028076171875, -0.0526123046875, -0.04559326171875, 0.0225067138671875, -0.0024738311767578125, -0.0171966552734375, 0.039031982421875, -0.037506103515625, 0.0094757080078125, 0.0238189697265625, 0.032257080078125, 0.0115509033203125, -0.002071380615234375, 0.0009489059448242188, -0.03302001953125, -0.0277099609375, -0.037628173828125, 0.05511474609375, 0.034912109375, 0.07098388671875, 0.0252685546875, 0.055450439453125, 0.0105743408203125, -0.0014057159423828125, -0.040008544921875, 0.0694580078125, -0.0145416259765625, -0.04290771484375, -0.0071868896484375, -0.0256805419921875, -0.05804443359375, 0.0195770263671875, -0.0016565322875976562, -0.06182861328125, 0.0166473388671875, 0.01248931884765625, -0.01210784912109375, 0.0266876220703125, -0.0283966064453125, 0.046783447265625, -0.004238128662109375, -0.0307769775390625, -0.005825042724609375, -0.050872802734375, 0.047821044921875, 0.007442474365234375, 0.01476287841796875, -0.01163482666015625, -0.01541900634765625, 0.06951904296875, -0.055328369140625, 0.08306884765625, -0.010223388671875, -0.0244293212890625, 0.017242431640625, 0.005954742431640625, 0.037139892578125, 0.0247344970703125, -0.0036983489990234375, 0.0017976760864257812, -0.01229095458984375, -0.02880859375, -0.0263824462890625, 0.0738525390625, -0.060455322265625, -0.061798095703125, -0.0298309326171875, -0.0218658447265625, 
0.01702880859375, 0.00525665283203125, 0.038787841796875, 0.020843505859375, -0.005550384521484375, -0.004161834716796875, 0.06884765625, -0.00988006591796875, 0.02105712890625, 0.0200042724609375, -0.039794921875, -0.020843505859375, 0.06182861328125, -0.0143280029296875, 0.0107879638671875, 0.0135955810546875, 0.0274658203125, -0.0222930908203125, 0.01263427734375, -0.036529541015625, 0.051177978515625, -0.0386962890625, -0.00037479400634765625, -0.062744140625, -0.01158905029296875, -0.03338623046875, -0.00457000732421875, -0.0306396484375, -0.036529541015625, -0.056884765625, 0.0212860107421875, 0.04718017578125, 0.04278564453125, -0.018890380859375, 0.01470947265625, -0.045745849609375, 0.06451416015625, 0.042205810546875, 0.00829315185546875, -0.00921630859375, -0.0662841796875, 0.0241546630859375, 0.01025390625, -0.020111083984375, -0.06591796875, 0.0482177734375, 0.005512237548828125, 0.019622802734375, 0.026031494140625, -0.007511138916015625, 0.0526123046875, -0.047515869140625, 0.0462646484375, 0.02667236328125, -0.0526123046875, 0.05615234375, -0.0305633544921875, 0.005847930908203125, -0.001865386962890625, 0.038116455078125, -0.033905029296875, -0.039794921875, -0.06298828125, -0.046051025390625, 0.05010986328125, 0.0228118896484375, 0.02984619140625, -0.00102996826171875, 0.03131103515625, 0.01554107666015625, 0.0219268798828125, -0.06427001953125, -0.041748046875, -0.03460693359375, 0.0254058837890625, 0.0240936279296875, -0.042236328125, -0.01236724853515625, -0.0189056396484375, 0.055023193359375, 0.0013275146484375, 0.044708251953125, 0.01352691650390625, 0.01557159423828125, -0.01439666748046875, 0.0123291015625, 0.05340576171875, 0.0421142578125, -0.031707763671875, -0.0023212432861328125, -0.0032100677490234375, -0.049224853515625, 0.00589752197265625, 0.0055999755859375, -0.0010576248168945312, 0.0168914794921875, 0.00772857666015625, 0.058868408203125, 0.0256195068359375, -0.038909912109375, 0.042083740234375, -0.0210723876953125, 
0.0150909423828125, -0.00704193115234375, 0.016265869140625, 0.01282501220703125, 0.0281219482421875, 0.0179290771484375, 0.00368499755859375, 0.01486968994140625, -0.03533935546875, 0.0113067626953125, 0.01172637939453125, -0.01190948486328125, -0.02569580078125, 0.073974609375, -0.01019287109375, -0.0218505859375, 0.0270843505859375, -0.020111083984375, -0.023956298828125, 0.06512451171875, 0.06591796875, 0.042083740234375, -0.016021728515625, 0.02642822265625, 0.0228729248046875, 0.00821685791015625, 0.020660400390625, 0.046417236328125, 0.0164642333984375, -0.0293731689453125, -0.01230621337890625, -0.06146240234375, -0.03094482421875, 0.02239990234375, -0.042205810546875, 0.0350341796875, -0.07269287109375, -0.0124969482421875, 0.0023174285888671875, 0.0005764961242675781, -0.0386962890625, 0.015960693359375, -0.01062774658203125, 0.059478759765625, -0.054290771484375, 0.05462646484375, 0.0482177734375, -0.035400390625, -0.04901123046875, -0.0140228271484375, 0.011871337890625, -0.07684326171875, 0.061279296875, 0.01195526123046875, -0.00762176513671875, -0.01322174072265625, -0.05712890625, -0.050323486328125, 0.05718994140625, 0.0206146240234375, -0.039520263671875, -0.01299285888671875, -0.014434814453125, 0.03338623046875, -0.03411865234375, 0.01093292236328125, 0.02880859375, 0.031768798828125, 0.0192108154296875, -0.0750732421875, 0.0343017578125, -0.0321044921875, 0.021575927734375, -0.0074005126953125, -0.060150146484375, 0.0711669921875, -0.03619384765625, -0.00414276123046875, 0.04510498046875, 0.07623291015625, 0.064697265625, 0.0036468505859375, 0.033447265625, 0.042236328125, 0.025177001953125, -0.00469207763671875, 0.07647705078125, 0.0129241943359375, 0.0249481201171875, 0.0625, -0.00440216064453125, 0.04937744140625, 0.039886474609375, -0.0207977294921875, 0.029266357421875, 0.051116943359375, -0.0173492431640625, 0.038970947265625, 0.0228271484375, -0.0197296142578125, 0.01143646240234375, -0.02691650390625, -0.049835205078125, 
0.0222015380859375, 0.0295867919921875, -0.006496429443359375, 0.00035262107849121094, 0.01448822021484375, 0.0234527587890625, -0.005161285400390625, -0.01239776611328125, 0.02587890625, 0.01702880859375, -0.01479339599609375, 0.0506591796875, 0.0028896331787109375, 0.08673095703125, -0.044281005859375, -0.0090484619140625, -0.03125, -0.005191802978515625, -0.021240234375, -0.06524658203125, 0.017242431640625, 0.01476287841796875, -0.01515960693359375, -0.0006814002990722656, 0.06585693359375, -0.0191802978515625, -0.040252685546875, 0.0182342529296875, 0.006603240966796875, 0.046905517578125, 0.0278167724609375, -0.0665283203125, 0.02783203125, -0.0014219284057617188, -0.02398681640625, 0.0227508544921875, 0.025543212890625, 0.0208740234375, 0.063232421875, 0.0360107421875, 0.0157623291015625, 0.004360198974609375, -0.00130462646484375, 0.080810546875, -0.034698486328125, -0.03167724609375, -0.05230712890625, 0.035888671875, -0.0037136077880859375, -0.02410888671875, 0.05206298828125, 0.0210113525390625, 0.0440673828125, -0.030364990234375, 0.023101806640625, -0.038787841796875, 0.0233917236328125, -0.048828125, 0.0650634765625, -0.05999755859375, 0.00847625732421875, -0.03204345703125, -0.06573486328125, 0.00759124755859375, 0.05438232421875, 0.014556884765625, 0.00986480712890625, 0.025543212890625, 0.06134033203125, -0.024444580078125, -0.007450103759765625, 0.0200958251953125, 0.0198211669921875, 0.028167724609375, 0.041595458984375, 0.0577392578125, -0.04876708984375, 0.026611328125, -0.051116943359375, -0.02197265625, -0.029266357421875, -0.0704345703125, -0.063232421875, -0.04034423828125, -0.023651123046875, -0.032257080078125, -0.004291534423828125, 0.046417236328125, 0.051788330078125, -0.039031982421875, -0.021942138671875, 0.017578125, 0.0032367706298828125, 0.004520416259765625, -0.0218048095703125, 0.0183868408203125, 0.01490020751953125, -0.06402587890625, 0.02001953125, -0.000652313232421875, 0.044891357421875, -0.01064300537109375, 
-0.0035419464111328125, -0.0100860595703125, -0.01303863525390625, 0.03564453125, 0.04327392578125, -0.06982421875, -0.020233154296875, -0.0019683837890625, -0.01244354248046875, -0.01065826416015625, 0.031890869140625, -0.039459228515625, 0.0034580230712890625, 0.0182037353515625, 0.0163116455078125, 0.054840087890625, -0.0302886962890625, 0.0285491943359375, -0.064697265625, 0.0302886962890625, 0.009613037109375, 0.039520263671875, 0.019195556640625, -0.049102783203125, 0.0276031494140625, 0.0186004638671875, -0.048065185546875, -0.050140380859375, 0.00794219970703125, -0.092529296875, -0.00670623779296875, 0.0697021484375, -0.0186004638671875, -0.0367431640625, 0.0303192138671875, -0.049285888671875, 0.0175018310546875, -0.028594970703125, 0.058349609375, 0.0305328369140625, -0.0218505859375, -0.0257568359375, -0.0469970703125, 0.031890869140625, 0.025543212890625, -0.06884765625, -0.0076141357421875, 0.0516357421875, 0.031768798828125, 0.034271240234375, 0.0577392578125, -0.01708984375, 0.032562255859375, -0.01309967041015625, -0.0009455680847167969, -0.0041961669921875, -0.018951416015625, -0.0031566619873046875, -0.0059051513671875, -0.01500701904296875, -0.0240325927734375 ] ]
laion/CLIP-convnext_base_w-laion_aesthetic-s13B-b82K
2023-04-18T22:04:49.000Z
[ "open_clip", "tensorboard", "clip", "zero-shot-image-classification", "arxiv:2201.03545", "arxiv:1910.04867", "license:mit", "has_space", "region:us" ]
zero-shot-image-classification
laion
null
null
laion/CLIP-convnext_base_w-laion_aesthetic-s13B-b82K
2
8,222
open_clip
2023-01-03T00:25:22
--- license: mit library_name: open_clip pipeline_tag: zero-shot-image-classification tags: - clip --- # Model Card for CLIP-convnext_base_w.laion_aesthetic-s13B-b82k # Table of Contents 1. [Model Details](#model-details) 2. [Uses](#uses) 3. [Training Details](#training-details) 4. [Evaluation](#evaluation) 5. [Acknowledgements](#acknowledgements) 6. [Citation](#citation) # Model Details ## Model Description A series of CLIP [ConvNeXt-Base](https://arxiv.org/abs/2201.03545) (w/ wide embed dim) models trained on subsets LAION-5B (https://laion.ai/blog/laion-5b/) using OpenCLIP (https://github.com/mlfoundations/open_clip). Goals: * Explore an alternative to ViT and ResNet (w/ AttentionPooling) CLIP models that scales well with model size and image resolution Firsts: * First known ConvNeXt CLIP models trained at scale in the range of CLIP ViT-B/16 and RN50x4 models * First released model weights exploring increase of augmentation + regularization for image tower via adding (greater scale range of RRC, random erasing, stochastic depth) The models utilize the [timm](https://github.com/rwightman/pytorch-image-models) ConvNeXt-Base model (`convnext_base`) as the image tower, and the same text tower as the RN50x4 (depth 12, embed dim 640) model from OpenAI CLIP. The base models are trained at 256x256 image resolution and roughly match the RN50x4 models on FLOPs and activation counts. The models with `320` in the name are trained at 320x320. All models in this series were trained for 13B samples and have ImageNet Zero-Shot top-1 of >= 70.8%. Comparing to ViT-B/16 at 34B SS with zero-shot of 70.2% (68.1% for 13B SS) this suggests the ConvNeXt architecture may be more sample efficient in this range of model scale. More experiments needed to confirm. 
| Model | Dataset | Resolution | AugReg | Top-1 ImageNet Zero-Shot (%) | | ----- | ------- | ---------- | ------------ | --------- | | [convnext_base_w.laion2b_s13b_b82k](https://huggingface.co/laion/CLIP-convnext_base_w-laion2B-s13B-b82K) | LAION-2B | 256x256 | RRC (0.9, 1.0) | 70.8 | | [convnext_base_w.laion2b_s13b_b82k_augreg](https://huggingface.co/laion/CLIP-convnext_base_w-laion2B-s13B-b82K-augreg) | LAION-2B | 256x256 | RRC (0.33, 1.0), RE (0.35), SD (0.1) | 71.5 | | [convnext_base_w.laion_aesthetic_s13b_b82k](https://huggingface.co/laion/CLIP-convnext_base_w-laion_aesthetic-s13B-b82K) | LAION-A | 256x256 | RRC (0.9, 1.0) | 71.0 | | [convnext_base_w_320.laion_aesthetic_s13b_b82k](https://huggingface.co/laion/CLIP-convnext_base_w_320-laion_aesthetic-s13B-b82K) | LAION-A | 320x320 | RRC (0.9, 1.0) | 71.7 | | [convnext_base_w_320.laion_aesthetic_s13b_b82k_augreg](https://huggingface.co/laion/CLIP-convnext_base_w_320-laion_aesthetic-s13B-b82K-augreg) | LAION-A | 320x320 | RRC (0.33, 1.0), RE (0.35), SD (0.1) | 71.3 | RRC = Random Resize Crop (crop pcts), RE = Random Erasing (prob), SD = Stochastic Depth (prob) -- image tower only LAION-A = LAION Aesthetic, an ~900M sample subset of LAION-2B with pHash dedupe and asthetic score filtering. Model training done by Ross Wightman across both the [stability.ai](https://stability.ai/) cluster and the [JUWELS Booster](https://apps.fz-juelich.de/jsc/hps/juwels/booster-overview.html) supercomputer. See acknowledgements below. # Uses As per the original [OpenAI CLIP model card](https://github.com/openai/CLIP/blob/d50d76daa670286dd6cacf3bcd80b5e4823fc8e1/model-card.md), this model is intended as a research output for research communities. We hope that this model will enable researchers to better understand and explore zero-shot, arbitrary image classification. We also hope it can be used for interdisciplinary studies of the potential impact of such model. 
The OpenAI CLIP paper includes a discussion of potential downstream impacts to provide an example for this sort of analysis. Additionally, the LAION-5B blog (https://laion.ai/blog/laion-5b/) and upcoming paper include additional discussion as it relates specifically to the training dataset. ## Direct Use Zero-shot image classification, image and text retrieval, among others. ## Downstream Use Image classification and other image task fine-tuning, linear probe image classification, image generation guiding and conditioning, among others. ## Out-of-Scope Use As per the OpenAI models, **Any** deployed use case of the model - whether commercial or not - is currently out of scope. Non-deployed use cases such as image search in a constrained environment, are also not recommended unless there is thorough in-domain testing of the model with a specific, fixed class taxonomy. This is because our safety assessment demonstrated a high need for task specific testing especially given the variability of CLIP’s performance with different class taxonomies. This makes untested and unconstrained deployment of the model in any use case currently potentially harmful. Certain use cases which would fall under the domain of surveillance and facial recognition are always out-of-scope regardless of performance of the model. This is because the use of artificial intelligence for tasks such as these can be premature currently given the lack of testing norms and checks to ensure its fair use. Since the model has not been purposefully trained in or evaluated on any languages other than English, its use should be limited to English language use cases. Further the above notice, the LAION-5B dataset used in training of these models has additional considerations, see below. # Training Details ## Training Data This model was trained with one of (see table in intro): * LAION-2B - A 2 billion sample English subset of LAION-5B (https://laion.ai/blog/laion-5b/). 
* LAION-Aesthetic - A 900M sample subset of LAION-2B with pHash dedupe and asthetic score filtering **IMPORTANT NOTE:** The motivation behind dataset creation is to democratize research and experimentation around large-scale multi-modal model training and handling of uncurated, large-scale datasets crawled from publically available internet. Our recommendation is therefore to use the dataset for research purposes. Be aware that this large-scale dataset is uncurated. Keep in mind that the uncurated nature of the dataset means that collected links may lead to strongly discomforting and disturbing content for a human viewer. Therefore, please use the demo links with caution and at your own risk. It is possible to extract a “safe” subset by filtering out samples based on the safety tags (using a customized trained NSFW classifier that we built). While this strongly reduces the chance for encountering potentially harmful content when viewing, we cannot entirely exclude the possibility for harmful content being still present in safe mode, so that the warning holds also there. We think that providing the dataset openly to broad research and other interested communities will allow for transparent investigation of benefits that come along with training large-scale models as well as pitfalls and dangers that may stay unreported or unnoticed when working with closed large datasets that remain restricted to a small community. Providing our dataset openly, we however do not recommend using it for creating ready-to-go industrial products, as the basic research about general properties and safety of such large-scale models, which we would like to encourage with this release, is still in progress. ## Training Procedure All models were trained with a global batch size of 81920 for 64 checkpoint intervals of 203.7M samples for a total of ~13B samples seen over training. 
For 256x256 models, a slurm script w/ srun below was used on 20 8-GPU (A100 40GB) nodes (Stability), switching to 40 4-GPU nodes for time on JUWELS. ``` /opt/slurm/sbin/srun --cpu_bind=v --accel-bind=gn python -m training.main \ --save-frequency 1 \ --name "convnext_256" \ --resume 'latest' \ --train-data="pipe:aws s3 cp s3://mybucket/path/{laion{00000..xxxxx}.tar -" \ --train-num-samples 203666042 \ --dataset-type webdataset \ --precision amp_bfloat16 \ --warmup 10000 \ --batch-size=512 \ --epochs=64 \ --dataset-resampled \ --clip-grad-norm 5.0 \ --lr 1e-3 \ --workers=6 \ --model "convnext_base_w" \ --seed 0 \ --ddp-static-graph \ --local-loss \ --gather-with-grad \ --grad-checkpointing ``` For 320x320 models, same as above but w/ 32 8-GPU nodes, local batch size 320, or 64 4-GPU nodes on JUWELs. # Evaluation Evaluation done with code in the [LAION CLIP Benchmark suite](https://github.com/LAION-AI/CLIP_benchmark). ## Testing Data, Factors & Metrics ### Testing Data The testing is performed with VTAB+ (A combination of VTAB (https://arxiv.org/abs/1910.04867) w/ additional robustness datasets) for classification and COCO and Flickr for retrieval. ## Results The models achieve between 70.8 and 71.7 zero-shot top-1 accuracy on ImageNet-1k. ![](convnext_base_w_zero_shot.png) An initial round of benchmarks have been performed on a wider range of datasets, to be viewable at https://github.com/LAION-AI/CLIP_benchmark/blob/main/benchmark/results.ipynb As part of exploring increased augmentation + regularization, early evalations suggest that `augreg` trained models evaluate well over a wider range of resolutions. This is especially true for the 320x320 LAION-A model, where the augreg run was lower than the non-augreg when evaluated at the train resolution of 320x320 (71.3 vs 71.7), but improves to 72.2 when evaluated at 384x384 (the non-augreg drops to 71.0 at 384x384). 
# Acknowledgements Acknowledging [stability.ai](https://stability.ai/) and the Gauss Centre for Supercomputing e.V. (http://gauss-centre.eu) for funding this part of work by providing computing time through the John von Neumann Institute for Computing (NIC) on the GCS Supercomputer JUWELS Booster at Jülich Supercomputing Centre (JSC). # Citation **BibTeX:** LAION-5B ```bibtex @inproceedings{schuhmann2022laionb, title={{LAION}-5B: An open large-scale dataset for training next generation image-text models}, author={Christoph Schuhmann and Romain Beaumont and Richard Vencu and Cade W Gordon and Ross Wightman and Mehdi Cherti and Theo Coombes and Aarush Katta and Clayton Mullis and Mitchell Wortsman and Patrick Schramowski and Srivatsa R Kundurthy and Katherine Crowson and Ludwig Schmidt and Robert Kaczmarczyk and Jenia Jitsev}, booktitle={Thirty-sixth Conference on Neural Information Processing Systems Datasets and Benchmarks Track}, year={2022}, url={https://openreview.net/forum?id=M3Y74vmsMcY} } ``` OpenCLIP software ```bibtex @software{ilharco_gabriel_2021_5143773, author = {Ilharco, Gabriel and Wortsman, Mitchell and Wightman, Ross and Gordon, Cade and Carlini, Nicholas and Taori, Rohan and Dave, Achal and Shankar, Vaishaal and Namkoong, Hongseok and Miller, John and Hajishirzi, Hannaneh and Farhadi, Ali and Schmidt, Ludwig}, title = {OpenCLIP}, month = jul, year = 2021, note = {If you use this software, please cite it as below.}, publisher = {Zenodo}, version = {0.1}, doi = {10.5281/zenodo.5143773}, url = {https://doi.org/10.5281/zenodo.5143773} } ``` OpenAI CLIP paper ```bibtex @inproceedings{Radford2021LearningTV, title={Learning Transferable Visual Models From Natural Language Supervision}, author={Alec Radford and Jong Wook Kim and Chris Hallacy and A. 
Ramesh and Gabriel Goh and Sandhini Agarwal and Girish Sastry and Amanda Askell and Pamela Mishkin and Jack Clark and Gretchen Krueger and Ilya Sutskever}, booktitle={ICML}, year={2021} } ``` ```bibtex @Article{liu2022convnet, author = {Zhuang Liu and Hanzi Mao and Chao-Yuan Wu and Christoph Feichtenhofer and Trevor Darrell and Saining Xie}, title = {A ConvNet for the 2020s}, journal = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, year = {2022}, } ``` ```bibtex @misc{rw2019timm, author = {Ross Wightman}, title = {PyTorch Image Models}, year = {2019}, publisher = {GitHub}, journal = {GitHub repository}, doi = {10.5281/zenodo.4414861}, howpublished = {\url{https://github.com/rwightman/pytorch-image-models}} } ```
12,627
[ [ -0.03546142578125, -0.03656005859375, 0.005435943603515625, 0.0017843246459960938, -0.03143310546875, -0.03216552734375, -0.0122833251953125, -0.049346923828125, 0.0256195068359375, 0.028106689453125, -0.040618896484375, -0.03692626953125, -0.03582763671875, -0.004077911376953125, -0.0231170654296875, 0.06842041015625, -0.003383636474609375, -0.000946044921875, 0.001232147216796875, -0.0300750732421875, -0.03485107421875, -0.037139892578125, -0.051055908203125, 0.00397491455078125, 0.0159912109375, 0.021209716796875, 0.045654296875, 0.056427001953125, 0.047821044921875, 0.01715087890625, -0.01416015625, -0.0099029541015625, -0.047821044921875, -0.0316162109375, 0.0125274658203125, -0.0233612060546875, -0.049560546875, 0.01018524169921875, 0.043426513671875, 0.0220489501953125, -0.006137847900390625, 0.02349853515625, 0.005451202392578125, 0.04522705078125, -0.054779052734375, -0.000888824462890625, -0.035858154296875, 0.00775909423828125, -0.024932861328125, 0.010406494140625, -0.0189971923828125, -0.008209228515625, 0.018768310546875, -0.05419921875, 0.0235748291015625, -0.00797271728515625, 0.09765625, 0.01242828369140625, -0.016265869140625, 0.017425537109375, -0.04052734375, 0.05877685546875, -0.060760498046875, 0.029754638671875, 0.0152587890625, 0.0193634033203125, 0.0167999267578125, -0.060760498046875, -0.0270233154296875, -0.0134429931640625, 0.006374359130859375, 0.029144287109375, -0.025543212890625, -0.0025424957275390625, 0.0367431640625, 0.0286407470703125, -0.042083740234375, 0.0034618377685546875, -0.041351318359375, 0.0013170242309570312, 0.0589599609375, -0.0015544891357421875, 0.020904541015625, -0.025421142578125, -0.0513916015625, -0.0211944580078125, -0.0516357421875, 0.03314208984375, 0.0206146240234375, -0.006359100341796875, -0.03466796875, 0.0323486328125, -0.01025390625, 0.03021240234375, 0.004047393798828125, -0.0102691650390625, 0.030487060546875, -0.027435302734375, -0.035430908203125, -0.014739990234375, 0.0804443359375, 
0.050323486328125, 0.00363922119140625, 0.0163726806640625, -0.00859832763671875, -0.0102386474609375, 0.015838623046875, -0.0885009765625, -0.01038360595703125, 0.0120391845703125, -0.054046630859375, -0.03363037109375, 0.0228424072265625, -0.05224609375, 0.008636474609375, -0.010894775390625, 0.0435791015625, -0.04180908203125, -0.02301025390625, 0.006443023681640625, -0.0114898681640625, 0.0159759521484375, 0.025970458984375, -0.036651611328125, 0.0145721435546875, 0.0235137939453125, 0.079833984375, -0.007312774658203125, -0.0125274658203125, -0.0044403076171875, -0.0025043487548828125, -0.02947998046875, 0.03961181640625, 0.00009590387344360352, -0.0259857177734375, -0.0184326171875, 0.03179931640625, -0.0044403076171875, -0.036376953125, 0.051727294921875, -0.0164337158203125, -0.0027980804443359375, -0.0120086669921875, -0.0240478515625, -0.035247802734375, 0.01220703125, -0.0556640625, 0.068359375, 0.00910186767578125, -0.06988525390625, 0.022918701171875, -0.042877197265625, 0.0004949569702148438, -0.00567626953125, 0.0067138671875, -0.0469970703125, -0.00797271728515625, 0.0252685546875, 0.043853759765625, -0.023956298828125, 0.0149993896484375, -0.04425048828125, -0.031982421875, 0.0216522216796875, -0.0305938720703125, 0.068359375, 0.006755828857421875, -0.032562255859375, 0.007518768310546875, -0.05108642578125, -0.00402069091796875, 0.0271453857421875, 0.01393890380859375, -0.01513671875, -0.0292205810546875, -0.00025773048400878906, 0.025604248046875, 0.0081939697265625, -0.04791259765625, -0.005687713623046875, -0.01461029052734375, 0.03619384765625, 0.058074951171875, 0.0115814208984375, 0.02435302734375, -0.04034423828125, 0.042083740234375, 0.0020389556884765625, 0.0487060546875, -0.0178070068359375, -0.0321044921875, -0.05718994140625, -0.044647216796875, 0.031036376953125, 0.0308837890625, -0.035614013671875, 0.030731201171875, -0.0153350830078125, -0.0435791015625, -0.032867431640625, -0.0149993896484375, 0.0362548828125, 0.036651611328125, 
0.0239410400390625, -0.037322998046875, -0.035919189453125, -0.0819091796875, 0.018585205078125, 0.01284027099609375, -0.0112457275390625, 0.056640625, 0.054779052734375, -0.000507354736328125, 0.0570068359375, -0.045989990234375, -0.02374267578125, -0.008880615234375, -0.0035343170166015625, 0.02264404296875, 0.038116455078125, 0.0706787109375, -0.061767578125, -0.047607421875, -0.00966644287109375, -0.07781982421875, 0.01439666748046875, 0.0014276504516601562, -0.0186614990234375, 0.01061248779296875, 0.026580810546875, -0.05078125, 0.054046630859375, 0.025390625, 0.005084991455078125, 0.0513916015625, -0.00954437255859375, 0.01168060302734375, -0.07696533203125, 0.03302001953125, 0.019989013671875, -0.0144805908203125, -0.0498046875, 0.01172637939453125, 0.0032482147216796875, -0.0230560302734375, -0.068359375, 0.04736328125, -0.0304412841796875, 0.0087890625, -0.0086669921875, 0.00016176700592041016, 0.00910186767578125, 0.0498046875, 0.01267242431640625, 0.06414794921875, 0.047515869140625, -0.03924560546875, 0.01116180419921875, 0.0227203369140625, -0.0226287841796875, 0.0435791015625, -0.0791015625, 0.00910186767578125, -0.006412506103515625, 0.0242767333984375, -0.04498291015625, -0.03411865234375, 0.0290069580078125, -0.037384033203125, 0.0268707275390625, -0.0262451171875, -0.0114898681640625, -0.04345703125, -0.060821533203125, 0.039825439453125, 0.05340576171875, -0.0421142578125, 0.016571044921875, 0.02545166015625, 0.0160980224609375, -0.05596923828125, -0.04791259765625, -0.0221099853515625, -0.031280517578125, -0.060699462890625, 0.03521728515625, 0.0022296905517578125, 0.0009202957153320312, 0.006099700927734375, 0.00213623046875, -0.0007386207580566406, -0.006877899169921875, 0.047027587890625, 0.042083740234375, -0.01343536376953125, -0.01898193359375, -0.0191802978515625, 0.00775909423828125, -0.00601959228515625, -0.0193023681640625, 0.026611328125, -0.015472412109375, -0.01291656494140625, -0.06268310546875, 0.01491546630859375, 
0.0435791015625, -0.016082763671875, 0.05950927734375, 0.056884765625, -0.03570556640625, 0.0025424957275390625, -0.034393310546875, -0.01251220703125, -0.034759521484375, 0.022796630859375, -0.0165557861328125, -0.051666259765625, 0.049285888671875, 0.016510009765625, -0.004665374755859375, 0.052215576171875, 0.0238189697265625, -0.01291656494140625, 0.0694580078125, 0.043182373046875, -0.003620147705078125, 0.04852294921875, -0.0745849609375, -0.0023746490478515625, -0.0880126953125, -0.0239105224609375, -0.01085662841796875, -0.0239105224609375, -0.0440673828125, -0.03173828125, 0.050567626953125, 0.021728515625, -0.0220184326171875, 0.0362548828125, -0.0300750732421875, 0.0169525146484375, 0.043609619140625, 0.03729248046875, -0.01287078857421875, 0.0012493133544921875, 0.001251220703125, -0.007396697998046875, -0.05322265625, -0.01361846923828125, 0.09075927734375, 0.047515869140625, 0.051177978515625, -0.01139068603515625, 0.031982421875, 0.01082611083984375, 0.01055145263671875, -0.051177978515625, 0.04266357421875, -0.02923583984375, -0.050262451171875, -0.0180511474609375, -0.0300445556640625, -0.0654296875, 0.005825042724609375, -0.018402099609375, -0.055938720703125, 0.0283355712890625, 0.003925323486328125, -0.035247802734375, 0.037811279296875, -0.03887939453125, 0.0721435546875, -0.0182037353515625, -0.042877197265625, -0.002105712890625, -0.061126708984375, 0.044586181640625, 0.01187896728515625, 0.002872467041015625, -0.0137786865234375, 0.0193023681640625, 0.07666015625, -0.060546875, 0.05670166015625, -0.014739990234375, 0.02423095703125, 0.0535888671875, -0.00797271728515625, 0.0234527587890625, 0.010162353515625, 0.0031147003173828125, 0.0501708984375, 0.001758575439453125, -0.0212249755859375, -0.03253173828125, 0.041778564453125, -0.07080078125, -0.024200439453125, -0.0310211181640625, -0.035858154296875, 0.01812744140625, 0.01141357421875, 0.057891845703125, 0.061004638671875, -0.01514434814453125, 0.0330810546875, 0.043609619140625, 
-0.017608642578125, 0.036651611328125, 0.007228851318359375, -0.005828857421875, -0.056884765625, 0.07159423828125, 0.01806640625, 0.0286407470703125, 0.004581451416015625, 0.0134429931640625, -0.0184783935546875, -0.0310211181640625, -0.043212890625, 0.0304412841796875, -0.0374755859375, -0.034393310546875, -0.036865234375, -0.040069580078125, -0.03387451171875, -0.01174163818359375, -0.038330078125, -0.0284423828125, -0.046630859375, 0.004058837890625, 0.0295562744140625, 0.040924072265625, -0.01224517822265625, 0.031036376953125, -0.057281494140625, 0.01532745361328125, 0.01306915283203125, 0.02545166015625, 0.0030231475830078125, -0.062042236328125, -0.0159759521484375, 0.01861572265625, -0.03466796875, -0.051513671875, 0.0362548828125, 0.0171966552734375, 0.031280517578125, 0.050994873046875, -0.007404327392578125, 0.04901123046875, -0.02349853515625, 0.0751953125, 0.03173828125, -0.05072021484375, 0.0428466796875, -0.04718017578125, 0.0158233642578125, 0.043792724609375, 0.051177978515625, -0.028228759765625, 0.00299835205078125, -0.07183837890625, -0.06390380859375, 0.07171630859375, 0.01776123046875, -0.00177001953125, 0.01560211181640625, 0.0469970703125, -0.0024662017822265625, 0.01377105712890625, -0.0655517578125, -0.0168304443359375, -0.038604736328125, 0.0022487640380859375, 0.00946807861328125, -0.022491455078125, -0.0082550048828125, -0.0301361083984375, 0.0625, -0.01141357421875, 0.03997802734375, 0.019775390625, 0.0008726119995117188, -0.004302978515625, -0.0119781494140625, 0.04022216796875, 0.0262603759765625, -0.042694091796875, -0.0238189697265625, 0.01190948486328125, -0.04449462890625, -0.0014362335205078125, 0.0013666152954101562, -0.051788330078125, -0.008880615234375, 0.0252685546875, 0.08807373046875, 0.01030731201171875, -0.035736083984375, 0.06781005859375, -0.012908935546875, -0.0304412841796875, -0.0237274169921875, 0.008880615234375, -0.019561767578125, 0.0088348388671875, 0.01174163818359375, 0.023529052734375, 0.01873779296875, 
-0.0374755859375, 0.021514892578125, 0.0447998046875, -0.03924560546875, -0.040496826171875, 0.05889892578125, -0.0009098052978515625, -0.00653076171875, 0.0428466796875, -0.01465606689453125, -0.038360595703125, 0.060394287109375, 0.037506103515625, 0.0709228515625, -0.01332855224609375, 0.0253143310546875, 0.068359375, 0.0097503662109375, -0.0200958251953125, 0.007793426513671875, 0.0142974853515625, -0.0408935546875, -0.006465911865234375, -0.037872314453125, -0.01361846923828125, 0.0401611328125, -0.063720703125, 0.038665771484375, -0.0518798828125, -0.0310211181640625, -0.016143798828125, -0.00907135009765625, -0.044830322265625, 0.0229949951171875, 0.010711669921875, 0.0804443359375, -0.06756591796875, 0.048309326171875, 0.044769287109375, -0.0509033203125, -0.0791015625, -0.0025959014892578125, -0.003620147705078125, -0.0389404296875, 0.0310821533203125, 0.036285400390625, 0.0069427490234375, -0.0174560546875, -0.0643310546875, -0.07403564453125, 0.1064453125, 0.0264892578125, -0.02557373046875, 0.0011281967163085938, -0.0038623809814453125, 0.031646728515625, -0.0276336669921875, 0.044677734375, 0.0269317626953125, 0.00649261474609375, 0.0234222412109375, -0.0679931640625, 0.0017194747924804688, -0.0220184326171875, 0.0173187255859375, 0.0030956268310546875, -0.09710693359375, 0.07977294921875, -0.01213836669921875, -0.017333984375, 0.0108489990234375, 0.049957275390625, 0.0140533447265625, 0.013641357421875, 0.03729248046875, 0.06024169921875, 0.0355224609375, 0.00399017333984375, 0.081787109375, -0.01454925537109375, 0.024383544921875, 0.059722900390625, 0.0012216567993164062, 0.06097412109375, 0.0087738037109375, -0.01142120361328125, 0.0214996337890625, 0.05194091796875, -0.0228424072265625, 0.047210693359375, -0.00995635986328125, -0.004100799560546875, -0.004108428955078125, -0.0408935546875, -0.0328369140625, 0.036865234375, 0.0038166046142578125, -0.01727294921875, 0.0026569366455078125, 0.019989013671875, 0.004222869873046875, -0.0262603759765625, 
-0.022613525390625, 0.034088134765625, 0.01325225830078125, -0.03314208984375, 0.057647705078125, -0.0013589859008789062, 0.0675048828125, -0.043731689453125, 0.00530242919921875, -0.0148162841796875, 0.0113525390625, -0.0139007568359375, -0.06170654296875, 0.0156707763671875, -0.006988525390625, 0.0030117034912109375, -0.00787353515625, 0.058441162109375, -0.0099334716796875, -0.039093017578125, 0.029754638671875, -0.00020015239715576172, 0.0239105224609375, -0.01129913330078125, -0.06353759765625, 0.01116180419921875, 0.0045928955078125, -0.01457977294921875, 0.022308349609375, 0.017333984375, -0.0045318603515625, 0.039825439453125, 0.042694091796875, 0.0005474090576171875, 0.003711700439453125, 0.0003561973571777344, 0.07196044921875, -0.0236358642578125, -0.0266876220703125, -0.047332763671875, 0.04315185546875, -0.0187835693359375, -0.03509521484375, 0.05645751953125, 0.047149658203125, 0.06854248046875, -0.01413726806640625, 0.046478271484375, -0.0131072998046875, 0.0191192626953125, -0.040618896484375, 0.048248291015625, -0.0626220703125, -0.00385284423828125, -0.0283203125, -0.052703857421875, -0.009185791015625, 0.04327392578125, -0.01332855224609375, 0.005916595458984375, 0.042755126953125, 0.0555419921875, -0.0182647705078125, 0.0009093284606933594, 0.0163421630859375, 0.00716400146484375, 0.0143280029296875, 0.04388427734375, 0.047637939453125, -0.06988525390625, 0.03411865234375, -0.05963134765625, -0.0264739990234375, -0.0237884521484375, -0.052215576171875, -0.07464599609375, -0.04052734375, -0.02984619140625, -0.0229644775390625, -0.006870269775390625, 0.0537109375, 0.07891845703125, -0.054656982421875, -0.0203399658203125, 0.01324462890625, -0.0193634033203125, -0.014801025390625, -0.0163421630859375, 0.036407470703125, 0.021636962890625, -0.045654296875, 0.008087158203125, 0.013580322265625, 0.021575927734375, -0.0032711029052734375, 0.00014269351959228516, -0.0203857421875, -0.00958251953125, 0.039703369140625, 0.030120849609375, 
-0.041168212890625, -0.0259857177734375, 0.0253143310546875, 0.0025177001953125, 0.0200347900390625, 0.046478271484375, -0.036834716796875, 0.0115814208984375, 0.03546142578125, 0.0272064208984375, 0.0654296875, 0.01416015625, 0.00894927978515625, -0.052642822265625, 0.033905029296875, 0.005619049072265625, 0.031463623046875, 0.021514892578125, -0.034027099609375, 0.05584716796875, 0.038482666015625, -0.044830322265625, -0.0653076171875, -0.003963470458984375, -0.09527587890625, -0.009521484375, 0.085693359375, -0.0255126953125, -0.04803466796875, 0.035400390625, -0.016876220703125, 0.01861572265625, -0.0264739990234375, 0.030120849609375, 0.029510498046875, -0.0086669921875, -0.029266357421875, -0.058624267578125, 0.033782958984375, 0.0177154541015625, -0.05474853515625, -0.0154266357421875, 0.029541015625, 0.0399169921875, 0.0203857421875, 0.051483154296875, -0.0258331298828125, 0.0283203125, 0.002044677734375, 0.0176544189453125, -0.015960693359375, -0.033538818359375, -0.036865234375, 0.004119873046875, -0.0187225341796875, -0.0308990478515625 ] ]
Helsinki-NLP/opus-mt-sq-en
2023-08-16T12:04:25.000Z
[ "transformers", "pytorch", "tf", "marian", "text2text-generation", "translation", "sq", "en", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "region:us" ]
translation
Helsinki-NLP
null
null
Helsinki-NLP/opus-mt-sq-en
1
8,211
transformers
2022-03-02T23:29:04
--- tags: - translation license: apache-2.0 --- ### opus-mt-sq-en * source languages: sq * target languages: en * OPUS readme: [sq-en](https://github.com/Helsinki-NLP/OPUS-MT-train/blob/master/models/sq-en/README.md) * dataset: opus * model: transformer-align * pre-processing: normalization + SentencePiece * download original weights: [opus-2020-01-16.zip](https://object.pouta.csc.fi/OPUS-MT-models/sq-en/opus-2020-01-16.zip) * test set translations: [opus-2020-01-16.test.txt](https://object.pouta.csc.fi/OPUS-MT-models/sq-en/opus-2020-01-16.test.txt) * test set scores: [opus-2020-01-16.eval.txt](https://object.pouta.csc.fi/OPUS-MT-models/sq-en/opus-2020-01-16.eval.txt) ## Benchmarks | testset | BLEU | chr-F | |-----------------------|-------|-------| | Tatoeba.sq.en | 58.4 | 0.732 |
818
[ [ -0.017822265625, -0.034515380859375, 0.0174407958984375, 0.03289794921875, -0.0285797119140625, -0.0282440185546875, -0.0316162109375, -0.00788116455078125, 0.0035343170166015625, 0.031890869140625, -0.053070068359375, -0.041351318359375, -0.041839599609375, 0.0191650390625, -0.00543212890625, 0.04766845703125, -0.01102447509765625, 0.042449951171875, 0.01372528076171875, -0.0288238525390625, -0.0260467529296875, -0.031341552734375, -0.03466796875, -0.01515960693359375, 0.0258026123046875, 0.036468505859375, 0.032257080078125, 0.024688720703125, 0.062103271484375, 0.01788330078125, 0.0011548995971679688, 0.005474090576171875, -0.036041259765625, -0.0036640167236328125, 0.008209228515625, -0.047882080078125, -0.051025390625, -0.00901031494140625, 0.07916259765625, 0.027587890625, 0.0010366439819335938, 0.0293121337890625, -0.007419586181640625, 0.06817626953125, -0.0240936279296875, 0.005916595458984375, -0.041351318359375, 0.00885772705078125, -0.0271148681640625, -0.0221405029296875, -0.048187255859375, -0.019561767578125, 0.00736236572265625, -0.0582275390625, -0.003936767578125, 0.0185089111328125, 0.10498046875, 0.018768310546875, -0.0267791748046875, -0.00959014892578125, -0.0413818359375, 0.07464599609375, -0.059600830078125, 0.049163818359375, 0.035919189453125, 0.0250701904296875, 0.012298583984375, -0.04852294921875, -0.0270233154296875, 0.00653839111328125, -0.01461029052734375, 0.0135345458984375, -0.01253509521484375, -0.0204010009765625, 0.023681640625, 0.052764892578125, -0.059539794921875, -0.0007643699645996094, -0.04327392578125, -0.0011548995971679688, 0.055999755859375, 0.0081939697265625, 0.0106048583984375, -0.01238250732421875, -0.0328369140625, -0.040557861328125, -0.05914306640625, 0.00682830810546875, 0.02508544921875, 0.01476287841796875, -0.0255584716796875, 0.05633544921875, -0.0121917724609375, 0.03826904296875, -0.0007491111755371094, 0.00797271728515625, 0.07232666015625, -0.0302276611328125, -0.0282135009765625, 
-0.0180206298828125, 0.09521484375, 0.021942138671875, 0.00995635986328125, 0.00682830810546875, -0.02374267578125, -0.025177001953125, 0.00524139404296875, -0.06719970703125, -0.008636474609375, 0.01012420654296875, -0.0282745361328125, -0.0072784423828125, 0.0010843276977539062, -0.04461669921875, 0.0177001953125, -0.0300750732421875, 0.052490234375, -0.05377197265625, -0.017852783203125, 0.0292205810546875, 0.00408172607421875, 0.0248260498046875, -0.0033969879150390625, -0.043853759765625, 0.0163726806640625, 0.032012939453125, 0.053924560546875, -0.0233306884765625, -0.0223846435546875, -0.037353515625, -0.01910400390625, -0.00948333740234375, 0.046173095703125, -0.00988006591796875, -0.0309906005859375, -0.0024700164794921875, 0.034820556640625, -0.032501220703125, -0.027069091796875, 0.09759521484375, -0.0228424072265625, 0.06292724609375, -0.034271240234375, -0.04498291015625, -0.0266876220703125, 0.0294952392578125, -0.0477294921875, 0.10137939453125, 0.0110626220703125, -0.06292724609375, 0.01849365234375, -0.058807373046875, -0.01387786865234375, 0.0011548995971679688, -0.0023975372314453125, -0.050262451171875, 0.00836944580078125, 0.01290130615234375, 0.033966064453125, -0.022003173828125, 0.022247314453125, -0.0013799667358398438, -0.0281219482421875, 0.00966644287109375, -0.0283203125, 0.0704345703125, 0.02215576171875, -0.026397705078125, 0.019805908203125, -0.06585693359375, -0.0023288726806640625, 0.0010128021240234375, -0.0273895263671875, -0.013946533203125, 0.003925323486328125, 0.02398681640625, 0.01486968994140625, 0.0232391357421875, -0.0450439453125, 0.0165863037109375, -0.044189453125, 0.0160980224609375, 0.043182373046875, -0.018341064453125, 0.0276031494140625, -0.03314208984375, 0.032196044921875, 0.0104522705078125, 0.00885772705078125, 0.004344940185546875, -0.035614013671875, -0.06304931640625, -0.0212860107421875, 0.046722412109375, 0.07720947265625, -0.055023193359375, 0.0599365234375, -0.05133056640625, -0.056243896484375, 
-0.061370849609375, -0.0160675048828125, 0.0333251953125, 0.0211639404296875, 0.037841796875, -0.01194000244140625, -0.03778076171875, -0.08001708984375, -0.0130767822265625, -0.008453369140625, -0.0159759521484375, 0.0142822265625, 0.045196533203125, -0.011993408203125, 0.03790283203125, -0.035858154296875, -0.0281829833984375, -0.01036834716796875, 0.00875091552734375, 0.032012939453125, 0.044830322265625, 0.037078857421875, -0.066162109375, -0.044830322265625, -0.0024356842041015625, -0.0496826171875, -0.00815582275390625, 0.0029544830322265625, -0.0174407958984375, 0.005321502685546875, 0.01036834716796875, -0.033905029296875, 0.007358551025390625, 0.0450439453125, -0.047760009765625, 0.04718017578125, -0.00421142578125, 0.023284912109375, -0.101318359375, 0.011810302734375, -0.0105133056640625, -0.01018524169921875, -0.03118896484375, -0.0015325546264648438, 0.0211029052734375, 0.006488800048828125, -0.053558349609375, 0.041259765625, -0.02105712890625, -0.0079498291015625, 0.0220794677734375, 0.004009246826171875, 0.00933074951171875, 0.051116943359375, -0.0018482208251953125, 0.065673828125, 0.0491943359375, -0.035491943359375, 0.008697509765625, 0.04095458984375, -0.032989501953125, 0.024200439453125, -0.06256103515625, -0.023284912109375, 0.0286407470703125, -0.00787353515625, -0.04876708984375, 0.007904052734375, 0.027069091796875, -0.0526123046875, 0.031768798828125, 0.0013837814331054688, -0.055206298828125, -0.00021219253540039062, -0.0212249755859375, 0.037445068359375, 0.048431396484375, -0.008941650390625, 0.044189453125, 0.0066375732421875, -0.004871368408203125, -0.038848876953125, -0.0692138671875, -0.00848388671875, -0.031280517578125, -0.059539794921875, 0.0157623291015625, -0.0260467529296875, 0.0004940032958984375, 0.0017976760864257812, 0.02142333984375, -0.00450897216796875, 0.0012941360473632812, 0.006534576416015625, 0.015838623046875, -0.042022705078125, 0.01165008544921875, 0.0007214546203613281, -0.01129150390625, -0.006927490234375, 
-0.0129547119140625, 0.040771484375, -0.0258026123046875, -0.0178985595703125, -0.04766845703125, 0.00647735595703125, 0.044952392578125, -0.0304107666015625, 0.05712890625, 0.048736572265625, -0.0097503662109375, 0.01325225830078125, -0.031463623046875, 0.0033245086669921875, -0.032928466796875, 0.0195159912109375, -0.042633056640625, -0.06304931640625, 0.0338134765625, 0.007770538330078125, 0.030792236328125, 0.06500244140625, 0.04638671875, 0.00396728515625, 0.044708251953125, 0.021636962890625, -0.00418853759765625, 0.0325927734375, -0.033416748046875, -0.00750732421875, -0.07745361328125, 0.005237579345703125, -0.05169677734375, -0.0230865478515625, -0.060211181640625, -0.0294342041015625, 0.017608642578125, 0.010833740234375, -0.019805908203125, 0.052215576171875, -0.036468505859375, 0.014862060546875, 0.041839599609375, -0.01264190673828125, 0.0228118896484375, 0.0012559890747070312, -0.03643798828125, -0.0230255126953125, -0.04034423828125, -0.040863037109375, 0.09320068359375, 0.036346435546875, 0.0173492431640625, 0.012908935546875, 0.036407470703125, 0.00038051605224609375, 0.01363372802734375, -0.046875, 0.035675048828125, -0.0145721435546875, -0.051422119140625, -0.029876708984375, -0.046112060546875, -0.06597900390625, 0.042205810546875, -0.0218048095703125, -0.03326416015625, 0.0142364501953125, -0.0061492919921875, -0.007213592529296875, 0.025177001953125, -0.051422119140625, 0.08599853515625, -0.005771636962890625, -0.0067901611328125, 0.0123291015625, -0.0338134765625, 0.02471923828125, -0.00141143798828125, 0.02154541015625, -0.014495849609375, 0.0065765380859375, 0.046051025390625, -0.007343292236328125, 0.0330810546875, -0.0051727294921875, -0.003108978271484375, 0.0021686553955078125, 0.004154205322265625, 0.0312347412109375, -0.00304412841796875, -0.030731201171875, 0.0283355712890625, 0.005512237548828125, -0.03436279296875, -0.0123748779296875, 0.035125732421875, -0.0582275390625, -0.0014715194702148438, -0.03302001953125, -0.048583984375, 
-0.0007262229919433594, 0.026580810546875, 0.055023193359375, 0.04583740234375, -0.029937744140625, 0.03436279296875, 0.062103271484375, -0.021942138671875, 0.0246429443359375, 0.053009033203125, -0.0207366943359375, -0.045013427734375, 0.061370849609375, 0.0088653564453125, 0.0248260498046875, 0.04754638671875, 0.007419586181640625, -0.014556884765625, -0.051025390625, -0.0548095703125, 0.0205841064453125, -0.0157928466796875, -0.01438140869140625, -0.04376220703125, -0.005031585693359375, -0.02398681640625, 0.0236358642578125, -0.035858154296875, -0.050689697265625, -0.01036834716796875, -0.016632080078125, 0.0158843994140625, 0.0183258056640625, -0.0047149658203125, 0.03045654296875, -0.07794189453125, 0.01169586181640625, -0.00946044921875, 0.0185089111328125, -0.0345458984375, -0.062255859375, -0.0379638671875, 0.004871368408203125, -0.043792724609375, -0.049530029296875, 0.0396728515625, 0.0123443603515625, 0.018218994140625, 0.0255584716796875, 0.0101165771484375, 0.0277099609375, -0.0531005859375, 0.07061767578125, -0.003116607666015625, -0.055999755859375, 0.027008056640625, -0.0313720703125, 0.03070068359375, 0.063232421875, 0.0270233154296875, -0.032196044921875, -0.036407470703125, -0.053466796875, -0.06304931640625, 0.053558349609375, 0.0531005859375, -0.013275146484375, 0.0167083740234375, -0.0099334716796875, -0.0016651153564453125, 0.0105133056640625, -0.08050537109375, -0.0214996337890625, 0.004924774169921875, -0.0269775390625, -0.015869140625, -0.021942138671875, -0.0159149169921875, -0.01251220703125, 0.08306884765625, 0.0153961181640625, 0.01494598388671875, 0.0308837890625, -0.01354217529296875, -0.017913818359375, 0.0242919921875, 0.0751953125, 0.0350341796875, -0.03399658203125, -0.0143585205078125, 0.0242767333984375, -0.0345458984375, -0.0144805908203125, 0.01009368896484375, -0.0298309326171875, 0.025390625, 0.0362548828125, 0.08074951171875, 0.0218048095703125, -0.0465087890625, 0.037841796875, -0.028717041015625, -0.035369873046875, 
-0.053009033203125, -0.01219940185546875, 0.0123291015625, 0.0064544677734375, 0.0206298828125, 0.0078582763671875, 0.01373291015625, -0.011383056640625, 0.0184783935546875, 0.0025844573974609375, -0.04852294921875, -0.043792724609375, 0.030609130859375, 0.007289886474609375, -0.0236663818359375, 0.038665771484375, -0.0306854248046875, -0.042022705078125, 0.02435302734375, 0.00981903076171875, 0.079345703125, -0.0207366943359375, -0.0134124755859375, 0.056243896484375, 0.043609619140625, -0.0160980224609375, 0.036346435546875, 0.00989532470703125, -0.049072265625, -0.043914794921875, -0.056549072265625, -0.01007080078125, 0.0069122314453125, -0.0633544921875, 0.0258026123046875, 0.0233001708984375, 0.00196075439453125, -0.0269775390625, 0.022003173828125, -0.039337158203125, 0.0109710693359375, -0.02130126953125, 0.0838623046875, -0.07232666015625, 0.0679931640625, 0.03546142578125, -0.0196990966796875, -0.0670166015625, -0.0182037353515625, -0.0171051025390625, -0.037078857421875, 0.043304443359375, 0.0139312744140625, 0.0228118896484375, -0.011077880859375, -0.015289306640625, -0.06024169921875, 0.08758544921875, 0.01226043701171875, -0.056243896484375, 0.0028629302978515625, 0.0171966552734375, 0.0360107421875, -0.0279998779296875, 0.0119476318359375, 0.0308380126953125, 0.055419921875, 0.0038318634033203125, -0.08099365234375, -0.0167083740234375, -0.040008544921875, -0.0255279541015625, 0.040740966796875, -0.0450439453125, 0.068603515625, 0.0306243896484375, -0.01488494873046875, 0.0070953369140625, 0.055999755859375, 0.0272064208984375, 0.0293121337890625, 0.0372314453125, 0.08587646484375, 0.033966064453125, -0.03143310546875, 0.068115234375, -0.022979736328125, 0.033447265625, 0.088134765625, -0.005039215087890625, 0.0760498046875, 0.0252838134765625, -0.0124664306640625, 0.038177490234375, 0.048065185546875, -0.0215606689453125, 0.03143310546875, 0.00809478759765625, 0.01102447509765625, -0.0095367431640625, 0.0242462158203125, -0.052978515625, 
0.01605224609375, 0.0191192626953125, -0.0113372802734375, 0.01113128662109375, -0.00768280029296875, 0.003330230712890625, -0.004024505615234375, -0.0065765380859375, 0.0399169921875, 0.0056304931640625, -0.0404052734375, 0.052703857421875, -0.004428863525390625, 0.0479736328125, -0.0499267578125, 0.01094818115234375, -0.007305145263671875, 0.01360321044921875, -0.0032634735107421875, -0.03997802734375, 0.03192138671875, -0.00403594970703125, -0.0192108154296875, -0.0243377685546875, 0.0156402587890625, -0.046142578125, -0.06463623046875, 0.0235748291015625, 0.0333251953125, 0.0281829833984375, -0.0009446144104003906, -0.067626953125, 0.00016736984252929688, 0.0128173828125, -0.050994873046875, 0.0065765380859375, 0.06658935546875, 0.0168304443359375, 0.03668212890625, 0.046417236328125, 0.01348876953125, 0.02252197265625, -0.0036640167236328125, 0.052581787109375, -0.038421630859375, -0.0285186767578125, -0.05279541015625, 0.062744140625, -0.0094757080078125, -0.05279541015625, 0.060333251953125, 0.081298828125, 0.0748291015625, -0.0125732421875, 0.02142333984375, -0.00363922119140625, 0.052703857421875, -0.046600341796875, 0.044403076171875, -0.06591796875, 0.018768310546875, -0.00647735595703125, -0.06793212890625, -0.0161895751953125, 0.0272064208984375, -0.0163726806640625, -0.0267791748046875, 0.061614990234375, 0.054931640625, -0.01409149169921875, -0.01123809814453125, 0.02557373046875, 0.02349853515625, 0.01026153564453125, 0.0472412109375, 0.033660888671875, -0.07958984375, 0.03778076171875, -0.016021728515625, -0.00556182861328125, -0.006107330322265625, -0.046966552734375, -0.060455322265625, -0.046905517578125, -0.01297760009765625, -0.0166168212890625, -0.0203857421875, 0.060943603515625, 0.0390625, -0.0679931640625, -0.040557861328125, 0.0018854141235351562, 0.005218505859375, -0.0148162841796875, -0.0196075439453125, 0.04736328125, -0.02874755859375, -0.07080078125, 0.03802490234375, 0.0034618377685546875, -0.01128387451171875, 
-0.0029850006103515625, -0.0263519287109375, -0.034149169921875, -0.0047454833984375, 0.023101806640625, 0.00447845458984375, -0.03631591796875, 0.0096893310546875, 0.0194549560546875, -0.01189422607421875, 0.0248870849609375, 0.032501220703125, -0.019012451171875, 0.0144805908203125, 0.062255859375, 0.028076171875, 0.037384033203125, -0.0099639892578125, 0.0457763671875, -0.048675537109375, 0.0212554931640625, 0.0228424072265625, 0.04345703125, 0.0235137939453125, -0.006694793701171875, 0.06109619140625, 0.01520538330078125, -0.057037353515625, -0.07672119140625, 0.00653839111328125, -0.08721923828125, -0.00000864267349243164, 0.06976318359375, -0.0262451171875, -0.019287109375, 0.023040771484375, -0.01136016845703125, 0.01329803466796875, -0.0243682861328125, 0.02984619140625, 0.06268310546875, 0.0296783447265625, 0.005558013916015625, -0.05548095703125, 0.0296783447265625, 0.050750732421875, -0.050262451171875, -0.01166534423828125, 0.02105712890625, 0.01245880126953125, 0.0233306884765625, 0.038177490234375, -0.0251312255859375, 0.01003265380859375, -0.0186614990234375, 0.0306243896484375, -0.0053558349609375, -0.01232147216796875, -0.025421142578125, -0.0024394989013671875, -0.0018138885498046875, -0.024932861328125 ] ]
lunarfish/furrydiffusion
2023-05-16T09:40:51.000Z
[ "diffusers", "text-to-image", "stable-diffusion", "furry", "anything-v3.0", "license:creativeml-openrail-m", "endpoints_compatible", "has_space", "diffusers:StableDiffusionPipeline", "region:us" ]
text-to-image
lunarfish
null
null
lunarfish/furrydiffusion
48
8,201
diffusers
2022-12-26T23:17:54
--- license: creativeml-openrail-m tags: - text-to-image - stable-diffusion - furry - anything-v3.0 --- ![images](https://cdn.discordapp.com/attachments/1050047774315532300/1057079481581445230/grid-0005.png) FurryDiffusion is a model made to generate furry art, this model is very much in beta still and will keep improoving! To use this please make sure to include `furry` in your prompt and to make a specific breed add the breed name only. Example Prompts: ``` Positive: highres, furry, fox, orange fur, blue eyes Negative: lowres, bad anatomy, bad hands, text, error, missing fingers, extra digit, fewer digits, cropped, worst quality, low quality, normal quality, blurry ``` Test the concept via A1111 Colab [fast-Colab-A1111](https://colab.research.google.com/github/TheLastBen/fast-stable-diffusion/blob/main/fast_stable_diffusion_AUTOMATIC1111.ipynb) Or you can run your new concept via `diffusers` [Colab Notebook for Inference](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/sd_dreambooth_inference.ipynb) **NOTE**: Its better to run it in Google Colab since you can use google's powerful gpu's for free. Go ahead try it now!
1,179
[ [ -0.06402587890625, -0.07159423828125, 0.04541015625, 0.051422119140625, -0.0263671875, 0.003147125244140625, 0.0132293701171875, -0.036529541015625, 0.04058837890625, 0.031585693359375, -0.050384521484375, -0.0157318115234375, -0.0301361083984375, 0.01117706298828125, -0.0277862548828125, 0.060821533203125, -0.005157470703125, -0.0028247833251953125, -0.015594482421875, 0.0267791748046875, -0.0288543701171875, -0.002899169921875, -0.048370361328125, -0.0249481201171875, 0.0063323974609375, 0.016845703125, 0.06195068359375, 0.0274200439453125, 0.017364501953125, 0.0296630859375, -0.01161956787109375, -0.007640838623046875, -0.0143585205078125, 0.005207061767578125, 0.00466156005859375, -0.0225830078125, -0.0226287841796875, 0.0242767333984375, 0.03802490234375, 0.0216827392578125, -0.0107574462890625, -0.0177154541015625, -0.01056671142578125, 0.0177459716796875, -0.03973388671875, 0.000004231929779052734, -0.00901031494140625, 0.00750732421875, -0.01253509521484375, 0.034942626953125, -0.0217742919921875, -0.033935546875, -0.01073455810546875, -0.07806396484375, 0.041748046875, 0.020233154296875, 0.076171875, -0.00603485107421875, -0.00803375244140625, -0.010009765625, -0.0294952392578125, 0.03448486328125, -0.03924560546875, 0.03228759765625, 0.002628326416015625, 0.01375579833984375, -0.027496337890625, -0.08184814453125, -0.04486083984375, -0.019866943359375, -0.0017366409301757812, 0.00634765625, -0.0211639404296875, 0.00336456298828125, 0.0284423828125, 0.035491943359375, -0.0341796875, -0.0220794677734375, -0.036712646484375, -0.038116455078125, 0.04193115234375, 0.005756378173828125, 0.03118896484375, 0.01094818115234375, -0.043426513671875, -0.0241241455078125, -0.0199737548828125, -0.007305145263671875, 0.0201263427734375, 0.0002313852310180664, -0.03265380859375, 0.021728515625, 0.010101318359375, 0.006587982177734375, 0.0462646484375, 0.0230255126953125, 0.028167724609375, 0.0009984970092773438, -0.0241851806640625, -0.024139404296875, 
0.061004638671875, 0.03436279296875, -0.007251739501953125, -0.00885772705078125, -0.01142120361328125, -0.0176544189453125, -0.01070404052734375, -0.09222412109375, -0.041412353515625, 0.0206298828125, -0.03759765625, -0.042327880859375, -0.004352569580078125, -0.08319091796875, -0.03668212890625, 0.03607177734375, 0.0413818359375, -0.0297088623046875, -0.055877685546875, 0.027496337890625, -0.048828125, 0.0011911392211914062, 0.0423583984375, -0.061004638671875, 0.03204345703125, 0.0259552001953125, 0.0576171875, 0.003429412841796875, -0.0155029296875, -0.029022216796875, -0.01071929931640625, -0.032745361328125, 0.06390380859375, -0.04669189453125, -0.0469970703125, -0.0128326416015625, 0.0156097412109375, 0.01190185546875, -0.03515625, 0.04864501953125, -0.0255584716796875, 0.0357666015625, -0.006641387939453125, -0.046875, -0.036468505859375, -0.034332275390625, -0.07586669921875, 0.0633544921875, 0.0246124267578125, -0.054840087890625, 0.02215576171875, -0.0677490234375, -0.001956939697265625, 0.0250244140625, -0.0019989013671875, -0.0139312744140625, 0.018707275390625, -0.0112152099609375, 0.02081298828125, -0.0173492431640625, 0.005001068115234375, -0.043243408203125, -0.02154541015625, 0.00830078125, -0.00496673583984375, 0.0819091796875, 0.0242767333984375, -0.01485443115234375, -0.006984710693359375, -0.03851318359375, -0.0010595321655273438, 0.017059326171875, 0.0250701904296875, -0.01172637939453125, -0.02996826171875, 0.0198822021484375, 0.0139312744140625, 0.01119232177734375, -0.0594482421875, 0.02008056640625, -0.00652313232421875, 0.0167694091796875, 0.054351806640625, 0.004901885986328125, 0.038787841796875, -0.038177490234375, 0.042327880859375, 0.00624847412109375, 0.01898193359375, 0.01849365234375, -0.055999755859375, -0.04644775390625, -0.03515625, 0.01483154296875, 0.02593994140625, -0.0535888671875, 0.0072784423828125, 0.05535888671875, -0.03912353515625, -0.0078125, -0.00736236572265625, -0.019866943359375, 0.036407470703125, 
0.002704620361328125, -0.0223846435546875, -0.0305023193359375, -0.06622314453125, 0.0251312255859375, -0.0261688232421875, 0.0009045600891113281, 0.00835418701171875, 0.03839111328125, -0.02191162109375, 0.0467529296875, -0.0657958984375, 0.0079498291015625, 0.005603790283203125, 0.033599853515625, 0.033843994140625, 0.0474853515625, 0.07635498046875, -0.053741455078125, -0.0240631103515625, -0.0380859375, -0.03369140625, -0.0289154052734375, -0.00522613525390625, -0.05126953125, -0.01358795166015625, 0.029541015625, -0.07684326171875, 0.022186279296875, 0.030731201171875, -0.0640869140625, 0.07379150390625, -0.0147552490234375, 0.0296630859375, -0.06890869140625, -0.005767822265625, 0.02178955078125, -0.018463134765625, -0.045440673828125, 0.01117706298828125, 0.00037217140197753906, -0.0115203857421875, -0.03900146484375, 0.06890869140625, -0.0202178955078125, 0.04949951171875, -0.01525115966796875, -0.00891876220703125, 0.0233306884765625, 0.00849151611328125, 0.0188751220703125, 0.053802490234375, 0.07501220703125, -0.042022705078125, 0.03558349609375, 0.052703857421875, -0.01287841796875, 0.0139312744140625, -0.04071044921875, 0.00421905517578125, -0.00408935546875, 0.0161590576171875, -0.0780029296875, -0.0277862548828125, 0.043853759765625, -0.025634765625, 0.0204620361328125, 0.0293426513671875, -0.0259857177734375, -0.030670166015625, -0.0377197265625, 0.0235595703125, 0.06805419921875, -0.04205322265625, 0.0304412841796875, 0.0022678375244140625, -0.005645751953125, -0.0223236083984375, -0.0499267578125, 0.00444793701171875, -0.0203094482421875, -0.0721435546875, 0.044158935546875, -0.0279388427734375, -0.026947021484375, 0.0193023681640625, 0.002475738525390625, -0.0313720703125, -0.00019490718841552734, 0.0190582275390625, 0.0267181396484375, -0.03094482421875, 0.006984710693359375, -0.0089263916015625, 0.0027523040771484375, 0.0160980224609375, 0.0145416259765625, 0.021514892578125, -0.0276336669921875, -0.015869140625, -0.04315185546875, 
0.04559326171875, 0.054779052734375, 0.0279083251953125, 0.026580810546875, 0.060089111328125, -0.035980224609375, -0.0193023681640625, -0.053009033203125, -0.03466796875, -0.039154052734375, 0.0016155242919921875, -0.01338958740234375, -0.041229248046875, 0.054656982421875, -0.00957489013671875, 0.0106658935546875, 0.058685302734375, 0.017486572265625, -0.030609130859375, 0.0867919921875, 0.0797119140625, 0.0218963623046875, 0.044158935546875, -0.03204345703125, -0.006221771240234375, -0.04437255859375, -0.01450347900390625, -0.034454345703125, -0.02001953125, -0.054046630859375, -0.040130615234375, -0.005184173583984375, 0.0307159423828125, -0.02783203125, 0.0077972412109375, -0.0303497314453125, 0.026275634765625, 0.0283203125, 0.0126495361328125, 0.0139312744140625, 0.01552581787109375, -0.0169219970703125, 0.0004096031188964844, -0.0316162109375, -0.023101806640625, 0.028045654296875, 0.0325927734375, 0.08526611328125, -0.01152801513671875, 0.06695556640625, 0.0229644775390625, 0.03765869140625, -0.0303955078125, 0.0386962890625, -0.0253448486328125, -0.06439208984375, -0.00189208984375, 0.01074981689453125, -0.0267486572265625, 0.0217742919921875, -0.0241241455078125, -0.06365966796875, 0.035552978515625, 0.006320953369140625, -0.005687713623046875, 0.036163330078125, -0.057281494140625, 0.05609130859375, -0.006237030029296875, -0.036346435546875, 0.0026454925537109375, 0.0008077621459960938, 0.03924560546875, -0.00667572021484375, 0.0050201416015625, -0.02252197265625, -0.012847900390625, 0.05364990234375, -0.044647216796875, 0.05377197265625, -0.03228759765625, 0.01030731201171875, 0.02435302734375, 0.0174407958984375, 0.037109375, 0.00835418701171875, -0.0282745361328125, 0.001262664794921875, 0.0159759521484375, -0.0179901123046875, -0.0304412841796875, 0.03253173828125, -0.04296875, -0.06671142578125, -0.03753662109375, -0.018707275390625, -0.0122222900390625, 0.0040740966796875, 0.03936767578125, 0.0149078369140625, -0.0213165283203125, 
0.00696563720703125, 0.04034423828125, -0.006900787353515625, 0.028106689453125, 0.0238189697265625, -0.046478271484375, -0.034515380859375, 0.050689697265625, -0.0418701171875, 0.0174407958984375, -0.0095672607421875, 0.04095458984375, -0.041046142578125, -0.025299072265625, -0.05633544921875, 0.038787841796875, -0.056549072265625, -0.019012451171875, -0.033477783203125, -0.027008056640625, -0.02215576171875, -0.035797119140625, -0.01000213623046875, -0.030029296875, -0.055328369140625, 0.00882720947265625, 0.0718994140625, 0.041046142578125, -0.043670654296875, 0.0281829833984375, -0.03802490234375, 0.033050537109375, 0.0094146728515625, 0.03204345703125, -0.0175933837890625, -0.031982421875, 0.0211944580078125, 0.0171356201171875, -0.04547119140625, -0.06207275390625, 0.035369873046875, 0.0222320556640625, 0.054443359375, 0.02801513671875, -0.00641632080078125, 0.0689697265625, -0.0177764892578125, 0.057708740234375, 0.04742431640625, -0.07110595703125, 0.028167724609375, -0.00437164306640625, 0.00637054443359375, 0.04052734375, 0.033447265625, -0.049896240234375, -0.07659912109375, -0.0633544921875, -0.026947021484375, 0.042816162109375, 0.0279388427734375, 0.0247955322265625, 0.000293731689453125, 0.025238037109375, 0.035186767578125, -0.004650115966796875, -0.056243896484375, -0.019622802734375, -0.0260772705078125, 0.003513336181640625, 0.012481689453125, -0.007068634033203125, 0.004558563232421875, -0.043670654296875, 0.068359375, 0.0006985664367675781, 0.031219482421875, 0.0170745849609375, 0.0090484619140625, -0.0300445556640625, -0.0115509033203125, 0.024627685546875, 0.02276611328125, -0.039031982421875, -0.00818634033203125, -0.009796142578125, -0.06304931640625, 0.0232696533203125, 0.00626373291015625, -0.022003173828125, 0.031036376953125, 0.01071929931640625, 0.061309814453125, -0.0242462158203125, -0.0193634033203125, 0.03729248046875, -0.035125732421875, -0.042266845703125, 0.00511932373046875, 0.0229034423828125, 0.0143890380859375, 
0.0369873046875, 0.02294921875, 0.0143585205078125, 0.02838134765625, -0.020904541015625, 0.01381683349609375, 0.028594970703125, -0.0181427001953125, -0.0063323974609375, 0.06597900390625, 0.027496337890625, -0.006725311279296875, 0.056365966796875, -0.03662109375, -0.0177764892578125, 0.0828857421875, 0.0684814453125, 0.09423828125, -0.00901031494140625, 0.05450439453125, 0.03802490234375, 0.00017547607421875, -0.0089263916015625, 0.039825439453125, 0.00713348388671875, -0.0296630859375, -0.018707275390625, -0.06988525390625, -0.037322998046875, 0.041717529296875, -0.00991058349609375, 0.0458984375, -0.028533935546875, -0.00826263427734375, 0.025054931640625, -0.005527496337890625, -0.036773681640625, 0.00820159912109375, 0.0214996337890625, 0.11688232421875, -0.09320068359375, 0.033843994140625, 0.0462646484375, -0.0237274169921875, -0.042205810546875, 0.0032100677490234375, -0.00702667236328125, -0.03790283203125, 0.05609130859375, 0.01251220703125, -0.020263671875, -0.01059722900390625, -0.05572509765625, -0.051544189453125, 0.065185546875, 0.0284881591796875, -0.018280029296875, 0.007778167724609375, -0.037109375, 0.0263519287109375, -0.04559326171875, 0.0202789306640625, -0.0005230903625488281, 0.04241943359375, 0.046600341796875, -0.0443115234375, 0.0288543701171875, -0.038818359375, 0.004940032958984375, 0.0103912353515625, -0.034820556640625, 0.080078125, -0.03961181640625, -0.04058837890625, 0.03863525390625, 0.05389404296875, 0.0109710693359375, 0.0190887451171875, 0.055206298828125, 0.0214996337890625, 0.046295166015625, -0.0205230712890625, 0.050689697265625, -0.0097808837890625, 0.0391845703125, 0.044830322265625, -0.0109710693359375, 0.0235595703125, 0.015960693359375, -0.01004791259765625, 0.049530029296875, 0.0950927734375, -0.034027099609375, 0.04913330078125, 0.0204620361328125, -0.034759521484375, -0.0305023193359375, 0.00614166259765625, -0.047454833984375, 0.0108184814453125, 0.029998779296875, -0.0164031982421875, -0.0322265625, 
0.0232391357421875, -0.016876220703125, -0.0294342041015625, -0.007289886474609375, 0.03387451171875, -0.005207061767578125, -0.028228759765625, 0.061126708984375, -0.0107879638671875, 0.06683349609375, -0.042938232421875, -0.0364990234375, -0.0225830078125, -0.00827789306640625, -0.0257720947265625, -0.0810546875, 0.0218505859375, -0.01038360595703125, 0.0038585662841796875, -0.01244354248046875, 0.067626953125, -0.01546478271484375, -0.053985595703125, 0.01849365234375, 0.03076171875, 0.039398193359375, 0.0087738037109375, -0.06304931640625, 0.0169677734375, -0.0081634521484375, -0.014373779296875, -0.01519775390625, 0.01531219482421875, 0.006946563720703125, 0.04925537109375, 0.022857666015625, 0.01111602783203125, -0.02191162109375, -0.002635955810546875, 0.05572509765625, -0.04248046875, -0.0352783203125, -0.05657958984375, 0.0298919677734375, -0.0164794921875, -0.00763702392578125, 0.0352783203125, 0.04046630859375, 0.03814697265625, -0.037933349609375, 0.06134033203125, -0.038909912109375, -0.01299285888671875, -0.018951416015625, 0.053924560546875, -0.053436279296875, -0.0307769775390625, -0.024658203125, -0.04327392578125, -0.0009188652038574219, 0.059661865234375, 0.01019287109375, 0.007404327392578125, 0.072998046875, 0.07763671875, -0.019256591796875, 0.027435302734375, 0.0140533447265625, 0.01171112060546875, 0.01345062255859375, 0.026947021484375, 0.0826416015625, -0.021514892578125, 0.01409149169921875, -0.06854248046875, -0.0301513671875, -0.031158447265625, -0.090576171875, -0.03857421875, -0.0723876953125, -0.050384521484375, -0.04913330078125, -0.011322021484375, 0.03759765625, 0.07281494140625, -0.05859375, -0.013031005859375, -0.01459503173828125, 0.02435302734375, -0.004085540771484375, -0.02056884765625, 0.00879669189453125, -0.00696563720703125, -0.0506591796875, -0.0185546875, 0.004772186279296875, 0.051055908203125, -0.016326904296875, 0.00302886962890625, -0.007320404052734375, 0.0001322031021118164, 0.0297088623046875, 
0.0219573974609375, -0.04547119140625, -0.03076171875, -0.002002716064453125, -0.0099945068359375, 0.00554656982421875, 0.0338134765625, -0.036651611328125, 0.0298309326171875, 0.0557861328125, -0.0081329345703125, 0.049468994140625, -0.01406097412109375, 0.0303955078125, -0.0119781494140625, 0.01432037353515625, 0.01971435546875, 0.02398681640625, 0.005046844482421875, -0.0472412109375, 0.0203094482421875, 0.036865234375, -0.022552490234375, -0.05706787109375, 0.005344390869140625, -0.07415771484375, -0.040740966796875, 0.0699462890625, 0.02667236328125, -0.029754638671875, 0.00579833984375, -0.040985107421875, -0.00000762939453125, -0.038909912109375, 0.056884765625, 0.0743408203125, -0.052215576171875, 0.00211334228515625, -0.05279541015625, 0.047637939453125, -0.01448822021484375, -0.062164306640625, 0.00646209716796875, 0.054779052734375, 0.049530029296875, 0.035369873046875, 0.046417236328125, -0.007770538330078125, 0.054351806640625, 0.041259765625, 0.0205078125, -0.004055023193359375, -0.00095367431640625, -0.0082244873046875, 0.003017425537109375, -0.0009407997131347656, -0.0380859375 ] ]
lllyasviel/control_v11p_sd15_inpaint
2023-05-26T09:58:45.000Z
[ "diffusers", "art", "controlnet", "stable-diffusion", "controlnet-v1-1", "image-to-image", "arxiv:2302.05543", "license:openrail", "has_space", "diffusers:ControlNetModel", "region:us" ]
image-to-image
lllyasviel
null
null
lllyasviel/control_v11p_sd15_inpaint
55
8,191
diffusers
2023-04-14T19:25:42
--- license: openrail base_model: runwayml/stable-diffusion-v1-5 tags: - art - controlnet - stable-diffusion - controlnet-v1-1 - image-to-image duplicated_from: ControlNet-1-1-preview/control_v11p_sd15_inpaint --- # Controlnet - v1.1 - *InPaint Version* **Controlnet v1.1** was released in [lllyasviel/ControlNet-v1-1](https://huggingface.co/lllyasviel/ControlNet-v1-1) by [Lvmin Zhang](https://huggingface.co/lllyasviel). This checkpoint is a conversion of [the original checkpoint](https://huggingface.co/lllyasviel/ControlNet-v1-1/blob/main/control_v11p_sd15_inpaint.pth) into `diffusers` format. It can be used in combination with **Stable Diffusion**, such as [runwayml/stable-diffusion-v1-5](https://huggingface.co/runwayml/stable-diffusion-v1-5). For more details, please also have a look at the [🧨 Diffusers docs](https://huggingface.co/docs/diffusers/api/pipelines/stable_diffusion/controlnet). ControlNet is a neural network structure to control diffusion models by adding extra conditions. ![img](./sd.png) This checkpoint corresponds to the ControlNet conditioned on **inpaint images**. ## Model Details - **Developed by:** Lvmin Zhang, Maneesh Agrawala - **Model type:** Diffusion-based text-to-image generation model - **Language(s):** English - **License:** [The CreativeML OpenRAIL M license](https://huggingface.co/spaces/CompVis/stable-diffusion-license) is an [Open RAIL M license](https://www.licenses.ai/blog/2022/8/18/naming-convention-of-responsible-ai-licenses), adapted from the work that [BigScience](https://bigscience.huggingface.co/) and [the RAIL Initiative](https://www.licenses.ai/) are jointly carrying in the area of responsible AI licensing. See also [the article about the BLOOM Open RAIL license](https://bigscience.huggingface.co/blog/the-bigscience-rail-license) on which our license is based. - **Resources for more information:** [GitHub Repository](https://github.com/lllyasviel/ControlNet), [Paper](https://arxiv.org/abs/2302.05543). 
- **Cite as:** @misc{zhang2023adding, title={Adding Conditional Control to Text-to-Image Diffusion Models}, author={Lvmin Zhang and Maneesh Agrawala}, year={2023}, eprint={2302.05543}, archivePrefix={arXiv}, primaryClass={cs.CV} } ## Introduction Controlnet was proposed in [*Adding Conditional Control to Text-to-Image Diffusion Models*](https://arxiv.org/abs/2302.05543) by Lvmin Zhang, Maneesh Agrawala. The abstract reads as follows: *We present a neural network structure, ControlNet, to control pretrained large diffusion models to support additional input conditions. The ControlNet learns task-specific conditions in an end-to-end way, and the learning is robust even when the training dataset is small (< 50k). Moreover, training a ControlNet is as fast as fine-tuning a diffusion model, and the model can be trained on a personal devices. Alternatively, if powerful computation clusters are available, the model can scale to large amounts (millions to billions) of data. We report that large diffusion models like Stable Diffusion can be augmented with ControlNets to enable conditional inputs like edge maps, segmentation maps, keypoints, etc. This may enrich the methods to control large diffusion models and further facilitate related applications.* ## Example It is recommended to use the checkpoint with [Stable Diffusion v1-5](https://huggingface.co/runwayml/stable-diffusion-v1-5) as the checkpoint has been trained on it. Experimentally, the checkpoint can be used with other diffusion models such as dreamboothed stable diffusion. 1. Let's install `diffusers` and related packages: ``` $ pip install diffusers transformers accelerate ``` 2. 
Run code: ```python # !pip install transformers accelerate from diffusers import StableDiffusionControlNetInpaintPipeline, ControlNetModel from diffusers.utils import load_image import numpy as np import torch init_image = load_image( "https://huggingface.co/datasets/diffusers/test-arrays/resolve/main/stable_diffusion_inpaint/boy.png" ) init_image = init_image.resize((512, 512)) generator = torch.Generator(device="cpu").manual_seed(1) mask_image = load_image( "https://huggingface.co/datasets/diffusers/test-arrays/resolve/main/stable_diffusion_inpaint/boy_mask.png" ) mask_image = mask_image.resize((512, 512)) def make_inpaint_condition(image, image_mask): image = np.array(image.convert("RGB")).astype(np.float32) / 255.0 image_mask = np.array(image_mask.convert("L")).astype(np.float32) / 255.0 assert image.shape[0:1] == image_mask.shape[0:1], "image and image_mask must have the same image size" image[image_mask > 0.5] = -1.0 # set as masked pixel image = np.expand_dims(image, 0).transpose(0, 3, 1, 2) image = torch.from_numpy(image) return image control_image = make_inpaint_condition(init_image, mask_image) controlnet = ControlNetModel.from_pretrained( "lllyasviel/control_v11p_sd15_inpaint", torch_dtype=torch.float16 ) pipe = StableDiffusionControlNetInpaintPipeline.from_pretrained( "runwayml/stable-diffusion-v1-5", controlnet=controlnet, torch_dtype=torch.float16 ) pipe.scheduler = DDIMScheduler.from_config(pipe.scheduler.config) pipe.enable_model_cpu_offload() # generate image image = pipe( "a handsome man with ray-ban sunglasses", num_inference_steps=20, generator=generator, eta=1.0, image=init_image, mask_image=mask_image, control_image=control_image, ).images[0] ``` ![original](https://huggingface.co/datasets/diffusers/test-arrays/resolve/main/stable_diffusion_inpaint/boy.png) ![img](https://huggingface.co/datasets/diffusers/test-arrays/resolve/main/stable_diffusion_inpaint/boy_with_mask.png) 
![img](https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/sd_controlnet/boy_ray_ban.png) ## Other released checkpoints v1-1 The authors released 14 different checkpoints, each trained with [Stable Diffusion v1-5](https://huggingface.co/runwayml/stable-diffusion-v1-5) on a different type of conditioning: | Model Name | Control Image Overview| Condition Image | Control Image Example | Generated Image Example | |---|---|---|---|---| |[lllyasviel/control_v11p_sd15_canny](https://huggingface.co/lllyasviel/control_v11p_sd15_canny)<br/> | *Trained with canny edge detection* | A monochrome image with white edges on a black background.|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15_canny/resolve/main/images/control.png"><img width="64" style="margin:0;padding:0;" src="https://huggingface.co/lllyasviel/control_v11p_sd15_canny/resolve/main/images/control.png"/></a>|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15_canny/resolve/main/images/image_out.png"><img width="64" src="https://huggingface.co/lllyasviel/control_v11p_sd15_canny/resolve/main/images/image_out.png"/></a>| |[lllyasviel/control_v11e_sd15_ip2p](https://huggingface.co/lllyasviel/control_v11e_sd15_ip2p)<br/> | *Trained with pixel to pixel instruction* | No condition .|<a href="https://huggingface.co/lllyasviel/control_v11e_sd15_ip2p/resolve/main/images/control.png"><img width="64" style="margin:0;padding:0;" src="https://huggingface.co/lllyasviel/control_v11e_sd15_ip2p/resolve/main/images/control.png"/></a>|<a href="https://huggingface.co/lllyasviel/control_v11e_sd15_ip2p/resolve/main/images/image_out.png"><img width="64" src="https://huggingface.co/lllyasviel/control_v11e_sd15_ip2p/resolve/main/images/image_out.png"/></a>| |[lllyasviel/control_v11p_sd15_inpaint](https://huggingface.co/lllyasviel/control_v11p_sd15_inpaint)<br/> | Trained with image inpainting | No condition.|<a 
href="https://huggingface.co/lllyasviel/control_v11p_sd15_inpaint/resolve/main/images/control.png"><img width="64" style="margin:0;padding:0;" src="https://huggingface.co/lllyasviel/control_v11p_sd15_inpaint/resolve/main/images/control.png"/></a>|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15_inpaint/resolve/main/images/output.png"><img width="64" src="https://huggingface.co/lllyasviel/control_v11p_sd15_inpaint/resolve/main/images/output.png"/></a>| |[lllyasviel/control_v11p_sd15_mlsd](https://huggingface.co/lllyasviel/control_v11p_sd15_mlsd)<br/> | Trained with multi-level line segment detection | An image with annotated line segments.|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15_mlsd/resolve/main/images/control.png"><img width="64" style="margin:0;padding:0;" src="https://huggingface.co/lllyasviel/control_v11p_sd15_mlsd/resolve/main/images/control.png"/></a>|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15_mlsd/resolve/main/images/image_out.png"><img width="64" src="https://huggingface.co/lllyasviel/control_v11p_sd15_mlsd/resolve/main/images/image_out.png"/></a>| |[lllyasviel/control_v11f1p_sd15_depth](https://huggingface.co/lllyasviel/control_v11f1p_sd15_depth)<br/> | Trained with depth estimation | An image with depth information, usually represented as a grayscale image.|<a href="https://huggingface.co/lllyasviel/control_v11f1p_sd15_depth/resolve/main/images/control.png"><img width="64" style="margin:0;padding:0;" src="https://huggingface.co/lllyasviel/control_v11f1p_sd15_depth/resolve/main/images/control.png"/></a>|<a href="https://huggingface.co/lllyasviel/control_v11f1p_sd15_depth/resolve/main/images/image_out.png"><img width="64" src="https://huggingface.co/lllyasviel/control_v11f1p_sd15_depth/resolve/main/images/image_out.png"/></a>| |[lllyasviel/control_v11p_sd15_normalbae](https://huggingface.co/lllyasviel/control_v11p_sd15_normalbae)<br/> | Trained with surface normal estimation | An image with surface normal 
information, usually represented as a color-coded image.|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15_normalbae/resolve/main/images/control.png"><img width="64" style="margin:0;padding:0;" src="https://huggingface.co/lllyasviel/control_v11p_sd15_normalbae/resolve/main/images/control.png"/></a>|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15_normalbae/resolve/main/images/image_out.png"><img width="64" src="https://huggingface.co/lllyasviel/control_v11p_sd15_normalbae/resolve/main/images/image_out.png"/></a>| |[lllyasviel/control_v11p_sd15_seg](https://huggingface.co/lllyasviel/control_v11p_sd15_seg)<br/> | Trained with image segmentation | An image with segmented regions, usually represented as a color-coded image.|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15_seg/resolve/main/images/control.png"><img width="64" style="margin:0;padding:0;" src="https://huggingface.co/lllyasviel/control_v11p_sd15_seg/resolve/main/images/control.png"/></a>|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15_seg/resolve/main/images/image_out.png"><img width="64" src="https://huggingface.co/lllyasviel/control_v11p_sd15_seg/resolve/main/images/image_out.png"/></a>| |[lllyasviel/control_v11p_sd15_lineart](https://huggingface.co/lllyasviel/control_v11p_sd15_lineart)<br/> | Trained with line art generation | An image with line art, usually black lines on a white background.|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15_lineart/resolve/main/images/control.png"><img width="64" style="margin:0;padding:0;" src="https://huggingface.co/lllyasviel/control_v11p_sd15_lineart/resolve/main/images/control.png"/></a>|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15_lineart/resolve/main/images/image_out.png"><img width="64" src="https://huggingface.co/lllyasviel/control_v11p_sd15_lineart/resolve/main/images/image_out.png"/></a>| 
|[lllyasviel/control_v11p_sd15s2_lineart_anime](https://huggingface.co/lllyasviel/control_v11p_sd15s2_lineart_anime)<br/> | Trained with anime line art generation | An image with anime-style line art.|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15s2_lineart_anime/resolve/main/images/control.png"><img width="64" style="margin:0;padding:0;" src="https://huggingface.co/lllyasviel/control_v11p_sd15s2_lineart_anime/resolve/main/images/control.png"/></a>|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15s2_lineart_anime/resolve/main/images/image_out.png"><img width="64" src="https://huggingface.co/lllyasviel/control_v11p_sd15s2_lineart_anime/resolve/main/images/image_out.png"/></a>| |[lllyasviel/control_v11p_sd15_openpose](https://huggingface.co/lllyasviel/control_v11p_sd15s2_lineart_anime)<br/> | Trained with human pose estimation | An image with human poses, usually represented as a set of keypoints or skeletons.|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15_openpose/resolve/main/images/control.png"><img width="64" style="margin:0;padding:0;" src="https://huggingface.co/lllyasviel/control_v11p_sd15_openpose/resolve/main/images/control.png"/></a>|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15_openpose/resolve/main/images/image_out.png"><img width="64" src="https://huggingface.co/lllyasviel/control_v11p_sd15_openpose/resolve/main/images/image_out.png"/></a>| |[lllyasviel/control_v11p_sd15_scribble](https://huggingface.co/lllyasviel/control_v11p_sd15_scribble)<br/> | Trained with scribble-based image generation | An image with scribbles, usually random or user-drawn strokes.|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15_scribble/resolve/main/images/control.png"><img width="64" style="margin:0;padding:0;" src="https://huggingface.co/lllyasviel/control_v11p_sd15_scribble/resolve/main/images/control.png"/></a>|<a 
href="https://huggingface.co/lllyasviel/control_v11p_sd15_scribble/resolve/main/images/image_out.png"><img width="64" src="https://huggingface.co/lllyasviel/control_v11p_sd15_scribble/resolve/main/images/image_out.png"/></a>| |[lllyasviel/control_v11p_sd15_softedge](https://huggingface.co/lllyasviel/control_v11p_sd15_softedge)<br/> | Trained with soft edge image generation | An image with soft edges, usually to create a more painterly or artistic effect.|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15_softedge/resolve/main/images/control.png"><img width="64" style="margin:0;padding:0;" src="https://huggingface.co/lllyasviel/control_v11p_sd15_softedge/resolve/main/images/control.png"/></a>|<a href="https://huggingface.co/lllyasviel/control_v11p_sd15_softedge/resolve/main/images/image_out.png"><img width="64" src="https://huggingface.co/lllyasviel/control_v11p_sd15_softedge/resolve/main/images/image_out.png"/></a>| |[lllyasviel/control_v11e_sd15_shuffle](https://huggingface.co/lllyasviel/control_v11e_sd15_shuffle)<br/> | Trained with image shuffling | An image with shuffled patches or regions.|<a href="https://huggingface.co/lllyasviel/control_v11e_sd15_shuffle/resolve/main/images/control.png"><img width="64" style="margin:0;padding:0;" src="https://huggingface.co/lllyasviel/control_v11e_sd15_shuffle/resolve/main/images/control.png"/></a>|<a href="https://huggingface.co/lllyasviel/control_v11e_sd15_shuffle/resolve/main/images/image_out.png"><img width="64" src="https://huggingface.co/lllyasviel/control_v11e_sd15_shuffle/resolve/main/images/image_out.png"/></a>| |[lllyasviel/control_v11f1e_sd15_tile](https://huggingface.co/lllyasviel/control_v11f1e_sd15_tile)<br/> | Trained with image tiling | A blurry image or part of an image .|<a href="https://huggingface.co/lllyasviel/control_v11f1e_sd15_tile/resolve/main/images/original.png"><img width="64" style="margin:0;padding:0;" 
src="https://huggingface.co/lllyasviel/control_v11f1e_sd15_tile/resolve/main/images/original.png"/></a>|<a href="https://huggingface.co/lllyasviel/control_v11f1e_sd15_tile/resolve/main/images/output.png"><img width="64" src="https://huggingface.co/lllyasviel/control_v11f1e_sd15_tile/resolve/main/images/output.png"/></a>| ## More information For more information, please also have a look at the [Diffusers ControlNet Blog Post](https://huggingface.co/blog/controlnet) and have a look at the [official docs](https://github.com/lllyasviel/ControlNet-v1-1-nightly).
15,912
[ [ -0.0416259765625, -0.04046630859375, 0.01259613037109375, 0.04150390625, -0.0164337158203125, -0.012237548828125, 0.0023174285888671875, -0.0355224609375, 0.036865234375, 0.021240234375, -0.05450439453125, -0.0228118896484375, -0.056610107421875, -0.01509857177734375, -0.00849151611328125, 0.06353759765625, -0.024658203125, -0.00035762786865234375, 0.002666473388671875, -0.005290985107421875, -0.0046844482421875, -0.006572723388671875, -0.09002685546875, -0.03558349609375, 0.0298004150390625, 0.00771331787109375, 0.03900146484375, 0.03448486328125, 0.04010009765625, 0.0290069580078125, -0.0276031494140625, 0.0032196044921875, -0.0274505615234375, -0.017822265625, 0.01512908935546875, -0.0124664306640625, -0.046875, 0.01012420654296875, 0.054168701171875, 0.0244140625, -0.0035648345947265625, -0.01151275634765625, 0.01444244384765625, 0.05426025390625, -0.03802490234375, -0.00860595703125, -0.0119781494140625, 0.0175628662109375, -0.013336181640625, 0.004405975341796875, -0.0106658935546875, -0.0251007080078125, 0.0044708251953125, -0.06005859375, 0.00010758638381958008, -0.0180816650390625, 0.10003662109375, 0.0239410400390625, -0.025665283203125, -0.00348663330078125, -0.017822265625, 0.05523681640625, -0.06121826171875, 0.004543304443359375, 0.0099029541015625, 0.0190277099609375, -0.0225067138671875, -0.0770263671875, -0.040802001953125, -0.00801849365234375, -0.0117340087890625, 0.038360595703125, -0.0251617431640625, 0.0008678436279296875, 0.021636962890625, 0.0206451416015625, -0.0311431884765625, 0.0135650634765625, -0.02374267578125, -0.0247650146484375, 0.049896240234375, 0.0024700164794921875, 0.04248046875, 0.004917144775390625, -0.049774169921875, -0.005496978759765625, -0.03363037109375, 0.0237579345703125, 0.0186004638671875, -0.011962890625, -0.05718994140625, 0.0305328369140625, -0.0043487548828125, 0.054412841796875, 0.03802490234375, -0.02008056640625, 0.038055419921875, -0.01629638671875, -0.031768798828125, -0.02178955078125, 0.075439453125, 
0.03582763671875, 0.00737762451171875, -0.002300262451171875, -0.00986480712890625, -0.01177215576171875, -0.00490570068359375, -0.09906005859375, -0.01392364501953125, 0.016998291015625, -0.0406494140625, -0.025543212890625, -0.011627197265625, -0.060638427734375, -0.01308441162109375, -0.006195068359375, 0.034423828125, -0.03924560546875, -0.035125732421875, 0.0142364501953125, -0.035247802734375, 0.03131103515625, 0.0474853515625, -0.034149169921875, 0.01557159423828125, 0.01140594482421875, 0.07470703125, -0.0221099853515625, -0.008453369140625, -0.01454925537109375, -0.004894256591796875, -0.025299072265625, 0.030914306640625, -0.01509857177734375, -0.007785797119140625, -0.00860595703125, 0.0255126953125, -0.01140594482421875, -0.033416748046875, 0.0277252197265625, -0.0301971435546875, 0.0168914794921875, -0.0009565353393554688, -0.0316162109375, -0.01367950439453125, 0.013397216796875, -0.03228759765625, 0.059356689453125, 0.0256805419921875, -0.07525634765625, 0.028350830078125, -0.042266845703125, -0.01232147216796875, -0.015655517578125, 0.0077972412109375, -0.0595703125, -0.030853271484375, -0.0007963180541992188, 0.042877197265625, -0.0015201568603515625, -0.00499725341796875, -0.034210205078125, -0.00762176513671875, 0.01117706298828125, -0.0095367431640625, 0.09912109375, 0.010162353515625, -0.050689697265625, 0.00748443603515625, -0.0577392578125, 0.0012149810791015625, 0.011138916015625, -0.0206146240234375, 0.00923919677734375, -0.0299224853515625, 0.01003265380859375, 0.05010986328125, 0.02642822265625, -0.055633544921875, 0.006000518798828125, -0.0215606689453125, 0.033447265625, 0.053009033203125, 0.01611328125, 0.046844482421875, -0.036102294921875, 0.0435791015625, 0.0245819091796875, 0.0203094482421875, 0.0035953521728515625, -0.0440673828125, -0.0758056640625, -0.048828125, -0.0032100677490234375, 0.0379638671875, -0.06134033203125, 0.05096435546875, 0.00571441650390625, -0.0528564453125, -0.0180816650390625, 0.004688262939453125, 
0.03778076171875, 0.042083740234375, 0.0234527587890625, -0.0416259765625, -0.0212249755859375, -0.068359375, 0.0154571533203125, 0.0202178955078125, 0.0006937980651855469, 0.01374053955078125, 0.05126953125, -0.0082855224609375, 0.0511474609375, -0.020050048828125, -0.0259552001953125, -0.0003941059112548828, -0.00726318359375, 0.02880859375, 0.07794189453125, 0.05950927734375, -0.056732177734375, -0.054473876953125, -0.0024738311767578125, -0.06378173828125, -0.0015916824340820312, -0.01415252685546875, -0.040069580078125, 0.018524169921875, 0.04534912109375, -0.050628662109375, 0.062164306640625, 0.04290771484375, -0.039337158203125, 0.04205322265625, -0.024932861328125, 0.01369476318359375, -0.07293701171875, 0.016143798828125, 0.024688720703125, -0.0260162353515625, -0.04364013671875, 0.00911712646484375, 0.0083465576171875, 0.002033233642578125, -0.05511474609375, 0.058197021484375, -0.037109375, 0.0198516845703125, -0.0207061767578125, -0.0118408203125, 0.0035572052001953125, 0.053741455078125, 0.0082855224609375, 0.03936767578125, 0.07501220703125, -0.04766845703125, 0.02655029296875, 0.029998779296875, -0.0235443115234375, 0.0621337890625, -0.06195068359375, 0.01189422607421875, -0.01451873779296875, 0.0419921875, -0.07659912109375, -0.0212249755859375, 0.05059814453125, -0.032073974609375, 0.045623779296875, -0.021881103515625, -0.023773193359375, -0.027130126953125, -0.0245819091796875, 0.023406982421875, 0.057403564453125, -0.04132080078125, 0.024322509765625, 0.01526641845703125, 0.0154571533203125, -0.039154052734375, -0.0704345703125, -0.005474090576171875, -0.0308380126953125, -0.0625, 0.04376220703125, -0.01959228515625, 0.0019512176513671875, 0.0009965896606445312, 0.004558563232421875, -0.0181427001953125, -0.0046844482421875, 0.02728271484375, 0.017425537109375, -0.006244659423828125, -0.0172882080078125, 0.00652313232421875, -0.01419830322265625, -0.003658294677734375, -0.0207366943359375, 0.03216552734375, 0.00858306884765625, 
-0.0186767578125, -0.074462890625, 0.01617431640625, 0.042144775390625, 0.006103515625, 0.068359375, 0.060516357421875, -0.028594970703125, -0.003002166748046875, -0.027618408203125, -0.01226806640625, -0.03912353515625, -0.0002484321594238281, -0.0152435302734375, -0.052520751953125, 0.052978515625, 0.0032100677490234375, -0.0011644363403320312, 0.048980712890625, 0.0372314453125, -0.0169830322265625, 0.0648193359375, 0.03802490234375, -0.00443267822265625, 0.059722900390625, -0.0604248046875, -0.01540374755859375, -0.07379150390625, -0.0245819091796875, -0.0212249755859375, -0.0552978515625, -0.0264739990234375, -0.031768798828125, 0.036163330078125, 0.027435302734375, -0.05828857421875, 0.031646728515625, -0.045379638671875, 0.01027679443359375, 0.0243682861328125, 0.04339599609375, -0.01226806640625, -0.0016050338745117188, -0.01035308837890625, -0.0005249977111816406, -0.0474853515625, -0.01218414306640625, 0.0489501953125, 0.03228759765625, 0.046661376953125, -0.0032253265380859375, 0.0439453125, -0.003009796142578125, 0.0218658447265625, -0.034027099609375, 0.03875732421875, 0.0002830028533935547, -0.044342041015625, -0.012908935546875, -0.026580810546875, -0.0792236328125, 0.0065155029296875, -0.0310821533203125, -0.05035400390625, 0.0325927734375, 0.01593017578125, -0.0024242401123046875, 0.034881591796875, -0.04815673828125, 0.055938720703125, 0.002166748046875, -0.045745849609375, -0.0016307830810546875, -0.062164306640625, 0.015655517578125, 0.0148162841796875, -0.01470947265625, 0.0013761520385742188, -0.0106201171875, 0.067138671875, -0.057769775390625, 0.06622314453125, -0.0399169921875, 0.001674652099609375, 0.0270233154296875, -0.00319671630859375, 0.043487548828125, -0.01042938232421875, -0.016204833984375, 0.00811767578125, -0.0121612548828125, -0.04864501953125, -0.0322265625, 0.0496826171875, -0.050628662109375, -0.01360321044921875, -0.02301025390625, -0.020538330078125, 0.01540374755859375, 0.01360321044921875, 0.053497314453125, 
0.034088134765625, 0.014068603515625, 0.00392913818359375, 0.0533447265625, -0.0244140625, 0.05267333984375, 0.0029315948486328125, -0.008636474609375, -0.03802490234375, 0.053436279296875, 0.0061187744140625, 0.03912353515625, 0.01287078857421875, 0.0121002197265625, -0.01029205322265625, -0.03717041015625, -0.039947509765625, 0.035736083984375, -0.049163818359375, -0.033172607421875, -0.042144775390625, -0.03424072265625, -0.020721435546875, -0.035430908203125, -0.02020263671875, -0.028839111328125, -0.054290771484375, 0.017913818359375, 0.0489501953125, 0.03472900390625, -0.0159759521484375, 0.041107177734375, -0.0175323486328125, 0.0169830322265625, 0.0216064453125, 0.031005859375, -0.0011148452758789062, -0.055419921875, -0.0088653564453125, 0.00740814208984375, -0.040618896484375, -0.0555419921875, 0.03582763671875, 0.00768280029296875, 0.03662109375, 0.037384033203125, -0.0249481201171875, 0.056243896484375, -0.0224456787109375, 0.044219970703125, 0.042724609375, -0.05474853515625, 0.03729248046875, -0.029693603515625, 0.0186920166015625, 0.0228729248046875, 0.038421630859375, -0.03240966796875, -0.03118896484375, -0.06768798828125, -0.05072021484375, 0.049041748046875, 0.022613525390625, 0.0008516311645507812, 0.0256500244140625, 0.04901123046875, -0.0200653076171875, 0.0106048583984375, -0.057342529296875, -0.040679931640625, -0.01678466796875, 0.005207061767578125, -0.0002734661102294922, 0.006744384765625, -0.0107879638671875, -0.04034423828125, 0.07269287109375, -0.00043702125549316406, 0.04364013671875, 0.033966064453125, 0.00835418701171875, -0.0175018310546875, -0.02777099609375, 0.03961181640625, 0.0408935546875, -0.00934600830078125, -0.021881103515625, 0.0034008026123046875, -0.033782958984375, 0.0160369873046875, -0.003810882568359375, -0.0277099609375, -0.0033550262451171875, 0.0204925537109375, 0.0582275390625, -0.01214599609375, -0.011505126953125, 0.059600830078125, 0.0035152435302734375, -0.038116455078125, -0.020172119140625, 
0.004917144775390625, 0.01096343994140625, 0.033660888671875, 0.0089874267578125, 0.038360595703125, 0.0058135986328125, -0.00905609130859375, 0.019622802734375, 0.048492431640625, -0.040252685546875, -0.01372528076171875, 0.05706787109375, -0.0032958984375, -0.003032684326171875, 0.03253173828125, -0.0311431884765625, -0.043731689453125, 0.0732421875, 0.04229736328125, 0.058929443359375, -0.0038356781005859375, 0.0228729248046875, 0.05926513671875, 0.011383056640625, 0.001758575439453125, 0.0144195556640625, 0.01187896728515625, -0.0531005859375, -0.02886962890625, -0.032073974609375, -0.004215240478515625, 0.01116943359375, -0.0264892578125, 0.03118896484375, -0.054779052734375, -0.0141448974609375, -0.00989532470703125, 0.00525665283203125, -0.05389404296875, 0.032440185546875, 0.0011987686157226562, 0.08807373046875, -0.06561279296875, 0.0587158203125, 0.0396728515625, -0.03582763671875, -0.06634521484375, -0.0064697265625, -0.000812530517578125, -0.060333251953125, 0.050445556640625, 0.00821685791015625, -0.00464630126953125, 0.006771087646484375, -0.0684814453125, -0.051177978515625, 0.09759521484375, 0.029266357421875, -0.01163482666015625, 0.0175933837890625, -0.032928466796875, 0.034423828125, -0.031585693359375, 0.03533935546875, 0.0283050537109375, 0.0404052734375, 0.03363037109375, -0.054473876953125, 0.0168914794921875, -0.037109375, 0.00959014892578125, 0.0120391845703125, -0.07659912109375, 0.069580078125, 0.0010805130004882812, -0.0161285400390625, 0.0173187255859375, 0.055938720703125, 0.0206146240234375, 0.01111602783203125, 0.044708251953125, 0.07012939453125, 0.0268402099609375, -0.00893402099609375, 0.0787353515625, -0.0013570785522460938, 0.0270843505859375, 0.043304443359375, 0.026336669921875, 0.033966064453125, 0.02880859375, -0.0118255615234375, 0.043060302734375, 0.0648193359375, 0.002834320068359375, 0.03924560546875, 0.036376953125, -0.0234527587890625, -0.0033779144287109375, 0.0002982616424560547, -0.0294189453125, 
0.003978729248046875, 0.0180816650390625, -0.020538330078125, -0.0181121826171875, 0.017974853515625, 0.0247802734375, -0.015045166015625, -0.029296875, 0.0479736328125, -0.0072174072265625, -0.037445068359375, 0.06005859375, -0.00713348388671875, 0.087890625, -0.05194091796875, -0.00000959634780883789, -0.0207061767578125, 0.00665283203125, -0.032012939453125, -0.06634521484375, 0.01568603515625, -0.017333984375, 0.016326904296875, -0.0289764404296875, 0.05010986328125, -0.030609130859375, -0.038238525390625, 0.035003662109375, 0.004634857177734375, 0.034423828125, 0.01432037353515625, -0.08282470703125, 0.01059722900390625, 0.00638580322265625, -0.038970947265625, 0.02398681640625, 0.0267486572265625, 0.0159759521484375, 0.05096435546875, 0.028533935546875, 0.0267486572265625, 0.0251312255859375, -0.0203094482421875, 0.0770263671875, -0.015472412109375, -0.0204620361328125, -0.04962158203125, 0.061614990234375, -0.02252197265625, -0.038116455078125, 0.04638671875, 0.027984619140625, 0.05853271484375, -0.00823974609375, 0.05010986328125, -0.031524658203125, 0.011993408203125, -0.048492431640625, 0.06585693359375, -0.0709228515625, -0.0262908935546875, -0.027069091796875, -0.054901123046875, -0.021697998046875, 0.0625, -0.002193450927734375, 0.020477294921875, 0.03961181640625, 0.0828857421875, -0.0192413330078125, -0.044219970703125, 0.00266265869140625, 0.003597259521484375, 0.0252227783203125, 0.051483154296875, 0.0465087890625, -0.05230712890625, 0.0181121826171875, -0.046417236328125, -0.036865234375, -0.0051727294921875, -0.07373046875, -0.059234619140625, -0.058074951171875, -0.052978515625, -0.057037353515625, -0.0221099853515625, 0.049652099609375, 0.0869140625, -0.049560546875, -0.01415252685546875, -0.0244293212890625, 0.0038280487060546875, -0.0169830322265625, -0.01568603515625, 0.030731201171875, -0.0069122314453125, -0.07208251953125, 0.0023784637451171875, 0.0205841064453125, 0.037200927734375, -0.005878448486328125, -0.03131103515625, 
-0.0257415771484375, -0.0170440673828125, 0.0167083740234375, 0.0323486328125, -0.0307769775390625, -0.0110931396484375, -0.026641845703125, -0.019439697265625, 0.0142822265625, 0.037384033203125, -0.039154052734375, 0.024261474609375, 0.0452880859375, 0.0295562744140625, 0.0675048828125, -0.017822265625, 0.00630950927734375, -0.045867919921875, 0.042205810546875, 0.0021209716796875, 0.031951904296875, 0.0096282958984375, -0.0266265869140625, 0.035400390625, 0.0247344970703125, -0.0595703125, -0.033447265625, 0.01000213623046875, -0.10357666015625, -0.006397247314453125, 0.0721435546875, -0.023956298828125, -0.0273895263671875, 0.01284027099609375, -0.028839111328125, 0.0252227783203125, -0.02532958984375, 0.0220184326171875, 0.02935791015625, -0.0135955810546875, -0.027374267578125, -0.0303192138671875, 0.049896240234375, 0.0208892822265625, -0.059722900390625, -0.0394287109375, 0.035064697265625, 0.0323486328125, 0.0260772705078125, 0.0706787109375, -0.0135650634765625, 0.01200103759765625, -0.00605010986328125, 0.01727294921875, 0.004611968994140625, -0.004489898681640625, -0.04095458984375, -0.00797271728515625, -0.0230865478515625, -0.0292205810546875 ] ]
Abe13/juniper-certificate-Llama-2-7b-chat-hf
2023-10-21T14:07:02.000Z
[ "transformers", "pytorch", "llama", "text-generation", "license:apache-2.0", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
Abe13
null
null
Abe13/juniper-certificate-Llama-2-7b-chat-hf
0
8,190
transformers
2023-10-03T01:38:18
--- license: apache-2.0 --- OpenAI: "This is a sophisticated and highly advanced model, designed to meticulously analyze and process a wide range of publicly accessible documents. Through its intricate algorithms and extensive data processing capabilities, it can delve deep into a vast repository of information, extracting valuable insights, patterns, and knowledge. Operating on a diverse spectrum of materials, from research papers and legal documents to historical archives and government publications, this model demonstrates its versatility in comprehending and interpreting the complex world of written information. Its ability to navigate through the complexities of language, syntax, and semantics ensures that it can unlock the hidden gems of knowledge buried within these public documents. By harnessing the power of natural language processing, machine learning, and deep neural networks, this model has the potential to revolutionize the way we access, analyze, and understand the wealth of information available in the public domain. Whether it's to facilitate academic research, enhance decision-making processes, or simply satisfy a thirst for knowledge, this model train is an invaluable tool in the quest for information and insights."
1,257
[ [ -0.0157928466796875, -0.044921875, 0.033660888671875, -0.0089569091796875, -0.0136566162109375, -0.0225677490234375, -0.00997161865234375, -0.03594970703125, -0.00959014892578125, 0.03271484375, -0.0277252197265625, -0.0306854248046875, -0.05633544921875, -0.00896453857421875, -0.035430908203125, 0.08245849609375, -0.0118408203125, 0.0035419464111328125, -0.00830841064453125, -0.00592041015625, -0.01513671875, -0.0245361328125, -0.035980224609375, -0.0289306640625, 0.0469970703125, 0.01611328125, 0.05401611328125, 0.048858642578125, 0.06585693359375, 0.02001953125, -0.00490570068359375, 0.0099029541015625, -0.049713134765625, -0.01367950439453125, -0.0216064453125, -0.05487060546875, -0.035186767578125, 0.01068878173828125, 0.052337646484375, 0.033966064453125, 0.00858306884765625, 0.0177459716796875, -0.0086669921875, 0.056884765625, -0.05413818359375, 0.030914306640625, -0.0362548828125, -0.0035610198974609375, -0.04736328125, -0.0160675048828125, -0.017425537109375, -0.032379150390625, 0.0234832763671875, -0.06915283203125, 0.006214141845703125, -0.01678466796875, 0.0889892578125, 0.0095977783203125, -0.01482391357421875, -0.0164642333984375, -0.08270263671875, 0.026947021484375, -0.040435791015625, 0.0173797607421875, 0.0246124267578125, 0.053375244140625, 0.002193450927734375, -0.074462890625, -0.048980712890625, 0.00908660888671875, 0.003192901611328125, 0.01776123046875, 0.0005879402160644531, 0.0034198760986328125, 0.01953125, 0.056671142578125, -0.037261962890625, 0.01137542724609375, -0.06109619140625, 0.014801025390625, 0.038543701171875, 0.01198577880859375, 0.04632568359375, -0.0006318092346191406, -0.01412200927734375, -0.01654052734375, -0.0232086181640625, 0.005096435546875, 0.048675537109375, 0.02862548828125, 0.0011434555053710938, 0.037933349609375, -0.03216552734375, 0.045166015625, -0.05072021484375, 0.00836181640625, 0.007232666015625, -0.01045989990234375, -0.051300048828125, 0.00010687112808227539, 0.05389404296875, 
0.0017690658569335938, 0.0157012939453125, -0.009429931640625, -0.01384735107421875, 0.0040435791015625, 0.025482177734375, -0.046966552734375, -0.01187896728515625, 0.021392822265625, -0.05224609375, -0.0210723876953125, 0.0306243896484375, -0.06683349609375, -0.035675048828125, -0.042083740234375, 0.0367431640625, -0.0325927734375, -0.0347900390625, 0.004703521728515625, -0.03033447265625, 0.0175323486328125, 0.0516357421875, -0.05316162109375, 0.01111602783203125, 0.062408447265625, 0.09124755859375, -0.0005197525024414062, -0.016021728515625, -0.005451202392578125, 0.025360107421875, -0.0081787109375, 0.047698974609375, -0.06634521484375, -0.01146697998046875, -0.00217437744140625, -0.01412200927734375, -0.008392333984375, -0.01406097412109375, 0.05242919921875, -0.0523681640625, 0.00627899169921875, -0.0164947509765625, -0.026702880859375, -0.041961669921875, 0.0325927734375, -0.0916748046875, 0.0343017578125, 0.001041412353515625, -0.0614013671875, 0.0281829833984375, -0.10394287109375, -0.0008015632629394531, 0.015838623046875, -0.00881195068359375, -0.01837158203125, 0.01345062255859375, 0.0164947509765625, 0.02178955078125, -0.04498291015625, 0.037872314453125, -0.026702880859375, -0.017486572265625, 0.0122222900390625, -0.03564453125, 0.062164306640625, 0.0408935546875, 0.006732940673828125, 0.0211181640625, -0.05548095703125, -0.0294036865234375, 0.0185394287109375, -0.04510498046875, -0.04608154296875, -0.00963592529296875, 0.01496124267578125, -0.005767822265625, 0.0183258056640625, -0.048980712890625, 0.039581298828125, -0.03759765625, 0.031951904296875, 0.054168701171875, -0.01062774658203125, 0.03765869140625, -0.019317626953125, 0.02325439453125, 0.01523590087890625, 0.01457977294921875, -0.0219573974609375, -0.050872802734375, -0.04052734375, -0.0168304443359375, 0.04498291015625, 0.035491943359375, -0.01708984375, -0.0027751922607421875, -0.034759521484375, -0.041229248046875, -0.07330322265625, -0.00408935546875, 0.0116119384765625, 
0.0292816162109375, 0.0030384063720703125, -0.01084136962890625, -0.03363037109375, -0.05120849609375, -0.031890869140625, -0.0023345947265625, 0.004611968994140625, 0.018798828125, 0.064208984375, -0.026214599609375, 0.057891845703125, -0.030426025390625, -0.032196044921875, -0.0111846923828125, 0.038665771484375, 0.0267333984375, 0.026214599609375, 0.0063323974609375, -0.1080322265625, -0.01181793212890625, 0.0253143310546875, -0.0819091796875, 0.0306243896484375, -0.0173492431640625, -0.0287017822265625, -0.019287109375, 0.0161285400390625, -0.035064697265625, 0.059600830078125, 0.00507354736328125, -0.01025390625, 0.0206298828125, -0.0220489501953125, 0.0037097930908203125, -0.072998046875, 0.044921875, -0.01030731201171875, 0.0235595703125, -0.0555419921875, 0.032257080078125, 0.0009284019470214844, -0.0401611328125, -0.0250396728515625, 0.039276123046875, -0.03759765625, -0.0136566162109375, -0.008544921875, 0.0031490325927734375, -0.0010271072387695312, 0.021728515625, 0.00557708740234375, 0.046600341796875, 0.02899169921875, -0.0482177734375, 0.0212249755859375, 0.027069091796875, -0.03717041015625, 0.0167083740234375, -0.04400634765625, 0.0037212371826171875, -0.0303802490234375, -0.010009765625, -0.054931640625, -0.049896240234375, 0.01264190673828125, -0.035797119140625, 0.0230865478515625, 0.0066986083984375, -0.0355224609375, -0.017486572265625, -0.00031256675720214844, 0.0241851806640625, 0.0066680908203125, -0.042205810546875, 0.0699462890625, 0.0372314453125, 0.0034503936767578125, -0.039886474609375, -0.06005859375, 0.00897216796875, 0.00908660888671875, -0.056732177734375, 0.005584716796875, 0.0092620849609375, -0.02728271484375, 0.01346588134765625, 0.0198822021484375, -0.0292816162109375, 0.02178955078125, 0.04046630859375, 0.019317626953125, -0.033966064453125, 0.0067596435546875, -0.00223541259765625, -0.00870513916015625, -0.004558563232421875, -0.0274200439453125, 0.0286712646484375, -0.0170745849609375, -0.0322265625, -0.044464111328125, 
0.030029296875, 0.06915283203125, -0.04449462890625, 0.0633544921875, 0.022216796875, -0.05157470703125, 0.009796142578125, 0.0011081695556640625, 0.025482177734375, -0.029022216796875, 0.0157928466796875, -0.0292816162109375, -0.03179931640625, 0.0391845703125, -0.0213775634765625, 0.0178985595703125, 0.04583740234375, 0.02593994140625, 0.0179443359375, 0.0631103515625, 0.0609130859375, -0.0248870849609375, 0.020294189453125, -0.04022216796875, 0.025115966796875, -0.06365966796875, -0.0458984375, -0.056365966796875, -0.035186767578125, -0.0281219482421875, -0.001926422119140625, 0.0204315185546875, -0.004909515380859375, -0.0200042724609375, 0.004611968994140625, -0.03564453125, 0.032470703125, 0.07147216796875, 0.01352691650390625, 0.026702880859375, -0.008941650390625, -0.002826690673828125, 0.01806640625, -0.028778076171875, -0.0572509765625, 0.08538818359375, 0.051971435546875, 0.0526123046875, 0.01332855224609375, 0.0389404296875, 0.01947021484375, 0.01146697998046875, -0.04644775390625, 0.015716552734375, -0.0152130126953125, -0.06561279296875, -0.0298919677734375, -0.0171661376953125, -0.06207275390625, -0.002716064453125, -0.006839752197265625, -0.048370361328125, 0.0266876220703125, -0.00943756103515625, -0.02777099609375, 0.054473876953125, -0.037933349609375, 0.06939697265625, -0.0201263427734375, -0.00557708740234375, -0.01013946533203125, -0.03314208984375, 0.044097900390625, -0.0028133392333984375, 0.005939483642578125, 0.0145263671875, -0.005176544189453125, 0.0684814453125, -0.00812530517578125, 0.045501708984375, 0.0063323974609375, 0.01739501953125, 0.0121002197265625, 0.0045928955078125, 0.00856781005859375, -0.005092620849609375, -0.02337646484375, 0.0246734619140625, -0.014984130859375, -0.019805908203125, -0.050079345703125, 0.04376220703125, -0.09454345703125, -0.015899658203125, -0.03857421875, -0.04541015625, 0.0254058837890625, 0.04052734375, 0.01329803466796875, 0.032470703125, -0.032989501953125, 0.0076751708984375, 0.0526123046875, 
-0.0169219970703125, 0.0210723876953125, 0.02642822265625, 0.0015363693237304688, -0.0220947265625, 0.0784912109375, 0.0211029052734375, -0.005207061767578125, 0.053802490234375, 0.0152740478515625, -0.013641357421875, -0.055694580078125, -0.031982421875, 0.0156707763671875, -0.06219482421875, -0.0022907257080078125, -0.056121826171875, -0.02587890625, -0.034393310546875, 0.009124755859375, -0.0046234130859375, -0.012939453125, -0.043701171875, -0.01146697998046875, 0.003879547119140625, 0.08062744140625, 0.00798797607421875, 0.0283050537109375, -0.06134033203125, 0.037841796875, 0.0183868408203125, 0.046844482421875, 0.00376129150390625, -0.020111083984375, -0.009063720703125, 0.0048980712890625, -0.01617431640625, -0.056427001953125, 0.026611328125, 0.0131683349609375, 0.0526123046875, 0.0538330078125, 0.0018987655639648438, 0.031585693359375, -0.0065765380859375, 0.0596923828125, -0.01180267333984375, -0.0423583984375, 0.0556640625, -0.047149658203125, 0.0223388671875, 0.040069580078125, 0.06134033203125, -0.0184478759765625, -0.0034809112548828125, -0.043853759765625, -0.06109619140625, 0.06634521484375, 0.0132904052734375, -0.00846099853515625, 0.029815673828125, 0.0187530517578125, 0.01183319091796875, 0.0211181640625, -0.053985595703125, 0.01271820068359375, -0.033660888671875, -0.01183319091796875, 0.00882720947265625, -0.00809478759765625, -0.01488494873046875, -0.00652313232421875, 0.049774169921875, 0.0036773681640625, 0.031341552734375, 0.0104522705078125, -0.00946807861328125, -0.01174163818359375, 0.0172882080078125, 0.04541015625, 0.049652099609375, -0.0323486328125, -0.01047515869140625, -0.024566650390625, -0.046966552734375, -0.02337646484375, 0.00376129150390625, -0.03924560546875, -0.0178375244140625, 0.022247314453125, 0.091796875, -0.02752685546875, -0.033782958984375, 0.0217437744140625, -0.022796630859375, -0.01374053955078125, -0.026580810546875, 0.013641357421875, -0.006793975830078125, 0.0208282470703125, 0.0277099609375, 
0.0234832763671875, 0.01934814453125, -0.0640869140625, 0.0018529891967773438, 0.006145477294921875, -0.031524658203125, -0.01067352294921875, 0.05364990234375, 0.0209197998046875, -0.0411376953125, 0.06951904296875, 0.01763916015625, -0.02459716796875, 0.0445556640625, 0.0248870849609375, 0.077392578125, 0.0137176513671875, 0.00931549072265625, 0.0247039794921875, 0.0275726318359375, -0.018402099609375, 0.015289306640625, -0.00913238525390625, -0.03924560546875, -0.00991058349609375, -0.062225341796875, -0.0305938720703125, 0.03961181640625, -0.055389404296875, 0.02056884765625, -0.0482177734375, 0.00667572021484375, 0.0111846923828125, -0.007648468017578125, -0.050628662109375, 0.03741455078125, 0.01654052734375, 0.1097412109375, -0.06512451171875, 0.061004638671875, 0.03887939453125, -0.075439453125, -0.051361083984375, 0.0025043487548828125, -0.001983642578125, -0.04638671875, 0.0611572265625, 0.034210205078125, 0.0071563720703125, -0.01247406005859375, -0.050323486328125, -0.0543212890625, 0.08233642578125, 0.0195465087890625, -0.040863037109375, -0.0249786376953125, 0.007175445556640625, 0.0413818359375, -0.022918701171875, 0.03997802734375, 0.04608154296875, 0.047454833984375, 0.0014324188232421875, -0.0548095703125, -0.0115509033203125, -0.0576171875, 0.004978179931640625, 0.0238800048828125, -0.0278167724609375, 0.0799560546875, -0.0037364959716796875, -0.020233154296875, -0.0029964447021484375, 0.06402587890625, -0.0242156982421875, 0.04327392578125, 0.01104736328125, 0.052001953125, 0.08050537109375, -0.01390838623046875, 0.08770751953125, -0.0015125274658203125, -0.01140594482421875, 0.06768798828125, -0.0020313262939453125, 0.061004638671875, 0.004573822021484375, 0.015655517578125, 0.0445556640625, 0.03179931640625, -0.01248931884765625, 0.032928466796875, -0.017547607421875, -0.00664520263671875, -0.00027561187744140625, -0.01181793212890625, -0.01470184326171875, 0.044097900390625, 0.03228759765625, -0.033294677734375, -0.0187530517578125, 
0.006931304931640625, -0.0159912109375, 0.0178375244140625, -0.048980712890625, 0.06396484375, -0.0126190185546875, -0.049652099609375, 0.0021610260009765625, 0.007568359375, 0.0282135009765625, -0.055572509765625, -0.0210418701171875, 0.01224517822265625, 0.0250396728515625, -0.019439697265625, -0.07574462890625, 0.032745361328125, 0.003429412841796875, -0.0167236328125, -0.00511932373046875, 0.06622314453125, 0.0080718994140625, -0.018341064453125, 0.0185089111328125, 0.00226593017578125, 0.0234527587890625, -0.0159912109375, -0.053466796875, 0.0024127960205078125, -0.0115203857421875, -0.008148193359375, 0.03021240234375, 0.050994873046875, 0.0005512237548828125, 0.05462646484375, 0.06298828125, 0.017333984375, -0.0053253173828125, 0.00890350341796875, 0.075439453125, -0.047271728515625, -0.055877685546875, -0.04705810546875, 0.045623779296875, -0.031890869140625, -0.048248291015625, 0.033843994140625, 0.04547119140625, 0.07867431640625, -0.0028514862060546875, 0.09185791015625, -0.0016918182373046875, 0.0302734375, -0.0308990478515625, 0.07568359375, -0.037933349609375, 0.0014600753784179688, -0.01451873779296875, -0.0369873046875, -0.017974853515625, 0.036376953125, -0.0312042236328125, -0.0178375244140625, 0.0179443359375, 0.04461669921875, -0.02691650390625, 0.0318603515625, 0.00905609130859375, 0.004058837890625, 0.0333251953125, 0.0305328369140625, 0.05157470703125, -0.034820556640625, 0.045013427734375, 0.0038013458251953125, -0.0294036865234375, -0.01316070556640625, -0.0487060546875, -0.072509765625, -0.05224609375, 0.01080322265625, -0.007335662841796875, 0.00435638427734375, 0.042938232421875, 0.042938232421875, -0.06134033203125, -0.0180206298828125, -0.0193023681640625, 0.004276275634765625, 0.0022335052490234375, -0.01666259765625, 0.01345062255859375, -0.014801025390625, -0.05316162109375, 0.0206756591796875, 0.0218505859375, 0.0010747909545898438, -0.035064697265625, -0.0151519775390625, -0.043853759765625, 0.0075836181640625, 0.0113067626953125, 
0.03875732421875, -0.0308990478515625, 0.017547607421875, -0.00975799560546875, -0.011810302734375, 0.00614166259765625, 0.073974609375, -0.038299560546875, 0.0550537109375, 0.06756591796875, 0.07037353515625, 0.0269927978515625, -0.004192352294921875, 0.056488037109375, -0.010467529296875, 0.0025157928466796875, 0.0214691162109375, 0.0147705078125, 0.022796630859375, -0.049224853515625, 0.0386962890625, 0.00643157958984375, -0.044189453125, -0.06768798828125, 0.014739990234375, -0.06280517578125, -0.037261962890625, 0.05206298828125, -0.0225677490234375, -0.0523681640625, -0.00540924072265625, -0.01227569580078125, 0.0428466796875, -0.0323486328125, 0.0538330078125, 0.036773681640625, 0.0173492431640625, -0.0011091232299804688, -0.0535888671875, 0.0215606689453125, -0.0179595947265625, -0.0609130859375, -0.00635528564453125, 0.00896453857421875, -0.003421783447265625, 0.035552978515625, 0.044921875, -0.0223388671875, 0.0297393798828125, 0.002262115478515625, -0.0087432861328125, -0.039031982421875, -0.0633544921875, -0.0364990234375, 0.0357666015625, -0.0007104873657226562, -0.0226287841796875 ] ]
LeoLM/leo-hessianai-7b-chat-bilingual
2023-09-29T13:16:38.000Z
[ "transformers", "pytorch", "llama", "text-generation", "custom_code", "en", "de", "dataset:LeoLM/OpenSchnabeltier", "dataset:OpenAssistant/OASST-DE", "dataset:FreedomIntelligence/alpaca-gpt4-deutsch", "dataset:FreedomIntelligence/evol-instruct-deutsch", "dataset:LeoLM/German_Poems", "dataset:LeoLM/German_Songs", "dataset:garage-bAInd/Open-Platypus", "dataset:WizardLM/WizardLM_evol_instruct_70k", "dataset:bjoernp/oasst25-08-23-filtered", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
LeoLM
null
null
LeoLM/leo-hessianai-7b-chat-bilingual
6
8,182
transformers
2023-09-10T19:00:52
--- datasets: - LeoLM/OpenSchnabeltier - OpenAssistant/OASST-DE - FreedomIntelligence/alpaca-gpt4-deutsch - FreedomIntelligence/evol-instruct-deutsch - LeoLM/German_Poems - LeoLM/German_Songs - garage-bAInd/Open-Platypus - WizardLM/WizardLM_evol_instruct_70k - bjoernp/oasst25-08-23-filtered language: - en - de library_name: transformers pipeline_tag: text-generation --- # LAION LeoLM: **L**inguistically **E**nhanced **O**pen **L**anguage **M**odel Meet LeoLM, the first open and commercially available German Foundation Language Model built on Llama-2. Our models extend Llama-2's capabilities into German through continued pretraining on a large corpus of German-language and mostly locality specific text. Thanks to a compute grant at HessianAI's new supercomputer **42**, we release two foundation models trained with 8k context length, [`LeoLM/leo-hessianai-7b`](https://huggingface.co/LeoLM/leo-hessianai-7b) and [`LeoLM/leo-hessianai-13b`](https://huggingface.co/LeoLM/leo-hessianai-13b) under the [Llama-2 community license](https://huggingface.co/meta-llama/Llama-2-70b/raw/main/LICENSE.txt) (70b also coming soon! 👀). With this release, we hope to bring a new wave of opportunities to German open-source and commercial LLM research and accelerate adoption. Read our [blog post]() or our paper (preprint coming soon) for more details! *A project by Björn Plüster and Christoph Schuhmann in collaboration with LAION and HessianAI.* ## LeoLM Chat `LeoLM/leo-hessianai-7b-chat-bilingual` is a bilingual English-German chat model built on our foundation model `LeoLM/leo-hessianai-7b` and finetuned on a selection of German translateed instruction datasets and their English counterparts. The model performs exceptionally well on writing, explanation and discussion tasks but struggles somewhat with math and advanced reasoning. 
See our MT-Bench scores: ``` { "first_turn": 5.64375, "second_turn": 4.075, "categories": { "writing": 5.925, "roleplay": 5.25, "reasoning": 3.1, "math": 1.8, "coding": 3.4, "extraction": 5, "stem": 6.5, "humanities": 7.9 }, "average": 4.859375 } ``` ## Model Details - **Finetuned from:** [LeoLM/leo-hessianai-7b](https://huggingface.co/LeoLM/leo-hessianai-7b) - **Model type:** Causal decoder-only transformer language model - **Language:** English and German - **Demo:** [Web Demo]() - **License:** [LLAMA 2 COMMUNITY LICENSE AGREEMENT](https://huggingface.co/meta-llama/Llama-2-70b/raw/main/LICENSE.txt) - **Contact:** [LAION Discord](https://discord.com/invite/eq3cAMZtCC) or [Björn Plüster](mailto:bjoern.pl@outlook.de) ## Use in 🤗Transformers First install direct dependencies: ``` pip install transformers torch sentencepiece ``` If you want faster inference using flash-attention2, you need to install these dependencies: ```bash pip install packaging ninja pip install flash-attn==v2.1.1 --no-build-isolation pip install git+https://github.com/HazyResearch/flash-attention.git@v2.1.1#subdirectory=csrc/rotary ``` Then load the model in transformers: ```python from transformers import pipeline import torch system_prompt = """<|im_start|>system Dies ist eine Unterhaltung zwischen einem intelligenten, hilfsbereitem KI-Assistenten und einem Nutzer. Der Assistent gibt ausführliche, hilfreiche und ehrliche Antworten.<|im_end|> """ prompt_format = "<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant\n" prompt = "Erkläre mir wie die Fahrradwegesituation in Hamburg ist." 
generator = pipeline(model="LeoLM/leo-hessianai-7b-chat-bilingual", device="cuda", torch_dtype=torch.float16, trust_remote_code=True) # True for flash-attn2 else False print(generator(prompt_format.format(prompt=prompt), do_sample=True, top_p=0.95, max_length=8192)) ``` "*Als KI kann ich keine persönlichen Beobachtungen teilen, aber ich kann einige allgemeine Informationen zur Fahrradwegesituation in Hamburg liefern. Im Vergleich zu vielen anderen großen Städten hat Hamburg eine hohe Anzahl von Fahrradfahrern und nimmt seine Verantwortung für nachhaltige Verkehrsmittel sehr ernst. Es gibt viele Fahrradwege und separate Fahrspuren, die Radfahrern ermöglichen, zusammen mit dem Autoverkehr zu fahren. Diese Fahrradspuren sind oft mit Markierungen gekennzeichnet und durch physische Trennungen von anderen Fahrspuren abgegrenzt. Darüber hinaus gibt es viele Fahrradstraßen, auf denen Radfahrer Vorfahrt haben und Autos langsamer fahren müssen.* *In einigen städtischen Gebieten können Fahrradwege jedoch eng oder überfüllt sein, besonders während der Stoßzeiten. Es gibt auch viele Kreuzungen, an denen Radfahrer anhalten und auf Grün warten müssen, ähnlich wie Autofahrer. Insgesamt ist die Fahrradinfrastruktur in Hamburg ziemlich gut, aber wie überall gibt es immer Raum für Verbesserungen.*" ## Prompting / Prompt Template Prompt dialogue template (ChatML format): ``` """ <|im_start|>system {system_message}<|im_end|> <|im_start|>user {prompt}<|im_end|> <|im_start|>assistant """ ``` The model input can contain multiple conversation turns between user and assistant, e.g. ``` <|im_start|>user {prompt 1}<|im_end|> <|im_start|>assistant {reply 1}<|im_end|> <|im_start|>user {prompt 2}<|im_end|> <|im_start|>assistant (...) ``` ## Ethical Considerations and Limitations LeoLM has been tested in English and German, and has not covered, nor could it cover all scenarios. 
For these reasons, as with all LLMs, the potential outputs of `LeoLM/leo-hessianai-7b-chat` cannot be predicted in advance, and the model may in some instances produce inaccurate, biased or other objectionable responses to user prompts. Therefore, before deploying any applications of `LeoLM/leo-hessianai-7b-chat`, developers should perform safety testing and tuning tailored to their specific applications of the model. Please see Meta's [Responsible Use Guide](https://ai.meta.com/llama/responsible-use-guide/). ## Finetuning Details | Hyperparameter | Value | |---|---| | Num epochs | 3 | | Examples per epoch | 233275 | | Global batch size | 256 | | Learning rate | 3e-5 | | Warmup steps | 100 | | LR scheduler | Cosine | | Adam betas | (0.9, 0.95) | | Weight decay | 0.001 | ## Dataset Details ``` ## Stats for 'Subset of LeoLM/OpenSchnabeltier' (21314 samples (100.0%)) ----------------- Accepted: 21314/21314 (100.0%) Accepted tokens: 8134690 Skipped: 0 (0.0%) Min tokens per sample: 25 Max tokens per sample: 1202 Avg tokens per sample: 381.65947264708643 ----------------- ## Stats for 'Subset of garage-bAInd/Open-Platypus' (24427 samples (100.0%)) ----------------- Accepted: 24427/24427 (100.0%) Accepted tokens: 9549043 Skipped: 0 (0.0%) Min tokens per sample: 23 Max tokens per sample: 5054 Avg tokens per sample: 390.9216440823679 ----------------- ## Stats for 'Subset of WizardLM/WizardLM_evol_instruct_70k' (68600 samples (100.0%)) ----------------- Accepted: 68600/68600 (100.0%) Accepted tokens: 33045040 Skipped: 0 (0.0%) Min tokens per sample: 18 Max tokens per sample: 11810 Avg tokens per sample: 481.7061224489796 ----------------- ## Stats for 'Subset of FreedomIntelligence/evol-instruct-deutsch' (57841 samples (100.0%)) ----------------- Accepted: 57841/57841 (100.0%) Accepted tokens: 42958192 Skipped: 0 (0.0%) Min tokens per sample: 33 Max tokens per sample: 5507 Avg tokens per sample: 742.6944900675991 ----------------- ## Stats for 'Subset of 
FreedomIntelligence/alpaca-gpt4-deutsch' (48969 samples (100.0%)) ----------------- Accepted: 48969/48969 (100.0%) Accepted tokens: 13372005 Skipped: 0 (0.0%) Min tokens per sample: 19 Max tokens per sample: 1359 Avg tokens per sample: 273.07082031489307 ----------------- ## Stats for 'Subset of LeoLM/German_Songs' (490 samples (100.0%)) ----------------- Accepted: 490/490 (100.0%) Accepted tokens: 618642 Skipped: 0 (0.0%) Min tokens per sample: 747 Max tokens per sample: 1678 Avg tokens per sample: 1262.534693877551 ----------------- ## Stats for 'Subset of LeoLM/German_Poems' (392 samples (100.0%)) ----------------- Accepted: 392/392 (100.0%) Accepted tokens: 187897 Skipped: 0 (0.0%) Min tokens per sample: 231 Max tokens per sample: 826 Avg tokens per sample: 479.3290816326531 ----------------- ## Stats for 'Subset of OpenAssistant/OASST_DE' (3646 samples (100.0%)) ----------------- Accepted: 3646/3646 (100.0%) Accepted tokens: 2338738 Skipped: 0 (0.0%) Min tokens per sample: 29 Max tokens per sample: 2484 Avg tokens per sample: 641.4530992868897 ----------------- ## Stats for 'Subset of bjoernp/oasst25-08-23-filtered' (8922 samples (100.0%)) ----------------- Accepted: 8922/8922 (100.0%) Accepted tokens: 4526427 Skipped: 0 (0.0%) Min tokens per sample: 23 Max tokens per sample: 5407 Avg tokens per sample: 507.3332212508406 ----------------- ## Stats for 'total' (235632 samples (100.0%)) ----------------- Accepted: 235632/235632 (100.0%) Accepted tokens: 115862397 Skipped: 0 (0.0%) Min tokens per sample: 18 Max tokens per sample: 11810 Avg tokens per sample: 491.70909299246284 ----------------- ```
9,164
[ [ -0.0283966064453125, -0.05706787109375, 0.007747650146484375, 0.03594970703125, -0.017913818359375, -0.0170440673828125, -0.0118255615234375, -0.034088134765625, 0.0283660888671875, 0.01812744140625, -0.047760009765625, -0.054168701171875, -0.0458984375, 0.01123809814453125, -0.0250396728515625, 0.07501220703125, 0.00646209716796875, 0.009002685546875, 0.005382537841796875, -0.017425537109375, -0.0264892578125, -0.033721923828125, -0.049407958984375, -0.01308441162109375, 0.023590087890625, 0.030609130859375, 0.036956787109375, 0.03192138671875, 0.036590576171875, 0.023681640625, -0.01247406005859375, 0.0031414031982421875, -0.03607177734375, 0.006244659423828125, 0.00922393798828125, -0.03265380859375, -0.04937744140625, 0.0079345703125, 0.04083251953125, 0.0232696533203125, -0.005100250244140625, 0.0220489501953125, 0.0030498504638671875, 0.033660888671875, -0.03314208984375, 0.0189208984375, -0.0258941650390625, 0.00548553466796875, -0.0155487060546875, -0.00937652587890625, -0.028778076171875, -0.0157012939453125, 0.007099151611328125, -0.04901123046875, 0.01629638671875, 0.008697509765625, 0.07806396484375, 0.0095367431640625, -0.0231475830078125, -0.023590087890625, -0.035858154296875, 0.0687255859375, -0.058990478515625, 0.031646728515625, 0.038970947265625, 0.01806640625, -0.0160980224609375, -0.06341552734375, -0.04052734375, -0.0211334228515625, -0.02166748046875, 0.01508331298828125, -0.035491943359375, -0.0123138427734375, 0.01904296875, 0.0231475830078125, -0.044891357421875, -0.00403594970703125, -0.0318603515625, -0.0247955322265625, 0.054534912109375, 0.013214111328125, 0.006862640380859375, -0.0193328857421875, -0.0361328125, -0.02703857421875, -0.041412353515625, 0.00797271728515625, 0.017242431640625, 0.01418304443359375, -0.0263519287109375, 0.0543212890625, -0.017974853515625, 0.043060302734375, 0.00237274169921875, -0.019134521484375, 0.038299560546875, -0.04229736328125, -0.0404052734375, -0.0219573974609375, 0.0806884765625, 
0.02960205078125, 0.00820159912109375, 0.0179290771484375, -0.02313232421875, -0.01453399658203125, -0.01508331298828125, -0.057403564453125, -0.0069732666015625, 0.019866943359375, -0.0283203125, -0.0235748291015625, -0.0004968643188476562, -0.05999755859375, -0.01953125, 0.005367279052734375, 0.023468017578125, -0.043548583984375, -0.01538848876953125, 0.006374359130859375, -0.0078887939453125, 0.0230865478515625, 0.019500732421875, -0.05108642578125, 0.007648468017578125, 0.03204345703125, 0.059417724609375, -0.00557708740234375, -0.020416259765625, -0.030487060546875, -0.01203155517578125, -0.01318359375, 0.0394287109375, -0.0093536376953125, -0.04803466796875, -0.0103912353515625, 0.009765625, -0.0074462890625, -0.0305938720703125, 0.045745849609375, -0.029937744140625, 0.0477294921875, -0.01739501953125, -0.0360107421875, -0.007106781005859375, 0.011199951171875, -0.035186767578125, 0.09075927734375, 0.01100921630859375, -0.06219482421875, 0.01335906982421875, -0.057708740234375, -0.0276031494140625, -0.01047515869140625, -0.0026378631591796875, -0.024627685546875, -0.0183868408203125, 0.022247314453125, 0.039794921875, -0.032012939453125, 0.0290069580078125, -0.0069732666015625, -0.01983642578125, 0.0287017822265625, -0.040618896484375, 0.0970458984375, 0.030914306640625, -0.0489501953125, 0.01158905029296875, -0.05078125, -0.00797271728515625, 0.0183868408203125, -0.033477783203125, 0.0049285888671875, -0.0238800048828125, 0.022674560546875, 0.0224761962890625, 0.01568603515625, -0.03167724609375, 0.005138397216796875, -0.043609619140625, 0.02032470703125, 0.06719970703125, -0.00778961181640625, 0.031219482421875, -0.03729248046875, 0.051177978515625, 0.0066680908203125, 0.0241546630859375, 0.006931304931640625, -0.05615234375, -0.08502197265625, -0.035308837890625, 0.01629638671875, 0.056427001953125, -0.046478271484375, 0.058990478515625, -0.017791748046875, -0.0472412109375, -0.042144775390625, 0.005573272705078125, 0.03973388671875, 0.0450439453125, 
0.0247650146484375, -0.0215606689453125, -0.0418701171875, -0.07037353515625, 0.0027008056640625, -0.0281524658203125, -0.005008697509765625, 0.0287933349609375, 0.057220458984375, -0.0225372314453125, 0.08056640625, -0.03656005859375, -0.0241241455078125, -0.0196685791015625, -0.00557708740234375, 0.033935546875, 0.040618896484375, 0.052001953125, -0.050537109375, -0.0421142578125, -0.0173187255859375, -0.05963134765625, -0.0090789794921875, 0.0018434524536132812, -0.0176849365234375, 0.0372314453125, 0.020294189453125, -0.055206298828125, 0.034942626953125, 0.033935546875, -0.04913330078125, 0.035491943359375, -0.01006317138671875, 0.0009050369262695312, -0.08447265625, 0.018280029296875, 0.0015010833740234375, -0.01055908203125, -0.0513916015625, -0.006877899169921875, -0.01389312744140625, 0.0090484619140625, -0.054840087890625, 0.074462890625, -0.03045654296875, 0.0036678314208984375, 0.00386810302734375, 0.0094451904296875, -0.00823211669921875, 0.038787841796875, -0.01082611083984375, 0.064208984375, 0.047882080078125, -0.032745361328125, 0.025787353515625, 0.02398681640625, -0.03045654296875, 0.0209503173828125, -0.057342529296875, -0.002040863037109375, 0.00247955322265625, 0.03076171875, -0.08209228515625, -0.021514892578125, 0.055938720703125, -0.047760009765625, 0.0120086669921875, 0.01142120361328125, -0.038055419921875, -0.0249786376953125, -0.0233306884765625, 0.009002685546875, 0.062103271484375, -0.0218963623046875, 0.0494384765625, 0.01611328125, 0.004116058349609375, -0.07366943359375, -0.0645751953125, -0.01322174072265625, -0.02056884765625, -0.065673828125, 0.0123748779296875, -0.01465606689453125, -0.024261474609375, -0.00600433349609375, 0.0018606185913085938, 0.003814697265625, 0.007320404052734375, 0.02239990234375, 0.021942138671875, -0.0123138427734375, -0.0020694732666015625, -0.01593017578125, -0.0196685791015625, -0.00077056884765625, -0.0123748779296875, 0.053619384765625, -0.035125732421875, -0.030670166015625, -0.0455322265625, 
0.01203155517578125, 0.04803466796875, -0.007701873779296875, 0.07000732421875, 0.0562744140625, -0.01357269287109375, 0.00556182861328125, -0.040130615234375, -0.02532958984375, -0.039337158203125, 0.0280609130859375, -0.01308441162109375, -0.055206298828125, 0.052520751953125, 0.03057861328125, 0.0165557861328125, 0.059539794921875, 0.05303955078125, -0.00791168212890625, 0.05224609375, 0.043212890625, -0.01230621337890625, 0.03948974609375, -0.046844482421875, 0.00560760498046875, -0.04046630859375, -0.022247314453125, -0.0296478271484375, -0.0211181640625, -0.034820556640625, -0.041778564453125, 0.032440185546875, 0.01146697998046875, -0.0240631103515625, 0.02447509765625, -0.0273284912109375, 0.015472412109375, 0.054779052734375, -0.00339508056640625, 0.015869140625, 0.00530242919921875, -0.0182647705078125, 0.0029621124267578125, -0.052093505859375, -0.037078857421875, 0.08416748046875, 0.037109375, 0.042205810546875, 0.012237548828125, 0.04937744140625, 0.0120086669921875, 0.041351318359375, -0.042694091796875, 0.059539794921875, 0.01139068603515625, -0.0758056640625, -0.0270843505859375, -0.0182952880859375, -0.0682373046875, 0.0224761962890625, -0.018707275390625, -0.0595703125, 0.0180511474609375, 0.0097808837890625, -0.016510009765625, 0.01279449462890625, -0.05450439453125, 0.059539794921875, -0.0150146484375, -0.0172271728515625, -0.00472259521484375, -0.064208984375, 0.0294342041015625, -0.00312042236328125, 0.0215606689453125, -0.0199432373046875, 0.005153656005859375, 0.07171630859375, -0.03253173828125, 0.072998046875, -0.01050567626953125, -0.0011205673217773438, 0.028594970703125, -0.006656646728515625, 0.03973388671875, 0.00738525390625, -0.012939453125, 0.0178375244140625, -0.01375579833984375, -0.032745361328125, -0.0240478515625, 0.04608154296875, -0.06805419921875, -0.05670166015625, -0.046051025390625, -0.04229736328125, 0.007266998291015625, 0.01593017578125, 0.0450439453125, 0.018585205078125, -0.01136016845703125, 0.01318359375, 
0.03082275390625, -0.0305938720703125, 0.039306640625, 0.04168701171875, -0.023956298828125, -0.03936767578125, 0.05615234375, -0.0004792213439941406, 0.0203094482421875, 0.0118560791015625, 0.01123809814453125, -0.034088134765625, -0.0263671875, -0.031707763671875, 0.037872314453125, -0.07257080078125, -0.013397216796875, -0.054046630859375, -0.0173492431640625, -0.0394287109375, -0.013946533203125, -0.0253448486328125, -0.035552978515625, -0.0447998046875, -0.0143585205078125, 0.045684814453125, 0.035003662109375, -0.0128936767578125, 0.02496337890625, -0.040740966796875, 0.004421234130859375, 0.009765625, 0.005214691162109375, -0.003162384033203125, -0.05706787109375, -0.00676727294921875, 0.00962066650390625, -0.0268707275390625, -0.0677490234375, 0.04425048828125, 0.002208709716796875, 0.040863037109375, 0.035430908203125, 0.00882720947265625, 0.04156494140625, -0.01143646240234375, 0.0770263671875, 0.01873779296875, -0.06475830078125, 0.040740966796875, -0.0266265869140625, 0.00928497314453125, 0.036224365234375, 0.027130126953125, -0.048126220703125, -0.041229248046875, -0.06646728515625, -0.07147216796875, 0.06842041015625, 0.04754638671875, 0.00843048095703125, -0.0034732818603515625, 0.0108489990234375, -0.018341064453125, 0.005985260009765625, -0.06646728515625, -0.056732177734375, -0.007007598876953125, -0.0164031982421875, -0.0221710205078125, -0.01261138916015625, -0.0267791748046875, -0.035369873046875, 0.066162109375, 0.0099639892578125, 0.048126220703125, 0.021881103515625, 0.00025725364685058594, 0.00247955322265625, 0.0178070068359375, 0.062103271484375, 0.04595947265625, -0.0217742919921875, -0.0005202293395996094, 0.035552978515625, -0.0513916015625, 0.0197601318359375, 0.01206207275390625, -0.0201263427734375, 0.004062652587890625, 0.04095458984375, 0.06854248046875, -0.007080078125, -0.050079345703125, 0.0450439453125, -0.00946044921875, -0.01727294921875, -0.03521728515625, 0.01100921630859375, 0.0144805908203125, 0.0236968994140625, 
0.029632568359375, 0.00374603271484375, -0.01421356201171875, -0.0290069580078125, 0.0159454345703125, 0.031158447265625, -0.029876708984375, -0.01459503173828125, 0.058624267578125, 0.01488494873046875, -0.0301666259765625, 0.053619384765625, -0.01165008544921875, -0.0272064208984375, 0.0592041015625, 0.04248046875, 0.06658935546875, -0.01123809814453125, 0.01334381103515625, 0.032501220703125, 0.0208282470703125, -0.0005249977111816406, 0.0238800048828125, 0.018768310546875, -0.05352783203125, -0.0123291015625, -0.049224853515625, -0.0093994140625, 0.0095367431640625, -0.03875732421875, 0.030303955078125, -0.0244903564453125, -0.023223876953125, -0.009368896484375, 0.006591796875, -0.050811767578125, 0.01129913330078125, -0.0051422119140625, 0.08642578125, -0.069580078125, 0.07147216796875, 0.0447998046875, -0.032928466796875, -0.057464599609375, -0.01003265380859375, 0.0121307373046875, -0.061431884765625, 0.059417724609375, 0.0014581680297851562, -0.00969696044921875, -0.0011692047119140625, -0.035858154296875, -0.0748291015625, 0.103515625, 0.0266265869140625, -0.0279541015625, 0.0151519775390625, 0.008270263671875, 0.045806884765625, -0.0288543701171875, 0.034576416015625, 0.043212890625, 0.044464111328125, 0.0189666748046875, -0.0670166015625, 0.01157379150390625, -0.033660888671875, -0.0166473388671875, -0.006103515625, -0.059173583984375, 0.07135009765625, -0.00978851318359375, -0.022308349609375, 0.002040863037109375, 0.060821533203125, 0.03607177734375, 0.0246124267578125, 0.038116455078125, 0.034912109375, 0.0655517578125, -0.01220703125, 0.08575439453125, -0.028533935546875, 0.03424072265625, 0.05615234375, 0.00005239248275756836, 0.0535888671875, 0.032501220703125, -0.0184783935546875, 0.0237884521484375, 0.05072021484375, -0.0057830810546875, 0.044097900390625, -0.00201416015625, -0.0244293212890625, -0.0017595291137695312, 0.0002586841583251953, -0.040283203125, 0.0233306884765625, 0.0192413330078125, -0.0338134765625, -0.002532958984375, 
0.004177093505859375, 0.0219879150390625, -0.034912109375, 0.0084686279296875, 0.0309600830078125, 0.01055908203125, -0.045379638671875, 0.0723876953125, 0.0173187255859375, 0.06317138671875, -0.047332763671875, 0.0181427001953125, -0.034271240234375, 0.0142974853515625, -0.00818634033203125, -0.05487060546875, 0.017425537109375, 0.011444091796875, -0.0046539306640625, -0.0208282470703125, 0.03857421875, -0.0173187255859375, -0.04766845703125, 0.029998779296875, 0.0379638671875, 0.02545166015625, 0.0203704833984375, -0.059051513671875, 0.0034027099609375, 0.0216064453125, -0.0472412109375, 0.01506805419921875, 0.00928497314453125, -0.00014793872833251953, 0.055145263671875, 0.04742431640625, -0.01056671142578125, 0.0095977783203125, -0.014801025390625, 0.07135009765625, -0.04827880859375, -0.02789306640625, -0.07110595703125, 0.06256103515625, -0.00209808349609375, -0.0504150390625, 0.0697021484375, 0.04791259765625, 0.05279541015625, -0.006931304931640625, 0.0533447265625, -0.0235137939453125, 0.0178375244140625, -0.033172607421875, 0.052734375, -0.050537109375, 0.027130126953125, -0.01450347900390625, -0.059417724609375, -0.0255126953125, 0.05108642578125, -0.026702880859375, 0.0033397674560546875, 0.04339599609375, 0.0791015625, -0.00021839141845703125, -0.0011749267578125, 0.008270263671875, 0.0214996337890625, 0.03155517578125, 0.04669189453125, 0.051361083984375, -0.04388427734375, 0.045623779296875, -0.033233642578125, -0.01506805419921875, -0.0243377685546875, -0.05780029296875, -0.074951171875, -0.04534912109375, -0.0132904052734375, -0.049713134765625, -0.01120758056640625, 0.0753173828125, 0.042938232421875, -0.0697021484375, -0.0284881591796875, 0.002330780029296875, 0.01036834716796875, -0.0103302001953125, -0.018035888671875, 0.040985107421875, -0.002620697021484375, -0.061553955078125, 0.0254974365234375, -0.0069732666015625, 0.026702880859375, -0.0166168212890625, -0.027313232421875, -0.0219573974609375, 0.005031585693359375, 0.03466796875, 
0.0186767578125, -0.06817626953125, -0.01154327392578125, 0.0318603515625, -0.029083251953125, 0.0077362060546875, 0.0233612060546875, -0.040740966796875, 0.0172576904296875, 0.04412841796875, 0.01474761962890625, 0.052734375, -0.00600433349609375, 0.0307159423828125, -0.06103515625, 0.0343017578125, 0.0179290771484375, 0.029876708984375, 0.01366424560546875, -0.0293426513671875, 0.050140380859375, 0.0283660888671875, -0.03741455078125, -0.058502197265625, 0.00330352783203125, -0.072509765625, -0.0170440673828125, 0.1009521484375, -0.01439666748046875, -0.015411376953125, -0.00010645389556884766, -0.0193328857421875, 0.0206146240234375, -0.039215087890625, 0.035491943359375, 0.05615234375, -0.018646240234375, -0.003292083740234375, -0.048828125, 0.0352783203125, 0.033935546875, -0.051025390625, -0.004062652587890625, 0.038665771484375, 0.032135009765625, 0.0228118896484375, 0.08026123046875, 0.0089874267578125, 0.01219940185546875, -0.008697509765625, 0.00820159912109375, -0.00754547119140625, -0.0164337158203125, -0.0318603515625, 0.00492095947265625, -0.011749267578125, -0.0167388916015625 ] ]
codellama/CodeLlama-34b-Python-hf
2023-10-27T18:10:31.000Z
[ "transformers", "pytorch", "safetensors", "llama", "text-generation", "llama-2", "code", "arxiv:2308.12950", "license:llama2", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
codellama
null
null
codellama/CodeLlama-34b-Python-hf
63
8,179
transformers
2023-08-24T16:57:23
--- language: - code pipeline_tag: text-generation tags: - llama-2 license: llama2 --- # **Code Llama** Code Llama is a collection of pretrained and fine-tuned generative text models ranging in scale from 7 billion to 34 billion parameters. This is the repository for the 34B Python specialist version in the Hugging Face Transformers format. This model is designed for general code synthesis and understanding. Links to other models can be found in the index at the bottom. | | Base Model | Python | Instruct | | --- | ----------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------- | | 7B | [codellama/CodeLlama-7b-hf](https://huggingface.co/codellama/CodeLlama-7b-hf) | [codellama/CodeLlama-7b-Python-hf](https://huggingface.co/codellama/CodeLlama-7b-Python-hf) | [codellama/CodeLlama-7b-Instruct-hf](https://huggingface.co/codellama/CodeLlama-7b-Instruct-hf) | | 13B | [codellama/CodeLlama-13b-hf](https://huggingface.co/codellama/CodeLlama-13b-hf) | [codellama/CodeLlama-13b-Python-hf](https://huggingface.co/codellama/CodeLlama-13b-Python-hf) | [codellama/CodeLlama-13b-Instruct-hf](https://huggingface.co/codellama/CodeLlama-13b-Instruct-hf) | | 34B | [codellama/CodeLlama-34b-hf](https://huggingface.co/codellama/CodeLlama-34b-hf) | [codellama/CodeLlama-34b-Python-hf](https://huggingface.co/codellama/CodeLlama-34b-Python-hf) | [codellama/CodeLlama-34b-Instruct-hf](https://huggingface.co/codellama/CodeLlama-34b-Instruct-hf) | ## Model Use To use this model, please make sure to install transformers from `main` until the next version is released: ```bash pip install git+https://github.com/huggingface/transformers.git@main accelerate ``` Model capabilities: - [x] Code completion. - [ ] Infilling. - [ ] Instructions / chat. - [x] Python specialist. 
## Model Details *Note: Use of this model is governed by the Meta license. Meta developed and publicly released the Code Llama family of large language models (LLMs). **Model Developers** Meta **Variations** Code Llama comes in three model sizes, and three variants: * Code Llama: base models designed for general code synthesis and understanding * Code Llama - Python: designed specifically for Python * Code Llama - Instruct: for instruction following and safer deployment All variants are available in sizes of 7B, 13B and 34B parameters. **This repository contains the Python version of the 34B parameters model.** **Input** Models input text only. **Output** Models generate text only. **Model Architecture** Code Llama is an auto-regressive language model that uses an optimized transformer architecture. **Model Dates** Code Llama and its variants have been trained between January 2023 and July 2023. **Status** This is a static model trained on an offline dataset. Future versions of Code Llama - Instruct will be released as we improve model safety with community feedback. **License** A custom commercial license is available at: [https://ai.meta.com/resources/models-and-libraries/llama-downloads/](https://ai.meta.com/resources/models-and-libraries/llama-downloads/) **Research Paper** More information can be found in the paper "[Code Llama: Open Foundation Models for Code](https://ai.meta.com/research/publications/code-llama-open-foundation-models-for-code/)" or its [arXiv page](https://arxiv.org/abs/2308.12950). ## Intended Use **Intended Use Cases** Code Llama and its variants is intended for commercial and research use in English and relevant programming languages. The base model Code Llama can be adapted for a variety of code synthesis and understanding tasks, Code Llama - Python is designed specifically to handle the Python programming language, and Code Llama - Instruct is intended to be safer to use for code assistant and generation applications. 
**Out-of-Scope Uses** Use in any manner that violates applicable laws or regulations (including trade compliance laws). Use in languages other than English. Use in any other way that is prohibited by the Acceptable Use Policy and Licensing Agreement for Code Llama and its variants. ## Hardware and Software **Training Factors** We used custom training libraries. The training and fine-tuning of the released models have been performed Meta’s Research Super Cluster. **Carbon Footprint** In aggregate, training all 9 Code Llama models required 400K GPU hours of computation on hardware of type A100-80GB (TDP of 350-400W). Estimated total emissions were 65.3 tCO2eq, 100% of which were offset by Meta’s sustainability program. ## Training Data All experiments reported here and the released models have been trained and fine-tuned using the same data as Llama 2 with different weights (see Section 2 and Table 1 in the [research paper](https://ai.meta.com/research/publications/code-llama-open-foundation-models-for-code/) for details). ## Evaluation Results See evaluations for the main models and detailed ablations in Section 3 and safety evaluations in Section 4 of the research paper. ## Ethical Considerations and Limitations Code Llama and its variants are a new technology that carries risks with use. Testing conducted to date has been in English, and has not covered, nor could it cover all scenarios. For these reasons, as with all LLMs, Code Llama’s potential outputs cannot be predicted in advance, and the model may in some instances produce inaccurate or objectionable responses to user prompts. Therefore, before deploying any applications of Code Llama, developers should perform safety testing and tuning tailored to their specific applications of the model. Please see the Responsible Use Guide available available at [https://ai.meta.com/llama/responsible-use-guide](https://ai.meta.com/llama/responsible-use-guide).
6,162
[ [ -0.027740478515625, -0.0462646484375, 0.02154541015625, 0.041656494140625, -0.016204833984375, 0.012115478515625, -0.00568389892578125, -0.047821044921875, 0.0181121826171875, 0.0372314453125, -0.02960205078125, -0.040802001953125, -0.042144775390625, 0.02447509765625, -0.03680419921875, 0.0899658203125, -0.0046844482421875, -0.0237579345703125, -0.0220184326171875, -0.0003421306610107422, -0.0169219970703125, -0.047454833984375, -0.0122222900390625, -0.0347900390625, 0.0255279541015625, 0.019317626953125, 0.054931640625, 0.046478271484375, 0.036956787109375, 0.0237274169921875, -0.0234527587890625, -0.00048470497131347656, -0.0213623046875, -0.027374267578125, 0.016357421875, -0.044708251953125, -0.05810546875, -0.002033233642578125, 0.0253753662109375, 0.025909423828125, -0.0230560302734375, 0.031494140625, -0.01336669921875, 0.03631591796875, -0.0250396728515625, 0.0158843994140625, -0.045562744140625, -0.0034122467041015625, 0.0031261444091796875, -0.005565643310546875, -0.00688934326171875, -0.04168701171875, -0.00848388671875, -0.032440185546875, -0.007732391357421875, -0.0041656494140625, 0.08154296875, 0.04052734375, -0.0232391357421875, -0.01776123046875, -0.020416259765625, 0.059661865234375, -0.07269287109375, 0.0007958412170410156, 0.0291290283203125, -0.00380706787109375, -0.01104736328125, -0.061981201171875, -0.055694580078125, -0.0273590087890625, -0.0087890625, -0.0034503936767578125, -0.036041259765625, 0.005237579345703125, 0.031494140625, 0.038330078125, -0.0343017578125, 0.0123748779296875, -0.032562255859375, -0.017852783203125, 0.068359375, 0.00859832763671875, 0.032623291015625, -0.0192108154296875, -0.0254974365234375, -0.0007390975952148438, -0.06378173828125, 0.0017690658569335938, 0.035980224609375, -0.0108184814453125, -0.058441162109375, 0.056304931640625, -0.01412200927734375, 0.042236328125, 0.0018415451049804688, -0.041656494140625, 0.040069580078125, -0.0241851806640625, -0.022186279296875, -0.0108489990234375, 0.06640625, 
0.038116455078125, 0.0282440185546875, 0.003612518310546875, -0.0183258056640625, 0.0243682861328125, 0.01004791259765625, -0.06072998046875, -0.00493621826171875, 0.0241851806640625, -0.0460205078125, -0.051483154296875, -0.0225067138671875, -0.0606689453125, -0.009429931640625, -0.003948211669921875, 0.00830078125, -0.012847900390625, -0.031524658203125, 0.01555633544921875, 0.0065460205078125, 0.03375244140625, 0.0082855224609375, -0.06451416015625, 0.0030879974365234375, 0.037261962890625, 0.05657958984375, 0.0016326904296875, -0.037200927734375, 0.0033054351806640625, -0.01001739501953125, -0.0263824462890625, 0.049957275390625, -0.035308837890625, -0.036651611328125, -0.006862640380859375, 0.006710052490234375, -0.000141143798828125, -0.038787841796875, 0.01523590087890625, -0.0267181396484375, -0.0018854141235351562, 0.01125335693359375, -0.0194854736328125, -0.034149169921875, 0.003513336181640625, -0.042388916015625, 0.08477783203125, 0.021484375, -0.049407958984375, -0.00354766845703125, -0.041961669921875, -0.0274658203125, -0.0190887451171875, -0.0021076202392578125, -0.0487060546875, -0.0044097900390625, 0.0132598876953125, 0.036956787109375, -0.0299530029296875, 0.033203125, -0.00872802734375, -0.0299530029296875, 0.01715087890625, -0.01213836669921875, 0.07476806640625, 0.02734375, -0.0333251953125, 0.016357421875, -0.06854248046875, -0.00958251953125, 0.036834716796875, -0.041290283203125, 0.00995635986328125, -0.009735107421875, -0.0006756782531738281, -0.0036678314208984375, 0.042694091796875, -0.01983642578125, 0.042327880859375, -0.0285491943359375, 0.0567626953125, 0.048492431640625, -0.0020427703857421875, 0.0298309326171875, -0.0447998046875, 0.059844970703125, -0.01293182373046875, 0.01328277587890625, -0.0217132568359375, -0.056549072265625, -0.07427978515625, -0.02215576171875, 0.0016841888427734375, 0.05291748046875, -0.03619384765625, 0.046051025390625, 0.0008177757263183594, -0.056793212890625, -0.0377197265625, 0.01552581787109375, 
0.040496826171875, 0.0191802978515625, 0.0241851806640625, -0.00653076171875, -0.060089111328125, -0.06329345703125, 0.004425048828125, -0.0333251953125, 0.007480621337890625, 0.016021728515625, 0.06390380859375, -0.050750732421875, 0.05853271484375, -0.03204345703125, 0.0007042884826660156, -0.0285186767578125, -0.0204620361328125, 0.0382080078125, 0.04034423828125, 0.055755615234375, -0.0439453125, -0.0173797607421875, 0.00450897216796875, -0.06414794921875, -0.00957489013671875, -0.01556396484375, -0.001895904541015625, 0.03240966796875, 0.022186279296875, -0.049560546875, 0.038543701171875, 0.0675048828125, -0.015869140625, 0.046142578125, -0.01096343994140625, -0.0120849609375, -0.07763671875, 0.0157928466796875, -0.0108184814453125, -0.0012054443359375, -0.03759765625, 0.0282440185546875, 0.00690460205078125, 0.006710052490234375, -0.038970947265625, 0.025115966796875, -0.0276641845703125, -0.002162933349609375, -0.00913238525390625, -0.01727294921875, -0.0025844573974609375, 0.0557861328125, -0.003536224365234375, 0.07464599609375, 0.0390625, -0.04779052734375, 0.024383544921875, 0.0239715576171875, -0.029052734375, 0.0140380859375, -0.07061767578125, 0.026702880859375, 0.0098419189453125, 0.0258636474609375, -0.057373046875, -0.019683837890625, 0.02581787109375, -0.033477783203125, 0.00676727294921875, -0.0016279220581054688, -0.036407470703125, -0.0355224609375, -0.019256591796875, 0.0335693359375, 0.064453125, -0.0458984375, 0.0299530029296875, 0.0311431884765625, 0.007232666015625, -0.055450439453125, -0.054595947265625, 0.0101165771484375, -0.03515625, -0.04705810546875, 0.03143310546875, -0.0230560302734375, -0.016632080078125, -0.01236724853515625, 0.0034999847412109375, -0.0013456344604492188, 0.0233154296875, 0.033782958984375, 0.0304718017578125, -0.0092620849609375, -0.0167236328125, -0.00021851062774658203, -0.007389068603515625, 0.00403594970703125, 0.0123748779296875, 0.057373046875, -0.0297393798828125, -0.0154571533203125, -0.0423583984375, 
0.01425933837890625, 0.04388427734375, -0.019927978515625, 0.043426513671875, 0.026519775390625, -0.0291900634765625, -0.001644134521484375, -0.048187255859375, 0.011474609375, -0.040802001953125, 0.022796630859375, -0.01837158203125, -0.06256103515625, 0.0491943359375, 0.00640106201171875, 0.01397705078125, 0.035186767578125, 0.06072998046875, 0.007350921630859375, 0.055694580078125, 0.07257080078125, -0.032989501953125, 0.0299224853515625, -0.040069580078125, 0.0077056884765625, -0.059906005859375, -0.03472900390625, -0.047698974609375, -0.0014324188232421875, -0.051788330078125, -0.033782958984375, 0.023162841796875, 0.0147552490234375, -0.03704833984375, 0.056243896484375, -0.0592041015625, 0.0311737060546875, 0.03314208984375, 0.0009145736694335938, 0.029327392578125, 0.003284454345703125, -0.0014495849609375, 0.023162841796875, -0.032928466796875, -0.05426025390625, 0.0911865234375, 0.03399658203125, 0.06365966796875, -0.002506256103515625, 0.063232421875, 0.005947113037109375, 0.024627685546875, -0.05169677734375, 0.044952392578125, 0.022186279296875, -0.0372314453125, 0.0006852149963378906, -0.0167999267578125, -0.06890869140625, 0.011627197265625, 0.0057373046875, -0.060791015625, 0.005298614501953125, -0.0036373138427734375, -0.017303466796875, 0.0231475830078125, -0.0487060546875, 0.045379638671875, -0.015899658203125, 0.0028553009033203125, -0.0145263671875, -0.0390625, 0.0440673828125, -0.01047515869140625, 0.0173187255859375, -0.01018524169921875, -0.016632080078125, 0.04925537109375, -0.03997802734375, 0.07977294921875, 0.010650634765625, -0.035980224609375, 0.044921875, -0.00103759765625, 0.034515380859375, 0.00037789344787597656, -0.0172271728515625, 0.05364990234375, 0.0006480216979980469, -0.01461029052734375, -0.0078887939453125, 0.04705810546875, -0.08013916015625, -0.057037353515625, -0.032562255859375, -0.03582763671875, 0.021453857421875, 0.01172637939453125, 0.0278778076171875, 0.003284454345703125, 0.01381683349609375, 0.00982666015625, 
0.02850341796875, -0.052520751953125, 0.0469970703125, 0.0269012451171875, -0.0209808349609375, -0.03631591796875, 0.06182861328125, -0.0099945068359375, 0.01490020751953125, 0.02032470703125, 0.00334930419921875, -0.00968170166015625, -0.035858154296875, -0.030364990234375, 0.033477783203125, -0.047698974609375, -0.041259765625, -0.047027587890625, -0.0269012451171875, -0.026336669921875, -0.0239715576171875, -0.02001953125, -0.020843505859375, -0.0501708984375, -0.0123748779296875, 0.05755615234375, 0.060791015625, 0.0026702880859375, 0.033599853515625, -0.04559326171875, 0.034271240234375, 0.00778961181640625, 0.028076171875, 0.00013434886932373047, -0.03515625, -0.00904083251953125, -0.0014734268188476562, -0.040283203125, -0.06524658203125, 0.04608154296875, 0.00907135009765625, 0.047576904296875, 0.00943756103515625, -0.0034027099609375, 0.050140380859375, -0.032562255859375, 0.0712890625, 0.025299072265625, -0.08154296875, 0.04718017578125, -0.0190887451171875, 0.0022563934326171875, 0.005687713623046875, 0.0267181396484375, -0.03265380859375, -0.019622802734375, -0.048095703125, -0.055908203125, 0.045684814453125, 0.0131378173828125, 0.021728515625, 0.0030422210693359375, 0.0328369140625, -0.0014963150024414062, 0.023651123046875, -0.07989501953125, -0.02471923828125, -0.02471923828125, -0.0177154541015625, -0.0071563720703125, -0.0212860107421875, -0.006282806396484375, -0.0209503173828125, 0.03265380859375, -0.0140533447265625, 0.04010009765625, 0.00995635986328125, -0.012359619140625, -0.01873779296875, 0.004306793212890625, 0.050689697265625, 0.044281005859375, -0.002044677734375, -0.0114288330078125, 0.0291595458984375, -0.040740966796875, 0.01739501953125, -0.00890350341796875, -0.00583648681640625, -0.0228424072265625, 0.042144775390625, 0.04718017578125, 0.00913238525390625, -0.06329345703125, 0.037811279296875, 0.0116424560546875, -0.020263671875, -0.03863525390625, 0.021026611328125, 0.0215301513671875, 0.0269317626953125, 0.0201873779296875, 
0.0025653839111328125, -0.0078125, -0.03253173828125, -0.0006151199340820312, 0.026275634765625, 0.01418304443359375, -0.026275634765625, 0.06878662109375, 0.007335662841796875, -0.0279998779296875, 0.035491943359375, 0.00653076171875, -0.0440673828125, 0.0887451171875, 0.05206298828125, 0.056610107421875, -0.01480865478515625, 0.00858306884765625, 0.03436279296875, 0.041046142578125, -0.00015211105346679688, 0.0318603515625, 0.0004258155822753906, -0.03924560546875, -0.0250396728515625, -0.06475830078125, -0.03021240234375, 0.00731658935546875, -0.0345458984375, 0.0230712890625, -0.046905517578125, -0.00287628173828125, -0.0283966064453125, 0.00787353515625, -0.04583740234375, -0.00043487548828125, 0.0099334716796875, 0.07196044921875, -0.046630859375, 0.0693359375, 0.044189453125, -0.05401611328125, -0.06695556640625, -0.01458740234375, -0.004619598388671875, -0.0921630859375, 0.03631591796875, 0.021453857421875, 0.0049591064453125, 0.00485992431640625, -0.0703125, -0.0806884765625, 0.0966796875, 0.034423828125, -0.038055419921875, -0.0018014907836914062, 0.016143798828125, 0.0423583984375, -0.026458740234375, 0.0305023193359375, 0.0489501953125, 0.03228759765625, -0.0089263916015625, -0.09088134765625, 0.0244903564453125, -0.03021240234375, 0.016632080078125, -0.0230712890625, -0.07830810546875, 0.078857421875, -0.0418701171875, -0.009918212890625, 0.0367431640625, 0.047698974609375, 0.041961669921875, 0.01561737060546875, 0.0253448486328125, 0.0423583984375, 0.048309326171875, 0.0009222030639648438, 0.089111328125, -0.03326416015625, 0.030731201171875, 0.036407470703125, -0.009033203125, 0.054229736328125, 0.0306243896484375, -0.045074462890625, 0.056488037109375, 0.058074951171875, -0.0159759521484375, 0.021453857421875, 0.02459716796875, -0.00545501708984375, -0.002124786376953125, -0.007171630859375, -0.057098388671875, 0.02850341796875, 0.0240936279296875, -0.0252532958984375, 0.00496673583984375, -0.0163116455078125, 0.022796630859375, -0.0097503662109375, 
-0.005985260009765625, 0.047943115234375, 0.017333984375, -0.040496826171875, 0.087890625, 0.00908660888671875, 0.0738525390625, -0.039398193359375, -0.00913238525390625, -0.03326416015625, 0.0042877197265625, -0.042999267578125, -0.039947509765625, 0.0134124755859375, 0.0225372314453125, 0.0008821487426757812, -0.0096282958984375, 0.034912109375, -0.0043487548828125, -0.0361328125, 0.029144287109375, 0.01276397705078125, 0.0214080810546875, 0.009490966796875, -0.050323486328125, 0.034881591796875, 0.01389312744140625, -0.034698486328125, 0.0272216796875, 0.007663726806640625, 0.0036296844482421875, 0.07110595703125, 0.05828857421875, -0.00940704345703125, 0.01248931884765625, -0.01031494140625, 0.08477783203125, -0.05255126953125, -0.0258636474609375, -0.059661865234375, 0.048126220703125, 0.022918701171875, -0.03302001953125, 0.04583740234375, 0.0267181396484375, 0.06103515625, -0.00952911376953125, 0.0625, -0.01434326171875, 0.005245208740234375, -0.034271240234375, 0.050079345703125, -0.05853271484375, 0.0288848876953125, -0.0374755859375, -0.069091796875, -0.023590087890625, 0.0655517578125, -0.00308990478515625, 0.004154205322265625, 0.039276123046875, 0.0740966796875, 0.0243377685546875, -0.007415771484375, 0.01525115966796875, 0.01467132568359375, 0.029693603515625, 0.05999755859375, 0.0750732421875, -0.044158935546875, 0.05401611328125, -0.04296875, -0.0184783935546875, -0.021942138671875, -0.07440185546875, -0.07318115234375, -0.037139892578125, -0.026031494140625, -0.0293426513671875, -0.0205230712890625, 0.0672607421875, 0.0416259765625, -0.04437255859375, -0.0361328125, -0.01074981689453125, 0.0312347412109375, -0.00855255126953125, -0.01519012451171875, 0.020904541015625, -0.009368896484375, -0.06317138671875, 0.029296875, -0.0027370452880859375, 0.01189422607421875, -0.02593994140625, -0.019500732421875, -0.009765625, 0.0008816719055175781, 0.034942626953125, 0.027191162109375, -0.0628662109375, -0.0146636962890625, 0.005672454833984375, 
-0.014892578125, 0.00910186767578125, 0.031524658203125, -0.048187255859375, -0.005825042724609375, 0.026153564453125, 0.03302001953125, 0.024688720703125, -0.0179443359375, 0.0173187255859375, -0.0268402099609375, 0.0322265625, 0.0003941059112548828, 0.0379638671875, 0.00749969482421875, -0.044525146484375, 0.052398681640625, 0.0209808349609375, -0.050689697265625, -0.06890869140625, 0.010284423828125, -0.08380126953125, -0.0164337158203125, 0.09814453125, -0.00859832763671875, -0.0252532958984375, 0.0132598876953125, -0.0287322998046875, 0.019378662109375, -0.0289306640625, 0.052825927734375, 0.02252197265625, -0.00614166259765625, -0.0111846923828125, -0.031036376953125, 0.019683837890625, 0.018798828125, -0.07110595703125, -0.01181793212890625, 0.0277862548828125, 0.029876708984375, 0.0160675048828125, 0.05133056640625, -0.00974273681640625, 0.01334381103515625, 0.004413604736328125, 0.034027099609375, -0.007068634033203125, -0.016876220703125, -0.0300445556640625, -0.00423431396484375, -0.00725555419921875, -0.00244140625 ] ]
DeepChem/ChemBERTa-10M-MTR
2022-11-16T23:00:19.000Z
[ "transformers", "pytorch", "roberta", "arxiv:1910.09700", "endpoints_compatible", "region:us" ]
null
DeepChem
null
null
DeepChem/ChemBERTa-10M-MTR
4
8,164
transformers
2022-03-02T23:29:04
--- tags: - roberta --- # Model Card for ChemBERTa-10M-MTR # Model Details ## Model Description More information needed - **Developed by:** DeepChem - **Shared by [Optional]:** DeepChem - **Model type:** Token Classification - **Language(s) (NLP):** More information needed - **License:** More information needed - **Parent Model:** [RoBERTa](https://huggingface.co/roberta-base?text=The+goal+of+life+is+%3Cmask%3E.) - **Resources for more information:** More information needed # Uses ## Direct Use More information needed. ## Downstream Use [Optional] More information needed. ## Out-of-Scope Use The model should not be used to intentionally create hostile or alienating environments for people. # Bias, Risks, and Limitations Significant research has explored bias and fairness issues with language models (see, e.g., [Sheng et al. (2021)](https://aclanthology.org/2021.acl-long.330.pdf) and [Bender et al. (2021)](https://dl.acm.org/doi/pdf/10.1145/3442188.3445922)). Predictions generated by the model may include disturbing and harmful stereotypes across protected classes; identity characteristics; and sensitive, social, and occupational groups. ## Recommendations Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. # Training Details ## Training Data More information needed ## Training Procedure ### Preprocessing More information needed ### Speeds, Sizes, Times More information needed # Evaluation ## Testing Data, Factors & Metrics ### Testing Data More information needed ### Factors More information needed ### Metrics More information needed ## Results More information needed # Model Examination More information needed # Environmental Impact Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). 
- **Hardware Type:** More information needed - **Hours used:** More information needed - **Cloud Provider:** More information needed - **Compute Region:** More information needed - **Carbon Emitted:** More information needed # Technical Specifications [optional] ## Model Architecture and Objective More information needed ## Compute Infrastructure More information needed ### Hardware More information needed ### Software More information needed. # Citation **BibTeX:** ```bibtex @book{Ramsundar-et-al-2019, title={Deep Learning for the Life Sciences}, author={Bharath Ramsundar and Peter Eastman and Patrick Walters and Vijay Pande and Karl Leswing and Zhenqin Wu}, publisher={O'Reilly Media}, note={\url{https://www.amazon.com/Deep-Learning-Life-Sciences-Microscopy/dp/1492039837}}, year={2019} } ``` **APA:** More information needed # Glossary [optional] More information needed # More Information [optional] More information needed # Model Card Authors [optional] DeepChem in collaboration with Ezi Ozoani and the Hugging Face team # Model Card Contact More information needed # How to Get Started with the Model Use the code below to get started with the model. <details> <summary> Click to expand </summary> ```python from transformers import AutoTokenizer, RobertaForRegression tokenizer = AutoTokenizer.from_pretrained("DeepChem/ChemBERTa-10M-MTR") model = RobertaForRegression.from_pretrained("DeepChem/ChemBERTa-10M-MTR") ``` </details>
3,581
[ [ -0.019317626953125, -0.04010009765625, 0.037139892578125, -0.00640106201171875, -0.0267486572265625, 0.0076751708984375, 0.006946563720703125, -0.02703857421875, 0.007038116455078125, 0.037109375, -0.049774169921875, -0.0491943359375, -0.051544189453125, -0.01061248779296875, -0.01568603515625, 0.0654296875, 0.016815185546875, 0.03265380859375, -0.01287841796875, 0.0072021484375, -0.016204833984375, -0.034332275390625, -0.0628662109375, -0.037322998046875, 0.045654296875, 0.0087890625, 0.03924560546875, 0.050506591796875, 0.043731689453125, 0.0276641845703125, -0.038330078125, 0.004543304443359375, -0.017578125, -0.025604248046875, -0.0149993896484375, -0.019622802734375, -0.066162109375, 0.0157012939453125, 0.039154052734375, 0.07122802734375, -0.00933074951171875, 0.0248260498046875, 0.01236724853515625, 0.039398193359375, -0.03289794921875, 0.021148681640625, -0.034332275390625, 0.02093505859375, -0.004367828369140625, 0.00608062744140625, -0.0152740478515625, -0.00998687744140625, -0.00731658935546875, -0.033355712890625, 0.009429931640625, -0.01100921630859375, 0.0762939453125, 0.013641357421875, -0.034332275390625, -0.01494598388671875, -0.04443359375, 0.05072021484375, -0.07366943359375, 0.01837158203125, 0.01552581787109375, 0.03167724609375, -0.0031585693359375, -0.074462890625, -0.04425048828125, -0.01175689697265625, -0.007358551025390625, 0.0195159912109375, -0.00445556640625, 0.008270263671875, 0.0165252685546875, 0.0540771484375, -0.028717041015625, -0.0024700164794921875, -0.04669189453125, -0.026153564453125, 0.038482666015625, 0.039947509765625, 0.01001739501953125, -0.0207366943359375, -0.032470703125, -0.025634765625, -0.0167999267578125, 0.0051727294921875, 0.026519775390625, 0.0242919921875, -0.0250701904296875, 0.0399169921875, -0.01561737060546875, 0.02374267578125, 0.003826141357421875, 0.009429931640625, 0.03497314453125, -0.035308837890625, -0.0255126953125, 0.0011186599731445312, 0.05712890625, 0.015106201171875, -0.0102996826171875, 
0.018463134765625, -0.00775909423828125, -0.00481414794921875, 0.00412750244140625, -0.1002197265625, -0.022979736328125, 0.02325439453125, -0.042144775390625, -0.033111572265625, 0.01523590087890625, -0.08160400390625, -0.0179443359375, -0.018157958984375, 0.026397705078125, -0.018096923828125, -0.02093505859375, 0.0159454345703125, -0.001140594482421875, 0.01934814453125, 0.01126861572265625, -0.04229736328125, 0.0296173095703125, 0.0292205810546875, 0.0679931640625, -0.006061553955078125, -0.0167999267578125, -0.019439697265625, -0.003421783447265625, -0.01153564453125, 0.047943115234375, -0.04412841796875, -0.03924560546875, -0.01093292236328125, 0.032257080078125, -0.000980377197265625, -0.0338134765625, 0.041259765625, -0.033721923828125, 0.0252838134765625, -0.0182342529296875, -0.04034423828125, -0.034515380859375, 0.00405120849609375, -0.057708740234375, 0.07208251953125, 0.01715087890625, -0.047698974609375, 0.0263519287109375, -0.075927734375, -0.017852783203125, 0.0189666748046875, 0.004604339599609375, -0.051544189453125, -0.0175628662109375, -0.010162353515625, 0.03826904296875, -0.019134521484375, 0.048004150390625, -0.0379638671875, 0.0062255859375, 0.006175994873046875, -0.0223846435546875, 0.096435546875, 0.046234130859375, -0.020263671875, 0.02227783203125, -0.06500244140625, 0.007568359375, 0.00919342041015625, -0.0340576171875, 0.006519317626953125, -0.0181884765625, 0.0179443359375, 0.02459716796875, 0.024993896484375, -0.03997802734375, -0.002742767333984375, -0.01132965087890625, 0.0538330078125, 0.049713134765625, 0.0090179443359375, 0.01593017578125, -0.01343536376953125, 0.0321044921875, 0.0277557373046875, 0.0289459228515625, -0.00658416748046875, -0.0262298583984375, -0.0673828125, -0.014923095703125, 0.0274658203125, 0.043212890625, -0.01461029052734375, 0.066162109375, -0.00897216796875, -0.06884765625, -0.0247039794921875, 0.01216888427734375, 0.0323486328125, 0.03863525390625, 0.040069580078125, -0.024444580078125, 
-0.045318603515625, -0.07330322265625, -0.01360321044921875, 0.01216888427734375, 0.013397216796875, 0.026947021484375, 0.076416015625, -0.047271728515625, 0.055328369140625, -0.05609130859375, -0.0253448486328125, -0.0136871337890625, 0.0054779052734375, 0.019012451171875, 0.0594482421875, 0.0318603515625, -0.051544189453125, -0.029449462890625, -0.01666259765625, -0.047607421875, 0.011383056640625, 0.011871337890625, -0.01198577880859375, 0.01181793212890625, 0.035064697265625, -0.039459228515625, 0.0350341796875, 0.03271484375, -0.042449951171875, 0.0281524658203125, -0.0183258056640625, 0.004215240478515625, -0.096923828125, 0.04052734375, -0.0019664764404296875, -0.01434326171875, -0.049102783203125, 0.004169464111328125, 0.002147674560546875, -0.01824951171875, -0.06439208984375, 0.059783935546875, -0.0187530517578125, 0.0232086181640625, -0.0249176025390625, -0.03704833984375, 0.01108551025390625, 0.04913330078125, 0.0094146728515625, 0.0445556640625, 0.03192138671875, -0.055023193359375, 0.01404571533203125, 0.01477813720703125, -0.0275421142578125, 0.0295257568359375, -0.057861328125, 0.00916290283203125, 0.0075531005859375, 0.035064697265625, -0.056243896484375, -0.0204925537109375, 0.031036376953125, -0.035858154296875, 0.0250091552734375, 0.0167694091796875, -0.040069580078125, -0.028564453125, 0.00006604194641113281, 0.036834716796875, 0.0426025390625, -0.0282135009765625, 0.058441162109375, 0.026031494140625, 0.011016845703125, -0.0168304443359375, -0.068359375, -0.0054931640625, -0.0231170654296875, -0.02203369140625, 0.038299560546875, -0.0203399658203125, 0.0005788803100585938, 0.0033702850341796875, 0.00144195556640625, -0.0189666748046875, 0.00707244873046875, 0.027557373046875, 0.0160369873046875, 0.00518798828125, 0.0026874542236328125, -0.0010614395141601562, -0.0280914306640625, 0.0189208984375, -0.01910400390625, 0.050201416015625, -0.0211181640625, 0.00513458251953125, -0.0404052734375, 0.0224151611328125, 0.041717529296875, 
-0.01299285888671875, 0.057464599609375, 0.0633544921875, -0.046600341796875, 0.0104217529296875, -0.03240966796875, -0.020843505859375, -0.038299560546875, 0.0411376953125, -0.01213836669921875, -0.0467529296875, 0.0596923828125, -0.0123443603515625, -0.0255126953125, 0.07598876953125, 0.037139892578125, 0.0015687942504882812, 0.061614990234375, 0.04461669921875, -0.003055572509765625, 0.0258026123046875, -0.048858642578125, -0.0024433135986328125, -0.0706787109375, -0.048858642578125, -0.046417236328125, -0.014007568359375, -0.0295562744140625, -0.03070068359375, 0.00213623046875, -0.00603485107421875, -0.06353759765625, 0.033233642578125, -0.04962158203125, 0.016143798828125, 0.03955078125, 0.024658203125, 0.006732940673828125, -0.003692626953125, -0.0272064208984375, 0.0063934326171875, -0.053619384765625, -0.04241943359375, 0.053436279296875, 0.040679931640625, 0.04608154296875, -0.007740020751953125, 0.0352783203125, 0.0153045654296875, 0.01065826416015625, -0.041839599609375, 0.04833984375, -0.0177154541015625, -0.08099365234375, -0.0153656005859375, -0.029632568359375, -0.07232666015625, -0.004116058349609375, -0.004245758056640625, -0.039459228515625, 0.042755126953125, 0.00975799560546875, -0.031402587890625, 0.043212890625, -0.0252685546875, 0.08660888671875, -0.030548095703125, -0.0229339599609375, -0.0121917724609375, -0.059844970703125, 0.0217437744140625, -0.00980377197265625, 0.0192718505859375, -0.0100860595703125, -0.00787353515625, 0.068359375, -0.0455322265625, 0.09405517578125, -0.032073974609375, 0.0017719268798828125, 0.0290374755859375, -0.005306243896484375, 0.03497314453125, -0.00836181640625, -0.006000518798828125, 0.03485107421875, 0.0029964447021484375, -0.0316162109375, -0.0089111328125, 0.048919677734375, -0.059814453125, -0.0269622802734375, -0.0521240234375, -0.0301055908203125, 0.020843505859375, 0.0435791015625, 0.024993896484375, 0.01111602783203125, -0.004673004150390625, 0.0254669189453125, 0.0511474609375, -0.04083251953125, 
0.024017333984375, 0.03936767578125, -0.0113983154296875, -0.057159423828125, 0.07421875, 0.016204833984375, 0.0226593017578125, 0.023284912109375, 0.0257568359375, -0.025543212890625, -0.01708984375, -0.0044708251953125, 0.00452423095703125, -0.060394287109375, -0.0208587646484375, -0.0654296875, -0.041839599609375, -0.0299224853515625, 0.00582122802734375, -0.038818359375, -0.0125274658203125, -0.052886962890625, -0.01470184326171875, 0.03143310546875, 0.0279083251953125, -0.0196380615234375, 0.024658203125, -0.048492431640625, 0.00933074951171875, 0.0290374755859375, 0.03704833984375, 0.013031005859375, -0.04656982421875, -0.016387939453125, 0.0153656005859375, -0.037445068359375, -0.0589599609375, 0.03533935546875, 0.02044677734375, 0.04315185546875, 0.0252227783203125, 0.0009369850158691406, 0.0518798828125, -0.0210418701171875, 0.06903076171875, 0.01409912109375, -0.075927734375, 0.04156494140625, -0.020111083984375, 0.02264404296875, 0.055511474609375, 0.028839111328125, -0.032562255859375, -0.0012025833129882812, -0.065673828125, -0.06640625, 0.03594970703125, 0.01128387451171875, 0.0235748291015625, 0.0014476776123046875, 0.035797119140625, 0.0018291473388671875, 0.0164031982421875, -0.0791015625, -0.032073974609375, -0.03350830078125, -0.006549835205078125, -0.006687164306640625, -0.01561737060546875, -0.00171661376953125, -0.055908203125, 0.06854248046875, -0.005840301513671875, 0.03204345703125, 0.024444580078125, 0.0022640228271484375, -0.0035686492919921875, 0.006008148193359375, 0.0574951171875, 0.0181884765625, -0.038665771484375, 0.01537322998046875, 0.025848388671875, -0.05029296875, 0.0182342529296875, 0.0211029052734375, -0.0034580230712890625, -0.0203094482421875, 0.0185394287109375, 0.055938720703125, 0.0016489028930664062, -0.0318603515625, 0.0268402099609375, 0.0157470703125, -0.039093017578125, -0.03094482421875, 0.0161895751953125, 0.02093505859375, 0.0208740234375, 0.0308990478515625, 0.0116119384765625, 0.0275115966796875, 
-0.0345458984375, 0.006023406982421875, 0.041595458984375, -0.0606689453125, -0.01218414306640625, 0.07733154296875, 0.02191162109375, -0.0273895263671875, 0.045379638671875, -0.0236663818359375, -0.04095458984375, 0.07513427734375, 0.02606201171875, 0.0804443359375, -0.015869140625, 0.004009246826171875, 0.0509033203125, 0.015350341796875, 0.01415252685546875, 0.0235443115234375, 0.00801849365234375, -0.039764404296875, -0.01113128662109375, -0.0511474609375, -0.01218414306640625, 0.040008544921875, -0.04229736328125, 0.021728515625, -0.046173095703125, -0.01523590087890625, 0.01053619384765625, 0.035797119140625, -0.0673828125, 0.0338134765625, 0.0014629364013671875, 0.08203125, -0.06768798828125, 0.0677490234375, 0.04315185546875, -0.051971435546875, -0.052093505859375, -0.034271240234375, -0.0030040740966796875, -0.047943115234375, 0.057281494140625, 0.01422119140625, -0.000537872314453125, 0.01047515869140625, -0.05682373046875, -0.05413818359375, 0.107666015625, 0.0167236328125, -0.045440673828125, 0.016876220703125, 0.013580322265625, 0.036163330078125, -0.043914794921875, 0.0216064453125, 0.02423095703125, 0.059234619140625, 0.01194000244140625, -0.03326416015625, 0.0269775390625, -0.033203125, 0.0007677078247070312, -0.00684356689453125, -0.07452392578125, 0.0755615234375, -0.006866455078125, -0.0212554931640625, 0.013214111328125, 0.04010009765625, 0.015869140625, 0.031768798828125, 0.0275115966796875, 0.045318603515625, 0.047393798828125, 0.01084136962890625, 0.06414794921875, -0.033538818359375, 0.0374755859375, 0.08441162109375, -0.0118255615234375, 0.03936767578125, 0.032501220703125, -0.02984619140625, 0.042144775390625, 0.07659912109375, -0.0399169921875, 0.037506103515625, 0.0172271728515625, -0.0023822784423828125, -0.0258941650390625, -0.01561737060546875, -0.050872802734375, 0.00308990478515625, 0.0200653076171875, -0.059844970703125, -0.006114959716796875, -0.0164031982421875, 0.003414154052734375, -0.015106201171875, -0.0149993896484375, 
0.045379638671875, 0.0128631591796875, -0.0292205810546875, 0.0208740234375, 0.00218963623046875, 0.039093017578125, -0.03668212890625, -0.01338958740234375, -0.015838623046875, 0.019805908203125, -0.0210723876953125, -0.04132080078125, 0.0199737548828125, -0.023040771484375, -0.026611328125, 0.0077362060546875, 0.051025390625, -0.0235748291015625, -0.06964111328125, 0.0305633544921875, 0.02459716796875, 0.01959228515625, 0.00554656982421875, -0.07379150390625, -0.0007162094116210938, -0.004520416259765625, -0.0216064453125, 0.01250457763671875, 0.01141357421875, 0.020751953125, 0.052459716796875, 0.048858642578125, -0.007526397705078125, -0.00788116455078125, -0.0181427001953125, 0.07415771484375, -0.0653076171875, -0.023162841796875, -0.06573486328125, 0.056182861328125, -0.014251708984375, -0.0291748046875, 0.040435791015625, 0.07366943359375, 0.0570068359375, -0.020263671875, 0.049713134765625, 0.0038242340087890625, 0.0240936279296875, -0.0280609130859375, 0.062744140625, -0.0428466796875, 0.01198577880859375, -0.020843505859375, -0.07952880859375, -0.023284912109375, 0.047088623046875, -0.00860595703125, 0.03558349609375, 0.0574951171875, 0.06439208984375, -0.0169677734375, -0.010955810546875, 0.0087890625, 0.027862548828125, 0.034454345703125, 0.032928466796875, 0.035491943359375, -0.040679931640625, 0.03167724609375, -0.02728271484375, -0.00045013427734375, -0.013397216796875, -0.08392333984375, -0.06890869140625, -0.0406494140625, -0.048187255859375, -0.028900146484375, -0.01155853271484375, 0.054412841796875, 0.070068359375, -0.06414794921875, -0.01715087890625, -0.03192138671875, -0.01477813720703125, -0.0213165283203125, -0.016754150390625, 0.0379638671875, -0.0168304443359375, -0.047454833984375, -0.0018463134765625, 0.0007991790771484375, 0.0245208740234375, -0.042633056640625, -0.02911376953125, -0.0254669189453125, 0.012786865234375, 0.0258941650390625, 0.0191497802734375, -0.054412841796875, -0.0021839141845703125, -0.006931304931640625, 
-0.0216827392578125, 0.0028076171875, 0.037384033203125, -0.045440673828125, 0.01461029052734375, 0.0295257568359375, 0.0256195068359375, 0.04742431640625, -0.0245208740234375, 0.021514892578125, -0.013519287109375, -0.0017004013061523438, 0.01467132568359375, 0.0253143310546875, 0.013214111328125, -0.046722412109375, 0.04681396484375, 0.0243377685546875, -0.0498046875, -0.05780029296875, 0.00197601318359375, -0.0870361328125, -0.04150390625, 0.08056640625, 0.0014629364013671875, -0.031005859375, 0.01369476318359375, -0.02069091796875, 0.03521728515625, -0.03143310546875, 0.037567138671875, 0.0494384765625, 0.0160980224609375, -0.017974853515625, -0.03363037109375, 0.035430908203125, 0.008209228515625, -0.07696533203125, -0.00609588623046875, 0.02374267578125, 0.0301513671875, 0.01172637939453125, 0.0214691162109375, -0.0227508544921875, 0.01049041748046875, 0.00701141357421875, 0.025054931640625, -0.023040771484375, -0.0173492431640625, -0.03375244140625, -0.000010848045349121094, -0.014801025390625, -0.0139312744140625 ] ]
castorini/monot5-large-msmarco
2021-10-17T11:20:56.000Z
[ "transformers", "pytorch", "jax", "t5", "feature-extraction", "endpoints_compatible", "text-generation-inference", "region:us" ]
feature-extraction
castorini
null
null
castorini/monot5-large-msmarco
0
8,152
transformers
2022-03-02T23:29:05
This model is a T5-large reranker fine-tuned on the MS MARCO passage dataset for 100k steps (or 10 epochs). For more details on how to use it, check the following links: - [A simple reranking example](https://github.com/castorini/pygaggle#a-simple-reranking-example) - [Rerank MS MARCO passages](https://github.com/castorini/pygaggle/blob/master/docs/experiments-msmarco-passage-subset.md) - [Rerank Robust04 documents](https://github.com/castorini/pygaggle/blob/master/docs/experiments-robust04-monot5-gpu.md) Paper describing the model: [Document Ranking with a Pretrained Sequence-to-Sequence Model](https://www.aclweb.org/anthology/2020.findings-emnlp.63/)
662
[ [ -0.0125579833984375, -0.04681396484375, 0.03521728515625, 0.0006694793701171875, -0.0201873779296875, -0.0069580078125, 0.0008535385131835938, -0.014495849609375, 0.023956298828125, 0.035400390625, -0.04656982421875, -0.0498046875, -0.04217529296875, 0.0113983154296875, -0.04058837890625, 0.0733642578125, 0.020294189453125, 0.0175323486328125, 0.0194549560546875, 0.0131378173828125, -0.01519012451171875, 0.00237274169921875, -0.056243896484375, -0.02764892578125, 0.07830810546875, 0.04736328125, 0.04779052734375, 0.054473876953125, 0.040924072265625, 0.0225372314453125, -0.0208740234375, -0.0261077880859375, -0.053253173828125, 0.00894927978515625, -0.0159149169921875, -0.0287322998046875, -0.0615234375, 0.016998291015625, 0.062347412109375, 0.044586181640625, -0.01251220703125, 0.027679443359375, -0.00284576416015625, 0.0298004150390625, -0.025634765625, 0.0020542144775390625, -0.0291900634765625, 0.00345611572265625, -0.039581298828125, -0.001300811767578125, -0.05841064453125, -0.0265045166015625, 0.017608642578125, -0.052490234375, 0.03106689453125, 0.004558563232421875, 0.08428955078125, 0.0082550048828125, -0.03887939453125, 0.005657196044921875, -0.011383056640625, 0.043731689453125, -0.04547119140625, 0.031158447265625, 0.0164794921875, 0.044464111328125, -0.0042572021484375, -0.09588623046875, -0.0216217041015625, -0.009796142578125, 0.021697998046875, -0.01001739501953125, 0.0155487060546875, 0.00510406494140625, 0.034210205078125, 0.0355224609375, -0.05792236328125, -0.01171875, -0.052032470703125, -0.0017652511596679688, 0.05255126953125, 0.017913818359375, 0.004955291748046875, -0.0031490325927734375, -0.0302734375, -0.010406494140625, -0.040374755859375, 0.0010271072387695312, 0.02227783203125, 0.007709503173828125, 0.0127716064453125, 0.0287628173828125, -0.0271759033203125, 0.0787353515625, 0.01172637939453125, -0.006618499755859375, 0.036773681640625, -0.02001953125, -0.02008056640625, -0.009765625, 0.046142578125, 0.0036373138427734375, 
0.0210723876953125, -0.0138397216796875, -0.0236663818359375, -0.0312347412109375, 0.057464599609375, -0.07366943359375, -0.0426025390625, 0.003749847412109375, -0.052886962890625, -0.044342041015625, 0.020782470703125, -0.048309326171875, 0.0021305084228515625, -0.0149688720703125, 0.07366943359375, -0.0240325927734375, -0.004299163818359375, 0.0009555816650390625, -0.01331329345703125, 0.024444580078125, 0.0211334228515625, -0.046783447265625, -0.005340576171875, 0.048675537109375, 0.07366943359375, 0.0084075927734375, -0.04534912109375, -0.022003173828125, 0.00933074951171875, -0.0234222412109375, 0.0400390625, -0.048828125, -0.03179931640625, -0.00531005859375, 0.036163330078125, -0.0193328857421875, -0.0035915374755859375, 0.06634521484375, -0.05609130859375, 0.022552490234375, -0.0095672607421875, -0.040924072265625, -0.0262908935546875, 0.0243988037109375, -0.05792236328125, 0.052215576171875, -0.001232147216796875, -0.047027587890625, 0.032501220703125, -0.06365966796875, -0.01554107666015625, 0.0043487548828125, 0.043365478515625, -0.0521240234375, -0.0024261474609375, -0.01284027099609375, 0.0181732177734375, -0.00798797607421875, 0.0172119140625, -0.05364990234375, -0.0438232421875, -0.0030612945556640625, -0.022125244140625, 0.0645751953125, 0.043182373046875, -0.006809234619140625, 0.019012451171875, -0.053924560546875, 0.0254669189453125, 0.0093231201171875, -0.037811279296875, -0.01374053955078125, -0.0179290771484375, 0.01045989990234375, 0.01751708984375, 0.044464111328125, -0.039642333984375, 0.05279541015625, -0.0022373199462890625, 0.05194091796875, 0.03985595703125, 0.00847625732421875, 0.0193328857421875, -0.035400390625, 0.03582763671875, -0.00489044189453125, 0.046356201171875, -0.0263671875, -0.04876708984375, -0.05596923828125, -0.00933074951171875, 0.06414794921875, 0.0250396728515625, -0.03277587890625, 0.0225067138671875, -0.04034423828125, -0.0596923828125, -0.047760009765625, -0.002716064453125, 0.01959228515625, 0.022369384765625, 
0.02423095703125, -0.013397216796875, -0.0181732177734375, -0.07354736328125, -0.01031494140625, 0.0128173828125, -0.0008358955383300781, -0.0113983154296875, 0.044952392578125, -0.018890380859375, 0.031585693359375, -0.044921875, -0.0198516845703125, 0.00015783309936523438, 0.0208740234375, 0.041473388671875, 0.03839111328125, 0.0159759521484375, -0.05572509765625, -0.025543212890625, -0.041778564453125, -0.040802001953125, 0.0007395744323730469, 0.0157318115234375, -0.0301971435546875, -0.0093994140625, 0.050689697265625, -0.0237579345703125, 0.029754638671875, 0.040924072265625, -0.04620361328125, 0.01465606689453125, -0.0305328369140625, 0.0226898193359375, -0.108642578125, 0.02508544921875, -0.0006017684936523438, -0.0285186767578125, -0.04193115234375, 0.0310821533203125, 0.025238037109375, -0.03369140625, -0.017364501953125, 0.010345458984375, -0.0416259765625, 0.006694793701171875, -0.01267242431640625, -0.00553131103515625, -0.01303863525390625, 0.0162200927734375, 0.001567840576171875, 0.037841796875, 0.036376953125, -0.0362548828125, 0.0018711090087890625, 0.0248260498046875, -0.0275726318359375, 0.0254974365234375, -0.066162109375, 0.0171661376953125, 0.0192718505859375, 0.022247314453125, -0.08184814453125, -0.00799560546875, -0.00949859619140625, -0.044036865234375, 0.03363037109375, -0.03399658203125, -0.03155517578125, -0.029327392578125, -0.03515625, 0.048980712890625, 0.007598876953125, -0.04266357421875, 0.0328369140625, 0.0312347412109375, -0.022613525390625, -0.04388427734375, -0.036224365234375, 0.0267791748046875, -0.0235748291015625, -0.06292724609375, 0.0294647216796875, -0.005146026611328125, 0.0188140869140625, -0.023773193359375, 0.0269012451171875, -0.00447845458984375, 0.0015096664428710938, 0.0305023193359375, 0.018646240234375, -0.014984130859375, 0.00368499755859375, -0.00733184814453125, -0.0193328857421875, -0.0002951622009277344, -0.018524169921875, 0.0419921875, -0.00720977783203125, 0.005718231201171875, -0.023284912109375, 
0.00634765625, 0.05267333984375, -0.027557373046875, 0.056304931640625, 0.050994873046875, -0.02435302734375, -0.018157958984375, -0.0240020751953125, -0.0113372802734375, -0.034210205078125, 0.016937255859375, -0.049591064453125, -0.036346435546875, 0.0369873046875, -0.003047943115234375, 0.0013856887817382812, 0.040924072265625, 0.025238037109375, 0.003322601318359375, 0.06158447265625, 0.0302581787109375, -0.03338623046875, 0.057098388671875, -0.03814697265625, 0.01039886474609375, -0.054840087890625, -0.02044677734375, -0.047393798828125, -0.051361083984375, -0.0654296875, -0.031402587890625, 0.036956787109375, 0.00041747093200683594, -0.05255126953125, 0.0211944580078125, -0.019500732421875, 0.02813720703125, 0.0640869140625, 0.033233642578125, 0.0267486572265625, -0.01183319091796875, -0.014068603515625, 0.004032135009765625, -0.05267333984375, -0.031097412109375, 0.11767578125, 0.0265960693359375, 0.0721435546875, 0.0238037109375, 0.059326171875, 0.0301971435546875, 0.0233154296875, -0.04791259765625, 0.03125, -0.01519012451171875, -0.06768798828125, -0.037322998046875, -0.022430419921875, -0.08538818359375, 0.0152435302734375, -0.0199737548828125, -0.027557373046875, 0.00722503662109375, 0.005878448486328125, -0.00995635986328125, 0.01177215576171875, -0.05059814453125, 0.091552734375, 0.002044677734375, -0.044769287109375, -0.02685546875, -0.057769775390625, 0.032867431640625, 0.006298065185546875, -0.013092041015625, 0.033660888671875, 0.0056610107421875, 0.0419921875, -0.025390625, 0.034881591796875, -0.0234527587890625, 0.00511932373046875, 0.0019369125366210938, 0.006504058837890625, 0.035736083984375, 0.0125885009765625, -0.00868988037109375, 0.022125244140625, -0.00792694091796875, -0.0447998046875, 0.0034542083740234375, 0.08013916015625, -0.043182373046875, -0.0257110595703125, -0.040863037109375, -0.0297393798828125, 0.00865936279296875, 0.04241943359375, 0.026611328125, 0.034088134765625, -0.0177764892578125, 0.03387451171875, 0.060516357421875, 
-0.0113067626953125, 0.041351318359375, 0.0540771484375, -0.010986328125, -0.040283203125, 0.06781005859375, 0.00838470458984375, 0.011627197265625, 0.040313720703125, -0.00360107421875, -0.034637451171875, -0.03985595703125, -0.019805908203125, 0.03173828125, -0.0259552001953125, -0.005886077880859375, -0.0243988037109375, -0.02081298828125, -0.03204345703125, -0.0009293556213378906, -0.0156402587890625, -0.0303802490234375, -0.0055389404296875, -0.01251983642578125, 0.002277374267578125, 0.060638427734375, 0.0017709732055664062, 0.0168304443359375, -0.039306640625, 0.0185394287109375, 0.0225372314453125, 0.028167724609375, -0.00940704345703125, -0.07012939453125, -0.0204010009765625, -0.01226043701171875, -0.044921875, -0.06610107421875, 0.033233642578125, 0.00931549072265625, 0.042266845703125, 0.047760009765625, -0.018890380859375, 0.050445556640625, -0.04400634765625, 0.053070068359375, -0.0224151611328125, -0.06024169921875, 0.042388916015625, -0.05303955078125, 0.0225067138671875, 0.0498046875, 0.0208740234375, -0.02703857421875, -0.01418304443359375, -0.055450439453125, -0.07183837890625, 0.04730224609375, -0.01226806640625, -0.0233612060546875, 0.0243377685546875, 0.0261077880859375, -0.004360198974609375, 0.01396942138671875, -0.07037353515625, -0.00653076171875, -0.0179901123046875, -0.0192108154296875, -0.042449951171875, -0.0283050537109375, -0.006168365478515625, -0.01812744140625, 0.06085205078125, -0.002315521240234375, 0.02117919921875, 0.005218505859375, -0.0318603515625, -0.002582550048828125, 0.00566864013671875, 0.06549072265625, 0.05352783203125, -0.043487548828125, -0.01119232177734375, -0.0037174224853515625, -0.039642333984375, -0.00466156005859375, 0.0207977294921875, -0.0216827392578125, -0.0028972625732421875, 0.03472900390625, 0.045013427734375, 0.024078369140625, -0.0085296630859375, 0.03521728515625, 0.0005054473876953125, -0.024078369140625, -0.032684326171875, 0.001720428466796875, 0.01457977294921875, 0.021820068359375, 
0.0242919921875, -0.0185699462890625, 0.0303497314453125, -0.03326416015625, 0.029876708984375, 0.0157623291015625, -0.025299072265625, -0.0204315185546875, 0.0718994140625, 0.0264739990234375, -0.03155517578125, 0.059906005859375, -0.004650115966796875, -0.021331787109375, 0.0190887451171875, 0.023101806640625, 0.05242919921875, -0.038421630859375, 0.021484375, 0.05023193359375, 0.0267791748046875, -0.0313720703125, 0.0182647705078125, -0.00855255126953125, -0.01446533203125, -0.0035076141357421875, -0.037567138671875, -0.01837158203125, 0.0162506103515625, -0.0628662109375, 0.035003662109375, -0.0218658447265625, -0.0190277099609375, 0.0009679794311523438, 0.0258636474609375, -0.0419921875, 0.026641845703125, 0.000988006591796875, 0.0911865234375, -0.062408447265625, 0.0865478515625, 0.04473876953125, -0.038177490234375, -0.05328369140625, 0.00594329833984375, -0.00765228271484375, -0.054718017578125, 0.038848876953125, 0.004238128662109375, 0.014404296875, 0.007232666015625, -0.042724609375, -0.0589599609375, 0.11968994140625, 0.0118865966796875, -0.057373046875, -0.01509857177734375, -0.01421356201171875, 0.04217529296875, -0.0187835693359375, 0.034759521484375, 0.023406982421875, 0.036163330078125, -0.00614166259765625, -0.07110595703125, -0.00626373291015625, -0.0270233154296875, 0.005115509033203125, 0.0223388671875, -0.07080078125, 0.07696533203125, -0.033172607421875, 0.0302276611328125, 0.02923583984375, 0.02862548828125, 0.02740478515625, 0.044708251953125, 0.04022216796875, 0.0733642578125, 0.035552978515625, -0.020172119140625, 0.0689697265625, -0.038116455078125, 0.049957275390625, 0.0765380859375, 0.011871337890625, 0.07769775390625, 0.0281982421875, -0.0095062255859375, 0.043487548828125, 0.07568359375, -0.004787445068359375, 0.0560302734375, 0.019317626953125, -0.0048980712890625, -0.03729248046875, 0.0286865234375, -0.037994384765625, 0.0224151611328125, 0.01198577880859375, -0.07769775390625, -0.00984954833984375, -0.0207061767578125, 
-0.0110626220703125, -0.03515625, -0.0380859375, 0.04266357421875, 0.0026226043701171875, -0.051666259765625, 0.036346435546875, 0.0052032470703125, 0.031890869140625, -0.047088623046875, -0.006439208984375, -0.0269775390625, 0.0274658203125, -0.0007038116455078125, -0.056365966796875, -0.022674560546875, -0.009124755859375, -0.0214996337890625, -0.025054931640625, 0.03326416015625, -0.044036865234375, -0.044403076171875, 0.008697509765625, 0.01299285888671875, 0.0163421630859375, 0.0033054351806640625, -0.06622314453125, -0.016448974609375, 0.00792694091796875, -0.042999267578125, 0.018157958984375, 0.03607177734375, -0.0115509033203125, 0.06732177734375, 0.034393310546875, -0.0013475418090820312, 0.012603759765625, 0.0207366943359375, 0.0404052734375, -0.07080078125, -0.0552978515625, -0.0338134765625, 0.031829833984375, -0.007373809814453125, -0.043121337890625, 0.046905517578125, 0.059112548828125, 0.05523681640625, -0.0246734619140625, 0.0285491943359375, 0.007289886474609375, 0.035736083984375, -0.035858154296875, 0.0657958984375, -0.05792236328125, 0.004657745361328125, -0.024078369140625, -0.06304931640625, -0.00008279085159301758, 0.0552978515625, -0.00896453857421875, 0.01335906982421875, 0.047027587890625, 0.06475830078125, -0.0174407958984375, -0.007091522216796875, 0.0291595458984375, 0.037750244140625, 0.01629638671875, 0.051971435546875, 0.0352783203125, -0.047271728515625, 0.052734375, 0.00507354736328125, 0.00797271728515625, -0.03173828125, -0.046630859375, -0.06146240234375, -0.05291748046875, -0.0097503662109375, -0.042388916015625, 0.00786590576171875, 0.0421142578125, 0.05999755859375, -0.0499267578125, -0.01485443115234375, 0.010040283203125, -0.0010690689086914062, 0.00167083740234375, -0.0182647705078125, 0.013092041015625, -0.01299285888671875, -0.0714111328125, 0.0290374755859375, -0.0034084320068359375, -0.013519287109375, -0.01486968994140625, -0.00865936279296875, -0.0289306640625, -0.017669677734375, 0.0225067138671875, 
-0.00014603137969970703, -0.02532958984375, -0.0174713134765625, -0.00439453125, -0.0018024444580078125, -0.00975799560546875, 0.05401611328125, -0.034088134765625, 0.038909912109375, 0.049957275390625, 0.00963592529296875, 0.0621337890625, 0.0224151611328125, 0.042022705078125, -0.02008056640625, -0.01812744140625, -0.006084442138671875, 0.03857421875, 0.006038665771484375, -0.003025054931640625, 0.0560302734375, 0.0255584716796875, -0.066650390625, -0.057861328125, -0.00804901123046875, -0.096923828125, -0.01175689697265625, 0.07147216796875, -0.016815185546875, -0.0169219970703125, 0.0116119384765625, -0.007129669189453125, 0.01519012451171875, -0.04425048828125, 0.069091796875, 0.057464599609375, 0.01898193359375, -0.0163421630859375, -0.04327392578125, 0.0245361328125, 0.0163726806640625, -0.042388916015625, -0.031707763671875, 0.0292205810546875, 0.07366943359375, 0.0164947509765625, 0.0225067138671875, -0.0160064697265625, 0.0246734619140625, 0.005725860595703125, 0.0013246536254882812, -0.023956298828125, -0.038818359375, -0.0372314453125, 0.038177490234375, 0.01849365234375, -0.035125732421875 ] ]
google/t5-large-lm-adapt
2023-01-24T16:52:08.000Z
[ "transformers", "pytorch", "tf", "t5", "text2text-generation", "t5-lm-adapt", "en", "dataset:c4", "arxiv:2002.05202", "arxiv:1910.10683", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text2text-generation
google
null
null
google/t5-large-lm-adapt
6
8,150
transformers
2022-03-02T23:29:05
--- language: en datasets: - c4 tags: - t5-lm-adapt license: apache-2.0 --- [Google's T5](https://ai.googleblog.com/2020/02/exploring-transfer-learning-with-t5.html) Version 1.1 - LM-Adapted ## Version 1.1 - LM-Adapted [T5 Version 1.1 - LM Adapted](https://github.com/google-research/text-to-text-transfer-transformer/blob/main/released_checkpoints.md#lm-adapted-t511lm100k) includes the following improvements compared to the original [T5 model](https://huggingface.co/t5-large): - GEGLU activation in feed-forward hidden layer, rather than ReLU - see [here](https://arxiv.org/abs/2002.05202). - Dropout was turned off in pre-training (quality win). Dropout should be re-enabled during fine-tuning. - Pre-trained on C4 only without mixing in the downstream tasks. - no parameter sharing between embedding and classifier layer - "xl" and "xxl" replace "3B" and "11B". The model shapes are a bit different - larger `d_model` and smaller `num_heads` and `d_ff`. and is pretrained on both the denoising and language modeling objective. More specifically, this checkpoint is initialized from [T5 Version 1.1 - Large](https://huggingface.co/google/https://huggingface.co/google/t5-v1_1-large) and then trained for an additional 100K steps on the LM objective discussed in the [T5 paper](https://arxiv.org/pdf/1910.10683.pdf). This adaptation improves the ability of the model to be used for prompt tuning. **Note**: A popular fine-tuned version of the *T5 Version 1.1 - LM Adapted* model is [BigScience's T0pp](https://huggingface.co/bigscience/T0pp). Pretraining Dataset: [C4](https://huggingface.co/datasets/c4) Other Community Checkpoints: [here](https://huggingface.co/models?other=t5-lm-adapt) Paper: [Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer](https://arxiv.org/pdf/1910.10683.pdf) Authors: *Colin Raffel, Noam Shazeer, Adam Roberts, Katherine Lee, Sharan Narang, Michael Matena, Yanqi Zhou, Wei Li, Peter J. 
Liu* ## Abstract Transfer learning, where a model is first pre-trained on a data-rich task before being fine-tuned on a downstream task, has emerged as a powerful technique in natural language processing (NLP). The effectiveness of transfer learning has given rise to a diversity of approaches, methodology, and practice. In this paper, we explore the landscape of transfer learning techniques for NLP by introducing a unified framework that converts every language problem into a text-to-text format. Our systematic study compares pre-training objectives, architectures, unlabeled datasets, transfer approaches, and other factors on dozens of language understanding tasks. By combining the insights from our exploration with scale and our new “Colossal Clean Crawled Corpus”, we achieve state-of-the-art results on many benchmarks covering summarization, question answering, text classification, and more. To facilitate future work on transfer learning for NLP, we release our dataset, pre-trained models, and code. ![model image](https://camo.githubusercontent.com/623b4dea0b653f2ad3f36c71ebfe749a677ac0a1/68747470733a2f2f6d69726f2e6d656469756d2e636f6d2f6d61782f343030362f312a44304a31674e51663876727255704b657944387750412e706e67)
3,207
[ [ -0.0239105224609375, -0.03082275390625, 0.0318603515625, 0.0198822021484375, -0.0110931396484375, 0.01145172119140625, -0.0281829833984375, -0.04681396484375, -0.01110076904296875, 0.03289794921875, -0.05462646484375, -0.04296875, -0.061614990234375, 0.0227203369140625, -0.032806396484375, 0.0927734375, -0.00914764404296875, -0.006923675537109375, 0.006137847900390625, -0.005054473876953125, -0.03326416015625, -0.032684326171875, -0.06976318359375, -0.02764892578125, 0.032989501953125, 0.025421142578125, 0.0204010009765625, 0.027618408203125, 0.04498291015625, 0.01369476318359375, -0.00821685791015625, -0.0039005279541015625, -0.0478515625, -0.025177001953125, -0.0234832763671875, -0.016021728515625, -0.03839111328125, 0.003570556640625, 0.0550537109375, 0.0552978515625, -0.0027828216552734375, 0.01561737060546875, 0.0240631103515625, 0.049591064453125, -0.04681396484375, 0.0146331787109375, -0.04656982421875, 0.01111602783203125, -0.0015325546264648438, -0.0013246536254882812, -0.053131103515625, -0.0196380615234375, 0.034027099609375, -0.05413818359375, 0.02008056640625, -0.0027065277099609375, 0.0838623046875, 0.0293426513671875, -0.027435302734375, -0.01959228515625, -0.047607421875, 0.0689697265625, -0.052459716796875, 0.0309600830078125, 0.01479339599609375, 0.0254364013671875, 0.0011730194091796875, -0.07977294921875, -0.034576416015625, -0.0092620849609375, -0.005886077880859375, 0.004344940185546875, -0.0179901123046875, 0.0028171539306640625, 0.0149383544921875, 0.033447265625, -0.036590576171875, 0.022705078125, -0.044219970703125, -0.0226287841796875, 0.0310516357421875, -0.01439666748046875, 0.023406982421875, 0.0003147125244140625, -0.03912353515625, -0.017303466796875, -0.03955078125, 0.009765625, -0.019317626953125, 0.029571533203125, -0.026092529296875, -0.008056640625, -0.0007967948913574219, 0.06036376953125, 0.0155487060546875, -0.0107421875, 0.0283966064453125, -0.047454833984375, -0.01500701904296875, -0.0087738037109375, 0.0640869140625, 
0.01605224609375, 0.024810791015625, -0.028350830078125, -0.006195068359375, -0.02069091796875, 0.0244598388671875, -0.07208251953125, -0.02545166015625, -0.004001617431640625, -0.025726318359375, -0.0310821533203125, 0.0048980712890625, -0.043792724609375, -0.0103912353515625, -0.02252197265625, 0.041900634765625, -0.04339599609375, -0.018585205078125, 0.0279388427734375, 0.0039005279541015625, 0.036590576171875, 0.040618896484375, -0.07647705078125, 0.0285491943359375, 0.03411865234375, 0.067626953125, -0.038726806640625, -0.025421142578125, -0.047576904296875, -0.003253936767578125, -0.01232147216796875, 0.058349609375, -0.023834228515625, -0.019927978515625, -0.00591278076171875, 0.0193634033203125, -0.02459716796875, -0.0277099609375, 0.060760498046875, -0.0323486328125, 0.0478515625, -0.023681640625, -0.03558349609375, -0.0325927734375, 0.01485443115234375, -0.05029296875, 0.0830078125, 0.01239013671875, -0.039459228515625, 0.0323486328125, -0.06719970703125, -0.0285186767578125, -0.013519287109375, 0.0309600830078125, -0.0291595458984375, -0.0140380859375, 0.024871826171875, 0.039886474609375, -0.0128631591796875, 0.00788116455078125, -0.01519775390625, -0.02166748046875, -0.00830841064453125, -0.003696441650390625, 0.0682373046875, 0.022979736328125, -0.0200042724609375, 0.0083160400390625, -0.05206298828125, 0.01076507568359375, -0.0011615753173828125, -0.0254974365234375, 0.00792694091796875, -0.021697998046875, 0.003314971923828125, 0.0303192138671875, 0.019439697265625, -0.0228118896484375, 0.01824951171875, -0.029296875, 0.04205322265625, 0.0423583984375, -0.0088043212890625, 0.06317138671875, -0.03533935546875, 0.03759765625, 0.00377655029296875, 0.0000514984130859375, -0.016815185546875, -0.016693115234375, -0.061370849609375, -0.00772857666015625, 0.04974365234375, 0.0550537109375, -0.05084228515625, 0.050018310546875, -0.036865234375, -0.03759765625, -0.0518798828125, 0.006229400634765625, 0.0262451171875, 0.051177978515625, 0.054840087890625, 
-0.0222625732421875, -0.042724609375, -0.0390625, -0.024810791015625, 0.0009250640869140625, -0.00699615478515625, -0.00001800060272216797, 0.0289764404296875, -0.01454925537109375, 0.05621337890625, -0.0208587646484375, -0.03759765625, -0.046295166015625, 0.00677490234375, 0.006092071533203125, 0.04437255859375, 0.051239013671875, -0.03424072265625, -0.042205810546875, 0.0031452178955078125, -0.053680419921875, -0.01849365234375, -0.0146484375, -0.0031299591064453125, 0.0289154052734375, 0.049102783203125, -0.0204925537109375, 0.011627197265625, 0.054473876953125, -0.0233917236328125, 0.0260772705078125, -0.01454925537109375, 0.0018529891967773438, -0.118408203125, 0.0272064208984375, 0.004734039306640625, -0.036865234375, -0.06103515625, -0.005855560302734375, 0.0153961181640625, 0.005741119384765625, -0.03948974609375, 0.054412841796875, -0.0404052734375, 0.006656646728515625, -0.018707275390625, 0.01953125, -0.00211334228515625, 0.04150390625, -0.005184173583984375, 0.059722900390625, 0.040618896484375, -0.061614990234375, -0.0057525634765625, 0.02630615234375, -0.02044677734375, 0.0136566162109375, -0.045135498046875, 0.03204345703125, -0.005634307861328125, 0.0296478271484375, -0.06549072265625, 0.018585205078125, 0.034515380859375, -0.04156494140625, 0.04541015625, -0.00818634033203125, -0.0168304443359375, -0.01222991943359375, -0.028076171875, 0.018890380859375, 0.0509033203125, -0.046295166015625, 0.04034423828125, 0.009796142578125, -0.00572967529296875, -0.04693603515625, -0.048736572265625, 0.01690673828125, -0.0198211669921875, -0.041778564453125, 0.05487060546875, -0.004917144775390625, 0.013946533203125, -0.01053619384765625, -0.00344085693359375, -0.00972747802734375, 0.018798828125, -0.012725830078125, 0.0162353515625, -0.0070648193359375, 0.01123809814453125, 0.00673675537109375, -0.0187530517578125, -0.006618499755859375, -0.030426025390625, 0.029876708984375, -0.00875091552734375, 0.0202789306640625, -0.041168212890625, -0.001682281494140625, 
0.026092529296875, -0.01128387451171875, 0.058685302734375, 0.0721435546875, -0.01824951171875, -0.0256805419921875, -0.0249176025390625, -0.020965576171875, -0.034698486328125, 0.0341796875, -0.034698486328125, -0.0789794921875, 0.03289794921875, -0.0162811279296875, 0.0216522216796875, 0.050445556640625, 0.01505279541015625, 0.0009832382202148438, 0.048187255859375, 0.077880859375, -0.028839111328125, 0.055999755859375, -0.0245819091796875, 0.0141143798828125, -0.07080078125, -0.0097198486328125, -0.056304931640625, -0.0245819091796875, -0.04638671875, -0.0246734619140625, 0.005825042724609375, 0.024505615234375, -0.01519012451171875, 0.037200927734375, -0.0266876220703125, 0.02545166015625, 0.0158233642578125, 0.01153564453125, 0.0294647216796875, 0.006381988525390625, 0.00006985664367675781, -0.0145111083984375, -0.06427001953125, -0.03570556640625, 0.08544921875, 0.0300140380859375, 0.0364990234375, 0.003917694091796875, 0.044219970703125, 0.0307464599609375, 0.0262603759765625, -0.0596923828125, 0.03778076171875, -0.0300140380859375, -0.015594482421875, -0.02545166015625, -0.033294677734375, -0.08489990234375, 0.01904296875, -0.03228759765625, -0.053802490234375, -0.00783538818359375, 0.00485992431640625, -0.014068603515625, 0.0298919677734375, -0.060577392578125, 0.07916259765625, 0.0017528533935546875, -0.01090240478515625, -0.004642486572265625, -0.058746337890625, 0.01548004150390625, -0.011962890625, 0.0007224082946777344, 0.0023193359375, -0.00287628173828125, 0.0611572265625, -0.01203155517578125, 0.061370849609375, -0.01690673828125, -0.0095977783203125, -0.000858306884765625, -0.0022602081298828125, 0.042724609375, -0.029388427734375, -0.006938934326171875, 0.0215911865234375, -0.0007586479187011719, -0.036956787109375, -0.03729248046875, 0.03717041015625, -0.066162109375, -0.02801513671875, -0.0250396728515625, -0.02508544921875, -0.00917816162109375, 0.0252838134765625, 0.029083251953125, 0.01568603515625, -0.017547607421875, 0.02154541015625, 
0.0557861328125, -0.0208740234375, 0.04595947265625, 0.03729248046875, -0.0250396728515625, -0.011016845703125, 0.057159423828125, -0.003673553466796875, 0.038482666015625, 0.04681396484375, 0.01025390625, -0.0250091552734375, -0.0562744140625, -0.03887939453125, 0.01898193359375, -0.046661376953125, -0.009185791015625, -0.058135986328125, -0.031097412109375, -0.042755126953125, -0.006439208984375, -0.029083251953125, -0.026031494140625, -0.042755126953125, -0.0148162841796875, 0.0204315185546875, 0.043121337890625, 0.0084686279296875, 0.0200347900390625, -0.07342529296875, 0.01264190673828125, 0.0020427703857421875, 0.0173492431640625, -0.006641387939453125, -0.08551025390625, -0.01548004150390625, 0.0009055137634277344, -0.0234222412109375, -0.048828125, 0.032012939453125, 0.0401611328125, 0.034912109375, 0.00899505615234375, 0.00493621826171875, 0.03912353515625, -0.0284271240234375, 0.06103515625, 0.0127716064453125, -0.081787109375, 0.032196044921875, -0.0160980224609375, 0.036468505859375, 0.055633544921875, 0.0367431640625, -0.0445556640625, -0.00899505615234375, -0.042755126953125, -0.046112060546875, 0.06396484375, 0.0027675628662109375, -0.005580902099609375, 0.033538818359375, 0.0304718017578125, 0.0245819091796875, -0.004917144775390625, -0.0689697265625, -0.01068878173828125, -0.01142120361328125, -0.018585205078125, -0.019439697265625, 0.001201629638671875, 0.0268402099609375, -0.02862548828125, 0.04595947265625, -0.0203704833984375, 0.0258026123046875, 0.026123046875, -0.040740966796875, 0.008270263671875, 0.0220489501953125, 0.05279541015625, 0.054962158203125, -0.0167694091796875, -0.0040130615234375, 0.039398193359375, -0.04461669921875, -0.003101348876953125, 0.01284027099609375, -0.014617919921875, -0.0040740966796875, 0.0340576171875, 0.07696533203125, 0.023590087890625, -0.02398681640625, 0.0323486328125, -0.00846099853515625, -0.040618896484375, -0.00917816162109375, 0.007965087890625, -0.004253387451171875, -0.004123687744140625, 
0.0235595703125, 0.012542724609375, 0.01387786865234375, -0.03240966796875, 0.00494384765625, 0.01058197021484375, -0.040008544921875, -0.0465087890625, 0.04254150390625, 0.029266357421875, -0.0193939208984375, 0.051483154296875, -0.0182342529296875, -0.041168212890625, 0.0275726318359375, 0.04754638671875, 0.0828857421875, -0.0158233642578125, 0.02325439453125, 0.05218505859375, 0.0299224853515625, -0.014007568359375, -0.0017032623291015625, -0.0145111083984375, -0.0645751953125, -0.062255859375, -0.0426025390625, -0.031707763671875, 0.0143280029296875, -0.04571533203125, 0.03643798828125, -0.032012939453125, 0.01496124267578125, -0.007144927978515625, 0.01284027099609375, -0.05767822265625, 0.0121002197265625, 0.006435394287109375, 0.07781982421875, -0.056365966796875, 0.08087158203125, 0.05609130859375, -0.0167694091796875, -0.061309814453125, 0.0027217864990234375, -0.02313232421875, -0.061187744140625, 0.0261688232421875, 0.0154876708984375, -0.00858306884765625, 0.021026611328125, -0.043243408203125, -0.06842041015625, 0.0963134765625, 0.037078857421875, -0.03924560546875, -0.0186614990234375, 0.00514984130859375, 0.041290283203125, -0.0248565673828125, 0.01157379150390625, 0.049468994140625, 0.03045654296875, 0.0154571533203125, -0.10009765625, 0.021514892578125, -0.014068603515625, -0.004779815673828125, 0.01690673828125, -0.044769287109375, 0.04656982421875, -0.0278472900390625, -0.021881103515625, 0.00035119056701660156, 0.049713134765625, 0.005558013916015625, 0.01284027099609375, 0.043121337890625, 0.056854248046875, 0.06048583984375, -0.01261138916015625, 0.0811767578125, -0.0164031982421875, 0.0304412841796875, 0.0791015625, -0.0010385513305664062, 0.059295654296875, 0.030914306640625, -0.017852783203125, 0.039642333984375, 0.04656982421875, 0.00794219970703125, 0.045196533203125, 0.00449371337890625, -0.007686614990234375, -0.0084381103515625, 0.0037136077880859375, -0.036590576171875, 0.0191497802734375, 0.0182342529296875, -0.0258331298828125, 
-0.0294647216796875, 0.0029621124267578125, 0.014892578125, -0.00888824462890625, -0.020599365234375, 0.08099365234375, 0.01387786865234375, -0.053131103515625, 0.05413818359375, -0.0008792877197265625, 0.0667724609375, -0.044525146484375, 0.0053863525390625, -0.0257110595703125, 0.01541900634765625, -0.0144195556640625, -0.050628662109375, 0.0251617431640625, -0.008544921875, -0.0146026611328125, -0.049835205078125, 0.07275390625, -0.0281524658203125, -0.01953125, 0.0294647216796875, 0.0379638671875, 0.0162506103515625, -0.002593994140625, -0.0599365234375, -0.0063934326171875, 0.0208282470703125, -0.01155853271484375, 0.03961181640625, 0.036651611328125, 0.004241943359375, 0.049285888671875, 0.044342041015625, 0.003772735595703125, 0.006420135498046875, 0.010406494140625, 0.0531005859375, -0.06512451171875, -0.035919189453125, -0.048614501953125, 0.038360595703125, 0.002185821533203125, -0.03912353515625, 0.04693603515625, 0.02508544921875, 0.08111572265625, -0.0108489990234375, 0.056365966796875, -0.0010251998901367188, 0.047454833984375, -0.05072021484375, 0.044708251953125, -0.037017822265625, 0.0160369873046875, -0.024993896484375, -0.07025146484375, -0.02935791015625, 0.0401611328125, -0.0215301513671875, 0.0187835693359375, 0.0672607421875, 0.045135498046875, -0.00023376941680908203, -0.006500244140625, 0.0196533203125, 0.0110626220703125, 0.033660888671875, 0.061737060546875, 0.042816162109375, -0.06103515625, 0.056793212890625, -0.01291656494140625, -0.00867462158203125, -0.00872039794921875, -0.08245849609375, -0.06402587890625, -0.052337646484375, -0.03082275390625, -0.021087646484375, 0.0026378631591796875, 0.05670166015625, 0.052886962890625, -0.0557861328125, -0.006092071533203125, -0.0181732177734375, 0.0005950927734375, -0.01345062255859375, -0.0161895751953125, 0.024749755859375, -0.0538330078125, -0.05767822265625, 0.015838623046875, 0.0022602081298828125, 0.0021572113037109375, 0.00868988037109375, -0.01386260986328125, -0.019927978515625, 
-0.0300445556640625, 0.04608154296875, 0.0266265869140625, -0.0323486328125, -0.02764892578125, 0.00958251953125, -0.004253387451171875, 0.016510009765625, 0.044921875, -0.0662841796875, 0.019989013671875, 0.0321044921875, 0.07244873046875, 0.055511474609375, -0.01424407958984375, 0.045440673828125, -0.04364013671875, -0.00542449951171875, 0.01177215576171875, 0.00966644287109375, 0.0276336669921875, -0.00907135009765625, 0.046875, 0.017242431640625, -0.045989990234375, -0.046600341796875, -0.00659942626953125, -0.0980224609375, -0.01313018798828125, 0.085205078125, -0.0110626220703125, -0.01129913330078125, 0.0024890899658203125, -0.017852783203125, 0.0234832763671875, -0.0278167724609375, 0.064453125, 0.06231689453125, 0.01373291015625, -0.03839111328125, -0.032135009765625, 0.055877685546875, 0.0472412109375, -0.080322265625, -0.0189361572265625, 0.0142822265625, 0.036834716796875, 0.00283050537109375, 0.043121337890625, -0.0074462890625, 0.017303466796875, -0.040008544921875, 0.01421356201171875, 0.0020809173583984375, -0.0217742919921875, -0.05419921875, 0.0056610107421875, -0.00018584728240966797, -0.0177764892578125 ] ]
IkariDev/Athena-v3
2023-10-03T14:39:22.000Z
[ "transformers", "pytorch", "llama", "text-generation", "license:cc-by-nc-4.0", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
IkariDev
null
null
IkariDev/Athena-v3
10
8,144
transformers
2023-09-26T18:59:34
--- license: cc-by-nc-4.0 --- ![image/png](https://cdn-uploads.huggingface.co/production/uploads/630dfb008df86f1e5becadc3/LjO8no5EzagA9qWdtYKxG.png) Experimental Athena v3 model. Use Alpaca format. Suitable for RP, ERP and general stuff. <!-- description start --> ## Description <!-- [Recommended settings - contributed by localfultonextractor](https://files.catbox.moe/ue0tja.json) --> This repo contains fp16 files of Athena-V3. [GGUF - By TheBloke](https://huggingface.co/TheBloke/Athena-v3-GGUF) [GPTQ - By TheBloke](https://huggingface.co/TheBloke/Athena-v3-GPTQ) <!-- [exl2 - by AzureBlack](https://huggingface.co/AzureBlack/Athena-v2-6.0bit-exl2) --> [AWQ - By TheBloke](https://huggingface.co/TheBloke/Athena-v3-AWQ) [fp16 - by IkariDev+Undi95](https://huggingface.co/IkariDev/Athena-v3) <!-- [GGUF - by IkariDev+Undi95](https://huggingface.co/IkariDev/Athena-v3-GGUF) --> [OLD(GGUF - by IkariDev+Undi95)](https://huggingface.co/IkariDev/Athena-v3-GGUF) ## Ratings: Note: I have permission of all users to upload their ratings, i DONT screenshot random reviews without asking if i can put them here! https://snombler.neocities.org/logs#athenav3 <!-- description end --> <!-- description start --> ## Models and loras used - Athena-v2 - migtissera/Synthia-13B-v1.2 - The-Face-Of-Goonery/Huginn-13b-FP16 - PygmalionAI/pygmalion-2-13b - The-Face-Of-Goonery/LegerDemain-FP16 - chargoddard/storytime-13b - lemonilia/LimaRP-Llama2-13B-v3-EXPERIMENT - zattio770/120-Days-of-LORA-v2-13B ``` Loras: [lemonilia/LimaRP-Llama2-13B-v3-EXPERIMENT(0.65) + zattio770/120-Days-of-LORA-v2-13B(0.35)](0.3) to the final model + [Athena-v2(0.70) + migtissera/Synthia-13B-v1.2(0.3)](0.5) + [The-Face-Of-Goonery/Huginn-13b-FP16(0.85) + PygmalionAI/pygmalion-2-13b](0.15)](0.40) + [The-Face-Of-Goonery/LegerDemain-FP16(0.3) chargoddard/storytime-13b(0.7)](0.10) ``` <!-- description end --> <!-- prompt-template start --> ## Prompt template: Alpaca ``` Below is an instruction that describes a task. 
Write a response that appropriately completes the request. ### Instruction: {prompt} ### Response: ``` HUGE thanks to [Undi95](https://huggingface.co/Undi95) for doing the merging (Recipe was my idea, he merged) To TheBloke: please if you quant this, please include [IkariDev](https://huggingface.co/IkariDev) + [Undi95](https://huggingface.co/Undi95) in all the credits/links to the creator.
2,400
[ [ -0.0614013671875, -0.05181884765625, 0.0396728515625, 0.02374267578125, -0.0418701171875, -0.036285400390625, 0.0322265625, -0.059783935546875, 0.05694580078125, 0.0430908203125, -0.05511474609375, -0.0282135009765625, -0.043365478515625, 0.00786590576171875, -0.0108795166015625, 0.07537841796875, -0.00904083251953125, -0.0034732818603515625, 0.0230712890625, -0.021514892578125, -0.01511383056640625, -0.01023101806640625, -0.0543212890625, -0.03485107421875, 0.043853759765625, 0.004589080810546875, 0.04461669921875, 0.045135498046875, 0.023040771484375, 0.0266876220703125, -0.0270843505859375, 0.00860595703125, -0.010040283203125, -0.01335906982421875, 0.0022907257080078125, -0.004360198974609375, -0.07525634765625, 0.0110321044921875, 0.041900634765625, 0.040557861328125, -0.01131439208984375, 0.0300750732421875, 0.017913818359375, 0.053436279296875, -0.036590576171875, 0.02679443359375, -0.0007314682006835938, 0.0166778564453125, -0.01215362548828125, 0.0177764892578125, -0.007137298583984375, -0.03643798828125, 0.0072021484375, -0.0802001953125, -0.01032257080078125, 0.0177459716796875, 0.08428955078125, 0.01023101806640625, -0.0262451171875, -0.0060882568359375, -0.033111572265625, 0.06927490234375, -0.067138671875, 0.008758544921875, 0.024078369140625, 0.0213165283203125, -0.0253448486328125, -0.057098388671875, -0.056793212890625, 0.0024929046630859375, 0.0032672882080078125, 0.02239990234375, -0.0418701171875, -0.025848388671875, 0.0188446044921875, 0.041473388671875, -0.011077880859375, -0.0022220611572265625, -0.0382080078125, -0.01351165771484375, 0.023956298828125, 0.013702392578125, 0.035003662109375, -0.0174713134765625, -0.0247039794921875, -0.03558349609375, -0.03692626953125, 0.00748443603515625, 0.03021240234375, 0.0229339599609375, -0.04876708984375, 0.07525634765625, -0.021240234375, 0.039520263671875, 0.0278167724609375, -0.0179290771484375, 0.0300445556640625, -0.01177978515625, -0.02734375, -0.017333984375, 0.07373046875, 0.040771484375, 
-0.02447509765625, 0.026611328125, 0.00650787353515625, 0.005298614501953125, 0.0123748779296875, -0.069091796875, 0.012542724609375, 0.0168914794921875, -0.0567626953125, -0.03424072265625, -0.00783538818359375, -0.073486328125, -0.0137481689453125, -0.00650787353515625, 0.0379638671875, -0.04534912109375, -0.017608642578125, 0.0167236328125, 0.01459503173828125, 0.040985107421875, 0.034271240234375, -0.05548095703125, 0.04498291015625, 0.039154052734375, 0.048797607421875, 0.01708984375, -0.02142333984375, -0.0305938720703125, 0.0122833251953125, -0.0249786376953125, 0.04681396484375, 0.0005359649658203125, -0.035736083984375, -0.0239105224609375, 0.016754150390625, 0.0016231536865234375, -0.022613525390625, 0.07281494140625, -0.01026153564453125, 0.036163330078125, -0.0372314453125, -0.0312042236328125, -0.0411376953125, 0.0037631988525390625, -0.07342529296875, 0.071044921875, 0.0215911865234375, -0.08026123046875, 0.015777587890625, -0.052215576171875, -0.00348663330078125, -0.008880615234375, 0.00821685791015625, -0.045928955078125, 0.005645751953125, 0.0262451171875, 0.0220947265625, -0.026641845703125, -0.038421630859375, -0.032379150390625, -0.019500732421875, 0.009735107421875, 0.0221405029296875, 0.062042236328125, 0.03631591796875, -0.0389404296875, -0.005504608154296875, -0.06488037109375, 0.0005388259887695312, 0.0396728515625, -0.022857666015625, -0.009307861328125, -0.013580322265625, -0.0037078857421875, -0.001983642578125, 0.0239410400390625, -0.04010009765625, 0.0271148681640625, -0.0181732177734375, 0.0161895751953125, 0.046600341796875, -0.007568359375, 0.025238037109375, -0.056304931640625, 0.04052734375, -0.0072021484375, 0.02734375, 0.0061187744140625, -0.06475830078125, -0.0728759765625, -0.028045654296875, 0.001605987548828125, 0.02362060546875, -0.04010009765625, 0.03173828125, 0.01541900634765625, -0.05902099609375, -0.042144775390625, -0.016937255859375, 0.039398193359375, 0.052581787109375, 0.02471923828125, -0.0390625, 
-0.045928955078125, -0.07568359375, 0.0084228515625, -0.0236358642578125, 0.001434326171875, 0.039520263671875, 0.040985107421875, -0.026580810546875, 0.037689208984375, -0.04052734375, -0.017242431640625, -0.0216522216796875, 0.009674072265625, 0.032928466796875, 0.044464111328125, 0.0643310546875, -0.0215911865234375, -0.0116729736328125, -0.0046234130859375, -0.056793212890625, -0.01280975341796875, 0.04095458984375, -0.018524169921875, 0.0221710205078125, -0.00997161865234375, -0.06866455078125, 0.036712646484375, 0.042022705078125, -0.048370361328125, 0.05322265625, -0.0245819091796875, 0.039886474609375, -0.0989990234375, 0.0275726318359375, 0.0037937164306640625, -0.013641357421875, -0.0390625, 0.034881591796875, -0.0017499923706054688, -0.01218414306640625, -0.048126220703125, 0.0621337890625, -0.038360595703125, -0.0244140625, -0.006504058837890625, -0.01042938232421875, 0.0106201171875, 0.03643798828125, -0.0083465576171875, 0.03326416015625, 0.04302978515625, -0.0367431640625, 0.03411865234375, 0.0478515625, -0.0307769775390625, 0.0513916015625, -0.06884765625, 0.016387939453125, 0.007175445556640625, 0.03265380859375, -0.0350341796875, -0.02923583984375, 0.060302734375, -0.0262908935546875, 0.002849578857421875, -0.016387939453125, -0.0250091552734375, -0.0330810546875, -0.039306640625, 0.03759765625, 0.056243896484375, -0.0307159423828125, 0.064697265625, 0.02142333984375, -0.01019287109375, -0.03814697265625, -0.050811767578125, -0.0170440673828125, -0.0423583984375, -0.0379638671875, 0.028350830078125, -0.006534576416015625, -0.02593994140625, 0.000675201416015625, 0.005283355712890625, 0.00171661376953125, -0.022247314453125, 0.0305633544921875, 0.041534423828125, -0.01445770263671875, -0.044403076171875, -0.0030040740966796875, -0.014404296875, -0.011260986328125, -0.001483917236328125, 0.0487060546875, -0.0285797119140625, -0.02850341796875, -0.06585693359375, 0.0411376953125, 0.06500244140625, -0.0027446746826171875, 0.05743408203125, 
0.045806884765625, -0.041259765625, 0.0284576416015625, -0.0556640625, -0.00197601318359375, -0.0300750732421875, -0.0084991455078125, -0.035400390625, -0.046234130859375, 0.0726318359375, 0.029327392578125, 0.0027294158935546875, 0.06231689453125, 0.027374267578125, 0.00142669677734375, 0.085693359375, 0.048370361328125, -0.0196533203125, 0.022613525390625, -0.042999267578125, -0.0030422210693359375, -0.087890625, -0.03289794921875, -0.04351806640625, -0.01396942138671875, -0.037750244140625, -0.023651123046875, 0.031951904296875, 0.018951416015625, -0.029052734375, 0.031982421875, -0.038116455078125, 0.0011587142944335938, 0.03582763671875, 0.02191162109375, 0.010955810546875, 0.00379180908203125, -0.007114410400390625, -0.00551605224609375, -0.016265869140625, -0.0251922607421875, 0.06378173828125, 0.04461669921875, 0.047393798828125, 0.02056884765625, 0.05902099609375, -0.0063323974609375, -0.0146331787109375, -0.0361328125, 0.056243896484375, -0.0028629302978515625, -0.0290679931640625, -0.0066375732421875, -0.0220184326171875, -0.07122802734375, 0.0232696533203125, -0.019500732421875, -0.060791015625, 0.01268768310546875, 0.006000518798828125, -0.0400390625, 0.026824951171875, -0.0288848876953125, 0.0550537109375, 0.0123443603515625, -0.022308349609375, -0.01361083984375, -0.0255584716796875, 0.033843994140625, 0.01385498046875, 0.021636962890625, -0.007415771484375, -0.00946807861328125, 0.068603515625, -0.075927734375, 0.043853759765625, -0.0062713623046875, -0.0194244384765625, 0.038787841796875, 0.009368896484375, 0.050079345703125, -0.0024662017822265625, -0.0016641616821289062, -0.007720947265625, -0.0036182403564453125, -0.037139892578125, -0.0205841064453125, 0.061920166015625, -0.063232421875, -0.0220184326171875, -0.048126220703125, -0.0272979736328125, 0.00833892822265625, 0.01108551025390625, 0.032562255859375, 0.044677734375, 0.001491546630859375, 0.00035834312438964844, 0.047393798828125, -0.021484375, 0.028045654296875, 0.0261077880859375, 
-0.035003662109375, -0.04833984375, 0.043609619140625, -0.01270294189453125, 0.0086669921875, -0.0047454833984375, 0.01515960693359375, -0.01314544677734375, -0.02606201171875, -0.047515869140625, 0.043792724609375, -0.034271240234375, -0.006893157958984375, -0.048583984375, -0.0093536376953125, -0.036407470703125, 0.0021839141845703125, -0.040191650390625, -0.05194091796875, -0.0391845703125, 0.00673675537109375, 0.055694580078125, 0.056884765625, -0.0152740478515625, 0.0164794921875, -0.0438232421875, 0.031951904296875, 0.034759521484375, 0.0209197998046875, -0.0087738037109375, -0.04388427734375, 0.004241943359375, 0.00649261474609375, -0.021514892578125, -0.08447265625, 0.0416259765625, -0.0116119384765625, 0.0264739990234375, 0.0196380615234375, -0.0238037109375, 0.058990478515625, -0.0247802734375, 0.0628662109375, 0.032196044921875, -0.0574951171875, 0.050079345703125, -0.0538330078125, 0.0286102294921875, 0.02716064453125, 0.0207061767578125, -0.01275634765625, -0.0207672119140625, -0.052276611328125, -0.07049560546875, 0.05792236328125, 0.031280517578125, -0.001194000244140625, 0.01203155517578125, 0.043731689453125, 0.00765228271484375, -0.0004780292510986328, -0.07122802734375, -0.0294647216796875, -0.02630615234375, 0.002079010009765625, 0.0172119140625, -0.02001953125, -0.0110321044921875, -0.0258026123046875, 0.07025146484375, -0.00312042236328125, 0.032989501953125, 0.0190887451171875, 0.0253753662109375, -0.0252532958984375, 0.0061187744140625, 0.03851318359375, 0.0279693603515625, -0.03662109375, -0.0160980224609375, 0.028961181640625, -0.043365478515625, -0.00444793701171875, 0.0259552001953125, -0.01220703125, -0.01263427734375, 0.013763427734375, 0.05328369140625, 0.0131988525390625, -0.028228759765625, 0.017822265625, -0.0257415771484375, -0.0032405853271484375, -0.0168609619140625, 0.0018711090087890625, 0.01457977294921875, 0.025177001953125, 0.0300140380859375, -0.005779266357421875, 0.0240020751953125, -0.061798095703125, -0.0137939453125, 
0.0301055908203125, -0.01187896728515625, -0.0197296142578125, 0.052490234375, -0.006198883056640625, -0.00736236572265625, 0.0218658447265625, -0.0273284912109375, -0.0220794677734375, 0.050262451171875, 0.04296875, 0.058319091796875, -0.033355712890625, 0.01081085205078125, 0.042999267578125, 0.0059661865234375, -0.0101470947265625, 0.044830322265625, 0.006381988525390625, -0.0323486328125, -0.0183563232421875, -0.06756591796875, -0.034637451171875, 0.04052734375, -0.05084228515625, 0.024139404296875, -0.046539306640625, -0.0160980224609375, 0.0032596588134765625, 0.024444580078125, -0.038848876953125, 0.028594970703125, -0.008575439453125, 0.06658935546875, -0.054931640625, 0.04327392578125, 0.051910400390625, -0.033294677734375, -0.07891845703125, -0.0311431884765625, 0.00910186767578125, -0.055206298828125, 0.0160980224609375, 0.0024013519287109375, -0.01064300537109375, -0.0225372314453125, -0.041473388671875, -0.06597900390625, 0.09173583984375, 0.01617431640625, -0.02606201171875, 0.009033203125, -0.015380859375, 0.02874755859375, -0.04351806640625, 0.02362060546875, 0.032379150390625, 0.032073974609375, 0.022491455078125, -0.08184814453125, 0.0246124267578125, -0.0447998046875, 0.0034503936767578125, -0.0008058547973632812, -0.0589599609375, 0.08416748046875, -0.01305389404296875, 0.005725860595703125, 0.061126708984375, 0.055999755859375, 0.0550537109375, 0.01300048828125, 0.043304443359375, 0.082275390625, 0.0174407958984375, -0.00968170166015625, 0.08984375, -0.00396728515625, 0.0233001708984375, 0.054107666015625, -0.022735595703125, 0.047576904296875, 0.021759033203125, -0.0244140625, 0.0439453125, 0.058929443359375, 0.0013904571533203125, 0.034637451171875, -0.002498626708984375, -0.030853271484375, 0.0231170654296875, -0.0093231201171875, -0.060577392578125, 0.0220794677734375, 0.01023101806640625, -0.0108184814453125, 0.0001081228256225586, -0.0087738037109375, 0.035308837890625, -0.01708984375, -0.0249786376953125, 0.042724609375, 
0.0089569091796875, -0.045928955078125, 0.03900146484375, 0.006961822509765625, 0.04193115234375, -0.044830322265625, -0.002613067626953125, -0.02362060546875, 0.01200103759765625, -0.0098724365234375, -0.047149658203125, 0.0163421630859375, -0.0005536079406738281, 0.00205230712890625, -0.0009136199951171875, 0.045196533203125, 0.004352569580078125, -0.051025390625, 0.028472900390625, 0.0186309814453125, 0.029449462890625, 0.019073486328125, -0.06256103515625, 0.040313720703125, -0.00644683837890625, -0.038116455078125, 0.0240478515625, 0.01445770263671875, 0.0188446044921875, 0.0494384765625, 0.029632568359375, 0.0064239501953125, -0.006252288818359375, 0.0007662773132324219, 0.070556640625, -0.037811279296875, -0.0302734375, -0.041168212890625, 0.039642333984375, 0.00019824504852294922, -0.0394287109375, 0.0511474609375, 0.0423583984375, 0.0297393798828125, -0.00998687744140625, 0.031036376953125, -0.0180511474609375, 0.0472412109375, -0.051025390625, 0.048187255859375, -0.05767822265625, -0.0032367706298828125, -0.04644775390625, -0.08544921875, 0.0014600753784179688, 0.06170654296875, 0.00809478759765625, 0.019073486328125, 0.033294677734375, 0.0374755859375, -0.01445770263671875, 0.002719879150390625, -0.016143798828125, 0.0269927978515625, 0.01239776611328125, 0.046844482421875, 0.05718994140625, -0.05328369140625, 0.01593017578125, -0.039764404296875, -0.02850341796875, -0.0158843994140625, -0.0758056640625, -0.04681396484375, -0.04083251953125, -0.03265380859375, -0.0396728515625, -0.00839996337890625, 0.04815673828125, 0.05810546875, -0.052337646484375, -0.021209716796875, 0.005817413330078125, -0.0152740478515625, -0.0157012939453125, -0.0136260986328125, 0.0267181396484375, 0.0262908935546875, -0.0672607421875, 0.0244140625, 0.0037555694580078125, 0.0535888671875, -0.006160736083984375, -0.023345947265625, -0.0010089874267578125, 0.01354217529296875, 0.0240020751953125, 0.042724609375, -0.052276611328125, -0.0246124267578125, -0.0254058837890625, 
0.0036373138427734375, -0.011810302734375, 0.023956298828125, -0.047515869140625, -0.00646209716796875, 0.0291290283203125, -0.00914764404296875, 0.062286376953125, -0.01474761962890625, 0.0215911865234375, -0.01503753662109375, 0.0293426513671875, -0.0006799697875976562, 0.068359375, 0.0177764892578125, -0.0178070068359375, 0.041900634765625, -0.0006961822509765625, -0.040557861328125, -0.058929443359375, 0.01459503173828125, -0.11083984375, -0.0015468597412109375, 0.055572509765625, -0.0053253173828125, -0.03851318359375, 0.038177490234375, -0.04083251953125, 0.0291290283203125, -0.01947021484375, 0.06243896484375, 0.03680419921875, -0.0231475830078125, -0.0178070068359375, -0.040771484375, 0.03070068359375, 0.035003662109375, -0.07684326171875, -0.0208587646484375, 0.044036865234375, 0.0235595703125, 0.044036865234375, 0.04913330078125, -0.046295166015625, 0.029937744140625, -0.01239776611328125, 0.01220703125, -0.0012922286987304688, 0.0064544677734375, -0.0227508544921875, -0.002300262451171875, -0.0032367706298828125, -0.0276336669921875 ] ]
google/long-t5-local-base
2023-09-07T15:36:37.000Z
[ "transformers", "pytorch", "jax", "safetensors", "longt5", "text2text-generation", "en", "arxiv:2112.07916", "arxiv:1912.08777", "arxiv:1910.10683", "license:apache-2.0", "autotrain_compatible", "endpoints_compatible", "has_space", "region:us" ]
text2text-generation
google
null
null
google/long-t5-local-base
13
8,141
transformers
2022-04-14T16:53:51
--- license: apache-2.0 language: en --- # LongT5 (local attention, base-sized model) LongT5 model pre-trained on English language. The model was introduced in the paper [LongT5: Efficient Text-To-Text Transformer for Long Sequences](https://arxiv.org/pdf/2112.07916.pdf) by Guo et al. and first released in [the LongT5 repository](https://github.com/google-research/longt5). All the model architecture and configuration can be found in [Flaxformer repository](https://github.com/google/flaxformer) which uses another Google research project repository [T5x](https://github.com/google-research/t5x). Disclaimer: The team releasing LongT5 did not write a model card for this model so this model card has been written by the Hugging Face team. ## Model description LongT5 model is an encoder-decoder transformer pre-trained in a text-to-text denoising generative setting ([Pegasus-like generation pre-training](https://arxiv.org/pdf/1912.08777.pdf)). LongT5 model is an extension of [T5 model](https://arxiv.org/pdf/1910.10683.pdf), and it enables using one of the two different efficient attention mechanisms - (1) Local attention, or (2) Transient-Global attention. The usage of attention sparsity patterns allows the model to efficiently handle input sequence. LongT5 is particularly effective when fine-tuned for text generation (summarization, question answering) which requires handling long input sequences (up to 16,384 tokens). ## Intended uses & limitations The model is mostly meant to be fine-tuned on a supervised dataset. See the [model hub](https://huggingface.co/models?search=longt5) to look for fine-tuned versions on a task that interests you. 
### How to use ```python from transformers import AutoTokenizer, LongT5Model tokenizer = AutoTokenizer.from_pretrained("google/long-t5-local-base") model = LongT5Model.from_pretrained("google/long-t5-local-base") inputs = tokenizer("Hello, my dog is cute", return_tensors="pt") outputs = model(**inputs) last_hidden_states = outputs.last_hidden_state ``` ### BibTeX entry and citation info ```bibtex @article{guo2021longt5, title={LongT5: Efficient Text-To-Text Transformer for Long Sequences}, author={Guo, Mandy and Ainslie, Joshua and Uthus, David and Ontanon, Santiago and Ni, Jianmo and Sung, Yun-Hsuan and Yang, Yinfei}, journal={arXiv preprint arXiv:2112.07916}, year={2021} } ```
2,371
[ [ -0.037933349609375, -0.046417236328125, 0.032501220703125, 0.0252685546875, -0.019622802734375, -0.01485443115234375, -0.024658203125, -0.047821044921875, 0.00608062744140625, 0.025604248046875, -0.037933349609375, -0.036102294921875, -0.0469970703125, 0.03582763671875, -0.03204345703125, 0.08984375, -0.0178070068359375, -0.023712158203125, -0.0011301040649414062, -0.01165771484375, -0.0167999267578125, -0.042816162109375, -0.04541015625, -0.028045654296875, 0.05401611328125, 0.0016469955444335938, 0.0289154052734375, 0.039215087890625, 0.050567626953125, 0.0217437744140625, -0.0180511474609375, -0.006011962890625, -0.04510498046875, -0.0240478515625, -0.00362396240234375, -0.028167724609375, -0.05596923828125, -0.0098114013671875, 0.052032470703125, 0.031219482421875, 0.01226043701171875, 0.0182037353515625, 0.000995635986328125, 0.034149169921875, -0.0301666259765625, 0.01258087158203125, -0.011322021484375, 0.0046844482421875, -0.0193939208984375, 0.0126800537109375, -0.0296173095703125, -0.0229339599609375, 0.010955810546875, -0.03167724609375, 0.031982421875, -0.01207733154296875, 0.0816650390625, 0.0181427001953125, -0.04034423828125, -0.01393890380859375, -0.05316162109375, 0.0660400390625, -0.0574951171875, 0.042938232421875, 0.0132598876953125, 0.0207672119140625, -0.00836181640625, -0.0908203125, -0.05438232421875, -0.007720947265625, -0.0162506103515625, 0.0240325927734375, -0.0128936767578125, 0.017608642578125, 0.03778076171875, 0.044677734375, -0.047149658203125, -0.0007891654968261719, -0.047698974609375, 0.0020885467529296875, 0.0389404296875, -0.01276397705078125, 0.01055908203125, -0.025177001953125, -0.045928955078125, 0.0013380050659179688, -0.038360595703125, 0.00949859619140625, 0.01092529296875, 0.00925445556640625, -0.03143310546875, 0.0226593017578125, -0.01000213623046875, 0.046600341796875, 0.031219482421875, -0.0135345458984375, 0.031524658203125, -0.033660888671875, -0.0244140625, -0.00897216796875, 0.07415771484375, 
0.00675201416015625, 0.020416259765625, -0.01263427734375, -0.0210723876953125, -0.0003306865692138672, 0.0188446044921875, -0.080078125, 0.00865936279296875, 0.019622802734375, -0.043060302734375, -0.032012939453125, -0.0030059814453125, -0.04461669921875, 0.0031795501708984375, -0.006168365478515625, 0.044769287109375, -0.033935546875, -0.01371002197265625, 0.007663726806640625, 0.0087890625, 0.02691650390625, 0.003002166748046875, -0.07061767578125, 0.0177764892578125, 0.031280517578125, 0.06402587890625, -0.0262908935546875, -0.031707763671875, -0.0152740478515625, 0.0071258544921875, -0.005764007568359375, 0.039398193359375, -0.0142669677734375, -0.02203369140625, -0.007091522216796875, 0.0254669189453125, 0.00017750263214111328, -0.0181732177734375, 0.0645751953125, -0.030120849609375, 0.05352783203125, -0.00457000732421875, -0.03741455078125, -0.0164794921875, 0.01020050048828125, -0.06024169921875, 0.08673095703125, 0.00960540771484375, -0.06982421875, 0.0208587646484375, -0.09100341796875, -0.021881103515625, -0.0128936767578125, 0.0185546875, -0.05279541015625, -0.007808685302734375, 0.034759521484375, 0.052764892578125, -0.016937255859375, 0.018707275390625, -0.0205230712890625, -0.0301513671875, -0.007122039794921875, -0.01812744140625, 0.051361083984375, 0.022918701171875, -0.033355712890625, 0.032196044921875, -0.05865478515625, -0.019073486328125, 0.02606201171875, -0.019287109375, -0.00423431396484375, -0.01381683349609375, 0.004245758056640625, 0.0222015380859375, 0.0258941650390625, -0.0374755859375, 0.0284271240234375, -0.043182373046875, 0.058990478515625, 0.035308837890625, -0.0128173828125, 0.03265380859375, -0.02362060546875, 0.026458740234375, 0.02679443359375, 0.01165008544921875, -0.03192138671875, -0.0142974853515625, -0.06536865234375, -0.0219573974609375, 0.0323486328125, 0.0262603759765625, -0.0472412109375, 0.0306549072265625, -0.0496826171875, -0.0389404296875, -0.041259765625, -0.0036163330078125, 0.0140533447265625, 
0.0301055908203125, 0.03857421875, -0.019805908203125, -0.03857421875, -0.061553955078125, -0.0152740478515625, 0.0188446044921875, 0.004611968994140625, 0.0005178451538085938, 0.052825927734375, -0.026458740234375, 0.07025146484375, -0.0186767578125, -0.0195159912109375, -0.03582763671875, 0.0193634033203125, 0.0298309326171875, 0.032440185546875, 0.04803466796875, -0.04931640625, -0.0333251953125, -0.01812744140625, -0.049102783203125, 0.00991058349609375, -0.01363372802734375, -0.000415802001953125, 0.037811279296875, 0.0246429443359375, -0.07147216796875, 0.033538818359375, 0.0312347412109375, -0.0187530517578125, 0.018096923828125, -0.00571441650390625, 0.0017375946044921875, -0.1251220703125, 0.023834228515625, 0.0037994384765625, -0.038726806640625, -0.046630859375, 0.0038204193115234375, 0.0185089111328125, -0.01515960693359375, -0.0352783203125, 0.056549072265625, -0.05841064453125, 0.0003314018249511719, -0.007781982421875, 0.0007424354553222656, -0.00818634033203125, 0.047332763671875, 0.011810302734375, 0.057952880859375, 0.0207061767578125, -0.040802001953125, 0.0298614501953125, 0.0164794921875, -0.01334381103515625, 0.0217742919921875, -0.0638427734375, 0.0283203125, -0.016204833984375, 0.0318603515625, -0.060882568359375, -0.02081298828125, 0.006198883056640625, -0.046234130859375, 0.0300445556640625, -0.00977325439453125, -0.03631591796875, -0.0526123046875, -0.023956298828125, 0.03912353515625, 0.049072265625, -0.0521240234375, 0.04339599609375, 0.0011835098266601562, -0.0014715194702148438, -0.04449462890625, -0.03985595703125, -0.0036296844482421875, -0.03228759765625, -0.058441162109375, 0.048309326171875, -0.008453369140625, 0.019775390625, -0.0192108154296875, 0.011688232421875, 0.0027980804443359375, -0.0152740478515625, 0.0078887939453125, 0.0006127357482910156, -0.0195159912109375, 0.004360198974609375, -0.017486572265625, -0.015472412109375, 0.0042877197265625, -0.01421356201171875, 0.042083740234375, -0.0132904052734375, 
0.0023632049560546875, -0.04010009765625, 0.029388427734375, 0.05908203125, -0.028167724609375, 0.044708251953125, 0.07745361328125, -0.0357666015625, -0.00649261474609375, -0.04254150390625, -0.021087646484375, -0.036895751953125, 0.039031982421875, -0.041748046875, -0.05950927734375, 0.0467529296875, 0.0022182464599609375, 0.01058197021484375, 0.04925537109375, 0.0469970703125, 0.0035400390625, 0.08209228515625, 0.06494140625, -0.017181396484375, 0.04461669921875, -0.0230712890625, 0.02532958984375, -0.051116943359375, 0.004589080810546875, -0.01415252685546875, -0.0189056396484375, -0.0533447265625, -0.02349853515625, 0.020355224609375, -0.0081329345703125, -0.0302581787109375, 0.025848388671875, -0.049713134765625, 0.01580810546875, 0.047515869140625, -0.0098419189453125, 0.003520965576171875, -0.002429962158203125, 0.00695037841796875, -0.00782012939453125, -0.041351318359375, -0.017578125, 0.07354736328125, 0.0517578125, 0.052276611328125, 0.00882720947265625, 0.06719970703125, -0.0025463104248046875, 0.00992584228515625, -0.061431884765625, 0.0290069580078125, -0.006664276123046875, -0.041748046875, -0.0177001953125, -0.020294189453125, -0.09326171875, -0.0104827880859375, -0.005523681640625, -0.0455322265625, -0.0025463104248046875, 0.0026721954345703125, -0.030059814453125, 0.01849365234375, -0.058502197265625, 0.07940673828125, -0.01107025146484375, -0.030059814453125, 0.004497528076171875, -0.058868408203125, 0.02960205078125, 0.00565338134765625, -0.00418853759765625, 0.0200653076171875, 0.0179443359375, 0.0626220703125, -0.0246734619140625, 0.07037353515625, -0.00234222412109375, -0.0107574462890625, 0.01375579833984375, -0.026458740234375, 0.054107666015625, -0.01532745361328125, 0.01800537109375, 0.0182952880859375, 0.00583648681640625, -0.037261962890625, -0.039093017578125, 0.04791259765625, -0.07452392578125, -0.034027099609375, -0.044525146484375, -0.0246734619140625, 0.000946044921875, 0.0426025390625, 0.037994384765625, 0.019805908203125, 
-0.00664520263671875, 0.02325439453125, 0.04559326171875, -0.00960540771484375, 0.060546875, 0.007358551025390625, -0.0223388671875, -0.0426025390625, 0.052001953125, 0.0159454345703125, 0.0187530517578125, 0.0304412841796875, 0.00591278076171875, -0.02777099609375, -0.02386474609375, -0.0254669189453125, 0.046051025390625, -0.03765869140625, -0.01430511474609375, -0.0438232421875, -0.037567138671875, -0.053741455078125, -0.00783538818359375, -0.03179931640625, -0.016448974609375, -0.0286407470703125, -0.0072479248046875, 0.0300140380859375, 0.05047607421875, 0.0160064697265625, 0.0286712646484375, -0.060882568359375, 0.037353515625, 0.014373779296875, 0.03662109375, -0.0133209228515625, -0.044036865234375, -0.0196075439453125, -0.004512786865234375, -0.0284271240234375, -0.06591796875, 0.04266357421875, 0.0070953369140625, 0.0258941650390625, 0.025634765625, -0.0035400390625, 0.053375244140625, -0.02801513671875, 0.0635986328125, 0.01222991943359375, -0.07086181640625, 0.0323486328125, -0.0386962890625, 0.0469970703125, 0.00495147705078125, 0.03411865234375, -0.041717529296875, -0.005168914794921875, -0.056549072265625, -0.07147216796875, 0.052734375, 0.0223846435546875, 0.0146636962890625, 0.005931854248046875, 0.02606201171875, 0.006336212158203125, 0.00762939453125, -0.09210205078125, -0.01446533203125, -0.0352783203125, -0.035400390625, -0.005462646484375, -0.036651611328125, 0.0033893585205078125, -0.01873779296875, 0.04461669921875, -0.01094818115234375, 0.06146240234375, 0.0308074951171875, -0.01788330078125, 0.00959014892578125, 0.0156707763671875, 0.0633544921875, 0.03485107421875, -0.020294189453125, -0.005527496337890625, 0.025390625, -0.042236328125, -0.0205535888671875, 0.0221405029296875, -0.01116180419921875, 0.0106201171875, 0.0294342041015625, 0.0838623046875, 0.004894256591796875, -0.007755279541015625, 0.04144287109375, 0.0021343231201171875, -0.0243377685546875, -0.047210693359375, -0.013397216796875, 0.01491546630859375, 0.0057373046875, 
0.015899658203125, -0.027191162109375, 0.0031833648681640625, -0.036407470703125, 0.0033855438232421875, 0.0103912353515625, -0.025054931640625, -0.037689208984375, 0.062255859375, 0.030120849609375, -0.0142669677734375, 0.035400390625, -0.0037288665771484375, -0.038482666015625, 0.035186767578125, 0.061004638671875, 0.0675048828125, -0.013580322265625, -0.0168609619140625, 0.04864501953125, 0.0095062255859375, -0.004665374755859375, 0.0254669189453125, -0.0047149658203125, -0.0364990234375, -0.035980224609375, -0.043304443359375, 0.0012054443359375, 0.038543701171875, -0.0389404296875, 0.043060302734375, -0.0239715576171875, -0.0277099609375, 0.0107879638671875, 0.018768310546875, -0.053375244140625, 0.030548095703125, 0.0215301513671875, 0.0687255859375, -0.035491943359375, 0.0716552734375, 0.048492431640625, -0.044189453125, -0.055816650390625, -0.00647735595703125, -0.028167724609375, -0.048431396484375, 0.048919677734375, 0.02716064453125, -0.0009737014770507812, 0.0155029296875, -0.048095703125, -0.08270263671875, 0.093017578125, 0.01029205322265625, -0.03753662109375, -0.0305023193359375, 0.01548004150390625, 0.03778076171875, -0.01551055908203125, 0.032196044921875, 0.0243682861328125, 0.0323486328125, 0.005184173583984375, -0.08856201171875, 0.0104827880859375, -0.0272064208984375, -0.00006812810897827148, 0.03106689453125, -0.0760498046875, 0.0653076171875, -0.0194091796875, -0.00714874267578125, 0.005046844482421875, 0.077880859375, 0.00286865234375, 0.0134124755859375, 0.024566650390625, 0.040374755859375, 0.03936767578125, -0.0105133056640625, 0.0673828125, -0.0307159423828125, 0.050445556640625, 0.0606689453125, 0.002429962158203125, 0.05438232421875, 0.035888671875, -0.00763702392578125, 0.03192138671875, 0.052215576171875, -0.0133209228515625, 0.034515380859375, -0.00228118896484375, -0.01085662841796875, -0.008331298828125, 0.0214080810546875, -0.03863525390625, 0.0283050537109375, 0.01087188720703125, -0.049346923828125, -0.0111541748046875, 
-0.0009531974792480469, 0.025177001953125, -0.0272979736328125, -0.01715087890625, 0.055938720703125, 0.0113067626953125, -0.053375244140625, 0.06744384765625, 0.015716552734375, 0.07470703125, -0.043182373046875, 0.005939483642578125, -0.02471923828125, 0.0261077880859375, -0.016204833984375, -0.043212890625, 0.018157958984375, 0.00930023193359375, -0.025604248046875, -0.0219879150390625, 0.054412841796875, -0.033416748046875, -0.04486083984375, 0.0133819580078125, 0.01203155517578125, 0.01390838623046875, 0.0078277587890625, -0.046234130859375, -0.0139923095703125, -0.00218963623046875, -0.04498291015625, 0.0195159912109375, 0.02972412109375, -0.02301025390625, 0.0482177734375, 0.044525146484375, -0.00821685791015625, 0.0021800994873046875, -0.0013570785522460938, 0.049163818359375, -0.07843017578125, -0.0452880859375, -0.061248779296875, 0.045501708984375, -0.00803375244140625, -0.034942626953125, 0.040740966796875, 0.042205810546875, 0.05560302734375, -0.0142669677734375, 0.06292724609375, -0.000247955322265625, 0.05072021484375, -0.032623291015625, 0.0662841796875, -0.0478515625, -0.0186614990234375, -0.01259613037109375, -0.06268310546875, -0.0239715576171875, 0.047607421875, -0.030517578125, 0.034759521484375, 0.055267333984375, 0.040740966796875, -0.0241546630859375, -0.01410675048828125, 0.0246429443359375, 0.0364990234375, 0.0273590087890625, 0.055267333984375, 0.0299072265625, -0.047637939453125, 0.03973388671875, -0.0220184326171875, 0.013580322265625, -0.01508331298828125, -0.0731201171875, -0.0859375, -0.046844482421875, -0.006641387939453125, -0.02874755859375, 0.0118255615234375, 0.07122802734375, 0.05169677734375, -0.05072021484375, -0.0010986328125, -0.0016603469848632812, -0.0034961700439453125, 0.006031036376953125, -0.01812744140625, 0.047210693359375, -0.022613525390625, -0.07586669921875, 0.0020771026611328125, -0.00522613525390625, 0.0271148681640625, -0.01171112060546875, 0.00140380859375, -0.00040149688720703125, 0.002735137939453125, 
0.055816650390625, 0.0211181640625, -0.053375244140625, -0.0311737060546875, 0.0174102783203125, -0.0174102783203125, 0.02386474609375, 0.02972412109375, -0.0567626953125, 0.01493072509765625, 0.03314208984375, 0.0330810546875, 0.059661865234375, 0.000701904296875, 0.04986572265625, -0.045654296875, 0.01535797119140625, 0.00968170166015625, 0.0260162353515625, 0.03021240234375, -0.0238800048828125, 0.0382080078125, 0.01824951171875, -0.0499267578125, -0.063232421875, 0.006343841552734375, -0.09124755859375, 0.004314422607421875, 0.095458984375, -0.00914764404296875, -0.03643798828125, 0.021575927734375, -0.01873779296875, 0.039581298828125, -0.02423095703125, 0.0631103515625, 0.060211181640625, 0.00215911865234375, -0.028717041015625, -0.041229248046875, 0.05401611328125, 0.024566650390625, -0.061553955078125, -0.01276397705078125, 0.01171875, 0.036346435546875, 0.0159912109375, 0.0377197265625, -0.0031833648681640625, 0.01493072509765625, -0.0220794677734375, 0.03460693359375, -0.004741668701171875, -0.006816864013671875, -0.023223876953125, 0.01157379150390625, -0.01190948486328125, -0.017791748046875 ] ]