# Provenance (Hugging Face Hub upload metadata, preserved as comments):
# uploaded by gsaltintas via huggingface_hub, commit 9d933c2 (verified)
# Config for super vocab built from tokenizers: flexitok/bpe_script_Arab_16000, flexitok/bpe_script_CmJp_16000, flexitok/bpe_ltr_ell_Grek_8000_v2, flexitok/bpe_ltr_fw_edu_32000_v2, flexitok/bpe_ltr_hun_Latn_8000_v2, flexitok/bpe_ltr_rus_Cyrl_16000_v2, flexitok/bpe_ltr_tur_Latn_8000_v2, flexitok/bpe_script_Germ_32000, flexitok/bpe_script_Roma_32000, flexitok/bpe_script_SEAS_16000, flexitok/bpe_script_Slav_16000
## Training superset tokenizer with individual tokenizers trained on Fineweb-2-hq
# use with apps/main/configs/flexitok/llama_1b_base.yaml
## TODO: add router details, make sure name is correct
model:
  # Size of the merged (superset) vocabulary; kept equal to data.tokenizer.n_words.
  vocab_size: 165022
# Run identifier and output directory for this training run.
name: script_1
dump_dir: /fsx/craffel/lingua_logs/script_1
data:
  tokenizer:
    name: supertokenizer
    # NOTE(review): in the flat original, seed sat between tokenizer keys —
    # assumed to belong to the tokenizer config; confirm against the schema.
    seed: 42
    superset_code_name: script_1
    # Total token count of the superset vocabulary (matches model.vocab_size).
    n_words: 165022
    # Constituent tokenizers; each one is loaded from the HF Hub and mapped
    # into the superset id space via its precomputed supermapping.
    tokenizers:
      - name: huggingface
        path: flexitok/bpe_script_Arab_16000
        load_supermapping: true
      - name: huggingface
        path: flexitok/bpe_script_CmJp_16000
        load_supermapping: true
      - name: huggingface
        path: flexitok/bpe_ltr_ell_Grek_8000_v2
        load_supermapping: true
      - name: huggingface
        path: flexitok/bpe_ltr_fw_edu_32000_v2
        load_supermapping: true
      - name: huggingface
        path: flexitok/bpe_ltr_hun_Latn_8000_v2
        load_supermapping: true
      - name: huggingface
        path: flexitok/bpe_ltr_rus_Cyrl_16000_v2
        load_supermapping: true
      - name: huggingface
        path: flexitok/bpe_ltr_tur_Latn_8000_v2
        load_supermapping: true
      - name: huggingface
        path: flexitok/bpe_script_Germ_32000
        load_supermapping: true
      - name: huggingface
        path: flexitok/bpe_script_Roma_32000
        load_supermapping: true
      - name: huggingface
        path: flexitok/bpe_script_SEAS_16000
        load_supermapping: true
      - name: huggingface
        path: flexitok/bpe_script_Slav_16000
        load_supermapping: true
checkpoint:
  path: /fsx/craffel/lingua_logs/checkpoints/script_1