{
"_name_or_path": "miolg/bz2",
"all_reduce_scores": {
"0": "NOT_ALIVE",
"1": "NON_PARTICIPATING",
"10": "NON_PARTICIPATING",
"100": "NOT_ALIVE",
"101": "NOT_ALIVE",
"102": "NOT_ALIVE",
"103": "NON_PARTICIPATING",
"104": "NON_PARTICIPATING",
"105": "NON_PARTICIPATING",
"106": "FAIL",
"107": "NON_PARTICIPATING",
"108": "NOT_ALIVE",
"109": "NON_PARTICIPATING",
"11": "FAIL",
"110": "NOT_ALIVE",
"111": "NON_PARTICIPATING",
"112": "NOT_ALIVE",
"113": "NON_PARTICIPATING",
"114": "NON_PARTICIPATING",
"115": "NON_PARTICIPATING",
"116": "FAIL",
"117": "NOT_ALIVE",
"118": "NON_PARTICIPATING",
"119": "NOT_ALIVE",
"12": "NON_PARTICIPATING",
"120": "NOT_ALIVE",
"121": "NOT_ALIVE",
"122": "NON_PARTICIPATING",
"123": "FAIL",
"124": "NON_PARTICIPATING",
"125": "FAIL",
"126": "NON_PARTICIPATING",
"127": "NON_PARTICIPATING",
"128": "FAIL",
"129": "FAIL",
"13": "FAIL",
"130": "NOT_ALIVE",
"131": "NOT_ALIVE",
"132": "NOT_ALIVE",
"133": "NON_PARTICIPATING",
"134": "NON_PARTICIPATING",
"135": "NOT_ALIVE",
"136": "NOT_ALIVE",
"137": "NOT_ALIVE",
"138": "NON_PARTICIPATING",
"139": "NOT_ALIVE",
"14": "NOT_ALIVE",
"140": "NOT_ALIVE",
"141": "NOT_ALIVE",
"142": "NON_PARTICIPATING",
"143": "NON_PARTICIPATING",
"144": "NOT_ALIVE",
"145": "NON_PARTICIPATING",
"146": "NON_PARTICIPATING",
"147": "NON_PARTICIPATING",
"148": "NOT_ALIVE",
"149": "NON_PARTICIPATING",
"15": "NON_PARTICIPATING",
"150": "NON_PARTICIPATING",
"151": "NOT_ALIVE",
"152": "NOT_ALIVE",
"153": "FAIL",
"154": "NON_PARTICIPATING",
"155": "NON_PARTICIPATING",
"156": "NON_PARTICIPATING",
"157": "FAIL",
"158": "NON_PARTICIPATING",
"159": "NON_PARTICIPATING",
"16": "NON_PARTICIPATING",
"160": "NOT_ALIVE",
"161": "NOT_ALIVE",
"162": "NON_PARTICIPATING",
"163": "NON_PARTICIPATING",
"164": "NOT_ALIVE",
"165": "FAIL",
"166": "NON_PARTICIPATING",
"167": "NON_PARTICIPATING",
"168": "NON_PARTICIPATING",
"169": "NON_PARTICIPATING",
"17": "NON_PARTICIPATING",
"170": "NON_PARTICIPATING",
"171": "NOT_ALIVE",
"172": "NOT_ALIVE",
"173": "NON_PARTICIPATING",
"174": "NOT_ALIVE",
"175": "NON_PARTICIPATING",
"176": "NOT_ALIVE",
"177": "NON_PARTICIPATING",
"178": "NOT_ALIVE",
"179": "FAIL",
"18": "NON_PARTICIPATING",
"180": "NON_PARTICIPATING",
"181": "NON_PARTICIPATING",
"182": "NOT_ALIVE",
"183": "NOT_ALIVE",
"184": "FAIL",
"185": "NOT_ALIVE",
"186": "NOT_ALIVE",
"187": "NON_PARTICIPATING",
"188": "NON_PARTICIPATING",
"189": "NON_PARTICIPATING",
"19": "FAIL",
"190": "NON_PARTICIPATING",
"191": "NOT_ALIVE",
"192": "NON_PARTICIPATING",
"193": "NON_PARTICIPATING",
"194": "FAIL",
"195": "NOT_ALIVE",
"196": "FAIL",
"197": "NON_PARTICIPATING",
"198": "NOT_ALIVE",
"199": "FAIL",
"2": "NON_PARTICIPATING",
"20": "NOT_ALIVE",
"200": "NOT_ALIVE",
"201": "NON_PARTICIPATING",
"202": "FAIL",
"203": "FAIL",
"204": "NOT_ALIVE",
"205": "NOT_ALIVE",
"206": "NON_PARTICIPATING",
"207": "NOT_ALIVE",
"208": "FAIL",
"209": "NON_PARTICIPATING",
"21": "NOT_ALIVE",
"210": "NON_PARTICIPATING",
"211": "FAIL",
"212": "FAIL",
"213": "NOT_ALIVE",
"214": "NOT_ALIVE",
"215": "FAIL",
"216": "NOT_ALIVE",
"217": "FAIL",
"218": "FAIL",
"219": "FAIL",
"22": "NOT_ALIVE",
"220": "FAIL",
"221": "NON_PARTICIPATING",
"222": "NON_PARTICIPATING",
"223": "FAIL",
"224": "NOT_ALIVE",
"225": "FAIL",
"226": "NON_PARTICIPATING",
"227": "FAIL",
"228": "NON_PARTICIPATING",
"229": "NOT_ALIVE",
"23": "NON_PARTICIPATING",
"230": "NOT_ALIVE",
"231": "NOT_ALIVE",
"232": "NON_PARTICIPATING",
"233": "NON_PARTICIPATING",
"234": "NON_PARTICIPATING",
"235": "NON_PARTICIPATING",
"236": "FAIL",
"237": "NON_PARTICIPATING",
"238": "NOT_ALIVE",
"239": "NOT_ALIVE",
"24": "FAIL",
"240": "NOT_ALIVE",
"241": "NON_PARTICIPATING",
"242": "NOT_ALIVE",
"243": "NOT_ALIVE",
"244": "NOT_ALIVE",
"245": "NON_PARTICIPATING",
"246": "NON_PARTICIPATING",
"247": "NOT_ALIVE",
"248": "NON_PARTICIPATING",
"249": "NOT_ALIVE",
"25": "SUCCESS",
"250": "FAIL",
"251": "NOT_ALIVE",
"252": "FAIL",
"253": "NON_PARTICIPATING",
"254": "NON_PARTICIPATING",
"255": "NON_PARTICIPATING",
"26": "NOT_ALIVE",
"27": "NON_PARTICIPATING",
"28": "NON_PARTICIPATING",
"29": "NOT_ALIVE",
"3": "NON_PARTICIPATING",
"30": "FAIL",
"31": "NON_PARTICIPATING",
"32": "NOT_ALIVE",
"33": "NOT_ALIVE",
"34": "FAIL",
"35": "NOT_ALIVE",
"36": "NON_PARTICIPATING",
"37": "NOT_ALIVE",
"38": "FAIL",
"39": "NON_PARTICIPATING",
"4": "NON_PARTICIPATING",
"40": "FAIL",
"41": "NOT_ALIVE",
"42": "NOT_ALIVE",
"43": "NON_PARTICIPATING",
"44": "NOT_ALIVE",
"45": "NON_PARTICIPATING",
"46": "FAIL",
"47": "NON_PARTICIPATING",
"48": "NON_PARTICIPATING",
"49": "NOT_ALIVE",
"5": "FAIL",
"50": "FAIL",
"51": "NON_PARTICIPATING",
"52": "NON_PARTICIPATING",
"53": "FAIL",
"54": "NOT_ALIVE",
"55": "NON_PARTICIPATING",
"56": "NON_PARTICIPATING",
"57": "FAIL",
"58": "NOT_ALIVE",
"59": "NON_PARTICIPATING",
"6": "NOT_ALIVE",
"60": "FAIL",
"61": "NON_PARTICIPATING",
"62": "NON_PARTICIPATING",
"63": "NON_PARTICIPATING",
"64": "NOT_ALIVE",
"65": "NOT_ALIVE",
"66": "FAIL",
"67": "NON_PARTICIPATING",
"68": "NOT_ALIVE",
"69": "FAIL",
"7": "FAIL",
"70": "FAIL",
"71": "NOT_ALIVE",
"72": "NON_PARTICIPATING",
"73": "NON_PARTICIPATING",
"74": "NOT_ALIVE",
"75": "NON_PARTICIPATING",
"76": "FAIL",
"77": "NOT_ALIVE",
"78": "NON_PARTICIPATING",
"79": "FAIL",
"8": "NON_PARTICIPATING",
"80": "NOT_ALIVE",
"81": "NOT_ALIVE",
"82": "NOT_ALIVE",
"83": "NON_PARTICIPATING",
"84": "FAIL",
"85": "NON_PARTICIPATING",
"86": "FAIL",
"87": "NON_PARTICIPATING",
"88": "FAIL",
"89": "FAIL",
"9": "NON_PARTICIPATING",
"90": "NON_PARTICIPATING",
"91": "NON_PARTICIPATING",
"92": "FAIL",
"93": "FAIL",
"94": "NOT_ALIVE",
"95": "NOT_ALIVE",
"96": "NON_PARTICIPATING",
"97": "NON_PARTICIPATING",
"98": "NON_PARTICIPATING",
"99": "FAIL"
},
"architectures": [
"LlamaForCausalLM"
],
"attention_bias": false,
"attention_dropout": 0.0,
"block_list": [],
"bos_token_id": 1,
"eos_token_id": 2,
"hidden_act": "silu",
"hidden_size": 2048,
"initializer_range": 0.02,
"inner_step": 0,
"intermediate_size": 5632,
"last_allreduce_block": 6456333,
"max_position_embeddings": 2048,
"mlp_bias": false,
"model_type": "llama",
"num_attention_heads": 32,
"num_hidden_layers": 22,
"num_key_value_heads": 4,
"pretraining_tp": 1,
"rms_norm_eps": 1e-05,
"rope_scaling": null,
"rope_theta": 10000.0,
"tie_word_embeddings": false,
"torch_dtype": "float32",
"transformers_version": "4.39.3",
"use_cache": false,
"vocab_size": 32000
}