Update inference-cache-config/llama.json
Browse files
inference-cache-config/llama.json
CHANGED
@@ -59,4 +59,12 @@
       "auto_cast_type": "bf16"
     }
   ],
+  "meta-llama/Meta-Llama-3-8B": [
+    {
+      "batch_size": 1,
+      "sequence_length": 4096,
+      "num_cores": 2,
+      "auto_cast_type": "bf16"
+    }
+  ]
 }