hf-transformers-bot committed on
Commit
a14bacc
·
verified ·
1 Parent(s): e7be724

Upload 2025-11-22/ci_results_run_models_gpu/new_failures.json with huggingface_hub

Browse files
2025-11-22/ci_results_run_models_gpu/new_failures.json ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "albert": {
3
+ "single-gpu": [],
4
+ "multi-gpu": [
5
+ "tests/models/albert/test_modeling_albert.py::AlbertModelTest::test_flash_attn_2_inference_equivalence"
6
+ ]
7
+ },
8
+ "data2vec": {
9
+ "single-gpu": [],
10
+ "multi-gpu": [
11
+ "tests/models/data2vec/test_modeling_data2vec_text.py::Data2VecTextModelTest::test_flash_attn_2_inference_equivalence"
12
+ ]
13
+ },
14
+ "dbrx": {
15
+ "single-gpu": [],
16
+ "multi-gpu": [
17
+ "tests/models/dbrx/test_modeling_dbrx.py::DbrxModelTest::test_flash_attn_2_equivalence"
18
+ ]
19
+ },
20
+ "electra": {
21
+ "single-gpu": [],
22
+ "multi-gpu": [
23
+ "tests/models/electra/test_modeling_electra.py::ElectraModelTest::test_flash_attn_2_inference_equivalence"
24
+ ]
25
+ },
26
+ "ernie": {
27
+ "single-gpu": [],
28
+ "multi-gpu": [
29
+ "tests/models/ernie/test_modeling_ernie.py::ErnieModelTest::test_flash_attn_2_inference_equivalence"
30
+ ]
31
+ },
32
+ "flex_olmo": {
33
+ "single-gpu": [
34
+ "tests/models/flex_olmo/test_modeling_flex_olmo.py::FlexOlmoModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids_and_fa_kwargs"
35
+ ],
36
+ "multi-gpu": []
37
+ }
38
+ }