hf-transformers-bot commited on
Commit
1708be0
·
verified ·
1 Parent(s): f836567

Upload 2025-12-11/ci_results_run_models_gpu/new_failures.json with huggingface_hub

Browse files
2025-12-11/ci_results_run_models_gpu/new_failures.json ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
{
  "albert": {
    "single-gpu": [
      "tests/models/albert/test_modeling_albert.py::AlbertModelTest::test_flash_attn_2_inference_equivalence"
    ],
    "multi-gpu": []
  },
  "chameleon": {
    "single-gpu": [
      "tests/models/chameleon/test_modeling_chameleon.py::ChameleonVision2SeqModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids_and_fa_kwargs"
    ],
    "multi-gpu": []
  },
  "ernie": {
    "single-gpu": [
      "tests/models/ernie/test_modeling_ernie.py::ErnieModelTest::test_flash_attn_2_inference_equivalence_right_padding"
    ],
    "multi-gpu": [
      "tests/models/ernie/test_modeling_ernie.py::ErnieModelTest::test_flash_attn_2_inference_equivalence_right_padding"
    ]
  },
  "glm4": {
    "single-gpu": [
      "tests/models/glm4/test_modeling_glm4.py::Glm4ModelTest::test_flash_attn_2_equivalence"
    ],
    "multi-gpu": [
      "tests/models/glm4/test_modeling_glm4.py::Glm4ModelTest::test_flash_attn_2_equivalence"
    ]
  },
  "gpt_oss": {
    "single-gpu": [],
    "multi-gpu": [
      "tests/models/gpt_oss/test_modeling_gpt_oss.py::GptOssModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids"
    ]
  },
  "mixtral": {
    "single-gpu": [
      "tests/models/mixtral/test_modeling_mixtral.py::MixtralModelTest::test_flash_attention_2_padding_matches_padding_free_with_position_ids"
    ],
    "multi-gpu": []
  },
  "roberta": {
    "single-gpu": [],
    "multi-gpu": [
      "tests/models/roberta/test_modeling_roberta.py::RobertaModelTest::test_flash_attn_2_inference_equivalence",
      "tests/models/roberta/test_modeling_roberta.py::RobertaModelTest::test_flash_attn_2_inference_equivalence_right_padding"
    ]
  }
}