CODE:
```python
# Use a pipeline as a high-level helper
from transformers import pipeline

pipe = pipeline("text-generation", model="ByteDance-Seed/Stable-DiffCoder-8B-Instruct", trust_remote_code=True)
messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe(messages)
```

ERROR:
```text
Traceback (most recent call last):
  File "/tmp/ByteDance-Seed_Stable-DiffCoder-8B-Instruct_0iWaOu2.py", line 30, in <module>
    pipe(messages)
    ~~~~^^^^^^^^^^
  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/transformers/pipelines/text_generation.py", line 293, in __call__
    return super().__call__(text_inputs, **kwargs)
           ~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^
  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/transformers/pipelines/base.py", line 1274, in __call__
    return self.run_single(inputs, preprocess_params, forward_params, postprocess_params)
           ~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/transformers/pipelines/base.py", line 1281, in run_single
    model_outputs = self.forward(model_inputs, **forward_params)
  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/transformers/pipelines/base.py", line 1173, in forward
    model_outputs = self._forward(model_inputs, **forward_params)
  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/transformers/pipelines/text_generation.py", line 397, in _forward
    output = self.model.generate(input_ids=input_ids, attention_mask=attention_mask, **generate_kwargs)
  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/torch/utils/_contextlib.py", line 124, in decorate_context
    return func(*args, **kwargs)
  File "/tmp/.cache/huggingface/modules/transformers_modules/ByteDance_hyphen_Seed/Stable_hyphen_DiffCoder_hyphen_8B_hyphen_Instruct/d0808fa34fa03de9163a29cabfc7107294a535a8/modeling_seed_diffcoder.py", line 24, in generate
    output_ids, nfe = generate_block(
                      ~~~~~~~~~~~~~~^
        model=self,
        ^^^^^^^^^^^
        prompt=prompt,
        ^^^^^^^^^^^^^^
        **kwargs,
        ^^^^^^^^^
    )
    ^
  File "/tmp/.cache/uv/environments-v2/d1df88e7f5c3f72a/lib/python3.13/site-packages/torch/utils/_contextlib.py", line 124, in decorate_context
    return func(*args, **kwargs)
TypeError: generate_block() got an unexpected keyword argument 'attention_mask'
```
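
The failure originates in the model's custom remote code: the text-generation pipeline always passes `attention_mask` to `model.generate()`, and the `generate()` wrapper in `modeling_seed_diffcoder.py` forwards all extra keyword arguments to `generate_block()`, which does not accept `attention_mask`. A possible workaround is to bypass the pipeline and call `generate()` directly with only `input_ids`. The sketch below is untested and makes assumptions beyond what the traceback shows: that the remote code registers with `AutoModelForCausalLM`, that its `generate()` accepts `max_new_tokens`, and that it returns token ids; check `modeling_seed_diffcoder.py` for the actual signature.

```python
# Workaround sketch (untested; assumptions noted above): load the model
# directly so the pipeline cannot inject attention_mask into generate().
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "ByteDance-Seed/Stable-DiffCoder-8B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True)

messages = [{"role": "user", "content": "Who are you?"}]
# Build the prompt with the model's chat template, if it defines one.
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
)

# Pass only input_ids: extra kwargs are forwarded to generate_block(),
# which rejects attention_mask. max_new_tokens is assumed to be accepted.
output_ids = model.generate(input_ids=input_ids, max_new_tokens=128)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```

Alternatively, if the pipeline must be used, dropping `attention_mask` from the kwargs inside the remote `generate()` wrapper before it calls `generate_block()` would also avoid the error.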