agentmish committed on
Commit
24b8cd3
·
verified ·
1 Parent(s): c9fe366

fix: use inputs_embeds in create_causal_mask (deprecation fix)

Browse files
Files changed (1) hide show
  1. modeling.py +2 -2
modeling.py CHANGED
@@ -61,7 +61,7 @@ class PPLXQwen3Model(Qwen3Model):
61
  attention_mask = {
62
  "full_attention": create_causal_mask(
63
  config=self.config,
64
- input_embeds=inputs_embeds,
65
  attention_mask=attention_mask,
66
  cache_position=dummy_cache_position,
67
  past_key_values=None,
@@ -80,4 +80,4 @@ class PPLXQwen3Model(Qwen3Model):
80
  cache_position=cache_position,
81
  **kwargs,
82
  )
83
- return outputs
 
61
  attention_mask = {
62
  "full_attention": create_causal_mask(
63
  config=self.config,
64
+ inputs_embeds=inputs_embeds,
65
  attention_mask=attention_mask,
66
  cache_position=dummy_cache_position,
67
  past_key_values=None,
 
80
  cache_position=cache_position,
81
  **kwargs,
82
  )
83
+ return outputs