Commit 5df5630 · Parent(s): 2b6b788 — "Fixes"
Browse files · 1 file changed: modeling_functionary.py (+3 −2)

modeling_functionary.py — CHANGED
Before (old file side):
@@ -48,7 +48,9 @@ class FunctionaryForCausalLM(LlamaForCausalLM):
48 |         negative_prompt_attention_mask: Optional[torch.Tensor] = None,
49 |         **kwargs,
50 |     ) -> Union[GenerateOutput, torch.LongTensor]:
51 | -
52 |         results = self.generate(
53 |             inputs=inputs,
54 |             generation_config=generation_config,
@@ -63,7 +65,6 @@ class FunctionaryForCausalLM(LlamaForCausalLM):
63 |             **kwargs,
64 |         )
65 |
66 | -       tokenizer = kwargs.pop("tokenizer", None) # Pull this out first, we use it to parse raw output
67 |         input_ids = kwargs.pop("input_ids")
68 |         function_call_token = "<|reserved_special_token_249|>"
69 |
After (new file side):
48 |         negative_prompt_attention_mask: Optional[torch.Tensor] = None,
49 |         **kwargs,
50 |     ) -> Union[GenerateOutput, torch.LongTensor]:
51 | +
52 | +       tokenizer = kwargs.pop("tokenizer", None) # Pull this out first, we use it to parse raw output
53 | +
54 |         results = self.generate(
55 |             inputs=inputs,
56 |             generation_config=generation_config,
...
65 |             **kwargs,
66 |         )
67 |
68 |         input_ids = kwargs.pop("input_ids")
69 |         function_call_token = "<|reserved_special_token_249|>"
70 |