Runtime error

Exit code: 1. Reason: await fn(*processed_input) ^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.13/site-packages/gradio/utils.py", line 1003, in async_wrapper response = await f(*args, **kwargs) ^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.13/site-packages/gradio/chat_interface.py", line 1040, in _examples_fn response = await run_sync(self.fn, *inputs, limiter=self.limiter) # type: ignore ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.13/site-packages/anyio/to_thread.py", line 63, in run_sync return await get_async_backend().run_sync_in_worker_thread( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ func, args, abandon_on_cancel=abandon_on_cancel, limiter=limiter ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ) ^ File "/usr/local/lib/python3.13/site-packages/anyio/_backends/_asyncio.py", line 2502, in run_sync_in_worker_thread return await future ^^^^^^^^^^^^ File "/usr/local/lib/python3.13/site-packages/anyio/_backends/_asyncio.py", line 986, in run result = context.run(func, *args) File "/app/app.py", line 90, in stealth_chat final_response = humanize_text(raw_response) File "/app/app.py", line 46, in humanize_text encoding = humanizer_tokenizer.encode_plus(input_text, pad_to_max_length=True, return_tensors="pt") File "/usr/local/lib/python3.13/site-packages/transformers/tokenization_utils_base.py", line 3263, in encode_plus return self._encode_plus( ~~~~~~~~~~~~~~~~~^ text=text, ^^^^^^^^^^ ...<18 lines>... **kwargs, ^^^^^^^^^ ) ^ File "/usr/local/lib/python3.13/site-packages/transformers/tokenization_utils_fast.py", line 627, in _encode_plus batched_output = self._batch_encode_plus( batched_input, ...<17 lines>... **kwargs, ) TypeError: PreTrainedTokenizerFast._batch_encode_plus() got an unexpected keyword argument 'pad_to_max_length'

Container logs:

Fetching error logs...