vector misalignment

#1
by Hocassian - opened

Traceback (most recent call last):
File "F:\file\gitruck\hocassian-gitruck-codebase\gitruck-python\utils\third_party\open_gv_lab\intern_video.py", line 179, in
response, chat_history = model.chat(
File "F:\ai\cache\huggingface\modules\transformers_modules\OpenGVLab\InternVideo2_Chat_8B_InternLM2_5\de18473033b9aa716fa6d391a2cc7c2881f72253\modeling_videochat2.py", line 308, in chat
generation_output = self.generate_caption(
File "F:\ai\cache\huggingface\modules\transformers_modules\OpenGVLab\InternVideo2_Chat_8B_InternLM2_5\de18473033b9aa716fa6d391a2cc7c2881f72253\modeling_videochat2.py", line 179, in generate_caption
outputs = self.lm.generate(
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python310\lib\site-packages\peft\peft_model.py", line 975, in generate
outputs = self.base_model.generate(**kwargs)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\utils_contextlib.py", line 116, in decorate_context
return func(*args, **kwargs)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python310\lib\site-packages\transformers\generation\utils.py", line 2024, in generate
result = self._sample(
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python310\lib\site-packages\transformers\generation\utils.py", line 2982, in _sample
outputs = self(**model_inputs, return_dict=True)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "F:\ai\cache\huggingface\modules\transformers_modules\internlm\internlm2_5-7b-chat-1m\846dee6fb6f5a96fb9ab7d3f1f9c383ac9e73bc1\modeling_internlm2.py", line 1212, in forward
outputs = self.model(
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1553, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\Administrator\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1562, in _call_impl
return forward_call(*args, **kwargs)
File "F:\ai\cache\huggingface\modules\transformers_modules\internlm\internlm2_5-7b-chat-1m\846dee6fb6f5a96fb9ab7d3f1f9c383ac9e73bc1\modeling_internlm2.py", line 980, in forward
causal_mask = self._update_causal_mask(
File "F:\ai\cache\huggingface\modules\transformers_modules\internlm\internlm2_5-7b-chat-1m\846dee6fb6f5a96fb9ab7d3f1f9c383ac9e73bc1\modeling_internlm2.py", line 1105, in _update_causal_mask
causal_mask *= torch.arange(target_length, device=device) > cache_position.reshape(-1, 1)
RuntimeError: The size of tensor a (0) must match the size of tensor b (377) at non-singleton dimension 0

Resolved: the error is caused by an incompatible `transformers` version — pinning `transformers` to version 4.38.0 (e.g. `pip install transformers==4.38.0`) fixes the tensor-size mismatch in `_update_causal_mask`.

Hocassian changed discussion status to closed

Sign up or log in to comment