Xiang_wan_2_2_lora / logs /0_log.txt
svjack's picture
Upload folder using huggingface_hub
a8bb393 verified
Running 1 job
Error running job: /environment/miniconda3/lib/python3.11/site-packages/flash_attn_2_cuda.cpython-311-x86_64-linux-gnu.so: undefined symbol: _ZN3c104cuda9SetDeviceEab
Error running on_error: cannot access local variable 'job' where it is not associated with a value
========================================
Result:
- 0 completed jobs
- 1 failure
========================================
Traceback (most recent call last):
Traceback (most recent call last):
File "/home/featurize/ai-toolkit/run.py", line 120, in <module>
File "/home/featurize/ai-toolkit/run.py", line 120, in <module>
main()
File "/home/featurize/ai-toolkit/run.py", line 108, in main
File "/home/featurize/ai-toolkit/run.py", line 108, in main
raise e
File "/home/featurize/ai-toolkit/run.py", line 95, in main
File "/home/featurize/ai-toolkit/run.py", line 95, in main
job = get_job(config_file, args.name)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/featurize/ai-toolkit/toolkit/job.py", line 28, in get_job
File "/home/featurize/ai-toolkit/toolkit/job.py", line 28, in get_job
from jobs import ExtensionJob
File "/home/featurize/ai-toolkit/jobs/__init__.py", line 1, in <module>
File "/home/featurize/ai-toolkit/jobs/__init__.py", line 1, in <module>
from .BaseJob import BaseJob
File "/home/featurize/ai-toolkit/jobs/BaseJob.py", line 5, in <module>
File "/home/featurize/ai-toolkit/jobs/BaseJob.py", line 5, in <module>
from jobs.process import BaseProcess
File "/home/featurize/ai-toolkit/jobs/process/__init__.py", line 1, in <module>
File "/home/featurize/ai-toolkit/jobs/process/__init__.py", line 1, in <module>
from .BaseExtractProcess import BaseExtractProcess
File "/home/featurize/ai-toolkit/jobs/process/BaseExtractProcess.py", line 7, in <module>
File "/home/featurize/ai-toolkit/jobs/process/BaseExtractProcess.py", line 7, in <module>
from toolkit.metadata import get_meta_for_safetensors
File "/home/featurize/ai-toolkit/toolkit/metadata.py", line 9, in <module>
File "/home/featurize/ai-toolkit/toolkit/metadata.py", line 9, in <module>
from toolkit.train_tools import addnet_hash_legacy
File "/home/featurize/ai-toolkit/toolkit/train_tools.py", line 25, in <module>
File "/home/featurize/ai-toolkit/toolkit/train_tools.py", line 25, in <module>
from transformers import T5Tokenizer, T5EncoderModel, UMT5EncoderModel
File "/environment/miniconda3/lib/python3.11/site-packages/transformers/utils/import_utils.py", line 2045, in __getattr__
File "/environment/miniconda3/lib/python3.11/site-packages/transformers/utils/import_utils.py", line 2045, in __getattr__
module = self._get_module(self._class_to_module[name])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/environment/miniconda3/lib/python3.11/site-packages/transformers/utils/import_utils.py", line 2075, in _get_module
File "/environment/miniconda3/lib/python3.11/site-packages/transformers/utils/import_utils.py", line 2075, in _get_module
raise e
File "/environment/miniconda3/lib/python3.11/site-packages/transformers/utils/import_utils.py", line 2073, in _get_module
File "/environment/miniconda3/lib/python3.11/site-packages/transformers/utils/import_utils.py", line 2073, in _get_module
return importlib.import_module("." + module_name, self.__name__)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/environment/miniconda3/lib/python3.11/importlib/__init__.py", line 126, in import_module
File "/environment/miniconda3/lib/python3.11/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/environment/miniconda3/lib/python3.11/site-packages/transformers/models/t5/modeling_t5.py", line 40, in <module>
File "/environment/miniconda3/lib/python3.11/site-packages/transformers/models/t5/modeling_t5.py", line 40, in <module>
from ...modeling_utils import PreTrainedModel
File "/environment/miniconda3/lib/python3.11/site-packages/transformers/modeling_utils.py", line 61, in <module>
File "/environment/miniconda3/lib/python3.11/site-packages/transformers/modeling_utils.py", line 61, in <module>
from .integrations.flash_attention import flash_attention_forward
File "/environment/miniconda3/lib/python3.11/site-packages/transformers/integrations/flash_attention.py", line 5, in <module>
File "/environment/miniconda3/lib/python3.11/site-packages/transformers/integrations/flash_attention.py", line 5, in <module>
from ..modeling_flash_attention_utils import _flash_attention_forward, flash_attn_supports_top_left_mask
File "/environment/miniconda3/lib/python3.11/site-packages/transformers/modeling_flash_attention_utils.py", line 36, in <module>
File "/environment/miniconda3/lib/python3.11/site-packages/transformers/modeling_flash_attention_utils.py", line 36, in <module>
from flash_attn.bert_padding import index_first_axis, pad_input, unpad_input  # noqa
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/environment/miniconda3/lib/python3.11/site-packages/flash_attn/__init__.py", line 3, in <module>
File "/environment/miniconda3/lib/python3.11/site-packages/flash_attn/__init__.py", line 3, in <module>
from flash_attn.flash_attn_interface import (
File "/environment/miniconda3/lib/python3.11/site-packages/flash_attn/flash_attn_interface.py", line 15, in <module>
File "/environment/miniconda3/lib/python3.11/site-packages/flash_attn/flash_attn_interface.py", line 15, in <module>
import flash_attn_2_cuda as flash_attn_gpu
ImportError: /environment/miniconda3/lib/python3.11/site-packages/flash_attn_2_cuda.cpython-311-x86_64-linux-gnu.so: undefined symbol: _ZN3c104cuda9SetDeviceEab