runtime error

Exit code: 1. Reason: Traceback (most recent call last): File "/home/user/app/app.py", line 8, in <module> model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32) File "/usr/local/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 571, in from_pretrained return model_class.from_pretrained( File "/usr/local/lib/python3.10/site-packages/transformers/modeling_utils.py", line 279, in _wrapper return func(*args, **kwargs) File "/usr/local/lib/python3.10/site-packages/transformers/modeling_utils.py", line 4214, in from_pretrained config.quantization_config = AutoHfQuantizer.merge_quantization_configs( File "/usr/local/lib/python3.10/site-packages/transformers/quantizers/auto.py", line 198, in merge_quantization_configs quantization_config = AutoQuantizationConfig.from_dict(quantization_config) File "/usr/local/lib/python3.10/site-packages/transformers/quantizers/auto.py", line 128, in from_dict return target_cls.from_dict(quantization_config_dict) File "/usr/local/lib/python3.10/site-packages/transformers/utils/quantization_config.py", line 119, in from_dict config = cls(**config_dict) File "/usr/local/lib/python3.10/site-packages/transformers/utils/quantization_config.py", line 438, in __init__ self.post_init() File "/usr/local/lib/python3.10/site-packages/transformers/utils/quantization_config.py", line 496, in post_init if self.load_in_4bit and not version.parse(importlib.metadata.version("bitsandbytes")) >= version.parse( File "/usr/local/lib/python3.10/importlib/metadata/__init__.py", line 996, in version return distribution(distribution_name).version File "/usr/local/lib/python3.10/importlib/metadata/__init__.py", line 969, in distribution return Distribution.from_name(distribution_name) File "/usr/local/lib/python3.10/importlib/metadata/__init__.py", line 548, in from_name raise PackageNotFoundError(name) importlib.metadata.PackageNotFoundError: No package metadata was found for bitsandbytes

Container logs:

Fetching error logs...