AttributeError: 'SpanMarkerConfig' object has no attribute '_attn_implementation_internal'
warnings.warn(
AttributeError Traceback (most recent call last)
/usr/local/lib/python3.12/dist-packages/span_marker/configuration.py in __getattribute__(self, key)
113 try:
--> 114 return super().__getattribute__(key)
115 except AttributeError as e:
13 frames
/usr/local/lib/python3.12/dist-packages/transformers/configuration_utils.py in __getattribute__(self, key)
206 key = super().__getattribute__("attribute_map")[key]
--> 207 return super().__getattribute__(key)
208
AttributeError: 'SpanMarkerConfig' object has no attribute '_attn_implementation_internal'
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
/tmp/ipython-input-1093887576.py in <cell line: 0>()
2
3 # Download from the 🤗 Hub
----> 4 model = SpanMarkerModel.from_pretrained("tomaarsen/span-marker-mbert-base-multinerd")
5 # Run inference
6 entities = model.predict("Amelia Earhart flew her single engine Lockheed Vega 5B across the Atlantic to Paris.")
/usr/local/lib/python3.12/dist-packages/span_marker/modeling.py in from_pretrained(cls, pretrained_model_name_or_path, labels, config, model_card_data, *model_args, **kwargs)
252 # If loading a SpanMarkerConfig, then we don't want to override id2label and label2id
253 # Create an encoder or SpanMarker config
--> 254 config: PretrainedConfig = config or AutoConfig.from_pretrained(
255 pretrained_model_name_or_path, *model_args, **kwargs
256 )
/usr/local/lib/python3.12/dist-packages/transformers/models/auto/configuration_auto.py in from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
1319 "pip install git+https://github.com/huggingface/transformers.git"
1320 )
-> 1321 return config_class.from_dict(config_dict, **unused_kwargs)
1322 else:
1323 # Fallback: use pattern matching on the string.
/usr/local/lib/python3.12/dist-packages/transformers/configuration_utils.py in from_dict(cls, config_dict, **kwargs)
806 config_dict["attn_implementation"] = kwargs.pop("attn_implementation", None)
807
--> 808 config = cls(**config_dict)
809
810 if hasattr(config, "pruned_heads"):
/usr/local/lib/python3.12/dist-packages/span_marker/configuration.py in __init__(self, encoder_config, model_max_length, marker_max_length, entity_max_length, max_prev_context, max_next_context, **kwargs)
64 self.trained_with_document_context = False
65 self.span_marker_version = kwargs.pop("span_marker_version", None)
---> 66 super().__init__(**kwargs)
67
68 if not self.encoder:
/usr/local/lib/python3.12/dist-packages/transformers/configuration_utils.py in __init__(self, output_hidden_states, output_attentions, return_dict, torchscript, dtype, pruned_heads, tie_word_embeddings, chunk_size_feed_forward, is_encoder_decoder, is_decoder, cross_attention_hidden_size, add_cross_attention, tie_encoder_decoder, architectures, finetuning_task, id2label, label2id, num_labels, task_specific_params, problem_type, tokenizer_class, prefix, bos_token_id, pad_token_id, eos_token_id, sep_token_id, decoder_start_token_id, **kwargs)
326
327 # Attention implementation to use, if relevant (it sets it recursively on sub-configs)
--> 328 self._attn_implementation = kwargs.pop("attn_implementation", None)
329
330 # Drop the transformers version info
/usr/local/lib/python3.12/dist-packages/span_marker/configuration.py in __setattr__(self, name, value)
108 if name == "outside_id":
109 return
--> 110 return super().__setattr__(name, value)
111
112 def __getattribute__(self, key: str) -> Any:
/usr/local/lib/python3.12/dist-packages/transformers/configuration_utils.py in __setattr__(self, key, value)
200 if key in super().__getattribute__("attribute_map"):
201 key = super().__getattribute__("attribute_map")[key]
--> 202 super().__setattr__(key, value)
203
204 def __getattribute__(self, key):
/usr/local/lib/python3.12/dist-packages/transformers/configuration_utils.py in _attn_implementation(self, value)
412 """We set it recursively on the sub-configs as well"""
413 # Set if for current config
--> 414 current_attn = getattr(self, "_attn_implementation", None)
415 attn_implementation = value if not isinstance(value, dict) else value.get("", current_attn)
416 self._attn_implementation_internal = attn_implementation
/usr/local/lib/python3.12/dist-packages/span_marker/configuration.py in __getattribute__(self, key)
112 def __getattribute__(self, key: str) -> Any:
113 try:
--> 114 return super().__getattribute__(key)
115 except AttributeError as e:
116 try:
/usr/local/lib/python3.12/dist-packages/transformers/configuration_utils.py in __getattribute__(self, key)
205 if key != "attribute_map" and key in super().__getattribute__("attribute_map"):
206 key = super().__getattribute__("attribute_map")[key]
--> 207 return super().__getattribute__(key)
208
209 def __init__(
/usr/local/lib/python3.12/dist-packages/transformers/configuration_utils.py in _attn_implementation(self)
406 @property
407 def _attn_implementation(self):
--> 408 return self._attn_implementation_internal
409
410 @_attn_implementation.setter
/usr/local/lib/python3.12/dist-packages/span_marker/configuration.py in __getattribute__(self, key)
115 except AttributeError as e:
116 try:
--> 117 return super().__getattribute__("encoder")[key]
118 except KeyError:
119 raise e
TypeError: 'NoneType' object is not subscriptable
from span_marker import SpanMarkerModel

# Download from the 🤗 Hub
model = SpanMarkerModel.from_pretrained("tomaarsen/span-marker-mbert-base-multinerd")
# Run inference
entities = model.predict("Amelia Earhart flew her single engine Lockheed Vega 5B across the Atlantic to Paris.")