after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def create_lcwa_instances(self, use_tqdm: Optional[bool] = None) -> Instances:
"""Create LCWA instances for this factory's triples."""
return LCWAInstances.from_triples(
mapped_triples=self._add_inverse_triples_if_necessary(
mapped_triples=self.mapped_triples
),
num_entities=self.num_entities,
)
|
def create_lcwa_instances(self, use_tqdm: Optional[bool] = None) -> Instances:
"""Create LCWA instances for this factory's triples."""
return LCWAInstances.from_triples(
mapped_triples=self.mapped_triples, num_entities=self.num_entities
)
|
https://github.com/pykeen/pykeen/issues/146
|
Traceback (most recent call last):
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 49, in <module>
main()
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 36, in main
testing.new_with_restriction(relations=evaluation_relation_whitelist)
File "/Users/cthoyt/dev/pykeen/src/pykeen/triples/triples_factory.py", line 641, in new_with_restriction
relations = list(relations) + list(map(self.relation_to_inverse.__getitem__, relations))
KeyError: 'accusation_inverse'
|
KeyError
|
def split(
self,
ratios: Union[float, Sequence[float]] = 0.8,
*,
random_state: TorchRandomHint = None,
randomize_cleanup: bool = False,
) -> List["TriplesFactory"]:
"""Split a triples factory into a train/test.
:param ratios: There are three options for this argument. First, a float can be given between 0 and 1.0,
non-inclusive. The first triples factory will get this ratio and the second will get the rest. Second,
a list of ratios can be given for which factory in which order should get what ratios as in ``[0.8, 0.1]``.
The final ratio can be omitted because that can be calculated. Third, all ratios can be explicitly set in
order such as in ``[0.8, 0.1, 0.1]`` where the sum of all ratios is 1.0.
:param random_state: The random state used to shuffle and split the triples in this factory.
:param randomize_cleanup: If true, uses the non-deterministic method for moving triples to the training set.
This has the advantage that it doesn't necessarily have to move all of them, but it might be slower.
.. code-block:: python
ratio = 0.8 # makes a [0.8, 0.2] split
training_factory, testing_factory = factory.split(ratio)
ratios = [0.8, 0.1] # makes a [0.8, 0.1, 0.1] split
training_factory, testing_factory, validation_factory = factory.split(ratios)
ratios = [0.8, 0.1, 0.1] # also makes a [0.8, 0.1, 0.1] split
training_factory, testing_factory, validation_factory = factory.split(ratios)
"""
# input normalization
ratios = normalize_ratios(ratios)
generator = ensure_torch_random_state(random_state)
# convert to absolute sizes
sizes = get_absolute_split_sizes(n_total=self.num_triples, ratios=ratios)
# Split indices
idx = torch.randperm(self.num_triples, generator=generator)
idx_groups = idx.split(split_size=sizes, dim=0)
# Split triples
triples_groups = [self.mapped_triples[idx] for idx in idx_groups]
logger.info(
"done splitting triples to groups of sizes %s",
[triples.shape[0] for triples in triples_groups],
)
# Make sure that the first element has all the right stuff in it
logger.debug("cleaning up groups")
triples_groups = _tf_cleanup_all(
triples_groups, random_state=generator if randomize_cleanup else None
)
logger.debug("done cleaning up groups")
for i, (triples, exp_size, exp_ratio) in enumerate(
zip(triples_groups, sizes, ratios)
):
actual_size = triples.shape[0]
actual_ratio = actual_size / exp_size * exp_ratio
if actual_size != exp_size:
logger.warning(
f"Requested ratio[{i}]={exp_ratio:.3f} (equal to size {exp_size}), but got {actual_ratio:.3f} "
f"(equal to size {actual_size}) to ensure that all entities/relations occur in train.",
)
# Make new triples factories for each group
return [
self.clone_and_exchange_triples(mapped_triples=triples)
for triples in triples_groups
]
|
def split(
self,
ratios: Union[float, Sequence[float]] = 0.8,
*,
random_state: RandomHint = None,
randomize_cleanup: bool = False,
) -> List["TriplesFactory"]:
"""Split a triples factory into a train/test.
:param ratios: There are three options for this argument. First, a float can be given between 0 and 1.0,
non-inclusive. The first triples factory will get this ratio and the second will get the rest. Second,
a list of ratios can be given for which factory in which order should get what ratios as in ``[0.8, 0.1]``.
The final ratio can be omitted because that can be calculated. Third, all ratios can be explicitly set in
order such as in ``[0.8, 0.1, 0.1]`` where the sum of all ratios is 1.0.
:param random_state: The random state used to shuffle and split the triples in this factory.
:param randomize_cleanup: If true, uses the non-deterministic method for moving triples to the training set.
This has the advantage that it doesn't necessarily have to move all of them, but it might be slower.
.. code-block:: python
ratio = 0.8 # makes a [0.8, 0.2] split
training_factory, testing_factory = factory.split(ratio)
ratios = [0.8, 0.1] # makes a [0.8, 0.1, 0.1] split
training_factory, testing_factory, validation_factory = factory.split(ratios)
ratios = [0.8, 0.1, 0.1] # also makes a [0.8, 0.1, 0.1] split
training_factory, testing_factory, validation_factory = factory.split(ratios)
"""
n_triples = self.triples.shape[0]
# Prepare shuffle index
idx = np.arange(n_triples)
random_state = ensure_random_state(random_state)
random_state.shuffle(idx)
# Prepare split index
if isinstance(ratios, float):
ratios = [ratios]
ratio_sum = sum(ratios)
if ratio_sum == 1.0:
ratios = ratios[:-1] # vsplit doesn't take the final number into account.
elif ratio_sum > 1.0:
raise ValueError(f"ratios sum to more than 1.0: {ratios} (sum={ratio_sum})")
sizes = [int(split_ratio * n_triples) for split_ratio in ratios]
# Take cumulative sum so the get separated properly
split_idxs = np.cumsum(sizes)
# Split triples
triples_groups = np.vsplit(self.triples[idx], split_idxs)
logger.info(
"done splitting triples to groups of sizes %s",
[triples.shape[0] for triples in triples_groups],
)
# Make sure that the first element has all the right stuff in it
logger.debug("cleaning up groups")
triples_groups = _tf_cleanup_all(
triples_groups, random_state=random_state if randomize_cleanup else None
)
logger.debug("done cleaning up groups")
for i, (triples, exp_size, exp_ratio) in enumerate(
zip(triples_groups, sizes, ratios)
):
actual_size = triples.shape[0]
actual_ratio = actual_size / exp_size * exp_ratio
if actual_size != exp_size:
logger.warning(
f"Requested ratio[{i}]={exp_ratio:.3f} (equal to size {exp_size}), but got {actual_ratio:.3f} "
f"(equal to size {actual_size}) to ensure that all entities/relations occur in train.",
)
# Make new triples factories for each group
return [
TriplesFactory.from_labeled_triples(
triples=triples,
entity_to_id=self.entity_to_id,
relation_to_id=self.relation_to_id,
compact_id=False,
)
for triples in triples_groups
]
|
https://github.com/pykeen/pykeen/issues/146
|
Traceback (most recent call last):
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 49, in <module>
main()
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 36, in main
testing.new_with_restriction(relations=evaluation_relation_whitelist)
File "/Users/cthoyt/dev/pykeen/src/pykeen/triples/triples_factory.py", line 641, in new_with_restriction
relations = list(relations) + list(map(self.relation_to_inverse.__getitem__, relations))
KeyError: 'accusation_inverse'
|
KeyError
|
def get_most_frequent_relations(self, n: Union[int, float]) -> Set[int]:
"""Get the IDs of the n most frequent relations.
:param n: Either the (integer) number of top relations to keep or the (float) percentage of top relationships
to keep
"""
logger.info(f"applying cutoff of {n} to {self}")
if isinstance(n, float):
assert 0 < n < 1
n = int(self.num_relations * n)
elif not isinstance(n, int):
raise TypeError("n must be either an integer or a float")
uniq, counts = self.mapped_triples[:, 1].unique(return_counts=True)
top_counts, top_ids = counts.topk(k=n, largest=True)
return set(uniq[top_ids].tolist())
|
def get_most_frequent_relations(self, n: Union[int, float]) -> Set[str]:
"""Get the n most frequent relations.
:param n: Either the (integer) number of top relations to keep or the (float) percentage of top relationships
to keep
"""
logger.info(f"applying cutoff of {n} to {self}")
if isinstance(n, float):
assert 0 < n < 1
n = int(self.num_relations * n)
elif not isinstance(n, int):
raise TypeError("n must be either an integer or a float")
counter = Counter(self.triples[:, 1])
return {relation for relation, _ in counter.most_common(n)}
|
https://github.com/pykeen/pykeen/issues/146
|
Traceback (most recent call last):
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 49, in <module>
main()
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 36, in main
testing.new_with_restriction(relations=evaluation_relation_whitelist)
File "/Users/cthoyt/dev/pykeen/src/pykeen/triples/triples_factory.py", line 641, in new_with_restriction
relations = list(relations) + list(map(self.relation_to_inverse.__getitem__, relations))
KeyError: 'accusation_inverse'
|
KeyError
|
def entity_word_cloud(self, top: Optional[int] = None):
"""Make a word cloud based on the frequency of occurrence of each entity in a Jupyter notebook.
:param top: The number of top entities to show. Defaults to 100.
.. warning::
This function requires the ``word_cloud`` package. Use ``pip install pykeen[plotting]`` to
install it automatically, or install it yourself with
``pip install git+https://github.com/kavgan/word_cloud.git``.
"""
return self._word_cloud(
ids=self.mapped_triples[:, [0, 2]],
id_to_label=self.entity_id_to_label,
top=top or 100,
)
|
def entity_word_cloud(self, top: Optional[int] = None):
"""Make a word cloud based on the frequency of occurrence of each entity in a Jupyter notebook.
:param top: The number of top entities to show. Defaults to 100.
.. warning::
This function requires the ``word_cloud`` package. Use ``pip install pykeen[plotting]`` to
install it automatically, or install it yourself with
``pip install git+https://github.com/kavgan/word_cloud.git``.
"""
text = [f"{h} {t}" for h, _, t in self.triples]
return self._word_cloud(text=text, top=top or 100)
|
https://github.com/pykeen/pykeen/issues/146
|
Traceback (most recent call last):
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 49, in <module>
main()
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 36, in main
testing.new_with_restriction(relations=evaluation_relation_whitelist)
File "/Users/cthoyt/dev/pykeen/src/pykeen/triples/triples_factory.py", line 641, in new_with_restriction
relations = list(relations) + list(map(self.relation_to_inverse.__getitem__, relations))
KeyError: 'accusation_inverse'
|
KeyError
|
def relation_word_cloud(self, top: Optional[int] = None):
"""Make a word cloud based on the frequency of occurrence of each relation in a Jupyter notebook.
:param top: The number of top relations to show. Defaults to 100.
.. warning::
This function requires the ``word_cloud`` package. Use ``pip install pykeen[plotting]`` to
install it automatically, or install it yourself with
``pip install git+https://github.com/kavgan/word_cloud.git``.
"""
return self._word_cloud(
ids=self.mapped_triples[:, 1],
id_to_label=self.relation_id_to_label,
top=top or 100,
)
|
def relation_word_cloud(self, top: Optional[int] = None):
"""Make a word cloud based on the frequency of occurrence of each relation in a Jupyter notebook.
:param top: The number of top relations to show. Defaults to 100.
.. warning::
This function requires the ``word_cloud`` package. Use ``pip install pykeen[plotting]`` to
install it automatically, or install it yourself with
``pip install git+https://github.com/kavgan/word_cloud.git``.
"""
text = [r for _, r, _ in self.triples]
return self._word_cloud(text=text, top=top or 100)
|
https://github.com/pykeen/pykeen/issues/146
|
Traceback (most recent call last):
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 49, in <module>
main()
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 36, in main
testing.new_with_restriction(relations=evaluation_relation_whitelist)
File "/Users/cthoyt/dev/pykeen/src/pykeen/triples/triples_factory.py", line 641, in new_with_restriction
relations = list(relations) + list(map(self.relation_to_inverse.__getitem__, relations))
KeyError: 'accusation_inverse'
|
KeyError
|
def _word_cloud(
self, *, ids: torch.LongTensor, id_to_label: Mapping[int, str], top: int
):
try:
from word_cloud.word_cloud_generator import WordCloud
except ImportError:
logger.warning(
"Could not import module `word_cloud`. "
"Try installing it with `pip install git+https://github.com/kavgan/word_cloud.git`",
)
return
# pre-filter to keep only topk
uniq, counts = ids.view(-1).unique(return_counts=True)
top_counts, top_ids = counts.topk(k=top, largest=True)
# generate text
text = list(
itertools.chain(
*(
itertools.repeat(id_to_label[e_id], count)
for e_id, count in zip(top_ids.tolist(), top_counts.tolist())
)
)
)
from IPython.core.display import HTML
word_cloud = WordCloud()
return HTML(word_cloud.get_embed_code(text=text, topn=top))
|
def _word_cloud(self, *, text: List[str], top: int):
try:
from word_cloud.word_cloud_generator import WordCloud
except ImportError:
logger.warning(
"Could not import module `word_cloud`. "
"Try installing it with `pip install git+https://github.com/kavgan/word_cloud.git`",
)
return
from IPython.core.display import HTML
word_cloud = WordCloud()
return HTML(word_cloud.get_embed_code(text=text, topn=top))
|
https://github.com/pykeen/pykeen/issues/146
|
Traceback (most recent call last):
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 49, in <module>
main()
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 36, in main
testing.new_with_restriction(relations=evaluation_relation_whitelist)
File "/Users/cthoyt/dev/pykeen/src/pykeen/triples/triples_factory.py", line 641, in new_with_restriction
relations = list(relations) + list(map(self.relation_to_inverse.__getitem__, relations))
KeyError: 'accusation_inverse'
|
KeyError
|
def tensor_to_df(
self,
tensor: torch.LongTensor,
**kwargs: Union[torch.Tensor, np.ndarray, Sequence],
) -> pd.DataFrame:
"""Take a tensor of triples and make a pandas dataframe with labels.
:param tensor: shape: (n, 3)
The triples, ID-based and in format (head_id, relation_id, tail_id).
:param kwargs:
Any additional number of columns. Each column needs to be of shape (n,). Reserved column names:
{"head_id", "head_label", "relation_id", "relation_label", "tail_id", "tail_label"}.
:return:
A dataframe with n rows, and 6 + len(kwargs) columns.
"""
# Input validation
additional_columns = set(kwargs.keys())
forbidden = additional_columns.intersection(TRIPLES_DF_COLUMNS)
if len(forbidden) > 0:
raise ValueError(
f"The key-words for additional arguments must not be in {TRIPLES_DF_COLUMNS}, but {forbidden} were "
f"used.",
)
# convert to numpy
tensor = tensor.cpu().numpy()
data = dict(zip(["head_id", "relation_id", "tail_id"], tensor.T))
# vectorized label lookup
for column, id_to_label in dict(
head=self._vectorized_entity_labeler,
relation=self._vectorized_relation_labeler,
tail=self._vectorized_entity_labeler,
).items():
data[f"{column}_label"] = id_to_label(data[f"{column}_id"])
# Additional columns
for key, values in kwargs.items():
# convert PyTorch tensors to numpy
if torch.is_tensor(values):
values = values.cpu().numpy()
data[key] = values
# convert to dataframe
rv = pd.DataFrame(data=data)
# Re-order columns
columns = list(TRIPLES_DF_COLUMNS) + sorted(
set(rv.columns).difference(TRIPLES_DF_COLUMNS)
)
return rv.loc[:, columns]
|
def tensor_to_df(
self,
tensor: torch.LongTensor,
**kwargs: Union[torch.Tensor, np.ndarray, Sequence],
) -> pd.DataFrame:
"""Take a tensor of triples and make a pandas dataframe with labels.
:param tensor: shape: (n, 3)
The triples, ID-based and in format (head_id, relation_id, tail_id).
:param kwargs:
Any additional number of columns. Each column needs to be of shape (n,). Reserved column names:
{"head_id", "head_label", "relation_id", "relation_label", "tail_id", "tail_label"}.
:return:
A dataframe with n rows, and 6 + len(kwargs) columns.
"""
# Input validation
additional_columns = set(kwargs.keys())
forbidden = additional_columns.intersection(TRIPLES_DF_COLUMNS)
if len(forbidden) > 0:
raise ValueError(
f"The key-words for additional arguments must not be in {TRIPLES_DF_COLUMNS}, but {forbidden} were "
f"used.",
)
# convert to numpy
tensor = tensor.cpu().numpy()
data = dict(zip(["head_id", "relation_id", "tail_id"], tensor.T))
# vectorized label lookup
entity_id_to_label = np.vectorize(self.entity_id_to_label.__getitem__)
relation_id_to_label = np.vectorize(self.relation_id_to_label.__getitem__)
for column, id_to_label in dict(
head=entity_id_to_label,
relation=relation_id_to_label,
tail=entity_id_to_label,
).items():
data[f"{column}_label"] = id_to_label(data[f"{column}_id"])
# Additional columns
for key, values in kwargs.items():
# convert PyTorch tensors to numpy
if torch.is_tensor(values):
values = values.cpu().numpy()
data[key] = values
# convert to dataframe
rv = pd.DataFrame(data=data)
# Re-order columns
columns = list(TRIPLES_DF_COLUMNS) + sorted(
set(rv.columns).difference(TRIPLES_DF_COLUMNS)
)
return rv.loc[:, columns]
|
https://github.com/pykeen/pykeen/issues/146
|
Traceback (most recent call last):
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 49, in <module>
main()
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 36, in main
testing.new_with_restriction(relations=evaluation_relation_whitelist)
File "/Users/cthoyt/dev/pykeen/src/pykeen/triples/triples_factory.py", line 641, in new_with_restriction
relations = list(relations) + list(map(self.relation_to_inverse.__getitem__, relations))
KeyError: 'accusation_inverse'
|
KeyError
|
def new_with_restriction(
self,
entities: Union[None, Collection[int], Collection[str]] = None,
relations: Union[None, Collection[int], Collection[str]] = None,
invert_entity_selection: bool = False,
invert_relation_selection: bool = False,
) -> "TriplesFactory":
"""Make a new triples factory only keeping the given entities and relations, but keeping the ID mapping.
:param entities:
The entities of interest. If None, defaults to all entities.
:param relations:
The relations of interest. If None, defaults to all relations.
:param invert_entity_selection:
Whether to invert the entity selection, i.e. select those triples without the provided entities.
:param invert_relation_selection:
Whether to invert the relation selection, i.e. select those triples without the provided relations.
:return:
A new triples factory, which has only a subset of the triples containing the entities and relations of
interest. The label-to-ID mapping is *not* modified.
"""
keep_mask = None
# Filter for entities
if entities is not None:
keep_mask = self.get_mask_for_entities(
entities=entities, invert=invert_entity_selection
)
remaining_entities = (
self.num_entities - len(entities)
if invert_entity_selection
else len(entities)
)
logger.info(
f"keeping {format_relative_comparison(remaining_entities, self.num_entities)} entities."
)
# Filter for relations
if relations is not None:
relation_mask = self.get_mask_for_relations(
relations=relations, invert=invert_relation_selection
)
remaining_relations = (
self.num_relations - len(relations)
if invert_entity_selection
else len(relations)
)
logger.info(
f"keeping {format_relative_comparison(remaining_relations, self.num_relations)} relations."
)
keep_mask = relation_mask if keep_mask is None else keep_mask & relation_mask
# No filtering happened
if keep_mask is None:
return self
num_triples = keep_mask.sum()
logger.info(
f"keeping {format_relative_comparison(num_triples, self.num_triples)} triples."
)
return self.clone_and_exchange_triples(
mapped_triples=self.mapped_triples[keep_mask]
)
|
def new_with_restriction(
self,
entities: Optional[Collection[str]] = None,
relations: Optional[Collection[str]] = None,
) -> "TriplesFactory":
"""Make a new triples factory only keeping the given entities and relations, but keeping the ID mapping.
:param entities:
The entities of interest. If None, defaults to all entities.
:param relations:
The relations of interest. If None, defaults to all relations.
:return:
A new triples factory, which has only a subset of the triples containing the entities and relations of
interest. The label-to-ID mapping is *not* modified.
"""
if self.create_inverse_triples and relations is not None:
logger.info(
"Since %s already contain inverse relations, the relation filter is expanded to contain the inverse "
"relations as well.",
str(self),
)
relations = list(relations) + list(
map(self.relation_to_inverse.__getitem__, relations)
)
keep_mask = None
# Filter for entities
if entities is not None:
keep_mask = self.get_idx_for_entities(entities=entities)
logger.info("Keeping %d/%d entities", len(entities), self.num_entities)
# Filter for relations
if relations is not None:
relation_mask = self.get_idx_for_relations(relations=relations)
logger.info("Keeping %d/%d relations", len(relations), self.num_relations)
keep_mask = relation_mask if keep_mask is None else keep_mask & relation_mask
# No filtering happened
if keep_mask is None:
return self
logger.info("Keeping %d/%d triples", keep_mask.sum(), self.num_triples)
factory = TriplesFactory.from_labeled_triples(
triples=self.triples[keep_mask],
create_inverse_triples=False,
entity_to_id=self.entity_to_id,
relation_to_id=self.relation_to_id,
compact_id=False,
)
# manually copy the inverse relation mappings
if self.create_inverse_triples:
factory.relation_to_inverse = self.relation_to_inverse
return factory
|
https://github.com/pykeen/pykeen/issues/146
|
Traceback (most recent call last):
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 49, in <module>
main()
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 36, in main
testing.new_with_restriction(relations=evaluation_relation_whitelist)
File "/Users/cthoyt/dev/pykeen/src/pykeen/triples/triples_factory.py", line 641, in new_with_restriction
relations = list(relations) + list(map(self.relation_to_inverse.__getitem__, relations))
KeyError: 'accusation_inverse'
|
KeyError
|
def _tf_cleanup_all(
triples_groups: List[MappedTriples],
*,
random_state: TorchRandomHint = None,
) -> Sequence[MappedTriples]:
"""Cleanup a list of triples array with respect to the first array."""
reference, *others = triples_groups
rv = []
for other in others:
if random_state is not None:
reference, other = _tf_cleanup_randomized(reference, other, random_state)
else:
reference, other = _tf_cleanup_deterministic(reference, other)
rv.append(other)
# [...] is necessary for Python 3.7 compatibility
return [reference, *rv]
|
def _tf_cleanup_all(
triples_groups: List[np.ndarray],
*,
random_state: RandomHint = None,
) -> List[np.ndarray]:
"""Cleanup a list of triples array with respect to the first array."""
reference, *others = triples_groups
rv = []
for other in others:
if random_state is not None:
reference, other = _tf_cleanup_randomized(reference, other, random_state)
else:
reference, other = _tf_cleanup_deterministic(reference, other)
rv.append(other)
return [reference, *rv]
|
https://github.com/pykeen/pykeen/issues/146
|
Traceback (most recent call last):
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 49, in <module>
main()
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 36, in main
testing.new_with_restriction(relations=evaluation_relation_whitelist)
File "/Users/cthoyt/dev/pykeen/src/pykeen/triples/triples_factory.py", line 641, in new_with_restriction
relations = list(relations) + list(map(self.relation_to_inverse.__getitem__, relations))
KeyError: 'accusation_inverse'
|
KeyError
|
def _tf_cleanup_deterministic(
training: MappedTriples, testing: MappedTriples
) -> Tuple[MappedTriples, MappedTriples]:
"""Cleanup a triples array (testing) with respect to another (training)."""
move_id_mask = _prepare_cleanup(training, testing)
training = torch.cat([training, testing[move_id_mask]])
testing = testing[~move_id_mask]
return training, testing
|
def _tf_cleanup_deterministic(
training: np.ndarray, testing: np.ndarray
) -> Tuple[np.ndarray, np.ndarray]:
"""Cleanup a triples array (testing) with respect to another (training)."""
move_id_mask = _prepare_cleanup(training, testing)
training = np.concatenate([training, testing[move_id_mask]])
testing = testing[~move_id_mask]
return training, testing
|
https://github.com/pykeen/pykeen/issues/146
|
Traceback (most recent call last):
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 49, in <module>
main()
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 36, in main
testing.new_with_restriction(relations=evaluation_relation_whitelist)
File "/Users/cthoyt/dev/pykeen/src/pykeen/triples/triples_factory.py", line 641, in new_with_restriction
relations = list(relations) + list(map(self.relation_to_inverse.__getitem__, relations))
KeyError: 'accusation_inverse'
|
KeyError
|
def _tf_cleanup_randomized(
training: MappedTriples,
testing: MappedTriples,
random_state: TorchRandomHint = None,
) -> Tuple[MappedTriples, MappedTriples]:
"""Cleanup a triples array, but randomly select testing triples and recalculate to minimize moves.
1. Calculate ``move_id_mask`` as in :func:`_tf_cleanup_deterministic`
2. Choose a triple to move, recalculate move_id_mask
3. Continue until move_id_mask has no true bits
"""
generator = ensure_torch_random_state(random_state)
move_id_mask = _prepare_cleanup(training, testing)
# While there are still triples that should be moved to the training set
while move_id_mask.any():
# Pick a random triple to move over to the training triples
(candidates,) = move_id_mask.nonzero(as_tuple=True)
idx = torch.randint(candidates.shape[0], size=(1,), generator=generator)
idx = candidates[idx]
# add to training
training = torch.cat([training, testing[idx].view(1, -1)], dim=0)
# remove from testing
testing = torch.cat([testing[:idx], testing[idx + 1 :]], dim=0)
# Recalculate the move_id_mask
move_id_mask = _prepare_cleanup(training, testing)
return training, testing
|
def _tf_cleanup_randomized(
training: np.ndarray,
testing: np.ndarray,
random_state: RandomHint = None,
) -> Tuple[np.ndarray, np.ndarray]:
"""Cleanup a triples array, but randomly select testing triples and recalculate to minimize moves.
1. Calculate ``move_id_mask`` as in :func:`_tf_cleanup_deterministic`
2. Choose a triple to move, recalculate move_id_mask
3. Continue until move_id_mask has no true bits
"""
random_state = ensure_random_state(random_state)
move_id_mask = _prepare_cleanup(training, testing)
# While there are still triples that should be moved to the training set
while move_id_mask.any():
# Pick a random triple to move over to the training triples
idx = random_state.choice(move_id_mask.nonzero()[0])
training = np.concatenate([training, testing[idx].reshape(1, -1)])
# Recalculate the testing triples without that index
testing_mask = np.ones_like(move_id_mask)
testing_mask[idx] = False
testing = testing[testing_mask]
# Recalculate the training entities, testing entities, to_move, and move_id_mask
move_id_mask = _prepare_cleanup(training, testing)
return training, testing
|
https://github.com/pykeen/pykeen/issues/146
|
Traceback (most recent call last):
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 49, in <module>
main()
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 36, in main
testing.new_with_restriction(relations=evaluation_relation_whitelist)
File "/Users/cthoyt/dev/pykeen/src/pykeen/triples/triples_factory.py", line 641, in new_with_restriction
relations = list(relations) + list(map(self.relation_to_inverse.__getitem__, relations))
KeyError: 'accusation_inverse'
|
KeyError
|
def _prepare_cleanup(
training: MappedTriples,
testing: MappedTriples,
max_ids: Optional[Tuple[int, int]] = None,
) -> torch.BoolTensor:
"""
Calculate a mask for the test triples with triples containing test-only entities or relations.
:param training: shape: (n, 3)
The training triples.
:param testing: shape: (m, 3)
The testing triples.
:return: shape: (m,)
The move mask.
"""
# base cases
if len(testing) == 0:
return torch.empty(0, dtype=torch.bool)
if len(training) == 0:
return torch.ones(testing.shape[0], dtype=torch.bool)
columns = [[0, 2], [1]]
to_move_mask = torch.zeros(1, dtype=torch.bool)
if max_ids is None:
max_ids = [
max(training[:, col].max().item(), testing[:, col].max().item()) + 1
for col in columns
]
for col, max_id in zip(columns, max_ids):
# IDs not in training
not_in_training_mask = torch.ones(max_id, dtype=torch.bool)
not_in_training_mask[training[:, col].view(-1)] = False
# triples with exclusive test IDs
exclusive_triples = (
not_in_training_mask[testing[:, col].view(-1)]
.view(-1, len(col))
.any(dim=-1)
)
to_move_mask = to_move_mask | exclusive_triples
return to_move_mask
|
def _prepare_cleanup(training: np.ndarray, testing: np.ndarray) -> np.ndarray:
to_move_mask = None
for col in [[0, 2], 1]:
training_ids, test_ids = [
np.unique(triples[:, col]) for triples in [training, testing]
]
to_move = test_ids[~np.isin(test_ids, training_ids)]
this_to_move_mask = np.isin(testing[:, col], to_move)
if this_to_move_mask.ndim > 1:
this_to_move_mask = this_to_move_mask.any(axis=1)
if to_move_mask is None:
to_move_mask = this_to_move_mask
else:
to_move_mask = this_to_move_mask | to_move_mask
return to_move_mask
|
https://github.com/pykeen/pykeen/issues/146
|
Traceback (most recent call last):
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 49, in <module>
main()
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 36, in main
testing.new_with_restriction(relations=evaluation_relation_whitelist)
File "/Users/cthoyt/dev/pykeen/src/pykeen/triples/triples_factory.py", line 641, in new_with_restriction
relations = list(relations) + list(map(self.relation_to_inverse.__getitem__, relations))
KeyError: 'accusation_inverse'
|
KeyError
|
def __init__(
self,
*,
path: Union[None, str, TextIO] = None,
triples: Optional[LabeledTriples] = None,
path_to_numeric_triples: Union[None, str, TextIO] = None,
numeric_triples: Optional[np.ndarray] = None,
**kwargs,
) -> None:
"""Initialize the multi-modal triples factory.
:param path: The path to a 3-column TSV file with triples in it. If not specified,
you should specify ``triples``.
:param triples: A 3-column numpy array with triples in it. If not specified,
you should specify ``path``
:param path_to_numeric_triples: The path to a 3-column TSV file with triples and
numeric. If not specified, you should specify ``numeric_triples``.
:param numeric_triples: A 3-column numpy array with numeric triples in it. If not
specified, you should specify ``path_to_numeric_triples``.
"""
if path is None:
base = TriplesFactory.from_labeled_triples(triples=triples, **kwargs)
else:
base = TriplesFactory.from_path(path=path, **kwargs)
super().__init__(
entity_to_id=base.entity_to_id,
relation_to_id=base.relation_to_id,
mapped_triples=base.mapped_triples,
create_inverse_triples=base.create_inverse_triples,
)
if path_to_numeric_triples is None and numeric_triples is None:
raise ValueError(
"Must specify one of path_to_numeric_triples or numeric_triples"
)
elif path_to_numeric_triples is not None and numeric_triples is not None:
raise ValueError(
"Must not specify both path_to_numeric_triples and numeric_triples"
)
elif path_to_numeric_triples is not None:
numeric_triples = load_triples(path_to_numeric_triples)
self.numeric_literals, self.literals_to_id = create_matrix_of_literals(
numeric_triples=numeric_triples,
entity_to_id=self.entity_to_id,
)
|
def __init__(
self,
*,
path: Union[None, str, TextIO] = None,
triples: Optional[LabeledTriples] = None,
path_to_numeric_triples: Union[None, str, TextIO] = None,
numeric_triples: Optional[np.ndarray] = None,
**kwargs,
) -> None:
"""Initialize the multi-modal triples factory.
:param path: The path to a 3-column TSV file with triples in it. If not specified,
you should specify ``triples``.
:param triples: A 3-column numpy array with triples in it. If not specified,
you should specify ``path``
:param path_to_numeric_triples: The path to a 3-column TSV file with triples and
numeric. If not specified, you should specify ``numeric_triples``.
:param numeric_triples: A 3-column numpy array with numeric triples in it. If not
specified, you should specify ``path_to_numeric_triples``.
"""
if path is None:
base = TriplesFactory.from_labeled_triples(triples=triples, **kwargs)
else:
base = TriplesFactory.from_path(path=path, **kwargs)
super().__init__(
entity_to_id=base.entity_to_id,
relation_to_id=base.relation_to_id,
_triples=base.triples,
mapped_triples=base.mapped_triples,
relation_to_inverse=base.relation_to_inverse,
)
if path_to_numeric_triples is None and numeric_triples is None:
raise ValueError(
"Must specify one of path_to_numeric_triples or numeric_triples"
)
elif path_to_numeric_triples is not None and numeric_triples is not None:
raise ValueError(
"Must not specify both path_to_numeric_triples and numeric_triples"
)
elif path_to_numeric_triples is not None:
numeric_triples = load_triples(path_to_numeric_triples)
self.numeric_literals, self.literals_to_id = create_matrix_of_literals(
numeric_triples=numeric_triples,
entity_to_id=self.entity_to_id,
)
|
https://github.com/pykeen/pykeen/issues/146
|
Traceback (most recent call last):
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 49, in <module>
main()
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 36, in main
testing.new_with_restriction(relations=evaluation_relation_whitelist)
File "/Users/cthoyt/dev/pykeen/src/pykeen/triples/triples_factory.py", line 641, in new_with_restriction
relations = list(relations) + list(map(self.relation_to_inverse.__getitem__, relations))
KeyError: 'accusation_inverse'
|
KeyError
|
def get_entities(triples: torch.LongTensor) -> Set[int]:
"""Get all entities from the triples."""
return set(triples[:, [0, 2]].flatten().tolist())
|
def get_entities(triples) -> Set:
"""Get all entities from the triples."""
return set(triples[:, [0, 2]].flatten().tolist())
|
https://github.com/pykeen/pykeen/issues/146
|
Traceback (most recent call last):
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 49, in <module>
main()
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 36, in main
testing.new_with_restriction(relations=evaluation_relation_whitelist)
File "/Users/cthoyt/dev/pykeen/src/pykeen/triples/triples_factory.py", line 641, in new_with_restriction
relations = list(relations) + list(map(self.relation_to_inverse.__getitem__, relations))
KeyError: 'accusation_inverse'
|
KeyError
|
def get_relations(triples: torch.LongTensor) -> Set[int]:
"""Get all relations from the triples."""
return set(triples[:, 1].tolist())
|
def get_relations(triples) -> Set:
"""Get all relations from the triples."""
return set(triples[:, 1])
|
https://github.com/pykeen/pykeen/issues/146
|
Traceback (most recent call last):
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 49, in <module>
main()
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 36, in main
testing.new_with_restriction(relations=evaluation_relation_whitelist)
File "/Users/cthoyt/dev/pykeen/src/pykeen/triples/triples_factory.py", line 641, in new_with_restriction
relations = list(relations) + list(map(self.relation_to_inverse.__getitem__, relations))
KeyError: 'accusation_inverse'
|
KeyError
|
def invert_mapping(mapping: Mapping[K, V]) -> Mapping[V, K]:
"""
Invert a mapping.
:param mapping:
The mapping, key -> value.
:return:
The inverse mapping, value -> key.
"""
num_unique_values = len(set(mapping.values()))
num_keys = len(mapping)
if num_unique_values < num_keys:
raise ValueError(
f"Mapping is not bijective! Only {num_unique_values}/{num_keys} are unique."
)
return {value: key for key, value in mapping.items()}
|
def invert_mapping(mapping: Mapping[str, int]) -> Mapping[int, str]:
"""
Invert a mapping.
:param mapping:
The mapping, key -> value.
:return:
The inverse mapping, value -> key.
"""
num_unique_values = len(set(mapping.values()))
num_keys = len(mapping)
if num_unique_values < num_keys:
raise ValueError(
f"Mapping is not bijective! Only {num_unique_values}/{num_keys} are unique."
)
return {value: key for key, value in mapping.items()}
|
https://github.com/pykeen/pykeen/issues/146
|
Traceback (most recent call last):
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 49, in <module>
main()
File "/Users/cthoyt/dev/pykeen/scratch/tst.py", line 36, in main
testing.new_with_restriction(relations=evaluation_relation_whitelist)
File "/Users/cthoyt/dev/pykeen/src/pykeen/triples/triples_factory.py", line 641, in new_with_restriction
relations = list(relations) + list(map(self.relation_to_inverse.__getitem__, relations))
KeyError: 'accusation_inverse'
|
KeyError
|
def prepare_ablation_from_config(
config: Mapping[str, Any], directory: str, save_artifacts: bool
):
"""Prepare a set of ablation study directories."""
metadata = config["metadata"]
optuna_config = config["optuna"]
ablation_config = config["ablation"]
evaluator = ablation_config["evaluator"]
evaluator_kwargs = ablation_config["evaluator_kwargs"]
evaluation_kwargs = ablation_config["evaluation_kwargs"]
it = itt.product(
ablation_config["datasets"],
ablation_config["create_inverse_triples"],
ablation_config["models"],
ablation_config["loss_functions"],
ablation_config["regularizers"],
ablation_config["optimizers"],
ablation_config["training_loops"],
)
directories = []
for counter, (
dataset,
create_inverse_triples,
model,
loss,
regularizer,
optimizer,
training_loop,
) in enumerate(it):
experiment_name = (
f"{counter:04d}_{normalize_string(dataset)}_{normalize_string(model)}"
)
output_directory = os.path.join(directory, experiment_name)
os.makedirs(output_directory, exist_ok=True)
# TODO what happens if already exists?
_experiment_optuna_config = optuna_config.copy()
_experiment_optuna_config["storage"] = (
f"sqlite:///{output_directory}/optuna_results.db"
)
if save_artifacts:
save_model_directory = os.path.join(output_directory, "artifacts")
os.makedirs(save_model_directory, exist_ok=True)
_experiment_optuna_config["save_model_directory"] = save_model_directory
hpo_config = dict()
for retain_key in ("stopper", "stopper_kwargs"):
if retain_key in ablation_config:
logger.info(f"Retaining {retain_key} configuration in HPO")
hpo_config[retain_key] = deepcopy(ablation_config[retain_key])
for error_key in ("early_stopping", "early_stopping_kwargs"):
if error_key in ablation_config:
raise ValueError(f"Outdated key: {error_key}. Please update")
# TODO incorporate setting of random seed
# pipeline_kwargs=dict(
# random_seed=random_non_negative_int(),
# ),
def _set_arguments(key: str, value: str) -> None:
"""Set argument and its values."""
d = {key: value}
kwargs = ablation_config[f"{key}_kwargs"][model][value]
if kwargs:
d[f"{key}_kwargs"] = kwargs
kwargs_ranges = ablation_config[f"{key}_kwargs_ranges"][model][value]
if kwargs_ranges:
d[f"{key}_kwargs_ranges"] = kwargs_ranges
hpo_config.update(d)
# Add dataset to current_pipeline
hpo_config["dataset"] = dataset
logger.info(f"Dataset: {dataset}")
hpo_config["dataset_kwargs"] = dict(
create_inverse_triples=create_inverse_triples
)
logger.info(f"Add inverse triples: {create_inverse_triples}")
hpo_config["model"] = model
model_kwargs = ablation_config["model_kwargs"][model]
if model_kwargs:
hpo_config["model_kwargs"] = ablation_config["model_kwargs"][model]
hpo_config["model_kwargs_ranges"] = ablation_config["model_kwargs_ranges"][
model
]
logger.info(f"Model: {model}")
# Add loss function to current_pipeline
_set_arguments(key="loss", value=loss)
logger.info(f"Loss function: {loss}")
# Add regularizer to current_pipeline
_set_arguments(key="regularizer", value=regularizer)
logger.info(f"Regularizer: {regularizer}")
# Add optimizer to current_pipeline
_set_arguments(key="optimizer", value=optimizer)
logger.info(f"Optimizer: {optimizer}")
# Add training approach to current_pipeline
hpo_config["training_loop"] = training_loop
logger.info(f"Training loop: {training_loop}")
if normalize_string(training_loop, suffix=_TRAINING_LOOP_SUFFIX) == "slcwa":
negative_sampler = ablation_config["negative_sampler"]
_set_arguments(key="negative_sampler", value=negative_sampler)
logger.info(f"Negative sampler: {negative_sampler}")
# Add training kwargs and kwargs_ranges
training_kwargs = ablation_config["training_kwargs"][model][training_loop]
if training_kwargs:
hpo_config["training_kwargs"] = training_kwargs
hpo_config["training_kwargs_ranges"] = ablation_config[
"training_kwargs_ranges"
][model][training_loop]
# Add evaluation
hpo_config["evaluator"] = evaluator
if evaluator_kwargs:
hpo_config["evaluator_kwargs"] = evaluator_kwargs
hpo_config["evaluation_kwargs"] = evaluation_kwargs
logger.info(f"Evaluator: {evaluator}")
rv_config = dict(
type="hpo",
metadata=metadata,
pipeline=hpo_config,
optuna=_experiment_optuna_config,
)
rv_config_path = os.path.join(output_directory, "hpo_config.json")
with open(rv_config_path, "w") as file:
json.dump(rv_config, file, indent=2, ensure_ascii=True)
directories.append((output_directory, rv_config_path))
return directories
|
def prepare_ablation_from_config(
config: Mapping[str, Any], directory: str, save_artifacts: bool
):
"""Prepare a set of ablation study directories."""
metadata = config["metadata"]
optuna_config = config["optuna"]
ablation_config = config["ablation"]
evaluator = ablation_config["evaluator"]
evaluator_kwargs = ablation_config["evaluator_kwargs"]
evaluation_kwargs = ablation_config["evaluation_kwargs"]
it = itt.product(
ablation_config["datasets"],
ablation_config["create_inverse_triples"],
ablation_config["models"],
ablation_config["loss_functions"],
ablation_config["regularizers"],
ablation_config["optimizers"],
ablation_config["training_loops"],
)
directories = []
for counter, (
dataset,
create_inverse_triples,
model,
loss,
regularizer,
optimizer,
training_loop,
) in enumerate(it):
experiment_name = (
f"{counter:04d}_{normalize_string(dataset)}_{normalize_string(model)}"
)
output_directory = os.path.join(directory, experiment_name)
os.makedirs(output_directory, exist_ok=True)
# TODO what happens if already exists?
_experiment_optuna_config = optuna_config.copy()
_experiment_optuna_config["storage"] = (
f"sqlite:///{output_directory}/optuna_results.db"
)
if save_artifacts:
save_model_directory = os.path.join(output_directory, "artifacts")
os.makedirs(save_model_directory, exist_ok=True)
_experiment_optuna_config["save_model_directory"] = save_model_directory
hpo_config = dict()
for retain_key in ("stopper", "stopper_kwargs"):
if retain_key in ablation_config:
logger.info(f"Retaining {retain_key} configuration in HPO")
hpo_config[retain_key] = deepcopy(ablation_config[retain_key])
for error_key in ("early_stopping", "early_stopping_kwargs"):
if error_key in ablation_config:
raise ValueError(f"Outdated key: {error_key}. Please update")
# TODO incorporate setting of random seed
# pipeline_kwargs=dict(
# random_seed=random.randint(1, 2 ** 32 - 1),
# ),
def _set_arguments(key: str, value: str) -> None:
"""Set argument and its values."""
d = {key: value}
kwargs = ablation_config[f"{key}_kwargs"][model][value]
if kwargs:
d[f"{key}_kwargs"] = kwargs
kwargs_ranges = ablation_config[f"{key}_kwargs_ranges"][model][value]
if kwargs_ranges:
d[f"{key}_kwargs_ranges"] = kwargs_ranges
hpo_config.update(d)
# Add dataset to current_pipeline
hpo_config["dataset"] = dataset
logger.info(f"Dataset: {dataset}")
hpo_config["dataset_kwargs"] = dict(
create_inverse_triples=create_inverse_triples
)
logger.info(f"Add inverse triples: {create_inverse_triples}")
hpo_config["model"] = model
model_kwargs = ablation_config["model_kwargs"][model]
if model_kwargs:
hpo_config["model_kwargs"] = ablation_config["model_kwargs"][model]
hpo_config["model_kwargs_ranges"] = ablation_config["model_kwargs_ranges"][
model
]
logger.info(f"Model: {model}")
# Add loss function to current_pipeline
_set_arguments(key="loss", value=loss)
logger.info(f"Loss function: {loss}")
# Add regularizer to current_pipeline
_set_arguments(key="regularizer", value=regularizer)
logger.info(f"Regularizer: {regularizer}")
# Add optimizer to current_pipeline
_set_arguments(key="optimizer", value=optimizer)
logger.info(f"Optimizer: {optimizer}")
# Add training approach to current_pipeline
hpo_config["training_loop"] = training_loop
logger.info(f"Training loop: {training_loop}")
if normalize_string(training_loop, suffix=_TRAINING_LOOP_SUFFIX) == "slcwa":
negative_sampler = ablation_config["negative_sampler"]
_set_arguments(key="negative_sampler", value=negative_sampler)
logger.info(f"Negative sampler: {negative_sampler}")
# Add training kwargs and kwargs_ranges
training_kwargs = ablation_config["training_kwargs"][model][training_loop]
if training_kwargs:
hpo_config["training_kwargs"] = training_kwargs
hpo_config["training_kwargs_ranges"] = ablation_config[
"training_kwargs_ranges"
][model][training_loop]
# Add evaluation
hpo_config["evaluator"] = evaluator
if evaluator_kwargs:
hpo_config["evaluator_kwargs"] = evaluator_kwargs
hpo_config["evaluation_kwargs"] = evaluation_kwargs
logger.info(f"Evaluator: {evaluator}")
rv_config = dict(
type="hpo",
metadata=metadata,
pipeline=hpo_config,
optuna=_experiment_optuna_config,
)
rv_config_path = os.path.join(output_directory, "hpo_config.json")
with open(rv_config_path, "w") as file:
json.dump(rv_config, file, indent=2, ensure_ascii=True)
directories.append((output_directory, rv_config_path))
return directories
|
https://github.com/pykeen/pykeen/issues/92
|
ValueError Traceback (most recent call last)
<ipython-input-13-ef15ccdc9011> in <module>
3
4 tf = TriplesFactory(path=work_path + '/eucalyptus_triplets.txt')
----> 5 training, testing = tf.split()
6
7 pipeline_result = pipeline(
~\anaconda3\envs\pykeen\lib\site-packages\pykeen\triples\triples_factory.py in split(self, ratios, random_state, randomize_cleanup)
419 idx = np.arange(n_triples)
420 if random_state is None:
--> 421 random_state = np.random.randint(0, 2 ** 32 - 1)
422 logger.warning(f'Using random_state={random_state} to split {self}')
423 if isinstance(random_state, int):
mtrand.pyx in numpy.random.mtrand.RandomState.randint()
_bounded_integers.pyx in numpy.random._bounded_integers._rand_int32()
ValueError: high is out of bounds for int32
|
ValueError
|
def main(
path: str,
directory: str,
test_ratios,
no_validation: bool,
validation_ratios,
reload,
seed,
):
"""Make a dataset from the given triples."""
os.makedirs(directory, exist_ok=True)
triples_factory = TriplesFactory(path=path)
ratios = test_ratios if no_validation else validation_ratios
if seed is None:
seed = random_non_negative_int()
sub_triples_factories = triples_factory.split(ratios, random_state=seed)
for subset_name, subset_tf in zip(LABELS, sub_triples_factories):
output_path = os.path.join(directory, f"{subset_name}.txt")
click.echo(f"Outputing {subset_name} to {output_path}")
np.savetxt(output_path, subset_tf.triples, delimiter="\t", fmt="%s")
metadata = dict(
source=os.path.abspath(path),
ratios=dict(zip(LABELS, ratios)),
seed=seed,
)
with open(os.path.join(directory, "metadata.json"), "w") as file:
json.dump(metadata, file, indent=2)
if reload:
if no_validation:
click.secho(
"Can not load as dataset if --no-validation was flagged.", fg="red"
)
return
d = PathDataSet(
training_path=os.path.join(directory, "train.txt"),
testing_path=os.path.join(directory, "test.txt"),
validation_path=os.path.join(directory, "valid.txt"),
eager=True,
)
print(d)
|
def main(
path: str,
directory: str,
test_ratios,
no_validation: bool,
validation_ratios,
reload,
seed,
):
"""Make a dataset from the given triples."""
os.makedirs(directory, exist_ok=True)
triples_factory = TriplesFactory(path=path)
ratios = test_ratios if no_validation else validation_ratios
if seed is None:
seed = np.random.randint(0, 2**32 - 1)
sub_triples_factories = triples_factory.split(ratios, random_state=seed)
for subset_name, subset_tf in zip(LABELS, sub_triples_factories):
output_path = os.path.join(directory, f"{subset_name}.txt")
click.echo(f"Outputing {subset_name} to {output_path}")
np.savetxt(output_path, subset_tf.triples, delimiter="\t", fmt="%s")
metadata = dict(
source=os.path.abspath(path),
ratios=dict(zip(LABELS, ratios)),
seed=seed,
)
with open(os.path.join(directory, "metadata.json"), "w") as file:
json.dump(metadata, file, indent=2)
if reload:
if no_validation:
click.secho(
"Can not load as dataset if --no-validation was flagged.", fg="red"
)
return
d = PathDataSet(
training_path=os.path.join(directory, "train.txt"),
testing_path=os.path.join(directory, "test.txt"),
validation_path=os.path.join(directory, "valid.txt"),
eager=True,
)
print(d)
|
https://github.com/pykeen/pykeen/issues/92
|
ValueError Traceback (most recent call last)
<ipython-input-13-ef15ccdc9011> in <module>
3
4 tf = TriplesFactory(path=work_path + '/eucalyptus_triplets.txt')
----> 5 training, testing = tf.split()
6
7 pipeline_result = pipeline(
~\anaconda3\envs\pykeen\lib\site-packages\pykeen\triples\triples_factory.py in split(self, ratios, random_state, randomize_cleanup)
419 idx = np.arange(n_triples)
420 if random_state is None:
--> 421 random_state = np.random.randint(0, 2 ** 32 - 1)
422 logger.warning(f'Using random_state={random_state} to split {self}')
423 if isinstance(random_state, int):
mtrand.pyx in numpy.random.mtrand.RandomState.randint()
_bounded_integers.pyx in numpy.random._bounded_integers._rand_int32()
ValueError: high is out of bounds for int32
|
ValueError
|
def pipeline( # noqa: C901
*,
# 1. Dataset
dataset: Union[None, str, Type[DataSet]] = None,
dataset_kwargs: Optional[Mapping[str, Any]] = None,
training_triples_factory: Optional[TriplesFactory] = None,
testing_triples_factory: Optional[TriplesFactory] = None,
validation_triples_factory: Optional[TriplesFactory] = None,
evaluation_entity_whitelist: Optional[Collection[str]] = None,
evaluation_relation_whitelist: Optional[Collection[str]] = None,
# 2. Model
model: Union[str, Type[Model]],
model_kwargs: Optional[Mapping[str, Any]] = None,
# 3. Loss
loss: Union[None, str, Type[Loss]] = None,
loss_kwargs: Optional[Mapping[str, Any]] = None,
# 4. Regularizer
regularizer: Union[None, str, Type[Regularizer]] = None,
regularizer_kwargs: Optional[Mapping[str, Any]] = None,
# 5. Optimizer
optimizer: Union[None, str, Type[Optimizer]] = None,
optimizer_kwargs: Optional[Mapping[str, Any]] = None,
clear_optimizer: bool = True,
# 6. Training Loop
training_loop: Union[None, str, Type[TrainingLoop]] = None,
negative_sampler: Union[None, str, Type[NegativeSampler]] = None,
negative_sampler_kwargs: Optional[Mapping[str, Any]] = None,
# 7. Training (ronaldo style)
training_kwargs: Optional[Mapping[str, Any]] = None,
stopper: Union[None, str, Type[Stopper]] = None,
stopper_kwargs: Optional[Mapping[str, Any]] = None,
# 8. Evaluation
evaluator: Union[None, str, Type[Evaluator]] = None,
evaluator_kwargs: Optional[Mapping[str, Any]] = None,
evaluation_kwargs: Optional[Mapping[str, Any]] = None,
# 9. Tracking
result_tracker: Union[None, str, Type[ResultTracker]] = None,
result_tracker_kwargs: Optional[Mapping[str, Any]] = None,
# Misc
metadata: Optional[Dict[str, Any]] = None,
device: Union[None, str, torch.device] = None,
random_seed: Optional[int] = None,
use_testing_data: bool = True,
) -> PipelineResult:
"""Train and evaluate a model.
:param dataset:
The name of the dataset (a key from :data:`pykeen.datasets.datasets`) or the :class:`pykeen.datasets.DataSet`
instance. Alternatively, the ``training_triples_factory`` and ``testing_triples_factory`` can be specified.
:param dataset_kwargs:
The keyword arguments passed to the dataset upon instantiation
:param training_triples_factory:
A triples factory with training instances if a dataset was not specified
:param testing_triples_factory:
A triples factory with training instances if a dataset was not specified
:param validation_triples_factory:
A triples factory with validation instances if a dataset was not specified
:param evaluation_entity_whitelist:
Optional restriction of evaluation to triples containing *only* these entities. Useful if the downstream task
is only interested in certain entities, but the relational patterns with other entities improve the entity
embedding quality.
:param evaluation_relation_whitelist:
Optional restriction of evaluation to triples containing *only* these relations. Useful if the downstream task
is only interested in certain relation, but the relational patterns with other relations improve the entity
embedding quality.
:param model:
The name of the model or the model class
:param model_kwargs:
Keyword arguments to pass to the model class on instantiation
:param loss:
The name of the loss or the loss class.
:param loss_kwargs:
Keyword arguments to pass to the loss on instantiation
:param regularizer:
The name of the regularizer or the regularizer class.
:param regularizer_kwargs:
Keyword arguments to pass to the regularizer on instantiation
:param optimizer:
The name of the optimizer or the optimizer class. Defaults to :class:`torch.optim.Adagrad`.
:param optimizer_kwargs:
Keyword arguments to pass to the optimizer on instantiation
:param clear_optimizer:
Whether to delete the optimizer instance after training. As the optimizer might have additional memory
consumption due to e.g. moments in Adam, this is the default option. If you want to continue training, you
should set it to False, as the optimizer's internal parameter will get lost otherwise.
:param training_loop:
The name of the training loop's training approach (``'slcwa'`` or ``'lcwa'``) or the training loop class.
Defaults to :class:`pykeen.training.SLCWATrainingLoop`.
:param negative_sampler:
The name of the negative sampler (``'basic'`` or ``'bernoulli'``) or the negative sampler class.
Only allowed when training with sLCWA.
Defaults to :class:`pykeen.sampling.BasicNegativeSampler`.
:param negative_sampler_kwargs:
Keyword arguments to pass to the negative sampler class on instantiation
:param training_kwargs:
Keyword arguments to pass to the training loop's train function on call
:param stopper:
What kind of stopping to use. Default to no stopping, can be set to 'early'.
:param stopper_kwargs:
Keyword arguments to pass to the stopper upon instantiation.
:param evaluator:
The name of the evaluator or an evaluator class. Defaults to :class:`pykeen.evaluation.RankBasedEvaluator`.
:param evaluator_kwargs:
Keyword arguments to pass to the evaluator on instantiation
:param evaluation_kwargs:
Keyword arguments to pass to the evaluator's evaluate function on call
:param result_tracker:
The ResultsTracker class or name
:param result_tracker_kwargs:
The keyword arguments passed to the results tracker on instantiation
:param metadata:
A JSON dictionary to store with the experiment
:param use_testing_data:
If true, use the testing triples. Otherwise, use the validation triples. Defaults to true - use testing triples.
"""
if random_seed is None:
random_seed = random_non_negative_int()
logger.warning(f"No random seed is specified. Setting to {random_seed}.")
set_random_seed(random_seed)
result_tracker_cls: Type[ResultTracker] = get_result_tracker_cls(result_tracker)
result_tracker = result_tracker_cls(**(result_tracker_kwargs or {}))
if not metadata:
metadata = {}
title = metadata.get("title")
# Start tracking
result_tracker.start_run(run_name=title)
device = resolve_device(device)
result_tracker.log_params(dict(dataset=dataset))
training_triples_factory, testing_triples_factory, validation_triples_factory = (
get_dataset(
dataset=dataset,
dataset_kwargs=dataset_kwargs,
training_triples_factory=training_triples_factory,
testing_triples_factory=testing_triples_factory,
validation_triples_factory=validation_triples_factory,
)
)
# evaluation restriction to a subset of entities/relations
if any(
f is not None
for f in (evaluation_entity_whitelist, evaluation_relation_whitelist)
):
testing_triples_factory = testing_triples_factory.new_with_restriction(
entities=evaluation_entity_whitelist,
relations=evaluation_relation_whitelist,
)
if validation_triples_factory is not None:
validation_triples_factory = (
validation_triples_factory.new_with_restriction(
entities=evaluation_entity_whitelist,
relations=evaluation_relation_whitelist,
)
)
if model_kwargs is None:
model_kwargs = {}
model_kwargs.update(preferred_device=device)
model_kwargs.setdefault("random_seed", random_seed)
if regularizer is not None:
# FIXME this should never happen.
if "regularizer" in model_kwargs:
logger.warning(
"Can not specify regularizer in kwargs and model_kwargs. removing from model_kwargs"
)
del model_kwargs["regularizer"]
regularizer_cls: Type[Regularizer] = get_regularizer_cls(regularizer)
model_kwargs["regularizer"] = regularizer_cls(
device=device,
**(regularizer_kwargs or {}),
)
if loss is not None:
if "loss" in model_kwargs: # FIXME
logger.warning(
"duplicate loss in kwargs and model_kwargs. removing from model_kwargs"
)
del model_kwargs["loss"]
loss_cls = get_loss_cls(loss)
_loss = loss_cls(**(loss_kwargs or {}))
model_kwargs.setdefault("loss", _loss)
model = get_model_cls(model)
model_instance: Model = model(
triples_factory=training_triples_factory,
**model_kwargs,
)
# Log model parameters
result_tracker.log_params(
params=dict(cls=model.__name__, kwargs=model_kwargs), prefix="model"
)
optimizer = get_optimizer_cls(optimizer)
training_loop = get_training_loop_cls(training_loop)
if optimizer_kwargs is None:
optimizer_kwargs = {}
# Log optimizer parameters
result_tracker.log_params(
params=dict(cls=optimizer.__name__, kwargs=optimizer_kwargs), prefix="optimizer"
)
optimizer_instance = optimizer(
params=model_instance.get_grad_params(),
**optimizer_kwargs,
)
result_tracker.log_params(
params=dict(cls=training_loop.__name__), prefix="training_loop"
)
if negative_sampler is None:
training_loop_instance: TrainingLoop = training_loop(
model=model_instance,
optimizer=optimizer_instance,
)
elif training_loop is not SLCWATrainingLoop:
raise ValueError("Can not specify negative sampler with LCWA")
else:
negative_sampler = get_negative_sampler_cls(negative_sampler)
result_tracker.log_params(
params=dict(cls=negative_sampler.__name__, kwargs=negative_sampler_kwargs),
prefix="negative_sampler",
)
training_loop_instance: TrainingLoop = SLCWATrainingLoop(
model=model_instance,
optimizer=optimizer_instance,
negative_sampler_cls=negative_sampler,
negative_sampler_kwargs=negative_sampler_kwargs,
)
evaluator = get_evaluator_cls(evaluator)
evaluator_instance: Evaluator = evaluator(
**(evaluator_kwargs or {}),
)
if evaluation_kwargs is None:
evaluation_kwargs = {}
if training_kwargs is None:
training_kwargs = {}
# Stopping
if "stopper" in training_kwargs and stopper is not None:
raise ValueError("Specified stopper in training_kwargs and as stopper")
if "stopper" in training_kwargs:
stopper = training_kwargs.pop("stopper")
if stopper_kwargs is None:
stopper_kwargs = {}
# Load the evaluation batch size for the stopper, if it has been set
_evaluation_batch_size = evaluation_kwargs.get("batch_size")
if _evaluation_batch_size is not None:
stopper_kwargs.setdefault("evaluation_batch_size", _evaluation_batch_size)
# By default there's a stopper that does nothing interesting
stopper_cls: Type[Stopper] = get_stopper_cls(stopper)
stopper: Stopper = stopper_cls(
model=model_instance,
evaluator=evaluator_instance,
evaluation_triples_factory=validation_triples_factory,
result_tracker=result_tracker,
**stopper_kwargs,
)
training_kwargs.setdefault("num_epochs", 5)
training_kwargs.setdefault("batch_size", 256)
result_tracker.log_params(params=training_kwargs, prefix="training")
# Add logging for debugging
logging.debug("Run Pipeline based on following config:")
logging.debug(f"dataset: {dataset}")
logging.debug(f"dataset_kwargs: {dataset_kwargs}")
logging.debug(f"model: {model}")
logging.debug(f"model_kwargs: {model_kwargs}")
logging.debug(f"loss: {loss}")
logging.debug(f"loss_kwargs: {loss_kwargs}")
logging.debug(f"regularizer: {regularizer}")
logging.debug(f"regularizer_kwargs: {regularizer_kwargs}")
logging.debug(f"optimizer: {optimizer}")
logging.debug(f"optimizer_kwargs: {optimizer_kwargs}")
logging.debug(f"training_loop: {training_loop}")
logging.debug(f"negative_sampler: {negative_sampler}")
logging.debug(f"_negative_sampler_kwargs: {negative_sampler_kwargs}")
logging.debug(f"_training_kwargs: {training_kwargs}")
logging.debug(f"stopper: {stopper}")
logging.debug(f"stopper_kwargs: {stopper_kwargs}")
logging.debug(f"evaluator: {evaluator}")
logging.debug(f"evaluator_kwargs: {evaluator_kwargs}")
# Train like Cristiano Ronaldo
training_start_time = time.time()
losses = training_loop_instance.train(
stopper=stopper,
result_tracker=result_tracker,
clear_optimizer=clear_optimizer,
**training_kwargs,
)
training_end_time = time.time() - training_start_time
if use_testing_data:
mapped_triples = testing_triples_factory.mapped_triples
else:
mapped_triples = validation_triples_factory.mapped_triples
# Evaluate
# Reuse optimal evaluation parameters from training if available
if (
evaluator_instance.batch_size is not None
or evaluator_instance.slice_size is not None
):
evaluation_kwargs["batch_size"] = evaluator_instance.batch_size
evaluation_kwargs["slice_size"] = evaluator_instance.slice_size
# Add logging about evaluator for debugging
logging.debug("Evaluation will be run with following parameters:")
logging.debug(f"evaluation_kwargs: {evaluation_kwargs}")
evaluate_start_time = time.time()
metric_results: MetricResults = evaluator_instance.evaluate(
model=model_instance,
mapped_triples=mapped_triples,
**evaluation_kwargs,
)
evaluate_end_time = time.time() - evaluate_start_time
result_tracker.log_metrics(
metrics=metric_results.to_dict(),
step=training_kwargs.get("num_epochs"),
)
result_tracker.end_run()
return PipelineResult(
random_seed=random_seed,
model=model_instance,
training_loop=training_loop_instance,
losses=losses,
stopper=stopper,
metric_results=metric_results,
metadata=metadata,
train_seconds=training_end_time,
evaluate_seconds=evaluate_end_time,
)
|
def pipeline( # noqa: C901
*,
# 1. Dataset
dataset: Union[None, str, Type[DataSet]] = None,
dataset_kwargs: Optional[Mapping[str, Any]] = None,
training_triples_factory: Optional[TriplesFactory] = None,
testing_triples_factory: Optional[TriplesFactory] = None,
validation_triples_factory: Optional[TriplesFactory] = None,
evaluation_entity_whitelist: Optional[Collection[str]] = None,
evaluation_relation_whitelist: Optional[Collection[str]] = None,
# 2. Model
model: Union[str, Type[Model]],
model_kwargs: Optional[Mapping[str, Any]] = None,
# 3. Loss
loss: Union[None, str, Type[Loss]] = None,
loss_kwargs: Optional[Mapping[str, Any]] = None,
# 4. Regularizer
regularizer: Union[None, str, Type[Regularizer]] = None,
regularizer_kwargs: Optional[Mapping[str, Any]] = None,
# 5. Optimizer
optimizer: Union[None, str, Type[Optimizer]] = None,
optimizer_kwargs: Optional[Mapping[str, Any]] = None,
clear_optimizer: bool = True,
# 6. Training Loop
training_loop: Union[None, str, Type[TrainingLoop]] = None,
negative_sampler: Union[None, str, Type[NegativeSampler]] = None,
negative_sampler_kwargs: Optional[Mapping[str, Any]] = None,
# 7. Training (ronaldo style)
training_kwargs: Optional[Mapping[str, Any]] = None,
stopper: Union[None, str, Type[Stopper]] = None,
stopper_kwargs: Optional[Mapping[str, Any]] = None,
# 8. Evaluation
evaluator: Union[None, str, Type[Evaluator]] = None,
evaluator_kwargs: Optional[Mapping[str, Any]] = None,
evaluation_kwargs: Optional[Mapping[str, Any]] = None,
# 9. Tracking
result_tracker: Union[None, str, Type[ResultTracker]] = None,
result_tracker_kwargs: Optional[Mapping[str, Any]] = None,
# Misc
metadata: Optional[Dict[str, Any]] = None,
device: Union[None, str, torch.device] = None,
random_seed: Optional[int] = None,
use_testing_data: bool = True,
) -> PipelineResult:
"""Train and evaluate a model.
:param dataset:
The name of the dataset (a key from :data:`pykeen.datasets.datasets`) or the :class:`pykeen.datasets.DataSet`
instance. Alternatively, the ``training_triples_factory`` and ``testing_triples_factory`` can be specified.
:param dataset_kwargs:
The keyword arguments passed to the dataset upon instantiation
:param training_triples_factory:
A triples factory with training instances if a dataset was not specified
:param testing_triples_factory:
A triples factory with training instances if a dataset was not specified
:param validation_triples_factory:
A triples factory with validation instances if a dataset was not specified
:param evaluation_entity_whitelist:
Optional restriction of evaluation to triples containing *only* these entities. Useful if the downstream task
is only interested in certain entities, but the relational patterns with other entities improve the entity
embedding quality.
:param evaluation_relation_whitelist:
Optional restriction of evaluation to triples containing *only* these relations. Useful if the downstream task
is only interested in certain relation, but the relational patterns with other relations improve the entity
embedding quality.
:param model:
The name of the model or the model class
:param model_kwargs:
Keyword arguments to pass to the model class on instantiation
:param loss:
The name of the loss or the loss class.
:param loss_kwargs:
Keyword arguments to pass to the loss on instantiation
:param regularizer:
The name of the regularizer or the regularizer class.
:param regularizer_kwargs:
Keyword arguments to pass to the regularizer on instantiation
:param optimizer:
The name of the optimizer or the optimizer class. Defaults to :class:`torch.optim.Adagrad`.
:param optimizer_kwargs:
Keyword arguments to pass to the optimizer on instantiation
:param clear_optimizer:
Whether to delete the optimizer instance after training. As the optimizer might have additional memory
consumption due to e.g. moments in Adam, this is the default option. If you want to continue training, you
should set it to False, as the optimizer's internal parameter will get lost otherwise.
:param training_loop:
The name of the training loop's training approach (``'slcwa'`` or ``'lcwa'``) or the training loop class.
Defaults to :class:`pykeen.training.SLCWATrainingLoop`.
:param negative_sampler:
The name of the negative sampler (``'basic'`` or ``'bernoulli'``) or the negative sampler class.
Only allowed when training with sLCWA.
Defaults to :class:`pykeen.sampling.BasicNegativeSampler`.
:param negative_sampler_kwargs:
Keyword arguments to pass to the negative sampler class on instantiation
:param training_kwargs:
Keyword arguments to pass to the training loop's train function on call
:param stopper:
What kind of stopping to use. Default to no stopping, can be set to 'early'.
:param stopper_kwargs:
Keyword arguments to pass to the stopper upon instantiation.
:param evaluator:
The name of the evaluator or an evaluator class. Defaults to :class:`pykeen.evaluation.RankBasedEvaluator`.
:param evaluator_kwargs:
Keyword arguments to pass to the evaluator on instantiation
:param evaluation_kwargs:
Keyword arguments to pass to the evaluator's evaluate function on call
:param result_tracker:
The ResultsTracker class or name
:param result_tracker_kwargs:
The keyword arguments passed to the results tracker on instantiation
:param metadata:
A JSON dictionary to store with the experiment
:param use_testing_data:
If true, use the testing triples. Otherwise, use the validation triples. Defaults to true - use testing triples.
"""
if random_seed is None:
random_seed = random.randint(0, 2**32 - 1)
logger.warning(f"No random seed is specified. Setting to {random_seed}.")
set_random_seed(random_seed)
result_tracker_cls: Type[ResultTracker] = get_result_tracker_cls(result_tracker)
result_tracker = result_tracker_cls(**(result_tracker_kwargs or {}))
if not metadata:
metadata = {}
title = metadata.get("title")
# Start tracking
result_tracker.start_run(run_name=title)
device = resolve_device(device)
result_tracker.log_params(dict(dataset=dataset))
training_triples_factory, testing_triples_factory, validation_triples_factory = (
get_dataset(
dataset=dataset,
dataset_kwargs=dataset_kwargs,
training_triples_factory=training_triples_factory,
testing_triples_factory=testing_triples_factory,
validation_triples_factory=validation_triples_factory,
)
)
# evaluation restriction to a subset of entities/relations
if any(
f is not None
for f in (evaluation_entity_whitelist, evaluation_relation_whitelist)
):
testing_triples_factory = testing_triples_factory.new_with_restriction(
entities=evaluation_entity_whitelist,
relations=evaluation_relation_whitelist,
)
if validation_triples_factory is not None:
validation_triples_factory = (
validation_triples_factory.new_with_restriction(
entities=evaluation_entity_whitelist,
relations=evaluation_relation_whitelist,
)
)
if model_kwargs is None:
model_kwargs = {}
model_kwargs.update(preferred_device=device)
model_kwargs.setdefault("random_seed", random_seed)
if regularizer is not None:
# FIXME this should never happen.
if "regularizer" in model_kwargs:
logger.warning(
"Can not specify regularizer in kwargs and model_kwargs. removing from model_kwargs"
)
del model_kwargs["regularizer"]
regularizer_cls: Type[Regularizer] = get_regularizer_cls(regularizer)
model_kwargs["regularizer"] = regularizer_cls(
device=device,
**(regularizer_kwargs or {}),
)
if loss is not None:
if "loss" in model_kwargs: # FIXME
logger.warning(
"duplicate loss in kwargs and model_kwargs. removing from model_kwargs"
)
del model_kwargs["loss"]
loss_cls = get_loss_cls(loss)
_loss = loss_cls(**(loss_kwargs or {}))
model_kwargs.setdefault("loss", _loss)
model = get_model_cls(model)
model_instance: Model = model(
triples_factory=training_triples_factory,
**model_kwargs,
)
# Log model parameters
result_tracker.log_params(
params=dict(cls=model.__name__, kwargs=model_kwargs), prefix="model"
)
optimizer = get_optimizer_cls(optimizer)
training_loop = get_training_loop_cls(training_loop)
if optimizer_kwargs is None:
optimizer_kwargs = {}
# Log optimizer parameters
result_tracker.log_params(
params=dict(cls=optimizer.__name__, kwargs=optimizer_kwargs), prefix="optimizer"
)
optimizer_instance = optimizer(
params=model_instance.get_grad_params(),
**optimizer_kwargs,
)
result_tracker.log_params(
params=dict(cls=training_loop.__name__), prefix="training_loop"
)
if negative_sampler is None:
training_loop_instance: TrainingLoop = training_loop(
model=model_instance,
optimizer=optimizer_instance,
)
elif training_loop is not SLCWATrainingLoop:
raise ValueError("Can not specify negative sampler with LCWA")
else:
negative_sampler = get_negative_sampler_cls(negative_sampler)
result_tracker.log_params(
params=dict(cls=negative_sampler.__name__, kwargs=negative_sampler_kwargs),
prefix="negative_sampler",
)
training_loop_instance: TrainingLoop = SLCWATrainingLoop(
model=model_instance,
optimizer=optimizer_instance,
negative_sampler_cls=negative_sampler,
negative_sampler_kwargs=negative_sampler_kwargs,
)
evaluator = get_evaluator_cls(evaluator)
evaluator_instance: Evaluator = evaluator(
**(evaluator_kwargs or {}),
)
if evaluation_kwargs is None:
evaluation_kwargs = {}
if training_kwargs is None:
training_kwargs = {}
# Stopping
if "stopper" in training_kwargs and stopper is not None:
raise ValueError("Specified stopper in training_kwargs and as stopper")
if "stopper" in training_kwargs:
stopper = training_kwargs.pop("stopper")
if stopper_kwargs is None:
stopper_kwargs = {}
# Load the evaluation batch size for the stopper, if it has been set
_evaluation_batch_size = evaluation_kwargs.get("batch_size")
if _evaluation_batch_size is not None:
stopper_kwargs.setdefault("evaluation_batch_size", _evaluation_batch_size)
# By default there's a stopper that does nothing interesting
stopper_cls: Type[Stopper] = get_stopper_cls(stopper)
stopper: Stopper = stopper_cls(
model=model_instance,
evaluator=evaluator_instance,
evaluation_triples_factory=validation_triples_factory,
result_tracker=result_tracker,
**stopper_kwargs,
)
training_kwargs.setdefault("num_epochs", 5)
training_kwargs.setdefault("batch_size", 256)
result_tracker.log_params(params=training_kwargs, prefix="training")
# Add logging for debugging
logging.debug("Run Pipeline based on following config:")
logging.debug(f"dataset: {dataset}")
logging.debug(f"dataset_kwargs: {dataset_kwargs}")
logging.debug(f"model: {model}")
logging.debug(f"model_kwargs: {model_kwargs}")
logging.debug(f"loss: {loss}")
logging.debug(f"loss_kwargs: {loss_kwargs}")
logging.debug(f"regularizer: {regularizer}")
logging.debug(f"regularizer_kwargs: {regularizer_kwargs}")
logging.debug(f"optimizer: {optimizer}")
logging.debug(f"optimizer_kwargs: {optimizer_kwargs}")
logging.debug(f"training_loop: {training_loop}")
logging.debug(f"negative_sampler: {negative_sampler}")
logging.debug(f"_negative_sampler_kwargs: {negative_sampler_kwargs}")
logging.debug(f"_training_kwargs: {training_kwargs}")
logging.debug(f"stopper: {stopper}")
logging.debug(f"stopper_kwargs: {stopper_kwargs}")
logging.debug(f"evaluator: {evaluator}")
logging.debug(f"evaluator_kwargs: {evaluator_kwargs}")
# Train like Cristiano Ronaldo
training_start_time = time.time()
losses = training_loop_instance.train(
stopper=stopper,
result_tracker=result_tracker,
clear_optimizer=clear_optimizer,
**training_kwargs,
)
training_end_time = time.time() - training_start_time
if use_testing_data:
mapped_triples = testing_triples_factory.mapped_triples
else:
mapped_triples = validation_triples_factory.mapped_triples
# Evaluate
# Reuse optimal evaluation parameters from training if available
if (
evaluator_instance.batch_size is not None
or evaluator_instance.slice_size is not None
):
evaluation_kwargs["batch_size"] = evaluator_instance.batch_size
evaluation_kwargs["slice_size"] = evaluator_instance.slice_size
# Add logging about evaluator for debugging
logging.debug("Evaluation will be run with following parameters:")
logging.debug(f"evaluation_kwargs: {evaluation_kwargs}")
evaluate_start_time = time.time()
metric_results: MetricResults = evaluator_instance.evaluate(
model=model_instance,
mapped_triples=mapped_triples,
**evaluation_kwargs,
)
evaluate_end_time = time.time() - evaluate_start_time
result_tracker.log_metrics(
metrics=metric_results.to_dict(),
step=training_kwargs.get("num_epochs"),
)
result_tracker.end_run()
return PipelineResult(
random_seed=random_seed,
model=model_instance,
training_loop=training_loop_instance,
losses=losses,
stopper=stopper,
metric_results=metric_results,
metadata=metadata,
train_seconds=training_end_time,
evaluate_seconds=evaluate_end_time,
)
|
https://github.com/pykeen/pykeen/issues/92
|
ValueError Traceback (most recent call last)
<ipython-input-13-ef15ccdc9011> in <module>
3
4 tf = TriplesFactory(path=work_path + '/eucalyptus_triplets.txt')
----> 5 training, testing = tf.split()
6
7 pipeline_result = pipeline(
~\anaconda3\envs\pykeen\lib\site-packages\pykeen\triples\triples_factory.py in split(self, ratios, random_state, randomize_cleanup)
419 idx = np.arange(n_triples)
420 if random_state is None:
--> 421 random_state = np.random.randint(0, 2 ** 32 - 1)
422 logger.warning(f'Using random_state={random_state} to split {self}')
423 if isinstance(random_state, int):
mtrand.pyx in numpy.random.mtrand.RandomState.randint()
_bounded_integers.pyx in numpy.random._bounded_integers._rand_int32()
ValueError: high is out of bounds for int32
|
ValueError
|
def create_lcwa_instances(self, use_tqdm: Optional[bool] = None) -> LCWAInstances:
"""Create LCWA instances for this factory's triples."""
s_p_to_multi_tails = _create_multi_label_tails_instance(
mapped_triples=self.mapped_triples,
use_tqdm=use_tqdm,
)
sp, multi_o = zip(*s_p_to_multi_tails.items())
mapped_triples: torch.LongTensor = torch.tensor(sp, dtype=torch.long)
labels = np.array([np.array(item) for item in multi_o], dtype=object)
return LCWAInstances(
mapped_triples=mapped_triples,
entity_to_id=self.entity_to_id,
relation_to_id=self.relation_to_id,
labels=labels,
)
|
def create_lcwa_instances(self, use_tqdm: Optional[bool] = None) -> LCWAInstances:
"""Create LCWA instances for this factory's triples."""
s_p_to_multi_tails = _create_multi_label_tails_instance(
mapped_triples=self.mapped_triples,
use_tqdm=use_tqdm,
)
sp, multi_o = zip(*s_p_to_multi_tails.items())
mapped_triples: torch.LongTensor = torch.tensor(sp, dtype=torch.long)
labels = np.array([np.array(item) for item in multi_o])
return LCWAInstances(
mapped_triples=mapped_triples,
entity_to_id=self.entity_to_id,
relation_to_id=self.relation_to_id,
labels=labels,
)
|
https://github.com/pykeen/pykeen/issues/92
|
ValueError Traceback (most recent call last)
<ipython-input-13-ef15ccdc9011> in <module>
3
4 tf = TriplesFactory(path=work_path + '/eucalyptus_triplets.txt')
----> 5 training, testing = tf.split()
6
7 pipeline_result = pipeline(
~\anaconda3\envs\pykeen\lib\site-packages\pykeen\triples\triples_factory.py in split(self, ratios, random_state, randomize_cleanup)
419 idx = np.arange(n_triples)
420 if random_state is None:
--> 421 random_state = np.random.randint(0, 2 ** 32 - 1)
422 logger.warning(f'Using random_state={random_state} to split {self}')
423 if isinstance(random_state, int):
mtrand.pyx in numpy.random.mtrand.RandomState.randint()
_bounded_integers.pyx in numpy.random._bounded_integers._rand_int32()
ValueError: high is out of bounds for int32
|
ValueError
|
def split(
self,
ratios: Union[float, Sequence[float]] = 0.8,
*,
random_state: Union[None, int, np.random.RandomState] = None,
randomize_cleanup: bool = False,
) -> List["TriplesFactory"]:
"""Split a triples factory into a train/test.
:param ratios: There are three options for this argument. First, a float can be given between 0 and 1.0,
non-inclusive. The first triples factory will get this ratio and the second will get the rest. Second,
a list of ratios can be given for which factory in which order should get what ratios as in ``[0.8, 0.1]``.
The final ratio can be omitted because that can be calculated. Third, all ratios can be explicitly set in
order such as in ``[0.8, 0.1, 0.1]`` where the sum of all ratios is 1.0.
:param random_state: The random state used to shuffle and split the triples in this factory.
:param randomize_cleanup: If true, uses the non-deterministic method for moving triples to the training set.
This has the advantage that it doesn't necessarily have to move all of them, but it might be slower.
.. code-block:: python
ratio = 0.8 # makes a [0.8, 0.2] split
training_factory, testing_factory = factory.split(ratio)
ratios = [0.8, 0.1] # makes a [0.8, 0.1, 0.1] split
training_factory, testing_factory, validation_factory = factory.split(ratios)
ratios = [0.8, 0.1, 0.1] # also makes a [0.8, 0.1, 0.1] split
training_factory, testing_factory, validation_factory = factory.split(ratios)
"""
n_triples = self.triples.shape[0]
# Prepare shuffle index
idx = np.arange(n_triples)
if random_state is None:
random_state = random_non_negative_int()
logger.warning(f"Using random_state={random_state} to split {self}")
if isinstance(random_state, int):
random_state = np.random.RandomState(random_state)
random_state.shuffle(idx)
# Prepare split index
if isinstance(ratios, float):
ratios = [ratios]
ratio_sum = sum(ratios)
if ratio_sum == 1.0:
ratios = ratios[:-1] # vsplit doesn't take the final number into account.
elif ratio_sum > 1.0:
raise ValueError(f"ratios sum to more than 1.0: {ratios} (sum={ratio_sum})")
sizes = [int(split_ratio * n_triples) for split_ratio in ratios]
# Take cumulative sum so the get separated properly
split_idxs = np.cumsum(sizes)
# Split triples
triples_groups = np.vsplit(self.triples[idx], split_idxs)
logger.info(
f"split triples to groups of sizes {[triples.shape[0] for triples in triples_groups]}"
)
# Make sure that the first element has all the right stuff in it
triples_groups = _tf_cleanup_all(
triples_groups, random_state=random_state if randomize_cleanup else None
)
for i, (triples, exp_size, exp_ratio) in enumerate(
zip(triples_groups, sizes, ratios)
):
actual_size = triples.shape[0]
actual_ratio = actual_size / exp_size * exp_ratio
if actual_size != exp_size:
logger.warning(
f"Requested ratio[{i}]={exp_ratio:.3f} (equal to size {exp_size}), but got {actual_ratio:.3f} "
f"(equal to size {actual_size}) to ensure that all entities/relations occur in train.",
)
# Make new triples factories for each group
return [
TriplesFactory(
triples=triples,
entity_to_id=self.entity_to_id,
relation_to_id=self.relation_to_id,
compact_id=False,
)
for triples in triples_groups
]
|
def split(
self,
ratios: Union[float, Sequence[float]] = 0.8,
*,
random_state: Union[None, int, np.random.RandomState] = None,
randomize_cleanup: bool = False,
) -> List["TriplesFactory"]:
"""Split a triples factory into a train/test.
:param ratios: There are three options for this argument. First, a float can be given between 0 and 1.0,
non-inclusive. The first triples factory will get this ratio and the second will get the rest. Second,
a list of ratios can be given for which factory in which order should get what ratios as in ``[0.8, 0.1]``.
The final ratio can be omitted because that can be calculated. Third, all ratios can be explicitly set in
order such as in ``[0.8, 0.1, 0.1]`` where the sum of all ratios is 1.0.
:param random_state: The random state used to shuffle and split the triples in this factory.
:param randomize_cleanup: If true, uses the non-deterministic method for moving triples to the training set.
This has the advantage that it doesn't necessarily have to move all of them, but it might be slower.
.. code-block:: python
ratio = 0.8 # makes a [0.8, 0.2] split
training_factory, testing_factory = factory.split(ratio)
ratios = [0.8, 0.1] # makes a [0.8, 0.1, 0.1] split
training_factory, testing_factory, validation_factory = factory.split(ratios)
ratios = [0.8, 0.1, 0.1] # also makes a [0.8, 0.1, 0.1] split
training_factory, testing_factory, validation_factory = factory.split(ratios)
"""
n_triples = self.triples.shape[0]
# Prepare shuffle index
idx = np.arange(n_triples)
if random_state is None:
random_state = np.random.randint(0, 2**32 - 1)
logger.warning(f"Using random_state={random_state} to split {self}")
if isinstance(random_state, int):
random_state = np.random.RandomState(random_state)
random_state.shuffle(idx)
# Prepare split index
if isinstance(ratios, float):
ratios = [ratios]
ratio_sum = sum(ratios)
if ratio_sum == 1.0:
ratios = ratios[:-1] # vsplit doesn't take the final number into account.
elif ratio_sum > 1.0:
raise ValueError(f"ratios sum to more than 1.0: {ratios} (sum={ratio_sum})")
sizes = [int(split_ratio * n_triples) for split_ratio in ratios]
# Take cumulative sum so the get separated properly
split_idxs = np.cumsum(sizes)
# Split triples
triples_groups = np.vsplit(self.triples[idx], split_idxs)
logger.info(
f"split triples to groups of sizes {[triples.shape[0] for triples in triples_groups]}"
)
# Make sure that the first element has all the right stuff in it
triples_groups = _tf_cleanup_all(
triples_groups, random_state=random_state if randomize_cleanup else None
)
for i, (triples, exp_size, exp_ratio) in enumerate(
zip(triples_groups, sizes, ratios)
):
actual_size = triples.shape[0]
actual_ratio = actual_size / exp_size * exp_ratio
if actual_size != exp_size:
logger.warning(
f"Requested ratio[{i}]={exp_ratio:.3f} (equal to size {exp_size}), but got {actual_ratio:.3f} "
f"(equal to size {actual_size}) to ensure that all entities/relations occur in train.",
)
# Make new triples factories for each group
return [
TriplesFactory(
triples=triples,
entity_to_id=self.entity_to_id,
relation_to_id=self.relation_to_id,
compact_id=False,
)
for triples in triples_groups
]
|
https://github.com/pykeen/pykeen/issues/92
|
ValueError Traceback (most recent call last)
<ipython-input-13-ef15ccdc9011> in <module>
3
4 tf = TriplesFactory(path=work_path + '/eucalyptus_triplets.txt')
----> 5 training, testing = tf.split()
6
7 pipeline_result = pipeline(
~\anaconda3\envs\pykeen\lib\site-packages\pykeen\triples\triples_factory.py in split(self, ratios, random_state, randomize_cleanup)
419 idx = np.arange(n_triples)
420 if random_state is None:
--> 421 random_state = np.random.randint(0, 2 ** 32 - 1)
422 logger.warning(f'Using random_state={random_state} to split {self}')
423 if isinstance(random_state, int):
mtrand.pyx in numpy.random.mtrand.RandomState.randint()
_bounded_integers.pyx in numpy.random._bounded_integers._rand_int32()
ValueError: high is out of bounds for int32
|
ValueError
|
def _tf_cleanup_randomized(
training: np.ndarray,
testing: np.ndarray,
random_state: Union[None, int, np.random.RandomState] = None,
) -> Tuple[np.ndarray, np.ndarray]:
"""Cleanup a triples array, but randomly select testing triples and recalculate to minimize moves.
1. Calculate ``move_id_mask`` as in :func:`_tf_cleanup_deterministic`
2. Choose a triple to move, recalculate move_id_mask
3. Continue until move_id_mask has no true bits
"""
if random_state is None:
random_state = random_non_negative_int()
logger.warning("Using random_state=%s", random_state)
if isinstance(random_state, int):
random_state = np.random.RandomState(random_state)
move_id_mask = _prepare_cleanup(training, testing)
# While there are still triples that should be moved to the training set
while move_id_mask.any():
# Pick a random triple to move over to the training triples
idx = random_state.choice(move_id_mask.nonzero()[0])
training = np.concatenate([training, testing[idx].reshape(1, -1)])
# Recalculate the testing triples without that index
testing_mask = np.ones_like(move_id_mask)
testing_mask[idx] = False
testing = testing[testing_mask]
# Recalculate the training entities, testing entities, to_move, and move_id_mask
move_id_mask = _prepare_cleanup(training, testing)
return training, testing
|
def _tf_cleanup_randomized(
training: np.ndarray,
testing: np.ndarray,
random_state: Union[None, int, np.random.RandomState] = None,
) -> Tuple[np.ndarray, np.ndarray]:
"""Cleanup a triples array, but randomly select testing triples and recalculate to minimize moves.
1. Calculate ``move_id_mask`` as in :func:`_tf_cleanup_deterministic`
2. Choose a triple to move, recalculate move_id_mask
3. Continue until move_id_mask has no true bits
"""
if random_state is None:
random_state = np.random.randint(0, 2**32 - 1)
logger.warning("Using random_state=%s", random_state)
if isinstance(random_state, int):
random_state = np.random.RandomState(random_state)
move_id_mask = _prepare_cleanup(training, testing)
# While there are still triples that should be moved to the training set
while move_id_mask.any():
# Pick a random triple to move over to the training triples
idx = random_state.choice(move_id_mask.nonzero()[0])
training = np.concatenate([training, testing[idx].reshape(1, -1)])
# Recalculate the testing triples without that index
testing_mask = np.ones_like(move_id_mask)
testing_mask[idx] = False
testing = testing[testing_mask]
# Recalculate the training entities, testing entities, to_move, and move_id_mask
move_id_mask = _prepare_cleanup(training, testing)
return training, testing
|
https://github.com/pykeen/pykeen/issues/92
|
ValueError Traceback (most recent call last)
<ipython-input-13-ef15ccdc9011> in <module>
3
4 tf = TriplesFactory(path=work_path + '/eucalyptus_triplets.txt')
----> 5 training, testing = tf.split()
6
7 pipeline_result = pipeline(
~\anaconda3\envs\pykeen\lib\site-packages\pykeen\triples\triples_factory.py in split(self, ratios, random_state, randomize_cleanup)
419 idx = np.arange(n_triples)
420 if random_state is None:
--> 421 random_state = np.random.randint(0, 2 ** 32 - 1)
422 logger.warning(f'Using random_state={random_state} to split {self}')
423 if isinstance(random_state, int):
mtrand.pyx in numpy.random.mtrand.RandomState.randint()
_bounded_integers.pyx in numpy.random._bounded_integers._rand_int32()
ValueError: high is out of bounds for int32
|
ValueError
|
def split(
self,
ratios: Union[float, Sequence[float]] = 0.8,
*,
random_state: Union[None, int, np.random.RandomState] = None,
randomize_cleanup: bool = False,
) -> List["TriplesFactory"]:
"""Split a triples factory into a train/test.
:param ratios: There are three options for this argument. First, a float can be given between 0 and 1.0,
non-inclusive. The first triples factory will get this ratio and the second will get the rest. Second,
a list of ratios can be given for which factory in which order should get what ratios as in ``[0.8, 0.1]``.
The final ratio can be omitted because that can be calculated. Third, all ratios can be explicitly set in
order such as in ``[0.8, 0.1, 0.1]`` where the sum of all ratios is 1.0.
:param random_state: The random state used to shuffle and split the triples in this factory.
:param randomize_cleanup: If true, uses the non-deterministic method for moving triples to the training set.
This has the advantage that it doesn't necessarily have to move all of them, but it might be slower.
.. code-block:: python
ratio = 0.8 # makes a [0.8, 0.2] split
training_factory, testing_factory = factory.split(ratio)
ratios = [0.8, 0.1] # makes a [0.8, 0.1, 0.1] split
training_factory, testing_factory, validation_factory = factory.split(ratios)
ratios = [0.8, 0.1, 0.1] # also makes a [0.8, 0.1, 0.1] split
training_factory, testing_factory, validation_factory = factory.split(ratios)
"""
n_triples = self.triples.shape[0]
# Prepare shuffle index
idx = np.arange(n_triples)
if random_state is None:
random_state = np.random.randint(0, 2**32 - 1)
logger.warning(f"Using random_state={random_state} to split {self}")
if isinstance(random_state, int):
random_state = np.random.RandomState(random_state)
random_state.shuffle(idx)
# Prepare split index
if isinstance(ratios, float):
ratios = [ratios]
ratio_sum = sum(ratios)
if ratio_sum == 1.0:
ratios = ratios[:-1] # vsplit doesn't take the final number into account.
elif ratio_sum > 1.0:
raise ValueError(f"ratios sum to more than 1.0: {ratios} (sum={ratio_sum})")
sizes = [int(split_ratio * n_triples) for split_ratio in ratios]
# Take cumulative sum so the get separated properly
split_idxs = np.cumsum(sizes)
# Split triples
triples_groups = np.vsplit(self.triples[idx], split_idxs)
logger.info(
f"split triples to groups of sizes {[triples.shape[0] for triples in triples_groups]}"
)
# Make sure that the first element has all the right stuff in it
triples_groups = _tf_cleanup_all(
triples_groups, random_state=random_state if randomize_cleanup else None
)
for i, (triples, exp_size, exp_ratio) in enumerate(
zip(triples_groups, sizes, ratios)
):
actual_size = triples.shape[0]
actual_ratio = actual_size / exp_size * exp_ratio
if actual_size != exp_size:
logger.warning(
f"Requested ratio[{i}]={exp_ratio:.3f} (equal to size {exp_size}), but got {actual_ratio:.3f} "
f"(equal to size {actual_size}) to ensure that all entities/relations occur in train."
)
# Make new triples factories for each group
return [
TriplesFactory(
triples=triples,
entity_to_id=self.entity_to_id,
relation_to_id=self.relation_to_id,
compact_id=False,
)
for triples in triples_groups
]
|
def split(
self,
ratios: Union[float, Sequence[float]] = 0.8,
*,
random_state: Union[None, int, np.random.RandomState] = None,
randomize_cleanup: bool = False,
) -> List["TriplesFactory"]:
"""Split a triples factory into a train/test.
:param ratios: There are three options for this argument. First, a float can be given between 0 and 1.0,
non-inclusive. The first triples factory will get this ratio and the second will get the rest. Second,
a list of ratios can be given for which factory in which order should get what ratios as in ``[0.8, 0.1]``.
The final ratio can be omitted because that can be calculated. Third, all ratios can be explicitly set in
order such as in ``[0.8, 0.1, 0.1]`` where the sum of all ratios is 1.0.
:param random_state: The random state used to shuffle and split the triples in this factory.
:param randomize_cleanup: If true, uses the non-deterministic method for moving triples to the training set.
This has the advantage that it doesn't necessarily have to move all of them, but it might be slower.
.. code-block:: python
ratio = 0.8 # makes a [0.8, 0.2] split
training_factory, testing_factory = factory.split(ratio)
ratios = [0.8, 0.1] # makes a [0.8, 0.1, 0.1] split
training_factory, testing_factory, validation_factory = factory.split(ratios)
ratios = [0.8, 0.1, 0.1] # also makes a [0.8, 0.1, 0.1] split
training_factory, testing_factory, validation_factory = factory.split(ratios)
"""
n_triples = self.triples.shape[0]
# Prepare shuffle index
idx = np.arange(n_triples)
if random_state is None:
random_state = np.random.randint(0, 2**32 - 1)
logger.warning(f"Using random_state={random_state} to split {self}")
if isinstance(random_state, int):
random_state = np.random.RandomState(random_state)
random_state.shuffle(idx)
# Prepare split index
if isinstance(ratios, float):
ratios = [ratios]
ratio_sum = sum(ratios)
if ratio_sum == 1.0:
ratios = ratios[:-1] # vsplit doesn't take the final number into account.
elif ratio_sum > 1.0:
raise ValueError(f"ratios sum to more than 1.0: {ratios} (sum={ratio_sum})")
split_idxs = [int(split_ratio * n_triples) for split_ratio in ratios]
# Take cumulative sum so the get separated properly
split_idxs = np.cumsum(split_idxs)
# Split triples
triples_groups = np.vsplit(self.triples[idx], split_idxs)
logger.info(
f"split triples to groups of sizes {[triples.shape[0] for triples in triples_groups]}"
)
# Make sure that the first element has all the right stuff in it
triples_groups = _tf_cleanup_all(
triples_groups, random_state=random_state if randomize_cleanup else None
)
# Make new triples factories for each group
return [
TriplesFactory(
triples=triples,
entity_to_id=deepcopy(self.entity_to_id),
relation_to_id=deepcopy(self.relation_to_id),
)
for triples in triples_groups
]
|
https://github.com/pykeen/pykeen/issues/58
|
Using random_state=1333753659 to split TriplesFactory(path="/tmp/out")
No random seed is specified. Setting to 2098687070.
No cuda devices were available. The model runs on CPU
Training epochs on cpu: 0%| | 0/5 [00:00<?, ?epoch/s]INFO:pykeen.training.training_loop:using stopper: <pykeen.stoppers.stopper.NopStopper object at 0x7f42e4842cc0>
Training epochs on cpu: 0%| | 0/5 [00:00<?, ?epoch/s]
Traceback (most recent call last):
File "test.py", line 8, in <module>
training_triples_factory=training, testing_triples_factory=testing, model="TransH"
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/pipeline.py", line 815, in pipeline
**training_kwargs,
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/training/training_loop.py", line 190, in train
num_workers=num_workers,
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/training/training_loop.py", line 376, in _train
slice_size,
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/training/training_loop.py", line 438, in _forward_pass
slice_size=slice_size,
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/training/slcwa.py", line 101, in _process_batch
positive_scores = self.model.score_hrt(positive_batch)
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/models/unimodal/trans_h.py", line 134, in score_hrt
d_r = self.relation_embeddings(hrt_batch[:, 1])
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/torch/nn/modules/module.py", line 550, in __call__
result = self.forward(*input, **kwargs)
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/torch/nn/modules/sparse.py", line 114, in forward
self.norm_type, self.scale_grad_by_freq, self.sparse)
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/torch/nn/functional.py", line 1724, in embedding
return torch.embedding(weight, input, padding_idx, scale_grad_by_freq, sparse)
IndexError: index out of range in self
|
IndexError
|
def _tf_cleanup_deterministic(
training: np.ndarray, testing: np.ndarray
) -> Tuple[np.ndarray, np.ndarray]:
"""Cleanup a triples array (testing) with respect to another (training)."""
move_id_mask = _prepare_cleanup(training, testing)
training = np.concatenate([training, testing[move_id_mask]])
testing = testing[~move_id_mask]
return training, testing
|
def _tf_cleanup_deterministic(
training: np.ndarray, testing: np.ndarray
) -> Tuple[np.ndarray, np.ndarray]:
"""Cleanup a triples array (testing) with respect to another (training)."""
training_entities, testing_entities, to_move, move_id_mask = _prepare_cleanup(
training, testing
)
training = np.concatenate([training, testing[move_id_mask]])
testing = testing[~move_id_mask]
return training, testing
|
https://github.com/pykeen/pykeen/issues/58
|
Using random_state=1333753659 to split TriplesFactory(path="/tmp/out")
No random seed is specified. Setting to 2098687070.
No cuda devices were available. The model runs on CPU
Training epochs on cpu: 0%| | 0/5 [00:00<?, ?epoch/s]INFO:pykeen.training.training_loop:using stopper: <pykeen.stoppers.stopper.NopStopper object at 0x7f42e4842cc0>
Training epochs on cpu: 0%| | 0/5 [00:00<?, ?epoch/s]
Traceback (most recent call last):
File "test.py", line 8, in <module>
training_triples_factory=training, testing_triples_factory=testing, model="TransH"
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/pipeline.py", line 815, in pipeline
**training_kwargs,
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/training/training_loop.py", line 190, in train
num_workers=num_workers,
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/training/training_loop.py", line 376, in _train
slice_size,
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/training/training_loop.py", line 438, in _forward_pass
slice_size=slice_size,
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/training/slcwa.py", line 101, in _process_batch
positive_scores = self.model.score_hrt(positive_batch)
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/models/unimodal/trans_h.py", line 134, in score_hrt
d_r = self.relation_embeddings(hrt_batch[:, 1])
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/torch/nn/modules/module.py", line 550, in __call__
result = self.forward(*input, **kwargs)
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/torch/nn/modules/sparse.py", line 114, in forward
self.norm_type, self.scale_grad_by_freq, self.sparse)
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/torch/nn/functional.py", line 1724, in embedding
return torch.embedding(weight, input, padding_idx, scale_grad_by_freq, sparse)
IndexError: index out of range in self
|
IndexError
|
def _tf_cleanup_randomized(
training: np.ndarray,
testing: np.ndarray,
random_state: Union[None, int, np.random.RandomState] = None,
) -> Tuple[np.ndarray, np.ndarray]:
"""Cleanup a triples array, but randomly select testing triples and recalculate to minimize moves.
1. Calculate ``move_id_mask`` as in :func:`_tf_cleanup_deterministic`
2. Choose a triple to move, recalculate move_id_mask
3. Continue until move_id_mask has no true bits
"""
if random_state is None:
random_state = np.random.randint(0, 2**32 - 1)
logger.warning("Using random_state=%s", random_state)
if isinstance(random_state, int):
random_state = np.random.RandomState(random_state)
move_id_mask = _prepare_cleanup(training, testing)
# While there are still triples that should be moved to the training set
while move_id_mask.any():
# Pick a random triple to move over to the training triples
idx = random_state.choice(move_id_mask.nonzero()[0])
training = np.concatenate([training, testing[idx].reshape(1, -1)])
# Recalculate the testing triples without that index
testing_mask = np.ones_like(move_id_mask)
testing_mask[idx] = False
testing = testing[testing_mask]
# Recalculate the training entities, testing entities, to_move, and move_id_mask
move_id_mask = _prepare_cleanup(training, testing)
return training, testing
|
def _tf_cleanup_randomized(
training: np.ndarray,
testing: np.ndarray,
random_state: Union[None, int, np.random.RandomState] = None,
) -> Tuple[np.ndarray, np.ndarray]:
"""Cleanup a triples array, but randomly select testing triples and recalculate to minimize moves.
1. Calculate ``move_id_mask`` as in :func:`_tf_cleanup_deterministic`
2. Choose a triple to move, recalculate move_id_mask
3. Continue until move_id_mask has no true bits
"""
if random_state is None:
random_state = np.random.randint(0, 2**32 - 1)
logger.warning("Using random_state=%s", random_state)
if isinstance(random_state, int):
random_state = np.random.RandomState(random_state)
training_entities, testing_entities, to_move, move_id_mask = _prepare_cleanup(
training, testing
)
# While there are still triples that should be moved to the training set
while move_id_mask.any():
# Pick a random triple to move over to the training triples
idx = random_state.choice(move_id_mask.nonzero()[0])
training = np.concatenate([training, testing[idx].reshape(1, -1)])
# Recalculate the testing triples without that index
testing_mask = np.ones_like(move_id_mask)
testing_mask[idx] = False
testing = testing[testing_mask]
# Recalculate the training entities, testing entities, to_move, and move_id_mask
training_entities, testing_entities, to_move, move_id_mask = _prepare_cleanup(
training, testing
)
return training, testing
|
https://github.com/pykeen/pykeen/issues/58
|
Using random_state=1333753659 to split TriplesFactory(path="/tmp/out")
No random seed is specified. Setting to 2098687070.
No cuda devices were available. The model runs on CPU
Training epochs on cpu: 0%| | 0/5 [00:00<?, ?epoch/s]INFO:pykeen.training.training_loop:using stopper: <pykeen.stoppers.stopper.NopStopper object at 0x7f42e4842cc0>
Training epochs on cpu: 0%| | 0/5 [00:00<?, ?epoch/s]
Traceback (most recent call last):
File "test.py", line 8, in <module>
training_triples_factory=training, testing_triples_factory=testing, model="TransH"
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/pipeline.py", line 815, in pipeline
**training_kwargs,
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/training/training_loop.py", line 190, in train
num_workers=num_workers,
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/training/training_loop.py", line 376, in _train
slice_size,
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/training/training_loop.py", line 438, in _forward_pass
slice_size=slice_size,
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/training/slcwa.py", line 101, in _process_batch
positive_scores = self.model.score_hrt(positive_batch)
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/models/unimodal/trans_h.py", line 134, in score_hrt
d_r = self.relation_embeddings(hrt_batch[:, 1])
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/torch/nn/modules/module.py", line 550, in __call__
result = self.forward(*input, **kwargs)
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/torch/nn/modules/sparse.py", line 114, in forward
self.norm_type, self.scale_grad_by_freq, self.sparse)
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/torch/nn/functional.py", line 1724, in embedding
return torch.embedding(weight, input, padding_idx, scale_grad_by_freq, sparse)
IndexError: index out of range in self
|
IndexError
|
def _prepare_cleanup(training: np.ndarray, testing: np.ndarray) -> np.ndarray:
to_move_mask = None
for col in [[0, 2], 1]:
training_ids, test_ids = [
np.unique(triples[:, col]) for triples in [training, testing]
]
to_move = test_ids[~np.isin(test_ids, training_ids)]
this_to_move_mask = np.isin(testing[:, col], to_move)
if this_to_move_mask.ndim > 1:
this_to_move_mask = this_to_move_mask.any(axis=1)
if to_move_mask is None:
to_move_mask = this_to_move_mask
else:
to_move_mask = this_to_move_mask | to_move_mask
return to_move_mask
|
def _prepare_cleanup(training: np.ndarray, testing: np.ndarray):
training_entities = _get_unique(training)
testing_entities = _get_unique(testing)
to_move = testing_entities[~np.isin(testing_entities, training_entities)]
move_id_mask = np.isin(testing[:, [0, 2]], to_move).any(axis=1)
return training_entities, testing_entities, to_move, move_id_mask
|
https://github.com/pykeen/pykeen/issues/58
|
Using random_state=1333753659 to split TriplesFactory(path="/tmp/out")
No random seed is specified. Setting to 2098687070.
No cuda devices were available. The model runs on CPU
Training epochs on cpu: 0%| | 0/5 [00:00<?, ?epoch/s]INFO:pykeen.training.training_loop:using stopper: <pykeen.stoppers.stopper.NopStopper object at 0x7f42e4842cc0>
Training epochs on cpu: 0%| | 0/5 [00:00<?, ?epoch/s]
Traceback (most recent call last):
File "test.py", line 8, in <module>
training_triples_factory=training, testing_triples_factory=testing, model="TransH"
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/pipeline.py", line 815, in pipeline
**training_kwargs,
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/training/training_loop.py", line 190, in train
num_workers=num_workers,
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/training/training_loop.py", line 376, in _train
slice_size,
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/training/training_loop.py", line 438, in _forward_pass
slice_size=slice_size,
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/training/slcwa.py", line 101, in _process_batch
positive_scores = self.model.score_hrt(positive_batch)
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/pykeen/models/unimodal/trans_h.py", line 134, in score_hrt
d_r = self.relation_embeddings(hrt_batch[:, 1])
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/torch/nn/modules/module.py", line 550, in __call__
result = self.forward(*input, **kwargs)
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/torch/nn/modules/sparse.py", line 114, in forward
self.norm_type, self.scale_grad_by_freq, self.sparse)
File "/home/dobraczka/.local/share/virtualenvs/embedding-transformers-I3i1Obsv/lib/python3.7/site-packages/torch/nn/functional.py", line 1724, in embedding
return torch.embedding(weight, input, padding_idx, scale_grad_by_freq, sparse)
IndexError: index out of range in self
|
IndexError
|
def __init__(self, coresys: CoreSys, addon: AnyAddon) -> None:
"""Initialize Supervisor add-on builder."""
self.coresys: CoreSys = coresys
self.addon = addon
try:
build_file = find_one_filetype(
self.addon.path_location, "build", FILE_SUFFIX_CONFIGURATION
)
except ConfigurationFileError:
build_file = self.addon.path_location / "build.json"
super().__init__(build_file, SCHEMA_BUILD_CONFIG)
|
def __init__(self, coresys: CoreSys, addon: AnyAddon) -> None:
"""Initialize Supervisor add-on builder."""
self.coresys: CoreSys = coresys
self.addon = addon
super().__init__(
find_one_filetype(self.addon.path_location, "build", FILE_SUFFIX_CONFIGURATION),
SCHEMA_BUILD_CONFIG,
)
|
https://github.com/home-assistant/supervisor/issues/2669
|
21-03-03 19:11:51 ERROR (MainThread) [supervisor.jobs] Unhandled exception: 'NoneType' object has no attribute 'is_file'
Traceback (most recent call last):
File "/usr/src/supervisor/supervisor/jobs/decorator.py", line 100, in wrapper
return await self._method(*args, **kwargs)
File "/usr/src/supervisor/supervisor/addons/init.py", line 182, in install
await addon.instance.install(store.version, store.image)
File "/usr/src/supervisor/supervisor/utils/init.py", line 32, in wrap_api
return await method(api, *args, **kwargs)
File "/usr/local/lib/python3.8/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/usr/src/supervisor/supervisor/docker/addon.py", line 491, in _install
self._build(version)
File "/usr/src/supervisor/supervisor/docker/addon.py", line 500, in _build
build_env = AddonBuild(self.coresys, self.addon)
File "/usr/src/supervisor/supervisor/addons/build.py", line 32, in init
super().init(
File "/usr/src/supervisor/supervisor/utils/common.py", line 59, in init
self.read_data()
File "/usr/src/supervisor/supervisor/utils/common.py", line 72, in read_data
if self._file.is_file():
AttributeError: 'NoneType' object has no attribute 'is_file'
|
AttributeError
|
def _read_git_repository(self, path: Path) -> None:
"""Process a custom repository folder."""
slug = extract_hash_from_path(path)
# exists repository json
try:
repository_file = find_one_filetype(
path, "repository", FILE_SUFFIX_CONFIGURATION
)
except ConfigurationFileError:
_LOGGER.warning("No repository information exists at %s", path)
return
try:
repository_info = SCHEMA_REPOSITORY_CONFIG(
read_json_or_yaml_file(repository_file)
)
except ConfigurationFileError:
_LOGGER.warning("Can't read repository information from %s", repository_file)
return
except vol.Invalid:
_LOGGER.warning("Repository parse error %s", repository_file)
return
# process data
self.repositories[slug] = repository_info
self._read_addons_folder(path, slug)
|
def _read_git_repository(self, path: Path) -> None:
"""Process a custom repository folder."""
slug = extract_hash_from_path(path)
# exists repository json
repository_file = find_one_filetype(path, "repository", FILE_SUFFIX_CONFIGURATION)
if repository_file is None:
_LOGGER.warning("No repository information exists at %s", path)
return
try:
repository_info = SCHEMA_REPOSITORY_CONFIG(
read_json_or_yaml_file(repository_file)
)
except ConfigurationFileError:
_LOGGER.warning("Can't read repository information from %s", repository_file)
return
except vol.Invalid:
_LOGGER.warning("Repository parse error %s", repository_file)
return
# process data
self.repositories[slug] = repository_info
self._read_addons_folder(path, slug)
|
https://github.com/home-assistant/supervisor/issues/2669
|
21-03-03 19:11:51 ERROR (MainThread) [supervisor.jobs] Unhandled exception: 'NoneType' object has no attribute 'is_file'
Traceback (most recent call last):
File "/usr/src/supervisor/supervisor/jobs/decorator.py", line 100, in wrapper
return await self._method(*args, **kwargs)
File "/usr/src/supervisor/supervisor/addons/init.py", line 182, in install
await addon.instance.install(store.version, store.image)
File "/usr/src/supervisor/supervisor/utils/init.py", line 32, in wrap_api
return await method(api, *args, **kwargs)
File "/usr/local/lib/python3.8/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/usr/src/supervisor/supervisor/docker/addon.py", line 491, in _install
self._build(version)
File "/usr/src/supervisor/supervisor/docker/addon.py", line 500, in _build
build_env = AddonBuild(self.coresys, self.addon)
File "/usr/src/supervisor/supervisor/addons/build.py", line 32, in init
super().init(
File "/usr/src/supervisor/supervisor/utils/common.py", line 59, in init
self.read_data()
File "/usr/src/supervisor/supervisor/utils/common.py", line 72, in read_data
if self._file.is_file():
AttributeError: 'NoneType' object has no attribute 'is_file'
|
AttributeError
|
def find_one_filetype(path: Path, filename: str, filetypes: List[str]) -> Path:
"""Find first file matching filetypes."""
for file in path.glob(f"**/{filename}.*"):
if file.suffix in filetypes:
return file
raise ConfigurationFileError(f"{path!s}/{filename}.({filetypes}) not exists!")
|
def find_one_filetype(
path: Path, filename: str, filetypes: List[str]
) -> Optional[Path]:
"""Find first file matching filetypes."""
for file in path.glob(f"**/{filename}.*"):
if file.suffix in filetypes:
return file
return None
|
https://github.com/home-assistant/supervisor/issues/2669
|
21-03-03 19:11:51 ERROR (MainThread) [supervisor.jobs] Unhandled exception: 'NoneType' object has no attribute 'is_file'
Traceback (most recent call last):
File "/usr/src/supervisor/supervisor/jobs/decorator.py", line 100, in wrapper
return await self._method(*args, **kwargs)
File "/usr/src/supervisor/supervisor/addons/init.py", line 182, in install
await addon.instance.install(store.version, store.image)
File "/usr/src/supervisor/supervisor/utils/init.py", line 32, in wrap_api
return await method(api, *args, **kwargs)
File "/usr/local/lib/python3.8/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/usr/src/supervisor/supervisor/docker/addon.py", line 491, in _install
self._build(version)
File "/usr/src/supervisor/supervisor/docker/addon.py", line 500, in _build
build_env = AddonBuild(self.coresys, self.addon)
File "/usr/src/supervisor/supervisor/addons/build.py", line 32, in init
super().init(
File "/usr/src/supervisor/supervisor/utils/common.py", line 59, in init
self.read_data()
File "/usr/src/supervisor/supervisor/utils/common.py", line 72, in read_data
if self._file.is_file():
AttributeError: 'NoneType' object has no attribute 'is_file'
|
AttributeError
|
def ipconfig_struct(config: IpConfig) -> Dict[str, Any]:
"""Return a dict with information about ip configuration."""
return {
ATTR_METHOD: config.method,
ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
}
|
def ipconfig_struct(config: IpConfig) -> dict:
"""Return a dict with information about ip configuration."""
return {
ATTR_METHOD: config.method,
ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
}
|
https://github.com/home-assistant/supervisor/issues/2333
|
20-12-03 14:56:14 ERROR (MainThread) [aiohttp.server] Error handling request
Traceback (most recent call last):
File "/usr/local/lib/python3.8/site-packages/aiohttp/web_protocol.py", line 422, in _handle_request
resp = await self._request_handler(request)
File "/usr/local/lib/python3.8/site-packages/sentry_sdk/integrations/aiohttp.py", line 123, in sentry_app_handle
reraise(*_capture_exception(hub))
File "/usr/local/lib/python3.8/site-packages/sentry_sdk/_compat.py", line 54, in reraise
raise value
File "/usr/local/lib/python3.8/site-packages/sentry_sdk/integrations/aiohttp.py", line 113, in sentry_app_handle
response = await old_handle(self, request)
File "/usr/local/lib/python3.8/site-packages/aiohttp/web_app.py", line 499, in _handle
resp = await handler(request)
File "/usr/local/lib/python3.8/site-packages/aiohttp/web_middlewares.py", line 118, in impl
return await handler(request)
File "/usr/src/supervisor/supervisor/api/security.py", line 133, in system_validation
return await handler(request)
File "/usr/src/supervisor/supervisor/api/security.py", line 195, in token_validation
return await handler(request)
File "/usr/src/supervisor/supervisor/api/utils.py", line 60, in wrap_api
answer = await method(api, *args, **kwargs)
File "/usr/src/supervisor/supervisor/api/network.py", line 154, in info
ATTR_INTERFACES: [
File "/usr/src/supervisor/supervisor/api/network.py", line 155, in <listcomp>
interface_struct(interface)
File "/usr/src/supervisor/supervisor/api/network.py", line 114, in interface_struct
ATTR_VLAN: wifi_struct(interface.vlan) if interface.vlan else None,
File "/usr/src/supervisor/supervisor/api/network.py", line 96, in wifi_struct
ATTR_MODE: config.mode,
AttributeError: 'VlanConfig' object has no attribute 'mode'
|
AttributeError
|
def wifi_struct(config: WifiConfig) -> Dict[str, Any]:
"""Return a dict with information about wifi configuration."""
return {
ATTR_MODE: config.mode,
ATTR_AUTH: config.auth,
ATTR_SSID: config.ssid,
ATTR_SIGNAL: config.signal,
}
|
def wifi_struct(config: WifiConfig) -> dict:
"""Return a dict with information about wifi configuration."""
return {
ATTR_MODE: config.mode,
ATTR_AUTH: config.auth,
ATTR_SSID: config.ssid,
ATTR_SIGNAL: config.signal,
}
|
https://github.com/home-assistant/supervisor/issues/2333
|
20-12-03 14:56:14 ERROR (MainThread) [aiohttp.server] Error handling request
Traceback (most recent call last):
File "/usr/local/lib/python3.8/site-packages/aiohttp/web_protocol.py", line 422, in _handle_request
resp = await self._request_handler(request)
File "/usr/local/lib/python3.8/site-packages/sentry_sdk/integrations/aiohttp.py", line 123, in sentry_app_handle
reraise(*_capture_exception(hub))
File "/usr/local/lib/python3.8/site-packages/sentry_sdk/_compat.py", line 54, in reraise
raise value
File "/usr/local/lib/python3.8/site-packages/sentry_sdk/integrations/aiohttp.py", line 113, in sentry_app_handle
response = await old_handle(self, request)
File "/usr/local/lib/python3.8/site-packages/aiohttp/web_app.py", line 499, in _handle
resp = await handler(request)
File "/usr/local/lib/python3.8/site-packages/aiohttp/web_middlewares.py", line 118, in impl
return await handler(request)
File "/usr/src/supervisor/supervisor/api/security.py", line 133, in system_validation
return await handler(request)
File "/usr/src/supervisor/supervisor/api/security.py", line 195, in token_validation
return await handler(request)
File "/usr/src/supervisor/supervisor/api/utils.py", line 60, in wrap_api
answer = await method(api, *args, **kwargs)
File "/usr/src/supervisor/supervisor/api/network.py", line 154, in info
ATTR_INTERFACES: [
File "/usr/src/supervisor/supervisor/api/network.py", line 155, in <listcomp>
interface_struct(interface)
File "/usr/src/supervisor/supervisor/api/network.py", line 114, in interface_struct
ATTR_VLAN: wifi_struct(interface.vlan) if interface.vlan else None,
File "/usr/src/supervisor/supervisor/api/network.py", line 96, in wifi_struct
ATTR_MODE: config.mode,
AttributeError: 'VlanConfig' object has no attribute 'mode'
|
AttributeError
|
def interface_struct(interface: Interface) -> Dict[str, Any]:
"""Return a dict with information of a interface to be used in th API."""
return {
ATTR_INTERFACE: interface.name,
ATTR_TYPE: interface.type,
ATTR_ENABLED: interface.enabled,
ATTR_CONNECTED: interface.connected,
ATTR_PRIMARY: interface.primary,
ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
ATTR_VLAN: wifi_struct(interface.vlan) if interface.vlan else None,
}
|
def interface_struct(interface: Interface) -> dict:
"""Return a dict with information of a interface to be used in th API."""
return {
ATTR_INTERFACE: interface.name,
ATTR_TYPE: interface.type,
ATTR_ENABLED: interface.enabled,
ATTR_CONNECTED: interface.connected,
ATTR_PRIMARY: interface.primary,
ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
ATTR_VLAN: wifi_struct(interface.vlan) if interface.vlan else None,
}
|
https://github.com/home-assistant/supervisor/issues/2333
|
20-12-03 14:56:14 ERROR (MainThread) [aiohttp.server] Error handling request
Traceback (most recent call last):
File "/usr/local/lib/python3.8/site-packages/aiohttp/web_protocol.py", line 422, in _handle_request
resp = await self._request_handler(request)
File "/usr/local/lib/python3.8/site-packages/sentry_sdk/integrations/aiohttp.py", line 123, in sentry_app_handle
reraise(*_capture_exception(hub))
File "/usr/local/lib/python3.8/site-packages/sentry_sdk/_compat.py", line 54, in reraise
raise value
File "/usr/local/lib/python3.8/site-packages/sentry_sdk/integrations/aiohttp.py", line 113, in sentry_app_handle
response = await old_handle(self, request)
File "/usr/local/lib/python3.8/site-packages/aiohttp/web_app.py", line 499, in _handle
resp = await handler(request)
File "/usr/local/lib/python3.8/site-packages/aiohttp/web_middlewares.py", line 118, in impl
return await handler(request)
File "/usr/src/supervisor/supervisor/api/security.py", line 133, in system_validation
return await handler(request)
File "/usr/src/supervisor/supervisor/api/security.py", line 195, in token_validation
return await handler(request)
File "/usr/src/supervisor/supervisor/api/utils.py", line 60, in wrap_api
answer = await method(api, *args, **kwargs)
File "/usr/src/supervisor/supervisor/api/network.py", line 154, in info
ATTR_INTERFACES: [
File "/usr/src/supervisor/supervisor/api/network.py", line 155, in <listcomp>
interface_struct(interface)
File "/usr/src/supervisor/supervisor/api/network.py", line 114, in interface_struct
ATTR_VLAN: wifi_struct(interface.vlan) if interface.vlan else None,
File "/usr/src/supervisor/supervisor/api/network.py", line 96, in wifi_struct
ATTR_MODE: config.mode,
AttributeError: 'VlanConfig' object has no attribute 'mode'
|
AttributeError
|
def accesspoint_struct(accesspoint: AccessPoint) -> Dict[str, Any]:
"""Return a dict for AccessPoint."""
return {
ATTR_MODE: accesspoint.mode,
ATTR_SSID: accesspoint.ssid,
ATTR_FREQUENCY: accesspoint.frequency,
ATTR_SIGNAL: accesspoint.signal,
ATTR_MAC: accesspoint.mac,
}
|
def accesspoint_struct(accesspoint: AccessPoint) -> dict:
"""Return a dict for AccessPoint."""
return {
ATTR_MODE: accesspoint.mode,
ATTR_SSID: accesspoint.ssid,
ATTR_FREQUENCY: accesspoint.frequency,
ATTR_SIGNAL: accesspoint.signal,
ATTR_MAC: accesspoint.mac,
}
|
https://github.com/home-assistant/supervisor/issues/2333
|
20-12-03 14:56:14 ERROR (MainThread) [aiohttp.server] Error handling request
Traceback (most recent call last):
File "/usr/local/lib/python3.8/site-packages/aiohttp/web_protocol.py", line 422, in _handle_request
resp = await self._request_handler(request)
File "/usr/local/lib/python3.8/site-packages/sentry_sdk/integrations/aiohttp.py", line 123, in sentry_app_handle
reraise(*_capture_exception(hub))
File "/usr/local/lib/python3.8/site-packages/sentry_sdk/_compat.py", line 54, in reraise
raise value
File "/usr/local/lib/python3.8/site-packages/sentry_sdk/integrations/aiohttp.py", line 113, in sentry_app_handle
response = await old_handle(self, request)
File "/usr/local/lib/python3.8/site-packages/aiohttp/web_app.py", line 499, in _handle
resp = await handler(request)
File "/usr/local/lib/python3.8/site-packages/aiohttp/web_middlewares.py", line 118, in impl
return await handler(request)
File "/usr/src/supervisor/supervisor/api/security.py", line 133, in system_validation
return await handler(request)
File "/usr/src/supervisor/supervisor/api/security.py", line 195, in token_validation
return await handler(request)
File "/usr/src/supervisor/supervisor/api/utils.py", line 60, in wrap_api
answer = await method(api, *args, **kwargs)
File "/usr/src/supervisor/supervisor/api/network.py", line 154, in info
ATTR_INTERFACES: [
File "/usr/src/supervisor/supervisor/api/network.py", line 155, in <listcomp>
interface_struct(interface)
File "/usr/src/supervisor/supervisor/api/network.py", line 114, in interface_struct
ATTR_VLAN: wifi_struct(interface.vlan) if interface.vlan else None,
File "/usr/src/supervisor/supervisor/api/network.py", line 96, in wifi_struct
ATTR_MODE: config.mode,
AttributeError: 'VlanConfig' object has no attribute 'mode'
|
AttributeError
|
def find_key(self, items, write=False):
overwrite = self.config["overwrite"].get(bool)
command = [self.config["bin"].as_str()]
# The KeyFinder GUI program needs the -f flag before the path.
# keyfinder-cli is similar, but just wants the path with no flag.
if "keyfinder-cli" not in os.path.basename(command[0]).lower():
command.append("-f")
for item in items:
if item["initial_key"] and not overwrite:
continue
try:
output = util.command_output(command + [util.syspath(item.path)]).stdout
except (subprocess.CalledProcessError, OSError) as exc:
self._log.error("execution failed: {0}", exc)
continue
except UnicodeEncodeError:
# Workaround for Python 2 Windows bug.
# https://bugs.python.org/issue1759845
self._log.error("execution failed for Unicode path: {0!r}", item.path)
continue
try:
key_raw = output.rsplit(None, 1)[-1]
except IndexError:
# Sometimes keyfinder-cli returns 0 but with no key, usually
# when the file is silent or corrupt, so we log and skip.
self._log.error("no key returned for path: {0}", item.path)
continue
try:
key = util.text_string(key_raw)
except UnicodeDecodeError:
self._log.error("output is invalid UTF-8")
continue
item["initial_key"] = key
self._log.info(
"added computed initial key {0} for {1}",
key,
util.displayable_path(item.path),
)
if write:
item.try_write()
item.store()
|
def find_key(self, items, write=False):
overwrite = self.config["overwrite"].get(bool)
command = [self.config["bin"].as_str()]
# The KeyFinder GUI program needs the -f flag before the path.
# keyfinder-cli is similar, but just wants the path with no flag.
if "keyfinder-cli" not in os.path.basename(command[0]).lower():
command.append("-f")
for item in items:
if item["initial_key"] and not overwrite:
continue
try:
output = util.command_output(command + [util.syspath(item.path)]).stdout
except (subprocess.CalledProcessError, OSError) as exc:
self._log.error("execution failed: {0}", exc)
continue
except UnicodeEncodeError:
# Workaround for Python 2 Windows bug.
# https://bugs.python.org/issue1759845
self._log.error("execution failed for Unicode path: {0!r}", item.path)
continue
key_raw = output.rsplit(None, 1)[-1]
try:
key = util.text_string(key_raw)
except UnicodeDecodeError:
self._log.error("output is invalid UTF-8")
continue
item["initial_key"] = key
self._log.info(
"added computed initial key {0} for {1}",
key,
util.displayable_path(item.path),
)
if write:
item.try_write()
item.store()
|
https://github.com/beetbox/beets/issues/2242
|
user configuration: /home/diomekes/.config/beets/config.yaml
data directory: /home/diomekes/.config/beets
plugin paths:
Sending event: pluginload
inline: adding item field disc_and_track
library database: /home/diomekes/.config/beets/library.db
library directory: /home/diomekes/media/music
Sending event: library_opened
Traceback (most recent call last):
File "/usr/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.19', 'console_scripts', 'beet')()
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 1266, in main
_raw_main(args)
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 1253, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/lib/python2.7/site-packages/beetsplug/keyfinder.py", line 48, in command
self.find_key(lib.items(ui.decargs(args)), write=ui.should_write())
File "/usr/lib/python2.7/site-packages/beetsplug/keyfinder.py", line 74, in find_key
key_raw = output.rsplit(None, 1)[-1]
IndexError: list index out of range
|
IndexError
|
def commands(self):
cmd = ui.Subcommand("lyrics", help="fetch song lyrics")
cmd.parser.add_option(
"-p",
"--print",
dest="printlyr",
action="store_true",
default=False,
help="print lyrics to console",
)
cmd.parser.add_option(
"-r",
"--write-rest",
dest="writerest",
action="store",
default=None,
metavar="dir",
help="write lyrics to given directory as ReST files",
)
cmd.parser.add_option(
"-f",
"--force",
dest="force_refetch",
action="store_true",
default=False,
help="always re-download lyrics",
)
cmd.parser.add_option(
"-l",
"--local",
dest="local_only",
action="store_true",
default=False,
help="do not fetch missing lyrics",
)
def func(lib, opts, args):
# The "write to files" option corresponds to the
# import_write config value.
write = ui.should_write()
if opts.writerest:
self.writerest_indexes(opts.writerest)
items = lib.items(ui.decargs(args))
for item in items:
if not opts.local_only and not self.config["local"]:
self.fetch_item_lyrics(
lib,
item,
write,
opts.force_refetch or self.config["force"],
)
if item.lyrics:
if opts.printlyr:
ui.print_(item.lyrics)
if opts.writerest:
self.appendrest(opts.writerest, item)
if opts.writerest and items:
# flush last artist & write to ReST
self.writerest(opts.writerest)
ui.print_("ReST files generated. to build, use one of:")
ui.print_(" sphinx-build -b html %s _build/html" % opts.writerest)
ui.print_(" sphinx-build -b epub %s _build/epub" % opts.writerest)
ui.print_(
(
" sphinx-build -b latex %s _build/latex "
"&& make -C _build/latex all-pdf"
)
% opts.writerest
)
cmd.func = func
return [cmd]
|
def commands(self):
cmd = ui.Subcommand("lyrics", help="fetch song lyrics")
cmd.parser.add_option(
"-p",
"--print",
dest="printlyr",
action="store_true",
default=False,
help="print lyrics to console",
)
cmd.parser.add_option(
"-r",
"--write-rest",
dest="writerest",
action="store",
default=None,
metavar="dir",
help="write lyrics to given directory as ReST files",
)
cmd.parser.add_option(
"-f",
"--force",
dest="force_refetch",
action="store_true",
default=False,
help="always re-download lyrics",
)
cmd.parser.add_option(
"-l",
"--local",
dest="local_only",
action="store_true",
default=False,
help="do not fetch missing lyrics",
)
def func(lib, opts, args):
# The "write to files" option corresponds to the
# import_write config value.
write = ui.should_write()
if opts.writerest:
self.writerest_indexes(opts.writerest)
for item in lib.items(ui.decargs(args)):
if not opts.local_only and not self.config["local"]:
self.fetch_item_lyrics(
lib,
item,
write,
opts.force_refetch or self.config["force"],
)
if item.lyrics:
if opts.printlyr:
ui.print_(item.lyrics)
if opts.writerest:
self.writerest(opts.writerest, item)
if opts.writerest:
# flush last artist
self.writerest(opts.writerest, None)
ui.print_("ReST files generated. to build, use one of:")
ui.print_(" sphinx-build -b html %s _build/html" % opts.writerest)
ui.print_(" sphinx-build -b epub %s _build/epub" % opts.writerest)
ui.print_(
(
" sphinx-build -b latex %s _build/latex "
"&& make -C _build/latex all-pdf"
)
% opts.writerest
)
cmd.func = func
return [cmd]
|
https://github.com/beetbox/beets/issues/2805
|
$ beet -vv lyrics artie shaw carioca
user configuration: /home/doron/.config/beets/config.yaml
data directory: /home/doron/.config/beets
plugin paths:
Sending event: pluginload
inline: adding item field isMultiDisc
library database: /var/lib/beets/db
library directory: /var/lib/mpd/music
Sending event: library_opened
lyrics: failed to fetch: https://www.musixmatch.com/lyrics/Artie-Shaw-and-His-Orchestra/Carioca (404)
lyrics: failed to fetch: https://www.musixmatch.com/lyrics/Artie-Shaw/Carioca (404)
lyrics: failed to fetch: https://www.musixmatch.com/es/letras/Buddy-Rich-Artie-Shaw/The-Carioca (404)
lyrics: failed to fetch: https://www.musixmatch.com/lyrics/Artie-Shaw/Carioca (404)
lyrics: lyrics not found: FLAC % Artie Shaw - Begin the Beguine | CD-04 | 25 Carioca
Traceback (most recent call last):
File "/usr/bin/beet", line 11, in <module>
load_entry_point('beets==1.4.6', 'console_scripts', 'beet')()
File "/usr/lib/python3.6/site-packages/beets/ui/__init__.py", line 1256, in main
_raw_main(args)
File "/usr/lib/python3.6/site-packages/beets/ui/__init__.py", line 1243, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/lib/python3.6/site-packages/beetsplug/lyrics.py", line 734, in func
self.writerest(opts.writerest, None)
File "/usr/lib/python3.6/site-packages/beetsplug/lyrics.py", line 762, in writerest
self.artist = item.artist.strip()
AttributeError: 'NoneType' object has no attribute 'artist'
|
AttributeError
|
def func(lib, opts, args):
# The "write to files" option corresponds to the
# import_write config value.
write = ui.should_write()
if opts.writerest:
self.writerest_indexes(opts.writerest)
items = lib.items(ui.decargs(args))
for item in items:
if not opts.local_only and not self.config["local"]:
self.fetch_item_lyrics(
lib,
item,
write,
opts.force_refetch or self.config["force"],
)
if item.lyrics:
if opts.printlyr:
ui.print_(item.lyrics)
if opts.writerest:
self.appendrest(opts.writerest, item)
if opts.writerest and items:
# flush last artist & write to ReST
self.writerest(opts.writerest)
ui.print_("ReST files generated. to build, use one of:")
ui.print_(" sphinx-build -b html %s _build/html" % opts.writerest)
ui.print_(" sphinx-build -b epub %s _build/epub" % opts.writerest)
ui.print_(
(" sphinx-build -b latex %s _build/latex && make -C _build/latex all-pdf")
% opts.writerest
)
|
def func(lib, opts, args):
# The "write to files" option corresponds to the
# import_write config value.
write = ui.should_write()
if opts.writerest:
self.writerest_indexes(opts.writerest)
for item in lib.items(ui.decargs(args)):
if not opts.local_only and not self.config["local"]:
self.fetch_item_lyrics(
lib,
item,
write,
opts.force_refetch or self.config["force"],
)
if item.lyrics:
if opts.printlyr:
ui.print_(item.lyrics)
if opts.writerest:
self.writerest(opts.writerest, item)
if opts.writerest:
# flush last artist
self.writerest(opts.writerest, None)
ui.print_("ReST files generated. to build, use one of:")
ui.print_(" sphinx-build -b html %s _build/html" % opts.writerest)
ui.print_(" sphinx-build -b epub %s _build/epub" % opts.writerest)
ui.print_(
(" sphinx-build -b latex %s _build/latex && make -C _build/latex all-pdf")
% opts.writerest
)
|
https://github.com/beetbox/beets/issues/2805
|
$ beet -vv lyrics artie shaw carioca
user configuration: /home/doron/.config/beets/config.yaml
data directory: /home/doron/.config/beets
plugin paths:
Sending event: pluginload
inline: adding item field isMultiDisc
library database: /var/lib/beets/db
library directory: /var/lib/mpd/music
Sending event: library_opened
lyrics: failed to fetch: https://www.musixmatch.com/lyrics/Artie-Shaw-and-His-Orchestra/Carioca (404)
lyrics: failed to fetch: https://www.musixmatch.com/lyrics/Artie-Shaw/Carioca (404)
lyrics: failed to fetch: https://www.musixmatch.com/es/letras/Buddy-Rich-Artie-Shaw/The-Carioca (404)
lyrics: failed to fetch: https://www.musixmatch.com/lyrics/Artie-Shaw/Carioca (404)
lyrics: lyrics not found: FLAC % Artie Shaw - Begin the Beguine | CD-04 | 25 Carioca
Traceback (most recent call last):
File "/usr/bin/beet", line 11, in <module>
load_entry_point('beets==1.4.6', 'console_scripts', 'beet')()
File "/usr/lib/python3.6/site-packages/beets/ui/__init__.py", line 1256, in main
_raw_main(args)
File "/usr/lib/python3.6/site-packages/beets/ui/__init__.py", line 1243, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/lib/python3.6/site-packages/beetsplug/lyrics.py", line 734, in func
self.writerest(opts.writerest, None)
File "/usr/lib/python3.6/site-packages/beetsplug/lyrics.py", line 762, in writerest
self.artist = item.artist.strip()
AttributeError: 'NoneType' object has no attribute 'artist'
|
AttributeError
|
def writerest(self, directory):
"""Write self.rest to a ReST file"""
if self.rest is not None and self.artist is not None:
path = os.path.join(directory, "artists", slug(self.artist) + ".rst")
with open(path, "wb") as output:
output.write(self.rest.encode("utf-8"))
|
def writerest(self, directory, item):
"""Write the item to an ReST file
This will keep state (in the `rest` variable) in order to avoid
writing continuously to the same files.
"""
if item is None or slug(self.artist) != slug(item.albumartist):
if self.rest is not None:
path = os.path.join(directory, "artists", slug(self.artist) + ".rst")
with open(path, "wb") as output:
output.write(self.rest.encode("utf-8"))
self.rest = None
if item is None:
return
self.artist = item.albumartist.strip()
self.rest = "%s\n%s\n\n.. contents::\n :local:\n\n" % (
self.artist,
"=" * len(self.artist),
)
if self.album != item.album:
tmpalbum = self.album = item.album.strip()
if self.album == "":
tmpalbum = "Unknown album"
self.rest += "%s\n%s\n\n" % (tmpalbum, "-" * len(tmpalbum))
title_str = ":index:`%s`" % item.title.strip()
block = "| " + item.lyrics.replace("\n", "\n| ")
self.rest += "%s\n%s\n\n%s\n\n" % (title_str, "~" * len(title_str), block)
|
https://github.com/beetbox/beets/issues/2805
|
$ beet -vv lyrics artie shaw carioca
user configuration: /home/doron/.config/beets/config.yaml
data directory: /home/doron/.config/beets
plugin paths:
Sending event: pluginload
inline: adding item field isMultiDisc
library database: /var/lib/beets/db
library directory: /var/lib/mpd/music
Sending event: library_opened
lyrics: failed to fetch: https://www.musixmatch.com/lyrics/Artie-Shaw-and-His-Orchestra/Carioca (404)
lyrics: failed to fetch: https://www.musixmatch.com/lyrics/Artie-Shaw/Carioca (404)
lyrics: failed to fetch: https://www.musixmatch.com/es/letras/Buddy-Rich-Artie-Shaw/The-Carioca (404)
lyrics: failed to fetch: https://www.musixmatch.com/lyrics/Artie-Shaw/Carioca (404)
lyrics: lyrics not found: FLAC % Artie Shaw - Begin the Beguine | CD-04 | 25 Carioca
Traceback (most recent call last):
File "/usr/bin/beet", line 11, in <module>
load_entry_point('beets==1.4.6', 'console_scripts', 'beet')()
File "/usr/lib/python3.6/site-packages/beets/ui/__init__.py", line 1256, in main
_raw_main(args)
File "/usr/lib/python3.6/site-packages/beets/ui/__init__.py", line 1243, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/lib/python3.6/site-packages/beetsplug/lyrics.py", line 734, in func
self.writerest(opts.writerest, None)
File "/usr/lib/python3.6/site-packages/beetsplug/lyrics.py", line 762, in writerest
self.artist = item.artist.strip()
AttributeError: 'NoneType' object has no attribute 'artist'
|
AttributeError
|
def album_for_id(self, album_id):
"""Fetches an album by its Discogs ID and returns an AlbumInfo object
or None if the album is not found.
"""
if not self.discogs_client:
return
self._log.debug("Searching for release {0}", album_id)
# Discogs-IDs are simple integers. We only look for those at the end
# of an input string as to avoid confusion with other metadata plugins.
# An optional bracket can follow the integer, as this is how discogs
# displays the release ID on its webpage.
match = re.search(r"(^|\[*r|discogs\.com/.+/release/)(\d+)($|\])", album_id)
if not match:
return None
result = Release(self.discogs_client, {"id": int(match.group(2))})
# Try to obtain title to verify that we indeed have a valid Release
try:
getattr(result, "title")
except DiscogsAPIError as e:
if e.status_code != 404:
self._log.debug(
"API Error: {0} (query: {1})", e, result.data["resource_url"]
)
if e.status_code == 401:
self.reset_auth()
return self.album_for_id(album_id)
return None
except CONNECTION_ERRORS:
self._log.debug("Connection error in album lookup", exc_info=True)
return None
return self.get_album_info(result)
|
def album_for_id(self, album_id):
"""Fetches an album by its Discogs ID and returns an AlbumInfo object
or None if the album is not found.
"""
if not self.discogs_client:
return
self._log.debug("Searching for release {0}", album_id)
# Discogs-IDs are simple integers. We only look for those at the end
# of an input string as to avoid confusion with other metadata plugins.
# An optional bracket can follow the integer, as this is how discogs
# displays the release ID on its webpage.
match = re.search(r"(^|\[*r|discogs\.com/.+/release/)(\d+)($|\])", album_id)
if not match:
return None
result = Release(self.discogs_client, {"id": int(match.group(2))})
# Try to obtain title to verify that we indeed have a valid Release
try:
getattr(result, "title")
except DiscogsAPIError as e:
if e.status_code != 404:
self._log.debug("API Error: {0} (query: {1})", e, result._uri)
if e.status_code == 401:
self.reset_auth()
return self.album_for_id(album_id)
return None
except CONNECTION_ERRORS:
self._log.debug("Connection error in album lookup", exc_info=True)
return None
return self.get_album_info(result)
|
https://github.com/beetbox/beets/issues/3239
|
File "/beets/beetsplug/discogs.py", line 257, in get_master_year
year = result.fetch('year')
File "/usr/lib/python3.7/site-packages/discogs_client/models.py", line 245, in fetch
self.refresh()
File "/usr/lib/python3.7/site-packages/discogs_client/models.py", line 211, in refresh
data = self.client._get(self.data['resource_url'])
File "/usr/lib/python3.7/site-packages/discogs_client/client.py", line 123, in _get
return self._request('GET', url)
File "/usr/lib/python3.7/site-packages/discogs_client/client.py", line 120, in _request
raise HTTPError(body['message'], status_code)
discogs_client.exceptions.HTTPError: 502: The Discogs API is undergoing maintenance. We'll be back in a moment.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/bin/beet", line 11, in <module>
load_entry_point('beets', 'console_scripts', 'beet')()
File "/beets/beets/ui/__init__.py", line 1266, in main
_raw_main(args)
File "/beets/beets/ui/__init__.py", line 1253, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/beets/beets/ui/commands.py", line 955, in import_func
import_files(lib, paths, query)
File "/beets/beets/ui/commands.py", line 925, in import_files
session.run()
File "/beets/beets/importer.py", line 329, in run
pl.run_parallel(QUEUE_SIZE)
File "/beets/beets/util/pipeline.py", line 445, in run_parallel
six.reraise(exc_info[0], exc_info[1], exc_info[2])
File "/usr/lib/python3.7/site-packages/six.py", line 693, in reraise
raise value
File "/beets/beets/util/pipeline.py", line 312, in run
out = self.coro.send(msg)
File "/beets/beets/util/pipeline.py", line 194, in coro
func(*(args + (task,)))
File "/beets/beets/importer.py", line 1351, in lookup_candidates
task.lookup_candidates()
File "/beets/beets/importer.py", line 641, in lookup_candidates
autotag.tag_album(self.items, search_ids=self.search_ids)
File "/beets/beets/autotag/match.py", line 460, in tag_album
va_likely):
File "/beets/beets/plugins.py", line 571, in decorated
for v in generator(*args, **kwargs):
File "/beets/beets/autotag/hooks.py", line 620, in album_candidates
for candidate in plugins.candidates(items, artist, album, va_likely):
File "/beets/beets/plugins.py", line 381, in candidates
for candidate in plugin.candidates(items, artist, album, va_likely):
File "/beets/beetsplug/discogs.py", line 176, in candidates
return self.get_albums(query)
File "/beets/beetsplug/discogs.py", line 245, in get_albums
return [album for album in map(self.get_album_info, releases[:5])
File "/beets/beetsplug/discogs.py", line 245, in <listcomp>
return [album for album in map(self.get_album_info, releases[:5])
File "/beets/beetsplug/discogs.py", line 335, in get_album_info
original_year = self.get_master_year(master_id) if master_id else year
File "/beets/beetsplug/discogs.py", line 262, in get_master_year
self._log.debug(u'API Error: {0} (query: {1})', e, result._uri)
AttributeError: 'Master' object has no attribute '_uri'
|
discogs_client.exceptions.HTTPError
|
def get_master_year(self, master_id):
"""Fetches a master release given its Discogs ID and returns its year
or None if the master release is not found.
"""
self._log.debug("Searching for master release {0}", master_id)
result = Master(self.discogs_client, {"id": master_id})
self.request_start()
try:
year = result.fetch("year")
self.request_finished()
return year
except DiscogsAPIError as e:
if e.status_code != 404:
self._log.debug(
"API Error: {0} (query: {1})", e, result.data["resource_url"]
)
if e.status_code == 401:
self.reset_auth()
return self.get_master_year(master_id)
return None
except CONNECTION_ERRORS:
self._log.debug("Connection error in master release lookup", exc_info=True)
return None
|
def get_master_year(self, master_id):
"""Fetches a master release given its Discogs ID and returns its year
or None if the master release is not found.
"""
self._log.debug("Searching for master release {0}", master_id)
result = Master(self.discogs_client, {"id": master_id})
self.request_start()
try:
year = result.fetch("year")
self.request_finished()
return year
except DiscogsAPIError as e:
if e.status_code != 404:
self._log.debug("API Error: {0} (query: {1})", e, result._uri)
if e.status_code == 401:
self.reset_auth()
return self.get_master_year(master_id)
return None
except CONNECTION_ERRORS:
self._log.debug("Connection error in master release lookup", exc_info=True)
return None
|
https://github.com/beetbox/beets/issues/3239
|
File "/beets/beetsplug/discogs.py", line 257, in get_master_year
year = result.fetch('year')
File "/usr/lib/python3.7/site-packages/discogs_client/models.py", line 245, in fetch
self.refresh()
File "/usr/lib/python3.7/site-packages/discogs_client/models.py", line 211, in refresh
data = self.client._get(self.data['resource_url'])
File "/usr/lib/python3.7/site-packages/discogs_client/client.py", line 123, in _get
return self._request('GET', url)
File "/usr/lib/python3.7/site-packages/discogs_client/client.py", line 120, in _request
raise HTTPError(body['message'], status_code)
discogs_client.exceptions.HTTPError: 502: The Discogs API is undergoing maintenance. We'll be back in a moment.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/bin/beet", line 11, in <module>
load_entry_point('beets', 'console_scripts', 'beet')()
File "/beets/beets/ui/__init__.py", line 1266, in main
_raw_main(args)
File "/beets/beets/ui/__init__.py", line 1253, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/beets/beets/ui/commands.py", line 955, in import_func
import_files(lib, paths, query)
File "/beets/beets/ui/commands.py", line 925, in import_files
session.run()
File "/beets/beets/importer.py", line 329, in run
pl.run_parallel(QUEUE_SIZE)
File "/beets/beets/util/pipeline.py", line 445, in run_parallel
six.reraise(exc_info[0], exc_info[1], exc_info[2])
File "/usr/lib/python3.7/site-packages/six.py", line 693, in reraise
raise value
File "/beets/beets/util/pipeline.py", line 312, in run
out = self.coro.send(msg)
File "/beets/beets/util/pipeline.py", line 194, in coro
func(*(args + (task,)))
File "/beets/beets/importer.py", line 1351, in lookup_candidates
task.lookup_candidates()
File "/beets/beets/importer.py", line 641, in lookup_candidates
autotag.tag_album(self.items, search_ids=self.search_ids)
File "/beets/beets/autotag/match.py", line 460, in tag_album
va_likely):
File "/beets/beets/plugins.py", line 571, in decorated
for v in generator(*args, **kwargs):
File "/beets/beets/autotag/hooks.py", line 620, in album_candidates
for candidate in plugins.candidates(items, artist, album, va_likely):
File "/beets/beets/plugins.py", line 381, in candidates
for candidate in plugin.candidates(items, artist, album, va_likely):
File "/beets/beetsplug/discogs.py", line 176, in candidates
return self.get_albums(query)
File "/beets/beetsplug/discogs.py", line 245, in get_albums
return [album for album in map(self.get_album_info, releases[:5])
File "/beets/beetsplug/discogs.py", line 245, in <listcomp>
return [album for album in map(self.get_album_info, releases[:5])
File "/beets/beetsplug/discogs.py", line 335, in get_album_info
original_year = self.get_master_year(master_id) if master_id else year
File "/beets/beetsplug/discogs.py", line 262, in get_master_year
self._log.debug(u'API Error: {0} (query: {1})', e, result._uri)
AttributeError: 'Master' object has no attribute '_uri'
|
discogs_client.exceptions.HTTPError
|
def open_audio_file(self, item):
"""Open the file to read the PCM stream from the using
``item.path``.
:return: the audiofile instance
:rtype: :class:`audiotools.AudioFile`
:raises :exc:`ReplayGainError`: if the file is not found or the
file format is not supported
"""
try:
audiofile = self._mod_audiotools.open(py3_path(syspath(item.path)))
except IOError:
raise ReplayGainError("File {} was not found".format(item.path))
except self._mod_audiotools.UnsupportedFile:
raise ReplayGainError("Unsupported file type {}".format(item.format))
return audiofile
|
def open_audio_file(self, item):
"""Open the file to read the PCM stream from the using
``item.path``.
:return: the audiofile instance
:rtype: :class:`audiotools.AudioFile`
:raises :exc:`ReplayGainError`: if the file is not found or the
file format is not supported
"""
try:
audiofile = self._mod_audiotools.open(item.path)
except IOError:
raise ReplayGainError("File {} was not found".format(item.path))
except self._mod_audiotools.UnsupportedFile:
raise ReplayGainError("Unsupported file type {}".format(item.format))
return audiofile
|
https://github.com/beetbox/beets/issues/3305
|
Sending event: library_opened
replaygain: analyzing Pink Floyd - The Wall - In the Flesh?
Traceback (most recent call last):
File "/usr/bin/beet", line 11, in <module>
load_entry_point('beets==1.5.0', 'console_scripts', 'beet')()
File "/usr/lib/python3.7/site-packages/beets-1.5.0-py3.7.egg/beets/ui/__init__.py", line 1267, in main
_raw_main(args)
File "/usr/lib/python3.7/site-packages/beets-1.5.0-py3.7.egg/beets/ui/__init__.py", line 1254, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/lib/python3.7/site-packages/beets-1.5.0-py3.7.egg/beetsplug/replaygain.py", line 1052, in func
self.handle_track(item, write, force)
File "/usr/lib/python3.7/site-packages/beets-1.5.0-py3.7.egg/beetsplug/replaygain.py", line 1003, in handle_track
track_gains = backend_instance.compute_track_gain([item])
File "/usr/lib/python3.7/site-packages/beets-1.5.0-py3.7.egg/beetsplug/replaygain.py", line 752, in compute_track_gain
return [self._compute_track_gain(item) for item in items]
File "/usr/lib/python3.7/site-packages/beets-1.5.0-py3.7.egg/beetsplug/replaygain.py", line 752, in <listcomp>
return [self._compute_track_gain(item) for item in items]
File "/usr/lib/python3.7/site-packages/beets-1.5.0-py3.7.egg/beetsplug/replaygain.py", line 781, in _compute_track_gain
rg_track_gain, rg_track_peak = self._title_gain(rg, audiofile)
File "/usr/lib/python3.7/site-packages/beets-1.5.0-py3.7.egg/beetsplug/replaygain.py", line 764, in _title_gain
return rg.title_gain(audiofile.to_pcm())
File "/usr/lib/python3.7/site-packages/audiotools/mp3.py", line 217, in to_pcm
return MP3Decoder(self.filename)
TypeError: argument 1 must be str, not bytes
|
TypeError
|
def authenticate(self):
if self.m.is_authenticated():
return
# Checks for OAuth2 credentials,
# if they don't exist - performs authorization
oauth_file = self.config["oauth_file"].as_filename()
if os.path.isfile(oauth_file):
uploader_id = self.config["uploader_id"]
uploader_name = self.config["uploader_name"]
self.m.login(
oauth_credentials=oauth_file,
uploader_id=uploader_id.as_str().upper() or None,
uploader_name=uploader_name.as_str() or None,
)
else:
self.m.perform_oauth(oauth_file)
|
def authenticate(self):
if self.m.is_authenticated():
return
# Checks for OAuth2 credentials,
# if they don't exist - performs authorization
oauth_file = self.config["oauth_file"].as_str()
if os.path.isfile(oauth_file):
uploader_id = self.config["uploader_id"]
uploader_name = self.config["uploader_name"]
self.m.login(
oauth_credentials=oauth_file,
uploader_id=uploader_id.as_str().upper() or None,
uploader_name=uploader_name.as_str() or None,
)
else:
self.m.perform_oauth(oauth_file)
|
https://github.com/beetbox/beets/issues/3270
|
user configuration: /home/jlivin25/.config/beets/config.yaml
data directory: /home/jlivin25/.config/beets
plugin paths: /home/jlivin25/.config/beets/myplugins
Sending event: pluginload
Traceback (most recent call last):
File "/home/jlivin25/.local/bin/beet", line 11, in <module>
sys.exit(main())
File "/home/jlivin25/.local/lib/python2.7/site-packages/beets/ui/__init__.py", line 1266, in main
_raw_main(args)
File "/home/jlivin25/.local/lib/python2.7/site-packages/beets/ui/__init__.py", line 1249, in _raw_main
subcommands, plugins, lib = _setup(options, lib)
File "/home/jlivin25/.local/lib/python2.7/site-packages/beets/ui/__init__.py", line 1135, in _setup
plugins = _load_plugins(config)
File "/home/jlivin25/.local/lib/python2.7/site-packages/beets/ui/__init__.py", line 1121, in _load_plugins
plugins.send("pluginload")
File "/home/jlivin25/.local/lib/python2.7/site-packages/beets/plugins.py", line 488, in send
for handler in event_handlers()[event]:
File "/home/jlivin25/.local/lib/python2.7/site-packages/beets/plugins.py", line 471, in event_handlers
for plugin in find_plugins():
File "/home/jlivin25/.local/lib/python2.7/site-packages/beets/plugins.py", line 307, in find_plugins
_instances[cls] = cls()
File "/home/jlivin25/.local/lib/python2.7/site-packages/beetsplug/gmusic.py", line 39, in __init__
u'oauth_file': gmusicapi.clients.OAUTH_FILEPATH,
AttributeError: 'module' object has no attribute 'OAUTH_FILEPATH'
|
AttributeError
|
def parse_tool_output(self, text, path_list, is_album):
"""Given the output from bs1770gain, parse the text and
return a list of dictionaries
containing information about each analyzed file.
"""
per_file_gain = {}
album_gain = {} # mutable variable so it can be set from handlers
parser = xml.parsers.expat.ParserCreate(encoding="utf-8")
state = {"file": None, "gain": None, "peak": None}
def start_element_handler(name, attrs):
if name == "track":
state["file"] = bytestring_path(attrs["file"])
if state["file"] in per_file_gain:
raise ReplayGainError("duplicate filename in bs1770gain output")
elif name == "integrated":
state["gain"] = float(attrs["lu"])
elif name == "sample-peak":
state["peak"] = float(attrs["factor"])
def end_element_handler(name):
if name == "track":
if state["gain"] is None or state["peak"] is None:
raise ReplayGainError(
"could not parse gain or peak from the output of bs1770gain"
)
per_file_gain[state["file"]] = Gain(state["gain"], state["peak"])
state["gain"] = state["peak"] = None
elif name == "summary":
if state["gain"] is None or state["peak"] is None:
raise ReplayGainError(
"could not parse gain or peak from the output of bs1770gain"
)
album_gain["album"] = Gain(state["gain"], state["peak"])
state["gain"] = state["peak"] = None
parser.StartElementHandler = start_element_handler
parser.EndElementHandler = end_element_handler
try:
parser.Parse(text, True)
except xml.parsers.expat.ExpatError:
raise ReplayGainError(
"The bs1770gain tool produced malformed XML. "
"Using version >=0.4.10 may solve this problem."
)
if len(per_file_gain) != len(path_list):
raise ReplayGainError(
"the number of results returned by bs1770gain does not match "
"the number of files passed to it"
)
# bs1770gain does not return the analysis results in the order that
# files are passed on the command line, because it is sorting the files
# internally. We must recover the order from the filenames themselves.
try:
out = [per_file_gain[os.path.basename(p)] for p in path_list]
except KeyError:
raise ReplayGainError(
"unrecognized filename in bs1770gain output "
"(bs1770gain can only deal with utf-8 file names)"
)
if is_album:
out.append(album_gain["album"])
return out
|
def parse_tool_output(self, text, path_list, is_album):
"""Given the output from bs1770gain, parse the text and
return a list of dictionaries
containing information about each analyzed file.
"""
per_file_gain = {}
album_gain = {} # mutable variable so it can be set from handlers
parser = xml.parsers.expat.ParserCreate(encoding="utf-8")
state = {"file": None, "gain": None, "peak": None}
def start_element_handler(name, attrs):
if name == "track":
state["file"] = bytestring_path(attrs["file"])
if state["file"] in per_file_gain:
raise ReplayGainError("duplicate filename in bs1770gain output")
elif name == "integrated":
state["gain"] = float(attrs["lu"])
elif name == "sample-peak":
state["peak"] = float(attrs["factor"])
def end_element_handler(name):
if name == "track":
if state["gain"] is None or state["peak"] is None:
raise ReplayGainError(
"could not parse gain or peak from the output of bs1770gain"
)
per_file_gain[state["file"]] = Gain(state["gain"], state["peak"])
state["gain"] = state["peak"] = None
elif name == "summary":
if state["gain"] is None or state["peak"] is None:
raise ReplayGainError(
"could not parse gain or peak from the output of bs1770gain"
)
album_gain["album"] = Gain(state["gain"], state["peak"])
state["gain"] = state["peak"] = None
parser.StartElementHandler = start_element_handler
parser.EndElementHandler = end_element_handler
parser.Parse(text, True)
if len(per_file_gain) != len(path_list):
raise ReplayGainError(
"the number of results returned by bs1770gain does not match "
"the number of files passed to it"
)
# bs1770gain does not return the analysis results in the order that
# files are passed on the command line, because it is sorting the files
# internally. We must recover the order from the filenames themselves.
try:
out = [per_file_gain[os.path.basename(p)] for p in path_list]
except KeyError:
raise ReplayGainError(
"unrecognized filename in bs1770gain output "
"(bs1770gain can only deal with utf-8 file names)"
)
if is_album:
out.append(album_gain["album"])
return out
|
https://github.com/beetbox/beets/issues/2983
|
[…]
replaygain: executing bs1770gain --ebu --xml -p /home/breversa/Musique/00tz 00tz/Endzeit Bunkertracks [act VII] – The bonus tracks/00tz 00tz - Poisoned minds and broken hearts (Alarm mix).flac /home/breversa/Musique/[Fabrikmutter]/Endzeit Bunkertracks [act VII] – The bonus tracks/[Fabrikmutter] - The minimal devotion.flac /home/breversa/Musique/[Product]/Endzeit Bunkertracks [act VII] – The bonus tracks/[Product] - To the wind (chant).flac /home/breversa/Musique/[Synaptic_Reactor]/Endzeit Bunkertracks [act VII] – The bonus tracks/[Synaptic_Reactor] - The devil's work (Ruinizer mix).flac /home/breversa/Musique/A.D.R.O.N./Endzeit Bunkertracks [act VII] – The bonus tracks/A.D.R.O.N. - Be subordinated.flac /home/breversa/Musique/Aim & Execute/Endzeit Bunkertracks [act VII] – The bonus tracks/Aim & Execute - Phantom energy.flac /home/breversa/Musique/Antibody/Endzeit Bunkertracks [act VII] – The bonus tracks/Antibody - Shrimps.flac /home/breversa/Musique/Ayria/Endzeit Bunkertracks [act VII] – The bonus tracks/Ayria - Hunger (Acylum mix).flac /home/breversa/Musique/Bereshit/Endzeit Bunkertracks [act VII] – The bonus tracks/Bereshit - Virtual freedom.flac /home/breversa/Musique/Biomechanimal/Endzeit Bunkertracks [act VII] – The bonus tracks/Biomechanimal - Elder gods.flac /home/breversa/Musique/C-[Phalea]/Endzeit Bunkertracks [act VII] – The bonus tracks/C-[Phalea] - Process to filth.flac /home/breversa/Musique/C2/Endzeit Bunkertracks [act VII] – The bonus tracks/C2 - Affirmative negative infinite.flac /home/breversa/Musique/Code_ Red Core/Endzeit Bunkertracks [act VII] – The bonus tracks/Code_ Red Core - Miss betrayal.flac /home/breversa/Musique/Denial of Service/Endzeit Bunkertracks [act VII] – The bonus tracks/Denial of Service - Red cross red crescent.flac /home/breversa/Musique/Die System/Endzeit Bunkertracks [act VII] – The bonus tracks/Die System - Cyanide.flac /home/breversa/Musique/Diverje/Endzeit Bunkertracks [act VII] – The bonus tracks/Diverje - All the fakes (touched by Stahlnebel & Black Selket).flac /home/breversa/Musique/Dolls of Pain/Endzeit Bunkertracks [act VII] – The bonus tracks/Dolls of Pain - Drugs on the floor (Endzeit mix).flac /home/breversa/Musique/Durandal x Xiescive/Endzeit Bunkertracks [act VII] – The bonus tracks/Durandal x Xiescive - Rabid.flac /home/breversa/Musique/DYM/Endzeit Bunkertracks [act VII] – The bonus tracks/DYM - With a smile (edit).flac /home/breversa/Musique/Flammpunkt/Endzeit Bunkertracks [act VII] – The bonus tracks/Flammpunkt - One in the brain (Binary Division mix).flac /home/breversa/Musique/Framework/Endzeit Bunkertracks [act VII] – The bonus tracks/Framework - Faith (Contact #2).flac /home/breversa/Musique/Ginger Snap5/Endzeit Bunkertracks [act VII] – The bonus tracks/Ginger Snap5 - Waiting for… (Pride and Fall mix).flac /home/breversa/Musique/Gusano/Endzeit Bunkertracks [act VII] – The bonus tracks/Gusano - Sangre eternal.flac /home/breversa/Musique/Hasswut/Endzeit Bunkertracks [act VII] – The bonus tracks/Hasswut - Nicht für mich.flac /home/breversa/Musique/Hydra Division V/Endzeit Bunkertracks [act VII] – The bonus tracks/Hydra Division V - Ostracized.flac /home/breversa/Musique/Larva/Endzeit Bunkertracks [act VII] – The bonus tracks/Larva - Mi mundo nunco fue el vuestro.flac /home/breversa/Musique/M.O.D./Endzeit Bunkertracks [act VII] – The bonus tracks/M.O.D. - Without regrets.flac /home/breversa/Musique/Mechanic Doll Machine/Endzeit Bunkertracks [act VII] – The bonus tracks/Mechanic Doll Machine - Mirage.flac /home/breversa/Musique/Mentallo and The Fixer/Endzeit Bunkertracks [act VII] – The bonus tracks/Mentallo and The Fixer - Gammera (Equinox).flac /home/breversa/Musique/Monospore/Endzeit Bunkertracks [act VII] – The bonus tracks/Monospore - Standing high.flac /home/breversa/Musique/MRDTC/Endzeit Bunkertracks [act VII] – The bonus tracks/MRDTC - Brain talk (Bunker hellectric).flac /home/breversa/Musique/Neonsol/Endzeit Bunkertracks [act VII] – The bonus tracks/Neonsol - Manipulation.flac /home/breversa/Musique/Neustrohm/Endzeit Bunkertracks [act VII] – The bonus tracks/Neustrohm - Non toxic.flac /home/breversa/Musique/Protectorate/Endzeit Bunkertracks [act VII] – The bonus tracks/Protectorate - Universal exports.flac /home/breversa/Musique/Psychicold/Endzeit Bunkertracks [act VII] – The bonus tracks/Psychicold - Burn in Hell.flac /home/breversa/Musique/Renoized/Endzeit Bunkertracks [act VII] – The bonus tracks/Renoized - Defcon.flac /home/breversa/Musique/Riotlegion/Endzeit Bunkertracks [act VII] – The bonus tracks/Riotlegion - God(b)less.flac /home/breversa/Musique/Sequenz_/Endzeit Bunkertracks [act VII] – The bonus tracks/Sequenz_ - Despair (A.D.N. mix).flac /home/breversa/Musique/Shadow Lady/Endzeit Bunkertracks [act VII] – The bonus tracks/Shadow Lady - Species (Endzeit mix).flac /home/breversa/Musique/Shadow System/Endzeit Bunkertracks [act VII] – The bonus tracks/Shadow System - Nex.flac /home/breversa/Musique/Statik Sky/Endzeit Bunkertracks [act VII] – The bonus tracks/Statik Sky - Fuqin noize.flac /home/breversa/Musique/Studio-X/Endzeit Bunkertracks [act VII] – The bonus tracks/Studio-X - The source of energy.flac /home/breversa/Musique/Suicidal Romance/Endzeit Bunkertracks [act VII] – The bonus tracks/Suicidal Romance - Touch (ES23 mix).flac /home/breversa/Musique/Switchface/Endzeit Bunkertracks [act VII] – The bonus tracks/Switchface - A crack in the monolith.flac /home/breversa/Musique/Synapsyche/Endzeit Bunkertracks [act VII] – The bonus tracks/Synapsyche - Breath control.flac /home/breversa/Musique/Tamtrum/Endzeit Bunkertracks [act VII] – The bonus tracks/Tamtrum - Le son de la pluie (Fils de pute mix by Grendel).flac /home/breversa/Musique/Third Realm/Endzeit Bunkertracks [act VII] – The bonus tracks/Third Realm - Deliverance.flac /home/breversa/Musique/Traumatize/Endzeit Bunkertracks [act VII] – The bonus tracks/Traumatize - Moje odbicie.flac /home/breversa/Musique/Unter Null/Endzeit Bunkertracks [act VII] – The bonus tracks/Unter Null - The fall (The Chemical Sweet Kid mix).flac /home/breversa/Musique/Vault 113/Endzeit Bunkertracks [act VII] – The bonus tracks/Vault 113 - The prophecy.flac
Sending event: album_imported
replaygain: analysis finished: <bs1770gain>
<album>
<track total="50" number="1" file="00tz 00tz - Poisoned minds and broken hearts (Alarm mix).flac">
<integrated lufs="-7.16" lu="-15.84" />
<sample-peak spfs="-0.03" factor="0.996765" />
</track>
<track total="50" number="2" file="A.D.R.O.N. - Be subordinated.flac">
<integrated lufs="-6.34" lu="-16.66" />
<sample-peak spfs="-0.20" factor="0.977111" />
</track>
<track total="50" number="3" file="Aim & Execute - Phantom energy.flac">
<integrated lufs="-7.90" lu="-15.10" />
<sample-peak spfs="-0.03" factor="0.996765" />
</track>
<track total="50" number="4" file="Antibody - Shrimps.flac">
<integrated lufs="-5.91" lu="-17.09" />
<sample-peak spfs="-0.20" factor="0.977111" />
</track>
<track total="50" number="5" file="Ayria - Hunger (Acylum mix).flac">
<integrated lufs="-8.48" lu="-14.52" />
<sample-peak spfs="-0.03" factor="0.996765" />
</track>
<track total="50" number="6" file="Bereshit - Virtual freedom.flac">
<integrated lufs="-5.49" lu="-17.51" />
<sample-peak spfs="-0.21" factor="0.976043" />
</track>
<track total="50" number="7" file="Biomechanimal - Elder gods.flac">
<integrated lufs="-8.85" lu="-14.15" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="8" file="C-[Phalea] - Process to filth.flac">
<integrated lufs="-8.00" lu="-15.00" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="9" file="C2 - Affirmative negative infinite.flac">
<integrated lufs="-8.91" lu="-14.09" />
<sample-peak spfs="-0.03" factor="0.996765" />
</track>
<track total="50" number="10" file="Code_ Red Core - Miss betrayal.flac">
<integrated lufs="-7.46" lu="-15.54" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="11" file="DYM - With a smile (edit).flac">
<integrated lufs="-9.42" lu="-13.58" />
<sample-peak spfs="-0.03" factor="0.996765" />
</track>
<track total="50" number="12" file="Denial of Service - Red cross red crescent.flac">
<integrated lufs="-8.26" lu="-14.74" />
<sample-peak spfs="-0.03" factor="0.996765" />
</track>
<track total="50" number="13" file="Die System - Cyanide.flac">
<integrated lufs="-8.53" lu="-14.47" />
<sample-peak spfs="-0.03" factor="0.996765" />
</track>
<track total="50" number="14" file="Diverje - All the fakes (touched by Stahlnebel & Black Selket).flac">
<integrated lufs="-8.13" lu="-14.87" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="15" file="Dolls of Pain - Drugs on the floor (Endzeit mix).flac">
<integrated lufs="-8.38" lu="-14.62" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="16" file="Durandal x Xiescive - Rabid.flac">
<integrated lufs="-9.56" lu="-13.44" />
<sample-peak spfs="0.00" factor="1.000031" />
</track>
<track total="50" number="17" file="Flammpunkt - One in the brain (Binary Division mix).flac">
<integrated lufs="-9.36" lu="-13.64" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="18" file="Framework - Faith (Contact #2).flac">
<integrated lufs="-8.65" lu="-14.35" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="19" file="Ginger Snap5 - Waiting for… (Pride and Fall mix).flac">
<integrated lufs="-6.26" lu="-16.74" />
<sample-peak spfs="-0.15" factor="0.982391" />
</track>
<track total="50" number="20" file="Gusano - Sangre eternal.flac">
<integrated lufs="-7.86" lu="-15.14" />
<sample-peak spfs="-0.03" factor="0.996765" />
</track>
<track total="50" number="21" file="Hasswut - Nicht für mich.flac">
<integrated lufs="-5.16" lu="-17.84" />
<sample-peak spfs="-0.30" factor="0.965514" />
</track>
<track total="50" number="22" file="Hydra Division V - Ostracized.flac">
<integrated lufs="-8.22" lu="-14.78" />
<sample-peak spfs="-0.17" factor="0.980499" />
</track>
<track total="50" number="23" file="Larva - Mi mundo nunco fue el vuestro.flac">
<integrated lufs="-9.14" lu="-13.86" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="24" file="M.O.D. - Without regrets.flac">
<integrated lufs="-5.29" lu="-17.71" />
<sample-peak spfs="-0.01" factor="0.999145" />
</track>
<track total="50" number="25" file="MRDTC - Brain talk (Bunker hellectric).flac">
<integrated lufs="-7.79" lu="-15.21" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="26" file="Mechanic Doll Machine - Mirage.flac">
<integrated lufs="-8.33" lu="-14.67" />
<sample-peak spfs="0.00" factor="1.000031" />
</track>
<track total="50" number="27" file="Mentallo and The Fixer - Gammera (Equinox).flac">
<integrated lufs="-6.47" lu="-16.53" />
<sample-peak spfs="-0.01" factor="0.999023" />
</track>
<track total="50" number="28" file="Monospore - Standing high.flac">
<integrated lufs="-8.62" lu="-14.38" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="29" file="Neonsol - Manipulation.flac">
<integrated lufs="-7.12" lu="-15.88" />
<sample-peak spfs="-0.03" factor="0.996765" />
</track>
<track total="50" number="30" file="Neustrohm - Non toxic.flac">
<integrated lufs="-8.88" lu="-14.12" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="31" file="Protectorate - Universal exports.flac">
<integrated lufs="-7.59" lu="-15.41" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="32" file="Psychicold - Burn in Hell.flac">
<integrated lufs="-8.39" lu="-14.61" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="33" file="Renoized - Defcon.flac">
<integrated lufs="-8.42" lu="-14.58" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="34" file="Riotlegion - God(b)less.flac">
<integrated lufs="-8.79" lu="-14.21" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="35" file="Sequenz_ - Despair (A.D.N. mix).flac">
<integrated lufs="-9.00" lu="-14.00" />
<sample-peak spfs="0.00" factor="1.000000" />
</track>
<track total="50" number="36" file="Shadow Lady - Species (Endzeit mix).flac">
<integrated lufs="-6.52" lu="-16.48" />
<sample-peak spfs="-0.03" factor="0.996765" />
</track>
<track total="50" number="37" file="Shadow System - Nex.flac">
<integrated lufs="-9.22" lu="-13.78" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="38" file="Statik Sky - Fuqin noize.flac">
<integrated lufs="-8.39" lu="-14.61" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="39" file="Studio-X - The source of energy.flac">
<integrated lufs="-6.26" lu="-16.74" />
<sample-peak spfs="0.00" factor="1.000000" />
</track>
<track total="50" number="40" file="Suicidal Romance - Touch (ES23 mix).flac">
<integrated lufs="-8.69" lu="-14.31" />
<sample-peak spfs="-0.09" factor="0.990020" />
</track>
<track total="50" number="41" file="Switchface - A crack in the monolith.flac">
<integrated lufs="-8.08" lu="-14.92" />
<sample-peak spfs="0.00" factor="1.000000" />
</track>
<track total="50" number="42" file="Synapsyche - Breath control.flac">
<integrated lufs="-9.48" lu="-13.52" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="43" file="Tamtrum - Le son de la pluie (Fils de pute mix by Grendel).flac">
<integrated lufs="-7.78" lu="-15.22" />
<sample-peak spfs="-0.50" factor="0.944060" />
</track>
<track total="50" number="44" file="Third Realm - Deliverance.flac">
<integrated lufs="-7.40" lu="-15.60" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="45" file="Traumatize - Moje odbicie.flac">
<integrated lufs="-9.20" lu="-13.80" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="46" file="Unter Null - The fall (The Chemical Sweet Kid mix).flac">
<integrated lufs="-11.11" lu="-11.89" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="47" file="Vault 113 - The prophecy.flac">
<integrated lufs="-9.16" lu="-13.84" />
<sample-peak spfs="-0.22" factor="0.975188" />
</track>
<track total="50" number="48" file="[Fabrikmutter] - The minimal devotion.flac">
<integrated lufs="-8.19" lu="-14.81" />
<sample-peak spfs="0.00" factor="1.000031" />
</track>
<track total="50" number="49" file="[Product] - To the wind (chant).flac">
<integrated lufs="-9.00" lu="-14.00" />
<sample-peak spfs="-0.03" factor="0.996765" />
</track>
<track total="50" number="50" file="[Synaptic_Reactor] - The devil's work (Ruinizer mix).flac">
<integrated lufs="-10.23" lu="-12.77" />
<sample-peak spfs="-0.22" factor="0.975176" />
</track>
<summary total="50">
<integrated lufs="-8.00" lu="-15.00" />
<sample-peak spfs="0.00" factor="1.000031" />
</summary>
</album>
</bs1770gain>
Traceback (most recent call last):
File "/usr/local/bin/beet", line 11, in <module>
sys.exit(main())
File "/usr/local/lib/python2.7/dist-packages/beets/ui/__init__.py", line 1256, in main
_raw_main(args)
File "/usr/local/lib/python2.7/dist-packages/beets/ui/__init__.py", line 1243, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/local/lib/python2.7/dist-packages/beets/ui/commands.py", line 955, in import_func
import_files(lib, paths, query)
File "/usr/local/lib/python2.7/dist-packages/beets/ui/commands.py", line 925, in import_files
session.run()
File "/usr/local/lib/python2.7/dist-packages/beets/importer.py", line 329, in run
pl.run_parallel(QUEUE_SIZE)
File "/usr/local/lib/python2.7/dist-packages/beets/util/pipeline.py", line 445, in run_parallel
six.reraise(exc_info[0], exc_info[1], exc_info[2])
File "/usr/local/lib/python2.7/dist-packages/beets/util/pipeline.py", line 312, in run
out = self.coro.send(msg)
File "/usr/local/lib/python2.7/dist-packages/beets/util/pipeline.py", line 194, in coro
func(*(args + (task,)))
File "/usr/local/lib/python2.7/dist-packages/beets/importer.py", line 1511, in plugin_stage
func(session, task)
File "/usr/local/lib/python2.7/dist-packages/beets/plugins.py", line 140, in wrapper
return func(*args, **kwargs)
File "/usr/local/lib/python2.7/dist-packages/beetsplug/replaygain.py", line 1028, in imported
self.handle_album(task.album, False)
File "/usr/local/lib/python2.7/dist-packages/beetsplug/replaygain.py", line 955, in handle_album
album_gain = backend_instance.compute_album_gain(album)
File "/usr/local/lib/python2.7/dist-packages/beetsplug/replaygain.py", line 136, in compute_album_gain
output = self.compute_gain(supported_items, True)
File "/usr/local/lib/python2.7/dist-packages/beetsplug/replaygain.py", line 189, in compute_gain
return self.compute_chunk_gain(items, is_album)
File "/usr/local/lib/python2.7/dist-packages/beetsplug/replaygain.py", line 213, in compute_chunk_gain
results = self.parse_tool_output(output, path_list, is_album)
File "/usr/local/lib/python2.7/dist-packages/beetsplug/replaygain.py", line 254, in parse_tool_output
parser.Parse(text, True)
xml.parsers.expat.ExpatError: not well-formed (invalid token): line 11, column 44
|
xml.parsers.expat.ExpatError
|
def __init__(self):
super(LyricsPlugin, self).__init__()
self.import_stages = [self.imported]
self.config.add(
{
"auto": True,
"bing_client_secret": None,
"bing_lang_from": [],
"bing_lang_to": None,
"google_API_key": None,
"google_engine_ID": "009217259823014548361:lndtuqkycfu",
"genius_api_key": "Ryq93pUGm8bM6eUWwD_M3NOFFDAtp2yEE7W"
"76V-uFL5jks5dNvcGCdarqFjDhP9c",
"fallback": None,
"force": False,
"local": False,
"sources": self.SOURCES,
}
)
self.config["bing_client_secret"].redact = True
self.config["google_API_key"].redact = True
self.config["google_engine_ID"].redact = True
self.config["genius_api_key"].redact = True
# State information for the ReST writer.
# First, the current artist we're writing.
self.artist = "Unknown artist"
# The current album: False means no album yet.
self.album = False
# The current rest file content. None means the file is not
# open yet.
self.rest = None
available_sources = list(self.SOURCES)
sources = plugins.sanitize_choices(
self.config["sources"].as_str_seq(), available_sources
)
if "google" in sources:
if not self.config["google_API_key"].get():
# We log a *debug* message here because the default
# configuration includes `google`. This way, the source
# is silent by default but can be enabled just by
# setting an API key.
self._log.debug("Disabling google source: no API key configured.")
sources.remove("google")
elif not HAS_BEAUTIFUL_SOUP:
self._log.warning(
"To use the google lyrics source, you must "
"install the beautifulsoup4 module. See "
"the documentation for further details."
)
sources.remove("google")
if "genius" in sources and not HAS_BEAUTIFUL_SOUP:
self._log.debug(
"The Genius backend requires BeautifulSoup, which is not "
"installed, so the source is disabled."
)
sources.remove("genius")
self.config["bing_lang_from"] = [
x.lower() for x in self.config["bing_lang_from"].as_str_seq()
]
self.bing_auth_token = None
if not HAS_LANGDETECT and self.config["bing_client_secret"].get():
self._log.warning(
"To use bing translations, you need to "
"install the langdetect module. See the "
"documentation for further details."
)
self.backends = [
self.SOURCE_BACKENDS[source](self.config, self._log) for source in sources
]
|
def __init__(self):
super(LyricsPlugin, self).__init__()
self.import_stages = [self.imported]
self.config.add(
{
"auto": True,
"bing_client_secret": None,
"bing_lang_from": [],
"bing_lang_to": None,
"google_API_key": None,
"google_engine_ID": "009217259823014548361:lndtuqkycfu",
"genius_api_key": "Ryq93pUGm8bM6eUWwD_M3NOFFDAtp2yEE7W"
"76V-uFL5jks5dNvcGCdarqFjDhP9c",
"fallback": None,
"force": False,
"local": False,
"sources": self.SOURCES,
}
)
self.config["bing_client_secret"].redact = True
self.config["google_API_key"].redact = True
self.config["google_engine_ID"].redact = True
self.config["genius_api_key"].redact = True
# State information for the ReST writer.
# First, the current artist we're writing.
self.artist = "Unknown artist"
# The current album: False means no album yet.
self.album = False
# The current rest file content. None means the file is not
# open yet.
self.rest = None
available_sources = list(self.SOURCES)
sources = plugins.sanitize_choices(
self.config["sources"].as_str_seq(), available_sources
)
if "google" in sources:
if not self.config["google_API_key"].get():
# We log a *debug* message here because the default
# configuration includes `google`. This way, the source
# is silent by default but can be enabled just by
# setting an API key.
self._log.debug("Disabling google source: no API key configured.")
sources.remove("google")
elif not HAS_BEAUTIFUL_SOUP:
self._log.warning(
"To use the google lyrics source, you must "
"install the beautifulsoup4 module. See "
"the documentation for further details."
)
sources.remove("google")
if "genius" in sources and not HAS_BEAUTIFUL_SOUP:
self._log.debug(
"The Genius backend requires BeautifulSoup, which is not "
"installed, so the source is disabled."
)
sources.remove("google")
self.config["bing_lang_from"] = [
x.lower() for x in self.config["bing_lang_from"].as_str_seq()
]
self.bing_auth_token = None
if not HAS_LANGDETECT and self.config["bing_client_secret"].get():
self._log.warning(
"To use bing translations, you need to "
"install the langdetect module. See the "
"documentation for further details."
)
self.backends = [
self.SOURCE_BACKENDS[source](self.config, self._log) for source in sources
]
|
https://github.com/beetbox/beets/issues/2911
|
(Pdb++) n
Traceback (most recent call last):
File "/home/q/envs/beets/bin/beet", line 11, in <module>
sys.exit(main())
File "/home/q/envs/beets/lib/python3.6/site-packages/beets/ui/__init__.py", line 1257, in main
_raw_main(args)
File "/home/q/envs/beets/lib/python3.6/site-packages/beets/ui/__init__.py", line 1239, in _raw_main
subcommands, plugins, lib = _setup(options, lib)
File "/home/q/envs/beets/lib/python3.6/site-packages/beets/ui/__init__.py", line 1130, in _setup
plugins = _load_plugins(config)
File "/home/q/envs/beets/lib/python3.6/site-packages/beets/ui/__init__.py", line 1116, in _load_plugins
plugins.send("pluginload")
File "/home/q/envs/beets/lib/python3.6/site-packages/beets/plugins.py", line 475, in send
for handler in event_handlers()[event]:
File "/home/q/envs/beets/lib/python3.6/site-packages/beets/plugins.py", line 458, in event_handlers
for plugin in find_plugins():
File "/home/q/envs/beets/lib/python3.6/site-packages/beets/plugins.py", line 304, in find_plugins
_instances[cls] = cls()
File "/home/q/envs/beets/lib/python3.6/site-packages/beetsplug/lyrics.py", line 682, in __init__
sources.remove('google')
ValueError: list.remove(x): x not in list
|
ValueError
|
def lyrics_from_song_api_path(self, song_api_path):
song_url = self.base_url + song_api_path
response = requests.get(song_url, headers=self.headers)
json = response.json()
path = json["response"]["song"]["path"]
# Gotta go regular html scraping... come on Genius.
page_url = "https://genius.com" + path
page = requests.get(page_url)
html = BeautifulSoup(page.text, "html.parser")
# Remove script tags that they put in the middle of the lyrics.
[h.extract() for h in html("script")]
# At least Genius is nice and has a tag called 'lyrics'!
# Updated css where the lyrics are based in HTML.
lyrics = html.find("div", class_="lyrics").get_text()
return lyrics
|
def lyrics_from_song_api_path(self, song_api_path):
song_url = self.base_url + song_api_path
response = requests.get(song_url, headers=self.headers)
json = response.json()
path = json["response"]["song"]["path"]
# gotta go regular html scraping... come on Genius
page_url = "https://genius.com" + path
page = requests.get(page_url)
html = BeautifulSoup(page.text, "html.parser")
# remove script tags that they put in the middle of the lyrics
[h.extract() for h in html("script")]
# at least Genius is nice and has a tag called 'lyrics'!
lyrics = html.find(
"div", class_="lyrics"
).get_text() # updated css where the lyrics are based in HTML
return lyrics
|
https://github.com/beetbox/beets/issues/2545
|
======================================================================
FAIL: Test default backends with songs known to exist in respective databases.
----------------------------------------------------------------------
Traceback (most recent call last):
File "/Users/flap/Dev/beets/test/test_lyrics.py", line 316, in test_backend_sources_ok
self.assertFalse(errors)
AssertionError: ['LyricsCom', 'MusiXmatch', 'Genius'] is not false
-------------------- >> begin captured logging << --------------------
[...]
beets.lyrics: DEBUG: lyrics: genius: requesting search https://api.genius.com/search?q=The%20Beatles%20Lady%20Madonna
requests.packages.urllib3.connectionpool: DEBUG: Starting new HTTPS connection (1): api.genius.com
requests.packages.urllib3.connectionpool: DEBUG: https://api.genius.com:443 "GET /search?q=The%20Beatles%20Lady%20Madonna HTTP/1.1" 200 None
beets.lyrics: DEBUG: lyrics: genius: requesting lyrics for link https://genius.com/The-beatles-lady-madonna-lyrics
requests.packages.urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): genius-api.com
beets.lyrics: DEBUG: lyrics: genius: request error: HTTPConnectionPool(host='genius-api.com', port=80): Max retries exceeded with url: /api/lyricsInfo (Caused by NewConnectionError('<requests.packages.urllib3.connection.HTTPConnection object at 0x104b29590>: Failed to establish a new connection: [Errno 8] nodename nor servname provided, or not known',))
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
Ran 17 tests in 15.582s
FAILED (failures=1)
|
AssertionError
|
def item_file(item_id):
item = g.lib.get_item(item_id)
item_path = (
util.syspath(item.path) if (os.name == "nt") else (util.py3_path(item.path))
)
response = flask.send_file(
item_path,
as_attachment=True,
attachment_filename=os.path.basename(util.py3_path(item.path)),
)
response.headers["Content-Length"] = os.path.getsize(item_path)
return response
|
def item_file(item_id):
item = g.lib.get_item(item_id)
item_path = util.syspath(item.path) if os.name == "nt" else util.py3_path(item.path)
response = flask.send_file(
item_path,
as_attachment=True,
attachment_filename=os.path.basename(util.py3_path(item.path)),
)
response.headers["Content-Length"] = os.path.getsize(item_path)
return response
|
https://github.com/beetbox/beets/issues/2592
|
[2017-06-14 21:36:27,233] ERROR in app: Exception on /item/52117/file [GET]
Traceback (most recent call last):
File "c:\python\lib\site-packages\flask\app.py", line 1982, in wsgi_app
response = self.full_dispatch_request()
File "c:\python\lib\site-packages\flask\app.py", line 1614, in full_dispatch_request
rv = self.handle_user_exception(e)
File "c:\python\lib\site-packages\flask_cors\extension.py", line 161, in wrapped_function
return cors_after_request(app.make_response(f(*args, **kwargs)))
File "c:\python\lib\site-packages\flask\app.py", line 1517, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "c:\python\lib\site-packages\flask\app.py", line 1612, in full_dispatch_request
rv = self.dispatch_request()
File "c:\python\lib\site-packages\flask\app.py", line 1598, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "c:\python\lib\site-packages\beetsplug\web\__init__.py", line 209, in item_file
attachment_filename=os.path.basename(util.py3_path(item.path)),
File "c:\python\lib\site-packages\flask\helpers.py", line 549, in send_file
file = open(filename, 'rb')
IOError: [Errno 2] No such file or directory: 'PATH_REDACTED\\01 Marta Kubis\xcc\x8cova\xcc\x81 - Tak Dej Se K Na\xcc\x81m A Projdem Svet.flac'
127.0.0.1 - - [14/Jun/2017 21:36:27] "GET /item/52117/file HTTP/1.1" 500 -
|
IOError
|
def item_file(item_id):
item = g.lib.get_item(item_id)
item_path = (
util.syspath(item.path) if (os.name == "nt") else (util.py3_path(item.path))
)
response = flask.send_file(
item_path,
as_attachment=True,
attachment_filename=os.path.basename(util.py3_path(item.path)),
)
response.headers["Content-Length"] = os.path.getsize(item_path)
return response
|
def item_file(item_id):
item = g.lib.get_item(item_id)
response = flask.send_file(
util.py3_path(item.path),
as_attachment=True,
attachment_filename=os.path.basename(util.py3_path(item.path)),
)
response.headers["Content-Length"] = os.path.getsize(item.path)
return response
|
https://github.com/beetbox/beets/issues/2592
|
[2017-06-14 21:36:27,233] ERROR in app: Exception on /item/52117/file [GET]
Traceback (most recent call last):
File "c:\python\lib\site-packages\flask\app.py", line 1982, in wsgi_app
response = self.full_dispatch_request()
File "c:\python\lib\site-packages\flask\app.py", line 1614, in full_dispatch_request
rv = self.handle_user_exception(e)
File "c:\python\lib\site-packages\flask_cors\extension.py", line 161, in wrapped_function
return cors_after_request(app.make_response(f(*args, **kwargs)))
File "c:\python\lib\site-packages\flask\app.py", line 1517, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "c:\python\lib\site-packages\flask\app.py", line 1612, in full_dispatch_request
rv = self.dispatch_request()
File "c:\python\lib\site-packages\flask\app.py", line 1598, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "c:\python\lib\site-packages\beetsplug\web\__init__.py", line 209, in item_file
attachment_filename=os.path.basename(util.py3_path(item.path)),
File "c:\python\lib\site-packages\flask\helpers.py", line 549, in send_file
file = open(filename, 'rb')
IOError: [Errno 2] No such file or directory: 'PATH_REDACTED\\01 Marta Kubis\xcc\x8cova\xcc\x81 - Tak Dej Se K Na\xcc\x81m A Projdem Svet.flac'
127.0.0.1 - - [14/Jun/2017 21:36:27] "GET /item/52117/file HTTP/1.1" 500 -
|
IOError
|
def _rep(obj, expand=False):
"""Get a flat -- i.e., JSON-ish -- representation of a beets Item or
Album object. For Albums, `expand` dictates whether tracks are
included.
"""
out = dict(obj)
if isinstance(obj, beets.library.Item):
if app.config.get("INCLUDE_PATHS", False):
out["path"] = util.displayable_path(out["path"])
else:
del out["path"]
# Filter all bytes attributes and convert them to strings
for key, value in out.items():
if isinstance(out[key], bytes):
out[key] = base64.b64encode(out[key]).decode("ascii")
# Get the size (in bytes) of the backing file. This is useful
# for the Tomahawk resolver API.
try:
out["size"] = os.path.getsize(util.syspath(obj.path))
except OSError:
out["size"] = 0
return out
elif isinstance(obj, beets.library.Album):
del out["artpath"]
if expand:
out["items"] = [_rep(item) for item in obj.items()]
return out
|
def _rep(obj, expand=False):
"""Get a flat -- i.e., JSON-ish -- representation of a beets Item or
Album object. For Albums, `expand` dictates whether tracks are
included.
"""
out = dict(obj)
if isinstance(obj, beets.library.Item):
if app.config.get("INCLUDE_PATHS", False):
out["path"] = util.displayable_path(out["path"])
else:
del out["path"]
# Filter all bytes attributes and convert them to strings
for key in filter(lambda key: isinstance(out[key], bytes), out):
out[key] = base64.b64encode(out[key]).decode("ascii")
# Get the size (in bytes) of the backing file. This is useful
# for the Tomahawk resolver API.
try:
out["size"] = os.path.getsize(util.syspath(obj.path))
except OSError:
out["size"] = 0
return out
elif isinstance(obj, beets.library.Album):
del out["artpath"]
if expand:
out["items"] = [_rep(item) for item in obj.items()]
return out
|
https://github.com/beetbox/beets/issues/2532
|
~
➔ beet -vv web
* Running on http://127.0.0.1:8337/ (Press CTRL+C to quit)
127.0.0.1 - - [30/Apr/2017 11:05:15] "GET / HTTP/1.1" 200 -
127.0.0.1 - - [30/Apr/2017 11:05:22] "GET /item/query/isabel HTTP/1.1" 200 -
Error on request:
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/werkzeug/serving.py", line 209, in run_wsgi
execute(self.server.app)
File "/usr/lib/python3.6/site-packages/werkzeug/serving.py", line 199, in execute
for data in application_iter:
File "/usr/lib/python3.6/site-packages/werkzeug/wsgi.py", line 704, in __next__
return self._next()
File "/usr/lib/python3.6/site-packages/werkzeug/wrappers.py", line 81, in _iter_encoded
for item in iterable:
File "/usr/lib/python3.6/site-packages/beetsplug/web/__init__.py", line 74, in json_generator
yield json.dumps(_rep(item, expand=expand))
File "/usr/lib/python3.6/json/__init__.py", line 231, in dumps
return _default_encoder.encode(obj)
File "/usr/lib/python3.6/json/encoder.py", line 199, in encode
chunks = self.iterencode(o, _one_shot=True)
File "/usr/lib/python3.6/json/encoder.py", line 257, in iterencode
return _iterencode(o, 0)
File "/usr/lib/python3.6/json/encoder.py", line 180, in default
o.__class__.__name__)
TypeError: Object of type 'bytes' is not JSON serializable
|
TypeError
|
def _rep(obj, expand=False):
"""Get a flat -- i.e., JSON-ish -- representation of a beets Item or
Album object. For Albums, `expand` dictates whether tracks are
included.
"""
out = dict(obj)
if isinstance(obj, beets.library.Item):
if app.config.get("INCLUDE_PATHS", False):
out["path"] = util.displayable_path(out["path"])
else:
del out["path"]
# Filter all bytes attributes and convert them to strings
for key, value in out.items():
if isinstance(out[key], bytes):
out[key] = base64.b64encode(out[key]).decode("ascii")
# Get the size (in bytes) of the backing file. This is useful
# for the Tomahawk resolver API.
try:
out["size"] = os.path.getsize(util.syspath(obj.path))
except OSError:
out["size"] = 0
return out
elif isinstance(obj, beets.library.Album):
del out["artpath"]
if expand:
out["items"] = [_rep(item) for item in obj.items()]
return out
|
def _rep(obj, expand=False):
"""Get a flat -- i.e., JSON-ish -- representation of a beets Item or
Album object. For Albums, `expand` dictates whether tracks are
included.
"""
out = dict(obj)
if isinstance(obj, beets.library.Item):
if app.config.get("INCLUDE_PATHS", False):
out["path"] = util.displayable_path(out["path"])
else:
del out["path"]
# Get the size (in bytes) of the backing file. This is useful
# for the Tomahawk resolver API.
try:
out["size"] = os.path.getsize(util.syspath(obj.path))
except OSError:
out["size"] = 0
return out
elif isinstance(obj, beets.library.Album):
del out["artpath"]
if expand:
out["items"] = [_rep(item) for item in obj.items()]
return out
|
https://github.com/beetbox/beets/issues/2532
|
~
➔ beet -vv web
* Running on http://127.0.0.1:8337/ (Press CTRL+C to quit)
127.0.0.1 - - [30/Apr/2017 11:05:15] "GET / HTTP/1.1" 200 -
127.0.0.1 - - [30/Apr/2017 11:05:22] "GET /item/query/isabel HTTP/1.1" 200 -
Error on request:
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/werkzeug/serving.py", line 209, in run_wsgi
execute(self.server.app)
File "/usr/lib/python3.6/site-packages/werkzeug/serving.py", line 199, in execute
for data in application_iter:
File "/usr/lib/python3.6/site-packages/werkzeug/wsgi.py", line 704, in __next__
return self._next()
File "/usr/lib/python3.6/site-packages/werkzeug/wrappers.py", line 81, in _iter_encoded
for item in iterable:
File "/usr/lib/python3.6/site-packages/beetsplug/web/__init__.py", line 74, in json_generator
yield json.dumps(_rep(item, expand=expand))
File "/usr/lib/python3.6/json/__init__.py", line 231, in dumps
return _default_encoder.encode(obj)
File "/usr/lib/python3.6/json/encoder.py", line 199, in encode
chunks = self.iterencode(o, _one_shot=True)
File "/usr/lib/python3.6/json/encoder.py", line 257, in iterencode
return _iterencode(o, 0)
File "/usr/lib/python3.6/json/encoder.py", line 180, in default
o.__class__.__name__)
TypeError: Object of type 'bytes' is not JSON serializable
|
TypeError
|
def get_albums(self, query):
"""Returns a list of AlbumInfo objects for a discogs search query."""
# Strip non-word characters from query. Things like "!" and "-" can
# cause a query to return no results, even if they match the artist or
# album title. Use `re.UNICODE` flag to avoid stripping non-english
# word characters.
# FIXME: Encode as ASCII to work around a bug:
# https://github.com/beetbox/beets/issues/1051
# When the library is fixed, we should encode as UTF-8.
query = re.sub(r"(?u)\W+", " ", query).encode("ascii", "replace")
# Strip medium information from query, Things like "CD1" and "disk 1"
# can also negate an otherwise positive result.
query = re.sub(rb"(?i)\b(CD|disc)\s*\d+", b"", query)
try:
releases = self.discogs_client.search(query, type="release").page(1)
except CONNECTION_ERRORS:
self._log.debug(
"Communication error while searching for {0!r}", query, exc_info=True
)
return []
return [album for album in map(self.get_album_info, releases[:5]) if album]
|
def get_albums(self, query):
"""Returns a list of AlbumInfo objects for a discogs search query."""
# Strip non-word characters from query. Things like "!" and "-" can
# cause a query to return no results, even if they match the artist or
# album title. Use `re.UNICODE` flag to avoid stripping non-english
# word characters.
# FIXME: Encode as ASCII to work around a bug:
# https://github.com/beetbox/beets/issues/1051
# When the library is fixed, we should encode as UTF-8.
query = re.sub(r"(?u)\W+", " ", query).encode("ascii", "replace")
# Strip medium information from query, Things like "CD1" and "disk 1"
# can also negate an otherwise positive result.
query = re.sub(rb"(?i)\b(CD|disc)\s*\d+", b"", query)
try:
releases = self.discogs_client.search(query, type="release").page(1)
except CONNECTION_ERRORS:
self._log.debug(
"Communication error while searching for {0!r}", query, exc_info=True
)
return []
return [self.get_album_info(release) for release in releases[:5]]
|
https://github.com/beetbox/beets/issues/2302
|
/data/Music/Unsorted/New Stuff/Para One - 2003 - Paraone - Iris - Le Sept - Flynt - Lyricson (Maxi) [CD - FLAC] (6 items)
Finding tags for album "Paraone - Iris - Le sept - Flynt - Lyricson - Paraone - Iris - Le Sept - Flynt - Lyricson".
Candidates:
1. The Kenny Clarke - Francy Boland Big Band - Paris Jazz Concert: TNP - Oct. 29th, 1969, Part 2 (35.9%) (tracks, artist, album, ...) (CD, 2002, DE, LaserLight Digital)
2. A - Z Consolidated - A - Z Consolidated (33.8%) (tracks, album, artist)
3. Μίκης Θεοδωράκης - Βασίλης Τσιτσάνης - Σταμάτης Κόκοτας - Θεοδωράκης - Τσιτσάνης - Κόκοτας (29.3%) (tracks, missing tracks, album, ...) (12" Vinyl, 1985, GR, Μίνως Μάτσας & Υιός Α.Ε.)
4. Pirkko Mannola - Brita Koivunen - Vieno Kekkonen - Pirkko - Brita - Vieno (20.2%) (missing tracks, tracks, artist, ...) (CD, 1989, FI, Finnlevy)
5. _ - _ (7.7%) (missing tracks, tracks, album, ...)
# selection (default 1), Skip, Use as-is, as Tracks, Group albums,
Enter search, enter Id, aBort? I
Enter release ID: 690105
Traceback (most recent call last):
File "/home/cesar/.local/bin/beet", line 11, in <module>
sys.exit(main())
File "/home/cesar/.local/lib/python2.7/site-packages/beets/ui/__init__.py", line 1250, in main
_raw_main(args)
File "/home/cesar/.local/lib/python2.7/site-packages/beets/ui/__init__.py", line 1240, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/home/cesar/.local/lib/python2.7/site-packages/beets/ui/commands.py", line 967, in import_func
import_files(lib, paths, query)
File "/home/cesar/.local/lib/python2.7/site-packages/beets/ui/commands.py", line 944, in import_files
session.run()
File "/home/cesar/.local/lib/python2.7/site-packages/beets/importer.py", line 320, in run
pl.run_parallel(QUEUE_SIZE)
File "/home/cesar/.local/lib/python2.7/site-packages/beets/util/pipeline.py", line 301, in run
out = self.coro.send(msg)
File "/home/cesar/.local/lib/python2.7/site-packages/beets/util/pipeline.py", line 160, in coro
task = func(*(args + (task,)))
File "/home/cesar/.local/lib/python2.7/site-packages/beets/importer.py", line 1285, in user_query
task.choose_match(session)
File "/home/cesar/.local/lib/python2.7/site-packages/beets/importer.py", line 779, in choose_match
choice = session.choose_match(self)
File "/home/cesar/.local/lib/python2.7/site-packages/beets/ui/commands.py", line 755, in choose_match
task.items, search_ids=search_id.split()
File "/home/cesar/.local/lib/python2.7/site-packages/beets/autotag/match.py", line 407, in tag_album
search_cands.extend(hooks.albums_for_id(search_id))
File "/home/cesar/.local/lib/python2.7/site-packages/beets/autotag/hooks.py", line 541, in albums_for_id
plugin_albums = plugins.album_for_id(album_id)
File "/home/cesar/.local/lib/python2.7/site-packages/beets/plugins.py", line 373, in album_for_id
res = plugin.album_for_id(album_id)
File "/home/cesar/.local/lib/python2.7/site-packages/beetsplug/discogs.py", line 188, in album_for_id
return self.get_album_info(result)
File "/home/cesar/.local/lib/python2.7/site-packages/beetsplug/discogs.py", line 225, in get_album_info
result.data['formats'][0].get('descriptions', [])) or None
IndexError: list index out of range
|
IndexError
|
def get_album_info(self, result):
"""Returns an AlbumInfo object for a discogs Release object."""
# Explicitly reload the `Release` fields, as they might not be yet
# present if the result is from a `discogs_client.search()`.
if not result.data.get("artists"):
result.refresh()
# Sanity check for required fields. The list of required fields is
# defined at Guideline 1.3.1.a, but in practice some releases might be
# lacking some of these fields. This function expects at least:
# `artists` (>0), `title`, `id`, `tracklist` (>0)
# https://www.discogs.com/help/doc/submission-guidelines-general-rules
if not all([result.data.get(k) for k in ["artists", "title", "id", "tracklist"]]):
self._log.warn("Release does not contain the required fields")
return None
artist, artist_id = self.get_artist([a.data for a in result.artists])
album = re.sub(r" +", " ", result.title)
album_id = result.data["id"]
# Use `.data` to access the tracklist directly instead of the
# convenient `.tracklist` property, which will strip out useful artist
# information and leave us with skeleton `Artist` objects that will
# each make an API call just to get the same data back.
tracks = self.get_tracks(result.data["tracklist"])
# Extract information for the optional AlbumInfo fields, if possible.
va = result.data["artists"][0].get("name", "").lower() == "various"
year = result.data.get("year")
mediums = len(set(t.medium for t in tracks))
country = result.data.get("country")
data_url = result.data.get("uri")
# Extract information for the optional AlbumInfo fields that are
# contained on nested discogs fields.
albumtype = media = label = catalogno = None
if result.data.get("formats"):
albumtype = ", ".join(result.data["formats"][0].get("descriptions", [])) or None
media = result.data["formats"][0]["name"]
if result.data.get("labels"):
label = result.data["labels"][0].get("name")
catalogno = result.data["labels"][0].get("catno")
# Additional cleanups (various artists name, catalog number, media).
if va:
artist = config["va_name"].as_str()
if catalogno == "none":
catalogno = None
# Explicitly set the `media` for the tracks, since it is expected by
# `autotag.apply_metadata`, and set `medium_total`.
for track in tracks:
track.media = media
track.medium_total = mediums
return AlbumInfo(
album,
album_id,
artist,
artist_id,
tracks,
asin=None,
albumtype=albumtype,
va=va,
year=year,
month=None,
day=None,
label=label,
mediums=mediums,
artist_sort=None,
releasegroup_id=None,
catalognum=catalogno,
script=None,
language=None,
country=country,
albumstatus=None,
media=media,
albumdisambig=None,
artist_credit=None,
original_year=None,
original_month=None,
original_day=None,
data_source="Discogs",
data_url=data_url,
)
|
def get_album_info(self, result):
"""Returns an AlbumInfo object for a discogs Release object."""
artist, artist_id = self.get_artist([a.data for a in result.artists])
album = re.sub(r" +", " ", result.title)
album_id = result.data["id"]
# Use `.data` to access the tracklist directly instead of the
# convenient `.tracklist` property, which will strip out useful artist
# information and leave us with skeleton `Artist` objects that will
# each make an API call just to get the same data back.
tracks = self.get_tracks(result.data["tracklist"])
albumtype = ", ".join(result.data["formats"][0].get("descriptions", [])) or None
va = result.data["artists"][0]["name"].lower() == "various"
if va:
artist = config["va_name"].as_str()
year = result.data["year"]
label = result.data["labels"][0]["name"]
mediums = len(set(t.medium for t in tracks))
catalogno = result.data["labels"][0]["catno"]
if catalogno == "none":
catalogno = None
country = result.data.get("country")
media = result.data["formats"][0]["name"]
# Explicitly set the `media` for the tracks, since it is expected by
# `autotag.apply_metadata`, and set `medium_total`.
for track in tracks:
track.media = media
track.medium_total = mediums
data_url = result.data["uri"]
return AlbumInfo(
album,
album_id,
artist,
artist_id,
tracks,
asin=None,
albumtype=albumtype,
va=va,
year=year,
month=None,
day=None,
label=label,
mediums=mediums,
artist_sort=None,
releasegroup_id=None,
catalognum=catalogno,
script=None,
language=None,
country=country,
albumstatus=None,
media=media,
albumdisambig=None,
artist_credit=None,
original_year=None,
original_month=None,
original_day=None,
data_source="Discogs",
data_url=data_url,
)
|
https://github.com/beetbox/beets/issues/2302
|
/data/Music/Unsorted/New Stuff/Para One - 2003 - Paraone - Iris - Le Sept - Flynt - Lyricson (Maxi) [CD - FLAC] (6 items)
Finding tags for album "Paraone - Iris - Le sept - Flynt - Lyricson - Paraone - Iris - Le Sept - Flynt - Lyricson".
Candidates:
1. The Kenny Clarke - Francy Boland Big Band - Paris Jazz Concert: TNP - Oct. 29th, 1969, Part 2 (35.9%) (tracks, artist, album, ...) (CD, 2002, DE, LaserLight Digital)
2. A - Z Consolidated - A - Z Consolidated (33.8%) (tracks, album, artist)
3. Μίκης Θεοδωράκης - Βασίλης Τσιτσάνης - Σταμάτης Κόκοτας - Θεοδωράκης - Τσιτσάνης - Κόκοτας (29.3%) (tracks, missing tracks, album, ...) (12" Vinyl, 1985, GR, Μίνως Μάτσας & Υιός Α.Ε.)
4. Pirkko Mannola - Brita Koivunen - Vieno Kekkonen - Pirkko - Brita - Vieno (20.2%) (missing tracks, tracks, artist, ...) (CD, 1989, FI, Finnlevy)
5. _ - _ (7.7%) (missing tracks, tracks, album, ...)
# selection (default 1), Skip, Use as-is, as Tracks, Group albums,
Enter search, enter Id, aBort? I
Enter release ID: 690105
Traceback (most recent call last):
File "/home/cesar/.local/bin/beet", line 11, in <module>
sys.exit(main())
File "/home/cesar/.local/lib/python2.7/site-packages/beets/ui/__init__.py", line 1250, in main
_raw_main(args)
File "/home/cesar/.local/lib/python2.7/site-packages/beets/ui/__init__.py", line 1240, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/home/cesar/.local/lib/python2.7/site-packages/beets/ui/commands.py", line 967, in import_func
import_files(lib, paths, query)
File "/home/cesar/.local/lib/python2.7/site-packages/beets/ui/commands.py", line 944, in import_files
session.run()
File "/home/cesar/.local/lib/python2.7/site-packages/beets/importer.py", line 320, in run
pl.run_parallel(QUEUE_SIZE)
File "/home/cesar/.local/lib/python2.7/site-packages/beets/util/pipeline.py", line 301, in run
out = self.coro.send(msg)
File "/home/cesar/.local/lib/python2.7/site-packages/beets/util/pipeline.py", line 160, in coro
task = func(*(args + (task,)))
File "/home/cesar/.local/lib/python2.7/site-packages/beets/importer.py", line 1285, in user_query
task.choose_match(session)
File "/home/cesar/.local/lib/python2.7/site-packages/beets/importer.py", line 779, in choose_match
choice = session.choose_match(self)
File "/home/cesar/.local/lib/python2.7/site-packages/beets/ui/commands.py", line 755, in choose_match
task.items, search_ids=search_id.split()
File "/home/cesar/.local/lib/python2.7/site-packages/beets/autotag/match.py", line 407, in tag_album
search_cands.extend(hooks.albums_for_id(search_id))
File "/home/cesar/.local/lib/python2.7/site-packages/beets/autotag/hooks.py", line 541, in albums_for_id
plugin_albums = plugins.album_for_id(album_id)
File "/home/cesar/.local/lib/python2.7/site-packages/beets/plugins.py", line 373, in album_for_id
res = plugin.album_for_id(album_id)
File "/home/cesar/.local/lib/python2.7/site-packages/beetsplug/discogs.py", line 188, in album_for_id
return self.get_album_info(result)
File "/home/cesar/.local/lib/python2.7/site-packages/beetsplug/discogs.py", line 225, in get_album_info
result.data['formats'][0].get('descriptions', [])) or None
IndexError: list index out of range
|
IndexError
|
def match(self, item):
if self.field not in item:
return False
timestamp = float(item[self.field])
date = datetime.utcfromtimestamp(timestamp)
return self.interval.contains(date)
|
def match(self, item):
timestamp = float(item[self.field])
date = datetime.utcfromtimestamp(timestamp)
return self.interval.contains(date)
|
https://github.com/beetbox/beets/issues/1938
|
user configuration: ~/.config/beets/config.yaml
data directory: ~/.config/beets
plugin paths: /usr/local/bin/wlg
Sending event: pluginload
inline: adding item field multidisc
artresizer: method is (2, (6, 9, 3))
thumbnails: using IM to write metadata
thumbnails: using GIO to compute URIs
library database: /net/nfs4/server/mediacentre/Music/Catalogs/beets_test.blb
library directory: /net/nfs4/server/mediacentre/Music/Library_test
Sending event: library_opened
Traceback (most recent call last):
File "/usr/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.17', 'console_scripts', 'beet')()
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 1236, in main
_raw_main(args)
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 1226, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 1068, in list_func
list_items(lib, decargs(args), opts.album)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 1063, in list_items
for item in lib.items(query):
File "/usr/lib/python2.7/site-packages/beets/dbcore/db.py", line 538, in _get_objects
if not self.query or self.query.match(obj):
File "/usr/lib/python2.7/site-packages/beets/dbcore/query.py", line 438, in match
return all([q.match(item) for q in self.subqueries])
File "/usr/lib/python2.7/site-packages/beets/dbcore/query.py", line 629, in match
timestamp = float(item[self.field])
File "/usr/lib/python2.7/site-packages/beets/dbcore/db.py", line 231, in __getitem__
raise KeyError(key)
KeyError: u'last_played'
|
KeyError
|
def _is_hidden_osx(path):
"""Return whether or not a file is hidden on OS X.
This uses os.lstat to work out if a file has the "hidden" flag.
"""
file_stat = os.lstat(beets.util.syspath(path))
if hasattr(file_stat, "st_flags") and hasattr(stat, "UF_HIDDEN"):
return bool(file_stat.st_flags & stat.UF_HIDDEN)
else:
return False
|
def _is_hidden_osx(path):
"""Return whether or not a file is hidden on OS X.
This uses os.lstat to work out if a file has the "hidden" flag.
"""
file_stat = os.lstat(path)
if hasattr(file_stat, "st_flags") and hasattr(stat, "UF_HIDDEN"):
return bool(file_stat.st_flags & stat.UF_HIDDEN)
else:
return False
|
https://github.com/beetbox/beets/issues/2168
|
Traceback (most recent call last):
File "/usr/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.19', 'console_scripts', 'beet')()
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 1266, in main
_raw_main(args)
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 1253, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 967, in import_func
import_files(lib, paths, query)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 944, in import_files
session.run()
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 320, in run
pl.run_parallel(QUEUE_SIZE)
File "/usr/lib/python2.7/site-packages/beets/util/pipeline.py", line 251, in run
msg = next(self.coro)
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1202, in read_tasks
for t in task_factory.tasks():
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1038, in tasks
for dirs, paths in self.paths():
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1090, in paths
for dirs, paths in albums_in_dir(self.toppath):
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1480, in albums_in_dir
logger=log):
File "/usr/lib/python2.7/site-packages/beets/util/__init__.py", line 205, in sorted_walk
for res in sorted_walk(cur, ignore, ignore_hidden, logger):
File "/usr/lib/python2.7/site-packages/beets/util/__init__.py", line 190, in sorted_walk
if (ignore_hidden and not hidden.is_hidden(cur)) or not ignore_hidden:
File "/usr/lib/python2.7/site-packages/beets/util/hidden.py", line 78, in is_hidden
path = path.decode('utf-8')
File "/usr/lib/python2.7/encodings/utf_8.py", line 16, in decode
return codecs.utf_8_decode(input, errors, True)
UnicodeDecodeError: 'utf8' codec can't decode byte 0x96 in position 76: invalid start byte
|
UnicodeDecodeError
|
def _is_hidden_win(path):
"""Return whether or not a file is hidden on Windows.
This uses GetFileAttributes to work out if a file has the "hidden" flag
(FILE_ATTRIBUTE_HIDDEN).
"""
# FILE_ATTRIBUTE_HIDDEN = 2 (0x2) from GetFileAttributes documentation.
hidden_mask = 2
# Retrieve the attributes for the file.
attrs = ctypes.windll.kernel32.GetFileAttributesW(beets.util.syspath(path))
# Ensure we have valid attribues and compare them against the mask.
return attrs >= 0 and attrs & hidden_mask
|
def _is_hidden_win(path):
"""Return whether or not a file is hidden on Windows.
This uses GetFileAttributes to work out if a file has the "hidden" flag
(FILE_ATTRIBUTE_HIDDEN).
"""
# FILE_ATTRIBUTE_HIDDEN = 2 (0x2) from GetFileAttributes documentation.
hidden_mask = 2
# Retrieve the attributes for the file.
attrs = ctypes.windll.kernel32.GetFileAttributesW(path)
# Ensure we have valid attribues and compare them against the mask.
return attrs >= 0 and attrs & hidden_mask
|
https://github.com/beetbox/beets/issues/2168
|
Traceback (most recent call last):
File "/usr/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.19', 'console_scripts', 'beet')()
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 1266, in main
_raw_main(args)
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 1253, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 967, in import_func
import_files(lib, paths, query)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 944, in import_files
session.run()
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 320, in run
pl.run_parallel(QUEUE_SIZE)
File "/usr/lib/python2.7/site-packages/beets/util/pipeline.py", line 251, in run
msg = next(self.coro)
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1202, in read_tasks
for t in task_factory.tasks():
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1038, in tasks
for dirs, paths in self.paths():
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1090, in paths
for dirs, paths in albums_in_dir(self.toppath):
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1480, in albums_in_dir
logger=log):
File "/usr/lib/python2.7/site-packages/beets/util/__init__.py", line 205, in sorted_walk
for res in sorted_walk(cur, ignore, ignore_hidden, logger):
File "/usr/lib/python2.7/site-packages/beets/util/__init__.py", line 190, in sorted_walk
if (ignore_hidden and not hidden.is_hidden(cur)) or not ignore_hidden:
File "/usr/lib/python2.7/site-packages/beets/util/hidden.py", line 78, in is_hidden
path = path.decode('utf-8')
File "/usr/lib/python2.7/encodings/utf_8.py", line 16, in decode
return codecs.utf_8_decode(input, errors, True)
UnicodeDecodeError: 'utf8' codec can't decode byte 0x96 in position 76: invalid start byte
|
UnicodeDecodeError
|
def _is_hidden_dot(path):
"""Return whether or not a file starts with a dot.
Files starting with a dot are seen as "hidden" files on Unix-based OSes.
"""
return os.path.basename(path).startswith(b".")
|
def _is_hidden_dot(path):
"""Return whether or not a file starts with a dot.
Files starting with a dot are seen as "hidden" files on Unix-based OSes.
"""
return os.path.basename(path).startswith(".")
|
https://github.com/beetbox/beets/issues/2168
|
Traceback (most recent call last):
File "/usr/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.19', 'console_scripts', 'beet')()
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 1266, in main
_raw_main(args)
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 1253, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 967, in import_func
import_files(lib, paths, query)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 944, in import_files
session.run()
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 320, in run
pl.run_parallel(QUEUE_SIZE)
File "/usr/lib/python2.7/site-packages/beets/util/pipeline.py", line 251, in run
msg = next(self.coro)
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1202, in read_tasks
for t in task_factory.tasks():
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1038, in tasks
for dirs, paths in self.paths():
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1090, in paths
for dirs, paths in albums_in_dir(self.toppath):
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1480, in albums_in_dir
logger=log):
File "/usr/lib/python2.7/site-packages/beets/util/__init__.py", line 205, in sorted_walk
for res in sorted_walk(cur, ignore, ignore_hidden, logger):
File "/usr/lib/python2.7/site-packages/beets/util/__init__.py", line 190, in sorted_walk
if (ignore_hidden and not hidden.is_hidden(cur)) or not ignore_hidden:
File "/usr/lib/python2.7/site-packages/beets/util/hidden.py", line 78, in is_hidden
path = path.decode('utf-8')
File "/usr/lib/python2.7/encodings/utf_8.py", line 16, in decode
return codecs.utf_8_decode(input, errors, True)
UnicodeDecodeError: 'utf8' codec can't decode byte 0x96 in position 76: invalid start byte
|
UnicodeDecodeError
|
def is_hidden(path):
"""Return whether or not a file is hidden. `path` should be a
bytestring filename.
This method works differently depending on the platform it is called on.
On OS X, it uses both the result of `is_hidden_osx` and `is_hidden_dot` to
work out if a file is hidden.
On Windows, it uses the result of `is_hidden_win` to work out if a file is
hidden.
On any other operating systems (i.e. Linux), it uses `is_hidden_dot` to
work out if a file is hidden.
"""
# Run platform specific functions depending on the platform
if sys.platform == "darwin":
return _is_hidden_osx(path) or _is_hidden_dot(path)
elif sys.platform == "win32":
return _is_hidden_win(path)
else:
return _is_hidden_dot(path)
|
def is_hidden(path):
"""Return whether or not a file is hidden.
This method works differently depending on the platform it is called on.
On OS X, it uses both the result of `is_hidden_osx` and `is_hidden_dot` to
work out if a file is hidden.
On Windows, it uses the result of `is_hidden_win` to work out if a file is
hidden.
On any other operating systems (i.e. Linux), it uses `is_hidden_dot` to
work out if a file is hidden.
"""
# Convert the path to unicode if it is not already.
if not isinstance(path, six.text_type):
path = path.decode("utf-8")
# Run platform specific functions depending on the platform
if sys.platform == "darwin":
return _is_hidden_osx(path) or _is_hidden_dot(path)
elif sys.platform == "win32":
return _is_hidden_win(path)
else:
return _is_hidden_dot(path)
|
https://github.com/beetbox/beets/issues/2168
|
Traceback (most recent call last):
File "/usr/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.19', 'console_scripts', 'beet')()
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 1266, in main
_raw_main(args)
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 1253, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 967, in import_func
import_files(lib, paths, query)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 944, in import_files
session.run()
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 320, in run
pl.run_parallel(QUEUE_SIZE)
File "/usr/lib/python2.7/site-packages/beets/util/pipeline.py", line 251, in run
msg = next(self.coro)
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1202, in read_tasks
for t in task_factory.tasks():
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1038, in tasks
for dirs, paths in self.paths():
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1090, in paths
for dirs, paths in albums_in_dir(self.toppath):
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1480, in albums_in_dir
logger=log):
File "/usr/lib/python2.7/site-packages/beets/util/__init__.py", line 205, in sorted_walk
for res in sorted_walk(cur, ignore, ignore_hidden, logger):
File "/usr/lib/python2.7/site-packages/beets/util/__init__.py", line 190, in sorted_walk
if (ignore_hidden and not hidden.is_hidden(cur)) or not ignore_hidden:
File "/usr/lib/python2.7/site-packages/beets/util/hidden.py", line 78, in is_hidden
path = path.decode('utf-8')
File "/usr/lib/python2.7/encodings/utf_8.py", line 16, in decode
return codecs.utf_8_decode(input, errors, True)
UnicodeDecodeError: 'utf8' codec can't decode byte 0x96 in position 76: invalid start byte
|
UnicodeDecodeError
|
def sort(self, objs):
# TODO: Conversion and null-detection here. In Python 3,
# comparisons with None fail. We should also support flexible
# attributes with different types without falling over.
def key(item):
field_val = item.get(self.field, "")
if self.case_insensitive and isinstance(field_val, unicode):
field_val = field_val.lower()
return field_val
return sorted(objs, key=key, reverse=not self.ascending)
|
def sort(self, objs):
# TODO: Conversion and null-detection here. In Python 3,
# comparisons with None fail. We should also support flexible
# attributes with different types without falling over.
def key(item):
field_val = getattr(item, self.field)
if self.case_insensitive and isinstance(field_val, unicode):
field_val = field_val.lower()
return field_val
return sorted(objs, key=key, reverse=not self.ascending)
|
https://github.com/beetbox/beets/issues/1734
|
$ beet ls -- --
Traceback (most recent call last):
File "/home/asampson/.local/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.16', 'console_scripts', 'beet')()
File "/home/asampson/beets/beets/ui/__init__.py", line 1207, in main
_raw_main(args)
File "/home/asampson/beets/beets/ui/__init__.py", line 1197, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/home/asampson/beets/beets/ui/commands.py", line 958, in list_func
list_items(lib, decargs(args), opts.album)
File "/home/asampson/beets/beets/ui/commands.py", line 953, in list_items
for item in lib.items(query):
File "/home/asampson/beets/beets/dbcore/db.py", line 545, in __iter__
objects = self.sort.sort(list(self._get_objects()))
File "/home/asampson/beets/beets/dbcore/query.py", line 775, in sort
return sorted(objs, key=key, reverse=not self.ascending)
File "/home/asampson/beets/beets/dbcore/query.py", line 770, in key
field_val = getattr(item, self.field)
File "/home/asampson/beets/beets/dbcore/db.py", line 326, in __getattr__
raise AttributeError('no such field {0!r}'.format(key))
AttributeError: no such field '-'
|
AttributeError
|
def key(item):
field_val = item.get(self.field, "")
if self.case_insensitive and isinstance(field_val, unicode):
field_val = field_val.lower()
return field_val
|
def key(item):
field_val = getattr(item, self.field)
if self.case_insensitive and isinstance(field_val, unicode):
field_val = field_val.lower()
return field_val
|
https://github.com/beetbox/beets/issues/1734
|
$ beet ls -- --
Traceback (most recent call last):
File "/home/asampson/.local/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.16', 'console_scripts', 'beet')()
File "/home/asampson/beets/beets/ui/__init__.py", line 1207, in main
_raw_main(args)
File "/home/asampson/beets/beets/ui/__init__.py", line 1197, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/home/asampson/beets/beets/ui/commands.py", line 958, in list_func
list_items(lib, decargs(args), opts.album)
File "/home/asampson/beets/beets/ui/commands.py", line 953, in list_items
for item in lib.items(query):
File "/home/asampson/beets/beets/dbcore/db.py", line 545, in __iter__
objects = self.sort.sort(list(self._get_objects()))
File "/home/asampson/beets/beets/dbcore/query.py", line 775, in sort
return sorted(objs, key=key, reverse=not self.ascending)
File "/home/asampson/beets/beets/dbcore/query.py", line 770, in key
field_val = getattr(item, self.field)
File "/home/asampson/beets/beets/dbcore/db.py", line 326, in __getattr__
raise AttributeError('no such field {0!r}'.format(key))
AttributeError: no such field '-'
|
AttributeError
|
def is_page_candidate(self, url_link, url_title, title, artist):
"""Return True if the URL title makes it a good candidate to be a
page that contains lyrics of title by artist.
"""
title = self.slugify(title.lower())
artist = self.slugify(artist.lower())
sitename = re.search("//([^/]+)/.*", self.slugify(url_link.lower())).group(1)
url_title = self.slugify(url_title.lower())
# Check if URL title contains song title (exact match)
if url_title.find(title) != -1:
return True
# or try extracting song title from URL title and check if
# they are close enough
tokens = (
[by + "_" + artist for by in self.BY_TRANS]
+ [artist, sitename, sitename.replace("www.", "")]
+ self.LYRICS_TRANS
)
tokens = [re.escape(t) for t in tokens]
song_title = re.sub("(%s)" % "|".join(tokens), "", url_title)
song_title = song_title.strip("_|")
typo_ratio = 0.9
ratio = difflib.SequenceMatcher(None, song_title, title).ratio()
return ratio >= typo_ratio
|
def is_page_candidate(self, url_link, url_title, title, artist):
"""Return True if the URL title makes it a good candidate to be a
page that contains lyrics of title by artist.
"""
title = self.slugify(title.lower())
artist = self.slugify(artist.lower())
sitename = re.search("//([^/]+)/.*", self.slugify(url_link.lower())).group(1)
url_title = self.slugify(url_title.lower())
# Check if URL title contains song title (exact match)
if url_title.find(title) != -1:
return True
# or try extracting song title from URL title and check if
# they are close enough
tokens = (
[by + "_" + artist for by in self.BY_TRANS]
+ [artist, sitename, sitename.replace("www.", "")]
+ self.LYRICS_TRANS
)
song_title = re.sub("(%s)" % "|".join(tokens), "", url_title)
song_title = song_title.strip("_|")
typo_ratio = 0.9
ratio = difflib.SequenceMatcher(None, song_title, title).ratio()
return ratio >= typo_ratio
|
https://github.com/beetbox/beets/issues/1673
|
Traceback (most recent call last):
File "/usr/local/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.14', 'console_scripts', 'beet')()
File "/usr/local/lib/python2.7/dist-packages/beets/ui/__init__.py", line 1140, in main
_raw_main(args)
File "/usr/local/lib/python2.7/dist-packages/beets/ui/__init__.py", line 1130, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/local/lib/python2.7/dist-packages/beets/ui/commands.py", line 875, in import_func
import_files(lib, paths, query)
File "/usr/local/lib/python2.7/dist-packages/beets/ui/commands.py", line 852, in import_files
session.run()
File "/usr/local/lib/python2.7/dist-packages/beets/importer.py", line 316, in run
pl.run_parallel(QUEUE_SIZE)
File "/usr/local/lib/python2.7/dist-packages/beets/util/pipeline.py", line 301, in run
out = self.coro.send(msg)
File "/usr/local/lib/python2.7/dist-packages/beets/util/pipeline.py", line 183, in coro
func(*(args + (task,)))
File "/usr/local/lib/python2.7/dist-packages/beets/importer.py", line 1353, in plugin_stage
func(session, task)
File "/usr/local/lib/python2.7/dist-packages/beets/plugins.py", line 123, in wrapper
return func(*args, **kwargs)
File "/usr/local/lib/python2.7/dist-packages/beetsplug/lyrics.py", line 509, in imported
False, self.config['force'])
File "/usr/local/lib/python2.7/dist-packages/beetsplug/lyrics.py", line 521, in fetch_item_lyrics
lyrics = [self.get_lyrics(artist, title) for title in titles]
File "/usr/local/lib/python2.7/dist-packages/beetsplug/lyrics.py", line 548, in get_lyrics
lyrics = backend.fetch(artist, title)
File "/usr/local/lib/python2.7/dist-packages/beetsplug/lyrics.py", line 433, in fetch
title, artist):
File "/usr/local/lib/python2.7/dist-packages/beetsplug/lyrics.py", line 409, in is_page_candidate
songTitle = re.sub(u'(%s)' % u'|'.join(tokens), u'', urlTitle)
File "/usr/lib/python2.7/re.py", line 151, in sub
return _compile(pattern, flags).sub(repl, string, count)
File "/usr/lib/python2.7/re.py", line 244, in _compile
raise error, v # invalid expression
sre_constants.error: unbalanced parenthesis
|
sre_constants.error
|
def __init__(self):
super(DuplicatesPlugin, self).__init__()
self.config.add(
{
"format": "",
"count": False,
"album": False,
"full": False,
"strict": False,
"path": False,
"keys": ["mb_trackid", "mb_albumid"],
"checksum": "",
"copy": "",
"move": "",
"delete": False,
"tag": "",
}
)
self._command = Subcommand("duplicates", help=__doc__, aliases=["dup"])
self._command.parser.add_option(
"-c", "--count", dest="count", action="store_true", help="show duplicate counts"
)
self._command.parser.add_option(
"-C",
"--checksum",
dest="checksum",
action="store",
metavar="PROG",
help="report duplicates based on arbitrary command",
)
self._command.parser.add_option(
"-d",
"--delete",
dest="delete",
action="store_true",
help="delete items from library and disk",
)
self._command.parser.add_option(
"-F",
"--full",
dest="full",
action="store_true",
help="show all versions of duplicate tracks or albums",
)
self._command.parser.add_option(
"-s",
"--strict",
dest="strict",
action="store_true",
help="report duplicates only if all attributes are set",
)
self._command.parser.add_option(
"-k",
"--keys",
dest="keys",
action="callback",
metavar="KEY1 KEY2",
callback=vararg_callback,
help="report duplicates based on keys",
)
self._command.parser.add_option(
"-m",
"--move",
dest="move",
action="store",
metavar="DEST",
help="move items to dest",
)
self._command.parser.add_option(
"-o",
"--copy",
dest="copy",
action="store",
metavar="DEST",
help="copy items to dest",
)
self._command.parser.add_option(
"-t",
"--tag",
dest="tag",
action="store",
help="tag matched items with 'k=v' attribute",
)
self._command.parser.add_all_common_options()
|
def __init__(self):
super(DuplicatesPlugin, self).__init__()
self.config.add(
{
"format": "",
"count": False,
"album": False,
"full": False,
"strict": False,
"path": False,
"keys": ["mb_trackid", "mb_albumid"],
"checksum": None,
"copy": False,
"move": False,
"delete": False,
"tag": False,
}
)
self._command = Subcommand("duplicates", help=__doc__, aliases=["dup"])
self._command.parser.add_option(
"-c", "--count", dest="count", action="store_true", help="show duplicate counts"
)
self._command.parser.add_option(
"-C",
"--checksum",
dest="checksum",
action="store",
metavar="PROG",
help="report duplicates based on arbitrary command",
)
self._command.parser.add_option(
"-d",
"--delete",
dest="delete",
action="store_true",
help="delete items from library and disk",
)
self._command.parser.add_option(
"-F",
"--full",
dest="full",
action="store_true",
help="show all versions of duplicate tracks or albums",
)
self._command.parser.add_option(
"-s",
"--strict",
dest="strict",
action="store_true",
help="report duplicates only if all attributes are set",
)
self._command.parser.add_option(
"-k",
"--keys",
dest="keys",
action="callback",
metavar="KEY1 KEY2",
callback=vararg_callback,
help="report duplicates based on keys",
)
self._command.parser.add_option(
"-m",
"--move",
dest="move",
action="store",
metavar="DEST",
help="move items to dest",
)
self._command.parser.add_option(
"-o",
"--copy",
dest="copy",
action="store",
metavar="DEST",
help="copy items to dest",
)
self._command.parser.add_option(
"-t",
"--tag",
dest="tag",
action="store",
help="tag matched items with 'k=v' attribute",
)
self._command.parser.add_all_common_options()
|
https://github.com/beetbox/beets/issues/1457
|
$ beet dup
Warning: top-level configuration of `color` is deprecated. Configure color use under `ui`. See documentation for more info.
Traceback (most recent call last):
File "/usr/local/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.14', 'console_scripts', 'beet')()
File "/usr/local/lib/python2.7/site-packages/beets/ui/__init__.py", line 1104, in main
_raw_main(args)
File "/usr/local/lib/python2.7/site-packages/beets/ui/__init__.py", line 1094, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/local/lib/python2.7/site-packages/beetsplug/duplicates.py", line 148, in _dup
fmt=fmt.format(obj_count))
File "/usr/local/lib/python2.7/site-packages/beetsplug/duplicates.py", line 161, in _process_item
item.move(basedir=move, copy=False)
File "/usr/local/lib/python2.7/site-packages/beets/library.py", line 713, in move
dest = self.destination(basedir=basedir)
File "/usr/local/lib/python2.7/site-packages/beets/library.py", line 808, in destination
return normpath(os.path.join(basedir, subpath))
File "/usr/local/lib/python2.7/posixpath.py", line 77, in join
elif path == '' or path.endswith('/'):
AttributeError: 'bool' object has no attribute 'endswith'
|
AttributeError
|
def commands(self):
def _dup(lib, opts, args):
self.config.set_args(opts)
album = self.config["album"].get(bool)
checksum = self.config["checksum"].get(str)
copy = self.config["copy"].get(str)
count = self.config["count"].get(bool)
delete = self.config["delete"].get(bool)
fmt = self.config["format"].get(str)
full = self.config["full"].get(bool)
keys = self.config["keys"].get(list)
move = self.config["move"].get(str)
path = self.config["path"].get(bool)
strict = self.config["strict"].get(bool)
tag = self.config["tag"].get(str)
if album:
keys = ["mb_albumid"]
items = lib.albums(decargs(args))
else:
items = lib.items(decargs(args))
if path:
fmt = "$path"
# Default format string for count mode.
if count and not fmt:
if album:
fmt = "$albumartist - $album"
else:
fmt = "$albumartist - $album - $title"
fmt += ": {0}"
if checksum:
for i in items:
k, _ = self._checksum(i, checksum)
keys = [k]
for obj_id, obj_count, objs in self._duplicates(
items, keys=keys, full=full, strict=strict
):
if obj_id: # Skip empty IDs.
for o in objs:
self._process_item(
o,
lib,
copy=copy,
move=move,
delete=delete,
tag=tag,
fmt=fmt.format(obj_count),
)
self._command.func = _dup
return [self._command]
|
def commands(self):
def _dup(lib, opts, args):
self.config.set_args(opts)
fmt = self.config["format"].get()
album = self.config["album"].get(bool)
full = self.config["full"].get(bool)
strict = self.config["strict"].get(bool)
keys = self.config["keys"].get()
checksum = self.config["checksum"].get()
copy = self.config["copy"].get()
move = self.config["move"].get()
delete = self.config["delete"].get(bool)
tag = self.config["tag"].get()
if album:
keys = ["mb_albumid"]
items = lib.albums(decargs(args))
else:
items = lib.items(decargs(args))
if self.config["path"]:
fmt = "$path"
# Default format string for count mode.
if self.config["count"] and not fmt:
if album:
fmt = "$albumartist - $album"
else:
fmt = "$albumartist - $album - $title"
fmt += ": {0}"
if checksum:
if not isinstance(checksum, basestring):
raise UserError('duplicates: "checksum" option must be a command')
for i in items:
k, _ = self._checksum(i, checksum)
keys = [k]
for obj_id, obj_count, objs in self._duplicates(
items, keys=keys, full=full, strict=strict
):
if obj_id: # Skip empty IDs.
for o in objs:
self._process_item(
o,
lib,
copy=copy,
move=move,
delete=delete,
tag=tag,
fmt=fmt.format(obj_count),
)
self._command.func = _dup
return [self._command]
|
https://github.com/beetbox/beets/issues/1457
|
$ beet dup
Warning: top-level configuration of `color` is deprecated. Configure color use under `ui`. See documentation for more info.
Traceback (most recent call last):
File "/usr/local/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.14', 'console_scripts', 'beet')()
File "/usr/local/lib/python2.7/site-packages/beets/ui/__init__.py", line 1104, in main
_raw_main(args)
File "/usr/local/lib/python2.7/site-packages/beets/ui/__init__.py", line 1094, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/local/lib/python2.7/site-packages/beetsplug/duplicates.py", line 148, in _dup
fmt=fmt.format(obj_count))
File "/usr/local/lib/python2.7/site-packages/beetsplug/duplicates.py", line 161, in _process_item
item.move(basedir=move, copy=False)
File "/usr/local/lib/python2.7/site-packages/beets/library.py", line 713, in move
dest = self.destination(basedir=basedir)
File "/usr/local/lib/python2.7/site-packages/beets/library.py", line 808, in destination
return normpath(os.path.join(basedir, subpath))
File "/usr/local/lib/python2.7/posixpath.py", line 77, in join
elif path == '' or path.endswith('/'):
AttributeError: 'bool' object has no attribute 'endswith'
|
AttributeError
|
def _dup(lib, opts, args):
self.config.set_args(opts)
album = self.config["album"].get(bool)
checksum = self.config["checksum"].get(str)
copy = self.config["copy"].get(str)
count = self.config["count"].get(bool)
delete = self.config["delete"].get(bool)
fmt = self.config["format"].get(str)
full = self.config["full"].get(bool)
keys = self.config["keys"].get(list)
move = self.config["move"].get(str)
path = self.config["path"].get(bool)
strict = self.config["strict"].get(bool)
tag = self.config["tag"].get(str)
if album:
keys = ["mb_albumid"]
items = lib.albums(decargs(args))
else:
items = lib.items(decargs(args))
if path:
fmt = "$path"
# Default format string for count mode.
if count and not fmt:
if album:
fmt = "$albumartist - $album"
else:
fmt = "$albumartist - $album - $title"
fmt += ": {0}"
if checksum:
for i in items:
k, _ = self._checksum(i, checksum)
keys = [k]
for obj_id, obj_count, objs in self._duplicates(
items, keys=keys, full=full, strict=strict
):
if obj_id: # Skip empty IDs.
for o in objs:
self._process_item(
o,
lib,
copy=copy,
move=move,
delete=delete,
tag=tag,
fmt=fmt.format(obj_count),
)
|
def _dup(lib, opts, args):
self.config.set_args(opts)
fmt = self.config["format"].get()
album = self.config["album"].get(bool)
full = self.config["full"].get(bool)
strict = self.config["strict"].get(bool)
keys = self.config["keys"].get()
checksum = self.config["checksum"].get()
copy = self.config["copy"].get()
move = self.config["move"].get()
delete = self.config["delete"].get(bool)
tag = self.config["tag"].get()
if album:
keys = ["mb_albumid"]
items = lib.albums(decargs(args))
else:
items = lib.items(decargs(args))
if self.config["path"]:
fmt = "$path"
# Default format string for count mode.
if self.config["count"] and not fmt:
if album:
fmt = "$albumartist - $album"
else:
fmt = "$albumartist - $album - $title"
fmt += ": {0}"
if checksum:
if not isinstance(checksum, basestring):
raise UserError('duplicates: "checksum" option must be a command')
for i in items:
k, _ = self._checksum(i, checksum)
keys = [k]
for obj_id, obj_count, objs in self._duplicates(
items, keys=keys, full=full, strict=strict
):
if obj_id: # Skip empty IDs.
for o in objs:
self._process_item(
o,
lib,
copy=copy,
move=move,
delete=delete,
tag=tag,
fmt=fmt.format(obj_count),
)
|
https://github.com/beetbox/beets/issues/1457
|
$ beet dup
Warning: top-level configuration of `color` is deprecated. Configure color use under `ui`. See documentation for more info.
Traceback (most recent call last):
File "/usr/local/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.14', 'console_scripts', 'beet')()
File "/usr/local/lib/python2.7/site-packages/beets/ui/__init__.py", line 1104, in main
_raw_main(args)
File "/usr/local/lib/python2.7/site-packages/beets/ui/__init__.py", line 1094, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/local/lib/python2.7/site-packages/beetsplug/duplicates.py", line 148, in _dup
fmt=fmt.format(obj_count))
File "/usr/local/lib/python2.7/site-packages/beetsplug/duplicates.py", line 161, in _process_item
item.move(basedir=move, copy=False)
File "/usr/local/lib/python2.7/site-packages/beets/library.py", line 713, in move
dest = self.destination(basedir=basedir)
File "/usr/local/lib/python2.7/site-packages/beets/library.py", line 808, in destination
return normpath(os.path.join(basedir, subpath))
File "/usr/local/lib/python2.7/posixpath.py", line 77, in join
elif path == '' or path.endswith('/'):
AttributeError: 'bool' object has no attribute 'endswith'
|
AttributeError
|
def write(self, path=None, tags=None):
"""Write the item's metadata to a media file.
All fields in `_media_fields` are written to disk according to
the values on this object.
`path` is the path of the mediafile to wirte the data to. It
defaults to the item's path.
`tags` is a dictionary of additional metadata the should be
written to the file. (These tags need not be in `_media_fields`.)
Can raise either a `ReadError` or a `WriteError`.
"""
if path is None:
path = self.path
else:
path = normpath(path)
# Get the data to write to the file.
item_tags = dict(self)
item_tags = {
k: v for k, v in item_tags.items() if k in self._media_fields
} # Only write media fields.
if tags is not None:
item_tags.update(tags)
plugins.send("write", item=self, path=path, tags=item_tags)
# Open the file.
try:
mediafile = MediaFile(syspath(path), id3v23=beets.config["id3v23"].get(bool))
except (OSError, IOError, UnreadableFileError) as exc:
raise ReadError(self.path, exc)
# Write the tags to the file.
mediafile.update(item_tags)
try:
mediafile.save()
except (OSError, IOError, MutagenError) as exc:
raise WriteError(self.path, exc)
# The file has a new mtime.
if path == self.path:
self.mtime = self.current_mtime()
plugins.send("after_write", item=self, path=path)
|
def write(self, path=None, tags=None):
"""Write the item's metadata to a media file.
All fields in `_media_fields` are written to disk according to
the values on this object.
`path` is the path of the mediafile to wirte the data to. It
defaults to the item's path.
`tags` is a dictionary of additional metadata the should be
written to the file.
Can raise either a `ReadError` or a `WriteError`.
"""
if path is None:
path = self.path
else:
path = normpath(path)
item_tags = dict(self)
if tags is not None:
item_tags.update(tags)
plugins.send("write", item=self, path=path, tags=item_tags)
try:
mediafile = MediaFile(syspath(path), id3v23=beets.config["id3v23"].get(bool))
except (OSError, IOError, UnreadableFileError) as exc:
raise ReadError(self.path, exc)
mediafile.update(item_tags)
try:
mediafile.save()
except (OSError, IOError, MutagenError) as exc:
raise WriteError(self.path, exc)
# The file has a new mtime.
if path == self.path:
self.mtime = self.current_mtime()
plugins.send("after_write", item=self, path=path)
|
https://github.com/beetbox/beets/issues/1404
|
user configuration: /home/user/.config/beets/config.yaml
data directory: /home/user/.config/beets
plugin paths:
Sending event: pluginload
library database: /home/user/.beets-data/musiclibrary.blb
library directory: /home/user/Music
Sending event: library_opened
Modifying 13 items.
10 Years - The Autumn Effect - Fault Line
date: 2005-08-16
10 Years - The Autumn Effect - The Recipe
date: 2005-08-16
10 Years - The Autumn Effect - Cast it Out
date: 2005-08-16
10 Years - The Autumn Effect - Wasteland
date: 2005-08-16
10 Years - The Autumn Effect - Seasons to Cycles
date: 2005-08-16
10 Years - The Autumn Effect - Half Life
date: 2005-08-16
10 Years - The Autumn Effect - Through the Iris
date: 2005-08-16
10 Years - The Autumn Effect - Empires
date: 2005-08-16
10 Years - The Autumn Effect - Prey
date: 2005-08-16
10 Years - The Autumn Effect - Insects
date: 2005-08-16
10 Years - The Autumn Effect - Paralyzing Kings
date: 2005-08-16
10 Years - The Autumn Effect - The Autumn Effect / Slowly Falling Awake
date: 2005-08-16
Really modify, move and write tags (Y/n)? y
moving object /home/user/Music/10 Years/The Autumn Effect/11 Insects.flac
Sending event: before_item_moved
Sending event: item_moved
Sending event: database_change
Sending event: database_change
Sending event: write
Traceback (most recent call last):
File "/usr/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.11', 'console_scripts', 'beet')()
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 1081, in main
_raw_main(args)
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 1071, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 1328, in modify_func
not opts.yes)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 1299, in modify_items
obj.try_sync(write)
File "/usr/lib/python2.7/site-packages/beets/library.py", line 610, in try_sync
self.try_write(path=write)
File "/usr/lib/python2.7/site-packages/beets/library.py", line 591, in try_write
self.write(path, tags)
File "/usr/lib/python2.7/site-packages/beets/library.py", line 573, in write
mediafile.update(item_tags)
File "/usr/lib/python2.7/site-packages/beets/mediafile.py", line 1490, in update
setattr(self, field, dict[field])
File "/usr/lib/python2.7/site-packages/beets/mediafile.py", line 1158, in __set__
self._set_date_tuple(mediafile, date.year, date.month, date.day)
AttributeError: 'unicode' object has no attribute 'year'
|
AttributeError
|
def authenticate(self, c_key, c_secret):
# Get the link for the OAuth page.
auth_client = Client(USER_AGENT, c_key, c_secret)
try:
_, _, url = auth_client.get_authorize_url()
except CONNECTION_ERRORS as e:
self._log.debug("connection error: {0}", e)
raise beets.ui.UserError("communication with Discogs failed")
beets.ui.print_("To authenticate with Discogs, visit:")
beets.ui.print_(url)
# Ask for the code and validate it.
code = beets.ui.input_("Enter the code:")
try:
token, secret = auth_client.get_access_token(code)
except DiscogsAPIError:
raise beets.ui.UserError("Discogs authorization failed")
except CONNECTION_ERRORS as e:
self._log.debug("connection error: {0}", e)
raise beets.ui.UserError("Discogs token request failed")
# Save the token for later use.
self._log.debug("Discogs token {0}, secret {1}", token, secret)
with open(self._tokenfile(), "w") as f:
json.dump({"token": token, "secret": secret}, f)
return token, secret
|
def authenticate(self, c_key, c_secret):
# Get the link for the OAuth page.
auth_client = Client(USER_AGENT, c_key, c_secret)
_, _, url = auth_client.get_authorize_url()
beets.ui.print_("To authenticate with Discogs, visit:")
beets.ui.print_(url)
# Ask for the code and validate it.
code = beets.ui.input_("Enter the code:")
try:
token, secret = auth_client.get_access_token(code)
except DiscogsAPIError:
raise beets.ui.UserError("Discogs authorization failed")
except CONNECTION_ERRORS as e:
self._log.debug("connection error: {0}", e)
raise beets.ui.UserError("communication with Discogs failed")
# Save the token for later use.
self._log.debug("Discogs token {0}, secret {1}", token, secret)
with open(self._tokenfile(), "w") as f:
json.dump({"token": token, "secret": secret}, f)
return token, secret
|
https://github.com/beetbox/beets/issues/1417
|
Successfully installed beets discogs-client pyacoustid enum34 mutagen munkres unidecode musicbrainzngs pyyaml requests six oauthlib audioread
Cleaning up...
(env)jwyant@bigbox ~/code/beets $ ./env/bin/beet import ~/.music-incoming/flac
Traceback (most recent call last):
File "./env/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.11', 'console_scripts', 'beet')()
File "/home/jwyant/code/beets/env/lib/python2.7/site-packages/beets/ui/__init__.py", line 1081, in main
_raw_main(args)
File "/home/jwyant/code/beets/env/lib/python2.7/site-packages/beets/ui/__init__.py", line 1071, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/home/jwyant/code/beets/env/lib/python2.7/site-packages/beets/ui/commands.py", line 892, in import_func
import_files(lib, paths, query)
File "/home/jwyant/code/beets/env/lib/python2.7/site-packages/beets/ui/commands.py", line 869, in import_files
session.run()
File "/home/jwyant/code/beets/env/lib/python2.7/site-packages/beets/importer.py", line 313, in run
plugins.send('import_begin', session=self)
File "/home/jwyant/code/beets/env/lib/python2.7/site-packages/beets/plugins.py", line 455, in send
result = handler(**arguments)
File "/home/jwyant/code/beets/env/lib/python2.7/site-packages/beets/plugins.py", line 123, in wrapper
return func(*args, **kwargs)
File "/home/jwyant/code/beets/env/lib/python2.7/site-packages/beetsplug/discogs.py", line 76, in setup
token, secret = self.authenticate(c_key, c_secret)
File "/home/jwyant/code/beets/env/lib/python2.7/site-packages/beetsplug/discogs.py", line 98, in authenticate
_, _, url = auth_client.get_authorize_url()
File "/home/jwyant/code/beets/env/lib/python2.7/site-packages/discogs_client/client.py", line 61, in get_authorize_url
raise HTTPError('Invalid response from request token URL.', status_code)
discogs_client.exceptions.HTTPError: 401: Invalid response from request token URL.
|
discogs_client.exceptions.HTTPError
|
def _getters(cls):
getters = plugins.item_field_getters()
getters["singleton"] = lambda i: i.album_id is None
# Filesize is given in bytes
getters["filesize"] = lambda i: i.try_filesize()
return getters
|
def _getters(cls):
getters = plugins.item_field_getters()
getters["singleton"] = lambda i: i.album_id is None
# Filesize is given in bytes
getters["filesize"] = lambda i: os.path.getsize(syspath(i.path))
return getters
|
https://github.com/beetbox/beets/issues/1326
|
$ beet im /dunehd/Musica\ pendiente/Madonna\ –\ Rebel\ Heart\ \[Deluxe\ Edition\ 2015\]\(MP3\ 320\ Kbps\)/
/dunehd/Musica pendiente/Madonna – Rebel Heart [Deluxe Edition 2015](MP3 320 Kbps)/Track List (25 items)
Correcting tags from:
Madonna - Rebel Heart Deluxe
To:
Madonna - Rebel Heart (Super Deluxe Edition)
URL:
http://musicbrainz.org/release/0911931a-3770-4cb7-ba67-3d2fda24d2e8
(Similarity: 97.5%) (album, tracks) (Digital Media, 2015)
* Living For Love -> Living for Love
* Bitch I'm Madonna (feat. Nicki Minaj) -> Bitch I’m Madonna (title)
* Iconic (feat. Chance the Rapper & Mike Tyson) -> Iconic (title)
* Veni Vidi Vici (feat. Nas) -> Veni Vedi Vici (title)
This album is already in the library!
Traceback (most recent call last):
File "/usr/local/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.11', 'console_scripts', 'beet')()
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/__init__.py", line 953, in main
_raw_main(args)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/__init__.py", line 943, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/commands.py", line 874, in import_func
import_files(lib, paths, query)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/commands.py", line 851, in import_files
session.run()
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/importer.py", line 316, in run
pl.run_parallel(QUEUE_SIZE)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/util/pipeline.py", line 301, in run
out = self.coro.send(msg)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/util/pipeline.py", line 160, in coro
task = func(*(args + (task,)))
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/importer.py", line 1287, in user_query
resolve_duplicates(session, task)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/importer.py", line 1299, in resolve_duplicates
session.resolve_duplicate(task, found_duplicates)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/commands.py", line 789, in resolve_duplicate
not task.is_album,
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/commands.py", line 446, in summarize_items
total_filesize = sum([item.filesize for item in items])
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/dbcore/db.py", line 316, in __getattr__
return self[key]
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/dbcore/db.py", line 224, in __getitem__
return getters[key](self)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/library.py", line 424, in <lambda>
getters['filesize'] = lambda i: os.path.getsize(syspath(i.path))
File "/usr/lib/python2.7/genericpath.py", line 49, in getsize
return os.stat(filename).st_size
OSError: [Errno 2] No such file or directory: '/home/simon/M\xc3\xbasica/Madonna/Rebel Heart (Super Deluxe Edition)/01 - Living for Love.mp3'
|
OSError
|
def _getters(cls):
getters = plugins.item_field_getters()
getters["singleton"] = lambda i: i.album_id is None
getters["filesize"] = Item.try_filesize # In bytes.
return getters
|
def _getters(cls):
getters = plugins.item_field_getters()
getters["singleton"] = lambda i: i.album_id is None
# Filesize is given in bytes
getters["filesize"] = lambda i: i.try_filesize()
return getters
|
https://github.com/beetbox/beets/issues/1326
|
$ beet im /dunehd/Musica\ pendiente/Madonna\ –\ Rebel\ Heart\ \[Deluxe\ Edition\ 2015\]\(MP3\ 320\ Kbps\)/
/dunehd/Musica pendiente/Madonna – Rebel Heart [Deluxe Edition 2015](MP3 320 Kbps)/Track List (25 items)
Correcting tags from:
Madonna - Rebel Heart Deluxe
To:
Madonna - Rebel Heart (Super Deluxe Edition)
URL:
http://musicbrainz.org/release/0911931a-3770-4cb7-ba67-3d2fda24d2e8
(Similarity: 97.5%) (album, tracks) (Digital Media, 2015)
* Living For Love -> Living for Love
* Bitch I'm Madonna (feat. Nicki Minaj) -> Bitch I’m Madonna (title)
* Iconic (feat. Chance the Rapper & Mike Tyson) -> Iconic (title)
* Veni Vidi Vici (feat. Nas) -> Veni Vedi Vici (title)
This album is already in the library!
Traceback (most recent call last):
File "/usr/local/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.11', 'console_scripts', 'beet')()
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/__init__.py", line 953, in main
_raw_main(args)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/__init__.py", line 943, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/commands.py", line 874, in import_func
import_files(lib, paths, query)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/commands.py", line 851, in import_files
session.run()
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/importer.py", line 316, in run
pl.run_parallel(QUEUE_SIZE)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/util/pipeline.py", line 301, in run
out = self.coro.send(msg)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/util/pipeline.py", line 160, in coro
task = func(*(args + (task,)))
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/importer.py", line 1287, in user_query
resolve_duplicates(session, task)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/importer.py", line 1299, in resolve_duplicates
session.resolve_duplicate(task, found_duplicates)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/commands.py", line 789, in resolve_duplicate
not task.is_album,
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/commands.py", line 446, in summarize_items
total_filesize = sum([item.filesize for item in items])
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/dbcore/db.py", line 316, in __getattr__
return self[key]
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/dbcore/db.py", line 224, in __getitem__
return getters[key](self)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/library.py", line 424, in <lambda>
getters['filesize'] = lambda i: os.path.getsize(syspath(i.path))
File "/usr/lib/python2.7/genericpath.py", line 49, in getsize
return os.stat(filename).st_size
OSError: [Errno 2] No such file or directory: '/home/simon/M\xc3\xbasica/Madonna/Rebel Heart (Super Deluxe Edition)/01 - Living for Love.mp3'
|
OSError
|
def try_filesize(self):
"""Get the size of the underlying file in bytes.
If the file is missing, return 0 (and log a warning).
"""
try:
return os.path.getsize(syspath(self.path))
except (OSError, Exception) as exc:
log.warning("could not get filesize: {0}", exc)
return 0
|
def try_filesize(self):
try:
return os.path.getsize(syspath(self.path))
except (OSError, Exception) as exc:
log.warning("could not get filesize: {0}", exc)
return 0
|
https://github.com/beetbox/beets/issues/1326
|
$ beet im /dunehd/Musica\ pendiente/Madonna\ –\ Rebel\ Heart\ \[Deluxe\ Edition\ 2015\]\(MP3\ 320\ Kbps\)/
/dunehd/Musica pendiente/Madonna – Rebel Heart [Deluxe Edition 2015](MP3 320 Kbps)/Track List (25 items)
Correcting tags from:
Madonna - Rebel Heart Deluxe
To:
Madonna - Rebel Heart (Super Deluxe Edition)
URL:
http://musicbrainz.org/release/0911931a-3770-4cb7-ba67-3d2fda24d2e8
(Similarity: 97.5%) (album, tracks) (Digital Media, 2015)
* Living For Love -> Living for Love
* Bitch I'm Madonna (feat. Nicki Minaj) -> Bitch I’m Madonna (title)
* Iconic (feat. Chance the Rapper & Mike Tyson) -> Iconic (title)
* Veni Vidi Vici (feat. Nas) -> Veni Vedi Vici (title)
This album is already in the library!
Traceback (most recent call last):
File "/usr/local/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.11', 'console_scripts', 'beet')()
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/__init__.py", line 953, in main
_raw_main(args)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/__init__.py", line 943, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/commands.py", line 874, in import_func
import_files(lib, paths, query)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/commands.py", line 851, in import_files
session.run()
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/importer.py", line 316, in run
pl.run_parallel(QUEUE_SIZE)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/util/pipeline.py", line 301, in run
out = self.coro.send(msg)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/util/pipeline.py", line 160, in coro
task = func(*(args + (task,)))
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/importer.py", line 1287, in user_query
resolve_duplicates(session, task)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/importer.py", line 1299, in resolve_duplicates
session.resolve_duplicate(task, found_duplicates)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/commands.py", line 789, in resolve_duplicate
not task.is_album,
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/ui/commands.py", line 446, in summarize_items
total_filesize = sum([item.filesize for item in items])
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/dbcore/db.py", line 316, in __getattr__
return self[key]
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/dbcore/db.py", line 224, in __getitem__
return getters[key](self)
File "/usr/local/lib/python2.7/dist-packages/beets-1.3.11-py2.7.egg/beets/library.py", line 424, in <lambda>
getters['filesize'] = lambda i: os.path.getsize(syspath(i.path))
File "/usr/lib/python2.7/genericpath.py", line 49, in getsize
return os.stat(filename).st_size
OSError: [Errno 2] No such file or directory: '/home/simon/M\xc3\xbasica/Madonna/Rebel Heart (Super Deluxe Edition)/01 - Living for Love.mp3'
|
OSError
|
def _process_item(item, lib, copy=False, move=False, delete=False, tag=False, fmt=""):
"""Process Item `item` in `lib`."""
if copy:
item.move(basedir=copy, copy=True)
item.store()
if move:
item.move(basedir=move, copy=False)
item.store()
if delete:
item.remove(delete=True)
if tag:
try:
k, v = tag.split("=")
except:
raise UserError("%s: can't parse k=v tag: %s" % (PLUGIN, tag))
setattr(k, v)
item.store()
print_(format(item, fmt))
|
def _process_item(
item, lib, copy=False, move=False, delete=False, tag=False, format=""
):
"""Process Item `item` in `lib`."""
if copy:
item.move(basedir=copy, copy=True)
item.store()
if move:
item.move(basedir=move, copy=False)
item.store()
if delete:
item.remove(delete=True)
if tag:
try:
k, v = tag.split("=")
except:
raise UserError("%s: can't parse k=v tag: %s" % (PLUGIN, tag))
setattr(k, v)
item.store()
print_(format(item, format))
|
https://github.com/beetbox/beets/issues/1300
|
C:\Users\100557855>beet duplicates
Traceback (most recent call last):
File "C:\Python27\Scripts\beet-script.py", line 9, in <module>
load_entry_point('beets==1.3.11', 'console_scripts', 'beet')()
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 953, in main
_raw_main(args)
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 943, in _raw_ma
in
subcommand.func(lib, suboptions, subargs)
File "c:\users\100557855\src\beets\beetsplug\duplicates.py", line 232, in _dup
format=fmt.format(obj_count))
File "c:\users\100557855\src\beets\beetsplug\duplicates.py", line 48, in _proc
ess_item
print_(format(item, format))
TypeError: 'unicode' object is not callable
|
TypeError
|
def commands(self):
def _dup(lib, opts, args):
self.config.set_args(opts)
fmt = self.config["format"].get()
album = self.config["album"].get(bool)
full = self.config["full"].get(bool)
keys = self.config["keys"].get()
checksum = self.config["checksum"].get()
copy = self.config["copy"].get()
move = self.config["move"].get()
delete = self.config["delete"].get(bool)
tag = self.config["tag"].get()
if album:
keys = ["mb_albumid"]
items = lib.albums(decargs(args))
else:
items = lib.items(decargs(args))
if self.config["path"]:
fmt = "$path"
# Default format string for count mode.
if self.config["count"] and not fmt:
if album:
fmt = "$albumartist - $album"
else:
fmt = "$albumartist - $album - $title"
fmt += ": {0}"
if checksum:
if not isinstance(checksum, basestring):
raise UserError('duplicates: "checksum" option must be a command')
for i in items:
k, _ = self._checksum(i, checksum, self._log)
keys = [k]
for obj_id, obj_count, objs in _duplicates(
items, keys=keys, full=full, log=self._log
):
if obj_id: # Skip empty IDs.
for o in objs:
_process_item(
o,
lib,
copy=copy,
move=move,
delete=delete,
tag=tag,
fmt=fmt.format(obj_count),
)
self._command.func = _dup
return [self._command]
|
def commands(self):
def _dup(lib, opts, args):
self.config.set_args(opts)
fmt = self.config["format"].get()
album = self.config["album"].get(bool)
full = self.config["full"].get(bool)
keys = self.config["keys"].get()
checksum = self.config["checksum"].get()
copy = self.config["copy"].get()
move = self.config["move"].get()
delete = self.config["delete"].get(bool)
tag = self.config["tag"].get()
if album:
keys = ["mb_albumid"]
items = lib.albums(decargs(args))
else:
items = lib.items(decargs(args))
if self.config["path"]:
fmt = "$path"
# Default format string for count mode.
if self.config["count"] and not fmt:
if album:
fmt = "$albumartist - $album"
else:
fmt = "$albumartist - $album - $title"
fmt += ": {0}"
if checksum:
if not isinstance(checksum, basestring):
raise UserError('duplicates: "checksum" option must be a command')
for i in items:
k, _ = self._checksum(i, checksum, self._log)
keys = [k]
for obj_id, obj_count, objs in _duplicates(
items, keys=keys, full=full, log=self._log
):
if obj_id: # Skip empty IDs.
for o in objs:
_process_item(
o,
lib,
copy=copy,
move=move,
delete=delete,
tag=tag,
format=fmt.format(obj_count),
)
self._command.func = _dup
return [self._command]
|
https://github.com/beetbox/beets/issues/1300
|
C:\Users\100557855>beet duplicates
Traceback (most recent call last):
File "C:\Python27\Scripts\beet-script.py", line 9, in <module>
load_entry_point('beets==1.3.11', 'console_scripts', 'beet')()
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 953, in main
_raw_main(args)
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 943, in _raw_ma
in
subcommand.func(lib, suboptions, subargs)
File "c:\users\100557855\src\beets\beetsplug\duplicates.py", line 232, in _dup
format=fmt.format(obj_count))
File "c:\users\100557855\src\beets\beetsplug\duplicates.py", line 48, in _proc
ess_item
print_(format(item, format))
TypeError: 'unicode' object is not callable
|
TypeError
|
def _dup(lib, opts, args):
self.config.set_args(opts)
fmt = self.config["format"].get()
album = self.config["album"].get(bool)
full = self.config["full"].get(bool)
keys = self.config["keys"].get()
checksum = self.config["checksum"].get()
copy = self.config["copy"].get()
move = self.config["move"].get()
delete = self.config["delete"].get(bool)
tag = self.config["tag"].get()
if album:
keys = ["mb_albumid"]
items = lib.albums(decargs(args))
else:
items = lib.items(decargs(args))
if self.config["path"]:
fmt = "$path"
# Default format string for count mode.
if self.config["count"] and not fmt:
if album:
fmt = "$albumartist - $album"
else:
fmt = "$albumartist - $album - $title"
fmt += ": {0}"
if checksum:
if not isinstance(checksum, basestring):
raise UserError('duplicates: "checksum" option must be a command')
for i in items:
k, _ = self._checksum(i, checksum, self._log)
keys = [k]
for obj_id, obj_count, objs in _duplicates(
items, keys=keys, full=full, log=self._log
):
if obj_id: # Skip empty IDs.
for o in objs:
_process_item(
o,
lib,
copy=copy,
move=move,
delete=delete,
tag=tag,
fmt=fmt.format(obj_count),
)
|
def _dup(lib, opts, args):
self.config.set_args(opts)
fmt = self.config["format"].get()
album = self.config["album"].get(bool)
full = self.config["full"].get(bool)
keys = self.config["keys"].get()
checksum = self.config["checksum"].get()
copy = self.config["copy"].get()
move = self.config["move"].get()
delete = self.config["delete"].get(bool)
tag = self.config["tag"].get()
if album:
keys = ["mb_albumid"]
items = lib.albums(decargs(args))
else:
items = lib.items(decargs(args))
if self.config["path"]:
fmt = "$path"
# Default format string for count mode.
if self.config["count"] and not fmt:
if album:
fmt = "$albumartist - $album"
else:
fmt = "$albumartist - $album - $title"
fmt += ": {0}"
if checksum:
if not isinstance(checksum, basestring):
raise UserError('duplicates: "checksum" option must be a command')
for i in items:
k, _ = self._checksum(i, checksum, self._log)
keys = [k]
for obj_id, obj_count, objs in _duplicates(
items, keys=keys, full=full, log=self._log
):
if obj_id: # Skip empty IDs.
for o in objs:
_process_item(
o,
lib,
copy=copy,
move=move,
delete=delete,
tag=tag,
format=fmt.format(obj_count),
)
|
https://github.com/beetbox/beets/issues/1300
|
C:\Users\100557855>beet duplicates
Traceback (most recent call last):
File "C:\Python27\Scripts\beet-script.py", line 9, in <module>
load_entry_point('beets==1.3.11', 'console_scripts', 'beet')()
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 953, in main
_raw_main(args)
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 943, in _raw_ma
in
subcommand.func(lib, suboptions, subargs)
File "c:\users\100557855\src\beets\beetsplug\duplicates.py", line 232, in _dup
format=fmt.format(obj_count))
File "c:\users\100557855\src\beets\beetsplug\duplicates.py", line 48, in _proc
ess_item
print_(format(item, format))
TypeError: 'unicode' object is not callable
|
TypeError
|
def tmpl_time(s, fmt):
"""Format a time value using `strftime`."""
cur_fmt = beets.config["time_format"].get(unicode)
return time.strftime(fmt, time.strptime(s, cur_fmt))
|
def tmpl_time(s, format):
"""Format a time value using `strftime`."""
cur_fmt = beets.config["time_format"].get(unicode)
return time.strftime(format, time.strptime(s, cur_fmt))
|
https://github.com/beetbox/beets/issues/1300
|
C:\Users\100557855>beet duplicates
Traceback (most recent call last):
File "C:\Python27\Scripts\beet-script.py", line 9, in <module>
load_entry_point('beets==1.3.11', 'console_scripts', 'beet')()
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 953, in main
_raw_main(args)
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 943, in _raw_ma
in
subcommand.func(lib, suboptions, subargs)
File "c:\users\100557855\src\beets\beetsplug\duplicates.py", line 232, in _dup
format=fmt.format(obj_count))
File "c:\users\100557855\src\beets\beetsplug\duplicates.py", line 48, in _proc
ess_item
print_(format(item, format))
TypeError: 'unicode' object is not callable
|
TypeError
|
def get_format(fmt=None):
"""Return the command tempate and the extension from the config."""
if not fmt:
fmt = config["convert"]["format"].get(unicode).lower()
fmt = ALIASES.get(fmt, fmt)
try:
format_info = config["convert"]["formats"][fmt].get(dict)
command = format_info["command"]
extension = format_info["extension"]
except KeyError:
raise ui.UserError(
'convert: format {0} needs "command" and "extension" fields'.format(fmt)
)
except ConfigTypeError:
command = config["convert"]["formats"][fmt].get(bytes)
extension = fmt
# Convenience and backwards-compatibility shortcuts.
keys = config["convert"].keys()
if "command" in keys:
command = config["convert"]["command"].get(unicode)
elif "opts" in keys:
# Undocumented option for backwards compatibility with < 1.3.1.
command = "ffmpeg -i $source -y {0} $dest".format(
config["convert"]["opts"].get(unicode)
)
if "extension" in keys:
extension = config["convert"]["extension"].get(unicode)
return (command.encode("utf8"), extension.encode("utf8"))
|
def get_format(format=None):
"""Return the command tempate and the extension from the config."""
if not format:
format = config["convert"]["format"].get(unicode).lower()
format = ALIASES.get(format, format)
try:
format_info = config["convert"]["formats"][format].get(dict)
command = format_info["command"]
extension = format_info["extension"]
except KeyError:
raise ui.UserError(
'convert: format {0} needs "command" and "extension" fields'.format(format)
)
except ConfigTypeError:
command = config["convert"]["formats"][format].get(bytes)
extension = format
# Convenience and backwards-compatibility shortcuts.
keys = config["convert"].keys()
if "command" in keys:
command = config["convert"]["command"].get(unicode)
elif "opts" in keys:
# Undocumented option for backwards compatibility with < 1.3.1.
command = "ffmpeg -i $source -y {0} $dest".format(
config["convert"]["opts"].get(unicode)
)
if "extension" in keys:
extension = config["convert"]["extension"].get(unicode)
return (command.encode("utf8"), extension.encode("utf8"))
|
https://github.com/beetbox/beets/issues/1300
|
C:\Users\100557855>beet duplicates
Traceback (most recent call last):
File "C:\Python27\Scripts\beet-script.py", line 9, in <module>
load_entry_point('beets==1.3.11', 'console_scripts', 'beet')()
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 953, in main
_raw_main(args)
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 943, in _raw_ma
in
subcommand.func(lib, suboptions, subargs)
File "c:\users\100557855\src\beets\beetsplug\duplicates.py", line 232, in _dup
format=fmt.format(obj_count))
File "c:\users\100557855\src\beets\beetsplug\duplicates.py", line 48, in _proc
ess_item
print_(format(item, format))
TypeError: 'unicode' object is not callable
|
TypeError
|
def should_transcode(item, fmt):
"""Determine whether the item should be transcoded as part of
conversion (i.e., its bitrate is high or it has the wrong format).
"""
if config["convert"]["never_convert_lossy_files"] and not (
item.format.lower() in LOSSLESS_FORMATS
):
return False
maxbr = config["convert"]["max_bitrate"].get(int)
return fmt.lower() != item.format.lower() or item.bitrate >= 1000 * maxbr
|
def should_transcode(item, format):
"""Determine whether the item should be transcoded as part of
conversion (i.e., its bitrate is high or it has the wrong format).
"""
if config["convert"]["never_convert_lossy_files"] and not (
item.format.lower() in LOSSLESS_FORMATS
):
return False
maxbr = config["convert"]["max_bitrate"].get(int)
return format.lower() != item.format.lower() or item.bitrate >= 1000 * maxbr
|
https://github.com/beetbox/beets/issues/1300
|
C:\Users\100557855>beet duplicates
Traceback (most recent call last):
File "C:\Python27\Scripts\beet-script.py", line 9, in <module>
load_entry_point('beets==1.3.11', 'console_scripts', 'beet')()
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 953, in main
_raw_main(args)
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 943, in _raw_ma
in
subcommand.func(lib, suboptions, subargs)
File "c:\users\100557855\src\beets\beetsplug\duplicates.py", line 232, in _dup
format=fmt.format(obj_count))
File "c:\users\100557855\src\beets\beetsplug\duplicates.py", line 48, in _proc
ess_item
print_(format(item, format))
TypeError: 'unicode' object is not callable
|
TypeError
|
def convert_item(self, dest_dir, keep_new, path_formats, fmt, pretend=False):
command, ext = get_format(fmt)
item, original, converted = None, None, None
while True:
item = yield (item, original, converted)
dest = item.destination(basedir=dest_dir, path_formats=path_formats)
# When keeping the new file in the library, we first move the
# current (pristine) file to the destination. We'll then copy it
# back to its old path or transcode it to a new path.
if keep_new:
original = dest
converted = item.path
if should_transcode(item, fmt):
converted = replace_ext(converted, ext)
else:
original = item.path
if should_transcode(item, fmt):
dest = replace_ext(dest, ext)
converted = dest
# Ensure that only one thread tries to create directories at a
# time. (The existence check is not atomic with the directory
# creation inside this function.)
if not pretend:
with _fs_lock:
util.mkdirall(dest)
if os.path.exists(util.syspath(dest)):
self._log.info(
"Skipping {0} (target file exists)", util.displayable_path(item.path)
)
continue
if keep_new:
if pretend:
self._log.info(
"mv {0} {1}",
util.displayable_path(item.path),
util.displayable_path(original),
)
else:
self._log.info("Moving to {0}", util.displayable_path(original))
util.move(item.path, original)
if should_transcode(item, fmt):
try:
self.encode(command, original, converted, pretend)
except subprocess.CalledProcessError:
continue
else:
if pretend:
self._log.info(
"cp {0} {1}",
util.displayable_path(original),
util.displayable_path(converted),
)
else:
# No transcoding necessary.
self._log.info("Copying {0}", util.displayable_path(item.path))
util.copy(original, converted)
if pretend:
continue
# Write tags from the database to the converted file.
item.try_write(path=converted)
if keep_new:
# If we're keeping the transcoded file, read it again (after
# writing) to get new bitrate, duration, etc.
item.path = converted
item.read()
item.store() # Store new path and audio data.
if self.config["embed"]:
album = item.get_album()
if album and album.artpath:
EmbedCoverArtPlugin().embed_item(
item, album.artpath, itempath=converted
)
if keep_new:
plugins.send("after_convert", item=item, dest=dest, keepnew=True)
else:
plugins.send("after_convert", item=item, dest=converted, keepnew=False)
|
def convert_item(self, dest_dir, keep_new, path_formats, format, pretend=False):
command, ext = get_format(format)
item, original, converted = None, None, None
while True:
item = yield (item, original, converted)
dest = item.destination(basedir=dest_dir, path_formats=path_formats)
# When keeping the new file in the library, we first move the
# current (pristine) file to the destination. We'll then copy it
# back to its old path or transcode it to a new path.
if keep_new:
original = dest
converted = item.path
if should_transcode(item, format):
converted = replace_ext(converted, ext)
else:
original = item.path
if should_transcode(item, format):
dest = replace_ext(dest, ext)
converted = dest
# Ensure that only one thread tries to create directories at a
# time. (The existence check is not atomic with the directory
# creation inside this function.)
if not pretend:
with _fs_lock:
util.mkdirall(dest)
if os.path.exists(util.syspath(dest)):
self._log.info(
"Skipping {0} (target file exists)", util.displayable_path(item.path)
)
continue
if keep_new:
if pretend:
self._log.info(
"mv {0} {1}",
util.displayable_path(item.path),
util.displayable_path(original),
)
else:
self._log.info("Moving to {0}", util.displayable_path(original))
util.move(item.path, original)
if should_transcode(item, format):
try:
self.encode(command, original, converted, pretend)
except subprocess.CalledProcessError:
continue
else:
if pretend:
self._log.info(
"cp {0} {1}",
util.displayable_path(original),
util.displayable_path(converted),
)
else:
# No transcoding necessary.
self._log.info("Copying {0}", util.displayable_path(item.path))
util.copy(original, converted)
if pretend:
continue
# Write tags from the database to the converted file.
item.try_write(path=converted)
if keep_new:
# If we're keeping the transcoded file, read it again (after
# writing) to get new bitrate, duration, etc.
item.path = converted
item.read()
item.store() # Store new path and audio data.
if self.config["embed"]:
album = item.get_album()
if album and album.artpath:
EmbedCoverArtPlugin().embed_item(
item, album.artpath, itempath=converted
)
if keep_new:
plugins.send("after_convert", item=item, dest=dest, keepnew=True)
else:
plugins.send("after_convert", item=item, dest=converted, keepnew=False)
|
https://github.com/beetbox/beets/issues/1300
|
C:\Users\100557855>beet duplicates
Traceback (most recent call last):
File "C:\Python27\Scripts\beet-script.py", line 9, in <module>
load_entry_point('beets==1.3.11', 'console_scripts', 'beet')()
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 953, in main
_raw_main(args)
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 943, in _raw_ma
in
subcommand.func(lib, suboptions, subargs)
File "c:\users\100557855\src\beets\beetsplug\duplicates.py", line 232, in _dup
format=fmt.format(obj_count))
File "c:\users\100557855\src\beets\beetsplug\duplicates.py", line 48, in _proc
ess_item
print_(format(item, format))
TypeError: 'unicode' object is not callable
|
TypeError
|
def convert_on_import(self, lib, item):
"""Transcode a file automatically after it is imported into the
library.
"""
fmt = self.config["format"].get(unicode).lower()
if should_transcode(item, fmt):
command, ext = get_format()
fd, dest = tempfile.mkstemp("." + ext)
os.close(fd)
_temp_files.append(dest) # Delete the transcode later.
try:
self.encode(command, item.path, dest)
except subprocess.CalledProcessError:
return
item.path = dest
item.write()
item.read() # Load new audio information data.
item.store()
|
def convert_on_import(self, lib, item):
"""Transcode a file automatically after it is imported into the
library.
"""
format = self.config["format"].get(unicode).lower()
if should_transcode(item, format):
command, ext = get_format()
fd, dest = tempfile.mkstemp("." + ext)
os.close(fd)
_temp_files.append(dest) # Delete the transcode later.
try:
self.encode(command, item.path, dest)
except subprocess.CalledProcessError:
return
item.path = dest
item.write()
item.read() # Load new audio information data.
item.store()
|
https://github.com/beetbox/beets/issues/1300
|
C:\Users\100557855>beet duplicates
Traceback (most recent call last):
File "C:\Python27\Scripts\beet-script.py", line 9, in <module>
load_entry_point('beets==1.3.11', 'console_scripts', 'beet')()
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 953, in main
_raw_main(args)
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 943, in _raw_ma
in
subcommand.func(lib, suboptions, subargs)
File "c:\users\100557855\src\beets\beetsplug\duplicates.py", line 232, in _dup
format=fmt.format(obj_count))
File "c:\users\100557855\src\beets\beetsplug\duplicates.py", line 48, in _proc
ess_item
print_(format(item, format))
TypeError: 'unicode' object is not callable
|
TypeError
|
def authenticate(self, c_key, c_secret):
# Get the link for the OAuth page.
auth_client = Client(USER_AGENT, c_key, c_secret)
_, _, url = auth_client.get_authorize_url()
beets.ui.print_("To authenticate with Discogs, visit:")
beets.ui.print_(url)
# Ask for the code and validate it.
code = beets.ui.input_("Enter the code:")
try:
token, secret = auth_client.get_access_token(code)
except DiscogsAPIError:
raise beets.ui.UserError("Discogs authorization failed")
except (ConnectionError, socket.error) as e:
self._log.debug("connection error: {0}", e)
raise beets.ui.UserError("communication with Discogs failed")
# Save the token for later use.
self._log.debug("Discogs token {0}, secret {1}", token, secret)
with open(self._tokenfile(), "w") as f:
json.dump({"token": token, "secret": secret}, f)
return token, secret
|
def authenticate(self, c_key, c_secret):
# Get the link for the OAuth page.
auth_client = Client(USER_AGENT, c_key, c_secret)
_, _, url = auth_client.get_authorize_url()
beets.ui.print_("To authenticate with Discogs, visit:")
beets.ui.print_(url)
# Ask for the code and validate it.
code = beets.ui.input_("Enter the code:")
try:
token, secret = auth_client.get_access_token(code)
except DiscogsAPIError:
raise beets.ui.UserError("Discogs authorization failed")
# Save the token for later use.
self._log.debug("Discogs token {0}, secret {1}", token, secret)
with open(self._tokenfile(), "w") as f:
json.dump({"token": token, "secret": secret}, f)
return token, secret
|
https://github.com/beetbox/beets/issues/1299
|
Apply, More candidates, Skip, Use as-is, as Tracks, Group albums,
Enter search, enter Id, aBort? a
Traceback (most recent call last):
File "C:\Python27\Scripts\beet-script.py", line 9, in <module>
load_entry_point('beets==1.3.11', 'console_scripts', 'beet')()
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 953, in main
_raw_main(args)
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 943, in _raw_m
in
subcommand.func(lib, suboptions, subargs)
File "c:\users\100557855\src\beets\beets\ui\commands.py", line 870, in import
func
import_files(lib, paths, query)
File "c:\users\100557855\src\beets\beets\ui\commands.py", line 847, in import
files
session.run()
File "c:\users\100557855\src\beets\beets\importer.py", line 316, in run
pl.run_parallel(QUEUE_SIZE)
File "c:\users\100557855\src\beets\beets\util\pipeline.py", line 301, in run
out = self.coro.send(msg)
File "c:\users\100557855\src\beets\beets\util\pipeline.py", line 183, in coro
func(*(args + (task,)))
File "c:\users\100557855\src\beets\beets\importer.py", line 1227, in lookup_c
ndidates
task.lookup_candidates()
File "c:\users\100557855\src\beets\beets\importer.py", line 558, in lookup_ca
didates
autotag.tag_album(self.items)
File "c:\users\100557855\src\beets\beets\autotag\match.py", line 433, in tag_
lbum
search_album, va_likely)
File "c:\users\100557855\src\beets\beets\autotag\hooks.py", line 563, in albu
_candidates
out.extend(plugins.candidates(items, artist, album, va_likely))
File "c:\users\100557855\src\beets\beets\plugins.py", line 360, in candidates
out.extend(plugin.candidates(items, artist, album, va_likely))
File "c:\users\100557855\src\beets\beetsplug\discogs.py", line 121, in candid
tes
return self.get_albums(query)
File "c:\users\100557855\src\beets\beetsplug\discogs.py", line 173, in get_al
ums
return [self.get_album_info(release) for release in releases[:5]]
File "c:\users\100557855\src\beets\beetsplug\discogs.py", line 178, in get_al
um_info
artist, artist_id = self.get_artist([a.data for a in result.artists])
File "C:\Python27\lib\site-packages\discogs_client\models.py", line 96, in __
et__
return [wrapper_class(instance.client, d) for d in instance.fetch(self.name
[])]
File "C:\Python27\lib\site-packages\discogs_client\models.py", line 237, in f
tch
self.refresh()
File "C:\Python27\lib\site-packages\discogs_client\models.py", line 203, in r
fresh
data = self.client._get(self.data['resource_url'])
File "C:\Python27\lib\site-packages\discogs_client\client.py", line 110, in _
et
return self._request('GET', url)
File "C:\Python27\lib\site-packages\discogs_client\client.py", line 97, in _r
quest
content, status_code = self._fetcher.fetch(self, method, url, data=data, he
ders=headers)
File "C:\Python27\lib\site-packages\discogs_client\fetchers.py", line 73, in
etch
resp, content = self.oauth_client.request(url, method, headers=headers)
File "C:\Python27\lib\site-packages\oauth2\__init__.py", line 682, in request
connection_type=connection_type)
File "C:\Python27\lib\site-packages\httplib2\__init__.py", line 1593, in requ
st
(response, content) = self._request(conn, authority, uri, request_uri, meth
d, body, headers, redirections, cachekey)
File "C:\Python27\lib\site-packages\httplib2\__init__.py", line 1335, in _req
est
(response, content) = self._conn_request(conn, request_uri, method, body, h
aders)
File "C:\Python27\lib\site-packages\httplib2\__init__.py", line 1291, in _con
_request
response = conn.getresponse()
File "C:\Python27\lib\httplib.py", line 1074, in getresponse
response.begin()
File "C:\Python27\lib\httplib.py", line 415, in begin
version, status, reason = self._read_status()
File "C:\Python27\lib\httplib.py", line 371, in _read_status
line = self.fp.readline(_MAXLINE + 1)
File "C:\Python27\lib\socket.py", line 476, in readline
data = self._sock.recv(self._rbufsize)
socket.error: [Errno 10054] An existing connection was forcibly closed by the r
mote host
|
socket.error
|
def candidates(self, items, artist, album, va_likely):
"""Returns a list of AlbumInfo objects for discogs search results
matching an album and artist (if not various).
"""
if not self.discogs_client:
return
if va_likely:
query = album
else:
query = "%s %s" % (artist, album)
try:
return self.get_albums(query)
except DiscogsAPIError as e:
self._log.debug("API Error: {0} (query: {1})", e, query)
return []
except (ConnectionError, socket.error) as e:
self._log.debug("HTTP Connection Error: {0}", e)
return []
|
def candidates(self, items, artist, album, va_likely):
"""Returns a list of AlbumInfo objects for discogs search results
matching an album and artist (if not various).
"""
if not self.discogs_client:
return
if va_likely:
query = album
else:
query = "%s %s" % (artist, album)
try:
return self.get_albums(query)
except DiscogsAPIError as e:
self._log.debug("API Error: {0} (query: {1})", e, query)
return []
except ConnectionError as e:
self._log.debug("HTTP Connection Error: {0}", e)
return []
|
https://github.com/beetbox/beets/issues/1299
|
Apply, More candidates, Skip, Use as-is, as Tracks, Group albums,
Enter search, enter Id, aBort? a
Traceback (most recent call last):
File "C:\Python27\Scripts\beet-script.py", line 9, in <module>
load_entry_point('beets==1.3.11', 'console_scripts', 'beet')()
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 953, in main
_raw_main(args)
File "c:\users\100557855\src\beets\beets\ui\__init__.py", line 943, in _raw_m
in
subcommand.func(lib, suboptions, subargs)
File "c:\users\100557855\src\beets\beets\ui\commands.py", line 870, in import
func
import_files(lib, paths, query)
File "c:\users\100557855\src\beets\beets\ui\commands.py", line 847, in import
files
session.run()
File "c:\users\100557855\src\beets\beets\importer.py", line 316, in run
pl.run_parallel(QUEUE_SIZE)
File "c:\users\100557855\src\beets\beets\util\pipeline.py", line 301, in run
out = self.coro.send(msg)
File "c:\users\100557855\src\beets\beets\util\pipeline.py", line 183, in coro
func(*(args + (task,)))
File "c:\users\100557855\src\beets\beets\importer.py", line 1227, in lookup_c
ndidates
task.lookup_candidates()
File "c:\users\100557855\src\beets\beets\importer.py", line 558, in lookup_ca
didates
autotag.tag_album(self.items)
File "c:\users\100557855\src\beets\beets\autotag\match.py", line 433, in tag_
lbum
search_album, va_likely)
File "c:\users\100557855\src\beets\beets\autotag\hooks.py", line 563, in albu
_candidates
out.extend(plugins.candidates(items, artist, album, va_likely))
File "c:\users\100557855\src\beets\beets\plugins.py", line 360, in candidates
out.extend(plugin.candidates(items, artist, album, va_likely))
File "c:\users\100557855\src\beets\beetsplug\discogs.py", line 121, in candid
tes
return self.get_albums(query)
File "c:\users\100557855\src\beets\beetsplug\discogs.py", line 173, in get_al
ums
return [self.get_album_info(release) for release in releases[:5]]
File "c:\users\100557855\src\beets\beetsplug\discogs.py", line 178, in get_al
um_info
artist, artist_id = self.get_artist([a.data for a in result.artists])
File "C:\Python27\lib\site-packages\discogs_client\models.py", line 96, in __
et__
return [wrapper_class(instance.client, d) for d in instance.fetch(self.name
[])]
File "C:\Python27\lib\site-packages\discogs_client\models.py", line 237, in f
tch
self.refresh()
File "C:\Python27\lib\site-packages\discogs_client\models.py", line 203, in r
fresh
data = self.client._get(self.data['resource_url'])
File "C:\Python27\lib\site-packages\discogs_client\client.py", line 110, in _
et
return self._request('GET', url)
File "C:\Python27\lib\site-packages\discogs_client\client.py", line 97, in _r
quest
content, status_code = self._fetcher.fetch(self, method, url, data=data, he
ders=headers)
File "C:\Python27\lib\site-packages\discogs_client\fetchers.py", line 73, in
etch
resp, content = self.oauth_client.request(url, method, headers=headers)
File "C:\Python27\lib\site-packages\oauth2\__init__.py", line 682, in request
connection_type=connection_type)
File "C:\Python27\lib\site-packages\httplib2\__init__.py", line 1593, in requ
st
(response, content) = self._request(conn, authority, uri, request_uri, meth
d, body, headers, redirections, cachekey)
File "C:\Python27\lib\site-packages\httplib2\__init__.py", line 1335, in _req
est
(response, content) = self._conn_request(conn, request_uri, method, body, h
aders)
File "C:\Python27\lib\site-packages\httplib2\__init__.py", line 1291, in _con
_request
response = conn.getresponse()
File "C:\Python27\lib\httplib.py", line 1074, in getresponse
response.begin()
File "C:\Python27\lib\httplib.py", line 415, in begin
version, status, reason = self._read_status()
File "C:\Python27\lib\httplib.py", line 371, in _read_status
line = self.fp.readline(_MAXLINE + 1)
File "C:\Python27\lib\socket.py", line 476, in readline
data = self._sock.recv(self._rbufsize)
socket.error: [Errno 10054] An existing connection was forcibly closed by the r
mote host
|
socket.error
|
def commands(self):
def scrub_func(lib, opts, args):
# This is a little bit hacky, but we set a global flag to
# avoid autoscrubbing when we're also explicitly scrubbing.
global scrubbing
scrubbing = True
# Walk through matching files and remove tags.
for item in lib.items(ui.decargs(args)):
self._log.info("scrubbing: {0}", util.displayable_path(item.path))
# Get album art if we need to restore it.
if opts.write:
try:
mf = mediafile.MediaFile(item.path, config["id3v23"].get(bool))
except IOError as exc:
self._log.error("scrubbing failed: {0}", exc)
art = mf.art
# Remove all tags.
self._scrub(item.path)
# Restore tags, if enabled.
if opts.write:
self._log.debug("writing new tags after scrub")
item.try_write()
if art:
self._log.info("restoring art")
mf = mediafile.MediaFile(item.path)
mf.art = art
mf.save()
scrubbing = False
scrub_cmd = ui.Subcommand("scrub", help="clean audio tags")
scrub_cmd.parser.add_option(
"-W",
"--nowrite",
dest="write",
action="store_false",
default=True,
help="leave tags empty",
)
scrub_cmd.func = scrub_func
return [scrub_cmd]
|
def commands(self):
def scrub_func(lib, opts, args):
# This is a little bit hacky, but we set a global flag to
# avoid autoscrubbing when we're also explicitly scrubbing.
global scrubbing
scrubbing = True
# Walk through matching files and remove tags.
for item in lib.items(ui.decargs(args)):
self._log.info("scrubbing: {0}", util.displayable_path(item.path))
# Get album art if we need to restore it.
if opts.write:
mf = mediafile.MediaFile(item.path, config["id3v23"].get(bool))
art = mf.art
# Remove all tags.
self._scrub(item.path)
# Restore tags, if enabled.
if opts.write:
self._log.debug("writing new tags after scrub")
item.try_write()
if art:
self._log.info("restoring art")
mf = mediafile.MediaFile(item.path)
mf.art = art
mf.save()
scrubbing = False
scrub_cmd = ui.Subcommand("scrub", help="clean audio tags")
scrub_cmd.parser.add_option(
"-W",
"--nowrite",
dest="write",
action="store_false",
default=True,
help="leave tags empty",
)
scrub_cmd.func = scrub_func
return [scrub_cmd]
|
https://github.com/beetbox/beets/issues/1297
|
scrubbing: C:\Users\100557855\Desktop\Music\Above & Beyond\TriΓÇÉState\01 TriΓÇÉ
State.mp3
Traceback (most recent call last):
File "C:\Python27\Scripts\beet-script.py", line 9, in <module>
load_entry_point('beets==1.3.10', 'console_scripts', 'beet')()
File "C:\Python27\lib\site-packages\beets\ui\__init__.py", line 945, in main
_raw_main(args)
File "C:\Python27\lib\site-packages\beets\ui\__init__.py", line 935, in _raw_m
ain
subcommand.func(lib, suboptions, subargs)
File "C:\Python27\lib\site-packages\beetsplug\scrub.py", line 73, in scrub_fun
c
config['id3v23'].get(bool))
File "C:\Python27\lib\site-packages\beets\mediafile.py", line 1314, in __init_
_
self.mgfile = mutagen.File(path)
File "C:\Python27\lib\site-packages\mutagen\_file.py", line 221, in File
fileobj = open(filename, "rb")
IOError: [Errno 2] No such file or directory: 'C:\\Users\\100557855\\Desktop\\Mu
sic\\Above & Beyond\\Tri\xe2\x80\x90State\\01 Tri\xe2\x80\x90State.mp3'
|
IOError
|
def scrub_func(lib, opts, args):
# This is a little bit hacky, but we set a global flag to
# avoid autoscrubbing when we're also explicitly scrubbing.
global scrubbing
scrubbing = True
# Walk through matching files and remove tags.
for item in lib.items(ui.decargs(args)):
self._log.info("scrubbing: {0}", util.displayable_path(item.path))
# Get album art if we need to restore it.
if opts.write:
try:
mf = mediafile.MediaFile(item.path, config["id3v23"].get(bool))
except IOError as exc:
self._log.error("scrubbing failed: {0}", exc)
art = mf.art
# Remove all tags.
self._scrub(item.path)
# Restore tags, if enabled.
if opts.write:
self._log.debug("writing new tags after scrub")
item.try_write()
if art:
self._log.info("restoring art")
mf = mediafile.MediaFile(item.path)
mf.art = art
mf.save()
scrubbing = False
|
def scrub_func(lib, opts, args):
# This is a little bit hacky, but we set a global flag to
# avoid autoscrubbing when we're also explicitly scrubbing.
global scrubbing
scrubbing = True
# Walk through matching files and remove tags.
for item in lib.items(ui.decargs(args)):
self._log.info("scrubbing: {0}", util.displayable_path(item.path))
# Get album art if we need to restore it.
if opts.write:
mf = mediafile.MediaFile(item.path, config["id3v23"].get(bool))
art = mf.art
# Remove all tags.
self._scrub(item.path)
# Restore tags, if enabled.
if opts.write:
self._log.debug("writing new tags after scrub")
item.try_write()
if art:
self._log.info("restoring art")
mf = mediafile.MediaFile(item.path)
mf.art = art
mf.save()
scrubbing = False
|
https://github.com/beetbox/beets/issues/1297
|
scrubbing: C:\Users\100557855\Desktop\Music\Above & Beyond\TriΓÇÉState\01 TriΓÇÉ
State.mp3
Traceback (most recent call last):
File "C:\Python27\Scripts\beet-script.py", line 9, in <module>
load_entry_point('beets==1.3.10', 'console_scripts', 'beet')()
File "C:\Python27\lib\site-packages\beets\ui\__init__.py", line 945, in main
_raw_main(args)
File "C:\Python27\lib\site-packages\beets\ui\__init__.py", line 935, in _raw_m
ain
subcommand.func(lib, suboptions, subargs)
File "C:\Python27\lib\site-packages\beetsplug\scrub.py", line 73, in scrub_fun
c
config['id3v23'].get(bool))
File "C:\Python27\lib\site-packages\beets\mediafile.py", line 1314, in __init_
_
self.mgfile = mutagen.File(path)
File "C:\Python27\lib\site-packages\mutagen\_file.py", line 221, in File
fileobj = open(filename, "rb")
IOError: [Errno 2] No such file or directory: 'C:\\Users\\100557855\\Desktop\\Mu
sic\\Above & Beyond\\Tri\xe2\x80\x90State\\01 Tri\xe2\x80\x90State.mp3'
|
IOError
|
def commands(self):
def scrub_func(lib, opts, args):
# This is a little bit hacky, but we set a global flag to
# avoid autoscrubbing when we're also explicitly scrubbing.
global scrubbing
scrubbing = True
# Walk through matching files and remove tags.
for item in lib.items(ui.decargs(args)):
self._log.info("scrubbing: {0}", util.displayable_path(item.path))
# Get album art if we need to restore it.
if opts.write:
try:
mf = mediafile.MediaFile(
util.syspath(item.path), config["id3v23"].get(bool)
)
except IOError as exc:
self._log.error("could not open file to scrub: {0}", exc)
art = mf.art
# Remove all tags.
self._scrub(item.path)
# Restore tags, if enabled.
if opts.write:
self._log.debug("writing new tags after scrub")
item.try_write()
if art:
self._log.info("restoring art")
mf = mediafile.MediaFile(util.syspath(item.path))
mf.art = art
mf.save()
scrubbing = False
scrub_cmd = ui.Subcommand("scrub", help="clean audio tags")
scrub_cmd.parser.add_option(
"-W",
"--nowrite",
dest="write",
action="store_false",
default=True,
help="leave tags empty",
)
scrub_cmd.func = scrub_func
return [scrub_cmd]
|
def commands(self):
def scrub_func(lib, opts, args):
# This is a little bit hacky, but we set a global flag to
# avoid autoscrubbing when we're also explicitly scrubbing.
global scrubbing
scrubbing = True
# Walk through matching files and remove tags.
for item in lib.items(ui.decargs(args)):
self._log.info("scrubbing: {0}", util.displayable_path(item.path))
# Get album art if we need to restore it.
if opts.write:
try:
mf = mediafile.MediaFile(item.path, config["id3v23"].get(bool))
except IOError as exc:
self._log.error("scrubbing failed: {0}", exc)
art = mf.art
# Remove all tags.
self._scrub(item.path)
# Restore tags, if enabled.
if opts.write:
self._log.debug("writing new tags after scrub")
item.try_write()
if art:
self._log.info("restoring art")
mf = mediafile.MediaFile(item.path)
mf.art = art
mf.save()
scrubbing = False
scrub_cmd = ui.Subcommand("scrub", help="clean audio tags")
scrub_cmd.parser.add_option(
"-W",
"--nowrite",
dest="write",
action="store_false",
default=True,
help="leave tags empty",
)
scrub_cmd.func = scrub_func
return [scrub_cmd]
|
https://github.com/beetbox/beets/issues/1297
|
scrubbing: C:\Users\100557855\Desktop\Music\Above & Beyond\TriΓÇÉState\01 TriΓÇÉ
State.mp3
Traceback (most recent call last):
File "C:\Python27\Scripts\beet-script.py", line 9, in <module>
load_entry_point('beets==1.3.10', 'console_scripts', 'beet')()
File "C:\Python27\lib\site-packages\beets\ui\__init__.py", line 945, in main
_raw_main(args)
File "C:\Python27\lib\site-packages\beets\ui\__init__.py", line 935, in _raw_m
ain
subcommand.func(lib, suboptions, subargs)
File "C:\Python27\lib\site-packages\beetsplug\scrub.py", line 73, in scrub_fun
c
config['id3v23'].get(bool))
File "C:\Python27\lib\site-packages\beets\mediafile.py", line 1314, in __init_
_
self.mgfile = mutagen.File(path)
File "C:\Python27\lib\site-packages\mutagen\_file.py", line 221, in File
fileobj = open(filename, "rb")
IOError: [Errno 2] No such file or directory: 'C:\\Users\\100557855\\Desktop\\Mu
sic\\Above & Beyond\\Tri\xe2\x80\x90State\\01 Tri\xe2\x80\x90State.mp3'
|
IOError
|
def scrub_func(lib, opts, args):
# This is a little bit hacky, but we set a global flag to
# avoid autoscrubbing when we're also explicitly scrubbing.
global scrubbing
scrubbing = True
# Walk through matching files and remove tags.
for item in lib.items(ui.decargs(args)):
self._log.info("scrubbing: {0}", util.displayable_path(item.path))
# Get album art if we need to restore it.
if opts.write:
try:
mf = mediafile.MediaFile(
util.syspath(item.path), config["id3v23"].get(bool)
)
except IOError as exc:
self._log.error("could not open file to scrub: {0}", exc)
art = mf.art
# Remove all tags.
self._scrub(item.path)
# Restore tags, if enabled.
if opts.write:
self._log.debug("writing new tags after scrub")
item.try_write()
if art:
self._log.info("restoring art")
mf = mediafile.MediaFile(util.syspath(item.path))
mf.art = art
mf.save()
scrubbing = False
|
def scrub_func(lib, opts, args):
# This is a little bit hacky, but we set a global flag to
# avoid autoscrubbing when we're also explicitly scrubbing.
global scrubbing
scrubbing = True
# Walk through matching files and remove tags.
for item in lib.items(ui.decargs(args)):
self._log.info("scrubbing: {0}", util.displayable_path(item.path))
# Get album art if we need to restore it.
if opts.write:
try:
mf = mediafile.MediaFile(item.path, config["id3v23"].get(bool))
except IOError as exc:
self._log.error("scrubbing failed: {0}", exc)
art = mf.art
# Remove all tags.
self._scrub(item.path)
# Restore tags, if enabled.
if opts.write:
self._log.debug("writing new tags after scrub")
item.try_write()
if art:
self._log.info("restoring art")
mf = mediafile.MediaFile(item.path)
mf.art = art
mf.save()
scrubbing = False
|
https://github.com/beetbox/beets/issues/1297
|
scrubbing: C:\Users\100557855\Desktop\Music\Above & Beyond\TriΓÇÉState\01 TriΓÇÉ
State.mp3
Traceback (most recent call last):
File "C:\Python27\Scripts\beet-script.py", line 9, in <module>
load_entry_point('beets==1.3.10', 'console_scripts', 'beet')()
File "C:\Python27\lib\site-packages\beets\ui\__init__.py", line 945, in main
_raw_main(args)
File "C:\Python27\lib\site-packages\beets\ui\__init__.py", line 935, in _raw_m
ain
subcommand.func(lib, suboptions, subargs)
File "C:\Python27\lib\site-packages\beetsplug\scrub.py", line 73, in scrub_fun
c
config['id3v23'].get(bool))
File "C:\Python27\lib\site-packages\beets\mediafile.py", line 1314, in __init_
_
self.mgfile = mutagen.File(path)
File "C:\Python27\lib\site-packages\mutagen\_file.py", line 221, in File
fileobj = open(filename, "rb")
IOError: [Errno 2] No such file or directory: 'C:\\Users\\100557855\\Desktop\\Mu
sic\\Above & Beyond\\Tri\xe2\x80\x90State\\01 Tri\xe2\x80\x90State.mp3'
|
IOError
|
def parse_query_string(s, model_cls):
"""Given a beets query string, return the `Query` and `Sort` they
represent.
The string is split into components using shell-like syntax.
"""
# A bug in Python < 2.7.3 prevents correct shlex splitting of
# Unicode strings.
# http://bugs.python.org/issue6988
if isinstance(s, unicode):
s = s.encode("utf8")
try:
parts = [p.decode("utf8") for p in shlex.split(s)]
except ValueError as exc:
raise ValueError("Cannot parse {0!r} (error was: {1})".format(s, exc))
return parse_query_parts(parts, model_cls)
|
def parse_query_string(s, model_cls):
"""Given a beets query string, return the `Query` and `Sort` they
represent.
The string is split into components using shell-like syntax.
"""
# A bug in Python < 2.7.3 prevents correct shlex splitting of
# Unicode strings.
# http://bugs.python.org/issue6988
if isinstance(s, unicode):
s = s.encode("utf8")
parts = [p.decode("utf8") for p in shlex.split(s)]
return parse_query_parts(parts, model_cls)
|
https://github.com/beetbox/beets/issues/1290
|
Traceback (most recent call last):
File "/usr/local/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.10', 'console_scripts', 'beet')()
File "/usr/local/lib/python2.7/dist-packages/beets/ui/__init__.py", line 945, in main
_raw_main(args)
File "/usr/local/lib/python2.7/dist-packages/beets/ui/__init__.py", line 935, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/local/lib/python2.7/dist-packages/beets/ui/commands.py", line 873, in import_func
import_files(lib, paths, query)
File "/usr/local/lib/python2.7/dist-packages/beets/ui/commands.py", line 845, in import_files
session.run()
File "/usr/local/lib/python2.7/dist-packages/beets/importer.py", line 305, in run
pl.run_parallel(QUEUE_SIZE)
File "/usr/local/lib/python2.7/dist-packages/beets/util/pipeline.py", line 345, in run
self.coro.send(msg)
File "/usr/local/lib/python2.7/dist-packages/beets/util/pipeline.py", line 158, in coro
task = func(*(args + (task,)))
File "/usr/local/lib/python2.7/dist-packages/beets/importer.py", line 1291, in manipulate_files
session=session,
File "/usr/local/lib/python2.7/dist-packages/beets/importer.py", line 635, in manipulate_files
item.move(copy, link)
File "/usr/local/lib/python2.7/dist-packages/beets/library.py", line 611, in move
dest = self.destination(basedir=basedir)
File "/usr/local/lib/python2.7/dist-packages/beets/library.py", line 653, in destination
query, _ = parse_query_string(query, type(self))
File "/usr/local/lib/python2.7/dist-packages/beets/library.py", line 1016, in parse_query_string
parts = [p.decode('utf8') for p in shlex.split(s)]
File "/usr/lib/python2.7/shlex.py", line 279, in split
return list(lex)
File "/usr/lib/python2.7/shlex.py", line 269, in next
token = self.get_token()
File "/usr/lib/python2.7/shlex.py", line 96, in get_token
raw = self.read_token()
File "/usr/lib/python2.7/shlex.py", line 172, in read_token
raise ValueError, "No closing quotation"
ValueError: No closing quotation
|
ValueError
|
def is_lyrics(text, artist=None):
"""Determine whether the text seems to be valid lyrics."""
if not text:
return
badTriggersOcc = []
nbLines = text.count("\n")
if nbLines <= 1:
log.debug("Ignoring too short lyrics '{0}'".format(text))
return 0
elif nbLines < 5:
badTriggersOcc.append("too_short")
else:
# Lyrics look legit, remove credits to avoid being penalized further
# down
text = remove_credits(text)
badTriggers = ["lyrics", "copyright", "property", "links"]
if artist:
badTriggersOcc += [artist]
for item in badTriggers:
badTriggersOcc += [item] * len(re.findall(r"\W%s\W" % item, text, re.I))
if badTriggersOcc:
log.debug("Bad triggers detected: {0}".format(badTriggersOcc))
return len(badTriggersOcc) < 2
|
def is_lyrics(text, artist=None):
"""Determine whether the text seems to be valid lyrics."""
if not text:
return
badTriggersOcc = []
nbLines = text.count("\n")
if nbLines <= 1:
log.debug("Ignoring too short lyrics '{0}'".format(text.decode("utf8")))
return 0
elif nbLines < 5:
badTriggersOcc.append("too_short")
else:
# Lyrics look legit, remove credits to avoid being penalized further
# down
text = remove_credits(text)
badTriggers = ["lyrics", "copyright", "property", "links"]
if artist:
badTriggersOcc += [artist]
for item in badTriggers:
badTriggersOcc += [item] * len(re.findall(r"\W%s\W" % item, text, re.I))
if badTriggersOcc:
log.debug("Bad triggers detected: {0}".format(badTriggersOcc))
return len(badTriggersOcc) < 2
|
https://github.com/beetbox/beets/issues/1135
|
fetching Elephanz Stereo
Traceback (most recent call last):
File "/Users/flap/bin/beet", line 20, in <module>
beets.ui.main()
File "/Users/flap/Dev/beets/beets/ui/__init__.py", line 937, in main
_raw_main(args)
File "/Users/flap/Dev/beets/beets/ui/__init__.py", line 927, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/Users/flap/Dev/beets/beets/ui/commands.py", line 872, in import_func
import_files(lib, paths, query)
File "/Users/flap/Dev/beets/beets/ui/commands.py", line 844, in import_files
session.run()
File "/Users/flap/Dev/beets/beets/importer.py", line 300, in run
pl.run_parallel(QUEUE_SIZE)
File "/Users/flap/Dev/beets/beets/util/pipeline.py", line 299, in run
out = self.coro.send(msg)
File "/Users/flap/Dev/beets/beets/util/pipeline.py", line 181, in coro
func(*(args + (task,)))
File "/Users/flap/Dev/beets/beets/importer.py", line 1263, in plugin_stage
func(session, task)
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 457, in imported
False, False)
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 473, in fetch_item_lyrics
lyrics = [self.get_lyrics(artist, title) for title in titles]
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 503, in get_lyrics
lyrics = backend(artist, title)
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 405, in fetch_google
if is_lyrics(lyrics, artist):
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 302, in is_lyrics
text.decode('utf8')))
File "/usr/local/Cellar/python/2.7.8_1/Frameworks/Python.framework/Versions/2.7/lib/python2.7/encodings/utf_8.py", line 16, in decode
return codecs.utf_8_decode(input, errors, True)
UnicodeEncodeError: 'ascii' codec can't encode character u'\xe9' in position 61: ordinal not in range(128)
|
UnicodeEncodeError
|
def scrape_lyrics_from_html(html):
"""Scrape lyrics from a URL. If no lyrics can be found, return None
instead.
"""
from bs4 import SoupStrainer, BeautifulSoup
if not html:
return None
def is_text_notcode(text):
length = len(text)
return (
length > 20
and text.count(" ") > length / 25
and (text.find("{") == -1 or text.find(";") == -1)
)
html = _scrape_strip_cruft(html)
html = _scrape_merge_paragraphs(html)
# extract all long text blocks that are not code
try:
soup = BeautifulSoup(
html, "html.parser", parse_only=SoupStrainer(text=is_text_notcode)
)
except HTMLParseError:
return None
soup = sorted(soup.stripped_strings, key=len)[-1]
if isinstance(soup, str):
soup = soup.decode("utf8", "ignore")
return soup
|
def scrape_lyrics_from_html(html):
"""Scrape lyrics from a URL. If no lyrics can be found, return None
instead.
"""
from bs4 import SoupStrainer, BeautifulSoup
if not html:
return None
def is_text_notcode(text):
length = len(text)
return (
length > 20
and text.count(" ") > length / 25
and (text.find("{") == -1 or text.find(";") == -1)
)
html = _scrape_strip_cruft(html)
html = _scrape_merge_paragraphs(html)
# extract all long text blocks that are not code
try:
soup = BeautifulSoup(
html, "html.parser", parse_only=SoupStrainer(text=is_text_notcode)
)
except HTMLParseError:
return None
soup = sorted(soup.stripped_strings, key=len)[-1]
return soup
|
https://github.com/beetbox/beets/issues/1135
|
fetching Elephanz Stereo
Traceback (most recent call last):
File "/Users/flap/bin/beet", line 20, in <module>
beets.ui.main()
File "/Users/flap/Dev/beets/beets/ui/__init__.py", line 937, in main
_raw_main(args)
File "/Users/flap/Dev/beets/beets/ui/__init__.py", line 927, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/Users/flap/Dev/beets/beets/ui/commands.py", line 872, in import_func
import_files(lib, paths, query)
File "/Users/flap/Dev/beets/beets/ui/commands.py", line 844, in import_files
session.run()
File "/Users/flap/Dev/beets/beets/importer.py", line 300, in run
pl.run_parallel(QUEUE_SIZE)
File "/Users/flap/Dev/beets/beets/util/pipeline.py", line 299, in run
out = self.coro.send(msg)
File "/Users/flap/Dev/beets/beets/util/pipeline.py", line 181, in coro
func(*(args + (task,)))
File "/Users/flap/Dev/beets/beets/importer.py", line 1263, in plugin_stage
func(session, task)
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 457, in imported
False, False)
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 473, in fetch_item_lyrics
lyrics = [self.get_lyrics(artist, title) for title in titles]
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 503, in get_lyrics
lyrics = backend(artist, title)
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 405, in fetch_google
if is_lyrics(lyrics, artist):
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 302, in is_lyrics
text.decode('utf8')))
File "/usr/local/Cellar/python/2.7.8_1/Frameworks/Python.framework/Versions/2.7/lib/python2.7/encodings/utf_8.py", line 16, in decode
return codecs.utf_8_decode(input, errors, True)
UnicodeEncodeError: 'ascii' codec can't encode character u'\xe9' in position 61: ordinal not in range(128)
|
UnicodeEncodeError
|
def get_lyrics(self, artist, title):
"""Fetch lyrics, trying each source in turn. Return a string or
None if no lyrics were found.
"""
for backend in self.backends:
lyrics = backend(artist, title)
if lyrics:
log.debug("got lyrics from backend: {0}".format(backend.__name__))
return lyrics.strip()
|
def get_lyrics(self, artist, title):
"""Fetch lyrics, trying each source in turn. Return a string or
None if no lyrics were found.
"""
for backend in self.backends:
lyrics = backend(artist, title)
if lyrics:
if isinstance(lyrics, str):
lyrics = lyrics.decode("utf8", "ignore")
log.debug("got lyrics from backend: {0}".format(backend.__name__))
return lyrics.strip()
|
https://github.com/beetbox/beets/issues/1135
|
fetching Elephanz Stereo
Traceback (most recent call last):
File "/Users/flap/bin/beet", line 20, in <module>
beets.ui.main()
File "/Users/flap/Dev/beets/beets/ui/__init__.py", line 937, in main
_raw_main(args)
File "/Users/flap/Dev/beets/beets/ui/__init__.py", line 927, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/Users/flap/Dev/beets/beets/ui/commands.py", line 872, in import_func
import_files(lib, paths, query)
File "/Users/flap/Dev/beets/beets/ui/commands.py", line 844, in import_files
session.run()
File "/Users/flap/Dev/beets/beets/importer.py", line 300, in run
pl.run_parallel(QUEUE_SIZE)
File "/Users/flap/Dev/beets/beets/util/pipeline.py", line 299, in run
out = self.coro.send(msg)
File "/Users/flap/Dev/beets/beets/util/pipeline.py", line 181, in coro
func(*(args + (task,)))
File "/Users/flap/Dev/beets/beets/importer.py", line 1263, in plugin_stage
func(session, task)
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 457, in imported
False, False)
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 473, in fetch_item_lyrics
lyrics = [self.get_lyrics(artist, title) for title in titles]
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 503, in get_lyrics
lyrics = backend(artist, title)
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 405, in fetch_google
if is_lyrics(lyrics, artist):
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 302, in is_lyrics
text.decode('utf8')))
File "/usr/local/Cellar/python/2.7.8_1/Frameworks/Python.framework/Versions/2.7/lib/python2.7/encodings/utf_8.py", line 16, in decode
return codecs.utf_8_decode(input, errors, True)
UnicodeEncodeError: 'ascii' codec can't encode character u'\xe9' in position 61: ordinal not in range(128)
|
UnicodeEncodeError
|
def fetch_url(url):
"""Retrieve the content at a given URL, or return None if the source
is unreachable.
"""
r = requests.get(url)
if r.status_code == requests.codes.ok:
return r.text
else:
log.debug("failed to fetch: {0} ({1})".format(url, r.status_code))
return None
|
def fetch_url(url):
"""Retrieve the content at a given URL, or return None if the source
is unreachable.
"""
try:
return urllib.urlopen(url).read()
except IOError as exc:
log.debug("failed to fetch: {0} ({1})".format(url, unicode(exc)))
return None
|
https://github.com/beetbox/beets/issues/1135
|
fetching Elephanz Stereo
Traceback (most recent call last):
File "/Users/flap/bin/beet", line 20, in <module>
beets.ui.main()
File "/Users/flap/Dev/beets/beets/ui/__init__.py", line 937, in main
_raw_main(args)
File "/Users/flap/Dev/beets/beets/ui/__init__.py", line 927, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/Users/flap/Dev/beets/beets/ui/commands.py", line 872, in import_func
import_files(lib, paths, query)
File "/Users/flap/Dev/beets/beets/ui/commands.py", line 844, in import_files
session.run()
File "/Users/flap/Dev/beets/beets/importer.py", line 300, in run
pl.run_parallel(QUEUE_SIZE)
File "/Users/flap/Dev/beets/beets/util/pipeline.py", line 299, in run
out = self.coro.send(msg)
File "/Users/flap/Dev/beets/beets/util/pipeline.py", line 181, in coro
func(*(args + (task,)))
File "/Users/flap/Dev/beets/beets/importer.py", line 1263, in plugin_stage
func(session, task)
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 457, in imported
False, False)
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 473, in fetch_item_lyrics
lyrics = [self.get_lyrics(artist, title) for title in titles]
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 503, in get_lyrics
lyrics = backend(artist, title)
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 405, in fetch_google
if is_lyrics(lyrics, artist):
File "/Users/flap/Dev/beets/beetsplug/lyrics.py", line 302, in is_lyrics
text.decode('utf8')))
File "/usr/local/Cellar/python/2.7.8_1/Frameworks/Python.framework/Versions/2.7/lib/python2.7/encodings/utf_8.py", line 16, in decode
return codecs.utf_8_decode(input, errors, True)
UnicodeEncodeError: 'ascii' codec can't encode character u'\xe9' in position 61: ordinal not in range(128)
|
UnicodeEncodeError
|
def write_item_mtime(item, mtime):
"""Write the given mtime to an item's `mtime` field and to the mtime of the
item's file.
"""
if mtime is None:
log.warn(
"No mtime to be preserved for item '{0}'".format(
util.displayable_path(item.path)
)
)
return
# The file's mtime on disk must be in sync with the item's mtime
write_file_mtime(util.syspath(item.path), mtime)
item.mtime = mtime
|
def write_item_mtime(item, mtime):
"""Write the given mtime to an item's `mtime` field and to the mtime of the
item's file.
"""
if mtime is None:
log.warn(
"No mtime to be preserved for item {0}".format(
util.displayable_path(item.path)
)
)
return
# The file's mtime on disk must be in sync with the item's mtime
write_file_mtime(util.syspath(item.path), mtime)
item.mtime = mtime
|
https://github.com/beetbox/beets/issues/911
|
$ beet import -L
/beets/src/media/library/Younger Brother (2011) - Vaccine (9 items)
Tagging:
Younger Brother - Vaccine
URL:
http://musicbrainz.org/release/7da3a7d8-0240-4ebd-bab6-44a1a72504fe
(Similarity: 100.0%) (CD, 2011, US, SCI Fidelity Records)
Traceback (most recent call last):
File "/beets/bin/beet", line 20, in <module>
beets.ui.main()
File "/beets/src/beets/beets/ui/__init__.py", line 966, in main
_raw_main(args)
File "/beets/src/beets/beets/ui/__init__.py", line 957, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/beets/src/beets/beets/ui/commands.py", line 884, in import_func
import_files(lib, paths, query)
File "/beets/src/beets/beets/ui/commands.py", line 856, in import_files
session.run()
File "/beets/src/beets/beets/importer.py", line 293, in run
pl.run_sequential()
File "/beets/src/beets/beets/util/pipeline.py", line 376, in run_sequential
list(self.pull())
File "/beets/src/beets/beets/util/pipeline.py", line 453, in pull
out = coro.send(msg)
File "/beets/src/beets/beets/util/pipeline.py", line 158, in coro
task = func(*(args + (task,)))
File "/beets/src/beets/beets/importer.py", line 1138, in manipulate_files
task.finalize(session)
File "/beets/src/beets/beets/importer.py", line 470, in finalize
self._emit_imported(session.lib)
File "/beets/src/beets/beets/importer.py", line 502, in _emit_imported
plugins.send('album_imported', lib=lib, album=self.album)
File "/beets/src/beets/beets/plugins.py", line 379, in send
handler(**args)
File "/beets/src/beets/beetsplug/importadded.py", line 77, in update_album_times
mtime = item_mtime[item.path]
KeyError: '/beets/src/media/library/Younger Brother (2011) - Vaccine/Younger Brother - Vaccine - 01 - Crystalline.flac'
|
KeyError
|
def record_import_mtime(item, source, destination):
"""Record the file mtime of an item's path before its import."""
mtime = os.stat(util.syspath(source)).st_mtime
item_mtime[destination] = mtime
log.debug(
"Recorded mtime {0} for item '{1}' imported from '{2}'".format(
mtime, util.displayable_path(destination), util.displayable_path(source)
)
)
|
def record_import_mtime(item, source, destination):
"""Record the file mtime of an item's path before import."""
if source == destination:
# Re-import of an existing library item?
return
mtime = os.stat(util.syspath(source)).st_mtime
item_mtime[destination] = mtime
log.debug(
"Recorded mtime {0} for item '{1}' imported from '{2}'".format(
mtime, util.displayable_path(destination), util.displayable_path(source)
)
)
|
https://github.com/beetbox/beets/issues/911
|
$ beet import -L
/beets/src/media/library/Younger Brother (2011) - Vaccine (9 items)
Tagging:
Younger Brother - Vaccine
URL:
http://musicbrainz.org/release/7da3a7d8-0240-4ebd-bab6-44a1a72504fe
(Similarity: 100.0%) (CD, 2011, US, SCI Fidelity Records)
Traceback (most recent call last):
File "/beets/bin/beet", line 20, in <module>
beets.ui.main()
File "/beets/src/beets/beets/ui/__init__.py", line 966, in main
_raw_main(args)
File "/beets/src/beets/beets/ui/__init__.py", line 957, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/beets/src/beets/beets/ui/commands.py", line 884, in import_func
import_files(lib, paths, query)
File "/beets/src/beets/beets/ui/commands.py", line 856, in import_files
session.run()
File "/beets/src/beets/beets/importer.py", line 293, in run
pl.run_sequential()
File "/beets/src/beets/beets/util/pipeline.py", line 376, in run_sequential
list(self.pull())
File "/beets/src/beets/beets/util/pipeline.py", line 453, in pull
out = coro.send(msg)
File "/beets/src/beets/beets/util/pipeline.py", line 158, in coro
task = func(*(args + (task,)))
File "/beets/src/beets/beets/importer.py", line 1138, in manipulate_files
task.finalize(session)
File "/beets/src/beets/beets/importer.py", line 470, in finalize
self._emit_imported(session.lib)
File "/beets/src/beets/beets/importer.py", line 502, in _emit_imported
plugins.send('album_imported', lib=lib, album=self.album)
File "/beets/src/beets/beets/plugins.py", line 379, in send
handler(**args)
File "/beets/src/beets/beetsplug/importadded.py", line 77, in update_album_times
mtime = item_mtime[item.path]
KeyError: '/beets/src/media/library/Younger Brother (2011) - Vaccine/Younger Brother - Vaccine - 01 - Crystalline.flac'
|
KeyError
|
def update_album_times(lib, album):
if reimported_album(album):
log.debug(
"Album '{0}' is reimported, skipping import of added dates"
" for the album and its items.".format(util.displayable_path(album.path))
)
return
album_mtimes = []
for item in album.items():
mtime = item_mtime.pop(item.path, None)
if mtime:
album_mtimes.append(mtime)
if config["importadded"]["preserve_mtimes"].get(bool):
write_item_mtime(item, mtime)
item.store()
album.added = min(album_mtimes)
log.debug(
"Import of album '{0}', selected album.added={1} from item file mtimes.".format(
album.album, album.added
)
)
album.store()
|
def update_album_times(lib, album):
album_mtimes = []
for item in album.items():
mtime = item_mtime[item.path]
if mtime is not None:
album_mtimes.append(mtime)
if config["importadded"]["preserve_mtimes"].get(bool):
write_item_mtime(item, mtime)
item.store()
del item_mtime[item.path]
album.added = min(album_mtimes)
album.store()
|
https://github.com/beetbox/beets/issues/911
|
$ beet import -L
/beets/src/media/library/Younger Brother (2011) - Vaccine (9 items)
Tagging:
Younger Brother - Vaccine
URL:
http://musicbrainz.org/release/7da3a7d8-0240-4ebd-bab6-44a1a72504fe
(Similarity: 100.0%) (CD, 2011, US, SCI Fidelity Records)
Traceback (most recent call last):
File "/beets/bin/beet", line 20, in <module>
beets.ui.main()
File "/beets/src/beets/beets/ui/__init__.py", line 966, in main
_raw_main(args)
File "/beets/src/beets/beets/ui/__init__.py", line 957, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/beets/src/beets/beets/ui/commands.py", line 884, in import_func
import_files(lib, paths, query)
File "/beets/src/beets/beets/ui/commands.py", line 856, in import_files
session.run()
File "/beets/src/beets/beets/importer.py", line 293, in run
pl.run_sequential()
File "/beets/src/beets/beets/util/pipeline.py", line 376, in run_sequential
list(self.pull())
File "/beets/src/beets/beets/util/pipeline.py", line 453, in pull
out = coro.send(msg)
File "/beets/src/beets/beets/util/pipeline.py", line 158, in coro
task = func(*(args + (task,)))
File "/beets/src/beets/beets/importer.py", line 1138, in manipulate_files
task.finalize(session)
File "/beets/src/beets/beets/importer.py", line 470, in finalize
self._emit_imported(session.lib)
File "/beets/src/beets/beets/importer.py", line 502, in _emit_imported
plugins.send('album_imported', lib=lib, album=self.album)
File "/beets/src/beets/beets/plugins.py", line 379, in send
handler(**args)
File "/beets/src/beets/beetsplug/importadded.py", line 77, in update_album_times
mtime = item_mtime[item.path]
KeyError: '/beets/src/media/library/Younger Brother (2011) - Vaccine/Younger Brother - Vaccine - 01 - Crystalline.flac'
|
KeyError
|
def update_item_times(lib, item):
if reimported_item(item):
log.debug(
"Item '{0}' is reimported, skipping import of added date.".format(
util.displayable_path(item.path)
)
)
return
mtime = item_mtime.pop(item.path, None)
if mtime:
item.added = mtime
if config["importadded"]["preserve_mtimes"].get(bool):
write_item_mtime(item, mtime)
log.debug(
"Import of item '{0}', selected item.added={1}".format(
util.displayable_path(item.path), item.added
)
)
item.store()
|
def update_item_times(lib, item):
mtime = item_mtime[item.path]
if mtime is not None:
item.added = mtime
if config["importadded"]["preserve_mtimes"].get(bool):
write_item_mtime(item, mtime)
item.store()
del item_mtime[item.path]
|
https://github.com/beetbox/beets/issues/911
|
$ beet import -L
/beets/src/media/library/Younger Brother (2011) - Vaccine (9 items)
Tagging:
Younger Brother - Vaccine
URL:
http://musicbrainz.org/release/7da3a7d8-0240-4ebd-bab6-44a1a72504fe
(Similarity: 100.0%) (CD, 2011, US, SCI Fidelity Records)
Traceback (most recent call last):
File "/beets/bin/beet", line 20, in <module>
beets.ui.main()
File "/beets/src/beets/beets/ui/__init__.py", line 966, in main
_raw_main(args)
File "/beets/src/beets/beets/ui/__init__.py", line 957, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/beets/src/beets/beets/ui/commands.py", line 884, in import_func
import_files(lib, paths, query)
File "/beets/src/beets/beets/ui/commands.py", line 856, in import_files
session.run()
File "/beets/src/beets/beets/importer.py", line 293, in run
pl.run_sequential()
File "/beets/src/beets/beets/util/pipeline.py", line 376, in run_sequential
list(self.pull())
File "/beets/src/beets/beets/util/pipeline.py", line 453, in pull
out = coro.send(msg)
File "/beets/src/beets/beets/util/pipeline.py", line 158, in coro
task = func(*(args + (task,)))
File "/beets/src/beets/beets/importer.py", line 1138, in manipulate_files
task.finalize(session)
File "/beets/src/beets/beets/importer.py", line 470, in finalize
self._emit_imported(session.lib)
File "/beets/src/beets/beets/importer.py", line 502, in _emit_imported
plugins.send('album_imported', lib=lib, album=self.album)
File "/beets/src/beets/beets/plugins.py", line 379, in send
handler(**args)
File "/beets/src/beets/beetsplug/importadded.py", line 77, in update_album_times
mtime = item_mtime[item.path]
KeyError: '/beets/src/media/library/Younger Brother (2011) - Vaccine/Younger Brother - Vaccine - 01 - Crystalline.flac'
|
KeyError
|
def convert(x):
if isinstance(x, unicode):
return x
elif isinstance(x, BASESTRING):
return x.decode("utf8", "ignore")
else:
self.fail("must be a list of strings", view, True)
|
def convert(self, value, view):
if isinstance(value, bytes):
value = value.decode("utf8", "ignore")
if isinstance(value, STRING):
if self.split:
return value.split()
else:
return [value]
else:
try:
value = list(value)
except TypeError:
self.fail("must be a whitespace-separated string or a list", view, True)
if all(isinstance(x, BASESTRING) for x in value):
return value
else:
self.fail("must be a list of strings", view, True)
|
https://github.com/beetbox/beets/issues/887
|
sombra/somasis:~/MusicIncoming/ λ beet -v imp Blank\ Banshee\ -\ Blank\ Banshee\ *
user configuration: /home/somasis/.config/beets/config.yaml
Sending event: pluginload
Sending event: library_opened
data directory: /home/somasis/.config/beets
library database: /home/somasis/Music/library.blb
library directory: /home/somasis/Music
error reading Blank Banshee - Blank Banshee 0 - QR-SCAN-Print.jpg: error reading /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - QR-SCAN-Print.jpg: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - QR-SCAN-Print.jpg
Sending event: import_task_start
error reading Blank Banshee - Blank Banshee 1 - FREE WALLPAPER√√√.jpg: error reading /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - FREE WALLPAPER√√√.jpg: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - FREE WALLPAPER√√√.jpg
error reading Blank Banshee - Blank Banshee 1 - �.jpg: error reading /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - �.jpg: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - �.jpg
error reading cover.jpg: error reading /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/cover.jpg: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/cover.jpg
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 01 B- Start Up.flac'
chroma: matched recordings [u'0d6b4c02-4252-40b8-a96b-ee92a7541782']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 02 Wavestep.flac'
chroma: matched recordings [u'e78f39e0-ec1e-4095-be6e-3c8ccf175d61']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 03 Bathsalts.flac'
chroma: matched recordings [u'1cd0bb95-49e3-4cea-960e-b821ba124fbe']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 04 Ammonia Clouds.flac'
chroma: matched recordings [u'32b1f0d0-ac89-45b9-af8b-2444558a4a05']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 05 Venus Death Trap.flac'
chroma: matched recordings [u'b9ae6039-6824-4fa9-82d2-22e391b0cb77']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 06 HYP\xe2\x98\xb0R OBJ\xe2\x98\xb0CT.flac'
chroma: matched recordings [u'cc9c4295-22d2-4f0f-a322-fd1225bf4c0e']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 07 Photosynthesis.flac'
chroma: matched recordings [u'6f95814c-9325-4637-a183-2ce16278c62e']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 08 D \xe2\x98\xb0 \xe2\x98\xb0 P $ P \xce\x9b C \xe2\x98\xb0.flac'
chroma: matched recordings [u'8f019dc2-1bfd-424d-aa25-975efc2d00f4']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 09 Dreamcast.flac'
chroma: matched recordings [u'aedd2a55-75cd-4bb2-acaf-968faae74e4e']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 10 Cyber Zodiac.flac'
chroma: matched recordings [u'814922c4-1706-45b9-99ab-33af5bca27eb']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 11 Teen Pregnancy.flac'
chroma: matched recordings [u'53bd825f-bce0-4bc2-b139-c32b791f61b2']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 12 Purity Boys.flac'
chroma: matched recordings [u'01553d3f-4acb-4d38-bd64-4bd20ea56380']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 13 Visualization.flac'
chroma: matched recordings [u'f54e6b16-f91c-4a53-bf67-28a7e3bca871']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 14 World Vision.flac'
chroma: matched recordings [u'512ec8a6-b9da-4e16-a8de-2077eb5194dd']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 15 B- Shut down-Depression.flac'
chroma: matched recordings [u'a1085c33-6b6b-463e-ae6a-c3203969f0d3']
Looking up: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0
Tagging Blank Banshee - Blank Banshee 0
No album IDs found.
Search terms: Blank Banshee - Blank Banshee 0
Album might be VA: False
acoustid album candidates: 1
Discogs API Error: 503 Service Unavailable (query: Blank Banshee Blank Banshee 0
Evaluating 6 candidates.
Candidate: Blank Banshee - Blank Banshee 0
Success. Distance: 0.000000
Candidate: Blank Banshee - Blank Banshee 1
Success. Distance: 0.714291
Candidate: Blank Banshee - Blank Banshee 1
Success. Distance: 0.648056
Candidate: Screaming Banshee Aircrew - When All Is Said and Done
Success. Distance: 0.818253
Candidate: Gerda Blank - gerda:BLANK
Success. Distance: 0.872529
Candidate: Blank Banshee - Blank Banshee 0
Duplicate.
Sending event: import_task_start
/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0 (15 items)
Tagging:
Blank Banshee - Blank Banshee 0
URL:
http://musicbrainz.org/release/9509a46c-b2d9-4ac5-b4a9-f219a18be3b4
(Similarity: 100.0%) (Digital Media, 2012, CA)
* D ☰ ☰ P $ P Λ C ☰ -> D ☰ ☰ P $ P Λ C ☰
Sending event: import_task_choice
This album is already in the library!
[S]kip new, Keep both, Remove old? chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 01 B- Infinite Login.flac'
chroma: matched recordings [u'7afcf15d-9b85-48d2-ad44-4d60307d91ea']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 02 Eco Zones.flac'
chroma: matched recordings [u'b2332635-4963-471e-bb33-40d3be821d0e', u'd4f6d63b-8ef0-4db3-a904-ec0a561f35c0']
rchroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 03 METAMATERIAL.flac'
chroma: matched recordings [u'b2abaf84-f060-4fae-94ab-1230a25987f8', u'c3374faa-89fc-4a18-a8f9-3c242e78fb2a']
Sending event: import_task_apply
15 of 15 items replaced
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
lyrics not found: Blank Banshee - Teen Pregnancy
lyrics not found: Blank Banshee - D ☰ ☰ P $ P Λ C ☰
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 04 Anxiety Online !.flac'
chroma: matched recordings [u'23fcbedc-c9ea-4465-9221-7018ee039241']
lyrics not found: Blank Banshee - Photosynthesis
lyrics not found: Blank Banshee - Purity Boys
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 05 Java Clouds-\xd9\x85\xd8\xac\xd8\xb3\xd9\x85\xd9\x87 \xd8\xb3\xd8\xa7\xd8\xb2\xdb\x8c \xd9\x85\xd9\x87\xd8\xaa\xd8\xa7\xd8\xa8.flac'
chroma: matched recordings [u'0ee06ce3-3fea-4c94-bae4-81acbbb5abf2', u'b683574d-2abe-4473-a624-a7d9ec8c867c']
lyrics not found: Blank Banshee - Dreamcast
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 06 LSD Polyphony.flac'
chroma: matched recordings [u'40277e03-7e07-4b30-970b-0979a45c7314', u'4d80187c-6660-4330-a179-a61f1cafad13']
lyrics not found: Blank Banshee - Visualization
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 07 Conflict Minerals.flac'
chroma: matched recordings [u'5dca7ab9-8374-4542-b080-563e9a84a71b', u'acd0e9ef-0a65-4de9-be50-4210e3b12aa0']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 08 Metal Rain.flac'
chroma: matched recordings [u'da81c6b9-73b3-4428-86ce-e0c958fd8388', u'e6553f3a-dd38-407a-8350-72f75bcc3239']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 09 Big Gulp.flac'
chroma: matched recordings [u'48bb5157-f145-4a26-91f2-1bbfe3843ad4', u'5bad5e8b-a738-45c0-9702-fa580716d85b']
lyrics not found: Blank Banshee - B:/ Start Up
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 10 Doldrum Corp..flac'
chroma: matched recordings [u'40783287-5e49-4986-8c2e-98fc76bc4575', u'd48b5188-8830-4f34-ab96-99bec735d592']
lyrics not found: Blank Banshee - Bathsalts
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 11 Cyber Slums.flac'
chroma: matched recordings [u'252b73ea-fa04-47d0-b030-ff074240db78', u'4fab878d-9656-4a3f-b8d1-195d374eabf2']
lyrics not found: Blank Banshee - Cyber Zodiac
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 12 Realization.flac'
chroma: matched recordings [u'79f44bff-d5be-46f0-89a5-e5c56efa3dda']
lyrics not found: Blank Banshee - Venus Death Trap
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 13 SOLAR PLEXUS.flac'
chroma: matched recordings [u'456e401f-e3cd-44c6-b7e8-fee3dce95785']
lyrics not found: Blank Banshee - World Vision
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 14 Paradise Disc -\xe8\x8a\xb8\xe8\x83\xbd\xe5\xb1\xb1\xe5\x9f\x8e\xe7\xb5\x84.flac'
chroma: matched recordings [u'45b77e22-08f8-4e21-bff7-60c32c445f66']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 15 B- Hidden-Reality.flac'
chroma: matched recordings [u'0b2b6785-e9b9-494a-a3fd-7c7c332af4e1']
Looking up: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1
Tagging Blank Banshee - Blank Banshee 1
No album IDs found.
Search terms: Blank Banshee - Blank Banshee 1
Album might be VA: False
lyrics not found: Blank Banshee - Ammonia Clouds
lyrics not found: Blank Banshee - HYP☰R OBJ☰CT
acoustid album candidates: 1
lyrics not found: Blank Banshee - B:/ Shut down/Depression
lyrics not found: Blank Banshee - Wavestep
fetchart: using fallback art file Blank Banshee - Blank Banshee 0 - QR-SCAN-Print.jpg
Discogs API Error: 404 Not Found (query: Blank Banshee Blank Banshee 1
Evaluating 6 candidates.
Candidate: Blank Banshee - Blank Banshee 1
Success. Distance: 0.234335
Candidate: Blank Banshee - Blank Banshee 1
Success. Distance: 0.000000
Candidate: Blank Banshee - Blank Banshee 0
Success. Distance: 0.634043
Candidate: Screaming Banshee Aircrew - When All Is Said and Done
added last.fm album genre (artist): Electronic
Success. Distance: 0.822202
Sending event: database_change
Candidate: Gerda Blank - gerda:BLANK
Success. Distance: 0.882945
Candidate: Blank Banshee - Blank Banshee 1
Duplicate.
/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1 (15 items)
Tagging:
Blank Banshee - Blank Banshee 1
URL:
http://musicbrainz.org/release/5e66f0d1-6925-44ab-b96c-da7c7bfd644c
(Similarity: 100.0%) (Digital Media, 2013)
Sending event: import_task_choice
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: import_task_apply
replacing item 8860: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 04 Anxiety Online !.flac
Sending event: database_change
Sending event: item_removed
replacing item 8864: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 12 Realization.flac
Sending event: database_change
Sending event: item_removed
replacing item 8862: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 08 Metal Rain.flac
Sending event: database_change
Sending event: item_removed
replacing item 8871: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 11 Cyber Slums.flac
Sending event: database_change
Sending event: item_removed
replacing item 8868: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 13 SOLAR PLEXUS.flac
Sending event: database_change
Sending event: item_removed
replacing item 8865: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 05 Java Clouds-مجسمه سازی مهتاب.flac
Sending event: database_change
Sending event: item_removed
replacing item 8867: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 09 Big Gulp.flac
Sending event: database_change
Sending event: item_removed
replacing item 8866: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 15 B- Hidden-Reality.flac
Sending event: database_change
Sending event: item_removed
replacing item 8874: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 02 Eco Zones.flac
Sending event: database_change
Sending event: item_removed
replacing item 8870: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 03 METAMATERIAL.flac
Sending event: database_change
Sending event: item_removed
replacing item 8863: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 10 Doldrum Corp..flac
Sending event: database_change
Sending event: item_removed
replacing item 8872: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 01 B- Infinite Login.flac
Sending event: database_change
Sending event: item_removed
replacing item 8873: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 07 Conflict Minerals.flac
Sending event: database_change
Sending event: item_removed
replacing item 8861: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 14 Paradise Disc -芸能山城組.flac
Sending event: database_change
Sending event: item_removed
replacing item 8869: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 06 LSD Polyphony.flac
Sending event: database_change
Sending event: database_change
Sending event: item_removed
15 of 15 items replaced
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
added last.fm item genre (track): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
lyrics not found: Blank Banshee - Anxiety Online !
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
lyrics not found: Blank Banshee - Realization
added last.fm item genre (track): Techno
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
lyrics not found: Blank Banshee - Metal Rain
added last.fm item genre (track): Chillwave
Sending event: database_change
added last.fm item genre (track): Chillwave
Sending event: database_change
lyrics not found: Blank Banshee - Cyber Slums
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (track): Acid
lyrics not found: Blank Banshee - SOLAR PLEXUS
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
removing 15 old duplicated items
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/01 - Blank Banshee - B__ Start Up.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/02 - Blank Banshee - Wavestep.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/03 - Blank Banshee - Bathsalts.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/04 - Blank Banshee - Ammonia Clouds.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/05 - Blank Banshee - Venus Death Trap.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/06 - Blank Banshee - HYPR OBJCT.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/07 - Blank Banshee - Photosynthesis.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/08 - Blank Banshee - D P $ P L C.flac
lyrics not found: Blank Banshee - Java Clouds/مجسمه سازی مهتاب
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/09 - Blank Banshee - Dreamcast.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/10 - Blank Banshee - Cyber Zodiac.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/11 - Blank Banshee - Teen Pregnancy.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/12 - Blank Banshee - Purity Boys.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/13 - Blank Banshee - Visualization.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/14 - Blank Banshee - World Vision.flac
lyrics not found: Blank Banshee - Big Gulp
Sending event: database_change
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/15 - Blank Banshee - B__ Shut down_Depression.flac
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/11 - Blank Banshee - Teen Pregnancy.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/08 - Blank Banshee - D P $ P L C.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Techno -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/07 - Blank Banshee - Photosynthesis.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/12 - Blank Banshee - Purity Boys.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Chillwave -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/09 - Blank Banshee - Dreamcast.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Acid -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/13 - Blank Banshee - Visualization.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/01 - Blank Banshee - B__ Start Up.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/03 - Blank Banshee - Bathsalts.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Chillwave -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/10 - Blank Banshee - Cyber Zodiac.flac
Sending event: after_write
Sending event: item_copied
lyrics not found: Blank Banshee - B:/ Hidden/Reality
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/05 - Blank Banshee - Venus Death Trap.flac
Sending event: after_write
Sending event: item_copied
lyrics not found: Blank Banshee - Eco Zones
lyrics not found: Blank Banshee - METAMATERIAL
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/14 - Blank Banshee - World Vision.flac
Sending event: after_write
Sending event: item_copied
lyrics not found: Blank Banshee - Doldrum Corp.
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/04 - Blank Banshee - Ammonia Clouds.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/06 - Blank Banshee - HYPR OBJCT.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/15 - Blank Banshee - B__ Shut down_Depression.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/02 - Blank Banshee - Wavestep.flac
Sending event: after_write
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: import_task_files
Sending event: database_change
Sending event: album_imported
Embedding album art into Blank Banshee - Blank Banshee 0.
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/01 - Blank Banshee - B__ Start Up.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/02 - Blank Banshee - Wavestep.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/03 - Blank Banshee - Bathsalts.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/04 - Blank Banshee - Ammonia Clouds.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/05 - Blank Banshee - Venus Death Trap.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/06 - Blank Banshee - HYPR OBJCT.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/07 - Blank Banshee - Photosynthesis.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/08 - Blank Banshee - D P $ P L C.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/09 - Blank Banshee - Dreamcast.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/10 - Blank Banshee - Cyber Zodiac.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/11 - Blank Banshee - Teen Pregnancy.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/12 - Blank Banshee - Purity Boys.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/13 - Blank Banshee - Visualization.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/14 - Blank Banshee - World Vision.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/15 - Blank Banshee - B__ Shut down_Depression.flac
Sending event: after_write
lyrics not found: Blank Banshee - B:/ Infinite Login
lyrics not found: Blank Banshee - Conflict Minerals
lyrics not found: Blank Banshee - Paradise Disc /芸能山城組
lyrics not found: Blank Banshee - LSD Polyphony
Traceback (most recent call last):
File "/usr/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.7', 'console_scripts', 'beet')()
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 966, in main
_raw_main(args)
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 957, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 841, in import_func
import_files(lib, paths, query)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 813, in import_files
session.run()
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 290, in run
pl.run_parallel(QUEUE_SIZE)
File "/usr/lib/python2.7/site-packages/beets/util/pipeline.py", line 299, in run
out = self.coro.send(msg)
File "/usr/lib/python2.7/site-packages/beets/util/pipeline.py", line 181, in coro
func(*(args + (task,)))
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1077, in plugin_stage
func(session, task)
File "/usr/lib/python2.7/site-packages/beetsplug/fetchart.py", line 325, in fetch_art
path = art_for_album(task.album, task.paths, self.maxwidth, local)
File "/usr/lib/python2.7/site-packages/beetsplug/fetchart.py", line 240, in art_for_album
out = art_in_path(path, cover_names, cautious)
File "/usr/lib/python2.7/site-packages/beetsplug/fetchart.py", line 179, in art_in_path
images = sorted(images, key=lambda x: filename_priority(x, cover_names))
File "/usr/lib/python2.7/site-packages/beetsplug/fetchart.py", line 179, in <lambda>
images = sorted(images, key=lambda x: filename_priority(x, cover_names))
File "/usr/lib/python2.7/site-packages/beetsplug/fetchart.py", line 163, in filename_priority
return [idx for (idx, x) in enumerate(cover_names) if x in filename]
UnicodeDecodeError: 'ascii' codec can't decode byte 0xe2 in position 48: ordinal not in range(128)
|
UnicodeDecodeError
|
def convert(x):
if isinstance(x, unicode):
return x
elif isinstance(x, BASESTRING):
return x.decode("utf8", "ignore")
else:
self.fail("must be a list of strings", view, True)
|
def convert(self, value, view):
if not isinstance(value, self.typ):
self.fail(
"must be a {0}, not {1}".format(
self.typ.__name__,
type(value).__name__,
),
view,
True,
)
return value
|
https://github.com/beetbox/beets/issues/887
|
sombra/somasis:~/MusicIncoming/ λ beet -v imp Blank\ Banshee\ -\ Blank\ Banshee\ *
user configuration: /home/somasis/.config/beets/config.yaml
Sending event: pluginload
Sending event: library_opened
data directory: /home/somasis/.config/beets
library database: /home/somasis/Music/library.blb
library directory: /home/somasis/Music
error reading Blank Banshee - Blank Banshee 0 - QR-SCAN-Print.jpg: error reading /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - QR-SCAN-Print.jpg: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - QR-SCAN-Print.jpg
Sending event: import_task_start
error reading Blank Banshee - Blank Banshee 1 - FREE WALLPAPER√√√.jpg: error reading /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - FREE WALLPAPER√√√.jpg: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - FREE WALLPAPER√√√.jpg
error reading Blank Banshee - Blank Banshee 1 - �.jpg: error reading /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - �.jpg: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - �.jpg
error reading cover.jpg: error reading /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/cover.jpg: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/cover.jpg
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 01 B- Start Up.flac'
chroma: matched recordings [u'0d6b4c02-4252-40b8-a96b-ee92a7541782']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 02 Wavestep.flac'
chroma: matched recordings [u'e78f39e0-ec1e-4095-be6e-3c8ccf175d61']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 03 Bathsalts.flac'
chroma: matched recordings [u'1cd0bb95-49e3-4cea-960e-b821ba124fbe']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 04 Ammonia Clouds.flac'
chroma: matched recordings [u'32b1f0d0-ac89-45b9-af8b-2444558a4a05']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 05 Venus Death Trap.flac'
chroma: matched recordings [u'b9ae6039-6824-4fa9-82d2-22e391b0cb77']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 06 HYP\xe2\x98\xb0R OBJ\xe2\x98\xb0CT.flac'
chroma: matched recordings [u'cc9c4295-22d2-4f0f-a322-fd1225bf4c0e']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 07 Photosynthesis.flac'
chroma: matched recordings [u'6f95814c-9325-4637-a183-2ce16278c62e']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 08 D \xe2\x98\xb0 \xe2\x98\xb0 P $ P \xce\x9b C \xe2\x98\xb0.flac'
chroma: matched recordings [u'8f019dc2-1bfd-424d-aa25-975efc2d00f4']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 09 Dreamcast.flac'
chroma: matched recordings [u'aedd2a55-75cd-4bb2-acaf-968faae74e4e']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 10 Cyber Zodiac.flac'
chroma: matched recordings [u'814922c4-1706-45b9-99ab-33af5bca27eb']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 11 Teen Pregnancy.flac'
chroma: matched recordings [u'53bd825f-bce0-4bc2-b139-c32b791f61b2']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 12 Purity Boys.flac'
chroma: matched recordings [u'01553d3f-4acb-4d38-bd64-4bd20ea56380']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 13 Visualization.flac'
chroma: matched recordings [u'f54e6b16-f91c-4a53-bf67-28a7e3bca871']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 14 World Vision.flac'
chroma: matched recordings [u'512ec8a6-b9da-4e16-a8de-2077eb5194dd']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 15 B- Shut down-Depression.flac'
chroma: matched recordings [u'a1085c33-6b6b-463e-ae6a-c3203969f0d3']
Looking up: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0
Tagging Blank Banshee - Blank Banshee 0
No album IDs found.
Search terms: Blank Banshee - Blank Banshee 0
Album might be VA: False
acoustid album candidates: 1
Discogs API Error: 503 Service Unavailable (query: Blank Banshee Blank Banshee 0
Evaluating 6 candidates.
Candidate: Blank Banshee - Blank Banshee 0
Success. Distance: 0.000000
Candidate: Blank Banshee - Blank Banshee 1
Success. Distance: 0.714291
Candidate: Blank Banshee - Blank Banshee 1
Success. Distance: 0.648056
Candidate: Screaming Banshee Aircrew - When All Is Said and Done
Success. Distance: 0.818253
Candidate: Gerda Blank - gerda:BLANK
Success. Distance: 0.872529
Candidate: Blank Banshee - Blank Banshee 0
Duplicate.
Sending event: import_task_start
/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0 (15 items)
Tagging:
Blank Banshee - Blank Banshee 0
URL:
http://musicbrainz.org/release/9509a46c-b2d9-4ac5-b4a9-f219a18be3b4
(Similarity: 100.0%) (Digital Media, 2012, CA)
* D ☰ ☰ P $ P Λ C ☰ -> D ☰ ☰ P $ P Λ C ☰
Sending event: import_task_choice
This album is already in the library!
[S]kip new, Keep both, Remove old? chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 01 B- Infinite Login.flac'
chroma: matched recordings [u'7afcf15d-9b85-48d2-ad44-4d60307d91ea']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 02 Eco Zones.flac'
chroma: matched recordings [u'b2332635-4963-471e-bb33-40d3be821d0e', u'd4f6d63b-8ef0-4db3-a904-ec0a561f35c0']
rchroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 03 METAMATERIAL.flac'
chroma: matched recordings [u'b2abaf84-f060-4fae-94ab-1230a25987f8', u'c3374faa-89fc-4a18-a8f9-3c242e78fb2a']
Sending event: import_task_apply
15 of 15 items replaced
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
lyrics not found: Blank Banshee - Teen Pregnancy
lyrics not found: Blank Banshee - D ☰ ☰ P $ P Λ C ☰
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 04 Anxiety Online !.flac'
chroma: matched recordings [u'23fcbedc-c9ea-4465-9221-7018ee039241']
lyrics not found: Blank Banshee - Photosynthesis
lyrics not found: Blank Banshee - Purity Boys
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 05 Java Clouds-\xd9\x85\xd8\xac\xd8\xb3\xd9\x85\xd9\x87 \xd8\xb3\xd8\xa7\xd8\xb2\xdb\x8c \xd9\x85\xd9\x87\xd8\xaa\xd8\xa7\xd8\xa8.flac'
chroma: matched recordings [u'0ee06ce3-3fea-4c94-bae4-81acbbb5abf2', u'b683574d-2abe-4473-a624-a7d9ec8c867c']
lyrics not found: Blank Banshee - Dreamcast
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 06 LSD Polyphony.flac'
chroma: matched recordings [u'40277e03-7e07-4b30-970b-0979a45c7314', u'4d80187c-6660-4330-a179-a61f1cafad13']
lyrics not found: Blank Banshee - Visualization
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 07 Conflict Minerals.flac'
chroma: matched recordings [u'5dca7ab9-8374-4542-b080-563e9a84a71b', u'acd0e9ef-0a65-4de9-be50-4210e3b12aa0']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 08 Metal Rain.flac'
chroma: matched recordings [u'da81c6b9-73b3-4428-86ce-e0c958fd8388', u'e6553f3a-dd38-407a-8350-72f75bcc3239']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 09 Big Gulp.flac'
chroma: matched recordings [u'48bb5157-f145-4a26-91f2-1bbfe3843ad4', u'5bad5e8b-a738-45c0-9702-fa580716d85b']
lyrics not found: Blank Banshee - B:/ Start Up
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 10 Doldrum Corp..flac'
chroma: matched recordings [u'40783287-5e49-4986-8c2e-98fc76bc4575', u'd48b5188-8830-4f34-ab96-99bec735d592']
lyrics not found: Blank Banshee - Bathsalts
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 11 Cyber Slums.flac'
chroma: matched recordings [u'252b73ea-fa04-47d0-b030-ff074240db78', u'4fab878d-9656-4a3f-b8d1-195d374eabf2']
lyrics not found: Blank Banshee - Cyber Zodiac
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 12 Realization.flac'
chroma: matched recordings [u'79f44bff-d5be-46f0-89a5-e5c56efa3dda']
lyrics not found: Blank Banshee - Venus Death Trap
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 13 SOLAR PLEXUS.flac'
chroma: matched recordings [u'456e401f-e3cd-44c6-b7e8-fee3dce95785']
lyrics not found: Blank Banshee - World Vision
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 14 Paradise Disc -\xe8\x8a\xb8\xe8\x83\xbd\xe5\xb1\xb1\xe5\x9f\x8e\xe7\xb5\x84.flac'
chroma: matched recordings [u'45b77e22-08f8-4e21-bff7-60c32c445f66']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 15 B- Hidden-Reality.flac'
chroma: matched recordings [u'0b2b6785-e9b9-494a-a3fd-7c7c332af4e1']
Looking up: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1
Tagging Blank Banshee - Blank Banshee 1
No album IDs found.
Search terms: Blank Banshee - Blank Banshee 1
Album might be VA: False
lyrics not found: Blank Banshee - Ammonia Clouds
lyrics not found: Blank Banshee - HYP☰R OBJ☰CT
acoustid album candidates: 1
lyrics not found: Blank Banshee - B:/ Shut down/Depression
lyrics not found: Blank Banshee - Wavestep
fetchart: using fallback art file Blank Banshee - Blank Banshee 0 - QR-SCAN-Print.jpg
Discogs API Error: 404 Not Found (query: Blank Banshee Blank Banshee 1
Evaluating 6 candidates.
Candidate: Blank Banshee - Blank Banshee 1
Success. Distance: 0.234335
Candidate: Blank Banshee - Blank Banshee 1
Success. Distance: 0.000000
Candidate: Blank Banshee - Blank Banshee 0
Success. Distance: 0.634043
Candidate: Screaming Banshee Aircrew - When All Is Said and Done
added last.fm album genre (artist): Electronic
Success. Distance: 0.822202
Sending event: database_change
Candidate: Gerda Blank - gerda:BLANK
Success. Distance: 0.882945
Candidate: Blank Banshee - Blank Banshee 1
Duplicate.
/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1 (15 items)
Tagging:
Blank Banshee - Blank Banshee 1
URL:
http://musicbrainz.org/release/5e66f0d1-6925-44ab-b96c-da7c7bfd644c
(Similarity: 100.0%) (Digital Media, 2013)
Sending event: import_task_choice
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: import_task_apply
replacing item 8860: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 04 Anxiety Online !.flac
Sending event: database_change
Sending event: item_removed
replacing item 8864: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 12 Realization.flac
Sending event: database_change
Sending event: item_removed
replacing item 8862: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 08 Metal Rain.flac
Sending event: database_change
Sending event: item_removed
replacing item 8871: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 11 Cyber Slums.flac
Sending event: database_change
Sending event: item_removed
replacing item 8868: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 13 SOLAR PLEXUS.flac
Sending event: database_change
Sending event: item_removed
replacing item 8865: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 05 Java Clouds-مجسمه سازی مهتاب.flac
Sending event: database_change
Sending event: item_removed
replacing item 8867: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 09 Big Gulp.flac
Sending event: database_change
Sending event: item_removed
replacing item 8866: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 15 B- Hidden-Reality.flac
Sending event: database_change
Sending event: item_removed
replacing item 8874: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 02 Eco Zones.flac
Sending event: database_change
Sending event: item_removed
replacing item 8870: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 03 METAMATERIAL.flac
Sending event: database_change
Sending event: item_removed
replacing item 8863: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 10 Doldrum Corp..flac
Sending event: database_change
Sending event: item_removed
replacing item 8872: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 01 B- Infinite Login.flac
Sending event: database_change
Sending event: item_removed
replacing item 8873: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 07 Conflict Minerals.flac
Sending event: database_change
Sending event: item_removed
replacing item 8861: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 14 Paradise Disc -芸能山城組.flac
Sending event: database_change
Sending event: item_removed
replacing item 8869: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 06 LSD Polyphony.flac
Sending event: database_change
Sending event: database_change
Sending event: item_removed
15 of 15 items replaced
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
added last.fm item genre (track): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
lyrics not found: Blank Banshee - Anxiety Online !
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
lyrics not found: Blank Banshee - Realization
added last.fm item genre (track): Techno
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
lyrics not found: Blank Banshee - Metal Rain
added last.fm item genre (track): Chillwave
Sending event: database_change
added last.fm item genre (track): Chillwave
Sending event: database_change
lyrics not found: Blank Banshee - Cyber Slums
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (track): Acid
lyrics not found: Blank Banshee - SOLAR PLEXUS
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
removing 15 old duplicated items
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/01 - Blank Banshee - B__ Start Up.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/02 - Blank Banshee - Wavestep.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/03 - Blank Banshee - Bathsalts.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/04 - Blank Banshee - Ammonia Clouds.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/05 - Blank Banshee - Venus Death Trap.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/06 - Blank Banshee - HYPR OBJCT.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/07 - Blank Banshee - Photosynthesis.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/08 - Blank Banshee - D P $ P L C.flac
lyrics not found: Blank Banshee - Java Clouds/مجسمه سازی مهتاب
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/09 - Blank Banshee - Dreamcast.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/10 - Blank Banshee - Cyber Zodiac.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/11 - Blank Banshee - Teen Pregnancy.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/12 - Blank Banshee - Purity Boys.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/13 - Blank Banshee - Visualization.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/14 - Blank Banshee - World Vision.flac
lyrics not found: Blank Banshee - Big Gulp
Sending event: database_change
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/15 - Blank Banshee - B__ Shut down_Depression.flac
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/11 - Blank Banshee - Teen Pregnancy.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/08 - Blank Banshee - D P $ P L C.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Techno -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/07 - Blank Banshee - Photosynthesis.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/12 - Blank Banshee - Purity Boys.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Chillwave -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/09 - Blank Banshee - Dreamcast.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Acid -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/13 - Blank Banshee - Visualization.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/01 - Blank Banshee - B__ Start Up.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/03 - Blank Banshee - Bathsalts.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Chillwave -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/10 - Blank Banshee - Cyber Zodiac.flac
Sending event: after_write
Sending event: item_copied
lyrics not found: Blank Banshee - B:/ Hidden/Reality
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/05 - Blank Banshee - Venus Death Trap.flac
Sending event: after_write
Sending event: item_copied
lyrics not found: Blank Banshee - Eco Zones
lyrics not found: Blank Banshee - METAMATERIAL
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/14 - Blank Banshee - World Vision.flac
Sending event: after_write
Sending event: item_copied
lyrics not found: Blank Banshee - Doldrum Corp.
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/04 - Blank Banshee - Ammonia Clouds.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/06 - Blank Banshee - HYPR OBJCT.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/15 - Blank Banshee - B__ Shut down_Depression.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/02 - Blank Banshee - Wavestep.flac
Sending event: after_write
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: import_task_files
Sending event: database_change
Sending event: album_imported
Embedding album art into Blank Banshee - Blank Banshee 0.
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/01 - Blank Banshee - B__ Start Up.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/02 - Blank Banshee - Wavestep.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/03 - Blank Banshee - Bathsalts.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/04 - Blank Banshee - Ammonia Clouds.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/05 - Blank Banshee - Venus Death Trap.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/06 - Blank Banshee - HYPR OBJCT.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/07 - Blank Banshee - Photosynthesis.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/08 - Blank Banshee - D P $ P L C.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/09 - Blank Banshee - Dreamcast.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/10 - Blank Banshee - Cyber Zodiac.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/11 - Blank Banshee - Teen Pregnancy.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/12 - Blank Banshee - Purity Boys.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/13 - Blank Banshee - Visualization.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/14 - Blank Banshee - World Vision.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/15 - Blank Banshee - B__ Shut down_Depression.flac
Sending event: after_write
lyrics not found: Blank Banshee - B:/ Infinite Login
lyrics not found: Blank Banshee - Conflict Minerals
lyrics not found: Blank Banshee - Paradise Disc /芸能山城組
lyrics not found: Blank Banshee - LSD Polyphony
Traceback (most recent call last):
File "/usr/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.7', 'console_scripts', 'beet')()
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 966, in main
_raw_main(args)
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 957, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 841, in import_func
import_files(lib, paths, query)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 813, in import_files
session.run()
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 290, in run
pl.run_parallel(QUEUE_SIZE)
File "/usr/lib/python2.7/site-packages/beets/util/pipeline.py", line 299, in run
out = self.coro.send(msg)
File "/usr/lib/python2.7/site-packages/beets/util/pipeline.py", line 181, in coro
func(*(args + (task,)))
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1077, in plugin_stage
func(session, task)
File "/usr/lib/python2.7/site-packages/beetsplug/fetchart.py", line 325, in fetch_art
path = art_for_album(task.album, task.paths, self.maxwidth, local)
File "/usr/lib/python2.7/site-packages/beetsplug/fetchart.py", line 240, in art_for_album
out = art_in_path(path, cover_names, cautious)
File "/usr/lib/python2.7/site-packages/beetsplug/fetchart.py", line 179, in art_in_path
images = sorted(images, key=lambda x: filename_priority(x, cover_names))
File "/usr/lib/python2.7/site-packages/beetsplug/fetchart.py", line 179, in <lambda>
images = sorted(images, key=lambda x: filename_priority(x, cover_names))
File "/usr/lib/python2.7/site-packages/beetsplug/fetchart.py", line 163, in filename_priority
return [idx for (idx, x) in enumerate(cover_names) if x in filename]
UnicodeDecodeError: 'ascii' codec can't decode byte 0xe2 in position 48: ordinal not in range(128)
|
UnicodeDecodeError
|
def art_for_album(album, paths, maxwidth=None, local_only=False):
"""Given an Album object, returns a path to downloaded art for the
album (or None if no art is found). If `maxwidth`, then images are
resized to this maximum pixel size. If `local_only`, then only local
image files from the filesystem are returned; no network requests
are made.
"""
out = None
# Local art.
cover_names = config["fetchart"]["cover_names"].as_str_seq()
cover_names = map(util.bytestring_path, cover_names)
cautious = config["fetchart"]["cautious"].get(bool)
if paths:
for path in paths:
out = art_in_path(path, cover_names, cautious)
if out:
break
# Web art sources.
remote_priority = config["fetchart"]["remote_priority"].get(bool)
if not local_only and (remote_priority or not out):
for url in _source_urls(album):
if maxwidth:
url = ArtResizer.shared.proxy_url(maxwidth, url)
candidate = _fetch_image(url)
if candidate:
out = candidate
break
if maxwidth and out:
out = ArtResizer.shared.resize(maxwidth, out)
return out
|
def art_for_album(album, paths, maxwidth=None, local_only=False):
"""Given an Album object, returns a path to downloaded art for the
album (or None if no art is found). If `maxwidth`, then images are
resized to this maximum pixel size. If `local_only`, then only local
image files from the filesystem are returned; no network requests
are made.
"""
out = None
# Local art.
cover_names = config["fetchart"]["cover_names"].as_str_seq()
cautious = config["fetchart"]["cautious"].get(bool)
if paths:
for path in paths:
out = art_in_path(path, cover_names, cautious)
if out:
break
# Web art sources.
remote_priority = config["fetchart"]["remote_priority"].get(bool)
if not local_only and (remote_priority or not out):
for url in _source_urls(album):
if maxwidth:
url = ArtResizer.shared.proxy_url(maxwidth, url)
candidate = _fetch_image(url)
if candidate:
out = candidate
break
if maxwidth and out:
out = ArtResizer.shared.resize(maxwidth, out)
return out
|
https://github.com/beetbox/beets/issues/887
|
sombra/somasis:~/MusicIncoming/ λ beet -v imp Blank\ Banshee\ -\ Blank\ Banshee\ *
user configuration: /home/somasis/.config/beets/config.yaml
Sending event: pluginload
Sending event: library_opened
data directory: /home/somasis/.config/beets
library database: /home/somasis/Music/library.blb
library directory: /home/somasis/Music
error reading Blank Banshee - Blank Banshee 0 - QR-SCAN-Print.jpg: error reading /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - QR-SCAN-Print.jpg: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - QR-SCAN-Print.jpg
Sending event: import_task_start
error reading Blank Banshee - Blank Banshee 1 - FREE WALLPAPER√√√.jpg: error reading /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - FREE WALLPAPER√√√.jpg: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - FREE WALLPAPER√√√.jpg
error reading Blank Banshee - Blank Banshee 1 - �.jpg: error reading /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - �.jpg: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - �.jpg
error reading cover.jpg: error reading /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/cover.jpg: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/cover.jpg
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 01 B- Start Up.flac'
chroma: matched recordings [u'0d6b4c02-4252-40b8-a96b-ee92a7541782']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 02 Wavestep.flac'
chroma: matched recordings [u'e78f39e0-ec1e-4095-be6e-3c8ccf175d61']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 03 Bathsalts.flac'
chroma: matched recordings [u'1cd0bb95-49e3-4cea-960e-b821ba124fbe']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 04 Ammonia Clouds.flac'
chroma: matched recordings [u'32b1f0d0-ac89-45b9-af8b-2444558a4a05']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 05 Venus Death Trap.flac'
chroma: matched recordings [u'b9ae6039-6824-4fa9-82d2-22e391b0cb77']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 06 HYP\xe2\x98\xb0R OBJ\xe2\x98\xb0CT.flac'
chroma: matched recordings [u'cc9c4295-22d2-4f0f-a322-fd1225bf4c0e']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 07 Photosynthesis.flac'
chroma: matched recordings [u'6f95814c-9325-4637-a183-2ce16278c62e']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 08 D \xe2\x98\xb0 \xe2\x98\xb0 P $ P \xce\x9b C \xe2\x98\xb0.flac'
chroma: matched recordings [u'8f019dc2-1bfd-424d-aa25-975efc2d00f4']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 09 Dreamcast.flac'
chroma: matched recordings [u'aedd2a55-75cd-4bb2-acaf-968faae74e4e']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 10 Cyber Zodiac.flac'
chroma: matched recordings [u'814922c4-1706-45b9-99ab-33af5bca27eb']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 11 Teen Pregnancy.flac'
chroma: matched recordings [u'53bd825f-bce0-4bc2-b139-c32b791f61b2']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 12 Purity Boys.flac'
chroma: matched recordings [u'01553d3f-4acb-4d38-bd64-4bd20ea56380']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 13 Visualization.flac'
chroma: matched recordings [u'f54e6b16-f91c-4a53-bf67-28a7e3bca871']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 14 World Vision.flac'
chroma: matched recordings [u'512ec8a6-b9da-4e16-a8de-2077eb5194dd']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0/Blank Banshee - Blank Banshee 0 - 15 B- Shut down-Depression.flac'
chroma: matched recordings [u'a1085c33-6b6b-463e-ae6a-c3203969f0d3']
Looking up: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0
Tagging Blank Banshee - Blank Banshee 0
No album IDs found.
Search terms: Blank Banshee - Blank Banshee 0
Album might be VA: False
acoustid album candidates: 1
Discogs API Error: 503 Service Unavailable (query: Blank Banshee Blank Banshee 0
Evaluating 6 candidates.
Candidate: Blank Banshee - Blank Banshee 0
Success. Distance: 0.000000
Candidate: Blank Banshee - Blank Banshee 1
Success. Distance: 0.714291
Candidate: Blank Banshee - Blank Banshee 1
Success. Distance: 0.648056
Candidate: Screaming Banshee Aircrew - When All Is Said and Done
Success. Distance: 0.818253
Candidate: Gerda Blank - gerda:BLANK
Success. Distance: 0.872529
Candidate: Blank Banshee - Blank Banshee 0
Duplicate.
Sending event: import_task_start
/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 0 (15 items)
Tagging:
Blank Banshee - Blank Banshee 0
URL:
http://musicbrainz.org/release/9509a46c-b2d9-4ac5-b4a9-f219a18be3b4
(Similarity: 100.0%) (Digital Media, 2012, CA)
* D ☰ ☰ P $ P Λ C ☰ -> D ☰ ☰ P $ P Λ C ☰
Sending event: import_task_choice
This album is already in the library!
[S]kip new, Keep both, Remove old? chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 01 B- Infinite Login.flac'
chroma: matched recordings [u'7afcf15d-9b85-48d2-ad44-4d60307d91ea']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 02 Eco Zones.flac'
chroma: matched recordings [u'b2332635-4963-471e-bb33-40d3be821d0e', u'd4f6d63b-8ef0-4db3-a904-ec0a561f35c0']
rchroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 03 METAMATERIAL.flac'
chroma: matched recordings [u'b2abaf84-f060-4fae-94ab-1230a25987f8', u'c3374faa-89fc-4a18-a8f9-3c242e78fb2a']
Sending event: import_task_apply
15 of 15 items replaced
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
lyrics not found: Blank Banshee - Teen Pregnancy
lyrics not found: Blank Banshee - D ☰ ☰ P $ P Λ C ☰
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 04 Anxiety Online !.flac'
chroma: matched recordings [u'23fcbedc-c9ea-4465-9221-7018ee039241']
lyrics not found: Blank Banshee - Photosynthesis
lyrics not found: Blank Banshee - Purity Boys
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 05 Java Clouds-\xd9\x85\xd8\xac\xd8\xb3\xd9\x85\xd9\x87 \xd8\xb3\xd8\xa7\xd8\xb2\xdb\x8c \xd9\x85\xd9\x87\xd8\xaa\xd8\xa7\xd8\xa8.flac'
chroma: matched recordings [u'0ee06ce3-3fea-4c94-bae4-81acbbb5abf2', u'b683574d-2abe-4473-a624-a7d9ec8c867c']
lyrics not found: Blank Banshee - Dreamcast
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 06 LSD Polyphony.flac'
chroma: matched recordings [u'40277e03-7e07-4b30-970b-0979a45c7314', u'4d80187c-6660-4330-a179-a61f1cafad13']
lyrics not found: Blank Banshee - Visualization
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 07 Conflict Minerals.flac'
chroma: matched recordings [u'5dca7ab9-8374-4542-b080-563e9a84a71b', u'acd0e9ef-0a65-4de9-be50-4210e3b12aa0']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 08 Metal Rain.flac'
chroma: matched recordings [u'da81c6b9-73b3-4428-86ce-e0c958fd8388', u'e6553f3a-dd38-407a-8350-72f75bcc3239']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 09 Big Gulp.flac'
chroma: matched recordings [u'48bb5157-f145-4a26-91f2-1bbfe3843ad4', u'5bad5e8b-a738-45c0-9702-fa580716d85b']
lyrics not found: Blank Banshee - B:/ Start Up
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 10 Doldrum Corp..flac'
chroma: matched recordings [u'40783287-5e49-4986-8c2e-98fc76bc4575', u'd48b5188-8830-4f34-ab96-99bec735d592']
lyrics not found: Blank Banshee - Bathsalts
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 11 Cyber Slums.flac'
chroma: matched recordings [u'252b73ea-fa04-47d0-b030-ff074240db78', u'4fab878d-9656-4a3f-b8d1-195d374eabf2']
lyrics not found: Blank Banshee - Cyber Zodiac
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 12 Realization.flac'
chroma: matched recordings [u'79f44bff-d5be-46f0-89a5-e5c56efa3dda']
lyrics not found: Blank Banshee - Venus Death Trap
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 13 SOLAR PLEXUS.flac'
chroma: matched recordings [u'456e401f-e3cd-44c6-b7e8-fee3dce95785']
lyrics not found: Blank Banshee - World Vision
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 14 Paradise Disc -\xe8\x8a\xb8\xe8\x83\xbd\xe5\xb1\xb1\xe5\x9f\x8e\xe7\xb5\x84.flac'
chroma: matched recordings [u'45b77e22-08f8-4e21-bff7-60c32c445f66']
chroma: fingerprinted '/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 15 B- Hidden-Reality.flac'
chroma: matched recordings [u'0b2b6785-e9b9-494a-a3fd-7c7c332af4e1']
Looking up: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1
Tagging Blank Banshee - Blank Banshee 1
No album IDs found.
Search terms: Blank Banshee - Blank Banshee 1
Album might be VA: False
lyrics not found: Blank Banshee - Ammonia Clouds
lyrics not found: Blank Banshee - HYP☰R OBJ☰CT
acoustid album candidates: 1
lyrics not found: Blank Banshee - B:/ Shut down/Depression
lyrics not found: Blank Banshee - Wavestep
fetchart: using fallback art file Blank Banshee - Blank Banshee 0 - QR-SCAN-Print.jpg
Discogs API Error: 404 Not Found (query: Blank Banshee Blank Banshee 1
Evaluating 6 candidates.
Candidate: Blank Banshee - Blank Banshee 1
Success. Distance: 0.234335
Candidate: Blank Banshee - Blank Banshee 1
Success. Distance: 0.000000
Candidate: Blank Banshee - Blank Banshee 0
Success. Distance: 0.634043
Candidate: Screaming Banshee Aircrew - When All Is Said and Done
added last.fm album genre (artist): Electronic
Success. Distance: 0.822202
Sending event: database_change
Candidate: Gerda Blank - gerda:BLANK
Success. Distance: 0.882945
Candidate: Blank Banshee - Blank Banshee 1
Duplicate.
/home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1 (15 items)
Tagging:
Blank Banshee - Blank Banshee 1
URL:
http://musicbrainz.org/release/5e66f0d1-6925-44ab-b96c-da7c7bfd644c
(Similarity: 100.0%) (Digital Media, 2013)
Sending event: import_task_choice
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: import_task_apply
replacing item 8860: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 04 Anxiety Online !.flac
Sending event: database_change
Sending event: item_removed
replacing item 8864: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 12 Realization.flac
Sending event: database_change
Sending event: item_removed
replacing item 8862: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 08 Metal Rain.flac
Sending event: database_change
Sending event: item_removed
replacing item 8871: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 11 Cyber Slums.flac
Sending event: database_change
Sending event: item_removed
replacing item 8868: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 13 SOLAR PLEXUS.flac
Sending event: database_change
Sending event: item_removed
replacing item 8865: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 05 Java Clouds-مجسمه سازی مهتاب.flac
Sending event: database_change
Sending event: item_removed
replacing item 8867: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 09 Big Gulp.flac
Sending event: database_change
Sending event: item_removed
replacing item 8866: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 15 B- Hidden-Reality.flac
Sending event: database_change
Sending event: item_removed
replacing item 8874: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 02 Eco Zones.flac
Sending event: database_change
Sending event: item_removed
replacing item 8870: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 03 METAMATERIAL.flac
Sending event: database_change
Sending event: item_removed
replacing item 8863: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 10 Doldrum Corp..flac
Sending event: database_change
Sending event: item_removed
replacing item 8872: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 01 B- Infinite Login.flac
Sending event: database_change
Sending event: item_removed
replacing item 8873: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 07 Conflict Minerals.flac
Sending event: database_change
Sending event: item_removed
replacing item 8861: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 14 Paradise Disc -芸能山城組.flac
Sending event: database_change
Sending event: item_removed
replacing item 8869: /home/somasis/MusicIncoming/Blank Banshee - Blank Banshee 1/Blank Banshee - Blank Banshee 1 - 06 LSD Polyphony.flac
Sending event: database_change
Sending event: database_change
Sending event: item_removed
15 of 15 items replaced
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
added last.fm item genre (track): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
lyrics not found: Blank Banshee - Anxiety Online !
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
lyrics not found: Blank Banshee - Realization
added last.fm item genre (track): Techno
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
lyrics not found: Blank Banshee - Metal Rain
added last.fm item genre (track): Chillwave
Sending event: database_change
added last.fm item genre (track): Chillwave
Sending event: database_change
lyrics not found: Blank Banshee - Cyber Slums
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (track): Acid
lyrics not found: Blank Banshee - SOLAR PLEXUS
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
added last.fm item genre (artist): Electronic
Sending event: database_change
removing 15 old duplicated items
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/01 - Blank Banshee - B__ Start Up.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/02 - Blank Banshee - Wavestep.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/03 - Blank Banshee - Bathsalts.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/04 - Blank Banshee - Ammonia Clouds.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/05 - Blank Banshee - Venus Death Trap.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/06 - Blank Banshee - HYPR OBJCT.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/07 - Blank Banshee - Photosynthesis.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/08 - Blank Banshee - D P $ P L C.flac
lyrics not found: Blank Banshee - Java Clouds/مجسمه سازی مهتاب
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/09 - Blank Banshee - Dreamcast.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/10 - Blank Banshee - Cyber Zodiac.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/11 - Blank Banshee - Teen Pregnancy.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/12 - Blank Banshee - Purity Boys.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/13 - Blank Banshee - Visualization.flac
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/14 - Blank Banshee - World Vision.flac
lyrics not found: Blank Banshee - Big Gulp
Sending event: database_change
Sending event: database_change
Sending event: item_removed
deleting duplicate /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/15 - Blank Banshee - B__ Shut down_Depression.flac
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/11 - Blank Banshee - Teen Pregnancy.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/08 - Blank Banshee - D P $ P L C.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Techno -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/07 - Blank Banshee - Photosynthesis.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/12 - Blank Banshee - Purity Boys.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Chillwave -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/09 - Blank Banshee - Dreamcast.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Acid -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/13 - Blank Banshee - Visualization.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/01 - Blank Banshee - B__ Start Up.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/03 - Blank Banshee - Bathsalts.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Chillwave -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/10 - Blank Banshee - Cyber Zodiac.flac
Sending event: after_write
Sending event: item_copied
lyrics not found: Blank Banshee - B:/ Hidden/Reality
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/05 - Blank Banshee - Venus Death Trap.flac
Sending event: after_write
Sending event: item_copied
lyrics not found: Blank Banshee - Eco Zones
lyrics not found: Blank Banshee - METAMATERIAL
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/14 - Blank Banshee - World Vision.flac
Sending event: after_write
Sending event: item_copied
lyrics not found: Blank Banshee - Doldrum Corp.
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/04 - Blank Banshee - Ammonia Clouds.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/06 - Blank Banshee - HYPR OBJCT.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/15 - Blank Banshee - B__ Shut down_Depression.flac
Sending event: after_write
Sending event: item_copied
Sending event: database_change
Sending event: database_change
Sending event: write
[zero] genre: Electronic -> None
[zero] day: 1 -> None
[zero] month: 9 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/02 - Blank Banshee - Wavestep.flac
Sending event: after_write
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: database_change
Sending event: import_task_files
Sending event: database_change
Sending event: album_imported
Embedding album art into Blank Banshee - Blank Banshee 0.
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/01 - Blank Banshee - B__ Start Up.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/02 - Blank Banshee - Wavestep.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/03 - Blank Banshee - Bathsalts.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/04 - Blank Banshee - Ammonia Clouds.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/05 - Blank Banshee - Venus Death Trap.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/06 - Blank Banshee - HYPR OBJCT.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/07 - Blank Banshee - Photosynthesis.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/08 - Blank Banshee - D P $ P L C.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/09 - Blank Banshee - Dreamcast.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/10 - Blank Banshee - Cyber Zodiac.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/11 - Blank Banshee - Teen Pregnancy.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/12 - Blank Banshee - Purity Boys.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/13 - Blank Banshee - Visualization.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/14 - Blank Banshee - World Vision.flac
Sending event: after_write
Sending event: write
[zero] genre: -> None
[zero] day: 0 -> None
[zero] month: 0 -> None
auto-scrubbing /home/somasis/Music/Blank Banshee - Blank Banshee 0 (2012 - FLAC)/15 - Blank Banshee - B__ Shut down_Depression.flac
Sending event: after_write
lyrics not found: Blank Banshee - B:/ Infinite Login
lyrics not found: Blank Banshee - Conflict Minerals
lyrics not found: Blank Banshee - Paradise Disc /芸能山城組
lyrics not found: Blank Banshee - LSD Polyphony
Traceback (most recent call last):
File "/usr/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.7', 'console_scripts', 'beet')()
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 966, in main
_raw_main(args)
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 957, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 841, in import_func
import_files(lib, paths, query)
File "/usr/lib/python2.7/site-packages/beets/ui/commands.py", line 813, in import_files
session.run()
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 290, in run
pl.run_parallel(QUEUE_SIZE)
File "/usr/lib/python2.7/site-packages/beets/util/pipeline.py", line 299, in run
out = self.coro.send(msg)
File "/usr/lib/python2.7/site-packages/beets/util/pipeline.py", line 181, in coro
func(*(args + (task,)))
File "/usr/lib/python2.7/site-packages/beets/importer.py", line 1077, in plugin_stage
func(session, task)
File "/usr/lib/python2.7/site-packages/beetsplug/fetchart.py", line 325, in fetch_art
path = art_for_album(task.album, task.paths, self.maxwidth, local)
File "/usr/lib/python2.7/site-packages/beetsplug/fetchart.py", line 240, in art_for_album
out = art_in_path(path, cover_names, cautious)
File "/usr/lib/python2.7/site-packages/beetsplug/fetchart.py", line 179, in art_in_path
images = sorted(images, key=lambda x: filename_priority(x, cover_names))
File "/usr/lib/python2.7/site-packages/beetsplug/fetchart.py", line 179, in <lambda>
images = sorted(images, key=lambda x: filename_priority(x, cover_names))
File "/usr/lib/python2.7/site-packages/beetsplug/fetchart.py", line 163, in filename_priority
return [idx for (idx, x) in enumerate(cover_names) if x in filename]
UnicodeDecodeError: 'ascii' codec can't decode byte 0xe2 in position 48: ordinal not in range(128)
|
UnicodeDecodeError
|
def convert_item(dest_dir, keep_new, path_formats, command, ext, pretend=False):
while True:
item = yield
dest = item.destination(basedir=dest_dir, path_formats=path_formats)
# When keeping the new file in the library, we first move the
# current (pristine) file to the destination. We'll then copy it
# back to its old path or transcode it to a new path.
if keep_new:
original = dest
converted = replace_ext(item.path, ext)
else:
original = item.path
dest = replace_ext(dest, ext)
converted = dest
# Ensure that only one thread tries to create directories at a
# time. (The existence check is not atomic with the directory
# creation inside this function.)
if not pretend:
with _fs_lock:
util.mkdirall(dest)
if os.path.exists(util.syspath(dest)):
log.info(
"Skipping {0} (target file exists)".format(
util.displayable_path(item.path)
)
)
continue
if keep_new:
if pretend:
log.info(
"mv {0} {1}".format(
util.displayable_path(item.path),
util.displayable_path(original),
)
)
else:
log.info("Moving to {0}".format(util.displayable_path(original)))
util.move(item.path, original)
if not should_transcode(item):
if pretend:
log.info(
"cp {0} {1}".format(
util.displayable_path(original),
util.displayable_path(converted),
)
)
else:
# No transcoding necessary.
log.info("Copying {0}".format(util.displayable_path(item.path)))
util.copy(original, converted)
else:
try:
encode(command, original, converted, pretend)
except subprocess.CalledProcessError:
continue
if pretend:
continue
# Write tags from the database to the converted file.
item.try_write(path=converted)
if keep_new:
# If we're keeping the transcoded file, read it again (after
# writing) to get new bitrate, duration, etc.
item.path = converted
item.read()
item.store() # Store new path and audio data.
if config["convert"]["embed"]:
album = item.get_album()
if album and album.artpath:
embed_item(item, album.artpath, itempath=converted)
plugins.send("after_convert", item=item, dest=dest, keepnew=keep_new)
|
def convert_item(dest_dir, keep_new, path_formats, command, ext, pretend=False):
while True:
item = yield
dest = item.destination(basedir=dest_dir, path_formats=path_formats)
# When keeping the new file in the library, we first move the
# current (pristine) file to the destination. We'll then copy it
# back to its old path or transcode it to a new path.
if keep_new:
original = dest
converted = replace_ext(item.path, ext)
else:
original = item.path
dest = replace_ext(dest, ext)
converted = dest
# Ensure that only one thread tries to create directories at a
# time. (The existence check is not atomic with the directory
# creation inside this function.)
if not pretend:
with _fs_lock:
util.mkdirall(dest)
if os.path.exists(util.syspath(dest)):
log.info(
"Skipping {0} (target file exists)".format(
util.displayable_path(item.path)
)
)
continue
if keep_new:
if pretend:
log.info(
"mv {0} {1}".format(
util.displayable_path(item.path),
util.displayable_path(original),
)
)
else:
log.info("Moving to {0}".format(util.displayable_path(original)))
util.move(item.path, original)
if not should_transcode(item):
if pretend:
log.info(
"cp {0} {1}".format(
util.displayable_path(original),
util.displayable_path(converted),
)
)
else:
# No transcoding necessary.
log.info("Copying {0}".format(util.displayable_path(item.path)))
util.copy(original, converted)
else:
try:
encode(command, original, converted, pretend)
except subprocess.CalledProcessError:
continue
if pretend:
continue
# Write tags from the database to the converted file.
item.write(path=converted)
if keep_new:
# If we're keeping the transcoded file, read it again (after
# writing) to get new bitrate, duration, etc.
item.path = converted
item.read()
item.store() # Store new path and audio data.
if config["convert"]["embed"]:
album = item.get_album()
if album and album.artpath:
embed_item(item, album.artpath, itempath=converted)
plugins.send("after_convert", item=item, dest=dest, keepnew=keep_new)
|
https://github.com/beetbox/beets/issues/878
|
Traceback (most recent call last):
File "/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.6', 'console_scripts', 'beet')()
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 967, in main
_raw_main(args)
File "/usr/lib/python2.7/site-packages/beets/ui/__init__.py", line 958, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/lib/python2.7/site-packages/beetsplug/convert.py", line 251, in convert_func
pipe.run_parallel()
File "/usr/lib/python2.7/site-packages/beets/util/pipeline.py", line 345, in run
self.coro.send(msg)
File "/usr/lib/python2.7/site-packages/beetsplug/convert.py", line 179, in convert_item
item.write(path=converted)
File "/usr/lib/python2.7/site-packages/beets/library.py", line 369, in write
mediafile = MediaFile(path)
File "/usr/lib/python2.7/site-packages/beets/mediafile.py", line 1239, in __init__
raise FileTypeError('file type unsupported by Mutagen')
beets.mediafile.FileTypeError: file type unsupported by Mutagen
|
beets.mediafile.FileTypeError
|
def _build_m3u_filename(basename):
"""Builds unique m3u filename by appending given basename to current
date."""
basename = re.sub(r"[\s,/\\'\"]", "_", basename)
date = datetime.datetime.now().strftime("%Y%m%d_%Hh%M")
path = normpath(
os.path.join(
config["importfeeds"]["dir"].as_filename(), date + "_" + basename + ".m3u"
)
)
return path
|
def _build_m3u_filename(basename):
"""Builds unique m3u filename by appending given basename to current
date."""
basename = re.sub(r"[\s,'\"]", "_", basename)
date = datetime.datetime.now().strftime("%Y%m%d_%Hh%M")
path = normpath(
os.path.join(
config["importfeeds"]["dir"].as_filename(), date + "_" + basename + ".m3u"
)
)
return path
|
https://github.com/beetbox/beets/issues/610
|
Traceback (most recent call last):
File "/Library/Frameworks/Python.framework/Versions/2.7/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.3', 'console_scripts', 'beet')()
File "/Users/drbob/Development/beets/beets/ui/__init__.py", line 946, in main
_raw_main(args)
File "/Users/drbob/Development/beets/beets/ui/__init__.py", line 937, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/Users/drbob/Development/beets/beets/ui/commands.py", line 843, in import_func
import_files(lib, paths, query)
File "/Users/drbob/Development/beets/beets/ui/commands.py", line 779, in import_files
session.run()
File "/Users/drbob/Development/beets/beets/importer.py", line 347, in run
pl.run_parallel(QUEUE_SIZE)
File "/Users/drbob/Development/beets/beets/util/pipeline.py", line 288, in run
self.coro.send(msg)
File "/Users/drbob/Development/beets/beets/importer.py", line 927, in finalize
lib=session.lib, album=album)
File "/Users/drbob/Development/beets/beets/plugins.py", line 372, in send
handler(**arguments)
File "/Users/drbob/Development/beets/beetsplug/importfeeds.py", line 127, in album_imported
_record_items(lib, album.album, album.items())
File "/Users/drbob/Development/beets/beetsplug/importfeeds.py", line 112, in _record_items
_write_m3u(m3u_path, paths)
File "/Users/drbob/Development/beets/beetsplug/importfeeds.py", line 81, in _write_m3u
with open(syspath(m3u_path), 'a') as f:
IOError: [Errno 2] No such file or directory: '/Users/drbob/Music/beets/Imports/20140313_16h26_Poor_Boy_/_Lucky_Man.m3u'
|
IOError
|
def save_history(self):
"""Save the directory in the history for incremental imports."""
if self.is_album and self.paths and not self.sentinel:
history_add(self.paths)
|
def save_history(self):
"""Save the directory in the history for incremental imports."""
if self.is_album and not self.sentinel:
history_add(self.paths)
|
https://github.com/beetbox/beets/issues/570
|
Traceback (most recent call last):
File "/usr/local/bin/beet", line 9, in <module>
load_entry_point('beets==1.3.3', 'console_scripts', 'beet')()
File "/usr/local/lib/python2.7/dist-packages/beets/ui/__init__.py", line 946, in main
_raw_main(args)
File "/usr/local/lib/python2.7/dist-packages/beets/ui/__init__.py", line 937, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/usr/local/lib/python2.7/dist-packages/beets/ui/commands.py", line 843, in import_func
import_files(lib, paths, query)
File "/usr/local/lib/python2.7/dist-packages/beets/ui/commands.py", line 779, in import_files
session.run()
File "/usr/local/lib/python2.7/dist-packages/beets/importer.py", line 347, in run
pl.run_parallel(QUEUE_SIZE)
File "/usr/local/lib/python2.7/dist-packages/beets/util/pipeline.py", line 288, in run
self.coro.send(msg)
File "/usr/local/lib/python2.7/dist-packages/beets/importer.py", line 952, in finalize
task.save_history()
File "/usr/local/lib/python2.7/dist-packages/beets/importer.py", line 458, in save_history
history_add(self.paths)
File "/usr/local/lib/python2.7/dist-packages/beets/importer.py", line 210, in history_add
state[HISTORY_KEY].add(tuple(paths))
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def do_i_hate_this(cls, task, action_patterns):
"""Process group of patterns (warn or skip) and returns True if
task is hated and not whitelisted.
"""
if action_patterns:
for query_string in action_patterns:
query = None
if task.is_album:
query = get_query(query_string, Album)
else:
query = get_query(query_string, Item)
if any(query.match(item) for item in task.imported_items()):
return True
return False
|
def do_i_hate_this(cls, task, action_patterns):
"""Process group of patterns (warn or skip) and returns True if
task is hated and not whitelisted.
"""
if action_patterns:
for query_string in action_patterns:
query = None
if task.is_album:
query = get_query(query_string, Album)
else:
query = get_query(query_string, Item)
if any(query.match(item) for item in task.items):
return True
return False
|
https://github.com/beetbox/beets/issues/411
|
Traceback (most recent call last):
File "/usr/local/bin/beet", line 8, in <module>
load_entry_point('beets==1.3.0', 'console_scripts', 'beet')()
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/ui/__init__.py", line 776, in main
_raw_main(args)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/ui/__init__.py", line 768, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/ui/commands.py", line 845, in import_func
import_files(lib, paths, query)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/ui/commands.py", line 783, in import_files
session.run()
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/importer.py", line 341, in run
pl.run_parallel(QUEUE_SIZE)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/util/pipeline.py", line 243, in run
out = self.coro.send(msg)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/importer.py", line 952, in item_query
plugins.send('import_task_choice', session=session, task=task)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/plugins.py", line 372, in send
handler(**arguments)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beetsplug/ihate.py", line 104, in import_task_choice_event
self.config['skip_whitelist'].as_str_seq()):
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beetsplug/ihate.py", line 79, in do_i_hate_this
if not hate and task.cur_album and album_patterns:
AttributeError: 'ImportTask' object has no attribute 'cur_album'
|
AttributeError
|
def import_task_choice_event(self, session, task):
skip_queries = self.config["skip"].as_str_seq()
warn_queries = self.config["warn"].as_str_seq()
if task.choice_flag == action.APPLY:
if skip_queries or warn_queries:
self._log.debug("[ihate] processing your hate")
if self.do_i_hate_this(task, skip_queries):
task.choice_flag = action.SKIP
self._log.info("[ihate] skipped: {0}".format(summary(task)))
return
if self.do_i_hate_this(task, warn_queries):
self._log.info("[ihate] you maybe hate this: {0}".format(summary(task)))
else:
self._log.debug("[ihate] nothing to do")
else:
self._log.debug("[ihate] user made a decision, nothing to do")
|
def import_task_choice_event(self, session, task):
skip_queries = self.config["skip"].as_str_seq()
warn_queries = self.config["warn"].as_str_seq()
if task.choice_flag == action.APPLY:
if skip_queries or warn_queries:
self._log.debug("[ihate] processing your hate")
if self.do_i_hate_this(task, skip_queries):
task.choice_flag = action.SKIP
self._log.info(
"[ihate] skipped: {0} - {1}".format(task.cur_artist, task.cur_album)
)
return
if self.do_i_hate_this(task, warn_queries):
self._log.info(
"[ihate] you maybe hate this: {0} - {1}".format(
task.cur_artist, task.cur_album
)
)
else:
self._log.debug("[ihate] nothing to do")
else:
self._log.debug("[ihate] user made a decision, nothing to do")
|
https://github.com/beetbox/beets/issues/411
|
Traceback (most recent call last):
File "/usr/local/bin/beet", line 8, in <module>
load_entry_point('beets==1.3.0', 'console_scripts', 'beet')()
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/ui/__init__.py", line 776, in main
_raw_main(args)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/ui/__init__.py", line 768, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/ui/commands.py", line 845, in import_func
import_files(lib, paths, query)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/ui/commands.py", line 783, in import_files
session.run()
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/importer.py", line 341, in run
pl.run_parallel(QUEUE_SIZE)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/util/pipeline.py", line 243, in run
out = self.coro.send(msg)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/importer.py", line 952, in item_query
plugins.send('import_task_choice', session=session, task=task)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/plugins.py", line 372, in send
handler(**arguments)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beetsplug/ihate.py", line 104, in import_task_choice_event
self.config['skip_whitelist'].as_str_seq()):
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beetsplug/ihate.py", line 79, in do_i_hate_this
if not hate and task.cur_album and album_patterns:
AttributeError: 'ImportTask' object has no attribute 'cur_album'
|
AttributeError
|
def do_i_hate_this(
cls, task, genre_patterns, artist_patterns, album_patterns, whitelist_patterns
):
"""Process group of patterns (warn or skip) and returns True if
task is hated and not whitelisted.
"""
hate = False
try:
genre = task.items[0].genre
except:
genre = ""
if genre and genre_patterns:
if cls.match_patterns(genre, genre_patterns):
hate = True
if not hate and getattr(task, "cur_album", None) and album_patterns:
if cls.match_patterns(task.cur_album, album_patterns):
hate = True
if not hate and getattr(task, "cur_artist", None) and artist_patterns:
if cls.match_patterns(task.cur_artist, artist_patterns):
hate = True
if hate and whitelist_patterns:
if cls.match_patterns(task.cur_artist, whitelist_patterns):
hate = False
return hate
|
def do_i_hate_this(
cls, task, genre_patterns, artist_patterns, album_patterns, whitelist_patterns
):
"""Process group of patterns (warn or skip) and returns True if
task is hated and not whitelisted.
"""
hate = False
try:
genre = task.items[0].genre
except:
genre = ""
if genre and genre_patterns:
if cls.match_patterns(genre, genre_patterns):
hate = True
if not hate and task.cur_album and album_patterns:
if cls.match_patterns(task.cur_album, album_patterns):
hate = True
if not hate and task.cur_artist and artist_patterns:
if cls.match_patterns(task.cur_artist, artist_patterns):
hate = True
if hate and whitelist_patterns:
if cls.match_patterns(task.cur_artist, whitelist_patterns):
hate = False
return hate
|
https://github.com/beetbox/beets/issues/411
|
Traceback (most recent call last):
File "/usr/local/bin/beet", line 8, in <module>
load_entry_point('beets==1.3.0', 'console_scripts', 'beet')()
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/ui/__init__.py", line 776, in main
_raw_main(args)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/ui/__init__.py", line 768, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/ui/commands.py", line 845, in import_func
import_files(lib, paths, query)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/ui/commands.py", line 783, in import_files
session.run()
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/importer.py", line 341, in run
pl.run_parallel(QUEUE_SIZE)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/util/pipeline.py", line 243, in run
out = self.coro.send(msg)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/importer.py", line 952, in item_query
plugins.send('import_task_choice', session=session, task=task)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beets/plugins.py", line 372, in send
handler(**arguments)
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beetsplug/ihate.py", line 104, in import_task_choice_event
self.config['skip_whitelist'].as_str_seq()):
File "/Library/Python/2.7/site-packages/beets-1.3.0-py2.7.egg/beetsplug/ihate.py", line 79, in do_i_hate_this
if not hate and task.cur_album and album_patterns:
AttributeError: 'ImportTask' object has no attribute 'cur_album'
|
AttributeError
|
def batch_fetch_art(lib, albums, force, maxwidth=None):
"""Fetch album art for each of the albums. This implements the manual
fetchart CLI command.
"""
for album in albums:
if album.artpath and not force:
message = "has album art"
else:
# In ordinary invocations, look for images on the
# filesystem. When forcing, however, always go to the Web
# sources.
local_paths = None if force else [album.path]
path = art_for_album(album, local_paths, maxwidth)
if path:
album.set_art(path, False)
album.store()
message = ui.colorize("green", "found album art")
else:
message = ui.colorize("red", "no art found")
log.info("{0} - {1}: {2}".format(album.albumartist, album.album, message))
|
def batch_fetch_art(lib, albums, force, maxwidth=None):
"""Fetch album art for each of the albums. This implements the manual
fetchart CLI command.
"""
for album in albums:
if album.artpath and not force:
message = "has album art"
else:
# In ordinary invocations, look for images on the
# filesystem. When forcing, however, always go to the Web
# sources.
local_paths = None if force else [album.path]
path = art_for_album(album, local_paths, maxwidth)
if path:
album.set_art(path, False)
album.store()
message = "found album art"
if config["color"]:
message = ui.colorize("red", message)
else:
message = "no art found"
if config["color"]:
message = ui.colorize("turquoise", message)
log.info("{0} - {1}: {2}".format(album.albumartist, album.album, message))
|
https://github.com/beetbox/beets/issues/508
|
Tindersticks - Chocolate (from mpc)
lyrics426 ['Tindersticks', ' ', 'Chocolate']
('lib943', [u'Tindersticks', u' ', u'Chocolate'])
('lib859', [u'Tindersticks', u' ', u'Chocolate'])
('lib800', u'Tindersticks')
('lib778', u'Tindersticks')
('lib800', u' ')
('lib778', u' ')
Traceback (most recent call last):
File "/usr/local/bin/beet", line 20, in <module>
beets.ui.main()
File "/home/winters/buildtests/beets/beets/ui/__init__.py", line 815, in main
_raw_main(args)
File "/home/winters/buildtests/beets/beets/ui/__init__.py", line 807, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/home/winters/buildtests/beets/beetsplug/lyrics.py", line 428, in func
for item in lib.items(ui.decargs(args)):
File "/home/winters/buildtests/beets/beets/library.py", line 965, in items
return self._fetch(Item, query, order)
File "/home/winters/buildtests/beets/beets/library.py", line 946, in _fetch
model_cls, get_query(query, model_cls), order_by
File "/home/winters/buildtests/beets/beets/library.py", line 874, in get_query
return query_from_strings(dbcore.AndQuery, model_cls, val)
File "/home/winters/buildtests/beets/beets/library.py", line 845, in query_from_strings
subq = construct_query_part(part, model_cls)
File "/home/winters/buildtests/beets/beets/library.py", line 805, in construct_query_part
parsed = parse_query_part(query_part, query_classes, prefixes)
File "/home/winters/buildtests/beets/beets/library.py", line 782, in parse_query_part
assert match # Regex should always match.
AssertionError
|
AssertionError
|
def dump(mydb, f, **options):
# type: (canmatrix.CanMatrix, typing.IO, **typing.Any) -> None
# create copy because export changes database
db = copy.deepcopy(mydb)
dbf_export_encoding = options.get("dbfExportEncoding", "iso-8859-1")
ignore_encoding_errors = options.get("ignoreEncodingErrors", "")
db.enum_attribs_to_keys()
if len(db.signals) > 0:
free_signals_dummy_frame = canmatrix.Frame("VECTOR__INDEPENDENT_SIG_MSG")
free_signals_dummy_frame.arbitration_id = canmatrix.ArbitrationId(
id=0x40000000, extended=True
)
free_signals_dummy_frame.signals = db.signals
db.add_frame(free_signals_dummy_frame)
out_str = """//******************************BUSMASTER Messages and signals Database ******************************//
[DATABASE_VERSION] 1.3
[PROTOCOL] CAN
[BUSMASTER_VERSION] [1.7.2]
[NUMBER_OF_MESSAGES] """
out_str += str(len(db.frames)) + "\n"
if max([x.cycle_time for x in db.frames]) > 0:
db.add_frame_defines("GenMsgCycleTime", "INT 0 65535")
if max([x.cycle_time for y in db.frames for x in y.signals]) > 0:
db.add_signal_defines("GenSigCycleTime", "INT 0 65535")
if (
max([x.initial_value for y in db.frames for x in y.signals]) > 0
or min([x.initial_value for y in db.frames for x in y.signals]) < 0
):
db.add_signal_defines("GenSigStartValue", "FLOAT 0 100000000000")
# Frames
for frame in db.frames:
if frame.is_complex_multiplexed:
logger.error(
"export complex multiplexers is not supported - ignoring frame "
+ frame.name
)
continue
# Name unMsgId m_ucLength m_ucNumOfSignals m_cDataFormat m_cFrameFormat? m_txNode
# m_cDataFormat Data format: 1-Intel, 0-Motorola -- always 1 original converter decides based on signal count.
# cFrameFormat Standard 'S' Extended 'X'
extended = "X" if frame.arbitration_id.extended == 1 else "S"
out_str += (
"[START_MSG] "
+ frame.name
+ ",%d,%d,%d,1,%c,"
% (frame.arbitration_id.id, frame.size, len(frame.signals), extended)
)
if not frame.transmitters:
frame.add_transmitter("Vector__XXX")
# DBF does not support multiple Transmitters
out_str += frame.transmitters[0] + "\n"
for signal in frame.signals:
# m_acName ucLength m_ucWhichByte m_ucStartBit
# m_ucDataFormat m_fOffset m_fScaleFactor m_acUnit m_acMultiplex m_rxNode
# m_ucDataFormat
which_byte = int(
math.floor(signal.get_startbit(bit_numbering=1, start_little=True) / 8)
+ 1
)
sign = "I"
if not signal.is_signed:
sign = "U"
if signal.is_float:
if signal.size > 32:
sign = "D"
else:
sign = "F"
if signal.factor == 0:
signal.factor = 1
out_str += (
"[START_SIGNALS] "
+ signal.name
+ ",%d,%d,%d,%c,"
% (
signal.size,
which_byte,
int(signal.get_startbit(bit_numbering=1, start_little=True)) % 8,
sign,
)
+ "{},{}".format(
float(signal.max) / float(signal.factor),
float(signal.min) / float(signal.factor),
)
)
out_str += ",%d,%s,%s" % (
signal.is_little_endian,
signal.offset,
signal.factor,
)
multiplex = ""
if signal.multiplex is not None:
if signal.multiplex == "Multiplexor":
multiplex = "M"
else:
multiplex = "m" + str(signal.multiplex)
out_str += (
","
+ signal.unit
+ ",%s," % multiplex
+ ",".join(signal.receivers)
+ "\n"
)
if len(signal.values) > 0:
for value, name in sorted(list(signal.values.items())):
out_str += '[VALUE_DESCRIPTION] "' + name + '",' + str(value) + "\n"
out_str += "[END_MSG]\n\n"
# Board units
out_str += "[NODE] "
count = 1
for ecu in db.ecus:
out_str += ecu.name
if count < len(db.ecus):
out_str += ","
count += 1
out_str += "\n"
out_str += "[START_DESC]\n\n"
# BU-descriptions
out_str += "[START_DESC_MSG]\n"
for frame in db.frames:
if frame.comment is not None:
comment = frame.comment.replace("\n", " ")
out_str += str(frame.arbitration_id.id) + ' S "' + comment + '";\n'
out_str += "[END_DESC_MSG]\n"
# Frame descriptions
out_str += "[START_DESC_NODE]\n"
for ecu in db.ecus:
if ecu.comment is not None:
comment = ecu.comment.replace("\n", " ")
out_str += ecu.name + ' "' + comment + '";\n'
out_str += "[END_DESC_NODE]\n"
# signal descriptions
out_str += "[START_DESC_SIG]\n"
for frame in db.frames:
if frame.is_complex_multiplexed:
continue
for signal in frame.signals:
if signal.comment is not None:
comment = signal.comment.replace("\n", " ")
out_str += (
"%d S " % frame.arbitration_id.id
+ signal.name
+ ' "'
+ comment
+ '";\n'
)
out_str += "[END_DESC_SIG]\n"
out_str += "[END_DESC]\n\n"
out_str += "[START_PARAM]\n"
# db-parameter
out_str += "[START_PARAM_NET]\n"
for data_type, define in sorted(list(db.global_defines.items())):
default_val = define.defaultValue
if default_val is None:
default_val = "0"
out_str += (
'"'
+ data_type
+ '",'
+ define.definition.replace(" ", ",")
+ ","
+ default_val
+ "\n"
)
out_str += "[END_PARAM_NET]\n"
# bu-parameter
out_str += "[START_PARAM_NODE]\n"
for data_type, define in sorted(list(db.ecu_defines.items())):
default_val = define.defaultValue
if default_val is None:
default_val = "0"
out_str += (
'"'
+ data_type
+ '",'
+ define.definition.replace(" ", ",")
+ ","
+ default_val
+ "\n"
)
out_str += "[END_PARAM_NODE]\n"
# frame-parameter
out_str += "[START_PARAM_MSG]\n"
for data_type, define in sorted(list(db.frame_defines.items())):
default_val = define.defaultValue
if default_val is None:
default_val = "0"
out_str += (
'"' + data_type + '",' + define.definition.replace(" ", ",") + "\n"
) # + ',' + default_val + '\n'
out_str += "[END_PARAM_MSG]\n"
# signal-parameter
out_str += "[START_PARAM_SIG]\n"
for data_type, define in list(db.signal_defines.items()):
default_val = define.defaultValue
if default_val is None:
default_val = "0"
out_str += (
'"'
+ data_type
+ '",'
+ define.definition.replace(" ", ",")
+ ","
+ default_val
+ "\n"
)
out_str += "[END_PARAM_SIG]\n"
out_str += "[START_PARAM_VAL]\n"
# board unit attributes:
out_str += "[START_PARAM_NODE_VAL]\n"
for ecu in db.ecus:
for attrib, val in sorted(list(ecu.attributes.items())):
out_str += ecu.name + ',"' + attrib + '","' + val + '"\n'
out_str += "[END_PARAM_NODE_VAL]\n"
# messages-attributes:
out_str += "[START_PARAM_MSG_VAL]\n"
for frame in db.frames:
if frame.is_complex_multiplexed:
continue
for attrib, val in sorted(list(frame.attributes.items())):
out_str += (
str(frame.arbitration_id.id) + ',S,"' + attrib + '","' + val + '"\n'
)
out_str += "[END_PARAM_MSG_VAL]\n"
# signal-attributes:
out_str += "[START_PARAM_SIG_VAL]\n"
for frame in db.frames:
if frame.is_complex_multiplexed:
continue
for signal in frame.signals:
for attrib, val in sorted(list(signal.attributes.items())):
out_str += (
str(frame.arbitration_id.id)
+ ",S,"
+ signal.name
+ ',"'
+ attrib
+ '","'
+ val
+ '"\n'
)
out_str += "[END_PARAM_SIG_VAL]\n"
out_str += "[END_PARAM_VAL]\n"
f.write(out_str.encode(dbf_export_encoding, ignore_encoding_errors))
|
def dump(mydb, f, **options):
# type: (canmatrix.CanMatrix, typing.IO, **typing.Any) -> None
# create copy because export changes database
db = copy.deepcopy(mydb)
dbf_export_encoding = options.get("dbfExportEncoding", "iso-8859-1")
ignore_encoding_errors = options.get("ignoreExportEncodingErrors", "")
db.enum_attribs_to_keys()
if len(db.signals) > 0:
free_signals_dummy_frame = canmatrix.Frame("VECTOR__INDEPENDENT_SIG_MSG")
free_signals_dummy_frame.arbitration_id = canmatrix.ArbitrationId(
id=0x40000000, extended=True
)
free_signals_dummy_frame.signals = db.signals
db.add_frame(free_signals_dummy_frame)
out_str = """//******************************BUSMASTER Messages and signals Database ******************************//
[DATABASE_VERSION] 1.3
[PROTOCOL] CAN
[BUSMASTER_VERSION] [1.7.2]
[NUMBER_OF_MESSAGES] """
out_str += str(len(db.frames)) + "\n"
if max([x.cycle_time for x in db.frames]) > 0:
db.add_frame_defines("GenMsgCycleTime", "INT 0 65535")
if max([x.cycle_time for y in db.frames for x in y.signals]) > 0:
db.add_signal_defines("GenSigCycleTime", "INT 0 65535")
if (
max([x.initial_value for y in db.frames for x in y.signals]) > 0
or min([x.initial_value for y in db.frames for x in y.signals]) < 0
):
db.add_signal_defines("GenSigStartValue", "FLOAT 0 100000000000")
# Frames
for frame in db.frames:
if frame.is_complex_multiplexed:
logger.error(
"export complex multiplexers is not supported - ignoring frame "
+ frame.name
)
continue
# Name unMsgId m_ucLength m_ucNumOfSignals m_cDataFormat m_cFrameFormat? m_txNode
# m_cDataFormat Data format: 1-Intel, 0-Motorola -- always 1 original converter decides based on signal count.
# cFrameFormat Standard 'S' Extended 'X'
extended = "X" if frame.arbitration_id.extended == 1 else "S"
out_str += (
"[START_MSG] "
+ frame.name
+ ",%d,%d,%d,1,%c,"
% (frame.arbitration_id.id, frame.size, len(frame.signals), extended)
)
if not frame.transmitters:
frame.add_transmitter("Vector__XXX")
# DBF does not support multiple Transmitters
out_str += frame.transmitters[0] + "\n"
for signal in frame.signals:
# m_acName ucLength m_ucWhichByte m_ucStartBit
# m_ucDataFormat m_fOffset m_fScaleFactor m_acUnit m_acMultiplex m_rxNode
# m_ucDataFormat
which_byte = int(
math.floor(signal.get_startbit(bit_numbering=1, start_little=True) / 8)
+ 1
)
sign = "I"
if not signal.is_signed:
sign = "U"
if signal.is_float:
if signal.size > 32:
sign = "D"
else:
sign = "F"
if signal.factor == 0:
signal.factor = 1
out_str += (
"[START_SIGNALS] "
+ signal.name
+ ",%d,%d,%d,%c,"
% (
signal.size,
which_byte,
int(signal.get_startbit(bit_numbering=1, start_little=True)) % 8,
sign,
)
+ "{},{}".format(
float(signal.max) / float(signal.factor),
float(signal.min) / float(signal.factor),
)
)
out_str += ",%d,%s,%s" % (
signal.is_little_endian,
signal.offset,
signal.factor,
)
multiplex = ""
if signal.multiplex is not None:
if signal.multiplex == "Multiplexor":
multiplex = "M"
else:
multiplex = "m" + str(signal.multiplex)
out_str += (
","
+ signal.unit
+ ",%s," % multiplex
+ ",".join(signal.receivers)
+ "\n"
)
if len(signal.values) > 0:
for value, name in sorted(list(signal.values.items())):
out_str += '[VALUE_DESCRIPTION] "' + name + '",' + str(value) + "\n"
out_str += "[END_MSG]\n\n"
# Board units
out_str += "[NODE] "
count = 1
for ecu in db.ecus:
out_str += ecu.name
if count < len(db.ecus):
out_str += ","
count += 1
out_str += "\n"
out_str += "[START_DESC]\n\n"
# BU-descriptions
out_str += "[START_DESC_MSG]\n"
for frame in db.frames:
if frame.comment is not None:
comment = frame.comment.replace("\n", " ")
out_str += str(frame.arbitration_id.id) + ' S "' + comment + '";\n'
out_str += "[END_DESC_MSG]\n"
# Frame descriptions
out_str += "[START_DESC_NODE]\n"
for ecu in db.ecus:
if ecu.comment is not None:
comment = ecu.comment.replace("\n", " ")
out_str += ecu.name + ' "' + comment + '";\n'
out_str += "[END_DESC_NODE]\n"
# signal descriptions
out_str += "[START_DESC_SIG]\n"
for frame in db.frames:
if frame.is_complex_multiplexed:
continue
for signal in frame.signals:
if signal.comment is not None:
comment = signal.comment.replace("\n", " ")
out_str += (
"%d S " % frame.arbitration_id.id
+ signal.name
+ ' "'
+ comment
+ '";\n'
)
out_str += "[END_DESC_SIG]\n"
out_str += "[END_DESC]\n\n"
out_str += "[START_PARAM]\n"
# db-parameter
out_str += "[START_PARAM_NET]\n"
for data_type, define in sorted(list(db.global_defines.items())):
default_val = define.defaultValue
if default_val is None:
default_val = "0"
out_str += (
'"'
+ data_type
+ '",'
+ define.definition.replace(" ", ",")
+ ","
+ default_val
+ "\n"
)
out_str += "[END_PARAM_NET]\n"
# bu-parameter
out_str += "[START_PARAM_NODE]\n"
for data_type, define in sorted(list(db.ecu_defines.items())):
default_val = define.defaultValue
if default_val is None:
default_val = "0"
out_str += (
'"'
+ data_type
+ '",'
+ define.definition.replace(" ", ",")
+ ","
+ default_val
+ "\n"
)
out_str += "[END_PARAM_NODE]\n"
# frame-parameter
out_str += "[START_PARAM_MSG]\n"
for data_type, define in sorted(list(db.frame_defines.items())):
default_val = define.defaultValue
if default_val is None:
default_val = "0"
out_str += (
'"' + data_type + '",' + define.definition.replace(" ", ",") + "\n"
) # + ',' + default_val + '\n'
out_str += "[END_PARAM_MSG]\n"
# signal-parameter
out_str += "[START_PARAM_SIG]\n"
for data_type, define in list(db.signal_defines.items()):
default_val = define.defaultValue
if default_val is None:
default_val = "0"
out_str += (
'"'
+ data_type
+ '",'
+ define.definition.replace(" ", ",")
+ ","
+ default_val
+ "\n"
)
out_str += "[END_PARAM_SIG]\n"
out_str += "[START_PARAM_VAL]\n"
# board unit attributes:
out_str += "[START_PARAM_NODE_VAL]\n"
for ecu in db.ecus:
for attrib, val in sorted(list(ecu.attributes.items())):
out_str += ecu.name + ',"' + attrib + '","' + val + '"\n'
out_str += "[END_PARAM_NODE_VAL]\n"
# messages-attributes:
out_str += "[START_PARAM_MSG_VAL]\n"
for frame in db.frames:
if frame.is_complex_multiplexed:
continue
for attrib, val in sorted(list(frame.attributes.items())):
out_str += (
str(frame.arbitration_id.id) + ',S,"' + attrib + '","' + val + '"\n'
)
out_str += "[END_PARAM_MSG_VAL]\n"
# signal-attributes:
out_str += "[START_PARAM_SIG_VAL]\n"
for frame in db.frames:
if frame.is_complex_multiplexed:
continue
for signal in frame.signals:
for attrib, val in sorted(list(signal.attributes.items())):
out_str += (
str(frame.arbitration_id.id)
+ ",S,"
+ signal.name
+ ',"'
+ attrib
+ '","'
+ val
+ '"\n'
)
out_str += "[END_PARAM_SIG_VAL]\n"
out_str += "[END_PARAM_VAL]\n"
f.write(out_str.encode(dbf_export_encoding, ignore_encoding_errors))
|
https://github.com/ebroecker/canmatrix/issues/496
|
canconvert --deleteZeroSignals --ignoreEncodingErrors --cutLongFrames=8 -vv file.arxml file.dbf
...
...
Traceback (most recent call last):
File "/usr/local/bin/canconvert", line 11, in <module>
load_entry_point('canmatrix==0.9.1', 'console_scripts', 'canconvert')()
File "/usr/local/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/canmatrix/cli/convert.py", line 131, in cli_convert
canmatrix.convert.convert(infile, outfile, **options)
File "/usr/local/lib/python3.8/site-packages/canmatrix/convert.py", line 214, in convert
canmatrix.formats.dumpp(out_dbs, out_file_name, **options)
File "/usr/local/lib/python3.8/site-packages/canmatrix/formats/__init__.py", line 135, in dumpp
dump(db, file_object, export_type, **options)
File "/usr/local/lib/python3.8/site-packages/canmatrix/formats/__init__.py", line 110, in dump
module_instance.dump(can_matrix_or_cluster, file_object, **options) # type: ignore
File "/usr/local/lib/python3.8/site-packages/canmatrix/formats/dbf.py", line 521, in dump
f.write(out_str.encode(dbf_export_encoding, ignore_encoding_errors))
LookupError: unknown error handler name ''
|
LookupError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.