after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._add_scalars()
self._create_service_field()
self._extend_query_type()
|
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._extend_query_type()
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def _extend_query_type(self):
fields = {"_service": self._service_field}
entity_type = _get_entity_type(self.type_map)
if entity_type:
self._schema.type_map[entity_type.name] = entity_type
fields["_entities"] = self._get_entities_field(entity_type)
fields.update(self._schema.query_type.fields)
self._schema.query_type = GraphQLObjectType(
name=self._schema.query_type.name,
description=self._schema.query_type.description,
fields=fields,
)
self._schema.type_map["_Service"] = self._service_type
self._schema.type_map[self._schema.query_type.name] = self._schema.query_type
|
def _extend_query_type(self):
@type(name="_Service")
class Service:
sdl: str
Any = GraphQLScalarType("_Any")
fields = {
"_service": GraphQLField(
GraphQLNonNull(Service.graphql_type),
resolve=lambda _, info: Service(sdl=print_schema(info.schema)),
)
}
entities_type = self._get_entity_type()
if entities_type:
self.type_map[entities_type.name] = entities_type
fields["_entities"] = GraphQLField(
GraphQLNonNull(GraphQLList(entities_type)),
args={"representations": GraphQLNonNull(GraphQLList(GraphQLNonNull(Any)))},
resolve=entities_resolver,
)
fields.update(self.query_type.fields)
self.query_type = GraphQLObjectType(
name=self.query_type.name,
description=self.query_type.description,
fields=fields,
)
self.type_map["_Any"] = Any
self.type_map["_Service"] = Service.graphql_type
self.type_map[self.query_type.name] = self.query_type
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def entities_resolver(self, root, info, representations):
results = []
for representation in representations:
type_name = representation.pop("__typename")
type = self.type_map[type_name]
results.append(type.definition.origin.resolve_reference(**representation))
return results
|
def entities_resolver(root, info, representations):
results = []
for representation in representations:
type_name = representation.pop("__typename")
graphql_type = info.schema.get_type(type_name)
result = get_strawberry_type_for_graphql_type(graphql_type).resolve_reference(
**representation
)
results.append(result)
return results
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def field(
f=None,
*,
name: Optional[str] = None,
is_subscription: bool = False,
description: Optional[str] = None,
resolver: Optional[Callable] = None,
permission_classes: Optional[List[Type[BasePermission]]] = None,
federation: Optional[FederationFieldParams] = None,
):
"""Annotates a method or property as a GraphQL field.
This is normally used inside a type declaration:
>>> @strawberry.type:
>>> class X:
>>> field_abc: str = strawberry.field(description="ABC")
>>> @strawberry.field(description="ABC")
>>> def field_with_resolver(self, info) -> str:
>>> return "abc"
it can be used both as decorator and as a normal function.
"""
origin_name = f.__name__ if f else None
name = name or (to_camel_case(origin_name) if origin_name else None)
wrap = StrawberryField(
field_definition=FieldDefinition(
origin_name=origin_name,
name=name,
type=None, # type: ignore
origin=f, # type: ignore
description=description,
base_resolver=resolver,
is_subscription=is_subscription,
permission_classes=permission_classes or [],
arguments=(
get_arguments_from_resolver(resolver, origin_name) if resolver else []
),
federation=federation or FederationFieldParams(),
)
)
if f:
return wrap(f)
return wrap
|
def field(
wrap=None,
*,
name=None,
description=None,
resolver=None,
is_input=False,
is_subscription=False,
permission_classes=None,
):
"""Annotates a method or property as a GraphQL field.
This is normally used inside a type declaration:
>>> @strawberry.type:
>>> class X:
>>> field_abc: str = strawberry.field(description="ABC")
>>> @strawberry.field(description="ABC")
>>> def field_with_resolver(self, info) -> str:
>>> return "abc"
it can be used both as decorator and as a normal function.
"""
field = strawberry_field(
name=name,
description=description,
resolver=resolver,
is_input=is_input,
is_subscription=is_subscription,
permission_classes=permission_classes,
)
# when calling this with parens we are going to return a strawberry_field
# instance, so it can be used as both decorator and function.
if wrap is None:
return field
# otherwise we run the decorator directly,
# when called as @strawberry.field, without parens.
return field(wrap)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def __init__(self, field_definition: FieldDefinition):
self._field_definition = field_definition
super().__init__( # type: ignore
default=dataclasses.MISSING,
default_factory=dataclasses.MISSING,
init=field_definition.base_resolver is None,
repr=True,
hash=None,
compare=True,
metadata=None,
)
|
def __init__(
self,
*,
is_input=False,
is_subscription=False,
resolver=None,
name=None,
description=None,
metadata=None,
permission_classes=None,
):
self.field_name = name
self.field_description = description
self.field_resolver = resolver
self.is_subscription = is_subscription
self.is_input = is_input
self.field_permission_classes = permission_classes
super().__init__(
# TODO:
default=dataclasses.MISSING,
default_factory=dataclasses.MISSING,
init=resolver is None,
repr=True,
hash=None,
# TODO: this needs to be False when init is False
# we could turn it to True when and if we have a default
# probably can't be True when passing a resolver
compare=is_input,
metadata=metadata,
)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def __call__(self, f):
f._field_definition = self._field_definition
f._field_definition.name = f._field_definition.name or to_camel_case(f.__name__)
f._field_definition.base_resolver = f
f._field_definition.origin = f
f._field_definition.arguments = get_arguments_from_resolver(
f, f._field_definition.name
)
check_return_annotation(f._field_definition)
f._field_definition.type = f.__annotations__["return"]
return f
|
def __call__(self, wrap):
setattr(wrap, IS_STRAWBERRY_FIELD, True)
self.field_description = self.field_description or wrap.__doc__
return LazyFieldWrapper(
wrap,
is_input=self.is_input,
is_subscription=self.is_subscription,
resolver=self.field_resolver,
name=self.field_name,
description=self.field_description,
permission_classes=self.field_permission_classes,
)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def __init__(
self,
schema: BaseSchema,
graphiql: bool = True,
root_value: Optional[Any] = None,
):
self.graphiql = graphiql
self.schema = schema
self.root_value = root_value
|
def __init__(self, schema, graphiql=True):
self.schema = schema
self.graphiql = graphiql
if not self.schema:
raise ValueError("You must pass in a schema to GraphQLView")
if not isinstance(self.schema, GraphQLSchema):
raise ValueError("A valid schema is required to be provided to GraphQLView")
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def dispatch_request(self):
if "text/html" in request.environ.get("HTTP_ACCEPT", ""):
if not self.graphiql:
abort(404)
template = render_graphiql_page()
return self.render_template(request, template=template)
data = request.json
try:
query = data["query"]
variables = data.get("variables")
operation_name = data.get("operationName")
except KeyError:
return Response("No valid query was provided for the request", 400)
context = {"request": request}
result = self.schema.execute_sync(
query,
variable_values=variables,
context_value=context,
operation_name=operation_name,
root_value=self.root_value,
)
response_data = {"data": result.data}
if result.errors:
response_data["errors"] = [format_graphql_error(err) for err in result.errors]
return Response(
json.dumps(response_data),
status=400 if result.errors else 200,
content_type="application/json",
)
|
def dispatch_request(self):
if "text/html" in request.environ.get("HTTP_ACCEPT", ""):
if not self.graphiql:
abort(404)
template = render_graphiql_page()
return self.render_template(request, template=template)
data = request.json
try:
query = data["query"]
variables = data.get("variables")
operation_name = data.get("operationName")
except KeyError:
return Response("No valid query was provided for the request", 400)
context = dict(request=request)
result = graphql_sync(
self.schema,
query,
variable_values=variables,
context_value=context,
operation_name=operation_name,
)
response_data = {"data": result.data}
if result.errors:
response_data["errors"] = [format_graphql_error(err) for err in result.errors]
return Response(
json.dumps(response_data),
status=400 if result.errors else 200,
content_type="application/json",
)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
async def execute(
schema: GraphQLSchema,
query: str,
root_value: typing.Any = None,
context_value: typing.Any = None,
variable_values: typing.Dict[str, typing.Any] = None,
middleware: typing.List[Middleware] = None,
operation_name: str = None,
): # pragma: no cover
schema_validation_errors = validate_schema(schema)
if schema_validation_errors:
return ExecutionResult(data=None, errors=schema_validation_errors)
try:
document = parse(query)
except GraphQLError as error:
return ExecutionResult(data=None, errors=[error])
except Exception as error:
error = GraphQLError(str(error), original_error=error)
return ExecutionResult(data=None, errors=[error])
validation_errors = validate(schema, document)
if validation_errors:
return ExecutionResult(data=None, errors=validation_errors)
result = graphql_execute(
schema,
parse(query),
root_value=root_value,
middleware=middleware or [],
variable_values=variable_values,
operation_name=operation_name,
context_value=context_value,
)
if isawaitable(result):
result = await typing.cast(typing.Awaitable[ExecutionResult], result)
return result
|
async def execute(
schema: GraphQLSchema,
query: str,
root_value: typing.Any = None,
context_value: typing.Any = None,
variable_values: typing.Dict[str, typing.Any] = None,
operation_name: str = None,
):
schema_validation_errors = validate_schema(schema)
if schema_validation_errors:
return ExecutionResult(data=None, errors=schema_validation_errors)
try:
document = parse(query)
except GraphQLError as error:
return ExecutionResult(data=None, errors=[error])
except Exception as error:
error = GraphQLError(str(error), original_error=error)
return ExecutionResult(data=None, errors=[error])
validation_errors = validate(schema, document)
if validation_errors:
return ExecutionResult(data=None, errors=validation_errors)
result = graphql_excute(
schema,
parse(query),
root_value=root_value,
middleware=[DirectivesMiddleware()],
variable_values=variable_values,
operation_name=operation_name,
context_value=context_value,
)
if isawaitable(result):
result = await typing.cast(typing.Awaitable[ExecutionResult], result)
return result
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
async def subscribe(
schema: GraphQLSchema,
query: str,
root_value: typing.Any = None,
context_value: typing.Any = None,
variable_values: typing.Dict[str, typing.Any] = None,
operation_name: str = None,
) -> typing.Union[
typing.AsyncIterator[ExecutionResult], ExecutionResult
]: # pragma: no cover
document = parse(query)
return await graphql_subscribe(
schema=schema,
document=document,
root_value=root_value,
context_value=context_value,
variable_values=variable_values,
operation_name=operation_name,
)
|
async def subscribe(
schema: GraphQLSchema,
query: str,
root_value: typing.Any = None,
context_value: typing.Any = None,
variable_values: typing.Dict[str, typing.Any] = None,
operation_name: str = None,
) -> typing.Union[typing.AsyncIterator[ExecutionResult], ExecutionResult]:
document = parse(query)
return await graphql_subscribe(
schema=schema,
document=document,
root_value=root_value,
context_value=context_value,
variable_values=variable_values,
operation_name=operation_name,
)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def resolve(self, next_, root, info, **kwargs):
result = next_(root, info, **kwargs)
for directive in info.field_nodes[0].directives:
directive_name = directive.name.value
if directive_name in SPECIFIED_DIRECTIVES:
continue
func = self.directives.get(directive_name).resolver
# TODO: support converting lists
arguments = {
argument.name.value: argument.value.value
for argument in directive.arguments
}
result = func(result, **arguments)
return result
|
def resolve(self, next_, root, info, **kwargs):
result = next_(root, info, **kwargs)
for directive in info.field_nodes[0].directives:
directive_name = directive.name.value
if directive_name in SPECIFIED_DIRECTIVES:
continue
func = DIRECTIVE_REGISTRY.get(directive_name)
arguments = {
argument.name.value: argument.value.value
for argument in directive.arguments
}
result = func(result, **arguments)
return result
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def print_federation_field_directive(field: Optional[FieldDefinition]) -> str:
if not field:
return ""
out = ""
if field.federation.provides:
out += f' @provides(fields: "{field.federation.provides}")'
if field.federation.requires:
out += f' @requires(fields: "{field.federation.requires}")'
if field.federation.external:
out += " @external"
return out
|
def print_federation_field_directive(field, metadata):
out = ""
if metadata and "federation" in metadata:
federation = metadata["federation"]
provides = federation.get("provides", "")
requires = federation.get("requires", "")
external = federation.get("external", False)
if provides:
out += f' @provides(fields: "{provides}")'
if requires:
out += f' @requires(fields: "{requires}")'
if external:
out += " @external"
return out
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def print_fields(type_, schema: BaseSchema) -> str:
strawberry_type = cast(TypeDefinition, schema.get_type_by_name(type_.name))
fields = []
for i, (name, field) in enumerate(type_.fields.items()):
field_definition = strawberry_type.get_field(name) if strawberry_type else None
fields.append(
print_description(field, " ", not i)
+ f" {name}"
+ print_args(field.args, " ")
+ f": {field.type}"
+ print_federation_field_directive(field_definition)
+ print_deprecated(field)
)
return print_block(fields)
|
def print_fields(type_) -> str:
strawberry_type = get_strawberry_type_for_graphql_type(type_)
strawberry_fields = dataclasses.fields(strawberry_type) if strawberry_type else []
def _get_metadata(field_name):
return next(
(
f.metadata
for f in strawberry_fields
if (getattr(f, "field_name", None) or f.name) == field_name
),
None,
)
fields = [
print_description(field, " ", not i)
+ f" {name}"
+ print_args(field.args, " ")
+ f": {field.type}"
+ print_federation_field_directive(field, _get_metadata(name))
+ print_deprecated(field)
for i, (name, field) in enumerate(type_.fields.items())
]
return print_block(fields)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def print_federation_key_directive(type_, schema: BaseSchema):
strawberry_type = cast(TypeDefinition, schema.get_type_by_name(type_.name))
if not strawberry_type:
return ""
keys = strawberry_type.federation.keys
parts = []
for key in keys:
parts.append(f'@key(fields: "{key}")')
if not parts:
return ""
return " " + " ".join(parts)
|
def print_federation_key_directive(type_):
strawberry_type = get_strawberry_type_for_graphql_type(type_)
if not strawberry_type:
return ""
keys = getattr(strawberry_type, "_federation_keys", [])
parts = []
for key in keys:
parts.append(f'@key(fields: "{key}")')
if not parts:
return ""
return " " + " ".join(parts)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def print_extends(type_, schema: BaseSchema):
strawberry_type = cast(TypeDefinition, schema.get_type_by_name(type_.name))
if strawberry_type and strawberry_type.federation.extend:
return "extend "
return ""
|
def print_extends(type_):
strawberry_type = get_strawberry_type_for_graphql_type(type_)
if strawberry_type and getattr(strawberry_type, "_federation_extend", False):
return "extend "
return ""
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def print_schema(schema: BaseSchema) -> str:
graphql_core_schema = schema._schema # type: ignore
directives = filter(
lambda n: not is_specified_directive(n), graphql_core_schema.directives
)
type_map = graphql_core_schema.type_map
types = filter(is_defined_type, map(type_map.get, sorted(type_map))) # type: ignore
return "\n\n".join(
chain(
filter(None, [print_schema_definition(graphql_core_schema)]),
(print_directive(directive) for directive in directives),
(_print_type(type_, schema) for type_ in types), # type: ignore
)
)
|
def print_schema(schema: GraphQLSchema) -> str:
return print_filtered_schema(
schema, lambda n: not is_specified_directive(n), is_defined_type
)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def _process_type(
cls,
*,
name: Optional[str] = None,
is_input: bool = False,
is_interface: bool = False,
description: Optional[str] = None,
federation: Optional[FederationTypeParams] = None,
):
name = name or to_camel_case(cls.__name__)
wrapped = dataclasses.dataclass(cls)
interfaces = _get_interfaces(wrapped)
wrapped._type_definition = TypeDefinition(
name=name,
is_input=is_input,
is_interface=is_interface,
is_generic=is_generic(cls),
interfaces=interfaces,
description=description,
federation=federation or FederationTypeParams(),
origin=cls,
)
return wrapped
|
def _process_type(
cls, *, name=None, is_input=False, is_interface=False, description=None
):
name = name or cls.__name__
def _get_fields(wrapped, types_replacement_map=None):
class_fields = dataclasses.fields(wrapped)
fields = {}
for class_field in class_fields:
# we want to make a copy of the original field when dealing
# with generic types and also get the actual type for the type var
if is_type_var(class_field.type) or has_type_var(class_field.type):
class_field = copy.copy(class_field)
class_field.type = get_actual_type(
class_field.type, types_replacement_map
)
# like args, a None default implies Optional
if class_field.default is None:
class_field.type = Optional[class_field.type]
field_name = getattr(class_field, "field_name", None) or to_camel_case(
class_field.name
)
description = getattr(class_field, "field_description", None)
permission_classes = getattr(class_field, "field_permission_classes", None)
resolver = getattr(class_field, "field_resolver", None) or _get_resolver(
cls, class_field.name
)
resolver.__annotations__["return"] = class_field.type
fields[field_name] = field(
resolver,
is_input=is_input,
description=description,
permission_classes=permission_classes,
).graphql_type
# supply a graphql default_value if the type annotation has a default
if class_field.default not in (dataclasses.MISSING, None):
fields[field_name].default_value = class_field.default
strawberry_fields = {}
for base in [cls, *cls.__bases__]:
strawberry_fields.update(
{
key: value
for key, value in base.__dict__.items()
if getattr(value, IS_STRAWBERRY_FIELD, False)
}
)
for key, value in strawberry_fields.items():
name = getattr(value, "field_name", None) or to_camel_case(key)
fields[name] = value.graphql_type
return fields
if is_input:
setattr(cls, IS_STRAWBERRY_INPUT, True)
elif is_interface:
setattr(cls, IS_STRAWBERRY_INTERFACE, True)
extra_kwargs = {"description": description or cls.__doc__}
wrapped = dataclasses.dataclass(cls)
if is_input:
TypeClass = GraphQLInputObjectType
elif is_interface:
TypeClass = GraphQLInterfaceType
# TODO: in future we might want to be able to override this
# for example to map a class (like a django model) to one
# type of the interface
extra_kwargs["resolve_type"] = _interface_resolve_type
else:
TypeClass = GraphQLObjectType
extra_kwargs["interfaces"] = [
klass.graphql_type
for klass in cls.__bases__
if hasattr(klass, IS_STRAWBERRY_INTERFACE)
]
graphql_type = TypeClass(
name,
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
**extra_kwargs,
)
register_type(cls, graphql_type)
return wrapped
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def type(
cls: Type = None,
*,
name: str = None,
is_input: bool = False,
is_interface: bool = False,
description: str = None,
federation: Optional[FederationTypeParams] = None,
):
"""Annotates a class as a GraphQL type.
Example usage:
>>> @strawberry.type:
>>> class X:
>>> field_abc: str = "ABC"
"""
def wrap(cls):
return _process_type(
cls,
name=name,
is_input=is_input,
is_interface=is_interface,
description=description,
federation=federation,
)
if cls is None:
return wrap
return wrap(cls)
|
def type(cls=None, *, name=None, is_input=False, is_interface=False, description=None):
"""Annotates a class as a GraphQL type.
Example usage:
>>> @strawberry.type:
>>> class X:
>>> field_abc: str = "ABC"
"""
def wrap(cls):
return _process_type(
cls,
name=name,
is_input=is_input,
is_interface=is_interface,
description=description,
)
if cls is None:
return wrap
return wrap(cls)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def wrap(cls):
return _process_type(
cls,
name=name,
is_input=is_input,
is_interface=is_interface,
description=description,
federation=federation,
)
|
def wrap(cls):
return _process_type(
cls,
name=name,
is_input=is_input,
is_interface=is_interface,
description=description,
)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def union(name: str, types: Tuple[Type], *, description=None):
"""Creates a new named Union type.
Example usages:
>>> strawberry.union(
>>> "Name",
>>> (A, B),
>>> )
>>> strawberry.union(
>>> "Name",
>>> (A, B),
>>> )
"""
union_definition = UnionDefinition(name=name, description=description, types=types)
# This is currently a temporary solution, this is ok for now
# But in future we might want to change this so that it works
# properly with mypy, but there's no way to return a type like NewType does
# so we return this class instance as it allows us to reuse the rest of
# our code without doing too many changes
def _call(self):
raise ValueError("Cannot use union type directly")
union_class = type(
name,
(),
{"_union_definition": union_definition, "__call__": _call},
)
return union_class()
|
def union(name: str, types: typing.Tuple[typing.Type], *, description=None):
"""Creates a new named Union type.
Example usages:
>>> strawberry.union(
>>> "Name",
>>> (A, B),
>>> )
>>> strawberry.union(
>>> "Name",
>>> (A, B),
>>> )
"""
from .type_converter import get_graphql_type_for_annotation
def _resolve_type(root, info, _type):
if not hasattr(root, "graphql_type"):
raise WrongReturnTypeForUnion(info.field_name, str(type(root)))
if is_generic(type(root)):
return _find_type_for_generic_union(root)
if root.graphql_type not in _type.types:
raise UnallowedReturnTypeForUnion(
info.field_name, str(type(root)), _type.types
)
return root.graphql_type
# TODO: union types don't work with scalar types
# so we want to return a nice error
# also we want to make sure we have been passed
# strawberry types
graphql_type = GraphQLUnionType(
name,
[
get_graphql_type_for_annotation(type, name, force_optional=True)
for type in types
],
description=description,
)
graphql_type.resolve_type = _resolve_type
# This is currently a temporary solution, this is ok for now
# But in future we might want to change this so that it works
# properly with mypy, but there's no way to return a type like NewType does
# so we return this class instance as it allows us to reuse the rest of
# our code without doing too many changes
class X:
def __init__(self, graphql_type):
self.graphql_type = graphql_type
def __call__(self):
raise ValueError("Cannot use union type directly")
return X(graphql_type)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def pretty_print_graphql_operation(
operation_name: str, query: str, variables: typing.Dict["str", typing.Any]
): # pragma: no cover
"""Pretty print a GraphQL operation using pygments.
Won't print introspection operation to prevent noise in the output."""
if operation_name == "IntrospectionQuery":
return
now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
print(f"[{now}]: {operation_name or 'No operation name'}")
print(highlight(query, GraphQLLexer(), Terminal256Formatter()))
if variables:
variables_json = json.dumps(variables, indent=4)
print(highlight(variables_json, lexers.JsonLexer(), Terminal256Formatter()))
|
def pretty_print_graphql_operation(
operation_name: str, query: str, variables: typing.Dict["str", typing.Any]
):
"""Pretty print a GraphQL operation using pygments.
Won't print introspection operation to prevent noise in the output."""
if operation_name == "IntrospectionQuery":
return
now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
print(f"[{now}]: {operation_name or 'No operation name'}")
print(highlight(query, GraphQLLexer(), Terminal256Formatter()))
if variables:
variables_json = json.dumps(variables, indent=4)
print(highlight(variables_json, lexers.JsonLexer(), Terminal256Formatter()))
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def is_list(annotation: Type) -> bool:
"""Returns True if annotation is a List"""
annotation_origin = getattr(annotation, "__origin__", None)
return annotation_origin == list
|
def is_list(annotation):
"""Returns True if annotation is a typing.List"""
annotation_origin = getattr(annotation, "__origin__", None)
return annotation_origin == list
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def is_union(annotation: Type) -> bool:
"""Returns True if annotation is a Union"""
annotation_origin = getattr(annotation, "__origin__", None)
return annotation_origin == typing.Union
|
def is_union(annotation):
"""Returns True if annotation is a typing.Union"""
annotation_origin = getattr(annotation, "__origin__", None)
return annotation_origin == typing.Union
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def is_optional(annotation: Type) -> bool:
"""Returns True if the annotation is Optional[SomeType]"""
# Optionals are represented as unions
if not is_union(annotation):
return False
types = annotation.__args__
# A Union to be optional needs to have at least one None type
return any([x == None.__class__ for x in types]) # noqa:E711
|
def is_optional(annotation):
"""Returns True if the annotation is typing.Optional[SomeType]"""
# Optionals are represented as unions
if not is_union(annotation):
return False
types = annotation.__args__
# A Union to be optional needs to have at least one None type
return any([x == None.__class__ for x in types]) # noqa:E711
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def get_optional_annotation(annotation: Type) -> Type:
types = annotation.__args__
non_none_types = [x for x in types if x != None.__class__] # noqa:E711
return non_none_types[0]
|
def get_optional_annotation(annotation):
types = annotation.__args__
non_none_types = [x for x in types if x != None.__class__] # noqa:E711
return non_none_types[0]
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def get_list_annotation(annotation: Type) -> Type:
return annotation.__args__[0]
|
def get_list_annotation(annotation):
return annotation.__args__[0]
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def is_generic(annotation: Type) -> bool:
"""Returns True if the annotation is or extends a generic."""
return (
isinstance(annotation, type)
and issubclass(annotation, typing.Generic) # type:ignore
or isinstance(annotation, typing._GenericAlias) # type:ignore
and annotation.__origin__
not in (
list,
typing.Union,
tuple,
typing.ClassVar,
AsyncGenerator,
)
)
|
def is_generic(annotation) -> bool:
"""Returns True if the annotation is or extends a generic."""
return (
isinstance(annotation, type)
and issubclass(annotation, typing.Generic) # type:ignore
or isinstance(annotation, typing._GenericAlias) # type:ignore
and annotation.__origin__
not in (
list,
typing.Union,
tuple,
typing.ClassVar,
collections.abc.AsyncGenerator,
)
)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def is_type_var(annotation: Type) -> bool:
"""Returns True if the annotation is a TypeVar."""
return isinstance(annotation, TypeVar) # type:ignore
|
def is_type_var(annotation) -> bool:
"""Returns True if the annotation is a TypeVar."""
return isinstance(annotation, typing.TypeVar) # type:ignore
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def has_type_var(annotation: Type) -> bool:
"""
Returns True if the annotation or any of
its argument have a TypeVar as argument.
"""
return any(
is_type_var(arg) or has_type_var(arg)
for arg in getattr(annotation, "__args__", [])
)
|
def has_type_var(annotation) -> bool:
"""
Returns True if the annotation or any of
its argument have a TypeVar as argument.
"""
return any(
is_type_var(arg) or has_type_var(arg)
for arg in getattr(annotation, "__args__", [])
)
|
https://github.com/strawberry-graphql/strawberry/issues/349
|
Traceback (most recent call last):
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 735, in fields
fields = resolve_thunk(self._fields)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 293, in resolve_thunk
return thunk() if callable(thunk) else thunk
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 133, in <lambda>
lambda types_replacement_map=None: _get_fields(wrapped, types_replacement_map),
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/type.py", line 76, in _get_fields
fields[field_name] = field(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 315, in field
return field(wrap)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/field.py", line 171, in __call__
setattr(wrap, IS_STRAWBERRY_FIELD, True)
AttributeError: 'method' object has no attribute '_is_strawberry_field'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch_7.py", line 30, in <module>
schema = strawberry.Schema(query=Query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 205, in __init__
collect_referenced_types(query)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 382, in collect_referenced_types
for field in named_type.fields.values():
File "/usr/lib/python3.8/functools.py", line 967, in __get__
val = self.func(instance)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/definition.py", line 737, in fields
raise TypeError(f"{self.name} fields cannot be resolved. {error}")
TypeError: Query fields cannot be resolved. 'method' object has no attribute '_is_strawberry_field'
Process finished with exit code 1
|
AttributeError
|
def _get_resolver(cls, field_name):
class_field = getattr(cls, field_name, None)
if class_field and getattr(class_field, "resolver", None):
return class_field.resolver
def _resolver(root, info):
if not root:
return None
field_resolver = getattr(root, field_name, None)
if getattr(field_resolver, IS_STRAWBERRY_FIELD, False):
return field_resolver(root, info)
elif field_resolver.__class__ is strawberry_field:
# TODO: support default values
return None
return field_resolver
_resolver.__name__ = field_name
return _resolver
|
def _get_resolver(cls, field_name):
class_field = getattr(cls, field_name, None)
if class_field and getattr(class_field, "resolver", None):
return class_field.resolver
def _resolver(root, info):
if not root:
return None
field_resolver = getattr(root, field_name, None)
if getattr(field_resolver, IS_STRAWBERRY_FIELD, False):
return field_resolver(root, info)
elif field_resolver.__class__ is strawberry_field:
# TODO: support default values
return None
return field_resolver
return _resolver
|
https://github.com/strawberry-graphql/strawberry/issues/377
|
Traceback (most recent call last):
File "/home/ignormies/.config/JetBrains/PyCharm2020.1/scratches/scratch.py", line 28, in <module>
schema = strawberry.Schema(query=CoolType)
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/strawberry/schema.py", line 25, in __init__
super().__init__(
File "/home/ignormies/.local/share/virtualenvs/gql-bf-XGX4szKA-py3.8/lib/python3.8/site-packages/graphql/type/schema.py", line 239, in __init__
raise TypeError(
TypeError: Schema must contain uniquely named types but contains multiple types named '_resolver'.
|
TypeError
|
def _replace_consonants(word: str, consonants: str) -> str:
_HO_HIP = "\u0e2b" # ห
_RO_RUA = "\u0e23" # ร
if not consonants:
return word
if len(consonants) == 1:
return word.replace(consonants[0], _CONSONANTS[consonants[0]][0])
i = 0
len_cons = len(consonants)
while i < len_cons:
if i == 0:
if consonants[0] == _HO_HIP:
word = word.replace(consonants[0], "")
del consonants[0]
len_cons -= 1
else:
word = word.replace(consonants[0], _CONSONANTS[consonants[0]][0])
i += 1
elif consonants[i] == _RO_RUA and i == len(word) and word[i - 1] == _RO_RUA:
word = word.replace(consonants[i], _CONSONANTS[consonants[i]][1])
elif consonants[i] == _RO_RUA and i < len(word):
if i + 1 == len(word) and word[i] == _RO_RUA:
word = word.replace(consonants[i], _CONSONANTS[consonants[i]][1])
elif word[i] == _RO_RUA and i + 1 < len(word):
if word[i + 1] == _RO_RUA:
word = list(word)
del word[i + 1]
if i + 2 == len_cons:
word[i] = "an"
else:
word[i] = "a"
word = "".join(word)
i += 1
elif word[i] == _RO_RUA:
word = word.replace(consonants[i], _CONSONANTS[consonants[i]][1])
i += 1
else:
word = word.replace(consonants[i], _CONSONANTS[consonants[i]][1])
i += 1
elif word[i] == _RO_RUA:
word = word.replace(consonants[i], _CONSONANTS[consonants[i]][1])
i += 1
else:
word = word.replace(consonants[i], _CONSONANTS[consonants[i]][1])
i += 1
else:
word = word.replace(consonants[i], _CONSONANTS[consonants[i]][1])
i += 1
return word
|
def _replace_consonants(word: str, consonants: str) -> str:
_HO_HIP = "\u0e2b" # ห
_RO_RUA = "\u0e23" # ร
if not consonants:
return word
if len(consonants) == 1:
return word.replace(consonants[0], _CONSONANTS[consonants[0]][0])
i = 0
len_cons = len(consonants)
while i < len_cons:
if i == 0:
if consonants[0] == _HO_HIP:
word = word.replace(consonants[0], "")
del consonants[0]
len_cons -= 1
else:
word = word.replace(consonants[0], _CONSONANTS[consonants[0]][0])
i += 1
elif consonants[i] == _RO_RUA and word[i] == _RO_RUA:
if i + 1 == len(word):
word = word.replace(consonants[i], _CONSONANTS[consonants[i]][1])
elif word[i + 1] == _RO_RUA:
word = list(word)
del word[i + 1]
if i + 2 == len_cons:
word[i] = "an"
else:
word[i] = "a"
word = "".join(word)
i += 1
else:
word = word.replace(consonants[i], _CONSONANTS[consonants[i]][1])
i += 1
else:
word = word.replace(consonants[i], _CONSONANTS[consonants[i]][1])
i += 1
return word
|
https://github.com/PyThaiNLP/pythainlp/issues/485
|
---------------------------------------------------------------------------
IndexError Traceback (most recent call last)
<ipython-input-214-ffa4a144ac59> in <module>
----> 1 romanize('จรวยพร')
~/.env/lib/python3.7/site-packages/pythainlp/transliterate/core.py in romanize(text, engine)
50 from .royin import romanize
51
---> 52 return romanize(text)
53
54
~/.env/lib/python3.7/site-packages/pythainlp/transliterate/royin.py in romanize(text)
224 """
225 words = word_tokenize(text)
--> 226 romanized_words = [_romanize(word) for word in words]
227
228 return "".join(romanized_words)
~/.env/lib/python3.7/site-packages/pythainlp/transliterate/royin.py in <listcomp>(.0)
224 """
225 words = word_tokenize(text)
--> 226 romanized_words = [_romanize(word) for word in words]
227
228 return "".join(romanized_words)
~/.env/lib/python3.7/site-packages/pythainlp/transliterate/royin.py in _romanize(word)
208 word = "".join(word)
209
--> 210 word = _replace_consonants(word, consonants)
211
212 return word
~/.env/lib/python3.7/site-packages/pythainlp/transliterate/royin.py in _replace_consonants(word, consonants)
167 )
168 i += 1
--> 169 elif consonants[i] == _RO_RUA and word[i] == _RO_RUA:
170 if i + 1 == len(word):
171 word = word.replace(
IndexError: string index out of range
|
IndexError
|
def _doc2features(doc, i) -> Dict:
word = doc[i][0]
postag = doc[i][1]
# Features from current word
features = {
"word.word": word,
"word.stopword": _is_stopword(word),
"word.isthai": isthai(word),
"word.isspace": word.isspace(),
"postag": postag,
"word.isdigit": word.isdigit(),
}
if word.isdigit() and len(word) == 5:
features["word.islen5"] = True
# Features from previous word
if i > 0:
prevword = doc[i - 1][0]
prevpostag = doc[i - 1][1]
prev_features = {
"word.prevword": prevword,
"word.previsspace": prevword.isspace(),
"word.previsthai": isthai(prevword),
"word.prevstopword": _is_stopword(prevword),
"word.prevpostag": prevpostag,
"word.prevwordisdigit": prevword.isdigit(),
}
features.update(prev_features)
else:
features["BOS"] = True # Special "Beginning of Sequence" tag
# Features from next word
if i < len(doc) - 1:
nextword = doc[i + 1][0]
nextpostag = doc[i + 1][1]
next_features = {
"word.nextword": nextword,
"word.nextisspace": nextword.isspace(),
"word.nextpostag": nextpostag,
"word.nextisthai": isthai(nextword),
"word.nextstopword": _is_stopword(nextword),
"word.nextwordisdigit": nextword.isdigit(),
}
features.update(next_features)
else:
features["EOS"] = True # Special "End of Sequence" tag
return features
|
def _doc2features(doc, i) -> dict:
word = doc[i][0]
postag = doc[i][1]
# Features from current word
features = {
"word.word": word,
"word.stopword": _is_stopword(word),
"word.isthai": isthai(word),
"word.isspace": word.isspace(),
"postag": postag,
"word.isdigit": word.isdigit(),
}
if word.isdigit() and len(word) == 5:
features["word.islen5"] = True
# Features from previous word
if i > 0:
prevword = doc[i - 1][0]
prevpostag = doc[i - 1][1]
prev_features = {
"word.prevword": prevword,
"word.previsspace": prevword.isspace(),
"word.previsthai": isthai(prevword),
"word.prevstopword": _is_stopword(prevword),
"word.prevpostag": prevpostag,
"word.prevwordisdigit": prevword.isdigit(),
}
features.update(prev_features)
else:
features["BOS"] = True # Special "Beginning of Sequence" tag
# Features from next word
if i < len(doc) - 1:
nextword = doc[i + 1][0]
nextpostag = doc[i + 1][1]
next_features = {
"word.nextword": nextword,
"word.nextisspace": nextword.isspace(),
"word.nextpostag": nextpostag,
"word.nextisthai": isthai(nextword),
"word.nextstopword": _is_stopword(nextword),
"word.nextwordisdigit": nextword.isdigit(),
}
features.update(next_features)
else:
features["EOS"] = True # Special "End of Sequence" tag
return features
|
https://github.com/PyThaiNLP/pythainlp/issues/468
|
from pythainlp.tag.named_entity import ThaiNameTagger
ner = ThaiNameTagger()
ner.get_ner("ปัตตานียะลาถึงนราธิวาส")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/pythainlp/pythainlp/tag/named_entity.py", line 138, in get_ner
pos_tags = pos_tag(tokens, engine="perceptron", corpus="orchid_ud")
File "/pythainlp/pythainlp/tag/pos_tag.py", line 221, in pos_tag
from .perceptron import tag as tag_
File "/pythainlp/pythainlp/tag/perceptron.py", line 28, in <module>
_ORCHID_TAGGER = _load_tagger(_ORCHID_DATA_FILENAME)
File "/pythainlp/pythainlp/tag/perceptron.py", line 24, in _load_tagger
model = pickle.load(fh)
ModuleNotFoundError: No module named 'nltk'
|
ModuleNotFoundError
|
def __init__(self) -> None:
"""
Thai named-entity recognizer.
"""
self.crf = CRFTagger()
self.crf.open(get_corpus_path(_CORPUS_NAME))
|
def __init__(self):
"""
Thai named-entity recognizer.
"""
self.crf = CRFTagger()
self.crf.open(get_corpus_path(_CORPUS_NAME))
|
https://github.com/PyThaiNLP/pythainlp/issues/468
|
from pythainlp.tag.named_entity import ThaiNameTagger
ner = ThaiNameTagger()
ner.get_ner("ปัตตานียะลาถึงนราธิวาส")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/pythainlp/pythainlp/tag/named_entity.py", line 138, in get_ner
pos_tags = pos_tag(tokens, engine="perceptron", corpus="orchid_ud")
File "/pythainlp/pythainlp/tag/pos_tag.py", line 221, in pos_tag
from .perceptron import tag as tag_
File "/pythainlp/pythainlp/tag/perceptron.py", line 28, in <module>
_ORCHID_TAGGER = _load_tagger(_ORCHID_DATA_FILENAME)
File "/pythainlp/pythainlp/tag/perceptron.py", line 24, in _load_tagger
model = pickle.load(fh)
ModuleNotFoundError: No module named 'nltk'
|
ModuleNotFoundError
|
def _orchid_tagger():
global _ORCHID_TAGGER
if not _ORCHID_TAGGER:
_ORCHID_TAGGER = PerceptronTagger(path=_ORCHID_PATH)
return _ORCHID_TAGGER
|
def _orchid_tagger():
global _ORCHID_TAGGER
if not _ORCHID_TAGGER:
with open(_ORCHID_PATH, "rb") as fh:
_ORCHID_TAGGER = pickle.load(fh)
return _ORCHID_TAGGER
|
https://github.com/PyThaiNLP/pythainlp/issues/468
|
from pythainlp.tag.named_entity import ThaiNameTagger
ner = ThaiNameTagger()
ner.get_ner("ปัตตานียะลาถึงนราธิวาส")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/pythainlp/pythainlp/tag/named_entity.py", line 138, in get_ner
pos_tags = pos_tag(tokens, engine="perceptron", corpus="orchid_ud")
File "/pythainlp/pythainlp/tag/pos_tag.py", line 221, in pos_tag
from .perceptron import tag as tag_
File "/pythainlp/pythainlp/tag/perceptron.py", line 28, in <module>
_ORCHID_TAGGER = _load_tagger(_ORCHID_DATA_FILENAME)
File "/pythainlp/pythainlp/tag/perceptron.py", line 24, in _load_tagger
model = pickle.load(fh)
ModuleNotFoundError: No module named 'nltk'
|
ModuleNotFoundError
|
def _pud_tagger():
global _PUD_TAGGER
if not _PUD_TAGGER:
_PUD_TAGGER = PerceptronTagger(path=_PUD_PATH)
return _PUD_TAGGER
|
def _pud_tagger():
global _PUD_TAGGER
if not _PUD_TAGGER:
with open(_PUD_PATH, "rb") as fh:
_PUD_TAGGER = pickle.load(fh)
return _PUD_TAGGER
|
https://github.com/PyThaiNLP/pythainlp/issues/468
|
from pythainlp.tag.named_entity import ThaiNameTagger
ner = ThaiNameTagger()
ner.get_ner("ปัตตานียะลาถึงนราธิวาส")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/pythainlp/pythainlp/tag/named_entity.py", line 138, in get_ner
pos_tags = pos_tag(tokens, engine="perceptron", corpus="orchid_ud")
File "/pythainlp/pythainlp/tag/pos_tag.py", line 221, in pos_tag
from .perceptron import tag as tag_
File "/pythainlp/pythainlp/tag/perceptron.py", line 28, in <module>
_ORCHID_TAGGER = _load_tagger(_ORCHID_DATA_FILENAME)
File "/pythainlp/pythainlp/tag/perceptron.py", line 24, in _load_tagger
model = pickle.load(fh)
ModuleNotFoundError: No module named 'nltk'
|
ModuleNotFoundError
|
def _f1(precision: float, recall: float) -> float:
"""
Compute f1.
:param float precision
:param float recall
:return: f1
:rtype: float
"""
if precision == recall == 0:
return 0
return 2 * precision * recall / (precision + recall)
|
def _f1(precision: float, recall: float) -> float:
"""
Compute f1
:param float precision
:param float recall
:return: f1
:rtype: float
"""
if precision == recall == 0:
return 0
return 2 * precision * recall / (precision + recall)
|
https://github.com/PyThaiNLP/pythainlp/issues/353
|
# Install with no extras
pip install pythainlp
python
Python 3.7.6 (default, Dec 30 2019, 19:38:26)
[Clang 11.0.0 (clang-1100.0.33.16)] on darwin
Type "help", "copyright", "credits" or "license" for more information.
import pythainlp
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/__init__.py", line 26, in <module>
from pythainlp.benchmarks import benchmark
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/benchmarks/__init__.py", line 1, in <module>
from .word_tokenization import benchmark
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/benchmarks/word_tokenization.py", line 6, in <module>
import numpy as np
ModuleNotFoundError: No module named 'numpy'
|
ModuleNotFoundError
|
def _flatten_result(my_dict: dict, sep: str = ":") -> dict:
"""
Flatten two-level dictionary.
Use keys in the first level as a prefix for keys in the two levels.
For example,
my_dict = { "a": { "b": 7 } }
flatten(my_dict)
{ "a:b": 7 }
:param dict my_dict: contains stats dictionary
:param str sep: separator between the two keys (default: ":")
:return: a one-level dictionary with key combined
:rtype: dict[str, float | str]
"""
items = []
for k1, kv2 in my_dict.items():
for k2, v in kv2.items():
new_key = f"{k1}{sep}{k2}"
items.append((new_key, v))
return dict(items)
|
def _flatten_result(my_dict: dict, sep: str = ":") -> dict:
"""
Flatten two-level dictionary
Use keys in the first level as a prefix for keys in the two levels.
For example,
my_dict = { "a": { "b": 7 } }
flatten(my_dict)
{ "a:b": 7 }
:param dict my_dict: contains stats dictionary
:param str sep: separator between the two keys (default: ":")
:return: a one-level dictionary with key combined
:rtype: dict[str, float | str]
"""
items = []
for k1, kv2 in my_dict.items():
for k2, v in kv2.items():
new_key = f"{k1}{sep}{k2}"
items.append((new_key, v))
return dict(items)
|
https://github.com/PyThaiNLP/pythainlp/issues/353
|
# Install with no extras
pip install pythainlp
python
Python 3.7.6 (default, Dec 30 2019, 19:38:26)
[Clang 11.0.0 (clang-1100.0.33.16)] on darwin
Type "help", "copyright", "credits" or "license" for more information.
import pythainlp
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/__init__.py", line 26, in <module>
from pythainlp.benchmarks import benchmark
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/benchmarks/__init__.py", line 1, in <module>
from .word_tokenization import benchmark
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/benchmarks/word_tokenization.py", line 6, in <module>
import numpy as np
ModuleNotFoundError: No module named 'numpy'
|
ModuleNotFoundError
|
def benchmark(ref_samples: list, samples: list):
"""
Performace benchmark of samples.
Please see :meth:`pythainlp.benchmarks.word_tokenization.compute_stats` for
metrics being computed.
:param list[str] ref_samples: ground truth samples
:param list[str] samples: samples that we want to evaluate
:return: dataframe with row x col = len(samples) x len(metrics)
:rtype: pandas.DataFrame
"""
results = []
for i, (r, s) in enumerate(zip(ref_samples, samples)):
try:
r, s = preprocessing(r), preprocessing(s)
if r and s:
stats = compute_stats(r, s)
stats = _flatten_result(stats)
stats["expected"] = r
stats["actual"] = s
results.append(stats)
except:
reason = """
[Error]
Reason: %s
Pair (i=%d)
--- label
%s
--- sample
%s
""" % (
sys.exc_info(),
i,
r,
s,
)
raise SystemExit(reason)
return pd.DataFrame(results)
|
def benchmark(ref_samples: list, samples: list):
"""
Performace benchmark of samples
Please see :meth:`pythainlp.benchmarks.word_tokenization.compute_stats` for
metrics being computed.
:param list[str] ref_samples: ground truth samples
:param list[str] samples: samples that we want to evaluate
:return: dataframe with row x col = len(samples) x len(metrics)
:rtype: pandas.DataFrame
"""
results = []
for i, (r, s) in enumerate(zip(ref_samples, samples)):
try:
r, s = preprocessing(r), preprocessing(s)
if r and s:
stats = compute_stats(r, s)
stats = _flatten_result(stats)
stats["expected"] = r
stats["actual"] = s
results.append(stats)
except:
reason = """
[Error]
Reason: %s
Pair (i=%d)
--- label
%s
--- sample
%s
""" % (sys.exc_info(), i, r, s)
raise SystemExit(reason)
return pd.DataFrame(results)
|
https://github.com/PyThaiNLP/pythainlp/issues/353
|
# Install with no extras
pip install pythainlp
python
Python 3.7.6 (default, Dec 30 2019, 19:38:26)
[Clang 11.0.0 (clang-1100.0.33.16)] on darwin
Type "help", "copyright", "credits" or "license" for more information.
import pythainlp
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/__init__.py", line 26, in <module>
from pythainlp.benchmarks import benchmark
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/benchmarks/__init__.py", line 1, in <module>
from .word_tokenization import benchmark
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/benchmarks/word_tokenization.py", line 6, in <module>
import numpy as np
ModuleNotFoundError: No module named 'numpy'
|
ModuleNotFoundError
|
def preprocessing(txt: str, remove_space: bool = True) -> str:
"""
Clean up text before performing evaluation.
:param str text: text to be preprocessed
:param bool remove_space: whether remove white space
:return: preprocessed text
:rtype: str
"""
txt = re.sub(SURROUNDING_SEPS_RX, "", txt)
if remove_space:
txt = re.sub("\s+", "", txt)
txt = re.sub(MULTIPLE_SEPS_RX, SEPARATOR, txt)
txt = re.sub(TAG_RX, "", txt)
txt = re.sub(TAILING_SEP_RX, "", txt).strip()
return txt
|
def preprocessing(txt: str, remove_space: bool = True) -> str:
"""
Clean up text before performing evaluation
:param str text: text to be preprocessed
:param bool remove_space: whether remove white space
:return: preprocessed text
:rtype: str
"""
txt = re.sub(SURROUNDING_SEPS_RX, "", txt)
if remove_space:
txt = re.sub("\s+", "", txt)
txt = re.sub(MULTIPLE_SEPS_RX, SEPARATOR, txt)
txt = re.sub(TAG_RX, "", txt)
txt = re.sub(TAILING_SEP_RX, "", txt).strip()
return txt
|
https://github.com/PyThaiNLP/pythainlp/issues/353
|
# Install with no extras
pip install pythainlp
python
Python 3.7.6 (default, Dec 30 2019, 19:38:26)
[Clang 11.0.0 (clang-1100.0.33.16)] on darwin
Type "help", "copyright", "credits" or "license" for more information.
import pythainlp
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/__init__.py", line 26, in <module>
from pythainlp.benchmarks import benchmark
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/benchmarks/__init__.py", line 1, in <module>
from .word_tokenization import benchmark
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/benchmarks/word_tokenization.py", line 6, in <module>
import numpy as np
ModuleNotFoundError: No module named 'numpy'
|
ModuleNotFoundError
|
def compute_stats(ref_sample: str, raw_sample: str) -> dict:
"""
Compute statistics for tokenization quality
These statistics includes:
**Character-Level**:
True Positive, False Positive, True Negative, False Negative, Precision, Recall, and f1
**Word-Level**:
Precision, Recall, and f1
**Other**:
- Correct tokenization indicator: {0, 1} sequence indicating the correspoding
word is tokenized correctly.
:param str ref_sample: ground truth samples
:param str samples samples that we want to evaluate
:return: metrics in character and word-level and correctly tokenized word indicators
:rtype: dict[str, float | str]
"""
ref_sample = _binary_representation(ref_sample)
sample = _binary_representation(raw_sample)
# Compute charater-level statistics
c_pos_pred, c_neg_pred = np.argwhere(sample == 1), np.argwhere(sample == 0)
c_pos_pred = c_pos_pred[c_pos_pred < ref_sample.shape[0]]
c_neg_pred = c_neg_pred[c_neg_pred < ref_sample.shape[0]]
c_tp = np.sum(ref_sample[c_pos_pred] == 1)
c_fp = np.sum(ref_sample[c_pos_pred] == 0)
c_tn = np.sum(ref_sample[c_neg_pred] == 0)
c_fn = np.sum(ref_sample[c_neg_pred] == 1)
c_precision = c_tp / (c_tp + c_fp)
c_recall = c_tp / (c_tp + c_fn)
c_f1 = _f1(c_precision, c_recall)
# Compute word-level statistics
# Find correctly tokenized words in the reference sample
word_boundaries = _find_word_boudaries(ref_sample)
# Find correctly tokenized words in the sample
ss_boundaries = _find_word_boudaries(sample)
tokenization_indicators = _find_words_correctly_tokenised(
word_boundaries, ss_boundaries
)
correctly_tokenised_words = np.sum(tokenization_indicators)
w_precision = correctly_tokenised_words / np.sum(sample)
w_recall = correctly_tokenised_words / np.sum(ref_sample)
w_f1 = _f1(w_precision, w_recall)
tokenization_indicators = list(map(lambda x: str(x), tokenization_indicators))
return {
"char_level": {
"tp": c_tp,
"fp": c_fp,
"tn": c_tn,
"fn": c_fn,
"precision": c_precision,
"recall": c_recall,
"f1": c_f1,
},
"word_level": {
"precision": w_precision,
"recall": w_recall,
"f1": w_f1,
},
"global": {"tokenisation_indicators": "".join(tokenization_indicators)},
}
|
def compute_stats(ref_sample: str, raw_sample: str) -> dict:
"""
Compute statistics for tokenization quality
These statistics includes:
**Character-Level**:
True Positive, False Positive, True Negative, False Negative, Precision, Recall, and f1
**Word-Level**:
Precision, Recall, and f1
**Other**:
- Correct tokenization indicator: {0, 1} sequence indicating the correspoding
word is tokenized correctly.
:param str ref_sample: ground truth samples
:param str samples samples that we want to evaluate
:return: metrics in character and word-level and correctly tokenized word indicators
:rtype: dict[str, float | str]
"""
ref_sample = _binary_representation(ref_sample)
sample = _binary_representation(raw_sample)
# Compute charater-level statistics
c_pos_pred, c_neg_pred = np.argwhere(sample == 1), np.argwhere(sample == 0)
c_pos_pred = c_pos_pred[c_pos_pred < ref_sample.shape[0]]
c_neg_pred = c_neg_pred[c_neg_pred < ref_sample.shape[0]]
c_tp = np.sum(ref_sample[c_pos_pred] == 1)
c_fp = np.sum(ref_sample[c_pos_pred] == 0)
c_tn = np.sum(ref_sample[c_neg_pred] == 0)
c_fn = np.sum(ref_sample[c_neg_pred] == 1)
c_precision = c_tp / (c_tp + c_fp)
c_recall = c_tp / (c_tp + c_fn)
c_f1 = _f1(c_precision, c_recall)
# Compute word-level statistics
# Find correctly tokenized words in the reference sample
word_boundaries = _find_word_boudaries(ref_sample)
# Find correctly tokenized words in the sample
ss_boundaries = _find_word_boudaries(sample)
tokenization_indicators = _find_words_correctly_tokenised(
word_boundaries, ss_boundaries
)
correctly_tokenised_words = np.sum(tokenization_indicators)
w_precision = correctly_tokenised_words / np.sum(sample)
w_recall = correctly_tokenised_words / np.sum(ref_sample)
w_f1 = _f1(w_precision, w_recall)
tokenization_indicators = list(map(lambda x: str(x), tokenization_indicators))
return {
"char_level": {
"tp": c_tp,
"fp": c_fp,
"tn": c_tn,
"fn": c_fn,
"precision": c_precision,
"recall": c_recall,
"f1": c_f1,
},
"word_level": {"precision": w_precision, "recall": w_recall, "f1": w_f1},
"global": {"tokenisation_indicators": "".join(tokenization_indicators)},
}
|
https://github.com/PyThaiNLP/pythainlp/issues/353
|
# Install with no extras
pip install pythainlp
python
Python 3.7.6 (default, Dec 30 2019, 19:38:26)
[Clang 11.0.0 (clang-1100.0.33.16)] on darwin
Type "help", "copyright", "credits" or "license" for more information.
import pythainlp
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/__init__.py", line 26, in <module>
from pythainlp.benchmarks import benchmark
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/benchmarks/__init__.py", line 1, in <module>
from .word_tokenization import benchmark
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/benchmarks/word_tokenization.py", line 6, in <module>
import numpy as np
ModuleNotFoundError: No module named 'numpy'
|
ModuleNotFoundError
|
def _binary_representation(txt: str, verbose: bool = False):
"""
Transform text to {0, 1} sequence.
where (1) indicates that the corresponding character is the beginning of
a word. For example, ผม|ไม่|ชอบ|กิน|ผัก -> 10100...
:param str txt: input text that we want to transform
:param bool verbose: for debugging purposes
:return: {0, 1} sequence
:rtype: str
"""
chars = np.array(list(txt))
boundary = np.argwhere(chars == SEPARATOR).reshape(-1)
boundary = boundary - np.array(range(boundary.shape[0]))
bin_rept = np.zeros(len(txt) - boundary.shape[0])
bin_rept[list(boundary) + [0]] = 1
sample_wo_seps = list(txt.replace(SEPARATOR, ""))
# sanity check
assert len(sample_wo_seps) == len(bin_rept)
if verbose:
for c, m in zip(sample_wo_seps, bin_rept):
print("%s -- %d" % (c, m))
return bin_rept
|
def _binary_representation(txt: str, verbose: bool = False):
"""
Transform text to {0, 1} sequence
where (1) indicates that the corresponding character is the beginning of
a word. For example, ผม|ไม่|ชอบ|กิน|ผัก -> 10100...
:param str txt: input text that we want to transform
:param bool verbose: for debugging purposes
:return: {0, 1} sequence
:rtype: str
"""
chars = np.array(list(txt))
boundary = np.argwhere(chars == SEPARATOR).reshape(-1)
boundary = boundary - np.array(range(boundary.shape[0]))
bin_rept = np.zeros(len(txt) - boundary.shape[0])
bin_rept[list(boundary) + [0]] = 1
sample_wo_seps = list(txt.replace(SEPARATOR, ""))
# sanity check
assert len(sample_wo_seps) == len(bin_rept)
if verbose:
for c, m in zip(sample_wo_seps, bin_rept):
print("%s -- %d" % (c, m))
return bin_rept
|
https://github.com/PyThaiNLP/pythainlp/issues/353
|
# Install with no extras
pip install pythainlp
python
Python 3.7.6 (default, Dec 30 2019, 19:38:26)
[Clang 11.0.0 (clang-1100.0.33.16)] on darwin
Type "help", "copyright", "credits" or "license" for more information.
import pythainlp
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/__init__.py", line 26, in <module>
from pythainlp.benchmarks import benchmark
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/benchmarks/__init__.py", line 1, in <module>
from .word_tokenization import benchmark
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/benchmarks/word_tokenization.py", line 6, in <module>
import numpy as np
ModuleNotFoundError: No module named 'numpy'
|
ModuleNotFoundError
|
def _find_word_boudaries(bin_reps) -> list:
"""
Find start and end location of each word.
:param str bin_reps: binary representation of a text
:return: list of tuples (start, end)
:rtype: list[tuple(int, int)]
"""
boundary = np.argwhere(bin_reps == 1).reshape(-1)
start_idx = boundary
end_idx = boundary[1:].tolist() + [bin_reps.shape[0]]
return list(zip(start_idx, end_idx))
|
def _find_word_boudaries(bin_reps) -> list:
"""
Find start and end location of each word
:param str bin_reps: binary representation of a text
:return: list of tuples (start, end)
:rtype: list[tuple(int, int)]
"""
boundary = np.argwhere(bin_reps == 1).reshape(-1)
start_idx = boundary
end_idx = boundary[1:].tolist() + [bin_reps.shape[0]]
return list(zip(start_idx, end_idx))
|
https://github.com/PyThaiNLP/pythainlp/issues/353
|
# Install with no extras
pip install pythainlp
python
Python 3.7.6 (default, Dec 30 2019, 19:38:26)
[Clang 11.0.0 (clang-1100.0.33.16)] on darwin
Type "help", "copyright", "credits" or "license" for more information.
import pythainlp
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/__init__.py", line 26, in <module>
from pythainlp.benchmarks import benchmark
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/benchmarks/__init__.py", line 1, in <module>
from .word_tokenization import benchmark
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/benchmarks/word_tokenization.py", line 6, in <module>
import numpy as np
ModuleNotFoundError: No module named 'numpy'
|
ModuleNotFoundError
|
def _find_words_correctly_tokenised(
ref_boundaries: list, predicted_boundaries: list
) -> tuple:
"""
Find whether each word is correctly tokenized.
:param list[tuple(int, int)] ref_boundaries: word boundaries of reference tokenization
:param list[tuple(int, int)] predicted_boundaries: word boundareies of predicted tokenization
:return: binary sequence where 1 indicates the corresponding word is tokenized correctly
:rtype: tuple[int]
"""
ref_b = dict(zip(ref_boundaries, [1] * len(ref_boundaries)))
labels = tuple(map(lambda x: ref_b.get(x, 0), predicted_boundaries))
return labels
|
def _find_words_correctly_tokenised(
ref_boundaries: list, predicted_boundaries: list
) -> tuple:
"""
Find whether each word is correctly tokenized
:param list[tuple(int, int)] ref_boundaries: word boundaries of reference tokenization
:param list[tuple(int, int)] predicted_boundaries: word boundareies of predicted tokenization
:return: binary sequence where 1 indicates the corresponding word is tokenized correctly
:rtype: tuple[int]
"""
ref_b = dict(zip(ref_boundaries, [1] * len(ref_boundaries)))
labels = tuple(map(lambda x: ref_b.get(x, 0), predicted_boundaries))
return labels
|
https://github.com/PyThaiNLP/pythainlp/issues/353
|
# Install with no extras
pip install pythainlp
python
Python 3.7.6 (default, Dec 30 2019, 19:38:26)
[Clang 11.0.0 (clang-1100.0.33.16)] on darwin
Type "help", "copyright", "credits" or "license" for more information.
import pythainlp
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/__init__.py", line 26, in <module>
from pythainlp.benchmarks import benchmark
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/benchmarks/__init__.py", line 1, in <module>
from .word_tokenization import benchmark
File "/Users/maybeno/workspace/Envs/newspaper-nvz7rtlC/lib/python3.7/site-packages/pythainlp/benchmarks/word_tokenization.py", line 6, in <module>
import numpy as np
ModuleNotFoundError: No module named 'numpy'
|
ModuleNotFoundError
|
def combine_install_requirements(ireqs):
"""
Return a single install requirement that reflects a combination of
all the inputs.
"""
# We will store the source ireqs in a _source_ireqs attribute;
# if any of the inputs have this, then use those sources directly.
source_ireqs = []
for ireq in ireqs:
source_ireqs.extend(getattr(ireq, "_source_ireqs", [ireq]))
# deepcopy the accumulator so as to not modify the inputs
combined_ireq = copy.deepcopy(source_ireqs[0])
for ireq in source_ireqs[1:]:
# NOTE we may be losing some info on dropped reqs here
combined_ireq.req.specifier &= ireq.req.specifier
combined_ireq.constraint &= ireq.constraint
# Return a sorted, de-duped tuple of extras
combined_ireq.extras = tuple(
sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras)))
)
# InstallRequirements objects are assumed to come from only one source, and
# so they support only a single comes_from entry. This function breaks this
# model. As a workaround, we deterministically choose a single source for
# the comes_from entry, and add an extra _source_ireqs attribute to keep
# track of multiple sources for use within pip-tools.
if len(source_ireqs) > 1:
if any(ireq.comes_from is None for ireq in source_ireqs):
# None indicates package was directly specified.
combined_ireq.comes_from = None
else:
# Populate the comes_from field from one of the sources.
# Requirement input order is not stable, so we need to sort:
# We choose the shortest entry in order to keep the printed
# representation as concise as possible.
combined_ireq.comes_from = min(
(ireq.comes_from for ireq in source_ireqs),
key=lambda x: (len(str(x)), str(x)),
)
combined_ireq._source_ireqs = source_ireqs
return combined_ireq
|
def combine_install_requirements(ireqs):
"""
Return a single install requirement that reflects a combination of
all the inputs.
"""
# We will store the source ireqs in a _source_ireqs attribute;
# if any of the inputs have this, then use those sources directly.
source_ireqs = []
for ireq in ireqs:
source_ireqs.extend(getattr(ireq, "_source_ireqs", [ireq]))
source_ireqs.sort(key=str)
# deepcopy the accumulator so as to not modify the inputs
combined_ireq = copy.deepcopy(source_ireqs[0])
for ireq in source_ireqs[1:]:
# NOTE we may be losing some info on dropped reqs here
combined_ireq.req.specifier &= ireq.req.specifier
combined_ireq.constraint &= ireq.constraint
# Return a sorted, de-duped tuple of extras
combined_ireq.extras = tuple(
sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras)))
)
# InstallRequirements objects are assumed to come from only one source, and
# so they support only a single comes_from entry. This function breaks this
# model. As a workaround, we deterministically choose a single source for
# the comes_from entry, and add an extra _source_ireqs attribute to keep
# track of multiple sources for use within pip-tools.
if len(source_ireqs) > 1:
if any(ireq.comes_from is None for ireq in source_ireqs):
# None indicates package was directly specified.
combined_ireq.comes_from = None
else:
# Populate the comes_from field from one of the sources.
# Requirement input order is not stable, so we need to sort:
# We choose the shortest entry in order to keep the printed
# representation as concise as possible.
combined_ireq.comes_from = min(
(ireq.comes_from for ireq in source_ireqs),
key=lambda x: (len(str(x)), str(x)),
)
combined_ireq._source_ireqs = source_ireqs
return combined_ireq
|
https://github.com/jazzband/pip-tools/issues/851
|
$ cat good.in
-e git+https://github.com/edx/django-rest-framework-oauth.git@0a43e8525f1e3048efe4bc70c03de308a277197c#egg=djangorestframework-oauth==1.1.1
edx-enterprise==1.7.2
$ cat bad.in
git+https://github.com/edx/django-rest-framework-oauth.git@0a43e8525f1e3048efe4bc70c03de308a277197c#egg=djangorestframework-oauth==1.1.1
edx-enterprise==1.7.2
$ cat doit.sh
rm -f *.txt
pip install pip-tools==3.8.0
pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o good38.txt good.in
pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o bad38.txt bad.in
pip install pip-tools==3.9.0
pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o good39.txt good.in
pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o bad39.txt bad.in
diff good*.txt
$ sh -x doit.sh
+ rm -f '*.txt'
+ pip install pip-tools==3.8.0
Collecting pip-tools==3.8.0
Using cached https://files.pythonhosted.org/packages/1c/a1/fc5d034448ca3ab0a8d8b97a064db05fcce6ac8d197bc1fd55e8daa84299/pip_tools-3.8.0-py2.py3-none-any.whl
Requirement already satisfied: six in /usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages (from pip-tools==3.8.0) (1.12.0)
Requirement already satisfied: click>=6 in /usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages (from pip-tools==3.8.0) (7.0)
Installing collected packages: pip-tools
Successfully installed pip-tools-3.8.0
+ pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o good38.txt good.in
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --no-emit-trusted-host --no-index --output-file=good38.txt good.in
#
-e git+https://github.com/edx/django-rest-framework-oauth.git@0a43e8525f1e3048efe4bc70c03de308a277197c#egg=djangorestframework-oauth==1.1.1
amqp==1.4.9 # via edx-enterprise, kombu
aniso8601==7.0.0 # via edx-enterprise, tincan
anyjson==0.3.3 # via edx-enterprise, kombu
asn1crypto==0.24.0 # via cryptography, edx-enterprise
billiard==3.3.0.23 # via celery, edx-enterprise
bleach==2.1.4 # via edx-enterprise
celery==3.1.25 # via edx-enterprise
certifi==2019.6.16 # via edx-enterprise, requests
cffi==1.12.3 # via cryptography, edx-enterprise
chardet==3.0.4 # via edx-enterprise, requests
click==7.0 # via code-annotations, edx-enterprise
code-annotations==0.3.1 # via edx-enterprise
cryptography==2.7 # via django-fernet-fields, edx-enterprise
defusedxml==0.5.0 # via djangorestframework-xml, edx-enterprise
django-config-models==1.0.1 # via edx-enterprise
django-countries==4.6.1 # via edx-enterprise
django-crum==0.7.3 # via edx-enterprise, edx-rbac
django-fernet-fields==0.6 # via edx-enterprise
django-filter==1.0.4 # via edx-enterprise
django-ipware==2.1.0 # via edx-enterprise
django-model-utils==3.0.0 # via edx-enterprise, edx-rbac
django-multi-email-field==0.5.1 # via edx-enterprise
django-object-actions==0.10.0 # via edx-enterprise
django-simple-history==2.7.0 # via edx-enterprise
django-waffle==0.12.0 # via edx-django-utils, edx-drf-extensions, edx-enterprise
django==1.11.22 # via code-annotations, django-config-models, django-crum, django-fernet-fields, django-model-utils, django-multi-email-field, edx-django-oauth2-provider, edx-django-utils, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, jsonfield, rest-condition
djangorestframework-jwt==1.11.0 # via edx-drf-extensions, edx-enterprise
djangorestframework-xml==1.3.0 # via edx-enterprise
djangorestframework==3.7.7 # via django-config-models, edx-drf-extensions, edx-enterprise, rest-condition
edx-django-oauth2-provider==1.3.5 # via edx-enterprise
edx-django-utils==2.0.0 # via edx-drf-extensions, edx-enterprise
edx-drf-extensions==2.3.1 # via edx-enterprise, edx-rbac
edx-enterprise==1.7.2
edx-opaque-keys[django]==1.0.1 # via edx-drf-extensions, edx-enterprise
edx-rbac==0.2.1 # via edx-enterprise
edx-rest-api-client==1.9.2 # via edx-enterprise
enum34==1.1.6 # via edx-enterprise
future==0.17.1 # via edx-enterprise, pyjwkest
html5lib==1.0.1 # via bleach, edx-enterprise
idna==2.8 # via edx-enterprise, requests
ipaddress==1.0.22 # via edx-enterprise
jinja2==2.10.1 # via code-annotations, edx-enterprise
jsondiff==1.1.1 # via edx-enterprise
jsonfield==2.0.2 # via edx-enterprise
kombu==3.0.37 # via celery, edx-enterprise
markupsafe==1.1.1 # via edx-enterprise, jinja2
newrelic==4.20.1.121 # via edx-django-utils, edx-enterprise
path.py==8.2.1 # via edx-enterprise
pbr==5.4.0 # via edx-enterprise, stevedore
pillow==6.1.0 # via edx-enterprise
psutil==1.2.1 # via edx-django-utils, edx-drf-extensions, edx-enterprise
pycparser==2.19 # via cffi, edx-enterprise
pycryptodomex==3.4.7 # via edx-enterprise, pyjwkest
pyjwkest==1.3.2 # via edx-drf-extensions, edx-enterprise
pyjwt==1.5.2 # via djangorestframework-jwt, edx-enterprise, edx-rest-api-client
pymongo==2.9.1 # via edx-enterprise, edx-opaque-keys
python-dateutil==2.4.0 # via edx-drf-extensions, edx-enterprise
python-slugify==1.2.6 # via code-annotations, edx-enterprise
pytz==2019.1 # via celery, django, edx-enterprise, tincan
pyyaml==5.1.1 # via code-annotations, edx-enterprise
requests==2.22.0 # via edx-drf-extensions, edx-enterprise, edx-rest-api-client, pyjwkest, slumber
rest-condition==1.0.3 # via edx-drf-extensions, edx-enterprise
rules==2.0.1 # via edx-enterprise
semantic-version==2.6.0 # via edx-drf-extensions, edx-enterprise
shortuuid==0.5.0 # via edx-django-oauth2-provider, edx-enterprise
six==1.11.0 # via bleach, cryptography, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, html5lib, pyjwkest, python-dateutil, stevedore
slumber==0.7.1 # via edx-enterprise, edx-rest-api-client
stevedore==1.30.1 # via code-annotations, edx-enterprise, edx-opaque-keys
testfixtures==6.10.0 # via edx-enterprise
tincan==0.0.5 # via edx-enterprise
unicodecsv==0.14.1 # via edx-enterprise
unidecode==1.1.1 # via edx-enterprise, python-slugify
urllib3==1.23 # via edx-enterprise, requests
webencodings==0.5.1 # via edx-enterprise, html5lib
+ pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o bad38.txt bad.in
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --no-emit-trusted-host --no-index --output-file=bad38.txt bad.in
#
amqp==1.4.9 # via edx-enterprise, kombu
aniso8601==7.0.0 # via edx-enterprise, tincan
anyjson==0.3.3 # via edx-enterprise, kombu
asn1crypto==0.24.0 # via cryptography, edx-enterprise
billiard==3.3.0.23 # via celery, edx-enterprise
bleach==2.1.4 # via edx-enterprise
celery==3.1.25 # via edx-enterprise
certifi==2019.6.16 # via edx-enterprise, requests
cffi==1.12.3 # via cryptography, edx-enterprise
chardet==3.0.4 # via edx-enterprise, requests
click==7.0 # via code-annotations, edx-enterprise
code-annotations==0.3.1 # via edx-enterprise
cryptography==2.7 # via django-fernet-fields, edx-enterprise
defusedxml==0.5.0 # via djangorestframework-xml, edx-enterprise
django-config-models==1.0.1 # via edx-enterprise
django-countries==4.6.1 # via edx-enterprise
django-crum==0.7.3 # via edx-enterprise, edx-rbac
django-fernet-fields==0.6 # via edx-enterprise
django-filter==1.0.4 # via edx-enterprise
django-ipware==2.1.0 # via edx-enterprise
django-model-utils==3.0.0 # via edx-enterprise, edx-rbac
django-multi-email-field==0.5.1 # via edx-enterprise
django-object-actions==0.10.0 # via edx-enterprise
django-simple-history==2.7.0 # via edx-enterprise
django-waffle==0.12.0 # via edx-django-utils, edx-drf-extensions, edx-enterprise
django==1.11.22 # via code-annotations, django-config-models, django-crum, django-fernet-fields, django-model-utils, django-multi-email-field, edx-django-oauth2-provider, edx-django-utils, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, jsonfield, rest-condition
djangorestframework-jwt==1.11.0 # via edx-drf-extensions, edx-enterprise
git+https://github.com/edx/django-rest-framework-oauth.git@0a43e8525f1e3048efe4bc70c03de308a277197c#egg=djangorestframework-oauth==1.1.1
djangorestframework-xml==1.3.0 # via edx-enterprise
djangorestframework==3.7.7 # via django-config-models, edx-drf-extensions, edx-enterprise, rest-condition
edx-django-oauth2-provider==1.3.5 # via edx-enterprise
edx-django-utils==2.0.0 # via edx-drf-extensions, edx-enterprise
edx-drf-extensions==2.3.1 # via edx-enterprise, edx-rbac
edx-enterprise==1.7.2
edx-opaque-keys[django]==1.0.1 # via edx-drf-extensions, edx-enterprise
edx-rbac==0.2.1 # via edx-enterprise
edx-rest-api-client==1.9.2 # via edx-enterprise
enum34==1.1.6 # via edx-enterprise
future==0.17.1 # via edx-enterprise, pyjwkest
html5lib==1.0.1 # via bleach, edx-enterprise
idna==2.8 # via edx-enterprise, requests
ipaddress==1.0.22 # via edx-enterprise
jinja2==2.10.1 # via code-annotations, edx-enterprise
jsondiff==1.1.1 # via edx-enterprise
jsonfield==2.0.2 # via edx-enterprise
kombu==3.0.37 # via celery, edx-enterprise
markupsafe==1.1.1 # via edx-enterprise, jinja2
newrelic==4.20.1.121 # via edx-django-utils, edx-enterprise
path.py==8.2.1 # via edx-enterprise
pbr==5.4.0 # via edx-enterprise, stevedore
pillow==6.1.0 # via edx-enterprise
psutil==1.2.1 # via edx-django-utils, edx-drf-extensions, edx-enterprise
pycparser==2.19 # via cffi, edx-enterprise
pycryptodomex==3.4.7 # via edx-enterprise, pyjwkest
pyjwkest==1.3.2 # via edx-drf-extensions, edx-enterprise
pyjwt==1.5.2 # via djangorestframework-jwt, edx-enterprise, edx-rest-api-client
pymongo==2.9.1 # via edx-enterprise, edx-opaque-keys
python-dateutil==2.4.0 # via edx-drf-extensions, edx-enterprise
python-slugify==1.2.6 # via code-annotations, edx-enterprise
pytz==2019.1 # via celery, django, edx-enterprise, tincan
pyyaml==5.1.1 # via code-annotations, edx-enterprise
requests==2.22.0 # via edx-drf-extensions, edx-enterprise, edx-rest-api-client, pyjwkest, slumber
rest-condition==1.0.3 # via edx-drf-extensions, edx-enterprise
rules==2.0.1 # via edx-enterprise
semantic-version==2.6.0 # via edx-drf-extensions, edx-enterprise
shortuuid==0.5.0 # via edx-django-oauth2-provider, edx-enterprise
six==1.11.0 # via bleach, cryptography, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, html5lib, pyjwkest, python-dateutil, stevedore
slumber==0.7.1 # via edx-enterprise, edx-rest-api-client
stevedore==1.30.1 # via code-annotations, edx-enterprise, edx-opaque-keys
testfixtures==6.10.0 # via edx-enterprise
tincan==0.0.5 # via edx-enterprise
unicodecsv==0.14.1 # via edx-enterprise
unidecode==1.1.1 # via edx-enterprise, python-slugify
urllib3==1.23 # via edx-enterprise, requests
webencodings==0.5.1 # via edx-enterprise, html5lib
+ pip install pip-tools==3.9.0
Collecting pip-tools==3.9.0
Using cached https://files.pythonhosted.org/packages/9e/57/f793afe4057a90f072fbe9ecf20599a6b141e4ba06d0dacb0b8ab3722aa6/pip_tools-3.9.0-py2.py3-none-any.whl
Requirement already satisfied: click>=6 in /usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages (from pip-tools==3.9.0) (7.0)
Requirement already satisfied: six in /usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages (from pip-tools==3.9.0) (1.12.0)
Installing collected packages: pip-tools
Found existing installation: pip-tools 3.8.0
Uninstalling pip-tools-3.8.0:
Successfully uninstalled pip-tools-3.8.0
Successfully installed pip-tools-3.9.0
+ pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o good39.txt good.in
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --no-emit-trusted-host --no-index --output-file=good39.txt good.in
#
-e git+https://github.com/edx/django-rest-framework-oauth.git@0a43e8525f1e3048efe4bc70c03de308a277197c#egg=djangorestframework-oauth==1.1.1
amqp==1.4.9 # via edx-enterprise, kombu
aniso8601==7.0.0 # via edx-enterprise, tincan
anyjson==0.3.3 # via edx-enterprise, kombu
asn1crypto==0.24.0 # via cryptography, edx-enterprise
billiard==3.3.0.23 # via celery, edx-enterprise
bleach==2.1.4 # via edx-enterprise
celery==3.1.25 # via edx-enterprise
certifi==2019.6.16 # via edx-enterprise, requests
cffi==1.12.3 # via cryptography, edx-enterprise
chardet==3.0.4 # via edx-enterprise, requests
click==7.0 # via code-annotations, edx-enterprise
code-annotations==0.3.1 # via edx-enterprise
cryptography==2.7 # via django-fernet-fields, edx-enterprise
defusedxml==0.5.0 # via djangorestframework-xml, edx-enterprise
django-config-models==1.0.1 # via edx-enterprise
django-countries==4.6.1 # via edx-enterprise
django-crum==0.7.3 # via edx-enterprise, edx-rbac
django-fernet-fields==0.6 # via edx-enterprise
django-filter==1.0.4 # via edx-enterprise
django-ipware==2.1.0 # via edx-enterprise
django-model-utils==3.0.0 # via edx-enterprise, edx-rbac
django-multi-email-field==0.5.1 # via edx-enterprise
django-object-actions==0.10.0 # via edx-enterprise
django-simple-history==2.7.0 # via edx-enterprise
django-waffle==0.12.0 # via edx-django-utils, edx-drf-extensions, edx-enterprise
django==1.11.22 # via code-annotations, django-config-models, django-crum, django-fernet-fields, django-model-utils, django-multi-email-field, edx-django-oauth2-provider, edx-django-utils, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, jsonfield, rest-condition
djangorestframework-jwt==1.11.0 # via edx-drf-extensions, edx-enterprise
djangorestframework-xml==1.3.0 # via edx-enterprise
djangorestframework==3.7.7 # via django-config-models, edx-drf-extensions, edx-enterprise, rest-condition
edx-django-oauth2-provider==1.3.5 # via edx-enterprise
edx-django-utils==2.0.0 # via edx-drf-extensions, edx-enterprise
edx-drf-extensions==2.3.1 # via edx-enterprise, edx-rbac
edx-enterprise==1.7.2
edx-opaque-keys==1.0.1 # via edx-drf-extensions, edx-enterprise
edx-rbac==0.2.1 # via edx-enterprise
edx-rest-api-client==1.9.2 # via edx-enterprise
enum34==1.1.6 # via edx-enterprise
future==0.17.1 # via edx-enterprise, pyjwkest
html5lib==1.0.1 # via bleach, edx-enterprise
idna==2.8 # via edx-enterprise, requests
ipaddress==1.0.22 # via edx-enterprise
jinja2==2.10.1 # via code-annotations, edx-enterprise
jsondiff==1.1.1 # via edx-enterprise
jsonfield==2.0.2 # via edx-enterprise
kombu==3.0.37 # via celery, edx-enterprise
markupsafe==1.1.1 # via edx-enterprise, jinja2
newrelic==4.20.1.121 # via edx-django-utils, edx-enterprise
path.py==8.2.1 # via edx-enterprise
pbr==5.4.0 # via edx-enterprise, stevedore
pillow==6.1.0 # via edx-enterprise
psutil==1.2.1 # via edx-django-utils, edx-drf-extensions, edx-enterprise
pycparser==2.19 # via cffi, edx-enterprise
pycryptodomex==3.4.7 # via edx-enterprise, pyjwkest
pyjwkest==1.3.2 # via edx-drf-extensions, edx-enterprise
pyjwt==1.5.2 # via djangorestframework-jwt, edx-enterprise, edx-rest-api-client
pymongo==2.9.1 # via edx-enterprise, edx-opaque-keys
python-dateutil==2.4.0 # via edx-drf-extensions, edx-enterprise
python-slugify==1.2.6 # via code-annotations, edx-enterprise
pytz==2019.1 # via celery, django, edx-enterprise, tincan
pyyaml==5.1.1 # via code-annotations, edx-enterprise
requests==2.22.0 # via edx-drf-extensions, edx-enterprise, edx-rest-api-client, pyjwkest, slumber
rest-condition==1.0.3 # via edx-drf-extensions, edx-enterprise
rules==2.0.1 # via edx-enterprise
semantic-version==2.6.0 # via edx-drf-extensions, edx-enterprise
shortuuid==0.5.0 # via edx-django-oauth2-provider, edx-enterprise
six==1.11.0 # via bleach, cryptography, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, html5lib, pyjwkest, python-dateutil, stevedore
slumber==0.7.1 # via edx-enterprise, edx-rest-api-client
stevedore==1.30.1 # via code-annotations, edx-enterprise, edx-opaque-keys
testfixtures==6.10.0 # via edx-enterprise
tincan==0.0.5 # via edx-enterprise
unicodecsv==0.14.1 # via edx-enterprise
unidecode==1.1.1 # via edx-enterprise, python-slugify
urllib3==1.23 # via edx-enterprise, requests
webencodings==0.5.1 # via edx-enterprise, html5lib
+ pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o bad39.txt bad.in
Could not find a version that satisfies the requirement djangorestframework-oauth==1.1.1 (from -r bad.in (line 1)) (from versions: 0.1.0, 0.2.0, 1.0.0, 1.0.1, 1.1.0)
Traceback (most recent call last):
File "/usr/local/virtualenvs/tmp-54acc534662972d2/bin/pip-compile", line 10, in <module>
sys.exit(cli())
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/click/decorators.py", line 17, in new_func
return f(get_current_context(), *args, **kwargs)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/scripts/compile.py", line 350, in cli
results = resolver.resolve(max_rounds=max_rounds)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/resolver.py", line 164, in resolve
has_changed, best_matches = self._resolve_one_round()
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/resolver.py", line 259, in _resolve_one_round
their_constraints.extend(self._iter_dependencies(best_match))
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/resolver.py", line 354, in _iter_dependencies
dependencies = self.repository.get_dependencies(ireq)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/repositories/pypi.py", line 270, in get_dependencies
download_dir, ireq, wheel_cache
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/repositories/pypi.py", line 229, in resolve_reqs
results = resolver._resolve_one(reqset, ireq)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip/_internal/resolve.py", line 294, in _resolve_one
abstract_dist = self._get_abstract_dist_for(req_to_install)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip/_internal/resolve.py", line 242, in _get_abstract_dist_for
self.require_hashes
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip/_internal/operations/prepare.py", line 282, in prepare_linked_requirement
req.populate_link(finder, upgrade_allowed, require_hashes)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip/_internal/req/req_install.py", line 198, in populate_link
self.link = finder.find_requirement(self, upgrade)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip/_internal/index.py", line 792, in find_requirement
'No matching distribution found for %s' % req
pip._internal.exceptions.DistributionNotFound: No matching distribution found for djangorestframework-oauth==1.1.1 (from -r bad.in (line 1))
+ diff good38.txt good39.txt
5c5
< # pip-compile --no-emit-trusted-host --no-index --output-file=good38.txt good.in
---
# pip-compile --no-emit-trusted-host --no-index --output-file=good39.txt good.in
41c41
< edx-opaque-keys[django]==1.0.1 # via edx-drf-extensions, edx-enterprise
---
edx-opaque-keys==1.0.1 # via edx-drf-extensions, edx-enterprise
$ pip --version
pip 19.1.1 from /usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip (python 3.7)
|
pip._internal.exceptions.DistributionNotFound
|
def constraints(self):
return set(
self._group_constraints(
chain(
sorted(self.our_constraints, key=str),
sorted(self.their_constraints, key=str),
)
)
)
|
def constraints(self):
return set(
self._group_constraints(chain(self.our_constraints, self.their_constraints))
)
|
https://github.com/jazzband/pip-tools/issues/851
|
$ cat good.in
-e git+https://github.com/edx/django-rest-framework-oauth.git@0a43e8525f1e3048efe4bc70c03de308a277197c#egg=djangorestframework-oauth==1.1.1
edx-enterprise==1.7.2
$ cat bad.in
git+https://github.com/edx/django-rest-framework-oauth.git@0a43e8525f1e3048efe4bc70c03de308a277197c#egg=djangorestframework-oauth==1.1.1
edx-enterprise==1.7.2
$ cat doit.sh
rm -f *.txt
pip install pip-tools==3.8.0
pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o good38.txt good.in
pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o bad38.txt bad.in
pip install pip-tools==3.9.0
pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o good39.txt good.in
pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o bad39.txt bad.in
diff good*.txt
$ sh -x doit.sh
+ rm -f '*.txt'
+ pip install pip-tools==3.8.0
Collecting pip-tools==3.8.0
Using cached https://files.pythonhosted.org/packages/1c/a1/fc5d034448ca3ab0a8d8b97a064db05fcce6ac8d197bc1fd55e8daa84299/pip_tools-3.8.0-py2.py3-none-any.whl
Requirement already satisfied: six in /usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages (from pip-tools==3.8.0) (1.12.0)
Requirement already satisfied: click>=6 in /usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages (from pip-tools==3.8.0) (7.0)
Installing collected packages: pip-tools
Successfully installed pip-tools-3.8.0
+ pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o good38.txt good.in
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --no-emit-trusted-host --no-index --output-file=good38.txt good.in
#
-e git+https://github.com/edx/django-rest-framework-oauth.git@0a43e8525f1e3048efe4bc70c03de308a277197c#egg=djangorestframework-oauth==1.1.1
amqp==1.4.9 # via edx-enterprise, kombu
aniso8601==7.0.0 # via edx-enterprise, tincan
anyjson==0.3.3 # via edx-enterprise, kombu
asn1crypto==0.24.0 # via cryptography, edx-enterprise
billiard==3.3.0.23 # via celery, edx-enterprise
bleach==2.1.4 # via edx-enterprise
celery==3.1.25 # via edx-enterprise
certifi==2019.6.16 # via edx-enterprise, requests
cffi==1.12.3 # via cryptography, edx-enterprise
chardet==3.0.4 # via edx-enterprise, requests
click==7.0 # via code-annotations, edx-enterprise
code-annotations==0.3.1 # via edx-enterprise
cryptography==2.7 # via django-fernet-fields, edx-enterprise
defusedxml==0.5.0 # via djangorestframework-xml, edx-enterprise
django-config-models==1.0.1 # via edx-enterprise
django-countries==4.6.1 # via edx-enterprise
django-crum==0.7.3 # via edx-enterprise, edx-rbac
django-fernet-fields==0.6 # via edx-enterprise
django-filter==1.0.4 # via edx-enterprise
django-ipware==2.1.0 # via edx-enterprise
django-model-utils==3.0.0 # via edx-enterprise, edx-rbac
django-multi-email-field==0.5.1 # via edx-enterprise
django-object-actions==0.10.0 # via edx-enterprise
django-simple-history==2.7.0 # via edx-enterprise
django-waffle==0.12.0 # via edx-django-utils, edx-drf-extensions, edx-enterprise
django==1.11.22 # via code-annotations, django-config-models, django-crum, django-fernet-fields, django-model-utils, django-multi-email-field, edx-django-oauth2-provider, edx-django-utils, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, jsonfield, rest-condition
djangorestframework-jwt==1.11.0 # via edx-drf-extensions, edx-enterprise
djangorestframework-xml==1.3.0 # via edx-enterprise
djangorestframework==3.7.7 # via django-config-models, edx-drf-extensions, edx-enterprise, rest-condition
edx-django-oauth2-provider==1.3.5 # via edx-enterprise
edx-django-utils==2.0.0 # via edx-drf-extensions, edx-enterprise
edx-drf-extensions==2.3.1 # via edx-enterprise, edx-rbac
edx-enterprise==1.7.2
edx-opaque-keys[django]==1.0.1 # via edx-drf-extensions, edx-enterprise
edx-rbac==0.2.1 # via edx-enterprise
edx-rest-api-client==1.9.2 # via edx-enterprise
enum34==1.1.6 # via edx-enterprise
future==0.17.1 # via edx-enterprise, pyjwkest
html5lib==1.0.1 # via bleach, edx-enterprise
idna==2.8 # via edx-enterprise, requests
ipaddress==1.0.22 # via edx-enterprise
jinja2==2.10.1 # via code-annotations, edx-enterprise
jsondiff==1.1.1 # via edx-enterprise
jsonfield==2.0.2 # via edx-enterprise
kombu==3.0.37 # via celery, edx-enterprise
markupsafe==1.1.1 # via edx-enterprise, jinja2
newrelic==4.20.1.121 # via edx-django-utils, edx-enterprise
path.py==8.2.1 # via edx-enterprise
pbr==5.4.0 # via edx-enterprise, stevedore
pillow==6.1.0 # via edx-enterprise
psutil==1.2.1 # via edx-django-utils, edx-drf-extensions, edx-enterprise
pycparser==2.19 # via cffi, edx-enterprise
pycryptodomex==3.4.7 # via edx-enterprise, pyjwkest
pyjwkest==1.3.2 # via edx-drf-extensions, edx-enterprise
pyjwt==1.5.2 # via djangorestframework-jwt, edx-enterprise, edx-rest-api-client
pymongo==2.9.1 # via edx-enterprise, edx-opaque-keys
python-dateutil==2.4.0 # via edx-drf-extensions, edx-enterprise
python-slugify==1.2.6 # via code-annotations, edx-enterprise
pytz==2019.1 # via celery, django, edx-enterprise, tincan
pyyaml==5.1.1 # via code-annotations, edx-enterprise
requests==2.22.0 # via edx-drf-extensions, edx-enterprise, edx-rest-api-client, pyjwkest, slumber
rest-condition==1.0.3 # via edx-drf-extensions, edx-enterprise
rules==2.0.1 # via edx-enterprise
semantic-version==2.6.0 # via edx-drf-extensions, edx-enterprise
shortuuid==0.5.0 # via edx-django-oauth2-provider, edx-enterprise
six==1.11.0 # via bleach, cryptography, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, html5lib, pyjwkest, python-dateutil, stevedore
slumber==0.7.1 # via edx-enterprise, edx-rest-api-client
stevedore==1.30.1 # via code-annotations, edx-enterprise, edx-opaque-keys
testfixtures==6.10.0 # via edx-enterprise
tincan==0.0.5 # via edx-enterprise
unicodecsv==0.14.1 # via edx-enterprise
unidecode==1.1.1 # via edx-enterprise, python-slugify
urllib3==1.23 # via edx-enterprise, requests
webencodings==0.5.1 # via edx-enterprise, html5lib
+ pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o bad38.txt bad.in
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --no-emit-trusted-host --no-index --output-file=bad38.txt bad.in
#
amqp==1.4.9 # via edx-enterprise, kombu
aniso8601==7.0.0 # via edx-enterprise, tincan
anyjson==0.3.3 # via edx-enterprise, kombu
asn1crypto==0.24.0 # via cryptography, edx-enterprise
billiard==3.3.0.23 # via celery, edx-enterprise
bleach==2.1.4 # via edx-enterprise
celery==3.1.25 # via edx-enterprise
certifi==2019.6.16 # via edx-enterprise, requests
cffi==1.12.3 # via cryptography, edx-enterprise
chardet==3.0.4 # via edx-enterprise, requests
click==7.0 # via code-annotations, edx-enterprise
code-annotations==0.3.1 # via edx-enterprise
cryptography==2.7 # via django-fernet-fields, edx-enterprise
defusedxml==0.5.0 # via djangorestframework-xml, edx-enterprise
django-config-models==1.0.1 # via edx-enterprise
django-countries==4.6.1 # via edx-enterprise
django-crum==0.7.3 # via edx-enterprise, edx-rbac
django-fernet-fields==0.6 # via edx-enterprise
django-filter==1.0.4 # via edx-enterprise
django-ipware==2.1.0 # via edx-enterprise
django-model-utils==3.0.0 # via edx-enterprise, edx-rbac
django-multi-email-field==0.5.1 # via edx-enterprise
django-object-actions==0.10.0 # via edx-enterprise
django-simple-history==2.7.0 # via edx-enterprise
django-waffle==0.12.0 # via edx-django-utils, edx-drf-extensions, edx-enterprise
django==1.11.22 # via code-annotations, django-config-models, django-crum, django-fernet-fields, django-model-utils, django-multi-email-field, edx-django-oauth2-provider, edx-django-utils, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, jsonfield, rest-condition
djangorestframework-jwt==1.11.0 # via edx-drf-extensions, edx-enterprise
git+https://github.com/edx/django-rest-framework-oauth.git@0a43e8525f1e3048efe4bc70c03de308a277197c#egg=djangorestframework-oauth==1.1.1
djangorestframework-xml==1.3.0 # via edx-enterprise
djangorestframework==3.7.7 # via django-config-models, edx-drf-extensions, edx-enterprise, rest-condition
edx-django-oauth2-provider==1.3.5 # via edx-enterprise
edx-django-utils==2.0.0 # via edx-drf-extensions, edx-enterprise
edx-drf-extensions==2.3.1 # via edx-enterprise, edx-rbac
edx-enterprise==1.7.2
edx-opaque-keys[django]==1.0.1 # via edx-drf-extensions, edx-enterprise
edx-rbac==0.2.1 # via edx-enterprise
edx-rest-api-client==1.9.2 # via edx-enterprise
enum34==1.1.6 # via edx-enterprise
future==0.17.1 # via edx-enterprise, pyjwkest
html5lib==1.0.1 # via bleach, edx-enterprise
idna==2.8 # via edx-enterprise, requests
ipaddress==1.0.22 # via edx-enterprise
jinja2==2.10.1 # via code-annotations, edx-enterprise
jsondiff==1.1.1 # via edx-enterprise
jsonfield==2.0.2 # via edx-enterprise
kombu==3.0.37 # via celery, edx-enterprise
markupsafe==1.1.1 # via edx-enterprise, jinja2
newrelic==4.20.1.121 # via edx-django-utils, edx-enterprise
path.py==8.2.1 # via edx-enterprise
pbr==5.4.0 # via edx-enterprise, stevedore
pillow==6.1.0 # via edx-enterprise
psutil==1.2.1 # via edx-django-utils, edx-drf-extensions, edx-enterprise
pycparser==2.19 # via cffi, edx-enterprise
pycryptodomex==3.4.7 # via edx-enterprise, pyjwkest
pyjwkest==1.3.2 # via edx-drf-extensions, edx-enterprise
pyjwt==1.5.2 # via djangorestframework-jwt, edx-enterprise, edx-rest-api-client
pymongo==2.9.1 # via edx-enterprise, edx-opaque-keys
python-dateutil==2.4.0 # via edx-drf-extensions, edx-enterprise
python-slugify==1.2.6 # via code-annotations, edx-enterprise
pytz==2019.1 # via celery, django, edx-enterprise, tincan
pyyaml==5.1.1 # via code-annotations, edx-enterprise
requests==2.22.0 # via edx-drf-extensions, edx-enterprise, edx-rest-api-client, pyjwkest, slumber
rest-condition==1.0.3 # via edx-drf-extensions, edx-enterprise
rules==2.0.1 # via edx-enterprise
semantic-version==2.6.0 # via edx-drf-extensions, edx-enterprise
shortuuid==0.5.0 # via edx-django-oauth2-provider, edx-enterprise
six==1.11.0 # via bleach, cryptography, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, html5lib, pyjwkest, python-dateutil, stevedore
slumber==0.7.1 # via edx-enterprise, edx-rest-api-client
stevedore==1.30.1 # via code-annotations, edx-enterprise, edx-opaque-keys
testfixtures==6.10.0 # via edx-enterprise
tincan==0.0.5 # via edx-enterprise
unicodecsv==0.14.1 # via edx-enterprise
unidecode==1.1.1 # via edx-enterprise, python-slugify
urllib3==1.23 # via edx-enterprise, requests
webencodings==0.5.1 # via edx-enterprise, html5lib
+ pip install pip-tools==3.9.0
Collecting pip-tools==3.9.0
Using cached https://files.pythonhosted.org/packages/9e/57/f793afe4057a90f072fbe9ecf20599a6b141e4ba06d0dacb0b8ab3722aa6/pip_tools-3.9.0-py2.py3-none-any.whl
Requirement already satisfied: click>=6 in /usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages (from pip-tools==3.9.0) (7.0)
Requirement already satisfied: six in /usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages (from pip-tools==3.9.0) (1.12.0)
Installing collected packages: pip-tools
Found existing installation: pip-tools 3.8.0
Uninstalling pip-tools-3.8.0:
Successfully uninstalled pip-tools-3.8.0
Successfully installed pip-tools-3.9.0
+ pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o good39.txt good.in
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --no-emit-trusted-host --no-index --output-file=good39.txt good.in
#
-e git+https://github.com/edx/django-rest-framework-oauth.git@0a43e8525f1e3048efe4bc70c03de308a277197c#egg=djangorestframework-oauth==1.1.1
amqp==1.4.9 # via edx-enterprise, kombu
aniso8601==7.0.0 # via edx-enterprise, tincan
anyjson==0.3.3 # via edx-enterprise, kombu
asn1crypto==0.24.0 # via cryptography, edx-enterprise
billiard==3.3.0.23 # via celery, edx-enterprise
bleach==2.1.4 # via edx-enterprise
celery==3.1.25 # via edx-enterprise
certifi==2019.6.16 # via edx-enterprise, requests
cffi==1.12.3 # via cryptography, edx-enterprise
chardet==3.0.4 # via edx-enterprise, requests
click==7.0 # via code-annotations, edx-enterprise
code-annotations==0.3.1 # via edx-enterprise
cryptography==2.7 # via django-fernet-fields, edx-enterprise
defusedxml==0.5.0 # via djangorestframework-xml, edx-enterprise
django-config-models==1.0.1 # via edx-enterprise
django-countries==4.6.1 # via edx-enterprise
django-crum==0.7.3 # via edx-enterprise, edx-rbac
django-fernet-fields==0.6 # via edx-enterprise
django-filter==1.0.4 # via edx-enterprise
django-ipware==2.1.0 # via edx-enterprise
django-model-utils==3.0.0 # via edx-enterprise, edx-rbac
django-multi-email-field==0.5.1 # via edx-enterprise
django-object-actions==0.10.0 # via edx-enterprise
django-simple-history==2.7.0 # via edx-enterprise
django-waffle==0.12.0 # via edx-django-utils, edx-drf-extensions, edx-enterprise
django==1.11.22 # via code-annotations, django-config-models, django-crum, django-fernet-fields, django-model-utils, django-multi-email-field, edx-django-oauth2-provider, edx-django-utils, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, jsonfield, rest-condition
djangorestframework-jwt==1.11.0 # via edx-drf-extensions, edx-enterprise
djangorestframework-xml==1.3.0 # via edx-enterprise
djangorestframework==3.7.7 # via django-config-models, edx-drf-extensions, edx-enterprise, rest-condition
edx-django-oauth2-provider==1.3.5 # via edx-enterprise
edx-django-utils==2.0.0 # via edx-drf-extensions, edx-enterprise
edx-drf-extensions==2.3.1 # via edx-enterprise, edx-rbac
edx-enterprise==1.7.2
edx-opaque-keys==1.0.1 # via edx-drf-extensions, edx-enterprise
edx-rbac==0.2.1 # via edx-enterprise
edx-rest-api-client==1.9.2 # via edx-enterprise
enum34==1.1.6 # via edx-enterprise
future==0.17.1 # via edx-enterprise, pyjwkest
html5lib==1.0.1 # via bleach, edx-enterprise
idna==2.8 # via edx-enterprise, requests
ipaddress==1.0.22 # via edx-enterprise
jinja2==2.10.1 # via code-annotations, edx-enterprise
jsondiff==1.1.1 # via edx-enterprise
jsonfield==2.0.2 # via edx-enterprise
kombu==3.0.37 # via celery, edx-enterprise
markupsafe==1.1.1 # via edx-enterprise, jinja2
newrelic==4.20.1.121 # via edx-django-utils, edx-enterprise
path.py==8.2.1 # via edx-enterprise
pbr==5.4.0 # via edx-enterprise, stevedore
pillow==6.1.0 # via edx-enterprise
psutil==1.2.1 # via edx-django-utils, edx-drf-extensions, edx-enterprise
pycparser==2.19 # via cffi, edx-enterprise
pycryptodomex==3.4.7 # via edx-enterprise, pyjwkest
pyjwkest==1.3.2 # via edx-drf-extensions, edx-enterprise
pyjwt==1.5.2 # via djangorestframework-jwt, edx-enterprise, edx-rest-api-client
pymongo==2.9.1 # via edx-enterprise, edx-opaque-keys
python-dateutil==2.4.0 # via edx-drf-extensions, edx-enterprise
python-slugify==1.2.6 # via code-annotations, edx-enterprise
pytz==2019.1 # via celery, django, edx-enterprise, tincan
pyyaml==5.1.1 # via code-annotations, edx-enterprise
requests==2.22.0 # via edx-drf-extensions, edx-enterprise, edx-rest-api-client, pyjwkest, slumber
rest-condition==1.0.3 # via edx-drf-extensions, edx-enterprise
rules==2.0.1 # via edx-enterprise
semantic-version==2.6.0 # via edx-drf-extensions, edx-enterprise
shortuuid==0.5.0 # via edx-django-oauth2-provider, edx-enterprise
six==1.11.0 # via bleach, cryptography, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, html5lib, pyjwkest, python-dateutil, stevedore
slumber==0.7.1 # via edx-enterprise, edx-rest-api-client
stevedore==1.30.1 # via code-annotations, edx-enterprise, edx-opaque-keys
testfixtures==6.10.0 # via edx-enterprise
tincan==0.0.5 # via edx-enterprise
unicodecsv==0.14.1 # via edx-enterprise
unidecode==1.1.1 # via edx-enterprise, python-slugify
urllib3==1.23 # via edx-enterprise, requests
webencodings==0.5.1 # via edx-enterprise, html5lib
+ pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o bad39.txt bad.in
Could not find a version that satisfies the requirement djangorestframework-oauth==1.1.1 (from -r bad.in (line 1)) (from versions: 0.1.0, 0.2.0, 1.0.0, 1.0.1, 1.1.0)
Traceback (most recent call last):
File "/usr/local/virtualenvs/tmp-54acc534662972d2/bin/pip-compile", line 10, in <module>
sys.exit(cli())
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/click/decorators.py", line 17, in new_func
return f(get_current_context(), *args, **kwargs)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/scripts/compile.py", line 350, in cli
results = resolver.resolve(max_rounds=max_rounds)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/resolver.py", line 164, in resolve
has_changed, best_matches = self._resolve_one_round()
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/resolver.py", line 259, in _resolve_one_round
their_constraints.extend(self._iter_dependencies(best_match))
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/resolver.py", line 354, in _iter_dependencies
dependencies = self.repository.get_dependencies(ireq)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/repositories/pypi.py", line 270, in get_dependencies
download_dir, ireq, wheel_cache
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/repositories/pypi.py", line 229, in resolve_reqs
results = resolver._resolve_one(reqset, ireq)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip/_internal/resolve.py", line 294, in _resolve_one
abstract_dist = self._get_abstract_dist_for(req_to_install)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip/_internal/resolve.py", line 242, in _get_abstract_dist_for
self.require_hashes
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip/_internal/operations/prepare.py", line 282, in prepare_linked_requirement
req.populate_link(finder, upgrade_allowed, require_hashes)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip/_internal/req/req_install.py", line 198, in populate_link
self.link = finder.find_requirement(self, upgrade)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip/_internal/index.py", line 792, in find_requirement
'No matching distribution found for %s' % req
pip._internal.exceptions.DistributionNotFound: No matching distribution found for djangorestframework-oauth==1.1.1 (from -r bad.in (line 1))
+ diff good38.txt good39.txt
5c5
< # pip-compile --no-emit-trusted-host --no-index --output-file=good38.txt good.in
---
# pip-compile --no-emit-trusted-host --no-index --output-file=good39.txt good.in
41c41
< edx-opaque-keys[django]==1.0.1 # via edx-drf-extensions, edx-enterprise
---
edx-opaque-keys==1.0.1 # via edx-drf-extensions, edx-enterprise
$ pip --version
pip 19.1.1 from /usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip (python 3.7)
|
pip._internal.exceptions.DistributionNotFound
|
def _resolve_one_round(self):
"""
Resolves one level of the current constraints, by finding the best
match for each package in the repository and adding all requirements
for those best package versions. Some of these constraints may be new
or updated.
Returns whether new constraints appeared in this round. If no
constraints were added or changed, this indicates a stable
configuration.
"""
# Sort this list for readability of terminal output
constraints = sorted(self.constraints, key=key_from_ireq)
log.debug("Current constraints:")
for constraint in constraints:
log.debug(" {}".format(constraint))
log.debug("")
log.debug("Finding the best candidates:")
best_matches = {self.get_best_match(ireq) for ireq in constraints}
# Find the new set of secondary dependencies
log.debug("")
log.debug("Finding secondary dependencies:")
their_constraints = []
for best_match in best_matches:
their_constraints.extend(self._iter_dependencies(best_match))
# Grouping constraints to make clean diff between rounds
theirs = set(self._group_constraints(sorted(their_constraints, key=str)))
# NOTE: We need to compare RequirementSummary objects, since
# InstallRequirement does not define equality
diff = {RequirementSummary(t) for t in theirs} - {
RequirementSummary(t) for t in self.their_constraints
}
removed = {RequirementSummary(t) for t in self.their_constraints} - {
RequirementSummary(t) for t in theirs
}
has_changed = len(diff) > 0 or len(removed) > 0
if has_changed:
log.debug("")
log.debug("New dependencies found in this round:")
for new_dependency in sorted(diff, key=lambda req: key_from_req(req.req)):
log.debug(" adding {}".format(new_dependency))
log.debug("Removed dependencies in this round:")
for removed_dependency in sorted(
removed, key=lambda req: key_from_req(req.req)
):
log.debug(" removing {}".format(removed_dependency))
# Store the last round's results in the their_constraints
self.their_constraints = theirs
return has_changed, best_matches
|
def _resolve_one_round(self):
"""
Resolves one level of the current constraints, by finding the best
match for each package in the repository and adding all requirements
for those best package versions. Some of these constraints may be new
or updated.
Returns whether new constraints appeared in this round. If no
constraints were added or changed, this indicates a stable
configuration.
"""
# Sort this list for readability of terminal output
constraints = sorted(self.constraints, key=key_from_ireq)
log.debug("Current constraints:")
for constraint in constraints:
log.debug(" {}".format(constraint))
log.debug("")
log.debug("Finding the best candidates:")
best_matches = {self.get_best_match(ireq) for ireq in constraints}
# Find the new set of secondary dependencies
log.debug("")
log.debug("Finding secondary dependencies:")
their_constraints = []
for best_match in best_matches:
their_constraints.extend(self._iter_dependencies(best_match))
# Grouping constraints to make clean diff between rounds
theirs = set(self._group_constraints(their_constraints))
# NOTE: We need to compare RequirementSummary objects, since
# InstallRequirement does not define equality
diff = {RequirementSummary(t) for t in theirs} - {
RequirementSummary(t) for t in self.their_constraints
}
removed = {RequirementSummary(t) for t in self.their_constraints} - {
RequirementSummary(t) for t in theirs
}
has_changed = len(diff) > 0 or len(removed) > 0
if has_changed:
log.debug("")
log.debug("New dependencies found in this round:")
for new_dependency in sorted(diff, key=lambda req: key_from_req(req.req)):
log.debug(" adding {}".format(new_dependency))
log.debug("Removed dependencies in this round:")
for removed_dependency in sorted(
removed, key=lambda req: key_from_req(req.req)
):
log.debug(" removing {}".format(removed_dependency))
# Store the last round's results in the their_constraints
self.their_constraints = theirs
return has_changed, best_matches
|
https://github.com/jazzband/pip-tools/issues/851
|
$ cat good.in
-e git+https://github.com/edx/django-rest-framework-oauth.git@0a43e8525f1e3048efe4bc70c03de308a277197c#egg=djangorestframework-oauth==1.1.1
edx-enterprise==1.7.2
$ cat bad.in
git+https://github.com/edx/django-rest-framework-oauth.git@0a43e8525f1e3048efe4bc70c03de308a277197c#egg=djangorestframework-oauth==1.1.1
edx-enterprise==1.7.2
$ cat doit.sh
rm -f *.txt
pip install pip-tools==3.8.0
pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o good38.txt good.in
pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o bad38.txt bad.in
pip install pip-tools==3.9.0
pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o good39.txt good.in
pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o bad39.txt bad.in
diff good*.txt
$ sh -x doit.sh
+ rm -f '*.txt'
+ pip install pip-tools==3.8.0
Collecting pip-tools==3.8.0
Using cached https://files.pythonhosted.org/packages/1c/a1/fc5d034448ca3ab0a8d8b97a064db05fcce6ac8d197bc1fd55e8daa84299/pip_tools-3.8.0-py2.py3-none-any.whl
Requirement already satisfied: six in /usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages (from pip-tools==3.8.0) (1.12.0)
Requirement already satisfied: click>=6 in /usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages (from pip-tools==3.8.0) (7.0)
Installing collected packages: pip-tools
Successfully installed pip-tools-3.8.0
+ pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o good38.txt good.in
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --no-emit-trusted-host --no-index --output-file=good38.txt good.in
#
-e git+https://github.com/edx/django-rest-framework-oauth.git@0a43e8525f1e3048efe4bc70c03de308a277197c#egg=djangorestframework-oauth==1.1.1
amqp==1.4.9 # via edx-enterprise, kombu
aniso8601==7.0.0 # via edx-enterprise, tincan
anyjson==0.3.3 # via edx-enterprise, kombu
asn1crypto==0.24.0 # via cryptography, edx-enterprise
billiard==3.3.0.23 # via celery, edx-enterprise
bleach==2.1.4 # via edx-enterprise
celery==3.1.25 # via edx-enterprise
certifi==2019.6.16 # via edx-enterprise, requests
cffi==1.12.3 # via cryptography, edx-enterprise
chardet==3.0.4 # via edx-enterprise, requests
click==7.0 # via code-annotations, edx-enterprise
code-annotations==0.3.1 # via edx-enterprise
cryptography==2.7 # via django-fernet-fields, edx-enterprise
defusedxml==0.5.0 # via djangorestframework-xml, edx-enterprise
django-config-models==1.0.1 # via edx-enterprise
django-countries==4.6.1 # via edx-enterprise
django-crum==0.7.3 # via edx-enterprise, edx-rbac
django-fernet-fields==0.6 # via edx-enterprise
django-filter==1.0.4 # via edx-enterprise
django-ipware==2.1.0 # via edx-enterprise
django-model-utils==3.0.0 # via edx-enterprise, edx-rbac
django-multi-email-field==0.5.1 # via edx-enterprise
django-object-actions==0.10.0 # via edx-enterprise
django-simple-history==2.7.0 # via edx-enterprise
django-waffle==0.12.0 # via edx-django-utils, edx-drf-extensions, edx-enterprise
django==1.11.22 # via code-annotations, django-config-models, django-crum, django-fernet-fields, django-model-utils, django-multi-email-field, edx-django-oauth2-provider, edx-django-utils, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, jsonfield, rest-condition
djangorestframework-jwt==1.11.0 # via edx-drf-extensions, edx-enterprise
djangorestframework-xml==1.3.0 # via edx-enterprise
djangorestframework==3.7.7 # via django-config-models, edx-drf-extensions, edx-enterprise, rest-condition
edx-django-oauth2-provider==1.3.5 # via edx-enterprise
edx-django-utils==2.0.0 # via edx-drf-extensions, edx-enterprise
edx-drf-extensions==2.3.1 # via edx-enterprise, edx-rbac
edx-enterprise==1.7.2
edx-opaque-keys[django]==1.0.1 # via edx-drf-extensions, edx-enterprise
edx-rbac==0.2.1 # via edx-enterprise
edx-rest-api-client==1.9.2 # via edx-enterprise
enum34==1.1.6 # via edx-enterprise
future==0.17.1 # via edx-enterprise, pyjwkest
html5lib==1.0.1 # via bleach, edx-enterprise
idna==2.8 # via edx-enterprise, requests
ipaddress==1.0.22 # via edx-enterprise
jinja2==2.10.1 # via code-annotations, edx-enterprise
jsondiff==1.1.1 # via edx-enterprise
jsonfield==2.0.2 # via edx-enterprise
kombu==3.0.37 # via celery, edx-enterprise
markupsafe==1.1.1 # via edx-enterprise, jinja2
newrelic==4.20.1.121 # via edx-django-utils, edx-enterprise
path.py==8.2.1 # via edx-enterprise
pbr==5.4.0 # via edx-enterprise, stevedore
pillow==6.1.0 # via edx-enterprise
psutil==1.2.1 # via edx-django-utils, edx-drf-extensions, edx-enterprise
pycparser==2.19 # via cffi, edx-enterprise
pycryptodomex==3.4.7 # via edx-enterprise, pyjwkest
pyjwkest==1.3.2 # via edx-drf-extensions, edx-enterprise
pyjwt==1.5.2 # via djangorestframework-jwt, edx-enterprise, edx-rest-api-client
pymongo==2.9.1 # via edx-enterprise, edx-opaque-keys
python-dateutil==2.4.0 # via edx-drf-extensions, edx-enterprise
python-slugify==1.2.6 # via code-annotations, edx-enterprise
pytz==2019.1 # via celery, django, edx-enterprise, tincan
pyyaml==5.1.1 # via code-annotations, edx-enterprise
requests==2.22.0 # via edx-drf-extensions, edx-enterprise, edx-rest-api-client, pyjwkest, slumber
rest-condition==1.0.3 # via edx-drf-extensions, edx-enterprise
rules==2.0.1 # via edx-enterprise
semantic-version==2.6.0 # via edx-drf-extensions, edx-enterprise
shortuuid==0.5.0 # via edx-django-oauth2-provider, edx-enterprise
six==1.11.0 # via bleach, cryptography, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, html5lib, pyjwkest, python-dateutil, stevedore
slumber==0.7.1 # via edx-enterprise, edx-rest-api-client
stevedore==1.30.1 # via code-annotations, edx-enterprise, edx-opaque-keys
testfixtures==6.10.0 # via edx-enterprise
tincan==0.0.5 # via edx-enterprise
unicodecsv==0.14.1 # via edx-enterprise
unidecode==1.1.1 # via edx-enterprise, python-slugify
urllib3==1.23 # via edx-enterprise, requests
webencodings==0.5.1 # via edx-enterprise, html5lib
+ pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o bad38.txt bad.in
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --no-emit-trusted-host --no-index --output-file=bad38.txt bad.in
#
amqp==1.4.9 # via edx-enterprise, kombu
aniso8601==7.0.0 # via edx-enterprise, tincan
anyjson==0.3.3 # via edx-enterprise, kombu
asn1crypto==0.24.0 # via cryptography, edx-enterprise
billiard==3.3.0.23 # via celery, edx-enterprise
bleach==2.1.4 # via edx-enterprise
celery==3.1.25 # via edx-enterprise
certifi==2019.6.16 # via edx-enterprise, requests
cffi==1.12.3 # via cryptography, edx-enterprise
chardet==3.0.4 # via edx-enterprise, requests
click==7.0 # via code-annotations, edx-enterprise
code-annotations==0.3.1 # via edx-enterprise
cryptography==2.7 # via django-fernet-fields, edx-enterprise
defusedxml==0.5.0 # via djangorestframework-xml, edx-enterprise
django-config-models==1.0.1 # via edx-enterprise
django-countries==4.6.1 # via edx-enterprise
django-crum==0.7.3 # via edx-enterprise, edx-rbac
django-fernet-fields==0.6 # via edx-enterprise
django-filter==1.0.4 # via edx-enterprise
django-ipware==2.1.0 # via edx-enterprise
django-model-utils==3.0.0 # via edx-enterprise, edx-rbac
django-multi-email-field==0.5.1 # via edx-enterprise
django-object-actions==0.10.0 # via edx-enterprise
django-simple-history==2.7.0 # via edx-enterprise
django-waffle==0.12.0 # via edx-django-utils, edx-drf-extensions, edx-enterprise
django==1.11.22 # via code-annotations, django-config-models, django-crum, django-fernet-fields, django-model-utils, django-multi-email-field, edx-django-oauth2-provider, edx-django-utils, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, jsonfield, rest-condition
djangorestframework-jwt==1.11.0 # via edx-drf-extensions, edx-enterprise
git+https://github.com/edx/django-rest-framework-oauth.git@0a43e8525f1e3048efe4bc70c03de308a277197c#egg=djangorestframework-oauth==1.1.1
djangorestframework-xml==1.3.0 # via edx-enterprise
djangorestframework==3.7.7 # via django-config-models, edx-drf-extensions, edx-enterprise, rest-condition
edx-django-oauth2-provider==1.3.5 # via edx-enterprise
edx-django-utils==2.0.0 # via edx-drf-extensions, edx-enterprise
edx-drf-extensions==2.3.1 # via edx-enterprise, edx-rbac
edx-enterprise==1.7.2
edx-opaque-keys[django]==1.0.1 # via edx-drf-extensions, edx-enterprise
edx-rbac==0.2.1 # via edx-enterprise
edx-rest-api-client==1.9.2 # via edx-enterprise
enum34==1.1.6 # via edx-enterprise
future==0.17.1 # via edx-enterprise, pyjwkest
html5lib==1.0.1 # via bleach, edx-enterprise
idna==2.8 # via edx-enterprise, requests
ipaddress==1.0.22 # via edx-enterprise
jinja2==2.10.1 # via code-annotations, edx-enterprise
jsondiff==1.1.1 # via edx-enterprise
jsonfield==2.0.2 # via edx-enterprise
kombu==3.0.37 # via celery, edx-enterprise
markupsafe==1.1.1 # via edx-enterprise, jinja2
newrelic==4.20.1.121 # via edx-django-utils, edx-enterprise
path.py==8.2.1 # via edx-enterprise
pbr==5.4.0 # via edx-enterprise, stevedore
pillow==6.1.0 # via edx-enterprise
psutil==1.2.1 # via edx-django-utils, edx-drf-extensions, edx-enterprise
pycparser==2.19 # via cffi, edx-enterprise
pycryptodomex==3.4.7 # via edx-enterprise, pyjwkest
pyjwkest==1.3.2 # via edx-drf-extensions, edx-enterprise
pyjwt==1.5.2 # via djangorestframework-jwt, edx-enterprise, edx-rest-api-client
pymongo==2.9.1 # via edx-enterprise, edx-opaque-keys
python-dateutil==2.4.0 # via edx-drf-extensions, edx-enterprise
python-slugify==1.2.6 # via code-annotations, edx-enterprise
pytz==2019.1 # via celery, django, edx-enterprise, tincan
pyyaml==5.1.1 # via code-annotations, edx-enterprise
requests==2.22.0 # via edx-drf-extensions, edx-enterprise, edx-rest-api-client, pyjwkest, slumber
rest-condition==1.0.3 # via edx-drf-extensions, edx-enterprise
rules==2.0.1 # via edx-enterprise
semantic-version==2.6.0 # via edx-drf-extensions, edx-enterprise
shortuuid==0.5.0 # via edx-django-oauth2-provider, edx-enterprise
six==1.11.0 # via bleach, cryptography, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, html5lib, pyjwkest, python-dateutil, stevedore
slumber==0.7.1 # via edx-enterprise, edx-rest-api-client
stevedore==1.30.1 # via code-annotations, edx-enterprise, edx-opaque-keys
testfixtures==6.10.0 # via edx-enterprise
tincan==0.0.5 # via edx-enterprise
unicodecsv==0.14.1 # via edx-enterprise
unidecode==1.1.1 # via edx-enterprise, python-slugify
urllib3==1.23 # via edx-enterprise, requests
webencodings==0.5.1 # via edx-enterprise, html5lib
+ pip install pip-tools==3.9.0
Collecting pip-tools==3.9.0
Using cached https://files.pythonhosted.org/packages/9e/57/f793afe4057a90f072fbe9ecf20599a6b141e4ba06d0dacb0b8ab3722aa6/pip_tools-3.9.0-py2.py3-none-any.whl
Requirement already satisfied: click>=6 in /usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages (from pip-tools==3.9.0) (7.0)
Requirement already satisfied: six in /usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages (from pip-tools==3.9.0) (1.12.0)
Installing collected packages: pip-tools
Found existing installation: pip-tools 3.8.0
Uninstalling pip-tools-3.8.0:
Successfully uninstalled pip-tools-3.8.0
Successfully installed pip-tools-3.9.0
+ pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o good39.txt good.in
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --no-emit-trusted-host --no-index --output-file=good39.txt good.in
#
-e git+https://github.com/edx/django-rest-framework-oauth.git@0a43e8525f1e3048efe4bc70c03de308a277197c#egg=djangorestframework-oauth==1.1.1
amqp==1.4.9 # via edx-enterprise, kombu
aniso8601==7.0.0 # via edx-enterprise, tincan
anyjson==0.3.3 # via edx-enterprise, kombu
asn1crypto==0.24.0 # via cryptography, edx-enterprise
billiard==3.3.0.23 # via celery, edx-enterprise
bleach==2.1.4 # via edx-enterprise
celery==3.1.25 # via edx-enterprise
certifi==2019.6.16 # via edx-enterprise, requests
cffi==1.12.3 # via cryptography, edx-enterprise
chardet==3.0.4 # via edx-enterprise, requests
click==7.0 # via code-annotations, edx-enterprise
code-annotations==0.3.1 # via edx-enterprise
cryptography==2.7 # via django-fernet-fields, edx-enterprise
defusedxml==0.5.0 # via djangorestframework-xml, edx-enterprise
django-config-models==1.0.1 # via edx-enterprise
django-countries==4.6.1 # via edx-enterprise
django-crum==0.7.3 # via edx-enterprise, edx-rbac
django-fernet-fields==0.6 # via edx-enterprise
django-filter==1.0.4 # via edx-enterprise
django-ipware==2.1.0 # via edx-enterprise
django-model-utils==3.0.0 # via edx-enterprise, edx-rbac
django-multi-email-field==0.5.1 # via edx-enterprise
django-object-actions==0.10.0 # via edx-enterprise
django-simple-history==2.7.0 # via edx-enterprise
django-waffle==0.12.0 # via edx-django-utils, edx-drf-extensions, edx-enterprise
django==1.11.22 # via code-annotations, django-config-models, django-crum, django-fernet-fields, django-model-utils, django-multi-email-field, edx-django-oauth2-provider, edx-django-utils, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, jsonfield, rest-condition
djangorestframework-jwt==1.11.0 # via edx-drf-extensions, edx-enterprise
djangorestframework-xml==1.3.0 # via edx-enterprise
djangorestframework==3.7.7 # via django-config-models, edx-drf-extensions, edx-enterprise, rest-condition
edx-django-oauth2-provider==1.3.5 # via edx-enterprise
edx-django-utils==2.0.0 # via edx-drf-extensions, edx-enterprise
edx-drf-extensions==2.3.1 # via edx-enterprise, edx-rbac
edx-enterprise==1.7.2
edx-opaque-keys==1.0.1 # via edx-drf-extensions, edx-enterprise
edx-rbac==0.2.1 # via edx-enterprise
edx-rest-api-client==1.9.2 # via edx-enterprise
enum34==1.1.6 # via edx-enterprise
future==0.17.1 # via edx-enterprise, pyjwkest
html5lib==1.0.1 # via bleach, edx-enterprise
idna==2.8 # via edx-enterprise, requests
ipaddress==1.0.22 # via edx-enterprise
jinja2==2.10.1 # via code-annotations, edx-enterprise
jsondiff==1.1.1 # via edx-enterprise
jsonfield==2.0.2 # via edx-enterprise
kombu==3.0.37 # via celery, edx-enterprise
markupsafe==1.1.1 # via edx-enterprise, jinja2
newrelic==4.20.1.121 # via edx-django-utils, edx-enterprise
path.py==8.2.1 # via edx-enterprise
pbr==5.4.0 # via edx-enterprise, stevedore
pillow==6.1.0 # via edx-enterprise
psutil==1.2.1 # via edx-django-utils, edx-drf-extensions, edx-enterprise
pycparser==2.19 # via cffi, edx-enterprise
pycryptodomex==3.4.7 # via edx-enterprise, pyjwkest
pyjwkest==1.3.2 # via edx-drf-extensions, edx-enterprise
pyjwt==1.5.2 # via djangorestframework-jwt, edx-enterprise, edx-rest-api-client
pymongo==2.9.1 # via edx-enterprise, edx-opaque-keys
python-dateutil==2.4.0 # via edx-drf-extensions, edx-enterprise
python-slugify==1.2.6 # via code-annotations, edx-enterprise
pytz==2019.1 # via celery, django, edx-enterprise, tincan
pyyaml==5.1.1 # via code-annotations, edx-enterprise
requests==2.22.0 # via edx-drf-extensions, edx-enterprise, edx-rest-api-client, pyjwkest, slumber
rest-condition==1.0.3 # via edx-drf-extensions, edx-enterprise
rules==2.0.1 # via edx-enterprise
semantic-version==2.6.0 # via edx-drf-extensions, edx-enterprise
shortuuid==0.5.0 # via edx-django-oauth2-provider, edx-enterprise
six==1.11.0 # via bleach, cryptography, edx-drf-extensions, edx-enterprise, edx-opaque-keys, edx-rbac, html5lib, pyjwkest, python-dateutil, stevedore
slumber==0.7.1 # via edx-enterprise, edx-rest-api-client
stevedore==1.30.1 # via code-annotations, edx-enterprise, edx-opaque-keys
testfixtures==6.10.0 # via edx-enterprise
tincan==0.0.5 # via edx-enterprise
unicodecsv==0.14.1 # via edx-enterprise
unidecode==1.1.1 # via edx-enterprise, python-slugify
urllib3==1.23 # via edx-enterprise, requests
webencodings==0.5.1 # via edx-enterprise, html5lib
+ pip-compile --no-emit-trusted-host --no-index --rebuild --upgrade -o bad39.txt bad.in
Could not find a version that satisfies the requirement djangorestframework-oauth==1.1.1 (from -r bad.in (line 1)) (from versions: 0.1.0, 0.2.0, 1.0.0, 1.0.1, 1.1.0)
Traceback (most recent call last):
File "/usr/local/virtualenvs/tmp-54acc534662972d2/bin/pip-compile", line 10, in <module>
sys.exit(cli())
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/click/core.py", line 764, in __call__
return self.main(*args, **kwargs)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/click/core.py", line 717, in main
rv = self.invoke(ctx)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/click/core.py", line 956, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/click/core.py", line 555, in invoke
return callback(*args, **kwargs)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/click/decorators.py", line 17, in new_func
return f(get_current_context(), *args, **kwargs)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/scripts/compile.py", line 350, in cli
results = resolver.resolve(max_rounds=max_rounds)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/resolver.py", line 164, in resolve
has_changed, best_matches = self._resolve_one_round()
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/resolver.py", line 259, in _resolve_one_round
their_constraints.extend(self._iter_dependencies(best_match))
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/resolver.py", line 354, in _iter_dependencies
dependencies = self.repository.get_dependencies(ireq)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/repositories/pypi.py", line 270, in get_dependencies
download_dir, ireq, wheel_cache
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/piptools/repositories/pypi.py", line 229, in resolve_reqs
results = resolver._resolve_one(reqset, ireq)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip/_internal/resolve.py", line 294, in _resolve_one
abstract_dist = self._get_abstract_dist_for(req_to_install)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip/_internal/resolve.py", line 242, in _get_abstract_dist_for
self.require_hashes
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip/_internal/operations/prepare.py", line 282, in prepare_linked_requirement
req.populate_link(finder, upgrade_allowed, require_hashes)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip/_internal/req/req_install.py", line 198, in populate_link
self.link = finder.find_requirement(self, upgrade)
File "/usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip/_internal/index.py", line 792, in find_requirement
'No matching distribution found for %s' % req
pip._internal.exceptions.DistributionNotFound: No matching distribution found for djangorestframework-oauth==1.1.1 (from -r bad.in (line 1))
+ diff good38.txt good39.txt
5c5
< # pip-compile --no-emit-trusted-host --no-index --output-file=good38.txt good.in
---
# pip-compile --no-emit-trusted-host --no-index --output-file=good39.txt good.in
41c41
< edx-opaque-keys[django]==1.0.1 # via edx-drf-extensions, edx-enterprise
---
edx-opaque-keys==1.0.1 # via edx-drf-extensions, edx-enterprise
$ pip --version
pip 19.1.1 from /usr/local/virtualenvs/tmp-54acc534662972d2/lib/python3.7/site-packages/pip (python 3.7)
|
pip._internal.exceptions.DistributionNotFound
|
def get_hashes(self, ireq):
"""
Given a pinned InstallRequire, returns a set of hashes that represent
all of the files for a given requirement. It is not acceptable for an
editable or unpinned requirement to be passed to this function.
"""
if not is_pinned_requirement(ireq):
raise TypeError(
"Expected pinned requirement, not unpinned or editable, got {}".format(ireq)
)
# We need to get all of the candidates that match our current version
# pin, these will represent all of the files that could possibly
# satisfy this constraint.
with self.allow_all_wheels():
all_candidates = self.find_all_candidates(ireq.name)
candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version)
matching_versions = list(
ireq.specifier.filter((candidate.version for candidate in all_candidates))
)
matching_candidates = candidates_by_version[matching_versions[0]]
return {
self._get_file_hash(candidate.location) for candidate in matching_candidates
}
|
def get_hashes(self, ireq):
"""
Given a pinned InstallRequire, returns a set of hashes that represent
all of the files for a given requirement. It is not acceptable for an
editable or unpinned requirement to be passed to this function.
"""
if not is_pinned_requirement(ireq):
raise TypeError(
"Expected pinned requirement, not unpinned or editable, got {}".format(ireq)
)
# We need to get all of the candidates that match our current version
# pin, these will represent all of the files that could possibly
# satisify this constraint.
all_candidates = self.find_all_candidates(ireq.name)
candidates_by_version = lookup_table(all_candidates, key=lambda c: c.version)
matching_versions = list(
ireq.specifier.filter((candidate.version for candidate in all_candidates))
)
matching_candidates = candidates_by_version[matching_versions[0]]
return {
self._get_file_hash(candidate.location) for candidate in matching_candidates
}
|
https://github.com/jazzband/pip-tools/issues/558
|
Collecting python-ldap==2.4.42 (from -r /requirements/dev.txt (line 18))
Downloading python-ldap-2.4.42.tar.gz (297kB)
Complete output from command python setup.py egg_info:
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/tmp/pip-build-32v49evs/python-ldap/setup.py", line 53
print name + ': ' + cfg.get('_ldap', name)
^
SyntaxError: invalid syntax
|
SyntaxError
|
def _group_constraints(self, constraints):
"""
Groups constraints (remember, InstallRequirements!) by their key name,
and combining their SpecifierSets into a single InstallRequirement per
package. For example, given the following constraints:
Django<1.9,>=1.4.2
django~=1.5
Flask~=0.7
This will be combined into a single entry per package:
django~=1.5,<1.9,>=1.4.2
flask~=0.7
"""
for _, ireqs in full_groupby(constraints, key=key_from_ireq):
ireqs = list(ireqs)
editable_ireq = first(ireqs, key=lambda ireq: ireq.editable)
if editable_ireq:
yield editable_ireq # ignore all the other specs: the editable one is the one that counts
continue
ireqs = iter(ireqs)
# deepcopy the accumulator so as to not modify the self.our_constraints invariant
combined_ireq = copy.deepcopy(next(ireqs))
combined_ireq.comes_from = None
for ireq in ireqs:
# NOTE we may be losing some info on dropped reqs here
combined_ireq.req.specifier &= ireq.req.specifier
combined_ireq.constraint &= ireq.constraint
# Return a sorted, de-duped tuple of extras
combined_ireq.extras = tuple(
sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras)))
)
yield combined_ireq
|
def _group_constraints(self, constraints):
"""
Groups constraints (remember, InstallRequirements!) by their key name,
and combining their SpecifierSets into a single InstallRequirement per
package. For example, given the following constraints:
Django<1.9,>=1.4.2
django~=1.5
Flask~=0.7
This will be combined into a single entry per package:
django~=1.5,<1.9,>=1.4.2
flask~=0.7
"""
for _, ireqs in full_groupby(constraints, key=_dep_key):
ireqs = list(ireqs)
editable_ireq = first(ireqs, key=lambda ireq: ireq.editable)
if editable_ireq:
yield editable_ireq # ignore all the other specs: the editable one is the one that counts
continue
ireqs = iter(ireqs)
# deepcopy the accumulator so as to not modify the self.our_constraints invariant
combined_ireq = copy.deepcopy(next(ireqs))
combined_ireq.comes_from = None
for ireq in ireqs:
# NOTE we may be losing some info on dropped reqs here
combined_ireq.req.specifier &= ireq.req.specifier
combined_ireq.constraint &= ireq.constraint
# Return a sorted, de-duped tuple of extras
combined_ireq.extras = tuple(
sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras)))
)
yield combined_ireq
|
https://github.com/jazzband/pip-tools/issues/569
|
$ venv/bin/pip-sync dev-requirements.txt
Traceback (most recent call last):
File "venv/bin/pip-sync", line 11, in <module>
sys.exit(cli())
File "venv/lib64/python3.6/site-packages/click/core.py", line 722, in __call__
return self.main(*args, **kwargs)
File "venv/lib64/python3.6/site-packages/click/core.py", line 697, in main
rv = self.invoke(ctx)
File "venv/lib64/python3.6/site-packages/click/core.py", line 895, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "venv/lib64/python3.6/site-packages/click/core.py", line 535, in invoke
return callback(*args, **kwargs)
File "venv/lib64/python3.6/site-packages/piptools/scripts/sync.py", line 74, in cli
install_flags=install_flags))
File "venv/lib64/python3.6/site-packages/piptools/sync.py", line 159, in sync
for ireq in sorted(to_install):
TypeError: '<' not supported between instances of 'InstallRequirement' and 'InstallRequirement'
|
TypeError
|
def _resolve_one_round(self):
"""
Resolves one level of the current constraints, by finding the best
match for each package in the repository and adding all requirements
for those best package versions. Some of these constraints may be new
or updated.
Returns whether new constraints appeared in this round. If no
constraints were added or changed, this indicates a stable
configuration.
"""
# Sort this list for readability of terminal output
constraints = sorted(self.constraints, key=key_from_ireq)
unsafe_constraints = []
original_constraints = copy.copy(constraints)
if not self.allow_unsafe:
for constraint in original_constraints:
if constraint.name in UNSAFE_PACKAGES:
constraints.remove(constraint)
constraint.req.specifier = None
unsafe_constraints.append(constraint)
log.debug("Current constraints:")
for constraint in constraints:
log.debug(" {}".format(constraint))
log.debug("")
log.debug("Finding the best candidates:")
best_matches = {self.get_best_match(ireq) for ireq in constraints}
# Find the new set of secondary dependencies
log.debug("")
log.debug("Finding secondary dependencies:")
safe_constraints = []
for best_match in best_matches:
for dep in self._iter_dependencies(best_match):
if self.allow_unsafe or dep.name not in UNSAFE_PACKAGES:
safe_constraints.append(dep)
# Grouping constraints to make clean diff between rounds
theirs = set(self._group_constraints(safe_constraints))
# NOTE: We need to compare RequirementSummary objects, since
# InstallRequirement does not define equality
diff = {RequirementSummary(t) for t in theirs} - {
RequirementSummary(t) for t in self.their_constraints
}
removed = {RequirementSummary(t) for t in self.their_constraints} - {
RequirementSummary(t) for t in theirs
}
unsafe = {RequirementSummary(t) for t in unsafe_constraints} - {
RequirementSummary(t) for t in self.unsafe_constraints
}
has_changed = len(diff) > 0 or len(removed) > 0 or len(unsafe) > 0
if has_changed:
log.debug("")
log.debug("New dependencies found in this round:")
for new_dependency in sorted(diff, key=lambda req: key_from_req(req.req)):
log.debug(" adding {}".format(new_dependency))
log.debug("Removed dependencies in this round:")
for removed_dependency in sorted(
removed, key=lambda req: key_from_req(req.req)
):
log.debug(" removing {}".format(removed_dependency))
log.debug("Unsafe dependencies in this round:")
for unsafe_dependency in sorted(unsafe, key=lambda req: key_from_req(req.req)):
log.debug(" remembering unsafe {}".format(unsafe_dependency))
# Store the last round's results in the their_constraints
self.their_constraints = theirs
# Store the last round's unsafe constraints
self.unsafe_constraints = unsafe_constraints
return has_changed, best_matches
|
def _resolve_one_round(self):
"""
Resolves one level of the current constraints, by finding the best
match for each package in the repository and adding all requirements
for those best package versions. Some of these constraints may be new
or updated.
Returns whether new constraints appeared in this round. If no
constraints were added or changed, this indicates a stable
configuration.
"""
# Sort this list for readability of terminal output
constraints = sorted(self.constraints, key=_dep_key)
unsafe_constraints = []
original_constraints = copy.copy(constraints)
if not self.allow_unsafe:
for constraint in original_constraints:
if constraint.name in UNSAFE_PACKAGES:
constraints.remove(constraint)
constraint.req.specifier = None
unsafe_constraints.append(constraint)
log.debug("Current constraints:")
for constraint in constraints:
log.debug(" {}".format(constraint))
log.debug("")
log.debug("Finding the best candidates:")
best_matches = {self.get_best_match(ireq) for ireq in constraints}
# Find the new set of secondary dependencies
log.debug("")
log.debug("Finding secondary dependencies:")
safe_constraints = []
for best_match in best_matches:
for dep in self._iter_dependencies(best_match):
if self.allow_unsafe or dep.name not in UNSAFE_PACKAGES:
safe_constraints.append(dep)
# Grouping constraints to make clean diff between rounds
theirs = set(self._group_constraints(safe_constraints))
# NOTE: We need to compare RequirementSummary objects, since
# InstallRequirement does not define equality
diff = {RequirementSummary(t) for t in theirs} - {
RequirementSummary(t) for t in self.their_constraints
}
removed = {RequirementSummary(t) for t in self.their_constraints} - {
RequirementSummary(t) for t in theirs
}
unsafe = {RequirementSummary(t) for t in unsafe_constraints} - {
RequirementSummary(t) for t in self.unsafe_constraints
}
has_changed = len(diff) > 0 or len(removed) > 0 or len(unsafe) > 0
if has_changed:
log.debug("")
log.debug("New dependencies found in this round:")
for new_dependency in sorted(diff, key=lambda req: key_from_req(req.req)):
log.debug(" adding {}".format(new_dependency))
log.debug("Removed dependencies in this round:")
for removed_dependency in sorted(
removed, key=lambda req: key_from_req(req.req)
):
log.debug(" removing {}".format(removed_dependency))
log.debug("Unsafe dependencies in this round:")
for unsafe_dependency in sorted(unsafe, key=lambda req: key_from_req(req.req)):
log.debug(" remembering unsafe {}".format(unsafe_dependency))
# Store the last round's results in the their_constraints
self.their_constraints = theirs
# Store the last round's unsafe constraints
self.unsafe_constraints = unsafe_constraints
return has_changed, best_matches
|
https://github.com/jazzband/pip-tools/issues/569
|
$ venv/bin/pip-sync dev-requirements.txt
Traceback (most recent call last):
File "venv/bin/pip-sync", line 11, in <module>
sys.exit(cli())
File "venv/lib64/python3.6/site-packages/click/core.py", line 722, in __call__
return self.main(*args, **kwargs)
File "venv/lib64/python3.6/site-packages/click/core.py", line 697, in main
rv = self.invoke(ctx)
File "venv/lib64/python3.6/site-packages/click/core.py", line 895, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "venv/lib64/python3.6/site-packages/click/core.py", line 535, in invoke
return callback(*args, **kwargs)
File "venv/lib64/python3.6/site-packages/piptools/scripts/sync.py", line 74, in cli
install_flags=install_flags))
File "venv/lib64/python3.6/site-packages/piptools/sync.py", line 159, in sync
for ireq in sorted(to_install):
TypeError: '<' not supported between instances of 'InstallRequirement' and 'InstallRequirement'
|
TypeError
|
def sync(
to_install,
to_uninstall,
verbose=False,
dry_run=False,
pip_flags=None,
install_flags=None,
):
"""
Install and uninstalls the given sets of modules.
"""
if not to_uninstall and not to_install:
click.echo("Everything up-to-date")
if pip_flags is None:
pip_flags = []
if not verbose:
pip_flags += ["-q"]
if os.environ.get("VIRTUAL_ENV"):
# find pip via PATH
pip = "pip"
else:
# find pip in same directory as pip-sync entry-point script
pip = os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), "pip")
if to_uninstall:
if dry_run:
click.echo("Would uninstall:")
for pkg in to_uninstall:
click.echo(" {}".format(pkg))
else:
check_call([pip, "uninstall", "-y"] + pip_flags + sorted(to_uninstall))
if to_install:
if install_flags is None:
install_flags = []
if dry_run:
click.echo("Would install:")
for ireq in to_install:
click.echo(" {}".format(format_requirement(ireq)))
else:
package_args = []
for ireq in sorted(to_install, key=key_from_ireq):
if ireq.editable:
package_args.extend(["-e", str(ireq.link or ireq.req)])
else:
package_args.append(str(ireq.req))
check_call([pip, "install"] + pip_flags + install_flags + package_args)
return 0
|
def sync(
to_install,
to_uninstall,
verbose=False,
dry_run=False,
pip_flags=None,
install_flags=None,
):
"""
Install and uninstalls the given sets of modules.
"""
if not to_uninstall and not to_install:
click.echo("Everything up-to-date")
if pip_flags is None:
pip_flags = []
if not verbose:
pip_flags += ["-q"]
if os.environ.get("VIRTUAL_ENV"):
# find pip via PATH
pip = "pip"
else:
# find pip in same directory as pip-sync entry-point script
pip = os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), "pip")
if to_uninstall:
if dry_run:
click.echo("Would uninstall:")
for pkg in to_uninstall:
click.echo(" {}".format(pkg))
else:
check_call([pip, "uninstall", "-y"] + pip_flags + sorted(to_uninstall))
if to_install:
if install_flags is None:
install_flags = []
if dry_run:
click.echo("Would install:")
for ireq in to_install:
click.echo(" {}".format(format_requirement(ireq)))
else:
package_args = []
for ireq in sorted(to_install):
if ireq.editable:
package_args.extend(["-e", str(ireq.link or ireq.req)])
else:
package_args.append(str(ireq.req))
check_call([pip, "install"] + pip_flags + install_flags + package_args)
return 0
|
https://github.com/jazzband/pip-tools/issues/569
|
$ venv/bin/pip-sync dev-requirements.txt
Traceback (most recent call last):
File "venv/bin/pip-sync", line 11, in <module>
sys.exit(cli())
File "venv/lib64/python3.6/site-packages/click/core.py", line 722, in __call__
return self.main(*args, **kwargs)
File "venv/lib64/python3.6/site-packages/click/core.py", line 697, in main
rv = self.invoke(ctx)
File "venv/lib64/python3.6/site-packages/click/core.py", line 895, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "venv/lib64/python3.6/site-packages/click/core.py", line 535, in invoke
return callback(*args, **kwargs)
File "venv/lib64/python3.6/site-packages/piptools/scripts/sync.py", line 74, in cli
install_flags=install_flags))
File "venv/lib64/python3.6/site-packages/piptools/sync.py", line 159, in sync
for ireq in sorted(to_install):
TypeError: '<' not supported between instances of 'InstallRequirement' and 'InstallRequirement'
|
TypeError
|
def rectangle(img, x, y, h, w):
"""Create a rectangular ROI.
Inputs:
img = An RGB or grayscale image to plot the ROI on in debug mode.
x = The x-coordinate of the upper left corner of the rectangle.
y = The y-coordinate of the upper left corner of the rectangle.
h = The height of the rectangle.
w = The width of the rectangle.
Outputs:
roi_contour = An ROI set of points (contour).
roi_hierarchy = The hierarchy of ROI contour(s).
:param img: numpy.ndarray
:param x: int
:param y: int
:param h: int
:param w: int
:return roi_contour: list
:return roi_hierarchy: numpy.ndarray
"""
# Autoincrement the device counter
params.device += 1
# Get the height and width of the reference image
height, width = np.shape(img)[:2]
# Create the rectangle contour vertices
pt1 = [x, y]
pt2 = [x, y + h - 1]
pt3 = [x + w - 1, y + h - 1]
pt4 = [x + w - 1, y]
# Create the ROI contour
roi_contour = [np.array([[pt1], [pt2], [pt3], [pt4]], dtype=np.int32)]
roi_hierarchy = np.array([[[-1, -1, -1, -1]]], dtype=np.int32)
# Draw the ROI if requested
if params.debug is not None:
_draw_roi(img=img, roi_contour=roi_contour)
# Check whether the ROI is correctly bounded inside the image
if x < 0 or y < 0 or x + w > width or y + h > height:
fatal_error("The ROI extends outside of the image!")
return roi_contour, roi_hierarchy
|
def rectangle(img, x, y, h, w):
"""Create a rectangular ROI.
Inputs:
img = An RGB or grayscale image to plot the ROI on in debug mode.
x = The x-coordinate of the upper left corner of the rectangle.
y = The y-coordinate of the upper left corner of the rectangle.
h = The height of the rectangle.
w = The width of the rectangle.
Outputs:
roi_contour = An ROI set of points (contour).
roi_hierarchy = The hierarchy of ROI contour(s).
:param img: numpy.ndarray
:param x: int
:param y: int
:param h: int
:param w: int
:return roi_contour: list
:return roi_hierarchy: numpy.ndarray
"""
# Autoincrement the device counter
params.device += 1
# Get the height and width of the reference image
height, width = np.shape(img)[:2]
# Check whether the ROI is correctly bounded inside the image
if x < 0 or y < 0 or x + w > width or y + h > height:
fatal_error("The ROI extends outside of the image!")
# Create the rectangle contour vertices
pt1 = [x, y]
pt2 = [x, y + h - 1]
pt3 = [x + w - 1, y + h - 1]
pt4 = [x + w - 1, y]
# Create the ROI contour
roi_contour = [np.array([[pt1], [pt2], [pt3], [pt4]], dtype=np.int32)]
roi_hierarchy = np.array([[[-1, -1, -1, -1]]], dtype=np.int32)
# Draw the ROI if requested
if params.debug is not None:
_draw_roi(img=img, roi_contour=roi_contour)
return roi_contour, roi_hierarchy
|
https://github.com/danforthcenter/plantcv/issues/481
|
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-143-af286afe5707> in <module>
4 spacing=(0, 1150),
5 ncols=1,
----> 6 nrows=2)
~\Miniconda3\envs\test-environment\lib\site-packages\plantcv-3.6.1+51.g90ef8d0.dirty-py3.7.egg\plantcv\plantcv\roi\roi_methods.py in multi(img, coord, radius, spacing, nrows, ncols)
262 x = coord[0] + j * spacing[0]
263 # Create a chip ROI
--> 264 rois.append(circle(img=img, x=x, y=y, r=radius))
265 # Draw the circle on the binary image
266 cv2.circle(bin_img, (x, y), radius, 255, -1)
~\Miniconda3\envs\test-environment\lib\site-packages\plantcv-3.6.1+51.g90ef8d0.dirty-py3.7.egg\plantcv\plantcv\roi\roi_methods.py in circle(img, x, y, r)
120 # Check whether the ROI is correctly bounded inside the image
121 if x - r < 0 or x + r > width or y - r < 0 or y + r > height:
--> 122 fatal_error("The ROI extends outside of the image!")
123
124 # Initialize a binary image of the circle
~\Miniconda3\envs\test-environment\lib\site-packages\plantcv-3.6.1+51.g90ef8d0.dirty-py3.7.egg\plantcv\plantcv\fatal_error.py in fatal_error(error)
12 """
13
---> 14 raise RuntimeError(error)
RuntimeError: The ROI extends outside of the image!
|
RuntimeError
|
def circle(img, x, y, r):
"""Create a circular ROI.
Inputs:
img = An RGB or grayscale image to plot the ROI on in debug mode.
x = The x-coordinate of the center of the circle.
y = The y-coordinate of the center of the circle.
r = The radius of the circle.
Outputs:
roi_contour = An ROI set of points (contour).
roi_hierarchy = The hierarchy of ROI contour(s).
:param img: numpy.ndarray
:param x: int
:param y: int
:param r: int
:return roi_contour: list
:return roi_hierarchy: numpy.ndarray
"""
# Autoincrement the device counter
params.device += 1
# Get the height and width of the reference image
height, width = np.shape(img)[:2]
# Initialize a binary image of the circle
bin_img = np.zeros((height, width), dtype=np.uint8)
# Draw the circle on the binary image
cv2.circle(bin_img, (x, y), r, 255, -1)
# Use the binary image to create an ROI contour
roi_contour, roi_hierarchy = cv2.findContours(
bin_img, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE
)[-2:]
# Draw the ROI if requested
if params.debug is not None:
_draw_roi(img=img, roi_contour=roi_contour)
# Check whether the ROI is correctly bounded inside the image
if x - r < 0 or x + r > width or y - r < 0 or y + r > height:
fatal_error("The ROI extends outside of the image!")
return roi_contour, roi_hierarchy
|
def circle(img, x, y, r):
"""Create a circular ROI.
Inputs:
img = An RGB or grayscale image to plot the ROI on in debug mode.
x = The x-coordinate of the center of the circle.
y = The y-coordinate of the center of the circle.
r = The radius of the circle.
Outputs:
roi_contour = An ROI set of points (contour).
roi_hierarchy = The hierarchy of ROI contour(s).
:param img: numpy.ndarray
:param x: int
:param y: int
:param r: int
:return roi_contour: list
:return roi_hierarchy: numpy.ndarray
"""
# Autoincrement the device counter
params.device += 1
# Get the height and width of the reference image
height, width = np.shape(img)[:2]
# Check whether the ROI is correctly bounded inside the image
if x - r < 0 or x + r > width or y - r < 0 or y + r > height:
fatal_error("The ROI extends outside of the image!")
# Initialize a binary image of the circle
bin_img = np.zeros((height, width), dtype=np.uint8)
# Draw the circle on the binary image
cv2.circle(bin_img, (x, y), r, 255, -1)
# Use the binary image to create an ROI contour
roi_contour, roi_hierarchy = cv2.findContours(
np.copy(bin_img), cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE
)[-2:]
# Draw the ROI if requested
if params.debug is not None:
_draw_roi(img=img, roi_contour=roi_contour)
return roi_contour, roi_hierarchy
|
https://github.com/danforthcenter/plantcv/issues/481
|
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-143-af286afe5707> in <module>
4 spacing=(0, 1150),
5 ncols=1,
----> 6 nrows=2)
~\Miniconda3\envs\test-environment\lib\site-packages\plantcv-3.6.1+51.g90ef8d0.dirty-py3.7.egg\plantcv\plantcv\roi\roi_methods.py in multi(img, coord, radius, spacing, nrows, ncols)
262 x = coord[0] + j * spacing[0]
263 # Create a chip ROI
--> 264 rois.append(circle(img=img, x=x, y=y, r=radius))
265 # Draw the circle on the binary image
266 cv2.circle(bin_img, (x, y), radius, 255, -1)
~\Miniconda3\envs\test-environment\lib\site-packages\plantcv-3.6.1+51.g90ef8d0.dirty-py3.7.egg\plantcv\plantcv\roi\roi_methods.py in circle(img, x, y, r)
120 # Check whether the ROI is correctly bounded inside the image
121 if x - r < 0 or x + r > width or y - r < 0 or y + r > height:
--> 122 fatal_error("The ROI extends outside of the image!")
123
124 # Initialize a binary image of the circle
~\Miniconda3\envs\test-environment\lib\site-packages\plantcv-3.6.1+51.g90ef8d0.dirty-py3.7.egg\plantcv\plantcv\fatal_error.py in fatal_error(error)
12 """
13
---> 14 raise RuntimeError(error)
RuntimeError: The ROI extends outside of the image!
|
RuntimeError
|
def ellipse(img, x, y, r1, r2, angle):
"""Create an elliptical ROI.
Inputs:
img = An RGB or grayscale image to plot the ROI on in debug mode.
x = The x-coordinate of the center of the ellipse.
y = The y-coordinate of the center of the ellipse.
r1 = The radius of the major axis.
r2 = The radius of the minor axis.
angle = The angle of rotation in degrees of the major axis.
Outputs:
roi_contour = An ROI set of points (contour).
roi_hierarchy = The hierarchy of ROI contour(s).
:param img: numpy.ndarray
:param x: int
:param y: int
:param r1: int
:param r2: int
:param angle: double
:return roi_contour: list
:return roi_hierarchy: numpy.ndarray
"""
# Autoincrement the device counter
params.device += 1
# Get the height and width of the reference image
height, width = np.shape(img)[:2]
# Initialize a binary image of the ellipse
bin_img = np.zeros((height, width), dtype=np.uint8)
# Draw the ellipse on the binary image
cv2.ellipse(bin_img, (x, y), (r1, r2), angle, 0, 360, 255, -1)
# Use the binary image to create an ROI contour
roi_contour, roi_hierarchy = cv2.findContours(
bin_img, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE
)[-2:]
# Draw the ROI if requested
if params.debug is not None:
_draw_roi(img=img, roi_contour=roi_contour)
# Checks ellipse goes outside the image by checking row and column sum of edges
if (
np.sum(bin_img[0, :])
+ np.sum(bin_img[-1, :])
+ np.sum(bin_img[:, 0])
+ np.sum(bin_img[:, -1])
> 0
) or len(roi_contour) == 0:
fatal_error("The ROI extends outside of the image, or ROI is not on the image!")
return roi_contour, roi_hierarchy
|
def ellipse(img, x, y, r1, r2, angle):
"""Create an elliptical ROI.
Inputs:
img = An RGB or grayscale image to plot the ROI on in debug mode.
x = The x-coordinate of the center of the ellipse.
y = The y-coordinate of the center of the ellipse.
r1 = The radius of the major axis.
r2 = The radius of the minor axis.
angle = The angle of rotation in degrees of the major axis.
Outputs:
roi_contour = An ROI set of points (contour).
roi_hierarchy = The hierarchy of ROI contour(s).
:param img: numpy.ndarray
:param x: int
:param y: int
:param r1: int
:param r2: int
:param angle: double
:return roi_contour: list
:return roi_hierarchy: numpy.ndarray
"""
# Autoincrement the device counter
params.device += 1
# Get the height and width of the reference image
height, width = np.shape(img)[:2]
# Initialize a binary image of the ellipse
bin_img = np.zeros((height, width), dtype=np.uint8)
# Draw the ellipse on the binary image
cv2.ellipse(bin_img, (x, y), (r1, r2), angle, 0, 360, 255, -1)
if (
np.sum(bin_img[0, :])
+ np.sum(bin_img[-1, :])
+ np.sum(bin_img[:, 0])
+ np.sum(bin_img[:, -1])
> 0
):
fatal_error("The ROI extends outside of the image!")
# Use the binary image to create an ROI contour
roi_contour, roi_hierarchy = cv2.findContours(
np.copy(bin_img), cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE
)[-2:]
# Draw the ROI if requested
if params.debug is not None:
_draw_roi(img=img, roi_contour=roi_contour)
return roi_contour, roi_hierarchy
|
https://github.com/danforthcenter/plantcv/issues/481
|
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-143-af286afe5707> in <module>
4 spacing=(0, 1150),
5 ncols=1,
----> 6 nrows=2)
~\Miniconda3\envs\test-environment\lib\site-packages\plantcv-3.6.1+51.g90ef8d0.dirty-py3.7.egg\plantcv\plantcv\roi\roi_methods.py in multi(img, coord, radius, spacing, nrows, ncols)
262 x = coord[0] + j * spacing[0]
263 # Create a chip ROI
--> 264 rois.append(circle(img=img, x=x, y=y, r=radius))
265 # Draw the circle on the binary image
266 cv2.circle(bin_img, (x, y), radius, 255, -1)
~\Miniconda3\envs\test-environment\lib\site-packages\plantcv-3.6.1+51.g90ef8d0.dirty-py3.7.egg\plantcv\plantcv\roi\roi_methods.py in circle(img, x, y, r)
120 # Check whether the ROI is correctly bounded inside the image
121 if x - r < 0 or x + r > width or y - r < 0 or y + r > height:
--> 122 fatal_error("The ROI extends outside of the image!")
123
124 # Initialize a binary image of the circle
~\Miniconda3\envs\test-environment\lib\site-packages\plantcv-3.6.1+51.g90ef8d0.dirty-py3.7.egg\plantcv\plantcv\fatal_error.py in fatal_error(error)
12 """
13
---> 14 raise RuntimeError(error)
RuntimeError: The ROI extends outside of the image!
|
RuntimeError
|
def multi(img, coord, radius, spacing=None, nrows=None, ncols=None):
"""Create multiple circular ROIs on a single image
Inputs
img = Input image data.
coord = Two-element tuple of the center of the top left object (x,y) or a list of tuples identifying the center of each roi [(x1,y1),(x2,y2)]
radius = A single radius for all ROIs.
spacing = Two-element tuple of the horizontal and vertical spacing between ROIs, (x,y). Ignored if `coord` is a list and `rows` and `cols` are None.
nrows = Number of rows in ROI layout. Should be missing or None if each center coordinate pair is listed.
ncols = Number of columns in ROI layout. Should be missing or None if each center coordinate pair is listed.
Returns:
roi_contour = list of roi contours
roi_hierarchy = list of roi hierarchies
:param img: numpy.ndarray
:param coord: tuple, list
:param radius: int
:param spacing: tuple
:param nrows: int
:param ncols: int
:return mask: numpy.ndarray
"""
# Autoincrement the device counter
params.device += 1
# Store user debug
debug = params.debug
# Temporarily disable debug
params.debug = None
# Get the height and width of the reference image
height, width = np.shape(img)[:2]
overlap_img = np.zeros((height, width))
# Initialize a binary image of the circle that will contain all ROI
all_roi_img = np.zeros((height, width), dtype=np.uint8)
roi_contour = []
roi_hierarchy = []
# Grid of ROIs
if (
(type(coord) == tuple)
and ((nrows and ncols) is not None)
and (type(spacing) == tuple)
):
# Loop over each row
for i in range(0, nrows):
# The upper left corner is the y starting coordinate + the ROI offset * the vertical spacing
y = coord[1] + i * spacing[1]
# Loop over each column
for j in range(0, ncols):
# Initialize a binary image for each circle
bin_img = np.zeros((height, width), dtype=np.uint8)
# The upper left corner is the x starting coordinate + the ROI offset * the
# horizontal spacing between chips
x = coord[0] + j * spacing[0]
# Check whether the ROI is correctly bounded inside the image
if (
x - radius < 0
or x + radius > width
or y - radius < 0
or y + radius > height
):
fatal_error("An ROI extends outside of the image!")
# Draw the circle on the binary images
# Keep track of all roi
all_roi_img = cv2.circle(all_roi_img, (x, y), radius, 255, -1)
# Keep track of each roi individually to check overlapping
circle_img = cv2.circle(bin_img, (x, y), radius, 255, -1)
overlap_img = overlap_img + circle_img
# Make a list of contours and hierarchies
_, rc, rh = cv2.findContours(
circle_img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE
)
roi_contour.append(rc)
roi_hierarchy.append(rh)
# User specified ROI centers
elif (type(coord) == list) and ((nrows and ncols) is None) and (spacing is None):
for i in range(0, len(coord)):
# Initialize a binary image for each circle
bin_img = np.zeros((height, width), dtype=np.uint8)
y = coord[i][1]
x = coord[i][0]
if (
x - radius < 0
or x + radius > width
or y - radius < 0
or y + radius > height
):
fatal_error("An ROI extends outside of the image!")
# Draw the circle on the binary image
# Keep track of all roi
all_roi_img = cv2.circle(all_roi_img, (x, y), radius, 255, -1)
# Keep track of each roi individually to check overlapping
circle_img = cv2.circle(bin_img, (x, y), radius, 255, -1)
overlap_img = overlap_img + circle_img
# Make a list of contours and hierarchies
_, rc, rh = cv2.findContours(
circle_img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE
)
roi_contour.append(rc)
roi_hierarchy.append(rh)
else:
fatal_error(
"Function can either make a grid of ROIs (user must provide nrows, ncols, spacing, and coord) "
"or take custom ROI coordinates (user must provide only a list of tuples to 'coord' parameter). "
"Both options require a user-defined radius as well"
)
if np.amax(overlap_img) > 255:
print(
"WARNING: Two or more of the user defined regions of interest overlap! "
"If you only see one ROI then they may overlap exactly."
)
# Reset debug
params.debug = debug
# Draw the ROIs if requested
if params.debug is not None:
# Create an array of contours and list of hierarchy for debug image
roi_contour1, roi_hierarchy1 = cv2.findContours(
all_roi_img, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE
)[-2:]
_draw_roi(img=img, roi_contour=roi_contour1)
return roi_contour, roi_hierarchy
|
def multi(img, coord, radius, spacing=None, nrows=None, ncols=None):
"""Create multiple circular ROIs on a single image
Inputs
img = Input image data.
coord = Two-element tuple of the center of the top left object (x,y) or a list of tuples identifying the center of each roi [(x1,y1),(x2,y2)]
radius = A single radius for all ROIs.
spacing = Two-element tuple of the horizontal and vertical spacing between ROIs, (x,y). Ignored if `coord` is a list and `rows` and `cols` are None.
nrows = Number of rows in ROI layout. Should be missing or None if each center coordinate pair is listed.
ncols = Number of columns in ROI layout. Should be missing or None if each center coordinate pair is listed.
Returns:
roi_contour = list of roi contours
roi_hierarchy = list of roi hierarchies
:param img: numpy.ndarray
:param coord: tuple, list
:param radius: int
:param spacing: tuple
:param nrows: int
:param ncols: int
:return mask: numpy.ndarray
"""
# Autoincrement the device counter
params.device += 1
# Initialize ROI list
rois = []
# Store user debug
debug = params.debug
# Temporarily disable debug
params.debug = None
# Get the height and width of the reference image
height, width = np.shape(img)[:2]
# Initialize a binary image of the circle
bin_img = np.zeros((height, width), dtype=np.uint8)
roi_contour = []
roi_hierarchy = []
# Grid of ROIs
if (type(coord) == tuple) and ((nrows and ncols) is not None):
# Loop over each row
for i in range(0, nrows):
# The upper left corner is the y starting coordinate + the ROI offset * the vertical spacing
y = coord[1] + i * spacing[1]
# Loop over each column
for j in range(0, ncols):
# The upper left corner is the x starting coordinate + the ROI offset * the
# horizontal spacing between chips
x = coord[0] + j * spacing[0]
# Create a chip ROI
rois.append(circle(img=img, x=x, y=y, r=radius))
# Draw the circle on the binary image
cv2.circle(bin_img, (x, y), radius, 255, -1)
# Make a list of contours and hierarchies
roi_contour.append(
cv2.findContours(
np.copy(bin_img), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE
)[-2:][0]
)
roi_hierarchy.append(
cv2.findContours(
np.copy(bin_img), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE
)[-2:][1]
)
# Create an array of contours and list of hierarchy for when debug is set to 'plot'
roi_contour1, roi_hierarchy1 = cv2.findContours(
np.copy(bin_img), cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE
)[-2:]
# User specified ROI centers
elif (type(coord) == list) and ((nrows and ncols) is None):
for i in range(0, len(coord)):
y = coord[i][1]
x = coord[i][0]
rois.append(circle(img=img, x=x, y=y, r=radius))
# Draw the circle on the binary image
cv2.circle(bin_img, (x, y), radius, 255, -1)
# Make a list of contours and hierarchies
roi_contour.append(
cv2.findContours(
np.copy(bin_img), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE
)[-2:][0]
)
roi_hierarchy.append(
cv2.findContours(
np.copy(bin_img), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE
)[-2:][1]
)
# Create an array of contours and list of hierarchy for when debug is set to 'plot'
roi_contour1, roi_hierarchy1 = cv2.findContours(
np.copy(bin_img), cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE
)[-2:]
else:
fatal_error(
"Function can either make a grid of ROIs (user must provide nrows, ncols, spacing, and coord) "
"or take custom ROI coordinates (user must provide a list of tuples to 'coord' parameter)"
)
# Reset debug
params.debug = debug
# Draw the ROIs if requested
if params.debug is not None:
_draw_roi(img=img, roi_contour=roi_contour1)
return roi_contour, roi_hierarchy
|
https://github.com/danforthcenter/plantcv/issues/481
|
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-143-af286afe5707> in <module>
4 spacing=(0, 1150),
5 ncols=1,
----> 6 nrows=2)
~\Miniconda3\envs\test-environment\lib\site-packages\plantcv-3.6.1+51.g90ef8d0.dirty-py3.7.egg\plantcv\plantcv\roi\roi_methods.py in multi(img, coord, radius, spacing, nrows, ncols)
262 x = coord[0] + j * spacing[0]
263 # Create a chip ROI
--> 264 rois.append(circle(img=img, x=x, y=y, r=radius))
265 # Draw the circle on the binary image
266 cv2.circle(bin_img, (x, y), radius, 255, -1)
~\Miniconda3\envs\test-environment\lib\site-packages\plantcv-3.6.1+51.g90ef8d0.dirty-py3.7.egg\plantcv\plantcv\roi\roi_methods.py in circle(img, x, y, r)
120 # Check whether the ROI is correctly bounded inside the image
121 if x - r < 0 or x + r > width or y - r < 0 or y + r > height:
--> 122 fatal_error("The ROI extends outside of the image!")
123
124 # Initialize a binary image of the circle
~\Miniconda3\envs\test-environment\lib\site-packages\plantcv-3.6.1+51.g90ef8d0.dirty-py3.7.egg\plantcv\plantcv\fatal_error.py in fatal_error(error)
12 """
13
---> 14 raise RuntimeError(error)
RuntimeError: The ROI extends outside of the image!
|
RuntimeError
|
def custom(img, vertices):
"""Create an custom polygon ROI.
Inputs:
img = An RGB or grayscale image to plot the ROI on in debug mode.
vertices = List of vertices of the desired polygon ROI
Outputs:
roi_contour = An ROI set of points (contour).
roi_hierarchy = The hierarchy of ROI contour(s).
:param img: numpy.ndarray
:param vertices: list
:return roi_contour: list
:return roi_hierarchy: numpy.ndarray
"""
# Autoincrement the device counter
params.device += 1
# Get the height and width of the reference image
height, width = np.shape(img)[:2]
roi_contour = [np.array(vertices, dtype=np.int32)]
roi_hierarchy = np.array([[[-1, -1, -1, -1]]], dtype=np.int32)
if params.debug is not None:
_draw_roi(img=img, roi_contour=roi_contour)
# Check that the ROI doesn't go off the screen
for i in vertices:
(x, y) = i
if x < 0 or x > width or y < 0 or y > height:
fatal_error("An ROI extends outside of the image!")
return roi_contour, roi_hierarchy
|
def custom(img, vertices):
"""Create an custom polygon ROI.
Inputs:
img = An RGB or grayscale image to plot the ROI on in debug mode.
vertices = List of vertices of the desired polygon ROI
Outputs:
roi_contour = An ROI set of points (contour).
roi_hierarchy = The hierarchy of ROI contour(s).
:param img: numpy.ndarray
:param vertices: list
:return roi_contour: list
:return roi_hierarchy: numpy.ndarray
"""
# Autoincrement the device counter
params.device += 1
# Get the height and width of the reference image
height, width = np.shape(img)[:2]
roi_contour = [np.array(vertices, dtype=np.int32)]
roi_hierarchy = np.array([[[-1, -1, -1, -1]]], dtype=np.int32)
if params.debug is not None:
_draw_roi(img=img, roi_contour=roi_contour)
return roi_contour, roi_hierarchy
|
https://github.com/danforthcenter/plantcv/issues/481
|
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-143-af286afe5707> in <module>
4 spacing=(0, 1150),
5 ncols=1,
----> 6 nrows=2)
~\Miniconda3\envs\test-environment\lib\site-packages\plantcv-3.6.1+51.g90ef8d0.dirty-py3.7.egg\plantcv\plantcv\roi\roi_methods.py in multi(img, coord, radius, spacing, nrows, ncols)
262 x = coord[0] + j * spacing[0]
263 # Create a chip ROI
--> 264 rois.append(circle(img=img, x=x, y=y, r=radius))
265 # Draw the circle on the binary image
266 cv2.circle(bin_img, (x, y), radius, 255, -1)
~\Miniconda3\envs\test-environment\lib\site-packages\plantcv-3.6.1+51.g90ef8d0.dirty-py3.7.egg\plantcv\plantcv\roi\roi_methods.py in circle(img, x, y, r)
120 # Check whether the ROI is correctly bounded inside the image
121 if x - r < 0 or x + r > width or y - r < 0 or y + r > height:
--> 122 fatal_error("The ROI extends outside of the image!")
123
124 # Initialize a binary image of the circle
~\Miniconda3\envs\test-environment\lib\site-packages\plantcv-3.6.1+51.g90ef8d0.dirty-py3.7.egg\plantcv\plantcv\fatal_error.py in fatal_error(error)
12 """
13
---> 14 raise RuntimeError(error)
RuntimeError: The ROI extends outside of the image!
|
RuntimeError
|
def options():
"""Parse command line options.
Args:
Returns:
argparse object.
Raises:
IOError: if dir does not exist.
IOError: if pipeline does not exist.
IOError: if the metadata file SnapshotInfo.csv does not exist in dir when flat is False.
ValueError: if adaptor is not phenofront or dbimportexport.
ValueError: if a metadata field is not supported.
"""
# Job start time
start_time = datetime.datetime.now().strftime("%Y-%m-%d_%H:%M:%S")
print("Starting run " + start_time + "\n", file=sys.stderr)
# These are metadata types that PlantCV deals with.
# Values are default values in the event the metadata is missing
valid_meta = {
# Camera settings
"camera": "none",
"imgtype": "none",
"zoom": "none",
"exposure": "none",
"gain": "none",
"frame": "none",
"lifter": "none",
# Date-Time
"timestamp": "none",
# Sample attributes
"id": "none",
"plantbarcode": "none",
"treatment": "none",
"cartag": "none",
# Experiment attributes
"measurementlabel": "none",
# Other
"other": "none",
}
parser = argparse.ArgumentParser(
description="Parallel imaging processing with PlantCV.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
"-d",
"--dir",
help="Input directory containing images or snapshots.",
required=True,
)
parser.add_argument(
"-a",
"--adaptor",
help="Image metadata reader adaptor. PhenoFront metadata is stored in a CSV file and the "
"image file name. For the filename option, all metadata is stored in the image file "
"name. Current adaptors: phenofront, image",
default="phenofront",
)
parser.add_argument("-p", "--pipeline", help="Pipeline script file.", required=True)
parser.add_argument("-s", "--db", help="SQLite database file name.", required=True)
parser.add_argument(
"-i",
"--outdir",
help="Output directory for images. Not required by all pipelines.",
default=".",
)
parser.add_argument(
"-T", "--cpu", help="Number of CPU to use.", default=1, type=int
)
parser.add_argument(
"-c",
"--create",
help="Create output database (SQLite). Default behaviour adds to existing database. "
"Warning: activating this option will delete an existing database!",
default=False,
action="store_true",
)
parser.add_argument(
"-D",
"--dates",
help="Date range. Format: YYYY-MM-DD-hh-mm-ss_YYYY-MM-DD-hh-mm-ss. If the second date "
"is excluded then the current date is assumed.",
required=False,
)
parser.add_argument(
"-t", "--type", help="Image format type (extension).", default="png"
)
parser.add_argument(
"-l",
"--deliminator",
help="Image file name metadata deliminator character.",
default="_",
)
parser.add_argument(
"-f",
"--meta",
help="Image file name metadata format. List valid metadata fields separated by the "
"deliminator (-l/--deliminator). Valid metadata fields are: "
+ ", ".join(map(str, list(valid_meta.keys()))),
default="imgtype_camera_frame_zoom_id",
)
parser.add_argument(
"-M",
"--match",
help="Restrict analysis to images with metadata matching input criteria. Input a "
"metadata:value comma-separated list. This is an exact match search. "
"E.g. imgtype:VIS,camera:SV,zoom:z500",
required=False,
)
parser.add_argument(
"-C",
"--coprocess",
help="Coprocess the specified imgtype with the imgtype specified in --match "
"(e.g. coprocess NIR images with VIS).",
default=None,
)
parser.add_argument(
"-w",
"--writeimg",
help="Include analysis images in output.",
default=False,
action="store_true",
)
parser.add_argument(
"-o",
"--other_args",
help="Other arguments to pass to the pipeline script.",
required=False,
)
args = parser.parse_args()
if not os.path.exists(args.dir):
raise IOError("Directory does not exist: {0}".format(args.dir))
if not os.path.exists(args.pipeline):
raise IOError("File does not exist: {0}".format(args.pipeline))
if args.adaptor is "phenofront":
if not os.path.exists(args.dir + "/SnapshotInfo.csv"):
raise IOError(
"The snapshot metadata file SnapshotInfo.csv does not exist in {0}. "
"Perhaps you meant to use a different adaptor?".format(args.dir)
)
if not os.path.exists(args.outdir):
raise IOError("Directory does not exist: {0}".format(args.outdir))
args.jobdir = start_time
try:
os.makedirs(args.jobdir)
except IOError as e:
raise IOError("{0}: {1}".format(e.strerror, args.jobdir))
if args.adaptor != "phenofront" and args.adaptor != "filename":
raise ValueError("Adaptor must be either phenofront or filename")
if args.dates:
dates = args.dates.split("_")
if len(dates) == 1:
# End is current time
dates.append(datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S"))
start = map(int, dates[0].split("-"))
end = map(int, dates[1].split("-"))
# Convert start and end dates to Unix time
start_td = datetime.datetime(*start) - datetime.datetime(1970, 1, 1)
end_td = datetime.datetime(*end) - datetime.datetime(1970, 1, 1)
args.start_date = (start_td.days * 24 * 3600) + start_td.seconds
args.end_date = (end_td.days * 24 * 3600) + end_td.seconds
else:
end = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
end_list = map(int, end.split("-"))
end_td = datetime.datetime(*end_list) - datetime.datetime(1970, 1, 1)
args.start_date = 1
args.end_date = (end_td.days * 24 * 3600) + end_td.seconds
args.valid_meta = valid_meta
args.start_time = start_time
# Image filename metadata structure
fields = args.meta.split(args.deliminator)
structure = {}
for i, field in enumerate(fields):
structure[field] = i
args.fields = structure
# Are the user-defined metadata valid?
for field in args.fields:
if field not in args.valid_meta:
raise ValueError(
"The field {0} is not a currently supported metadata type.".format(
field
)
)
# Metadata restrictions
args.imgtype = {}
if args.match is not None:
pairs = args.match.split(",")
for pair in pairs:
key, value = pair.split(":")
args.imgtype[key] = value
else:
args.imgtype["None"] = "None"
if (args.coprocess is not None) and ("imgtype" not in args.imgtype):
raise ValueError(
"When the coprocess imgtype is defined, imgtype must be included in match."
)
return args
|
def options():
"""Parse command line options.
Args:
Returns:
argparse object.
Raises:
IOError: if dir does not exist.
IOError: if pipeline does not exist.
IOError: if the metadata file SnapshotInfo.csv does not exist in dir when flat is False.
ValueError: if adaptor is not phenofront or dbimportexport.
ValueError: if a metadata field is not supported.
"""
# Job start time
start_time = datetime.datetime.now().strftime("%Y-%m-%d_%H:%M:%S")
print("Starting run " + start_time + "\n", file=sys.stderr)
# These are metadata types that PlantCV deals with.
# Values are default values in the event the metadata is missing
valid_meta = {
# Camera settings
"camera": "none",
"imgtype": "none",
"zoom": "none",
"exposure": "none",
"gain": "none",
"frame": "none",
"lifter": "none",
# Date-Time
"timestamp": "none",
# Sample attributes
"id": "none",
"plantbarcode": "none",
"treatment": "none",
"cartag": "none",
# Experiment attributes
"measurementlabel": "none",
# Other
"other": "none",
}
parser = argparse.ArgumentParser(
description="Parallel imaging processing with PlantCV.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
"-d",
"--dir",
help="Input directory containing images or snapshots.",
required=True,
)
parser.add_argument(
"-a",
"--adaptor",
help="Image metadata reader adaptor. PhenoFront metadata is stored in a CSV file and the "
"image file name. For the filename option, all metadata is stored in the image file "
"name. Current adaptors: phenofront, image",
default="phenofront",
)
parser.add_argument("-p", "--pipeline", help="Pipeline script file.", required=True)
parser.add_argument("-s", "--db", help="SQLite database file name.", required=True)
parser.add_argument(
"-i",
"--outdir",
help="Output directory for images. Not required by all pipelines.",
default=".",
)
parser.add_argument(
"-T", "--cpu", help="Number of CPU to use.", default=1, type=int
)
parser.add_argument(
"-c",
"--create",
help="Create output database (SQLite). Default behaviour adds to existing database. "
"Warning: activating this option will delete an existing database!",
default=False,
action="store_true",
)
parser.add_argument(
"-m",
"--roi",
help="ROI/mask image. Required by some pipelines (vis_tv, flu_tv).",
required=False,
)
parser.add_argument(
"-D",
"--dates",
help="Date range. Format: YYYY-MM-DD-hh-mm-ss_YYYY-MM-DD-hh-mm-ss. If the second date "
"is excluded then the current date is assumed.",
required=False,
)
parser.add_argument(
"-t", "--type", help="Image format type (extension).", default="png"
)
parser.add_argument(
"-l",
"--deliminator",
help="Image file name metadata deliminator character.",
default="_",
)
parser.add_argument(
"-f",
"--meta",
help="Image file name metadata format. List valid metadata fields separated by the "
"deliminator (-l/--deliminator). Valid metadata fields are: "
+ ", ".join(map(str, list(valid_meta.keys()))),
default="imgtype_camera_frame_zoom_id",
)
parser.add_argument(
"-M",
"--match",
help="Restrict analysis to images with metadata matching input criteria. Input a "
"metadata:value comma-separated list. This is an exact match search. "
"E.g. imgtype:VIS,camera:SV,zoom:z500",
required=False,
)
parser.add_argument(
"-C",
"--coprocess",
help="Coprocess the specified imgtype with the imgtype specified in --match "
"(e.g. coprocess NIR images with VIS).",
default=None,
)
parser.add_argument(
"-w",
"--writeimg",
help="Include analysis images in output.",
default=False,
action="store_true",
)
args = parser.parse_args()
if not os.path.exists(args.dir):
raise IOError("Directory does not exist: {0}".format(args.dir))
if not os.path.exists(args.pipeline):
raise IOError("File does not exist: {0}".format(args.pipeline))
if args.adaptor is "phenofront":
if not os.path.exists(args.dir + "/SnapshotInfo.csv"):
raise IOError(
"The snapshot metadata file SnapshotInfo.csv does not exist in {0}. "
"Perhaps you meant to use a different adaptor?".format(args.dir)
)
if not os.path.exists(args.outdir):
raise IOError("Directory does not exist: {0}".format(args.outdir))
args.jobdir = start_time
try:
os.makedirs(args.jobdir)
except IOError as e:
raise IOError("{0}: {1}".format(e.strerror, args.jobdir))
if args.adaptor != "phenofront" and args.adaptor != "filename":
raise ValueError("Adaptor must be either phenofront or filename")
if args.dates:
dates = args.dates.split("_")
if len(dates) == 1:
# End is current time
dates.append(datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S"))
start = map(int, dates[0].split("-"))
end = map(int, dates[1].split("-"))
# Convert start and end dates to Unix time
start_td = datetime.datetime(*start) - datetime.datetime(1970, 1, 1)
end_td = datetime.datetime(*end) - datetime.datetime(1970, 1, 1)
args.start_date = (start_td.days * 24 * 3600) + start_td.seconds
args.end_date = (end_td.days * 24 * 3600) + end_td.seconds
else:
end = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
end_list = map(int, end.split("-"))
end_td = datetime.datetime(*end_list) - datetime.datetime(1970, 1, 1)
args.start_date = 1
args.end_date = (end_td.days * 24 * 3600) + end_td.seconds
args.valid_meta = valid_meta
args.start_time = start_time
# Image filename metadata structure
fields = args.meta.split(args.deliminator)
structure = {}
for i, field in enumerate(fields):
structure[field] = i
args.fields = structure
# Are the user-defined metadata valid?
for field in args.fields:
if field not in args.valid_meta:
raise ValueError(
"The field {0} is not a currently supported metadata type.".format(
field
)
)
# Metadata restrictions
args.imgtype = {}
if args.match is not None:
pairs = args.match.split(",")
for pair in pairs:
key, value = pair.split(":")
args.imgtype[key] = value
else:
args.imgtype["None"] = "None"
if (args.coprocess is not None) and ("imgtype" not in args.imgtype):
raise ValueError(
"When the coprocess imgtype is defined, imgtype must be included in match."
)
return args
|
https://github.com/danforthcenter/plantcv/issues/51
|
OpenCV Error: Assertion failed ((scn == 3 || scn == 4) && (depth == CV_8U || depth == CV_32F)) in cvtColor, file /build/opencv-SviWsf/opencv-2.4.9.1+dfsg/modules/imgproc/src/color.cpp, line 3959
Traceback (most recent call last):
File "/home/jeffrey/Desktop/plantcv_test.py", line 118, in <module>
main()
File "/home/jeffrey/Desktop/plantcv_test.py", line 51, in main
device, brass_mask1 = pcv.rgb2gray_hsv(brass_mask, 'v', device, args.debug)
File "/home/jeffrey/plantcv/lib/plantcv/rgb2gray_hsv.py", line 14, in rgb2gray_hsv
cv2.error: /build/opencv-SviWsf/opencv-2.4.9.1+dfsg/modules/imgproc/src/color.cpp:3959: error: (-215) (scn == 3 || scn == 4) && (depth == CV_8U || depth == CV_32F) in function cvtColor
|
cv2.error
|
def job_builder(args, meta):
"""
Build a list of image processing jobs.
Args:
args: (object) argparse object.
meta: metadata data structure.
Returns:
Raises:
"""
# Overall job stack. List of list of jobs
job_stack = []
# Jobs/CPU (INT): divide the number of images by the number of requested CPU resources
jobs_per_cpu = args.jobcount / args.cpu
# Get the list of images
# images = list(meta.keys())
images = []
for img in list(meta.keys()):
# If a date range was requested, check whether the image is within range
if args.dates:
# Convert image datetime to unix time
timestamp = dt_parser(meta[img]["timestamp"])
time_delta = timestamp - datetime.datetime(1970, 1, 1)
unix_time = (time_delta.days * 24 * 3600) + time_delta.seconds
if unix_time < args.start_date or unix_time > args.end_date:
continue
if args.coprocess is not None:
if meta[img]["imgtype"] != args.coprocess:
images.append(img)
else:
images.append(img)
print(
"Job list will include " + str(len(images)) + " images" + "\n", file=sys.stderr
)
# For each image
for img in images:
if (args.coprocess is not None) and ("coimg" in meta[img]):
# Create an output file to store the co-image processing results and populate with metadata
coimg = meta[meta[img]["coimg"]]
coout = file_writer("./{0}/{1}.txt".format(args.jobdir, meta[img]["coimg"]))
coout.write(
"\t".join(
map(
str, ("META", "image", coimg["path"] + "/" + meta[img]["coimg"])
)
)
+ "\n"
)
# Valid metadata
for m in list(args.valid_meta.keys()):
coout.write("\t".join(map(str, ("META", m, coimg[m]))) + "\n")
# Create an output file to store the image processing results and populate with metadata
outfile = file_writer("./{0}/{1}.txt".format(args.jobdir, img))
outfile.write(
"\t".join(map(str, ("META", "image", meta[img]["path"] + "/" + img))) + "\n"
)
# Valid metadata
for m in list(args.valid_meta.keys()):
outfile.write("\t".join(map(str, ("META", m, meta[img][m]))) + "\n")
outfile.close()
# Build the job stack
# The first n - 1 CPUs will get INT jobs_per_cpu
# The last CPU will get the remainder
job = 0
# For the first n - 1 CPU
for c in range(1, args.cpu):
# List of jobs for this CPU
jobs = []
# For each job/CPU
for j in range(0, jobs_per_cpu):
# Add job to list
if args.coprocess is not None and ("coimg" in meta[images[job]]):
job_str = "python {0} --image {1}/{2} --outdir {3} --result ./{4}/{5}.txt --coresult ./{6}/{7}.txt".format(
args.pipeline,
meta[images[job]]["path"],
images[job],
args.outdir,
args.jobdir,
images[job],
args.jobdir,
meta[images[job]]["coimg"],
)
if args.writeimg:
job_str += " --writeimg"
if args.other_args:
job_str += " " + args.other_args
jobs.append(job_str)
else:
job_str = "python {0} --image {1}/{2} --outdir {3} --result ./{4}/{5}.txt".format(
args.pipeline,
meta[images[job]]["path"],
images[job],
args.outdir,
args.jobdir,
images[job],
)
if args.writeimg:
job_str += " --writeimg"
if args.other_args:
job_str += " " + args.other_args
jobs.append(job_str)
# Increase the job counter by 1
job += 1
# Add the CPU job list to the job stack
job_stack.append(jobs)
# Add the remaining jobs to the last CPU
jobs = []
for j in range(job, len(images)):
# Add job to list
if args.coprocess is not None and ("coimg" in meta[images[j]]):
job_str = "python {0} --image {1}/{2} --outdir {3} --result ./{4}/{5}.txt --coresult ./{6}/{7}.txt".format(
args.pipeline,
meta[images[j]]["path"],
images[j],
args.outdir,
args.jobdir,
images[j],
args.jobdir,
meta[images[j]]["coimg"],
)
if args.writeimg:
job_str += " --writeimg"
if args.other_args:
job_str += " " + args.other_args
jobs.append(job_str)
else:
job_str = (
"python {0} --image {1}/{2} --outdir {3} --result ./{4}/{5}.txt".format(
args.pipeline,
meta[images[j]]["path"],
images[j],
args.outdir,
args.jobdir,
images[j],
)
)
if args.writeimg:
job_str += " --writeimg"
if args.other_args:
job_str += " " + args.other_args
jobs.append(job_str)
# Add the CPU job list to the job stack
job_stack.append(jobs)
return job_stack
|
def job_builder(args, meta):
"""
Build a list of image processing jobs.
Args:
args: (object) argparse object.
meta: metadata data structure.
Returns:
Raises:
"""
# Overall job stack. List of list of jobs
job_stack = []
# Jobs/CPU (INT): divide the number of images by the number of requested CPU resources
jobs_per_cpu = args.jobcount / args.cpu
# Get the list of images
# images = list(meta.keys())
images = []
for img in list(meta.keys()):
# If a date range was requested, check whether the image is within range
if args.dates:
# Convert image datetime to unix time
timestamp = dt_parser(meta[img]["timestamp"])
time_delta = timestamp - datetime.datetime(1970, 1, 1)
unix_time = (time_delta.days * 24 * 3600) + time_delta.seconds
if unix_time < args.start_date or unix_time > args.end_date:
continue
if args.coprocess is not None:
if meta[img]["imgtype"] != args.coprocess:
images.append(img)
else:
images.append(img)
print(
"Job list will include " + str(len(images)) + " images" + "\n", file=sys.stderr
)
# For each image
for img in images:
if (args.coprocess is not None) and ("coimg" in meta[img]):
# Create an output file to store the co-image processing results and populate with metadata
coimg = meta[meta[img]["coimg"]]
coout = file_writer("./{0}/{1}.txt".format(args.jobdir, meta[img]["coimg"]))
coout.write(
"\t".join(
map(
str, ("META", "image", coimg["path"] + "/" + meta[img]["coimg"])
)
)
+ "\n"
)
# Valid metadata
for m in list(args.valid_meta.keys()):
coout.write("\t".join(map(str, ("META", m, coimg[m]))) + "\n")
# Create an output file to store the image processing results and populate with metadata
outfile = file_writer("./{0}/{1}.txt".format(args.jobdir, img))
outfile.write(
"\t".join(map(str, ("META", "image", meta[img]["path"] + "/" + img))) + "\n"
)
# Valid metadata
for m in list(args.valid_meta.keys()):
outfile.write("\t".join(map(str, ("META", m, meta[img][m]))) + "\n")
outfile.close()
# Build the job stack
# The first n - 1 CPUs will get INT jobs_per_cpu
# The last CPU will get the remainder
job = 0
# For the first n - 1 CPU
for c in range(1, args.cpu):
# List of jobs for this CPU
jobs = []
# For each job/CPU
for j in range(0, jobs_per_cpu):
# Add job to list
if args.coprocess is not None and ("coimg" in meta[images[job]]):
job_str = "python {0} --image {1}/{2} --outdir {3} --result ./{4}/{5}.txt --coresult ./{6}/{7}.txt".format(
args.pipeline,
meta[images[job]]["path"],
images[job],
args.outdir,
args.jobdir,
images[job],
args.jobdir,
meta[images[job]]["coimg"],
)
if args.writeimg:
job_str += " --writeimg"
jobs.append(job_str)
else:
job_str = "python {0} --image {1}/{2} --outdir {3} --result ./{4}/{5}.txt".format(
args.pipeline,
meta[images[job]]["path"],
images[job],
args.outdir,
args.jobdir,
images[job],
)
if args.writeimg:
job_str += " --writeimg"
jobs.append(job_str)
# Increase the job counter by 1
job += 1
# Add the CPU job list to the job stack
job_stack.append(jobs)
# Add the remaining jobs to the last CPU
jobs = []
for j in range(job, len(images)):
# Add job to list
if args.coprocess is not None and ("coimg" in meta[images[j]]):
job_str = "python {0} --image {1}/{2} --outdir {3} --result ./{4}/{5}.txt --coresult ./{6}/{7}.txt".format(
args.pipeline,
meta[images[j]]["path"],
images[j],
args.outdir,
args.jobdir,
images[j],
args.jobdir,
meta[images[j]]["coimg"],
)
if args.writeimg:
job_str += " --writeimg"
jobs.append(job_str)
else:
job_str = (
"python {0} --image {1}/{2} --outdir {3} --result ./{4}/{5}.txt".format(
args.pipeline,
meta[images[j]]["path"],
images[j],
args.outdir,
args.jobdir,
images[j],
)
)
if args.writeimg:
job_str += " --writeimg"
jobs.append(job_str)
# Add the CPU job list to the job stack
job_stack.append(jobs)
return job_stack
|
https://github.com/danforthcenter/plantcv/issues/51
|
OpenCV Error: Assertion failed ((scn == 3 || scn == 4) && (depth == CV_8U || depth == CV_32F)) in cvtColor, file /build/opencv-SviWsf/opencv-2.4.9.1+dfsg/modules/imgproc/src/color.cpp, line 3959
Traceback (most recent call last):
File "/home/jeffrey/Desktop/plantcv_test.py", line 118, in <module>
main()
File "/home/jeffrey/Desktop/plantcv_test.py", line 51, in main
device, brass_mask1 = pcv.rgb2gray_hsv(brass_mask, 'v', device, args.debug)
File "/home/jeffrey/plantcv/lib/plantcv/rgb2gray_hsv.py", line 14, in rgb2gray_hsv
cv2.error: /build/opencv-SviWsf/opencv-2.4.9.1+dfsg/modules/imgproc/src/color.cpp:3959: error: (-215) (scn == 3 || scn == 4) && (depth == CV_8U || depth == CV_32F) in function cvtColor
|
cv2.error
|
def repo_all_list(self, project_key):
"""
Get all repositories list from project
:param project_key:
:return:
"""
return self.repo_list(project_key, limit=None)
|
def repo_all_list(self, project_key):
"""
Get all repositories list from project
:param project_key:
:return:
"""
url = self._url_repos(project_key)
return self.repo_list(url, limit=None)
|
https://github.com/atlassian-api/atlassian-python-api/issues/614
|
Traceback (most recent call last):
File "[ProjectPath]/task/fetch_from_git.py", line 112, in <module>
repo_all = list_of_repo()
File "[ProjectPath]/task/fetch_from_git.py", line 17, in list_of_repo
list_of_repositories = fetch()
File "[ProjectPath]\tools\bitbucket.py", line 51, in fetch
for repo in bitbucket_api.repo_all_list(project['key']):
File "[ProjectPath]\.tox\py3\lib\site-packages\atlassian\bitbucket.py", line 659, in repo_all_list
return self.repo_list(url, limit=None)
File "[ProjectPath]\.tox\py3\lib\site-packages\atlassian\bitbucket.py", line 650, in repo_list
return self._get_paged(url, params=params)
File "[ProjectPath]\.tox\py3\lib\site-packages\atlassian\bitbucket.py", line 32, in _get_paged
response = self.get(url, params=params)
File "[ProjectPath]\.tox\py3\lib\site-packages\atlassian\rest_client.py", line 257, in get
response = self.request(
File "[ProjectPath]\.tox\py3\lib\site-packages\atlassian\rest_client.py", line 233, in request
response.raise_for_status()
File "[ProjectPath]\.tox\py3\lib\site-packages\requests\models.py", line 941, in raise_for_status
raise HTTPError(http_error_msg, response=self)
requests.exceptions.HTTPError: 404 Client Error: for url: https://[onPremURL]/rest/api/1.0/projects/rest/api/1.0/projects/ABACUS/repos/repos
|
requests.exceptions.HTTPError
|
def build(target_python, requirements):
"""
Builds an APK given a target Python and a set of requirements.
"""
if not requirements:
return
testapp = "setup_testapp_python2.py"
android_sdk_home = os.environ["ANDROID_SDK_HOME"]
android_ndk_home = os.environ["ANDROID_NDK_HOME"]
if target_python == TargetPython.python3:
testapp = "setup_testapp_python3.py"
requirements.add(target_python.name)
requirements = ",".join(requirements)
print("requirements:", requirements)
with current_directory("testapps/"):
try:
for line in sh.python(
testapp,
"apk",
"--sdk-dir",
android_sdk_home,
"--ndk-dir",
android_ndk_home,
"--bootstrap",
"sdl2",
"--requirements",
requirements,
_err_to_out=True,
_iter=True,
):
print(line)
except sh.ErrorReturnCode as e:
raise
|
def build(target_python, requirements):
"""
Builds an APK given a target Python and a set of requirements.
"""
if not requirements:
return
testapp = "setup_testapp_python2.py"
android_sdk_home = os.environ["ANDROID_SDK_HOME"]
android_ndk_home = os.environ["ANDROID_NDK_HOME"]
crystax_ndk_home = os.environ["CRYSTAX_NDK_HOME"]
if target_python == TargetPython.python3crystax:
android_ndk_home = crystax_ndk_home
testapp = "setup_testapp_python3.py"
requirements.add(target_python.name)
requirements = ",".join(requirements)
print("requirements:", requirements)
with current_directory("testapps/"):
try:
for line in sh.python(
testapp,
"apk",
"--sdk-dir",
android_sdk_home,
"--ndk-dir",
android_ndk_home,
"--bootstrap",
"sdl2",
"--requirements",
requirements,
_err_to_out=True,
_iter=True,
):
print(line)
except sh.ErrorReturnCode as e:
raise
|
https://github.com/kivy/python-for-android/issues/1485
|
Traceback (most recent call last):
File "./ci/rebuild_updated_recipes.py", line 99, in <module>
main()
File "./ci/rebuild_updated_recipes.py", line 95, in main
build(target_python, recipes)
File "./ci/rebuild_updated_recipes.py", line 59, in build
crystax_ndk_home = os.environ['CRYSTAX_NDK_HOME']
File "/home/user/venv/lib/python2.7/UserDict.py", line 40, in __getitem__
raise KeyError(key)
KeyError: 'CRYSTAX_NDK_HOME'
|
KeyError
|
def main():
target_python = TargetPython.python3
recipes = modified_recipes()
print("recipes modified:", recipes)
recipes -= CORE_RECIPES
print("recipes to build:", recipes)
context = Context()
build_order, python_modules, bs = get_recipe_order_and_bootstrap(
context, recipes, None
)
# fallback to python2 if default target is not compatible
if target_python.name not in build_order:
print("incompatible with {}".format(target_python.name))
target_python = TargetPython.python2
print("falling back to {}".format(target_python.name))
# removing the known broken recipe for the given target
broken_recipes = BROKEN_RECIPES[target_python]
recipes -= broken_recipes
print("recipes to build (no broken):", recipes)
build(target_python, recipes)
|
def main():
target_python = TargetPython.python3crystax
recipes = modified_recipes()
print("recipes modified:", recipes)
recipes -= CORE_RECIPES
print("recipes to build:", recipes)
context = Context()
build_order, python_modules, bs = get_recipe_order_and_bootstrap(
context, recipes, None
)
# fallback to python2 if default target is not compatible
if target_python.name not in build_order:
print("incompatible with {}".format(target_python.name))
target_python = TargetPython.python2
print("falling back to {}".format(target_python.name))
# removing the known broken recipe for the given target
broken_recipes = BROKEN_RECIPES[target_python]
recipes -= broken_recipes
print("recipes to build (no broken):", recipes)
build(target_python, recipes)
|
https://github.com/kivy/python-for-android/issues/1485
|
Traceback (most recent call last):
File "./ci/rebuild_updated_recipes.py", line 99, in <module>
main()
File "./ci/rebuild_updated_recipes.py", line 95, in main
build(target_python, recipes)
File "./ci/rebuild_updated_recipes.py", line 59, in build
crystax_ndk_home = os.environ['CRYSTAX_NDK_HOME']
File "/home/user/venv/lib/python2.7/UserDict.py", line 40, in __getitem__
raise KeyError(key)
KeyError: 'CRYSTAX_NDK_HOME'
|
KeyError
|
def _get_command_to_run(query):
params = shlex_split(query)
__check_query_params(params)
cmd = []
for c in command:
if c == "{{QUERY}}":
cmd.extend(params)
else:
cmd.append(c)
return cmd
|
def _get_command_to_run(query):
params = shlex_split(query.decode("utf-8"))
__check_query_params(params)
cmd = []
for c in command:
if c == "{{QUERY}}":
cmd.extend(params)
else:
cmd.append(c)
return cmd
|
https://github.com/searx/searx/issues/2355
|
: Traceback (most recent call last):
: File "/opt/searx/searx/searx/search.py", line 281, in search_one_offline_request_safe
: search_results = search_one_offline_request(engine, query, request_params)
: File "/opt/searx/searx/searx/search.py", line 274, in search_one_offline_request
: return engine.search(query, request_params)
: File "/opt/searx/searx/searx/engines/command.py", line 70, in search
: cmd = _get_command_to_run(query)
: File "/opt/searx/searx/searx/engines/command.py", line 83, in _get_command_to_run
: params = shlex_split(query.decode('utf-8'))
: AttributeError: 'str' object has no attribute 'decode'
|
AttributeError
|
def response(resp):
"""Get response from google's search request"""
results = []
# detect google sorry
resp_url = urlparse(resp.url)
if resp_url.netloc == "sorry.google.com" or resp_url.path == "/sorry/IndexRedirect":
raise RuntimeWarning("sorry.google.com")
if resp_url.path.startswith("/sorry"):
raise RuntimeWarning(gettext("CAPTCHA required"))
# which subdomain ?
# subdomain = resp.search_params.get('google_subdomain')
# convert the text to dom
dom = html.fromstring(resp.text)
# results --> answer
answer = eval_xpath(dom, '//div[contains(@class, "LGOjhe")]//text()')
if answer:
results.append({"answer": " ".join(answer)})
else:
logger.debug("did not found 'answer'")
# results --> number_of_results
try:
_txt = eval_xpath(dom, '//div[@id="result-stats"]//text()')[0]
_digit = "".join([n for n in _txt if n.isdigit()])
number_of_results = int(_digit)
results.append({"number_of_results": number_of_results})
except Exception as e: # pylint: disable=broad-except
logger.debug("did not 'number_of_results'")
logger.error(e, exc_info=True)
# parse results
for result in eval_xpath(dom, results_xpath):
# google *sections*
if extract_text(eval_xpath(result, g_section_with_header)):
logger.debug("ingoring <g-section-with-header>")
continue
try:
title_tag = eval_xpath(result, title_xpath)
if not title_tag:
# this not one of the common google results *section*
logger.debug('ingoring <div class="g" ../> section: missing title')
continue
title = extract_text(title_tag[0])
url = eval_xpath(result, href_xpath)[0]
content = extract_text_from_dom(result, content_xpath)
results.append({"url": url, "title": title, "content": content})
except Exception as e: # pylint: disable=broad-except
logger.error(e, exc_info=True)
# from lxml import etree
# logger.debug(etree.tostring(result, pretty_print=True))
# import pdb
# pdb.set_trace()
continue
# parse suggestion
for suggestion in eval_xpath(dom, suggestion_xpath):
# append suggestion
results.append({"suggestion": extract_text(suggestion)})
for correction in eval_xpath(dom, spelling_suggestion_xpath):
results.append({"correction": extract_text(correction)})
# return results
return results
|
def response(resp):
"""Get response from google's search request"""
results = []
# detect google sorry
resp_url = urlparse(resp.url)
if resp_url.netloc == "sorry.google.com" or resp_url.path == "/sorry/IndexRedirect":
raise RuntimeWarning("sorry.google.com")
if resp_url.path.startswith("/sorry"):
raise RuntimeWarning(gettext("CAPTCHA required"))
# which subdomain ?
# subdomain = resp.search_params.get('google_subdomain')
# convert the text to dom
dom = html.fromstring(resp.text)
# results --> answer
answer = eval_xpath(dom, '//div[contains(@class, "LGOjhe")]//text()')
if answer:
results.append({"answer": " ".join(answer)})
else:
logger.debug("did not found 'answer'")
# results --> number_of_results
try:
_txt = eval_xpath(dom, '//div[@id="result-stats"]//text()')[0]
_digit = "".join([n for n in _txt if n.isdigit()])
number_of_results = int(_digit)
results.append({"number_of_results": number_of_results})
except Exception as e: # pylint: disable=broad-except
logger.debug("did not 'number_of_results'")
logger.error(e, exc_info=True)
# parse results
for result in eval_xpath(dom, results_xpath):
# google *sections*
if extract_text(eval_xpath(result, g_section_with_header)):
logger.debug("ingoring <g-section-with-header>")
continue
try:
title = extract_text(eval_xpath(result, title_xpath)[0])
url = eval_xpath(result, href_xpath)[0]
content = extract_text_from_dom(result, content_xpath)
results.append({"url": url, "title": title, "content": content})
except Exception as e: # pylint: disable=broad-except
logger.error(e, exc_info=True)
# from lxml import etree
# logger.debug(etree.tostring(result, pretty_print=True))
# import pdb
# pdb.set_trace()
continue
# parse suggestion
for suggestion in eval_xpath(dom, suggestion_xpath):
# append suggestion
results.append({"suggestion": extract_text(suggestion)})
for correction in eval_xpath(dom, spelling_suggestion_xpath):
results.append({"correction": extract_text(correction)})
# return results
return results
|
https://github.com/searx/searx/issues/2234
|
ERROR:searx.google engine:list index out of range
Traceback (most recent call last):
File "/home/alexandre/code/zz/searx/searx/engines/google.py", line 253, in response
url = eval_xpath(result, href_xpath)[0]
IndexError: list index out of range
|
IndexError
|
def add_unresponsive_engine(self, engine_name, error_type, error_message=None):
self.unresponsive_engines.add((engine_name, error_type, error_message))
|
def add_unresponsive_engine(self, engine_error):
self.unresponsive_engines.add(engine_error)
|
https://github.com/searx/searx/issues/1920
|
Exception in thread aeb1ee8b-0fe7-4e90-8f29-0c487673c1eb:
Traceback (most recent call last):
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 421, in _make_request
six.raise_from(e, None)
File "<string>", line 3, in raise_from
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 416, in _make_request
httplib_response = conn.getresponse()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 1331, in getresponse
response.begin()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 297, in begin
version, status, reason = self._read_status()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 258, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/socket.py", line 586, in readinto
return self._sock.recv_into(b)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/contrib/pyopenssl.py", line 326, in recv_into
raise timeout("The read operation timed out")
socket.timeout: The read operation timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/adapters.py", line 449, in send
timeout=timeout
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 720, in urlopen
method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/util/retry.py", line 400, in increment
raise six.reraise(type(error), error, _stacktrace)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/packages/six.py", line 735, in reraise
raise value
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 672, in urlopen
chunked=chunked,
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 423, in _make_request
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 331, in _raise_timeout
self, url, "Read timed out. (read timeout=%s)" % timeout_value
urllib3.exceptions.ReadTimeoutError: HTTPSConnectionPool(host='gitlab.com', port=443): Read timed out. (read timeout=1.0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/p/searx/searx/search.py", line 160, in search_one_http_request_safe
search_results = search_one_http_request(engine, query, request_params)
File "/home/n/p/searx/searx/search.py", line 93, in search_one_http_request
response = send_http_request(engine, request_params)
File "/home/n/p/searx/searx/search.py", line 77, in send_http_request
return req(request_params['url'], **request_args)
File "/home/n/p/searx/searx/poolrequests.py", line 133, in get
return request('get', url, **kwargs)
File "/home/n/p/searx/searx/poolrequests.py", line 109, in request
response = session.request(method=method, url=url, **kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/sessions.py", line 533, in request
resp = self.send(prep, **send_kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/sessions.py", line 646, in send
r = adapter.send(request, **kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/adapters.py", line 529, in send
raise ReadTimeout(e, request=request)
requests.exceptions.ReadTimeout: HTTPSConnectionPool(host='gitlab.com', port=443): Read timed out. (read timeout=1.0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/threading.py", line 916, in _bootstrap_inner
self.run()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/threading.py", line 864, in run
self._target(*self._args, **self._kwargs)
File "/home/n/p/searx/searx/search.py", line 107, in search_one_request_safe
return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)
File "/home/n/p/searx/searx/search.py", line 189, in search_one_http_request_safe
result_container.add_unresponsive_engine((engine_name, gettext('timeout')))
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/flask_babel/__init__.py", line 548, in gettext
t = get_translations()
File "searx/webapp.py", line 168, in _get_translations
translation_locale = request.form.get('use-translation')
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/werkzeug/local.py", line 347, in __getattr__
return getattr(self._get_current_object(), name)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/werkzeug/local.py", line 306, in _get_current_object
return self.__local()
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/flask/globals.py", line 37, in _lookup_req_object
raise RuntimeError(_request_ctx_err_msg)
RuntimeError: Working outside of request context.
This typically means that you attempted to use functionality that needed
an active HTTP request. Consult the documentation on testing for
information about how to avoid this problem.
|
urllib3.exceptions.ReadTimeoutError
|
def search_one_offline_request_safe(
engine_name, query, request_params, result_container, start_time, timeout_limit
):
engine = engines[engine_name]
try:
search_results = search_one_offline_request(engine, query, request_params)
if search_results:
result_container.extend(engine_name, search_results)
engine_time = time() - start_time
result_container.add_timing(engine_name, engine_time, engine_time)
with threading.RLock():
engine.stats["engine_time"] += engine_time
engine.stats["engine_time_count"] += 1
except ValueError as e:
record_offline_engine_stats_on_error(engine, result_container, start_time)
logger.exception("engine {0} : invalid input : {1}".format(engine_name, e))
except Exception as e:
record_offline_engine_stats_on_error(engine, result_container, start_time)
result_container.add_unresponsive_engine(
engine_name, "unexpected crash", str(e)
)
logger.exception("engine {0} : exception : {1}".format(engine_name, e))
|
def search_one_offline_request_safe(
engine_name, query, request_params, result_container, start_time, timeout_limit
):
engine = engines[engine_name]
try:
search_results = search_one_offline_request(engine, query, request_params)
if search_results:
result_container.extend(engine_name, search_results)
engine_time = time() - start_time
result_container.add_timing(engine_name, engine_time, engine_time)
with threading.RLock():
engine.stats["engine_time"] += engine_time
engine.stats["engine_time_count"] += 1
except ValueError as e:
record_offline_engine_stats_on_error(engine, result_container, start_time)
logger.exception("engine {0} : invalid input : {1}".format(engine_name, e))
except Exception as e:
record_offline_engine_stats_on_error(engine, result_container, start_time)
result_container.add_unresponsive_engine(
(
engine_name,
"{0}: {1}".format(gettext("unexpected crash"), e),
)
)
logger.exception("engine {0} : exception : {1}".format(engine_name, e))
|
https://github.com/searx/searx/issues/1920
|
Exception in thread aeb1ee8b-0fe7-4e90-8f29-0c487673c1eb:
Traceback (most recent call last):
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 421, in _make_request
six.raise_from(e, None)
File "<string>", line 3, in raise_from
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 416, in _make_request
httplib_response = conn.getresponse()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 1331, in getresponse
response.begin()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 297, in begin
version, status, reason = self._read_status()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 258, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/socket.py", line 586, in readinto
return self._sock.recv_into(b)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/contrib/pyopenssl.py", line 326, in recv_into
raise timeout("The read operation timed out")
socket.timeout: The read operation timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/adapters.py", line 449, in send
timeout=timeout
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 720, in urlopen
method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/util/retry.py", line 400, in increment
raise six.reraise(type(error), error, _stacktrace)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/packages/six.py", line 735, in reraise
raise value
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 672, in urlopen
chunked=chunked,
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 423, in _make_request
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 331, in _raise_timeout
self, url, "Read timed out. (read timeout=%s)" % timeout_value
urllib3.exceptions.ReadTimeoutError: HTTPSConnectionPool(host='gitlab.com', port=443): Read timed out. (read timeout=1.0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/p/searx/searx/search.py", line 160, in search_one_http_request_safe
search_results = search_one_http_request(engine, query, request_params)
File "/home/n/p/searx/searx/search.py", line 93, in search_one_http_request
response = send_http_request(engine, request_params)
File "/home/n/p/searx/searx/search.py", line 77, in send_http_request
return req(request_params['url'], **request_args)
File "/home/n/p/searx/searx/poolrequests.py", line 133, in get
return request('get', url, **kwargs)
File "/home/n/p/searx/searx/poolrequests.py", line 109, in request
response = session.request(method=method, url=url, **kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/sessions.py", line 533, in request
resp = self.send(prep, **send_kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/sessions.py", line 646, in send
r = adapter.send(request, **kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/adapters.py", line 529, in send
raise ReadTimeout(e, request=request)
requests.exceptions.ReadTimeout: HTTPSConnectionPool(host='gitlab.com', port=443): Read timed out. (read timeout=1.0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/threading.py", line 916, in _bootstrap_inner
self.run()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/threading.py", line 864, in run
self._target(*self._args, **self._kwargs)
File "/home/n/p/searx/searx/search.py", line 107, in search_one_request_safe
return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)
File "/home/n/p/searx/searx/search.py", line 189, in search_one_http_request_safe
result_container.add_unresponsive_engine((engine_name, gettext('timeout')))
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/flask_babel/__init__.py", line 548, in gettext
t = get_translations()
File "searx/webapp.py", line 168, in _get_translations
translation_locale = request.form.get('use-translation')
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/werkzeug/local.py", line 347, in __getattr__
return getattr(self._get_current_object(), name)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/werkzeug/local.py", line 306, in _get_current_object
return self.__local()
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/flask/globals.py", line 37, in _lookup_req_object
raise RuntimeError(_request_ctx_err_msg)
RuntimeError: Working outside of request context.
This typically means that you attempted to use functionality that needed
an active HTTP request. Consult the documentation on testing for
information about how to avoid this problem.
|
urllib3.exceptions.ReadTimeoutError
|
def search_one_http_request_safe(
engine_name, query, request_params, result_container, start_time, timeout_limit
):
# set timeout for all HTTP requests
requests_lib.set_timeout_for_thread(timeout_limit, start_time=start_time)
# reset the HTTP total time
requests_lib.reset_time_for_thread()
#
engine = engines[engine_name]
# suppose everything will be alright
requests_exception = False
try:
# send requests and parse the results
search_results = search_one_http_request(engine, query, request_params)
# check if the engine accepted the request
if search_results is not None:
# yes, so add results
result_container.extend(engine_name, search_results)
# update engine time when there is no exception
engine_time = time() - start_time
page_load_time = requests_lib.get_time_for_thread()
result_container.add_timing(engine_name, engine_time, page_load_time)
with threading.RLock():
engine.stats["engine_time"] += engine_time
engine.stats["engine_time_count"] += 1
# update stats with the total HTTP time
engine.stats["page_load_time"] += page_load_time
engine.stats["page_load_count"] += 1
except Exception as e:
# Timing
engine_time = time() - start_time
page_load_time = requests_lib.get_time_for_thread()
result_container.add_timing(engine_name, engine_time, page_load_time)
# Record the errors
with threading.RLock():
engine.stats["errors"] += 1
if issubclass(e.__class__, requests.exceptions.Timeout):
result_container.add_unresponsive_engine(engine_name, "timeout")
# requests timeout (connect or read)
logger.error(
"engine {0} : HTTP requests timeout"
"(search duration : {1} s, timeout: {2} s) : {3}".format(
engine_name, engine_time, timeout_limit, e.__class__.__name__
)
)
requests_exception = True
elif issubclass(e.__class__, requests.exceptions.RequestException):
result_container.add_unresponsive_engine(engine_name, "request exception")
# other requests exception
logger.exception(
"engine {0} : requests exception"
"(search duration : {1} s, timeout: {2} s) : {3}".format(
engine_name, engine_time, timeout_limit, e
)
)
requests_exception = True
else:
result_container.add_unresponsive_engine(
engine_name, "unexpected crash", str(e)
)
# others errors
logger.exception("engine {0} : exception : {1}".format(engine_name, e))
# suspend or not the engine if there are HTTP errors
with threading.RLock():
if requests_exception:
# update continuous_errors / suspend_end_time
engine.continuous_errors += 1
engine.suspend_end_time = time() + min(
settings["search"]["max_ban_time_on_fail"],
engine.continuous_errors * settings["search"]["ban_time_on_fail"],
)
else:
# no HTTP error (perhaps an engine error)
# anyway, reset the suspend variables
engine.continuous_errors = 0
engine.suspend_end_time = 0
|
def search_one_http_request_safe(
engine_name, query, request_params, result_container, start_time, timeout_limit
):
# set timeout for all HTTP requests
requests_lib.set_timeout_for_thread(timeout_limit, start_time=start_time)
# reset the HTTP total time
requests_lib.reset_time_for_thread()
#
engine = engines[engine_name]
# suppose everything will be alright
requests_exception = False
try:
# send requests and parse the results
search_results = search_one_http_request(engine, query, request_params)
# check if the engine accepted the request
if search_results is not None:
# yes, so add results
result_container.extend(engine_name, search_results)
# update engine time when there is no exception
engine_time = time() - start_time
page_load_time = requests_lib.get_time_for_thread()
result_container.add_timing(engine_name, engine_time, page_load_time)
with threading.RLock():
engine.stats["engine_time"] += engine_time
engine.stats["engine_time_count"] += 1
# update stats with the total HTTP time
engine.stats["page_load_time"] += page_load_time
engine.stats["page_load_count"] += 1
except Exception as e:
# Timing
engine_time = time() - start_time
page_load_time = requests_lib.get_time_for_thread()
result_container.add_timing(engine_name, engine_time, page_load_time)
# Record the errors
with threading.RLock():
engine.stats["errors"] += 1
if issubclass(e.__class__, requests.exceptions.Timeout):
result_container.add_unresponsive_engine((engine_name, gettext("timeout")))
# requests timeout (connect or read)
logger.error(
"engine {0} : HTTP requests timeout"
"(search duration : {1} s, timeout: {2} s) : {3}".format(
engine_name, engine_time, timeout_limit, e.__class__.__name__
)
)
requests_exception = True
elif issubclass(e.__class__, requests.exceptions.RequestException):
result_container.add_unresponsive_engine(
(engine_name, gettext("request exception"))
)
# other requests exception
logger.exception(
"engine {0} : requests exception"
"(search duration : {1} s, timeout: {2} s) : {3}".format(
engine_name, engine_time, timeout_limit, e
)
)
requests_exception = True
else:
result_container.add_unresponsive_engine(
(
engine_name,
"{0}: {1}".format(gettext("unexpected crash"), e),
)
)
# others errors
logger.exception("engine {0} : exception : {1}".format(engine_name, e))
# suspend or not the engine if there are HTTP errors
with threading.RLock():
if requests_exception:
# update continuous_errors / suspend_end_time
engine.continuous_errors += 1
engine.suspend_end_time = time() + min(
settings["search"]["max_ban_time_on_fail"],
engine.continuous_errors * settings["search"]["ban_time_on_fail"],
)
else:
# no HTTP error (perhaps an engine error)
# anyway, reset the suspend variables
engine.continuous_errors = 0
engine.suspend_end_time = 0
|
https://github.com/searx/searx/issues/1920
|
Exception in thread aeb1ee8b-0fe7-4e90-8f29-0c487673c1eb:
Traceback (most recent call last):
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 421, in _make_request
six.raise_from(e, None)
File "<string>", line 3, in raise_from
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 416, in _make_request
httplib_response = conn.getresponse()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 1331, in getresponse
response.begin()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 297, in begin
version, status, reason = self._read_status()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 258, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/socket.py", line 586, in readinto
return self._sock.recv_into(b)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/contrib/pyopenssl.py", line 326, in recv_into
raise timeout("The read operation timed out")
socket.timeout: The read operation timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/adapters.py", line 449, in send
timeout=timeout
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 720, in urlopen
method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/util/retry.py", line 400, in increment
raise six.reraise(type(error), error, _stacktrace)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/packages/six.py", line 735, in reraise
raise value
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 672, in urlopen
chunked=chunked,
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 423, in _make_request
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 331, in _raise_timeout
self, url, "Read timed out. (read timeout=%s)" % timeout_value
urllib3.exceptions.ReadTimeoutError: HTTPSConnectionPool(host='gitlab.com', port=443): Read timed out. (read timeout=1.0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/p/searx/searx/search.py", line 160, in search_one_http_request_safe
search_results = search_one_http_request(engine, query, request_params)
File "/home/n/p/searx/searx/search.py", line 93, in search_one_http_request
response = send_http_request(engine, request_params)
File "/home/n/p/searx/searx/search.py", line 77, in send_http_request
return req(request_params['url'], **request_args)
File "/home/n/p/searx/searx/poolrequests.py", line 133, in get
return request('get', url, **kwargs)
File "/home/n/p/searx/searx/poolrequests.py", line 109, in request
response = session.request(method=method, url=url, **kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/sessions.py", line 533, in request
resp = self.send(prep, **send_kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/sessions.py", line 646, in send
r = adapter.send(request, **kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/adapters.py", line 529, in send
raise ReadTimeout(e, request=request)
requests.exceptions.ReadTimeout: HTTPSConnectionPool(host='gitlab.com', port=443): Read timed out. (read timeout=1.0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/threading.py", line 916, in _bootstrap_inner
self.run()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/threading.py", line 864, in run
self._target(*self._args, **self._kwargs)
File "/home/n/p/searx/searx/search.py", line 107, in search_one_request_safe
return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)
File "/home/n/p/searx/searx/search.py", line 189, in search_one_http_request_safe
result_container.add_unresponsive_engine((engine_name, gettext('timeout')))
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/flask_babel/__init__.py", line 548, in gettext
t = get_translations()
File "searx/webapp.py", line 168, in _get_translations
translation_locale = request.form.get('use-translation')
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/werkzeug/local.py", line 347, in __getattr__
return getattr(self._get_current_object(), name)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/werkzeug/local.py", line 306, in _get_current_object
return self.__local()
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/flask/globals.py", line 37, in _lookup_req_object
raise RuntimeError(_request_ctx_err_msg)
RuntimeError: Working outside of request context.
This typically means that you attempted to use functionality that needed
an active HTTP request. Consult the documentation on testing for
information about how to avoid this problem.
|
urllib3.exceptions.ReadTimeoutError
|
def search_multiple_requests(requests, result_container, start_time, timeout_limit):
search_id = uuid4().__str__()
for engine_name, query, request_params in requests:
th = threading.Thread(
target=search_one_request_safe,
args=(
engine_name,
query,
request_params,
result_container,
start_time,
timeout_limit,
),
name=search_id,
)
th._engine_name = engine_name
th.start()
for th in threading.enumerate():
if th.name == search_id:
remaining_time = max(0.0, timeout_limit - (time() - start_time))
th.join(remaining_time)
if th.isAlive():
result_container.add_unresponsive_engine(th._engine_name, "timeout")
logger.warning("engine timeout: {0}".format(th._engine_name))
|
def search_multiple_requests(requests, result_container, start_time, timeout_limit):
search_id = uuid4().__str__()
for engine_name, query, request_params in requests:
th = threading.Thread(
target=search_one_request_safe,
args=(
engine_name,
query,
request_params,
result_container,
start_time,
timeout_limit,
),
name=search_id,
)
th._engine_name = engine_name
th.start()
for th in threading.enumerate():
if th.name == search_id:
remaining_time = max(0.0, timeout_limit - (time() - start_time))
th.join(remaining_time)
if th.isAlive():
result_container.add_unresponsive_engine(
(th._engine_name, gettext("timeout"))
)
logger.warning("engine timeout: {0}".format(th._engine_name))
|
https://github.com/searx/searx/issues/1920
|
Exception in thread aeb1ee8b-0fe7-4e90-8f29-0c487673c1eb:
Traceback (most recent call last):
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 421, in _make_request
six.raise_from(e, None)
File "<string>", line 3, in raise_from
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 416, in _make_request
httplib_response = conn.getresponse()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 1331, in getresponse
response.begin()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 297, in begin
version, status, reason = self._read_status()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 258, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/socket.py", line 586, in readinto
return self._sock.recv_into(b)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/contrib/pyopenssl.py", line 326, in recv_into
raise timeout("The read operation timed out")
socket.timeout: The read operation timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/adapters.py", line 449, in send
timeout=timeout
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 720, in urlopen
method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/util/retry.py", line 400, in increment
raise six.reraise(type(error), error, _stacktrace)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/packages/six.py", line 735, in reraise
raise value
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 672, in urlopen
chunked=chunked,
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 423, in _make_request
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 331, in _raise_timeout
self, url, "Read timed out. (read timeout=%s)" % timeout_value
urllib3.exceptions.ReadTimeoutError: HTTPSConnectionPool(host='gitlab.com', port=443): Read timed out. (read timeout=1.0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/p/searx/searx/search.py", line 160, in search_one_http_request_safe
search_results = search_one_http_request(engine, query, request_params)
File "/home/n/p/searx/searx/search.py", line 93, in search_one_http_request
response = send_http_request(engine, request_params)
File "/home/n/p/searx/searx/search.py", line 77, in send_http_request
return req(request_params['url'], **request_args)
File "/home/n/p/searx/searx/poolrequests.py", line 133, in get
return request('get', url, **kwargs)
File "/home/n/p/searx/searx/poolrequests.py", line 109, in request
response = session.request(method=method, url=url, **kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/sessions.py", line 533, in request
resp = self.send(prep, **send_kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/sessions.py", line 646, in send
r = adapter.send(request, **kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/adapters.py", line 529, in send
raise ReadTimeout(e, request=request)
requests.exceptions.ReadTimeout: HTTPSConnectionPool(host='gitlab.com', port=443): Read timed out. (read timeout=1.0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/threading.py", line 916, in _bootstrap_inner
self.run()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/threading.py", line 864, in run
self._target(*self._args, **self._kwargs)
File "/home/n/p/searx/searx/search.py", line 107, in search_one_request_safe
return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)
File "/home/n/p/searx/searx/search.py", line 189, in search_one_http_request_safe
result_container.add_unresponsive_engine((engine_name, gettext('timeout')))
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/flask_babel/__init__.py", line 548, in gettext
t = get_translations()
File "searx/webapp.py", line 168, in _get_translations
translation_locale = request.form.get('use-translation')
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/werkzeug/local.py", line 347, in __getattr__
return getattr(self._get_current_object(), name)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/werkzeug/local.py", line 306, in _get_current_object
return self.__local()
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/flask/globals.py", line 37, in _lookup_req_object
raise RuntimeError(_request_ctx_err_msg)
RuntimeError: Working outside of request context.
This typically means that you attempted to use functionality that needed
an active HTTP request. Consult the documentation on testing for
information about how to avoid this problem.
|
urllib3.exceptions.ReadTimeoutError
|
def _get_translations():
if has_request_context() and request.form.get("use-translation") == "oc":
babel_ext = flask_babel.current_app.extensions["babel"]
return Translations.load(next(babel_ext.translation_directories), "oc")
return _flask_babel_get_translations()
|
def _get_translations():
translation_locale = request.form.get("use-translation")
if translation_locale:
babel_ext = flask_babel.current_app.extensions["babel"]
translation = Translations.load(next(babel_ext.translation_directories), "oc")
else:
translation = _flask_babel_get_translations()
return translation
|
https://github.com/searx/searx/issues/1920
|
Exception in thread aeb1ee8b-0fe7-4e90-8f29-0c487673c1eb:
Traceback (most recent call last):
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 421, in _make_request
six.raise_from(e, None)
File "<string>", line 3, in raise_from
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 416, in _make_request
httplib_response = conn.getresponse()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 1331, in getresponse
response.begin()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 297, in begin
version, status, reason = self._read_status()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 258, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/socket.py", line 586, in readinto
return self._sock.recv_into(b)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/contrib/pyopenssl.py", line 326, in recv_into
raise timeout("The read operation timed out")
socket.timeout: The read operation timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/adapters.py", line 449, in send
timeout=timeout
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 720, in urlopen
method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/util/retry.py", line 400, in increment
raise six.reraise(type(error), error, _stacktrace)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/packages/six.py", line 735, in reraise
raise value
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 672, in urlopen
chunked=chunked,
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 423, in _make_request
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 331, in _raise_timeout
self, url, "Read timed out. (read timeout=%s)" % timeout_value
urllib3.exceptions.ReadTimeoutError: HTTPSConnectionPool(host='gitlab.com', port=443): Read timed out. (read timeout=1.0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/p/searx/searx/search.py", line 160, in search_one_http_request_safe
search_results = search_one_http_request(engine, query, request_params)
File "/home/n/p/searx/searx/search.py", line 93, in search_one_http_request
response = send_http_request(engine, request_params)
File "/home/n/p/searx/searx/search.py", line 77, in send_http_request
return req(request_params['url'], **request_args)
File "/home/n/p/searx/searx/poolrequests.py", line 133, in get
return request('get', url, **kwargs)
File "/home/n/p/searx/searx/poolrequests.py", line 109, in request
response = session.request(method=method, url=url, **kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/sessions.py", line 533, in request
resp = self.send(prep, **send_kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/sessions.py", line 646, in send
r = adapter.send(request, **kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/adapters.py", line 529, in send
raise ReadTimeout(e, request=request)
requests.exceptions.ReadTimeout: HTTPSConnectionPool(host='gitlab.com', port=443): Read timed out. (read timeout=1.0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/threading.py", line 916, in _bootstrap_inner
self.run()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/threading.py", line 864, in run
self._target(*self._args, **self._kwargs)
File "/home/n/p/searx/searx/search.py", line 107, in search_one_request_safe
return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)
File "/home/n/p/searx/searx/search.py", line 189, in search_one_http_request_safe
result_container.add_unresponsive_engine((engine_name, gettext('timeout')))
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/flask_babel/__init__.py", line 548, in gettext
t = get_translations()
File "searx/webapp.py", line 168, in _get_translations
translation_locale = request.form.get('use-translation')
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/werkzeug/local.py", line 347, in __getattr__
return getattr(self._get_current_object(), name)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/werkzeug/local.py", line 306, in _get_current_object
return self.__local()
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/flask/globals.py", line 37, in _lookup_req_object
raise RuntimeError(_request_ctx_err_msg)
RuntimeError: Working outside of request context.
This typically means that you attempted to use functionality that needed
an active HTTP request. Consult the documentation on testing for
information about how to avoid this problem.
|
urllib3.exceptions.ReadTimeoutError
|
def index():
"""Render index page.
Supported outputs: html, json, csv, rss.
"""
# output_format
output_format = request.form.get("format", "html")
if output_format not in ["html", "csv", "json", "rss"]:
output_format = "html"
# check if there is query
if request.form.get("q") is None:
if output_format == "html":
return render(
"index.html",
)
else:
return index_error(output_format, "No query"), 400
# search
search_query = None
raw_text_query = None
result_container = None
try:
search_query, raw_text_query = get_search_query_from_webapp(
request.preferences, request.form
)
# search = Search(search_query) # without plugins
search = SearchWithPlugins(search_query, request.user_plugins, request)
result_container = search.search()
except Exception as e:
# log exception
logger.exception("search error")
# is it an invalid input parameter or something else ?
if issubclass(e.__class__, SearxParameterException):
return index_error(output_format, e.message), 400
else:
return index_error(output_format, gettext("search error")), 500
# results
results = result_container.get_ordered_results()
number_of_results = result_container.results_number()
if number_of_results < result_container.results_length():
number_of_results = 0
# UI
advanced_search = request.form.get("advanced_search", None)
# Server-Timing header
request.timings = result_container.get_timings()
# output
for result in results:
if output_format == "html":
if "content" in result and result["content"]:
result["content"] = highlight_content(
escape(result["content"][:1024]), search_query.query
)
if "title" in result and result["title"]:
result["title"] = highlight_content(
escape(result["title"] or ""), search_query.query
)
else:
if result.get("content"):
result["content"] = html_to_text(result["content"]).strip()
# removing html content and whitespace duplications
result["title"] = " ".join(html_to_text(result["title"]).strip().split())
if "url" in result:
result["pretty_url"] = prettify_url(result["url"])
# TODO, check if timezone is calculated right
if "publishedDate" in result:
try: # test if publishedDate >= 1900 (datetime module bug)
result["pubdate"] = result["publishedDate"].strftime(
"%Y-%m-%d %H:%M:%S%z"
)
except ValueError:
result["publishedDate"] = None
else:
if result["publishedDate"].replace(
tzinfo=None
) >= datetime.now() - timedelta(days=1):
timedifference = datetime.now() - result["publishedDate"].replace(
tzinfo=None
)
minutes = int((timedifference.seconds / 60) % 60)
hours = int(timedifference.seconds / 60 / 60)
if hours == 0:
result["publishedDate"] = gettext(
"{minutes} minute(s) ago"
).format(minutes=minutes)
else:
result["publishedDate"] = gettext(
"{hours} hour(s), {minutes} minute(s) ago"
).format(hours=hours, minutes=minutes) # noqa
else:
result["publishedDate"] = format_date(result["publishedDate"])
if output_format == "json":
return Response(
json.dumps(
{
"query": search_query.query.decode("utf-8"),
"number_of_results": number_of_results,
"results": results,
"answers": list(result_container.answers),
"corrections": list(result_container.corrections),
"infoboxes": result_container.infoboxes,
"suggestions": list(result_container.suggestions),
"unresponsive_engines": __get_translated_errors(
result_container.unresponsive_engines
),
}, # noqa
default=lambda item: list(item) if isinstance(item, set) else item,
),
mimetype="application/json",
)
elif output_format == "csv":
csv = UnicodeWriter(StringIO())
keys = ("title", "url", "content", "host", "engine", "score", "type")
csv.writerow(keys)
for row in results:
row["host"] = row["parsed_url"].netloc
row["type"] = "result"
csv.writerow([row.get(key, "") for key in keys])
for a in result_container.answers:
row = {"title": a, "type": "answer"}
csv.writerow([row.get(key, "") for key in keys])
for a in result_container.suggestions:
row = {"title": a, "type": "suggestion"}
csv.writerow([row.get(key, "") for key in keys])
for a in result_container.corrections:
row = {"title": a, "type": "correction"}
csv.writerow([row.get(key, "") for key in keys])
csv.stream.seek(0)
response = Response(csv.stream.read(), mimetype="application/csv")
cont_disp = "attachment;Filename=searx_-_{0}.csv".format(
search_query.query.decode("utf-8")
)
response.headers.add("Content-Disposition", cont_disp)
return response
elif output_format == "rss":
response_rss = render(
"opensearch_response_rss.xml",
results=results,
answers=result_container.answers,
corrections=result_container.corrections,
suggestions=result_container.suggestions,
q=request.form["q"],
number_of_results=number_of_results,
base_url=get_base_url(),
override_theme="__common__",
)
return Response(response_rss, mimetype="text/xml")
# HTML output format
# suggestions: use RawTextQuery to get the suggestion URLs with the same bang
suggestion_urls = list(
map(
lambda suggestion: {
"url": raw_text_query.changeSearchQuery(suggestion).getFullQuery(),
"title": suggestion,
},
result_container.suggestions,
)
)
correction_urls = list(
map(
lambda correction: {
"url": raw_text_query.changeSearchQuery(correction).getFullQuery(),
"title": correction,
},
result_container.corrections,
)
)
#
return render(
"results.html",
results=results,
q=request.form["q"],
selected_categories=search_query.categories,
pageno=search_query.pageno,
time_range=search_query.time_range,
number_of_results=format_decimal(number_of_results),
advanced_search=advanced_search,
suggestions=suggestion_urls,
answers=result_container.answers,
corrections=correction_urls,
infoboxes=result_container.infoboxes,
paging=result_container.paging,
unresponsive_engines=__get_translated_errors(
result_container.unresponsive_engines
),
current_language=match_language(
search_query.lang,
LANGUAGE_CODES,
fallback=request.preferences.get_value("language"),
),
base_url=get_base_url(),
theme=get_current_theme_name(),
favicons=global_favicons[themes.index(get_current_theme_name())],
timeout_limit=request.form.get("timeout_limit", None),
)
|
def index():
"""Render index page.
Supported outputs: html, json, csv, rss.
"""
# output_format
output_format = request.form.get("format", "html")
if output_format not in ["html", "csv", "json", "rss"]:
output_format = "html"
# check if there is query
if request.form.get("q") is None:
if output_format == "html":
return render(
"index.html",
)
else:
return index_error(output_format, "No query"), 400
# search
search_query = None
raw_text_query = None
result_container = None
try:
search_query, raw_text_query = get_search_query_from_webapp(
request.preferences, request.form
)
# search = Search(search_query) # without plugins
search = SearchWithPlugins(search_query, request.user_plugins, request)
result_container = search.search()
except Exception as e:
# log exception
logger.exception("search error")
# is it an invalid input parameter or something else ?
if issubclass(e.__class__, SearxParameterException):
return index_error(output_format, e.message), 400
else:
return index_error(output_format, gettext("search error")), 500
# results
results = result_container.get_ordered_results()
number_of_results = result_container.results_number()
if number_of_results < result_container.results_length():
number_of_results = 0
# UI
advanced_search = request.form.get("advanced_search", None)
# Server-Timing header
request.timings = result_container.get_timings()
# output
for result in results:
if output_format == "html":
if "content" in result and result["content"]:
result["content"] = highlight_content(
escape(result["content"][:1024]), search_query.query
)
if "title" in result and result["title"]:
result["title"] = highlight_content(
escape(result["title"] or ""), search_query.query
)
else:
if result.get("content"):
result["content"] = html_to_text(result["content"]).strip()
# removing html content and whitespace duplications
result["title"] = " ".join(html_to_text(result["title"]).strip().split())
if "url" in result:
result["pretty_url"] = prettify_url(result["url"])
# TODO, check if timezone is calculated right
if "publishedDate" in result:
try: # test if publishedDate >= 1900 (datetime module bug)
result["pubdate"] = result["publishedDate"].strftime(
"%Y-%m-%d %H:%M:%S%z"
)
except ValueError:
result["publishedDate"] = None
else:
if result["publishedDate"].replace(
tzinfo=None
) >= datetime.now() - timedelta(days=1):
timedifference = datetime.now() - result["publishedDate"].replace(
tzinfo=None
)
minutes = int((timedifference.seconds / 60) % 60)
hours = int(timedifference.seconds / 60 / 60)
if hours == 0:
result["publishedDate"] = gettext(
"{minutes} minute(s) ago"
).format(minutes=minutes)
else:
result["publishedDate"] = gettext(
"{hours} hour(s), {minutes} minute(s) ago"
).format(hours=hours, minutes=minutes) # noqa
else:
result["publishedDate"] = format_date(result["publishedDate"])
if output_format == "json":
return Response(
json.dumps(
{
"query": search_query.query.decode("utf-8"),
"number_of_results": number_of_results,
"results": results,
"answers": list(result_container.answers),
"corrections": list(result_container.corrections),
"infoboxes": result_container.infoboxes,
"suggestions": list(result_container.suggestions),
"unresponsive_engines": list(result_container.unresponsive_engines),
},
default=lambda item: list(item) if isinstance(item, set) else item,
),
mimetype="application/json",
)
elif output_format == "csv":
csv = UnicodeWriter(StringIO())
keys = ("title", "url", "content", "host", "engine", "score", "type")
csv.writerow(keys)
for row in results:
row["host"] = row["parsed_url"].netloc
row["type"] = "result"
csv.writerow([row.get(key, "") for key in keys])
for a in result_container.answers:
row = {"title": a, "type": "answer"}
csv.writerow([row.get(key, "") for key in keys])
for a in result_container.suggestions:
row = {"title": a, "type": "suggestion"}
csv.writerow([row.get(key, "") for key in keys])
for a in result_container.corrections:
row = {"title": a, "type": "correction"}
csv.writerow([row.get(key, "") for key in keys])
csv.stream.seek(0)
response = Response(csv.stream.read(), mimetype="application/csv")
cont_disp = "attachment;Filename=searx_-_{0}.csv".format(
search_query.query.decode("utf-8")
)
response.headers.add("Content-Disposition", cont_disp)
return response
elif output_format == "rss":
response_rss = render(
"opensearch_response_rss.xml",
results=results,
answers=result_container.answers,
corrections=result_container.corrections,
suggestions=result_container.suggestions,
q=request.form["q"],
number_of_results=number_of_results,
base_url=get_base_url(),
override_theme="__common__",
)
return Response(response_rss, mimetype="text/xml")
# HTML output format
# suggestions: use RawTextQuery to get the suggestion URLs with the same bang
suggestion_urls = list(
map(
lambda suggestion: {
"url": raw_text_query.changeSearchQuery(suggestion).getFullQuery(),
"title": suggestion,
},
result_container.suggestions,
)
)
correction_urls = list(
map(
lambda correction: {
"url": raw_text_query.changeSearchQuery(correction).getFullQuery(),
"title": correction,
},
result_container.corrections,
)
)
#
return render(
"results.html",
results=results,
q=request.form["q"],
selected_categories=search_query.categories,
pageno=search_query.pageno,
time_range=search_query.time_range,
number_of_results=format_decimal(number_of_results),
advanced_search=advanced_search,
suggestions=suggestion_urls,
answers=result_container.answers,
corrections=correction_urls,
infoboxes=result_container.infoboxes,
paging=result_container.paging,
unresponsive_engines=result_container.unresponsive_engines,
current_language=match_language(
search_query.lang,
LANGUAGE_CODES,
fallback=request.preferences.get_value("language"),
),
base_url=get_base_url(),
theme=get_current_theme_name(),
favicons=global_favicons[themes.index(get_current_theme_name())],
timeout_limit=request.form.get("timeout_limit", None),
)
|
https://github.com/searx/searx/issues/1920
|
Exception in thread aeb1ee8b-0fe7-4e90-8f29-0c487673c1eb:
Traceback (most recent call last):
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 421, in _make_request
six.raise_from(e, None)
File "<string>", line 3, in raise_from
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 416, in _make_request
httplib_response = conn.getresponse()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 1331, in getresponse
response.begin()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 297, in begin
version, status, reason = self._read_status()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/http/client.py", line 258, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/socket.py", line 586, in readinto
return self._sock.recv_into(b)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/contrib/pyopenssl.py", line 326, in recv_into
raise timeout("The read operation timed out")
socket.timeout: The read operation timed out
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/adapters.py", line 449, in send
timeout=timeout
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 720, in urlopen
method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/util/retry.py", line 400, in increment
raise six.reraise(type(error), error, _stacktrace)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/packages/six.py", line 735, in reraise
raise value
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 672, in urlopen
chunked=chunked,
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 423, in _make_request
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/urllib3/connectionpool.py", line 331, in _raise_timeout
self, url, "Read timed out. (read timeout=%s)" % timeout_value
urllib3.exceptions.ReadTimeoutError: HTTPSConnectionPool(host='gitlab.com', port=443): Read timed out. (read timeout=1.0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/p/searx/searx/search.py", line 160, in search_one_http_request_safe
search_results = search_one_http_request(engine, query, request_params)
File "/home/n/p/searx/searx/search.py", line 93, in search_one_http_request
response = send_http_request(engine, request_params)
File "/home/n/p/searx/searx/search.py", line 77, in send_http_request
return req(request_params['url'], **request_args)
File "/home/n/p/searx/searx/poolrequests.py", line 133, in get
return request('get', url, **kwargs)
File "/home/n/p/searx/searx/poolrequests.py", line 109, in request
response = session.request(method=method, url=url, **kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/sessions.py", line 533, in request
resp = self.send(prep, **send_kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/sessions.py", line 646, in send
r = adapter.send(request, **kwargs)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/requests/adapters.py", line 529, in send
raise ReadTimeout(e, request=request)
requests.exceptions.ReadTimeout: HTTPSConnectionPool(host='gitlab.com', port=443): Read timed out. (read timeout=1.0)
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/threading.py", line 916, in _bootstrap_inner
self.run()
File "/home/n/.pyenv/versions/3.6.3/lib/python3.6/threading.py", line 864, in run
self._target(*self._args, **self._kwargs)
File "/home/n/p/searx/searx/search.py", line 107, in search_one_request_safe
return search_one_http_request_safe(engine_name, query, request_params, result_container, start_time, timeout_limit)
File "/home/n/p/searx/searx/search.py", line 189, in search_one_http_request_safe
result_container.add_unresponsive_engine((engine_name, gettext('timeout')))
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/flask_babel/__init__.py", line 548, in gettext
t = get_translations()
File "searx/webapp.py", line 168, in _get_translations
translation_locale = request.form.get('use-translation')
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/werkzeug/local.py", line 347, in __getattr__
return getattr(self._get_current_object(), name)
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/werkzeug/local.py", line 306, in _get_current_object
return self.__local()
File "/home/n/p/searx/venv3.6/lib/python3.6/site-packages/flask/globals.py", line 37, in _lookup_req_object
raise RuntimeError(_request_ctx_err_msg)
RuntimeError: Working outside of request context.
This typically means that you attempted to use functionality that needed
an active HTTP request. Consult the documentation on testing for
information about how to avoid this problem.
|
urllib3.exceptions.ReadTimeoutError
|
def get_search_query_from_webapp(preferences, form):
# no text for the query ?
if not form.get("q"):
raise SearxParameterException("q", "")
# set blocked engines
disabled_engines = preferences.engines.get_disabled()
# parse query, if tags are set, which change
# the serch engine or search-language
raw_text_query = RawTextQuery(form["q"], disabled_engines)
raw_text_query.parse_query()
# set query
query = raw_text_query.getSearchQuery()
# get and check page number
pageno_param = form.get("pageno", "1")
if not pageno_param.isdigit() or int(pageno_param) < 1:
raise SearxParameterException("pageno", pageno_param)
query_pageno = int(pageno_param)
# get language
# set specific language if set on request, query or preferences
# TODO support search with multible languages
if len(raw_text_query.languages):
query_lang = raw_text_query.languages[-1]
elif "language" in form:
query_lang = form.get("language")
else:
query_lang = preferences.get_value("language")
# check language
if not VALID_LANGUAGE_CODE.match(query_lang):
raise SearxParameterException("language", query_lang)
# get safesearch
if "safesearch" in form:
query_safesearch = form.get("safesearch")
# first check safesearch
if not query_safesearch.isdigit():
raise SearxParameterException("safesearch", query_safesearch)
query_safesearch = int(query_safesearch)
else:
query_safesearch = preferences.get_value("safesearch")
# safesearch : second check
if query_safesearch < 0 or query_safesearch > 2:
raise SearxParameterException("safesearch", query_safesearch)
# get time_range
query_time_range = form.get("time_range")
# check time_range
if query_time_range not in ("None", None, "", "day", "week", "month", "year"):
raise SearxParameterException("time_range", query_time_range)
# query_engines
query_engines = raw_text_query.engines
# timeout_limit
query_timeout = raw_text_query.timeout_limit
if query_timeout is None and "timeout_limit" in form:
raw_time_limit = form.get("timeout_limit")
if raw_time_limit in ["None", ""]:
raw_time_limit = None
else:
try:
query_timeout = float(raw_time_limit)
except ValueError:
raise SearxParameterException("timeout_limit", raw_time_limit)
# query_categories
query_categories = []
# if engines are calculated from query,
# set categories by using that informations
if query_engines and raw_text_query.specific:
additional_categories = set()
for engine in query_engines:
if "from_bang" in engine and engine["from_bang"]:
additional_categories.add("none")
else:
additional_categories.add(engine["category"])
query_categories = list(additional_categories)
# otherwise, using defined categories to
# calculate which engines should be used
else:
# set categories/engines
load_default_categories = True
for pd_name, pd in form.items():
if pd_name == "categories":
query_categories.extend(
categ
for categ in map(unicode.strip, pd.split(","))
if categ in categories
)
elif pd_name == "engines":
pd_engines = [
{"category": engines[engine].categories[0], "name": engine}
for engine in map(unicode.strip, pd.split(","))
if engine in engines
]
if pd_engines:
query_engines.extend(pd_engines)
load_default_categories = False
elif pd_name.startswith("category_"):
category = pd_name[9:]
# if category is not found in list, skip
if category not in categories:
continue
if pd != "off":
# add category to list
query_categories.append(category)
elif category in query_categories:
# remove category from list if property is set to 'off'
query_categories.remove(category)
if not load_default_categories:
if not query_categories:
query_categories = list(
set(engine["category"] for engine in query_engines)
)
else:
# if no category is specified for this search,
# using user-defined default-configuration which
# (is stored in cookie)
if not query_categories:
cookie_categories = preferences.get_value("categories")
for ccateg in cookie_categories:
if ccateg in categories:
query_categories.append(ccateg)
# if still no category is specified, using general
# as default-category
if not query_categories:
query_categories = ["general"]
# using all engines for that search, which are
# declared under the specific categories
for categ in query_categories:
query_engines.extend(
{"category": categ, "name": engine.name}
for engine in categories[categ]
if (engine.name, categ) not in disabled_engines
)
query_engines = deduplicate_query_engines(query_engines)
return (
SearchQuery(
query,
query_engines,
query_categories,
query_lang,
query_safesearch,
query_pageno,
query_time_range,
query_timeout,
),
raw_text_query,
)
|
def get_search_query_from_webapp(preferences, form):
# no text for the query ?
if not form.get("q"):
raise SearxParameterException("q", "")
# set blocked engines
disabled_engines = preferences.engines.get_disabled()
# parse query, if tags are set, which change
# the serch engine or search-language
raw_text_query = RawTextQuery(form["q"], disabled_engines)
raw_text_query.parse_query()
# set query
query = raw_text_query.getSearchQuery()
# get and check page number
pageno_param = form.get("pageno", "1")
if not pageno_param.isdigit() or int(pageno_param) < 1:
raise SearxParameterException("pageno", pageno_param)
query_pageno = int(pageno_param)
# get language
# set specific language if set on request, query or preferences
# TODO support search with multible languages
if len(raw_text_query.languages):
query_lang = raw_text_query.languages[-1]
elif "language" in form:
query_lang = form.get("language")
else:
query_lang = preferences.get_value("language")
# check language
if not VALID_LANGUAGE_CODE.match(query_lang):
raise SearxParameterException("language", query_lang)
# get safesearch
if "safesearch" in form:
query_safesearch = form.get("safesearch")
# first check safesearch
if not query_safesearch.isdigit():
raise SearxParameterException("safesearch", query_safesearch)
query_safesearch = int(query_safesearch)
else:
query_safesearch = preferences.get_value("safesearch")
# safesearch : second check
if query_safesearch < 0 or query_safesearch > 2:
raise SearxParameterException("safesearch", query_safesearch)
# get time_range
query_time_range = form.get("time_range")
# check time_range
if query_time_range not in ("None", None, "", "day", "week", "month", "year"):
raise SearxParameterException("time_range", query_time_range)
# query_engines
query_engines = raw_text_query.engines
# timeout_limit
query_timeout = raw_text_query.timeout_limit
if query_timeout is None and "timeout_limit" in form:
raw_time_limit = form.get("timeout_limit")
try:
query_timeout = float(raw_time_limit)
except ValueError:
raise SearxParameterException("timeout_limit", raw_time_limit)
# query_categories
query_categories = []
# if engines are calculated from query,
# set categories by using that informations
if query_engines and raw_text_query.specific:
additional_categories = set()
for engine in query_engines:
if "from_bang" in engine and engine["from_bang"]:
additional_categories.add("none")
else:
additional_categories.add(engine["category"])
query_categories = list(additional_categories)
# otherwise, using defined categories to
# calculate which engines should be used
else:
# set categories/engines
load_default_categories = True
for pd_name, pd in form.items():
if pd_name == "categories":
query_categories.extend(
categ
for categ in map(unicode.strip, pd.split(","))
if categ in categories
)
elif pd_name == "engines":
pd_engines = [
{"category": engines[engine].categories[0], "name": engine}
for engine in map(unicode.strip, pd.split(","))
if engine in engines
]
if pd_engines:
query_engines.extend(pd_engines)
load_default_categories = False
elif pd_name.startswith("category_"):
category = pd_name[9:]
# if category is not found in list, skip
if category not in categories:
continue
if pd != "off":
# add category to list
query_categories.append(category)
elif category in query_categories:
# remove category from list if property is set to 'off'
query_categories.remove(category)
if not load_default_categories:
if not query_categories:
query_categories = list(
set(engine["category"] for engine in query_engines)
)
else:
# if no category is specified for this search,
# using user-defined default-configuration which
# (is stored in cookie)
if not query_categories:
cookie_categories = preferences.get_value("categories")
for ccateg in cookie_categories:
if ccateg in categories:
query_categories.append(ccateg)
# if still no category is specified, using general
# as default-category
if not query_categories:
query_categories = ["general"]
# using all engines for that search, which are
# declared under the specific categories
for categ in query_categories:
query_engines.extend(
{"category": categ, "name": engine.name}
for engine in categories[categ]
if (engine.name, categ) not in disabled_engines
)
query_engines = deduplicate_query_engines(query_engines)
return (
SearchQuery(
query,
query_engines,
query_categories,
query_lang,
query_safesearch,
query_pageno,
query_time_range,
query_timeout,
),
raw_text_query,
)
|
https://github.com/searx/searx/issues/1664
|
ERROR:searx.webapp:search error
Traceback (most recent call last):
File "searx/webapp.py", line 510, in index
search_query, raw_text_query = get_search_query_from_webapp(request.preferences, request.form)
File "/home/ave/Projects/searx/searx/search.py", line 285, in get_search_query_from_webapp
raise SearxParameterException('timeout_limit', raw_time_limit)
SearxParameterException: Invalid value "None" for parameter timeout_limit
|
SearxParameterException
|
def searx_bang(full_query):
"""check if the searchQuery contain a bang, and create fitting autocompleter results"""
# check if there is a query which can be parsed
if len(full_query.getSearchQuery()) == 0:
return []
results = []
# check if current query stats with !bang
first_char = full_query.getSearchQuery()[0]
if first_char == "!" or first_char == "?":
if len(full_query.getSearchQuery()) == 1:
# show some example queries
# TODO, check if engine is not avaliable
results.append(first_char + "images")
results.append(first_char + "wikipedia")
results.append(first_char + "osm")
else:
engine_query = full_query.getSearchQuery()[1:]
# check if query starts with categorie name
for categorie in categories:
if categorie.startswith(engine_query):
results.append(
first_char + "{categorie}".format(categorie=categorie)
)
# check if query starts with engine name
for engine in engines:
if engine.startswith(engine_query.replace("_", " ")):
results.append(
first_char + "{engine}".format(engine=engine.replace(" ", "_"))
)
# check if query starts with engine shortcut
for engine_shortcut in engine_shortcuts:
if engine_shortcut.startswith(engine_query):
results.append(
first_char
+ "{engine_shortcut}".format(engine_shortcut=engine_shortcut)
)
# check if current query stats with :bang
elif first_char == ":":
if len(full_query.getSearchQuery()) == 1:
# show some example queries
results.append(":en")
results.append(":en_us")
results.append(":english")
results.append(":united_kingdom")
else:
engine_query = full_query.getSearchQuery()[1:]
for lc in language_codes:
lang_id, lang_name, country, english_name = map(unicode.lower, lc)
# check if query starts with language-id
if lang_id.startswith(engine_query):
if len(engine_query) <= 2:
results.append(
":{lang_id}".format(lang_id=lang_id.split("-")[0])
)
else:
results.append(":{lang_id}".format(lang_id=lang_id))
# check if query starts with language name
if lang_name.startswith(engine_query) or english_name.startswith(
engine_query
):
results.append(":{lang_name}".format(lang_name=lang_name))
# check if query starts with country
if country.startswith(engine_query.replace("_", " ")):
results.append(
":{country}".format(country=country.replace(" ", "_"))
)
# remove duplicates
result_set = set(results)
# remove results which are already contained in the query
for query_part in full_query.query_parts:
if query_part in result_set:
result_set.remove(query_part)
# convert result_set back to list
return list(result_set)
|
def searx_bang(full_query):
"""check if the searchQuery contain a bang, and create fitting autocompleter results"""
# check if there is a query which can be parsed
if len(full_query.getSearchQuery()) == 0:
return []
results = []
# check if current query stats with !bang
first_char = full_query.getSearchQuery()[0]
if first_char == "!" or first_char == "?":
if len(full_query.getSearchQuery()) == 1:
# show some example queries
# TODO, check if engine is not avaliable
results.append(first_char + "images")
results.append(first_char + "wikipedia")
results.append(first_char + "osm")
else:
engine_query = full_query.getSearchQuery()[1:]
# check if query starts with categorie name
for categorie in categories:
if categorie.startswith(engine_query):
results.append(
first_char + "{categorie}".format(categorie=categorie)
)
# check if query starts with engine name
for engine in engines:
if engine.startswith(engine_query.replace("_", " ")):
results.append(
first_char + "{engine}".format(engine=engine.replace(" ", "_"))
)
# check if query starts with engine shortcut
for engine_shortcut in engine_shortcuts:
if engine_shortcut.startswith(engine_query):
results.append(
first_char
+ "{engine_shortcut}".format(engine_shortcut=engine_shortcut)
)
# check if current query stats with :bang
elif first_char == ":":
if len(full_query.getSearchQuery()) == 1:
# show some example queries
results.append(":en")
results.append(":en_us")
results.append(":english")
results.append(":united_kingdom")
else:
engine_query = full_query.getSearchQuery()[1:]
for lc in language_codes:
lang_id, lang_name, country, english_name = map(str.lower, lc)
# check if query starts with language-id
if lang_id.startswith(engine_query):
if len(engine_query) <= 2:
results.append(
":{lang_id}".format(lang_id=lang_id.split("-")[0])
)
else:
results.append(":{lang_id}".format(lang_id=lang_id))
# check if query starts with language name
if lang_name.startswith(engine_query) or english_name.startswith(
engine_query
):
results.append(":{lang_name}".format(lang_name=lang_name))
# check if query starts with country
if country.startswith(engine_query.replace("_", " ")):
results.append(
":{country}".format(country=country.replace(" ", "_"))
)
# remove duplicates
result_set = set(results)
# remove results which are already contained in the query
for query_part in full_query.query_parts:
if query_part in result_set:
result_set.remove(query_part)
# convert result_set back to list
return list(result_set)
|
https://github.com/searx/searx/issues/808
|
INFO:werkzeug:127.0.0.1 - - [03/Jan/2017 12:02:01] "GET /autocompleter?q=%3Ae HTTP/1.1" 500 -
Traceback (most recent call last):
File "/home/alexandre/code/searx/query2/ve/lib/python2.7/site-packages/flask/app.py", line 1994, in __call__
return self.wsgi_app(environ, start_response)
File "/home/alexandre/code/searx/query2/searx/searx/webapp.py", line 818, in __call__
return self.app(environ, start_response)
File "/home/alexandre/code/searx/query2/ve/lib/python2.7/site-packages/werkzeug/contrib/fixers.py", line 152, in __call__
return self.app(environ, start_response)
File "/home/alexandre/code/searx/query2/ve/lib/python2.7/site-packages/flask/app.py", line 1985, in wsgi_app
response = self.handle_exception(e)
File "/home/alexandre/code/searx/query2/ve/lib/python2.7/site-packages/flask/app.py", line 1540, in handle_exception
reraise(exc_type, exc_value, tb)
File "/home/alexandre/code/searx/query2/ve/lib/python2.7/site-packages/flask/app.py", line 1982, in wsgi_app
response = self.full_dispatch_request()
File "/home/alexandre/code/searx/query2/ve/lib/python2.7/site-packages/flask/app.py", line 1614, in full_dispatch_request
rv = self.handle_user_exception(e)
File "/home/alexandre/code/searx/query2/ve/lib/python2.7/site-packages/flask/app.py", line 1517, in handle_user_exception
reraise(exc_type, exc_value, tb)
File "/home/alexandre/code/searx/query2/ve/lib/python2.7/site-packages/flask/app.py", line 1612, in full_dispatch_request
rv = self.dispatch_request()
File "/home/alexandre/code/searx/query2/ve/lib/python2.7/site-packages/flask/app.py", line 1598, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/home/alexandre/code/searx/query2/searx/searx/webapp.py", line 554, in autocompleter
raw_results = searx_bang(raw_text_query)
File "/home/alexandre/code/searx/query2/searx/searx/autocomplete.py", line 84, in searx_bang
lang_id, lang_name, country, english_name = map(str.lower, lc)
TypeError: descriptor 'lower' requires a 'str' object but received a 'unicode'
|
TypeError
|
def run():
app.run(
debug=settings["general"]["debug"],
use_debugger=settings["general"]["debug"],
port=settings["server"]["port"],
host=settings["server"]["bind_address"],
threaded=True,
)
|
def run():
app.run(
debug=settings["general"]["debug"],
use_debugger=settings["general"]["debug"],
port=settings["server"]["port"],
host=settings["server"]["bind_address"],
)
|
https://github.com/searx/searx/issues/662
|
ERROR:searx.search:engine crash: ixquick
Traceback (most recent call last):
File "/root/searx/searx/search.py", line 40, in search_request_wrapper
ret = fn(url, **kwargs)
File "/root/searx/searx/poolrequests.py", line 100, in post
return request('post', url, data=data, **kwargs)
File "/root/searx/searx/poolrequests.py", line 79, in request
response = session.request(method=method, url=url, **kwargs)
File "/usr/lib/python2.7/site-packages/requests/sessions.py", line 475, in request
resp = self.send(prep, **send_kwargs)
File "/usr/lib/python2.7/site-packages/requests/sessions.py", line 585, in send
r = adapter.send(request, **kwargs)
File "/usr/lib/python2.7/site-packages/requests/adapters.py", line 459, in send
raise ConnectTimeout(e, request=request)
ConnectTimeout: HTTPSConnectionPool(host='www.ixquick.com', port=443): Max retries exceeded with url: /do/search (Caused by ConnectTimeoutError(<requests.packages.urllib3.connection.VerifiedHTTPSConnection object at 0x7f067c582350>, 'Connection to www.ixquick.com timed out. (connect timeout=6.0)'))
DEBUG:requests.packages.urllib3.connectionpool:"GET /search?p=%D1%80%D0%B0%D1%81%D1%88%D0%B8%D1%84%D1%80%D0%BE%D0%B2%D0%BA%D0%B0+%D0%BA%D0%BE%D0%B4%D0%BE%D0%B2+%D0%BE%D1%88%D0%B8%D0%B1%D0%BE%D0%BA+obd-2+%D1%81%D0%BA%D0%B0%D1%87%D0%B0%D1%82%D1%8C&b=1&fl=1&vl=lang_en HTTP/1.1" 200 None
DEBUG:requests.packages.urllib3.connectionpool:"POST /do/search HTTP/1.1" 200 None
WARNING:searx.search:engine timeout: ixquick
INFO:werkzeug:127.0.0.1 - - [14/Aug/2016 19:49:35] "GET /?q=%D1%80%D0%B0%D1%81%D1%88%D0%B8%D1%84%D1%80%D0%BE%D0%B2%D0%BA%D0%B0+%D0%BA%D0%BE%D0%B4%D0%BE%D0%B2+%D0%BE%D1%88%D0%B8%D0%B1%D0%BE%D0%BA+obd-2+%D1%81%D0%BA%D0%B0%D1%87%D0%B0%D1%82%D1%8C&category_general=1&format=rss&pageno=1 HTTP/1.0" 200 -
Exception in thread Thread-1:
Traceback (most recent call last):
File "/usr/lib64/python2.7/threading.py", line 811, in __bootstrap_inner
self.run()
File "/usr/lib64/python2.7/threading.py", line 764, in run
self.__target(*self.__args, **self.__kwargs)
File "/usr/lib/python2.7/site-packages/werkzeug/serving.py", line 659, in inner
srv.serve_forever()
File "/usr/lib/python2.7/site-packages/werkzeug/serving.py", line 499, in serve_forever
HTTPServer.serve_forever(self)
File "/usr/lib64/python2.7/SocketServer.py", line 238, in serve_forever
self._handle_request_noblock()
File "/usr/lib64/python2.7/SocketServer.py", line 297, in _handle_request_noblock
self.handle_error(request, client_address)
File "/usr/lib64/python2.7/SocketServer.py", line 295, in _handle_request_noblock
self.process_request(request, client_address)
File "/usr/lib64/python2.7/SocketServer.py", line 321, in process_request
self.finish_request(request, client_address)
File "/usr/lib64/python2.7/SocketServer.py", line 334, in finish_request
self.RequestHandlerClass(request, client_address, self)
File "/usr/lib64/python2.7/SocketServer.py", line 649, in __init__
self.handle()
File "/usr/lib/python2.7/site-packages/werkzeug/serving.py", line 216, in handle
rv = BaseHTTPRequestHandler.handle(self)
File "/usr/lib64/python2.7/BaseHTTPServer.py", line 340, in handle
self.handle_one_request()
File "/usr/lib/python2.7/site-packages/werkzeug/serving.py", line 251, in handle_one_request
return self.run_wsgi()
File "/usr/lib/python2.7/site-packages/werkzeug/serving.py", line 193, in run_wsgi
execute(self.server.app)
File "/usr/lib/python2.7/site-packages/werkzeug/serving.py", line 184, in execute
write(data)
File "/usr/lib/python2.7/site-packages/werkzeug/serving.py", line 152, in write
self.send_header(key, value)
File "/usr/lib64/python2.7/BaseHTTPServer.py", line 401, in send_header
self.wfile.write("%s: %s\r\n" % (keyword, value))
IOError: [Errno 32] Broken pipe
ERROR:searx.search:engine crash: ixquick
Traceback (most recent call last):
File "/root/searx/searx/search.py", line 40, in search_request_wrapper
ret = fn(url, **kwargs)
File "/root/searx/searx/poolrequests.py", line 100, in post
return request('post', url, data=data, **kwargs)
File "/root/searx/searx/poolrequests.py", line 79, in request
response = session.request(method=method, url=url, **kwargs)
File "/usr/lib/python2.7/site-packages/requests/sessions.py", line 475, in request
resp = self.send(prep, **send_kwargs)
File "/usr/lib/python2.7/site-packages/requests/sessions.py", line 585, in send
r = adapter.send(request, **kwargs)
File "/usr/lib/python2.7/site-packages/requests/adapters.py", line 459, in send
raise ConnectTimeout(e, request=request)
ConnectTimeout: HTTPSConnectionPool(host='www.ixquick.com', port=443): Max retries exceeded with url: /do/search (Caused by ConnectTimeoutError(<requests.packages.urllib3.connection.VerifiedHTTPSConnection object at 0x7f067c4e1710>, 'Connection to www.ixquick.com timed out. (connect timeout=6.0)'))
|
IOError
|
def _one_step_forward_per_replica(self, batch):
if self.config["gradient_accumulation_steps"] == 1:
gradients, per_replica_losses = self._calculate_gradient_per_batch(batch)
self._optimizer.apply_gradients(zip(gradients, self._trainable_variables), 1.0)
else:
# gradient acummulation here.
per_replica_losses = 0.0
for i in tf.range(self.config["gradient_accumulation_steps"]):
reduced_batch = {
k: v[
i * self.config["batch_size"] : (i + 1) * self.config["batch_size"]
]
for k, v in batch.items()
}
# run 1 step accumulate
reduced_batch_losses = self._calculate_gradient_per_batch(reduced_batch)
# sum per_replica_losses
per_replica_losses += reduced_batch_losses
gradients = self._gradient_accumulator.gradients
self._optimizer.apply_gradients(zip(gradients, self._trainable_variables), 1.0)
self._gradient_accumulator.reset()
return per_replica_losses
|
def _one_step_forward_per_replica(self, batch):
if self.config["gradient_accumulation_steps"] == 1:
gradients, per_replica_losses = self._calculate_gradient_per_batch(batch)
self._optimizer.apply_gradients(zip(gradients, self._trainable_variables))
else:
# gradient acummulation here.
per_replica_losses = 0.0
for i in tf.range(self.config["gradient_accumulation_steps"]):
reduced_batch = {
k: v[
i * self.config["batch_size"] : (i + 1) * self.config["batch_size"]
]
for k, v in batch.items()
}
# run 1 step accumulate
reduced_batch_losses = self._calculate_gradient_per_batch(reduced_batch)
# sum per_replica_losses
per_replica_losses += reduced_batch_losses
gradients = self._gradient_accumulator.gradients
self._optimizer.apply_gradients(zip(gradients, self._trainable_variables))
self._gradient_accumulator.reset()
return per_replica_losses
|
https://github.com/TensorSpeech/TensorFlowTTS/issues/389
|
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/indexed_slices.py:433: UserWarning: Converting sparse IndexedSlices to a dense Tensor of unknown shape. This may consume a large amount of memory.
"Converting sparse IndexedSlices to a dense Tensor of unknown shape. "
Traceback (most recent call last):
File "/content/TensorflowTTS/ttsexamples/fastspeech2/train_fastspeech2.py", line 436, in <module>
main()
File "/content/TensorflowTTS/ttsexamples/fastspeech2/train_fastspeech2.py", line 428, in main
resume=args.resume,
File "/content/TensorflowTTS/tensorflow_tts/trainers/base_trainer.py", line 1002, in fit
self.run()
File "/content/TensorflowTTS/tensorflow_tts/trainers/base_trainer.py", line 103, in run
self._train_epoch()
File "/content/TensorflowTTS/tensorflow_tts/trainers/base_trainer.py", line 125, in _train_epoch
self._train_step(batch)
File "/content/TensorflowTTS/tensorflow_tts/trainers/base_trainer.py", line 780, in _train_step
self.one_step_forward(batch)
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py", line 780, in __call__
result = self._call(*args, **kwds)
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py", line 823, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py", line 697, in _initialize
*args, **kwds))
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py", line 2855, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py", line 3213, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py", line 3075, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/func_graph.py", line 986, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py", line 600, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/func_graph.py", line 973, in wrapper
raise e.ag_error_metadata.to_exception(e)
ValueError: in user code:
/content/TensorflowTTS/tensorflow_tts/trainers/base_trainer.py:788 _one_step_forward *
per_replica_losses = self._strategy.run(
/content/TensorflowTTS/tensorflow_tts/trainers/base_trainer.py:835 _one_step_forward_per_replica *
self._optimizer.apply_gradients(
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/mixed_precision/experimental/loss_scale_optimizer.py:380 apply_gradients **
args=(grads_and_vars, name, experimental_aggregate_gradients))
/usr/local/lib/python3.6/dist-packages/tensorflow/python/distribute/distribute_lib.py:2715 merge_call
return self._merge_call(merge_fn, args, kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/distribute/distribute_lib.py:2722 _merge_call
return merge_fn(self._strategy, *args, **kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/mixed_precision/experimental/loss_scale_optimizer.py:410 _apply_gradients_cross_replica **
do_not_apply_fn)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/smart_cond.py:59 smart_cond
name=name)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/util/dispatch.py:201 wrapper
return target(*args, **kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/util/deprecation.py:507 new_func
return func(*args, **kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/control_flow_ops.py:1180 cond
return cond_v2.cond_v2(pred, true_fn, false_fn, name)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/cond_v2.py:85 cond_v2
op_return_value=pred)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/func_graph.py:986 func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/mixed_precision/experimental/loss_scale_optimizer.py:396 apply_fn
args=(grads, wrapped_vars, name, experimental_aggregate_gradients))
/usr/local/lib/python3.6/dist-packages/tensorflow/python/distribute/distribute_lib.py:2585 call_for_each_replica
return self._call_for_each_replica(fn, args, kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/distribute/one_device_strategy.py:367 _call_for_each_replica
return fn(*args, **kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/mixed_precision/experimental/loss_scale_optimizer.py:420 _apply_gradients
experimental_aggregate_gradients=experimental_aggregate_gradients)
/content/TensorflowTTS/tensorflow_tts/optimizers/adamweightdecay.py:124 apply_gradients
(grads, _) = tf.clip_by_global_norm(grads, clip_norm=clip_norm)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/util/dispatch.py:201 wrapper
return target(*args, **kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/clip_ops.py:352 clip_by_global_norm
constant_op.constant(1.0, dtype=use_norm.dtype) / clip_norm)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/math_ops.py:1124 binary_op_wrapper
return func(x, y, name=name)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/util/dispatch.py:201 wrapper
return target(*args, **kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/math_ops.py:1296 truediv
return _truediv_python3(x, y, name)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/math_ops.py:1222 _truediv_python3
y = ops.convert_to_tensor(y, dtype_hint=x.dtype.base_dtype, name="y")
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/ops.py:1499 convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/constant_op.py:338 _constant_tensor_conversion_function
return constant(v, dtype=dtype, name=name)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/constant_op.py:264 constant
allow_broadcast=True)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/constant_op.py:282 _constant_impl
allow_broadcast=allow_broadcast))
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/tensor_util.py:444 make_tensor_proto
raise ValueError("None values not supported.")
ValueError: None values not supported.
[train]: 0% 0/150000 [01:14<?, ?it/s]
|
ValueError
|
def fix(base_path: str, dur_path: str, trimmed_dur_path: str, use_norm: str):
for t in ["train", "valid"]:
mfa_longer = []
mfa_shorter = []
big_diff = []
not_fixed = []
pre_path = os.path.join(base_path, t)
os.makedirs(os.path.join(pre_path, "fix_dur"), exist_ok=True)
logging.info(f"FIXING {t} set ...\n")
for i in tqdm(os.listdir(os.path.join(pre_path, "ids"))):
if use_norm == "t":
mel = np.load(
os.path.join(
pre_path, "norm-feats", f"{i.split('-')[0]}-norm-feats.npy"
)
)
else:
mel = np.load(
os.path.join(
pre_path, "raw-feats", f"{i.split('-')[0]}-raw-feats.npy"
)
)
try:
dur = np.load(
os.path.join(trimmed_dur_path, f"{i.split('-')[0]}-durations.npy")
)
except:
dur = np.load(
os.path.join(dur_path, f"{i.split('-')[0]}-durations.npy")
)
l_mel = len(mel)
dur_s = np.sum(dur)
cloned = np.array(dur, copy=True)
diff = abs(l_mel - dur_s)
if abs(l_mel - dur_s) > 30: # more then 300 ms
big_diff.append([i, abs(l_mel - dur_s)])
if dur_s > l_mel:
for j in range(1, len(dur) - 1):
if diff == 0:
break
dur_val = cloned[-j]
if dur_val >= diff:
cloned[-j] -= diff
diff -= dur_val
break
else:
cloned[-j] = 0
diff -= dur_val
if j == len(dur) - 2:
not_fixed.append(i)
mfa_longer.append(abs(l_mel - dur_s))
elif dur_s < l_mel:
cloned[-1] += diff
mfa_shorter.append(abs(l_mel - dur_s))
np.save(
os.path.join(pre_path, "fix_dur", f"{i.split('-')[0]}-durations.npy"),
cloned.astype(np.int32),
allow_pickle=False,
)
logging.info(
f"{t} stats: number of mfa with longer duration: {len(mfa_longer)}, total diff: {sum(mfa_longer)}"
f", mean diff: {sum(mfa_longer) / len(mfa_longer) if len(mfa_longer) > 0 else 0}"
)
logging.info(
f"{t} stats: number of mfa with shorter duration: {len(mfa_shorter)}, total diff: {sum(mfa_shorter)}"
f", mean diff: {sum(mfa_shorter) / len(mfa_shorter) if len(mfa_shorter) > 0 else 0}"
)
logging.info(
f"{t} stats: number of files with a ''big'' duration diff: {len(big_diff)} if number>1 you should check it"
)
logging.info(f"{t} stats: not fixed len: {len(not_fixed)}\n")
|
def fix(base_path: str, dur_path: str, trimmed_dur_path: str, use_norm: str):
for t in ["train", "valid"]:
mfa_longer = []
mfa_shorter = []
big_diff = []
not_fixed = []
pre_path = os.path.join(base_path, t)
os.makedirs(os.path.join(pre_path, "fix_dur"), exist_ok=True)
logging.info(f"FIXING {t} set ...\n")
for i in tqdm(os.listdir(os.path.join(pre_path, "ids"))):
if use_norm == "t":
mel = np.load(
os.path.join(
pre_path, "norm-feats", f"{i.split('-')[0]}-norm-feats.npy"
)
)
else:
mel = np.load(
os.path.join(
pre_path, "raw-feats", f"{i.split('-')[0]}-raw-feats.npy"
)
)
try:
dur = np.load(
os.path.join(trimmed_dur_path, f"{i.split('-')[0]}-durations.npy")
)
except:
dur = np.load(
os.path.join(dur_path, f"{i.split('-')[0]}-durations.npy")
)
l_mel = len(mel)
dur_s = np.sum(dur)
cloned = np.array(dur, copy=True)
diff = abs(l_mel - dur_s)
if abs(l_mel - dur_s) > 30: # more then 300 ms
big_diff.append([i, abs(l_mel - dur_s)])
if dur_s > l_mel:
for j in range(1, len(dur) - 1):
if diff == 0:
break
dur_val = cloned[-j]
if dur_val >= diff:
cloned[-j] -= diff
diff -= dur_val
break
else:
cloned[-j] = 0
diff -= dur_val
if j == len(dur) - 2:
not_fixed.append(i)
mfa_longer.append(abs(l_mel - dur_s))
elif dur_s < l_mel:
cloned[-1] += diff
mfa_shorter.append(abs(l_mel - dur_s))
np.save(
os.path.join(pre_path, "fix_dur", f"{i.split('-')[0]}-durations.npy"),
cloned.astype(np.int32),
allow_pickle=False,
)
logging.info(
f"{t} stats: number of mfa with longer duration: {len(mfa_longer)}, total diff: {sum(mfa_longer)}"
f", mean diff: {sum(mfa_longer) / len(mfa_longer)}"
)
logging.info(
f"{t} stats: number of mfa with shorter duration: {len(mfa_shorter)}, total diff: {sum(mfa_shorter)}"
f", mean diff: {sum(mfa_shorter) / len(mfa_shorter) if len(mfa_shorter) > 0 else 0}"
)
logging.info(
f"{t} stats: number of files with a ''big'' duration diff: {len(big_diff)} if number>1 you should check it"
)
logging.info(f"{t} stats: not fixed len: {len(not_fixed)}\n")
|
https://github.com/TensorSpeech/TensorFlowTTS/issues/306
|
INFO: FIXING train set ...
100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 15/15 [00:00<00:00, 770.29it/s]
Traceback (most recent call last):
File "examples/mfa_extraction/fix_mismatch.py", line 121, in <module>
fix()
File "/usr/local/lib/python3.6/dist-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.6/dist-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.6/dist-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.6/dist-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "examples/mfa_extraction/fix_mismatch.py", line 107, in fix
f"{t} stats: number of mfa with longer duration: {len(mfa_longer)}, total diff: {sum(mfa_longer)}"
ZeroDivisionError: division by zero
|
ZeroDivisionError
|
def __init__(self, **kwargs):
super().__init__(**kwargs)
Window.bind(on_resize=self.check_position_caller)
Window.bind(on_maximize=self.set_menu_properties)
Window.bind(on_restore=self.set_menu_properties)
self.register_event_type("on_dismiss")
self.register_event_type("on_enter")
self.register_event_type("on_leave")
self.menu = self.ids.md_menu
self.target_height = 0
|
def __init__(self, **kwargs):
super().__init__(**kwargs)
Window.bind(on_resize=self.check_position_caller)
self.register_event_type("on_dismiss")
self.register_event_type("on_enter")
self.register_event_type("on_leave")
self.menu = self.ids.md_menu
self.target_height = 0
|
https://github.com/kivymd/KivyMD/issues/431
|
Traceback (most recent call last):
File "C:/Users/User/PycharmProjects/New MO Inventory System/sigh.py", line 24, in <module>
App().run()
File "C:\Users\User\AppData\Local\Programs\Python\Python38\lib\site-packages\kivy\app.py", line 949, in run
self._run_prepare()
File "C:\Users\User\AppData\Local\Programs\Python\Python38\lib\site-packages\kivy\app.py", line 919, in _run_prepare
root = self.build()
File "C:/Users/User/PycharmProjects/New MO Inventory System/sigh.py", line 21, in build
b.add_widget(Layout())
File "C:/Users/User/PycharmProjects/New MO Inventory System/sigh.py", line 9, in __init__
self.b = MDBoxLayout(orientation='vertical')
File "kivy\properties.pyx", line 498, in kivy.properties.Property.__set__
File "kivy\properties.pyx", line 533, in kivy.properties.Property.set
File "kivy\properties.pyx", line 1176, in kivy.properties.BoundedNumericProperty.check
TypeError: '<' not supported between instances of 'MDBoxLayout' and 'float'
|
TypeError
|
def build_show(self, scene):
show_m = window.ShowManager(
scene, size=(1200, 900), order_transparent=True, reset_camera=False
)
show_m.initialize()
if self.cluster and self.tractograms:
lengths = np.array([self.cla[c]["length"] for c in self.cla])
szs = [self.cla[c]["size"] for c in self.cla]
sizes = np.array(szs)
# global self.panel2, slider_length, slider_size
self.panel2 = ui.Panel2D(
size=(300, 200),
position=(850, 320),
color=(1, 1, 1),
opacity=0.1,
align="right",
)
slider_label_length = build_label(text="Length")
slider_length = ui.LineSlider2D(
min_value=lengths.min(),
max_value=np.percentile(lengths, 98),
initial_value=np.percentile(lengths, 25),
text_template="{value:.0f}",
length=140,
)
slider_label_size = build_label(text="Size")
slider_size = ui.LineSlider2D(
min_value=sizes.min(),
max_value=np.percentile(sizes, 98),
initial_value=np.percentile(sizes, 50),
text_template="{value:.0f}",
length=140,
)
# global self.length_min, size_min
self.size_min = sizes.min()
self.length_min = lengths.min()
def hide_clusters_length(slider):
self.length_min = np.round(slider.value)
for k in self.cla:
if (
self.cla[k]["length"] < self.length_min
or self.cla[k]["size"] < self.size_min
):
self.cla[k]["centroid_actor"].SetVisibility(0)
if k.GetVisibility() == 1:
k.SetVisibility(0)
else:
self.cla[k]["centroid_actor"].SetVisibility(1)
show_m.render()
def hide_clusters_size(slider):
self.size_min = np.round(slider.value)
for k in self.cla:
if (
self.cla[k]["length"] < self.length_min
or self.cla[k]["size"] < self.size_min
):
self.cla[k]["centroid_actor"].SetVisibility(0)
if k.GetVisibility() == 1:
k.SetVisibility(0)
else:
self.cla[k]["centroid_actor"].SetVisibility(1)
show_m.render()
slider_length.on_change = hide_clusters_length
self.panel2.add_element(slider_label_length, coords=(0.1, 0.333))
self.panel2.add_element(slider_length, coords=(0.4, 0.333))
slider_size.on_change = hide_clusters_size
self.panel2.add_element(slider_label_size, coords=(0.1, 0.6666))
self.panel2.add_element(slider_size, coords=(0.4, 0.6666))
scene.add(self.panel2)
text_block = build_label(HELP_MESSAGE, 16) # ui.TextBlock2D()
text_block.message = HELP_MESSAGE
help_panel = ui.Panel2D(
size=(300, 200), color=(1, 1, 1), opacity=0.1, align="left"
)
help_panel.add_element(text_block, coords=(0.05, 0.1))
scene.add(help_panel)
if len(self.images) > 0:
# !!Only first image loading supported for now')
data, affine = self.images[0]
self.panel = slicer_panel(scene, data, affine, self.world_coords)
else:
data = None
affine = None
self.win_size = scene.GetSize()
def win_callback(obj, event):
if self.win_size != obj.GetSize():
size_old = self.win_size
self.win_size = obj.GetSize()
size_change = [self.win_size[0] - size_old[0], 0]
if data is not None:
self.panel.re_align(size_change)
if self.cluster:
self.panel2.re_align(size_change)
help_panel.re_align(size_change)
show_m.initialize()
def left_click_centroid_callback(obj, event):
self.cea[obj]["selected"] = not self.cea[obj]["selected"]
self.cla[self.cea[obj]["cluster_actor"]]["selected"] = self.cea[obj]["selected"]
show_m.render()
def left_click_cluster_callback(obj, event):
if self.cla[obj]["selected"]:
self.cla[obj]["centroid_actor"].VisibilityOn()
ca = self.cla[obj]["centroid_actor"]
self.cea[ca]["selected"] = 0
obj.VisibilityOff()
self.cea[ca]["expanded"] = 0
show_m.render()
for cl in self.cla:
cl.AddObserver("LeftButtonPressEvent", left_click_cluster_callback, 1.0)
self.cla[cl]["centroid_actor"].AddObserver(
"LeftButtonPressEvent", left_click_centroid_callback, 1.0
)
self.hide_centroids = True
self.select_all = False
def key_press(obj, event):
key = obj.GetKeySym()
if self.cluster:
# hide on/off unselected centroids
if key == "h" or key == "H":
if self.hide_centroids:
for ca in self.cea:
if (
self.cea[ca]["length"] >= self.length_min
or self.cea[ca]["size"] >= self.size_min
):
if self.cea[ca]["selected"] == 0:
ca.VisibilityOff()
else:
for ca in self.cea:
if (
self.cea[ca]["length"] >= self.length_min
and self.cea[ca]["size"] >= self.size_min
):
if self.cea[ca]["selected"] == 0:
ca.VisibilityOn()
self.hide_centroids = not self.hide_centroids
show_m.render()
# invert selection
if key == "i" or key == "I":
for ca in self.cea:
if (
self.cea[ca]["length"] >= self.length_min
and self.cea[ca]["size"] >= self.size_min
):
self.cea[ca]["selected"] = not self.cea[ca]["selected"]
cas = self.cea[ca]["cluster_actor"]
self.cla[cas]["selected"] = self.cea[ca]["selected"]
show_m.render()
# save current result
if key == "s" or key == "S":
saving_streamlines = Streamlines()
for bundle in self.cla.keys():
if bundle.GetVisibility():
t = self.cla[bundle]["tractogram"]
c = self.cla[bundle]["cluster"]
indices = self.tractogram_clusters[t][c]
saving_streamlines.extend(Streamlines(indices))
print("Saving result in tmp.trk")
save_trk("tmp.trk", saving_streamlines, np.eye(4))
if key == "y" or key == "Y":
active_streamlines = Streamlines()
for bundle in self.cla.keys():
if bundle.GetVisibility():
t = self.cla[bundle]["tractogram"]
c = self.cla[bundle]["cluster"]
indices = self.tractogram_clusters[t][c]
active_streamlines.extend(Streamlines(indices))
# self.tractograms = [active_streamlines]
hz2 = horizon(
[active_streamlines],
self.images,
cluster=True,
cluster_thr=5,
random_colors=self.random_colors,
length_lt=np.inf,
length_gt=0,
clusters_lt=np.inf,
clusters_gt=0,
world_coords=True,
interactive=True,
)
ren2 = hz2.build_scene()
hz2.build_show(ren2)
if key == "a" or key == "A":
if self.select_all is False:
for ca in self.cea:
if (
self.cea[ca]["length"] >= self.length_min
and self.cea[ca]["size"] >= self.size_min
):
self.cea[ca]["selected"] = 1
cas = self.cea[ca]["cluster_actor"]
self.cla[cas]["selected"] = self.cea[ca]["selected"]
show_m.render()
self.select_all = True
else:
for ca in self.cea:
if (
self.cea[ca]["length"] >= self.length_min
and self.cea[ca]["size"] >= self.size_min
):
self.cea[ca]["selected"] = 0
cas = self.cea[ca]["cluster_actor"]
self.cla[cas]["selected"] = self.cea[ca]["selected"]
show_m.render()
self.select_all = False
if key == "e" or key == "E":
for c in self.cea:
if self.cea[c]["selected"]:
if not self.cea[c]["expanded"]:
len_ = self.cea[c]["length"]
sz_ = self.cea[c]["size"]
if len_ >= self.length_min and sz_ >= self.size_min:
self.cea[c]["cluster_actor"].VisibilityOn()
c.VisibilityOff()
self.cea[c]["expanded"] = 1
show_m.render()
if key == "r" or key == "R":
for c in self.cea:
if (
self.cea[c]["length"] >= self.length_min
and self.cea[c]["size"] >= self.size_min
):
self.cea[c]["cluster_actor"].VisibilityOff()
c.VisibilityOn()
self.cea[c]["expanded"] = 0
show_m.render()
scene.reset_camera()
scene.zoom(1.5)
scene.reset_clipping_range()
if self.interactive:
show_m.add_window_callback(win_callback)
show_m.iren.AddObserver("KeyPressEvent", key_press)
show_m.render()
show_m.start()
else:
window.record(
scene, out_path=self.out_png, size=(1200, 900), reset_camera=False
)
|
def build_show(self, scene):
show_m = window.ShowManager(
scene, size=(1200, 900), order_transparent=True, reset_camera=False
)
show_m.initialize()
if self.cluster:
lengths = np.array([self.cla[c]["length"] for c in self.cla])
szs = [self.cla[c]["size"] for c in self.cla]
sizes = np.array(szs)
# global self.panel2, slider_length, slider_size
self.panel2 = ui.Panel2D(
size=(300, 200),
position=(850, 320),
color=(1, 1, 1),
opacity=0.1,
align="right",
)
slider_label_length = build_label(text="Length")
slider_length = ui.LineSlider2D(
min_value=lengths.min(),
max_value=np.percentile(lengths, 98),
initial_value=np.percentile(lengths, 25),
text_template="{value:.0f}",
length=140,
)
slider_label_size = build_label(text="Size")
slider_size = ui.LineSlider2D(
min_value=sizes.min(),
max_value=np.percentile(sizes, 98),
initial_value=np.percentile(sizes, 50),
text_template="{value:.0f}",
length=140,
)
# global self.length_min, size_min
self.size_min = sizes.min()
self.length_min = lengths.min()
def hide_clusters_length(slider):
self.length_min = np.round(slider.value)
for k in self.cla:
if (
self.cla[k]["length"] < self.length_min
or self.cla[k]["size"] < self.size_min
):
self.cla[k]["centroid_actor"].SetVisibility(0)
if k.GetVisibility() == 1:
k.SetVisibility(0)
else:
self.cla[k]["centroid_actor"].SetVisibility(1)
show_m.render()
def hide_clusters_size(slider):
self.size_min = np.round(slider.value)
for k in self.cla:
if (
self.cla[k]["length"] < self.length_min
or self.cla[k]["size"] < self.size_min
):
self.cla[k]["centroid_actor"].SetVisibility(0)
if k.GetVisibility() == 1:
k.SetVisibility(0)
else:
self.cla[k]["centroid_actor"].SetVisibility(1)
show_m.render()
slider_length.on_change = hide_clusters_length
self.panel2.add_element(slider_label_length, coords=(0.1, 0.333))
self.panel2.add_element(slider_length, coords=(0.4, 0.333))
slider_size.on_change = hide_clusters_size
self.panel2.add_element(slider_label_size, coords=(0.1, 0.6666))
self.panel2.add_element(slider_size, coords=(0.4, 0.6666))
scene.add(self.panel2)
text_block = build_label(HELP_MESSAGE, 16) # ui.TextBlock2D()
text_block.message = HELP_MESSAGE
help_panel = ui.Panel2D(
size=(300, 200), color=(1, 1, 1), opacity=0.1, align="left"
)
help_panel.add_element(text_block, coords=(0.05, 0.1))
scene.add(help_panel)
if len(self.images) > 0:
# !!Only first image loading supported for now')
data, affine = self.images[0]
self.panel = slicer_panel(scene, data, affine, self.world_coords)
else:
data = None
affine = None
self.win_size = scene.GetSize()
def win_callback(obj, event):
if self.win_size != obj.GetSize():
size_old = self.win_size
self.win_size = obj.GetSize()
size_change = [self.win_size[0] - size_old[0], 0]
if data is not None:
self.panel.re_align(size_change)
if self.cluster:
self.panel2.re_align(size_change)
help_panel.re_align(size_change)
show_m.initialize()
def left_click_centroid_callback(obj, event):
self.cea[obj]["selected"] = not self.cea[obj]["selected"]
self.cla[self.cea[obj]["cluster_actor"]]["selected"] = self.cea[obj]["selected"]
show_m.render()
def left_click_cluster_callback(obj, event):
if self.cla[obj]["selected"]:
self.cla[obj]["centroid_actor"].VisibilityOn()
ca = self.cla[obj]["centroid_actor"]
self.cea[ca]["selected"] = 0
obj.VisibilityOff()
self.cea[ca]["expanded"] = 0
show_m.render()
for cl in self.cla:
cl.AddObserver("LeftButtonPressEvent", left_click_cluster_callback, 1.0)
self.cla[cl]["centroid_actor"].AddObserver(
"LeftButtonPressEvent", left_click_centroid_callback, 1.0
)
self.hide_centroids = True
self.select_all = False
def key_press(obj, event):
key = obj.GetKeySym()
if self.cluster:
# hide on/off unselected centroids
if key == "h" or key == "H":
if self.hide_centroids:
for ca in self.cea:
if (
self.cea[ca]["length"] >= self.length_min
or self.cea[ca]["size"] >= self.size_min
):
if self.cea[ca]["selected"] == 0:
ca.VisibilityOff()
else:
for ca in self.cea:
if (
self.cea[ca]["length"] >= self.length_min
and self.cea[ca]["size"] >= self.size_min
):
if self.cea[ca]["selected"] == 0:
ca.VisibilityOn()
self.hide_centroids = not self.hide_centroids
show_m.render()
# invert selection
if key == "i" or key == "I":
for ca in self.cea:
if (
self.cea[ca]["length"] >= self.length_min
and self.cea[ca]["size"] >= self.size_min
):
self.cea[ca]["selected"] = not self.cea[ca]["selected"]
cas = self.cea[ca]["cluster_actor"]
self.cla[cas]["selected"] = self.cea[ca]["selected"]
show_m.render()
# save current result
if key == "s" or key == "S":
saving_streamlines = Streamlines()
for bundle in self.cla.keys():
if bundle.GetVisibility():
t = self.cla[bundle]["tractogram"]
c = self.cla[bundle]["cluster"]
indices = self.tractogram_clusters[t][c]
saving_streamlines.extend(Streamlines(indices))
print("Saving result in tmp.trk")
save_trk("tmp.trk", saving_streamlines, np.eye(4))
if key == "y" or key == "Y":
active_streamlines = Streamlines()
for bundle in self.cla.keys():
if bundle.GetVisibility():
t = self.cla[bundle]["tractogram"]
c = self.cla[bundle]["cluster"]
indices = self.tractogram_clusters[t][c]
active_streamlines.extend(Streamlines(indices))
# self.tractograms = [active_streamlines]
hz2 = horizon(
[active_streamlines],
self.images,
cluster=True,
cluster_thr=5,
random_colors=self.random_colors,
length_lt=np.inf,
length_gt=0,
clusters_lt=np.inf,
clusters_gt=0,
world_coords=True,
interactive=True,
)
ren2 = hz2.build_scene()
hz2.build_show(ren2)
if key == "a" or key == "A":
if self.select_all is False:
for ca in self.cea:
if (
self.cea[ca]["length"] >= self.length_min
and self.cea[ca]["size"] >= self.size_min
):
self.cea[ca]["selected"] = 1
cas = self.cea[ca]["cluster_actor"]
self.cla[cas]["selected"] = self.cea[ca]["selected"]
show_m.render()
self.select_all = True
else:
for ca in self.cea:
if (
self.cea[ca]["length"] >= self.length_min
and self.cea[ca]["size"] >= self.size_min
):
self.cea[ca]["selected"] = 0
cas = self.cea[ca]["cluster_actor"]
self.cla[cas]["selected"] = self.cea[ca]["selected"]
show_m.render()
self.select_all = False
if key == "e" or key == "E":
for c in self.cea:
if self.cea[c]["selected"]:
if not self.cea[c]["expanded"]:
len_ = self.cea[c]["length"]
sz_ = self.cea[c]["size"]
if len_ >= self.length_min and sz_ >= self.size_min:
self.cea[c]["cluster_actor"].VisibilityOn()
c.VisibilityOff()
self.cea[c]["expanded"] = 1
show_m.render()
if key == "r" or key == "R":
for c in self.cea:
if (
self.cea[c]["length"] >= self.length_min
and self.cea[c]["size"] >= self.size_min
):
self.cea[c]["cluster_actor"].VisibilityOff()
c.VisibilityOn()
self.cea[c]["expanded"] = 0
show_m.render()
scene.reset_camera()
scene.zoom(1.5)
scene.reset_clipping_range()
if self.interactive:
show_m.add_window_callback(win_callback)
show_m.iren.AddObserver("KeyPressEvent", key_press)
show_m.render()
show_m.start()
else:
window.record(
scene, out_path=self.out_png, size=(1200, 900), reset_camera=False
)
|
https://github.com/dipy/dipy/issues/1756
|
Traceback (most recent call last):
File "/Users/guaj0/.virtualenvs/phd-dipy/bin/dipy_horizon", line 7, in <module>
exec(compile(f.read(), __file__, 'exec'))
File "/Users/guaj0/Work/PhD/dipy/bin/dipy_horizon", line 10, in <module>
run_flow(HorizonFlow())
File "/Users/guaj0/Work/PhD/dipy/dipy/workflows/flow_runner.py", line 91, in run_flow
return flow.run(**args)
File "/Users/guaj0/Work/PhD/dipy/dipy/workflows/viz.py", line 87, in run
out_png=pjoin(out_dir, out_stealth_png))
File "/Users/guaj0/Work/PhD/dipy/dipy/viz/app.py", line 515, in horizon
hz.build_show(scene)
File "/Users/guaj0/Work/PhD/dipy/dipy/viz/app.py", line 224, in build_show
min_value=lengths.min(),
File "/Users/guaj0/.virtualenvs/phd-dipy/lib/python3.7/site-packages/numpy/core/_methods.py", line 32, in _amin
return umr_minimum(a, axis, None, out, keepdims, initial)
ValueError: zero-size array to reduction operation minimum which has no identity
|
ValueError
|
def interp_rbf(
data,
sphere_origin,
sphere_target,
function="multiquadric",
epsilon=None,
smooth=0.1,
norm="angle",
):
"""Interpolate data on the sphere, using radial basis functions.
Parameters
----------
data : (N,) ndarray
Function values on the unit sphere.
sphere_origin : Sphere
Positions of data values.
sphere_target : Sphere
M target positions for which to interpolate.
function : {'multiquadric', 'inverse', 'gaussian'}
Radial basis function.
epsilon : float
Radial basis function spread parameter. Defaults to approximate average
distance between nodes.
a good start
smooth : float
values greater than zero increase the smoothness of the
approximation with 0 as pure interpolation. Default: 0.1
norm : str
A string indicating the function that returns the
"distance" between two points.
'angle' - The angle between two vectors
'euclidean_norm' - The Euclidean distance
Returns
-------
v : (M,) ndarray
Interpolated values.
See Also
--------
scipy.interpolate.Rbf
"""
from scipy.interpolate import Rbf
def angle(x1, x2):
xx = np.arccos(np.clip((x1 * x2).sum(axis=0), -1, 1))
return np.nan_to_num(xx)
def euclidean_norm(x1, x2):
return np.sqrt(((x1 - x2) ** 2).sum(axis=0))
if norm == "angle":
norm = angle
elif norm == "euclidean_norm":
w_s = "The Eucldian norm used for interpolation is inaccurate "
w_s += "and will be deprecated in future versions. Please consider "
w_s += "using the 'angle' norm instead"
warnings.warn(w_s, DeprecationWarning)
norm = euclidean_norm
# Workaround for bug in older versions of SciPy that don't allow
# specification of epsilon None:
if epsilon is not None:
kwargs = {
"function": function,
"epsilon": epsilon,
"smooth": smooth,
"norm": norm,
}
else:
kwargs = {"function": function, "smooth": smooth, "norm": norm}
rbfi = Rbf(sphere_origin.x, sphere_origin.y, sphere_origin.z, data, **kwargs)
return rbfi(sphere_target.x, sphere_target.y, sphere_target.z)
|
def interp_rbf(
data,
sphere_origin,
sphere_target,
function="multiquadric",
epsilon=None,
smooth=0.1,
norm="angle",
):
"""Interpolate data on the sphere, using radial basis functions.
Parameters
----------
data : (N,) ndarray
Function values on the unit sphere.
sphere_origin : Sphere
Positions of data values.
sphere_target : Sphere
M target positions for which to interpolate.
function : {'multiquadric', 'inverse', 'gaussian'}
Radial basis function.
epsilon : float
Radial basis function spread parameter. Defaults to approximate average
distance between nodes.
a good start
smooth : float
values greater than zero increase the smoothness of the
approximation with 0 as pure interpolation. Default: 0.1
norm : str
A string indicating the function that returns the
"distance" between two points.
'angle' - The angle between two vectors
'euclidean_norm' - The Euclidean distance
Returns
-------
v : (M,) ndarray
Interpolated values.
See Also
--------
scipy.interpolate.Rbf
"""
from scipy.interpolate import Rbf
def angle(x1, x2):
xx = np.arccos((x1 * x2).sum(axis=0))
xx[np.isnan(xx)] = 0
return xx
def euclidean_norm(x1, x2):
return np.sqrt(((x1 - x2) ** 2).sum(axis=0))
if norm == "angle":
norm = angle
elif norm == "euclidean_norm":
w_s = "The Eucldian norm used for interpolation is inaccurate "
w_s += "and will be deprecated in future versions. Please consider "
w_s += "using the 'angle' norm instead"
warnings.warn(w_s, DeprecationWarning)
norm = euclidean_norm
# Workaround for bug in older versions of SciPy that don't allow
# specification of epsilon None:
if epsilon is not None:
kwargs = {
"function": function,
"epsilon": epsilon,
"smooth": smooth,
"norm": norm,
}
else:
kwargs = {"function": function, "smooth": smooth, "norm": norm}
rbfi = Rbf(sphere_origin.x, sphere_origin.y, sphere_origin.z, data, **kwargs)
return rbfi(sphere_target.x, sphere_target.y, sphere_target.z)
|
https://github.com/dipy/dipy/issues/1698
|
======================================================================
ERROR: dipy.core.tests.test_sphere.test_interp_rbf
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/travis/build/nipy/dipy/venv/lib/python3.4/site-packages/nose/case.py", line 198, in runTest
self.test(*self.arg)
File "/home/travis/build/nipy/dipy/venv/lib/python3.4/site-packages/dipy/core/tests/test_sphere.py", line 375, in test_interp_rbf
interp_data_a = interp_rbf(data, s0, s1, norm="angle")
File "/home/travis/build/nipy/dipy/venv/lib/python3.4/site-packages/dipy/core/sphere.py", line 572, in interp_rbf
**kwargs)
File "/home/travis/build/nipy/dipy/venv/lib/python3.4/site-packages/scipy/interpolate/rbf.py", line 241, in __init__
self.nodes = linalg.solve(self.A, self.di)
File "/home/travis/build/nipy/dipy/venv/lib/python3.4/site-packages/scipy/interpolate/rbf.py", line 247, in A
r = squareform(pdist(self.xi.T, self.norm)) # Pairwise norm
File "/home/travis/build/nipy/dipy/venv/lib/python3.4/site-packages/scipy/spatial/distance.py", line 2030, in pdist
dm[k] = metric(X[i], X[j], **kwargs)
File "/home/travis/build/nipy/dipy/venv/lib/python3.4/site-packages/dipy/core/sphere.py", line 544, in angle
xx[np.isnan(xx)] = 0
TypeError: 'numpy.float64' object does not support item assignment
----------------------------------------------------------------------
|
TypeError
|
def angle(x1, x2):
xx = np.arccos(np.clip((x1 * x2).sum(axis=0), -1, 1))
return np.nan_to_num(xx)
|
def angle(x1, x2):
xx = np.arccos((x1 * x2).sum(axis=0))
xx[np.isnan(xx)] = 0
return xx
|
https://github.com/dipy/dipy/issues/1698
|
======================================================================
ERROR: dipy.core.tests.test_sphere.test_interp_rbf
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/travis/build/nipy/dipy/venv/lib/python3.4/site-packages/nose/case.py", line 198, in runTest
self.test(*self.arg)
File "/home/travis/build/nipy/dipy/venv/lib/python3.4/site-packages/dipy/core/tests/test_sphere.py", line 375, in test_interp_rbf
interp_data_a = interp_rbf(data, s0, s1, norm="angle")
File "/home/travis/build/nipy/dipy/venv/lib/python3.4/site-packages/dipy/core/sphere.py", line 572, in interp_rbf
**kwargs)
File "/home/travis/build/nipy/dipy/venv/lib/python3.4/site-packages/scipy/interpolate/rbf.py", line 241, in __init__
self.nodes = linalg.solve(self.A, self.di)
File "/home/travis/build/nipy/dipy/venv/lib/python3.4/site-packages/scipy/interpolate/rbf.py", line 247, in A
r = squareform(pdist(self.xi.T, self.norm)) # Pairwise norm
File "/home/travis/build/nipy/dipy/venv/lib/python3.4/site-packages/scipy/spatial/distance.py", line 2030, in pdist
dm[k] = metric(X[i], X[j], **kwargs)
File "/home/travis/build/nipy/dipy/venv/lib/python3.4/site-packages/dipy/core/sphere.py", line 544, in angle
xx[np.isnan(xx)] = 0
TypeError: 'numpy.float64' object does not support item assignment
----------------------------------------------------------------------
|
TypeError
|
def fit(self, data):
data_b0 = data[self.b0s_mask].mean()
data_single_b0 = np.r_[data_b0, data[~self.b0s_mask]] / data_b0
# calculates the mean signal at each b_values
means = find_signal_means(
self.b_unique, data_single_b0, self.one_0_bvals, self.srm, self.lb_matrix_signal
)
# average diffusivity initialization
x = np.array([np.pi / 4, np.pi / 4])
x, status = leastsq(forecast_error_func, x, args=(self.b_unique, means))
# transform to bound the diffusivities from 0 to 3e-03
d_par = np.cos(x[0]) ** 2 * 3e-03
d_perp = np.cos(x[1]) ** 2 * 3e-03
if d_perp >= d_par:
temp = d_par
d_par = d_perp
d_perp = temp
# round to avoid memory explosion
diff_key = str(int(np.round(d_par * 1e05))) + str(int(np.round(d_perp * 1e05)))
M_diff = self.cache_get("forecast_matrix", key=diff_key)
if M_diff is None:
M_diff = forecast_matrix(self.sh_order, d_par, d_perp, self.one_0_bvals)
self.cache_set("forecast_matrix", key=diff_key, value=M_diff)
M = M_diff * self.rho
M0 = M[:, 0]
c0 = np.sqrt(1.0 / (4 * np.pi))
# coefficients vector initialization
n_c = int((self.sh_order + 1) * (self.sh_order + 2) / 2)
coef = np.zeros(n_c)
coef[0] = c0
if int(np.round(d_par * 1e05)) > int(np.round(d_perp * 1e05)):
if self.wls:
data_r = data_single_b0 - M0 * c0
Mr = M[:, 1:]
Lr = self.lb_matrix[1:, 1:]
pseudo_inv = np.dot(
np.linalg.inv(np.dot(Mr.T, Mr) + self.lambda_lb * Lr), Mr.T
)
coef = np.dot(pseudo_inv, data_r)
coef = np.r_[c0, coef]
if self.csd:
coef, _ = csdeconv(data_single_b0, M, self.fod, tau=0.1, convergence=50)
coef = coef / coef[0] * c0
if self.pos:
c = cvxpy.Variable(M.shape[1])
design_matrix = cvxpy.Constant(M)
objective = cvxpy.Minimize(
cvxpy.sum_squares(design_matrix * c - data_single_b0)
+ self.lambda_lb * cvxpy.quad_form(c, self.lb_matrix)
)
constraints = [c[0] == c0, self.fod * c >= 0]
prob = cvxpy.Problem(objective, constraints)
try:
prob.solve(solver=cvxpy.OSQP, eps_abs=1e-05, eps_rel=1e-05)
coef = np.asarray(c.value).squeeze()
except Exception:
warn("Optimization did not find a solution")
coef = np.zeros(M.shape[1])
coef[0] = c0
return ForecastFit(self, data, coef, d_par, d_perp)
|
def fit(self, data):
data_b0 = data[self.b0s_mask].mean()
data_single_b0 = np.r_[data_b0, data[~self.b0s_mask]] / data_b0
# calculates the mean signal at each b_values
means = find_signal_means(
self.b_unique, data_single_b0, self.one_0_bvals, self.srm, self.lb_matrix_signal
)
# average diffusivity initialization
x = np.array([np.pi / 4, np.pi / 4])
x, status = leastsq(forecast_error_func, x, args=(self.b_unique, means))
# transform to bound the diffusivities from 0 to 3e-03
d_par = np.cos(x[0]) ** 2 * 3e-03
d_perp = np.cos(x[1]) ** 2 * 3e-03
if d_perp >= d_par:
temp = d_par
d_par = d_perp
d_perp = temp
# round to avoid memory explosion
diff_key = str(int(np.round(d_par * 1e05))) + str(int(np.round(d_perp * 1e05)))
M_diff = self.cache_get("forecast_matrix", key=diff_key)
if M_diff is None:
M_diff = forecast_matrix(self.sh_order, d_par, d_perp, self.one_0_bvals)
self.cache_set("forecast_matrix", key=diff_key, value=M_diff)
M = M_diff * self.rho
M0 = M[:, 0]
c0 = np.sqrt(1.0 / (4 * np.pi))
# coefficients vector initialization
n_c = int((self.sh_order + 1) * (self.sh_order + 2) / 2)
coef = np.zeros(n_c)
coef[0] = c0
if int(np.round(d_par * 1e05)) > int(np.round(d_perp * 1e05)):
if self.wls:
data_r = data_single_b0 - M0 * c0
Mr = M[:, 1:]
Lr = self.lb_matrix[1:, 1:]
pseudo_inv = np.dot(
np.linalg.inv(np.dot(Mr.T, Mr) + self.lambda_lb * Lr), Mr.T
)
coef = np.dot(pseudo_inv, data_r)
coef = np.r_[c0, coef]
if self.csd:
coef, num_it = csdeconv(
data_single_b0, M, self.fod, tau=0.1, convergence=50
)
coef = coef / coef[0] * c0
if self.pos:
c = cvxpy.Variable(M.shape[1])
design_matrix = cvxpy.Constant(M)
objective = cvxpy.Minimize(
cvxpy.sum_squares(design_matrix * c - data_single_b0)
+ self.lambda_lb * cvxpy.quad_form(c, self.lb_matrix)
)
constraints = [c[0] == c0, self.fod * c >= 0]
prob = cvxpy.Problem(objective, constraints)
try:
prob.solve()
coef = np.asarray(c.value).squeeze()
except Exception:
warn("Optimization did not find a solution")
coef = np.zeros(M.shape[1])
coef[0] = c0
return ForecastFit(self, data, coef, d_par, d_perp)
|
https://github.com/dipy/dipy/issues/1654
|
dipy (master)$nosetests dipy/reconst/tests/test_forecast.py
F.....
======================================================================
FAIL: dipy.reconst.tests.test_forecast.test_forecast_positive_constrain
----------------------------------------------------------------------
Traceback (most recent call last):
File "/Users/arokem/.virtualenvs/dipy/lib/python3.7/site-packages/nose/case.py", line 198, in runTest
self.test(*self.arg)
File "/Users/arokem/.virtualenvs/dipy/lib/python3.7/site-packages/numpy/testing/_private/decorators.py", line 155, in skipper_func
return f(*args, **kwargs)
File "/Users/arokem/source/dipy/dipy/reconst/tests/test_forecast.py", line 53, in test_forecast_positive_constrain
assert_almost_equal(fodf[fodf < 0].sum(), 0, 2)
File "/Users/arokem/.virtualenvs/dipy/lib/python3.7/site-packages/numpy/testing/_private/utils.py", line 584, in assert_almost_equal
raise AssertionError(_build_err_msg())
AssertionError:
Arrays are not almost equal to 2 decimals
ACTUAL: -0.015931951384411995
DESIRED: 0
----------------------------------------------------------------------
Ran 6 tests in 0.264s
|
AssertionError
|
def find_signal_means(b_unique, data_norm, bvals, rho, lb_matrix, w=1e-03):
r"""Calculate the mean signal for each shell.
Parameters
----------
b_unique : 1d ndarray,
unique b-values in a vector excluding zero
data_norm : 1d ndarray,
normalized diffusion signal
bvals : 1d ndarray,
the b-values
rho : 2d ndarray,
SH basis matrix for fitting the signal on each shell
lb_matrix : 2d ndarray,
Laplace-Beltrami regularization matrix
w : float,
weight for the Laplace-Beltrami regularization
Returns
-------
means : 1d ndarray
the average of the signal for each b-values
"""
lb = len(b_unique)
means = np.zeros(lb)
for u in range(lb):
ind = bvals == b_unique[u]
shell = data_norm[ind]
if np.sum(ind) > 20:
M = rho[ind, :]
pseudo_inv = np.dot(np.linalg.inv(np.dot(M.T, M) + w * lb_matrix), M.T)
coef = np.dot(pseudo_inv, shell)
means[u] = coef[0] / np.sqrt(4 * np.pi)
else:
means[u] = shell.mean()
return means
|
def find_signal_means(b_unique, data_norm, bvals, rho, lb_matrix, w=1e-03):
r"""Calculates the mean signal for each shell
Parameters
----------
b_unique : 1d ndarray,
unique b-values in a vector excluding zero
data_norm : 1d ndarray,
normalized diffusion signal
bvals : 1d ndarray,
the b-values
rho : 2d ndarray,
SH basis matrix for fitting the signal on each shell
lb_matrix : 2d ndarray,
Laplace-Beltrami regularization matrix
w : float,
weight for the Laplace-Beltrami regularization
Returns
-------
means : 1d ndarray
the average of the signal for each b-values
"""
lb = len(b_unique)
means = np.zeros(lb)
for u in range(lb):
ind = bvals == b_unique[u]
shell = data_norm[ind]
if np.sum(ind) > 20:
M = rho[ind, :]
pseudo_inv = np.dot(np.linalg.inv(np.dot(M.T, M) + w * lb_matrix), M.T)
coef = np.dot(pseudo_inv, shell)
means[u] = coef[0] / np.sqrt(4 * np.pi)
else:
means[u] = shell.mean()
return means
|
https://github.com/dipy/dipy/issues/1654
|
dipy (master)$nosetests dipy/reconst/tests/test_forecast.py
F.....
======================================================================
FAIL: dipy.reconst.tests.test_forecast.test_forecast_positive_constrain
----------------------------------------------------------------------
Traceback (most recent call last):
File "/Users/arokem/.virtualenvs/dipy/lib/python3.7/site-packages/nose/case.py", line 198, in runTest
self.test(*self.arg)
File "/Users/arokem/.virtualenvs/dipy/lib/python3.7/site-packages/numpy/testing/_private/decorators.py", line 155, in skipper_func
return f(*args, **kwargs)
File "/Users/arokem/source/dipy/dipy/reconst/tests/test_forecast.py", line 53, in test_forecast_positive_constrain
assert_almost_equal(fodf[fodf < 0].sum(), 0, 2)
File "/Users/arokem/.virtualenvs/dipy/lib/python3.7/site-packages/numpy/testing/_private/utils.py", line 584, in assert_almost_equal
raise AssertionError(_build_err_msg())
AssertionError:
Arrays are not almost equal to 2 decimals
ACTUAL: -0.015931951384411995
DESIRED: 0
----------------------------------------------------------------------
Ran 6 tests in 0.264s
|
AssertionError
|
def lb_forecast(sh_order):
r"""Returns the Laplace-Beltrami regularization matrix for FORECAST"""
n_c = int((sh_order + 1) * (sh_order + 2) / 2)
diag_lb = np.zeros(n_c)
counter = 0
for l in range(0, sh_order + 1, 2):
stop = 2 * l + 1 + counter
diag_lb[counter:stop] = (l * (l + 1)) ** 2
counter = stop
return np.diag(diag_lb)
|
def lb_forecast(sh_order):
r"""Returns the Laplace-Beltrami regularization matrix for FORECAST"""
n_c = int((sh_order + 1) * (sh_order + 2) / 2)
diag_lb = np.zeros(n_c)
counter = 0
for l in range(0, sh_order + 1, 2):
for m in range(-l, l + 1):
diag_lb[counter] = (l * (l + 1)) ** 2
counter += 1
return np.diag(diag_lb)
|
https://github.com/dipy/dipy/issues/1654
|
dipy (master)$nosetests dipy/reconst/tests/test_forecast.py
F.....
======================================================================
FAIL: dipy.reconst.tests.test_forecast.test_forecast_positive_constrain
----------------------------------------------------------------------
Traceback (most recent call last):
File "/Users/arokem/.virtualenvs/dipy/lib/python3.7/site-packages/nose/case.py", line 198, in runTest
self.test(*self.arg)
File "/Users/arokem/.virtualenvs/dipy/lib/python3.7/site-packages/numpy/testing/_private/decorators.py", line 155, in skipper_func
return f(*args, **kwargs)
File "/Users/arokem/source/dipy/dipy/reconst/tests/test_forecast.py", line 53, in test_forecast_positive_constrain
assert_almost_equal(fodf[fodf < 0].sum(), 0, 2)
File "/Users/arokem/.virtualenvs/dipy/lib/python3.7/site-packages/numpy/testing/_private/utils.py", line 584, in assert_almost_equal
raise AssertionError(_build_err_msg())
AssertionError:
Arrays are not almost equal to 2 decimals
ACTUAL: -0.015931951384411995
DESIRED: 0
----------------------------------------------------------------------
Ran 6 tests in 0.264s
|
AssertionError
|
def comparator(operator: Comparator) -> Comparator:
"""Wrap a Version binary op method in a type-check."""
@wraps(operator)
def wrapper(self: "Version", other: Comparable) -> bool:
comparable_types = (
Version,
dict,
tuple,
list,
*String.__args__, # type: ignore
)
if not isinstance(other, comparable_types):
return NotImplemented
return operator(self, other)
return wrapper
|
def comparator(operator: Comparator) -> Comparator:
"""Wrap a Version binary op method in a type-check."""
@wraps(operator)
def wrapper(self: "Version", other: Comparable) -> bool:
comparable_types = (
Version,
dict,
tuple,
list,
*String.__args__, # type: ignore
)
if not isinstance(other, comparable_types):
raise TypeError(
"other type %r must be in %r" % (type(other), comparable_types)
)
return operator(self, other)
return wrapper
|
https://github.com/python-semver/python-semver/issues/316
|
class Foo:
... pass
v1 = semver.version.Version(1, 2, 3)
v1 < Foo()
Traceback (most recent call last)
...
TypeError: other type <class '__main__.Foo'> must be in (<class 'semver.version.Version'>, <class 'dict'>, <class 'tuple'>, <class 'list'>, <class 'str'>, <class 'bytes'>)
|
TypeError
|
def wrapper(self: "Version", other: Comparable) -> bool:
comparable_types = (
Version,
dict,
tuple,
list,
*String.__args__, # type: ignore
)
if not isinstance(other, comparable_types):
return NotImplemented
return operator(self, other)
|
def wrapper(self: "Version", other: Comparable) -> bool:
comparable_types = (
Version,
dict,
tuple,
list,
*String.__args__, # type: ignore
)
if not isinstance(other, comparable_types):
raise TypeError("other type %r must be in %r" % (type(other), comparable_types))
return operator(self, other)
|
https://github.com/python-semver/python-semver/issues/316
|
class Foo:
... pass
v1 = semver.version.Version(1, 2, 3)
v1 < Foo()
Traceback (most recent call last)
...
TypeError: other type <class '__main__.Foo'> must be in (<class 'semver.version.Version'>, <class 'dict'>, <class 'tuple'>, <class 'list'>, <class 'str'>, <class 'bytes'>)
|
TypeError
|
def comparator(operator):
"""Wrap a VersionInfo binary op method in a type-check."""
@wraps(operator)
def wrapper(self, other):
comparable_types = (VersionInfo, dict, tuple, list, str)
if not isinstance(other, comparable_types):
raise TypeError(
"other type %r must be in %r" % (type(other), comparable_types)
)
return operator(self, other)
return wrapper
|
def comparator(operator):
"""Wrap a VersionInfo binary op method in a type-check."""
@wraps(operator)
def wrapper(self, other):
comparable_types = (VersionInfo, dict, tuple)
if not isinstance(other, comparable_types):
raise TypeError(
"other type %r must be in %r" % (type(other), comparable_types)
)
return operator(self, other)
return wrapper
|
https://github.com/python-semver/python-semver/issues/244
|
v1 < [1, 2, 4]
Traceback (most recent call last):
...
TypeError: other type <class 'list'> must be in (<class 'semver.VersionInfo'>, <class 'dict'>, <class 'tuple'>)
v1 < "1.2.4"
Traceback (most recent call last):
...
TypeError: other type <class 'str'> must be in (<class 'semver.VersionInfo'>, <class 'dict'>, <class 'tuple'>)
|
TypeError
|
def wrapper(self, other):
comparable_types = (VersionInfo, dict, tuple, list, str)
if not isinstance(other, comparable_types):
raise TypeError("other type %r must be in %r" % (type(other), comparable_types))
return operator(self, other)
|
def wrapper(self, other):
comparable_types = (VersionInfo, dict, tuple)
if not isinstance(other, comparable_types):
raise TypeError("other type %r must be in %r" % (type(other), comparable_types))
return operator(self, other)
|
https://github.com/python-semver/python-semver/issues/244
|
v1 < [1, 2, 4]
Traceback (most recent call last):
...
TypeError: other type <class 'list'> must be in (<class 'semver.VersionInfo'>, <class 'dict'>, <class 'tuple'>)
v1 < "1.2.4"
Traceback (most recent call last):
...
TypeError: other type <class 'str'> must be in (<class 'semver.VersionInfo'>, <class 'dict'>, <class 'tuple'>)
|
TypeError
|
def run(self):
CleanCommand.run(self)
delete_in_root = ["build", ".cache", "dist", ".eggs", "*.egg-info", ".tox"]
delete_everywhere = ["__pycache__", "*.pyc"]
for candidate in delete_in_root:
rmtree_glob(candidate)
for visible_dir in glob("[A-Za-z0-9]*"):
for candidate in delete_everywhere:
rmtree_glob(join(visible_dir, candidate))
rmtree_glob(join(visible_dir, "*", candidate))
|
def run(self):
super(CleanCommand, self).run()
delete_in_root = ["build", ".cache", "dist", ".eggs", "*.egg-info", ".tox"]
delete_everywhere = ["__pycache__", "*.pyc"]
for candidate in delete_in_root:
rmtree_glob(candidate)
for visible_dir in glob("[A-Za-z0-9]*"):
for candidate in delete_everywhere:
rmtree_glob(join(visible_dir, candidate))
rmtree_glob(join(visible_dir, "*", candidate))
|
https://github.com/python-semver/python-semver/issues/224
|
ERROR: Execution of '/yocto/build/3.0-zeus/bbb/tmp/work/cortexa8hf-neon-poky-linux-gnueabi/python3-semver/2.9.1-r0/temp/run.do_configure.28554' failed with exit code 1:
running clean
Traceback (most recent call last):
File "setup.py", line 101, in <module>
entry_points={"console_scripts": ["pysemver = semver:main"]},
File "/yocto/build/3.0-zeus/bbb/tmp/work/cortexa8hf-neon-poky-linux-gnueabi/python3-semver/2.9.1-r0/recipe-sysroot-native/usr/lib/python3.7/site-packages/setuptools/__init__.py", line 145, in setup
return distutils.core.setup(**attrs)
File "/yocto/build/3.0-zeus/bbb/tmp/work/cortexa8hf-neon-poky-linux-gnueabi/python3-semver/2.9.1-r0/recipe-sysroot-native/usr/lib/python3.7/distutils/core.py", line 148, in setup
dist.run_commands()
File "/yocto/build/3.0-zeus/bbb/tmp/work/cortexa8hf-neon-poky-linux-gnueabi/python3-semver/2.9.1-r0/recipe-sysroot-native/usr/lib/python3.7/distutils/dist.py", line 966, in run_commands
self.run_command(cmd)
File "/yocto/build/3.0-zeus/bbb/tmp/work/cortexa8hf-neon-poky-linux-gnueabi/python3-semver/2.9.1-r0/recipe-sysroot-native/usr/lib/python3.7/distutils/dist.py", line 985, in run_command
cmd_obj.run()
File "setup.py", line 41, in run
super(CleanCommand, self).run()
File "/yocto/build/3.0-zeus/bbb/tmp/work/cortexa8hf-neon-poky-linux-gnueabi/python3-semver/2.9.1-r0/recipe-sysroot-native/usr/lib/python3.7/distutils/cmd.py", line 176, in run
% self.__class__)
RuntimeError: abstract method -- subclass <class '__main__.Clean'> must override
WARNING: exit code 1 from a shell command.
|
RuntimeError
|
def process(args):
"""Process the input from the CLI
:param args: The parsed arguments
:type args: :class:`argparse.Namespace`
:param parser: the parser instance
:type parser: :class:`argparse.ArgumentParser`
:return: result of the selected action
:rtype: str
"""
if not hasattr(args, "which"):
args.parser.print_help()
raise SystemExit()
elif args.which == "bump":
maptable = {
"major": "bump_major",
"minor": "bump_minor",
"patch": "bump_patch",
"prerelease": "bump_prerelease",
"build": "bump_build",
}
if args.bump is None:
# When bump is called without arguments,
# print the help and exit
args.parser.parse_args([args.which, "-h"])
ver = parse_version_info(args.version)
# get the respective method and call it
func = getattr(ver, maptable[args.bump])
return str(func())
elif args.which == "compare":
return str(compare(args.version1, args.version2))
|
def process(args):
"""Process the input from the CLI
:param args: The parsed arguments
:type args: :class:`argparse.Namespace`
:param parser: the parser instance
:type parser: :class:`argparse.ArgumentParser`
:return: result of the selected action
:rtype: str
"""
if args.which == "bump":
maptable = {
"major": "bump_major",
"minor": "bump_minor",
"patch": "bump_patch",
"prerelease": "bump_prerelease",
"build": "bump_build",
}
ver = parse_version_info(args.version)
# get the respective method and call it
func = getattr(ver, maptable[args.bump])
return str(func())
elif args.which == "compare":
return str(compare(args.version1, args.version2))
|
https://github.com/python-semver/python-semver/issues/192
|
$ tox -e py36
$ .tox/py36/bin/pysemver
Traceback (most recent call last):
File ".tox/py36/bin/pysemver", line 8, in <module>
sys.exit(main())
File ".tox/py36/lib/python3.6/site-packages/semver.py", line 720, in main
result = process(args)
File ".tox/py36/lib/python3.6/site-packages/semver.py", line 693, in process
if args.which == "bump":
AttributeError: 'Namespace' object has no attribute 'which'
|
AttributeError
|
def main(cliargs=None):
"""Entry point for the application script
:param list cliargs: Arguments to parse or None (=use :class:`sys.argv`)
:return: error code
:rtype: int
"""
try:
parser = createparser()
args = parser.parse_args(args=cliargs)
# Save parser instance:
args.parser = parser
result = process(args)
print(result)
return 0
except (ValueError, TypeError) as err:
print("ERROR", err, file=sys.stderr)
return 2
|
def main(cliargs=None):
"""Entry point for the application script
:param list cliargs: Arguments to parse or None (=use :class:`sys.argv`)
:return: error code
:rtype: int
"""
try:
parser = createparser()
args = parser.parse_args(args=cliargs)
# args.parser = parser
result = process(args)
print(result)
return 0
except (ValueError, TypeError) as err:
print("ERROR", err, file=sys.stderr)
return 2
|
https://github.com/python-semver/python-semver/issues/192
|
$ tox -e py36
$ .tox/py36/bin/pysemver
Traceback (most recent call last):
File ".tox/py36/bin/pysemver", line 8, in <module>
sys.exit(main())
File ".tox/py36/lib/python3.6/site-packages/semver.py", line 720, in main
result = process(args)
File ".tox/py36/lib/python3.6/site-packages/semver.py", line 693, in process
if args.which == "bump":
AttributeError: 'Namespace' object has no attribute 'which'
|
AttributeError
|
def add_trajectory(
self,
positions,
epochs,
groundtrack_show=False,
groundtrack_lead_time=None,
groundtrack_trail_time=None,
groundtrack_width=None,
groundtrack_color=None,
id_name=None,
id_description=None,
path_width=None,
path_show=None,
path_color=None,
label_fill_color=None,
label_outline_color=None,
label_font=None,
label_text=None,
label_show=None,
):
"""
Adds trajectory.
Parameters
----------
positions: ~astropy.coordinates.CartesianRepresentation
Trajectory to plot.
epochs: ~astropy.time.core.Time
Epochs for positions.
groundtrack_show: bool
If set to true, the groundtrack is
displayed.
groundtrack_lead_time: double
The time the animation is ahead of the real-time groundtrack
groundtrack_trail_time: double
The time the animation is behind the real-time groundtrack
groundtrack_width: int
Groundtrack width
groundtrack_color: list (int)
Rgba groundtrack color. By default, it is set to the path color
id_name: str
Set orbit name
id_description: str
Set orbit description
path_width: int
Path width
path_show: bool
Indicates whether the path is visible
path_color: list (int)
Rgba path color
label_fill_color: list (int)
Fill Color in rgba format
label_outline_color: list (int)
Outline Color in rgba format
label_font: str
Set label font style and size (CSS syntax)
label_text: str
Set label text
label_show: bool
Indicates whether the label is visible
"""
if self.attractor is None:
raise ValueError("An attractor must be set up first.")
positions = (
positions.represent_as(CartesianRepresentation).get_xyz(1).to(u.meter).value
)
epochs = Time(epochs, format="isot")
if len(epochs) != len(positions):
raise ValueError("Number of Points and Epochs must be equal.")
epochs = np.fromiter(
map(lambda epoch: (epoch - epochs[0]).to(u.second).value, epochs),
dtype=float,
)
positions = np.around(
np.concatenate([epochs[..., None], positions], axis=1).ravel(), 1
).tolist()
self.trajectories.append([positions, None, label_text, path_color])
start_epoch = Time(self.start_epoch, format="isot")
pckt = Packet(
id=self.i,
name=id_name,
description=id_description,
availability=TimeInterval(start=self.start_epoch, end=self.end_epoch),
position=Position(
interpolationDegree=5,
interpolationAlgorithm=InterpolationAlgorithms.LAGRANGE,
referenceFrame=ReferenceFrames.INERTIAL,
cartesian=positions,
# Use explicit UTC timezone, rather than the default, which is a local timezone.
epoch=start_epoch.datetime.replace(tzinfo=timezone.utc),
),
path=Path(
show=path_show,
width=path_width,
material=Material(
solidColor=SolidColorMaterial(color=Color.from_list(path_color))
)
if path_color is not None
else Material(
solidColor=SolidColorMaterial(color=Color.from_list([255, 255, 0]))
),
resolution=120,
),
label=Label(
text=label_text,
font=label_font if label_font is not None else "11pt Lucida Console",
show=label_show,
fillColor=Color(rgba=label_fill_color)
if label_fill_color is not None
else Color(rgba=[255, 255, 0, 255]),
outlineColor=Color(rgba=label_outline_color)
if label_outline_color is not None
else Color(rgba=[255, 255, 0, 255]),
),
billboard=Billboard(image=PIC_SATELLITE, show=True),
)
self.packets.append(pckt)
if groundtrack_show:
raise NotImplementedError("Ground tracking for trajectory not implemented yet")
self.i += 1
|
def add_trajectory(
self,
positions,
epochs,
groundtrack_show=False,
groundtrack_lead_time=None,
groundtrack_trail_time=None,
groundtrack_width=None,
groundtrack_color=None,
id_name=None,
id_description=None,
path_width=None,
path_show=None,
path_color=None,
label_fill_color=None,
label_outline_color=None,
label_font=None,
label_text=None,
label_show=None,
):
"""
Adds trajectory.
Parameters
----------
positions: ~astropy.coordinates.CartesianRepresentation
Trajectory to plot.
epochs: ~astropy.time.core.Time
Epochs for positions.
groundtrack_show: bool
If set to true, the groundtrack is
displayed.
groundtrack_lead_time: double
The time the animation is ahead of the real-time groundtrack
groundtrack_trail_time: double
The time the animation is behind the real-time groundtrack
groundtrack_width: int
Groundtrack width
groundtrack_color: list (int)
Rgba groundtrack color. By default, it is set to the path color
id_name: str
Set orbit name
id_description: str
Set orbit description
path_width: int
Path width
path_show: bool
Indicates whether the path is visible
path_color: list (int)
Rgba path color
label_fill_color: list (int)
Fill Color in rgba format
label_outline_color: list (int)
Outline Color in rgba format
label_font: str
Set label font style and size (CSS syntax)
label_text: str
Set label text
label_show: bool
Indicates whether the label is visible
"""
if self.attractor is None:
raise ValueError("An attractor must be set up first.")
positions = (
positions.represent_as(CartesianRepresentation).get_xyz(1).to(u.meter).value
)
epochs = Time(epochs, format="isot")
if len(epochs) != len(positions):
raise ValueError("Number of Points and Epochs must be equal.")
epochs = np.fromiter(
map(lambda epoch: (epoch - epochs[0]).to(u.second).value, epochs),
dtype=np.float,
)
positions = np.around(
np.concatenate([epochs[..., None], positions], axis=1).ravel(), 1
).tolist()
self.trajectories.append([positions, None, label_text, path_color])
start_epoch = Time(self.start_epoch, format="isot")
pckt = Packet(
id=self.i,
name=id_name,
description=id_description,
availability=TimeInterval(start=self.start_epoch, end=self.end_epoch),
position=Position(
interpolationDegree=5,
interpolationAlgorithm=InterpolationAlgorithms.LAGRANGE,
referenceFrame=ReferenceFrames.INERTIAL,
cartesian=positions,
# Use explicit UTC timezone, rather than the default, which is a local timezone.
epoch=start_epoch.datetime.replace(tzinfo=timezone.utc),
),
path=Path(
show=path_show,
width=path_width,
material=Material(
solidColor=SolidColorMaterial(color=Color.from_list(path_color))
)
if path_color is not None
else Material(
solidColor=SolidColorMaterial(color=Color.from_list([255, 255, 0]))
),
resolution=120,
),
label=Label(
text=label_text,
font=label_font if label_font is not None else "11pt Lucida Console",
show=label_show,
fillColor=Color(rgba=label_fill_color)
if label_fill_color is not None
else Color(rgba=[255, 255, 0, 255]),
outlineColor=Color(rgba=label_outline_color)
if label_outline_color is not None
else Color(rgba=[255, 255, 0, 255]),
),
billboard=Billboard(image=PIC_SATELLITE, show=True),
)
self.packets.append(pckt)
if groundtrack_show:
raise NotImplementedError("Ground tracking for trajectory not implemented yet")
self.i += 1
|
https://github.com/poliastro/poliastro/issues/1100
|
2021-02-09T22:42:25.5782954Z check run-test: commands[3] | python -m build .
2021-02-09T22:42:28.6022625Z Found existing installation: setuptools 49.2.1
2021-02-09T22:42:28.6436801Z Uninstalling setuptools-49.2.1:
2021-02-09T22:42:28.6526758Z Successfully uninstalled setuptools-49.2.1
...
2021-02-09T22:42:35.0995738Z Collecting astroquery>=0.3.9
2021-02-09T22:42:35.1074040Z Downloading astroquery-0.4.1.tar.gz (6.5 MB)
2021-02-09T22:42:35.6701112Z ERROR: Command errored out with exit status 1:
2021-02-09T22:42:35.6703383Z command: /tmp/build-env-260ocgnr/bin/python -c 'import sys, setuptools, tokenize; sys.argv[0] = '"'"'/tmp/pip-install-gcgcam16/astroquery/setup.py'"'"'; __file__='"'"'/tmp/pip-install-gcgcam16/astroquery/setup.py'"'"';f=getattr(tokenize, '"'"'open'"'"', open)(__file__);code=f.read().replace('"'"'\r\n'"'"', '"'"'\n'"'"');f.close();exec(compile(code, __file__, '"'"'exec'"'"'))' egg_info --egg-base /tmp/pip-pip-egg-info-3h_ptp3t
2021-02-09T22:42:35.6705974Z cwd: /tmp/pip-install-gcgcam16/astroquery/
2021-02-09T22:42:35.6706521Z Complete output (3 lines):
2021-02-09T22:42:35.6707238Z Traceback (most recent call last):
2021-02-09T22:42:35.6707664Z File "<string>", line 1, in <module>
2021-02-09T22:42:35.6708402Z ModuleNotFoundError: No module named 'setuptools'
|
ModuleNotFoundError
|
def build_ephem_interpolant(body, period, t_span, rtol=1e-5):
"""Interpolates ephemerides data
Parameters
----------
body : Body
Source body.
period : ~astropy.units.Quantity
Orbital period.
t_span : list(~astropy.units.Quantity)
Initial and final epochs.
rtol : float, optional
Relative tolerance. Controls the number of sampled data points,
defaults to 1e-5.
Returns
-------
intrp : ~scipy.interpolate.interpolate.interp1d
Interpolated function.
"""
h = (period * rtol).to(u.day).value
t_span = (t_span[0].to(u.day).value, t_span[1].to(u.day).value + 0.01)
t_values = np.linspace(*t_span, int((t_span[1] - t_span[0]) / h)) # type: ignore
r_values = np.zeros((t_values.shape[0], 3))
for i, t in enumerate(t_values):
epoch = Time(t, format="jd", scale="tdb")
r = get_body_barycentric(body.name, epoch)
r = (
ICRS(x=r.x, y=r.y, z=r.z, representation_type=CartesianRepresentation)
.transform_to(GCRS(obstime=epoch))
.represent_as(CartesianRepresentation)
)
r_values[i] = r.xyz.to(u.km)
t_values = ((t_values - t_span[0]) * u.day).to(u.s).value
return interp1d(t_values, r_values, kind="cubic", axis=0, assume_sorted=True)
|
def build_ephem_interpolant(body, period, t_span, rtol=1e-5):
"""Interpolates ephemerides data
Parameters
----------
body : Body
Source body.
period : ~astropy.units.Quantity
Orbital period.
t_span : list(~astropy.units.Quantity)
Initial and final epochs.
rtol : float, optional
Relative tolerance. Controls the number of sampled data points,
defaults to 1e-5.
Returns
-------
intrp : ~scipy.interpolate.interpolate.interp1d
Interpolated function.
"""
h = (period * rtol).to(u.day).value
t_span = (t_span[0].to(u.day).value, t_span[1].to(u.day).value + 0.01)
t_values = np.linspace(*t_span, int((t_span[1] - t_span[0]) / h))
r_values = np.zeros((t_values.shape[0], 3))
for i, t in enumerate(t_values):
epoch = Time(t, format="jd", scale="tdb")
r = get_body_barycentric(body.name, epoch)
r = (
ICRS(x=r.x, y=r.y, z=r.z, representation_type=CartesianRepresentation)
.transform_to(GCRS(obstime=epoch))
.represent_as(CartesianRepresentation)
)
r_values[i] = r.xyz.to(u.km)
t_values = ((t_values - t_span[0]) * u.day).to(u.s).value
return interp1d(t_values, r_values, kind="cubic", axis=0, assume_sorted=True)
|
https://github.com/poliastro/poliastro/issues/1100
|
2021-02-09T22:42:25.5782954Z check run-test: commands[3] | python -m build .
2021-02-09T22:42:28.6022625Z Found existing installation: setuptools 49.2.1
2021-02-09T22:42:28.6436801Z Uninstalling setuptools-49.2.1:
2021-02-09T22:42:28.6526758Z Successfully uninstalled setuptools-49.2.1
...
2021-02-09T22:42:35.0995738Z Collecting astroquery>=0.3.9
2021-02-09T22:42:35.1074040Z Downloading astroquery-0.4.1.tar.gz (6.5 MB)
2021-02-09T22:42:35.6701112Z ERROR: Command errored out with exit status 1:
2021-02-09T22:42:35.6703383Z command: /tmp/build-env-260ocgnr/bin/python -c 'import sys, setuptools, tokenize; sys.argv[0] = '"'"'/tmp/pip-install-gcgcam16/astroquery/setup.py'"'"'; __file__='"'"'/tmp/pip-install-gcgcam16/astroquery/setup.py'"'"';f=getattr(tokenize, '"'"'open'"'"', open)(__file__);code=f.read().replace('"'"'\r\n'"'"', '"'"'\n'"'"');f.close();exec(compile(code, __file__, '"'"'exec'"'"'))' egg_info --egg-base /tmp/pip-pip-egg-info-3h_ptp3t
2021-02-09T22:42:35.6705974Z cwd: /tmp/pip-install-gcgcam16/astroquery/
2021-02-09T22:42:35.6706521Z Complete output (3 lines):
2021-02-09T22:42:35.6707238Z Traceback (most recent call last):
2021-02-09T22:42:35.6707664Z File "<string>", line 1, in <module>
2021-02-09T22:42:35.6708402Z ModuleNotFoundError: No module named 'setuptools'
|
ModuleNotFoundError
|
def _sample_open(self, values, min_anomaly, max_anomaly):
# Select a sensible limiting value for non-closed orbits
# This corresponds to max(r = 3p, r = self.r)
# We have to wrap nu in [-180, 180) to compare it with the output of
# the arc cosine, which is in the range [0, 180)
# Start from -nu_limit
wrapped_nu = Angle(self.nu).wrap_at(180 * u.deg)
nu_limit = max(hyp_nu_limit(self.ecc, 3.0), abs(wrapped_nu)).to(u.rad).value
limits = [
min_anomaly.to(u.rad).value if min_anomaly is not None else -nu_limit,
max_anomaly.to(u.rad).value if max_anomaly is not None else nu_limit,
] * u.rad # type: u.Quantity
# Now we check that none of the provided values
# is outside of the hyperbolic range
nu_max = hyp_nu_limit(self.ecc) - 1e-3 * u.rad # Arbitrary delta
if not Angle(limits).is_within_bounds(-nu_max, nu_max):
warn("anomaly outside range, clipping", OrbitSamplingWarning, stacklevel=2)
limits = limits.clip(-nu_max, nu_max)
nu_values = np.linspace(*limits, values) # type: ignore
return nu_values
|
def _sample_open(self, values, min_anomaly, max_anomaly):
# Select a sensible limiting value for non-closed orbits
# This corresponds to max(r = 3p, r = self.r)
# We have to wrap nu in [-180, 180) to compare it with the output of
# the arc cosine, which is in the range [0, 180)
# Start from -nu_limit
wrapped_nu = Angle(self.nu).wrap_at(180 * u.deg)
nu_limit = max(hyp_nu_limit(self.ecc, 3.0), abs(wrapped_nu)).to(u.rad).value
limits = [
min_anomaly.to(u.rad).value if min_anomaly is not None else -nu_limit,
max_anomaly.to(u.rad).value if max_anomaly is not None else nu_limit,
] * u.rad # type: u.Quantity
# Now we check that none of the provided values
# is outside of the hyperbolic range
nu_max = hyp_nu_limit(self.ecc) - 1e-3 * u.rad # Arbitrary delta
if not Angle(limits).is_within_bounds(-nu_max, nu_max):
warn("anomaly outside range, clipping", OrbitSamplingWarning, stacklevel=2)
limits = limits.clip(-nu_max, nu_max)
nu_values = np.linspace(*limits, values)
return nu_values
|
https://github.com/poliastro/poliastro/issues/1100
|
2021-02-09T22:42:25.5782954Z check run-test: commands[3] | python -m build .
2021-02-09T22:42:28.6022625Z Found existing installation: setuptools 49.2.1
2021-02-09T22:42:28.6436801Z Uninstalling setuptools-49.2.1:
2021-02-09T22:42:28.6526758Z Successfully uninstalled setuptools-49.2.1
...
2021-02-09T22:42:35.0995738Z Collecting astroquery>=0.3.9
2021-02-09T22:42:35.1074040Z Downloading astroquery-0.4.1.tar.gz (6.5 MB)
2021-02-09T22:42:35.6701112Z ERROR: Command errored out with exit status 1:
2021-02-09T22:42:35.6703383Z command: /tmp/build-env-260ocgnr/bin/python -c 'import sys, setuptools, tokenize; sys.argv[0] = '"'"'/tmp/pip-install-gcgcam16/astroquery/setup.py'"'"'; __file__='"'"'/tmp/pip-install-gcgcam16/astroquery/setup.py'"'"';f=getattr(tokenize, '"'"'open'"'"', open)(__file__);code=f.read().replace('"'"'\r\n'"'"', '"'"'\n'"'"');f.close();exec(compile(code, __file__, '"'"'exec'"'"'))' egg_info --egg-base /tmp/pip-pip-egg-info-3h_ptp3t
2021-02-09T22:42:35.6705974Z cwd: /tmp/pip-install-gcgcam16/astroquery/
2021-02-09T22:42:35.6706521Z Complete output (3 lines):
2021-02-09T22:42:35.6707238Z Traceback (most recent call last):
2021-02-09T22:42:35.6707664Z File "<string>", line 1, in <module>
2021-02-09T22:42:35.6708402Z ModuleNotFoundError: No module named 'setuptools'
|
ModuleNotFoundError
|
def plot_ephem(self, ephem, epoch=None, *, label=None, color=None, trail=False):
"""Plots Ephem object over its sampling period.
Parameters
----------
ephem : ~poliastro.ephem.Ephem
Ephemerides to plot.
epoch : astropy.time.Time, optional
Epoch of the current position, none will be used if not given.
label : str, optional
Label of the orbit, default to the name of the body.
color : string, optional
Color of the line and the position.
trail : bool, optional
Fade the orbit trail, default to False.
"""
if self._frame is None:
raise ValueError(
"A frame must be set up first, please use "
"set_orbit_frame(orbit) or plot(orbit)"
)
super().plot_ephem(ephem, epoch, label=label, color=color, trail=trail)
|
def plot_ephem(self, ephem, epoch=None, *, label=None, color=None, trail=False):
"""Plots Ephem object over its sampling period.
Parameters
----------
ephem : ~poliastro.ephem.Ephem
Ephemerides to plot.
epoch : astropy.time.Time, optional
Epoch of the current position, none will be used if not given.
label : str, optional
Label of the orbit, default to the name of the body.
color : string, optional
Color of the line and the position.
trail : bool, optional
Fade the orbit trail, default to False.
"""
super().plot_ephem(ephem, epoch, label=label, color=color, trail=trail)
if not self._figure._in_batch_mode:
return self.show()
|
https://github.com/poliastro/poliastro/issues/1021
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-130-a5728741413f> in <module>
2
3 plotter.set_attractor(Sun)
----> 4 plotter.plot_ephem(earth)
~/.pyenv/versions/seminar38/lib/python3.8/site-packages/poliastro/plotting/core.py in plot_ephem(self, ephem, epoch, label, color, trail)
118
119 """
--> 120 super().plot_ephem(ephem, epoch, label=label, color=color, trail=trail)
121
122 if not self._figure._in_batch_mode:
~/.pyenv/versions/seminar38/lib/python3.8/site-packages/poliastro/plotting/_base.py in plot_ephem(self, ephem, epoch, label, color, trail)
286 # Do not return the result of self._plot
287 # This behavior might be overriden by subclasses
--> 288 self._plot_ephem(ephem, epoch, label=label, color=color, trail=trail)
289
290
~/.pyenv/versions/seminar38/lib/python3.8/site-packages/poliastro/plotting/_base.py in _plot_ephem(self, ephem, epoch, label, color, trail)
200 r0 = None
201
--> 202 return self.__add_trajectory(
203 coordinates, r0, label=str(label), colors=colors, dashed=False
204 )
~/.pyenv/versions/seminar38/lib/python3.8/site-packages/poliastro/plotting/_base.py in __add_trajectory(self, coordinates, position, label, colors, dashed)
119 self._trajectories.append(trajectory)
120
--> 121 self._redraw_attractor()
122
123 trace_coordinates, trace_position = self.__plot_coordinates_and_position(
~/.pyenv/versions/seminar38/lib/python3.8/site-packages/poliastro/plotting/_base.py in _redraw_attractor(self)
77 self._clear_attractor()
78
---> 79 self._draw_sphere(
80 self._attractor_radius, color, self._attractor.name,
81 )
~/.pyenv/versions/seminar38/lib/python3.8/site-packages/poliastro/plotting/core.py in _draw_sphere(self, radius, color, name, center)
275
276 def _draw_sphere(self, radius, color, name, center=[0, 0, 0] * u.km):
--> 277 x_center, y_center = self._project(
278 center[None]
279 ) # Indexing trick to add one extra dimension
~/.pyenv/versions/seminar38/lib/python3.8/site-packages/poliastro/plotting/_base.py in _project(self, rr)
296
297 def _project(self, rr):
--> 298 rr_proj = rr - rr.dot(self._frame[2])[:, None] * self._frame[2]
299 x = rr_proj.dot(self._frame[0])
300 y = rr_proj.dot(self._frame[1])
TypeError: 'NoneType' object is not subscriptable
|
TypeError
|
def newton(regime, x0, args=(), tol=1.48e-08, maxiter=50):
p0 = 1.0 * x0
for iter in range(maxiter):
if regime == "hyperbolic":
fval = _kepler_equation_hyper(p0, *args)
fder = _kepler_equation_prime_hyper(p0, *args)
else:
fval = _kepler_equation(p0, *args)
fder = _kepler_equation_prime(p0, *args)
newton_step = fval / fder
p = p0 - newton_step
if abs(p - p0) < tol:
return p
p0 = p
return np.nan
|
def newton(regime, x0, args=(), tol=1.48e-08, maxiter=50):
p0 = 1.0 * x0
for iter in range(maxiter):
if regime == "hyperbolic":
fval = _kepler_equation_hyper(p0, *args)
fder = _kepler_equation_prime_hyper(p0, *args)
else:
fval = _kepler_equation(p0, *args)
fder = _kepler_equation_prime(p0, *args)
newton_step = fval / fder
p = p0 - newton_step
if abs(p - p0) < tol:
return p
p0 = p
return 1.0
|
https://github.com/poliastro/poliastro/issues/475
|
$ NUMBA_DISABLE_JIT=1 ipython --no-banner
In [1]: import numpy as np
In [2]: np.seterr(all="raise")
Out[2]: {'divide': 'warn', 'over': 'warn', 'under': 'ignore', 'invalid': 'warn'}
In [3]: import numpy as np
...: import math
...: from astropy import units as u
...: from poliastro.bodies import Earth, Moon
...: from poliastro.twobody import Orbit
...:
...: r=[8.e3, 1.e3, 0.]*u.km
...: v=[-0.5, -0.5, 0.]*u.km/u.s
...: orbit1=Orbit.from_vectors(Earth,r,v)
...: orbit2=orbit1.propagate(1.*u.h)
...:
...:
---------------------------------------------------------------------------
FloatingPointError Traceback (most recent call last)
<ipython-input-3-08d7b74965c9> in <module>()
8 v=[-0.5, -0.5, 0.]*u.km/u.s
9 orbit1=Orbit.from_vectors(Earth,r,v)
---> 10 orbit2=orbit1.propagate(1.*u.h)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/orbit.py in propagate(self, value, method, rtol, **kwargs)
403 time_of_flight = time.TimeDelta(value)
404
--> 405 return propagate(self, time_of_flight, method=method, rtol=rtol, **kwargs)
406
407 def sample(self, values=None, method=mean_motion):
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in propagate(orbit, time_of_flight, method, rtol, **kwargs)
177
178 """
--> 179 r, v = method(orbit, time_of_flight.to(u.s).value, rtol=rtol, **kwargs)
180 return orbit.from_vectors(orbit.attractor, r * u.km, v * u.km / u.s, orbit.epoch + time_of_flight, orbit.plane)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in mean_motion(orbit, tofs, **kwargs)
118
119 if not hasattr(tofs, '__len__'):
--> 120 return mean_motion_fast(k, r0, v0, tofs)
121
122 results = [mean_motion_fast(k, r0, v0, tof) for tof in tofs]
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/propagation.py in mean_motion(k, r0, v0, tof)
33
34 # get the initial mean anomaly
---> 35 M0 = nu_to_M(nu0, ecc)
36 # strong elliptic or strong hyperbolic orbits
37 if np.abs(ecc - 1.0) > 1e-2:
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in nu_to_M(nu, ecc, delta)
183 else:
184 D = nu_to_D(nu)
--> 185 M = D_to_M(D, ecc)
186 return M
187
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in D_to_M(D, ecc)
155 @jit
156 def D_to_M(D, ecc):
--> 157 M = _kepler_equation_parabolic(D, 0.0, ecc)
158 return M
159
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in _kepler_equation_parabolic(D, M, ecc)
26 @jit
27 def _kepler_equation_parabolic(D, M, ecc):
---> 28 return M_parabolic(ecc, D) - M
29
30
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in M_parabolic(ecc, D, tolerance)
41 k = 0
42 while not small_term:
---> 43 term = (ecc - 1.0 / (2.0 * k + 3.0)) * (x ** k)
44 small_term = np.abs(term) < tolerance
45 S += term
FloatingPointError: overflow encountered in double_scalars
|
FloatingPointError
|
def M_to_E(M, ecc):
"""Eccentric anomaly from mean anomaly.
.. versionadded:: 0.4.0
Parameters
----------
M : float
Mean anomaly in radians.
ecc : float
Eccentricity.
Returns
-------
E : float
Eccentric anomaly.
Notes
-----
This uses a Newton iteration on the Kepler equation.
"""
assert -np.pi <= M <= np.pi
if ecc < 0.8:
E0 = M
else:
E0 = np.pi * np.sign(M)
E = newton("elliptic", E0, args=(M, ecc))
return E
|
def M_to_E(M, ecc):
"""Eccentric anomaly from mean anomaly.
.. versionadded:: 0.4.0
Parameters
----------
M : float
Mean anomaly in radians.
ecc : float
Eccentricity.
Returns
-------
E : float
Eccentric anomaly.
Notes
-----
This uses a Newton iteration on the Kepler equation.
"""
E0 = M
E = newton("elliptic", E0, args=(M, ecc))
return E
|
https://github.com/poliastro/poliastro/issues/475
|
$ NUMBA_DISABLE_JIT=1 ipython --no-banner
In [1]: import numpy as np
In [2]: np.seterr(all="raise")
Out[2]: {'divide': 'warn', 'over': 'warn', 'under': 'ignore', 'invalid': 'warn'}
In [3]: import numpy as np
...: import math
...: from astropy import units as u
...: from poliastro.bodies import Earth, Moon
...: from poliastro.twobody import Orbit
...:
...: r=[8.e3, 1.e3, 0.]*u.km
...: v=[-0.5, -0.5, 0.]*u.km/u.s
...: orbit1=Orbit.from_vectors(Earth,r,v)
...: orbit2=orbit1.propagate(1.*u.h)
...:
...:
---------------------------------------------------------------------------
FloatingPointError Traceback (most recent call last)
<ipython-input-3-08d7b74965c9> in <module>()
8 v=[-0.5, -0.5, 0.]*u.km/u.s
9 orbit1=Orbit.from_vectors(Earth,r,v)
---> 10 orbit2=orbit1.propagate(1.*u.h)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/orbit.py in propagate(self, value, method, rtol, **kwargs)
403 time_of_flight = time.TimeDelta(value)
404
--> 405 return propagate(self, time_of_flight, method=method, rtol=rtol, **kwargs)
406
407 def sample(self, values=None, method=mean_motion):
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in propagate(orbit, time_of_flight, method, rtol, **kwargs)
177
178 """
--> 179 r, v = method(orbit, time_of_flight.to(u.s).value, rtol=rtol, **kwargs)
180 return orbit.from_vectors(orbit.attractor, r * u.km, v * u.km / u.s, orbit.epoch + time_of_flight, orbit.plane)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in mean_motion(orbit, tofs, **kwargs)
118
119 if not hasattr(tofs, '__len__'):
--> 120 return mean_motion_fast(k, r0, v0, tofs)
121
122 results = [mean_motion_fast(k, r0, v0, tof) for tof in tofs]
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/propagation.py in mean_motion(k, r0, v0, tof)
33
34 # get the initial mean anomaly
---> 35 M0 = nu_to_M(nu0, ecc)
36 # strong elliptic or strong hyperbolic orbits
37 if np.abs(ecc - 1.0) > 1e-2:
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in nu_to_M(nu, ecc, delta)
183 else:
184 D = nu_to_D(nu)
--> 185 M = D_to_M(D, ecc)
186 return M
187
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in D_to_M(D, ecc)
155 @jit
156 def D_to_M(D, ecc):
--> 157 M = _kepler_equation_parabolic(D, 0.0, ecc)
158 return M
159
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in _kepler_equation_parabolic(D, M, ecc)
26 @jit
27 def _kepler_equation_parabolic(D, M, ecc):
---> 28 return M_parabolic(ecc, D) - M
29
30
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in M_parabolic(ecc, D, tolerance)
41 k = 0
42 while not small_term:
---> 43 term = (ecc - 1.0 / (2.0 * k + 3.0)) * (x ** k)
44 small_term = np.abs(term) < tolerance
45 S += term
FloatingPointError: overflow encountered in double_scalars
|
FloatingPointError
|
def _kepler_equation_near_parabolic(D, M, ecc):
return D_to_M_near_parabolic(D, ecc) - M
|
def _kepler_equation_near_parabolic(D, M, ecc):
return D_to_M_near_parabolic(ecc, D) - M
|
https://github.com/poliastro/poliastro/issues/475
|
$ NUMBA_DISABLE_JIT=1 ipython --no-banner
In [1]: import numpy as np
In [2]: np.seterr(all="raise")
Out[2]: {'divide': 'warn', 'over': 'warn', 'under': 'ignore', 'invalid': 'warn'}
In [3]: import numpy as np
...: import math
...: from astropy import units as u
...: from poliastro.bodies import Earth, Moon
...: from poliastro.twobody import Orbit
...:
...: r=[8.e3, 1.e3, 0.]*u.km
...: v=[-0.5, -0.5, 0.]*u.km/u.s
...: orbit1=Orbit.from_vectors(Earth,r,v)
...: orbit2=orbit1.propagate(1.*u.h)
...:
...:
---------------------------------------------------------------------------
FloatingPointError Traceback (most recent call last)
<ipython-input-3-08d7b74965c9> in <module>()
8 v=[-0.5, -0.5, 0.]*u.km/u.s
9 orbit1=Orbit.from_vectors(Earth,r,v)
---> 10 orbit2=orbit1.propagate(1.*u.h)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/orbit.py in propagate(self, value, method, rtol, **kwargs)
403 time_of_flight = time.TimeDelta(value)
404
--> 405 return propagate(self, time_of_flight, method=method, rtol=rtol, **kwargs)
406
407 def sample(self, values=None, method=mean_motion):
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in propagate(orbit, time_of_flight, method, rtol, **kwargs)
177
178 """
--> 179 r, v = method(orbit, time_of_flight.to(u.s).value, rtol=rtol, **kwargs)
180 return orbit.from_vectors(orbit.attractor, r * u.km, v * u.km / u.s, orbit.epoch + time_of_flight, orbit.plane)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in mean_motion(orbit, tofs, **kwargs)
118
119 if not hasattr(tofs, '__len__'):
--> 120 return mean_motion_fast(k, r0, v0, tofs)
121
122 results = [mean_motion_fast(k, r0, v0, tof) for tof in tofs]
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/propagation.py in mean_motion(k, r0, v0, tof)
33
34 # get the initial mean anomaly
---> 35 M0 = nu_to_M(nu0, ecc)
36 # strong elliptic or strong hyperbolic orbits
37 if np.abs(ecc - 1.0) > 1e-2:
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in nu_to_M(nu, ecc, delta)
183 else:
184 D = nu_to_D(nu)
--> 185 M = D_to_M(D, ecc)
186 return M
187
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in D_to_M(D, ecc)
155 @jit
156 def D_to_M(D, ecc):
--> 157 M = _kepler_equation_parabolic(D, 0.0, ecc)
158 return M
159
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in _kepler_equation_parabolic(D, M, ecc)
26 @jit
27 def _kepler_equation_parabolic(D, M, ecc):
---> 28 return M_parabolic(ecc, D) - M
29
30
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in M_parabolic(ecc, D, tolerance)
41 k = 0
42 while not small_term:
---> 43 term = (ecc - 1.0 / (2.0 * k + 3.0)) * (x ** k)
44 small_term = np.abs(term) < tolerance
45 S += term
FloatingPointError: overflow encountered in double_scalars
|
FloatingPointError
|
def D_to_M_near_parabolic(D, ecc):
x = (ecc - 1.0) / (ecc + 1.0) * (D**2)
assert abs(x) < 1
S = S_x(ecc, x)
return np.sqrt(2.0 / (1.0 + ecc)) * D + np.sqrt(2.0 / (1.0 + ecc) ** 3) * (D**3) * S
|
def D_to_M_near_parabolic(ecc, D):
x = (ecc - 1.0) / (ecc + 1.0) * (D**2)
assert abs(x) < 1
S = S_x(ecc, x)
return np.sqrt(2.0 / (1.0 + ecc)) * D + np.sqrt(2.0 / (1.0 + ecc) ** 3) * (D**3) * S
|
https://github.com/poliastro/poliastro/issues/475
|
$ NUMBA_DISABLE_JIT=1 ipython --no-banner
In [1]: import numpy as np
In [2]: np.seterr(all="raise")
Out[2]: {'divide': 'warn', 'over': 'warn', 'under': 'ignore', 'invalid': 'warn'}
In [3]: import numpy as np
...: import math
...: from astropy import units as u
...: from poliastro.bodies import Earth, Moon
...: from poliastro.twobody import Orbit
...:
...: r=[8.e3, 1.e3, 0.]*u.km
...: v=[-0.5, -0.5, 0.]*u.km/u.s
...: orbit1=Orbit.from_vectors(Earth,r,v)
...: orbit2=orbit1.propagate(1.*u.h)
...:
...:
---------------------------------------------------------------------------
FloatingPointError Traceback (most recent call last)
<ipython-input-3-08d7b74965c9> in <module>()
8 v=[-0.5, -0.5, 0.]*u.km/u.s
9 orbit1=Orbit.from_vectors(Earth,r,v)
---> 10 orbit2=orbit1.propagate(1.*u.h)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/orbit.py in propagate(self, value, method, rtol, **kwargs)
403 time_of_flight = time.TimeDelta(value)
404
--> 405 return propagate(self, time_of_flight, method=method, rtol=rtol, **kwargs)
406
407 def sample(self, values=None, method=mean_motion):
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in propagate(orbit, time_of_flight, method, rtol, **kwargs)
177
178 """
--> 179 r, v = method(orbit, time_of_flight.to(u.s).value, rtol=rtol, **kwargs)
180 return orbit.from_vectors(orbit.attractor, r * u.km, v * u.km / u.s, orbit.epoch + time_of_flight, orbit.plane)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in mean_motion(orbit, tofs, **kwargs)
118
119 if not hasattr(tofs, '__len__'):
--> 120 return mean_motion_fast(k, r0, v0, tofs)
121
122 results = [mean_motion_fast(k, r0, v0, tof) for tof in tofs]
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/propagation.py in mean_motion(k, r0, v0, tof)
33
34 # get the initial mean anomaly
---> 35 M0 = nu_to_M(nu0, ecc)
36 # strong elliptic or strong hyperbolic orbits
37 if np.abs(ecc - 1.0) > 1e-2:
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in nu_to_M(nu, ecc, delta)
183 else:
184 D = nu_to_D(nu)
--> 185 M = D_to_M(D, ecc)
186 return M
187
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in D_to_M(D, ecc)
155 @jit
156 def D_to_M(D, ecc):
--> 157 M = _kepler_equation_parabolic(D, 0.0, ecc)
158 return M
159
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in _kepler_equation_parabolic(D, M, ecc)
26 @jit
27 def _kepler_equation_parabolic(D, M, ecc):
---> 28 return M_parabolic(ecc, D) - M
29
30
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in M_parabolic(ecc, D, tolerance)
41 k = 0
42 while not small_term:
---> 43 term = (ecc - 1.0 / (2.0 * k + 3.0)) * (x ** k)
44 small_term = np.abs(term) < tolerance
45 S += term
FloatingPointError: overflow encountered in double_scalars
|
FloatingPointError
|
def farnocchia(k, r0, v0, tof):
r"""Propagates orbit using mean motion.
This algorithm depends on the geometric shape of the orbit.
For the case of the strong elliptic or strong hyperbolic orbits:
.. math::
M = M_{0} + \frac{\mu^{2}}{h^{3}}\left ( 1 -e^{2}\right )^{\frac{3}{2}}t
.. versionadded:: 0.9.0
Parameters
----------
k : float
Standar Gravitational parameter
r0 : ~astropy.units.Quantity
Initial position vector wrt attractor center.
v0 : ~astropy.units.Quantity
Initial velocity vector.
tof : float
Time of flight (s).
Note
----
This method takes initial :math:`\vec{r}, \vec{v}`, calculates classical orbit parameters,
increases mean anomaly and performs inverse transformation to get final :math:`\vec{r}, \vec{v}`
The logic is based on formulae (4), (6) and (7) from http://dx.doi.org/10.1007/s10569-013-9476-9
"""
# get the initial true anomaly and orbit parameters that are constant over time
p, ecc, inc, raan, argp, nu0 = rv2coe(k, r0, v0)
q = p / (1 + ecc)
delta_t0 = delta_t_from_nu(nu0, ecc, k, q)
delta_t = delta_t0 + tof
nu = nu_from_delta_t(delta_t, ecc, k, q)
return coe2rv(k, p, ecc, inc, raan, argp, nu)
|
def farnocchia(k, r0, v0, tof):
r"""Propagates orbit using mean motion.
This algorithm depends on the geometric shape of the orbit.
For the case of the strong elliptic or strong hyperbolic orbits:
.. math::
M = M_{0} + \frac{\mu^{2}}{h^{3}}\left ( 1 -e^{2}\right )^{\frac{3}{2}}t
.. versionadded:: 0.9.0
Parameters
----------
k : float
Standar Gravitational parameter
r0 : ~astropy.units.Quantity
Initial position vector wrt attractor center.
v0 : ~astropy.units.Quantity
Initial velocity vector.
tof : float
Time of flight (s).
Note
----
This method takes initial :math:`\vec{r}, \vec{v}`, calculates classical orbit parameters,
increases mean anomaly and performs inverse transformation to get final :math:`\vec{r}, \vec{v}`
The logic is based on formulae (4), (6) and (7) from http://dx.doi.org/10.1007/s10569-013-9476-9
"""
# get the initial true anomaly and orbit parameters that are constant over time
p, ecc, inc, raan, argp, nu0 = rv2coe(k, r0, v0)
# get the initial mean anomaly
M0 = nu_to_M(nu0, ecc)
if np.abs(ecc - 1.0) > 1e-2:
# strong elliptic or strong hyperbolic orbits
a = p / (1.0 - ecc**2)
n = np.sqrt(k / np.abs(a**3))
else:
# near-parabolic orbit
q = p / np.abs(1.0 + ecc)
n = np.sqrt(k / 2.0 / (q**3))
M = M0 + tof * n
nu = M_to_nu(M, ecc)
return coe2rv(k, p, ecc, inc, raan, argp, nu)
|
https://github.com/poliastro/poliastro/issues/475
|
$ NUMBA_DISABLE_JIT=1 ipython --no-banner
In [1]: import numpy as np
In [2]: np.seterr(all="raise")
Out[2]: {'divide': 'warn', 'over': 'warn', 'under': 'ignore', 'invalid': 'warn'}
In [3]: import numpy as np
...: import math
...: from astropy import units as u
...: from poliastro.bodies import Earth, Moon
...: from poliastro.twobody import Orbit
...:
...: r=[8.e3, 1.e3, 0.]*u.km
...: v=[-0.5, -0.5, 0.]*u.km/u.s
...: orbit1=Orbit.from_vectors(Earth,r,v)
...: orbit2=orbit1.propagate(1.*u.h)
...:
...:
---------------------------------------------------------------------------
FloatingPointError Traceback (most recent call last)
<ipython-input-3-08d7b74965c9> in <module>()
8 v=[-0.5, -0.5, 0.]*u.km/u.s
9 orbit1=Orbit.from_vectors(Earth,r,v)
---> 10 orbit2=orbit1.propagate(1.*u.h)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/orbit.py in propagate(self, value, method, rtol, **kwargs)
403 time_of_flight = time.TimeDelta(value)
404
--> 405 return propagate(self, time_of_flight, method=method, rtol=rtol, **kwargs)
406
407 def sample(self, values=None, method=mean_motion):
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in propagate(orbit, time_of_flight, method, rtol, **kwargs)
177
178 """
--> 179 r, v = method(orbit, time_of_flight.to(u.s).value, rtol=rtol, **kwargs)
180 return orbit.from_vectors(orbit.attractor, r * u.km, v * u.km / u.s, orbit.epoch + time_of_flight, orbit.plane)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in mean_motion(orbit, tofs, **kwargs)
118
119 if not hasattr(tofs, '__len__'):
--> 120 return mean_motion_fast(k, r0, v0, tofs)
121
122 results = [mean_motion_fast(k, r0, v0, tof) for tof in tofs]
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/propagation.py in mean_motion(k, r0, v0, tof)
33
34 # get the initial mean anomaly
---> 35 M0 = nu_to_M(nu0, ecc)
36 # strong elliptic or strong hyperbolic orbits
37 if np.abs(ecc - 1.0) > 1e-2:
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in nu_to_M(nu, ecc, delta)
183 else:
184 D = nu_to_D(nu)
--> 185 M = D_to_M(D, ecc)
186 return M
187
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in D_to_M(D, ecc)
155 @jit
156 def D_to_M(D, ecc):
--> 157 M = _kepler_equation_parabolic(D, 0.0, ecc)
158 return M
159
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in _kepler_equation_parabolic(D, M, ecc)
26 @jit
27 def _kepler_equation_parabolic(D, M, ecc):
---> 28 return M_parabolic(ecc, D) - M
29
30
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in M_parabolic(ecc, D, tolerance)
41 k = 0
42 while not small_term:
---> 43 term = (ecc - 1.0 / (2.0 * k + 3.0)) * (x ** k)
44 small_term = np.abs(term) < tolerance
45 S += term
FloatingPointError: overflow encountered in double_scalars
|
FloatingPointError
|
def t_p(self):
"""Elapsed time since latest perifocal passage."""
t_p = (
delta_t_from_nu_fast(
self.nu.to_value(u.rad),
self.ecc.value,
self.attractor.k.to_value(u.km**3 / u.s**2),
self.r_p.to_value(u.km),
)
* u.s
)
return t_p
|
def t_p(self):
"""Elapsed time since latest perifocal passage."""
M = nu_to_M_fast(self.nu.to_value(u.rad), self.ecc.value) * u.rad
t_p = self.period * M / (360 * u.deg)
return t_p
|
https://github.com/poliastro/poliastro/issues/475
|
$ NUMBA_DISABLE_JIT=1 ipython --no-banner
In [1]: import numpy as np
In [2]: np.seterr(all="raise")
Out[2]: {'divide': 'warn', 'over': 'warn', 'under': 'ignore', 'invalid': 'warn'}
In [3]: import numpy as np
...: import math
...: from astropy import units as u
...: from poliastro.bodies import Earth, Moon
...: from poliastro.twobody import Orbit
...:
...: r=[8.e3, 1.e3, 0.]*u.km
...: v=[-0.5, -0.5, 0.]*u.km/u.s
...: orbit1=Orbit.from_vectors(Earth,r,v)
...: orbit2=orbit1.propagate(1.*u.h)
...:
...:
---------------------------------------------------------------------------
FloatingPointError Traceback (most recent call last)
<ipython-input-3-08d7b74965c9> in <module>()
8 v=[-0.5, -0.5, 0.]*u.km/u.s
9 orbit1=Orbit.from_vectors(Earth,r,v)
---> 10 orbit2=orbit1.propagate(1.*u.h)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/orbit.py in propagate(self, value, method, rtol, **kwargs)
403 time_of_flight = time.TimeDelta(value)
404
--> 405 return propagate(self, time_of_flight, method=method, rtol=rtol, **kwargs)
406
407 def sample(self, values=None, method=mean_motion):
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in propagate(orbit, time_of_flight, method, rtol, **kwargs)
177
178 """
--> 179 r, v = method(orbit, time_of_flight.to(u.s).value, rtol=rtol, **kwargs)
180 return orbit.from_vectors(orbit.attractor, r * u.km, v * u.km / u.s, orbit.epoch + time_of_flight, orbit.plane)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in mean_motion(orbit, tofs, **kwargs)
118
119 if not hasattr(tofs, '__len__'):
--> 120 return mean_motion_fast(k, r0, v0, tofs)
121
122 results = [mean_motion_fast(k, r0, v0, tof) for tof in tofs]
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/propagation.py in mean_motion(k, r0, v0, tof)
33
34 # get the initial mean anomaly
---> 35 M0 = nu_to_M(nu0, ecc)
36 # strong elliptic or strong hyperbolic orbits
37 if np.abs(ecc - 1.0) > 1e-2:
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in nu_to_M(nu, ecc, delta)
183 else:
184 D = nu_to_D(nu)
--> 185 M = D_to_M(D, ecc)
186 return M
187
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in D_to_M(D, ecc)
155 @jit
156 def D_to_M(D, ecc):
--> 157 M = _kepler_equation_parabolic(D, 0.0, ecc)
158 return M
159
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in _kepler_equation_parabolic(D, M, ecc)
26 @jit
27 def _kepler_equation_parabolic(D, M, ecc):
---> 28 return M_parabolic(ecc, D) - M
29
30
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in M_parabolic(ecc, D, tolerance)
41 k = 0
42 while not small_term:
---> 43 term = (ecc - 1.0 / (2.0 * k + 3.0)) * (x ** k)
44 small_term = np.abs(term) < tolerance
45 S += term
FloatingPointError: overflow encountered in double_scalars
|
FloatingPointError
|
def from_classical(
cls,
attractor,
a,
ecc,
inc,
raan,
argp,
nu,
epoch=J2000,
plane=Planes.EARTH_EQUATOR,
):
"""Return `Orbit` from classical orbital elements.
Parameters
----------
attractor : Body
Main attractor.
a : ~astropy.units.Quantity
Semi-major axis.
ecc : ~astropy.units.Quantity
Eccentricity.
inc : ~astropy.units.Quantity
Inclination
raan : ~astropy.units.Quantity
Right ascension of the ascending node.
argp : ~astropy.units.Quantity
Argument of the pericenter.
nu : ~astropy.units.Quantity
True anomaly.
epoch : ~astropy.time.Time, optional
Epoch, default to J2000.
plane : ~poliastro.frames.Planes
Fundamental plane of the frame.
"""
for element in a, ecc, inc, raan, argp, nu, epoch:
if not element.isscalar:
raise ValueError(f"Elements must be scalar, got {element}")
if ecc == 1.0 * u.one:
raise ValueError("For parabolic orbits use Orbit.parabolic instead")
if not 0 * u.deg <= inc <= 180 * u.deg:
raise ValueError("Inclination must be between 0 and 180 degrees")
if ecc > 1 and a > 0:
raise ValueError("Hyperbolic orbits have negative semimajor axis")
if not -np.pi * u.rad <= nu < np.pi * u.rad:
warn("Wrapping true anomaly to -π <= nu < π", stacklevel=2)
nu = ((nu + np.pi * u.rad) % (2 * np.pi * u.rad) - np.pi * u.rad).to(nu.unit)
ss = ClassicalState(attractor, a * (1 - ecc**2), ecc, inc, raan, argp, nu, plane)
return cls(ss, epoch)
|
def from_classical(
cls,
attractor,
a,
ecc,
inc,
raan,
argp,
nu,
epoch=J2000,
plane=Planes.EARTH_EQUATOR,
):
"""Return `Orbit` from classical orbital elements.
Parameters
----------
attractor : Body
Main attractor.
a : ~astropy.units.Quantity
Semi-major axis.
ecc : ~astropy.units.Quantity
Eccentricity.
inc : ~astropy.units.Quantity
Inclination
raan : ~astropy.units.Quantity
Right ascension of the ascending node.
argp : ~astropy.units.Quantity
Argument of the pericenter.
nu : ~astropy.units.Quantity
True anomaly.
epoch : ~astropy.time.Time, optional
Epoch, default to J2000.
plane : ~poliastro.frames.Planes
Fundamental plane of the frame.
"""
for element in a, ecc, inc, raan, argp, nu, epoch:
if not element.isscalar:
raise ValueError(f"Elements must be scalar, got {element}")
if ecc == 1.0 * u.one:
raise ValueError("For parabolic orbits use Orbit.parabolic instead")
if not 0 * u.deg <= inc <= 180 * u.deg:
raise ValueError("Inclination must be between 0 and 180 degrees")
if ecc > 1 and a > 0:
raise ValueError("Hyperbolic orbits have negative semimajor axis")
ss = ClassicalState(attractor, a * (1 - ecc**2), ecc, inc, raan, argp, nu, plane)
return cls(ss, epoch)
|
https://github.com/poliastro/poliastro/issues/475
|
$ NUMBA_DISABLE_JIT=1 ipython --no-banner
In [1]: import numpy as np
In [2]: np.seterr(all="raise")
Out[2]: {'divide': 'warn', 'over': 'warn', 'under': 'ignore', 'invalid': 'warn'}
In [3]: import numpy as np
...: import math
...: from astropy import units as u
...: from poliastro.bodies import Earth, Moon
...: from poliastro.twobody import Orbit
...:
...: r=[8.e3, 1.e3, 0.]*u.km
...: v=[-0.5, -0.5, 0.]*u.km/u.s
...: orbit1=Orbit.from_vectors(Earth,r,v)
...: orbit2=orbit1.propagate(1.*u.h)
...:
...:
---------------------------------------------------------------------------
FloatingPointError Traceback (most recent call last)
<ipython-input-3-08d7b74965c9> in <module>()
8 v=[-0.5, -0.5, 0.]*u.km/u.s
9 orbit1=Orbit.from_vectors(Earth,r,v)
---> 10 orbit2=orbit1.propagate(1.*u.h)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/orbit.py in propagate(self, value, method, rtol, **kwargs)
403 time_of_flight = time.TimeDelta(value)
404
--> 405 return propagate(self, time_of_flight, method=method, rtol=rtol, **kwargs)
406
407 def sample(self, values=None, method=mean_motion):
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in propagate(orbit, time_of_flight, method, rtol, **kwargs)
177
178 """
--> 179 r, v = method(orbit, time_of_flight.to(u.s).value, rtol=rtol, **kwargs)
180 return orbit.from_vectors(orbit.attractor, r * u.km, v * u.km / u.s, orbit.epoch + time_of_flight, orbit.plane)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in mean_motion(orbit, tofs, **kwargs)
118
119 if not hasattr(tofs, '__len__'):
--> 120 return mean_motion_fast(k, r0, v0, tofs)
121
122 results = [mean_motion_fast(k, r0, v0, tof) for tof in tofs]
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/propagation.py in mean_motion(k, r0, v0, tof)
33
34 # get the initial mean anomaly
---> 35 M0 = nu_to_M(nu0, ecc)
36 # strong elliptic or strong hyperbolic orbits
37 if np.abs(ecc - 1.0) > 1e-2:
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in nu_to_M(nu, ecc, delta)
183 else:
184 D = nu_to_D(nu)
--> 185 M = D_to_M(D, ecc)
186 return M
187
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in D_to_M(D, ecc)
155 @jit
156 def D_to_M(D, ecc):
--> 157 M = _kepler_equation_parabolic(D, 0.0, ecc)
158 return M
159
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in _kepler_equation_parabolic(D, M, ecc)
26 @jit
27 def _kepler_equation_parabolic(D, M, ecc):
---> 28 return M_parabolic(ecc, D) - M
29
30
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in M_parabolic(ecc, D, tolerance)
41 k = 0
42 while not small_term:
---> 43 term = (ecc - 1.0 / (2.0 * k + 3.0)) * (x ** k)
44 small_term = np.abs(term) < tolerance
45 S += term
FloatingPointError: overflow encountered in double_scalars
|
FloatingPointError
|
def from_sbdb(cls, name, **kwargs):
"""Return osculating `Orbit` by using `SBDB` from Astroquery.
Parameters
----------
name: string
Name of the body to make the request.
Returns
-------
ss: poliastro.twobody.orbit.Orbit
Orbit corresponding to body_name
Examples
--------
>>> from poliastro.twobody.orbit import Orbit
>>> apophis_orbit = Orbit.from_sbdb('apophis') # doctest: +REMOTE_DATA
"""
from poliastro.bodies import Sun
obj = SBDB.query(name, full_precision=True, **kwargs)
if "count" in obj:
# no error till now ---> more than one object has been found
# contains all the name of the objects
objects_name = obj["list"]["name"]
objects_name_in_str = "" # used to store them in string form each in new line
for i in objects_name:
objects_name_in_str += i + "\n"
raise ValueError(
str(obj["count"]) + " different objects found: \n" + objects_name_in_str
)
if "object" not in obj.keys():
raise ValueError("Object {} not found".format(name))
a = obj["orbit"]["elements"]["a"].to(u.AU) * u.AU
ecc = float(obj["orbit"]["elements"]["e"]) * u.one
inc = obj["orbit"]["elements"]["i"].to(u.deg) * u.deg
raan = obj["orbit"]["elements"]["om"].to(u.deg) * u.deg
argp = obj["orbit"]["elements"]["w"].to(u.deg) * u.deg
# Since JPL provides Mean Anomaly (M) we need to make
# the conversion to the true anomaly (nu)
M = obj["orbit"]["elements"]["ma"].to(u.rad) * u.rad
# NOTE: It is unclear how this conversion should happen,
# see https://ssd-api.jpl.nasa.gov/doc/sbdb.html
if ecc < 1:
M = (M + np.pi * u.rad) % (2 * np.pi * u.rad) - np.pi * u.rad
nu = E_to_nu(M_to_E(M, ecc), ecc)
elif ecc == 1:
nu = D_to_nu(M_to_D(M))
else:
nu = F_to_nu(M_to_F(M, ecc), ecc)
epoch = time.Time(obj["orbit"]["epoch"].to(u.d), format="jd")
ss = cls.from_classical(
Sun,
a,
ecc,
inc,
raan,
argp,
nu,
epoch=epoch.tdb,
plane=Planes.EARTH_ECLIPTIC,
)
return ss
|
def from_sbdb(cls, name, **kwargs):
"""Return osculating `Orbit` by using `SBDB` from Astroquery.
Parameters
----------
name: string
Name of the body to make the request.
Returns
-------
ss: poliastro.twobody.orbit.Orbit
Orbit corresponding to body_name
Examples
--------
>>> from poliastro.twobody.orbit import Orbit
>>> apophis_orbit = Orbit.from_sbdb('apophis') # doctest: +REMOTE_DATA
"""
from poliastro.bodies import Sun
obj = SBDB.query(name, full_precision=True, **kwargs)
if "count" in obj:
# no error till now ---> more than one object has been found
# contains all the name of the objects
objects_name = obj["list"]["name"]
objects_name_in_str = "" # used to store them in string form each in new line
for i in objects_name:
objects_name_in_str += i + "\n"
raise ValueError(
str(obj["count"]) + " different objects found: \n" + objects_name_in_str
)
if "object" not in obj.keys():
raise ValueError("Object {} not found".format(name))
a = obj["orbit"]["elements"]["a"].to(u.AU) * u.AU
ecc = float(obj["orbit"]["elements"]["e"]) * u.one
inc = obj["orbit"]["elements"]["i"].to(u.deg) * u.deg
raan = obj["orbit"]["elements"]["om"].to(u.deg) * u.deg
argp = obj["orbit"]["elements"]["w"].to(u.deg) * u.deg
# Since JPL provides Mean Anomaly (M) we need to make
# the conversion to the true anomaly (\nu)
nu = M_to_nu_fast(obj["orbit"]["elements"]["ma"].to(u.rad), ecc.value) * u.rad
epoch = time.Time(obj["orbit"]["epoch"].to(u.d), format="jd")
ss = cls.from_classical(
Sun,
a,
ecc,
inc,
raan,
argp,
nu,
epoch=epoch.tdb,
plane=Planes.EARTH_ECLIPTIC,
)
return ss
|
https://github.com/poliastro/poliastro/issues/475
|
$ NUMBA_DISABLE_JIT=1 ipython --no-banner
In [1]: import numpy as np
In [2]: np.seterr(all="raise")
Out[2]: {'divide': 'warn', 'over': 'warn', 'under': 'ignore', 'invalid': 'warn'}
In [3]: import numpy as np
...: import math
...: from astropy import units as u
...: from poliastro.bodies import Earth, Moon
...: from poliastro.twobody import Orbit
...:
...: r=[8.e3, 1.e3, 0.]*u.km
...: v=[-0.5, -0.5, 0.]*u.km/u.s
...: orbit1=Orbit.from_vectors(Earth,r,v)
...: orbit2=orbit1.propagate(1.*u.h)
...:
...:
---------------------------------------------------------------------------
FloatingPointError Traceback (most recent call last)
<ipython-input-3-08d7b74965c9> in <module>()
8 v=[-0.5, -0.5, 0.]*u.km/u.s
9 orbit1=Orbit.from_vectors(Earth,r,v)
---> 10 orbit2=orbit1.propagate(1.*u.h)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/orbit.py in propagate(self, value, method, rtol, **kwargs)
403 time_of_flight = time.TimeDelta(value)
404
--> 405 return propagate(self, time_of_flight, method=method, rtol=rtol, **kwargs)
406
407 def sample(self, values=None, method=mean_motion):
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in propagate(orbit, time_of_flight, method, rtol, **kwargs)
177
178 """
--> 179 r, v = method(orbit, time_of_flight.to(u.s).value, rtol=rtol, **kwargs)
180 return orbit.from_vectors(orbit.attractor, r * u.km, v * u.km / u.s, orbit.epoch + time_of_flight, orbit.plane)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in mean_motion(orbit, tofs, **kwargs)
118
119 if not hasattr(tofs, '__len__'):
--> 120 return mean_motion_fast(k, r0, v0, tofs)
121
122 results = [mean_motion_fast(k, r0, v0, tof) for tof in tofs]
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/propagation.py in mean_motion(k, r0, v0, tof)
33
34 # get the initial mean anomaly
---> 35 M0 = nu_to_M(nu0, ecc)
36 # strong elliptic or strong hyperbolic orbits
37 if np.abs(ecc - 1.0) > 1e-2:
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in nu_to_M(nu, ecc, delta)
183 else:
184 D = nu_to_D(nu)
--> 185 M = D_to_M(D, ecc)
186 return M
187
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in D_to_M(D, ecc)
155 @jit
156 def D_to_M(D, ecc):
--> 157 M = _kepler_equation_parabolic(D, 0.0, ecc)
158 return M
159
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in _kepler_equation_parabolic(D, M, ecc)
26 @jit
27 def _kepler_equation_parabolic(D, M, ecc):
---> 28 return M_parabolic(ecc, D) - M
29
30
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in M_parabolic(ecc, D, tolerance)
41 k = 0
42 while not small_term:
---> 43 term = (ecc - 1.0 / (2.0 * k + 3.0)) * (x ** k)
44 small_term = np.abs(term) < tolerance
45 S += term
FloatingPointError: overflow encountered in double_scalars
|
FloatingPointError
|
def time_to_anomaly(self, value):
"""Returns time required to be in a specific true anomaly.
Parameters
----------
value : ~astropy.units.Quantity
Returns
-------
tof: ~astropy.units.Quantity
Time of flight required.
"""
# Silently wrap anomaly
nu = (value + np.pi * u.rad) % (2 * np.pi * u.rad) - np.pi * u.rad
delta_t = (
delta_t_from_nu_fast(
nu.to_value(u.rad),
self.ecc.value,
self.attractor.k.to_value(u.km**3 / u.s**2),
self.r_p.to_value(u.km),
)
* u.s
)
tof = delta_t - self.t_p
return tof
|
def time_to_anomaly(self, value):
"""Returns time required to be in a specific true anomaly.
Parameters
----------
value : ~astropy.units.Quantity
Returns
-------
tof: ~astropy.units.Quantity
Time of flight required.
"""
# Compute time of flight for correct epoch
M = nu_to_M_fast(self.nu.to_value(u.rad), self.ecc.value) * u.rad
new_M = nu_to_M_fast(value.to_value(u.rad), self.ecc.value) * u.rad
tof = Angle(new_M - M).wrap_at(360 * u.deg) / self.n
return tof
|
https://github.com/poliastro/poliastro/issues/475
|
$ NUMBA_DISABLE_JIT=1 ipython --no-banner
In [1]: import numpy as np
In [2]: np.seterr(all="raise")
Out[2]: {'divide': 'warn', 'over': 'warn', 'under': 'ignore', 'invalid': 'warn'}
In [3]: import numpy as np
...: import math
...: from astropy import units as u
...: from poliastro.bodies import Earth, Moon
...: from poliastro.twobody import Orbit
...:
...: r=[8.e3, 1.e3, 0.]*u.km
...: v=[-0.5, -0.5, 0.]*u.km/u.s
...: orbit1=Orbit.from_vectors(Earth,r,v)
...: orbit2=orbit1.propagate(1.*u.h)
...:
...:
---------------------------------------------------------------------------
FloatingPointError Traceback (most recent call last)
<ipython-input-3-08d7b74965c9> in <module>()
8 v=[-0.5, -0.5, 0.]*u.km/u.s
9 orbit1=Orbit.from_vectors(Earth,r,v)
---> 10 orbit2=orbit1.propagate(1.*u.h)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/orbit.py in propagate(self, value, method, rtol, **kwargs)
403 time_of_flight = time.TimeDelta(value)
404
--> 405 return propagate(self, time_of_flight, method=method, rtol=rtol, **kwargs)
406
407 def sample(self, values=None, method=mean_motion):
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in propagate(orbit, time_of_flight, method, rtol, **kwargs)
177
178 """
--> 179 r, v = method(orbit, time_of_flight.to(u.s).value, rtol=rtol, **kwargs)
180 return orbit.from_vectors(orbit.attractor, r * u.km, v * u.km / u.s, orbit.epoch + time_of_flight, orbit.plane)
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/twobody/propagation.py in mean_motion(orbit, tofs, **kwargs)
118
119 if not hasattr(tofs, '__len__'):
--> 120 return mean_motion_fast(k, r0, v0, tofs)
121
122 results = [mean_motion_fast(k, r0, v0, tof) for tof in tofs]
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/propagation.py in mean_motion(k, r0, v0, tof)
33
34 # get the initial mean anomaly
---> 35 M0 = nu_to_M(nu0, ecc)
36 # strong elliptic or strong hyperbolic orbits
37 if np.abs(ecc - 1.0) > 1e-2:
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in nu_to_M(nu, ecc, delta)
183 else:
184 D = nu_to_D(nu)
--> 185 M = D_to_M(D, ecc)
186 return M
187
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in D_to_M(D, ecc)
155 @jit
156 def D_to_M(D, ecc):
--> 157 M = _kepler_equation_parabolic(D, 0.0, ecc)
158 return M
159
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in _kepler_equation_parabolic(D, M, ecc)
26 @jit
27 def _kepler_equation_parabolic(D, M, ecc):
---> 28 return M_parabolic(ecc, D) - M
29
30
~/.miniconda36/envs/poliastro37/lib/python3.7/site-packages/poliastro/core/angles.py in M_parabolic(ecc, D, tolerance)
41 k = 0
42 while not small_term:
---> 43 term = (ecc - 1.0 / (2.0 * k + 3.0)) * (x ** k)
44 small_term = np.abs(term) < tolerance
45 S += term
FloatingPointError: overflow encountered in double_scalars
|
FloatingPointError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.