after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def iter_native(self, result, no_ack=True, **kwargs):
self._ensure_not_eager()
results = result.results
if not results:
raise StopIteration()
# we tell the result consumer to put consumed results
# into these buckets.
bucket = deque()
for node in results:
if not hasattr(node, "_cache"):
bucket.append(node)
elif node._cache:
bucket.append(node)
else:
self._collect_into(node, bucket)
for _ in self._wait_for_pending(result, no_ack=no_ack, **kwargs):
while bucket:
node = bucket.popleft()
if not hasattr(node, "_cache"):
yield node.id, node.children
else:
yield node.id, node._cache
while bucket:
node = bucket.popleft()
yield node.id, node._cache
|
def iter_native(self, result, no_ack=True, **kwargs):
self._ensure_not_eager()
results = result.results
if not results:
raise StopIteration()
# we tell the result consumer to put consumed results
# into these buckets.
bucket = deque()
for node in results:
if node._cache:
bucket.append(node)
else:
self._collect_into(node, bucket)
for _ in self._wait_for_pending(result, no_ack=no_ack, **kwargs):
while bucket:
node = bucket.popleft()
yield node.id, node._cache
while bucket:
node = bucket.popleft()
yield node.id, node._cache
|
https://github.com/celery/celery/issues/5496
|
Steps to Reproduce
Minimally Reproducible Test Case
from celery import group, chain
from tasks import task as t
# failing sequence
task = group([ t.si(),t.si(), chain( t.si(), group([ t.si(), t.si()]))])
# working sequence
task = group([ t.si(),t.si(), chain( t.si(), group([ t.si(), t.si()]), t.si())])
async_result = task.apply_async()
result = async_result.get()
Expected Behavior
Calling get return group result.
Actual Behavior
All task finish success but calling .get() fail with traceback:
Traceback (most recent call last):
File "/my_app.py", line 111, in add_to_queue
result = async_result.get()
File "/root/.cache/pypoetry/virtualenvs/my_app/lib/python3.7/site-packages/celery/result.py", line 697, in get
on_interval=on_interval,
File "/root/.cache/pypoetry/virtualenvs/my_app/lib/python3.7/site-packages/celery/result.py", line 815, in join_native
on_message, on_interval):
File "/root/.cache/pypoetry/virtualenvs/my_app/lib/python3.7/site-packages/celery/backends/asynchronous.py", line 137, in iter_native
if node._cache:
AttributeError: 'GroupResult' object has no attribute '_cache'
|
AttributeError
|
def join_native(
self,
timeout=None,
propagate=True,
interval=0.5,
callback=None,
no_ack=True,
on_message=None,
on_interval=None,
disable_sync_subtasks=True,
):
"""Backend optimized version of :meth:`join`.
.. versionadded:: 2.2
Note that this does not support collecting the results
for different task types using different backends.
This is currently only supported by the amqp, Redis and cache
result backends.
"""
if disable_sync_subtasks:
assert_will_not_block()
order_index = (
None if callback else {result.id: i for i, result in enumerate(self.results)}
)
acc = None if callback else [None for _ in range(len(self))]
for task_id, meta in self.iter_native(
timeout, interval, no_ack, on_message, on_interval
):
if isinstance(meta, list):
value = []
for children_result in meta:
value.append(children_result.get())
else:
value = meta["result"]
if propagate and meta["status"] in states.PROPAGATE_STATES:
raise value
if callback:
callback(task_id, value)
else:
acc[order_index[task_id]] = value
return acc
|
def join_native(
self,
timeout=None,
propagate=True,
interval=0.5,
callback=None,
no_ack=True,
on_message=None,
on_interval=None,
disable_sync_subtasks=True,
):
"""Backend optimized version of :meth:`join`.
.. versionadded:: 2.2
Note that this does not support collecting the results
for different task types using different backends.
This is currently only supported by the amqp, Redis and cache
result backends.
"""
if disable_sync_subtasks:
assert_will_not_block()
order_index = (
None if callback else {result.id: i for i, result in enumerate(self.results)}
)
acc = None if callback else [None for _ in range(len(self))]
for task_id, meta in self.iter_native(
timeout, interval, no_ack, on_message, on_interval
):
value = meta["result"]
if propagate and meta["status"] in states.PROPAGATE_STATES:
raise value
if callback:
callback(task_id, value)
else:
acc[order_index[task_id]] = value
return acc
|
https://github.com/celery/celery/issues/5496
|
Steps to Reproduce
Minimally Reproducible Test Case
from celery import group, chain
from tasks import task as t
# failing sequence
task = group([ t.si(),t.si(), chain( t.si(), group([ t.si(), t.si()]))])
# working sequence
task = group([ t.si(),t.si(), chain( t.si(), group([ t.si(), t.si()]), t.si())])
async_result = task.apply_async()
result = async_result.get()
Expected Behavior
Calling get return group result.
Actual Behavior
All task finish success but calling .get() fail with traceback:
Traceback (most recent call last):
File "/my_app.py", line 111, in add_to_queue
result = async_result.get()
File "/root/.cache/pypoetry/virtualenvs/my_app/lib/python3.7/site-packages/celery/result.py", line 697, in get
on_interval=on_interval,
File "/root/.cache/pypoetry/virtualenvs/my_app/lib/python3.7/site-packages/celery/result.py", line 815, in join_native
on_message, on_interval):
File "/root/.cache/pypoetry/virtualenvs/my_app/lib/python3.7/site-packages/celery/backends/asynchronous.py", line 137, in iter_native
if node._cache:
AttributeError: 'GroupResult' object has no attribute '_cache'
|
AttributeError
|
def get(
self,
timeout=None,
propagate=True,
interval=0.5,
no_ack=True,
follow_parents=True,
callback=None,
on_message=None,
on_interval=None,
disable_sync_subtasks=True,
EXCEPTION_STATES=states.EXCEPTION_STATES,
PROPAGATE_STATES=states.PROPAGATE_STATES,
):
"""Wait until task is ready, and return its result.
Warning:
Waiting for tasks within a task may lead to deadlocks.
Please read :ref:`task-synchronous-subtasks`.
Warning:
Backends use resources to store and transmit results. To ensure
that resources are released, you must eventually call
:meth:`~@AsyncResult.get` or :meth:`~@AsyncResult.forget` on
EVERY :class:`~@AsyncResult` instance returned after calling
a task.
Arguments:
timeout (float): How long to wait, in seconds, before the
operation times out.
propagate (bool): Re-raise exception if the task failed.
interval (float): Time to wait (in seconds) before retrying to
retrieve the result. Note that this does not have any effect
when using the RPC/redis result store backends, as they don't
use polling.
no_ack (bool): Enable amqp no ack (automatically acknowledge
message). If this is :const:`False` then the message will
**not be acked**.
follow_parents (bool): Re-raise any exception raised by
parent tasks.
disable_sync_subtasks (bool): Disable tasks to wait for sub tasks
this is the default configuration. CAUTION do not enable this
unless you must.
Raises:
celery.exceptions.TimeoutError: if `timeout` isn't
:const:`None` and the result does not arrive within
`timeout` seconds.
Exception: If the remote call raised an exception then that
exception will be re-raised in the caller process.
"""
if self.ignored:
return
if disable_sync_subtasks:
assert_will_not_block()
_on_interval = promise()
if follow_parents and propagate and self.parent:
_on_interval = promise(self._maybe_reraise_parent_error, weak=True)
self._maybe_reraise_parent_error()
if on_interval:
_on_interval.then(on_interval)
if self._cache:
if propagate:
self.maybe_throw(callback=callback)
return self.result
self.backend.add_pending_result(self)
return self.backend.wait_for_pending(
self,
timeout=timeout,
interval=interval,
on_interval=_on_interval,
no_ack=no_ack,
propagate=propagate,
callback=callback,
on_message=on_message,
)
|
def get(
self,
timeout=None,
propagate=True,
interval=0.5,
no_ack=True,
follow_parents=True,
callback=None,
on_message=None,
on_interval=None,
disable_sync_subtasks=True,
EXCEPTION_STATES=states.EXCEPTION_STATES,
PROPAGATE_STATES=states.PROPAGATE_STATES,
):
"""Wait until task is ready, and return its result.
Warning:
Waiting for tasks within a task may lead to deadlocks.
Please read :ref:`task-synchronous-subtasks`.
Warning:
Backends use resources to store and transmit results. To ensure
that resources are released, you must eventually call
:meth:`~@AsyncResult.get` or :meth:`~@AsyncResult.forget` on
EVERY :class:`~@AsyncResult` instance returned after calling
a task.
Arguments:
timeout (float): How long to wait, in seconds, before the
operation times out.
propagate (bool): Re-raise exception if the task failed.
interval (float): Time to wait (in seconds) before retrying to
retrieve the result. Note that this does not have any effect
when using the RPC/redis result store backends, as they don't
use polling.
no_ack (bool): Enable amqp no ack (automatically acknowledge
message). If this is :const:`False` then the message will
**not be acked**.
follow_parents (bool): Re-raise any exception raised by
parent tasks.
disable_sync_subtasks (bool): Disable tasks to wait for sub tasks
this is the default configuration. CAUTION do not enable this
unless you must.
Raises:
celery.exceptions.TimeoutError: if `timeout` isn't
:const:`None` and the result does not arrive within
`timeout` seconds.
Exception: If the remote call raised an exception then that
exception will be re-raised in the caller process.
"""
if self.ignored:
return
if disable_sync_subtasks:
assert_will_not_block()
_on_interval = promise()
if follow_parents and propagate and self.parent:
on_interval = promise(self._maybe_reraise_parent_error, weak=True)
self._maybe_reraise_parent_error()
if on_interval:
_on_interval.then(on_interval)
if self._cache:
if propagate:
self.maybe_throw(callback=callback)
return self.result
self.backend.add_pending_result(self)
return self.backend.wait_for_pending(
self,
timeout=timeout,
interval=interval,
on_interval=_on_interval,
no_ack=no_ack,
propagate=propagate,
callback=callback,
on_message=on_message,
)
|
https://github.com/celery/celery/issues/3810
|
Traceback (most recent call last):
File "test.py", line 15, in <module>
raise ex
celery.backends.base.RuntimeError: BLAH
|
celery.backends.base.RuntimeError
|
def __call__(self, *args, **kwargs):
logger = get_logger(__name__)
handle_sigterm = lambda signum, frame: logger.info(
"SIGTERM received, waiting till the task finished"
)
signal.signal(signal.SIGTERM, handle_sigterm)
_task_stack.push(self)
self.push_request(args=args, kwargs=kwargs)
try:
return self.run(*args, **kwargs)
finally:
self.pop_request()
_task_stack.pop()
|
def __call__(self, *args, **kwargs):
_task_stack.push(self)
self.push_request(args=args, kwargs=kwargs)
try:
return self.run(*args, **kwargs)
finally:
self.pop_request()
_task_stack.pop()
|
https://github.com/celery/celery/issues/2700
|
Traceback (most recent call last):
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/billiard/pool.py", line 1171, in mark_as_worker_lost
human_status(exitcode)),
WorkerLostError: Worker exited prematurely: signal 15 (SIGTERM).
|
WorkerLostError
|
def register_with_event_loop(self, hub):
"""Register the async pool with the current event loop."""
self._result_handler.register_with_event_loop(hub)
self.handle_result_event = self._result_handler.handle_event
self._create_timelimit_handlers(hub)
self._create_process_handlers(hub)
self._create_write_handlers(hub)
# Add handler for when a process exits (calls maintain_pool)
[self._track_child_process(w, hub) for w in self._pool]
# Handle_result_event is called whenever one of the
# result queues are readable.
stale_fds = []
for fd in self._fileno_to_outq:
try:
hub.add_reader(fd, self.handle_result_event, fd)
except OSError:
logger.info(
"Encountered OSError while trying to access fd %s ", fd, exc_info=True
)
stale_fds.append(fd) # take note of stale fd
for fd in stale_fds: # Remove now defunct file descriptors
self._fileno_to_outq.pop(fd, None)
# Timers include calling maintain_pool at a regular interval
# to be certain processes are restarted.
for handler, interval in items(self.timers):
hub.call_repeatedly(interval, handler)
hub.on_tick.add(self.on_poll_start)
|
def register_with_event_loop(self, hub):
"""Register the async pool with the current event loop."""
self._result_handler.register_with_event_loop(hub)
self.handle_result_event = self._result_handler.handle_event
self._create_timelimit_handlers(hub)
self._create_process_handlers(hub)
self._create_write_handlers(hub)
# Add handler for when a process exits (calls maintain_pool)
[self._track_child_process(w, hub) for w in self._pool]
# Handle_result_event is called whenever one of the
# result queues are readable.
[hub.add_reader(fd, self.handle_result_event, fd) for fd in self._fileno_to_outq]
# Timers include calling maintain_pool at a regular interval
# to be certain processes are restarted.
for handler, interval in items(self.timers):
hub.call_repeatedly(interval, handler)
hub.on_tick.add(self.on_poll_start)
|
https://github.com/celery/celery/issues/4457
|
[user] celery.worker.consumer.consumer WARNING 2017-12-18 00:38:27,078 consumer:
Connection to broker lost. Trying to re-establish the connection...
Traceback (most recent call last):
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 320, in start
blueprint.start(self)
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 596, in start
c.loop(*c.loop_args())
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/worker/loops.py", line 47, in asynloop
obj.controller.register_with_event_loop(hub)
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/worker/worker.py", line 217, in register_with_event_loop
description='hub.register',
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/bootsteps.py", line 151, in send_all
fun(parent, *args)
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/worker/components.py", line 178, in register_with_event_loop
w.pool.register_with_event_loop(hub)
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/concurrency/prefork.py", line 134, in register_with_event_loop
return reg(loop)
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/concurrency/asynpool.py", line 476, in register_with_event_loop
for fd in self._fileno_to_outq]
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/concurrency/asynpool.py", line 476, in <listcomp>
for fd in self._fileno_to_outq]
File "/home/user/.envs/user/lib/python3.6/site-packages/kombu/async/hub.py", line 207, in add_reader
return self.add(fds, callback, READ | ERR, args)
File "/home/user/.envs/user/lib/python3.6/site-packages/kombu/async/hub.py", line 158, in add
self.poller.register(fd, flags)
File "/home/user/.envs/user/lib/python3.6/site-packages/kombu/utils/eventio.py", line 67, in register
self._epoll.register(fd, events)
OSError: [Errno 9] Bad file descriptor
|
OSError
|
def on_process_alive(self, pid):
"""Called when receiving the :const:`WORKER_UP` message.
Marks the process as ready to receive work.
"""
try:
proc = next(w for w in self._pool if w.pid == pid)
except StopIteration:
return logger.warning("process with pid=%s already exited", pid)
assert proc.inqW_fd not in self._fileno_to_inq
assert proc.inqW_fd not in self._all_inqueues
self._waiting_to_start.discard(proc)
self._fileno_to_inq[proc.inqW_fd] = proc
self._fileno_to_synq[proc.synqW_fd] = proc
self._all_inqueues.add(proc.inqW_fd)
|
def on_process_alive(self, pid):
"""Called when reciving the :const:`WORKER_UP` message.
Marks the process as ready to receive work.
"""
try:
proc = next(w for w in self._pool if w.pid == pid)
except StopIteration:
return logger.warning("process with pid=%s already exited", pid)
assert proc.inqW_fd not in self._fileno_to_inq
assert proc.inqW_fd not in self._all_inqueues
self._waiting_to_start.discard(proc)
self._fileno_to_inq[proc.inqW_fd] = proc
self._fileno_to_synq[proc.synqW_fd] = proc
self._all_inqueues.add(proc.inqW_fd)
|
https://github.com/celery/celery/issues/4457
|
[user] celery.worker.consumer.consumer WARNING 2017-12-18 00:38:27,078 consumer:
Connection to broker lost. Trying to re-establish the connection...
Traceback (most recent call last):
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 320, in start
blueprint.start(self)
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 596, in start
c.loop(*c.loop_args())
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/worker/loops.py", line 47, in asynloop
obj.controller.register_with_event_loop(hub)
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/worker/worker.py", line 217, in register_with_event_loop
description='hub.register',
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/bootsteps.py", line 151, in send_all
fun(parent, *args)
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/worker/components.py", line 178, in register_with_event_loop
w.pool.register_with_event_loop(hub)
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/concurrency/prefork.py", line 134, in register_with_event_loop
return reg(loop)
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/concurrency/asynpool.py", line 476, in register_with_event_loop
for fd in self._fileno_to_outq]
File "/home/user/.envs/user/lib/python3.6/site-packages/celery/concurrency/asynpool.py", line 476, in <listcomp>
for fd in self._fileno_to_outq]
File "/home/user/.envs/user/lib/python3.6/site-packages/kombu/async/hub.py", line 207, in add_reader
return self.add(fds, callback, READ | ERR, args)
File "/home/user/.envs/user/lib/python3.6/site-packages/kombu/async/hub.py", line 158, in add
self.poller.register(fd, flags)
File "/home/user/.envs/user/lib/python3.6/site-packages/kombu/utils/eventio.py", line 67, in register
self._epoll.register(fd, events)
OSError: [Errno 9] Bad file descriptor
|
OSError
|
def update_state(self, task_id=None, state=None, meta=None, **kwargs):
"""Update task state.
Arguments:
task_id (str): Id of the task to update.
Defaults to the id of the current task.
state (str): New state.
meta (Dict): State meta-data.
"""
if task_id is None:
task_id = self.request.id
self.backend.store_result(task_id, meta, state, request=self.request, **kwargs)
|
def update_state(self, task_id=None, state=None, meta=None, **kwargs):
"""Update task state.
Arguments:
task_id (str): Id of the task to update.
Defaults to the id of the current task.
state (str): New state.
meta (Dict): State meta-data.
"""
if task_id is None:
task_id = self.request.id
self.backend.store_result(task_id, meta, state, **kwargs)
|
https://github.com/celery/celery/issues/5470
|
Traceback (most recent call last):
worker_1_e608e69813d9 | File "/usr/local/lib/python2.7/dist-packages/celery/app/trace.py", line 385, in trace_task
worker_1_e608e69813d9 | R = retval = fun(*args, **kwargs)
worker_1_e608e69813d9 | File "/data/alaya/api/common/celery_app.py", line 113, in __call__
worker_1_e608e69813d9 | return super(RequestContextTask, self).__call__(*args, **kwargs)
worker_1_e608e69813d9 | File "/usr/local/lib/python2.7/dist-packages/celery/app/trace.py", line 648, in __protected_call__
worker_1_e608e69813d9 | return self.run(*args, **kwargs)
worker_1_e608e69813d9 | File "/data/alaya/api/scheduleoptimization/alns/core/alns.py", line 70, in do_something
worker_1_e608e69813d9 | "some": "metadata",
worker_1_e608e69813d9 | File "/data/alaya/api/common/celery_app.py", line 140, in update_state
worker_1_e608e69813d9 | super(RequestContextTask, self).update_state(request=request, **args)
worker_1_e608e69813d9 | File "/usr/local/lib/python2.7/dist-packages/celery/app/task.py", line 930, in update_state
worker_1_e608e69813d9 | self.backend.store_result(task_id, meta, state, **kwargs)
worker_1_e608e69813d9 | File "/data/alaya/api/common/multi_tenant_backend.py", line 51, in store_result
worker_1_e608e69813d9 | tenant = request.get('_tenant')
worker_1_e608e69813d9 | AttributeError: 'NoneType' object has no attribute 'get'
|
AttributeError
|
def _call_task_errbacks(self, request, exc, traceback):
old_signature = []
for errback in request.errbacks:
errback = self.app.signature(errback)
if not errback._app:
# Ensure all signatures have an application
errback._app = self.app
try:
if (
# Celery tasks type created with the @task decorator have
# the __header__ property, but Celery task created from
# Task class do not have this property.
# That's why we have to check if this property exists
# before checking is it partial function.
hasattr(errback.type, "__header__")
and
# workaround to support tasks with bind=True executed as
# link errors. Otherwise retries can't be used
not isinstance(errback.type.__header__, partial)
and arity_greater(errback.type.__header__, 1)
):
errback(request, exc, traceback)
else:
old_signature.append(errback)
except NotRegistered:
# Task may not be present in this worker.
# We simply send it forward for another worker to consume.
# If the task is not registered there, the worker will raise
# NotRegistered.
old_signature.append(errback)
if old_signature:
# Previously errback was called as a task so we still
# need to do so if the errback only takes a single task_id arg.
task_id = request.id
root_id = request.root_id or task_id
group(old_signature, app=self.app).apply_async(
(task_id,), parent_id=task_id, root_id=root_id
)
|
def _call_task_errbacks(self, request, exc, traceback):
old_signature = []
for errback in request.errbacks:
errback = self.app.signature(errback)
if (
# Celery tasks type created with the @task decorator have
# the __header__ property, but Celery task created from
# Task class do not have this property.
# That's why we have to check if this property exists
# before checking is it partial function.
hasattr(errback.type, "__header__")
and
# workaround to support tasks with bind=True executed as
# link errors. Otherwise retries can't be used
not isinstance(errback.type.__header__, partial)
and arity_greater(errback.type.__header__, 1)
):
errback(request, exc, traceback)
else:
old_signature.append(errback)
if old_signature:
# Previously errback was called as a task so we still
# need to do so if the errback only takes a single task_id arg.
task_id = request.id
root_id = request.root_id or task_id
group(old_signature, app=self.app).apply_async(
(task_id,), parent_id=task_id, root_id=root_id
)
|
https://github.com/celery/celery/issues/4022
|
# TaskProducer:
from celery import Signature
Signature(
'export.hello', args=['homer'],
link_error=Signature('msg.err', queue='msg')
).apply_async()
# ExportWorker:
[2017-05-09 00:14:53,458: INFO/MainProcess] Received task: export.hello[ad4ef3ea-06e8-4980-8d9c-91ae68c2305a]
[2017-05-09 00:14:53,506: INFO/PoolWorker-1] Resetting dropped connection: us-west-2.queue.amazonaws.com
[2017-05-09 00:14:53,517: INFO/PoolWorker-1] Starting new HTTPS connection (9): us-west-2.queue.amazonaws.com
[2017-05-09 00:14:53,918: WARNING/PoolWorker-1] /vagrant/test_worker/env/lib/python3.5/site-packages/celery/app/trace.py:542: RuntimeWarning: Exception raised outside body: Task of kind 'msg.err' never registered, please make sure it's imported.:
Traceback (most recent call last):
File "/vagrant/test_worker/env/lib/python3.5/site-packages/celery/app/trace.py", line 367, in trace_task
R = retval = fun(*args, **kwargs)
File "/vagrant/test_worker/env/lib/python3.5/site-packages/celery/app/trace.py", line 622, in __protected_call__
return self.run(*args, **kwargs)
File "/vagrant/test_worker/app/tasks.py", line 9, in hello
raise Exception("NO HOMERS ALLOWED!")
Exception: NO HOMERS ALLOWED!
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/vagrant/test_worker/env/src/kombu/kombu/utils/objects.py", line 42, in __get__
return obj.__dict__[self.__name__]
KeyError: 'type'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/vagrant/test_worker/env/lib/python3.5/site-packages/celery/app/trace.py", line 381, in trace_task
I, R, state, retval = on_error(task_request, exc, uuid)
File "/vagrant/test_worker/env/lib/python3.5/site-packages/celery/app/trace.py", line 323, in on_error
task, request, eager=eager, call_errbacks=call_errbacks,
File "/vagrant/test_worker/env/lib/python3.5/site-packages/celery/app/trace.py", line 157, in handle_error_state
call_errbacks=call_errbacks)
File "/vagrant/test_worker/env/lib/python3.5/site-packages/celery/app/trace.py", line 202, in handle_failure
call_errbacks=call_errbacks,
File "/vagrant/test_worker/env/lib/python3.5/site-packages/celery/backends/base.py", line 168, in mark_as_failure
self._call_task_errbacks(request, exc, traceback)
File "/vagrant/test_worker/env/lib/python3.5/site-packages/celery/backends/base.py", line 174, in _call_task_errbacks
if arity_greater(errback.type.__header__, 1):
File "/vagrant/test_worker/env/src/kombu/kombu/utils/objects.py", line 44, in __get__
value = obj.__dict__[self.__name__] = self.__get(obj)
File "/vagrant/test_worker/env/lib/python3.5/site-packages/celery/canvas.py", line 490, in type
return self._type or self.app.tasks[self['task']]
File "/vagrant/test_worker/env/lib/python3.5/site-packages/celery/app/registry.py", line 19, in __missing__
raise self.NotRegistered(key)
celery.exceptions.NotRegistered: 'msg.err'
exc, exc_info.traceback)))
[2017-05-09 00:14:53,996: INFO/MainProcess] Resetting dropped connection: us-west-2.queue.amazonaws.com
[2017-05-09 00:14:53,999: INFO/MainProcess] Starting new HTTPS connection (3): us-west-2.queue.amazonaws.com
[2017-05-09 00:14:54,237: ERROR/MainProcess] Pool callback raised exception: Task of kind 'msg.err' never registered, please make sure it's imported.
Traceback (most recent call last):
File "/vagrant/test_worker/env/src/kombu/kombu/utils/objects.py", line 42, in __get__
return obj.__dict__[self.__name__]
KeyError: 'type'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/vagrant/test_worker/env/lib/python3.5/site-packages/billiard/pool.py", line 1748, in safe_apply_callback
fun(*args, **kwargs)
File "/vagrant/test_worker/env/lib/python3.5/site-packages/celery/worker/request.py", line 366, in on_failure
self.id, exc, request=self, store_result=self.store_errors,
File "/vagrant/test_worker/env/lib/python3.5/site-packages/celery/backends/base.py", line 168, in mark_as_failure
self._call_task_errbacks(request, exc, traceback)
File "/vagrant/test_worker/env/lib/python3.5/site-packages/celery/backends/base.py", line 174, in _call_task_errbacks
if arity_greater(errback.type.__header__, 1):
File "/vagrant/test_worker/env/src/kombu/kombu/utils/objects.py", line 44, in __get__
value = obj.__dict__[self.__name__] = self.__get(obj)
File "/vagrant/test_worker/env/lib/python3.5/site-packages/celery/canvas.py", line 490, in type
return self._type or self.app.tasks[self['task']]
File "/vagrant/test_worker/env/lib/python3.5/site-packages/celery/app/registry.py", line 19, in __missing__
raise self.NotRegistered(key)
celery.exceptions.NotRegistered: 'msg.err'
# (MessageWorker has no output)
|
KeyError
|
def link_error(self, sig):
try:
sig = sig.clone().set(immutable=True)
except AttributeError:
# See issue #5265. I don't use isinstance because current tests
# pass a Mock object as argument.
sig["immutable"] = True
sig = Signature.from_dict(sig)
return self.tasks[0].link_error(sig)
|
def link_error(self, sig):
sig = sig.clone().set(immutable=True)
return self.tasks[0].link_error(sig)
|
https://github.com/celery/celery/issues/5265
|
Traceback (most recent call last):
File "eggs/celery-4.2.1-py2.7.egg/celery/app/trace.py", line 439, in trace_task
parent_id=uuid, root_id=root_id,
File "eggs/celery-4.2.1-py2.7.egg/celery/canvas.py", line 1232, in apply_async
return self.run(tasks, body, args, task_id=task_id, **options)
File "eggs/celery-4.2.1-py2.7.egg/celery/canvas.py", line 1277, in run
header_result = header(*partial_args, task_id=group_id, **options)
File "eggs/celery-4.2.1-py2.7.egg/celery/canvas.py", line 953, in __call__
return self.apply_async(partial_args, **options)
File "eggs/celery-4.2.1-py2.7.egg/celery/canvas.py", line 978, in apply_async
args=args, kwargs=kwargs, **options))
File "eggs/celery-4.2.1-py2.7.egg/celery/canvas.py", line 1054, in _apply_tasks
**options)
File "eggs/celery-4.2.1-py2.7.egg/celery/canvas.py", line 557, in apply_async
dict(self.options, **options) if options else self.options))
File "eggs/celery-4.2.1-py2.7.egg/celery/canvas.py", line 573, in run
task_id, group_id, chord,
File "eggs/celery-4.2.1-py2.7.egg/celery/canvas.py", line 683, in prepare_steps
task.link_error(errback)
File "eggs/celery-4.2.1-py2.7.egg/celery/canvas.py", line 1016, in link_error
sig = sig.clone().set(immutable=True)
AttributeError: 'dict' object has no attribute 'clone'
|
AttributeError
|
def setup_security(
self,
allowed_serializers=None,
key=None,
cert=None,
store=None,
digest=DEFAULT_SECURITY_DIGEST,
serializer="json",
):
"""Setup the message-signing serializer.
This will affect all application instances (a global operation).
Disables untrusted serializers and if configured to use the ``auth``
serializer will register the ``auth`` serializer with the provided
settings into the Kombu serializer registry.
Arguments:
allowed_serializers (Set[str]): List of serializer names, or
content_types that should be exempt from being disabled.
key (str): Name of private key file to use.
Defaults to the :setting:`security_key` setting.
cert (str): Name of certificate file to use.
Defaults to the :setting:`security_certificate` setting.
store (str): Directory containing certificates.
Defaults to the :setting:`security_cert_store` setting.
digest (str): Digest algorithm used when signing messages.
Default is ``sha256``.
serializer (str): Serializer used to encode messages after
they've been signed. See :setting:`task_serializer` for
the serializers supported. Default is ``json``.
"""
from celery.security import setup_security
return setup_security(
allowed_serializers, key, cert, store, digest, serializer, app=self
)
|
def setup_security(
self,
allowed_serializers=None,
key=None,
cert=None,
store=None,
digest="sha1",
serializer="json",
):
"""Setup the message-signing serializer.
This will affect all application instances (a global operation).
Disables untrusted serializers and if configured to use the ``auth``
serializer will register the ``auth`` serializer with the provided
settings into the Kombu serializer registry.
Arguments:
allowed_serializers (Set[str]): List of serializer names, or
content_types that should be exempt from being disabled.
key (str): Name of private key file to use.
Defaults to the :setting:`security_key` setting.
cert (str): Name of certificate file to use.
Defaults to the :setting:`security_certificate` setting.
store (str): Directory containing certificates.
Defaults to the :setting:`security_cert_store` setting.
digest (str): Digest algorithm used when signing messages.
Default is ``sha1``.
serializer (str): Serializer used to encode messages after
they've been signed. See :setting:`task_serializer` for
the serializers supported. Default is ``json``.
"""
from celery.security import setup_security
return setup_security(
allowed_serializers, key, cert, store, digest, serializer, app=self
)
|
https://github.com/celery/celery/issues/5056
|
[2018-09-10 02:33:07,446: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize disabled content of type json (application/json)',)
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 317, in start
blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 593, in start
c.loop(*c.loop_args())
File "/usr/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/usr/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 667, in _receive
ret.append(self._receive_one(c))
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 697, in _receive_one
message, self._fanout_to_queue[exchange])
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/usr/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/gossip.py", line 201, in on_message
_, event = prepare(message.payload)
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/usr/lib/python3.6/site-packages/kombu/serialization.py", line 256, in loads
raise self._for_untrusted_content(content_type, 'disabled')
kombu.exceptions.ContentDisallowed: Refusing to deserialize disabled content of type json (application/json)
|
kombu.exceptions.ContentDisallowed
|
def setup_security(
allowed_serializers=None,
key=None,
cert=None,
store=None,
digest=None,
serializer="json",
app=None,
):
"""See :meth:`@Celery.setup_security`."""
if app is None:
from celery import current_app
app = current_app._get_current_object()
_disable_insecure_serializers(allowed_serializers)
# check conf for sane security settings
conf = app.conf
if conf.task_serializer != "auth" or conf.accept_content != ["auth"]:
raise ImproperlyConfigured(SETTING_MISSING)
key = key or conf.security_key
cert = cert or conf.security_certificate
store = store or conf.security_cert_store
digest = digest or conf.security_digest
if not (key and cert and store):
raise ImproperlyConfigured(SECURITY_SETTING_MISSING)
with open(key, "r") as kf:
with open(cert, "r") as cf:
register_auth(kf.read(), cf.read(), store, digest, serializer)
registry._set_default_serializer("auth")
|
def setup_security(
allowed_serializers=None,
key=None,
cert=None,
store=None,
digest="sha1",
serializer="json",
app=None,
):
"""See :meth:`@Celery.setup_security`."""
if app is None:
from celery import current_app
app = current_app._get_current_object()
_disable_insecure_serializers(allowed_serializers)
conf = app.conf
if conf.task_serializer != "auth":
return
try:
from OpenSSL import crypto # noqa
except ImportError:
raise ImproperlyConfigured(SSL_NOT_INSTALLED)
key = key or conf.security_key
cert = cert or conf.security_certificate
store = store or conf.security_cert_store
if not (key and cert and store):
raise ImproperlyConfigured(SETTING_MISSING)
with open(key) as kf:
with open(cert) as cf:
register_auth(kf.read(), cf.read(), store, digest, serializer)
registry._set_default_serializer("auth")
|
https://github.com/celery/celery/issues/5056
|
[2018-09-10 02:33:07,446: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize disabled content of type json (application/json)',)
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 317, in start
blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 593, in start
c.loop(*c.loop_args())
File "/usr/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/usr/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 667, in _receive
ret.append(self._receive_one(c))
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 697, in _receive_one
message, self._fanout_to_queue[exchange])
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/usr/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/gossip.py", line 201, in on_message
_, event = prepare(message.payload)
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/usr/lib/python3.6/site-packages/kombu/serialization.py", line 256, in loads
raise self._for_untrusted_content(content_type, 'disabled')
kombu.exceptions.ContentDisallowed: Refusing to deserialize disabled content of type json (application/json)
|
kombu.exceptions.ContentDisallowed
|
def __init__(self, cert):
with reraise_errors("Invalid certificate: {0!r}", errors=(ValueError,)):
self._cert = load_pem_x509_certificate(
ensure_bytes(cert), backend=default_backend()
)
|
def __init__(self, cert):
assert crypto is not None
with reraise_errors("Invalid certificate: {0!r}"):
self._cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
|
https://github.com/celery/celery/issues/5056
|
[2018-09-10 02:33:07,446: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize disabled content of type json (application/json)',)
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 317, in start
blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 593, in start
c.loop(*c.loop_args())
File "/usr/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/usr/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 667, in _receive
ret.append(self._receive_one(c))
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 697, in _receive_one
message, self._fanout_to_queue[exchange])
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/usr/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/gossip.py", line 201, in on_message
_, event = prepare(message.payload)
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/usr/lib/python3.6/site-packages/kombu/serialization.py", line 256, in loads
raise self._for_untrusted_content(content_type, 'disabled')
kombu.exceptions.ContentDisallowed: Refusing to deserialize disabled content of type json (application/json)
|
kombu.exceptions.ContentDisallowed
|
def has_expired(self):
"""Check if the certificate has expired."""
return datetime.datetime.now() > self._cert.not_valid_after
|
def has_expired(self):
"""Check if the certificate has expired."""
return self._cert.has_expired()
|
https://github.com/celery/celery/issues/5056
|
[2018-09-10 02:33:07,446: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize disabled content of type json (application/json)',)
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 317, in start
blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 593, in start
c.loop(*c.loop_args())
File "/usr/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/usr/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 667, in _receive
ret.append(self._receive_one(c))
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 697, in _receive_one
message, self._fanout_to_queue[exchange])
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/usr/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/gossip.py", line 201, in on_message
_, event = prepare(message.payload)
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/usr/lib/python3.6/site-packages/kombu/serialization.py", line 256, in loads
raise self._for_untrusted_content(content_type, 'disabled')
kombu.exceptions.ContentDisallowed: Refusing to deserialize disabled content of type json (application/json)
|
kombu.exceptions.ContentDisallowed
|
def get_serial_number(self):
"""Return the serial number in the certificate."""
return self._cert.serial_number
|
def get_serial_number(self):
"""Return the serial number in the certificate."""
return bytes_to_str(self._cert.get_serial_number())
|
https://github.com/celery/celery/issues/5056
|
[2018-09-10 02:33:07,446: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize disabled content of type json (application/json)',)
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 317, in start
blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 593, in start
c.loop(*c.loop_args())
File "/usr/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/usr/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 667, in _receive
ret.append(self._receive_one(c))
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 697, in _receive_one
message, self._fanout_to_queue[exchange])
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/usr/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/gossip.py", line 201, in on_message
_, event = prepare(message.payload)
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/usr/lib/python3.6/site-packages/kombu/serialization.py", line 256, in loads
raise self._for_untrusted_content(content_type, 'disabled')
kombu.exceptions.ContentDisallowed: Refusing to deserialize disabled content of type json (application/json)
|
kombu.exceptions.ContentDisallowed
|
def get_issuer(self):
"""Return issuer (CA) as a string."""
return " ".join(x.value for x in self._cert.issuer)
|
def get_issuer(self):
"""Return issuer (CA) as a string."""
return " ".join(
bytes_to_str(x[1]) for x in self._cert.get_issuer().get_components()
)
|
https://github.com/celery/celery/issues/5056
|
[2018-09-10 02:33:07,446: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize disabled content of type json (application/json)',)
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 317, in start
blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 593, in start
c.loop(*c.loop_args())
File "/usr/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/usr/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 667, in _receive
ret.append(self._receive_one(c))
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 697, in _receive_one
message, self._fanout_to_queue[exchange])
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/usr/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/gossip.py", line 201, in on_message
_, event = prepare(message.payload)
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/usr/lib/python3.6/site-packages/kombu/serialization.py", line 256, in loads
raise self._for_untrusted_content(content_type, 'disabled')
kombu.exceptions.ContentDisallowed: Refusing to deserialize disabled content of type json (application/json)
|
kombu.exceptions.ContentDisallowed
|
def verify(self, data, signature, digest):
"""Verify signature for string containing data."""
with reraise_errors("Bad signature: {0!r}"):
padd = padding.PSS(mgf=padding.MGF1(digest), salt_length=padding.PSS.MAX_LENGTH)
self.get_pubkey().verify(signature, ensure_bytes(data), padd, digest)
|
def verify(self, data, signature, digest):
"""Verify signature for string containing data."""
with reraise_errors("Bad signature: {0!r}"):
crypto.verify(self._cert, signature, data, digest)
|
https://github.com/celery/celery/issues/5056
|
[2018-09-10 02:33:07,446: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize disabled content of type json (application/json)',)
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 317, in start
blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 593, in start
c.loop(*c.loop_args())
File "/usr/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/usr/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 667, in _receive
ret.append(self._receive_one(c))
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 697, in _receive_one
message, self._fanout_to_queue[exchange])
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/usr/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/gossip.py", line 201, in on_message
_, event = prepare(message.payload)
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/usr/lib/python3.6/site-packages/kombu/serialization.py", line 256, in loads
raise self._for_untrusted_content(content_type, 'disabled')
kombu.exceptions.ContentDisallowed: Refusing to deserialize disabled content of type json (application/json)
|
kombu.exceptions.ContentDisallowed
|
def __init__(self, key, password=None):
with reraise_errors("Invalid private key: {0!r}", errors=(ValueError,)):
self._key = serialization.load_pem_private_key(
ensure_bytes(key), password=password, backend=default_backend()
)
|
def __init__(self, key):
with reraise_errors("Invalid private key: {0!r}"):
self._key = crypto.load_privatekey(crypto.FILETYPE_PEM, key)
|
https://github.com/celery/celery/issues/5056
|
[2018-09-10 02:33:07,446: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize disabled content of type json (application/json)',)
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 317, in start
blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 593, in start
c.loop(*c.loop_args())
File "/usr/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/usr/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 667, in _receive
ret.append(self._receive_one(c))
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 697, in _receive_one
message, self._fanout_to_queue[exchange])
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/usr/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/gossip.py", line 201, in on_message
_, event = prepare(message.payload)
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/usr/lib/python3.6/site-packages/kombu/serialization.py", line 256, in loads
raise self._for_untrusted_content(content_type, 'disabled')
kombu.exceptions.ContentDisallowed: Refusing to deserialize disabled content of type json (application/json)
|
kombu.exceptions.ContentDisallowed
|
def sign(self, data, digest):
"""Sign string containing data."""
with reraise_errors("Unable to sign data: {0!r}"):
padd = padding.PSS(mgf=padding.MGF1(digest), salt_length=padding.PSS.MAX_LENGTH)
return self._key.sign(ensure_bytes(data), padd, digest)
|
def sign(self, data, digest):
"""Sign string containing data."""
with reraise_errors("Unable to sign data: {0!r}"):
return crypto.sign(self._key, ensure_bytes(data), digest)
|
https://github.com/celery/celery/issues/5056
|
[2018-09-10 02:33:07,446: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize disabled content of type json (application/json)',)
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 317, in start
blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 593, in start
c.loop(*c.loop_args())
File "/usr/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/usr/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 667, in _receive
ret.append(self._receive_one(c))
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 697, in _receive_one
message, self._fanout_to_queue[exchange])
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/usr/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/gossip.py", line 201, in on_message
_, event = prepare(message.payload)
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/usr/lib/python3.6/site-packages/kombu/serialization.py", line 256, in loads
raise self._for_untrusted_content(content_type, 'disabled')
kombu.exceptions.ContentDisallowed: Refusing to deserialize disabled content of type json (application/json)
|
kombu.exceptions.ContentDisallowed
|
def __init__(
self,
key=None,
cert=None,
cert_store=None,
digest=DEFAULT_SECURITY_DIGEST,
serializer="json",
):
self._key = key
self._cert = cert
self._cert_store = cert_store
self._digest = get_digest_algorithm(digest)
self._serializer = serializer
|
def __init__(
self, key=None, cert=None, cert_store=None, digest="sha1", serializer="json"
):
self._key = key
self._cert = cert
self._cert_store = cert_store
self._digest = bytes_if_py2(digest)
self._serializer = serializer
|
https://github.com/celery/celery/issues/5056
|
[2018-09-10 02:33:07,446: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize disabled content of type json (application/json)',)
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 317, in start
blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 593, in start
c.loop(*c.loop_args())
File "/usr/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/usr/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 667, in _receive
ret.append(self._receive_one(c))
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 697, in _receive_one
message, self._fanout_to_queue[exchange])
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/usr/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/gossip.py", line 201, in on_message
_, event = prepare(message.payload)
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/usr/lib/python3.6/site-packages/kombu/serialization.py", line 256, in loads
raise self._for_untrusted_content(content_type, 'disabled')
kombu.exceptions.ContentDisallowed: Refusing to deserialize disabled content of type json (application/json)
|
kombu.exceptions.ContentDisallowed
|
def _unpack(self, payload, sep=str_to_bytes("\x00\x01")):
raw_payload = b64decode(ensure_bytes(payload))
first_sep = raw_payload.find(sep)
signer = raw_payload[:first_sep]
signer_cert = self._cert_store[signer]
# shift 3 bits right to get signature length
# 2048bit rsa key has a signature length of 256
# 4096bit rsa key has a signature length of 512
sig_len = signer_cert.get_pubkey().key_size >> 3
sep_len = len(sep)
signature_start_position = first_sep + sep_len
signature_end_position = signature_start_position + sig_len
signature = raw_payload[signature_start_position:signature_end_position]
v = raw_payload[signature_end_position + sep_len :].split(sep)
return {
"signer": signer,
"signature": signature,
"content_type": bytes_to_str(v[0]),
"content_encoding": bytes_to_str(v[1]),
"body": bytes_to_str(v[2]),
}
|
def _unpack(self, payload, sep=str_to_bytes("\x00\x01")):
raw_payload = b64decode(ensure_bytes(payload))
first_sep = raw_payload.find(sep)
signer = raw_payload[:first_sep]
signer_cert = self._cert_store[signer]
sig_len = signer_cert._cert.get_pubkey().bits() >> 3
signature = raw_payload[first_sep + len(sep) : first_sep + len(sep) + sig_len]
end_of_sig = first_sep + len(sep) + sig_len + len(sep)
v = raw_payload[end_of_sig:].split(sep)
return {
"signer": signer,
"signature": signature,
"content_type": bytes_to_str(v[0]),
"content_encoding": bytes_to_str(v[1]),
"body": bytes_to_str(v[2]),
}
|
https://github.com/celery/celery/issues/5056
|
[2018-09-10 02:33:07,446: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize disabled content of type json (application/json)',)
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 317, in start
blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 593, in start
c.loop(*c.loop_args())
File "/usr/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/usr/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 667, in _receive
ret.append(self._receive_one(c))
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 697, in _receive_one
message, self._fanout_to_queue[exchange])
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/usr/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/gossip.py", line 201, in on_message
_, event = prepare(message.payload)
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/usr/lib/python3.6/site-packages/kombu/serialization.py", line 256, in loads
raise self._for_untrusted_content(content_type, 'disabled')
kombu.exceptions.ContentDisallowed: Refusing to deserialize disabled content of type json (application/json)
|
kombu.exceptions.ContentDisallowed
|
def register_auth(
key=None, cert=None, store=None, digest=DEFAULT_SECURITY_DIGEST, serializer="json"
):
"""Register security serializer."""
s = SecureSerializer(
key and PrivateKey(key),
cert and Certificate(cert),
store and FSCertStore(store),
digest,
serializer=serializer,
)
registry.register(
"auth",
s.serialize,
s.deserialize,
content_type="application/data",
content_encoding="utf-8",
)
|
def register_auth(key=None, cert=None, store=None, digest="sha1", serializer="json"):
"""Register security serializer."""
s = SecureSerializer(
key and PrivateKey(key),
cert and Certificate(cert),
store and FSCertStore(store),
digest=digest,
serializer=serializer,
)
registry.register(
"auth",
s.serialize,
s.deserialize,
content_type="application/data",
content_encoding="utf-8",
)
|
https://github.com/celery/celery/issues/5056
|
[2018-09-10 02:33:07,446: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize disabled content of type json (application/json)',)
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 317, in start
blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 593, in start
c.loop(*c.loop_args())
File "/usr/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/usr/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 667, in _receive
ret.append(self._receive_one(c))
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 697, in _receive_one
message, self._fanout_to_queue[exchange])
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/usr/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/gossip.py", line 201, in on_message
_, event = prepare(message.payload)
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/usr/lib/python3.6/site-packages/kombu/serialization.py", line 256, in loads
raise self._for_untrusted_content(content_type, 'disabled')
kombu.exceptions.ContentDisallowed: Refusing to deserialize disabled content of type json (application/json)
|
kombu.exceptions.ContentDisallowed
|
def reraise_errors(msg="{0!r}", errors=None):
"""Context reraising crypto errors as :exc:`SecurityError`."""
errors = (cryptography.exceptions,) if errors is None else errors
try:
yield
except errors as exc:
reraise(SecurityError, SecurityError(msg.format(exc)), sys.exc_info()[2])
|
def reraise_errors(msg="{0!r}", errors=None):
"""Context reraising crypto errors as :exc:`SecurityError`."""
assert crypto is not None
errors = (crypto.Error,) if errors is None else errors
try:
yield
except errors as exc:
reraise(SecurityError, SecurityError(msg.format(exc)), sys.exc_info()[2])
|
https://github.com/celery/celery/issues/5056
|
[2018-09-10 02:33:07,446: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize disabled content of type json (application/json)',)
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 317, in start
blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 593, in start
c.loop(*c.loop_args())
File "/usr/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/usr/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 667, in _receive
ret.append(self._receive_one(c))
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 697, in _receive_one
message, self._fanout_to_queue[exchange])
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/usr/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/gossip.py", line 201, in on_message
_, event = prepare(message.payload)
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/usr/lib/python3.6/site-packages/kombu/serialization.py", line 256, in loads
raise self._for_untrusted_content(content_type, 'disabled')
kombu.exceptions.ContentDisallowed: Refusing to deserialize disabled content of type json (application/json)
|
kombu.exceptions.ContentDisallowed
|
def create_task_handler(self, promise=promise):
strategies = self.strategies
on_unknown_message = self.on_unknown_message
on_unknown_task = self.on_unknown_task
on_invalid_task = self.on_invalid_task
callbacks = self.on_task_message
call_soon = self.call_soon
def on_task_received(message):
# payload will only be set for v1 protocol, since v2
# will defer deserializing the message body to the pool.
payload = None
try:
type_ = message.headers["task"] # protocol v2
except TypeError:
return on_unknown_message(None, message)
except KeyError:
try:
payload = message.decode()
except Exception as exc: # pylint: disable=broad-except
return self.on_decode_error(message, exc)
try:
type_, payload = payload["task"], payload # protocol v1
except (TypeError, KeyError):
return on_unknown_message(payload, message)
try:
strategy = strategies[type_]
except KeyError as exc:
return on_unknown_task(None, message, exc)
else:
try:
strategy(
message,
payload,
promise(call_soon, (message.ack_log_error,)),
promise(call_soon, (message.reject_log_error,)),
callbacks,
)
except (InvalidTaskError, ContentDisallowed) as exc:
return on_invalid_task(payload, message, exc)
except DecodeError as exc:
return self.on_decode_error(message, exc)
return on_task_received
|
def create_task_handler(self, promise=promise):
strategies = self.strategies
on_unknown_message = self.on_unknown_message
on_unknown_task = self.on_unknown_task
on_invalid_task = self.on_invalid_task
callbacks = self.on_task_message
call_soon = self.call_soon
def on_task_received(message):
# payload will only be set for v1 protocol, since v2
# will defer deserializing the message body to the pool.
payload = None
try:
type_ = message.headers["task"] # protocol v2
except TypeError:
return on_unknown_message(None, message)
except KeyError:
try:
payload = message.decode()
except Exception as exc: # pylint: disable=broad-except
return self.on_decode_error(message, exc)
try:
type_, payload = payload["task"], payload # protocol v1
except (TypeError, KeyError):
return on_unknown_message(payload, message)
try:
strategy = strategies[type_]
except KeyError as exc:
return on_unknown_task(None, message, exc)
else:
try:
strategy(
message,
payload,
promise(call_soon, (message.ack_log_error,)),
promise(call_soon, (message.reject_log_error,)),
callbacks,
)
except InvalidTaskError as exc:
return on_invalid_task(payload, message, exc)
except DecodeError as exc:
return self.on_decode_error(message, exc)
return on_task_received
|
https://github.com/celery/celery/issues/5056
|
[2018-09-10 02:33:07,446: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize disabled content of type json (application/json)',)
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 317, in start
blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 593, in start
c.loop(*c.loop_args())
File "/usr/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/usr/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 667, in _receive
ret.append(self._receive_one(c))
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 697, in _receive_one
message, self._fanout_to_queue[exchange])
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/usr/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/gossip.py", line 201, in on_message
_, event = prepare(message.payload)
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/usr/lib/python3.6/site-packages/kombu/serialization.py", line 256, in loads
raise self._for_untrusted_content(content_type, 'disabled')
kombu.exceptions.ContentDisallowed: Refusing to deserialize disabled content of type json (application/json)
|
kombu.exceptions.ContentDisallowed
|
def on_task_received(message):
# payload will only be set for v1 protocol, since v2
# will defer deserializing the message body to the pool.
payload = None
try:
type_ = message.headers["task"] # protocol v2
except TypeError:
return on_unknown_message(None, message)
except KeyError:
try:
payload = message.decode()
except Exception as exc: # pylint: disable=broad-except
return self.on_decode_error(message, exc)
try:
type_, payload = payload["task"], payload # protocol v1
except (TypeError, KeyError):
return on_unknown_message(payload, message)
try:
strategy = strategies[type_]
except KeyError as exc:
return on_unknown_task(None, message, exc)
else:
try:
strategy(
message,
payload,
promise(call_soon, (message.ack_log_error,)),
promise(call_soon, (message.reject_log_error,)),
callbacks,
)
except (InvalidTaskError, ContentDisallowed) as exc:
return on_invalid_task(payload, message, exc)
except DecodeError as exc:
return self.on_decode_error(message, exc)
|
def on_task_received(message):
# payload will only be set for v1 protocol, since v2
# will defer deserializing the message body to the pool.
payload = None
try:
type_ = message.headers["task"] # protocol v2
except TypeError:
return on_unknown_message(None, message)
except KeyError:
try:
payload = message.decode()
except Exception as exc: # pylint: disable=broad-except
return self.on_decode_error(message, exc)
try:
type_, payload = payload["task"], payload # protocol v1
except (TypeError, KeyError):
return on_unknown_message(payload, message)
try:
strategy = strategies[type_]
except KeyError as exc:
return on_unknown_task(None, message, exc)
else:
try:
strategy(
message,
payload,
promise(call_soon, (message.ack_log_error,)),
promise(call_soon, (message.reject_log_error,)),
callbacks,
)
except InvalidTaskError as exc:
return on_invalid_task(payload, message, exc)
except DecodeError as exc:
return self.on_decode_error(message, exc)
|
https://github.com/celery/celery/issues/5056
|
[2018-09-10 02:33:07,446: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize disabled content of type json (application/json)',)
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 317, in start
blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 593, in start
c.loop(*c.loop_args())
File "/usr/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/usr/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 667, in _receive
ret.append(self._receive_one(c))
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 697, in _receive_one
message, self._fanout_to_queue[exchange])
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/usr/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/gossip.py", line 201, in on_message
_, event = prepare(message.payload)
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/usr/lib/python3.6/site-packages/kombu/serialization.py", line 256, in loads
raise self._for_untrusted_content(content_type, 'disabled')
kombu.exceptions.ContentDisallowed: Refusing to deserialize disabled content of type json (application/json)
|
kombu.exceptions.ContentDisallowed
|
def on_message(self, prepare, message):
_type = message.delivery_info["routing_key"]
# For redis when `fanout_patterns=False` (See Issue #1882)
if _type.split(".", 1)[0] == "task":
return
try:
handler = self.event_handlers[_type]
except KeyError:
pass
else:
return handler(message.payload)
# proto2: hostname in header; proto1: in body
hostname = message.headers.get("hostname") or message.payload["hostname"]
if hostname != self.hostname:
try:
_, event = prepare(message.payload)
self.update_state(event)
except (DecodeError, ContentDisallowed, TypeError) as exc:
logger.error(exc)
else:
self.clock.forward()
|
def on_message(self, prepare, message):
_type = message.delivery_info["routing_key"]
# For redis when `fanout_patterns=False` (See Issue #1882)
if _type.split(".", 1)[0] == "task":
return
try:
handler = self.event_handlers[_type]
except KeyError:
pass
else:
return handler(message.payload)
# proto2: hostname in header; proto1: in body
hostname = message.headers.get("hostname") or message.payload["hostname"]
if hostname != self.hostname:
_, event = prepare(message.payload)
self.update_state(event)
else:
self.clock.forward()
|
https://github.com/celery/celery/issues/5056
|
[2018-09-10 02:33:07,446: CRITICAL/MainProcess] Unrecoverable error: ContentDisallowed('Refusing to deserialize disabled content of type json (application/json)',)
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 317, in start
blueprint.start(self)
File "/usr/lib/python3.6/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/consumer.py", line 593, in start
c.loop(*c.loop_args())
File "/usr/lib/python3.6/site-packages/celery/worker/loops.py", line 91, in asynloop
next(loop)
File "/usr/lib/python3.6/site-packages/kombu/asynchronous/hub.py", line 354, in create_loop
cb(*cbargs)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 1040, in on_readable
self.cycle.on_readable(fileno)
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 337, in on_readable
chan.handlers[type]()
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 667, in _receive
ret.append(self._receive_one(c))
File "/usr/lib/python3.6/site-packages/kombu/transport/redis.py", line 697, in _receive_one
message, self._fanout_to_queue[exchange])
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 983, in _deliver
callback(message)
File "/usr/lib/python3.6/site-packages/kombu/transport/virtual/base.py", line 633, in _callback
return callback(message)
File "/usr/lib/python3.6/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python3.6/site-packages/celery/worker/consumer/gossip.py", line 201, in on_message
_, event = prepare(message.payload)
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 207, in payload
return self._decoded_cache if self._decoded_cache else self.decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 192, in decode
self._decoded_cache = self._decode()
File "/usr/lib/python3.6/site-packages/kombu/message.py", line 197, in _decode
self.content_encoding, accept=self.accept)
File "/usr/lib/python3.6/site-packages/kombu/serialization.py", line 256, in loads
raise self._for_untrusted_content(content_type, 'disabled')
kombu.exceptions.ContentDisallowed: Refusing to deserialize disabled content of type json (application/json)
|
kombu.exceptions.ContentDisallowed
|
def apply(self, args=(), kwargs={}, **options):
last, (fargs, fkwargs) = None, (args, kwargs)
for task in self.tasks:
res = task.clone(fargs, fkwargs).apply(
last and (last.get(),), **dict(self.options, **options)
)
res.parent, last, (fargs, fkwargs) = last, res, (None, None)
return last
|
def apply(self, args=(), kwargs={}, **options):
last, fargs = None, args
for task in self.tasks:
res = task.clone(fargs).apply(
last and (last.get(),), **dict(self.options, **options)
)
res.parent, last, fargs = last, res, None
return last
|
https://github.com/celery/celery/issues/4951
|
―――――――――――――――――――――――――――――――――――――――――――――――――― test_chain.test_kwargs_apply ―――――――――――――――――――――――――――――――――――――――――――――――――――
self = <t.unit.tasks.test_canvas.test_chain instance at 0x7f2725208560>
def test_kwargs_apply(self):
x = chain(self.add.s(), self.add.s(8), self.add.s(10))
res = x.apply(kwargs={'x': 1, 'y': 1}).get()
t/unit/tasks/test_canvas.py:446:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
celery/canvas.py:707: in apply
???
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <EagerResult: 7b72e2aa-0455-45ae-9df9-429ecd1faf6a>, timeout = None, propagate = True, disable_sync_subtasks = True
kwargs = {}
def get(self, timeout=None, propagate=True,
disable_sync_subtasks=True, **kwargs):
if disable_sync_subtasks:
assert_will_not_block()
if self.successful():
return self.result
elif self.state in states.PROPAGATE_STATES:
if propagate:
raise self.result
E TypeError: add() takes exactly 2 arguments (0 given)
celery/result.py:995: TypeError
------------------------------------------------------ Captured stderr call -------------------------------------------------------
[2018-08-03 14:23:40,008: ERROR/MainProcess] Task t.unit.tasks.test_canvas.add[7b72e2aa-0455-45ae-9df9-429ecd1faf6a] raised unexpected: TypeError('add() takes exactly 2 arguments (0 given)',)
Traceback (most recent call last):
File "/home/developer/celery/app/trace.py", line 382, in trace_task
R = retval = fun(*args, **kwargs)
TypeError: add() takes exactly 2 arguments (0 given)
|
TypeError
|
def __init__(
self,
message,
on_ack=noop,
hostname=None,
eventer=None,
app=None,
connection_errors=None,
request_dict=None,
task=None,
on_reject=noop,
body=None,
headers=None,
decoded=False,
utc=True,
maybe_make_aware=maybe_make_aware,
maybe_iso8601=maybe_iso8601,
**opts,
):
if headers is None:
headers = message.headers
if body is None:
body = message.body
self.app = app
self.message = message
self.body = body
self.utc = utc
self._decoded = decoded
if decoded:
self.content_type = self.content_encoding = None
else:
self.content_type, self.content_encoding = (
message.content_type,
message.content_encoding,
)
self.id = headers["id"]
type = self.type = self.name = headers["task"]
self.root_id = headers.get("root_id")
self.parent_id = headers.get("parent_id")
if "shadow" in headers:
self.name = headers["shadow"] or self.name
timelimit = headers.get("timelimit", None)
if timelimit:
self.time_limits = timelimit
self.argsrepr = headers.get("argsrepr", "")
self.kwargsrepr = headers.get("kwargsrepr", "")
self.on_ack = on_ack
self.on_reject = on_reject
self.hostname = hostname or gethostname()
self.eventer = eventer
self.connection_errors = connection_errors or ()
self.task = task or self.app.tasks[type]
# timezone means the message is timezone-aware, and the only timezone
# supported at this point is UTC.
eta = headers.get("eta")
if eta is not None:
try:
eta = maybe_iso8601(eta)
except (AttributeError, ValueError, TypeError) as exc:
raise InvalidTaskError("invalid ETA value {0!r}: {1}".format(eta, exc))
self.eta = maybe_make_aware(eta, self.tzlocal)
else:
self.eta = None
expires = headers.get("expires")
if expires is not None:
try:
expires = maybe_iso8601(expires)
except (AttributeError, ValueError, TypeError) as exc:
raise InvalidTaskError(
"invalid expires value {0!r}: {1}".format(expires, exc)
)
self.expires = maybe_make_aware(expires, self.tzlocal)
else:
self.expires = None
delivery_info = message.delivery_info or {}
properties = message.properties or {}
headers.update(
{
"reply_to": properties.get("reply_to"),
"correlation_id": properties.get("correlation_id"),
"delivery_info": {
"exchange": delivery_info.get("exchange"),
"routing_key": delivery_info.get("routing_key"),
"priority": properties.get("priority"),
"redelivered": delivery_info.get("redelivered"),
},
}
)
self.request_dict = headers
|
def __init__(
self,
message,
on_ack=noop,
hostname=None,
eventer=None,
app=None,
connection_errors=None,
request_dict=None,
task=None,
on_reject=noop,
body=None,
headers=None,
decoded=False,
utc=True,
maybe_make_aware=maybe_make_aware,
maybe_iso8601=maybe_iso8601,
**opts,
):
if headers is None:
headers = message.headers
if body is None:
body = message.body
self.app = app
self.message = message
self.body = body
self.utc = utc
self._decoded = decoded
if decoded:
self.content_type = self.content_encoding = None
else:
self.content_type, self.content_encoding = (
message.content_type,
message.content_encoding,
)
self.id = headers["id"]
type = self.type = self.name = headers["task"]
self.root_id = headers.get("root_id")
self.parent_id = headers.get("parent_id")
if "shadow" in headers:
self.name = headers["shadow"] or self.name
if "timelimit" in headers:
self.time_limits = headers["timelimit"]
self.argsrepr = headers.get("argsrepr", "")
self.kwargsrepr = headers.get("kwargsrepr", "")
self.on_ack = on_ack
self.on_reject = on_reject
self.hostname = hostname or gethostname()
self.eventer = eventer
self.connection_errors = connection_errors or ()
self.task = task or self.app.tasks[type]
# timezone means the message is timezone-aware, and the only timezone
# supported at this point is UTC.
eta = headers.get("eta")
if eta is not None:
try:
eta = maybe_iso8601(eta)
except (AttributeError, ValueError, TypeError) as exc:
raise InvalidTaskError("invalid ETA value {0!r}: {1}".format(eta, exc))
self.eta = maybe_make_aware(eta, self.tzlocal)
else:
self.eta = None
expires = headers.get("expires")
if expires is not None:
try:
expires = maybe_iso8601(expires)
except (AttributeError, ValueError, TypeError) as exc:
raise InvalidTaskError(
"invalid expires value {0!r}: {1}".format(expires, exc)
)
self.expires = maybe_make_aware(expires, self.tzlocal)
else:
self.expires = None
delivery_info = message.delivery_info or {}
properties = message.properties or {}
headers.update(
{
"reply_to": properties.get("reply_to"),
"correlation_id": properties.get("correlation_id"),
"delivery_info": {
"exchange": delivery_info.get("exchange"),
"routing_key": delivery_info.get("routing_key"),
"priority": properties.get("priority"),
"redelivered": delivery_info.get("redelivered"),
},
}
)
self.request_dict = headers
|
https://github.com/celery/celery/issues/4906
|
[2018-07-16 06:09:46,229: CRITICAL/MainProcess] Unrecoverable error: TypeError("'NoneType' object is not iterable",)
Traceback (most recent call last):
File "/usr/lib/python2.7/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/usr/lib/python2.7/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python2.7/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/usr/lib/python2.7/site-packages/celery/worker/consumer/consumer.py", line 322, in start
blueprint.start(self)
File "/usr/lib/python2.7/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/usr/lib/python2.7/site-packages/celery/worker/consumer/consumer.py", line 598, in start
c.loop(*c.loop_args())
File "/usr/lib/python2.7/site-packages/celery/worker/loops.py", line 118, in synloop
qos.update()
File "/usr/lib/python2.7/site-packages/kombu/common.py", line 417, in update
return self.set(self.value)
File "/usr/lib/python2.7/site-packages/kombu/common.py", line 410, in set
self.callback(prefetch_count=new_value)
File "/usr/lib/python2.7/site-packages/celery/worker/consumer/tasks.py", line 47, in set_prefetch_count
apply_global=qos_global,
File "/usr/lib/python2.7/site-packages/kombu/messaging.py", line 558, in qos
apply_global)
File "/usr/lib/python2.7/site-packages/amqp/channel.py", line 1812, in basic_qos
wait=spec.Basic.QosOk,
File "/usr/lib/python2.7/site-packages/amqp/abstract_channel.py", line 59, in send_method
return self.wait(wait, returns_tuple=returns_tuple)
File "/usr/lib/python2.7/site-packages/amqp/abstract_channel.py", line 79, in wait
self.connection.drain_events(timeout=timeout)
File "/usr/lib/python2.7/site-packages/amqp/connection.py", line 491, in drain_events
while not self.blocking_read(timeout):
File "/usr/lib/python2.7/site-packages/amqp/connection.py", line 497, in blocking_read
return self.on_inbound_frame(frame)
File "/usr/lib/python2.7/site-packages/amqp/method_framing.py", line 77, in on_frame
callback(channel, msg.frame_method, msg.frame_args, msg)
File "/usr/lib/python2.7/site-packages/amqp/connection.py", line 501, in on_inbound_method
method_sig, payload, content,
File "/usr/lib/python2.7/site-packages/amqp/abstract_channel.py", line 128, in dispatch_method
listener(*args)
File "/usr/lib/python2.7/site-packages/amqp/channel.py", line 1597, in _on_basic_deliver
fun(msg)
File "/usr/lib/python2.7/site-packages/kombu/messaging.py", line 624, in _receive_callback
return on_m(message) if on_m else self.receive(decoded, message)
File "/usr/lib/python2.7/site-packages/celery/worker/consumer/consumer.py", line 572, in on_task_received
callbacks,
File "/usr/lib/python2.7/site-packages/celery/worker/strategy.py", line 200, in task_message_handler
handle(req)
File "/usr/lib/python2.7/site-packages/celery/worker/worker.py", line 228, in _process_task
req.execute_using_pool(self.pool)
File "/usr/lib/python2.7/site-packages/celery/worker/request.py", line 520, in execute_using_pool
time_limit, soft_time_limit = self.time_limits
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def exception_to_python(self, exc):
"""Convert serialized exception to Python exception."""
if exc:
if not isinstance(exc, BaseException):
exc_module = exc.get("exc_module")
if exc_module is None:
cls = create_exception_cls(from_utf8(exc["exc_type"]), __name__)
else:
exc_module = from_utf8(exc_module)
exc_type = from_utf8(exc["exc_type"])
try:
cls = getattr(sys.modules[exc_module], exc_type)
except KeyError:
cls = create_exception_cls(exc_type, celery.exceptions.__name__)
exc_msg = exc["exc_message"]
exc = cls(*exc_msg if isinstance(exc_msg, tuple) else exc_msg)
if self.serializer in EXCEPTION_ABLE_CODECS:
exc = get_pickled_exception(exc)
return exc
|
def exception_to_python(self, exc):
"""Convert serialized exception to Python exception."""
if exc:
if not isinstance(exc, BaseException):
exc_module = exc.get("exc_module")
if exc_module is None:
cls = create_exception_cls(from_utf8(exc["exc_type"]), __name__)
else:
exc_module = from_utf8(exc_module)
exc_type = from_utf8(exc["exc_type"])
cls = getattr(sys.modules[exc_module], exc_type)
exc_msg = exc["exc_message"]
exc = cls(*exc_msg if isinstance(exc_msg, tuple) else exc_msg)
if self.serializer in EXCEPTION_ABLE_CODECS:
exc = get_pickled_exception(exc)
return exc
|
https://github.com/celery/celery/issues/4835
|
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: Traceback (most recent call last):
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: File "/usr/local/lib/python3.5/dist-packages/tornado/web.py", line 1541, in _execute
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: result = method(*self.path_args, **self.path_kwargs)
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: File "/usr/local/lib/python3.5/dist-packages/tornado/web.py", line 2949, in wrapper
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: return method(self, *args, **kwargs)
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: File "/usr/local/lib/python3.5/dist-packages/flower/api/tasks.py", line 314, in get
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: response = {'task-id': taskid, 'state': result.state}
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: File "/usr/local/lib/python3.5/dist-packages/celery/result.py", line 471, in state
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: return self._get_task_meta()['status']
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: File "/usr/local/lib/python3.5/dist-packages/celery/result.py", line 410, in _get_task_meta
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: return self._maybe_set_cache(self.backend.get_task_meta(self.id))
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: File "/usr/local/lib/python3.5/dist-packages/celery/backends/base.py", line 359, in get_task_meta
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: meta = self._get_task_meta_for(task_id)
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: File "/usr/local/lib/python3.5/dist-packages/celery/backends/base.py", line 674, in _get_task_meta_for
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: return self.decode_result(meta)
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: File "/usr/local/lib/python3.5/dist-packages/celery/backends/base.py", line 278, in decode_result
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: return self.meta_from_decoded(self.decode(payload))
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: File "/usr/local/lib/python3.5/dist-packages/celery/backends/base.py", line 274, in meta_from_decoded
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: meta['result'] = self.exception_to_python(meta['result'])
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: File "/usr/local/lib/python3.5/dist-packages/celery/backends/base.py", line 252, in exception_to_python
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: cls = getattr(sys.modules[exc_module], exc_type)
Jun 21 01:23:24 netdocker1-eastus2 daemon INFO 94e57cb12059[92630]: KeyError: 'net_devices2.exceptions'
|
KeyError
|
def __eq__(self, other):
if isinstance(other, GroupResult):
return (
other.id == self.id
and other.results == self.results
and other.parent == self.parent
)
elif isinstance(other, string_t):
return other == self.id
return NotImplemented
|
def __eq__(self, other):
if isinstance(other, GroupResult):
return (
other.id == self.id
and other.results == self.results
and other.parent == self.parent
)
return NotImplemented
|
https://github.com/celery/celery/issues/4739
|
Traceback (most recent call last):
File "/Users/user/.pyenv/versions/3.6.1/envs/3.6.1-celery-poc/lib/python3.6/site-packages/kombu/utils/objects.py", line 42, in __get__
return obj.__dict__[self.__name__]
KeyError: 'graph'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "poc.py", line 33, in <module>
root.graph.to_dot(fh)
File "/Users/user/.pyenv/versions/3.6.1/envs/3.6.1-celery-poc/lib/python3.6/site-packages/kombu/utils/objects.py", line 44, in __get__
value = obj.__dict__[self.__name__] = self.__get(obj)
File "/Users/user/.pyenv/versions/3.6.1/envs/3.6.1-celery-poc/lib/python3.6/site-packages/celery/result.py", line 354, in graph
return self.build_graph()
File "/Users/user/.pyenv/versions/3.6.1/envs/3.6.1-celery-poc/lib/python3.6/site-packages/celery/result.py", line 314, in build_graph
graph.add_arc(node)
File "/Users/user/.pyenv/versions/3.6.1/envs/3.6.1-celery-poc/lib/python3.6/site-packages/celery/utils/graph.py", line 53, in add_arc
self.adjacent.setdefault(obj, [])
TypeError: unhashable type: 'GroupResult'
|
KeyError
|
def __new__(cls, *tasks, **kwargs):
# This forces `chain(X, Y, Z)` to work the same way as `X | Y | Z`
if not kwargs and tasks:
if len(tasks) != 1 or is_list(tasks[0]):
tasks = tasks[0] if len(tasks) == 1 else tasks
return reduce(operator.or_, tasks)
return super(chain, cls).__new__(cls, *tasks, **kwargs)
|
def __new__(cls, *tasks, **kwargs):
# This forces `chain(X, Y, Z)` to work the same way as `X | Y | Z`
if not kwargs and tasks:
if len(tasks) == 1 and is_list(tasks[0]):
# ensure chain(generator_expression) works.
tasks = tasks[0]
return reduce(operator.or_, tasks)
return super(chain, cls).__new__(cls, *tasks, **kwargs)
|
https://github.com/celery/celery/issues/4498
|
(celery) ➜ myapp git:(master) ./manage.py test_cmd
Traceback (most recent call last):
File "./manage.py", line 22, in <module>
execute_from_command_line(sys.argv)
File "/Users/admin/Envs/celery/lib/python3.6/site-packages/django/core/management/__init__.py", line 364, in execute_from_command_line
utility.execute()
File "/Users/admin/Envs/celery/lib/python3.6/site-packages/django/core/management/__init__.py", line 356, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/Users/admin/Envs/celery/lib/python3.6/site-packages/django/core/management/base.py", line 283, in run_from_argv
self.execute(*args, **cmd_options)
File "/Users/admin/Envs/celery/lib/python3.6/site-packages/django/core/management/base.py", line 330, in execute
output = self.handle(*args, **options)
File "/Users/admin/Projects/myapp/cel/management/commands/test_cmd.py", line 10, in handle
c = chain(AddTask().si(1, 2), AddTask().si(1, 2))()
File "/Users/admin/Envs/celery/lib/python3.6/site-packages/celery/canvas.py", line 533, in __call__
return self.apply_async(args, kwargs)
File "/Users/admin/Envs/celery/lib/python3.6/site-packages/celery/canvas.py", line 559, in apply_async
dict(self.options, **options) if options else self.options))
File "/Users/admin/Envs/celery/lib/python3.6/site-packages/celery/canvas.py", line 586, in run
first_task.apply_async(**options)
File "/Users/admin/Envs/celery/lib/python3.6/site-packages/celery/canvas.py", line 221, in apply_async
return _apply(args, kwargs, **options)
File "/Users/admin/Envs/celery/lib/python3.6/site-packages/celery/app/task.py", line 532, in apply_async
shadow = shadow or self.shadow_name(args, kwargs, options)
TypeError: shadow_name() missing 1 required positional argument: 'options'
|
TypeError
|
def as_task_v2(
self,
task_id,
name,
args=None,
kwargs=None,
countdown=None,
eta=None,
group_id=None,
expires=None,
retries=0,
chord=None,
callbacks=None,
errbacks=None,
reply_to=None,
time_limit=None,
soft_time_limit=None,
create_sent_event=False,
root_id=None,
parent_id=None,
shadow=None,
chain=None,
now=None,
timezone=None,
origin=None,
argsrepr=None,
kwargsrepr=None,
):
args = args or ()
kwargs = kwargs or {}
if not isinstance(args, (list, tuple)):
raise TypeError("task args must be a list or tuple")
if not isinstance(kwargs, Mapping):
raise TypeError("task keyword arguments must be a mapping")
if countdown: # convert countdown to ETA
self._verify_seconds(countdown, "countdown")
now = now or self.app.now()
timezone = timezone or self.app.timezone
eta = maybe_make_aware(
now + timedelta(seconds=countdown),
tz=timezone,
)
if isinstance(expires, numbers.Real):
self._verify_seconds(expires, "expires")
now = now or self.app.now()
timezone = timezone or self.app.timezone
expires = maybe_make_aware(
now + timedelta(seconds=expires),
tz=timezone,
)
if not isinstance(eta, string_t):
eta = eta and eta.isoformat()
# If we retry a task `expires` will already be ISO8601-formatted.
if not isinstance(expires, string_t):
expires = expires and expires.isoformat()
if argsrepr is None:
argsrepr = saferepr(args, self.argsrepr_maxsize)
if kwargsrepr is None:
kwargsrepr = saferepr(kwargs, self.kwargsrepr_maxsize)
if JSON_NEEDS_UNICODE_KEYS: # pragma: no cover
if callbacks:
callbacks = [utf8dict(callback) for callback in callbacks]
if errbacks:
errbacks = [utf8dict(errback) for errback in errbacks]
if chord:
chord = utf8dict(chord)
if not root_id: # empty root_id defaults to task_id
root_id = task_id
return task_message(
headers={
"lang": "py",
"task": name,
"id": task_id,
"shadow": shadow,
"eta": eta,
"expires": expires,
"group": group_id,
"retries": retries,
"timelimit": [time_limit, soft_time_limit],
"root_id": root_id,
"parent_id": parent_id,
"argsrepr": argsrepr,
"kwargsrepr": kwargsrepr,
"origin": origin or anon_nodename(),
},
properties={
"correlation_id": task_id,
"reply_to": reply_to or "",
},
body=(
args,
kwargs,
{
"callbacks": callbacks,
"errbacks": errbacks,
"chain": chain,
"chord": chord,
},
),
sent_event={
"uuid": task_id,
"root_id": root_id,
"parent_id": parent_id,
"name": name,
"args": argsrepr,
"kwargs": kwargsrepr,
"retries": retries,
"eta": eta,
"expires": expires,
}
if create_sent_event
else None,
)
|
def as_task_v2(
self,
task_id,
name,
args=None,
kwargs=None,
countdown=None,
eta=None,
group_id=None,
expires=None,
retries=0,
chord=None,
callbacks=None,
errbacks=None,
reply_to=None,
time_limit=None,
soft_time_limit=None,
create_sent_event=False,
root_id=None,
parent_id=None,
shadow=None,
chain=None,
now=None,
timezone=None,
origin=None,
argsrepr=None,
kwargsrepr=None,
):
args = args or ()
kwargs = kwargs or {}
if not isinstance(args, (list, tuple)):
raise TypeError("task args must be a list or tuple")
if not isinstance(kwargs, Mapping):
raise TypeError("task keyword arguments must be a mapping")
if countdown: # convert countdown to ETA
self._verify_seconds(countdown, "countdown")
now = now or self.app.now()
timezone = timezone or self.app.timezone
eta = maybe_make_aware(
now + timedelta(seconds=countdown),
tz=timezone,
)
if isinstance(expires, numbers.Real):
self._verify_seconds(expires, "expires")
now = now or self.app.now()
timezone = timezone or self.app.timezone
expires = maybe_make_aware(
now + timedelta(seconds=expires),
tz=timezone,
)
eta = eta and eta.isoformat()
# If we retry a task `expires` will already be ISO8601-formatted.
if not isinstance(expires, string_t):
expires = expires and expires.isoformat()
if argsrepr is None:
argsrepr = saferepr(args, self.argsrepr_maxsize)
if kwargsrepr is None:
kwargsrepr = saferepr(kwargs, self.kwargsrepr_maxsize)
if JSON_NEEDS_UNICODE_KEYS: # pragma: no cover
if callbacks:
callbacks = [utf8dict(callback) for callback in callbacks]
if errbacks:
errbacks = [utf8dict(errback) for errback in errbacks]
if chord:
chord = utf8dict(chord)
if not root_id: # empty root_id defaults to task_id
root_id = task_id
return task_message(
headers={
"lang": "py",
"task": name,
"id": task_id,
"shadow": shadow,
"eta": eta,
"expires": expires,
"group": group_id,
"retries": retries,
"timelimit": [time_limit, soft_time_limit],
"root_id": root_id,
"parent_id": parent_id,
"argsrepr": argsrepr,
"kwargsrepr": kwargsrepr,
"origin": origin or anon_nodename(),
},
properties={
"correlation_id": task_id,
"reply_to": reply_to or "",
},
body=(
args,
kwargs,
{
"callbacks": callbacks,
"errbacks": errbacks,
"chain": chain,
"chord": chord,
},
),
sent_event={
"uuid": task_id,
"root_id": root_id,
"parent_id": parent_id,
"name": name,
"args": argsrepr,
"kwargs": kwargsrepr,
"retries": retries,
"eta": eta,
"expires": expires,
}
if create_sent_event
else None,
)
|
https://github.com/celery/celery/issues/4560
|
[2018-02-26 16:29:44,350: INFO/ForkPoolWorker-5] Task tasks.add[4ecc6d45-40cc-40bc-ba79-b7efad956383] succeeded in 0.00307657103986s: 3
[2018-02-26 16:29:44,351: INFO/ForkPoolWorker-7] Task celery.chord_unlock[d17aa171-1b48-49b0-bcfa-bf961c65d0ea] retry: Retry in 1s
[2018-02-26 16:29:44,352: INFO/MainProcess] Received task: celery.chord_unlock[d17aa171-1b48-49b0-bcfa-bf961c65d0ea] ETA:[2018-02-27 00:29:45.350604+00:00]
[2018-02-26 16:29:44,353: INFO/ForkPoolWorker-4] Task tasks.add[62706ef5-97e3-444b-9037-33b64dac866d] succeeded in 0.00494244601578s: 7
[2018-02-26 16:29:45,795: ERROR/ForkPoolWorker-2] Chord 'a5329bed-f970-4969-9177-fd0c79af0be9' raised: AttributeError("'str' object has no attribute 'isoformat'",)
Traceback (most recent call last):
File "/home/steve/env/local/lib/python2.7/site-packages/celery/app/builtins.py", line 91, in unlock_chord
callback.delay(ret)
File "/home/steve/env/local/lib/python2.7/site-packages/celery/canvas.py", line 182, in delay
return self.apply_async(partial_args, partial_kwargs)
File "/home/steve/env/local/lib/python2.7/site-packages/celery/canvas.py", line 221, in apply_async
return _apply(args, kwargs, **options)
File "/home/steve/env/local/lib/python2.7/site-packages/celery/app/task.py", line 536, in apply_async
**options
File "/home/steve/env/local/lib/python2.7/site-packages/celery/app/base.py", line 729, in send_task
root_id, parent_id, shadow, chain,
File "/home/steve/env/local/lib/python2.7/site-packages/celery/app/amqp.py", line 333, in as_task_v2
eta = eta and eta.isoformat()
AttributeError: 'str' object has no attribute 'isoformat'
|
AttributeError
|
def _call_task_errbacks(self, request, exc, traceback):
old_signature = []
for errback in request.errbacks:
errback = self.app.signature(errback)
if (
# workaround to support tasks with bind=True executed as
# link errors. Otherwise retries can't be used
not isinstance(errback.type.__header__, partial)
and arity_greater(errback.type.__header__, 1)
):
errback(request, exc, traceback)
else:
old_signature.append(errback)
if old_signature:
# Previously errback was called as a task so we still
# need to do so if the errback only takes a single task_id arg.
task_id = request.id
root_id = request.root_id or task_id
group(old_signature, app=self.app).apply_async(
(task_id,), parent_id=task_id, root_id=root_id
)
|
def _call_task_errbacks(self, request, exc, traceback):
old_signature = []
for errback in request.errbacks:
errback = self.app.signature(errback)
if arity_greater(errback.type.__header__, 1):
errback(request, exc, traceback)
else:
old_signature.append(errback)
if old_signature:
# Previously errback was called as a task so we still
# need to do so if the errback only takes a single task_id arg.
task_id = request.id
root_id = request.root_id or task_id
group(old_signature, app=self.app).apply_async(
(task_id,), parent_id=task_id, root_id=root_id
)
|
https://github.com/celery/celery/issues/3723
|
[2016-12-28 07:36:12,692: INFO/MainProcess] Received task: raise_exception[b10b8451-3f4d-4cf0-b4b0-f964105cf849]
[2016-12-28 07:36:12,847: INFO/PoolWorker-5] /usr/local/lib/python2.7/dist-packages/celery/app/trace.py:542: RuntimeWarning: Exception raised outside body: TypeError('<functools.partial object at 0x7f74848dffc8> is not a Python function',):
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/celery/app/trace.py", line 381, in trace_task
I, R, state, retval = on_error(task_request, exc, uuid)
File "/usr/local/lib/python2.7/dist-packages/celery/app/trace.py", line 323, in on_error
task, request, eager=eager, call_errbacks=call_errbacks,
File "/usr/local/lib/python2.7/dist-packages/celery/app/trace.py", line 157, in handle_error_state
call_errbacks=call_errbacks)
File "/usr/local/lib/python2.7/dist-packages/celery/app/trace.py", line 202, in handle_failure
call_errbacks=call_errbacks,
File "/usr/local/lib/python2.7/dist-packages/celery/backends/base.py", line 168, in mark_as_failure
self._call_task_errbacks(request, exc, traceback)
File "/usr/local/lib/python2.7/dist-packages/celery/backends/base.py", line 174, in _call_task_errbacks
if arity_greater(errback.type.__header__, 1):
File "/usr/local/lib/python2.7/dist-packages/celery/utils/functional.py", line 292, in arity_greater
argspec = getfullargspec(fun)
File "/usr/local/lib/python2.7/dist-packages/vine/five.py", line 350, in getfullargspec
s = _getargspec(fun)
File "/usr/lib/python2.7/inspect.py", line 816, in getargspec
raise TypeError('{!r} is not a Python function'.format(func))
TypeError: <functools.partial object at 0x7f74848dffc8> is not a Python function
exc, exc_info.traceback)))
[2016-12-28 07:36:12,922: ERROR/MainProcess] Pool callback raised exception: TypeError('<functools.partial object at 0x7f74848dffc8> is not a Python function',)
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/billiard/pool.py", line 1748, in safe_apply_callback
fun(*args, **kwargs)
File "/usr/local/lib/python2.7/dist-packages/celery/worker/request.py", line 366, in on_failure
self.id, exc, request=self, store_result=self.store_errors,
File "/usr/local/lib/python2.7/dist-packages/celery/backends/base.py", line 168, in mark_as_failure
self._call_task_errbacks(request, exc, traceback)
File "/usr/local/lib/python2.7/dist-packages/celery/backends/base.py", line 174, in _call_task_errbacks
if arity_greater(errback.type.__header__, 1):
File "/usr/local/lib/python2.7/dist-packages/celery/utils/functional.py", line 292, in arity_greater
argspec = getfullargspec(fun)
File "/usr/local/lib/python2.7/dist-packages/vine/five.py", line 350, in getfullargspec
s = _getargspec(fun)
File "/usr/lib/python2.7/inspect.py", line 816, in getargspec
raise TypeError('{!r} is not a Python function'.format(func))
TypeError: <functools.partial object at 0x7f74848dffc8> is not a Python function
|
TypeError
|
def _index(self, id, body, **kwargs):
return self.server.index(
index=self.index, doc_type=self.doc_type, body=body, **kwargs
)
|
def _index(self, id, body, **kwargs):
return self.server.index(index=self.index, doc_type=self.doc_type, **kwargs)
|
https://github.com/celery/celery/issues/3556
|
[2016-11-07 12:16:12,380: ERROR/MainProcess] Pool callback raised exception: TypeError("index() missing 1 required positional argument: 'body'",)
Traceback (most recent call last):
File "project/lib/python3.5/site-packages/billiard/pool.py", line 1748, in safe_apply_callback
fun(*args, **kwargs)
File "project/lib/python3.5/site-packages/celery/worker/request.py", line 366, in on_failure
self.id, exc, request=self, store_result=self.store_errors,
File "project/lib/python3.5/site-packages/celery/backends/base.py", line 163, in mark_as_failure
traceback=traceback, request=request)
File "project/lib/python3.5/site-packages/celery/backends/base.py", line 309, in store_result
request=request, **kwargs)
File "project/lib/python3.5/site-packages/celery/backends/base.py", line 652, in _store_result
self.set(self.get_key_for_task(task_id), self.encode(meta))
File "project/lib/python3.5/site-packages/celery/backends/elasticsearch.py", line 80, in set
datetime.utcnow().isoformat()[:-3]
File "project/lib/python3.5/site-packages/celery/backends/elasticsearch.py", line 94, in _index
**kwargs
File "project/lib/python3.5/site-packages/elasticsearch/client/utils.py", line 69, in _wrapped
return func(*args, params=params, **kwargs)
TypeError: index() missing 1 required positional argument: 'body'
|
TypeError
|
def __or__(self, other):
# These could be implemented in each individual class,
# I'm sure, but for now we have this.
if isinstance(self, group):
if isinstance(other, group):
# group() | group() -> single group
return group(itertools.chain(self.tasks, other.tasks), app=self.app)
# group() | task -> chord
return chord(self, body=other, app=self._app)
elif isinstance(other, group):
# unroll group with one member
other = maybe_unroll_group(other)
if isinstance(self, _chain):
# chain | group() -> chain
return _chain(seq_concat_item(self.unchain_tasks(), other), app=self._app)
# task | group() -> chain
return _chain(self, other, app=self.app)
if not isinstance(self, _chain) and isinstance(other, _chain):
# task | chain -> chain
return _chain(seq_concat_seq((self,), other.unchain_tasks()), app=self._app)
elif isinstance(other, _chain):
# chain | chain -> chain
return _chain(
seq_concat_seq(self.unchain_tasks(), other.unchain_tasks()), app=self._app
)
elif isinstance(self, chord):
# chord | task -> attach to body
sig = self.clone()
sig.body = sig.body | other
return sig
elif isinstance(other, Signature):
if isinstance(self, _chain):
if self.tasks and isinstance(self.tasks[-1], group):
# CHAIN [last item is group] | TASK -> chord
sig = self.clone()
sig.tasks[-1] = chord(sig.tasks[-1], other, app=self._app)
return sig
elif self.tasks and isinstance(self.tasks[-1], chord):
# CHAIN [last item is chord] -> chain with chord body.
sig = self.clone()
sig.tasks[-1].body = sig.tasks[-1].body | other
return sig
else:
# chain | task -> chain
return _chain(
seq_concat_item(self.unchain_tasks(), other), app=self._app
)
# task | task -> chain
return _chain(self, other, app=self._app)
return NotImplemented
|
def __or__(self, other):
# These could be implemented in each individual class,
# I'm sure, but for now we have this.
if isinstance(other, chord) and len(other.tasks) == 1:
# chord with one header -> header[0] | body
other = other.tasks[0] | other.body
if isinstance(self, group):
if isinstance(other, group):
# group() | group() -> single group
return group(itertools.chain(self.tasks, other.tasks), app=self.app)
# group() | task -> chord
if len(self.tasks) == 1:
# group(ONE.s()) | other -> ONE.s() | other
# Issue #3323
return self.tasks[0] | other
return chord(self, body=other, app=self._app)
elif isinstance(other, group):
# unroll group with one member
other = maybe_unroll_group(other)
if isinstance(self, _chain):
# chain | group() -> chain
return _chain(seq_concat_item(self.unchain_tasks(), other), app=self._app)
# task | group() -> chain
return _chain(self, other, app=self.app)
if not isinstance(self, _chain) and isinstance(other, _chain):
# task | chain -> chain
return _chain(seq_concat_seq((self,), other.unchain_tasks()), app=self._app)
elif isinstance(other, _chain):
# chain | chain -> chain
return _chain(
seq_concat_seq(self.unchain_tasks(), other.unchain_tasks()), app=self._app
)
elif isinstance(self, chord):
# chord(ONE, body) | other -> ONE | body | other
# chord with one header task is unecessary.
if len(self.tasks) == 1:
return self.tasks[0] | self.body | other
# chord | task -> attach to body
sig = self.clone()
sig.body = sig.body | other
return sig
elif isinstance(other, Signature):
if isinstance(self, _chain):
if self.tasks and isinstance(self.tasks[-1], group):
# CHAIN [last item is group] | TASK -> chord
sig = self.clone()
sig.tasks[-1] = chord(sig.tasks[-1], other, app=self._app)
return sig
elif self.tasks and isinstance(self.tasks[-1], chord):
# CHAIN [last item is chord] -> chain with chord body.
sig = self.clone()
sig.tasks[-1].body = sig.tasks[-1].body | other
return sig
else:
# chain | task -> chain
return _chain(
seq_concat_item(self.unchain_tasks(), other), app=self._app
)
# task | task -> chain
return _chain(self, other, app=self._app)
return NotImplemented
|
https://github.com/celery/celery/issues/3885
|
Traceback (most recent call last):
File "<console>", line 6, in <module>
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 182, in delay
return self.apply_async(partial_args, partial_kwargs)
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 1245, in apply_async
return self.run(tasks, body, args, task_id=task_id, **options)
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 1287, in run
result=results)
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/backends/redis.py", line 245, in apply_chord
return header(*partial_args, **options or {})
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 962, in __call__
return self.apply_async(partial_args, **options)
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 987, in apply_async
args=args, kwargs=kwargs, **options))
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 1063, in _apply_tasks
**options)
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 1242, in apply_async
return (self.tasks[0] | body).set(task_id=task_id).apply_async(
KeyError: 0
|
KeyError
|
def apply_async(
self,
args=(),
kwargs={},
task_id=None,
producer=None,
publisher=None,
connection=None,
router=None,
result_cls=None,
**options,
):
kwargs = kwargs or {}
args = tuple(args) + tuple(self.args) if args and not self.immutable else self.args
body = kwargs.pop("body", None) or self.kwargs["body"]
kwargs = dict(self.kwargs["kwargs"], **kwargs)
body = body.clone(**options)
app = self._get_app(body)
tasks = (
self.tasks.clone()
if isinstance(self.tasks, group)
else group(self.tasks, app=app)
)
if app.conf.task_always_eager:
return self.apply(args, kwargs, body=body, task_id=task_id, **options)
# chord([A, B, ...], C)
return self.run(tasks, body, args, task_id=task_id, **options)
|
def apply_async(
self,
args=(),
kwargs={},
task_id=None,
producer=None,
publisher=None,
connection=None,
router=None,
result_cls=None,
**options,
):
kwargs = kwargs or {}
args = tuple(args) + tuple(self.args) if args and not self.immutable else self.args
body = kwargs.pop("body", None) or self.kwargs["body"]
kwargs = dict(self.kwargs["kwargs"], **kwargs)
body = body.clone(**options)
app = self._get_app(body)
tasks = (
self.tasks.clone()
if isinstance(self.tasks, group)
else group(self.tasks, app=app)
)
if app.conf.task_always_eager:
return self.apply(args, kwargs, body=body, task_id=task_id, **options)
if len(self.tasks) == 1:
# chord([A], B) can be optimized as A | B
# - Issue #3323
return (
(self.tasks[0] | body)
.set(task_id=task_id)
.apply_async(args, kwargs, **options)
)
# chord([A, B, ...], C)
return self.run(tasks, body, args, task_id=task_id, **options)
|
https://github.com/celery/celery/issues/3885
|
Traceback (most recent call last):
File "<console>", line 6, in <module>
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 182, in delay
return self.apply_async(partial_args, partial_kwargs)
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 1245, in apply_async
return self.run(tasks, body, args, task_id=task_id, **options)
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 1287, in run
result=results)
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/backends/redis.py", line 245, in apply_chord
return header(*partial_args, **options or {})
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 962, in __call__
return self.apply_async(partial_args, **options)
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 987, in apply_async
args=args, kwargs=kwargs, **options))
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 1063, in _apply_tasks
**options)
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 1242, in apply_async
return (self.tasks[0] | body).set(task_id=task_id).apply_async(
KeyError: 0
|
KeyError
|
def run(
self,
header,
body,
partial_args,
app=None,
interval=None,
countdown=1,
max_retries=None,
eager=False,
task_id=None,
**options,
):
app = app or self._get_app(body)
group_id = header.options.get("task_id") or uuid()
root_id = body.options.get("root_id")
body.chord_size = self.__length_hint__()
options = dict(self.options, **options) if options else self.options
if options:
options.pop("task_id", None)
body.options.update(options)
results = header.freeze(group_id=group_id, chord=body, root_id=root_id).results
bodyres = body.freeze(task_id, root_id=root_id)
# Chains should not be passed to the header tasks. See #3771
options.pop("chain", None)
# Neither should chords, for deeply nested chords to work
options.pop("chord", None)
parent = app.backend.apply_chord(
header,
partial_args,
group_id,
body,
interval=interval,
countdown=countdown,
options=options,
max_retries=max_retries,
result=results,
)
bodyres.parent = parent
return bodyres
|
def run(
self,
header,
body,
partial_args,
app=None,
interval=None,
countdown=1,
max_retries=None,
eager=False,
task_id=None,
**options,
):
app = app or self._get_app(body)
group_id = header.options.get("task_id") or uuid()
root_id = body.options.get("root_id")
body.chord_size = self.__length_hint__()
options = dict(self.options, **options) if options else self.options
if options:
options.pop("task_id", None)
body.options.update(options)
results = header.freeze(group_id=group_id, chord=body, root_id=root_id).results
bodyres = body.freeze(task_id, root_id=root_id)
# Chains should not be passed to the header tasks. See #3771
options.pop("chain", None)
parent = app.backend.apply_chord(
header,
partial_args,
group_id,
body,
interval=interval,
countdown=countdown,
options=options,
max_retries=max_retries,
result=results,
)
bodyres.parent = parent
return bodyres
|
https://github.com/celery/celery/issues/3885
|
Traceback (most recent call last):
File "<console>", line 6, in <module>
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 182, in delay
return self.apply_async(partial_args, partial_kwargs)
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 1245, in apply_async
return self.run(tasks, body, args, task_id=task_id, **options)
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 1287, in run
result=results)
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/backends/redis.py", line 245, in apply_chord
return header(*partial_args, **options or {})
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 962, in __call__
return self.apply_async(partial_args, **options)
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 987, in apply_async
args=args, kwargs=kwargs, **options))
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 1063, in _apply_tasks
**options)
File "/home/maksym/projects/python_ve/lib/python3.4/site-packages/celery/canvas.py", line 1242, in apply_async
return (self.tasks[0] | body).set(task_id=task_id).apply_async(
KeyError: 0
|
KeyError
|
def get(
self,
timeout=None,
propagate=True,
interval=0.5,
callback=None,
no_ack=True,
on_message=None,
disable_sync_subtasks=True,
on_interval=None,
):
"""See :meth:`join`.
This is here for API compatibility with :class:`AsyncResult`,
in addition it uses :meth:`join_native` if available for the
current result backend.
"""
if self._cache is not None:
return self._cache
return (self.join_native if self.supports_native_join else self.join)(
timeout=timeout,
propagate=propagate,
interval=interval,
callback=callback,
no_ack=no_ack,
on_message=on_message,
disable_sync_subtasks=disable_sync_subtasks,
on_interval=on_interval,
)
|
def get(
self,
timeout=None,
propagate=True,
interval=0.5,
callback=None,
no_ack=True,
on_message=None,
disable_sync_subtasks=True,
):
"""See :meth:`join`.
This is here for API compatibility with :class:`AsyncResult`,
in addition it uses :meth:`join_native` if available for the
current result backend.
"""
if self._cache is not None:
return self._cache
return (self.join_native if self.supports_native_join else self.join)(
timeout=timeout,
propagate=propagate,
interval=interval,
callback=callback,
no_ack=no_ack,
on_message=on_message,
disable_sync_subtasks=disable_sync_subtasks,
)
|
https://github.com/celery/celery/issues/4274
|
ERROR:celery.app.builtins:Chord '70cb9fbe-4843-49ba-879e-6ffd76d63226' raised: TypeError("get() got an unexpected keyword argument 'on_interval'",)
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/celery/app/builtins.py", line 80, in unlock_chord
ret = j(timeout=3.0, propagate=True)
File "/usr/local/lib/python3.6/site-packages/celery/result.py", line 698, in join
interval=interval, no_ack=no_ack, on_interval=on_interval,
TypeError: get() got an unexpected keyword argument 'on_interval'
INFO:celery.app.trace:Task celery.chord_unlock[cac53aaf-c193-442c-b060-73577be77d0f] succeeded in 0.04921864200150594s: None
|
TypeError
|
def from_dict(cls, d, app=None):
options = d.copy()
args, options["kwargs"] = cls._unpack_args(**options["kwargs"])
return _upgrade(d, cls(*args, app=app, **options))
|
def from_dict(cls, d, app=None):
args, d["kwargs"] = cls._unpack_args(**d["kwargs"])
return _upgrade(d, cls(*args, app=app, **d))
|
https://github.com/celery/celery/issues/4223
|
celery worker -A tasks --loglevel INFO -c 10
-------------- celery@rbn-box v4.1.0 (latentcall)
---- **** -----
--- * *** * -- Linux-4.8.0-51-generic-x86_64-with-Ubuntu-16.10-yakkety 2017-08-22 21:47:22
-- * - **** ---
- ** ---------- [config]
- ** ---------- .> app: tasks:0x7f2bd97f73d0
- ** ---------- .> transport: amqp://celery:**@rbn-box:5672//
- ** ---------- .> results: mysql://celery:**@localhost/celery
- *** --- * --- .> concurrency: 10 (prefork)
-- ******* ---- .> task events: OFF (enable -E to monitor tasks in this worker)
--- ***** -----
-------------- [queues]
.> celery exchange=celery(direct) key=celery
[tasks]
. tasks.dummy
[2017-08-22 21:47:23,343: INFO/MainProcess] Connected to amqp://celery:**@rbn-box:5672//
[2017-08-22 21:47:23,352: INFO/MainProcess] mingle: searching for neighbors
[2017-08-22 21:47:24,376: INFO/MainProcess] mingle: all alone
[2017-08-22 21:47:24,407: INFO/MainProcess] celery@rbn-box ready.
[2017-08-22 21:47:36,462: INFO/MainProcess] Received task: tasks.dummy[831ff49c-cd08-4aa8-8ca5-2a3a553a5567]
[2017-08-22 21:47:36,464: INFO/MainProcess] Received task: tasks.dummy[0e1cf302-aa23-4a10-835f-76496940bd0f]
[2017-08-22 21:47:36,465: INFO/MainProcess] Received task: tasks.dummy[d02014c5-0b34-4a9c-a0d6-f8d1dc0e4a66]
[2017-08-22 21:47:36,468: INFO/MainProcess] Received task: celery.chord_unlock[5658c4ce-07dc-4208-acf2-ae00d64247b8] ETA:[2017-08-22 19:47:37.458625+00:00]
[2017-08-22 21:47:36,473: WARNING/ForkPoolWorker-10] c1-1
[2017-08-22 21:47:36,479: WARNING/ForkPoolWorker-9] c1-2
[2017-08-22 21:47:36,484: WARNING/ForkPoolWorker-8] c1-0
[2017-08-22 21:47:38,617: INFO/MainProcess] Received task: celery.chord_unlock[5658c4ce-07dc-4208-acf2-ae00d64247b8] ETA:[2017-08-22 19:47:39.597538+00:00]
[2017-08-22 21:47:38,623: INFO/ForkPoolWorker-6] Task celery.chord_unlock[5658c4ce-07dc-4208-acf2-ae00d64247b8] retry: Retry in 1s
[2017-08-22 21:47:40,504: ERROR/ForkPoolWorker-1] Task celery.chord_unlock[5658c4ce-07dc-4208-acf2-ae00d64247b8] raised unexpected: TypeError("'NoneType' object is not iterable",)
Traceback (most recent call last):
File "/home/rbn/.virtualenvs/celery-4.1/local/lib/python2.7/site-packages/celery-4.1.0-py2.7.egg/celery/app/trace.py", line 374, in trace_task
R = retval = fun(*args, **kwargs)
File "/home/rbn/.virtualenvs/celery-4.1/local/lib/python2.7/site-packages/celery-4.1.0-py2.7.egg/celery/app/trace.py", line 629, in __protected_call__
return self.run(*args, **kwargs)
File "/home/rbn/.virtualenvs/celery-4.1/local/lib/python2.7/site-packages/celery-4.1.0-py2.7.egg/celery/app/builtins.py", line 59, in unlock_chord
callback = maybe_signature(callback, app)
File "/home/rbn/.virtualenvs/celery-4.1/local/lib/python2.7/site-packages/celery-4.1.0-py2.7.egg/celery/canvas.py", line 1390, in maybe_signature
d = signature(d)
File "/home/rbn/.virtualenvs/celery-4.1/local/lib/python2.7/site-packages/celery-4.1.0-py2.7.egg/celery/canvas.py", line 1365, in signature
return Signature.from_dict(varies, app=app)
File "/home/rbn/.virtualenvs/celery-4.1/local/lib/python2.7/site-packages/celery-4.1.0-py2.7.egg/celery/canvas.py", line 147, in from_dict
return target_cls.from_dict(d, app=app)
File "/home/rbn/.virtualenvs/celery-4.1/local/lib/python2.7/site-packages/celery-4.1.0-py2.7.egg/celery/canvas.py", line 534, in from_dict
tasks = [maybe_signature(task, app=app) for task in tasks]
File "/home/rbn/.virtualenvs/celery-4.1/local/lib/python2.7/site-packages/celery-4.1.0-py2.7.egg/celery/canvas.py", line 1390, in maybe_signature
d = signature(d)
File "/home/rbn/.virtualenvs/celery-4.1/local/lib/python2.7/site-packages/celery-4.1.0-py2.7.egg/celery/canvas.py", line 1365, in signature
return Signature.from_dict(varies, app=app)
File "/home/rbn/.virtualenvs/celery-4.1/local/lib/python2.7/site-packages/celery-4.1.0-py2.7.egg/celery/canvas.py", line 147, in from_dict
return target_cls.from_dict(d, app=app)
File "/home/rbn/.virtualenvs/celery-4.1/local/lib/python2.7/site-packages/celery-4.1.0-py2.7.egg/celery/canvas.py", line 1178, in from_dict
return _upgrade(d, cls(*args, app=app, **d))
File "/home/rbn/.virtualenvs/celery-4.1/local/lib/python2.7/site-packages/celery-4.1.0-py2.7.egg/celery/canvas.py", line 1190, in __init__
dict(kwargs=kwargs, header=_maybe_group(header, app),
File "/home/rbn/.virtualenvs/celery-4.1/local/lib/python2.7/site-packages/celery-4.1.0-py2.7.egg/celery/canvas.py", line 904, in _maybe_group
tasks = [signature(t, app=app) for t in tasks]
TypeError: 'NoneType' object is not iterable
[2017-08-22 21:47:43,647: INFO/ForkPoolWorker-9] Task tasks.dummy[d02014c5-0b34-4a9c-a0d6-f8d1dc0e4a66] succeeded in 7.16881482498s: None
[2017-08-22 21:47:43,682: INFO/ForkPoolWorker-10] Task tasks.dummy[0e1cf302-aa23-4a10-835f-76496940bd0f] succeeded in 7.20971017997s: None
[2017-08-22 21:47:43,695: INFO/ForkPoolWorker-8] Task tasks.dummy[831ff49c-cd08-4aa8-8ca5-2a3a553a5567] succeeded in 7.21184302299s: None
|
TypeError
|
def _apply_callback(args, parser_error):
logger = get_logger()
patch_bin_path = None
if args.patch_bin is not None:
patch_bin_path = Path(args.patch_bin)
if not patch_bin_path.exists():
patch_bin_path = shutil.which(args.patch_bin)
if patch_bin_path:
patch_bin_path = Path(patch_bin_path)
else:
parser_error(
f'--patch-bin "{args.patch_bin}" is not a command or path to executable.'
)
for patch_dir in args.patches:
logger.info("Applying patches from %s", patch_dir)
apply_patches(
generate_patches_from_series(patch_dir, resolve=True),
args.target,
patch_bin_path=patch_bin_path,
)
|
def _apply_callback(args, parser_error):
logger = get_logger()
patch_bin_path = None
if args.patch_bin is not None:
patch_bin_path = Path(args.patch_bin)
if not patch_bin_path.exists():
patch_bin_path = shutil.which(args.patch_bin)
if patch_bin_path:
patch_bin_path = Path(patch_bin_path)
else:
parser_error(
f'--patch-bin "{args.patch_bin}" is not a command or path to executable.'
)
for patch_dir in args.patches:
logger.info("Applying patches from %s", patch_dir)
apply_patches(
generate_patches_from_series(patch_dir, resolve=True),
args.target,
patch_bin_path=args.patch_bin,
)
|
https://github.com/Eloston/ungoogled-chromium/issues/971
|
ERROR: 18 files could not be pruned.
some errors
INFO: Applying patches from /nix/store/i2ky6jiphp98mlxcc7z7ayr7rgi20z8b-ungoogled-chromium-80.0.3987.149-1/patches
Traceback (most recent call last):
File "/nix/store/i2ky6jiphp98mlxcc7z7ayr7rgi20z8b-ungoogled-chromium-80.0.3987.149-1/utils/.patches.py-wrapped", line 236, in <module>
main()
File "/nix/store/i2ky6jiphp98mlxcc7z7ayr7rgi20z8b-ungoogled-chromium-80.0.3987.149-1/utils/.patches.py-wrapped", line 232, in main
args.callback(args)
File "/nix/store/i2ky6jiphp98mlxcc7z7ayr7rgi20z8b-ungoogled-chromium-80.0.3987.149-1/utils/.patches.py-wrapped", line 189, in _apply_callback
patch_bin_path=args.patch_bin)
File "/nix/store/i2ky6jiphp98mlxcc7z7ayr7rgi20z8b-ungoogled-chromium-80.0.3987.149-1/utils/.patches.py-wrapped", line 111, in apply_patches
patch_bin_path = find_and_check_patch(patch_bin_path=patch_bin_path)
File "/nix/store/i2ky6jiphp98mlxcc7z7ayr7rgi20z8b-ungoogled-chromium-80.0.3987.149-1/utils/.patches.py-wrapped", line 62, in find_and_check_patch
if not patch_bin_path.exists():
AttributeError: 'str' object has no attribute 'exists'
|
AttributeError
|
def _apply_callback(args):
logger = get_logger()
for patch_dir in args.patches:
logger.info("Applying patches from %s", patch_dir)
apply_patches(
generate_patches_from_series(patch_dir, resolve=True),
args.target,
patch_bin_path=args.patch_bin,
)
|
def _apply_callback(args):
logger = get_logger()
for patch_dir in args.patches:
logger.info("Applying patches from %s", patch_dir)
apply_patches(
generate_patches_from_series(patch_dir, resolve=True),
args.directory,
patch_bin_path=args.patch_bin,
)
|
https://github.com/Eloston/ungoogled-chromium/issues/717
|
[/tmp/download]: rm -rf *
[/tmp/download]: git clone https://github.com/Eloston/ungoogled-chromium.git
Cloning into 'ungoogled-chromium'...
remote: Enumerating objects: 189, done.
remote: Counting objects: 100% (189/189), done.
remote: Compressing objects: 100% (146/146), done.
remote: Total 11664 (delta 67), reused 121 (delta 36), pack-reused 11475
Receiving objects: 100% (11664/11664), 6.10 MiB | 6.82 MiB/s, done.
Resolving deltas: 100% (7301/7301), done.
[/tmp/download]: cd ungoogled-chromium/
[/tmp/download/ungoogled-chromium]: ./utils/downloads.py retrieve -c build/downloads_cache -i downloads.ini
Traceback (most recent call last):
File "./utils/downloads.py", line 393, in <module>
main()
File "./utils/downloads.py", line 389, in main
args.callback(args)
File "./utils/downloads.py", line 330, in _retrieve_callback
DownloadInfo(args.ini), args.cache, args.show_progress, args.disable_ssl_verification)
File "./utils/downloads.py", line 236, in retrieve_downloads
raise FileNotFoundError(cache_dir)
FileNotFoundError: build/downloads_cache
[/tmp/download/ungoogled-chromium]:
|
FileNotFoundError
|
def main():
"""CLI Entrypoint"""
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
apply_parser = subparsers.add_parser(
"apply",
help="Applies patches (in GNU Quilt format) to the specified source tree",
)
apply_parser.add_argument(
"--patch-bin",
help="The GNU patch command to use. Omit to find it automatically.",
)
apply_parser.add_argument(
"target", type=Path, help="The directory tree to apply patches onto."
)
apply_parser.add_argument(
"patches",
type=Path,
nargs="+",
help="The directories containing patches to apply. They must be in GNU quilt format",
)
apply_parser.set_defaults(callback=_apply_callback)
merge_parser = subparsers.add_parser(
"merge", help="Merges patches directories in GNU quilt format"
)
merge_parser.add_argument(
"--prepend",
"-p",
action="store_true",
help=(
'If "destination" exists, prepend patches from sources into it.'
" By default, merging will fail if the destination already exists."
),
)
merge_parser.add_argument(
"destination",
type=Path,
help=(
"The directory to write the merged patches to. "
"The destination must not exist unless --prepend is specified."
),
)
merge_parser.add_argument(
"source", type=Path, nargs="+", help="The GNU quilt patches to merge."
)
merge_parser.set_defaults(callback=_merge_callback)
args = parser.parse_args()
args.callback(args)
|
def main():
"""CLI Entrypoint"""
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
apply_parser = subparsers.add_parser(
"apply", help="Applies a config bundle's patches to the specified source tree"
)
apply_parser.add_argument(
"--patch-bin",
help="The GNU patch command to use. Omit to find it automatically.",
)
apply_parser.add_argument(
"target", type=Path, help="The directory tree to apply patches onto."
)
apply_parser.add_argument(
"patches",
type=Path,
nargs="+",
help="The directories containing patches to apply. They must be in GNU quilt format",
)
apply_parser.set_defaults(callback=_apply_callback)
merge_parser = subparsers.add_parser(
"merge", help="Merges patches directories in GNU quilt format"
)
merge_parser.add_argument(
"--prepend",
"-p",
action="store_true",
help=(
'If "destination" exists, prepend patches from sources into it.'
" By default, merging will fail if the destination already exists."
),
)
merge_parser.add_argument(
"destination",
type=Path,
help=(
"The directory to write the merged patches to. "
"The destination must not exist unless --prepend is specified."
),
)
merge_parser.add_argument(
"source", type=Path, nargs="+", help="The GNU quilt patches to merge."
)
merge_parser.set_defaults(callback=_merge_callback)
args = parser.parse_args()
args.callback(args)
|
https://github.com/Eloston/ungoogled-chromium/issues/717
|
[/tmp/download]: rm -rf *
[/tmp/download]: git clone https://github.com/Eloston/ungoogled-chromium.git
Cloning into 'ungoogled-chromium'...
remote: Enumerating objects: 189, done.
remote: Counting objects: 100% (189/189), done.
remote: Compressing objects: 100% (146/146), done.
remote: Total 11664 (delta 67), reused 121 (delta 36), pack-reused 11475
Receiving objects: 100% (11664/11664), 6.10 MiB | 6.82 MiB/s, done.
Resolving deltas: 100% (7301/7301), done.
[/tmp/download]: cd ungoogled-chromium/
[/tmp/download/ungoogled-chromium]: ./utils/downloads.py retrieve -c build/downloads_cache -i downloads.ini
Traceback (most recent call last):
File "./utils/downloads.py", line 393, in <module>
main()
File "./utils/downloads.py", line 389, in main
args.callback(args)
File "./utils/downloads.py", line 330, in _retrieve_callback
DownloadInfo(args.ini), args.cache, args.show_progress, args.disable_ssl_verification)
File "./utils/downloads.py", line 236, in retrieve_downloads
raise FileNotFoundError(cache_dir)
FileNotFoundError: build/downloads_cache
[/tmp/download/ungoogled-chromium]:
|
FileNotFoundError
|
def main():
"""CLI Entrypoint"""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
"--downloads-cache",
type=Path,
metavar="PATH",
default="../../downloads_cache",
help="The path to the downloads cache",
)
parser.add_argument(
"--disable-ssl-verification",
action="store_true",
help="Disables SSL verification for downloading",
)
parser.add_argument(
"--7z-path",
dest="sevenz_path",
default=SEVENZIP_USE_REGISTRY,
help=(
'Command or path to 7-Zip\'s "7z" binary. If "_use_registry" is '
"specified, determine the path from the registry. Default: %(default)s"
),
)
args = parser.parse_args()
# Set common variables
bundle_path = Path(__file__).parent / "config_bundles/windows"
bundle = buildkit.config.ConfigBundle(bundle_path)
source_tree = Path(__file__).resolve().parent.parent
domsubcache = Path(__file__).parent / "domsubcache.tar.gz"
# Test environment
_test_python2(parser.error)
# Setup environment
if not args.downloads_cache.exists():
args.downloads_cache.mkdir()
_make_tmp_paths()
# Retrieve downloads
get_logger().info("Downloading required files...")
buildkit.downloads.retrieve_downloads(
bundle, args.downloads_cache, True, args.disable_ssl_verification
)
try:
buildkit.downloads.check_downloads(bundle, args.downloads_cache)
except buildkit.downloads.HashMismatchError as exc:
get_logger().error("File checksum does not match: %s", exc)
parser.exit(1)
# Unpack downloads
extractors = {
ExtractorEnum.SEVENZIP: args.sevenz_path,
}
get_logger().info("Unpacking downloads...")
buildkit.downloads.unpack_downloads(
bundle, args.downloads_cache, source_tree, extractors
)
# Prune binaries
unremovable_files = buildkit.extraction.prune_dir(source_tree, bundle.pruning)
if unremovable_files:
get_logger().error("Files could not be pruned: %s", unremovable_files)
parser.exit(1)
# Apply patches
buildkit.patches.apply_patches(
buildkit.patches.patch_paths_by_bundle(bundle), source_tree, patch_bin_path=None
)
# Substitute domains
buildkit.domain_substitution.apply_substitution(bundle, source_tree, domsubcache)
# Output args.gn
(source_tree / "out/Default").mkdir(parents=True)
(source_tree / "out/Default/args.gn").write_text(
"\n".join(bundle.gn_flags), encoding=ENCODING
)
(source_tree / "out/gn_build").mkdir(parents=True)
(source_tree / "out/gn_build/args.gn").write_text(
"\n".join(bundle.gn_flags), encoding=ENCODING
)
# Run GN bootstrap
_run_build_process(
shutil.which("python"),
"tools\\gn\\bootstrap\\bootstrap.py",
"-oout\\Default\\gn.exe",
"--build-path",
"out\\gn_build",
)
shutil.rmtree("out\\gn_build")
# Run gn gen
_run_build_process(
"out\\Default\\gn.exe", "gen", "out\\Default", "--fail-on-unused-args"
)
# Run ninja
_run_build_process(
"third_party\\ninja\\ninja.exe", "-C", "out\\Default", "chrome", "chromedriver"
)
|
def main():
"""CLI Entrypoint"""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
"--downloads-cache",
type=Path,
metavar="PATH",
default="../../downloads_cache",
help="The path to the downloads cache",
)
parser.add_argument(
"--disable-ssl-verification",
action="store_true",
help="Disables SSL verification for downloading",
)
parser.add_argument(
"--7z-path",
dest="sevenz_path",
default=SEVENZIP_USE_REGISTRY,
help=(
'Command or path to 7-Zip\'s "7z" binary. If "_use_registry" is '
"specified, determine the path from the registry. Default: %(default)s"
),
)
args = parser.parse_args()
# Set common variables
bundle_path = Path(__file__).parent / "config_bundles/windows"
bundle = buildkit.config.ConfigBundle(bundle_path)
source_tree = Path(__file__).resolve().parent.parent
domsubcache = Path(__file__).parent / "domsubcache.tar.gz"
# Test environment
_test_python2(parser.error)
# Setup environment
if not args.downloads_cache.exists():
args.downloads_cache.mkdir()
_make_tmp_paths()
# Retrieve downloads
get_logger().info("Downloading required files...")
buildkit.downloads.retrieve_downloads(
bundle, args.downloads_cache, True, args.disable_ssl_verification
)
try:
buildkit.downloads.check_downloads(bundle, args.downloads_cache)
except buildkit.downloads.HashMismatchError as exc:
get_logger().error("File checksum does not match: %s", exc)
parser.exit(1)
# Unpack downloads
extractors = {
ExtractorEnum.SEVENZIP: args.sevenz_path,
}
get_logger().info("Unpacking downloads...")
buildkit.downloads.unpack_downloads(
bundle, args.downloads_cache, source_tree, extractors
)
# Prune binaries
unremovable_files = buildkit.extraction.prune_dir(source_tree, bundle.pruning)
if unremovable_files:
get_logger().error("Files could not be pruned: %s", unremovable_files)
parser.exit(1)
# Apply patches
buildkit.patches.apply_patches(
buildkit.patches.patch_paths_by_bundle(bundle), source_tree, patch_bin_path=None
)
# Substitute domains
buildkit.domain_substitution.apply_substitution(bundle, source_tree, domsubcache)
# Output args.gn
(source_tree / "out/Default").mkdir(parents=True)
(source_tree / "out/Default/args.gn").write_text(
"\n".join(bundle.gn_flags), encoding=ENCODING
)
# Run GN bootstrap
_run_build_process(
shutil.which("python"),
"tools\\gn\\bootstrap\\bootstrap.py",
"-oout\\Default\\gn.exe",
)
# Run gn gen
_run_build_process(
"out\\Default\\gn.exe", "gen", "out\\Default", "--fail-on-unused-args"
)
# Run ninja
_run_build_process(
"third_party\\ninja\\ninja.exe", "-C", "out\\Default", "chrome", "chromedriver"
)
|
https://github.com/Eloston/ungoogled-chromium/issues/494
|
osuse-leap-42-3-2018:/home/intika/chromlast/ungoogled-chromium/build/src # ./tools/gn/bootstrap/bootstrap.py -o out/Default/gn
ninja: Entering directory `/home/intika/chromlast/ungoogled-chromium/build/src/out/Release/gn_build'
[171/171] LINK gn
ERROR at //build/config/sysroot.gni:57:5: Assertion failed.
assert(
^-----
Missing sysroot (//build/linux/debian_sid_amd64-sysroot). To fix, run: build/linux/sysroot_scripts/install-sysroot.py --arch=amd64
See //build/config/sysroot.gni:58:9:
exec_script("//build/dir_exists.py",
^-----------------------------------
This is where it was set.
See //chrome/installer/BUILD.gn:7:1: whence it was imported.
import("//build/config/sysroot.gni")
^----------------------------------
See //BUILD.gn:67:5: which caused the file to be included.
"//chrome/installer",
^-------------------
Traceback (most recent call last):
File "./tools/gn/bootstrap/bootstrap.py", line 101, in <module>
sys.exit(main(sys.argv[1:]))
File "./tools/gn/bootstrap/bootstrap.py", line 96, in main
'--args=%s' % gn_gen_args, "--root=" + SRC_ROOT
File "/usr/lib64/python2.7/subprocess.py", line 190, in check_call
raise CalledProcessError(retcode, cmd)
subprocess.CalledProcessError: Command '['out/Default/gn', 'gen', '/home/intika/chromlast/ungoogled-chromium/build/src/out/Release', '--args= is_debug=false', '--root=/home/intika/chromlast/ungoogled-chromium/build/src']' returned non-zero exit status 1
osuse-leap-42-3-2018:/home/intika/chromlast/ungoogled-chromium/build/src # ./tools/gn/bootstrap/bootstrap.py -o out/Default/gn
ninja: Entering directory `/home/intika/chromlast/ungoogled-chromium/build/src/out/Release/gn_build'
ninja: no work to do.
ERROR at //build/config/sysroot.gni:57:5: Assertion failed.
assert(
^-----
Missing sysroot (//build/linux/debian_sid_amd64-sysroot). To fix, run: build/linux/sysroot_scripts/install-sysroot.py --arch=amd64
See //build/config/sysroot.gni:58:9:
exec_script("//build/dir_exists.py",
^-----------------------------------
This is where it was set.
See //chrome/installer/BUILD.gn:7:1: whence it was imported.
import("//build/config/sysroot.gni")
^----------------------------------
See //BUILD.gn:67:5: which caused the file to be included.
"//chrome/installer",
^-------------------
Traceback (most recent call last):
File "./tools/gn/bootstrap/bootstrap.py", line 101, in <module>
sys.exit(main(sys.argv[1:]))
File "./tools/gn/bootstrap/bootstrap.py", line 96, in main
'--args=%s' % gn_gen_args, "--root=" + SRC_ROOT
File "/usr/lib64/python2.7/subprocess.py", line 190, in check_call
raise CalledProcessError(retcode, cmd)
subprocess.CalledProcessError: Command '['out/Default/gn', 'gen', '/home/intika/chromlast/ungoogled-chromium/build/src/out/Release', '--args= is_debug=false', '--root=/
|
subprocess.CalledProcessError
|
def get_builder(*args, **kwargs):
"""Intelligently returns an appropriate builder instance"""
if sys.platform == "win32":
from .windows import WindowsBuilder
cls = WindowsBuilder
elif sys.platform == "darwin":
from .macos import MacOSBuilder
cls = MacOSBuilder
elif sys.platform == "linux":
from ._external import distro
dist_id, dist_version, dist_codename = distro.linux_distribution(
full_distribution_name=False
)
if dist_id == "debian" and (
dist_codename == "stretch"
or dist_codename == "sid"
or dist_version == "testing"
):
from .debian import DebianStretchBuilder
cls = DebianStretchBuilder
elif dist_id == "ubuntu":
from .debian import UbuntuXenialBuilder
cls = UbuntuXenialBuilder
elif dist_id == "arch":
from .archlinux import ArchLinuxBuilder
cls = ArchLinuxBuilder
else:
from .linux import LinuxStaticBuilder
cls = LinuxStaticBuilder
else:
raise BuilderException(
"Unsupported sys.platform value'{}'".format(sys.platform)
)
return cls(*args, **kwargs)
|
def get_builder(*args, **kwargs):
"""Intelligently returns an appropriate builder instance"""
if sys.platform == "win32":
from .windows import WindowsBuilder
cls = WindowsBuilder
elif sys.platform == "darwin":
from .macos import MacOSBuilder
cls = MacOSBuilder
elif sys.platform == "linux":
from .debian import DebianBuilder, DebianStretchBuilder, UbuntuXenialBuilder
from ._external import distro
dist_id, dist_version, dist_codename = distro.linux_distribution(
full_distribution_name=False
)
if dist_id == "debian" and (
dist_codename == "stretch"
or dist_codename == "sid"
or dist_version == "testing"
):
cls = DebianStretchBuilder
elif dist_id == "ubuntu":
cls = UbuntuXenialBuilder
else:
from .linux import LinuxStaticBuilder
cls = LinuxStaticBuilder
else:
raise BuilderException(
"Unsupported sys.platform value'{}'".format(sys.platform)
)
return cls(*args, **kwargs)
|
https://github.com/Eloston/ungoogled-chromium/issues/68
|
Now at patch ../patches/ungoogled-chromium/remove-get-help-button.patch
2016-09-29 16:58:58,672 - INFO: Running gyp command...
2016-09-29 16:58:58,673 - DEBUG: Appending resources/common/gyp_flags
2016-09-29 16:58:58,673 - DEBUG: Appending resources/linux_static/gyp_flags
2016-09-29 16:58:58,673 - DEBUG: GYP command: build/gyp_chromium --depth=. --check -Denable_rlz=0 -Duse_official_google_api_keys=0 -Dproprietary_codecs=1 -Dfieldtrial_testing_like_official_build=1 -Denable_hangout_services_extension=0 -Ddisable_newlib=1 -Denable_webrtc=0 -Dremoting=0 -Ddefault_apps_list_linux_dest=[] -Ddisable_nacl=1 -Dtracing_like_official_build=1 -Ddisable_fatal_linker_warnings=1 -Denable_prod_wallet_service=0 -Denable_google_now=0 -Denable_automation=0 -Denable_one_click_signin=0 -Dremove_webcore_debug_symbols=1 -Ddisable_pnacl=1 -Dbuildtype=Official -Dwerror= -Denable_hotwording=0 -Denable_pre_sync_backup=0 -Dffmpeg_branding=Chrome -Dfastbuild=2 -Dlinux_strip_binary=1 -Denable_hidpi=1 -Denable_wifi_bootstrapping=0 -Denable_remoting_host=0 -Ddefault_apps_list=[] -Dsafe_browsing=0
Traceback (most recent call last):
File "build/gyp_chromium", line 12, in <module>
execfile(__file__ + '.py')
NameError: name 'execfile' is not defined
2016-09-29 16:58:58,694 - ERROR: GYP command returned non-zero exit code: 1
|
NameError
|
def generate_package(self):
build_file_subs = dict(
changelog_version="{}-{}".format(self.chromium_version, self.release_revision),
changelog_datetime=self._get_dpkg_changelog_datetime(),
build_output=str(self.build_output),
distribution_version=self._distro_version,
)
self.logger.info("Building Debian package...")
# TODO: Copy _dpkg_dir over each other in build/ similar to resource reading
distutils.dir_util.copy_tree(str(self._dpkg_dir), str(self._sandbox_dpkg_dir))
for old_path in self._sandbox_dpkg_dir.glob("*.in"):
new_path = self._sandbox_dpkg_dir / old_path.stem
old_path.replace(new_path)
with new_path.open("r+") as new_file:
content = self.BuildFileStringTemplate(new_file.read()).substitute(
**build_file_subs
)
new_file.seek(0)
new_file.write(content)
new_file.truncate()
result = self._run_subprocess(
["dpkg-buildpackage", "-b", "-uc"], cwd=str(self._sandbox_dir)
)
if not result.returncode == 0:
raise BuilderException(
"dpkg-buildpackage returned non-zero exit code: {}".format(
result.returncode
)
)
|
def generate_package(self):
build_file_subs = dict(
changelog_version="{}-{}".format(self.chromium_version, self.release_revision),
changelog_datetime=self._get_dpkg_changelog_datetime(),
build_output=str(self.build_output),
)
self.logger.info("Building Debian package...")
# TODO: Copy _dpkg_dir over each other in build/ similar to resource reading
distutils.dir_util.copy_tree(str(self._dpkg_dir), str(self._sandbox_dpkg_dir))
for old_path in self._sandbox_dpkg_dir.glob("*.in"):
new_path = self._sandbox_dpkg_dir / old_path.stem
old_path.replace(new_path)
with new_path.open("r+") as new_file:
content = self.BuildFileStringTemplate(new_file.read()).substitute(
**build_file_subs
)
new_file.seek(0)
new_file.write(content)
new_file.truncate()
result = self._run_subprocess(
["dpkg-buildpackage", "-b", "-uc"], cwd=str(self._sandbox_dir)
)
if not result.returncode == 0:
raise BuilderException(
"dpkg-buildpackage returned non-zero exit code: {}".format(
result.returncode
)
)
|
https://github.com/Eloston/ungoogled-chromium/issues/68
|
Now at patch ../patches/ungoogled-chromium/remove-get-help-button.patch
2016-09-29 16:58:58,672 - INFO: Running gyp command...
2016-09-29 16:58:58,673 - DEBUG: Appending resources/common/gyp_flags
2016-09-29 16:58:58,673 - DEBUG: Appending resources/linux_static/gyp_flags
2016-09-29 16:58:58,673 - DEBUG: GYP command: build/gyp_chromium --depth=. --check -Denable_rlz=0 -Duse_official_google_api_keys=0 -Dproprietary_codecs=1 -Dfieldtrial_testing_like_official_build=1 -Denable_hangout_services_extension=0 -Ddisable_newlib=1 -Denable_webrtc=0 -Dremoting=0 -Ddefault_apps_list_linux_dest=[] -Ddisable_nacl=1 -Dtracing_like_official_build=1 -Ddisable_fatal_linker_warnings=1 -Denable_prod_wallet_service=0 -Denable_google_now=0 -Denable_automation=0 -Denable_one_click_signin=0 -Dremove_webcore_debug_symbols=1 -Ddisable_pnacl=1 -Dbuildtype=Official -Dwerror= -Denable_hotwording=0 -Denable_pre_sync_backup=0 -Dffmpeg_branding=Chrome -Dfastbuild=2 -Dlinux_strip_binary=1 -Denable_hidpi=1 -Denable_wifi_bootstrapping=0 -Denable_remoting_host=0 -Ddefault_apps_list=[] -Dsafe_browsing=0
Traceback (most recent call last):
File "build/gyp_chromium", line 12, in <module>
execfile(__file__ + '.py')
NameError: name 'execfile' is not defined
2016-09-29 16:58:58,694 - ERROR: GYP command returned non-zero exit code: 1
|
NameError
|
def _run_subprocess(self, *args, append_environ=None, **kwargs):
new_env = dict(os.environ)
if "PATH" not in new_env:
new_env["PATH"] = os.defpath
if len(new_env["PATH"]) > 0 and not new_env["PATH"].startswith(os.pathsep):
new_env["PATH"] = os.pathsep + new_env["PATH"]
new_env["PATH"] = str(self._path_overrides_dir.absolute()) + new_env["PATH"]
if not append_environ is None:
new_env.update(append_environ)
kwargs["env"] = new_env
return _util.subprocess_run(*args, **kwargs)
|
def _run_subprocess(self, *args, append_environ=None, **kwargs):
new_env = dict(os.environ)
if "PATH" not in new_env:
new_env["PATH"] = os.defpath
if len(new_env["PATH"]) > 0 and not new_env["PATH"].startswith(os.pathsep):
new_env["PATH"] = os.pathsep + new_env["PATH"]
new_env["PATH"] = str(self._path_overrides_dir) + new_env["PATH"]
if not append_environ is None:
new_env.update(append_environ)
kwargs["env"] = new_env
return _util.subprocess_run(*args, **kwargs)
|
https://github.com/Eloston/ungoogled-chromium/issues/68
|
Now at patch ../patches/ungoogled-chromium/remove-get-help-button.patch
2016-09-29 16:58:58,672 - INFO: Running gyp command...
2016-09-29 16:58:58,673 - DEBUG: Appending resources/common/gyp_flags
2016-09-29 16:58:58,673 - DEBUG: Appending resources/linux_static/gyp_flags
2016-09-29 16:58:58,673 - DEBUG: GYP command: build/gyp_chromium --depth=. --check -Denable_rlz=0 -Duse_official_google_api_keys=0 -Dproprietary_codecs=1 -Dfieldtrial_testing_like_official_build=1 -Denable_hangout_services_extension=0 -Ddisable_newlib=1 -Denable_webrtc=0 -Dremoting=0 -Ddefault_apps_list_linux_dest=[] -Ddisable_nacl=1 -Dtracing_like_official_build=1 -Ddisable_fatal_linker_warnings=1 -Denable_prod_wallet_service=0 -Denable_google_now=0 -Denable_automation=0 -Denable_one_click_signin=0 -Dremove_webcore_debug_symbols=1 -Ddisable_pnacl=1 -Dbuildtype=Official -Dwerror= -Denable_hotwording=0 -Denable_pre_sync_backup=0 -Dffmpeg_branding=Chrome -Dfastbuild=2 -Dlinux_strip_binary=1 -Denable_hidpi=1 -Denable_wifi_bootstrapping=0 -Denable_remoting_host=0 -Ddefault_apps_list=[] -Dsafe_browsing=0
Traceback (most recent call last):
File "build/gyp_chromium", line 12, in <module>
execfile(__file__ + '.py')
NameError: name 'execfile' is not defined
2016-09-29 16:58:58,694 - ERROR: GYP command returned non-zero exit code: 1
|
NameError
|
def setup_environment_overrides(self):
"""Sets up overrides of the build environment"""
self.logger.info("Setting up environment overrides...")
for command_name in self.path_overrides:
self.logger.debug(
"Setting command '{}' as '{}'".format(
command_name, self.path_overrides[command_name]
)
)
self._write_path_override(command_name, self.path_overrides[command_name])
|
def setup_environment_overrides(self):
"""Sets up overrides of the build environment"""
for command_name in self.path_overrides:
self._write_path_override(command_name, self.path_overrides[command_name])
|
https://github.com/Eloston/ungoogled-chromium/issues/68
|
Now at patch ../patches/ungoogled-chromium/remove-get-help-button.patch
2016-09-29 16:58:58,672 - INFO: Running gyp command...
2016-09-29 16:58:58,673 - DEBUG: Appending resources/common/gyp_flags
2016-09-29 16:58:58,673 - DEBUG: Appending resources/linux_static/gyp_flags
2016-09-29 16:58:58,673 - DEBUG: GYP command: build/gyp_chromium --depth=. --check -Denable_rlz=0 -Duse_official_google_api_keys=0 -Dproprietary_codecs=1 -Dfieldtrial_testing_like_official_build=1 -Denable_hangout_services_extension=0 -Ddisable_newlib=1 -Denable_webrtc=0 -Dremoting=0 -Ddefault_apps_list_linux_dest=[] -Ddisable_nacl=1 -Dtracing_like_official_build=1 -Ddisable_fatal_linker_warnings=1 -Denable_prod_wallet_service=0 -Denable_google_now=0 -Denable_automation=0 -Denable_one_click_signin=0 -Dremove_webcore_debug_symbols=1 -Ddisable_pnacl=1 -Dbuildtype=Official -Dwerror= -Denable_hotwording=0 -Denable_pre_sync_backup=0 -Dffmpeg_branding=Chrome -Dfastbuild=2 -Dlinux_strip_binary=1 -Denable_hidpi=1 -Denable_wifi_bootstrapping=0 -Denable_remoting_host=0 -Ddefault_apps_list=[] -Dsafe_browsing=0
Traceback (most recent call last):
File "build/gyp_chromium", line 12, in <module>
execfile(__file__ + '.py')
NameError: name 'execfile' is not defined
2016-09-29 16:58:58,694 - ERROR: GYP command returned non-zero exit code: 1
|
NameError
|
def search_exists(self, index=None, doc_type=None, body=None, params=None):
"""
The exists API allows to easily determine if any matching documents
exist for a provided query.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/search-exists.html>`_
:arg index: A comma-separated list of indices to restrict the results
:arg doc_type: A comma-separated list of types to restrict the results
:arg body: A query to restrict the results specified with the Query DSL
(optional)
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes `_all`
string or when no indices have been specified)
:arg analyze_wildcard: Specify whether wildcard and prefix queries
should be analyzed (default: false)
:arg analyzer: The analyzer to use for the query string
:arg default_operator: The default operator for query string query (AND
or OR), default u'OR'
:arg df: The field to use as default where no field prefix is given in
the query string
:arg expand_wildcards: Whether to expand wildcard expression to concrete
indices that are open, closed or both., default u'open'
:arg ignore_unavailable: Whether specified concrete indices should be
ignored when unavailable (missing or closed)
:arg lenient: Specify whether format-based query failures (such as
providing text to a numeric field) should be ignored
:arg lowercase_expanded_terms: Specify whether query terms should be
lowercased
:arg min_score: Include only documents with a specific `_score` value in
the result
:arg preference: Specify the node or shard the operation should be
performed on (default: random)
:arg q: Query in the Lucene query string syntax
:arg routing: Specific routing value
"""
try:
self.transport.perform_request(
"POST",
_make_path(index, doc_type, "_search", "exists"),
params=params,
body=body,
)
except NotFoundError:
return False
return True
|
def search_exists(self, index=None, doc_type=None, body=None, params=None):
"""
The exists API allows to easily determine if any matching documents
exist for a provided query.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/search-exists.html>`_
:arg index: A comma-separated list of indices to restrict the results
:arg doc_type: A comma-separated list of types to restrict the results
:arg body: A query to restrict the results specified with the Query DSL
(optional)
:arg allow_no_indices: Whether to ignore if a wildcard indices
expression resolves into no concrete indices. (This includes `_all`
string or when no indices have been specified)
:arg analyze_wildcard: Specify whether wildcard and prefix queries
should be analyzed (default: false)
:arg analyzer: The analyzer to use for the query string
:arg default_operator: The default operator for query string query (AND
or OR), default u'OR'
:arg df: The field to use as default where no field prefix is given in
the query string
:arg expand_wildcards: Whether to expand wildcard expression to concrete
indices that are open, closed or both., default u'open'
:arg ignore_unavailable: Whether specified concrete indices should be
ignored when unavailable (missing or closed)
:arg lenient: Specify whether format-based query failures (such as
providing text to a numeric field) should be ignored
:arg lowercase_expanded_terms: Specify whether query terms should be
lowercased
:arg min_score: Include only documents with a specific `_score` value in
the result
:arg preference: Specify the node or shard the operation should be
performed on (default: random)
:arg q: Query in the Lucene query string syntax
:arg routing: Specific routing value
"""
_, data = self.transport.perform_request(
"POST",
_make_path(index, doc_type, "_search", "exists"),
params=params,
body=body,
)
return data
|
https://github.com/elastic/elasticsearch-py/issues/230
|
es.search_exists('my_index', 'my_type', params={'q': 'my_field:bar'})
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/elasticsearch/client/utils.py", line 68, in _wrapped
return func(*args, params=params, **kwargs)
File "/usr/local/lib/python2.7/dist-packages/elasticsearch/client/__init__.py", line 1238, in search_exists
doc_type, '_search', 'exists'), params=params, body=body)
File "/usr/local/lib/python2.7/dist-packages/elasticsearch/transport.py", line 307, in perform_request
status, headers, data = connection.perform_request(method, url, params, body, ignore=ignore, timeout=timeout)
File "/usr/local/lib/python2.7/dist-packages/elasticsearch/connection/http_urllib3.py", line 86, in perform_request
self._raise_error(response.status, raw_data)
File "/usr/local/lib/python2.7/dist-packages/elasticsearch/connection/base.py", line 102, in _raise_error
raise HTTP_EXCEPTIONS.get(status_code, TransportError)(status_code, error_message, additional_info)
elasticsearch.exceptions.NotFoundError: TransportError(404, u'{"exists":false}')
|
elasticsearch.exceptions.NotFoundError
|
def perform_request(self, method, url, params=None, body=None):
"""
Perform the actual request. Retrieve a connection from the connection
pool, pass all the information to it's perform_request method and
return the data.
If an exception was raised, mark the connection as failed and retry (up
to `max_retries` times).
If the operation was succesful and the connection used was previously
marked as dead, mark it as live, resetting it's failure count.
:arg method: HTTP method to use
:arg url: absolute url (without host) to target
:arg params: dictionary of query parameters, will be handed over to the
underlying :class:`~elasticsearch.Connection` class for serialization
:arg body: body of the request, will be serializes using serializer and
passed to the connection
"""
if body is not None:
body = self.serializer.dumps(body)
# some clients or environments don't support sending GET with body
if method in ("HEAD", "GET") and self.send_get_body_as != "GET":
# send it as post instead
if self.send_get_body_as == "POST":
method = "POST"
# or as source parameter
elif self.send_get_body_as == "source":
if params is None:
params = {}
params["source"] = body
body = None
if body is not None:
try:
body = body.encode("utf-8")
except (UnicodeDecodeError, AttributeError):
# bytes/str - no need to re-encode
pass
ignore = ()
timeout = None
if params:
timeout = params.pop("request_timeout", None)
ignore = params.pop("ignore", ())
if isinstance(ignore, int):
ignore = (ignore,)
for attempt in range(self.max_retries + 1):
connection = self.get_connection()
try:
status, headers, data = connection.perform_request(
method, url, params, body, ignore=ignore, timeout=timeout
)
except TransportError as e:
retry = False
if isinstance(e, ConnectionTimeout):
retry = self.retry_on_timeout
elif isinstance(e, ConnectionError):
retry = True
elif e.status_code in self.retry_on_status:
retry = True
if retry:
# only mark as dead if we are retrying
self.mark_dead(connection)
# raise exception on last retry
if attempt == self.max_retries:
raise
else:
raise
else:
# connection didn't fail, confirm it's live status
self.connection_pool.mark_live(connection)
if data:
data = self.deserializer.loads(data, headers.get("content-type"))
return status, data
|
def perform_request(self, method, url, params=None, body=None):
"""
Perform the actual request. Retrieve a connection from the connection
pool, pass all the information to it's perform_request method and
return the data.
If an exception was raised, mark the connection as failed and retry (up
to `max_retries` times).
If the operation was succesful and the connection used was previously
marked as dead, mark it as live, resetting it's failure count.
:arg method: HTTP method to use
:arg url: absolute url (without host) to target
:arg params: dictionary of query parameters, will be handed over to the
underlying :class:`~elasticsearch.Connection` class for serialization
:arg body: body of the request, will be serializes using serializer and
passed to the connection
"""
if body is not None:
body = self.serializer.dumps(body)
# some clients or environments don't support sending GET with body
if method == "GET" and self.send_get_body_as != "GET":
# send it as post instead
if self.send_get_body_as == "POST":
method = "POST"
# or as source parameter
elif self.send_get_body_as == "source":
if params is None:
params = {}
params["source"] = body
body = None
if body is not None:
try:
body = body.encode("utf-8")
except (UnicodeDecodeError, AttributeError):
# bytes/str - no need to re-encode
pass
ignore = ()
timeout = None
if params:
timeout = params.pop("request_timeout", None)
ignore = params.pop("ignore", ())
if isinstance(ignore, int):
ignore = (ignore,)
for attempt in range(self.max_retries + 1):
connection = self.get_connection()
try:
status, headers, data = connection.perform_request(
method, url, params, body, ignore=ignore, timeout=timeout
)
except TransportError as e:
retry = False
if isinstance(e, ConnectionTimeout):
retry = self.retry_on_timeout
elif isinstance(e, ConnectionError):
retry = True
elif e.status_code in self.retry_on_status:
retry = True
if retry:
# only mark as dead if we are retrying
self.mark_dead(connection)
# raise exception on last retry
if attempt == self.max_retries:
raise
else:
raise
else:
# connection didn't fail, confirm it's live status
self.connection_pool.mark_live(connection)
if data:
data = self.deserializer.loads(data, headers.get("content-type"))
return status, data
|
https://github.com/elastic/elasticsearch-py/issues/230
|
es.search_exists('my_index', 'my_type', params={'q': 'my_field:bar'})
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/elasticsearch/client/utils.py", line 68, in _wrapped
return func(*args, params=params, **kwargs)
File "/usr/local/lib/python2.7/dist-packages/elasticsearch/client/__init__.py", line 1238, in search_exists
doc_type, '_search', 'exists'), params=params, body=body)
File "/usr/local/lib/python2.7/dist-packages/elasticsearch/transport.py", line 307, in perform_request
status, headers, data = connection.perform_request(method, url, params, body, ignore=ignore, timeout=timeout)
File "/usr/local/lib/python2.7/dist-packages/elasticsearch/connection/http_urllib3.py", line 86, in perform_request
self._raise_error(response.status, raw_data)
File "/usr/local/lib/python2.7/dist-packages/elasticsearch/connection/base.py", line 102, in _raise_error
raise HTTP_EXCEPTIONS.get(status_code, TransportError)(status_code, error_message, additional_info)
elasticsearch.exceptions.NotFoundError: TransportError(404, u'{"exists":false}')
|
elasticsearch.exceptions.NotFoundError
|
def effect(self): # noqa: C901
if not self.selected:
inkex.errormsg(_("Please select one or more fill areas to break apart."))
return
elements = []
nodes = self.get_nodes()
for node in nodes:
if node.tag in SVG_PATH_TAG:
elements.append(EmbroideryElement(node))
for element in elements:
if not element.get_style("fill", "black"):
continue
# we don't want to touch valid elements
paths = element.flatten(element.parse_path())
try:
paths.sort(key=lambda point_list: Polygon(point_list).area, reverse=True)
polygon = MultiPolygon([(paths[0], paths[1:])])
if self.geom_is_valid(polygon):
continue
except ValueError:
pass
polygons = self.break_apart_paths(paths)
polygons = self.ensure_minimum_size(polygons, 5)
if self.options.method == 1:
polygons = self.combine_overlapping_polygons(polygons)
polygons = self.recombine_polygons(polygons)
if polygons:
self.polygons_to_nodes(polygons, element)
|
def effect(self):
if not self.selected:
inkex.errormsg(_("Please select one or more fill areas to break apart."))
return
elements = []
nodes = self.get_nodes()
for node in nodes:
if node.tag in SVG_PATH_TAG:
elements.append(EmbroideryElement(node))
for element in elements:
if not element.get_style("fill", "black"):
continue
# we don't want to touch valid elements
paths = element.flatten(element.parse_path())
paths.sort(key=lambda point_list: Polygon(point_list).area, reverse=True)
polygon = MultiPolygon([(paths[0], paths[1:])])
if self.geom_is_valid(polygon):
continue
polygons = self.break_apart_paths(paths)
polygons = self.ensure_minimum_size(polygons, 5)
if self.options.method == 1:
polygons = self.combine_overlapping_polygons(polygons)
polygons = self.recombine_polygons(polygons)
if polygons:
self.polygons_to_nodes(polygons, element)
|
https://github.com/inkstitch/inkstitch/issues/731
|
Ink/Stitch experienced an unexpected error.
If you'd like to help, please file an issue at https://github.com/inkstitch/inkstitch/issues and include the entire error description below:
Traceback (most recent call last):
File "/Users/runner/runners/2.169.1/work/inkstitch/inkstitch/inkstitch.py", line 44, in <module>
File "/Users/runner/runners/2.169.1/work/inkstitch/inkstitch/inkscape/share/extensions/inkex.py", line 283, in affect
File "/Users/runner/runners/2.169.1/work/inkstitch/inkstitch/lib/extensions/break_apart.py", line 33, in effect
File "/Users/runner/hostedtoolcache/Python/2.7.18/x64/lib/python2.7/site-packages/shapely/geometry/polygon.py", line 243, in __init__
File "/Users/runner/hostedtoolcache/Python/2.7.18/x64/lib/python2.7/site-packages/shapely/geometry/polygon.py", line 509, in geos_polygon_from_py
File "shapely/speedups/_speedups.pyx", line 340, in shapely.speedups._speedups.geos_linearring_from_py
ValueError: A LinearRing must have at least 3 coordinate tuples
|
ValueError
|
def break_apart_paths(self, paths):
polygons = []
for path in paths:
if len(path) < 3:
continue
linestring = LineString(path)
if not linestring.is_simple:
linestring = unary_union(linestring)
for polygon in polygonize(linestring):
polygons.append(polygon)
else:
polygon = Polygon(path).buffer(0)
polygons.append(polygon)
return polygons
|
def break_apart_paths(self, paths):
polygons = []
for path in paths:
linestring = LineString(path)
polygon = Polygon(path).buffer(0)
if not linestring.is_simple:
linestring = unary_union(linestring)
for polygon in polygonize(linestring):
polygons.append(polygon)
else:
polygons.append(polygon)
return polygons
|
https://github.com/inkstitch/inkstitch/issues/731
|
Ink/Stitch experienced an unexpected error.
If you'd like to help, please file an issue at https://github.com/inkstitch/inkstitch/issues and include the entire error description below:
Traceback (most recent call last):
File "/Users/runner/runners/2.169.1/work/inkstitch/inkstitch/inkstitch.py", line 44, in <module>
File "/Users/runner/runners/2.169.1/work/inkstitch/inkstitch/inkscape/share/extensions/inkex.py", line 283, in affect
File "/Users/runner/runners/2.169.1/work/inkstitch/inkstitch/lib/extensions/break_apart.py", line 33, in effect
File "/Users/runner/hostedtoolcache/Python/2.7.18/x64/lib/python2.7/site-packages/shapely/geometry/polygon.py", line 243, in __init__
File "/Users/runner/hostedtoolcache/Python/2.7.18/x64/lib/python2.7/site-packages/shapely/geometry/polygon.py", line 509, in geos_polygon_from_py
File "shapely/speedups/_speedups.pyx", line 340, in shapely.speedups._speedups.geos_linearring_from_py
ValueError: A LinearRing must have at least 3 coordinate tuples
|
ValueError
|
def get_doc_size(svg):
width = svg.get("width")
height = svg.get("height")
if width == "100%" and height == "100%":
# Some SVG editors set width and height to "100%". I can't find any
# solid documentation on how one is supposed to interpret that, so
# just ignore it and use the viewBox. That seems to have the intended
# result anyway.
width = None
height = None
if width is None or height is None:
# fall back to the dimensions from the viewBox
viewbox = get_viewbox(svg)
width = viewbox[2]
height = viewbox[3]
doc_width = convert_length(width)
doc_height = convert_length(height)
return doc_width, doc_height
|
def get_doc_size(svg):
width = svg.get("width")
height = svg.get("height")
if width is None or height is None:
# fall back to the dimensions from the viewBox
viewbox = get_viewbox(svg)
width = viewbox[2]
height = viewbox[3]
doc_width = convert_length(width)
doc_height = convert_length(height)
return doc_width, doc_height
|
https://github.com/inkstitch/inkstitch/issues/476
|
Traceback (most recent call last):
File "lib\elements\auto_fill.py", line 200, in to_patches
File "lib\elements\auto_fill.py", line 161, in fill_shape
File "lib\elements\auto_fill.py", line 153, in shrink_or_grow_shape
File "site-packages\backports\functools_lru_cache.py", line 113, in wrapper
File "lib\elements\fill.py", line 108, in shape
File "site-packages\backports\functools_lru_cache.py", line 113, in wrapper
File "lib\elements\fill.py", line 102, in paths
File "site-packages\backports\functools_lru_cache.py", line 113, in wrapper
File "lib\elements\element.py", line 208, in parse_path
File "lib\svg\path.py", line 9, in apply_transforms
File "lib\svg\path.py", line 40, in get_node_transform
File "site-packages\backports\functools_lru_cache.py", line 113, in wrapper
File "lib\svg\units.py", line 109, in get_viewbox_transform
File "site-packages\backports\functools_lru_cache.py", line 113, in wrapper
File "lib\svg\units.py", line 100, in get_doc_size
File "lib\svg\units.py", line 78, in convert_length
ValueError: Unknown unit: %
|
ValueError
|
def write_embroidery_file(file_path, stitch_plan, svg):
origin = get_origin(svg)
pattern = pyembroidery.EmbPattern()
for color_block in stitch_plan:
pattern.add_thread(color_block.color.pyembroidery_thread)
for stitch in color_block:
if stitch.stop:
jump_to_stop_point(pattern, svg)
command = get_command(stitch)
pattern.add_stitch_absolute(command, stitch.x, stitch.y)
pattern.add_stitch_absolute(pyembroidery.END, stitch.x, stitch.y)
# convert from pixels to millimeters
# also multiply by 10 to get tenths of a millimeter as required by pyembroidery
scale = 10 / PIXELS_PER_MM
settings = {
# correct for the origin
"translate": -origin,
# convert from pixels to millimeters
# also multiply by 10 to get tenths of a millimeter as required by pyembroidery
"scale": (scale, scale),
# This forces a jump at the start of the design and after each trim,
# even if we're close enough not to need one.
"full_jump": True,
}
try:
pyembroidery.write(pattern, file_path, settings)
except IOError as e:
# L10N low-level file error. %(error)s is (hopefully?) translated by
# the user's system automatically.
(
print >> sys.stderr,
_("Error writing to %(path)s: %(error)s")
% dict(path=file_path, error=e.message),
)
sys.exit(1)
|
def write_embroidery_file(file_path, stitch_plan, svg):
origin = get_origin(svg)
pattern = pyembroidery.EmbPattern()
for color_block in stitch_plan:
pattern.add_thread(color_block.color.pyembroidery_thread)
for stitch in color_block:
if stitch.stop:
jump_to_stop_point(pattern, svg)
command = get_command(stitch)
pattern.add_stitch_absolute(command, stitch.x, stitch.y)
pattern.add_stitch_absolute(pyembroidery.END, stitch.x, stitch.y)
# convert from pixels to millimeters
# also multiply by 10 to get tenths of a millimeter as required by pyembroidery
scale = 10 / PIXELS_PER_MM
settings = {
# correct for the origin
"translate": -origin,
# convert from pixels to millimeters
# also multiply by 10 to get tenths of a millimeter as required by pyembroidery
"scale": (scale, scale),
# This forces a jump at the start of the design and after each trim,
# even if we're close enough not to need one.
"full_jump": True,
}
pyembroidery.write(pattern, file_path, settings)
|
https://github.com/inkstitch/inkstitch/issues/279
|
Traceback (most recent call last):
File "inkstitch.py", line 24, in <module>
binary_name = script_name
File "inkscape-0.92.3\share\extensions\inkex.py", line 283, in affect
File "lib\extensions\embroider.py", line 85, in effect
File "lib\output.py", line 114, in write_embroidery_file
File "site-packages\pyembroidery\PyEmbroidery.py", line 593, in write
File "site-packages\pyembroidery\PyEmbroidery.py", line 541, in write_embroidery
IOError: [Errno 2] No such file or directory: 'C:\\Users\\%USERNAME%\\Documents\\Embroidery\\Quik Massage Logo - Embroidery.pes'
|
IOError
|
def get_output_path(self):
if self.options.output_file:
output_path = os.path.join(
os.path.expanduser(os.path.expandvars(self.options.path)),
self.options.output_file,
)
else:
csv_filename = "%s.%s" % (self.get_base_file_name(), self.options.output_format)
output_path = os.path.join(self.options.path, csv_filename)
def add_suffix(path, suffix):
if suffix > 0:
path = "%s.%s" % (path, suffix)
return path
def move_if_exists(path, suffix=0):
source = add_suffix(path, suffix)
if suffix >= self.options.max_backups:
return
dest = add_suffix(path, suffix + 1)
if os.path.exists(source):
move_if_exists(path, suffix + 1)
if os.path.exists(dest):
os.remove(dest)
os.rename(source, dest)
move_if_exists(output_path)
return output_path
|
def get_output_path(self):
if self.options.output_file:
output_path = os.path.join(self.options.path, self.options.output_file)
else:
csv_filename = "%s.%s" % (self.get_base_file_name(), self.options.output_format)
output_path = os.path.join(self.options.path, csv_filename)
def add_suffix(path, suffix):
if suffix > 0:
path = "%s.%s" % (path, suffix)
return path
def move_if_exists(path, suffix=0):
source = add_suffix(path, suffix)
if suffix >= self.options.max_backups:
return
dest = add_suffix(path, suffix + 1)
if os.path.exists(source):
move_if_exists(path, suffix + 1)
if os.path.exists(dest):
os.remove(dest)
os.rename(source, dest)
move_if_exists(output_path)
return output_path
|
https://github.com/inkstitch/inkstitch/issues/279
|
Traceback (most recent call last):
File "inkstitch.py", line 24, in <module>
binary_name = script_name
File "inkscape-0.92.3\share\extensions\inkex.py", line 283, in affect
File "lib\extensions\embroider.py", line 85, in effect
File "lib\output.py", line 114, in write_embroidery_file
File "site-packages\pyembroidery\PyEmbroidery.py", line 593, in write
File "site-packages\pyembroidery\PyEmbroidery.py", line 541, in write_embroidery
IOError: [Errno 2] No such file or directory: 'C:\\Users\\%USERNAME%\\Documents\\Embroidery\\Quik Massage Logo - Embroidery.pes'
|
IOError
|
def url_to_destination(url, service_type="external"):
parts = compat.urlparse.urlsplit(url)
hostname = parts.hostname
# preserve brackets for IPv6 URLs
if "://[" in url:
hostname = "[%s]" % hostname
try:
port = parts.port
except ValueError:
# Malformed port, just use None rather than raising an exception
port = None
default_port = default_ports.get(parts.scheme, None)
name = "%s://%s" % (parts.scheme, hostname)
resource = hostname
if not port and parts.scheme in default_ports:
port = default_ports[parts.scheme]
if port:
if port != default_port:
name += ":%d" % port
resource += ":%d" % port
return {"service": {"name": name, "resource": resource, "type": service_type}}
|
def url_to_destination(url, service_type="external"):
parts = compat.urlparse.urlsplit(url)
hostname = parts.hostname
# preserve brackets for IPv6 URLs
if "://[" in url:
hostname = "[%s]" % hostname
port = parts.port
default_port = default_ports.get(parts.scheme, None)
name = "%s://%s" % (parts.scheme, hostname)
resource = hostname
if not port and parts.scheme in default_ports:
port = default_ports[parts.scheme]
if port:
if port != default_port:
name += ":%d" % port
resource += ":%d" % port
return {"service": {"name": name, "resource": resource, "type": service_type}}
|
https://github.com/elastic/apm-agent-python/issues/798
|
Traceback (most recent call last):
File "project\venv\lib\site-packages\django\core\handlers\exception.py", line 34, in inner
response = get_response(request)
File "project\venv\lib\site-packages\django\core\handlers\base.py", line 115, in _get_response
response = self.process_exception_by_middleware(e, request)
File "project\venv\lib\site-packages\django\core\handlers\base.py", line 113, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "project\tool\src\company_tool\web\api\decorators.py", line 21, in wrapper_view_func
return view_func(request, json_data=data, *args, **kwargs)
File "project\venv\lib\site-packages\django\views\decorators\csrf.py", line 54, in wrapped_view
return view_func(*args, **kwargs)
File "project\venv\lib\site-packages\django\views\decorators\http.py", line 40, in inner
return func(request, *args, **kwargs)
File "project\tool\src\company_tool\web\api\views.py", line 27, in start
container_status = client.containers.run(image=container, command=arguments, detach=True, stdin_open=True, tty=True)
File "project\venv\lib\site-packages\docker\models\containers.py", line 802, in run
container = self.create(image=image, command=command,
File "project\venv\lib\site-packages\docker\models\containers.py", line 861, in create
resp = self.client.api.create_container(**create_kwargs)
File "project\venv\lib\site-packages\docker\api\container.py", line 430, in create_container
return self.create_container_from_config(config, name)
File "project\venv\lib\site-packages\docker\api\container.py", line 440, in create_container_from_config
res = self._post_json(u, data=config, params=params)
File "project\venv\lib\site-packages\docker\api\client.py", line 289, in _post_json
return self._post(url, data=json.dumps(data2), **kwargs)
File "project\venv\lib\site-packages\docker\utils\decorators.py", line 46, in inner
return f(self, *args, **kwargs)
File "project\venv\lib\site-packages\docker\api\client.py", line 226, in _post
return self.post(url, **self._set_request_timeout(kwargs))
File "project\venv\lib\site-packages\requests\sessions.py", line 578, in post
return self.request('POST', url, data=data, json=json, **kwargs)
File "project\venv\lib\site-packages\requests\sessions.py", line 530, in request
resp = self.send(prep, **send_kwargs)
File "project\venv\lib\site-packages\elasticapm\utils\wrapt\wrappers.py", line 561, in __call__
return self._self_wrapper(self.__wrapped__, self._self_instance,
File "project\venv\lib\site-packages\elasticapm\instrumentation\packages\base.py", line 210, in call_if_sampling
return self.call(module, method, wrapped, instance, args, kwargs)
File "project\venv\lib\site-packages\elasticapm\instrumentation\packages\requests.py", line 59, in call
return wrapped(*args, **kwargs)
File "project\venv\lib\site-packages\requests\sessions.py", line 643, in send
r = adapter.send(request, **kwargs)
File "project\venv\lib\site-packages\requests\adapters.py", line 439, in send
resp = conn.urlopen(
File "project\venv\lib\site-packages\elasticapm\utils\wrapt\wrappers.py", line 561, in __call__
return self._self_wrapper(self.__wrapped__, self._self_instance,
File "project\venv\lib\site-packages\elasticapm\instrumentation\packages\base.py", line 210, in call_if_sampling
return self.call(module, method, wrapped, instance, args, kwargs)
File "project\venv\lib\site-packages\elasticapm\instrumentation\packages\urllib3.py", line 74, in call
destination = url_to_destination(url)
File "project\venv\lib\site-packages\elasticapm\utils\__init__.py", line 146, in url_to_destination
port = parts.port
File "C:\Python38\lib\urllib\parse.py", line 174, in port
raise ValueError(message) from None
ValueError: Port could not be cast to integer value as 'None'
|
ValueError
|
def _process_queue(self):
def init_buffer():
buffer = gzip.GzipFile(
fileobj=compat.BytesIO(), mode="w", compresslevel=self._compress_level
)
data = (self._json_serializer({"metadata": self._metadata}) + "\n").encode(
"utf-8"
)
buffer.write(data)
return buffer
buffer = init_buffer()
buffer_written = False
# add some randomness to timeout to avoid stampedes of several workers that are booted at the same time
max_flush_time = (
self._max_flush_time * random.uniform(0.9, 1.1)
if self._max_flush_time
else None
)
while True:
since_last_flush = timeit.default_timer() - self._last_flush
# take max flush time into account to calculate timeout
timeout = max(0, max_flush_time - since_last_flush) if max_flush_time else None
timed_out = False
try:
event_type, data, flush = self._event_queue.get(block=True, timeout=timeout)
except compat.queue.Empty:
event_type, data, flush = None, None, None
timed_out = True
if event_type == "close":
if buffer_written:
self._flush(buffer)
self._flushed.set()
return # time to go home!
if data is not None:
buffer.write(
(self._json_serializer({event_type: data}) + "\n").encode("utf-8")
)
buffer_written = True
self._counts[event_type] += 1
queue_size = 0 if buffer.fileobj is None else buffer.fileobj.tell()
if flush:
logger.debug("forced flush")
elif timed_out or timeout == 0:
# update last flush time, as we might have waited for a non trivial amount of time in
# _event_queue.get()
since_last_flush = timeit.default_timer() - self._last_flush
logger.debug(
"flushing due to time since last flush %.3fs > max_flush_time %.3fs",
since_last_flush,
max_flush_time,
)
flush = True
elif self._max_buffer_size and queue_size > self._max_buffer_size:
logger.debug(
"flushing since queue size %d bytes > max_queue_size %d bytes",
queue_size,
self._max_buffer_size,
)
flush = True
if flush:
if buffer_written:
self._flush(buffer)
self._last_flush = timeit.default_timer()
buffer = init_buffer()
buffer_written = False
max_flush_time = (
self._max_flush_time * random.uniform(0.9, 1.1)
if self._max_flush_time
else None
)
self._flushed.set()
|
def _process_queue(self):
def init_buffer():
buffer = gzip.GzipFile(
fileobj=compat.BytesIO(), mode="w", compresslevel=self._compress_level
)
data = (self._json_serializer({"metadata": self._metadata}) + "\n").encode(
"utf-8"
)
buffer.write(data)
return buffer
buffer = init_buffer()
buffer_written = False
while True:
since_last_flush = timeit.default_timer() - self._last_flush
# take max flush time into account to calculate timeout
timeout = (
max(0, self._max_flush_time - since_last_flush)
if self._max_flush_time
else None
)
timed_out = False
try:
event_type, data, flush = self._event_queue.get(block=True, timeout=timeout)
except compat.queue.Empty:
event_type, data, flush = None, None, None
timed_out = True
if event_type == "close":
if buffer_written:
self._flush(buffer)
self._flushed.set()
return # time to go home!
if data is not None:
buffer.write(
(self._json_serializer({event_type: data}) + "\n").encode("utf-8")
)
buffer_written = True
self._counts[event_type] += 1
queue_size = 0 if buffer.fileobj is None else buffer.fileobj.tell()
if flush:
logger.debug("forced flush")
elif timed_out or timeout == 0:
# update last flush time, as we might have waited for a non trivial amount of time in
# _event_queue.get()
since_last_flush = timeit.default_timer() - self._last_flush
logger.debug(
"flushing due to time since last flush %.3fs > max_flush_time %.3fs",
since_last_flush,
self._max_flush_time,
)
flush = True
elif self._max_buffer_size and queue_size > self._max_buffer_size:
logger.debug(
"flushing since queue size %d bytes > max_queue_size %d bytes",
queue_size,
self._max_buffer_size,
)
flush = True
if flush:
if buffer_written:
self._flush(buffer)
self._last_flush = timeit.default_timer()
buffer = init_buffer()
buffer_written = False
self._flushed.set()
|
https://github.com/elastic/apm-agent-python/issues/409
|
web_1 | Failed to submit message: "Unable to reach APM Server: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response')) (url: http://192.168.16.100:8200/intake/v2/events)"
web_1 | Traceback (most recent call last):
web_1 | File "/usr/local/lib/python3.7/site-packages/urllib3/connectionpool.py", line 600, in urlopen
web_1 | chunked=chunked)
web_1 | File "/usr/local/lib/python3.7/site-packages/urllib3/connectionpool.py", line 384, in _make_request
web_1 | six.raise_from(e, None)
web_1 | File "<string>", line 2, in raise_from
web_1 | File "/usr/local/lib/python3.7/site-packages/urllib3/connectionpool.py", line 380, in _make_request
web_1 | httplib_response = conn.getresponse()
web_1 | File "/usr/local/lib/python3.7/site-packages/sentry_sdk/integrations/stdlib.py", line 48, in getresponse
web_1 | rv = real_getresponse(self, *args, **kwargs)
web_1 | File "/usr/local/lib/python3.7/http/client.py", line 1321, in getresponse
web_1 | response.begin()
web_1 | File "/usr/local/lib/python3.7/http/client.py", line 296, in begin
web_1 | version, status, reason = self._read_status()
web_1 | File "/usr/local/lib/python3.7/http/client.py", line 265, in _read_status
web_1 | raise RemoteDisconnected("Remote end closed connection without"
web_1 | http.client.RemoteDisconnected: Remote end closed connection without response
web_1 |
web_1 | During handling of the above exception, another exception occurred:
web_1 |
web_1 | Traceback (most recent call last):
web_1 | File "/usr/local/lib/python3.7/site-packages/elasticapm/transport/http.py", line 40, in send
web_1 | "POST", url, body=data, headers=self._headers, timeout=self._timeout, preload_content=False
web_1 | File "/usr/local/lib/python3.7/site-packages/urllib3/poolmanager.py", line 323, in urlopen
web_1 | response = conn.urlopen(method, u.request_uri, **kw)
web_1 | File "/usr/local/lib/python3.7/site-packages/elasticapm/instrumentation/packages/base.py", line 106, in call_if_sampling
web_1 | return wrapped(*args, **kwargs)
web_1 | File "/usr/local/lib/python3.7/site-packages/elasticapm/instrumentation/packages/base.py", line 106, in call_if_sampling
web_1 | return wrapped(*args, **kwargs)
web_1 | File "/usr/local/lib/python3.7/site-packages/urllib3/connectionpool.py", line 638, in urlopen
web_1 | _stacktrace=sys.exc_info()[2])
web_1 | File "/usr/local/lib/python3.7/site-packages/urllib3/util/retry.py", line 367, in increment
web_1 | raise six.reraise(type(error), error, _stacktrace)
web_1 | File "/usr/local/lib/python3.7/site-packages/urllib3/packages/six.py", line 685, in reraise
web_1 | raise value.with_traceback(tb)
web_1 | File "/usr/local/lib/python3.7/site-packages/urllib3/connectionpool.py", line 600, in urlopen
web_1 | chunked=chunked)
web_1 | File "/usr/local/lib/python3.7/site-packages/urllib3/connectionpool.py", line 384, in _make_request
web_1 | six.raise_from(e, None)
web_1 | File "<string>", line 2, in raise_from
web_1 | File "/usr/local/lib/python3.7/site-packages/urllib3/connectionpool.py", line 380, in _make_request
web_1 | httplib_response = conn.getresponse()
web_1 | File "/usr/local/lib/python3.7/site-packages/sentry_sdk/integrations/stdlib.py", line 48, in getresponse
web_1 | rv = real_getresponse(self, *args, **kwargs)
web_1 | File "/usr/local/lib/python3.7/http/client.py", line 1321, in getresponse
web_1 | response.begin()
web_1 | File "/usr/local/lib/python3.7/http/client.py", line 296, in begin
web_1 | version, status, reason = self._read_status()
web_1 | File "/usr/local/lib/python3.7/http/client.py", line 265, in _read_status
web_1 | raise RemoteDisconnected("Remote end closed connection without"
web_1 | urllib3.exceptions.ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
web_1 |
web_1 | During handling of the above exception, another exception occurred:
web_1 |
web_1 | Traceback (most recent call last):
web_1 | File "/usr/local/lib/python3.7/site-packages/elasticapm/transport/base.py", line 184, in send_sync
web_1 | self.sync_transport.send(self, data)
web_1 | File "/usr/local/lib/python3.7/site-packages/elasticapm/transport/http.py", line 53, in send
web_1 | raise TransportException(message, data, print_trace=print_trace)
web_1 | elasticapm.transport.base.TransportException: Unable to reach APM Server: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response')) (url: http://192.168.16.100:8200/intake/v2/events)
|
urllib3.exceptions.ProtocolError
|
def get_data_from_request(request, capture_body=False):
result = {
"env": dict(get_environ(request.environ)),
"headers": dict(get_headers(request.environ)),
"method": request.method,
"socket": {
"remote_address": request.environ.get("REMOTE_ADDR"),
"encrypted": request.is_secure,
},
"cookies": request.cookies,
}
if request.method in constants.HTTP_WITH_BODY:
body = None
if request.content_type == "application/x-www-form-urlencoded":
body = compat.multidict_to_dict(request.form)
elif request.content_type and request.content_type.startswith(
"multipart/form-data"
):
body = compat.multidict_to_dict(request.form)
if request.files:
body["_files"] = {
field: val[0].filename
if len(val) == 1
else [f.filename for f in val]
for field, val in compat.iterlists(request.files)
}
else:
try:
body = request.get_data(as_text=True)
except ClientDisconnected:
pass
if body is not None:
result["body"] = body if capture_body else "[REDACTED]"
result["url"] = get_url_dict(request.url)
return result
|
def get_data_from_request(request, capture_body=False):
result = {
"env": dict(get_environ(request.environ)),
"headers": dict(get_headers(request.environ)),
"method": request.method,
"socket": {
"remote_address": request.environ.get("REMOTE_ADDR"),
"encrypted": request.is_secure,
},
"cookies": request.cookies,
}
if request.method in constants.HTTP_WITH_BODY:
body = None
if request.content_type == "application/x-www-form-urlencoded":
body = compat.multidict_to_dict(request.form)
elif request.content_type and request.content_type.startswith(
"multipart/form-data"
):
body = compat.multidict_to_dict(request.form)
if request.files:
body["_files"] = {
field: val[0].filename
if len(val) == 1
else [f.filename for f in val]
for field, val in compat.iterlists(request.files)
}
else:
try:
body = request.data
except ClientDisconnected:
pass
if body is not None:
result["body"] = body if capture_body else "[REDACTED]"
result["url"] = get_url_dict(request.url)
return result
|
https://github.com/elastic/apm-agent-python/issues/286
|
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/flask/app.py", line 2292, in wsgi_app
response = self.full_dispatch_request()
File "/usr/local/lib/python3.6/site-packages/flask/app.py", line 1816, in full_dispatch_request
return self.finalize_request(rv)
File "/usr/local/lib/python3.6/site-packages/flask/app.py", line 1834, in finalize_request
request_finished.send(self, response=response)
File "/usr/local/lib/python3.6/site-packages/blinker/base.py", line 267, in send
for receiver in self.receivers_for(sender)]
File "/usr/local/lib/python3.6/site-packages/blinker/base.py", line 267, in <listcomp>
for receiver in self.receivers_for(sender)]
File "/usr/local/lib/python3.6/site-packages/elasticapm/contrib/flask/__init__.py", line 145, in request_finished
"request",
File "/usr/local/lib/python3.6/site-packages/elasticapm/traces.py", line 333, in set_context
data = data()
File "/usr/local/lib/python3.6/site-packages/elasticapm/contrib/flask/__init__.py", line 143, in <lambda>
request, capture_body=self.client.config.capture_body in ("transactions", "all")
File "/usr/local/lib/python3.6/site-packages/elasticapm/contrib/flask/utils.py", line 29, in get_data_from_request
body = request.data
File "/usr/local/lib/python3.6/site-packages/werkzeug/local.py", line 347, in __getattr__
return getattr(self._get_current_object(), name)
File "/usr/local/lib/python3.6/site-packages/flask_api/request.py", line 24, in data
self._parse()
File "/usr/local/lib/python3.6/site-packages/flask_api/request.py", line 53, in _parse
ret = parser.parse(self.stream, media_type, **options)
File "/usr/local/lib/python3.6/site-packages/flask_api/parsers.py", line 32, in parse
raise exceptions.ParseError(msg)
flask_api.exceptions.ParseError: JSON parse error - Expecting value: line 1 column 1 (char 0)
|
flask_api.exceptions.ParseError
|
def get_name_from_func(func):
# partials don't have `__module__` or `__name__`, so we use the values from the "inner" function
if isinstance(func, partial_types):
return "partial({})".format(get_name_from_func(func.func))
elif hasattr(func, "_partialmethod") and hasattr(func._partialmethod, "func"):
return "partial({})".format(get_name_from_func(func._partialmethod.func))
module = func.__module__
if hasattr(func, "__name__"):
view_name = func.__name__
else: # Fall back if there's no __name__
view_name = func.__class__.__name__
return "{0}.{1}".format(module, view_name)
|
def get_name_from_func(func):
# If no view was set we ignore the request
module = func.__module__
if hasattr(func, "__name__"):
view_name = func.__name__
else: # Fall back if there's no __name__
view_name = func.__class__.__name__
return "{0}.{1}".format(module, view_name)
|
https://github.com/elastic/apm-agent-python/issues/293
|
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/elasticapm/contrib/django/middleware/__init__.py", line 154, in process_response
transaction_name = get_name_from_func(request._elasticapm_view_func)
File "/usr/local/lib/python3.6/site-packages/elasticapm/utils/__init__.py", line 42, in get_name_from_func
module = func.__module__
AttributeError: 'functools.partial' object has no attribute '__module__'
|
AttributeError
|
def __init__(self, config=None, **defaults):
# configure loggers first
cls = self.__class__
self.logger = logging.getLogger("%s.%s" % (cls.__module__, cls.__name__))
self.error_logger = logging.getLogger("elasticapm.errors")
self.state = ClientState()
self.instrumentation_store = None
self.processors = []
self.filter_exception_types_dict = {}
self._send_timer = None
self._transports = {}
self._service_info = None
self.config = Config(config, default_dict=defaults)
if self.config.errors:
for msg in self.config.errors.values():
self.error_logger.error(msg)
self.config.disable_send = True
self._transport_class = import_string(self.config.transport_class)
for exc_to_filter in self.config.filter_exception_types or []:
exc_to_filter_type = exc_to_filter.split(".")[-1]
exc_to_filter_module = ".".join(exc_to_filter.split(".")[:-1])
self.filter_exception_types_dict[exc_to_filter_type] = exc_to_filter_module
self.processors = (
[import_string(p) for p in self.config.processors]
if self.config.processors
else []
)
if platform.python_implementation() == "PyPy":
# PyPy introduces a `_functools.partial.__call__` frame due to our use
# of `partial` in AbstractInstrumentedModule
skip_modules = ("elasticapm.", "_functools")
else:
skip_modules = ("elasticapm.",)
def frames_collector_func():
return self._get_stack_info_for_trace(
stacks.iter_stack_frames(skip_top_modules=skip_modules),
library_frame_context_lines=self.config.source_lines_span_library_frames,
in_app_frame_context_lines=self.config.source_lines_span_app_frames,
with_locals=self.config.collect_local_variables in ("all", "transactions"),
locals_processor_func=lambda local_var: varmap(
lambda k, v: shorten(
v,
list_length=self.config.local_var_list_max_length,
string_length=self.config.local_var_max_length,
),
local_var,
),
)
self.instrumentation_store = TransactionsStore(
frames_collector_func=frames_collector_func,
collect_frequency=self.config.flush_interval,
sample_rate=self.config.transaction_sample_rate,
max_spans=self.config.transaction_max_spans,
span_frames_min_duration=self.config.span_frames_min_duration_ms,
max_queue_size=self.config.max_queue_size,
ignore_patterns=self.config.transactions_ignore_patterns,
)
self.include_paths_re = (
stacks.get_path_regex(self.config.include_paths)
if self.config.include_paths
else None
)
self.exclude_paths_re = (
stacks.get_path_regex(self.config.exclude_paths)
if self.config.exclude_paths
else None
)
compat.atexit_register(self.close)
|
def __init__(self, config=None, **defaults):
# configure loggers first
cls = self.__class__
self.logger = logging.getLogger("%s.%s" % (cls.__module__, cls.__name__))
self.error_logger = logging.getLogger("elasticapm.errors")
self.state = ClientState()
self.instrumentation_store = None
self.processors = []
self.filter_exception_types_dict = {}
self._send_timer = None
self._transports = {}
self._service_info = None
self.config = Config(config, default_dict=defaults)
if self.config.errors:
for msg in self.config.errors.values():
self.error_logger.error(msg)
self.config.disable_send = True
return
self._transport_class = import_string(self.config.transport_class)
for exc_to_filter in self.config.filter_exception_types or []:
exc_to_filter_type = exc_to_filter.split(".")[-1]
exc_to_filter_module = ".".join(exc_to_filter.split(".")[:-1])
self.filter_exception_types_dict[exc_to_filter_type] = exc_to_filter_module
self.processors = (
[import_string(p) for p in self.config.processors]
if self.config.processors
else []
)
if platform.python_implementation() == "PyPy":
# PyPy introduces a `_functools.partial.__call__` frame due to our use
# of `partial` in AbstractInstrumentedModule
skip_modules = ("elasticapm.", "_functools")
else:
skip_modules = ("elasticapm.",)
def frames_collector_func():
return self._get_stack_info_for_trace(
stacks.iter_stack_frames(skip_top_modules=skip_modules),
library_frame_context_lines=self.config.source_lines_span_library_frames,
in_app_frame_context_lines=self.config.source_lines_span_app_frames,
with_locals=self.config.collect_local_variables in ("all", "transactions"),
locals_processor_func=lambda local_var: varmap(
lambda k, v: shorten(
v,
list_length=self.config.local_var_list_max_length,
string_length=self.config.local_var_max_length,
),
local_var,
),
)
self.instrumentation_store = TransactionsStore(
frames_collector_func=frames_collector_func,
collect_frequency=self.config.flush_interval,
sample_rate=self.config.transaction_sample_rate,
max_spans=self.config.transaction_max_spans,
span_frames_min_duration=self.config.span_frames_min_duration_ms,
max_queue_size=self.config.max_queue_size,
ignore_patterns=self.config.transactions_ignore_patterns,
)
self.include_paths_re = (
stacks.get_path_regex(self.config.include_paths)
if self.config.include_paths
else None
)
self.exclude_paths_re = (
stacks.get_path_regex(self.config.exclude_paths)
if self.config.exclude_paths
else None
)
compat.atexit_register(self.close)
|
https://github.com/elastic/apm-agent-python/issues/48
|
Unable to process log entry: 'DjangoClient' object has no attribute 'filter_exception_types_dict'
Traceback (most recent call last):
File "/Users/simitt/.pyenv/versions/django-test-3.6.2/lib/python3.6/site-packages/django/core/handlers/exception.py", line 41, in inner
response = get_response(request)
File "/Users/simitt/.pyenv/versions/django-test-3.6.2/lib/python3.6/site-packages/django/utils/deprecation.py", line 138, in __call__
response = self.process_request(request)
File "/Users/simitt/.pyenv/versions/django-test-3.6.2/lib/python3.6/site-packages/elasticapm/contrib/django/middleware/__init__.py", line 157, in process_request
self.client.begin_transaction("request")
File "/Users/simitt/.pyenv/versions/django-test-3.6.2/lib/python3.6/site-packages/elasticapm/base.py", line 442, in begin_transaction
self.instrumentation_store.begin_transaction(transaction_type)
AttributeError: 'DjangoClient' object has no attribute 'instrumentation_store'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/simitt/.pyenv/versions/django-test-3.6.2/lib/python3.6/site-packages/elasticapm/contrib/django/handlers.py", line 59, in actually_do_stuff
client.capture('Exception', exc_info=exc_info, request=request)
File "/Users/simitt/.pyenv/versions/django-test-3.6.2/lib/python3.6/site-packages/elasticapm/contrib/django/client.py", line 200, in capture
result = super(DjangoClient, self).capture(event_type, **kwargs)
File "/Users/simitt/.pyenv/versions/django-test-3.6.2/lib/python3.6/site-packages/elasticapm/base.py", line 279, in capture
data = self.build_msg_for_logging(event_type, data, date, extra, stack, **kwargs)
File "/Users/simitt/.pyenv/versions/django-test-3.6.2/lib/python3.6/site-packages/elasticapm/base.py", line 178, in build_msg_for_logging
if self._filter_exception_type(result):
File "/Users/simitt/.pyenv/versions/django-test-3.6.2/lib/python3.6/site-packages/elasticapm/base.py", line 332, in _filter_exception_type
if exc_type in self.filter_exception_types_dict:
AttributeError: 'DjangoClient' object has no attribute 'filter_exception_types_dict'
|
AttributeError
|
def handle_check(self, command, **options):
"""Check your settings for common misconfigurations"""
passed = True
client = DjangoClient()
# check if org/app and token are set:
is_set = lambda x: x and x != "None"
values = [client.config.service_name, client.config.secret_token]
if all(map(is_set, values)):
self.write("Service name and secret token are set, good job!", green)
else:
passed = False
self.write("Configuration errors detected!", red, ending="\n\n")
if not is_set(client.config.service_name):
self.write(" * SERVICE_NAME not set! ", red, ending="\n")
if not is_set(client.config.secret_token):
self.write(" * SECRET_TOKEN not set!", red, ending="\n")
self.write(CONFIG_EXAMPLE)
self.write("")
# check if we're disabled due to DEBUG:
if settings.DEBUG:
if getattr(settings, "ELASTIC_APM", {}).get("DEBUG"):
self.write(
"Note: even though you are running in DEBUG mode, we will "
'send data to the APM Server, because you set ELASTIC_APM["DEBUG"] to '
"True. You can disable ElasticAPM while in DEBUG mode like this"
"\n\n",
yellow,
)
self.write(
" ELASTIC_APM = {\n"
' "DEBUG": False,\n'
" # your other ELASTIC_APM settings\n"
" }"
)
else:
self.write(
"Looks like you're running in DEBUG mode. ElasticAPM will NOT "
"gather any data while DEBUG is set to True.\n\n",
red,
)
self.write(
"If you want to test ElasticAPM while DEBUG is set to True, you"
" can force ElasticAPM to gather data by setting"
' ELASTIC_APM["DEBUG"] to True, like this\n\n'
" ELASTIC_APM = {\n"
' "DEBUG": True,\n'
" # your other ELASTIC_APM settings\n"
" }"
)
passed = False
else:
self.write("DEBUG mode is disabled! Looking good!", green)
self.write("")
# check if middleware is set, and if it is at the first position
middleware_attr = (
"MIDDLEWARE"
if getattr(settings, "MIDDLEWARE", None) is not None
else "MIDDLEWARE_CLASSES"
)
middleware = list(getattr(settings, middleware_attr))
try:
pos = middleware.index("elasticapm.contrib.django.middleware.TracingMiddleware")
if pos == 0:
self.write("Tracing middleware is configured! Awesome!", green)
else:
self.write(
"Tracing middleware is configured, but not at the first position\n",
yellow,
)
self.write(
"ElasticAPM works best if you add it at the top of your %s setting"
% middleware_attr
)
except ValueError:
self.write("Tracing middleware not configured!", red)
self.write(
"\n"
"Add it to your %(name)s setting like this:\n\n"
" %(name)s = (\n"
' "elasticapm.contrib.django.middleware.TracingMiddleware",\n'
" # your other middleware classes\n"
" )\n" % {"name": middleware_attr}
)
self.write("")
if passed:
self.write("Looks like everything should be ready!", green)
else:
self.write("Please fix the above errors.", red)
self.write("")
return passed
|
def handle_check(self, command, **options):
"""Check your settings for common misconfigurations"""
passed = True
client = DjangoClient()
# check if org/app and token are set:
is_set = lambda x: x and x != "None"
values = [client.config.service_name, client.config.secret_token]
if all(map(is_set, values)):
self.write("Service name and secret token are set, good job!", green)
else:
passed = False
self.write("Configuration errors detected!", red, ending="\n\n")
if not is_set(client.config.service_name):
self.write(" * SERVICE_NAME not set! ", red, ending="\n")
if not is_set(client.config.secret_token):
self.write(" * SECRET_TOKEN not set!", red, ending="\n")
self.write(CONFIG_EXAMPLE)
self.write("")
# check if we're disabled due to DEBUG:
if settings.DEBUG:
if getattr(settings, "ELASTIC_APM", {}).get("DEBUG"):
self.write(
"Note: even though you are running in DEBUG mode, we will "
'send data to the APM Server, because you set ELASTIC_APM["DEBUG"] to '
"True. You can disable ElasticAPM while in DEBUG mode like this"
"\n\n",
yellow,
)
self.write(
" ELASTIC_APM = {\n"
' "DEBUG": False,\n'
" # your other ELASTIC_APM settings\n"
" }"
)
else:
self.write(
"Looks like you're running in DEBUG mode. ElasticAPM will NOT "
"gather any data while DEBUG is set to True.\n\n",
red,
)
self.write(
"If you want to test ElasticAPM while DEBUG is set to True, you"
" can force ElasticAPM to gather data by setting"
' ELASTIC_APM["DEBUG"] to True, like this\n\n'
" ELASTIC_APM = {\n"
' "DEBUG": True,\n'
" # your other ELASTIC_APM settings\n"
" }"
)
passed = False
else:
self.write("DEBUG mode is disabled! Looking good!", green)
self.write("")
# check if middleware is set, and if it is at the first position
middleware = list(
getattr(settings, "MIDDLEWARE", getattr(settings, "MIDDLEWARE_CLASSES", []))
)
try:
pos = middleware.index("elasticapm.contrib.django.middleware.TracingMiddleware")
if pos == 0:
self.write("Tracing middleware is configured! Awesome!", green)
else:
self.write(
"Tracing middleware is configured, but not at the first position\n",
yellow,
)
self.write(
"ElasticAPM works best if you add it at the top of your "
"MIDDLEWARE_CLASSES"
)
except ValueError:
self.write("Tracing middleware not configured!", red)
self.write(
"\n"
"Add it to your MIDDLEWARE_CLASSES like this:\n\n"
" MIDDLEWARE_CLASSES = (\n"
' "elasticapm.contrib.django.middleware.TracingMiddleware",\n'
" # your other middleware classes\n"
" )\n"
)
self.write("")
if passed:
self.write("Looks like everything should be ready!", green)
else:
self.write("Please fix the above errors.", red)
self.write("")
return passed
|
https://github.com/elastic/apm-agent-python/issues/188
|
$ python manage.py elasticapm check
Service name and secret token are set, good job!
DEBUG mode is disabled! Looking good!
Traceback (most recent call last):
File "manage.py", line 12, in <module>
execute_from_command_line(sys.argv)
File "/home/ubuntu/.virtualenvs/backend/lib/python3.5/site-packages/django/core/management/__init__.py", line 363, in execute_from_command_line
utility.execute()
File "/home/ubuntu/.virtualenvs/backend/lib/python3.5/site-packages/django/core/management/__init__.py", line 355, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/home/ubuntu/.virtualenvs/backend/lib/python3.5/site-packages/django/core/management/base.py", line 283, in run_from_argv
self.execute(*args, **cmd_options)
File "/home/ubuntu/.virtualenvs/backend/lib/python3.5/site-packages/django/core/management/base.py", line 330, in execute
output = self.handle(*args, **options)
File "/home/ubuntu/.virtualenvs/backend/lib/python3.5/site-packages/elasticapm/contrib/django/management/commands/elasticapm.py", line 104, in handle
)(self, subcommand, **options)
File "/home/ubuntu/.virtualenvs/backend/lib/python3.5/site-packages/elasticapm/contrib/django/management/commands/elasticapm.py", line 207, in handle_check
middleware = list(getattr(settings, 'MIDDLEWARE', getattr(settings, 'MIDDLEWARE_CLASSES', [])))
TypeError: 'NoneType' object is not iterable
Sentry is attempting to send 1 pending error messages
Waiting up to 10 seconds
Press Ctrl-C to quit
|
TypeError
|
def get_data_from_response(self, response):
result = {"status_code": response.status_code}
# Django does not expose a public API to iterate over the headers of a response.
# Unfortunately, we have to access the private _headers dictionary here, which is
# a mapping of the form lower-case-header: (Original-Header, value)
if getattr(response, "_headers", {}):
result["headers"] = {key: value[1] for key, value in response._headers.items()}
return result
|
def get_data_from_response(self, response):
result = {"status_code": str(response.status_code)}
# Django does not expose a public API to iterate over the headers of a response.
# Unfortunately, we have to access the private _headers dictionary here, which is
# a mapping of the form lower-case-header: (Original-Header, value)
if getattr(response, "_headers", {}):
result["headers"] = {key: value[1] for key, value in response._headers.items()}
return result
|
https://github.com/elastic/apm-agent-python/issues/47
|
HTTP 400:
Failed to submit message: '<no message value>'
Traceback (most recent call last):
File "/Users/simitt/.pyenv/versions/django-test-3.6.2/lib/python3.6/site-packages/elasticapm/transport/http_urllib3.py", line 75, in send_sync
url = Urllib3Transport.send(self, data, headers)
File "/Users/simitt/.pyenv/versions/django-test-3.6.2/lib/python3.6/site-packages/elasticapm/transport/http_urllib3.py", line 61, in send
raise TransportException(message, data, print_trace=print_trace)
elasticapm.transport.base.TransportException: HTTP 400:
|
elasticapm.transport.base.TransportException
|
def __init__(self, *args):
"""
Creates a new MetaDict instance.
"""
# Store all keys as lower-case to allow for case-insensitive indexing
# OrderedDict can be instantiated from a list of lists or a tuple of tuples
tags = dict()
if args:
args = list(args)
adict = args[0]
if isinstance(adict, list) or isinstance(adict, tuple):
items = adict
elif isinstance(adict, Mapping):
# Cast to a dict here, since a Mapping doesn't have to define
# a .items() method (but has enough methods to be converted to
# a dict)
items = dict(adict).items()
else:
raise TypeError(
f"Can not create a MetaDict from this input of type {type(adict)}"
)
self._check_str_keys(items)
tags = OrderedDict((k.lower(), v) for k, v in items)
args[0] = tags
super().__init__(*args)
# Use `copy=True` to avoid mutating the caller's keycomments
# dictionary (if they provided one).
self._prune_keycomments(copy=True)
|
def __init__(self, *args):
"""
Creates a new MetaDict instance.
"""
# Store all keys as lower-case to allow for case-insensitive indexing
# OrderedDict can be instantiated from a list of lists or a tuple of tuples
tags = dict()
if args:
args = list(args)
adict = args[0]
if isinstance(adict, list) or isinstance(adict, tuple):
items = adict
elif isinstance(adict, dict):
items = adict.items()
else:
raise TypeError(
f"Can not create a MetaDict from this input of type {type(adict)}"
)
self._check_str_keys(items)
tags = OrderedDict((k.lower(), v) for k, v in items)
args[0] = tags
super().__init__(*args)
# Use `copy=True` to avoid mutating the caller's keycomments
# dictionary (if they provided one).
self._prune_keycomments(copy=True)
|
https://github.com/sunpy/sunpy/issues/5043
|
In [1]: from astropy.io import fits
...: from sunpy.map.sources import SUVIMap
...: hdu = fits.open('dr_suvi-l2-ci195_g16_s20200220T180000Z_e20200220T180400Z_v1-0-0.fits')
...: suvimap = SUVIMap(hdu[1].data, hdu[1].header)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-2-777075e1d3d8> in <module>
2 from sunpy.map.sources import SUVIMap
3 hdu = fits.open('dr_suvi-l2-ci195_g16_s20200220T180000Z_e20200220T180400Z_v1-0-0.fits')
----> 4 suvimap = SUVIMap(hdu[1].data, hdu[1].header)
~/opt/anaconda3/lib/python3.7/site-packages/sunpy/map/sources/suvi.py in __init__(self, data, header, **kwargs)
72 def __init__(self, data, header, **kwargs):
73
---> 74 super().__init__(data, header, **kwargs)
75
76 # Fill in some missing info
~/opt/anaconda3/lib/python3.7/site-packages/sunpy/map/mapbase.py in __init__(self, data, header, plot_settings, **kwargs)
187 "Data will be truncated to the first two dimensions.", SunpyUserWarning)
188
--> 189 super().__init__(data, meta=MetaDict(header), **kwargs)
190
191 # Correct possibly missing meta keywords
~/opt/anaconda3/lib/python3.7/site-packages/sunpy/util/metadata.py in __init__(self, *args)
38 items = adict.items()
39 else:
---> 40 raise TypeError("Can not create a MetaDict from this type input")
41
42 self._check_str_keys(items)
TypeError: Can not create a MetaDict from this type input
|
TypeError
|
def world_to_pixel(self, coordinate, origin=None):
"""
Convert a world (data) coordinate to a pixel coordinate.
Parameters
----------
coordinate : `~astropy.coordinates.SkyCoord` or `~astropy.coordinates.BaseFrame`
The coordinate object to convert to pixel coordinates.
origin : int
Deprecated.
Origin of the top-left corner. i.e. count from 0 or 1.
Normally, origin should be 0 when passing numpy indices, or 1 if
passing values from FITS header or map attributes.
Returns
-------
x : `~astropy.units.Quantity`
Pixel coordinate on the CTYPE1 axis.
y : `~astropy.units.Quantity`
Pixel coordinate on the CTYPE2 axis.
"""
self._check_origin(origin)
x, y = self.wcs.world_to_pixel(coordinate)
if origin == 1:
x += 1
y += 1
return PixelPair(x * u.pixel, y * u.pixel)
|
def world_to_pixel(self, coordinate, origin=0):
"""
Convert a world (data) coordinate to a pixel coordinate by using
`~astropy.wcs.WCS.wcs_world2pix`.
Parameters
----------
coordinate : `~astropy.coordinates.SkyCoord` or `~astropy.coordinates.BaseFrame`
The coordinate object to convert to pixel coordinates.
origin : int
Origin of the top-left corner. i.e. count from 0 or 1.
Normally, origin should be 0 when passing numpy indices, or 1 if
passing values from FITS header or map attributes.
See `~astropy.wcs.WCS.wcs_world2pix` for more information.
Returns
-------
x : `~astropy.units.Quantity`
Pixel coordinate on the CTYPE1 axis.
y : `~astropy.units.Quantity`
Pixel coordinate on the CTYPE2 axis.
"""
if not isinstance(coordinate, (SkyCoord, astropy.coordinates.BaseCoordinateFrame)):
raise ValueError(
"world_to_pixel takes a Astropy coordinate frame or SkyCoord instance."
)
native_frame = coordinate.transform_to(self.coordinate_frame)
lon, lat = u.Quantity(self._get_lon_lat(native_frame)).to(u.deg)
x, y = self.wcs.wcs_world2pix(lon, lat, origin)
return PixelPair(x * u.pixel, y * u.pixel)
|
https://github.com/sunpy/sunpy/issues/4699
|
Traceback (most recent call last):
File "/Users/dstansby/github/sunpy/test.py", line 27, in <module>
print(m.bottom_left_coord)
File "/Users/dstansby/github/sunpy/sunpy/map/mapbase.py", line 719, in bottom_left_coord
return self.pixel_to_world(0*u.pix, 0*u.pix)
File "/Users/dstansby/miniconda3/envs/dev/lib/python3.9/site-packages/astropy/units/decorators.py", line 251, in wrapper
return_ = wrapped_function(*func_args, **func_kwargs)
File "/Users/dstansby/github/sunpy/sunpy/map/mapbase.py", line 1146, in pixel_to_world
return SkyCoord(x, y, frame=self.coordinate_frame)
File "/Users/dstansby/miniconda3/envs/dev/lib/python3.9/site-packages/astropy/coordinates/sky_coordinate.py", line 314, in __init__
skycoord_kwargs, components, info = _parse_coordinate_data(
File "/Users/dstansby/miniconda3/envs/dev/lib/python3.9/site-packages/astropy/coordinates/sky_coordinate_parsers.py", line 301, in _parse_coordinate_data
_components[frame_attr_name] = attr_class(arg, unit=unit)
File "/Users/dstansby/miniconda3/envs/dev/lib/python3.9/site-packages/astropy/coordinates/angles.py", line 536, in __new__
self._validate_angles()
File "/Users/dstansby/miniconda3/envs/dev/lib/python3.9/site-packages/astropy/coordinates/angles.py", line 557, in _validate_angles
raise ValueError('Latitude angle(s) must be within -90 deg <= angle <= 90 deg, '
ValueError: Latitude angle(s) must be within -90 deg <= angle <= 90 deg, got 359.9997222222222 deg
|
ValueError
|
def pixel_to_world(self, x: u.pixel, y: u.pixel, origin=None):
"""
Convert a pixel coordinate to a data (world) coordinate.
Parameters
----------
x : `~astropy.units.Quantity`
Pixel coordinate of the CTYPE1 axis. (Normally solar-x).
y : `~astropy.units.Quantity`
Pixel coordinate of the CTYPE2 axis. (Normally solar-y).
origin : int
Deprecated.
Origin of the top-left corner. i.e. count from 0 or 1.
Normally, origin should be 0 when passing numpy indices, or 1 if
passing values from FITS header or map attributes.
Returns
-------
coord : `astropy.coordinates.SkyCoord`
A coordinate object representing the output coordinate.
"""
self._check_origin(origin)
if origin == 1:
x = x - 1 * u.pixel
y = y - 1 * u.pixel
return self.wcs.pixel_to_world(x, y)
|
def pixel_to_world(self, x: u.pixel, y: u.pixel, origin=0):
"""
Convert a pixel coordinate to a data (world) coordinate by using
`~astropy.wcs.WCS.wcs_pix2world`.
Parameters
----------
x : `~astropy.units.Quantity`
Pixel coordinate of the CTYPE1 axis. (Normally solar-x).
y : `~astropy.units.Quantity`
Pixel coordinate of the CTYPE2 axis. (Normally solar-y).
origin : int
Origin of the top-left corner. i.e. count from 0 or 1.
Normally, origin should be 0 when passing numpy indices, or 1 if
passing values from FITS header or map attributes.
See `~astropy.wcs.WCS.wcs_pix2world` for more information.
Returns
-------
coord : `astropy.coordinates.SkyCoord`
A coordinate object representing the output coordinate.
"""
# Hold the WCS instance here so we can inspect the output units after
# the pix2world call
temp_wcs = self.wcs
x, y = temp_wcs.wcs_pix2world(x, y, origin)
out_units = list(map(u.Unit, temp_wcs.wcs.cunit))
x = u.Quantity(x, out_units[0])
y = u.Quantity(y, out_units[1])
return SkyCoord(x, y, frame=self.coordinate_frame)
|
https://github.com/sunpy/sunpy/issues/4699
|
Traceback (most recent call last):
File "/Users/dstansby/github/sunpy/test.py", line 27, in <module>
print(m.bottom_left_coord)
File "/Users/dstansby/github/sunpy/sunpy/map/mapbase.py", line 719, in bottom_left_coord
return self.pixel_to_world(0*u.pix, 0*u.pix)
File "/Users/dstansby/miniconda3/envs/dev/lib/python3.9/site-packages/astropy/units/decorators.py", line 251, in wrapper
return_ = wrapped_function(*func_args, **func_kwargs)
File "/Users/dstansby/github/sunpy/sunpy/map/mapbase.py", line 1146, in pixel_to_world
return SkyCoord(x, y, frame=self.coordinate_frame)
File "/Users/dstansby/miniconda3/envs/dev/lib/python3.9/site-packages/astropy/coordinates/sky_coordinate.py", line 314, in __init__
skycoord_kwargs, components, info = _parse_coordinate_data(
File "/Users/dstansby/miniconda3/envs/dev/lib/python3.9/site-packages/astropy/coordinates/sky_coordinate_parsers.py", line 301, in _parse_coordinate_data
_components[frame_attr_name] = attr_class(arg, unit=unit)
File "/Users/dstansby/miniconda3/envs/dev/lib/python3.9/site-packages/astropy/coordinates/angles.py", line 536, in __new__
self._validate_angles()
File "/Users/dstansby/miniconda3/envs/dev/lib/python3.9/site-packages/astropy/coordinates/angles.py", line 557, in _validate_angles
raise ValueError('Latitude angle(s) must be within -90 deg <= angle <= 90 deg, '
ValueError: Latitude angle(s) must be within -90 deg <= angle <= 90 deg, got 359.9997222222222 deg
|
ValueError
|
def rotate(
self,
angle: u.deg = None,
rmatrix=None,
order=4,
scale=1.0,
recenter=False,
missing=0.0,
use_scipy=False,
):
"""
Returns a new rotated and rescaled map.
Specify either a rotation angle or a rotation matrix, but not both. If
neither an angle or a rotation matrix are specified, the map will be
rotated by the rotation angle in the metadata.
The map will be rotated around the reference coordinate defined in the
meta data.
This method also updates the ``rotation_matrix`` attribute and any
appropriate header data so that they correctly describe the new map.
Parameters
----------
angle : `~astropy.units.Quantity`
The angle (degrees) to rotate counterclockwise.
rmatrix : 2x2
Linear transformation rotation matrix.
order : int 0-5
Interpolation order to be used. When using scikit-image this
parameter is passed into :func:`skimage.transform.warp` (e.g., 4
corresponds to bi-quartic interpolation).
When using scipy it is passed into
:func:`scipy.ndimage.interpolation.affine_transform` where it
controls the order of the spline. Faster performance may be
obtained at the cost of accuracy by using lower values.
Default: 4
scale : float
A scale factor for the image, default is no scaling
recenter : bool
If True, position the axis of rotation at the center of the new map
Default: False
missing : float
The numerical value to fill any missing points after rotation.
Default: 0.0
use_scipy : bool
If True, forces the rotation to use
:func:`scipy.ndimage.interpolation.affine_transform`, otherwise it
uses the :func:`skimage.transform.warp`.
Default: False, unless scikit-image can't be imported
Returns
-------
out : `~sunpy.map.GenericMap` or subclass
A new Map instance containing the rotated and rescaled data of the
original map.
See Also
--------
sunpy.image.transform.affine_transform : The routine this method calls
for the rotation.
Notes
-----
This function will remove old CROTA keywords from the header.
This function will also convert a CDi_j matrix to a PCi_j matrix.
See :func:`sunpy.image.transform.affine_transform` for details on the
transformations, situations when the underlying data is modified prior
to rotation, and differences from IDL's rot().
"""
# Put the import here to reduce sunpy.map import time
from sunpy.image.transform import affine_transform
if angle is not None and rmatrix is not None:
raise ValueError("You cannot specify both an angle and a rotation matrix.")
elif angle is None and rmatrix is None:
rmatrix = self.rotation_matrix
if order not in range(6):
raise ValueError("Order must be between 0 and 5.")
# The FITS-WCS transform is by definition defined around the
# reference coordinate in the header.
lon, lat = self._get_lon_lat(self.reference_coordinate.frame)
rotation_center = u.Quantity([lon, lat])
# Copy meta data
new_meta = self.meta.copy()
if angle is not None:
# Calculate the parameters for the affine_transform
c = np.cos(np.deg2rad(angle))
s = np.sin(np.deg2rad(angle))
rmatrix = np.array([[c, -s], [s, c]])
# Calculate the shape in pixels to contain all of the image data
extent = np.max(
np.abs(np.vstack((self.data.shape @ rmatrix, self.data.shape @ rmatrix.T))),
axis=0,
)
# Calculate the needed padding or unpadding
diff = np.asarray(np.ceil((extent - self.data.shape) / 2), dtype=int).ravel()
# Pad the image array
pad_x = int(np.max((diff[1], 0)))
pad_y = int(np.max((diff[0], 0)))
new_data = np.pad(
self.data,
((pad_y, pad_y), (pad_x, pad_x)),
mode="constant",
constant_values=(missing, missing),
)
new_meta["crpix1"] += pad_x
new_meta["crpix2"] += pad_y
# All of the following pixel calculations use a pixel origin of 0
pixel_array_center = (np.flipud(new_data.shape) - 1) / 2.0
# Create a temporary map so we can use it for the data to pixel calculation.
temp_map = self._new_instance(new_data, new_meta, self.plot_settings)
# Convert the axis of rotation from data coordinates to pixel coordinates
pixel_rotation_center = u.Quantity(
temp_map.world_to_pixel(self.reference_coordinate)
).value
del temp_map
if recenter:
pixel_center = pixel_rotation_center
else:
pixel_center = pixel_array_center
# Apply the rotation to the image data
new_data = affine_transform(
new_data.T,
np.asarray(rmatrix),
order=order,
scale=scale,
image_center=np.flipud(pixel_center),
recenter=recenter,
missing=missing,
use_scipy=use_scipy,
).T
if recenter:
new_reference_pixel = pixel_array_center
else:
# Calculate new pixel coordinates for the rotation center
new_reference_pixel = pixel_center + np.dot(
rmatrix, pixel_rotation_center - pixel_center
)
new_reference_pixel = np.array(new_reference_pixel).ravel()
# Define the new reference_pixel
new_meta["crval1"] = rotation_center[0].value
new_meta["crval2"] = rotation_center[1].value
new_meta["crpix1"] = new_reference_pixel[0] + 1 # FITS pixel origin is 1
new_meta["crpix2"] = new_reference_pixel[1] + 1 # FITS pixel origin is 1
# Unpad the array if necessary
unpad_x = -np.min((diff[1], 0))
if unpad_x > 0:
new_data = new_data[:, unpad_x:-unpad_x]
new_meta["crpix1"] -= unpad_x
unpad_y = -np.min((diff[0], 0))
if unpad_y > 0:
new_data = new_data[unpad_y:-unpad_y, :]
new_meta["crpix2"] -= unpad_y
# Calculate the new rotation matrix to store in the header by
# "subtracting" the rotation matrix used in the rotate from the old one
# That being calculate the dot product of the old header data with the
# inverse of the rotation matrix.
pc_C = np.dot(self.rotation_matrix, np.linalg.inv(rmatrix))
new_meta["PC1_1"] = pc_C[0, 0]
new_meta["PC1_2"] = pc_C[0, 1]
new_meta["PC2_1"] = pc_C[1, 0]
new_meta["PC2_2"] = pc_C[1, 1]
# Update pixel size if image has been scaled.
if scale != 1.0:
new_meta["cdelt1"] = (self.scale[0] / scale).value
new_meta["cdelt2"] = (self.scale[1] / scale).value
# Remove old CROTA kwargs because we have saved a new PCi_j matrix.
new_meta.pop("CROTA1", None)
new_meta.pop("CROTA2", None)
# Remove CDi_j header
new_meta.pop("CD1_1", None)
new_meta.pop("CD1_2", None)
new_meta.pop("CD2_1", None)
new_meta.pop("CD2_2", None)
# Create new map with the modification
new_map = self._new_instance(new_data, new_meta, self.plot_settings)
return new_map
|
def rotate(
self,
angle: u.deg = None,
rmatrix=None,
order=4,
scale=1.0,
recenter=False,
missing=0.0,
use_scipy=False,
):
"""
Returns a new rotated and rescaled map.
Specify either a rotation angle or a rotation matrix, but not both. If
neither an angle or a rotation matrix are specified, the map will be
rotated by the rotation angle in the metadata.
The map will be rotated around the reference coordinate defined in the
meta data.
This method also updates the ``rotation_matrix`` attribute and any
appropriate header data so that they correctly describe the new map.
Parameters
----------
angle : `~astropy.units.Quantity`
The angle (degrees) to rotate counterclockwise.
rmatrix : 2x2
Linear transformation rotation matrix.
order : int 0-5
Interpolation order to be used. When using scikit-image this
parameter is passed into :func:`skimage.transform.warp` (e.g., 4
corresponds to bi-quartic interpolation).
When using scipy it is passed into
:func:`scipy.ndimage.interpolation.affine_transform` where it
controls the order of the spline. Faster performance may be
obtained at the cost of accuracy by using lower values.
Default: 4
scale : float
A scale factor for the image, default is no scaling
recenter : bool
If True, position the axis of rotation at the center of the new map
Default: False
missing : float
The numerical value to fill any missing points after rotation.
Default: 0.0
use_scipy : bool
If True, forces the rotation to use
:func:`scipy.ndimage.interpolation.affine_transform`, otherwise it
uses the :func:`skimage.transform.warp`.
Default: False, unless scikit-image can't be imported
Returns
-------
out : `~sunpy.map.GenericMap` or subclass
A new Map instance containing the rotated and rescaled data of the
original map.
See Also
--------
sunpy.image.transform.affine_transform : The routine this method calls
for the rotation.
Notes
-----
This function will remove old CROTA keywords from the header.
This function will also convert a CDi_j matrix to a PCi_j matrix.
See :func:`sunpy.image.transform.affine_transform` for details on the
transformations, situations when the underlying data is modified prior
to rotation, and differences from IDL's rot().
"""
# Put the import here to reduce sunpy.map import time
from sunpy.image.transform import affine_transform
if angle is not None and rmatrix is not None:
raise ValueError("You cannot specify both an angle and a rotation matrix.")
elif angle is None and rmatrix is None:
rmatrix = self.rotation_matrix
if order not in range(6):
raise ValueError("Order must be between 0 and 5.")
# The FITS-WCS transform is by definition defined around the
# reference coordinate in the header.
lon, lat = self._get_lon_lat(self.reference_coordinate.frame)
rotation_center = u.Quantity([lon, lat])
# Copy meta data
new_meta = self.meta.copy()
if angle is not None:
# Calculate the parameters for the affine_transform
c = np.cos(np.deg2rad(angle))
s = np.sin(np.deg2rad(angle))
rmatrix = np.array([[c, -s], [s, c]])
# Calculate the shape in pixels to contain all of the image data
extent = np.max(
np.abs(np.vstack((self.data.shape @ rmatrix, self.data.shape @ rmatrix.T))),
axis=0,
)
# Calculate the needed padding or unpadding
diff = np.asarray(np.ceil((extent - self.data.shape) / 2), dtype=int).ravel()
# Pad the image array
pad_x = int(np.max((diff[1], 0)))
pad_y = int(np.max((diff[0], 0)))
new_data = np.pad(
self.data,
((pad_y, pad_y), (pad_x, pad_x)),
mode="constant",
constant_values=(missing, missing),
)
new_meta["crpix1"] += pad_x
new_meta["crpix2"] += pad_y
# All of the following pixel calculations use a pixel origin of 0
pixel_array_center = (np.flipud(new_data.shape) - 1) / 2.0
# Create a temporary map so we can use it for the data to pixel calculation.
temp_map = self._new_instance(new_data, new_meta, self.plot_settings)
# Convert the axis of rotation from data coordinates to pixel coordinates
pixel_rotation_center = u.Quantity(
temp_map.world_to_pixel(self.reference_coordinate, origin=0)
).value
del temp_map
if recenter:
pixel_center = pixel_rotation_center
else:
pixel_center = pixel_array_center
# Apply the rotation to the image data
new_data = affine_transform(
new_data.T,
np.asarray(rmatrix),
order=order,
scale=scale,
image_center=np.flipud(pixel_center),
recenter=recenter,
missing=missing,
use_scipy=use_scipy,
).T
if recenter:
new_reference_pixel = pixel_array_center
else:
# Calculate new pixel coordinates for the rotation center
new_reference_pixel = pixel_center + np.dot(
rmatrix, pixel_rotation_center - pixel_center
)
new_reference_pixel = np.array(new_reference_pixel).ravel()
# Define the new reference_pixel
new_meta["crval1"] = rotation_center[0].value
new_meta["crval2"] = rotation_center[1].value
new_meta["crpix1"] = new_reference_pixel[0] + 1 # FITS pixel origin is 1
new_meta["crpix2"] = new_reference_pixel[1] + 1 # FITS pixel origin is 1
# Unpad the array if necessary
unpad_x = -np.min((diff[1], 0))
if unpad_x > 0:
new_data = new_data[:, unpad_x:-unpad_x]
new_meta["crpix1"] -= unpad_x
unpad_y = -np.min((diff[0], 0))
if unpad_y > 0:
new_data = new_data[unpad_y:-unpad_y, :]
new_meta["crpix2"] -= unpad_y
# Calculate the new rotation matrix to store in the header by
# "subtracting" the rotation matrix used in the rotate from the old one
# That being calculate the dot product of the old header data with the
# inverse of the rotation matrix.
pc_C = np.dot(self.rotation_matrix, np.linalg.inv(rmatrix))
new_meta["PC1_1"] = pc_C[0, 0]
new_meta["PC1_2"] = pc_C[0, 1]
new_meta["PC2_1"] = pc_C[1, 0]
new_meta["PC2_2"] = pc_C[1, 1]
# Update pixel size if image has been scaled.
if scale != 1.0:
new_meta["cdelt1"] = (self.scale[0] / scale).value
new_meta["cdelt2"] = (self.scale[1] / scale).value
# Remove old CROTA kwargs because we have saved a new PCi_j matrix.
new_meta.pop("CROTA1", None)
new_meta.pop("CROTA2", None)
# Remove CDi_j header
new_meta.pop("CD1_1", None)
new_meta.pop("CD1_2", None)
new_meta.pop("CD2_1", None)
new_meta.pop("CD2_2", None)
# Create new map with the modification
new_map = self._new_instance(new_data, new_meta, self.plot_settings)
return new_map
|
https://github.com/sunpy/sunpy/issues/4699
|
Traceback (most recent call last):
File "/Users/dstansby/github/sunpy/test.py", line 27, in <module>
print(m.bottom_left_coord)
File "/Users/dstansby/github/sunpy/sunpy/map/mapbase.py", line 719, in bottom_left_coord
return self.pixel_to_world(0*u.pix, 0*u.pix)
File "/Users/dstansby/miniconda3/envs/dev/lib/python3.9/site-packages/astropy/units/decorators.py", line 251, in wrapper
return_ = wrapped_function(*func_args, **func_kwargs)
File "/Users/dstansby/github/sunpy/sunpy/map/mapbase.py", line 1146, in pixel_to_world
return SkyCoord(x, y, frame=self.coordinate_frame)
File "/Users/dstansby/miniconda3/envs/dev/lib/python3.9/site-packages/astropy/coordinates/sky_coordinate.py", line 314, in __init__
skycoord_kwargs, components, info = _parse_coordinate_data(
File "/Users/dstansby/miniconda3/envs/dev/lib/python3.9/site-packages/astropy/coordinates/sky_coordinate_parsers.py", line 301, in _parse_coordinate_data
_components[frame_attr_name] = attr_class(arg, unit=unit)
File "/Users/dstansby/miniconda3/envs/dev/lib/python3.9/site-packages/astropy/coordinates/angles.py", line 536, in __new__
self._validate_angles()
File "/Users/dstansby/miniconda3/envs/dev/lib/python3.9/site-packages/astropy/coordinates/angles.py", line 557, in _validate_angles
raise ValueError('Latitude angle(s) must be within -90 deg <= angle <= 90 deg, '
ValueError: Latitude angle(s) must be within -90 deg <= angle <= 90 deg, got 359.9997222222222 deg
|
ValueError
|
def convert_input(self, value):
# Keep string here.
if isinstance(value, str):
return value, False
else:
# Upgrade the coordinate to a `SkyCoord` so that frame attributes will be merged
if isinstance(value, BaseCoordinateFrame) and not isinstance(
value, self._frame
):
value = SkyCoord(value)
return super().convert_input(value)
|
def convert_input(self, value):
# Keep string here.
if isinstance(value, str):
return value, False
else:
return super().convert_input(value)
|
https://github.com/sunpy/sunpy/issues/4237
|
---------------------------------------------------------------------------
ConvertError Traceback (most recent call last)
<ipython-input-39-8ab95e21b2a5> in <module>
1 hee = HeliocentricEarthEcliptic(45*u.deg, 35*u.deg, 17*u.km, obstime="2020-01-01")
----> 2 hee.transform_to(HeliographicStonyhurst)
~/.virtualenvs/ndcube-dev/lib/python3.8/site-packages/astropy/coordinates/baseframe.py in transform_to(self, new_frame)
1196 msg = 'Cannot transform from {0} to {1}'
1197 raise ConvertError(msg.format(self.__class__, new_frame.__class__))
-> 1198 return trans(self, new_frame)
1199
1200 def is_transformable_to(self, new_frame):
~/.virtualenvs/ndcube-dev/lib/python3.8/site-packages/astropy/coordinates/transformations.py in __call__(self, fromcoord, toframe)
1390
1391 curr_toframe = t.tosys(**frattrs)
-> 1392 curr_coord = t(curr_coord, curr_toframe)
1393
1394 # this is safe even in the case where self.transforms is empty, because
~/.virtualenvs/ndcube-dev/lib/python3.8/site-packages/astropy/coordinates/transformations.py in __call__(self, fromcoord, toframe)
1208 def __call__(self, fromcoord, toframe):
1209
-> 1210 M, vec = self.transform_func(fromcoord, toframe)
1211 newrep = self._apply_transform(fromcoord, M, vec)
1212
~/.virtualenvs/ndcube-dev/lib/python3.8/site-packages/sunpy/coordinates/transformations.py in wrapped_func(*args, **kwargs)
174 _layer_level += 1
175
--> 176 result = func(*args, **kwargs)
177
178 if debug_output:
~/.virtualenvs/ndcube-dev/lib/python3.8/site-packages/sunpy/coordinates/transformations.py in hcrs_to_hgs(hcrscoord, hgsframe)
541 """
542 if hgsframe.obstime is None:
--> 543 raise ConvertError("To perform this transformation, the coordinate"
544 " frame needs a specified `obstime`.")
545
ConvertError: To perform this transformation, the coordinate frame needs a specified `obstime`.
|
ConvertError
|
def _download(self, data):
"""Download all data, even if paginated."""
page = 1
results = []
while True:
data["page"] = page
fd = urllib.request.urlopen(self.url + urllib.parse.urlencode(data))
try:
result = codecs.decode(fd.read(), encoding="utf-8", errors="replace")
result = json.loads(result)
except Exception as e:
raise IOError("Failed to load return from the HEKClient.") from e
finally:
fd.close()
results.extend(result["result"])
if not result["overmax"]:
if len(results) > 0:
return HEKTable(dict_keys_same(results))
else:
return HEKTable()
page += 1
|
def _download(self, data):
"""Download all data, even if paginated."""
page = 1
results = []
while True:
data["page"] = page
fd = urllib.request.urlopen(self.url + urllib.parse.urlencode(data))
try:
result = json.load(fd)
except Exception as e:
raise IOError("Failed to load return from the HEKClient.") from e
finally:
fd.close()
results.extend(result["result"])
if not result["overmax"]:
if len(results) > 0:
return HEKTable(dict_keys_same(results))
else:
return HEKTable()
page += 1
|
https://github.com/sunpy/sunpy/issues/4087
|
Traceback (most recent call last):
File "/home/user/anaconda3/envs/pytorch/lib/python3.8/site-packages/sunpy/net/hek/hek.py", line 69, in _download
result = json.load(fd)
File "/home/user/anaconda3/envs/pytorch/lib/python3.8/json/__init__.py", line 293, in load
return loads(fp.read(),
File "/home/user/anaconda3/envs/pytorch/lib/python3.8/json/__init__.py", line 343, in loads
s = s.decode(detect_encoding(s), 'surrogatepass')
UnicodeDecodeError: 'utf-8' codec can't decode byte 0xc5 in position 33279: invalid continuation byte
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "/home/user/anaconda3/envs/pytorch/lib/python3.8/site-packages/sunpy/net/hek/hek.py", line 99, in search
return self._download(ndata[0])
File "/home/user/anaconda3/envs/pytorch/lib/python3.8/site-packages/sunpy/net/hek/hek.py", line 71, in _download
raise IOError("Failed to load return from the HEKClient.") from e
OSError: Failed to load return from the HEKClient.
|
UnicodeDecodeError
|
def download(url):
path = self._cache_dir / get_filename(urlopen(url), url)
# replacement_filename returns a string and we want a Path object
path = Path(replacement_filename(path))
self._downloader.download(url, path)
shahash = hash_file(path)
return path, shahash, url
|
def download(url):
path = self._cache_dir / get_filename(urlopen(url), url)
path = replacement_filename(path)
self._downloader.download(url, path)
shahash = hash_file(path)
return path, shahash, url
|
https://github.com/sunpy/sunpy/issues/4006
|
AttributeError Traceback (most recent call last)
<ipython-input-23-1a0872c98bf5> in <module>
----> 1 maps = Map(urls)
~/anaconda3/lib/python3.7/site-packages/sunpy/map/map_factory.py in __call__(self, composite, sequence, silence_errors, *args, **kwargs)
274 """
275
--> 276 data_header_pairs, already_maps = self._parse_args(*args, **kwargs)
277
278 new_maps = list()
~/anaconda3/lib/python3.7/site-packages/sunpy/map/map_factory.py in _parse_args(self, *args, **kwargs)
215 elif isinstance(arg, str) and _is_url(arg):
216 url = arg
--> 217 path = str(cache.download(url).absolute())
218 pairs = self._read_file(path, **kwargs)
219 data_header_pairs += pairs
AttributeError: 'str' object has no attribute 'absolute'
|
AttributeError
|
def _download_and_hash(self, urls):
"""
Downloads the file and returns the path, hash and url it used to download.
Parameters
----------
urls: `list`
List of urls.
Returns
-------
`str`, `str`, `str`
Path, hash and URL of the file.
"""
def download(url):
path = self._cache_dir / get_filename(urlopen(url), url)
# replacement_filename returns a string and we want a Path object
path = Path(replacement_filename(path))
self._downloader.download(url, path)
shahash = hash_file(path)
return path, shahash, url
for url in urls:
try:
return download(url)
except Exception as e:
warn(e, SunpyUserWarning)
else:
raise RuntimeError("Download failed")
|
def _download_and_hash(self, urls):
"""
Downloads the file and returns the path, hash and url it used to download.
Parameters
----------
urls: `list`
List of urls.
Returns
-------
`str`, `str`, `str`
Path, hash and URL of the file.
"""
def download(url):
path = self._cache_dir / get_filename(urlopen(url), url)
path = replacement_filename(path)
self._downloader.download(url, path)
shahash = hash_file(path)
return path, shahash, url
for url in urls:
try:
return download(url)
except Exception as e:
warn(e, SunpyUserWarning)
else:
raise RuntimeError("Download failed")
|
https://github.com/sunpy/sunpy/issues/4006
|
AttributeError Traceback (most recent call last)
<ipython-input-23-1a0872c98bf5> in <module>
----> 1 maps = Map(urls)
~/anaconda3/lib/python3.7/site-packages/sunpy/map/map_factory.py in __call__(self, composite, sequence, silence_errors, *args, **kwargs)
274 """
275
--> 276 data_header_pairs, already_maps = self._parse_args(*args, **kwargs)
277
278 new_maps = list()
~/anaconda3/lib/python3.7/site-packages/sunpy/map/map_factory.py in _parse_args(self, *args, **kwargs)
215 elif isinstance(arg, str) and _is_url(arg):
216 url = arg
--> 217 path = str(cache.download(url).absolute())
218 pairs = self._read_file(path, **kwargs)
219 data_header_pairs += pairs
AttributeError: 'str' object has no attribute 'absolute'
|
AttributeError
|
def __init__(self, table=None, client=None):
"""
table : `astropy.table.Table`
"""
super().__init__()
self.table = table or astropy.table.QTable()
self.query_args = getattr(table, "query_args", None)
self.requests = getattr(table, "requests", None)
self._client = client
|
def __init__(self, table=None, client=None):
"""
table : `astropy.table.Table`
"""
super().__init__()
self.table = table or astropy.table.QTable()
self.query_args = None
self.requests = None
self._client = client
|
https://github.com/sunpy/sunpy/issues/2781
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-6-488027c4a327> in <module>
----> 1 Fido.fetch(res[0,0])
~/Git/sunpy/sunpy/net/fido_factory.py in fetch(self, *query_results, **kwargs)
360 for query_result in query_results:
361 for block in query_result.responses:
--> 362 reslist.append(block.client.fetch(block, **kwargs))
363
364 results = DownloadResponse(reslist)
~/Git/sunpy/sunpy/net/jsoc/jsoc.py in fetch(self, jsoc_response, path, overwrite, progress, max_conn, downloader, sleep)
532
533 # Make staging request to JSOC
--> 534 responses = self.request_data(jsoc_response)
535 # Make response iterable
536 if not isiterable(responses):
~/Git/sunpy/sunpy/net/jsoc/jsoc.py in request_data(self, jsoc_response, **kwargs)
428 requests = []
429 self.query_args = jsoc_response.query_args
--> 430 for block in jsoc_response.query_args:
431
432 ds = self._make_recordset(**block)
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def __getitem__(self, item):
if isinstance(item, int):
item = slice(item, item + 1)
ret = type(self)(self.table[item])
ret.query_args = self.query_args
ret.requests = self.requests
ret.client = self._client
warnings.warn(
"Downloading of sliced JSOC results is not supported. "
"All the files present in the original response will be downloaded.",
SunpyUserWarning,
)
return ret
|
def __getitem__(self, item):
return type(self)(self.table[item])
|
https://github.com/sunpy/sunpy/issues/2781
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-6-488027c4a327> in <module>
----> 1 Fido.fetch(res[0,0])
~/Git/sunpy/sunpy/net/fido_factory.py in fetch(self, *query_results, **kwargs)
360 for query_result in query_results:
361 for block in query_result.responses:
--> 362 reslist.append(block.client.fetch(block, **kwargs))
363
364 results = DownloadResponse(reslist)
~/Git/sunpy/sunpy/net/jsoc/jsoc.py in fetch(self, jsoc_response, path, overwrite, progress, max_conn, downloader, sleep)
532
533 # Make staging request to JSOC
--> 534 responses = self.request_data(jsoc_response)
535 # Make response iterable
536 if not isiterable(responses):
~/Git/sunpy/sunpy/net/jsoc/jsoc.py in request_data(self, jsoc_response, **kwargs)
428 requests = []
429 self.query_args = jsoc_response.query_args
--> 430 for block in jsoc_response.query_args:
431
432 ds = self._make_recordset(**block)
TypeError: 'NoneType' object is not iterable
|
TypeError
|
def fetch(
self,
query_response,
path=None,
methods=None,
site=None,
progress=True,
overwrite=False,
downloader=None,
wait=True,
):
"""
Download data specified in the query_response.
Parameters
----------
query_response : sunpy.net.vso.QueryResponse
QueryResponse containing the items to be downloaded.
path : str
Specify where the data is to be downloaded. Can refer to arbitrary
fields of the QueryResponseItem (instrument, source, time, ...) via
string formatting, moreover the file-name of the file downloaded can
be referred to as file, e.g.
"{source}/{instrument}/{time.start}/{file}".
methods : {list of str}
Download methods, defaults to URL-FILE_Rice then URL-FILE.
Methods are a concatenation of one PREFIX followed by any number of
SUFFIXES i.e. `PREFIX-SUFFIX_SUFFIX2_SUFFIX3`.
The full list of
`PREFIXES <https://sdac.virtualsolar.org/cgi/show_details?keyword=METHOD_PREFIX>`_
and `SUFFIXES <https://sdac.virtualsolar.org/cgi/show_details?keyword=METHOD_SUFFIX>`_
are listed on the VSO site.
site : str
There are a number of caching mirrors for SDO and other
instruments, some available ones are listed below.
=============== ========================================================
NSO National Solar Observatory, Tucson (US)
SAO (aka CFA) Smithonian Astronomical Observatory, Harvard U. (US)
SDAC (aka GSFC) Solar Data Analysis Center, NASA/GSFC (US)
ROB Royal Observatory of Belgium (Belgium)
MPS Max Planck Institute for Solar System Research (Germany)
UCLan University of Central Lancashire (UK)
IAS Institut Aeronautique et Spatial (France)
KIS Kiepenheuer-Institut fur Sonnenphysik Germany)
NMSU New Mexico State University (US)
=============== ========================================================
progress : `bool`, optional
If `True` show a progress bar showing how many of the total files
have been downloaded. If `False`, no progress bars will be shown at all.
overwrite : `bool` or `str`, optional
Determine how to handle downloading if a file already exists with the
same name. If `False` the file download will be skipped and the path
returned to the existing file, if `True` the file will be downloaded
and the existing file will be overwritten, if `'unique'` the filename
will be modified to be unique.
downloader : `parfive.Downloader`, optional
The download manager to use.
wait : `bool`, optional
If `False` ``downloader.download()`` will not be called. Only has
any effect if `downloader` is not `None`.
Returns
-------
out : `parfive.Results`
Object that supplies a list of filenames and any errors.
Examples
--------
>>> files = fetch(qr) # doctest:+SKIP
"""
if path is None:
path = os.path.join(config.get("downloads", "download_dir"), "{file}")
elif isinstance(path, str) and "{file}" not in path:
path = os.path.join(path, "{file}")
path = os.path.expanduser(path)
dl_set = True
if not downloader:
dl_set = False
downloader = Downloader(progress=progress)
fileids = VSOClient.by_fileid(query_response)
if not fileids:
return downloader.download() if wait else Results()
# Adding the site parameter to the info
info = {}
if site is not None:
info["site"] = site
VSOGetDataResponse = self.api.get_type("VSO:VSOGetDataResponse")
data_request = self.make_getdatarequest(query_response, methods, info)
data_response = VSOGetDataResponse(self.api.service.GetData(data_request))
err_results = self.download_all(data_response, methods, downloader, path, fileids)
if dl_set and not wait:
return err_results
results = downloader.download()
results += err_results
results._errors += err_results.errors
return results
|
def fetch(
self,
query_response,
path=None,
methods=None,
site=None,
progress=True,
overwrite=False,
downloader=None,
wait=True,
):
"""
Download data specified in the query_response.
Parameters
----------
query_response : sunpy.net.vso.QueryResponse
QueryResponse containing the items to be downloaded.
path : str
Specify where the data is to be downloaded. Can refer to arbitrary
fields of the QueryResponseItem (instrument, source, time, ...) via
string formatting, moreover the file-name of the file downloaded can
be referred to as file, e.g.
"{source}/{instrument}/{time.start}/{file}".
methods : {list of str}
Download methods, defaults to URL-FILE_Rice then URL-FILE.
Methods are a concatenation of one PREFIX followed by any number of
SUFFIXES i.e. `PREFIX-SUFFIX_SUFFIX2_SUFFIX3`.
The full list of
`PREFIXES <https://sdac.virtualsolar.org/cgi/show_details?keyword=METHOD_PREFIX>`_
and `SUFFIXES <https://sdac.virtualsolar.org/cgi/show_details?keyword=METHOD_SUFFIX>`_
are listed on the VSO site.
site : str
There are a number of caching mirrors for SDO and other
instruments, some available ones are listed below.
=============== ========================================================
NSO National Solar Observatory, Tucson (US)
SAO (aka CFA) Smithonian Astronomical Observatory, Harvard U. (US)
SDAC (aka GSFC) Solar Data Analysis Center, NASA/GSFC (US)
ROB Royal Observatory of Belgium (Belgium)
MPS Max Planck Institute for Solar System Research (Germany)
UCLan University of Central Lancashire (UK)
IAS Institut Aeronautique et Spatial (France)
KIS Kiepenheuer-Institut fur Sonnenphysik Germany)
NMSU New Mexico State University (US)
=============== ========================================================
progress : `bool`, optional
If `True` show a progress bar showing how many of the total files
have been downloaded. If `False`, no progress bars will be shown at all.
overwrite : `bool` or `str`, optional
Determine how to handle downloading if a file already exists with the
same name. If `False` the file download will be skipped and the path
returned to the existing file, if `True` the file will be downloaded
and the existing file will be overwritten, if `'unique'` the filename
will be modified to be unique.
downloader : `parfive.Downloader`, optional
The download manager to use.
wait : `bool`, optional
If `False` ``downloader.download()`` will not be called. Only has
any effect if `downloader` is not `None`.
Returns
-------
out : `parfive.Results`
Object that supplies a list of filenames and any errors.
Examples
--------
>>> files = fetch(qr) # doctest:+SKIP
"""
if path is None:
path = os.path.join(config.get("downloads", "download_dir"), "{file}")
elif isinstance(path, str) and "{file}" not in path:
path = os.path.join(path, "{file}")
path = os.path.expanduser(path)
dl_set = True
if not downloader:
dl_set = False
downloader = Downloader(progress=progress)
fileids = VSOClient.by_fileid(query_response)
if not fileids:
return downloader.download()
# Adding the site parameter to the info
info = {}
if site is not None:
info["site"] = site
VSOGetDataResponse = self.api.get_type("VSO:VSOGetDataResponse")
data_request = self.make_getdatarequest(query_response, methods, info)
data_response = VSOGetDataResponse(self.api.service.GetData(data_request))
err_results = self.download_all(data_response, methods, downloader, path, fileids)
if dl_set and not wait:
return err_results
results = downloader.download()
results += err_results
results._errors += err_results.errors
return results
|
https://github.com/sunpy/sunpy/issues/3292
|
Results from 2 Providers:
0 Results from the VSOClient:
Start Time End Time Source Instrument Type
float64 float64 float64 float64 float64
---------- -------- ------- ---------- -------
1 Results from the VSOClient:
Start Time [1] End Time [1] Source Instrument Type Wavelength [2]
Angstrom
str19 str19 str3 str3 str8 float64
------------------- ------------------- ------ ---------- -------- --------------
2011-01-01 00:00:08 2011-01-01 00:00:09 SDO AIA FULLDISK 304.0 .. 304.0
Files Downloaded: 0file [00:00, ?file/s]
Files Downloaded: 0%| | 0/1 [00:00<?, ?file/s]Traceback (most recent call last):
File "download_coord_data.py", line 20, in <module>
res = Fido.fetch(results, path='./{file}', downloader=downloader)
File "/home/stuart/.virtualenvs/sunpy-release/lib/python3.7/site-packages/sunpy/net/fido_factory.py", line 378, in fetch
results = downloader.download()
File "/home/stuart/Git/parfive/parfive/downloader.py", line 194, in download
future = self.run_until_complete(self._run_download(timeouts))
File "/usr/lib/python3.7/asyncio/base_events.py", line 577, in run_until_complete
raise RuntimeError('Event loop stopped before Future completed.')
RuntimeError: Event loop stopped before Future completed.
|
RuntimeError
|
def fetch(
self,
*query_results,
path=None,
max_conn=5,
progress=True,
overwrite=False,
downloader=None,
**kwargs,
):
"""
Download the records represented by
`~sunpy.net.fido_factory.UnifiedResponse` objects.
Parameters
----------
query_results : `sunpy.net.fido_factory.UnifiedResponse`
Container returned by query method, or multiple.
path : `str`
The directory to retrieve the files into. Can refer to any fields
in `UnifiedResponse.response_block_properties` via string formatting,
moreover the file-name of the file downloaded can be referred to as file,
e.g. "{source}/{instrument}/{time.start}/{file}".
max_conn : `int`, optional
The number of parallel download slots.
progress : `bool`, optional
If `True` show a progress bar showing how many of the total files
have been downloaded. If `False`, no progress bars will be shown at all.
overwrite : `bool` or `str`, optional
Determine how to handle downloading if a file already exists with the
same name. If `False` the file download will be skipped and the path
returned to the existing file, if `True` the file will be downloaded
and the existing file will be overwritten, if `'unique'` the filename
will be modified to be unique.
downloader : `parfive.Downloader`, optional
The download manager to use. If specified the ``max_conn``,
``progress`` and ``overwrite`` arguments are ignored.
Returns
-------
`parfive.Results`
Examples
--------
>>> from sunpy.net.vso.attrs import Time, Instrument
>>> unifresp = Fido.search(Time('2012/3/4','2012/3/5'), Instrument('EIT')) # doctest: +REMOTE_DATA
>>> filepaths = Fido.fetch(unifresp) # doctest: +SKIP
If any downloads fail, they can be retried by passing the `parfive.Results` object back into ``fetch``.
>>> filepaths = Fido.fetch(filepaths) # doctest: +SKIP
"""
if "wait" in kwargs:
raise ValueError("wait is not a valid keyword argument to Fido.fetch.")
if downloader is None:
downloader = Downloader(
max_conn=max_conn, progress=progress, overwrite=overwrite
)
elif not isinstance(downloader, Downloader):
raise TypeError("The downloader argument must be a parfive.Downloader object.")
# Handle retrying failed downloads
retries = [isinstance(arg, Results) for arg in query_results]
if all(retries):
results = Results()
for retry in query_results:
dr = downloader.retry(retry)
results.data += dr.data
results._errors += dr._errors
return results
elif any(retries):
raise TypeError(
"If any arguments to fetch are "
"`parfive.Results` objects, all arguments must be."
)
reslist = []
for query_result in query_results:
for block in query_result.responses:
reslist.append(
block.client.fetch(
block, path=path, downloader=downloader, wait=False, **kwargs
)
)
results = Results()
# Combine the results objects from all the clients into one Results
# object.
for result in reslist:
if result is None:
continue
if not isinstance(result, Results):
raise TypeError(
"If wait is False a client must return a parfive.Downloader and either None"
" or a parfive.Results object."
)
results.data += result.data
results._errors += result.errors
return results
|
def fetch(
self,
*query_results,
path=None,
max_conn=5,
progress=True,
overwrite=False,
downloader=None,
**kwargs,
):
"""
Download the records represented by
`~sunpy.net.fido_factory.UnifiedResponse` objects.
Parameters
----------
query_results : `sunpy.net.fido_factory.UnifiedResponse`
Container returned by query method, or multiple.
path : `str`
The directory to retrieve the files into. Can refer to any fields
in `UnifiedResponse.response_block_properties` via string formatting,
moreover the file-name of the file downloaded can be referred to as file,
e.g. "{source}/{instrument}/{time.start}/{file}".
max_conn : `int`, optional
The number of parallel download slots.
progress : `bool`, optional
If `True` show a progress bar showing how many of the total files
have been downloaded. If `False`, no progress bars will be shown at all.
overwrite : `bool` or `str`, optional
Determine how to handle downloading if a file already exists with the
same name. If `False` the file download will be skipped and the path
returned to the existing file, if `True` the file will be downloaded
and the existing file will be overwritten, if `'unique'` the filename
will be modified to be unique.
downloader : `parfive.Downloader`, optional
The download manager to use. If specified the ``max_conn``,
``progress`` and ``overwrite`` arguments are ignored.
Returns
-------
`parfive.Results`
Examples
--------
>>> from sunpy.net.vso.attrs import Time, Instrument
>>> unifresp = Fido.search(Time('2012/3/4','2012/3/5'), Instrument('EIT')) # doctest: +REMOTE_DATA
>>> filepaths = Fido.fetch(unifresp) # doctest: +SKIP
If any downloads fail, they can be retried by passing the `parfive.Results` object back into ``fetch``.
>>> filepaths = Fido.fetch(filepaths) # doctest: +SKIP
"""
if "wait" in kwargs:
raise ValueError("wait is not a valid keyword argument to Fido.fetch.")
if downloader is None:
downloader = Downloader(
max_conn=max_conn, progress=progress, overwrite=overwrite
)
elif not isinstance(downloader, Downloader):
raise TypeError("The downloader argument must be a parfive.Downloader object.")
# Handle retrying failed downloads
retries = [isinstance(arg, Results) for arg in query_results]
if all(retries):
results = Results()
for retry in query_results:
dr = downloader.retry(retry)
results.data += dr.data
results._errors += dr._errors
return results
elif any(retries):
raise TypeError(
"If any arguments to fetch are "
"`parfive.Results` objects, all arguments must be."
)
reslist = []
for query_result in query_results:
for block in query_result.responses:
reslist.append(
block.client.fetch(
block, path=path, downloader=downloader, wait=False, **kwargs
)
)
results = downloader.download()
# Combine the results objects from all the clients into one Results
# object.
for result in reslist:
if result is None:
continue
if not isinstance(result, Results):
raise TypeError(
"If wait is False a client must return a parfive.Downloader and either None"
" or a parfive.Results object."
)
results.data += result.data
results._errors += result.errors
return results
|
https://github.com/sunpy/sunpy/issues/3292
|
Results from 2 Providers:
0 Results from the VSOClient:
Start Time End Time Source Instrument Type
float64 float64 float64 float64 float64
---------- -------- ------- ---------- -------
1 Results from the VSOClient:
Start Time [1] End Time [1] Source Instrument Type Wavelength [2]
Angstrom
str19 str19 str3 str3 str8 float64
------------------- ------------------- ------ ---------- -------- --------------
2011-01-01 00:00:08 2011-01-01 00:00:09 SDO AIA FULLDISK 304.0 .. 304.0
Files Downloaded: 0file [00:00, ?file/s]
Files Downloaded: 0%| | 0/1 [00:00<?, ?file/s]Traceback (most recent call last):
File "download_coord_data.py", line 20, in <module>
res = Fido.fetch(results, path='./{file}', downloader=downloader)
File "/home/stuart/.virtualenvs/sunpy-release/lib/python3.7/site-packages/sunpy/net/fido_factory.py", line 378, in fetch
results = downloader.download()
File "/home/stuart/Git/parfive/parfive/downloader.py", line 194, in download
future = self.run_until_complete(self._run_download(timeouts))
File "/usr/lib/python3.7/asyncio/base_events.py", line 577, in run_until_complete
raise RuntimeError('Event loop stopped before Future completed.')
RuntimeError: Event loop stopped before Future completed.
|
RuntimeError
|
def _get_goes_sat_num(start, end):
"""Parses the query time to determine which GOES satellite to use."""
goes_operational = {
2: TimeRange("1980-01-04", "1983-05-01"),
5: TimeRange("1983-05-02", "1984-08-01"),
6: TimeRange("1983-06-01", "1994-08-19"),
7: TimeRange("1994-01-01", "1996-08-14"),
8: TimeRange("1996-03-21", "2003-06-19"),
9: TimeRange("1997-01-01", "1998-09-09"),
10: TimeRange("1998-07-10", "2009-12-02"),
11: TimeRange("2006-06-20", "2008-02-16"),
12: TimeRange("2002-12-13", "2007-05-09"),
13: TimeRange("2006-08-01", "2006-08-01"),
14: TimeRange("2009-12-02", "2010-11-05"),
15: TimeRange("2010-09-01", Time.now()),
}
sat_list = []
for sat_num in goes_operational:
if (
goes_operational[sat_num].start <= start <= goes_operational[sat_num].end
and goes_operational[sat_num].start <= end <= goes_operational[sat_num].end
):
# if true then the satellite with sat_num is available
sat_list.append(sat_num)
if not sat_list:
# if no satellites were found then raise an exception
raise Exception("No operational GOES satellites within time range")
else:
return sat_list
|
def _get_goes_sat_num(self, start, end):
"""Parses the query time to determine which GOES satellite to use."""
goes_operational = {
2: TimeRange("1980-01-04", "1983-05-01"),
5: TimeRange("1983-05-02", "1984-08-01"),
6: TimeRange("1983-06-01", "1994-08-19"),
7: TimeRange("1994-01-01", "1996-08-14"),
8: TimeRange("1996-03-21", "2003-06-19"),
9: TimeRange("1997-01-01", "1998-09-09"),
10: TimeRange("1998-07-10", "2009-12-02"),
11: TimeRange("2006-06-20", "2008-02-16"),
12: TimeRange("2002-12-13", "2007-05-09"),
13: TimeRange("2006-08-01", "2006-08-01"),
14: TimeRange("2009-12-02", "2010-11-05"),
15: TimeRange("2010-09-01", Time.now()),
}
sat_list = []
for sat_num in goes_operational:
if (
start >= goes_operational[sat_num].start
and start <= goes_operational[sat_num].end
and (
end >= goes_operational[sat_num].start
and end <= goes_operational[sat_num].end
)
):
# if true then the satellite with sat_num is available
sat_list.append(sat_num)
if not sat_list:
# if no satellites were found then raise an exception
raise Exception("No operational GOES satellites within time range")
else:
return sat_list
|
https://github.com/sunpy/sunpy/issues/3078
|
In [3]: g = TimeSeries('../flarenet/data/goes/go06860129.fits')
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-3-686c02e70652> in <module>()
----> 1 g = TimeSeries('../flarenet/data/goes/go06860129.fits')
~/Projects/sunpydev/sunpy/timeseries/timeseries_factory.py in __call__(self, *args, **kwargs)
442 cls = types[0]
443
--> 444 data_header_unit_tuples.append(cls._parse_hdus(pairs))
445
446 # Loop over each registered type and check to see if WidgetType
~/Projects/sunpydev/sunpy/timeseries/sources/goes.py in _parse_hdus(cls, hdulist)
169 seconds_from_start = hdulist[2].data['TIME'][0]
170 elif 1 <= len(hdulist) <= 3:
--> 171 start_time = parse_time(header['TIMEZERO'])
172 seconds_from_start = hdulist[0].data[0]
173 xrsb = hdulist[0].data[1]
~/Projects/sunpydev/sunpy/time/time.py in parse_time(time_string, format, **kwargs)
291 rt = Time.now()
292 else:
--> 293 rt = convert_time(time_string, format=format, **kwargs)
294
295 return rt
~/.pyenv/versions/3.6.3/lib/python3.6/functools.py in wrapper(*args, **kw)
801
802 def wrapper(*args, **kw):
--> 803 return dispatch(args[0].__class__)(*args, **kw)
804
805 registry[object] = func
~/Projects/sunpydev/sunpy/time/time.py in convert_time(time_string, format, **kwargs)
138 def convert_time(time_string, format=None, **kwargs):
139 # default case when no type matches
--> 140 return Time(time_string, format=format, **kwargs)
141
142
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in __init__(self, val, val2, format, scale, precision, in_subfmt, out_subfmt, location, copy)
397 else:
398 self._init_from_vals(val, val2, format, scale, copy,
--> 399 precision, in_subfmt, out_subfmt)
400 self.SCALES = TIME_TYPES[self.scale]
401
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in _init_from_vals(self, val, val2, format, scale, copy, precision, in_subfmt, out_subfmt)
452 # Parse / convert input values into internal jd1, jd2 based on format
453 self._time = self._get_time_fmt(val, val2, format, scale,
--> 454 precision, in_subfmt, out_subfmt)
455 self._format = self._time.name
456
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in _get_time_fmt(self, val, val2, format, scale, precision, in_subfmt, out_subfmt)
485 format.lower() in self.FORMATS):
486 if format is None:
--> 487 raise ValueError("No time format was given, and the input is "
488 "not unique")
489 else:
ValueError: No time format was given, and the input is not unique
|
ValueError
|
def _parse_hdus(cls, hdulist):
header = MetaDict(OrderedDict(hdulist[0].header))
if len(hdulist) == 4:
if is_time_in_given_format(hdulist[0].header["DATE-OBS"], "%d/%m/%Y"):
start_time = Time.strptime(hdulist[0].header["DATE-OBS"], "%d/%m/%Y")
elif is_time_in_given_format(hdulist[0].header["DATE-OBS"], "%d/%m/%y"):
start_time = Time.strptime(hdulist[0].header["DATE-OBS"], "%d/%m/%y")
else:
raise ValueError("Date not recognized")
xrsb = hdulist[2].data["FLUX"][0][:, 0]
xrsa = hdulist[2].data["FLUX"][0][:, 1]
seconds_from_start = hdulist[2].data["TIME"][0]
elif 1 <= len(hdulist) <= 3:
start_time = parse_time(header["TIMEZERO"], format="utime")
seconds_from_start = hdulist[0].data[0]
xrsb = hdulist[0].data[1]
xrsa = hdulist[0].data[2]
else:
raise ValueError("Don't know how to parse this file")
times = start_time + TimeDelta(seconds_from_start * u.second)
times.precision = 9
# remove bad values as defined in header comments
xrsb[xrsb == -99999] = np.nan
xrsa[xrsa == -99999] = np.nan
# fix byte ordering
newxrsa = xrsa.byteswap().newbyteorder()
newxrsb = xrsb.byteswap().newbyteorder()
data = DataFrame(
{"xrsa": newxrsa, "xrsb": newxrsb}, index=times.isot.astype("datetime64")
)
data.sort_index(inplace=True)
# Add the units
units = OrderedDict([("xrsa", u.W / u.m**2), ("xrsb", u.W / u.m**2)])
return data, header, units
|
def _parse_hdus(cls, hdulist):
header = MetaDict(OrderedDict(hdulist[0].header))
if len(hdulist) == 4:
if is_time_in_given_format(hdulist[0].header["DATE-OBS"], "%d/%m/%Y"):
start_time = Time.strptime(hdulist[0].header["DATE-OBS"], "%d/%m/%Y")
elif is_time_in_given_format(hdulist[0].header["DATE-OBS"], "%d/%m/%y"):
start_time = Time.strptime(hdulist[0].header["DATE-OBS"], "%d/%m/%y")
else:
raise ValueError("Date not recognized")
xrsb = hdulist[2].data["FLUX"][0][:, 0]
xrsa = hdulist[2].data["FLUX"][0][:, 1]
seconds_from_start = hdulist[2].data["TIME"][0]
elif 1 <= len(hdulist) <= 3:
start_time = parse_time(header["TIMEZERO"])
seconds_from_start = hdulist[0].data[0]
xrsb = hdulist[0].data[1]
xrsa = hdulist[0].data[2]
else:
raise ValueError("Don't know how to parse this file")
times = start_time + TimeDelta(seconds_from_start * u.second)
times.precision = 9
# remove bad values as defined in header comments
xrsb[xrsb == -99999] = np.nan
xrsa[xrsa == -99999] = np.nan
# fix byte ordering
newxrsa = xrsa.byteswap().newbyteorder()
newxrsb = xrsb.byteswap().newbyteorder()
data = DataFrame(
{"xrsa": newxrsa, "xrsb": newxrsb}, index=times.isot.astype("datetime64")
)
data.sort_index(inplace=True)
# Add the units
units = OrderedDict([("xrsa", u.W / u.m**2), ("xrsb", u.W / u.m**2)])
return data, header, units
|
https://github.com/sunpy/sunpy/issues/3078
|
In [3]: g = TimeSeries('../flarenet/data/goes/go06860129.fits')
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-3-686c02e70652> in <module>()
----> 1 g = TimeSeries('../flarenet/data/goes/go06860129.fits')
~/Projects/sunpydev/sunpy/timeseries/timeseries_factory.py in __call__(self, *args, **kwargs)
442 cls = types[0]
443
--> 444 data_header_unit_tuples.append(cls._parse_hdus(pairs))
445
446 # Loop over each registered type and check to see if WidgetType
~/Projects/sunpydev/sunpy/timeseries/sources/goes.py in _parse_hdus(cls, hdulist)
169 seconds_from_start = hdulist[2].data['TIME'][0]
170 elif 1 <= len(hdulist) <= 3:
--> 171 start_time = parse_time(header['TIMEZERO'])
172 seconds_from_start = hdulist[0].data[0]
173 xrsb = hdulist[0].data[1]
~/Projects/sunpydev/sunpy/time/time.py in parse_time(time_string, format, **kwargs)
291 rt = Time.now()
292 else:
--> 293 rt = convert_time(time_string, format=format, **kwargs)
294
295 return rt
~/.pyenv/versions/3.6.3/lib/python3.6/functools.py in wrapper(*args, **kw)
801
802 def wrapper(*args, **kw):
--> 803 return dispatch(args[0].__class__)(*args, **kw)
804
805 registry[object] = func
~/Projects/sunpydev/sunpy/time/time.py in convert_time(time_string, format, **kwargs)
138 def convert_time(time_string, format=None, **kwargs):
139 # default case when no type matches
--> 140 return Time(time_string, format=format, **kwargs)
141
142
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in __init__(self, val, val2, format, scale, precision, in_subfmt, out_subfmt, location, copy)
397 else:
398 self._init_from_vals(val, val2, format, scale, copy,
--> 399 precision, in_subfmt, out_subfmt)
400 self.SCALES = TIME_TYPES[self.scale]
401
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in _init_from_vals(self, val, val2, format, scale, copy, precision, in_subfmt, out_subfmt)
452 # Parse / convert input values into internal jd1, jd2 based on format
453 self._time = self._get_time_fmt(val, val2, format, scale,
--> 454 precision, in_subfmt, out_subfmt)
455 self._format = self._time.name
456
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in _get_time_fmt(self, val, val2, format, scale, precision, in_subfmt, out_subfmt)
485 format.lower() in self.FORMATS):
486 if format is None:
--> 487 raise ValueError("No time format was given, and the input is "
488 "not unique")
489 else:
ValueError: No time format was given, and the input is not unique
|
ValueError
|
def _read_file(fname, **kwargs):
"""
Test reading a file with sunpy.io for automatic source detection.
Parameters
----------
fname : filename
kwargs
Returns
-------
parsed : bool
True if file has been reading
pairs : list or str
List of (data, header) pairs if ``parsed`` is ``True`` or ``fname``
if ``False``
"""
if "source" not in kwargs.keys() or not kwargs["source"]:
try:
pairs = read_file(fname, **kwargs)
new_pairs = []
for pair in pairs:
filedata, filemeta = pair
if isinstance(filemeta, FileHeader):
data = filedata
meta = MetaDict(filemeta)
new_pairs.append(HDPair(data, meta))
return True, new_pairs
except UnrecognizedFileTypeError:
return False, fname
else:
return False, fname
|
def _read_file(self, fname, **kwargs):
"""
Test reading a file with sunpy.io for automatic source detection.
Parameters
----------
fname : filename
kwargs
Returns
-------
parsed : bool
True if file has been reading
pairs : list or str
List of (data, header) pairs if ``parsed`` is ``True`` or ``fname``
if ``False``
"""
if "source" not in kwargs.keys() or not kwargs["source"]:
try:
pairs = read_file(fname, **kwargs)
new_pairs = []
for pair in pairs:
filedata, filemeta = pair
if isinstance(filemeta, FileHeader):
data = filedata
meta = MetaDict(filemeta)
new_pairs.append(HDPair(data, meta))
return True, new_pairs
except UnrecognizedFileTypeError:
return False, fname
else:
return False, fname
|
https://github.com/sunpy/sunpy/issues/3078
|
In [3]: g = TimeSeries('../flarenet/data/goes/go06860129.fits')
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-3-686c02e70652> in <module>()
----> 1 g = TimeSeries('../flarenet/data/goes/go06860129.fits')
~/Projects/sunpydev/sunpy/timeseries/timeseries_factory.py in __call__(self, *args, **kwargs)
442 cls = types[0]
443
--> 444 data_header_unit_tuples.append(cls._parse_hdus(pairs))
445
446 # Loop over each registered type and check to see if WidgetType
~/Projects/sunpydev/sunpy/timeseries/sources/goes.py in _parse_hdus(cls, hdulist)
169 seconds_from_start = hdulist[2].data['TIME'][0]
170 elif 1 <= len(hdulist) <= 3:
--> 171 start_time = parse_time(header['TIMEZERO'])
172 seconds_from_start = hdulist[0].data[0]
173 xrsb = hdulist[0].data[1]
~/Projects/sunpydev/sunpy/time/time.py in parse_time(time_string, format, **kwargs)
291 rt = Time.now()
292 else:
--> 293 rt = convert_time(time_string, format=format, **kwargs)
294
295 return rt
~/.pyenv/versions/3.6.3/lib/python3.6/functools.py in wrapper(*args, **kw)
801
802 def wrapper(*args, **kw):
--> 803 return dispatch(args[0].__class__)(*args, **kw)
804
805 registry[object] = func
~/Projects/sunpydev/sunpy/time/time.py in convert_time(time_string, format, **kwargs)
138 def convert_time(time_string, format=None, **kwargs):
139 # default case when no type matches
--> 140 return Time(time_string, format=format, **kwargs)
141
142
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in __init__(self, val, val2, format, scale, precision, in_subfmt, out_subfmt, location, copy)
397 else:
398 self._init_from_vals(val, val2, format, scale, copy,
--> 399 precision, in_subfmt, out_subfmt)
400 self.SCALES = TIME_TYPES[self.scale]
401
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in _init_from_vals(self, val, val2, format, scale, copy, precision, in_subfmt, out_subfmt)
452 # Parse / convert input values into internal jd1, jd2 based on format
453 self._time = self._get_time_fmt(val, val2, format, scale,
--> 454 precision, in_subfmt, out_subfmt)
455 self._format = self._time.name
456
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in _get_time_fmt(self, val, val2, format, scale, precision, in_subfmt, out_subfmt)
485 format.lower() in self.FORMATS):
486 if format is None:
--> 487 raise ValueError("No time format was given, and the input is "
488 "not unique")
489 else:
ValueError: No time format was given, and the input is not unique
|
ValueError
|
def _validate_meta(meta):
"""
Validate a meta argument for use as metadata.
Currently only validates by class.
"""
if isinstance(meta, astropy.io.fits.header.Header):
return True
elif isinstance(meta, sunpy.io.header.FileHeader):
return True
elif isinstance(meta, dict):
return True
else:
return False
|
def _validate_meta(self, meta):
"""
Validate a meta argument for use as metadata.
Currently only validates by class.
"""
if isinstance(meta, astropy.io.fits.header.Header):
return True
elif isinstance(meta, sunpy.io.header.FileHeader):
return True
elif isinstance(meta, dict):
return True
else:
return False
|
https://github.com/sunpy/sunpy/issues/3078
|
In [3]: g = TimeSeries('../flarenet/data/goes/go06860129.fits')
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-3-686c02e70652> in <module>()
----> 1 g = TimeSeries('../flarenet/data/goes/go06860129.fits')
~/Projects/sunpydev/sunpy/timeseries/timeseries_factory.py in __call__(self, *args, **kwargs)
442 cls = types[0]
443
--> 444 data_header_unit_tuples.append(cls._parse_hdus(pairs))
445
446 # Loop over each registered type and check to see if WidgetType
~/Projects/sunpydev/sunpy/timeseries/sources/goes.py in _parse_hdus(cls, hdulist)
169 seconds_from_start = hdulist[2].data['TIME'][0]
170 elif 1 <= len(hdulist) <= 3:
--> 171 start_time = parse_time(header['TIMEZERO'])
172 seconds_from_start = hdulist[0].data[0]
173 xrsb = hdulist[0].data[1]
~/Projects/sunpydev/sunpy/time/time.py in parse_time(time_string, format, **kwargs)
291 rt = Time.now()
292 else:
--> 293 rt = convert_time(time_string, format=format, **kwargs)
294
295 return rt
~/.pyenv/versions/3.6.3/lib/python3.6/functools.py in wrapper(*args, **kw)
801
802 def wrapper(*args, **kw):
--> 803 return dispatch(args[0].__class__)(*args, **kw)
804
805 registry[object] = func
~/Projects/sunpydev/sunpy/time/time.py in convert_time(time_string, format, **kwargs)
138 def convert_time(time_string, format=None, **kwargs):
139 # default case when no type matches
--> 140 return Time(time_string, format=format, **kwargs)
141
142
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in __init__(self, val, val2, format, scale, precision, in_subfmt, out_subfmt, location, copy)
397 else:
398 self._init_from_vals(val, val2, format, scale, copy,
--> 399 precision, in_subfmt, out_subfmt)
400 self.SCALES = TIME_TYPES[self.scale]
401
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in _init_from_vals(self, val, val2, format, scale, copy, precision, in_subfmt, out_subfmt)
452 # Parse / convert input values into internal jd1, jd2 based on format
453 self._time = self._get_time_fmt(val, val2, format, scale,
--> 454 precision, in_subfmt, out_subfmt)
455 self._format = self._time.name
456
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in _get_time_fmt(self, val, val2, format, scale, precision, in_subfmt, out_subfmt)
485 format.lower() in self.FORMATS):
486 if format is None:
--> 487 raise ValueError("No time format was given, and the input is "
488 "not unique")
489 else:
ValueError: No time format was given, and the input is not unique
|
ValueError
|
def _validate_units(units):
"""
Validates the astropy unit-information associated with a TimeSeries.
Should be a dictionary of some form (but not MetaDict) with only
astropy units for values.
"""
warnings.simplefilter("always", Warning)
result = True
# It must be a dictionary
if not isinstance(units, dict) or isinstance(units, MetaDict):
return False
for key in units:
if not isinstance(units[key], u.UnitBase):
# If this is not a unit then this can't be a valid units dict.
return False
# Passed all the tests
return result
|
def _validate_units(self, units):
"""
Validates the astropy unit-information associated with a TimeSeries.
Should be a dictionary of some form (but not MetaDict) with only
astropy units for values.
"""
warnings.simplefilter("always", Warning)
result = True
# It must be a dictionary
if not isinstance(units, dict) or isinstance(units, MetaDict):
return False
for key in units:
if not isinstance(units[key], u.UnitBase):
# If this is not a unit then this can't be a valid units dict.
return False
# Passed all the tests
return result
|
https://github.com/sunpy/sunpy/issues/3078
|
In [3]: g = TimeSeries('../flarenet/data/goes/go06860129.fits')
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-3-686c02e70652> in <module>()
----> 1 g = TimeSeries('../flarenet/data/goes/go06860129.fits')
~/Projects/sunpydev/sunpy/timeseries/timeseries_factory.py in __call__(self, *args, **kwargs)
442 cls = types[0]
443
--> 444 data_header_unit_tuples.append(cls._parse_hdus(pairs))
445
446 # Loop over each registered type and check to see if WidgetType
~/Projects/sunpydev/sunpy/timeseries/sources/goes.py in _parse_hdus(cls, hdulist)
169 seconds_from_start = hdulist[2].data['TIME'][0]
170 elif 1 <= len(hdulist) <= 3:
--> 171 start_time = parse_time(header['TIMEZERO'])
172 seconds_from_start = hdulist[0].data[0]
173 xrsb = hdulist[0].data[1]
~/Projects/sunpydev/sunpy/time/time.py in parse_time(time_string, format, **kwargs)
291 rt = Time.now()
292 else:
--> 293 rt = convert_time(time_string, format=format, **kwargs)
294
295 return rt
~/.pyenv/versions/3.6.3/lib/python3.6/functools.py in wrapper(*args, **kw)
801
802 def wrapper(*args, **kw):
--> 803 return dispatch(args[0].__class__)(*args, **kw)
804
805 registry[object] = func
~/Projects/sunpydev/sunpy/time/time.py in convert_time(time_string, format, **kwargs)
138 def convert_time(time_string, format=None, **kwargs):
139 # default case when no type matches
--> 140 return Time(time_string, format=format, **kwargs)
141
142
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in __init__(self, val, val2, format, scale, precision, in_subfmt, out_subfmt, location, copy)
397 else:
398 self._init_from_vals(val, val2, format, scale, copy,
--> 399 precision, in_subfmt, out_subfmt)
400 self.SCALES = TIME_TYPES[self.scale]
401
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in _init_from_vals(self, val, val2, format, scale, copy, precision, in_subfmt, out_subfmt)
452 # Parse / convert input values into internal jd1, jd2 based on format
453 self._time = self._get_time_fmt(val, val2, format, scale,
--> 454 precision, in_subfmt, out_subfmt)
455 self._format = self._time.name
456
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in _get_time_fmt(self, val, val2, format, scale, precision, in_subfmt, out_subfmt)
485 format.lower() in self.FORMATS):
486 if format is None:
--> 487 raise ValueError("No time format was given, and the input is "
488 "not unique")
489 else:
ValueError: No time format was given, and the input is not unique
|
ValueError
|
def _from_table(t):
"""
Extract the data, metadata and units from an astropy table for use in
constructing a TimeSeries.
Parameters
----------
t: `~astropy.table.table.Table`
The input table. The datetime column must be the first column or the
(single) primary key index.
Returns
-------
data : `~pandas.core.frame.DataFrame`
meta : `~sunpy.util.metadata.MetaDict`
units : `dict`
"""
table = copy.deepcopy(t)
# Default the time index to the first column
index_name = table.colnames[0]
# Check if another column is defined as the index/primary_key
if table.primary_key:
# Check there is only one primary_key/index column
if len(table.primary_key) != 1:
raise ValueError(
"Invalid input Table, TimeSeries doesn't support conversion"
" of tables with more then one index column."
)
# Extract, convert and remove the index column from the input table
index = table[index_name]
# Convert if the index is given as an astropy Time object
if isinstance(index, Time):
index = index.datetime
index = pd.to_datetime(index)
table.remove_column(index_name)
# Extract the column values from the table
data = {}
units = {}
for colname in table.colnames:
data[colname] = table[colname]
units[colname] = table[colname].unit
# Create a dataframe with this and return
df = pd.DataFrame(data=data, index=index)
return df, MetaDict(table.meta), units
|
def _from_table(self, t):
"""
Extract the data, metadata and units from an astropy table for use in
constructing a TimeSeries.
Parameters
----------
t: `~astropy.table.table.Table`
The input table. The datetime column must be the first column or the
(single) primary key index.
Returns
-------
data : `~pandas.core.frame.DataFrame`
meta : `~sunpy.util.metadata.MetaDict`
units : `dict`
"""
table = copy.deepcopy(t)
# Default the time index to the first column
index_name = table.colnames[0]
# Check if another column is defined as the index/primary_key
if table.primary_key:
# Check there is only one primary_key/index column
if len(table.primary_key) == 1:
table.primary_key[0]
else:
raise ValueError(
"Invalid input Table, TimeSeries doesn't support conversion"
" of tables with more then one index column."
)
# Extract, convert and remove the index column from the input table
index = table[index_name]
# Convert if the index is given as an astropy Time object
if isinstance(index, Time):
index = index.datetime
index = pd.to_datetime(index)
table.remove_column(index_name)
# Extract the column values from the table
data = {}
units = {}
for colname in table.colnames:
data[colname] = table[colname]
units[colname] = table[colname].unit
# Create a dataframe with this and return
df = pd.DataFrame(data=data, index=index)
return df, MetaDict(table.meta), units
|
https://github.com/sunpy/sunpy/issues/3078
|
In [3]: g = TimeSeries('../flarenet/data/goes/go06860129.fits')
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-3-686c02e70652> in <module>()
----> 1 g = TimeSeries('../flarenet/data/goes/go06860129.fits')
~/Projects/sunpydev/sunpy/timeseries/timeseries_factory.py in __call__(self, *args, **kwargs)
442 cls = types[0]
443
--> 444 data_header_unit_tuples.append(cls._parse_hdus(pairs))
445
446 # Loop over each registered type and check to see if WidgetType
~/Projects/sunpydev/sunpy/timeseries/sources/goes.py in _parse_hdus(cls, hdulist)
169 seconds_from_start = hdulist[2].data['TIME'][0]
170 elif 1 <= len(hdulist) <= 3:
--> 171 start_time = parse_time(header['TIMEZERO'])
172 seconds_from_start = hdulist[0].data[0]
173 xrsb = hdulist[0].data[1]
~/Projects/sunpydev/sunpy/time/time.py in parse_time(time_string, format, **kwargs)
291 rt = Time.now()
292 else:
--> 293 rt = convert_time(time_string, format=format, **kwargs)
294
295 return rt
~/.pyenv/versions/3.6.3/lib/python3.6/functools.py in wrapper(*args, **kw)
801
802 def wrapper(*args, **kw):
--> 803 return dispatch(args[0].__class__)(*args, **kw)
804
805 registry[object] = func
~/Projects/sunpydev/sunpy/time/time.py in convert_time(time_string, format, **kwargs)
138 def convert_time(time_string, format=None, **kwargs):
139 # default case when no type matches
--> 140 return Time(time_string, format=format, **kwargs)
141
142
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in __init__(self, val, val2, format, scale, precision, in_subfmt, out_subfmt, location, copy)
397 else:
398 self._init_from_vals(val, val2, format, scale, copy,
--> 399 precision, in_subfmt, out_subfmt)
400 self.SCALES = TIME_TYPES[self.scale]
401
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in _init_from_vals(self, val, val2, format, scale, copy, precision, in_subfmt, out_subfmt)
452 # Parse / convert input values into internal jd1, jd2 based on format
453 self._time = self._get_time_fmt(val, val2, format, scale,
--> 454 precision, in_subfmt, out_subfmt)
455 self._format = self._time.name
456
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in _get_time_fmt(self, val, val2, format, scale, precision, in_subfmt, out_subfmt)
485 format.lower() in self.FORMATS):
486 if format is None:
--> 487 raise ValueError("No time format was given, and the input is "
488 "not unique")
489 else:
ValueError: No time format was given, and the input is not unique
|
ValueError
|
def _parse_args(self, *args, **kwargs):
"""
Parses an args list for data-header pairs. args can contain any
mixture of the following entries:
* tuples of (data, header, unit) (1)
* data, header not in a tuple (1)
* filename, which will be read
* directory, from which all files will be read
* glob, from which all files will be read
* url, which will be downloaded and read
* lists containing any of the above.
(1) Note that header/unit are optional and in either order, but data
but be the first entry in each group.
Example
-------
self._parse_args(data, header,
(data, header),
['file1', 'file2', 'file3'],
'file4',
'directory1',
'*.fits')
"""
data_header_unit_tuples = list()
data_header_pairs = list()
already_timeseries = list()
filepaths = list()
# Account for nested lists of items. Simply outputs a single list of
# items, nested lists are expanded to element level.
args = expand_list(args)
# For each of the arguments, handle each of the cases
i = 0
while i < len(args):
arg = args[i]
# Data-header pair in a tuple
if isinstance(arg, (np.ndarray, Table, pd.DataFrame)):
# Assume a Pandas Dataframe is given
data = arg
units = OrderedDict()
meta = MetaDict()
# Convert the data argument into a Pandas DataFrame if needed.
if isinstance(data, Table):
# We have an Astropy Table:
data, meta, units = self._from_table(data)
elif isinstance(data, np.ndarray):
# We have a numpy ndarray. We assume the first column is a dt index
data = pd.DataFrame(data=data[:, 1:], index=Time(data[:, 0]))
# If there are 1 or 2 more arguments:
for _ in range(2):
if len(args) > i + 1:
# If that next argument isn't data but is metaddata or units:
if not isinstance(args[i + 1], (np.ndarray, Table, pd.DataFrame)):
if self._validate_units(args[i + 1]):
units.update(args[i + 1])
i += 1 # an extra increment to account for the units
elif self._validate_meta(args[i + 1]):
# if we have an astropy.io FITS header then convert
# to preserve multi-line comments
if isinstance(args[i + 1], astropy.io.fits.header.Header):
args[i + 1] = MetaDict(
sunpy.io.header.FileHeader(args[i + 1])
)
meta.update(args[i + 1])
i += 1 # an extra increment to account for the meta
# Add a 3-tuple for this TimeSeries.
data_header_unit_tuples.append((data, meta, units))
# Filepath
elif isinstance(arg, str) and os.path.isfile(os.path.expanduser(arg)):
path = os.path.expanduser(arg)
result = self._read_file(path, **kwargs)
data_header_pairs, filepaths = _apply_result(
data_header_pairs, filepaths, result
)
# Directory
elif isinstance(arg, str) and os.path.isdir(os.path.expanduser(arg)):
path = os.path.expanduser(arg)
files = [os.path.join(path, elem) for elem in os.listdir(path)]
for afile in files:
# returns a boolean telling us if it were read and either a
# tuple or the original filepath for reading by a source
result = self._read_file(afile, **kwargs)
data_header_pairs, filepaths = _apply_result(
data_header_pairs, filepaths, result
)
# Glob
elif isinstance(arg, str) and "*" in arg:
files = glob.glob(os.path.expanduser(arg))
for afile in files:
# returns a boolean telling us if it were read and either a
# tuple or the original filepath for reading by a source
result = self._read_file(afile, **kwargs)
data_header_pairs, filepaths = _apply_result(
data_header_pairs, filepaths, result
)
# Already a TimeSeries
elif isinstance(arg, GenericTimeSeries):
already_timeseries.append(arg)
# A URL
elif isinstance(arg, str) and _is_url(arg):
url = arg
path = download_file(url, get_and_create_download_dir())
result = self._read_file(path, **kwargs)
data_header_pairs, filepaths = _apply_result(
data_header_pairs, filepaths, result
)
else:
raise NoMatchError("File not found or invalid input")
i += 1
# TODO:
# In the end, if there are already TimeSeries it should be put in the
# same order as the input, currently they are not.
return data_header_unit_tuples, data_header_pairs, already_timeseries, filepaths
|
def _parse_args(self, *args, **kwargs):
"""
Parses an args list for data-header pairs. args can contain any
mixture of the following entries:
* tuples of (data, header, unit) (1)
* data, header not in a tuple (1)
* filename, which will be read
* directory, from which all files will be read
* glob, from which all files will be read
* url, which will be downloaded and read
* lists containing any of the above.
(1) Note that header/unit are optional and in either order, but data
but be the first entry in each group.
Example
-------
self._parse_args(data, header,
(data, header),
['file1', 'file2', 'file3'],
'file4',
'directory1',
'*.fits')
"""
data_header_unit_tuples = list()
data_header_pairs = list()
already_timeseries = list()
filepaths = list()
# Take source kwarg if defined
source = kwargs.get("source", None)
# Account for nested lists of items. Simply outputs a single list of
# items, nested lists are expanded to element level.
args = expand_list(args)
# For each of the arguments, handle each of the cases
i = 0
while i < len(args):
arg = args[i]
# Data-header pair in a tuple
if isinstance(arg, (np.ndarray, Table, pd.DataFrame)):
# and self._validate_meta(args[i+1])):
# Assume a Pandas Dataframe is given
data = arg
units = OrderedDict()
meta = MetaDict()
# Convert the data argument into a Pandas DataFrame if needed.
if isinstance(data, Table):
# We have an Astropy Table:
data, meta, units = self._from_table(data)
elif isinstance(data, np.ndarray):
# We have a numpy ndarray. We assume the first column is a dt index
data = pd.DataFrame(data=data[:, 1:], index=Time(data[:, 0]))
# If there are 1 or 2 more arguments:
for _ in range(2):
if len(args) > i + 1:
# If that next argument isn't data but is metaddata or units:
if not isinstance(args[i + 1], (np.ndarray, Table, pd.DataFrame)):
if self._validate_units(args[i + 1]):
units.update(args[i + 1])
i += 1 # an extra increment to account for the units
elif self._validate_meta(args[i + 1]):
# if we have an astropy.io FITS header then convert
# to preserve multi-line comments
if isinstance(args[i + 1], astropy.io.fits.header.Header):
args[i + 1] = MetaDict(
sunpy.io.header.FileHeader(args[i + 1])
)
meta.update(args[i + 1])
i += 1 # an extra increment to account for the meta
# Add a 3-tuple for this TimeSeries.
data_header_unit_tuples.append((data, meta, units))
# Filepath
elif isinstance(arg, str) and os.path.isfile(os.path.expanduser(arg)):
path = os.path.expanduser(arg)
read, result = self._read_file(path, **kwargs)
if read:
data_header_pairs.append(result)
else:
filepaths.append(result)
# Directory
elif isinstance(arg, str) and os.path.isdir(os.path.expanduser(arg)):
path = os.path.expanduser(arg)
files = [os.path.join(path, elem) for elem in os.listdir(path)]
for afile in files:
# returns a boolean telling us if it were read and either a
# tuple or the original filepath for reading by a source
read, result = self._read_file(afile, **kwargs)
if read:
data_header_pairs.append(result)
else:
filepaths.append(result)
# Glob
elif isinstance(arg, str) and "*" in arg:
files = glob.glob(os.path.expanduser(arg))
for afile in files:
# data_header_unit_tuples += self._read_file(afile, **kwargs)
# returns a boolean telling us if it were read and either a
# tuple or the original filepath for reading by a source
read, result = self._read_file(afile, **kwargs)
if read:
data_header_pairs.append(result)
else:
filepaths.append(result)
# Already a TimeSeries
elif isinstance(arg, GenericTimeSeries):
already_timeseries.append(arg)
# A URL
elif isinstance(arg, str) and _is_url(arg):
url = arg
path = download_file(url, get_and_create_download_dir())
pairs = self._read_file(path, **kwargs)
# data_header_pairs += pairs
filepaths.append(pairs[1])
else:
# raise ValueError("File not found or invalid input")
raise NoMatchError("File not found or invalid input")
i += 1
# TODO:
# In the end, if there are already TimeSeries it should be put in the
# same order as the input, currently they are not.
return data_header_unit_tuples, data_header_pairs, already_timeseries, filepaths
|
https://github.com/sunpy/sunpy/issues/3078
|
In [3]: g = TimeSeries('../flarenet/data/goes/go06860129.fits')
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-3-686c02e70652> in <module>()
----> 1 g = TimeSeries('../flarenet/data/goes/go06860129.fits')
~/Projects/sunpydev/sunpy/timeseries/timeseries_factory.py in __call__(self, *args, **kwargs)
442 cls = types[0]
443
--> 444 data_header_unit_tuples.append(cls._parse_hdus(pairs))
445
446 # Loop over each registered type and check to see if WidgetType
~/Projects/sunpydev/sunpy/timeseries/sources/goes.py in _parse_hdus(cls, hdulist)
169 seconds_from_start = hdulist[2].data['TIME'][0]
170 elif 1 <= len(hdulist) <= 3:
--> 171 start_time = parse_time(header['TIMEZERO'])
172 seconds_from_start = hdulist[0].data[0]
173 xrsb = hdulist[0].data[1]
~/Projects/sunpydev/sunpy/time/time.py in parse_time(time_string, format, **kwargs)
291 rt = Time.now()
292 else:
--> 293 rt = convert_time(time_string, format=format, **kwargs)
294
295 return rt
~/.pyenv/versions/3.6.3/lib/python3.6/functools.py in wrapper(*args, **kw)
801
802 def wrapper(*args, **kw):
--> 803 return dispatch(args[0].__class__)(*args, **kw)
804
805 registry[object] = func
~/Projects/sunpydev/sunpy/time/time.py in convert_time(time_string, format, **kwargs)
138 def convert_time(time_string, format=None, **kwargs):
139 # default case when no type matches
--> 140 return Time(time_string, format=format, **kwargs)
141
142
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in __init__(self, val, val2, format, scale, precision, in_subfmt, out_subfmt, location, copy)
397 else:
398 self._init_from_vals(val, val2, format, scale, copy,
--> 399 precision, in_subfmt, out_subfmt)
400 self.SCALES = TIME_TYPES[self.scale]
401
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in _init_from_vals(self, val, val2, format, scale, copy, precision, in_subfmt, out_subfmt)
452 # Parse / convert input values into internal jd1, jd2 based on format
453 self._time = self._get_time_fmt(val, val2, format, scale,
--> 454 precision, in_subfmt, out_subfmt)
455 self._format = self._time.name
456
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in _get_time_fmt(self, val, val2, format, scale, precision, in_subfmt, out_subfmt)
485 format.lower() in self.FORMATS):
486 if format is None:
--> 487 raise ValueError("No time format was given, and the input is "
488 "not unique")
489 else:
ValueError: No time format was given, and the input is not unique
|
ValueError
|
def __call__(self, *args, **kwargs):
"""Method for running the factory. Takes arbitrary arguments and
keyword arguments and passes them to a sequence of pre-registered types
to determine which is the correct TimeSeries source type to build.
Arguments args and kwargs are passed through to the validation
function and to the constructor for the final type. For TimeSeries
types, validation function must take a data-header pair as an argument.
Parameters
----------
silence_errors : `bool`, optional
If set, ignore data-header pairs which cause an exception.
Notes
-----
Extra keyword arguments are passed through to `sunpy.io.read_file` such
as `memmap` for FITS files.
"""
# Hack to get around Python 2.x not backporting PEP 3102.
silence_errors = kwargs.pop("silence_errors", False)
(data_header_unit_tuples, data_header_pairs, already_timeseries, filepaths) = (
self._parse_args(*args, **kwargs)
)
new_timeseries = list()
# The filepaths for unreadable files
for filepath in filepaths:
try:
new_ts = self._check_registered_widgets(filepath=filepath, **kwargs)
new_timeseries.append(new_ts)
except (NoMatchError, MultipleMatchError, ValidationFunctionError):
if not silence_errors:
raise
except Exception:
raise
# data_header_pairs is a list of HDUs as read by sunpy.io
# For each set of HDus find the matching class and read the
# data_header_unit_tuples by calling the _parse_hdus method
# of the class.
for pairs in data_header_pairs:
# Pairs may be x long where x is the number of HDUs in the file.
headers = [pair.header for pair in pairs]
types = []
for header in headers:
try:
match = self._get_matching_widget(meta=header, **kwargs)
if not match == GenericTimeSeries:
types.append(match)
except (MultipleMatchError, NoMatchError):
continue
if not types:
# If no specific classes have been found we can read the data
# if we only have one data header pair:
if len(pairs) == 1:
already_timeseries.append(
GenericTimeSeries(pairs[0].data, pairs[0].header)
)
else:
raise NoMatchError(
"Input read by sunpy.io can not find a "
"matching class for reading multiple HDUs"
)
if len(set(types)) > 1:
raise MultipleMatchError("Multiple HDUs return multiple matching classes.")
cls = types[0]
data_header_unit_tuples.append(cls._parse_hdus(pairs))
# Loop over each registered type and check to see if WidgetType
# matches the arguments. If it does, use that type
for triple in data_header_unit_tuples:
data, header, units = triple
# Make a MetaDict from various input types
meta = header
if isinstance(meta, astropy.io.fits.header.Header):
meta = sunpy.io.header.FileHeader(meta)
meta = MetaDict(meta)
try:
new_ts = self._check_registered_widgets(
data=data, meta=meta, units=units, **kwargs
)
new_timeseries.append(new_ts)
except (NoMatchError, MultipleMatchError, ValidationFunctionError):
if not silence_errors:
raise
except Exception:
raise
new_timeseries += already_timeseries
# Concatenate the timeseries into one if specified.
concatenate = kwargs.get("concatenate", False)
if concatenate:
# Merge all these timeseries into one.
full_timeseries = new_timeseries.pop(0)
for timeseries in new_timeseries:
full_timeseries = full_timeseries.concatenate(timeseries)
new_timeseries = [full_timeseries]
# Sanitize any units OrderedDict details
for timeseries in new_timeseries:
timeseries._sanitize_units()
# Only return single time series, not in a list if we only have one.
if len(new_timeseries) == 1:
return new_timeseries[0]
return new_timeseries
|
def __call__(self, *args, **kwargs):
"""Method for running the factory. Takes arbitrary arguments and
keyword arguments and passes them to a sequence of pre-registered types
to determine which is the correct TimeSeries source type to build.
Arguments args and kwargs are passed through to the validation
function and to the constructor for the final type. For TimeSeries
types, validation function must take a data-header pair as an argument.
Parameters
----------
silence_errors : `bool`, optional
If set, ignore data-header pairs which cause an exception.
Notes
-----
Extra keyword arguments are passed through to `sunpy.io.read_file` such
as `memmap` for FITS files.
"""
# Hack to get around Python 2.x not backporting PEP 3102.
silence_errors = kwargs.pop("silence_errors", False)
(data_header_unit_tuples, data_header_pairs, already_timeseries, filepaths) = (
self._parse_args(*args, **kwargs)
)
new_timeseries = list()
# The filepaths for unreadable files
for filepath in filepaths:
try:
new_ts = self._check_registered_widgets(filepath=filepath, **kwargs)
except (NoMatchError, MultipleMatchError, ValidationFunctionError):
if not silence_errors:
raise
except Exception:
raise
new_timeseries.append(new_ts)
# data_header_pairs is a list of HDUs as read by sunpy.io
# For each set of HDus find the matching class and read the
# data_header_unit_tuples by calling the _parse_hdus method
# of the class.
for pairs in data_header_pairs:
# Pairs may be x long where x is the number of HDUs in the file.
headers = [pair.header for pair in pairs]
types = []
for header in headers:
try:
match = self._get_matching_widget(meta=header, **kwargs)
if not match == GenericTimeSeries:
types.append(match)
except (MultipleMatchError, NoMatchError):
continue
if not types:
# If no specific classes have been found we can read the data
# if we only have one data header pair:
if len(pairs) == 1:
already_timeseries.append(
GenericTimeSeries(pairs[0].data, pairs[0].header)
)
else:
raise NoMatchError(
"Input read by sunpy.io can not find a "
"matching class for reading multiple HDUs"
)
if len(set(types)) > 1:
raise MultipleMatchError("Multiple HDUs return multiple matching classes.")
cls = types[0]
data_header_unit_tuples.append(cls._parse_hdus(pairs))
# Loop over each registered type and check to see if WidgetType
# matches the arguments. If it does, use that type
for triple in data_header_unit_tuples:
data, header, units = triple
# Make a MetaDict from various input types
meta = header
if isinstance(meta, astropy.io.fits.header.Header):
meta = sunpy.io.header.FileHeader(meta)
meta = MetaDict(meta)
try:
new_ts = self._check_registered_widgets(
data=data, meta=meta, units=units, **kwargs
)
except (NoMatchError, MultipleMatchError, ValidationFunctionError):
if not silence_errors:
raise
except Exception:
raise
new_timeseries.append(new_ts)
new_timeseries += already_timeseries
# Concatenate the timeseries into one if specified.
concatenate = kwargs.get("concatenate", False)
if concatenate:
# Merge all these timeseries into one.
full_timeseries = new_timeseries.pop(0)
for timeseries in new_timeseries:
full_timeseries = full_timeseries.concatenate(timeseries)
new_timeseries = [full_timeseries]
# Sanitize any units OrderedDict details
for timeseries in new_timeseries:
timeseries._sanitize_units()
# Only return single time series, not in a list if we only have one.
if len(new_timeseries) == 1:
return new_timeseries[0]
return new_timeseries
|
https://github.com/sunpy/sunpy/issues/3078
|
In [3]: g = TimeSeries('../flarenet/data/goes/go06860129.fits')
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-3-686c02e70652> in <module>()
----> 1 g = TimeSeries('../flarenet/data/goes/go06860129.fits')
~/Projects/sunpydev/sunpy/timeseries/timeseries_factory.py in __call__(self, *args, **kwargs)
442 cls = types[0]
443
--> 444 data_header_unit_tuples.append(cls._parse_hdus(pairs))
445
446 # Loop over each registered type and check to see if WidgetType
~/Projects/sunpydev/sunpy/timeseries/sources/goes.py in _parse_hdus(cls, hdulist)
169 seconds_from_start = hdulist[2].data['TIME'][0]
170 elif 1 <= len(hdulist) <= 3:
--> 171 start_time = parse_time(header['TIMEZERO'])
172 seconds_from_start = hdulist[0].data[0]
173 xrsb = hdulist[0].data[1]
~/Projects/sunpydev/sunpy/time/time.py in parse_time(time_string, format, **kwargs)
291 rt = Time.now()
292 else:
--> 293 rt = convert_time(time_string, format=format, **kwargs)
294
295 return rt
~/.pyenv/versions/3.6.3/lib/python3.6/functools.py in wrapper(*args, **kw)
801
802 def wrapper(*args, **kw):
--> 803 return dispatch(args[0].__class__)(*args, **kw)
804
805 registry[object] = func
~/Projects/sunpydev/sunpy/time/time.py in convert_time(time_string, format, **kwargs)
138 def convert_time(time_string, format=None, **kwargs):
139 # default case when no type matches
--> 140 return Time(time_string, format=format, **kwargs)
141
142
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in __init__(self, val, val2, format, scale, precision, in_subfmt, out_subfmt, location, copy)
397 else:
398 self._init_from_vals(val, val2, format, scale, copy,
--> 399 precision, in_subfmt, out_subfmt)
400 self.SCALES = TIME_TYPES[self.scale]
401
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in _init_from_vals(self, val, val2, format, scale, copy, precision, in_subfmt, out_subfmt)
452 # Parse / convert input values into internal jd1, jd2 based on format
453 self._time = self._get_time_fmt(val, val2, format, scale,
--> 454 precision, in_subfmt, out_subfmt)
455 self._format = self._time.name
456
~/.virtualenvs/sunpy-dev/lib/python3.6/site-packages/astropy/time/core.py in _get_time_fmt(self, val, val2, format, scale, precision, in_subfmt, out_subfmt)
485 format.lower() in self.FORMATS):
486 if format is None:
--> 487 raise ValueError("No time format was given, and the input is "
488 "not unique")
489 else:
ValueError: No time format was given, and the input is not unique
|
ValueError
|
def write(fname, data, header, **kwargs):
"""
Take a data header pair and write a FITS file.
Parameters
----------
fname : `str`
File name, with extension
data : `numpy.ndarray`
n-dimensional data array
header : `dict`
A header dictionary
"""
# Copy header so the one in memory is left alone while changing it for
# write.
header = header.copy()
# The comments need to be added to the header separately from the normal
# kwargs. Find and deal with them:
fits_header = fits.Header()
# Check Header
key_comments = header.pop("KEYCOMMENTS", False)
for k, v in header.items():
if isinstance(v, fits.header._HeaderCommentaryCards):
if k == "comments":
comments = str(v).split("\n")
for com in comments:
fits_header.add_comments(com)
elif k == "history":
hists = str(v).split("\n")
for hist in hists:
fits_header.add_history(hist)
elif k != "":
fits_header.append(fits.Card(k, str(v).split("\n")))
else:
fits_header.append(fits.Card(k, v))
if isinstance(key_comments, dict):
for k, v in key_comments.items():
# Check that the Card for the comment exists before trying to write to it.
if k in fits_header:
fits_header.comments[k] = v
elif key_comments:
raise TypeError("KEYCOMMENTS must be a dictionary")
if isinstance(fname, str):
fname = os.path.expanduser(fname)
fitskwargs = {"output_verify": "fix"}
fitskwargs.update(kwargs)
fits.writeto(fname, data, header=fits_header, **fitskwargs)
|
def write(fname, data, header, **kwargs):
"""
Take a data header pair and write a FITS file.
Parameters
----------
fname : `str`
File name, with extension
data : `numpy.ndarray`
n-dimensional data array
header : `dict`
A header dictionary
"""
# Copy header so the one in memory is left alone while changing it for
# write.
header = header.copy()
# The comments need to be added to the header separately from the normal
# kwargs. Find and deal with them:
fits_header = fits.Header()
# Check Header
key_comments = header.pop("KEYCOMMENTS", False)
for k, v in header.items():
if isinstance(v, fits.header._HeaderCommentaryCards):
if k == "comments":
comments = str(v).split("\n")
for com in comments:
fits_header.add_comments(com)
elif k == "history":
hists = str(v).split("\n")
for hist in hists:
fits_header.add_history(hist)
elif k != "":
fits_header.append(fits.Card(k, str(v).split("\n")))
else:
fits_header.append(fits.Card(k, v))
if isinstance(key_comments, dict):
for k, v in key_comments.items():
fits_header.comments[k] = v
elif key_comments:
raise TypeError("KEYCOMMENTS must be a dictionary")
if isinstance(fname, str):
fname = os.path.expanduser(fname)
fitskwargs = {"output_verify": "fix"}
fitskwargs.update(kwargs)
fits.writeto(fname, data, header=fits_header, **fitskwargs)
|
https://github.com/sunpy/sunpy/issues/2738
|
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-260-752b35ded2b2> in <module>()
----> 1 aia_sub.save("test.fits")
~/miniconda/lib/python3.6/site-packages/sunpy/map/mapbase.py in save(self, filepath, filetype, **kwargs)
930 """
931 io.write_file(filepath, self.data, self.meta, filetype=filetype,
--> 932 **kwargs)
933
934 # #### Image processing routines #### #
~/miniconda/lib/python3.6/site-packages/sunpy/io/file_tools.py in write_file(fname, data, header, filetype, **kwargs)
155 for extension, readername in _known_extensions.items():
156 if fname.endswith(extension):
--> 157 return _readers[readername].write(fname, data, header, **kwargs)
158
159 else:
~/miniconda/lib/python3.6/site-packages/sunpy/io/fits.py in write(fname, data, header, **kwargs)
197 if isinstance(key_comments, dict):
198 for k, v in key_comments.items():
--> 199 fits_header.comments[k] = v
200 elif key_comments:
201 raise TypeError("KEYCOMMENTS must be a dictionary")
~/miniconda/lib/python3.6/site-packages/astropy/io/fits/header.py in __setitem__(self, item, comment)
2030 # In this case, key/index errors should be raised; don't update
2031 # comments of nonexistent cards
-> 2032 idx = self._header._cardindex(item)
2033 value = self._header[idx]
2034 self._header[idx] = (value, comment)
~/miniconda/lib/python3.6/site-packages/astropy/io/fits/header.py in _cardindex(self, key)
1652
1653 if not indices:
-> 1654 raise KeyError("Keyword {!r} not found.".format(keyword))
1655
1656 try:
KeyError: "Keyword 'CROTA2' not found."
|
KeyError
|
def hgs_to_hgc(hgscoord, hgcframe):
"""
Transform from Heliographic Stonyhurst to Heliograpic Carrington.
"""
if hgcframe.obstime is None or np.any(hgcframe.obstime != hgscoord.obstime):
raise ValueError(
"Can not transform from Heliographic Stonyhurst to "
"Heliographic Carrington, unless both frames have matching obstime."
)
c_lon = hgscoord.spherical.lon + _carrington_offset(hgscoord.obstime).to(u.deg)
representation = SphericalRepresentation(
c_lon, hgscoord.spherical.lat, hgscoord.spherical.distance
)
hgcframe = hgcframe.__class__(obstime=hgscoord.obstime)
return hgcframe.realize_frame(representation)
|
def hgs_to_hgc(hgscoord, hgcframe):
"""
Transform from Heliographic Stonyhurst to Heliograpic Carrington.
"""
if hgcframe.obstime is None or np.any(hgcframe.obstime != hgscoord.obstime):
raise ValueError(
"Can not transform from Heliographic Stonyhurst to "
"Heliographic Carrington, unless both frames have matching obstime."
)
c_lon = hgscoord.spherical.lon + _carrington_offset(hgscoord.obstime).to(u.deg)
representation = SphericalRepresentation(c_lon, hgscoord.lat, hgscoord.radius)
hgcframe = hgcframe.__class__(obstime=hgscoord.obstime)
return hgcframe.realize_frame(representation)
|
https://github.com/sunpy/sunpy/issues/2632
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-46-77d08794e7f4> in <module>()
----> 1 foo.transform_to(sunpy.coordinates.Helioprojective(observer=obs))
~/anaconda/envs/synthesizar/lib/python3.6/site-packages/astropy/coordinates/sky_coordinate.py in transform_to(self, frame, merge_attributes)
480 # Do the transformation, returning a coordinate frame of the desired
481 # final type (not generic).
--> 482 new_coord = trans(self.frame, generic_frame)
483
484 # Finally make the new SkyCoord object from the `new_coord` and
~/anaconda/envs/synthesizar/lib/python3.6/site-packages/astropy/coordinates/transformations.py in __call__(self, fromcoord, toframe)
1312
1313 curr_toframe = t.tosys(**frattrs)
-> 1314 curr_coord = t(curr_coord, curr_toframe)
1315
1316 # this is safe even in the case where self.transforms is empty, because
~/anaconda/envs/synthesizar/lib/python3.6/site-packages/astropy/coordinates/transformations.py in __call__(self, fromcoord, toframe)
747
748 def __call__(self, fromcoord, toframe):
--> 749 res = self.func(fromcoord, toframe)
750 if not isinstance(res, self.tosys):
751 raise TypeError('the transformation function yielded {0} but '
~/anaconda/envs/synthesizar/lib/python3.6/site-packages/sunpy/coordinates/transformations.py in hgs_to_hcc(heliogcoord, heliocframe)
186 # TODO: Revert this.
187 from astropy.tests.helper import quantity_allclose
--> 188 hglon = heliogcoord.lon
189 hglat = heliogcoord.lat
190 r = heliogcoord.radius
~/anaconda/envs/synthesizar/lib/python3.6/site-packages/astropy/coordinates/baseframe.py in __getattr__(self, attr)
1249 return val
1250
-> 1251 return self.__getattribute__(attr) # Raise AttributeError.
1252
1253 def __setattr__(self, attr, value):
AttributeError: 'HeliographicStonyhurst' object has no attribute 'lon'
|
AttributeError
|
def hgc_to_hgs(hgccoord, hgsframe):
"""
Convert from Heliograpic Carrington to Heliographic Stonyhurst.
"""
if hgsframe.obstime is None or np.any(hgsframe.obstime != hgccoord.obstime):
raise ValueError(
"Can not transform from Heliographic Carrington to "
"Heliographic Stonyhurst, unless both frames have matching obstime."
)
obstime = hgsframe.obstime
s_lon = hgccoord.spherical.lon - _carrington_offset(obstime).to(u.deg)
representation = SphericalRepresentation(
s_lon, hgccoord.spherical.lat, hgccoord.spherical.distance
)
return hgsframe.realize_frame(representation)
|
def hgc_to_hgs(hgccoord, hgsframe):
"""
Convert from Heliograpic Carrington to Heliographic Stonyhurst.
"""
if hgsframe.obstime is None or np.any(hgsframe.obstime != hgccoord.obstime):
raise ValueError(
"Can not transform from Heliographic Carrington to "
"Heliographic Stonyhurst, unless both frames have matching obstime."
)
obstime = hgsframe.obstime
s_lon = hgccoord.spherical.lon - _carrington_offset(obstime).to(u.deg)
representation = SphericalRepresentation(s_lon, hgccoord.lat, hgccoord.radius)
return hgsframe.realize_frame(representation)
|
https://github.com/sunpy/sunpy/issues/2632
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-46-77d08794e7f4> in <module>()
----> 1 foo.transform_to(sunpy.coordinates.Helioprojective(observer=obs))
~/anaconda/envs/synthesizar/lib/python3.6/site-packages/astropy/coordinates/sky_coordinate.py in transform_to(self, frame, merge_attributes)
480 # Do the transformation, returning a coordinate frame of the desired
481 # final type (not generic).
--> 482 new_coord = trans(self.frame, generic_frame)
483
484 # Finally make the new SkyCoord object from the `new_coord` and
~/anaconda/envs/synthesizar/lib/python3.6/site-packages/astropy/coordinates/transformations.py in __call__(self, fromcoord, toframe)
1312
1313 curr_toframe = t.tosys(**frattrs)
-> 1314 curr_coord = t(curr_coord, curr_toframe)
1315
1316 # this is safe even in the case where self.transforms is empty, because
~/anaconda/envs/synthesizar/lib/python3.6/site-packages/astropy/coordinates/transformations.py in __call__(self, fromcoord, toframe)
747
748 def __call__(self, fromcoord, toframe):
--> 749 res = self.func(fromcoord, toframe)
750 if not isinstance(res, self.tosys):
751 raise TypeError('the transformation function yielded {0} but '
~/anaconda/envs/synthesizar/lib/python3.6/site-packages/sunpy/coordinates/transformations.py in hgs_to_hcc(heliogcoord, heliocframe)
186 # TODO: Revert this.
187 from astropy.tests.helper import quantity_allclose
--> 188 hglon = heliogcoord.lon
189 hglat = heliogcoord.lat
190 r = heliogcoord.radius
~/anaconda/envs/synthesizar/lib/python3.6/site-packages/astropy/coordinates/baseframe.py in __getattr__(self, attr)
1249 return val
1250
-> 1251 return self.__getattribute__(attr) # Raise AttributeError.
1252
1253 def __setattr__(self, attr, value):
AttributeError: 'HeliographicStonyhurst' object has no attribute 'lon'
|
AttributeError
|
def hgs_to_hcc(heliogcoord, heliocframe):
"""
Convert from Heliographic Stonyhurst to Heliocentric Cartesian.
"""
hglon = heliogcoord.spherical.lon
hglat = heliogcoord.spherical.lat
r = heliogcoord.spherical.distance
if r.unit is u.one and quantity_allclose(r, 1 * u.one):
r = np.ones_like(r)
r *= RSUN_METERS
if not isinstance(heliocframe.observer, BaseCoordinateFrame):
raise ConvertError(
"Cannot transform heliographic coordinates to "
"heliocentric coordinates for observer '{}' "
"without `obstime` being specified.".format(heliocframe.observer)
)
l0_rad = heliocframe.observer.lon.to(u.rad)
b0_deg = heliocframe.observer.lat
lon = np.deg2rad(hglon)
lat = np.deg2rad(hglat)
cosb = np.cos(b0_deg.to(u.rad))
sinb = np.sin(b0_deg.to(u.rad))
lon = lon - l0_rad
cosx = np.cos(lon)
sinx = np.sin(lon)
cosy = np.cos(lat)
siny = np.sin(lat)
x = r * cosy * sinx
y = r * (siny * cosb - cosy * cosx * sinb)
zz = r * (siny * sinb + cosy * cosx * cosb)
representation = CartesianRepresentation(x.to(u.km), y.to(u.km), zz.to(u.km))
return heliocframe.realize_frame(representation)
|
def hgs_to_hcc(heliogcoord, heliocframe):
"""
Convert from Heliographic Stonyhurst to Heliocentric Cartesian.
"""
hglon = heliogcoord.lon
hglat = heliogcoord.lat
r = heliogcoord.radius
if r.unit is u.one and quantity_allclose(r, 1 * u.one):
r = np.ones_like(r)
r *= RSUN_METERS
if not isinstance(heliocframe.observer, BaseCoordinateFrame):
raise ConvertError(
"Cannot transform heliographic coordinates to "
"heliocentric coordinates for observer '{}' "
"without `obstime` being specified.".format(heliocframe.observer)
)
l0_rad = heliocframe.observer.lon.to(u.rad)
b0_deg = heliocframe.observer.lat
lon = np.deg2rad(hglon)
lat = np.deg2rad(hglat)
cosb = np.cos(b0_deg.to(u.rad))
sinb = np.sin(b0_deg.to(u.rad))
lon = lon - l0_rad
cosx = np.cos(lon)
sinx = np.sin(lon)
cosy = np.cos(lat)
siny = np.sin(lat)
x = r * cosy * sinx
y = r * (siny * cosb - cosy * cosx * sinb)
zz = r * (siny * sinb + cosy * cosx * cosb)
representation = CartesianRepresentation(x.to(u.km), y.to(u.km), zz.to(u.km))
return heliocframe.realize_frame(representation)
|
https://github.com/sunpy/sunpy/issues/2632
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-46-77d08794e7f4> in <module>()
----> 1 foo.transform_to(sunpy.coordinates.Helioprojective(observer=obs))
~/anaconda/envs/synthesizar/lib/python3.6/site-packages/astropy/coordinates/sky_coordinate.py in transform_to(self, frame, merge_attributes)
480 # Do the transformation, returning a coordinate frame of the desired
481 # final type (not generic).
--> 482 new_coord = trans(self.frame, generic_frame)
483
484 # Finally make the new SkyCoord object from the `new_coord` and
~/anaconda/envs/synthesizar/lib/python3.6/site-packages/astropy/coordinates/transformations.py in __call__(self, fromcoord, toframe)
1312
1313 curr_toframe = t.tosys(**frattrs)
-> 1314 curr_coord = t(curr_coord, curr_toframe)
1315
1316 # this is safe even in the case where self.transforms is empty, because
~/anaconda/envs/synthesizar/lib/python3.6/site-packages/astropy/coordinates/transformations.py in __call__(self, fromcoord, toframe)
747
748 def __call__(self, fromcoord, toframe):
--> 749 res = self.func(fromcoord, toframe)
750 if not isinstance(res, self.tosys):
751 raise TypeError('the transformation function yielded {0} but '
~/anaconda/envs/synthesizar/lib/python3.6/site-packages/sunpy/coordinates/transformations.py in hgs_to_hcc(heliogcoord, heliocframe)
186 # TODO: Revert this.
187 from astropy.tests.helper import quantity_allclose
--> 188 hglon = heliogcoord.lon
189 hglat = heliogcoord.lat
190 r = heliogcoord.radius
~/anaconda/envs/synthesizar/lib/python3.6/site-packages/astropy/coordinates/baseframe.py in __getattr__(self, attr)
1249 return val
1250
-> 1251 return self.__getattribute__(attr) # Raise AttributeError.
1252
1253 def __setattr__(self, attr, value):
AttributeError: 'HeliographicStonyhurst' object has no attribute 'lon'
|
AttributeError
|
def plot(
self,
axes=None,
resample=None,
annotate=True,
interval=200,
plot_function=None,
**kwargs,
):
"""
A animation plotting routine that animates each element in the
MapCube
Parameters
----------
axes: mpl axes
axes to plot the animation on, if none uses current axes
resample: list or False
Draws the map at a lower resolution to increase the speed of
animation. Specify a list as a fraction i.e. [0.25, 0.25] to
plot at 1/4 resolution.
[Note: this will only work where the map arrays are the same size]
annotate: bool
Annotate the figure with scale and titles
interval: int
Animation interval in ms
plot_function : function
A function to be called as each map is plotted. Any variables
returned from the function will have their ``remove()`` method called
at the start of the next frame so that they are removed from the plot.
Examples
--------
>>> import matplotlib.pyplot as plt
>>> import matplotlib.animation as animation
>>> from sunpy.map import Map
>>> cube = Map(files, cube=True) # doctest: +SKIP
>>> ani = cube.plot(colorbar=True) # doctest: +SKIP
>>> plt.show() # doctest: +SKIP
Plot the map at 1/2 original resolution
>>> cube = Map(files, cube=True) # doctest: +SKIP
>>> ani = cube.plot(resample=[0.5, 0.5], colorbar=True) # doctest: +SKIP
>>> plt.show() # doctest: +SKIP
Save an animation of the MapCube
>>> cube = Map(res, cube=True) # doctest: +SKIP
>>> ani = cube.plot() # doctest: +SKIP
>>> Writer = animation.writers['ffmpeg'] # doctest: +SKIP
>>> writer = Writer(fps=10, metadata=dict(artist='SunPy'), bitrate=1800) # doctest: +SKIP
>>> ani.save('mapcube_animation.mp4', writer=writer) # doctest: +SKIP
Save an animation with the limb at each time step
>>> def myplot(fig, ax, sunpy_map):
... p = sunpy_map.draw_limb()
... return p
>>> cube = Map(files, cube=True) # doctest: +SKIP
>>> ani = cube.peek(plot_function=myplot) # doctest: +SKIP
>>> plt.show() # doctest: +SKIP
"""
if not axes:
axes = wcsaxes_compat.gca_wcs(self.maps[0].wcs)
fig = axes.get_figure()
if not plot_function:
plot_function = lambda fig, ax, smap: []
removes = []
# Normal plot
def annotate_frame(i):
axes.set_title("{s.name}".format(s=self[i]))
axes.set_xlabel(
axis_labels_from_ctype(
self[i].coordinate_system[0], self[i].spatial_units[0]
)
)
axes.set_ylabel(
axis_labels_from_ctype(
self[i].coordinate_system[1], self[i].spatial_units[1]
)
)
if resample:
if self.all_maps_same_shape():
resample = u.Quantity(self.maps[0].dimensions) * np.array(resample)
ani_data = [amap.resample(resample) for amap in self.maps]
else:
raise ValueError("Maps in mapcube do not all have the same shape.")
else:
ani_data = self.maps
im = ani_data[0].plot(axes=axes, **kwargs)
def updatefig(i, im, annotate, ani_data, removes):
while removes:
removes.pop(0).remove()
im.set_array(ani_data[i].data)
im.set_cmap(ani_data[i].plot_settings["cmap"])
norm = deepcopy(ani_data[i].plot_settings["norm"])
# The following explicit call is for bugged versions of Astropy's
# ImageNormalize
norm.autoscale_None(ani_data[i].data)
im.set_norm(norm)
if wcsaxes_compat.is_wcsaxes(axes):
im.axes.reset_wcs(ani_data[i].wcs)
wcsaxes_compat.default_wcs_grid(
axes, ani_data[i].spatial_units, ani_data[i].coordinate_system
)
else:
im.set_extent(
np.concatenate((ani_data[i].xrange.value, ani_data[i].yrange.value))
)
if annotate:
annotate_frame(i)
removes += list(plot_function(fig, axes, ani_data[i]))
ani = matplotlib.animation.FuncAnimation(
fig,
updatefig,
frames=list(range(0, len(ani_data))),
fargs=[im, annotate, ani_data, removes],
interval=interval,
blit=False,
)
return ani
|
def plot(
self,
axes=None,
resample=None,
annotate=True,
interval=200,
plot_function=None,
**kwargs,
):
"""
A animation plotting routine that animates each element in the
MapCube
Parameters
----------
axes: mpl axes
axes to plot the animation on, if none uses current axes
resample: list or False
Draws the map at a lower resolution to increase the speed of
animation. Specify a list as a fraction i.e. [0.25, 0.25] to
plot at 1/4 resolution.
[Note: this will only work where the map arrays are the same size]
annotate: bool
Annotate the figure with scale and titles
interval: int
Animation interval in ms
plot_function : function
A function to be called as each map is plotted. Any variables
returned from the function will have their ``remove()`` method called
at the start of the next frame so that they are removed from the plot.
Examples
--------
>>> import matplotlib.pyplot as plt
>>> import matplotlib.animation as animation
>>> from sunpy.map import Map
>>> cube = Map(files, cube=True) # doctest: +SKIP
>>> ani = cube.plot(colorbar=True) # doctest: +SKIP
>>> plt.show() # doctest: +SKIP
Plot the map at 1/2 original resolution
>>> cube = Map(files, cube=True) # doctest: +SKIP
>>> ani = cube.plot(resample=[0.5, 0.5], colorbar=True) # doctest: +SKIP
>>> plt.show() # doctest: +SKIP
Save an animation of the MapCube
>>> cube = Map(res, cube=True) # doctest: +SKIP
>>> ani = cube.plot() # doctest: +SKIP
>>> Writer = animation.writers['ffmpeg'] # doctest: +SKIP
>>> writer = Writer(fps=10, metadata=dict(artist='SunPy'), bitrate=1800) # doctest: +SKIP
>>> ani.save('mapcube_animation.mp4', writer=writer) # doctest: +SKIP
Save an animation with the limb at each time step
>>> def myplot(fig, ax, sunpy_map):
... p = sunpy_map.draw_limb()
... return p
>>> cube = Map(files, cube=True) # doctest: +SKIP
>>> ani = cube.peek(plot_function=myplot) # doctest: +SKIP
>>> plt.show() # doctest: +SKIP
"""
if not axes:
axes = wcsaxes_compat.gca_wcs(self.maps[0].wcs)
fig = axes.get_figure()
if not plot_function:
plot_function = lambda fig, ax, smap: []
removes = []
# Normal plot
def annotate_frame(i):
axes.set_title("{s.name}".format(s=self[i]))
axes.set_xlabel(
axis_labels_from_ctype(
self[i].coordinate_system[0], self[i].spatial_units[0]
)
)
axes.set_ylabel(
axis_labels_from_ctype(
self[i].coordinate_system[1], self[i].spatial_units[1]
)
)
if resample:
if self.all_maps_same_shape():
resample = u.Quantity(self.maps[0].dimensions) * np.array(resample)
ani_data = [amap.resample(resample) for amap in self.maps]
else:
raise ValueError("Maps in mapcube do not all have the same shape.")
else:
ani_data = self.maps
im = ani_data[0].plot(axes=axes, **kwargs)
def updatefig(i, im, annotate, ani_data, removes):
while removes:
removes.pop(0).remove()
im.set_array(ani_data[i].data)
im.set_cmap(ani_data[i].plot_settings["cmap"])
norm = deepcopy(ani_data[i].plot_settings["norm"])
# The following explicit call is for bugged versions of Astropy's
# ImageNormalize
norm.autoscale_None(ani_data[i].data)
im.set_norm(norm)
if wcsaxes_compat.is_wcsaxes(axes):
im.axes.reset_wcs(ani_data[i].wcs)
wcsaxes_compat.default_wcs_grid(axes)
else:
im.set_extent(
np.concatenate((ani_data[i].xrange.value, ani_data[i].yrange.value))
)
if annotate:
annotate_frame(i)
removes += list(plot_function(fig, axes, ani_data[i]))
ani = matplotlib.animation.FuncAnimation(
fig,
updatefig,
frames=list(range(0, len(ani_data))),
fargs=[im, annotate, ani_data, removes],
interval=interval,
blit=False,
)
return ani
|
https://github.com/sunpy/sunpy/issues/2626
|
TypeError Traceback (most recent call last)
<ipython-input-18-ab15991fbba2> in <module>()
12 ani = map_cube.plot()
13 #ani = map_cube.peek().get_animation() # This works but have progress bars cover axis
---> 14 ani.save('test.mp4')
~/.virtualenvs/gen/lib/python3.6/site-packages/matplotlib/animation.py in save(self, filename, writer, fps, dpi, codec, bitrate, extra_args, metadata, extra_anim, savefig_kwargs)
1193 for anim in all_anim:
1194 # Clear the initial frame
-> 1195 anim._init_draw()
1196 for data in zip(*[a.new_saved_frame_seq() for a in all_anim]):
1197 for anim, d in zip(all_anim, data):
~/.virtualenvs/gen/lib/python3.6/site-packages/matplotlib/animation.py in _init_draw(self)
1748 # artists.
1749 if self._init_func is None:
-> 1750 self._draw_frame(next(self.new_frame_seq()))
1751
1752 else:
~/.virtualenvs/gen/lib/python3.6/site-packages/matplotlib/animation.py in _draw_frame(self, framedata)
1770 # Call the func with framedata and args. If blitting is desired,
1771 # func needs to return a sequence of any artists that were modified.
-> 1772 self._drawn_artists = self._func(framedata, *self._args)
1773 if self._blit:
1774 if self._drawn_artists is None:
~/.virtualenvs/gen/lib/python3.6/site-packages/sunpy/map/mapcube.py in updatefig(i, im, annotate, ani_data, removes)
206 if wcsaxes_compat.is_wcsaxes(axes):
207 im.axes.reset_wcs(ani_data[i].wcs)
--> 208 wcsaxes_compat.default_wcs_grid(axes)
209 else:
210 im.set_extent(np.concatenate((ani_data[i].xrange.value,
TypeError: default_wcs_grid() missing 2 required positional arguments: 'units' and 'ctypes'
|
TypeError
|
def updatefig(i, im, annotate, ani_data, removes):
while removes:
removes.pop(0).remove()
im.set_array(ani_data[i].data)
im.set_cmap(ani_data[i].plot_settings["cmap"])
norm = deepcopy(ani_data[i].plot_settings["norm"])
# The following explicit call is for bugged versions of Astropy's
# ImageNormalize
norm.autoscale_None(ani_data[i].data)
im.set_norm(norm)
if wcsaxes_compat.is_wcsaxes(axes):
im.axes.reset_wcs(ani_data[i].wcs)
wcsaxes_compat.default_wcs_grid(
axes, ani_data[i].spatial_units, ani_data[i].coordinate_system
)
else:
im.set_extent(
np.concatenate((ani_data[i].xrange.value, ani_data[i].yrange.value))
)
if annotate:
annotate_frame(i)
removes += list(plot_function(fig, axes, ani_data[i]))
|
def updatefig(i, im, annotate, ani_data, removes):
while removes:
removes.pop(0).remove()
im.set_array(ani_data[i].data)
im.set_cmap(ani_data[i].plot_settings["cmap"])
norm = deepcopy(ani_data[i].plot_settings["norm"])
# The following explicit call is for bugged versions of Astropy's
# ImageNormalize
norm.autoscale_None(ani_data[i].data)
im.set_norm(norm)
if wcsaxes_compat.is_wcsaxes(axes):
im.axes.reset_wcs(ani_data[i].wcs)
wcsaxes_compat.default_wcs_grid(axes)
else:
im.set_extent(
np.concatenate((ani_data[i].xrange.value, ani_data[i].yrange.value))
)
if annotate:
annotate_frame(i)
removes += list(plot_function(fig, axes, ani_data[i]))
|
https://github.com/sunpy/sunpy/issues/2626
|
TypeError Traceback (most recent call last)
<ipython-input-18-ab15991fbba2> in <module>()
12 ani = map_cube.plot()
13 #ani = map_cube.peek().get_animation() # This works but have progress bars cover axis
---> 14 ani.save('test.mp4')
~/.virtualenvs/gen/lib/python3.6/site-packages/matplotlib/animation.py in save(self, filename, writer, fps, dpi, codec, bitrate, extra_args, metadata, extra_anim, savefig_kwargs)
1193 for anim in all_anim:
1194 # Clear the initial frame
-> 1195 anim._init_draw()
1196 for data in zip(*[a.new_saved_frame_seq() for a in all_anim]):
1197 for anim, d in zip(all_anim, data):
~/.virtualenvs/gen/lib/python3.6/site-packages/matplotlib/animation.py in _init_draw(self)
1748 # artists.
1749 if self._init_func is None:
-> 1750 self._draw_frame(next(self.new_frame_seq()))
1751
1752 else:
~/.virtualenvs/gen/lib/python3.6/site-packages/matplotlib/animation.py in _draw_frame(self, framedata)
1770 # Call the func with framedata and args. If blitting is desired,
1771 # func needs to return a sequence of any artists that were modified.
-> 1772 self._drawn_artists = self._func(framedata, *self._args)
1773 if self._blit:
1774 if self._drawn_artists is None:
~/.virtualenvs/gen/lib/python3.6/site-packages/sunpy/map/mapcube.py in updatefig(i, im, annotate, ani_data, removes)
206 if wcsaxes_compat.is_wcsaxes(axes):
207 im.axes.reset_wcs(ani_data[i].wcs)
--> 208 wcsaxes_compat.default_wcs_grid(axes)
209 else:
210 im.set_extent(np.concatenate((ani_data[i].xrange.value,
TypeError: default_wcs_grid() missing 2 required positional arguments: 'units' and 'ctypes'
|
TypeError
|
def plot(
self,
axes=None,
resample=None,
annotate=True,
interval=200,
plot_function=None,
**kwargs,
):
"""
A animation plotting routine that animates each element in the
MapCube
Parameters
----------
gamma: float
Gamma value to use for the color map
axes: mpl axes
axes to plot the animation on, if none uses current axes
resample: list or False
Draws the map at a lower resolution to increase the speed of
animation. Specify a list as a fraction i.e. [0.25, 0.25] to
plot at 1/4 resolution.
[Note: this will only work where the map arrays are the same size]
annotate: bool
Annotate the figure with scale and titles
interval: int
Animation interval in ms
plot_function : function
A function to be called as each map is plotted. Any variables
returned from the function will have their ``remove()`` method called
at the start of the next frame so that they are removed from the plot.
Examples
--------
>>> import matplotlib.pyplot as plt
>>> import matplotlib.animation as animation
>>> from sunpy.map import Map
>>> cube = Map(files, cube=True) # doctest: +SKIP
>>> ani = cube.plot(colorbar=True) # doctest: +SKIP
>>> plt.show() # doctest: +SKIP
Plot the map at 1/2 original resolution
>>> cube = Map(files, cube=True) # doctest: +SKIP
>>> ani = cube.plot(resample=[0.5, 0.5], colorbar=True) # doctest: +SKIP
>>> plt.show() # doctest: +SKIP
Save an animation of the MapCube
>>> cube = Map(res, cube=True) # doctest: +SKIP
>>> ani = cube.plot() # doctest: +SKIP
>>> Writer = animation.writers['ffmpeg'] # doctest: +SKIP
>>> writer = Writer(fps=10, metadata=dict(artist='SunPy'), bitrate=1800) # doctest: +SKIP
>>> ani.save('mapcube_animation.mp4', writer=writer) # doctest: +SKIP
Save an animation with the limb at each time step
>>> def myplot(fig, ax, sunpy_map):
... p = sunpy_map.draw_limb()
... return p
>>> cube = Map(files, cube=True) # doctest: +SKIP
>>> ani = cube.peek(plot_function=myplot) # doctest: +SKIP
>>> plt.show() # doctest: +SKIP
"""
if not axes:
axes = wcsaxes_compat.gca_wcs(self.maps[0].wcs)
fig = axes.get_figure()
if not plot_function:
plot_function = lambda fig, ax, smap: []
removes = []
# Normal plot
def annotate_frame(i):
axes.set_title("{s.name}".format(s=self[i]))
axes.set_xlabel(
axis_labels_from_ctype(
self[i].coordinate_system[0], self[i].spatial_units[0]
)
)
axes.set_ylabel(
axis_labels_from_ctype(
self[i].coordinate_system[1], self[i].spatial_units[1]
)
)
if resample:
if self.all_maps_same_shape():
resample = u.Quantity(self.maps[0].dimensions) * np.array(resample)
ani_data = [amap.resample(resample) for amap in self.maps]
else:
raise ValueError("Maps in mapcube do not all have the same shape.")
else:
ani_data = self.maps
im = ani_data[0].plot(axes=axes, **kwargs)
def updatefig(i, im, annotate, ani_data, removes):
while removes:
removes.pop(0).remove()
im.set_array(ani_data[i].data)
im.set_cmap(ani_data[i].plot_settings["cmap"])
norm = deepcopy(ani_data[i].plot_settings["norm"])
# The following explicit call is for bugged versions of Astropy's
# ImageNormalize
norm.autoscale_None(ani_data[i].data)
im.set_norm(norm)
if wcsaxes_compat.is_wcsaxes(axes):
im.axes.reset_wcs(ani_data[i].wcs)
wcsaxes_compat.default_wcs_grid(axes)
else:
im.set_extent(
np.concatenate((ani_data[i].xrange.value, ani_data[i].yrange.value))
)
if annotate:
annotate_frame(i)
removes += list(plot_function(fig, axes, ani_data[i]))
ani = matplotlib.animation.FuncAnimation(
fig,
updatefig,
frames=list(range(0, len(ani_data))),
fargs=[im, annotate, ani_data, removes],
interval=interval,
blit=False,
)
return ani
|
def plot(
self,
axes=None,
resample=None,
annotate=True,
interval=200,
plot_function=None,
**kwargs,
):
"""
A animation plotting routine that animates each element in the
MapCube
Parameters
----------
gamma: float
Gamma value to use for the color map
axes: mpl axes
axes to plot the animation on, if none uses current axes
resample: list or False
Draws the map at a lower resolution to increase the speed of
animation. Specify a list as a fraction i.e. [0.25, 0.25] to
plot at 1/4 resolution.
[Note: this will only work where the map arrays are the same size]
annotate: bool
Annotate the figure with scale and titles
interval: int
Animation interval in ms
plot_function : function
A function to be called as each map is plotted. Any variables
returned from the function will have their ``remove()`` method called
at the start of the next frame so that they are removed from the plot.
Examples
--------
>>> import matplotlib.pyplot as plt
>>> import matplotlib.animation as animation
>>> from sunpy.map import Map
>>> cube = Map(files, cube=True) # doctest: +SKIP
>>> ani = cube.plot(colorbar=True) # doctest: +SKIP
>>> plt.show() # doctest: +SKIP
Plot the map at 1/2 original resolution
>>> cube = Map(files, cube=True) # doctest: +SKIP
>>> ani = cube.plot(resample=[0.5, 0.5], colorbar=True) # doctest: +SKIP
>>> plt.show() # doctest: +SKIP
Save an animation of the MapCube
>>> cube = Map(res, cube=True) # doctest: +SKIP
>>> ani = cube.plot() # doctest: +SKIP
>>> Writer = animation.writers['ffmpeg'] # doctest: +SKIP
>>> writer = Writer(fps=10, metadata=dict(artist='SunPy'), bitrate=1800) # doctest: +SKIP
>>> ani.save('mapcube_animation.mp4', writer=writer) # doctest: +SKIP
Save an animation with the limb at each time step
>>> def myplot(fig, ax, sunpy_map):
... p = sunpy_map.draw_limb()
... return p
>>> cube = Map(files, cube=True) # doctest: +SKIP
>>> ani = cube.peek(plot_function=myplot) # doctest: +SKIP
>>> plt.show() # doctest: +SKIP
"""
if not axes:
axes = wcsaxes_compat.gca_wcs(self.maps[0].wcs)
fig = axes.get_figure()
if not plot_function:
plot_function = lambda fig, ax, smap: []
removes = []
# Normal plot
def annotate_frame(i):
axes.set_title("{s.name}".format(s=self[i]))
# x-axis label
if self[0].coordinate_system.x == "HG":
xlabel = "Longitude [{lon}".format(lon=self[i].spatial_units.x)
else:
xlabel = "X-position [{xpos}]".format(xpos=self[i].spatial_units.x)
# y-axis label
if self[0].coordinate_system.y == "HG":
ylabel = "Latitude [{lat}]".format(lat=self[i].spatial_units.y)
else:
ylabel = "Y-position [{ypos}]".format(ypos=self[i].spatial_units.y)
axes.set_xlabel(xlabel)
axes.set_ylabel(ylabel)
if resample:
if self.all_maps_same_shape():
resample = u.Quantity(self.maps[0].dimensions) * np.array(resample)
ani_data = [amap.resample(resample) for amap in self.maps]
else:
raise ValueError("Maps in mapcube do not all have the same shape.")
else:
ani_data = self.maps
im = ani_data[0].plot(axes=axes, **kwargs)
def updatefig(i, im, annotate, ani_data, removes):
while removes:
removes.pop(0).remove()
im.set_array(ani_data[i].data)
im.set_cmap(ani_data[i].plot_settings["cmap"])
norm = deepcopy(ani_data[i].plot_settings["norm"])
# The following explicit call is for bugged versions of Astropy's
# ImageNormalize
norm.autoscale_None(ani_data[i].data)
im.set_norm(norm)
if wcsaxes_compat.is_wcsaxes(axes):
im.axes.reset_wcs(ani_data[i].wcs)
wcsaxes_compat.default_wcs_grid(axes)
else:
im.set_extent(
np.concatenate((ani_data[i].xrange.value, ani_data[i].yrange.value))
)
if annotate:
annotate_frame(i)
removes += list(plot_function(fig, axes, ani_data[i]))
ani = matplotlib.animation.FuncAnimation(
fig,
updatefig,
frames=list(range(0, len(ani_data))),
fargs=[im, annotate, ani_data, removes],
interval=interval,
blit=False,
)
return ani
|
https://github.com/sunpy/sunpy/issues/2295
|
Traceback (most recent call last):
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/backends/backend_qt5agg.py", line 197, in __draw_idle_agg
FigureCanvasAgg.draw(self)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/backends/backend_agg.py", line 464, in draw
self.figure.draw(self.renderer)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/artist.py", line 63, in draw_wrapper
draw(artist, renderer, *args, **kwargs)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/figure.py", line 1151, in draw
self.canvas.draw_event(renderer)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/backend_bases.py", line 1823, in draw_event
self.callbacks.process(s, event)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/cbook.py", line 554, in process
proxy(*args, **kwargs)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/cbook.py", line 416, in call
return mtd(*args, **kwargs)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/animation.py", line 881, in _start
self._init_draw()
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/animation.py", line 1540, in _init_draw
self._draw_frame(next(self.new_frame_seq()))
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/animation.py", line 1562, in _draw_frame
self._drawn_artists = self._func(framedata, *self._args)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/sunpy/map/mapcube.py", line 227, in updatefig
annotate_frame(i)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/sunpy/map/mapcube.py", line 181, in annotate_frame
if self[0].coordinate_system.x == 'HG':
AttributeError: 'SpatialPair' object has no attribute 'x'
|
AttributeError
|
def annotate_frame(i):
axes.set_title("{s.name}".format(s=self[i]))
axes.set_xlabel(
axis_labels_from_ctype(self[i].coordinate_system[0], self[i].spatial_units[0])
)
axes.set_ylabel(
axis_labels_from_ctype(self[i].coordinate_system[1], self[i].spatial_units[1])
)
|
def annotate_frame(i):
axes.set_title("{s.name}".format(s=self[i]))
# x-axis label
if self[0].coordinate_system.x == "HG":
xlabel = "Longitude [{lon}".format(lon=self[i].spatial_units.x)
else:
xlabel = "X-position [{xpos}]".format(xpos=self[i].spatial_units.x)
# y-axis label
if self[0].coordinate_system.y == "HG":
ylabel = "Latitude [{lat}]".format(lat=self[i].spatial_units.y)
else:
ylabel = "Y-position [{ypos}]".format(ypos=self[i].spatial_units.y)
axes.set_xlabel(xlabel)
axes.set_ylabel(ylabel)
|
https://github.com/sunpy/sunpy/issues/2295
|
Traceback (most recent call last):
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/backends/backend_qt5agg.py", line 197, in __draw_idle_agg
FigureCanvasAgg.draw(self)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/backends/backend_agg.py", line 464, in draw
self.figure.draw(self.renderer)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/artist.py", line 63, in draw_wrapper
draw(artist, renderer, *args, **kwargs)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/figure.py", line 1151, in draw
self.canvas.draw_event(renderer)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/backend_bases.py", line 1823, in draw_event
self.callbacks.process(s, event)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/cbook.py", line 554, in process
proxy(*args, **kwargs)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/cbook.py", line 416, in call
return mtd(*args, **kwargs)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/animation.py", line 881, in _start
self._init_draw()
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/animation.py", line 1540, in _init_draw
self._draw_frame(next(self.new_frame_seq()))
File "/media/solar1/anaconda3/lib/python3.6/site-packages/matplotlib/animation.py", line 1562, in _draw_frame
self._drawn_artists = self._func(framedata, *self._args)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/sunpy/map/mapcube.py", line 227, in updatefig
annotate_frame(i)
File "/media/solar1/anaconda3/lib/python3.6/site-packages/sunpy/map/mapcube.py", line 181, in annotate_frame
if self[0].coordinate_system.x == 'HG':
AttributeError: 'SpatialPair' object has no attribute 'x'
|
AttributeError
|
def __init__(self, lst):
"""
Input to this constructor can be one of a few things:
1. A ``QueryResponse`` object
2. A list of tuples ``(QueryResponse, client)``
"""
tmplst = []
# numfile is the number of files not the number of results.
self._numfile = 0
if isinstance(lst, (QueryResponse, vsoQueryResponse)):
if not hasattr(lst, "client"):
raise ValueError(
"A {} object is only a valid input to UnifiedResponse if it has a client attribute.".format(
type(lst).__name__
)
)
tmplst.append(lst)
self._numfile = len(lst)
else:
for block in lst:
if isinstance(block, tuple) and len(block) == 2:
block[0].client = block[1]
tmplst.append(block[0])
self._numfile += len(block[0])
elif hasattr(block, "client"):
tmplst.append(block)
self._numfile += len(block)
else:
raise ValueError(
"{} is not a valid input to UnifiedResponse.".format(type(lst))
)
self._list = tmplst
|
def __init__(self, lst):
"""
Input to this constructor can be one of a few things:
1. A list of one UnifiedResponse object
2. A list of tuples (QueryResponse, client)
"""
tmplst = []
# numfile is the number of files not the number of results.
self._numfile = 0
if isinstance(lst, QueryResponse):
if not hasattr(lst, "client"):
raise ("QueryResponse is only a valid input if it has a client attribute.")
tmplst.append(lst)
self._numfile = len(lst)
else:
for block in lst:
if isinstance(block, tuple) and len(block) == 2:
block[0].client = block[1]
tmplst.append(block[0])
self._numfile += len(block[0])
elif hasattr(block, "client"):
tmplst.append(block)
self._numfile += len(block)
else:
raise Exception(
"{} is not a valid input to UnifiedResponse.".format(type(lst))
)
self._list = tmplst
|
https://github.com/sunpy/sunpy/issues/2140
|
from sunpy.net import Fido, attrs as a
res = Fido.search(a.Time("2016-06-07", "2016-06-08"), a.Instrument('eve') | a.Instrument('goes'))
res[0]
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 15472, in default
run_compiled_code(code, self.ctx, None, 'single')
File "/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 3634, in run_compiled_code
func(code, glb, loc)
File "<xonsh-code>", line 1, in <module>
File "/usr/lib/python3.6/site-packages/sunpy/net/fido_factory.py", line 67, in __getitem__
return type(self)(ret)
File "/usr/lib/python3.6/site-packages/sunpy/net/fido_factory.py", line 57, in __init__
raise Exception("{} is not a valid input to UnifiedResponse.".format(type(lst)))
Exception: <class 'sunpy.net.vso.vso.QueryResponse'> is not a valid input to UnifiedResponse.
|
Exception
|
def __getitem__(self, aslice):
"""
Support slicing the UnifiedResponse as a 2D object.
The first index is to the client and the second index is the records
returned from those clients.
"""
# Just a single int as a slice, we are just indexing client.
if isinstance(aslice, (int, slice)):
ret = self._list[aslice]
# Make sure we only have a length two slice.
elif isinstance(aslice, tuple):
if len(aslice) > 2:
raise IndexError(
"UnifiedResponse objects can only be sliced with one or two indices."
)
# Indexing both client and records, but only for one client.
if isinstance(aslice[0], int):
client_resp = self._list[aslice[0]]
ret = self._handle_record_slice(client_resp, aslice[1])
# Indexing both client and records for multiple clients.
else:
intermediate = self._list[aslice[0]]
ret = []
for client_resp in intermediate:
resp = self._handle_record_slice(client_resp, aslice[1])
ret.append(resp)
else:
raise IndexError("UnifiedResponse objects must be sliced with integers.")
return UnifiedResponse(ret)
|
def __getitem__(self, aslice):
ret = self._list[aslice]
if ret:
return type(self)(ret)
return ret
|
https://github.com/sunpy/sunpy/issues/2140
|
from sunpy.net import Fido, attrs as a
res = Fido.search(a.Time("2016-06-07", "2016-06-08"), a.Instrument('eve') | a.Instrument('goes'))
res[0]
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 15472, in default
run_compiled_code(code, self.ctx, None, 'single')
File "/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 3634, in run_compiled_code
func(code, glb, loc)
File "<xonsh-code>", line 1, in <module>
File "/usr/lib/python3.6/site-packages/sunpy/net/fido_factory.py", line 67, in __getitem__
return type(self)(ret)
File "/usr/lib/python3.6/site-packages/sunpy/net/fido_factory.py", line 57, in __init__
raise Exception("{} is not a valid input to UnifiedResponse.".format(type(lst)))
Exception: <class 'sunpy.net.vso.vso.QueryResponse'> is not a valid input to UnifiedResponse.
|
Exception
|
def _repr_html_(self):
nprov = len(self)
if nprov == 1:
ret = "Results from {} Provider:</br></br>".format(len(self))
else:
ret = "Results from {} Providers:</br></br>".format(len(self))
for block in self.responses:
ret += "{} Results from the {}:</br>".format(
len(block), block.client.__class__.__name__
)
ret += block._repr_html_()
ret += "</br>"
return ret
|
def _repr_html_(self):
ret = ""
for block in self.responses:
ret += "Results from the {}:\n".format(block.client.__class__.__name__)
ret += block._repr_html_()
ret += "\n"
return ret
|
https://github.com/sunpy/sunpy/issues/2140
|
from sunpy.net import Fido, attrs as a
res = Fido.search(a.Time("2016-06-07", "2016-06-08"), a.Instrument('eve') | a.Instrument('goes'))
res[0]
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 15472, in default
run_compiled_code(code, self.ctx, None, 'single')
File "/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 3634, in run_compiled_code
func(code, glb, loc)
File "<xonsh-code>", line 1, in <module>
File "/usr/lib/python3.6/site-packages/sunpy/net/fido_factory.py", line 67, in __getitem__
return type(self)(ret)
File "/usr/lib/python3.6/site-packages/sunpy/net/fido_factory.py", line 57, in __init__
raise Exception("{} is not a valid input to UnifiedResponse.".format(type(lst)))
Exception: <class 'sunpy.net.vso.vso.QueryResponse'> is not a valid input to UnifiedResponse.
|
Exception
|
def __repr__(self):
ret = super(UnifiedResponse, self).__repr__()
ret += "\n" + str(self)
return ret
|
def __repr__(self):
ret = super(UnifiedResponse, self).__repr__()
ret += "\n"
for block in self.responses:
ret += "Results from the {}:\n".format(block.client.__class__.__name__)
ret += repr(block)
ret += "\n"
return ret
|
https://github.com/sunpy/sunpy/issues/2140
|
from sunpy.net import Fido, attrs as a
res = Fido.search(a.Time("2016-06-07", "2016-06-08"), a.Instrument('eve') | a.Instrument('goes'))
res[0]
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 15472, in default
run_compiled_code(code, self.ctx, None, 'single')
File "/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 3634, in run_compiled_code
func(code, glb, loc)
File "<xonsh-code>", line 1, in <module>
File "/usr/lib/python3.6/site-packages/sunpy/net/fido_factory.py", line 67, in __getitem__
return type(self)(ret)
File "/usr/lib/python3.6/site-packages/sunpy/net/fido_factory.py", line 57, in __init__
raise Exception("{} is not a valid input to UnifiedResponse.".format(type(lst)))
Exception: <class 'sunpy.net.vso.vso.QueryResponse'> is not a valid input to UnifiedResponse.
|
Exception
|
def search(self, *query):
"""
Query for data in form of multiple parameters.
Examples
--------
Query for LYRALightCurve data for the time range ('2012/3/4','2012/3/6')
>>> from sunpy.net import Fido, attrs as a
>>> unifresp = Fido.search(a.Time('2012/3/4', '2012/3/6'), a.Instrument('lyra'))
Query for data from Nobeyama Radioheliograph and RHESSI
>>> unifresp = Fido.search(a.Time('2012/3/4', '2012/3/6'),
(a.Instrument('norh') & a.Wavelength(17*u.GHz)) | a.Instrument('rhessi'))
Query for 304 Angstrom SDO AIA data with a cadence of 10 minutes
>>> import astropy.units as u
>>> from sunpy.net import Fido, attrs as a
>>> unifresp = Fido.search(a.Time('2012/3/4', '2012/3/6'),
a.Instrument('AIA'),
a.Wavelength(304*u.angstrom, 304*u.angstrom),
a.vso.Sample(10*u.minute))
Parameters
----------
query : `sunpy.net.vso.attrs`, `sunpy.net.jsoc.attrs`
A query consisting of multiple parameters which define the
requested data. The query is specified using attributes from the
VSO and the JSOC. The query can mix attributes from the VSO and
the JSOC.
Returns
-------
`sunpy.net.fido_factory.UnifiedResponse` object
Container of responses returned by clients servicing query.
Notes
-----
The conjunction 'and' transforms query into disjunctive normal form
ie. query is now of form A & B or ((A & B) | (C & D))
This helps in modularising query into parts and handling each of the
parts individually.
"""
query = attr.and_(*query)
return UnifiedResponse(query_walker.create(query, self))
|
def search(self, *query):
"""
Query for data in form of multiple parameters.
Examples
--------
Query for LYRALightCurve data for the time range ('2012/3/4','2012/3/6')
>>> from sunpy.net import Fido, attrs as a
>>> unifresp = Fido.search(a.Time('2012/3/4', '2012/3/6'), a.Instrument('lyra'))
Query for data from Nobeyama Radioheliograph and RHESSI
>>> unifresp = Fido.search(a.Time('2012/3/4', '2012/3/6'),
a.Instrument('norh') | a.Instrument('rhessi'))
Query for 304 Angstrom SDO AIA data with a cadence of 10 minutes
>>> import astropy.units as u
>>> from sunpy.net import Fido, attrs as a
>>> unifresp = Fido.search(a.Time('2012/3/4', '2012/3/6'),
a.Instrument('AIA'),
a.Wavelength(304*u.angstrom, 304*u.angstrom),
a.Sample(10*u.minute))
Parameters
----------
query : `sunpy.net.vso.attrs`, `sunpy.net.jsoc.attrs`
A query consisting of multiple parameters which define the
requested data. The query is specified using attributes from the
VSO and the JSOC. The query can mix attributes from the VSO and
the JSOC.
Returns
-------
`sunpy.net.fido_factory.UnifiedResponse` object
Container of responses returned by clients servicing query.
Notes
-----
The conjunction 'and' transforms query into disjunctive normal form
ie. query is now of form A & B or ((A & B) | (C & D))
This helps in modularising query into parts and handling each of the
parts individually.
"""
query = attr.and_(*query)
return UnifiedResponse(query_walker.create(query, self))
|
https://github.com/sunpy/sunpy/issues/2140
|
from sunpy.net import Fido, attrs as a
res = Fido.search(a.Time("2016-06-07", "2016-06-08"), a.Instrument('eve') | a.Instrument('goes'))
res[0]
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 15472, in default
run_compiled_code(code, self.ctx, None, 'single')
File "/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 3634, in run_compiled_code
func(code, glb, loc)
File "<xonsh-code>", line 1, in <module>
File "/usr/lib/python3.6/site-packages/sunpy/net/fido_factory.py", line 67, in __getitem__
return type(self)(ret)
File "/usr/lib/python3.6/site-packages/sunpy/net/fido_factory.py", line 57, in __init__
raise Exception("{} is not a valid input to UnifiedResponse.".format(type(lst)))
Exception: <class 'sunpy.net.vso.vso.QueryResponse'> is not a valid input to UnifiedResponse.
|
Exception
|
def fetch(self, *query_results, **kwargs):
"""
Download the records represented by
`~sunpy.net.fido_factory.UnifiedResponse` objects.
Parameters
----------
query_results : `sunpy.net.fido_factory.UnifiedResponse`
Container returned by query method, or multiple.
wait : `bool`
fetch will wait until the download is complete before returning.
progress : `bool`
Show a progress bar while the download is running.
Returns
-------
`sunpy.net.fido_factory.DownloadResponse`
Example
--------
>>> from sunpy.net.vso.attrs import Time, Instrument
>>> unifresp = Fido.search(Time('2012/3/4','2012/3/6'), Instrument('AIA'))
>>> downresp = Fido.get(unifresp)
>>> file_paths = downresp.wait()
"""
wait = kwargs.pop("wait", True)
progress = kwargs.pop("progress", True)
reslist = []
for query_result in query_results:
for block in query_result.responses:
reslist.append(block.client.get(block, **kwargs))
results = DownloadResponse(reslist)
if wait:
return results.wait(progress=progress)
else:
return results
|
def fetch(self, query_result, wait=True, progress=True, **kwargs):
"""
Downloads the files pointed at by URLs contained within UnifiedResponse
object.
Parameters
----------
query_result : `sunpy.net.fido_factory.UnifiedResponse`
Container returned by query method.
wait : `bool`
fetch will wait until the download is complete before returning.
progress : `bool`
Show a progress bar while the download is running.
Returns
-------
`sunpy.net.fido_factory.DownloadResponse`
Example
--------
>>> from sunpy.net.vso.attrs import Time, Instrument
>>> unifresp = Fido.search(Time('2012/3/4','2012/3/6'), Instrument('AIA'))
>>> downresp = Fido.get(unifresp)
>>> file_paths = downresp.wait()
"""
reslist = []
for block in query_result.responses:
reslist.append(block.client.get(block, **kwargs))
results = DownloadResponse(reslist)
if wait:
return results.wait(progress=progress)
else:
return results
|
https://github.com/sunpy/sunpy/issues/2140
|
from sunpy.net import Fido, attrs as a
res = Fido.search(a.Time("2016-06-07", "2016-06-08"), a.Instrument('eve') | a.Instrument('goes'))
res[0]
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 15472, in default
run_compiled_code(code, self.ctx, None, 'single')
File "/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 3634, in run_compiled_code
func(code, glb, loc)
File "<xonsh-code>", line 1, in <module>
File "/usr/lib/python3.6/site-packages/sunpy/net/fido_factory.py", line 67, in __getitem__
return type(self)(ret)
File "/usr/lib/python3.6/site-packages/sunpy/net/fido_factory.py", line 57, in __init__
raise Exception("{} is not a valid input to UnifiedResponse.".format(type(lst)))
Exception: <class 'sunpy.net.vso.vso.QueryResponse'> is not a valid input to UnifiedResponse.
|
Exception
|
def _make_query_to_client(self, *query):
"""
Given a query, look up the client and perform the query.
Parameters
----------
query : collection of `~sunpy.net.vso.attr` objects
Returns
-------
response : `~sunpy.net.dataretriever.client.QueryResponse`
client : `object`
Instance of client class
"""
candidate_widget_types = self._check_registered_widgets(*query)
tmpclient = candidate_widget_types[0]()
return tmpclient.query(*query), tmpclient
|
def _make_query_to_client(self, *query):
"""
Given a query, look up the client and perform the query.
Parameters
----------
query : collection of `~sunpy.net.vso.attr` objects
Returns
-------
response : `~sunpy.net.dataretriever.client.QueryResponse`
client : Instance of client class
"""
candidate_widget_types = self._check_registered_widgets(*query)
tmpclient = candidate_widget_types[0]()
return tmpclient.query(*query), tmpclient
|
https://github.com/sunpy/sunpy/issues/2140
|
from sunpy.net import Fido, attrs as a
res = Fido.search(a.Time("2016-06-07", "2016-06-08"), a.Instrument('eve') | a.Instrument('goes'))
res[0]
Traceback (most recent call last):
File "/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 15472, in default
run_compiled_code(code, self.ctx, None, 'single')
File "/usr/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 3634, in run_compiled_code
func(code, glb, loc)
File "<xonsh-code>", line 1, in <module>
File "/usr/lib/python3.6/site-packages/sunpy/net/fido_factory.py", line 67, in __getitem__
return type(self)(ret)
File "/usr/lib/python3.6/site-packages/sunpy/net/fido_factory.py", line 57, in __init__
raise Exception("{} is not a valid input to UnifiedResponse.".format(type(lst)))
Exception: <class 'sunpy.net.vso.vso.QueryResponse'> is not a valid input to UnifiedResponse.
|
Exception
|
def backprojection(
calibrated_event_list, pixel_size=(1.0, 1.0) * u.arcsec, image_dim=(64, 64) * u.pix
):
"""
Given a stacked calibrated event list fits file create a back
projection image.
.. warning:: The image is not in the right orientation!
Parameters
----------
calibrated_event_list : string
filename of a RHESSI calibrated event list
pixel_size : `~astropy.units.Quantity` instance
the size of the pixels in arcseconds. Default is (1,1).
image_dim : `~astropy.units.Quantity` instance
the size of the output image in number of pixels
Returns
-------
out : RHESSImap
Return a backprojection map.
Examples
--------
>>> import sunpy.data.sample
>>> import sunpy.instr.rhessi as rhessi
>>> map = rhessi.backprojection(sunpy.data.sample.RHESSI_EVENT_LIST) # doctest: +SKIP
>>> map.peek() # doctest: +SKIP
"""
pixel_size = pixel_size.to(u.arcsec)
image_dim = np.array(image_dim.to(u.pix).value, dtype=int)
afits = fits.open(calibrated_event_list)
info_parameters = afits[2]
xyoffset = info_parameters.data.field("USED_XYOFFSET")[0]
time_range = TimeRange(info_parameters.data.field("ABSOLUTE_TIME_RANGE")[0])
image = np.zeros(image_dim)
# find out what detectors were used
det_index_mask = afits[1].data.field("det_index_mask")[0]
detector_list = (np.arange(9) + 1) * np.array(det_index_mask)
for detector in detector_list:
if detector > 0:
image = image + _backproject(
calibrated_event_list,
detector=detector,
pixel_size=pixel_size.value,
image_dim=image_dim,
)
dict_header = {
"DATE-OBS": time_range.center.strftime("%Y-%m-%d %H:%M:%S"),
"CDELT1": pixel_size[0],
"NAXIS1": image_dim[0],
"CRVAL1": xyoffset[0],
"CRPIX1": image_dim[0] / 2 + 0.5,
"CUNIT1": "arcsec",
"CTYPE1": "HPLN-TAN",
"CDELT2": pixel_size[1],
"NAXIS2": image_dim[1],
"CRVAL2": xyoffset[1],
"CRPIX2": image_dim[0] / 2 + 0.5,
"CUNIT2": "arcsec",
"CTYPE2": "HPLT-TAN",
"HGLT_OBS": 0,
"HGLN_OBS": 0,
"RSUN_OBS": solar_semidiameter_angular_size(time_range.center).value,
"RSUN_REF": sunpy.sun.constants.radius.value,
"DSUN_OBS": sunearth_distance(time_range.center) * sunpy.sun.constants.au.value,
}
result_map = sunpy.map.Map(image, dict_header)
return result_map
|
def backprojection(
calibrated_event_list, pixel_size=(1.0, 1.0) * u.arcsec, image_dim=(64, 64) * u.pix
):
"""
Given a stacked calibrated event list fits file create a back
projection image.
.. warning:: The image is not in the right orientation!
Parameters
----------
calibrated_event_list : string
filename of a RHESSI calibrated event list
pixel_size : `~astropy.units.Quantity` instance
the size of the pixels in arcseconds. Default is (1,1).
image_dim : `~astropy.units.Quantity` instance
the size of the output image in number of pixels
Returns
-------
out : RHESSImap
Return a backprojection map.
Examples
--------
>>> import sunpy.data.sample
>>> import sunpy.instr.rhessi as rhessi
>>> map = rhessi.backprojection(sunpy.data.sample.RHESSI_EVENT_LIST) # doctest: +SKIP
>>> map.peek() # doctest: +SKIP
"""
if not isinstance(pixel_size, u.Quantity):
raise ValueError("Must be astropy Quantity in arcseconds")
try:
pixel_size = pixel_size.to(u.arcsec)
except:
raise ValueError("'{0}' is not a valid pixel_size unit".format(pixel_size.unit))
if not (isinstance(image_dim, u.Quantity) and image_dim.unit == "pix"):
raise ValueError("Must be astropy Quantity in pixels")
afits = fits.open(calibrated_event_list)
info_parameters = afits[2]
xyoffset = info_parameters.data.field("USED_XYOFFSET")[0]
time_range = TimeRange(info_parameters.data.field("ABSOLUTE_TIME_RANGE")[0])
image = np.zeros(image_dim.value)
# find out what detectors were used
det_index_mask = afits[1].data.field("det_index_mask")[0]
detector_list = (np.arange(9) + 1) * np.array(det_index_mask)
for detector in detector_list:
if detector > 0:
image = image + _backproject(
calibrated_event_list,
detector=detector,
pixel_size=pixel_size.value,
image_dim=image_dim.value,
)
dict_header = {
"DATE-OBS": time_range.center().strftime("%Y-%m-%d %H:%M:%S"),
"CDELT1": pixel_size[0],
"NAXIS1": image_dim[0],
"CRVAL1": xyoffset[0],
"CRPIX1": image_dim[0].value / 2 + 0.5,
"CUNIT1": "arcsec",
"CTYPE1": "HPLN-TAN",
"CDELT2": pixel_size[1],
"NAXIS2": image_dim[1],
"CRVAL2": xyoffset[1],
"CRPIX2": image_dim[0].value / 2 + 0.5,
"CUNIT2": "arcsec",
"CTYPE2": "HPLT-TAN",
"HGLT_OBS": 0,
"HGLN_OBS": 0,
"RSUN_OBS": solar_semidiameter_angular_size(time_range.center()).value,
"RSUN_REF": sunpy.sun.constants.radius.value,
"DSUN_OBS": sunearth_distance(time_range.center())
* sunpy.sun.constants.au.value,
}
header = sunpy.map.MetaDict(dict_header)
result_map = sunpy.map.Map(image, header)
return result_map
|
https://github.com/sunpy/sunpy/issues/1473
|
import sunpy.data
>>> import sunpy.data.sample
>>> import sunpy.instr.rhessi as rhessi
>>> sunpy.data.download_sample_data(overwrite=False)
>>> map = rhessi.backprojection(sunpy.data.sample.RHESSI_EVENT_LIST)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-35-2587d1ec7c8a> in <module>()
----> 1 map = rhessi.backprojection(sunpy.data.sample.RHESSI_EVENT_LIST)
/Users/bsipocz/munka/devel/sunpy/sunpy/instr/rhessi.py in backprojection(calibrated_event_list, pixel_size, image_dim)
405 "CDELT1": pixel_size[0],
406 "NAXIS1": image_dim[0],
--> 407 "CRVAL1": xyoffset[0],
408 "CRPIX1": image_dim[0].value/2 + 0.5,
409 "CUNIT1": "arcsec",
/Users/bsipocz/munka/devel/sunpy/sunpy/time/timerange.py in center(self)
128 value : `datetime.datetime`
129 """
--> 130 return self.start + self.dt / 2
131
132 @property
/Users/bsipocz/munka/devel/sunpy/sunpy/time/timerange.py in dt(self)
117 dt : `datetime.timedelta`
118 """
--> 119 return self._t2 - self._t1
120
121 @property
TypeError: unsupported operand type(s) for -: 'NoneType' and 'NoneType'
|
TypeError
|
def backprojection(
calibrated_event_list, pixel_size=(1.0, 1.0) * u.arcsec, image_dim=(64, 64) * u.pix
):
"""
Given a stacked calibrated event list fits file create a back
projection image.
.. warning:: The image is not in the right orientation!
Parameters
----------
calibrated_event_list : string
filename of a RHESSI calibrated event list
pixel_size : `~astropy.units.Quantity` instance
the size of the pixels in arcseconds. Default is (1,1).
image_dim : `~astropy.units.Quantity` instance
the size of the output image in number of pixels
Returns
-------
out : RHESSImap
Return a backprojection map.
Examples
--------
>>> import sunpy.data
>>> import sunpy.data.sample
>>> import sunpy.instr.rhessi as rhessi
>>> sunpy.data.download_sample_data(overwrite=False) # doctest: +SKIP
>>> map = rhessi.backprojection(sunpy.data.sample.RHESSI_EVENT_LIST) # doctest: +SKIP
>>> map.peek() # doctest: +SKIP
"""
pixel_size = pixel_size.to(u.arcsec)
image_dim = np.array(image_dim.to(u.pix).value, dtype=int)
afits = fits.open(calibrated_event_list)
info_parameters = afits[2]
xyoffset = info_parameters.data.field("USED_XYOFFSET")[0]
time_range = TimeRange(info_parameters.data.field("ABSOLUTE_TIME_RANGE")[0])
image = np.zeros(image_dim)
# find out what detectors were used
det_index_mask = afits[1].data.field("det_index_mask")[0]
detector_list = (np.arange(9) + 1) * np.array(det_index_mask)
for detector in detector_list:
if detector > 0:
image = image + _backproject(
calibrated_event_list,
detector=detector,
pixel_size=pixel_size.value,
image_dim=image_dim,
)
dict_header = {
"DATE-OBS": time_range.center.strftime("%Y-%m-%d %H:%M:%S"),
"CDELT1": pixel_size[0],
"NAXIS1": image_dim[0],
"CRVAL1": xyoffset[0],
"CRPIX1": image_dim[0] / 2 + 0.5,
"CUNIT1": "arcsec",
"CTYPE1": "HPLN-TAN",
"CDELT2": pixel_size[1],
"NAXIS2": image_dim[1],
"CRVAL2": xyoffset[1],
"CRPIX2": image_dim[0] / 2 + 0.5,
"CUNIT2": "arcsec",
"CTYPE2": "HPLT-TAN",
"HGLT_OBS": 0,
"HGLN_OBS": 0,
"RSUN_OBS": solar_semidiameter_angular_size(time_range.center).value,
"RSUN_REF": sunpy.sun.constants.radius.value,
"DSUN_OBS": sunearth_distance(time_range.center) * sunpy.sun.constants.au.value,
}
result_map = sunpy.map.Map(image, dict_header)
return result_map
|
def backprojection(
calibrated_event_list, pixel_size=(1.0, 1.0) * u.arcsec, image_dim=(64, 64) * u.pix
):
"""
Given a stacked calibrated event list fits file create a back
projection image.
.. warning:: The image is not in the right orientation!
Parameters
----------
calibrated_event_list : string
filename of a RHESSI calibrated event list
pixel_size : `~astropy.units.Quantity` instance
the size of the pixels in arcseconds. Default is (1,1).
image_dim : `~astropy.units.Quantity` instance
the size of the output image in number of pixels
Returns
-------
out : RHESSImap
Return a backprojection map.
Examples
--------
>>> import sunpy.data
>>> import sunpy.data.sample
>>> import sunpy.instr.rhessi as rhessi
>>> sunpy.data.download_sample_data(overwrite=False) # doctest: +SKIP
>>> map = rhessi.backprojection(sunpy.data.sample.RHESSI_EVENT_LIST) # doctest: +SKIP
>>> map.peek() # doctest: +SKIP
"""
if not isinstance(pixel_size, u.Quantity):
raise ValueError("Must be astropy Quantity in arcseconds")
try:
pixel_size = pixel_size.to(u.arcsec)
except:
raise ValueError("'{0}' is not a valid pixel_size unit".format(pixel_size.unit))
if not (isinstance(image_dim, u.Quantity) and image_dim.unit == "pix"):
raise ValueError("Must be astropy Quantity in pixels")
afits = fits.open(calibrated_event_list)
info_parameters = afits[2]
xyoffset = info_parameters.data.field("USED_XYOFFSET")[0]
time_range = TimeRange(info_parameters.data.field("ABSOLUTE_TIME_RANGE")[0])
image = np.zeros(image_dim.value)
# find out what detectors were used
det_index_mask = afits[1].data.field("det_index_mask")[0]
detector_list = (np.arange(9) + 1) * np.array(det_index_mask)
for detector in detector_list:
if detector > 0:
image = image + _backproject(
calibrated_event_list,
detector=detector,
pixel_size=pixel_size.value,
image_dim=image_dim.value,
)
dict_header = {
"DATE-OBS": time_range.center().strftime("%Y-%m-%d %H:%M:%S"),
"CDELT1": pixel_size[0],
"NAXIS1": image_dim[0],
"CRVAL1": xyoffset[0],
"CRPIX1": image_dim[0].value / 2 + 0.5,
"CUNIT1": "arcsec",
"CTYPE1": "HPLN-TAN",
"CDELT2": pixel_size[1],
"NAXIS2": image_dim[1],
"CRVAL2": xyoffset[1],
"CRPIX2": image_dim[0].value / 2 + 0.5,
"CUNIT2": "arcsec",
"CTYPE2": "HPLT-TAN",
"HGLT_OBS": 0,
"HGLN_OBS": 0,
"RSUN_OBS": solar_semidiameter_angular_size(time_range.center()).value,
"RSUN_REF": sunpy.sun.constants.radius.value,
"DSUN_OBS": sunearth_distance(time_range.center())
* sunpy.sun.constants.au.value,
}
header = sunpy.map.MetaDict(dict_header)
result_map = sunpy.map.Map(image, header)
return result_map
|
https://github.com/sunpy/sunpy/issues/1473
|
import sunpy.data
>>> import sunpy.data.sample
>>> import sunpy.instr.rhessi as rhessi
>>> sunpy.data.download_sample_data(overwrite=False)
>>> map = rhessi.backprojection(sunpy.data.sample.RHESSI_EVENT_LIST)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-35-2587d1ec7c8a> in <module>()
----> 1 map = rhessi.backprojection(sunpy.data.sample.RHESSI_EVENT_LIST)
/Users/bsipocz/munka/devel/sunpy/sunpy/instr/rhessi.py in backprojection(calibrated_event_list, pixel_size, image_dim)
405 "CDELT1": pixel_size[0],
406 "NAXIS1": image_dim[0],
--> 407 "CRVAL1": xyoffset[0],
408 "CRPIX1": image_dim[0].value/2 + 0.5,
409 "CUNIT1": "arcsec",
/Users/bsipocz/munka/devel/sunpy/sunpy/time/timerange.py in center(self)
128 value : `datetime.datetime`
129 """
--> 130 return self.start + self.dt / 2
131
132 @property
/Users/bsipocz/munka/devel/sunpy/sunpy/time/timerange.py in dt(self)
117 dt : `datetime.timedelta`
118 """
--> 119 return self._t2 - self._t1
120
121 @property
TypeError: unsupported operand type(s) for -: 'NoneType' and 'NoneType'
|
TypeError
|
def suds_unwrapper(wrapped_data):
"""
Removes suds wrapping from returned xml data
When grabbing data via votable_interceptor.last_payload from the
suds.client.Client module, it returns the xml data in an un-helpful
"<s:Envelope>" that needs to be removed. This function politely cleans
it up.
Parameters
----------
wrapped_data : `str`
Contains the wrapped xml results from a WSDL query
Returns
-------
unwrapped : `str`
The xml results with the wrapper removed
Examples
--------
>>> from sunpy.net.helio import hec Todo: Fix this example!
>>> from suds.client import Client
>>> from sunpy.net.proxyfix import WellBehavedHttpTransport
>>> votable_interceptor = hec.VotableInterceptor()
>>> client = Client(hec.parser.wsdl_retriever(), plugins=[self.votable_interceptor], transport=WellBehavedHttpTransport())
>>> client.service.getTableNames()
>>> temp = client.last_received().str()
>>> print(temp)
<?xml version="1.0" encoding="UTF-8"?>
<S:Envelope ..... >
<S:Body>
<helio:queryResponse ... >
<VOTABLE xmlns="http://www.ivoa.net/xml/VOTable/v1.1" version="1.1">
<RESOURCE>
...
</RESOURCE>
</VOTABLE>
</helio:queryResponse>
</S:Body>
</S:Envelope>
>>> temp = hec.suds_unwrapper(temp)
>>> print(temp)
<?xml version="1.0" encoding="UTF-8"?>
<VOTABLE xmlns="http://www.ivoa.net/xml/VOTable/v1.1" version="1.1">
<RESOURCE>
...
</RESOURCE>
</VOTABLE>
"""
if six.PY3 and not isinstance(wrapped_data, str):
wrapped_data = wrapped_data.decode("utf-8")
HEADER = '<?xml version="1.0" encoding="UTF-8"?>\n'
CATCH_1 = "<VOTABLE"
CATCH_2 = "</VOTABLE>\n"
# Now going to find the locations of the CATCHes in the wrapped_data
pos_1 = wrapped_data.find(CATCH_1)
pos_2 = wrapped_data.find(CATCH_2)
unwrapped = HEADER + wrapped_data[pos_1:pos_2] + CATCH_2
return unwrapped
|
def suds_unwrapper(wrapped_data):
"""
Removes suds wrapping from returned xml data
When grabbing data via votable_interceptor.last_payload from the
suds.client.Client module, it returns the xml data in an un-helpful
"<s:Envelope>" that needs to be removed. This function politely cleans
it up.
Parameters
----------
wrapped_data : `str`
Contains the wrapped xml results from a WSDL query
Returns
-------
unwrapped : `str`
The xml results with the wrapper removed
Examples
--------
>>> from sunpy.net.helio import hec Todo: Fix this example!
>>> from suds.client import Client
>>> from sunpy.net.proxyfix import WellBehavedHttpTransport
>>> votable_interceptor = hec.VotableInterceptor()
>>> client = Client(hec.parser.wsdl_retriever(), plugins=[self.votable_interceptor], transport=WellBehavedHttpTransport())
>>> client.service.getTableNames()
>>> temp = client.last_received().str()
>>> print(temp)
<?xml version="1.0" encoding="UTF-8"?>
<S:Envelope ..... >
<S:Body>
<helio:queryResponse ... >
<VOTABLE xmlns="http://www.ivoa.net/xml/VOTable/v1.1" version="1.1">
<RESOURCE>
...
</RESOURCE>
</VOTABLE>
</helio:queryResponse>
</S:Body>
</S:Envelope>
>>> temp = hec.suds_unwrapper(temp)
>>> print(temp)
<?xml version="1.0" encoding="UTF-8"?>
<VOTABLE xmlns="http://www.ivoa.net/xml/VOTable/v1.1" version="1.1">
<RESOURCE>
...
</RESOURCE>
</VOTABLE>
"""
HEADER = '<?xml version="1.0" encoding="UTF-8"?>\n'
CATCH_1 = "<VOTABLE"
CATCH_2 = "</VOTABLE>\n"
# Now going to find the locations of the CATCHes in the wrapped_data
pos_1 = wrapped_data.find(CATCH_1)
pos_2 = wrapped_data.find(CATCH_2)
unwrapped = HEADER + wrapped_data[pos_1:pos_2] + CATCH_2
return unwrapped
|
https://github.com/sunpy/sunpy/issues/2043
|
from sunpy.net.helio import hec
hc = hec.HECClient()
hc.get_table_names()
...
markup_type=markup_type))
a bytes-like object is required, not 'str'
Traceback (most recent call last):
File "/.../github/sunpy/env35/lib/python3.5/site-packages/suds/plugin.py", line 255, in __call__
method(ctx)
File "/.../github/sunpy/sunpy/net/helio/hec.py", line 129, in received
self.last_payload = six.u(suds_unwrapper(context.reply))
File "/.../github/sunpy/sunpy/net/helio/hec.py", line 77, in suds_unwrapper
pos_1 = wrapped_data.find(CATCH_1)
TypeError: a bytes-like object is required, not 'str'
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/.../github/sunpy/sunpy/net/helio/hec.py", line 235, in get_table_names
tables = votable_handler(self.votable_interceptor.last_payload)
File "/.../github/sunpy/sunpy/net/helio/hec.py", line 114, in votable_handler
fake_file.write(xml_table)
TypeError: string argument expected, got 'NoneType'
|
TypeError
|
def votable_handler(xml_table):
"""
Returns a VOtable object from a VOtable style xml string
In order to get a VOtable object, it has to be parsed from an xml file or
file-like object. This function creates a file-like object via the
StringIO module, writes the xml data to it, then passes the file-like
object to parse_single_table() from the astropy.io.votable.table module
and thereby creates a VOtable object.
Parameters
----------
xml_table : str
Contains the VOtable style xml data
Returns
-------
votable : `astropy.io.votable.tree.Table`
A properly formatted VOtable object
Examples
--------
>>> from sunpy.net.helio import hec
>>> temp = hec.suds_unwrapper(xml_string)
>>> type(temp)
unicode
>>> temp = hec.votable_handler(temp)
>>> type(temp)
astropy.io.votable.tree.Table
"""
fake_file = six.BytesIO()
fake_file.write(six.b(xml_table))
votable = parse_single_table(fake_file)
fake_file.close()
return votable
|
def votable_handler(xml_table):
"""
Returns a VOtable object from a VOtable style xml string
In order to get a VOtable object, it has to be parsed from an xml file or
file-like object. This function creates a file-like object via the
StringIO module, writes the xml data to it, then passes the file-like
object to parse_single_table() from the astropy.io.votable.table module
and thereby creates a VOtable object.
Parameters
----------
xml_table : str
Contains the VOtable style xml data
Returns
-------
votable : `astropy.io.votable.tree.Table`
A properly formatted VOtable object
Examples
--------
>>> from sunpy.net.helio import hec
>>> temp = hec.suds_unwrapper(xml_string)
>>> type(temp)
unicode
>>> temp = hec.votable_handler(temp)
>>> type(temp)
astropy.io.votable.tree.Table
"""
fake_file = io.StringIO()
fake_file.write(xml_table)
votable = parse_single_table(fake_file)
fake_file.close()
return votable
|
https://github.com/sunpy/sunpy/issues/2043
|
from sunpy.net.helio import hec
hc = hec.HECClient()
hc.get_table_names()
...
markup_type=markup_type))
a bytes-like object is required, not 'str'
Traceback (most recent call last):
File "/.../github/sunpy/env35/lib/python3.5/site-packages/suds/plugin.py", line 255, in __call__
method(ctx)
File "/.../github/sunpy/sunpy/net/helio/hec.py", line 129, in received
self.last_payload = six.u(suds_unwrapper(context.reply))
File "/.../github/sunpy/sunpy/net/helio/hec.py", line 77, in suds_unwrapper
pos_1 = wrapped_data.find(CATCH_1)
TypeError: a bytes-like object is required, not 'str'
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/.../github/sunpy/sunpy/net/helio/hec.py", line 235, in get_table_names
tables = votable_handler(self.votable_interceptor.last_payload)
File "/.../github/sunpy/sunpy/net/helio/hec.py", line 114, in votable_handler
fake_file.write(xml_table)
TypeError: string argument expected, got 'NoneType'
|
TypeError
|
def __init__(self, data, header, plot_settings=None, **kwargs):
# If the data has more than two dimensions, the first dimensions
# (NAXIS1, NAXIS2) are used and the rest are discarded.
ndim = data.ndim
if ndim > 2:
# We create a slice that removes all but the 'last' two
# dimensions. (Note dimensions in ndarray are in reverse order)
new_2d_slice = [0] * (ndim - 2)
new_2d_slice.extend([slice(None), slice(None)])
data = data[new_2d_slice]
# Warn the user that the data has been truncated
warnings.warn_explicit(
"This file contains more than 2 dimensions. "
"Only the first two dimensions will be used."
" The truncated data will not be saved in a new file.",
Warning,
__file__,
inspect.currentframe().f_back.f_lineno,
)
super(GenericMap, self).__init__(data, meta=header, **kwargs)
# Correct possibly missing meta keywords
self._fix_date()
self._fix_naxis()
# Setup some attributes
self._nickname = None
# Validate header
# TODO: This should be a function of the header, not of the map
self._validate_meta()
self._shift = SpatialPair(0 * u.arcsec, 0 * u.arcsec)
if self.dtype == np.uint8:
norm = None
else:
norm = colors.Normalize()
# Visualization attributes
self.plot_settings = {
"cmap": cm.gray,
"norm": norm,
"interpolation": "nearest",
"origin": "lower",
}
if plot_settings:
self.plot_settings.update(plot_settings)
|
def __init__(self, data, header, plot_settings=None, **kwargs):
super(GenericMap, self).__init__(data, meta=header, **kwargs)
# Correct possibly missing meta keywords
self._fix_date()
self._fix_naxis()
# Setup some attributes
self._nickname = None
# Validate header
# TODO: This should be a function of the header, not of the map
self._validate_meta()
self._shift = SpatialPair(0 * u.arcsec, 0 * u.arcsec)
if self.dtype == np.uint8:
norm = None
else:
norm = colors.Normalize()
# Visualization attributes
self.plot_settings = {
"cmap": cm.gray,
"norm": norm,
"interpolation": "nearest",
"origin": "lower",
}
if plot_settings:
self.plot_settings.update(plot_settings)
|
https://github.com/sunpy/sunpy/issues/1919
|
In [6]: map.Map('1130643840_vv_c076-077_f8-14_t034345_t034444_XX_d002.fits')
Out[6]: ---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
/Users/kamen/anaconda/lib/python2.7/site-packages/IPython/core/formatters.pyc in __call__(self, obj)
697 type_pprinters=self.type_printers,
698 deferred_pprinters=self.deferred_printers)
--> 699 printer.pretty(obj)
700 printer.flush()
701 return stream.getvalue()
/Users/kamen/anaconda/lib/python2.7/site-packages/IPython/lib/pretty.pyc in pretty(self, obj)
381 if callable(meth):
382 return meth(obj, self, cycle)
--> 383 return _default_pprint(obj, self, cycle)
384 finally:
385 self.end_group()
/Users/kamen/anaconda/lib/python2.7/site-packages/IPython/lib/pretty.pyc in _default_pprint(obj, p, cycle)
501 if _safe_getattr(klass, '__repr__', None) not in _baseclass_reprs:
502 # A user-provided repr. Find newlines and replace them with p.break_()
--> 503 _repr_pprint(obj, p, cycle)
504 return
505 p.begin_group(1, '<')
/Users/kamen/anaconda/lib/python2.7/site-packages/IPython/lib/pretty.pyc in _repr_pprint(obj, p, cycle)
692 """A pprint that just redirects to the normal repr function."""
693 # Find newlines and replace them with p.break_()
--> 694 output = repr(obj)
695 for idx,output_line in enumerate(output.splitlines()):
696 if idx:
/Users/kamen/anaconda/lib/python2.7/site-packages/sunpy/map/mapbase.py in __repr__(self)
221 obs=self.observatory, inst=self.instrument, det=self.detector,
222 meas=self.measurement, wave=self.wavelength, date=self.date, dt=self.exposure_time,
--> 223 dim=u.Quantity(self.dimensions),
224 scale=u.Quantity(self.scale),
225 tmf=TIME_FORMAT)
/Users/kamen/anaconda/lib/python2.7/site-packages/sunpy/map/mapbase.py in dimensions(self)
301 The dimensions of the array (x axis first, y axis second).
302 """
--> 303 return Pair(*u.Quantity(np.flipud(self.data.shape), 'pixel'))
304
305 @property
TypeError: __new__() takes exactly 3 arguments (5 given)
|
TypeError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.